Chromium Code Reviews

Unified diff: src/heap.cc

Issue 39973003: Merge bleeding_edge. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: again (created 7 years, 2 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 123 matching lines...)
       sweeping_time_(0.0),
       store_buffer_(this),
       marking_(this),
       incremental_marking_(this),
       number_idle_notifications_(0),
       last_idle_notification_gc_count_(0),
       last_idle_notification_gc_count_init_(false),
       mark_sweeps_since_idle_round_started_(0),
       gc_count_at_last_idle_gc_(0),
       scavenges_since_last_idle_round_(kIdleScavengeThreshold),
+      full_codegen_bytes_generated_(0),
+      crankshaft_codegen_bytes_generated_(0),
       gcs_since_last_deopt_(0),
 #ifdef VERIFY_HEAP
       no_weak_object_verification_scope_depth_(0),
 #endif
       promotion_queue_(this),
       configured_(false),
       chunks_queued_for_free_(NULL),
       relocation_mutex_(NULL) {
   // Allow build-time customization of the max semispace size. Building
   // V8 with snapshots and a non-default max semispace size is much
(...skipping 287 matching lines...)
 
 #ifdef DEBUG
   ASSERT(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);
 
   if (FLAG_gc_verbose) Print();
 
   ReportStatisticsBeforeGC();
 #endif  // DEBUG
 
   store_buffer()->GCPrologue();
+
+  if (FLAG_concurrent_osr) {
+    isolate()->optimizing_compiler_thread()->AgeBufferedOsrJobs();
+  }
 }
 
 
 intptr_t Heap::SizeOfObjects() {
   intptr_t total = 0;
   AllSpaces spaces(this);
   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
     total += space->SizeOfObjects();
   }
   return total;
(...skipping 40 matching lines...)
   }
 
   isolate_->counters()->alive_after_last_gc()->Set(
       static_cast<int>(SizeOfObjects()));
 
   isolate_->counters()->string_table_capacity()->Set(
       string_table()->Capacity());
   isolate_->counters()->number_of_symbols()->Set(
       string_table()->NumberOfElements());
 
+  if (full_codegen_bytes_generated_ + crankshaft_codegen_bytes_generated_ > 0) {
+    isolate_->counters()->codegen_fraction_crankshaft()->AddSample(
+        static_cast<int>((crankshaft_codegen_bytes_generated_ * 100.0) /
+            (crankshaft_codegen_bytes_generated_
+             + full_codegen_bytes_generated_)));
+  }
+
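The sample recorded above is just the Crankshaft share of all code bytes generated since the last report, scaled to an integer percentage in 0..100. A minimal standalone sketch of the same arithmetic (the helper name is illustrative, not part of the patch):

    // E.g. 3 MB of Crankshaft code and 1 MB of full-codegen code samples 75.
    static int CrankshaftFraction(intptr_t crankshaft_bytes, intptr_t full_bytes) {
      return static_cast<int>((crankshaft_bytes * 100.0) /
                              (crankshaft_bytes + full_bytes));
    }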
   if (CommittedMemory() > 0) {
     isolate_->counters()->external_fragmentation_total()->AddSample(
         static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory()));
 
+    isolate_->counters()->heap_fraction_new_space()->
+        AddSample(static_cast<int>(
+            (new_space()->CommittedMemory() * 100.0) / CommittedMemory()));
+    isolate_->counters()->heap_fraction_old_pointer_space()->AddSample(
+        static_cast<int>(
+            (old_pointer_space()->CommittedMemory() * 100.0) /
+            CommittedMemory()));
+    isolate_->counters()->heap_fraction_old_data_space()->AddSample(
+        static_cast<int>(
+            (old_data_space()->CommittedMemory() * 100.0) /
+            CommittedMemory()));
+    isolate_->counters()->heap_fraction_code_space()->
+        AddSample(static_cast<int>(
+            (code_space()->CommittedMemory() * 100.0) / CommittedMemory()));
     isolate_->counters()->heap_fraction_map_space()->AddSample(
         static_cast<int>(
             (map_space()->CommittedMemory() * 100.0) / CommittedMemory()));
     isolate_->counters()->heap_fraction_cell_space()->AddSample(
         static_cast<int>(
             (cell_space()->CommittedMemory() * 100.0) / CommittedMemory()));
     isolate_->counters()->heap_fraction_property_cell_space()->
         AddSample(static_cast<int>(
             (property_cell_space()->CommittedMemory() * 100.0) /
             CommittedMemory()));
+    isolate_->counters()->heap_fraction_lo_space()->
+        AddSample(static_cast<int>(
+            (lo_space()->CommittedMemory() * 100.0) / CommittedMemory()));
 
     isolate_->counters()->heap_sample_total_committed()->AddSample(
         static_cast<int>(CommittedMemory() / KB));
     isolate_->counters()->heap_sample_total_used()->AddSample(
         static_cast<int>(SizeOfObjects() / KB));
     isolate_->counters()->heap_sample_map_space_committed()->AddSample(
         static_cast<int>(map_space()->CommittedMemory() / KB));
     isolate_->counters()->heap_sample_cell_space_committed()->AddSample(
         static_cast<int>(cell_space()->CommittedMemory() / KB));
     isolate_->counters()->
         heap_sample_property_cell_space_committed()->
             AddSample(static_cast<int>(
                 property_cell_space()->CommittedMemory() / KB));
+    isolate_->counters()->heap_sample_code_space_committed()->AddSample(
+        static_cast<int>(code_space()->CommittedMemory() / KB));
   }
 
 #define UPDATE_COUNTERS_FOR_SPACE(space) \
   isolate_->counters()->space##_bytes_available()->Set( \
       static_cast<int>(space()->Available())); \
   isolate_->counters()->space##_bytes_committed()->Set( \
       static_cast<int>(space()->CommittedMemory())); \
   isolate_->counters()->space##_bytes_used()->Set( \
       static_cast<int>(space()->SizeOfObjects()));
 #define UPDATE_FRAGMENTATION_FOR_SPACE(space) \
(...skipping 1402 matching lines...)
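The two counter macros above rely on token pasting (space##_bytes_available) to pick per-space counter accessors. As an illustration of the pattern, UPDATE_COUNTERS_FOR_SPACE(new_space) expands to roughly:

    isolate_->counters()->new_space_bytes_available()->Set(
        static_cast<int>(new_space()->Available()));
    isolate_->counters()->new_space_bytes_committed()->Set(
        static_cast<int>(new_space()->CommittedMemory()));
    isolate_->counters()->new_space_bytes_used()->Set(
        static_cast<int>(new_space()->SizeOfObjects()));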
 
     // Take another spin if there are now unswept objects in new space
     // (there are currently no more unswept promoted objects).
   } while (new_space_front != new_space_.top());
 
   return new_space_front;
 }
 
 
 STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0);
+STATIC_ASSERT((ConstantPoolArray::kHeaderSize & kDoubleAlignmentMask) == 0);
 
 
 INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
                                               HeapObject* object,
                                               int size));
 
 static HeapObject* EnsureDoubleAligned(Heap* heap,
                                        HeapObject* object,
                                        int size) {
   if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
(...skipping 124 matching lines...)
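The body of EnsureDoubleAligned is elided by the viewer here. As a hedged sketch (not the verbatim elided code) of the usual V8 idiom that the constant-pool allocator below relies on: the caller over-allocates one word, and the helper either skips that word at the front when the start is misaligned, or leaves it as filler at the back:

    static HeapObject* EnsureDoubleAligned(Heap* heap,
                                           HeapObject* object,
                                           int size) {
      if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
        // Misaligned start: fill the first word and shift the object up.
        heap->CreateFillerObjectAt(object->address(), kPointerSize);
        return HeapObject::FromAddress(object->address() + kPointerSize);
      } else {
        // Aligned start: the spare word becomes filler at the end.
        heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
                                   kPointerSize);
        return object;
      }
    }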
                                     int size)) {
   // Copy the content of source to target.
   heap->CopyBlock(target->address(), source->address(), size);
 
   // Set the forwarding address.
   source->set_map_word(MapWord::FromForwardingAddress(target));
 
   if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
     // Update NewSpace stats if necessary.
     RecordCopiedObject(heap, target);
-    HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
     Isolate* isolate = heap->isolate();
+    HeapProfiler* heap_profiler = isolate->heap_profiler();
+    if (heap_profiler->is_profiling()) {
+      heap_profiler->ObjectMoveEvent(source->address(), target->address(),
+                                     size);
+    }
     if (isolate->logger()->is_logging_code_events() ||
         isolate->cpu_profiler()->is_profiling()) {
       if (target->IsSharedFunctionInfo()) {
         PROFILE(isolate, SharedFunctionInfoMoveEvent(
             source->address(), target->address()));
       }
     }
   }
 
   if (marks_handling == TRANSFER_MARKS) {
(...skipping 534 matching lines...)
   set_undetectable_ascii_string_map(Map::cast(obj));
   Map::cast(obj)->set_is_undetectable();
 
   { MaybeObject* maybe_obj =
         AllocateMap(FIXED_DOUBLE_ARRAY_TYPE, kVariableSizeSentinel);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_fixed_double_array_map(Map::cast(obj));
 
   { MaybeObject* maybe_obj =
+        AllocateMap(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel);
+    if (!maybe_obj->ToObject(&obj)) return false;
+  }
+  set_constant_pool_array_map(Map::cast(obj));
+
+  { MaybeObject* maybe_obj =
         AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_byte_array_map(Map::cast(obj));
 
   { MaybeObject* maybe_obj =
         AllocateMap(FREE_SPACE_TYPE, kVariableSizeSentinel);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
   set_free_space_map(Map::cast(obj));
(...skipping 243 matching lines...)
   Object* result;
   { MaybeObject* maybe_result = AllocateRawCell();
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   HeapObject::cast(result)->set_map_no_write_barrier(cell_map());
   Cell::cast(result)->set_value(value);
   return result;
 }
 
 
-MaybeObject* Heap::AllocatePropertyCell(Object* value) {
+MaybeObject* Heap::AllocatePropertyCell() {
   Object* result;
   MaybeObject* maybe_result = AllocateRawPropertyCell();
   if (!maybe_result->ToObject(&result)) return maybe_result;
 
   HeapObject::cast(result)->set_map_no_write_barrier(
       global_property_cell_map());
   PropertyCell* cell = PropertyCell::cast(result);
   cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
                            SKIP_WRITE_BARRIER);
-  cell->set_value(value);
+  cell->set_value(the_hole_value());
   cell->set_type(Type::None());
-  maybe_result = cell->SetValueInferType(value);
-  if (maybe_result->IsFailure()) return maybe_result;
   return result;
 }
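With the value parameter gone, a fresh PropertyCell now starts out holding the hole, and callers install the real value themselves. A hedged caller sketch (value is assumed to be in scope), reusing the SetValueInferType call that the removed lines used to make internally:

    PropertyCell* cell;
    MaybeObject* maybe_cell = heap->AllocatePropertyCell();
    if (!maybe_cell->To(&cell)) return maybe_cell;
    MaybeObject* maybe_value = cell->SetValueInferType(value);
    if (maybe_value->IsFailure()) return maybe_value;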
 
 
 MaybeObject* Heap::AllocateBox(Object* value, PretenureFlag pretenure) {
   Box* result;
   MaybeObject* maybe_result = AllocateStruct(BOX_TYPE);
   if (!maybe_result->To(&result)) return maybe_result;
   result->set_value(value);
   return result;
(...skipping 1145 matching lines...)
                            external_pointer);
 
   return result;
 }
 
 
 MaybeObject* Heap::CreateCode(const CodeDesc& desc,
                               Code::Flags flags,
                               Handle<Object> self_reference,
                               bool immovable,
-                              bool crankshafted) {
+                              bool crankshafted,
+                              int prologue_offset) {
   // Allocate the ByteArray before the Code object, so that we do not risk
   // leaving an uninitialized Code object (and breaking the heap).
   ByteArray* reloc_info;
   MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED);
   if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info;
 
   // Compute size.
   int body_size = RoundUp(desc.instr_size, kObjectAlignment);
   int obj_size = Code::SizeFor(body_size);
   ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment));
(...skipping 29 matching lines...)
   code->set_flags(flags);
   if (code->is_call_stub() || code->is_keyed_call_stub()) {
     code->set_check_type(RECEIVER_MAP_CHECK);
   }
   code->set_is_crankshafted(crankshafted);
   code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER);
   code->InitializeTypeFeedbackInfoNoWriteBarrier(undefined_value());
   code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER);
   code->set_gc_metadata(Smi::FromInt(0));
   code->set_ic_age(global_ic_age_);
-  code->set_prologue_offset(kPrologueOffsetNotSet);
+  code->set_prologue_offset(prologue_offset);
   if (code->kind() == Code::OPTIMIZED_FUNCTION) {
     code->set_marked_for_deoptimization(false);
   }
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  if (code->kind() == Code::FUNCTION) {
+    code->set_has_debug_break_slots(
+        isolate_->debugger()->IsDebuggerActive());
+  }
+#endif
+
   // Allow self references to the created code object by patching the handle
   // to point to the newly allocated Code object.
   if (!self_reference.is_null()) {
     *(self_reference.location()) = code;
   }
   // Migrate generated code.
   // The generated code can contain Object** values (typically from handles)
   // that are dereferenced during the copy to point directly to the actual heap
   // objects. These pointers can include references to the code object itself,
   // through the self_reference parameter.
(...skipping 635 matching lines...)
   if (!maybe_result->To<JSFunctionProxy>(&result)) return maybe_result;
   result->InitializeBody(map->instance_size(), Smi::FromInt(0));
   result->set_handler(handler);
   result->set_hash(undefined_value(), SKIP_WRITE_BARRIER);
   result->set_call_trap(call_trap);
   result->set_construct_trap(construct_trap);
   return result;
 }
 
 
-MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
-  ASSERT(constructor->has_initial_map());
-  Map* map = constructor->initial_map();
-  ASSERT(map->is_dictionary_map());
-
-  // Make sure no field properties are described in the initial map.
-  // This guarantees us that normalizing the properties does not
-  // require us to change property values to PropertyCells.
-  ASSERT(map->NextFreePropertyIndex() == 0);
-
-  // Make sure we don't have a ton of pre-allocated slots in the
-  // global objects. They will be unused once we normalize the object.
-  ASSERT(map->unused_property_fields() == 0);
-  ASSERT(map->inobject_properties() == 0);
-
-  // Initial size of the backing store to avoid resize of the storage during
-  // bootstrapping. The size differs between the JS global object and the
-  // builtins object.
-  int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;
-
-  // Allocate a dictionary object for backing storage.
-  NameDictionary* dictionary;
-  MaybeObject* maybe_dictionary =
-      NameDictionary::Allocate(
-          this,
-          map->NumberOfOwnDescriptors() * 2 + initial_size);
-  if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
-
-  // The global object might be created from an object template with accessors.
-  // Fill these accessors into the dictionary.
-  DescriptorArray* descs = map->instance_descriptors();
-  for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
-    PropertyDetails details = descs->GetDetails(i);
-    ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
-    PropertyDetails d = PropertyDetails(details.attributes(), CALLBACKS, i + 1);
-    Object* value = descs->GetCallbacksObject(i);
-    MaybeObject* maybe_value = AllocatePropertyCell(value);
-    if (!maybe_value->ToObject(&value)) return maybe_value;
-
-    MaybeObject* maybe_added = dictionary->Add(descs->GetKey(i), value, d);
-    if (!maybe_added->To(&dictionary)) return maybe_added;
-  }
-
-  // Allocate the global object and initialize it with the backing store.
-  JSObject* global;
-  MaybeObject* maybe_global = Allocate(map, OLD_POINTER_SPACE);
-  if (!maybe_global->To(&global)) return maybe_global;
-
-  InitializeJSObjectFromMap(global, dictionary, map);
-
-  // Create a new map for the global object.
-  Map* new_map;
-  MaybeObject* maybe_map = map->CopyDropDescriptors();
-  if (!maybe_map->To(&new_map)) return maybe_map;
-  new_map->set_dictionary_map(true);
-
-  // Set up the global object as a normalized object.
-  global->set_map(new_map);
-  global->set_properties(dictionary);
-
-  // Make sure result is a global object with properties in dictionary.
-  ASSERT(global->IsGlobalObject());
-  ASSERT(!global->HasFastProperties());
-  return global;
-}
-
-
 MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
   // Never used to copy functions. If functions need to be copied we
   // have to be careful to clear the literals array.
   SLOW_ASSERT(!source->IsJSFunction());
 
   // Make the clone.
   Map* map = source->map();
   int object_size = map->instance_size();
   Object* clone;
 
(...skipping 32 matching lines...)
     CopyBlock(HeapObject::cast(clone)->address(),
               source->address(),
               object_size);
 
     if (site != NULL) {
       AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
           reinterpret_cast<Address>(clone) + object_size);
       alloc_memento->set_map_no_write_barrier(allocation_memento_map());
       ASSERT(site->map() == allocation_site_map());
       alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
+      HeapProfiler* profiler = isolate()->heap_profiler();
+      if (profiler->is_tracking_allocations()) {
+        profiler->UpdateObjectSizeEvent(HeapObject::cast(clone)->address(),
+                                        object_size);
+        profiler->NewObjectEvent(alloc_memento->address(),
+                                 AllocationMemento::kSize);
+      }
     }
   }
 
   SLOW_ASSERT(
       JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
   FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
   FixedArray* properties = FixedArray::cast(source->properties());
   // Update elements if necessary.
   if (elements->length() > 0) {
     Object* elem;
(...skipping 429 matching lines...)
   HeapObject* dst = HeapObject::cast(obj);
   dst->set_map_no_write_barrier(map);
   CopyBlock(
       dst->address() + FixedDoubleArray::kLengthOffset,
       src->address() + FixedDoubleArray::kLengthOffset,
       FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
   return obj;
 }
 
 
+MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src,
+                                                Map* map) {
+  int int64_entries = src->count_of_int64_entries();
+  int ptr_entries = src->count_of_ptr_entries();
+  int int32_entries = src->count_of_int32_entries();
+  Object* obj;
+  { MaybeObject* maybe_obj =
+        AllocateConstantPoolArray(int64_entries, ptr_entries, int32_entries);
+    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+  }
+  HeapObject* dst = HeapObject::cast(obj);
+  dst->set_map_no_write_barrier(map);
+  CopyBlock(
+      dst->address() + ConstantPoolArray::kLengthOffset,
+      src->address() + ConstantPoolArray::kLengthOffset,
+      ConstantPoolArray::SizeFor(int64_entries, ptr_entries, int32_entries)
+          - ConstantPoolArray::kLengthOffset);
+  return obj;
+}
+
+
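A note on the copy bounds in the new function: dst's map word is written separately via set_map_no_write_barrier(map), so CopyBlock starts at kLengthOffset and copies SizeFor(...) minus kLengthOffset bytes, i.e. everything from the length field onward (entry counts and all three entry sections) while leaving the freshly written map intact.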
 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
   if (length < 0 || length > FixedArray::kMaxLength) {
     return Failure::OutOfMemoryException(0xe);
   }
   int size = FixedArray::SizeFor(length);
   AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
 
   return AllocateRaw(size, space, OLD_POINTER_SPACE);
 }
 
(...skipping 111 matching lines...)
 
   HeapObject* object;
   { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
   }
 
   return EnsureDoubleAligned(this, object, size);
 }
 
 
+MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries,
+                                             int number_of_ptr_entries,
+                                             int number_of_int32_entries) {
+  ASSERT(number_of_int64_entries > 0 || number_of_ptr_entries > 0 ||
+         number_of_int32_entries > 0);
+  int size = ConstantPoolArray::SizeFor(number_of_int64_entries,
+                                        number_of_ptr_entries,
+                                        number_of_int32_entries);
+#ifndef V8_HOST_ARCH_64_BIT
+  size += kPointerSize;
+#endif
+  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
+
+  HeapObject* object;
+  { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_POINTER_SPACE);
+    if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
+  }
+  object = EnsureDoubleAligned(this, object, size);
+  HeapObject::cast(object)->set_map_no_write_barrier(constant_pool_array_map());
+
+  ConstantPoolArray* constant_pool =
+      reinterpret_cast<ConstantPoolArray*>(object);
+  constant_pool->SetEntryCounts(number_of_int64_entries,
+                                number_of_ptr_entries,
+                                number_of_int32_entries);
+  MemsetPointer(
+      HeapObject::RawField(
+          constant_pool,
+          constant_pool->OffsetOfElementAt(constant_pool->first_ptr_index())),
+      undefined_value(),
+      number_of_ptr_entries);
+  return constant_pool;
+}
+
+
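The #ifndef V8_HOST_ARCH_64_BIT bump mirrors AllocateRawFixedDoubleArray above: on 32-bit hosts the allocator only guarantees word alignment, so one extra pointer-sized word is requested and EnsureDoubleAligned turns it into a one-word filler either before or after the array, keeping the int64 section 8-byte aligned. The two possible layouts, sketched (assuming a 4-byte word):

    misaligned start:  [filler][ConstantPoolArray ................]
    aligned start:     [ConstantPoolArray ................][filler]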
 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
   Object* result;
   { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(
       hash_table_map());
   ASSERT(result->IsHashTable());
   return result;
 }
(...skipping 1359 matching lines...)
 
 
 MaybeObject* Heap::AddWeakObjectToCodeDependency(Object* obj,
                                                  DependentCode* dep) {
   ASSERT(!InNewSpace(obj));
   ASSERT(!InNewSpace(dep));
   MaybeObject* maybe_obj =
       WeakHashTable::cast(weak_object_to_code_table_)->Put(obj, dep);
   WeakHashTable* table;
   if (!maybe_obj->To(&table)) return maybe_obj;
+  if (ShouldZapGarbage() && weak_object_to_code_table_ != table) {
+    WeakHashTable::cast(weak_object_to_code_table_)->Zap(the_hole_value());
+  }
   set_weak_object_to_code_table(table);
   ASSERT_EQ(dep, WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj));
   return weak_object_to_code_table_;
 }
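The new Zap call appears to be a debugging aid: Put may reallocate the underlying table, and under ShouldZapGarbage() the stale copy is overwritten with the-hole values so that any dangling reference to the old weak_object_to_code_table_ fails loudly instead of silently reading stale entries.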
 
 
 DependentCode* Heap::LookupWeakObjectToCodeDependency(Object* obj) {
   Object* dep = WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj);
   if (dep->IsDependentCode()) return DependentCode::cast(dep);
   return DependentCode::cast(empty_fixed_array());
(...skipping 982 matching lines...)
   counters->count_of_FIXED_ARRAY_##name()->Increment( \
       static_cast<int>(object_counts_[index])); \
   counters->count_of_FIXED_ARRAY_##name()->Decrement( \
       static_cast<int>(object_counts_last_time_[index])); \
   counters->size_of_FIXED_ARRAY_##name()->Increment( \
       static_cast<int>(object_sizes_[index])); \
   counters->size_of_FIXED_ARRAY_##name()->Decrement( \
       static_cast<int>(object_sizes_last_time_[index]));
   FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
 #undef ADJUST_LAST_TIME_OBJECT_COUNT
+#define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
+  index = FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge; \
+  counters->count_of_CODE_AGE_##name()->Increment( \
+      static_cast<int>(object_counts_[index])); \
+  counters->count_of_CODE_AGE_##name()->Decrement( \
+      static_cast<int>(object_counts_last_time_[index])); \
+  counters->size_of_CODE_AGE_##name()->Increment( \
+      static_cast<int>(object_sizes_[index])); \
+  counters->size_of_CODE_AGE_##name()->Decrement( \
+      static_cast<int>(object_sizes_last_time_[index]));
+  CODE_AGE_LIST_WITH_NO_AGE(ADJUST_LAST_TIME_OBJECT_COUNT)
+#undef ADJUST_LAST_TIME_OBJECT_COUNT
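Like the FIXED_ARRAY block above, the new macro is stamped out once per entry of CODE_AGE_LIST_WITH_NO_AGE. For a hypothetical list entry V(Mature) (the name is illustrative, not taken from the patch), the expansion would be roughly:

    index = FIRST_CODE_AGE_SUB_TYPE + Code::kMatureCodeAge;
    counters->count_of_CODE_AGE_Mature()->Increment(
        static_cast<int>(object_counts_[index]));
    counters->count_of_CODE_AGE_Mature()->Decrement(
        static_cast<int>(object_counts_last_time_[index]));
    counters->size_of_CODE_AGE_Mature()->Increment(
        static_cast<int>(object_sizes_[index]));
    counters->size_of_CODE_AGE_Mature()->Decrement(
        static_cast<int>(object_sizes_last_time_[index]));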
 
   OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
   OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
   ClearObjectStats();
 }
 
 
 Heap::RelocationLock::RelocationLock(Heap* heap) : heap_(heap) {
   if (FLAG_concurrent_recompilation) {
     heap_->relocation_mutex_->Lock();
 #ifdef DEBUG
     heap_->relocation_mutex_locked_by_optimizer_thread_ =
         heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread();
 #endif  // DEBUG
   }
 }
 
 } }  // namespace v8::internal
