OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/assembler-inl.h" | 9 #include "src/assembler-inl.h" |
10 #include "src/ast/context-slot-cache.h" | 10 #include "src/ast/context-slot-cache.h" |
(...skipping 2102 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2113 static_cast<double>(old_generation_allocation_limit_) * | 2113 static_cast<double>(old_generation_allocation_limit_) * |
2114 (tracer()->AverageSurvivalRatio() / 100))); | 2114 (tracer()->AverageSurvivalRatio() / 100))); |
2115 } | 2115 } |
2116 } | 2116 } |
2117 | 2117 |
2118 AllocationResult Heap::AllocatePartialMap(InstanceType instance_type, | 2118 AllocationResult Heap::AllocatePartialMap(InstanceType instance_type, |
2119 int instance_size) { | 2119 int instance_size) { |
2120 Object* result = nullptr; | 2120 Object* result = nullptr; |
2121 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE); | 2121 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE); |
2122 if (!allocation.To(&result)) return allocation; | 2122 if (!allocation.To(&result)) return allocation; |
2123 | |
2124 // Map::cast cannot be used due to uninitialized map field. | 2123 // Map::cast cannot be used due to uninitialized map field. |
2125 reinterpret_cast<Map*>(result)->set_map( | 2124 reinterpret_cast<Map*>(result)->set_map_after_allocation( |
2126 reinterpret_cast<Map*>(root(kMetaMapRootIndex))); | 2125 reinterpret_cast<Map*>(root(kMetaMapRootIndex)), SKIP_WRITE_BARRIER); |
2127 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); | 2126 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); |
2128 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); | 2127 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); |
2129 // Initialize to only containing tagged fields. | 2128 // Initialize to only containing tagged fields. |
2130 reinterpret_cast<Map*>(result)->set_visitor_id( | 2129 reinterpret_cast<Map*>(result)->set_visitor_id( |
2131 StaticVisitorBase::GetVisitorId(instance_type, instance_size, false)); | 2130 StaticVisitorBase::GetVisitorId(instance_type, instance_size, false)); |
2132 if (FLAG_unbox_double_fields) { | 2131 if (FLAG_unbox_double_fields) { |
2133 reinterpret_cast<Map*>(result) | 2132 reinterpret_cast<Map*>(result) |
2134 ->set_layout_descriptor(LayoutDescriptor::FastPointerLayout()); | 2133 ->set_layout_descriptor(LayoutDescriptor::FastPointerLayout()); |
2135 } | 2134 } |
2136 reinterpret_cast<Map*>(result)->clear_unused(); | 2135 reinterpret_cast<Map*>(result)->clear_unused(); |
(...skipping 12 matching lines...) Expand all Loading... |
2149 | 2148 |
2150 | 2149 |
2151 AllocationResult Heap::AllocateMap(InstanceType instance_type, | 2150 AllocationResult Heap::AllocateMap(InstanceType instance_type, |
2152 int instance_size, | 2151 int instance_size, |
2153 ElementsKind elements_kind) { | 2152 ElementsKind elements_kind) { |
2154 HeapObject* result = nullptr; | 2153 HeapObject* result = nullptr; |
2155 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE); | 2154 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE); |
2156 if (!allocation.To(&result)) return allocation; | 2155 if (!allocation.To(&result)) return allocation; |
2157 | 2156 |
2158 isolate()->counters()->maps_created()->Increment(); | 2157 isolate()->counters()->maps_created()->Increment(); |
2159 result->set_map_no_write_barrier(meta_map()); | 2158 result->set_map_after_allocation(meta_map(), SKIP_WRITE_BARRIER); |
2160 Map* map = Map::cast(result); | 2159 Map* map = Map::cast(result); |
2161 map->set_instance_type(instance_type); | 2160 map->set_instance_type(instance_type); |
2162 map->set_prototype(null_value(), SKIP_WRITE_BARRIER); | 2161 map->set_prototype(null_value(), SKIP_WRITE_BARRIER); |
2163 map->set_constructor_or_backpointer(null_value(), SKIP_WRITE_BARRIER); | 2162 map->set_constructor_or_backpointer(null_value(), SKIP_WRITE_BARRIER); |
2164 map->set_instance_size(instance_size); | 2163 map->set_instance_size(instance_size); |
2165 map->clear_unused(); | 2164 map->clear_unused(); |
2166 map->set_inobject_properties_or_constructor_function_index(0); | 2165 map->set_inobject_properties_or_constructor_function_index(0); |
2167 map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER); | 2166 map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER); |
2168 map->set_dependent_code(DependentCode::cast(empty_fixed_array()), | 2167 map->set_dependent_code(DependentCode::cast(empty_fixed_array()), |
2169 SKIP_WRITE_BARRIER); | 2168 SKIP_WRITE_BARRIER); |
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2252 | 2251 |
2253 bool Heap::CreateInitialMaps() { | 2252 bool Heap::CreateInitialMaps() { |
2254 HeapObject* obj = nullptr; | 2253 HeapObject* obj = nullptr; |
2255 { | 2254 { |
2256 AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize); | 2255 AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize); |
2257 if (!allocation.To(&obj)) return false; | 2256 if (!allocation.To(&obj)) return false; |
2258 } | 2257 } |
2259 // Map::cast cannot be used due to uninitialized map field. | 2258 // Map::cast cannot be used due to uninitialized map field. |
2260 Map* new_meta_map = reinterpret_cast<Map*>(obj); | 2259 Map* new_meta_map = reinterpret_cast<Map*>(obj); |
2261 set_meta_map(new_meta_map); | 2260 set_meta_map(new_meta_map); |
2262 new_meta_map->set_map(new_meta_map); | 2261 new_meta_map->set_map_after_allocation(new_meta_map); |
2263 | 2262 |
2264 { // Partial map allocation | 2263 { // Partial map allocation |
2265 #define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \ | 2264 #define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \ |
2266 { \ | 2265 { \ |
2267 Map* map; \ | 2266 Map* map; \ |
2268 if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \ | 2267 if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \ |
2269 set_##field_name##_map(map); \ | 2268 set_##field_name##_map(map); \ |
2270 } | 2269 } |
2271 | 2270 |
2272 ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array); | 2271 ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array); |
(...skipping 236 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2509 | 2508 |
2510 AllocationSpace space = SelectSpace(pretenure); | 2509 AllocationSpace space = SelectSpace(pretenure); |
2511 | 2510 |
2512 HeapObject* result = nullptr; | 2511 HeapObject* result = nullptr; |
2513 { | 2512 { |
2514 AllocationResult allocation = AllocateRaw(size, space, kDoubleUnaligned); | 2513 AllocationResult allocation = AllocateRaw(size, space, kDoubleUnaligned); |
2515 if (!allocation.To(&result)) return allocation; | 2514 if (!allocation.To(&result)) return allocation; |
2516 } | 2515 } |
2517 | 2516 |
2518 Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map(); | 2517 Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map(); |
2519 HeapObject::cast(result)->set_map_no_write_barrier(map); | 2518 HeapObject::cast(result)->set_map_after_allocation(map, SKIP_WRITE_BARRIER); |
2520 return result; | 2519 return result; |
2521 } | 2520 } |
2522 | 2521 |
2523 AllocationResult Heap::AllocateCell(Object* value) { | 2522 AllocationResult Heap::AllocateCell(Object* value) { |
2524 int size = Cell::kSize; | 2523 int size = Cell::kSize; |
2525 STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize); | 2524 STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize); |
2526 | 2525 |
2527 HeapObject* result = nullptr; | 2526 HeapObject* result = nullptr; |
2528 { | 2527 { |
2529 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 2528 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); |
2530 if (!allocation.To(&result)) return allocation; | 2529 if (!allocation.To(&result)) return allocation; |
2531 } | 2530 } |
2532 result->set_map_no_write_barrier(cell_map()); | 2531 result->set_map_after_allocation(cell_map(), SKIP_WRITE_BARRIER); |
2533 Cell::cast(result)->set_value(value); | 2532 Cell::cast(result)->set_value(value); |
2534 return result; | 2533 return result; |
2535 } | 2534 } |
2536 | 2535 |
2537 AllocationResult Heap::AllocatePropertyCell() { | 2536 AllocationResult Heap::AllocatePropertyCell() { |
2538 int size = PropertyCell::kSize; | 2537 int size = PropertyCell::kSize; |
2539 STATIC_ASSERT(PropertyCell::kSize <= kMaxRegularHeapObjectSize); | 2538 STATIC_ASSERT(PropertyCell::kSize <= kMaxRegularHeapObjectSize); |
2540 | 2539 |
2541 HeapObject* result = nullptr; | 2540 HeapObject* result = nullptr; |
2542 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 2541 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); |
2543 if (!allocation.To(&result)) return allocation; | 2542 if (!allocation.To(&result)) return allocation; |
2544 | 2543 |
2545 result->set_map_no_write_barrier(global_property_cell_map()); | 2544 result->set_map_after_allocation(global_property_cell_map(), |
| 2545 SKIP_WRITE_BARRIER); |
2546 PropertyCell* cell = PropertyCell::cast(result); | 2546 PropertyCell* cell = PropertyCell::cast(result); |
2547 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), | 2547 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), |
2548 SKIP_WRITE_BARRIER); | 2548 SKIP_WRITE_BARRIER); |
2549 cell->set_property_details(PropertyDetails(Smi::kZero)); | 2549 cell->set_property_details(PropertyDetails(Smi::kZero)); |
2550 cell->set_value(the_hole_value()); | 2550 cell->set_value(the_hole_value()); |
2551 return result; | 2551 return result; |
2552 } | 2552 } |
2553 | 2553 |
2554 | 2554 |
2555 AllocationResult Heap::AllocateWeakCell(HeapObject* value) { | 2555 AllocationResult Heap::AllocateWeakCell(HeapObject* value) { |
2556 int size = WeakCell::kSize; | 2556 int size = WeakCell::kSize; |
2557 STATIC_ASSERT(WeakCell::kSize <= kMaxRegularHeapObjectSize); | 2557 STATIC_ASSERT(WeakCell::kSize <= kMaxRegularHeapObjectSize); |
2558 HeapObject* result = nullptr; | 2558 HeapObject* result = nullptr; |
2559 { | 2559 { |
2560 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 2560 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); |
2561 if (!allocation.To(&result)) return allocation; | 2561 if (!allocation.To(&result)) return allocation; |
2562 } | 2562 } |
2563 result->set_map_no_write_barrier(weak_cell_map()); | 2563 result->set_map_after_allocation(weak_cell_map(), SKIP_WRITE_BARRIER); |
2564 WeakCell::cast(result)->initialize(value); | 2564 WeakCell::cast(result)->initialize(value); |
2565 WeakCell::cast(result)->clear_next(the_hole_value()); | 2565 WeakCell::cast(result)->clear_next(the_hole_value()); |
2566 return result; | 2566 return result; |
2567 } | 2567 } |
2568 | 2568 |
2569 | 2569 |
2570 AllocationResult Heap::AllocateTransitionArray(int capacity) { | 2570 AllocationResult Heap::AllocateTransitionArray(int capacity) { |
2571 DCHECK(capacity > 0); | 2571 DCHECK(capacity > 0); |
2572 HeapObject* raw_array = nullptr; | 2572 HeapObject* raw_array = nullptr; |
2573 { | 2573 { |
2574 AllocationResult allocation = AllocateRawFixedArray(capacity, TENURED); | 2574 AllocationResult allocation = AllocateRawFixedArray(capacity, TENURED); |
2575 if (!allocation.To(&raw_array)) return allocation; | 2575 if (!allocation.To(&raw_array)) return allocation; |
2576 } | 2576 } |
2577 raw_array->set_map_no_write_barrier(transition_array_map()); | 2577 raw_array->set_map_after_allocation(transition_array_map(), |
| 2578 SKIP_WRITE_BARRIER); |
2578 TransitionArray* array = TransitionArray::cast(raw_array); | 2579 TransitionArray* array = TransitionArray::cast(raw_array); |
2579 array->set_length(capacity); | 2580 array->set_length(capacity); |
2580 MemsetPointer(array->data_start(), undefined_value(), capacity); | 2581 MemsetPointer(array->data_start(), undefined_value(), capacity); |
2581 // Transition arrays are tenured. When black allocation is on we have to | 2582 // Transition arrays are tenured. When black allocation is on we have to |
2582 // add the transition array to the list of encountered_transition_arrays. | 2583 // add the transition array to the list of encountered_transition_arrays. |
2583 if (incremental_marking()->black_allocation()) { | 2584 if (incremental_marking()->black_allocation()) { |
2584 array->set_next_link(encountered_transition_arrays(), | 2585 array->set_next_link(encountered_transition_arrays(), |
2585 UPDATE_WEAK_WRITE_BARRIER); | 2586 UPDATE_WEAK_WRITE_BARRIER); |
2586 set_encountered_transition_arrays(array); | 2587 set_encountered_transition_arrays(array); |
2587 } else { | 2588 } else { |
(...skipping 202 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2790 | 2791 |
2791 set_undefined_cell(*factory->NewCell(factory->undefined_value())); | 2792 set_undefined_cell(*factory->NewCell(factory->undefined_value())); |
2792 | 2793 |
2793 // Microtask queue uses the empty fixed array as a sentinel for "empty". | 2794 // Microtask queue uses the empty fixed array as a sentinel for "empty". |
2794 // Number of queued microtasks stored in Isolate::pending_microtask_count(). | 2795 // Number of queued microtasks stored in Isolate::pending_microtask_count(). |
2795 set_microtask_queue(empty_fixed_array()); | 2796 set_microtask_queue(empty_fixed_array()); |
2796 | 2797 |
2797 { | 2798 { |
2798 Handle<FixedArray> empty_sloppy_arguments_elements = | 2799 Handle<FixedArray> empty_sloppy_arguments_elements = |
2799 factory->NewFixedArray(2, TENURED); | 2800 factory->NewFixedArray(2, TENURED); |
2800 empty_sloppy_arguments_elements->set_map(sloppy_arguments_elements_map()); | 2801 empty_sloppy_arguments_elements->set_map_after_allocation( |
| 2802 sloppy_arguments_elements_map(), SKIP_WRITE_BARRIER); |
2801 set_empty_sloppy_arguments_elements(*empty_sloppy_arguments_elements); | 2803 set_empty_sloppy_arguments_elements(*empty_sloppy_arguments_elements); |
2802 } | 2804 } |
2803 | 2805 |
2804 { | 2806 { |
2805 Handle<WeakCell> cell = factory->NewWeakCell(factory->undefined_value()); | 2807 Handle<WeakCell> cell = factory->NewWeakCell(factory->undefined_value()); |
2806 set_empty_weak_cell(*cell); | 2808 set_empty_weak_cell(*cell); |
2807 cell->clear(); | 2809 cell->clear(); |
2808 } | 2810 } |
2809 | 2811 |
2810 set_detached_contexts(empty_fixed_array()); | 2812 set_detached_contexts(empty_fixed_array()); |
(...skipping 252 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3063 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); | 3065 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); |
3064 } | 3066 } |
3065 int size = ByteArray::SizeFor(length); | 3067 int size = ByteArray::SizeFor(length); |
3066 AllocationSpace space = SelectSpace(pretenure); | 3068 AllocationSpace space = SelectSpace(pretenure); |
3067 HeapObject* result = nullptr; | 3069 HeapObject* result = nullptr; |
3068 { | 3070 { |
3069 AllocationResult allocation = AllocateRaw(size, space); | 3071 AllocationResult allocation = AllocateRaw(size, space); |
3070 if (!allocation.To(&result)) return allocation; | 3072 if (!allocation.To(&result)) return allocation; |
3071 } | 3073 } |
3072 | 3074 |
3073 result->set_map_no_write_barrier(byte_array_map()); | 3075 result->set_map_after_allocation(byte_array_map(), SKIP_WRITE_BARRIER); |
3074 ByteArray::cast(result)->set_length(length); | 3076 ByteArray::cast(result)->set_length(length); |
3075 return result; | 3077 return result; |
3076 } | 3078 } |
3077 | 3079 |
3078 | 3080 |
3079 AllocationResult Heap::AllocateBytecodeArray(int length, | 3081 AllocationResult Heap::AllocateBytecodeArray(int length, |
3080 const byte* const raw_bytecodes, | 3082 const byte* const raw_bytecodes, |
3081 int frame_size, | 3083 int frame_size, |
3082 int parameter_count, | 3084 int parameter_count, |
3083 FixedArray* constant_pool) { | 3085 FixedArray* constant_pool) { |
3084 if (length < 0 || length > BytecodeArray::kMaxLength) { | 3086 if (length < 0 || length > BytecodeArray::kMaxLength) { |
3085 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); | 3087 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); |
3086 } | 3088 } |
3087 // Bytecode array is pretenured, so constant pool array should be to. | 3089 // Bytecode array is pretenured, so constant pool array should be to. |
3088 DCHECK(!InNewSpace(constant_pool)); | 3090 DCHECK(!InNewSpace(constant_pool)); |
3089 | 3091 |
3090 int size = BytecodeArray::SizeFor(length); | 3092 int size = BytecodeArray::SizeFor(length); |
3091 HeapObject* result = nullptr; | 3093 HeapObject* result = nullptr; |
3092 { | 3094 { |
3093 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 3095 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); |
3094 if (!allocation.To(&result)) return allocation; | 3096 if (!allocation.To(&result)) return allocation; |
3095 } | 3097 } |
3096 | 3098 |
3097 result->set_map_no_write_barrier(bytecode_array_map()); | 3099 result->set_map_after_allocation(bytecode_array_map(), SKIP_WRITE_BARRIER); |
3098 BytecodeArray* instance = BytecodeArray::cast(result); | 3100 BytecodeArray* instance = BytecodeArray::cast(result); |
3099 instance->set_length(length); | 3101 instance->set_length(length); |
3100 instance->set_frame_size(frame_size); | 3102 instance->set_frame_size(frame_size); |
3101 instance->set_parameter_count(parameter_count); | 3103 instance->set_parameter_count(parameter_count); |
3102 instance->set_interrupt_budget(interpreter::Interpreter::InterruptBudget()); | 3104 instance->set_interrupt_budget(interpreter::Interpreter::InterruptBudget()); |
3103 instance->set_osr_loop_nesting_level(0); | 3105 instance->set_osr_loop_nesting_level(0); |
3104 instance->set_bytecode_age(BytecodeArray::kNoAgeBytecodeAge); | 3106 instance->set_bytecode_age(BytecodeArray::kNoAgeBytecodeAge); |
3105 instance->set_constant_pool(constant_pool); | 3107 instance->set_constant_pool(constant_pool); |
3106 instance->set_handler_table(empty_fixed_array()); | 3108 instance->set_handler_table(empty_fixed_array()); |
3107 instance->set_source_position_table(empty_byte_array()); | 3109 instance->set_source_position_table(empty_byte_array()); |
3108 CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length); | 3110 CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length); |
3109 | 3111 |
3110 return result; | 3112 return result; |
3111 } | 3113 } |
3112 | 3114 |
3113 HeapObject* Heap::CreateFillerObjectAt(Address addr, int size, | 3115 HeapObject* Heap::CreateFillerObjectAt(Address addr, int size, |
3114 ClearRecordedSlots mode) { | 3116 ClearRecordedSlots mode) { |
3115 if (size == 0) return nullptr; | 3117 if (size == 0) return nullptr; |
3116 HeapObject* filler = HeapObject::FromAddress(addr); | 3118 HeapObject* filler = HeapObject::FromAddress(addr); |
3117 if (size == kPointerSize) { | 3119 if (size == kPointerSize) { |
3118 filler->set_map_no_write_barrier( | 3120 filler->set_map_after_allocation( |
3119 reinterpret_cast<Map*>(root(kOnePointerFillerMapRootIndex))); | 3121 reinterpret_cast<Map*>(root(kOnePointerFillerMapRootIndex)), |
| 3122 SKIP_WRITE_BARRIER); |
3120 } else if (size == 2 * kPointerSize) { | 3123 } else if (size == 2 * kPointerSize) { |
3121 filler->set_map_no_write_barrier( | 3124 filler->set_map_after_allocation( |
3122 reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex))); | 3125 reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex)), |
| 3126 SKIP_WRITE_BARRIER); |
3123 } else { | 3127 } else { |
3124 DCHECK_GT(size, 2 * kPointerSize); | 3128 DCHECK_GT(size, 2 * kPointerSize); |
3125 filler->set_map_no_write_barrier( | 3129 filler->set_map_after_allocation( |
3126 reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex))); | 3130 reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex)), |
| 3131 SKIP_WRITE_BARRIER); |
3127 FreeSpace::cast(filler)->nobarrier_set_size(size); | 3132 FreeSpace::cast(filler)->nobarrier_set_size(size); |
3128 } | 3133 } |
3129 if (mode == ClearRecordedSlots::kYes) { | 3134 if (mode == ClearRecordedSlots::kYes) { |
3130 ClearRecordedSlotRange(addr, addr + size); | 3135 ClearRecordedSlotRange(addr, addr + size); |
3131 } | 3136 } |
3132 | 3137 |
3133 // At this point, we may be deserializing the heap from a snapshot, and | 3138 // At this point, we may be deserializing the heap from a snapshot, and |
3134 // none of the maps have been created yet and are NULL. | 3139 // none of the maps have been created yet and are NULL. |
3135 DCHECK((filler->map() == NULL && !deserialization_complete_) || | 3140 DCHECK((filler->map() == NULL && !deserialization_complete_) || |
3136 filler->map()->IsMap()); | 3141 filler->map()->IsMap()); |
(...skipping 180 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3317 int length, ExternalArrayType array_type, void* external_pointer, | 3322 int length, ExternalArrayType array_type, void* external_pointer, |
3318 PretenureFlag pretenure) { | 3323 PretenureFlag pretenure) { |
3319 int size = FixedTypedArrayBase::kHeaderSize; | 3324 int size = FixedTypedArrayBase::kHeaderSize; |
3320 AllocationSpace space = SelectSpace(pretenure); | 3325 AllocationSpace space = SelectSpace(pretenure); |
3321 HeapObject* result = nullptr; | 3326 HeapObject* result = nullptr; |
3322 { | 3327 { |
3323 AllocationResult allocation = AllocateRaw(size, space); | 3328 AllocationResult allocation = AllocateRaw(size, space); |
3324 if (!allocation.To(&result)) return allocation; | 3329 if (!allocation.To(&result)) return allocation; |
3325 } | 3330 } |
3326 | 3331 |
3327 result->set_map_no_write_barrier(MapForFixedTypedArray(array_type)); | 3332 result->set_map_after_allocation(MapForFixedTypedArray(array_type), |
| 3333 SKIP_WRITE_BARRIER); |
3328 FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(result); | 3334 FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(result); |
3329 elements->set_base_pointer(Smi::kZero, SKIP_WRITE_BARRIER); | 3335 elements->set_base_pointer(Smi::kZero, SKIP_WRITE_BARRIER); |
3330 elements->set_external_pointer(external_pointer, SKIP_WRITE_BARRIER); | 3336 elements->set_external_pointer(external_pointer, SKIP_WRITE_BARRIER); |
3331 elements->set_length(length); | 3337 elements->set_length(length); |
3332 return elements; | 3338 return elements; |
3333 } | 3339 } |
3334 | 3340 |
3335 static void ForFixedTypedArray(ExternalArrayType array_type, int* element_size, | 3341 static void ForFixedTypedArray(ExternalArrayType array_type, int* element_size, |
3336 ElementsKind* element_kind) { | 3342 ElementsKind* element_kind) { |
3337 switch (array_type) { | 3343 switch (array_type) { |
(...skipping 24 matching lines...) Expand all Loading... |
3362 int size = OBJECT_POINTER_ALIGN(length * element_size + | 3368 int size = OBJECT_POINTER_ALIGN(length * element_size + |
3363 FixedTypedArrayBase::kDataOffset); | 3369 FixedTypedArrayBase::kDataOffset); |
3364 AllocationSpace space = SelectSpace(pretenure); | 3370 AllocationSpace space = SelectSpace(pretenure); |
3365 | 3371 |
3366 HeapObject* object = nullptr; | 3372 HeapObject* object = nullptr; |
3367 AllocationResult allocation = AllocateRaw( | 3373 AllocationResult allocation = AllocateRaw( |
3368 size, space, | 3374 size, space, |
3369 array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned); | 3375 array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned); |
3370 if (!allocation.To(&object)) return allocation; | 3376 if (!allocation.To(&object)) return allocation; |
3371 | 3377 |
3372 object->set_map_no_write_barrier(MapForFixedTypedArray(array_type)); | 3378 object->set_map_after_allocation(MapForFixedTypedArray(array_type), |
| 3379 SKIP_WRITE_BARRIER); |
3373 FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object); | 3380 FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object); |
3374 elements->set_base_pointer(elements, SKIP_WRITE_BARRIER); | 3381 elements->set_base_pointer(elements, SKIP_WRITE_BARRIER); |
3375 elements->set_external_pointer( | 3382 elements->set_external_pointer( |
3376 ExternalReference::fixed_typed_array_base_data_offset().address(), | 3383 ExternalReference::fixed_typed_array_base_data_offset().address(), |
3377 SKIP_WRITE_BARRIER); | 3384 SKIP_WRITE_BARRIER); |
3378 elements->set_length(length); | 3385 elements->set_length(length); |
3379 if (initialize) memset(elements->DataPtr(), 0, elements->DataSize()); | 3386 if (initialize) memset(elements->DataPtr(), 0, elements->DataSize()); |
3380 return elements; | 3387 return elements; |
3381 } | 3388 } |
3382 | 3389 |
(...skipping 20 matching lines...) Expand all Loading... |
3403 // be moved. | 3410 // be moved. |
3404 CreateFillerObjectAt(result->address(), object_size, | 3411 CreateFillerObjectAt(result->address(), object_size, |
3405 ClearRecordedSlots::kNo); | 3412 ClearRecordedSlots::kNo); |
3406 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); | 3413 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); |
3407 if (!allocation.To(&result)) return allocation; | 3414 if (!allocation.To(&result)) return allocation; |
3408 OnAllocationEvent(result, object_size); | 3415 OnAllocationEvent(result, object_size); |
3409 } | 3416 } |
3410 } | 3417 } |
3411 } | 3418 } |
3412 | 3419 |
3413 result->set_map_no_write_barrier(code_map()); | 3420 result->set_map_after_allocation(code_map(), SKIP_WRITE_BARRIER); |
3414 Code* code = Code::cast(result); | 3421 Code* code = Code::cast(result); |
3415 DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment)); | 3422 DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment)); |
3416 DCHECK(!memory_allocator()->code_range()->valid() || | 3423 DCHECK(!memory_allocator()->code_range()->valid() || |
3417 memory_allocator()->code_range()->contains(code->address()) || | 3424 memory_allocator()->code_range()->contains(code->address()) || |
3418 object_size <= code_space()->AreaSize()); | 3425 object_size <= code_space()->AreaSize()); |
3419 code->set_gc_metadata(Smi::kZero); | 3426 code->set_gc_metadata(Smi::kZero); |
3420 code->set_ic_age(global_ic_age_); | 3427 code->set_ic_age(global_ic_age_); |
3421 return code; | 3428 return code; |
3422 } | 3429 } |
3423 | 3430 |
(...skipping 28 matching lines...) Expand all Loading... |
3452 } | 3459 } |
3453 | 3460 |
3454 AllocationResult Heap::CopyBytecodeArray(BytecodeArray* bytecode_array) { | 3461 AllocationResult Heap::CopyBytecodeArray(BytecodeArray* bytecode_array) { |
3455 int size = BytecodeArray::SizeFor(bytecode_array->length()); | 3462 int size = BytecodeArray::SizeFor(bytecode_array->length()); |
3456 HeapObject* result = nullptr; | 3463 HeapObject* result = nullptr; |
3457 { | 3464 { |
3458 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 3465 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); |
3459 if (!allocation.To(&result)) return allocation; | 3466 if (!allocation.To(&result)) return allocation; |
3460 } | 3467 } |
3461 | 3468 |
3462 result->set_map_no_write_barrier(bytecode_array_map()); | 3469 result->set_map_after_allocation(bytecode_array_map(), SKIP_WRITE_BARRIER); |
3463 BytecodeArray* copy = BytecodeArray::cast(result); | 3470 BytecodeArray* copy = BytecodeArray::cast(result); |
3464 copy->set_length(bytecode_array->length()); | 3471 copy->set_length(bytecode_array->length()); |
3465 copy->set_frame_size(bytecode_array->frame_size()); | 3472 copy->set_frame_size(bytecode_array->frame_size()); |
3466 copy->set_parameter_count(bytecode_array->parameter_count()); | 3473 copy->set_parameter_count(bytecode_array->parameter_count()); |
3467 copy->set_constant_pool(bytecode_array->constant_pool()); | 3474 copy->set_constant_pool(bytecode_array->constant_pool()); |
3468 copy->set_handler_table(bytecode_array->handler_table()); | 3475 copy->set_handler_table(bytecode_array->handler_table()); |
3469 copy->set_source_position_table(bytecode_array->source_position_table()); | 3476 copy->set_source_position_table(bytecode_array->source_position_table()); |
3470 copy->set_interrupt_budget(bytecode_array->interrupt_budget()); | 3477 copy->set_interrupt_budget(bytecode_array->interrupt_budget()); |
3471 copy->set_osr_loop_nesting_level(bytecode_array->osr_loop_nesting_level()); | 3478 copy->set_osr_loop_nesting_level(bytecode_array->osr_loop_nesting_level()); |
3472 copy->set_bytecode_age(bytecode_array->bytecode_age()); | 3479 copy->set_bytecode_age(bytecode_array->bytecode_age()); |
3473 bytecode_array->CopyBytecodesTo(copy); | 3480 bytecode_array->CopyBytecodesTo(copy); |
3474 return copy; | 3481 return copy; |
3475 } | 3482 } |
3476 | 3483 |
3477 void Heap::InitializeAllocationMemento(AllocationMemento* memento, | 3484 void Heap::InitializeAllocationMemento(AllocationMemento* memento, |
3478 AllocationSite* allocation_site) { | 3485 AllocationSite* allocation_site) { |
3479 memento->set_map_no_write_barrier(allocation_memento_map()); | 3486 memento->set_map_after_allocation(allocation_memento_map(), |
| 3487 SKIP_WRITE_BARRIER); |
3480 DCHECK(allocation_site->map() == allocation_site_map()); | 3488 DCHECK(allocation_site->map() == allocation_site_map()); |
3481 memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER); | 3489 memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER); |
3482 if (FLAG_allocation_site_pretenuring) { | 3490 if (FLAG_allocation_site_pretenuring) { |
3483 allocation_site->IncrementMementoCreateCount(); | 3491 allocation_site->IncrementMementoCreateCount(); |
3484 } | 3492 } |
3485 } | 3493 } |
3486 | 3494 |
3487 | 3495 |
3488 AllocationResult Heap::Allocate(Map* map, AllocationSpace space, | 3496 AllocationResult Heap::Allocate(Map* map, AllocationSpace space, |
3489 AllocationSite* allocation_site) { | 3497 AllocationSite* allocation_site) { |
3490 DCHECK(gc_state_ == NOT_IN_GC); | 3498 DCHECK(gc_state_ == NOT_IN_GC); |
3491 DCHECK(map->instance_type() != MAP_TYPE); | 3499 DCHECK(map->instance_type() != MAP_TYPE); |
3492 int size = map->instance_size(); | 3500 int size = map->instance_size(); |
3493 if (allocation_site != NULL) { | 3501 if (allocation_site != NULL) { |
3494 size += AllocationMemento::kSize; | 3502 size += AllocationMemento::kSize; |
3495 } | 3503 } |
3496 HeapObject* result = nullptr; | 3504 HeapObject* result = nullptr; |
3497 AllocationResult allocation = AllocateRaw(size, space); | 3505 AllocationResult allocation = AllocateRaw(size, space); |
3498 if (!allocation.To(&result)) return allocation; | 3506 if (!allocation.To(&result)) return allocation; |
3499 // No need for write barrier since object is white and map is in old space. | 3507 // No need for write barrier since object is white and map is in old space. |
3500 result->set_map_no_write_barrier(map); | 3508 result->set_map_after_allocation(map, SKIP_WRITE_BARRIER); |
3501 if (allocation_site != NULL) { | 3509 if (allocation_site != NULL) { |
3502 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | 3510 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
3503 reinterpret_cast<Address>(result) + map->instance_size()); | 3511 reinterpret_cast<Address>(result) + map->instance_size()); |
3504 InitializeAllocationMemento(alloc_memento, allocation_site); | 3512 InitializeAllocationMemento(alloc_memento, allocation_site); |
3505 } | 3513 } |
3506 return result; | 3514 return result; |
3507 } | 3515 } |
3508 | 3516 |
3509 | 3517 |
3510 void Heap::InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties, | 3518 void Heap::InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties, |
(...skipping 215 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3726 size = SeqTwoByteString::SizeFor(chars); | 3734 size = SeqTwoByteString::SizeFor(chars); |
3727 } | 3735 } |
3728 | 3736 |
3729 // Allocate string. | 3737 // Allocate string. |
3730 HeapObject* result = nullptr; | 3738 HeapObject* result = nullptr; |
3731 { | 3739 { |
3732 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 3740 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); |
3733 if (!allocation.To(&result)) return allocation; | 3741 if (!allocation.To(&result)) return allocation; |
3734 } | 3742 } |
3735 | 3743 |
3736 result->set_map_no_write_barrier(map); | 3744 result->set_map_after_allocation(map, SKIP_WRITE_BARRIER); |
3737 // Set length and hash fields of the allocated string. | 3745 // Set length and hash fields of the allocated string. |
3738 String* answer = String::cast(result); | 3746 String* answer = String::cast(result); |
3739 answer->set_length(chars); | 3747 answer->set_length(chars); |
3740 answer->set_hash_field(hash_field); | 3748 answer->set_hash_field(hash_field); |
3741 | 3749 |
3742 DCHECK_EQ(size, answer->Size()); | 3750 DCHECK_EQ(size, answer->Size()); |
3743 | 3751 |
3744 if (is_one_byte) { | 3752 if (is_one_byte) { |
3745 WriteOneByteData(t, SeqOneByteString::cast(answer)->GetChars(), chars); | 3753 WriteOneByteData(t, SeqOneByteString::cast(answer)->GetChars(), chars); |
3746 } else { | 3754 } else { |
(...skipping 22 matching lines...) Expand all Loading... |
3769 DCHECK(size <= SeqOneByteString::kMaxSize); | 3777 DCHECK(size <= SeqOneByteString::kMaxSize); |
3770 AllocationSpace space = SelectSpace(pretenure); | 3778 AllocationSpace space = SelectSpace(pretenure); |
3771 | 3779 |
3772 HeapObject* result = nullptr; | 3780 HeapObject* result = nullptr; |
3773 { | 3781 { |
3774 AllocationResult allocation = AllocateRaw(size, space); | 3782 AllocationResult allocation = AllocateRaw(size, space); |
3775 if (!allocation.To(&result)) return allocation; | 3783 if (!allocation.To(&result)) return allocation; |
3776 } | 3784 } |
3777 | 3785 |
3778 // Partially initialize the object. | 3786 // Partially initialize the object. |
3779 result->set_map_no_write_barrier(one_byte_string_map()); | 3787 result->set_map_after_allocation(one_byte_string_map(), SKIP_WRITE_BARRIER); |
3780 String::cast(result)->set_length(length); | 3788 String::cast(result)->set_length(length); |
3781 String::cast(result)->set_hash_field(String::kEmptyHashField); | 3789 String::cast(result)->set_hash_field(String::kEmptyHashField); |
3782 DCHECK_EQ(size, HeapObject::cast(result)->Size()); | 3790 DCHECK_EQ(size, HeapObject::cast(result)->Size()); |
3783 | 3791 |
3784 return result; | 3792 return result; |
3785 } | 3793 } |
3786 | 3794 |
3787 | 3795 |
3788 AllocationResult Heap::AllocateRawTwoByteString(int length, | 3796 AllocationResult Heap::AllocateRawTwoByteString(int length, |
3789 PretenureFlag pretenure) { | 3797 PretenureFlag pretenure) { |
3790 DCHECK_LE(0, length); | 3798 DCHECK_LE(0, length); |
3791 DCHECK_GE(String::kMaxLength, length); | 3799 DCHECK_GE(String::kMaxLength, length); |
3792 int size = SeqTwoByteString::SizeFor(length); | 3800 int size = SeqTwoByteString::SizeFor(length); |
3793 DCHECK(size <= SeqTwoByteString::kMaxSize); | 3801 DCHECK(size <= SeqTwoByteString::kMaxSize); |
3794 AllocationSpace space = SelectSpace(pretenure); | 3802 AllocationSpace space = SelectSpace(pretenure); |
3795 | 3803 |
3796 HeapObject* result = nullptr; | 3804 HeapObject* result = nullptr; |
3797 { | 3805 { |
3798 AllocationResult allocation = AllocateRaw(size, space); | 3806 AllocationResult allocation = AllocateRaw(size, space); |
3799 if (!allocation.To(&result)) return allocation; | 3807 if (!allocation.To(&result)) return allocation; |
3800 } | 3808 } |
3801 | 3809 |
3802 // Partially initialize the object. | 3810 // Partially initialize the object. |
3803 result->set_map_no_write_barrier(string_map()); | 3811 result->set_map_after_allocation(string_map(), SKIP_WRITE_BARRIER); |
3804 String::cast(result)->set_length(length); | 3812 String::cast(result)->set_length(length); |
3805 String::cast(result)->set_hash_field(String::kEmptyHashField); | 3813 String::cast(result)->set_hash_field(String::kEmptyHashField); |
3806 DCHECK_EQ(size, HeapObject::cast(result)->Size()); | 3814 DCHECK_EQ(size, HeapObject::cast(result)->Size()); |
3807 return result; | 3815 return result; |
3808 } | 3816 } |
3809 | 3817 |
3810 | 3818 |
3811 AllocationResult Heap::AllocateEmptyFixedArray() { | 3819 AllocationResult Heap::AllocateEmptyFixedArray() { |
3812 int size = FixedArray::SizeFor(0); | 3820 int size = FixedArray::SizeFor(0); |
3813 HeapObject* result = nullptr; | 3821 HeapObject* result = nullptr; |
3814 { | 3822 { |
3815 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 3823 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); |
3816 if (!allocation.To(&result)) return allocation; | 3824 if (!allocation.To(&result)) return allocation; |
3817 } | 3825 } |
3818 // Initialize the object. | 3826 // Initialize the object. |
3819 result->set_map_no_write_barrier(fixed_array_map()); | 3827 result->set_map_after_allocation(fixed_array_map(), SKIP_WRITE_BARRIER); |
3820 FixedArray::cast(result)->set_length(0); | 3828 FixedArray::cast(result)->set_length(0); |
3821 return result; | 3829 return result; |
3822 } | 3830 } |
3823 | 3831 |
3824 AllocationResult Heap::AllocateEmptyScopeInfo() { | 3832 AllocationResult Heap::AllocateEmptyScopeInfo() { |
3825 int size = FixedArray::SizeFor(0); | 3833 int size = FixedArray::SizeFor(0); |
3826 HeapObject* result = nullptr; | 3834 HeapObject* result = nullptr; |
3827 { | 3835 { |
3828 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 3836 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); |
3829 if (!allocation.To(&result)) return allocation; | 3837 if (!allocation.To(&result)) return allocation; |
3830 } | 3838 } |
3831 // Initialize the object. | 3839 // Initialize the object. |
3832 result->set_map_no_write_barrier(scope_info_map()); | 3840 result->set_map_after_allocation(scope_info_map(), SKIP_WRITE_BARRIER); |
3833 FixedArray::cast(result)->set_length(0); | 3841 FixedArray::cast(result)->set_length(0); |
3834 return result; | 3842 return result; |
3835 } | 3843 } |
3836 | 3844 |
3837 AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { | 3845 AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { |
3838 if (!InNewSpace(src)) { | 3846 if (!InNewSpace(src)) { |
3839 return src; | 3847 return src; |
3840 } | 3848 } |
3841 | 3849 |
3842 int len = src->length(); | 3850 int len = src->length(); |
3843 HeapObject* obj = nullptr; | 3851 HeapObject* obj = nullptr; |
3844 { | 3852 { |
3845 AllocationResult allocation = AllocateRawFixedArray(len, TENURED); | 3853 AllocationResult allocation = AllocateRawFixedArray(len, TENURED); |
3846 if (!allocation.To(&obj)) return allocation; | 3854 if (!allocation.To(&obj)) return allocation; |
3847 } | 3855 } |
3848 obj->set_map_no_write_barrier(fixed_array_map()); | 3856 obj->set_map_after_allocation(fixed_array_map(), SKIP_WRITE_BARRIER); |
3849 FixedArray* result = FixedArray::cast(obj); | 3857 FixedArray* result = FixedArray::cast(obj); |
3850 result->set_length(len); | 3858 result->set_length(len); |
3851 | 3859 |
3852 // Copy the content. | 3860 // Copy the content. |
3853 DisallowHeapAllocation no_gc; | 3861 DisallowHeapAllocation no_gc; |
3854 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); | 3862 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); |
3855 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); | 3863 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); |
3856 | 3864 |
3857 // TODO(mvstanton): The map is set twice because of protection against calling | 3865 // TODO(mvstanton): The map is set twice because of protection against calling |
3858 // set() on a COW FixedArray. Issue v8:3221 created to track this, and | 3866 // set() on a COW FixedArray. Issue v8:3221 created to track this, and |
3859 // we might then be able to remove this whole method. | 3867 // we might then be able to remove this whole method. |
3860 HeapObject::cast(obj)->set_map_no_write_barrier(fixed_cow_array_map()); | 3868 HeapObject::cast(obj)->set_map_after_allocation(fixed_cow_array_map(), |
| 3869 SKIP_WRITE_BARRIER); |
3861 return result; | 3870 return result; |
3862 } | 3871 } |
3863 | 3872 |
3864 | 3873 |
3865 AllocationResult Heap::AllocateEmptyFixedTypedArray( | 3874 AllocationResult Heap::AllocateEmptyFixedTypedArray( |
3866 ExternalArrayType array_type) { | 3875 ExternalArrayType array_type) { |
3867 return AllocateFixedTypedArray(0, array_type, false, TENURED); | 3876 return AllocateFixedTypedArray(0, array_type, false, TENURED); |
3868 } | 3877 } |
3869 | 3878 |
3870 | 3879 |
3871 AllocationResult Heap::CopyFixedArrayAndGrow(FixedArray* src, int grow_by, | 3880 AllocationResult Heap::CopyFixedArrayAndGrow(FixedArray* src, int grow_by, |
3872 PretenureFlag pretenure) { | 3881 PretenureFlag pretenure) { |
3873 int old_len = src->length(); | 3882 int old_len = src->length(); |
3874 int new_len = old_len + grow_by; | 3883 int new_len = old_len + grow_by; |
3875 DCHECK(new_len >= old_len); | 3884 DCHECK(new_len >= old_len); |
3876 HeapObject* obj = nullptr; | 3885 HeapObject* obj = nullptr; |
3877 { | 3886 { |
3878 AllocationResult allocation = AllocateRawFixedArray(new_len, pretenure); | 3887 AllocationResult allocation = AllocateRawFixedArray(new_len, pretenure); |
3879 if (!allocation.To(&obj)) return allocation; | 3888 if (!allocation.To(&obj)) return allocation; |
3880 } | 3889 } |
3881 | 3890 |
3882 obj->set_map_no_write_barrier(fixed_array_map()); | 3891 obj->set_map_after_allocation(fixed_array_map(), SKIP_WRITE_BARRIER); |
3883 FixedArray* result = FixedArray::cast(obj); | 3892 FixedArray* result = FixedArray::cast(obj); |
3884 result->set_length(new_len); | 3893 result->set_length(new_len); |
3885 | 3894 |
3886 // Copy the content. | 3895 // Copy the content. |
3887 DisallowHeapAllocation no_gc; | 3896 DisallowHeapAllocation no_gc; |
3888 WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc); | 3897 WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc); |
3889 for (int i = 0; i < old_len; i++) result->set(i, src->get(i), mode); | 3898 for (int i = 0; i < old_len; i++) result->set(i, src->get(i), mode); |
3890 MemsetPointer(result->data_start() + old_len, undefined_value(), grow_by); | 3899 MemsetPointer(result->data_start() + old_len, undefined_value(), grow_by); |
3891 return result; | 3900 return result; |
3892 } | 3901 } |
3893 | 3902 |
3894 AllocationResult Heap::CopyFixedArrayUpTo(FixedArray* src, int new_len, | 3903 AllocationResult Heap::CopyFixedArrayUpTo(FixedArray* src, int new_len, |
3895 PretenureFlag pretenure) { | 3904 PretenureFlag pretenure) { |
3896 if (new_len == 0) return empty_fixed_array(); | 3905 if (new_len == 0) return empty_fixed_array(); |
3897 | 3906 |
3898 DCHECK_LE(new_len, src->length()); | 3907 DCHECK_LE(new_len, src->length()); |
3899 | 3908 |
3900 HeapObject* obj = nullptr; | 3909 HeapObject* obj = nullptr; |
3901 { | 3910 { |
3902 AllocationResult allocation = AllocateRawFixedArray(new_len, pretenure); | 3911 AllocationResult allocation = AllocateRawFixedArray(new_len, pretenure); |
3903 if (!allocation.To(&obj)) return allocation; | 3912 if (!allocation.To(&obj)) return allocation; |
3904 } | 3913 } |
3905 obj->set_map_no_write_barrier(fixed_array_map()); | 3914 obj->set_map_after_allocation(fixed_array_map(), SKIP_WRITE_BARRIER); |
3906 | 3915 |
3907 FixedArray* result = FixedArray::cast(obj); | 3916 FixedArray* result = FixedArray::cast(obj); |
3908 result->set_length(new_len); | 3917 result->set_length(new_len); |
3909 | 3918 |
3910 // Copy the content. | 3919 // Copy the content. |
3911 DisallowHeapAllocation no_gc; | 3920 DisallowHeapAllocation no_gc; |
3912 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); | 3921 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); |
3913 for (int i = 0; i < new_len; i++) result->set(i, src->get(i), mode); | 3922 for (int i = 0; i < new_len; i++) result->set(i, src->get(i), mode); |
3914 return result; | 3923 return result; |
3915 } | 3924 } |
3916 | 3925 |
3917 AllocationResult Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { | 3926 AllocationResult Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { |
3918 int len = src->length(); | 3927 int len = src->length(); |
3919 HeapObject* obj = nullptr; | 3928 HeapObject* obj = nullptr; |
3920 { | 3929 { |
3921 AllocationResult allocation = AllocateRawFixedArray(len, NOT_TENURED); | 3930 AllocationResult allocation = AllocateRawFixedArray(len, NOT_TENURED); |
3922 if (!allocation.To(&obj)) return allocation; | 3931 if (!allocation.To(&obj)) return allocation; |
3923 } | 3932 } |
3924 obj->set_map_no_write_barrier(map); | 3933 obj->set_map_after_allocation(map, SKIP_WRITE_BARRIER); |
3925 | 3934 |
3926 FixedArray* result = FixedArray::cast(obj); | 3935 FixedArray* result = FixedArray::cast(obj); |
3927 DisallowHeapAllocation no_gc; | 3936 DisallowHeapAllocation no_gc; |
3928 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); | 3937 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); |
3929 | 3938 |
3930 // Eliminate the write barrier if possible. | 3939 // Eliminate the write barrier if possible. |
3931 if (mode == SKIP_WRITE_BARRIER) { | 3940 if (mode == SKIP_WRITE_BARRIER) { |
3932 CopyBlock(obj->address() + kPointerSize, src->address() + kPointerSize, | 3941 CopyBlock(obj->address() + kPointerSize, src->address() + kPointerSize, |
3933 FixedArray::SizeFor(len) - kPointerSize); | 3942 FixedArray::SizeFor(len) - kPointerSize); |
3934 return obj; | 3943 return obj; |
3935 } | 3944 } |
3936 | 3945 |
3937 // Slow case: Just copy the content one-by-one. | 3946 // Slow case: Just copy the content one-by-one. |
3938 result->set_length(len); | 3947 result->set_length(len); |
3939 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); | 3948 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); |
3940 return result; | 3949 return result; |
3941 } | 3950 } |
3942 | 3951 |
3943 | 3952 |
3944 AllocationResult Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, | 3953 AllocationResult Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, |
3945 Map* map) { | 3954 Map* map) { |
3946 int len = src->length(); | 3955 int len = src->length(); |
3947 HeapObject* obj = nullptr; | 3956 HeapObject* obj = nullptr; |
3948 { | 3957 { |
3949 AllocationResult allocation = AllocateRawFixedDoubleArray(len, NOT_TENURED); | 3958 AllocationResult allocation = AllocateRawFixedDoubleArray(len, NOT_TENURED); |
3950 if (!allocation.To(&obj)) return allocation; | 3959 if (!allocation.To(&obj)) return allocation; |
3951 } | 3960 } |
3952 obj->set_map_no_write_barrier(map); | 3961 obj->set_map_after_allocation(map, SKIP_WRITE_BARRIER); |
3953 CopyBlock(obj->address() + FixedDoubleArray::kLengthOffset, | 3962 CopyBlock(obj->address() + FixedDoubleArray::kLengthOffset, |
3954 src->address() + FixedDoubleArray::kLengthOffset, | 3963 src->address() + FixedDoubleArray::kLengthOffset, |
3955 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); | 3964 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); |
3956 return obj; | 3965 return obj; |
3957 } | 3966 } |
3958 | 3967 |
3959 | 3968 |
3960 AllocationResult Heap::AllocateRawFixedArray(int length, | 3969 AllocationResult Heap::AllocateRawFixedArray(int length, |
3961 PretenureFlag pretenure) { | 3970 PretenureFlag pretenure) { |
3962 if (length < 0 || length > FixedArray::kMaxLength) { | 3971 if (length < 0 || length > FixedArray::kMaxLength) { |
(...skipping 20 matching lines...) Expand all Loading... |
3983 DCHECK(empty_fixed_array()->IsFixedArray()); | 3992 DCHECK(empty_fixed_array()->IsFixedArray()); |
3984 if (length == 0) return empty_fixed_array(); | 3993 if (length == 0) return empty_fixed_array(); |
3985 | 3994 |
3986 DCHECK(!InNewSpace(filler)); | 3995 DCHECK(!InNewSpace(filler)); |
3987 HeapObject* result = nullptr; | 3996 HeapObject* result = nullptr; |
3988 { | 3997 { |
3989 AllocationResult allocation = AllocateRawFixedArray(length, pretenure); | 3998 AllocationResult allocation = AllocateRawFixedArray(length, pretenure); |
3990 if (!allocation.To(&result)) return allocation; | 3999 if (!allocation.To(&result)) return allocation; |
3991 } | 4000 } |
3992 | 4001 |
3993 result->set_map_no_write_barrier(fixed_array_map()); | 4002 result->set_map_after_allocation(fixed_array_map(), SKIP_WRITE_BARRIER); |
3994 FixedArray* array = FixedArray::cast(result); | 4003 FixedArray* array = FixedArray::cast(result); |
3995 array->set_length(length); | 4004 array->set_length(length); |
3996 MemsetPointer(array->data_start(), filler, length); | 4005 MemsetPointer(array->data_start(), filler, length); |
3997 return array; | 4006 return array; |
3998 } | 4007 } |
3999 | 4008 |
4000 | 4009 |
4001 AllocationResult Heap::AllocateFixedArray(int length, PretenureFlag pretenure) { | 4010 AllocationResult Heap::AllocateFixedArray(int length, PretenureFlag pretenure) { |
4002 return AllocateFixedArrayWithFiller(length, pretenure, undefined_value()); | 4011 return AllocateFixedArrayWithFiller(length, pretenure, undefined_value()); |
4003 } | 4012 } |
4004 | 4013 |
4005 | 4014 |
4006 AllocationResult Heap::AllocateUninitializedFixedArray(int length) { | 4015 AllocationResult Heap::AllocateUninitializedFixedArray(int length) { |
4007 if (length == 0) return empty_fixed_array(); | 4016 if (length == 0) return empty_fixed_array(); |
4008 | 4017 |
4009 HeapObject* obj = nullptr; | 4018 HeapObject* obj = nullptr; |
4010 { | 4019 { |
4011 AllocationResult allocation = AllocateRawFixedArray(length, NOT_TENURED); | 4020 AllocationResult allocation = AllocateRawFixedArray(length, NOT_TENURED); |
4012 if (!allocation.To(&obj)) return allocation; | 4021 if (!allocation.To(&obj)) return allocation; |
4013 } | 4022 } |
4014 | 4023 |
4015 obj->set_map_no_write_barrier(fixed_array_map()); | 4024 obj->set_map_after_allocation(fixed_array_map(), SKIP_WRITE_BARRIER); |
4016 FixedArray::cast(obj)->set_length(length); | 4025 FixedArray::cast(obj)->set_length(length); |
4017 return obj; | 4026 return obj; |
4018 } | 4027 } |
4019 | 4028 |
4020 | 4029 |
4021 AllocationResult Heap::AllocateUninitializedFixedDoubleArray( | 4030 AllocationResult Heap::AllocateUninitializedFixedDoubleArray( |
4022 int length, PretenureFlag pretenure) { | 4031 int length, PretenureFlag pretenure) { |
4023 if (length == 0) return empty_fixed_array(); | 4032 if (length == 0) return empty_fixed_array(); |
4024 | 4033 |
4025 HeapObject* elements = nullptr; | 4034 HeapObject* elements = nullptr; |
4026 AllocationResult allocation = AllocateRawFixedDoubleArray(length, pretenure); | 4035 AllocationResult allocation = AllocateRawFixedDoubleArray(length, pretenure); |
4027 if (!allocation.To(&elements)) return allocation; | 4036 if (!allocation.To(&elements)) return allocation; |
4028 | 4037 |
4029 elements->set_map_no_write_barrier(fixed_double_array_map()); | 4038 elements->set_map_after_allocation(fixed_double_array_map(), |
| 4039 SKIP_WRITE_BARRIER); |
4030 FixedDoubleArray::cast(elements)->set_length(length); | 4040 FixedDoubleArray::cast(elements)->set_length(length); |
4031 return elements; | 4041 return elements; |
4032 } | 4042 } |
4033 | 4043 |
4034 | 4044 |
4035 AllocationResult Heap::AllocateRawFixedDoubleArray(int length, | 4045 AllocationResult Heap::AllocateRawFixedDoubleArray(int length, |
4036 PretenureFlag pretenure) { | 4046 PretenureFlag pretenure) { |
4037 if (length < 0 || length > FixedDoubleArray::kMaxLength) { | 4047 if (length < 0 || length > FixedDoubleArray::kMaxLength) { |
4038 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); | 4048 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); |
4039 } | 4049 } |
(...skipping 11 matching lines...) Expand all Loading... |
4051 | 4061 |
4052 | 4062 |
4053 AllocationResult Heap::AllocateSymbol() { | 4063 AllocationResult Heap::AllocateSymbol() { |
4054 // Statically ensure that it is safe to allocate symbols in paged spaces. | 4064 // Statically ensure that it is safe to allocate symbols in paged spaces. |
4055 STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize); | 4065 STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize); |
4056 | 4066 |
4057 HeapObject* result = nullptr; | 4067 HeapObject* result = nullptr; |
4058 AllocationResult allocation = AllocateRaw(Symbol::kSize, OLD_SPACE); | 4068 AllocationResult allocation = AllocateRaw(Symbol::kSize, OLD_SPACE); |
4059 if (!allocation.To(&result)) return allocation; | 4069 if (!allocation.To(&result)) return allocation; |
4060 | 4070 |
4061 result->set_map_no_write_barrier(symbol_map()); | 4071 result->set_map_after_allocation(symbol_map(), SKIP_WRITE_BARRIER); |
4062 | 4072 |
4063 // Generate a random hash value. | 4073 // Generate a random hash value. |
4064 int hash = isolate()->GenerateIdentityHash(Name::kHashBitMask); | 4074 int hash = isolate()->GenerateIdentityHash(Name::kHashBitMask); |
4065 | 4075 |
4066 Symbol::cast(result) | 4076 Symbol::cast(result) |
4067 ->set_hash_field(Name::kIsNotArrayIndexMask | (hash << Name::kHashShift)); | 4077 ->set_hash_field(Name::kIsNotArrayIndexMask | (hash << Name::kHashShift)); |
4068 Symbol::cast(result)->set_name(undefined_value()); | 4078 Symbol::cast(result)->set_name(undefined_value()); |
4069 Symbol::cast(result)->set_flags(0); | 4079 Symbol::cast(result)->set_flags(0); |
4070 | 4080 |
4071 DCHECK(!Symbol::cast(result)->is_private()); | 4081 DCHECK(!Symbol::cast(result)->is_private()); |
(...skipping 205 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4277 incremental_marking()->MarkBlackAndPush(object); | 4287 incremental_marking()->MarkBlackAndPush(object); |
4278 } | 4288 } |
4279 #ifdef VERIFY_HEAP | 4289 #ifdef VERIFY_HEAP |
4280 DCHECK(pending_layout_change_object_ == nullptr); | 4290 DCHECK(pending_layout_change_object_ == nullptr); |
4281 pending_layout_change_object_ = object; | 4291 pending_layout_change_object_ = object; |
4282 #endif | 4292 #endif |
4283 } | 4293 } |
4284 | 4294 |
4285 #ifdef VERIFY_HEAP | 4295 #ifdef VERIFY_HEAP |
4286 void Heap::VerifyObjectLayoutChange(HeapObject* object, Map* new_map) { | 4296 void Heap::VerifyObjectLayoutChange(HeapObject* object, Map* new_map) { |
 | 4297 // Check that Heap::NotifyObjectLayoutChange was called for object |
 | 4298 // transitions that are not safe for concurrent marking. |
| 4299 // If you see this check triggering for a freshly allocated object, |
| 4300 // use object->set_map_after_allocation() to initialize its map. |
4287 if (pending_layout_change_object_ == nullptr) { | 4301 if (pending_layout_change_object_ == nullptr) { |
4288 DCHECK(!object->IsJSObject() || | 4302 DCHECK(!object->IsJSObject() || |
4289 !object->map()->TransitionRequiresSynchronizationWithGC(new_map)); | 4303 !object->map()->TransitionRequiresSynchronizationWithGC(new_map)); |
4290 } else { | 4304 } else { |
4291 DCHECK_EQ(pending_layout_change_object_, object); | 4305 DCHECK_EQ(pending_layout_change_object_, object); |
4292 pending_layout_change_object_ = nullptr; | 4306 pending_layout_change_object_ = nullptr; |
4293 } | 4307 } |
4294 } | 4308 } |
4295 #endif | 4309 #endif |
4296 | 4310 |
(...skipping 2231 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6528 case LO_SPACE: | 6542 case LO_SPACE: |
6529 return "LO_SPACE"; | 6543 return "LO_SPACE"; |
6530 default: | 6544 default: |
6531 UNREACHABLE(); | 6545 UNREACHABLE(); |
6532 } | 6546 } |
6533 return NULL; | 6547 return NULL; |
6534 } | 6548 } |
6535 | 6549 |
6536 } // namespace internal | 6550 } // namespace internal |
6537 } // namespace v8 | 6551 } // namespace v8 |
OLD | NEW |