OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 1971 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1982 static_cast<intptr_t>( | 1982 static_cast<intptr_t>( |
1983 static_cast<double>(old_generation_allocation_limit_) * | 1983 static_cast<double>(old_generation_allocation_limit_) * |
1984 (tracer()->AverageSurvivalRatio() / 100))); | 1984 (tracer()->AverageSurvivalRatio() / 100))); |
1985 } | 1985 } |
1986 } | 1986 } |
1987 | 1987 |
1988 | 1988 |
1989 AllocationResult Heap::AllocatePartialMap(InstanceType instance_type, | 1989 AllocationResult Heap::AllocatePartialMap(InstanceType instance_type, |
1990 int instance_size) { | 1990 int instance_size) { |
1991 Object* result = nullptr; | 1991 Object* result = nullptr; |
1992 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE); | 1992 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); |
1993 if (!allocation.To(&result)) return allocation; | 1993 if (!allocation.To(&result)) return allocation; |
1994 | 1994 |
1995 // Map::cast cannot be used due to uninitialized map field. | 1995 // Map::cast cannot be used due to uninitialized map field. |
1996 reinterpret_cast<Map*>(result)->set_map( | 1996 reinterpret_cast<Map*>(result)->set_map( |
1997 reinterpret_cast<Map*>(root(kMetaMapRootIndex))); | 1997 reinterpret_cast<Map*>(root(kMetaMapRootIndex))); |
1998 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); | 1998 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); |
1999 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); | 1999 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); |
2000 // Initialize to only containing tagged fields. | 2000 // Initialize to only containing tagged fields. |
2001 reinterpret_cast<Map*>(result)->set_visitor_id( | 2001 reinterpret_cast<Map*>(result)->set_visitor_id( |
2002 StaticVisitorBase::GetVisitorId(instance_type, instance_size, false)); | 2002 StaticVisitorBase::GetVisitorId(instance_type, instance_size, false)); |
(...skipping 13 matching lines...) Expand all Loading... |
2016 reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3); | 2016 reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3); |
2017 reinterpret_cast<Map*>(result)->set_weak_cell_cache(Smi::FromInt(0)); | 2017 reinterpret_cast<Map*>(result)->set_weak_cell_cache(Smi::FromInt(0)); |
2018 return result; | 2018 return result; |
2019 } | 2019 } |
2020 | 2020 |
2021 | 2021 |
2022 AllocationResult Heap::AllocateMap(InstanceType instance_type, | 2022 AllocationResult Heap::AllocateMap(InstanceType instance_type, |
2023 int instance_size, | 2023 int instance_size, |
2024 ElementsKind elements_kind) { | 2024 ElementsKind elements_kind) { |
2025 HeapObject* result = nullptr; | 2025 HeapObject* result = nullptr; |
2026 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE); | 2026 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); |
2027 if (!allocation.To(&result)) return allocation; | 2027 if (!allocation.To(&result)) return allocation; |
2028 | 2028 |
2029 result->set_map_no_write_barrier(meta_map()); | 2029 result->set_map_no_write_barrier(meta_map()); |
2030 Map* map = Map::cast(result); | 2030 Map* map = Map::cast(result); |
2031 map->set_instance_type(instance_type); | 2031 map->set_instance_type(instance_type); |
2032 map->set_prototype(null_value(), SKIP_WRITE_BARRIER); | 2032 map->set_prototype(null_value(), SKIP_WRITE_BARRIER); |
2033 map->set_constructor_or_backpointer(null_value(), SKIP_WRITE_BARRIER); | 2033 map->set_constructor_or_backpointer(null_value(), SKIP_WRITE_BARRIER); |
2034 map->set_instance_size(instance_size); | 2034 map->set_instance_size(instance_size); |
2035 map->clear_unused(); | 2035 map->clear_unused(); |
2036 map->set_inobject_properties_or_constructor_function_index(0); | 2036 map->set_inobject_properties_or_constructor_function_index(0); |
(...skipping 20 matching lines...) Expand all Loading... |
2057 | 2057 |
2058 return map; | 2058 return map; |
2059 } | 2059 } |
2060 | 2060 |
2061 | 2061 |
2062 AllocationResult Heap::AllocateFillerObject(int size, bool double_align, | 2062 AllocationResult Heap::AllocateFillerObject(int size, bool double_align, |
2063 AllocationSpace space) { | 2063 AllocationSpace space) { |
2064 HeapObject* obj = nullptr; | 2064 HeapObject* obj = nullptr; |
2065 { | 2065 { |
2066 AllocationAlignment align = double_align ? kDoubleAligned : kWordAligned; | 2066 AllocationAlignment align = double_align ? kDoubleAligned : kWordAligned; |
2067 AllocationResult allocation = AllocateRaw(size, space, align); | 2067 AllocationResult allocation = AllocateRaw(size, space, space, align); |
2068 if (!allocation.To(&obj)) return allocation; | 2068 if (!allocation.To(&obj)) return allocation; |
2069 } | 2069 } |
2070 #ifdef DEBUG | 2070 #ifdef DEBUG |
2071 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 2071 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
2072 DCHECK(chunk->owner()->identity() == space); | 2072 DCHECK(chunk->owner()->identity() == space); |
2073 #endif | 2073 #endif |
2074 CreateFillerObjectAt(obj->address(), size); | 2074 CreateFillerObjectAt(obj->address(), size); |
2075 return obj; | 2075 return obj; |
2076 } | 2076 } |
2077 | 2077 |
(...skipping 292 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2370 PretenureFlag pretenure) { | 2370 PretenureFlag pretenure) { |
2371 // Statically ensure that it is safe to allocate heap numbers in paged | 2371 // Statically ensure that it is safe to allocate heap numbers in paged |
2372 // spaces. | 2372 // spaces. |
2373 int size = HeapNumber::kSize; | 2373 int size = HeapNumber::kSize; |
2374 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize); | 2374 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize); |
2375 | 2375 |
2376 AllocationSpace space = SelectSpace(pretenure); | 2376 AllocationSpace space = SelectSpace(pretenure); |
2377 | 2377 |
2378 HeapObject* result = nullptr; | 2378 HeapObject* result = nullptr; |
2379 { | 2379 { |
2380 AllocationResult allocation = AllocateRaw(size, space, kDoubleUnaligned); | 2380 AllocationResult allocation = |
| 2381 AllocateRaw(size, space, OLD_SPACE, kDoubleUnaligned); |
2381 if (!allocation.To(&result)) return allocation; | 2382 if (!allocation.To(&result)) return allocation; |
2382 } | 2383 } |
2383 | 2384 |
2384 Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map(); | 2385 Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map(); |
2385 HeapObject::cast(result)->set_map_no_write_barrier(map); | 2386 HeapObject::cast(result)->set_map_no_write_barrier(map); |
2386 HeapNumber::cast(result)->set_value(value); | 2387 HeapNumber::cast(result)->set_value(value); |
2387 return result; | 2388 return result; |
2388 } | 2389 } |
2389 | 2390 |
2390 #define SIMD_ALLOCATE_DEFINITION(TYPE, Type, type, lane_count, lane_type) \ | 2391 #define SIMD_ALLOCATE_DEFINITION(TYPE, Type, type, lane_count, lane_type) \ |
2391 AllocationResult Heap::Allocate##Type(lane_type lanes[lane_count], \ | 2392 AllocationResult Heap::Allocate##Type(lane_type lanes[lane_count], \ |
2392 PretenureFlag pretenure) { \ | 2393 PretenureFlag pretenure) { \ |
2393 int size = Type::kSize; \ | 2394 int size = Type::kSize; \ |
2394 STATIC_ASSERT(Type::kSize <= Page::kMaxRegularHeapObjectSize); \ | 2395 STATIC_ASSERT(Type::kSize <= Page::kMaxRegularHeapObjectSize); \ |
2395 \ | 2396 \ |
2396 AllocationSpace space = SelectSpace(pretenure); \ | 2397 AllocationSpace space = SelectSpace(pretenure); \ |
2397 \ | 2398 \ |
2398 HeapObject* result = nullptr; \ | 2399 HeapObject* result = nullptr; \ |
2399 { \ | 2400 { \ |
2400 AllocationResult allocation = \ | 2401 AllocationResult allocation = \ |
2401 AllocateRaw(size, space, kSimd128Unaligned); \ | 2402 AllocateRaw(size, space, OLD_SPACE, kSimd128Unaligned); \ |
2402 if (!allocation.To(&result)) return allocation; \ | 2403 if (!allocation.To(&result)) return allocation; \ |
2403 } \ | 2404 } \ |
2404 \ | 2405 \ |
2405 result->set_map_no_write_barrier(type##_map()); \ | 2406 result->set_map_no_write_barrier(type##_map()); \ |
2406 Type* instance = Type::cast(result); \ | 2407 Type* instance = Type::cast(result); \ |
2407 for (int i = 0; i < lane_count; i++) { \ | 2408 for (int i = 0; i < lane_count; i++) { \ |
2408 instance->set_lane(i, lanes[i]); \ | 2409 instance->set_lane(i, lanes[i]); \ |
2409 } \ | 2410 } \ |
2410 return result; \ | 2411 return result; \ |
2411 } | 2412 } |
2412 SIMD128_TYPES(SIMD_ALLOCATE_DEFINITION) | 2413 SIMD128_TYPES(SIMD_ALLOCATE_DEFINITION) |
2413 #undef SIMD_ALLOCATE_DEFINITION | 2414 #undef SIMD_ALLOCATE_DEFINITION |
2414 | 2415 |
2415 | 2416 |
2416 AllocationResult Heap::AllocateCell(Object* value) { | 2417 AllocationResult Heap::AllocateCell(Object* value) { |
2417 int size = Cell::kSize; | 2418 int size = Cell::kSize; |
2418 STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize); | 2419 STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize); |
2419 | 2420 |
2420 HeapObject* result = nullptr; | 2421 HeapObject* result = nullptr; |
2421 { | 2422 { |
2422 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 2423 AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE); |
2423 if (!allocation.To(&result)) return allocation; | 2424 if (!allocation.To(&result)) return allocation; |
2424 } | 2425 } |
2425 result->set_map_no_write_barrier(cell_map()); | 2426 result->set_map_no_write_barrier(cell_map()); |
2426 Cell::cast(result)->set_value(value); | 2427 Cell::cast(result)->set_value(value); |
2427 return result; | 2428 return result; |
2428 } | 2429 } |
2429 | 2430 |
2430 | 2431 |
2431 AllocationResult Heap::AllocatePropertyCell() { | 2432 AllocationResult Heap::AllocatePropertyCell() { |
2432 int size = PropertyCell::kSize; | 2433 int size = PropertyCell::kSize; |
2433 STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize); | 2434 STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize); |
2434 | 2435 |
2435 HeapObject* result = nullptr; | 2436 HeapObject* result = nullptr; |
2436 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 2437 AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE); |
2437 if (!allocation.To(&result)) return allocation; | 2438 if (!allocation.To(&result)) return allocation; |
2438 | 2439 |
2439 result->set_map_no_write_barrier(global_property_cell_map()); | 2440 result->set_map_no_write_barrier(global_property_cell_map()); |
2440 PropertyCell* cell = PropertyCell::cast(result); | 2441 PropertyCell* cell = PropertyCell::cast(result); |
2441 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), | 2442 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), |
2442 SKIP_WRITE_BARRIER); | 2443 SKIP_WRITE_BARRIER); |
2443 cell->set_property_details(PropertyDetails(Smi::FromInt(0))); | 2444 cell->set_property_details(PropertyDetails(Smi::FromInt(0))); |
2444 cell->set_value(the_hole_value()); | 2445 cell->set_value(the_hole_value()); |
2445 return result; | 2446 return result; |
2446 } | 2447 } |
2447 | 2448 |
2448 | 2449 |
2449 AllocationResult Heap::AllocateWeakCell(HeapObject* value) { | 2450 AllocationResult Heap::AllocateWeakCell(HeapObject* value) { |
2450 int size = WeakCell::kSize; | 2451 int size = WeakCell::kSize; |
2451 STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize); | 2452 STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize); |
2452 HeapObject* result = nullptr; | 2453 HeapObject* result = nullptr; |
2453 { | 2454 { |
2454 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 2455 AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE); |
2455 if (!allocation.To(&result)) return allocation; | 2456 if (!allocation.To(&result)) return allocation; |
2456 } | 2457 } |
2457 result->set_map_no_write_barrier(weak_cell_map()); | 2458 result->set_map_no_write_barrier(weak_cell_map()); |
2458 WeakCell::cast(result)->initialize(value); | 2459 WeakCell::cast(result)->initialize(value); |
2459 WeakCell::cast(result)->clear_next(this); | 2460 WeakCell::cast(result)->clear_next(this); |
2460 return result; | 2461 return result; |
2461 } | 2462 } |
2462 | 2463 |
2463 | 2464 |
2464 void Heap::CreateApiObjects() { | 2465 void Heap::CreateApiObjects() { |
(...skipping 464 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2929 | 2930 |
2930 | 2931 |
2931 AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) { | 2932 AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) { |
2932 if (length < 0 || length > ByteArray::kMaxLength) { | 2933 if (length < 0 || length > ByteArray::kMaxLength) { |
2933 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); | 2934 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); |
2934 } | 2935 } |
2935 int size = ByteArray::SizeFor(length); | 2936 int size = ByteArray::SizeFor(length); |
2936 AllocationSpace space = SelectSpace(pretenure); | 2937 AllocationSpace space = SelectSpace(pretenure); |
2937 HeapObject* result = nullptr; | 2938 HeapObject* result = nullptr; |
2938 { | 2939 { |
2939 AllocationResult allocation = AllocateRaw(size, space); | 2940 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); |
2940 if (!allocation.To(&result)) return allocation; | 2941 if (!allocation.To(&result)) return allocation; |
2941 } | 2942 } |
2942 | 2943 |
2943 result->set_map_no_write_barrier(byte_array_map()); | 2944 result->set_map_no_write_barrier(byte_array_map()); |
2944 ByteArray::cast(result)->set_length(length); | 2945 ByteArray::cast(result)->set_length(length); |
2945 return result; | 2946 return result; |
2946 } | 2947 } |
2947 | 2948 |
2948 | 2949 |
2949 AllocationResult Heap::AllocateBytecodeArray(int length, | 2950 AllocationResult Heap::AllocateBytecodeArray(int length, |
2950 const byte* const raw_bytecodes, | 2951 const byte* const raw_bytecodes, |
2951 int frame_size, | 2952 int frame_size, |
2952 int parameter_count, | 2953 int parameter_count, |
2953 FixedArray* constant_pool) { | 2954 FixedArray* constant_pool) { |
2954 if (length < 0 || length > BytecodeArray::kMaxLength) { | 2955 if (length < 0 || length > BytecodeArray::kMaxLength) { |
2955 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); | 2956 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); |
2956 } | 2957 } |
2957 // Bytecode array is pretenured, so constant pool array should be to. | 2958 // Bytecode array is pretenured, so constant pool array should be to. |
2958 DCHECK(!InNewSpace(constant_pool)); | 2959 DCHECK(!InNewSpace(constant_pool)); |
2959 | 2960 |
2960 int size = BytecodeArray::SizeFor(length); | 2961 int size = BytecodeArray::SizeFor(length); |
2961 HeapObject* result = nullptr; | 2962 HeapObject* result = nullptr; |
2962 { | 2963 { |
2963 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 2964 AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE); |
2964 if (!allocation.To(&result)) return allocation; | 2965 if (!allocation.To(&result)) return allocation; |
2965 } | 2966 } |
2966 | 2967 |
2967 result->set_map_no_write_barrier(bytecode_array_map()); | 2968 result->set_map_no_write_barrier(bytecode_array_map()); |
2968 BytecodeArray* instance = BytecodeArray::cast(result); | 2969 BytecodeArray* instance = BytecodeArray::cast(result); |
2969 instance->set_length(length); | 2970 instance->set_length(length); |
2970 instance->set_frame_size(frame_size); | 2971 instance->set_frame_size(frame_size); |
2971 instance->set_parameter_count(parameter_count); | 2972 instance->set_parameter_count(parameter_count); |
2972 instance->set_constant_pool(constant_pool); | 2973 instance->set_constant_pool(constant_pool); |
2973 CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length); | 2974 CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length); |
(...skipping 166 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3140 } | 3141 } |
3141 | 3142 |
3142 | 3143 |
3143 AllocationResult Heap::AllocateFixedTypedArrayWithExternalPointer( | 3144 AllocationResult Heap::AllocateFixedTypedArrayWithExternalPointer( |
3144 int length, ExternalArrayType array_type, void* external_pointer, | 3145 int length, ExternalArrayType array_type, void* external_pointer, |
3145 PretenureFlag pretenure) { | 3146 PretenureFlag pretenure) { |
3146 int size = FixedTypedArrayBase::kHeaderSize; | 3147 int size = FixedTypedArrayBase::kHeaderSize; |
3147 AllocationSpace space = SelectSpace(pretenure); | 3148 AllocationSpace space = SelectSpace(pretenure); |
3148 HeapObject* result = nullptr; | 3149 HeapObject* result = nullptr; |
3149 { | 3150 { |
3150 AllocationResult allocation = AllocateRaw(size, space); | 3151 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); |
3151 if (!allocation.To(&result)) return allocation; | 3152 if (!allocation.To(&result)) return allocation; |
3152 } | 3153 } |
3153 | 3154 |
3154 result->set_map_no_write_barrier(MapForFixedTypedArray(array_type)); | 3155 result->set_map_no_write_barrier(MapForFixedTypedArray(array_type)); |
3155 FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(result); | 3156 FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(result); |
3156 elements->set_base_pointer(Smi::FromInt(0), SKIP_WRITE_BARRIER); | 3157 elements->set_base_pointer(Smi::FromInt(0), SKIP_WRITE_BARRIER); |
3157 elements->set_external_pointer(external_pointer, SKIP_WRITE_BARRIER); | 3158 elements->set_external_pointer(external_pointer, SKIP_WRITE_BARRIER); |
3158 elements->set_length(length); | 3159 elements->set_length(length); |
3159 return elements; | 3160 return elements; |
3160 } | 3161 } |
(...skipping 24 matching lines...) Expand all Loading... |
3185 PretenureFlag pretenure) { | 3186 PretenureFlag pretenure) { |
3186 int element_size; | 3187 int element_size; |
3187 ElementsKind elements_kind; | 3188 ElementsKind elements_kind; |
3188 ForFixedTypedArray(array_type, &element_size, &elements_kind); | 3189 ForFixedTypedArray(array_type, &element_size, &elements_kind); |
3189 int size = OBJECT_POINTER_ALIGN(length * element_size + | 3190 int size = OBJECT_POINTER_ALIGN(length * element_size + |
3190 FixedTypedArrayBase::kDataOffset); | 3191 FixedTypedArrayBase::kDataOffset); |
3191 AllocationSpace space = SelectSpace(pretenure); | 3192 AllocationSpace space = SelectSpace(pretenure); |
3192 | 3193 |
3193 HeapObject* object = nullptr; | 3194 HeapObject* object = nullptr; |
3194 AllocationResult allocation = AllocateRaw( | 3195 AllocationResult allocation = AllocateRaw( |
3195 size, space, | 3196 size, space, OLD_SPACE, |
3196 array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned); | 3197 array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned); |
3197 if (!allocation.To(&object)) return allocation; | 3198 if (!allocation.To(&object)) return allocation; |
3198 | 3199 |
3199 object->set_map_no_write_barrier(MapForFixedTypedArray(array_type)); | 3200 object->set_map_no_write_barrier(MapForFixedTypedArray(array_type)); |
3200 FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object); | 3201 FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object); |
3201 elements->set_base_pointer(elements, SKIP_WRITE_BARRIER); | 3202 elements->set_base_pointer(elements, SKIP_WRITE_BARRIER); |
3202 elements->set_external_pointer( | 3203 elements->set_external_pointer( |
3203 ExternalReference::fixed_typed_array_base_data_offset().address(), | 3204 ExternalReference::fixed_typed_array_base_data_offset().address(), |
3204 SKIP_WRITE_BARRIER); | 3205 SKIP_WRITE_BARRIER); |
3205 elements->set_length(length); | 3206 elements->set_length(length); |
3206 if (initialize) memset(elements->DataPtr(), 0, elements->DataSize()); | 3207 if (initialize) memset(elements->DataPtr(), 0, elements->DataSize()); |
3207 return elements; | 3208 return elements; |
3208 } | 3209 } |
3209 | 3210 |
3210 | 3211 |
3211 AllocationResult Heap::AllocateCode(int object_size, bool immovable) { | 3212 AllocationResult Heap::AllocateCode(int object_size, bool immovable) { |
3212 DCHECK(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment)); | 3213 DCHECK(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment)); |
3213 AllocationResult allocation = AllocateRaw(object_size, CODE_SPACE); | 3214 AllocationResult allocation = |
| 3215 AllocateRaw(object_size, CODE_SPACE, CODE_SPACE); |
3214 | 3216 |
3215 HeapObject* result = nullptr; | 3217 HeapObject* result = nullptr; |
3216 if (!allocation.To(&result)) return allocation; | 3218 if (!allocation.To(&result)) return allocation; |
3217 | 3219 |
3218 if (immovable) { | 3220 if (immovable) { |
3219 Address address = result->address(); | 3221 Address address = result->address(); |
3220 // Code objects which should stay at a fixed address are allocated either | 3222 // Code objects which should stay at a fixed address are allocated either |
3221 // in the first page of code space (objects on the first page of each space | 3223 // in the first page of code space (objects on the first page of each space |
3222 // are never moved) or in large object space. | 3224 // are never moved) or in large object space. |
3223 if (!code_space_->FirstPage()->Contains(address) && | 3225 if (!code_space_->FirstPage()->Contains(address) && |
(...skipping 18 matching lines...) Expand all Loading... |
3242 return code; | 3244 return code; |
3243 } | 3245 } |
3244 | 3246 |
3245 | 3247 |
3246 AllocationResult Heap::CopyCode(Code* code) { | 3248 AllocationResult Heap::CopyCode(Code* code) { |
3247 AllocationResult allocation; | 3249 AllocationResult allocation; |
3248 | 3250 |
3249 HeapObject* result = nullptr; | 3251 HeapObject* result = nullptr; |
3250 // Allocate an object the same size as the code object. | 3252 // Allocate an object the same size as the code object. |
3251 int obj_size = code->Size(); | 3253 int obj_size = code->Size(); |
3252 allocation = AllocateRaw(obj_size, CODE_SPACE); | 3254 allocation = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE); |
3253 if (!allocation.To(&result)) return allocation; | 3255 if (!allocation.To(&result)) return allocation; |
3254 | 3256 |
3255 // Copy code object. | 3257 // Copy code object. |
3256 Address old_addr = code->address(); | 3258 Address old_addr = code->address(); |
3257 Address new_addr = result->address(); | 3259 Address new_addr = result->address(); |
3258 CopyBlock(new_addr, old_addr, obj_size); | 3260 CopyBlock(new_addr, old_addr, obj_size); |
3259 Code* new_code = Code::cast(result); | 3261 Code* new_code = Code::cast(result); |
3260 | 3262 |
3261 // Relocate the copy. | 3263 // Relocate the copy. |
3262 DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment)); | 3264 DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment)); |
(...skipping 18 matching lines...) Expand all Loading... |
3281 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment); | 3283 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment); |
3282 | 3284 |
3283 int new_obj_size = Code::SizeFor(new_body_size); | 3285 int new_obj_size = Code::SizeFor(new_body_size); |
3284 | 3286 |
3285 Address old_addr = code->address(); | 3287 Address old_addr = code->address(); |
3286 | 3288 |
3287 size_t relocation_offset = | 3289 size_t relocation_offset = |
3288 static_cast<size_t>(code->instruction_end() - old_addr); | 3290 static_cast<size_t>(code->instruction_end() - old_addr); |
3289 | 3291 |
3290 HeapObject* result = nullptr; | 3292 HeapObject* result = nullptr; |
3291 AllocationResult allocation = AllocateRaw(new_obj_size, CODE_SPACE); | 3293 AllocationResult allocation = |
| 3294 AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE); |
3292 if (!allocation.To(&result)) return allocation; | 3295 if (!allocation.To(&result)) return allocation; |
3293 | 3296 |
3294 // Copy code object. | 3297 // Copy code object. |
3295 Address new_addr = result->address(); | 3298 Address new_addr = result->address(); |
3296 | 3299 |
3297 // Copy header and instructions. | 3300 // Copy header and instructions. |
3298 CopyBytes(new_addr, old_addr, relocation_offset); | 3301 CopyBytes(new_addr, old_addr, relocation_offset); |
3299 | 3302 |
3300 Code* new_code = Code::cast(result); | 3303 Code* new_code = Code::cast(result); |
3301 new_code->set_relocation_info(reloc_info_array); | 3304 new_code->set_relocation_info(reloc_info_array); |
(...skipping 25 matching lines...) Expand all Loading... |
3327 if (FLAG_allocation_site_pretenuring) { | 3330 if (FLAG_allocation_site_pretenuring) { |
3328 allocation_site->IncrementMementoCreateCount(); | 3331 allocation_site->IncrementMementoCreateCount(); |
3329 } | 3332 } |
3330 } | 3333 } |
3331 | 3334 |
3332 | 3335 |
3333 AllocationResult Heap::Allocate(Map* map, AllocationSpace space, | 3336 AllocationResult Heap::Allocate(Map* map, AllocationSpace space, |
3334 AllocationSite* allocation_site) { | 3337 AllocationSite* allocation_site) { |
3335 DCHECK(gc_state_ == NOT_IN_GC); | 3338 DCHECK(gc_state_ == NOT_IN_GC); |
3336 DCHECK(map->instance_type() != MAP_TYPE); | 3339 DCHECK(map->instance_type() != MAP_TYPE); |
| 3340 // If allocation failures are disallowed, we may allocate in a different |
| 3341 // space when new space is full and the object is not a large object. |
| 3342 AllocationSpace retry_space = (space != NEW_SPACE) ? space : OLD_SPACE; |
3337 int size = map->instance_size(); | 3343 int size = map->instance_size(); |
3338 if (allocation_site != NULL) { | 3344 if (allocation_site != NULL) { |
3339 size += AllocationMemento::kSize; | 3345 size += AllocationMemento::kSize; |
3340 } | 3346 } |
3341 HeapObject* result = nullptr; | 3347 HeapObject* result = nullptr; |
3342 AllocationResult allocation = AllocateRaw(size, space); | 3348 AllocationResult allocation = AllocateRaw(size, space, retry_space); |
3343 if (!allocation.To(&result)) return allocation; | 3349 if (!allocation.To(&result)) return allocation; |
3344 // No need for write barrier since object is white and map is in old space. | 3350 // No need for write barrier since object is white and map is in old space. |
3345 result->set_map_no_write_barrier(map); | 3351 result->set_map_no_write_barrier(map); |
3346 if (allocation_site != NULL) { | 3352 if (allocation_site != NULL) { |
3347 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | 3353 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
3348 reinterpret_cast<Address>(result) + map->instance_size()); | 3354 reinterpret_cast<Address>(result) + map->instance_size()); |
3349 InitializeAllocationMemento(alloc_memento, allocation_site); | 3355 InitializeAllocationMemento(alloc_memento, allocation_site); |
3350 } | 3356 } |
3351 return result; | 3357 return result; |
3352 } | 3358 } |
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3434 // We can only clone normal objects or arrays. Copying anything else | 3440 // We can only clone normal objects or arrays. Copying anything else |
3435 // will break invariants. | 3441 // will break invariants. |
3436 CHECK(map->instance_type() == JS_OBJECT_TYPE || | 3442 CHECK(map->instance_type() == JS_OBJECT_TYPE || |
3437 map->instance_type() == JS_ARRAY_TYPE); | 3443 map->instance_type() == JS_ARRAY_TYPE); |
3438 | 3444 |
3439 int object_size = map->instance_size(); | 3445 int object_size = map->instance_size(); |
3440 HeapObject* clone = nullptr; | 3446 HeapObject* clone = nullptr; |
3441 | 3447 |
3442 DCHECK(site == NULL || AllocationSite::CanTrack(map->instance_type())); | 3448 DCHECK(site == NULL || AllocationSite::CanTrack(map->instance_type())); |
3443 | 3449 |
3444 int adjusted_object_size = | 3450 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; |
3445 site != NULL ? object_size + AllocationMemento::kSize : object_size; | |
3446 AllocationResult allocation = AllocateRaw(adjusted_object_size, NEW_SPACE); | |
3447 if (!allocation.To(&clone)) return allocation; | |
3448 | 3451 |
3449 SLOW_DCHECK(InNewSpace(clone)); | 3452 // If we're forced to always allocate, we use the general allocation |
3450 // Since we know the clone is allocated in new space, we can copy | 3453 // functions which may leave us with an object in old space. |
3451 // the contents without worrying about updating the write barrier. | 3454 if (always_allocate()) { |
3452 CopyBlock(clone->address(), source->address(), object_size); | 3455 { |
| 3456 AllocationResult allocation = |
| 3457 AllocateRaw(object_size, NEW_SPACE, OLD_SPACE); |
| 3458 if (!allocation.To(&clone)) return allocation; |
| 3459 } |
| 3460 Address clone_address = clone->address(); |
| 3461 CopyBlock(clone_address, source->address(), object_size); |
3453 | 3462 |
3454 if (site != NULL) { | 3463 // Update write barrier for all tagged fields that lie beyond the header. |
3455 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | 3464 const int start_offset = JSObject::kHeaderSize; |
3456 reinterpret_cast<Address>(clone) + object_size); | 3465 const int end_offset = object_size; |
3457 InitializeAllocationMemento(alloc_memento, site); | 3466 |
| 3467 #if V8_DOUBLE_FIELDS_UNBOXING |
| 3468 LayoutDescriptorHelper helper(map); |
| 3469 bool has_only_tagged_fields = helper.all_fields_tagged(); |
| 3470 |
| 3471 if (!has_only_tagged_fields) { |
| 3472 for (int offset = start_offset; offset < end_offset;) { |
| 3473 int end_of_region_offset; |
| 3474 if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) { |
| 3475 RecordWrites(clone_address, offset, |
| 3476 (end_of_region_offset - offset) / kPointerSize); |
| 3477 } |
| 3478 offset = end_of_region_offset; |
| 3479 } |
| 3480 } else { |
| 3481 #endif |
| 3482 // Object has only tagged fields. |
| 3483 RecordWrites(clone_address, start_offset, |
| 3484 (end_offset - start_offset) / kPointerSize); |
| 3485 #if V8_DOUBLE_FIELDS_UNBOXING |
| 3486 } |
| 3487 #endif |
| 3488 |
| 3489 } else { |
| 3490 wb_mode = SKIP_WRITE_BARRIER; |
| 3491 |
| 3492 { |
| 3493 int adjusted_object_size = |
| 3494 site != NULL ? object_size + AllocationMemento::kSize : object_size; |
| 3495 AllocationResult allocation = |
| 3496 AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE); |
| 3497 if (!allocation.To(&clone)) return allocation; |
| 3498 } |
| 3499 SLOW_DCHECK(InNewSpace(clone)); |
| 3500 // Since we know the clone is allocated in new space, we can copy |
| 3501 // the contents without worrying about updating the write barrier. |
| 3502 CopyBlock(clone->address(), source->address(), object_size); |
| 3503 |
| 3504 if (site != NULL) { |
| 3505 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
| 3506 reinterpret_cast<Address>(clone) + object_size); |
| 3507 InitializeAllocationMemento(alloc_memento, site); |
| 3508 } |
3458 } | 3509 } |
3459 | 3510 |
3460 SLOW_DCHECK(JSObject::cast(clone)->GetElementsKind() == | 3511 SLOW_DCHECK(JSObject::cast(clone)->GetElementsKind() == |
3461 source->GetElementsKind()); | 3512 source->GetElementsKind()); |
3462 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); | 3513 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); |
3463 FixedArray* properties = FixedArray::cast(source->properties()); | 3514 FixedArray* properties = FixedArray::cast(source->properties()); |
3464 // Update elements if necessary. | 3515 // Update elements if necessary. |
3465 if (elements->length() > 0) { | 3516 if (elements->length() > 0) { |
3466 FixedArrayBase* elem = nullptr; | 3517 FixedArrayBase* elem = nullptr; |
3467 { | 3518 { |
3468 AllocationResult allocation; | 3519 AllocationResult allocation; |
3469 if (elements->map() == fixed_cow_array_map()) { | 3520 if (elements->map() == fixed_cow_array_map()) { |
3470 allocation = FixedArray::cast(elements); | 3521 allocation = FixedArray::cast(elements); |
3471 } else if (source->HasFastDoubleElements()) { | 3522 } else if (source->HasFastDoubleElements()) { |
3472 allocation = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); | 3523 allocation = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); |
3473 } else { | 3524 } else { |
3474 allocation = CopyFixedArray(FixedArray::cast(elements)); | 3525 allocation = CopyFixedArray(FixedArray::cast(elements)); |
3475 } | 3526 } |
3476 if (!allocation.To(&elem)) return allocation; | 3527 if (!allocation.To(&elem)) return allocation; |
3477 } | 3528 } |
3478 JSObject::cast(clone)->set_elements(elem, SKIP_WRITE_BARRIER); | 3529 JSObject::cast(clone)->set_elements(elem, wb_mode); |
3479 } | 3530 } |
3480 // Update properties if necessary. | 3531 // Update properties if necessary. |
3481 if (properties->length() > 0) { | 3532 if (properties->length() > 0) { |
3482 FixedArray* prop = nullptr; | 3533 FixedArray* prop = nullptr; |
3483 { | 3534 { |
3484 AllocationResult allocation = CopyFixedArray(properties); | 3535 AllocationResult allocation = CopyFixedArray(properties); |
3485 if (!allocation.To(&prop)) return allocation; | 3536 if (!allocation.To(&prop)) return allocation; |
3486 } | 3537 } |
3487 JSObject::cast(clone)->set_properties(prop, SKIP_WRITE_BARRIER); | 3538 JSObject::cast(clone)->set_properties(prop, wb_mode); |
3488 } | 3539 } |
3489 // Return the new clone. | 3540 // Return the new clone. |
3490 return clone; | 3541 return clone; |
3491 } | 3542 } |
3492 | 3543 |
3493 | 3544 |
3494 static inline void WriteOneByteData(Vector<const char> vector, uint8_t* chars, | 3545 static inline void WriteOneByteData(Vector<const char> vector, uint8_t* chars, |
3495 int len) { | 3546 int len) { |
3496 // Only works for one byte strings. | 3547 // Only works for one byte strings. |
3497 DCHECK(vector.length() == len); | 3548 DCHECK(vector.length() == len); |
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3551 map = one_byte_internalized_string_map(); | 3602 map = one_byte_internalized_string_map(); |
3552 size = SeqOneByteString::SizeFor(chars); | 3603 size = SeqOneByteString::SizeFor(chars); |
3553 } else { | 3604 } else { |
3554 map = internalized_string_map(); | 3605 map = internalized_string_map(); |
3555 size = SeqTwoByteString::SizeFor(chars); | 3606 size = SeqTwoByteString::SizeFor(chars); |
3556 } | 3607 } |
3557 | 3608 |
3558 // Allocate string. | 3609 // Allocate string. |
3559 HeapObject* result = nullptr; | 3610 HeapObject* result = nullptr; |
3560 { | 3611 { |
3561 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 3612 AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE); |
3562 if (!allocation.To(&result)) return allocation; | 3613 if (!allocation.To(&result)) return allocation; |
3563 } | 3614 } |
3564 | 3615 |
3565 result->set_map_no_write_barrier(map); | 3616 result->set_map_no_write_barrier(map); |
3566 // Set length and hash fields of the allocated string. | 3617 // Set length and hash fields of the allocated string. |
3567 String* answer = String::cast(result); | 3618 String* answer = String::cast(result); |
3568 answer->set_length(chars); | 3619 answer->set_length(chars); |
3569 answer->set_hash_field(hash_field); | 3620 answer->set_hash_field(hash_field); |
3570 | 3621 |
3571 DCHECK_EQ(size, answer->Size()); | 3622 DCHECK_EQ(size, answer->Size()); |
(...skipping 21 matching lines...) Expand all Loading... |
3593 AllocationResult Heap::AllocateRawOneByteString(int length, | 3644 AllocationResult Heap::AllocateRawOneByteString(int length, |
3594 PretenureFlag pretenure) { | 3645 PretenureFlag pretenure) { |
3595 DCHECK_LE(0, length); | 3646 DCHECK_LE(0, length); |
3596 DCHECK_GE(String::kMaxLength, length); | 3647 DCHECK_GE(String::kMaxLength, length); |
3597 int size = SeqOneByteString::SizeFor(length); | 3648 int size = SeqOneByteString::SizeFor(length); |
3598 DCHECK(size <= SeqOneByteString::kMaxSize); | 3649 DCHECK(size <= SeqOneByteString::kMaxSize); |
3599 AllocationSpace space = SelectSpace(pretenure); | 3650 AllocationSpace space = SelectSpace(pretenure); |
3600 | 3651 |
3601 HeapObject* result = nullptr; | 3652 HeapObject* result = nullptr; |
3602 { | 3653 { |
3603 AllocationResult allocation = AllocateRaw(size, space); | 3654 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); |
3604 if (!allocation.To(&result)) return allocation; | 3655 if (!allocation.To(&result)) return allocation; |
3605 } | 3656 } |
3606 | 3657 |
3607 // Partially initialize the object. | 3658 // Partially initialize the object. |
3608 result->set_map_no_write_barrier(one_byte_string_map()); | 3659 result->set_map_no_write_barrier(one_byte_string_map()); |
3609 String::cast(result)->set_length(length); | 3660 String::cast(result)->set_length(length); |
3610 String::cast(result)->set_hash_field(String::kEmptyHashField); | 3661 String::cast(result)->set_hash_field(String::kEmptyHashField); |
3611 DCHECK_EQ(size, HeapObject::cast(result)->Size()); | 3662 DCHECK_EQ(size, HeapObject::cast(result)->Size()); |
3612 | 3663 |
3613 return result; | 3664 return result; |
3614 } | 3665 } |
3615 | 3666 |
3616 | 3667 |
3617 AllocationResult Heap::AllocateRawTwoByteString(int length, | 3668 AllocationResult Heap::AllocateRawTwoByteString(int length, |
3618 PretenureFlag pretenure) { | 3669 PretenureFlag pretenure) { |
3619 DCHECK_LE(0, length); | 3670 DCHECK_LE(0, length); |
3620 DCHECK_GE(String::kMaxLength, length); | 3671 DCHECK_GE(String::kMaxLength, length); |
3621 int size = SeqTwoByteString::SizeFor(length); | 3672 int size = SeqTwoByteString::SizeFor(length); |
3622 DCHECK(size <= SeqTwoByteString::kMaxSize); | 3673 DCHECK(size <= SeqTwoByteString::kMaxSize); |
3623 AllocationSpace space = SelectSpace(pretenure); | 3674 AllocationSpace space = SelectSpace(pretenure); |
3624 | 3675 |
3625 HeapObject* result = nullptr; | 3676 HeapObject* result = nullptr; |
3626 { | 3677 { |
3627 AllocationResult allocation = AllocateRaw(size, space); | 3678 AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE); |
3628 if (!allocation.To(&result)) return allocation; | 3679 if (!allocation.To(&result)) return allocation; |
3629 } | 3680 } |
3630 | 3681 |
3631 // Partially initialize the object. | 3682 // Partially initialize the object. |
3632 result->set_map_no_write_barrier(string_map()); | 3683 result->set_map_no_write_barrier(string_map()); |
3633 String::cast(result)->set_length(length); | 3684 String::cast(result)->set_length(length); |
3634 String::cast(result)->set_hash_field(String::kEmptyHashField); | 3685 String::cast(result)->set_hash_field(String::kEmptyHashField); |
3635 DCHECK_EQ(size, HeapObject::cast(result)->Size()); | 3686 DCHECK_EQ(size, HeapObject::cast(result)->Size()); |
3636 return result; | 3687 return result; |
3637 } | 3688 } |
3638 | 3689 |
3639 | 3690 |
3640 AllocationResult Heap::AllocateEmptyFixedArray() { | 3691 AllocationResult Heap::AllocateEmptyFixedArray() { |
3641 int size = FixedArray::SizeFor(0); | 3692 int size = FixedArray::SizeFor(0); |
3642 HeapObject* result = nullptr; | 3693 HeapObject* result = nullptr; |
3643 { | 3694 { |
3644 AllocationResult allocation = AllocateRaw(size, OLD_SPACE); | 3695 AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE); |
3645 if (!allocation.To(&result)) return allocation; | 3696 if (!allocation.To(&result)) return allocation; |
3646 } | 3697 } |
3647 // Initialize the object. | 3698 // Initialize the object. |
3648 result->set_map_no_write_barrier(fixed_array_map()); | 3699 result->set_map_no_write_barrier(fixed_array_map()); |
3649 FixedArray::cast(result)->set_length(0); | 3700 FixedArray::cast(result)->set_length(0); |
3650 return result; | 3701 return result; |
3651 } | 3702 } |
3652 | 3703 |
3653 | 3704 |
3654 AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { | 3705 AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { |
(...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3750 | 3801 |
3751 | 3802 |
3752 AllocationResult Heap::AllocateRawFixedArray(int length, | 3803 AllocationResult Heap::AllocateRawFixedArray(int length, |
3753 PretenureFlag pretenure) { | 3804 PretenureFlag pretenure) { |
3754 if (length < 0 || length > FixedArray::kMaxLength) { | 3805 if (length < 0 || length > FixedArray::kMaxLength) { |
3755 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); | 3806 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); |
3756 } | 3807 } |
3757 int size = FixedArray::SizeFor(length); | 3808 int size = FixedArray::SizeFor(length); |
3758 AllocationSpace space = SelectSpace(pretenure); | 3809 AllocationSpace space = SelectSpace(pretenure); |
3759 | 3810 |
3760 return AllocateRaw(size, space); | 3811 return AllocateRaw(size, space, OLD_SPACE); |
3761 } | 3812 } |
3762 | 3813 |
3763 | 3814 |
3764 AllocationResult Heap::AllocateFixedArrayWithFiller(int length, | 3815 AllocationResult Heap::AllocateFixedArrayWithFiller(int length, |
3765 PretenureFlag pretenure, | 3816 PretenureFlag pretenure, |
3766 Object* filler) { | 3817 Object* filler) { |
3767 DCHECK(length >= 0); | 3818 DCHECK(length >= 0); |
3768 DCHECK(empty_fixed_array()->IsFixedArray()); | 3819 DCHECK(empty_fixed_array()->IsFixedArray()); |
3769 if (length == 0) return empty_fixed_array(); | 3820 if (length == 0) return empty_fixed_array(); |
3770 | 3821 |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3821 PretenureFlag pretenure) { | 3872 PretenureFlag pretenure) { |
3822 if (length < 0 || length > FixedDoubleArray::kMaxLength) { | 3873 if (length < 0 || length > FixedDoubleArray::kMaxLength) { |
3823 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", | 3874 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", |
3824 kDoubleAligned); | 3875 kDoubleAligned); |
3825 } | 3876 } |
3826 int size = FixedDoubleArray::SizeFor(length); | 3877 int size = FixedDoubleArray::SizeFor(length); |
3827 AllocationSpace space = SelectSpace(pretenure); | 3878 AllocationSpace space = SelectSpace(pretenure); |
3828 | 3879 |
3829 HeapObject* object = nullptr; | 3880 HeapObject* object = nullptr; |
3830 { | 3881 { |
3831 AllocationResult allocation = AllocateRaw(size, space, kDoubleAligned); | 3882 AllocationResult allocation = |
| 3883 AllocateRaw(size, space, OLD_SPACE, kDoubleAligned); |
3832 if (!allocation.To(&object)) return allocation; | 3884 if (!allocation.To(&object)) return allocation; |
3833 } | 3885 } |
3834 | 3886 |
3835 return object; | 3887 return object; |
3836 } | 3888 } |
3837 | 3889 |
3838 | 3890 |
3839 AllocationResult Heap::AllocateSymbol() { | 3891 AllocationResult Heap::AllocateSymbol() { |
3840 // Statically ensure that it is safe to allocate symbols in paged spaces. | 3892 // Statically ensure that it is safe to allocate symbols in paged spaces. |
3841 STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize); | 3893 STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize); |
3842 | 3894 |
3843 HeapObject* result = nullptr; | 3895 HeapObject* result = nullptr; |
3844 AllocationResult allocation = AllocateRaw(Symbol::kSize, OLD_SPACE); | 3896 AllocationResult allocation = |
| 3897 AllocateRaw(Symbol::kSize, OLD_SPACE, OLD_SPACE); |
3845 if (!allocation.To(&result)) return allocation; | 3898 if (!allocation.To(&result)) return allocation; |
3846 | 3899 |
3847 result->set_map_no_write_barrier(symbol_map()); | 3900 result->set_map_no_write_barrier(symbol_map()); |
3848 | 3901 |
3849 // Generate a random hash value. | 3902 // Generate a random hash value. |
3850 int hash; | 3903 int hash; |
3851 int attempts = 0; | 3904 int attempts = 0; |
3852 do { | 3905 do { |
3853 hash = isolate()->random_number_generator()->NextInt() & Name::kHashBitMask; | 3906 hash = isolate()->random_number_generator()->NextInt() & Name::kHashBitMask; |
3854 attempts++; | 3907 attempts++; |
(...skipping 2225 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6080 } | 6133 } |
6081 | 6134 |
6082 | 6135 |
// static
// Thin forwarding wrapper: resolves |map| to its static visitor id via
// StaticVisitorBase. Exists so callers need not depend on the visitor
// headers directly.
int Heap::GetStaticVisitorIdForMap(Map* map) {
  return StaticVisitorBase::GetVisitorId(map);
}
6087 | 6140 |
6088 } // namespace internal | 6141 } // namespace internal |
6089 } // namespace v8 | 6142 } // namespace v8 |
OLD | NEW |