Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 1374163002: [heap] Reland Remove retry space from AllocateRaw. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 2 months ago
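
Summary of the interface change, for reviewers skimming the diff: this CL removes the retry-space argument from Heap::AllocateRaw, so call sites shrink from AllocateRaw(size, space, retry_space[, alignment]) to AllocateRaw(size, space[, alignment]). A minimal before/after sketch, inferred from the call sites below (the new declaration itself lives in src/heap/heap.h and src/heap/heap-inl.h, which are not shown on this page):

    // Before: every caller spelled out where to retry on failure.
    AllocationResult allocation =
        AllocateRaw(size, NEW_SPACE, OLD_SPACE, kDoubleUnaligned);

    // After: callers name only the preferred space; the retry space is
    // derived inside AllocateRaw.
    AllocationResult allocation = AllocateRaw(size, NEW_SPACE, kDoubleUnaligned);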
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/heap.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/bits.h"
 #include "src/base/once.h"
(...skipping 1970 matching lines...)
           static_cast<intptr_t>(
               static_cast<double>(old_generation_allocation_limit_) *
               (tracer()->AverageSurvivalRatio() / 100)));
   }
 }


 AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
                                           int instance_size) {
   Object* result = nullptr;
-  AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
+  AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE);
   if (!allocation.To(&result)) return allocation;

   // Map::cast cannot be used due to uninitialized map field.
   reinterpret_cast<Map*>(result)->set_map(
       reinterpret_cast<Map*>(root(kMetaMapRootIndex)));
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
   // Initialize to only containing tagged fields.
   reinterpret_cast<Map*>(result)->set_visitor_id(
       StaticVisitorBase::GetVisitorId(instance_type, instance_size, false));
(...skipping 13 matching lines...)
   reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3);
   reinterpret_cast<Map*>(result)->set_weak_cell_cache(Smi::FromInt(0));
   return result;
 }


 AllocationResult Heap::AllocateMap(InstanceType instance_type,
                                    int instance_size,
                                    ElementsKind elements_kind) {
   HeapObject* result = nullptr;
-  AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
+  AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE);
   if (!allocation.To(&result)) return allocation;

   result->set_map_no_write_barrier(meta_map());
   Map* map = Map::cast(result);
   map->set_instance_type(instance_type);
   map->set_prototype(null_value(), SKIP_WRITE_BARRIER);
   map->set_constructor_or_backpointer(null_value(), SKIP_WRITE_BARRIER);
   map->set_instance_size(instance_size);
   map->clear_unused();
   map->set_inobject_properties_or_constructor_function_index(0);
(...skipping 20 matching lines...)

   return map;
 }


 AllocationResult Heap::AllocateFillerObject(int size, bool double_align,
                                             AllocationSpace space) {
   HeapObject* obj = nullptr;
   {
     AllocationAlignment align = double_align ? kDoubleAligned : kWordAligned;
-    AllocationResult allocation = AllocateRaw(size, space, space, align);
+    AllocationResult allocation = AllocateRaw(size, space, align);
     if (!allocation.To(&obj)) return allocation;
   }
 #ifdef DEBUG
   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
   DCHECK(chunk->owner()->identity() == space);
 #endif
   CreateFillerObjectAt(obj->address(), size);
   return obj;
 }

(...skipping 292 matching lines...)
                                           PretenureFlag pretenure) {
   // Statically ensure that it is safe to allocate heap numbers in paged
   // spaces.
   int size = HeapNumber::kSize;
   STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize);

   AllocationSpace space = SelectSpace(pretenure);

   HeapObject* result = nullptr;
   {
-    AllocationResult allocation =
-        AllocateRaw(size, space, OLD_SPACE, kDoubleUnaligned);
+    AllocationResult allocation = AllocateRaw(size, space, kDoubleUnaligned);
     if (!allocation.To(&result)) return allocation;
   }

   Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map();
   HeapObject::cast(result)->set_map_no_write_barrier(map);
   HeapNumber::cast(result)->set_value(value);
   return result;
 }

 #define SIMD_ALLOCATE_DEFINITION(TYPE, Type, type, lane_count, lane_type) \
   AllocationResult Heap::Allocate##Type(lane_type lanes[lane_count],      \
                                         PretenureFlag pretenure) {        \
     int size = Type::kSize;                                               \
     STATIC_ASSERT(Type::kSize <= Page::kMaxRegularHeapObjectSize);        \
                                                                           \
     AllocationSpace space = SelectSpace(pretenure);                       \
                                                                           \
     HeapObject* result = nullptr;                                         \
     {                                                                     \
       AllocationResult allocation =                                       \
-          AllocateRaw(size, space, OLD_SPACE, kSimd128Unaligned);         \
+          AllocateRaw(size, space, kSimd128Unaligned);                    \
       if (!allocation.To(&result)) return allocation;                     \
     }                                                                     \
                                                                           \
     result->set_map_no_write_barrier(type##_map());                       \
     Type* instance = Type::cast(result);                                  \
     for (int i = 0; i < lane_count; i++) {                                \
       instance->set_lane(i, lanes[i]);                                    \
     }                                                                     \
     return result;                                                        \
   }
 SIMD128_TYPES(SIMD_ALLOCATE_DEFINITION)
 #undef SIMD_ALLOCATE_DEFINITION


 AllocationResult Heap::AllocateCell(Object* value) {
   int size = Cell::kSize;
   STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize);

   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(cell_map());
   Cell::cast(result)->set_value(value);
   return result;
 }


 AllocationResult Heap::AllocatePropertyCell() {
   int size = PropertyCell::kSize;
   STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize);

   HeapObject* result = nullptr;
-  AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+  AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
   if (!allocation.To(&result)) return allocation;

   result->set_map_no_write_barrier(global_property_cell_map());
   PropertyCell* cell = PropertyCell::cast(result);
   cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
                            SKIP_WRITE_BARRIER);
   cell->set_property_details(PropertyDetails(Smi::FromInt(0)));
   cell->set_value(the_hole_value());
   return result;
 }


 AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
   int size = WeakCell::kSize;
   STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(weak_cell_map());
   WeakCell::cast(result)->initialize(value);
   WeakCell::cast(result)->clear_next(this);
   return result;
 }


 void Heap::CreateApiObjects() {
(...skipping 464 matching lines...)


 AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
   if (length < 0 || length > ByteArray::kMaxLength) {
     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
   }
   int size = ByteArray::SizeFor(length);
   AllocationSpace space = SelectSpace(pretenure);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space);
     if (!allocation.To(&result)) return allocation;
   }

   result->set_map_no_write_barrier(byte_array_map());
   ByteArray::cast(result)->set_length(length);
   return result;
 }


 AllocationResult Heap::AllocateBytecodeArray(int length,
                                              const byte* const raw_bytecodes,
                                              int frame_size,
                                              int parameter_count,
                                              FixedArray* constant_pool) {
   if (length < 0 || length > BytecodeArray::kMaxLength) {
     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
   }
   // Bytecode array is pretenured, so constant pool array should be too.
   DCHECK(!InNewSpace(constant_pool));

   int size = BytecodeArray::SizeFor(length);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }

   result->set_map_no_write_barrier(bytecode_array_map());
   BytecodeArray* instance = BytecodeArray::cast(result);
   instance->set_length(length);
   instance->set_frame_size(frame_size);
   instance->set_parameter_count(parameter_count);
   instance->set_constant_pool(constant_pool);
   CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length);
(...skipping 166 matching lines...)
 }


 AllocationResult Heap::AllocateFixedTypedArrayWithExternalPointer(
     int length, ExternalArrayType array_type, void* external_pointer,
     PretenureFlag pretenure) {
   int size = FixedTypedArrayBase::kHeaderSize;
   AllocationSpace space = SelectSpace(pretenure);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space);
     if (!allocation.To(&result)) return allocation;
   }

   result->set_map_no_write_barrier(MapForFixedTypedArray(array_type));
   FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(result);
   elements->set_base_pointer(Smi::FromInt(0), SKIP_WRITE_BARRIER);
   elements->set_external_pointer(external_pointer, SKIP_WRITE_BARRIER);
   elements->set_length(length);
   return elements;
 }
(...skipping 24 matching lines...)
                                              PretenureFlag pretenure) {
   int element_size;
   ElementsKind elements_kind;
   ForFixedTypedArray(array_type, &element_size, &elements_kind);
   int size = OBJECT_POINTER_ALIGN(length * element_size +
                                   FixedTypedArrayBase::kDataOffset);
   AllocationSpace space = SelectSpace(pretenure);

   HeapObject* object = nullptr;
   AllocationResult allocation = AllocateRaw(
-      size, space, OLD_SPACE,
+      size, space,
       array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned);
   if (!allocation.To(&object)) return allocation;

   object->set_map_no_write_barrier(MapForFixedTypedArray(array_type));
   FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object);
   elements->set_base_pointer(elements, SKIP_WRITE_BARRIER);
   elements->set_external_pointer(
       ExternalReference::fixed_typed_array_base_data_offset().address(),
       SKIP_WRITE_BARRIER);
   elements->set_length(length);
   if (initialize) memset(elements->DataPtr(), 0, elements->DataSize());
   return elements;
 }


 AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
   DCHECK(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment));
-  AllocationResult allocation =
-      AllocateRaw(object_size, CODE_SPACE, CODE_SPACE);
+  AllocationResult allocation = AllocateRaw(object_size, CODE_SPACE);

   HeapObject* result = nullptr;
   if (!allocation.To(&result)) return allocation;

   if (immovable) {
     Address address = result->address();
     // Code objects which should stay at a fixed address are allocated either
     // in the first page of code space (objects on the first page of each space
     // are never moved) or in large object space.
     if (!code_space_->FirstPage()->Contains(address) &&
(...skipping 18 matching lines...)
   return code;
 }


 AllocationResult Heap::CopyCode(Code* code) {
   AllocationResult allocation;

   HeapObject* result = nullptr;
   // Allocate an object the same size as the code object.
   int obj_size = code->Size();
-  allocation = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE);
+  allocation = AllocateRaw(obj_size, CODE_SPACE);
   if (!allocation.To(&result)) return allocation;

   // Copy code object.
   Address old_addr = code->address();
   Address new_addr = result->address();
   CopyBlock(new_addr, old_addr, obj_size);
   Code* new_code = Code::cast(result);

   // Relocate the copy.
   DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment));
(...skipping 18 matching lines...)
   int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);

   int new_obj_size = Code::SizeFor(new_body_size);

   Address old_addr = code->address();

   size_t relocation_offset =
       static_cast<size_t>(code->instruction_end() - old_addr);

   HeapObject* result = nullptr;
-  AllocationResult allocation =
-      AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE);
+  AllocationResult allocation = AllocateRaw(new_obj_size, CODE_SPACE);
   if (!allocation.To(&result)) return allocation;

   // Copy code object.
   Address new_addr = result->address();

   // Copy header and instructions.
   CopyBytes(new_addr, old_addr, relocation_offset);

   Code* new_code = Code::cast(result);
   new_code->set_relocation_info(reloc_info_array);
(...skipping 25 matching lines...)
   if (FLAG_allocation_site_pretenuring) {
     allocation_site->IncrementMementoCreateCount();
   }
 }


 AllocationResult Heap::Allocate(Map* map, AllocationSpace space,
                                 AllocationSite* allocation_site) {
   DCHECK(gc_state_ == NOT_IN_GC);
   DCHECK(map->instance_type() != MAP_TYPE);
-  // If allocation failures are disallowed, we may allocate in a different
-  // space when new space is full and the object is not a large object.
-  AllocationSpace retry_space = (space != NEW_SPACE) ? space : OLD_SPACE;
   int size = map->instance_size();
   if (allocation_site != NULL) {
     size += AllocationMemento::kSize;
   }
   HeapObject* result = nullptr;
-  AllocationResult allocation = AllocateRaw(size, space, retry_space);
+  AllocationResult allocation = AllocateRaw(size, space);
   if (!allocation.To(&result)) return allocation;
   // No need for write barrier since object is white and map is in old space.
   result->set_map_no_write_barrier(map);
   if (allocation_site != NULL) {
     AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
         reinterpret_cast<Address>(result) + map->instance_size());
     InitializeAllocationMemento(alloc_memento, allocation_site);
   }
   return result;
 }
(...skipping 81 matching lines...)
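
The deleted lines above held the last caller-side statement of the retry policy: retry_space = (space != NEW_SPACE) ? space : OLD_SPACE. A self-contained sketch of that selection rule, using hypothetical stand-in types rather than V8's real ones (the actual post-CL logic lives inside AllocateRaw in src/heap/heap-inl.h, which is not shown on this page):

    #include <cassert>

    // Hypothetical stand-ins for V8's allocation spaces.
    enum AllocationSpace { NEW_SPACE, OLD_SPACE, MAP_SPACE, CODE_SPACE };

    // The rule the deleted code implemented: a failed new-space allocation
    // retries in old space; paged spaces retry in place.
    AllocationSpace SelectRetrySpace(AllocationSpace space) {
      return (space != NEW_SPACE) ? space : OLD_SPACE;
    }

    int main() {
      assert(SelectRetrySpace(NEW_SPACE) == OLD_SPACE);
      assert(SelectRetrySpace(MAP_SPACE) == MAP_SPACE);
      assert(SelectRetrySpace(CODE_SPACE) == CODE_SPACE);
      return 0;
    }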
   // We can only clone normal objects or arrays. Copying anything else
   // will break invariants.
   CHECK(map->instance_type() == JS_OBJECT_TYPE ||
         map->instance_type() == JS_ARRAY_TYPE);

   int object_size = map->instance_size();
   HeapObject* clone = nullptr;

   DCHECK(site == NULL || AllocationSite::CanTrack(map->instance_type()));

-  WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
-
-  // If we're forced to always allocate, we use the general allocation
-  // functions which may leave us with an object in old space.
-  if (always_allocate()) {
-    {
-      AllocationResult allocation =
-          AllocateRaw(object_size, NEW_SPACE, OLD_SPACE);
-      if (!allocation.To(&clone)) return allocation;
-    }
-    Address clone_address = clone->address();
-    CopyBlock(clone_address, source->address(), object_size);
-
-    // Update write barrier for all tagged fields that lie beyond the header.
-    const int start_offset = JSObject::kHeaderSize;
-    const int end_offset = object_size;
-
-#if V8_DOUBLE_FIELDS_UNBOXING
-    LayoutDescriptorHelper helper(map);
-    bool has_only_tagged_fields = helper.all_fields_tagged();
-
-    if (!has_only_tagged_fields) {
-      for (int offset = start_offset; offset < end_offset;) {
-        int end_of_region_offset;
-        if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) {
-          RecordWrites(clone_address, offset,
-                       (end_of_region_offset - offset) / kPointerSize);
-        }
-        offset = end_of_region_offset;
-      }
-    } else {
-#endif
-      // Object has only tagged fields.
-      RecordWrites(clone_address, start_offset,
-                   (end_offset - start_offset) / kPointerSize);
-#if V8_DOUBLE_FIELDS_UNBOXING
-    }
-#endif
-
-  } else {
-    wb_mode = SKIP_WRITE_BARRIER;
-
-    {
-      int adjusted_object_size =
-          site != NULL ? object_size + AllocationMemento::kSize : object_size;
-      AllocationResult allocation =
-          AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE);
-      if (!allocation.To(&clone)) return allocation;
-    }
-    SLOW_DCHECK(InNewSpace(clone));
-    // Since we know the clone is allocated in new space, we can copy
-    // the contents without worrying about updating the write barrier.
-    CopyBlock(clone->address(), source->address(), object_size);
-
-    if (site != NULL) {
-      AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
-          reinterpret_cast<Address>(clone) + object_size);
-      InitializeAllocationMemento(alloc_memento, site);
-    }
-  }
+  int adjusted_object_size =
+      site != NULL ? object_size + AllocationMemento::kSize : object_size;
+  AllocationResult allocation = AllocateRaw(adjusted_object_size, NEW_SPACE);
+  if (!allocation.To(&clone)) return allocation;
+
+  SLOW_DCHECK(InNewSpace(clone));
+  // Since we know the clone is allocated in new space, we can copy
+  // the contents without worrying about updating the write barrier.
+  CopyBlock(clone->address(), source->address(), object_size);
+
+  if (site != NULL) {
+    AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
+        reinterpret_cast<Address>(clone) + object_size);
+    InitializeAllocationMemento(alloc_memento, site);
+  }

   SLOW_DCHECK(JSObject::cast(clone)->GetElementsKind() ==
               source->GetElementsKind());
   FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
   FixedArray* properties = FixedArray::cast(source->properties());
   // Update elements if necessary.
   if (elements->length() > 0) {
     FixedArrayBase* elem = nullptr;
     {
       AllocationResult allocation;
       if (elements->map() == fixed_cow_array_map()) {
         allocation = FixedArray::cast(elements);
       } else if (source->HasFastDoubleElements()) {
         allocation = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
       } else {
         allocation = CopyFixedArray(FixedArray::cast(elements));
       }
       if (!allocation.To(&elem)) return allocation;
     }
-    JSObject::cast(clone)->set_elements(elem, wb_mode);
+    JSObject::cast(clone)->set_elements(elem, SKIP_WRITE_BARRIER);
   }
   // Update properties if necessary.
   if (properties->length() > 0) {
     FixedArray* prop = nullptr;
     {
       AllocationResult allocation = CopyFixedArray(properties);
       if (!allocation.To(&prop)) return allocation;
     }
-    JSObject::cast(clone)->set_properties(prop, wb_mode);
+    JSObject::cast(clone)->set_properties(prop, SKIP_WRITE_BARRIER);
   }
   // Return the new clone.
   return clone;
 }


 static inline void WriteOneByteData(Vector<const char> vector, uint8_t* chars,
                                     int len) {
   // Only works for one byte strings.
   DCHECK(vector.length() == len);
(...skipping 53 matching lines...)
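
Context for the write-barrier changes in the hunk above: the old code needed wb_mode = UPDATE_WRITE_BARRIER (and the RecordWrites calls) only because always_allocate() could place the clone in old space. With the clone now guaranteed to live in new space, no remembered old-to-new pointer can originate from it, so SKIP_WRITE_BARRIER is safe for both elements and properties. A self-contained sketch of that generational invariant, with hypothetical names (not V8's actual barrier code):

    #include <cassert>

    // Hypothetical model: a generational write barrier only has to remember
    // pointers from old-space objects to new-space objects, because the
    // scavenger scans new-space hosts in full anyway.
    enum Space { kNewSpace, kOldSpace };

    bool NeedsWriteBarrier(Space host, Space value) {
      return host == kOldSpace && value == kNewSpace;
    }

    int main() {
      // The clone is in new space, so its stores never need recording.
      assert(!NeedsWriteBarrier(kNewSpace, kNewSpace));
      assert(!NeedsWriteBarrier(kNewSpace, kOldSpace));
      // The deleted always_allocate() path had to handle this case.
      assert(NeedsWriteBarrier(kOldSpace, kNewSpace));
      return 0;
    }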
     map = one_byte_internalized_string_map();
     size = SeqOneByteString::SizeFor(chars);
   } else {
     map = internalized_string_map();
     size = SeqTwoByteString::SizeFor(chars);
   }

   // Allocate string.
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }

   result->set_map_no_write_barrier(map);
   // Set length and hash fields of the allocated string.
   String* answer = String::cast(result);
   answer->set_length(chars);
   answer->set_hash_field(hash_field);

   DCHECK_EQ(size, answer->Size());
(...skipping 21 matching lines...)
 AllocationResult Heap::AllocateRawOneByteString(int length,
                                                 PretenureFlag pretenure) {
   DCHECK_LE(0, length);
   DCHECK_GE(String::kMaxLength, length);
   int size = SeqOneByteString::SizeFor(length);
   DCHECK(size <= SeqOneByteString::kMaxSize);
   AllocationSpace space = SelectSpace(pretenure);

   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space);
     if (!allocation.To(&result)) return allocation;
   }

   // Partially initialize the object.
   result->set_map_no_write_barrier(one_byte_string_map());
   String::cast(result)->set_length(length);
   String::cast(result)->set_hash_field(String::kEmptyHashField);
   DCHECK_EQ(size, HeapObject::cast(result)->Size());

   return result;
 }


 AllocationResult Heap::AllocateRawTwoByteString(int length,
                                                 PretenureFlag pretenure) {
   DCHECK_LE(0, length);
   DCHECK_GE(String::kMaxLength, length);
   int size = SeqTwoByteString::SizeFor(length);
   DCHECK(size <= SeqTwoByteString::kMaxSize);
   AllocationSpace space = SelectSpace(pretenure);

   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space);
     if (!allocation.To(&result)) return allocation;
   }

   // Partially initialize the object.
   result->set_map_no_write_barrier(string_map());
   String::cast(result)->set_length(length);
   String::cast(result)->set_hash_field(String::kEmptyHashField);
   DCHECK_EQ(size, HeapObject::cast(result)->Size());
   return result;
 }


 AllocationResult Heap::AllocateEmptyFixedArray() {
   int size = FixedArray::SizeFor(0);
   HeapObject* result = nullptr;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   // Initialize the object.
   result->set_map_no_write_barrier(fixed_array_map());
   FixedArray::cast(result)->set_length(0);
   return result;
 }


 AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) {
(...skipping 95 matching lines...)


 AllocationResult Heap::AllocateRawFixedArray(int length,
                                              PretenureFlag pretenure) {
   if (length < 0 || length > FixedArray::kMaxLength) {
     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
   }
   int size = FixedArray::SizeFor(length);
   AllocationSpace space = SelectSpace(pretenure);

-  return AllocateRaw(size, space, OLD_SPACE);
+  return AllocateRaw(size, space);
 }


 AllocationResult Heap::AllocateFixedArrayWithFiller(int length,
                                                     PretenureFlag pretenure,
                                                     Object* filler) {
   DCHECK(length >= 0);
   DCHECK(empty_fixed_array()->IsFixedArray());
   if (length == 0) return empty_fixed_array();

(...skipping 50 matching lines...)
                                                  PretenureFlag pretenure) {
   if (length < 0 || length > FixedDoubleArray::kMaxLength) {
     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length",
                                                 kDoubleAligned);
   }
   int size = FixedDoubleArray::SizeFor(length);
   AllocationSpace space = SelectSpace(pretenure);

   HeapObject* object = nullptr;
   {
-    AllocationResult allocation =
-        AllocateRaw(size, space, OLD_SPACE, kDoubleAligned);
+    AllocationResult allocation = AllocateRaw(size, space, kDoubleAligned);
     if (!allocation.To(&object)) return allocation;
   }

   return object;
 }


 AllocationResult Heap::AllocateSymbol() {
   // Statically ensure that it is safe to allocate symbols in paged spaces.
   STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize);

   HeapObject* result = nullptr;
-  AllocationResult allocation =
-      AllocateRaw(Symbol::kSize, OLD_SPACE, OLD_SPACE);
+  AllocationResult allocation = AllocateRaw(Symbol::kSize, OLD_SPACE);
   if (!allocation.To(&result)) return allocation;

   result->set_map_no_write_barrier(symbol_map());

   // Generate a random hash value.
   int hash;
   int attempts = 0;
   do {
     hash = isolate()->random_number_generator()->NextInt() & Name::kHashBitMask;
     attempts++;
(...skipping 2225 matching lines...)
 }


 // static
 int Heap::GetStaticVisitorIdForMap(Map* map) {
   return StaticVisitorBase::GetVisitorId(map);
 }

 }  // namespace internal
 }  // namespace v8
