Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(68)

Side by Side Diff: src/heap.cc

Issue 259173003: Kiss goodbye to MaybeObject. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: rebase + addressed comments Created 6 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "v8.h" 5 #include "v8.h"
6 6
7 #include "accessors.h" 7 #include "accessors.h"
8 #include "api.h" 8 #include "api.h"
9 #include "bootstrapper.h" 9 #include "bootstrapper.h"
10 #include "codegen.h" 10 #include "codegen.h"
(...skipping 896 matching lines...) Expand 10 before | Expand all | Expand 10 after
907 907
908 void Heap::ReserveSpace(int *sizes, Address *locations_out) { 908 void Heap::ReserveSpace(int *sizes, Address *locations_out) {
909 bool gc_performed = true; 909 bool gc_performed = true;
910 int counter = 0; 910 int counter = 0;
911 static const int kThreshold = 20; 911 static const int kThreshold = 20;
912 while (gc_performed && counter++ < kThreshold) { 912 while (gc_performed && counter++ < kThreshold) {
913 gc_performed = false; 913 gc_performed = false;
914 ASSERT(NEW_SPACE == FIRST_PAGED_SPACE - 1); 914 ASSERT(NEW_SPACE == FIRST_PAGED_SPACE - 1);
915 for (int space = NEW_SPACE; space <= LAST_PAGED_SPACE; space++) { 915 for (int space = NEW_SPACE; space <= LAST_PAGED_SPACE; space++) {
916 if (sizes[space] != 0) { 916 if (sizes[space] != 0) {
917 MaybeObject* allocation; 917 AllocationResult allocation;
918 if (space == NEW_SPACE) { 918 if (space == NEW_SPACE) {
919 allocation = new_space()->AllocateRaw(sizes[space]); 919 allocation = new_space()->AllocateRaw(sizes[space]);
920 } else { 920 } else {
921 allocation = paged_space(space)->AllocateRaw(sizes[space]); 921 allocation = paged_space(space)->AllocateRaw(sizes[space]);
922 } 922 }
923 FreeListNode* node; 923 FreeListNode* node;
924 if (!allocation->To<FreeListNode>(&node)) { 924 if (!allocation.To(&node)) {
925 if (space == NEW_SPACE) { 925 if (space == NEW_SPACE) {
926 Heap::CollectGarbage(NEW_SPACE, 926 Heap::CollectGarbage(NEW_SPACE,
927 "failed to reserve space in the new space"); 927 "failed to reserve space in the new space");
928 } else { 928 } else {
929 AbortIncrementalMarkingAndCollectGarbage( 929 AbortIncrementalMarkingAndCollectGarbage(
930 this, 930 this,
931 static_cast<AllocationSpace>(space), 931 static_cast<AllocationSpace>(space),
932 "failed to reserve space in paged space"); 932 "failed to reserve space in paged space");
933 } 933 }
934 gc_performed = true; 934 gc_performed = true;
(...skipping 1103 matching lines...) Expand 10 before | Expand all | Expand 10 after
2038 SLOW_ASSERT(object->Size() == object_size); 2038 SLOW_ASSERT(object->Size() == object_size);
2039 2039
2040 int allocation_size = object_size; 2040 int allocation_size = object_size;
2041 if (alignment != kObjectAlignment) { 2041 if (alignment != kObjectAlignment) {
2042 ASSERT(alignment == kDoubleAlignment); 2042 ASSERT(alignment == kDoubleAlignment);
2043 allocation_size += kPointerSize; 2043 allocation_size += kPointerSize;
2044 } 2044 }
2045 2045
2046 Heap* heap = map->GetHeap(); 2046 Heap* heap = map->GetHeap();
2047 if (heap->ShouldBePromoted(object->address(), object_size)) { 2047 if (heap->ShouldBePromoted(object->address(), object_size)) {
2048 MaybeObject* maybe_result; 2048 AllocationResult allocation;
2049 2049
2050 if (object_contents == DATA_OBJECT) { 2050 if (object_contents == DATA_OBJECT) {
2051 ASSERT(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE)); 2051 ASSERT(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
2052 maybe_result = heap->old_data_space()->AllocateRaw(allocation_size); 2052 allocation = heap->old_data_space()->AllocateRaw(allocation_size);
2053 } else { 2053 } else {
2054 ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE)); 2054 ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
2055 maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size); 2055 allocation = heap->old_pointer_space()->AllocateRaw(allocation_size);
2056 } 2056 }
2057 2057
2058 Object* result = NULL; // Initialization to please compiler. 2058 HeapObject* target = NULL; // Initialization to please compiler.
2059 if (maybe_result->ToObject(&result)) { 2059 if (allocation.To(&target)) {
2060 HeapObject* target = HeapObject::cast(result);
2061
2062 if (alignment != kObjectAlignment) { 2060 if (alignment != kObjectAlignment) {
2063 target = EnsureDoubleAligned(heap, target, allocation_size); 2061 target = EnsureDoubleAligned(heap, target, allocation_size);
2064 } 2062 }
2065 2063
2066 // Order is important: slot might be inside of the target if target 2064 // Order is important: slot might be inside of the target if target
2067 // was allocated over a dead object and slot comes from the store 2065 // was allocated over a dead object and slot comes from the store
2068 // buffer. 2066 // buffer.
2069 *slot = target; 2067 *slot = target;
2070 MigrateObject(heap, object, target, object_size); 2068 MigrateObject(heap, object, target, object_size);
2071 2069
2072 if (object_contents == POINTER_OBJECT) { 2070 if (object_contents == POINTER_OBJECT) {
2073 if (map->instance_type() == JS_FUNCTION_TYPE) { 2071 if (map->instance_type() == JS_FUNCTION_TYPE) {
2074 heap->promotion_queue()->insert( 2072 heap->promotion_queue()->insert(
2075 target, JSFunction::kNonWeakFieldsEndOffset); 2073 target, JSFunction::kNonWeakFieldsEndOffset);
2076 } else { 2074 } else {
2077 heap->promotion_queue()->insert(target, object_size); 2075 heap->promotion_queue()->insert(target, object_size);
2078 } 2076 }
2079 } 2077 }
2080 2078
2081 heap->tracer()->increment_promoted_objects_size(object_size); 2079 heap->tracer()->increment_promoted_objects_size(object_size);
2082 return; 2080 return;
2083 } 2081 }
2084 } 2082 }
2085 ASSERT(heap->AllowedToBeMigrated(object, NEW_SPACE)); 2083 ASSERT(heap->AllowedToBeMigrated(object, NEW_SPACE));
2086 MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size); 2084 AllocationResult allocation =
2085 heap->new_space()->AllocateRaw(allocation_size);
2087 heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); 2086 heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
2088 Object* result = allocation->ToObjectUnchecked(); 2087 HeapObject* target = HeapObject::cast(allocation.ToObjectChecked());
2089 HeapObject* target = HeapObject::cast(result);
2090 2088
2091 if (alignment != kObjectAlignment) { 2089 if (alignment != kObjectAlignment) {
2092 target = EnsureDoubleAligned(heap, target, allocation_size); 2090 target = EnsureDoubleAligned(heap, target, allocation_size);
2093 } 2091 }
2094 2092
2095 // Order is important: slot might be inside of the target if target 2093 // Order is important: slot might be inside of the target if target
2096 // was allocated over a dead object and slot comes from the store 2094 // was allocated over a dead object and slot comes from the store
2097 // buffer. 2095 // buffer.
2098 *slot = target; 2096 *slot = target;
2099 MigrateObject(heap, object, target, object_size); 2097 MigrateObject(heap, object, target, object_size);
(...skipping 216 matching lines...) Expand 10 before | Expand all | Expand 10 after
2316 2314
2317 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { 2315 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
2318 SLOW_ASSERT(object->GetIsolate()->heap()->InFromSpace(object)); 2316 SLOW_ASSERT(object->GetIsolate()->heap()->InFromSpace(object));
2319 MapWord first_word = object->map_word(); 2317 MapWord first_word = object->map_word();
2320 SLOW_ASSERT(!first_word.IsForwardingAddress()); 2318 SLOW_ASSERT(!first_word.IsForwardingAddress());
2321 Map* map = first_word.ToMap(); 2319 Map* map = first_word.ToMap();
2322 map->GetHeap()->DoScavengeObject(map, p, object); 2320 map->GetHeap()->DoScavengeObject(map, p, object);
2323 } 2321 }
2324 2322
2325 2323
2326 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, 2324 AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
2327 int instance_size) { 2325 int instance_size) {
2328 Object* result; 2326 Object* result;
2329 MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); 2327 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
2330 if (!maybe_result->ToObject(&result)) return maybe_result; 2328 if (!allocation.To(&result)) return allocation;
2331 2329
2332 // Map::cast cannot be used due to uninitialized map field. 2330 // Map::cast cannot be used due to uninitialized map field.
2333 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map()); 2331 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
2334 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); 2332 reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
2335 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); 2333 reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
2336 reinterpret_cast<Map*>(result)->set_visitor_id( 2334 reinterpret_cast<Map*>(result)->set_visitor_id(
2337 StaticVisitorBase::GetVisitorId(instance_type, instance_size)); 2335 StaticVisitorBase::GetVisitorId(instance_type, instance_size));
2338 reinterpret_cast<Map*>(result)->set_inobject_properties(0); 2336 reinterpret_cast<Map*>(result)->set_inobject_properties(0);
2339 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0); 2337 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
2340 reinterpret_cast<Map*>(result)->set_unused_property_fields(0); 2338 reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
2341 reinterpret_cast<Map*>(result)->set_bit_field(0); 2339 reinterpret_cast<Map*>(result)->set_bit_field(0);
2342 reinterpret_cast<Map*>(result)->set_bit_field2(0); 2340 reinterpret_cast<Map*>(result)->set_bit_field2(0);
2343 int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) | 2341 int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
2344 Map::OwnsDescriptors::encode(true); 2342 Map::OwnsDescriptors::encode(true);
2345 reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3); 2343 reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3);
2346 return result; 2344 return result;
2347 } 2345 }
2348 2346
2349 2347
2350 MaybeObject* Heap::AllocateMap(InstanceType instance_type, 2348 AllocationResult Heap::AllocateMap(InstanceType instance_type,
2351 int instance_size, 2349 int instance_size,
2352 ElementsKind elements_kind) { 2350 ElementsKind elements_kind) {
2353 Object* result; 2351 HeapObject* result;
2354 MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); 2352 AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
2355 if (!maybe_result->To(&result)) return maybe_result; 2353 if (!allocation.To(&result)) return allocation;
2356 2354
2357 Map* map = reinterpret_cast<Map*>(result); 2355 result->set_map_no_write_barrier(meta_map());
2358 map->set_map_no_write_barrier(meta_map()); 2356 Map* map = Map::cast(result);
2359 map->set_instance_type(instance_type); 2357 map->set_instance_type(instance_type);
2360 map->set_visitor_id( 2358 map->set_visitor_id(
2361 StaticVisitorBase::GetVisitorId(instance_type, instance_size)); 2359 StaticVisitorBase::GetVisitorId(instance_type, instance_size));
2362 map->set_prototype(null_value(), SKIP_WRITE_BARRIER); 2360 map->set_prototype(null_value(), SKIP_WRITE_BARRIER);
2363 map->set_constructor(null_value(), SKIP_WRITE_BARRIER); 2361 map->set_constructor(null_value(), SKIP_WRITE_BARRIER);
2364 map->set_instance_size(instance_size); 2362 map->set_instance_size(instance_size);
2365 map->set_inobject_properties(0); 2363 map->set_inobject_properties(0);
2366 map->set_pre_allocated_property_fields(0); 2364 map->set_pre_allocated_property_fields(0);
2367 map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER); 2365 map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
2368 map->set_dependent_code(DependentCode::cast(empty_fixed_array()), 2366 map->set_dependent_code(DependentCode::cast(empty_fixed_array()),
2369 SKIP_WRITE_BARRIER); 2367 SKIP_WRITE_BARRIER);
2370 map->init_back_pointer(undefined_value()); 2368 map->init_back_pointer(undefined_value());
2371 map->set_unused_property_fields(0); 2369 map->set_unused_property_fields(0);
2372 map->set_instance_descriptors(empty_descriptor_array()); 2370 map->set_instance_descriptors(empty_descriptor_array());
2373 map->set_bit_field(0); 2371 map->set_bit_field(0);
2374 map->set_bit_field2(1 << Map::kIsExtensible); 2372 map->set_bit_field2(1 << Map::kIsExtensible);
2375 int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) | 2373 int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
2376 Map::OwnsDescriptors::encode(true); 2374 Map::OwnsDescriptors::encode(true);
2377 map->set_bit_field3(bit_field3); 2375 map->set_bit_field3(bit_field3);
2378 map->set_elements_kind(elements_kind); 2376 map->set_elements_kind(elements_kind);
2379 2377
2380 return map; 2378 return map;
2381 } 2379 }
2382 2380
2383 2381
2384 MaybeObject* Heap::AllocateFillerObject(int size, 2382 AllocationResult Heap::AllocateFillerObject(int size,
2385 bool double_align, 2383 bool double_align,
2386 AllocationSpace space) { 2384 AllocationSpace space) {
2387 HeapObject* allocation; 2385 HeapObject* obj;
2388 { MaybeObject* maybe_allocation = AllocateRaw(size, space, space); 2386 { AllocationResult allocation = AllocateRaw(size, space, space);
2389 if (!maybe_allocation->To(&allocation)) return maybe_allocation; 2387 if (!allocation.To(&obj)) return allocation;
2390 } 2388 }
2391 #ifdef DEBUG 2389 #ifdef DEBUG
2392 MemoryChunk* chunk = MemoryChunk::FromAddress(allocation->address()); 2390 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
2393 ASSERT(chunk->owner()->identity() == space); 2391 ASSERT(chunk->owner()->identity() == space);
2394 #endif 2392 #endif
2395 CreateFillerObjectAt(allocation->address(), size); 2393 CreateFillerObjectAt(obj->address(), size);
2396 return allocation; 2394 return obj;
2397 } 2395 }
2398 2396
2399 2397
2400 const Heap::StringTypeTable Heap::string_type_table[] = { 2398 const Heap::StringTypeTable Heap::string_type_table[] = {
2401 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \ 2399 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
2402 {type, size, k##camel_name##MapRootIndex}, 2400 {type, size, k##camel_name##MapRootIndex},
2403 STRING_TYPE_LIST(STRING_TYPE_ELEMENT) 2401 STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
2404 #undef STRING_TYPE_ELEMENT 2402 #undef STRING_TYPE_ELEMENT
2405 }; 2403 };
2406 2404
2407 2405
2408 const Heap::ConstantStringTable Heap::constant_string_table[] = { 2406 const Heap::ConstantStringTable Heap::constant_string_table[] = {
2409 #define CONSTANT_STRING_ELEMENT(name, contents) \ 2407 #define CONSTANT_STRING_ELEMENT(name, contents) \
2410 {contents, k##name##RootIndex}, 2408 {contents, k##name##RootIndex},
2411 INTERNALIZED_STRING_LIST(CONSTANT_STRING_ELEMENT) 2409 INTERNALIZED_STRING_LIST(CONSTANT_STRING_ELEMENT)
2412 #undef CONSTANT_STRING_ELEMENT 2410 #undef CONSTANT_STRING_ELEMENT
2413 }; 2411 };
2414 2412
2415 2413
2416 const Heap::StructTable Heap::struct_table[] = { 2414 const Heap::StructTable Heap::struct_table[] = {
2417 #define STRUCT_TABLE_ELEMENT(NAME, Name, name) \ 2415 #define STRUCT_TABLE_ELEMENT(NAME, Name, name) \
2418 { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex }, 2416 { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex },
2419 STRUCT_LIST(STRUCT_TABLE_ELEMENT) 2417 STRUCT_LIST(STRUCT_TABLE_ELEMENT)
2420 #undef STRUCT_TABLE_ELEMENT 2418 #undef STRUCT_TABLE_ELEMENT
2421 }; 2419 };
2422 2420
2423 2421
2424 bool Heap::CreateInitialMaps() { 2422 bool Heap::CreateInitialMaps() {
2425 Object* obj; 2423 HeapObject* obj;
2426 { MaybeObject* maybe_obj = AllocatePartialMap(MAP_TYPE, Map::kSize); 2424 { AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize);
2427 if (!maybe_obj->ToObject(&obj)) return false; 2425 if (!allocation.To(&obj)) return false;
2428 } 2426 }
2429 // Map::cast cannot be used due to uninitialized map field. 2427 // Map::cast cannot be used due to uninitialized map field.
2430 Map* new_meta_map = reinterpret_cast<Map*>(obj); 2428 Map* new_meta_map = reinterpret_cast<Map*>(obj);
2431 set_meta_map(new_meta_map); 2429 set_meta_map(new_meta_map);
2432 new_meta_map->set_map(new_meta_map); 2430 new_meta_map->set_map(new_meta_map);
2433 2431
2434 { // Partial map allocation 2432 { // Partial map allocation
2435 #define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \ 2433 #define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \
2436 { Map* map; \ 2434 { Map* map; \
2437 if (!AllocatePartialMap((instance_type), (size))->To(&map)) return false;\ 2435 if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \
2438 set_##field_name##_map(map); \ 2436 set_##field_name##_map(map); \
2439 } 2437 }
2440 2438
2441 ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array); 2439 ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array);
2442 ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined); 2440 ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined);
2443 ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null); 2441 ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null);
2444 ALLOCATE_PARTIAL_MAP(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel, 2442 ALLOCATE_PARTIAL_MAP(CONSTANT_POOL_ARRAY_TYPE, kVariableSizeSentinel,
2445 constant_pool_array); 2443 constant_pool_array);
2446 2444
2447 #undef ALLOCATE_PARTIAL_MAP 2445 #undef ALLOCATE_PARTIAL_MAP
2448 } 2446 }
2449 2447
2450 // Allocate the empty array. 2448 // Allocate the empty array.
2451 { MaybeObject* maybe_obj = AllocateEmptyFixedArray(); 2449 { AllocationResult allocation = AllocateEmptyFixedArray();
2452 if (!maybe_obj->ToObject(&obj)) return false; 2450 if (!allocation.To(&obj)) return false;
2453 } 2451 }
2454 set_empty_fixed_array(FixedArray::cast(obj)); 2452 set_empty_fixed_array(FixedArray::cast(obj));
2455 2453
2456 { MaybeObject* maybe_obj = Allocate(null_map(), OLD_POINTER_SPACE); 2454 { AllocationResult allocation = Allocate(null_map(), OLD_POINTER_SPACE);
2457 if (!maybe_obj->ToObject(&obj)) return false; 2455 if (!allocation.To(&obj)) return false;
2458 } 2456 }
2459 set_null_value(Oddball::cast(obj)); 2457 set_null_value(Oddball::cast(obj));
2460 Oddball::cast(obj)->set_kind(Oddball::kNull); 2458 Oddball::cast(obj)->set_kind(Oddball::kNull);
2461 2459
2462 { MaybeObject* maybe_obj = Allocate(undefined_map(), OLD_POINTER_SPACE); 2460 { AllocationResult allocation = Allocate(undefined_map(), OLD_POINTER_SPACE);
2463 if (!maybe_obj->ToObject(&obj)) return false; 2461 if (!allocation.To(&obj)) return false;
2464 } 2462 }
2465 set_undefined_value(Oddball::cast(obj)); 2463 set_undefined_value(Oddball::cast(obj));
2466 Oddball::cast(obj)->set_kind(Oddball::kUndefined); 2464 Oddball::cast(obj)->set_kind(Oddball::kUndefined);
2467 ASSERT(!InNewSpace(undefined_value())); 2465 ASSERT(!InNewSpace(undefined_value()));
2468 2466
2469 // Set preliminary exception sentinel value before actually initializing it. 2467 // Set preliminary exception sentinel value before actually initializing it.
2470 set_exception(null_value()); 2468 set_exception(null_value());
2471 2469
2472 // Allocate the empty descriptor array. 2470 // Allocate the empty descriptor array.
2473 { MaybeObject* maybe_obj = AllocateEmptyFixedArray(); 2471 { AllocationResult allocation = AllocateEmptyFixedArray();
2474 if (!maybe_obj->ToObject(&obj)) return false; 2472 if (!allocation.To(&obj)) return false;
2475 } 2473 }
2476 set_empty_descriptor_array(DescriptorArray::cast(obj)); 2474 set_empty_descriptor_array(DescriptorArray::cast(obj));
2477 2475
2478 // Allocate the constant pool array. 2476 // Allocate the constant pool array.
2479 { MaybeObject* maybe_obj = AllocateEmptyConstantPoolArray(); 2477 { AllocationResult allocation = AllocateEmptyConstantPoolArray();
2480 if (!maybe_obj->ToObject(&obj)) return false; 2478 if (!allocation.To(&obj)) return false;
2481 } 2479 }
2482 set_empty_constant_pool_array(ConstantPoolArray::cast(obj)); 2480 set_empty_constant_pool_array(ConstantPoolArray::cast(obj));
2483 2481
2484 // Fix the instance_descriptors for the existing maps. 2482 // Fix the instance_descriptors for the existing maps.
2485 meta_map()->set_code_cache(empty_fixed_array()); 2483 meta_map()->set_code_cache(empty_fixed_array());
2486 meta_map()->set_dependent_code(DependentCode::cast(empty_fixed_array())); 2484 meta_map()->set_dependent_code(DependentCode::cast(empty_fixed_array()));
2487 meta_map()->init_back_pointer(undefined_value()); 2485 meta_map()->init_back_pointer(undefined_value());
2488 meta_map()->set_instance_descriptors(empty_descriptor_array()); 2486 meta_map()->set_instance_descriptors(empty_descriptor_array());
2489 2487
2490 fixed_array_map()->set_code_cache(empty_fixed_array()); 2488 fixed_array_map()->set_code_cache(empty_fixed_array());
(...skipping 30 matching lines...) Expand all
2521 2519
2522 null_map()->set_prototype(null_value()); 2520 null_map()->set_prototype(null_value());
2523 null_map()->set_constructor(null_value()); 2521 null_map()->set_constructor(null_value());
2524 2522
2525 constant_pool_array_map()->set_prototype(null_value()); 2523 constant_pool_array_map()->set_prototype(null_value());
2526 constant_pool_array_map()->set_constructor(null_value()); 2524 constant_pool_array_map()->set_constructor(null_value());
2527 2525
2528 { // Map allocation 2526 { // Map allocation
2529 #define ALLOCATE_MAP(instance_type, size, field_name) \ 2527 #define ALLOCATE_MAP(instance_type, size, field_name) \
2530 { Map* map; \ 2528 { Map* map; \
2531 if (!AllocateMap((instance_type), size)->To(&map)) return false; \ 2529 if (!AllocateMap((instance_type), size).To(&map)) return false; \
2532 set_##field_name##_map(map); \ 2530 set_##field_name##_map(map); \
2533 } 2531 }
2534 2532
2535 #define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \ 2533 #define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \
2536 ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name) 2534 ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name)
2537 2535
2538 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, fixed_cow_array) 2536 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, fixed_cow_array)
2539 ASSERT(fixed_array_map() != fixed_cow_array_map()); 2537 ASSERT(fixed_array_map() != fixed_cow_array_map());
2540 2538
2541 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, scope_info) 2539 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, scope_info)
2542 ALLOCATE_MAP(HEAP_NUMBER_TYPE, HeapNumber::kSize, heap_number) 2540 ALLOCATE_MAP(HEAP_NUMBER_TYPE, HeapNumber::kSize, heap_number)
2543 ALLOCATE_MAP(SYMBOL_TYPE, Symbol::kSize, symbol) 2541 ALLOCATE_MAP(SYMBOL_TYPE, Symbol::kSize, symbol)
2544 ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign) 2542 ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign)
2545 2543
2546 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, the_hole); 2544 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, the_hole);
2547 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, boolean); 2545 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, boolean);
2548 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, uninitialized); 2546 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, uninitialized);
2549 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, arguments_marker); 2547 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, arguments_marker);
2550 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, no_interceptor_result_sentinel); 2548 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, no_interceptor_result_sentinel);
2551 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, exception); 2549 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, exception);
2552 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, termination_exception); 2550 ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, termination_exception);
2553 2551
2554 for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) { 2552 for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
2555 const StringTypeTable& entry = string_type_table[i]; 2553 const StringTypeTable& entry = string_type_table[i];
2556 { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size); 2554 { AllocationResult allocation = AllocateMap(entry.type, entry.size);
2557 if (!maybe_obj->ToObject(&obj)) return false; 2555 if (!allocation.To(&obj)) return false;
2558 } 2556 }
2559 // Mark cons string maps as unstable, because their objects can change 2557 // Mark cons string maps as unstable, because their objects can change
2560 // maps during GC. 2558 // maps during GC.
2561 Map* map = Map::cast(obj); 2559 Map* map = Map::cast(obj);
2562 if (StringShape(entry.type).IsCons()) map->mark_unstable(); 2560 if (StringShape(entry.type).IsCons()) map->mark_unstable();
2563 roots_[entry.index] = map; 2561 roots_[entry.index] = map;
2564 } 2562 }
2565 2563
2566 ALLOCATE_VARSIZE_MAP(STRING_TYPE, undetectable_string) 2564 ALLOCATE_VARSIZE_MAP(STRING_TYPE, undetectable_string)
2567 undetectable_string_map()->set_is_undetectable(); 2565 undetectable_string_map()->set_is_undetectable();
(...skipping 25 matching lines...) Expand all
2593 2591
2594 ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell) 2592 ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell)
2595 ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell) 2593 ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
2596 ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler) 2594 ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler)
2597 ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler) 2595 ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler)
2598 2596
2599 2597
2600 for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) { 2598 for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) {
2601 const StructTable& entry = struct_table[i]; 2599 const StructTable& entry = struct_table[i];
2602 Map* map; 2600 Map* map;
2603 if (!AllocateMap(entry.type, entry.size)->To(&map)) 2601 if (!AllocateMap(entry.type, entry.size).To(&map))
2604 return false; 2602 return false;
2605 roots_[entry.index] = map; 2603 roots_[entry.index] = map;
2606 } 2604 }
2607 2605
2608 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, hash_table) 2606 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, hash_table)
2609 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, ordered_hash_table) 2607 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, ordered_hash_table)
2610 2608
2611 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, function_context) 2609 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, function_context)
2612 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, catch_context) 2610 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, catch_context)
2613 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, with_context) 2611 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, with_context)
(...skipping 13 matching lines...) Expand all
2627 message_object) 2625 message_object)
2628 ALLOCATE_MAP(JS_OBJECT_TYPE, JSObject::kHeaderSize + kPointerSize, 2626 ALLOCATE_MAP(JS_OBJECT_TYPE, JSObject::kHeaderSize + kPointerSize,
2629 external) 2627 external)
2630 external_map()->set_is_extensible(false); 2628 external_map()->set_is_extensible(false);
2631 #undef ALLOCATE_VARSIZE_MAP 2629 #undef ALLOCATE_VARSIZE_MAP
2632 #undef ALLOCATE_MAP 2630 #undef ALLOCATE_MAP
2633 } 2631 }
2634 2632
2635 { // Empty arrays 2633 { // Empty arrays
2636 { ByteArray* byte_array; 2634 { ByteArray* byte_array;
2637 if (!AllocateByteArray(0, TENURED)->To(&byte_array)) return false; 2635 if (!AllocateByteArray(0, TENURED).To(&byte_array)) return false;
2638 set_empty_byte_array(byte_array); 2636 set_empty_byte_array(byte_array);
2639 } 2637 }
2640 2638
2641 #define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size) \ 2639 #define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size) \
2642 { ExternalArray* obj; \ 2640 { ExternalArray* obj; \
2643 if (!AllocateEmptyExternalArray(kExternal##Type##Array)->To(&obj)) \ 2641 if (!AllocateEmptyExternalArray(kExternal##Type##Array).To(&obj)) \
2644 return false; \ 2642 return false; \
2645 set_empty_external_##type##_array(obj); \ 2643 set_empty_external_##type##_array(obj); \
2646 } 2644 }
2647 2645
2648 TYPED_ARRAYS(ALLOCATE_EMPTY_EXTERNAL_ARRAY) 2646 TYPED_ARRAYS(ALLOCATE_EMPTY_EXTERNAL_ARRAY)
2649 #undef ALLOCATE_EMPTY_EXTERNAL_ARRAY 2647 #undef ALLOCATE_EMPTY_EXTERNAL_ARRAY
2650 2648
2651 #define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \ 2649 #define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \
2652 { FixedTypedArrayBase* obj; \ 2650 { FixedTypedArrayBase* obj; \
2653 if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array)->To(&obj)) \ 2651 if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array).To(&obj)) \
2654 return false; \ 2652 return false; \
2655 set_empty_fixed_##type##_array(obj); \ 2653 set_empty_fixed_##type##_array(obj); \
2656 } 2654 }
2657 2655
2658 TYPED_ARRAYS(ALLOCATE_EMPTY_FIXED_TYPED_ARRAY) 2656 TYPED_ARRAYS(ALLOCATE_EMPTY_FIXED_TYPED_ARRAY)
2659 #undef ALLOCATE_EMPTY_FIXED_TYPED_ARRAY 2657 #undef ALLOCATE_EMPTY_FIXED_TYPED_ARRAY
2660 } 2658 }
2661 ASSERT(!InNewSpace(empty_fixed_array())); 2659 ASSERT(!InNewSpace(empty_fixed_array()));
2662 return true; 2660 return true;
2663 } 2661 }
2664 2662
2665 2663
2666 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { 2664 AllocationResult Heap::AllocateHeapNumber(double value,
2665 PretenureFlag pretenure) {
2667 // Statically ensure that it is safe to allocate heap numbers in paged 2666 // Statically ensure that it is safe to allocate heap numbers in paged
2668 // spaces. 2667 // spaces.
2669 int size = HeapNumber::kSize; 2668 int size = HeapNumber::kSize;
2670 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize); 2669 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize);
2671 2670
2672 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); 2671 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
2673 2672
2674 Object* result; 2673 HeapObject* result;
2675 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); 2674 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
2676 if (!maybe_result->ToObject(&result)) return maybe_result; 2675 if (!allocation.To(&result)) return allocation;
2677 } 2676 }
2678 2677
2679 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); 2678 result->set_map_no_write_barrier(heap_number_map());
2680 HeapNumber::cast(result)->set_value(value); 2679 HeapNumber::cast(result)->set_value(value);
2681 return result; 2680 return result;
2682 } 2681 }
2683 2682
2684 2683
2685 MaybeObject* Heap::AllocateCell(Object* value) { 2684 AllocationResult Heap::AllocateCell(Object* value) {
2686 int size = Cell::kSize; 2685 int size = Cell::kSize;
2687 STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize); 2686 STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize);
2688 2687
2689 Object* result; 2688 HeapObject* result;
2690 { MaybeObject* maybe_result = AllocateRaw(size, CELL_SPACE, CELL_SPACE); 2689 { AllocationResult allocation = AllocateRaw(size, CELL_SPACE, CELL_SPACE);
2691 if (!maybe_result->ToObject(&result)) return maybe_result; 2690 if (!allocation.To(&result)) return allocation;
2692 } 2691 }
2693 HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); 2692 result->set_map_no_write_barrier(cell_map());
2694 Cell::cast(result)->set_value(value); 2693 Cell::cast(result)->set_value(value);
2695 return result; 2694 return result;
2696 } 2695 }
2697 2696
2698 2697
2699 MaybeObject* Heap::AllocatePropertyCell() { 2698 AllocationResult Heap::AllocatePropertyCell() {
2700 int size = PropertyCell::kSize; 2699 int size = PropertyCell::kSize;
2701 STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize); 2700 STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize);
2702 2701
2703 Object* result; 2702 HeapObject* result;
2704 MaybeObject* maybe_result = 2703 AllocationResult allocation =
2705 AllocateRaw(size, PROPERTY_CELL_SPACE, PROPERTY_CELL_SPACE); 2704 AllocateRaw(size, PROPERTY_CELL_SPACE, PROPERTY_CELL_SPACE);
2706 if (!maybe_result->ToObject(&result)) return maybe_result; 2705 if (!allocation.To(&result)) return allocation;
2707 2706
2708 HeapObject::cast(result)->set_map_no_write_barrier( 2707 result->set_map_no_write_barrier(global_property_cell_map());
2709 global_property_cell_map());
2710 PropertyCell* cell = PropertyCell::cast(result); 2708 PropertyCell* cell = PropertyCell::cast(result);
2711 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), 2709 cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
2712 SKIP_WRITE_BARRIER); 2710 SKIP_WRITE_BARRIER);
2713 cell->set_value(the_hole_value()); 2711 cell->set_value(the_hole_value());
2714 cell->set_type(HeapType::None()); 2712 cell->set_type(HeapType::None());
2715 return result; 2713 return result;
2716 } 2714 }
2717 2715
2718 2716
2719 void Heap::CreateApiObjects() { 2717 void Heap::CreateApiObjects() {
(...skipping 513 matching lines...) Expand 10 before | Expand all | Expand 10 after
3233 roots_[RootIndexForEmptyExternalArray(map->elements_kind())]); 3231 roots_[RootIndexForEmptyExternalArray(map->elements_kind())]);
3234 } 3232 }
3235 3233
3236 3234
3237 FixedTypedArrayBase* Heap::EmptyFixedTypedArrayForMap(Map* map) { 3235 FixedTypedArrayBase* Heap::EmptyFixedTypedArrayForMap(Map* map) {
3238 return FixedTypedArrayBase::cast( 3236 return FixedTypedArrayBase::cast(
3239 roots_[RootIndexForEmptyFixedTypedArray(map->elements_kind())]); 3237 roots_[RootIndexForEmptyFixedTypedArray(map->elements_kind())]);
3240 } 3238 }
3241 3239
3242 3240
3243 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) { 3241 AllocationResult Heap::AllocateForeign(Address address,
3242 PretenureFlag pretenure) {
3244 // Statically ensure that it is safe to allocate foreigns in paged spaces. 3243 // Statically ensure that it is safe to allocate foreigns in paged spaces.
3245 STATIC_ASSERT(Foreign::kSize <= Page::kMaxRegularHeapObjectSize); 3244 STATIC_ASSERT(Foreign::kSize <= Page::kMaxRegularHeapObjectSize);
3246 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 3245 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
3247 Foreign* result; 3246 Foreign* result;
3248 MaybeObject* maybe_result = Allocate(foreign_map(), space); 3247 AllocationResult allocation = Allocate(foreign_map(), space);
3249 if (!maybe_result->To(&result)) return maybe_result; 3248 if (!allocation.To(&result)) return allocation;
3250 result->set_foreign_address(address); 3249 result->set_foreign_address(address);
3251 return result; 3250 return result;
3252 } 3251 }
3253 3252
3254 3253
3255 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { 3254 AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
3256 if (length < 0 || length > ByteArray::kMaxLength) { 3255 if (length < 0 || length > ByteArray::kMaxLength) {
3257 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); 3256 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
3258 } 3257 }
3259 int size = ByteArray::SizeFor(length); 3258 int size = ByteArray::SizeFor(length);
3260 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); 3259 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
3261 Object* result; 3260 HeapObject* result;
3262 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); 3261 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
3263 if (!maybe_result->ToObject(&result)) return maybe_result; 3262 if (!allocation.To(&result)) return allocation;
3264 } 3263 }
3265 3264
3266 reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier( 3265 result->set_map_no_write_barrier(byte_array_map());
3267 byte_array_map()); 3266 ByteArray::cast(result)->set_length(length);
3268 reinterpret_cast<ByteArray*>(result)->set_length(length);
3269 return result; 3267 return result;
3270 } 3268 }
3271 3269
3272 3270
3273 void Heap::CreateFillerObjectAt(Address addr, int size) { 3271 void Heap::CreateFillerObjectAt(Address addr, int size) {
3274 if (size == 0) return; 3272 if (size == 0) return;
3275 HeapObject* filler = HeapObject::FromAddress(addr); 3273 HeapObject* filler = HeapObject::FromAddress(addr);
3276 if (size == kPointerSize) { 3274 if (size == kPointerSize) {
3277 filler->set_map_no_write_barrier(one_pointer_filler_map()); 3275 filler->set_map_no_write_barrier(one_pointer_filler_map());
3278 } else if (size == 2 * kPointerSize) { 3276 } else if (size == 2 * kPointerSize) {
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
3311 Marking::IsBlack(Marking::MarkBitFrom(address))) { 3309 Marking::IsBlack(Marking::MarkBitFrom(address))) {
3312 if (mode == FROM_GC) { 3310 if (mode == FROM_GC) {
3313 MemoryChunk::IncrementLiveBytesFromGC(address, by); 3311 MemoryChunk::IncrementLiveBytesFromGC(address, by);
3314 } else { 3312 } else {
3315 MemoryChunk::IncrementLiveBytesFromMutator(address, by); 3313 MemoryChunk::IncrementLiveBytesFromMutator(address, by);
3316 } 3314 }
3317 } 3315 }
3318 } 3316 }
3319 3317
3320 3318
3321 MaybeObject* Heap::AllocateExternalArray(int length, 3319 AllocationResult Heap::AllocateExternalArray(int length,
3322 ExternalArrayType array_type, 3320 ExternalArrayType array_type,
3323 void* external_pointer, 3321 void* external_pointer,
3324 PretenureFlag pretenure) { 3322 PretenureFlag pretenure) {
3325 int size = ExternalArray::kAlignedSize; 3323 int size = ExternalArray::kAlignedSize;
3326 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); 3324 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
3327 Object* result; 3325 HeapObject* result;
3328 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); 3326 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
3329 if (!maybe_result->ToObject(&result)) return maybe_result; 3327 if (!allocation.To(&result)) return allocation;
3330 } 3328 }
3331 3329
3332 reinterpret_cast<ExternalArray*>(result)->set_map_no_write_barrier( 3330 result->set_map_no_write_barrier(
3333 MapForExternalArrayType(array_type)); 3331 MapForExternalArrayType(array_type));
3334 reinterpret_cast<ExternalArray*>(result)->set_length(length); 3332 ExternalArray::cast(result)->set_length(length);
3335 reinterpret_cast<ExternalArray*>(result)->set_external_pointer( 3333 ExternalArray::cast(result)->set_external_pointer(external_pointer);
3336 external_pointer);
3337
3338 return result; 3334 return result;
3339 } 3335 }
3340 3336
3341 static void ForFixedTypedArray(ExternalArrayType array_type, 3337 static void ForFixedTypedArray(ExternalArrayType array_type,
3342 int* element_size, 3338 int* element_size,
3343 ElementsKind* element_kind) { 3339 ElementsKind* element_kind) {
3344 switch (array_type) { 3340 switch (array_type) {
3345 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \ 3341 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
3346 case kExternal##Type##Array: \ 3342 case kExternal##Type##Array: \
3347 *element_size = size; \ 3343 *element_size = size; \
3348 *element_kind = TYPE##_ELEMENTS; \ 3344 *element_kind = TYPE##_ELEMENTS; \
3349 return; 3345 return;
3350 3346
3351 TYPED_ARRAYS(TYPED_ARRAY_CASE) 3347 TYPED_ARRAYS(TYPED_ARRAY_CASE)
3352 #undef TYPED_ARRAY_CASE 3348 #undef TYPED_ARRAY_CASE
3353 3349
3354 default: 3350 default:
3355 *element_size = 0; // Bogus 3351 *element_size = 0; // Bogus
3356 *element_kind = UINT8_ELEMENTS; // Bogus 3352 *element_kind = UINT8_ELEMENTS; // Bogus
3357 UNREACHABLE(); 3353 UNREACHABLE();
3358 } 3354 }
3359 } 3355 }
3360 3356
3361 3357
3362 MaybeObject* Heap::AllocateFixedTypedArray(int length, 3358 AllocationResult Heap::AllocateFixedTypedArray(int length,
3363 ExternalArrayType array_type, 3359 ExternalArrayType array_type,
3364 PretenureFlag pretenure) { 3360 PretenureFlag pretenure) {
3365 int element_size; 3361 int element_size;
3366 ElementsKind elements_kind; 3362 ElementsKind elements_kind;
3367 ForFixedTypedArray(array_type, &element_size, &elements_kind); 3363 ForFixedTypedArray(array_type, &element_size, &elements_kind);
3368 int size = OBJECT_POINTER_ALIGN( 3364 int size = OBJECT_POINTER_ALIGN(
3369 length * element_size + FixedTypedArrayBase::kDataOffset); 3365 length * element_size + FixedTypedArrayBase::kDataOffset);
3370 #ifndef V8_HOST_ARCH_64_BIT 3366 #ifndef V8_HOST_ARCH_64_BIT
3371 if (array_type == kExternalFloat64Array) { 3367 if (array_type == kExternalFloat64Array) {
3372 size += kPointerSize; 3368 size += kPointerSize;
3373 } 3369 }
3374 #endif 3370 #endif
3375 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); 3371 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
3376 3372
3377 HeapObject* object; 3373 HeapObject* object;
3378 MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); 3374 AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
3379 if (!maybe_object->To(&object)) return maybe_object; 3375 if (!allocation.To(&object)) return allocation;
3380 3376
3381 if (array_type == kExternalFloat64Array) { 3377 if (array_type == kExternalFloat64Array) {
3382 object = EnsureDoubleAligned(this, object, size); 3378 object = EnsureDoubleAligned(this, object, size);
3383 } 3379 }
3384 3380
3385 FixedTypedArrayBase* elements = 3381 object->set_map(MapForFixedTypedArray(array_type));
3386 reinterpret_cast<FixedTypedArrayBase*>(object); 3382 FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object);
3387 elements->set_map(MapForFixedTypedArray(array_type));
3388 elements->set_length(length); 3383 elements->set_length(length);
3389 memset(elements->DataPtr(), 0, elements->DataSize()); 3384 memset(elements->DataPtr(), 0, elements->DataSize());
3390 return elements; 3385 return elements;
3391 } 3386 }
3392 3387
3393 3388
3394 MaybeObject* Heap::AllocateCode(int object_size, 3389 AllocationResult Heap::AllocateCode(int object_size,
3395 bool immovable) { 3390 bool immovable) {
3396 ASSERT(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment)); 3391 ASSERT(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment));
3397 MaybeObject* maybe_result; 3392 AllocationResult allocation;
3398 // Large code objects and code objects which should stay at a fixed address 3393 // Large code objects and code objects which should stay at a fixed address
3399 // are allocated in large object space. 3394 // are allocated in large object space.
3400 HeapObject* result; 3395 HeapObject* result;
3401 bool force_lo_space = object_size > code_space()->AreaSize(); 3396 bool force_lo_space = object_size > code_space()->AreaSize();
3402 if (force_lo_space) { 3397 if (force_lo_space) {
3403 maybe_result = lo_space_->AllocateRaw(object_size, EXECUTABLE); 3398 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
3404 } else { 3399 } else {
3405 maybe_result = AllocateRaw(object_size, CODE_SPACE, CODE_SPACE); 3400 allocation = AllocateRaw(object_size, CODE_SPACE, CODE_SPACE);
3406 } 3401 }
3407 if (!maybe_result->To<HeapObject>(&result)) return maybe_result; 3402 if (!allocation.To(&result)) return allocation;
3408 3403
3409 if (immovable && !force_lo_space && 3404 if (immovable && !force_lo_space &&
3410 // Objects on the first page of each space are never moved. 3405 // Objects on the first page of each space are never moved.
3411 !code_space_->FirstPage()->Contains(result->address())) { 3406 !code_space_->FirstPage()->Contains(result->address())) {
3412 // Discard the first code allocation, which was on a page where it could be 3407 // Discard the first code allocation, which was on a page where it could be
3413 // moved. 3408 // moved.
3414 CreateFillerObjectAt(result->address(), object_size); 3409 CreateFillerObjectAt(result->address(), object_size);
3415 maybe_result = lo_space_->AllocateRaw(object_size, EXECUTABLE); 3410 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
3416 if (!maybe_result->To<HeapObject>(&result)) return maybe_result; 3411 if (!allocation.To(&result)) return allocation;
3417 } 3412 }
3418 3413
3419 result->set_map_no_write_barrier(code_map()); 3414 result->set_map_no_write_barrier(code_map());
3420 Code* code = Code::cast(result); 3415 Code* code = Code::cast(result);
3421 ASSERT(!isolate_->code_range()->exists() || 3416 ASSERT(!isolate_->code_range()->exists() ||
3422 isolate_->code_range()->contains(code->address())); 3417 isolate_->code_range()->contains(code->address()));
3423 code->set_gc_metadata(Smi::FromInt(0)); 3418 code->set_gc_metadata(Smi::FromInt(0));
3424 code->set_ic_age(global_ic_age_); 3419 code->set_ic_age(global_ic_age_);
3425 return code; 3420 return code;
3426 } 3421 }
3427 3422
3428 3423
3429 MaybeObject* Heap::CopyCode(Code* code) { 3424 AllocationResult Heap::CopyCode(Code* code) {
3430 MaybeObject* maybe_result; 3425 AllocationResult allocation;
3431 Object* new_constant_pool; 3426 HeapObject* new_constant_pool;
3432 if (FLAG_enable_ool_constant_pool && 3427 if (FLAG_enable_ool_constant_pool &&
3433 code->constant_pool() != empty_constant_pool_array()) { 3428 code->constant_pool() != empty_constant_pool_array()) {
3434 // Copy the constant pool, since edits to the copied code may modify 3429 // Copy the constant pool, since edits to the copied code may modify
3435 // the constant pool. 3430 // the constant pool.
3436 maybe_result = CopyConstantPoolArray(code->constant_pool()); 3431 allocation = CopyConstantPoolArray(code->constant_pool());
3437 if (!maybe_result->ToObject(&new_constant_pool)) return maybe_result; 3432 if (!allocation.To(&new_constant_pool)) return allocation;
3438 } else { 3433 } else {
3439 new_constant_pool = empty_constant_pool_array(); 3434 new_constant_pool = empty_constant_pool_array();
3440 } 3435 }
3441 3436
3442 // Allocate an object the same size as the code object. 3437 // Allocate an object the same size as the code object.
3443 int obj_size = code->Size(); 3438 int obj_size = code->Size();
3444 if (obj_size > code_space()->AreaSize()) { 3439 if (obj_size > code_space()->AreaSize()) {
3445 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); 3440 allocation = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
3446 } else { 3441 } else {
3447 maybe_result = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE); 3442 allocation = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE);
3448 } 3443 }
3449 3444
3450 Object* result; 3445 HeapObject* result;
3451 if (!maybe_result->ToObject(&result)) return maybe_result; 3446 if (!allocation.To(&result)) return allocation;
3452 3447
3453 // Copy code object. 3448 // Copy code object.
3454 Address old_addr = code->address(); 3449 Address old_addr = code->address();
3455 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); 3450 Address new_addr = result->address();
3456 CopyBlock(new_addr, old_addr, obj_size); 3451 CopyBlock(new_addr, old_addr, obj_size);
3457 Code* new_code = Code::cast(result); 3452 Code* new_code = Code::cast(result);
3458 3453
3459 // Update the constant pool. 3454 // Update the constant pool.
3460 new_code->set_constant_pool(new_constant_pool); 3455 new_code->set_constant_pool(new_constant_pool);
3461 3456
3462 // Relocate the copy. 3457 // Relocate the copy.
3463 ASSERT(!isolate_->code_range()->exists() || 3458 ASSERT(!isolate_->code_range()->exists() ||
3464 isolate_->code_range()->contains(code->address())); 3459 isolate_->code_range()->contains(code->address()));
3465 new_code->Relocate(new_addr - old_addr); 3460 new_code->Relocate(new_addr - old_addr);
3466 return new_code; 3461 return new_code;
3467 } 3462 }
3468 3463
3469 3464
3470 MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) { 3465 AllocationResult Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
3471 // Allocate ByteArray and ConstantPoolArray before the Code object, so that we 3466 // Allocate ByteArray and ConstantPoolArray before the Code object, so that we
3472 // do not risk leaving uninitialized Code object (and breaking the heap). 3467 // do not risk leaving uninitialized Code object (and breaking the heap).
3473 Object* reloc_info_array; 3468 ByteArray* reloc_info_array;
3474 { MaybeObject* maybe_reloc_info_array = 3469 { AllocationResult allocation =
3475 AllocateByteArray(reloc_info.length(), TENURED); 3470 AllocateByteArray(reloc_info.length(), TENURED);
3476 if (!maybe_reloc_info_array->ToObject(&reloc_info_array)) { 3471 if (!allocation.To(&reloc_info_array)) return allocation;
3477 return maybe_reloc_info_array;
3478 }
3479 } 3472 }
3480 Object* new_constant_pool; 3473 HeapObject* new_constant_pool;
3481 if (FLAG_enable_ool_constant_pool && 3474 if (FLAG_enable_ool_constant_pool &&
3482 code->constant_pool() != empty_constant_pool_array()) { 3475 code->constant_pool() != empty_constant_pool_array()) {
3483 // Copy the constant pool, since edits to the copied code may modify 3476 // Copy the constant pool, since edits to the copied code may modify
3484 // the constant pool. 3477 // the constant pool.
3485 MaybeObject* maybe_constant_pool = 3478 AllocationResult allocation =
3486 CopyConstantPoolArray(code->constant_pool()); 3479 CopyConstantPoolArray(code->constant_pool());
3487 if (!maybe_constant_pool->ToObject(&new_constant_pool)) 3480 if (!allocation.To(&new_constant_pool)) return allocation;
3488 return maybe_constant_pool;
3489 } else { 3481 } else {
3490 new_constant_pool = empty_constant_pool_array(); 3482 new_constant_pool = empty_constant_pool_array();
3491 } 3483 }
3492 3484
3493 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment); 3485 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);
3494 3486
3495 int new_obj_size = Code::SizeFor(new_body_size); 3487 int new_obj_size = Code::SizeFor(new_body_size);
3496 3488
3497 Address old_addr = code->address(); 3489 Address old_addr = code->address();
3498 3490
3499 size_t relocation_offset = 3491 size_t relocation_offset =
3500 static_cast<size_t>(code->instruction_end() - old_addr); 3492 static_cast<size_t>(code->instruction_end() - old_addr);
3501 3493
3502 MaybeObject* maybe_result; 3494 AllocationResult allocation;
3503 if (new_obj_size > code_space()->AreaSize()) { 3495 if (new_obj_size > code_space()->AreaSize()) {
3504 maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE); 3496 allocation = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE);
3505 } else { 3497 } else {
3506 maybe_result = AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE); 3498 allocation = AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE);
3507 } 3499 }
3508 3500
3509 Object* result; 3501 HeapObject* result;
3510 if (!maybe_result->ToObject(&result)) return maybe_result; 3502 if (!allocation.To(&result)) return allocation;
3511 3503
3512 // Copy code object. 3504 // Copy code object.
3513 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); 3505 Address new_addr = result->address();
3514 3506
3515 // Copy header and instructions. 3507 // Copy header and instructions.
3516 CopyBytes(new_addr, old_addr, relocation_offset); 3508 CopyBytes(new_addr, old_addr, relocation_offset);
3517 3509
3518 Code* new_code = Code::cast(result); 3510 Code* new_code = Code::cast(result);
3519 new_code->set_relocation_info(ByteArray::cast(reloc_info_array)); 3511 new_code->set_relocation_info(reloc_info_array);
3520 3512
3521 // Update constant pool. 3513 // Update constant pool.
3522 new_code->set_constant_pool(new_constant_pool); 3514 new_code->set_constant_pool(new_constant_pool);
3523 3515
3524 // Copy patched rinfo. 3516 // Copy patched rinfo.
3525 CopyBytes(new_code->relocation_start(), 3517 CopyBytes(new_code->relocation_start(),
3526 reloc_info.start(), 3518 reloc_info.start(),
3527 static_cast<size_t>(reloc_info.length())); 3519 static_cast<size_t>(reloc_info.length()));
3528 3520
3529 // Relocate the copy. 3521 // Relocate the copy.
3530 ASSERT(!isolate_->code_range()->exists() || 3522 ASSERT(!isolate_->code_range()->exists() ||
3531 isolate_->code_range()->contains(code->address())); 3523 isolate_->code_range()->contains(code->address()));
3532 new_code->Relocate(new_addr - old_addr); 3524 new_code->Relocate(new_addr - old_addr);
3533 3525
3534 #ifdef VERIFY_HEAP 3526 #ifdef VERIFY_HEAP
3535 if (FLAG_verify_heap) { 3527 if (FLAG_verify_heap) code->ObjectVerify();
3536 code->Verify();
3537 }
3538 #endif 3528 #endif
3539 return new_code; 3529 return new_code;
3540 } 3530 }
3541 3531
3542 3532
3543 void Heap::InitializeAllocationMemento(AllocationMemento* memento, 3533 void Heap::InitializeAllocationMemento(AllocationMemento* memento,
3544 AllocationSite* allocation_site) { 3534 AllocationSite* allocation_site) {
3545 memento->set_map_no_write_barrier(allocation_memento_map()); 3535 memento->set_map_no_write_barrier(allocation_memento_map());
3546 ASSERT(allocation_site->map() == allocation_site_map()); 3536 ASSERT(allocation_site->map() == allocation_site_map());
3547 memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER); 3537 memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
3548 if (FLAG_allocation_site_pretenuring) { 3538 if (FLAG_allocation_site_pretenuring) {
3549 allocation_site->IncrementMementoCreateCount(); 3539 allocation_site->IncrementMementoCreateCount();
3550 } 3540 }
3551 } 3541 }
3552 3542
3553 3543
3554 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space, 3544 AllocationResult Heap::Allocate(Map* map, AllocationSpace space,
3555 AllocationSite* allocation_site) { 3545 AllocationSite* allocation_site) {
3556 ASSERT(gc_state_ == NOT_IN_GC); 3546 ASSERT(gc_state_ == NOT_IN_GC);
3557 ASSERT(map->instance_type() != MAP_TYPE); 3547 ASSERT(map->instance_type() != MAP_TYPE);
3558 // If allocation failures are disallowed, we may allocate in a different 3548 // If allocation failures are disallowed, we may allocate in a different
3559 // space when new space is full and the object is not a large object. 3549 // space when new space is full and the object is not a large object.
3560 AllocationSpace retry_space = 3550 AllocationSpace retry_space =
3561 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); 3551 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
3562 int size = map->instance_size(); 3552 int size = map->instance_size();
3563 if (allocation_site != NULL) { 3553 if (allocation_site != NULL) {
3564 size += AllocationMemento::kSize; 3554 size += AllocationMemento::kSize;
3565 } 3555 }
3566 Object* result; 3556 HeapObject* result;
3567 MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); 3557 AllocationResult allocation = AllocateRaw(size, space, retry_space);
3568 if (!maybe_result->ToObject(&result)) return maybe_result; 3558 if (!allocation.To(&result)) return allocation;
3569 // No need for write barrier since object is white and map is in old space. 3559 // No need for write barrier since object is white and map is in old space.
3570 HeapObject::cast(result)->set_map_no_write_barrier(map); 3560 result->set_map_no_write_barrier(map);
3571 if (allocation_site != NULL) { 3561 if (allocation_site != NULL) {
3572 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( 3562 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
3573 reinterpret_cast<Address>(result) + map->instance_size()); 3563 reinterpret_cast<Address>(result) + map->instance_size());
3574 InitializeAllocationMemento(alloc_memento, allocation_site); 3564 InitializeAllocationMemento(alloc_memento, allocation_site);
3575 } 3565 }
3576 return result; 3566 return result;
3577 } 3567 }
3578 3568
3579 3569
3580 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { 3570 AllocationResult Heap::AllocateArgumentsObject(Object* callee, int length) {
3581 // To get fast allocation and map sharing for arguments objects we 3571 // To get fast allocation and map sharing for arguments objects we
3582 // allocate them based on an arguments boilerplate. 3572 // allocate them based on an arguments boilerplate.
3583 3573
3584 JSObject* boilerplate; 3574 JSObject* boilerplate;
3585 int arguments_object_size; 3575 int arguments_object_size;
3586 bool strict_mode_callee = callee->IsJSFunction() && 3576 bool strict_mode_callee = callee->IsJSFunction() &&
3587 JSFunction::cast(callee)->shared()->strict_mode() == STRICT; 3577 JSFunction::cast(callee)->shared()->strict_mode() == STRICT;
3588 if (strict_mode_callee) { 3578 if (strict_mode_callee) {
3589 boilerplate = 3579 boilerplate =
3590 isolate()->context()->native_context()->strict_arguments_boilerplate(); 3580 isolate()->context()->native_context()->strict_arguments_boilerplate();
3591 arguments_object_size = kStrictArgumentsObjectSize; 3581 arguments_object_size = kStrictArgumentsObjectSize;
3592 } else { 3582 } else {
3593 boilerplate = 3583 boilerplate =
3594 isolate()->context()->native_context()->sloppy_arguments_boilerplate(); 3584 isolate()->context()->native_context()->sloppy_arguments_boilerplate();
3595 arguments_object_size = kSloppyArgumentsObjectSize; 3585 arguments_object_size = kSloppyArgumentsObjectSize;
3596 } 3586 }
3597 3587
3598 // Check that the size of the boilerplate matches our 3588 // Check that the size of the boilerplate matches our
3599 // expectations. The ArgumentsAccessStub::GenerateNewObject relies 3589 // expectations. The ArgumentsAccessStub::GenerateNewObject relies
3600 // on the size being a known constant. 3590 // on the size being a known constant.
3601 ASSERT(arguments_object_size == boilerplate->map()->instance_size()); 3591 ASSERT(arguments_object_size == boilerplate->map()->instance_size());
3602 3592
3603 // Do the allocation. 3593 // Do the allocation.
3604 Object* result; 3594 HeapObject* result;
3605 { MaybeObject* maybe_result = 3595 { AllocationResult allocation =
3606 AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE); 3596 AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE);
3607 if (!maybe_result->ToObject(&result)) return maybe_result; 3597 if (!allocation.To(&result)) return allocation;
3608 } 3598 }
3609 3599
3610 // Copy the content. The arguments boilerplate doesn't have any 3600 // Copy the content. The arguments boilerplate doesn't have any
3611 // fields that point to new space so it's safe to skip the write 3601 // fields that point to new space so it's safe to skip the write
3612 // barrier here. 3602 // barrier here.
3613 CopyBlock(HeapObject::cast(result)->address(), 3603 CopyBlock(result->address(), boilerplate->address(), JSObject::kHeaderSize);
3614 boilerplate->address(),
3615 JSObject::kHeaderSize);
3616 3604
3617 // Set the length property. 3605 // Set the length property.
3618 JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsLengthIndex, 3606 JSObject* js_obj = JSObject::cast(result);
3619 Smi::FromInt(length), 3607 js_obj->InObjectPropertyAtPut(
3620 SKIP_WRITE_BARRIER); 3608 kArgumentsLengthIndex, Smi::FromInt(length), SKIP_WRITE_BARRIER);
3621 // Set the callee property for sloppy mode arguments object only. 3609 // Set the callee property for sloppy mode arguments object only.
3622 if (!strict_mode_callee) { 3610 if (!strict_mode_callee) {
3623 JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsCalleeIndex, 3611 js_obj->InObjectPropertyAtPut(kArgumentsCalleeIndex, callee);
3624 callee);
3625 } 3612 }
3626 3613
3627 // Check the state of the object 3614 // Check the state of the object
3628 ASSERT(JSObject::cast(result)->HasFastProperties()); 3615 ASSERT(js_obj->HasFastProperties());
3629 ASSERT(JSObject::cast(result)->HasFastObjectElements()); 3616 ASSERT(js_obj->HasFastObjectElements());
3630 3617
3631 return result; 3618 return js_obj;
3632 } 3619 }
3633 3620
3634 3621
3635 void Heap::InitializeJSObjectFromMap(JSObject* obj, 3622 void Heap::InitializeJSObjectFromMap(JSObject* obj,
3636 FixedArray* properties, 3623 FixedArray* properties,
3637 Map* map) { 3624 Map* map) {
3638 obj->set_properties(properties); 3625 obj->set_properties(properties);
3639 obj->initialize_elements(); 3626 obj->initialize_elements();
3640 // TODO(1240798): Initialize the object's body using valid initial values 3627 // TODO(1240798): Initialize the object's body using valid initial values
3641 // according to the object's initial map. For example, if the map's 3628 // according to the object's initial map. For example, if the map's
(...skipping 15 matching lines...) Expand all
3657 // We might want to shrink the object later. 3644 // We might want to shrink the object later.
3658 ASSERT(obj->GetInternalFieldCount() == 0); 3645 ASSERT(obj->GetInternalFieldCount() == 0);
3659 filler = Heap::one_pointer_filler_map(); 3646 filler = Heap::one_pointer_filler_map();
3660 } else { 3647 } else {
3661 filler = Heap::undefined_value(); 3648 filler = Heap::undefined_value();
3662 } 3649 }
3663 obj->InitializeBody(map, Heap::undefined_value(), filler); 3650 obj->InitializeBody(map, Heap::undefined_value(), filler);
3664 } 3651 }
3665 3652
3666 3653
3667 MaybeObject* Heap::AllocateJSObjectFromMap( 3654 AllocationResult Heap::AllocateJSObjectFromMap(
3668 Map* map, 3655 Map* map,
3669 PretenureFlag pretenure, 3656 PretenureFlag pretenure,
3670 bool allocate_properties, 3657 bool allocate_properties,
3671 AllocationSite* allocation_site) { 3658 AllocationSite* allocation_site) {
3672 // JSFunctions should be allocated using AllocateFunction to be 3659 // JSFunctions should be allocated using AllocateFunction to be
3673 // properly initialized. 3660 // properly initialized.
3674 ASSERT(map->instance_type() != JS_FUNCTION_TYPE); 3661 ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
3675 3662
3676 // Both types of global objects should be allocated using 3663 // Both types of global objects should be allocated using
3677 // AllocateGlobalObject to be properly initialized. 3664 // AllocateGlobalObject to be properly initialized.
3678 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); 3665 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
3679 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); 3666 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
3680 3667
3681 // Allocate the backing storage for the properties. 3668 // Allocate the backing storage for the properties.
3682 FixedArray* properties; 3669 FixedArray* properties;
3683 if (allocate_properties) { 3670 if (allocate_properties) {
3684 int prop_size = map->InitialPropertiesLength(); 3671 int prop_size = map->InitialPropertiesLength();
3685 ASSERT(prop_size >= 0); 3672 ASSERT(prop_size >= 0);
3686 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); 3673 { AllocationResult allocation = AllocateFixedArray(prop_size, pretenure);
3687 if (!maybe_properties->To(&properties)) return maybe_properties; 3674 if (!allocation.To(&properties)) return allocation;
3688 } 3675 }
3689 } else { 3676 } else {
3690 properties = empty_fixed_array(); 3677 properties = empty_fixed_array();
3691 } 3678 }
3692 3679
3693 // Allocate the JSObject. 3680 // Allocate the JSObject.
3694 int size = map->instance_size(); 3681 int size = map->instance_size();
3695 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); 3682 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
3696 Object* obj; 3683 JSObject* js_obj;
3697 MaybeObject* maybe_obj = Allocate(map, space, allocation_site); 3684 AllocationResult allocation = Allocate(map, space, allocation_site);
3698 if (!maybe_obj->To(&obj)) return maybe_obj; 3685 if (!allocation.To(&js_obj)) return allocation;
3699 3686
3700 // Initialize the JSObject. 3687 // Initialize the JSObject.
3701 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); 3688 InitializeJSObjectFromMap(js_obj, properties, map);
3702 ASSERT(JSObject::cast(obj)->HasFastElements() || 3689 ASSERT(js_obj->HasFastElements() ||
3703 JSObject::cast(obj)->HasExternalArrayElements() || 3690 js_obj->HasExternalArrayElements() ||
3704 JSObject::cast(obj)->HasFixedTypedArrayElements()); 3691 js_obj->HasFixedTypedArrayElements());
3705 return obj; 3692 return js_obj;
3706 } 3693 }
3707 3694
3708 3695
3709 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, 3696 AllocationResult Heap::AllocateJSObject(JSFunction* constructor,
3710 PretenureFlag pretenure, 3697 PretenureFlag pretenure,
3711 AllocationSite* allocation_site) { 3698 AllocationSite* allocation_site) {
3712 ASSERT(constructor->has_initial_map()); 3699 ASSERT(constructor->has_initial_map());
3713 3700
3714 // Allocate the object based on the constructors initial map. 3701 // Allocate the object based on the constructors initial map.
3715 MaybeObject* result = AllocateJSObjectFromMap(constructor->initial_map(), 3702 AllocationResult allocation = AllocateJSObjectFromMap(
3716 pretenure, 3703 constructor->initial_map(), pretenure, true, allocation_site);
3717 true,
3718 allocation_site);
3719 #ifdef DEBUG 3704 #ifdef DEBUG
3720 // Make sure result is NOT a global object if valid. 3705 // Make sure result is NOT a global object if valid.
3721 Object* non_failure; 3706 HeapObject* obj;
3722 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); 3707 ASSERT(!allocation.To(&obj) || !obj->IsGlobalObject());
3723 #endif 3708 #endif
3724 return result; 3709 return allocation;
3725 } 3710 }
3726 3711
3727 3712
3728 MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { 3713 AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
3729 // Never used to copy functions. If functions need to be copied we 3714 // Never used to copy functions. If functions need to be copied we
3730 // have to be careful to clear the literals array. 3715 // have to be careful to clear the literals array.
3731 SLOW_ASSERT(!source->IsJSFunction()); 3716 SLOW_ASSERT(!source->IsJSFunction());
3732 3717
3733 // Make the clone. 3718 // Make the clone.
3734 Map* map = source->map(); 3719 Map* map = source->map();
3735 int object_size = map->instance_size(); 3720 int object_size = map->instance_size();
3736 Object* clone; 3721 HeapObject* clone;
3737 3722
3738 ASSERT(site == NULL || AllocationSite::CanTrack(map->instance_type())); 3723 ASSERT(site == NULL || AllocationSite::CanTrack(map->instance_type()));
3739 3724
3740 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; 3725 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
3741 3726
3742 // If we're forced to always allocate, we use the general allocation 3727 // If we're forced to always allocate, we use the general allocation
3743 // functions which may leave us with an object in old space. 3728 // functions which may leave us with an object in old space.
3744 if (always_allocate()) { 3729 if (always_allocate()) {
3745 { MaybeObject* maybe_clone = 3730 { AllocationResult allocation =
3746 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); 3731 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
3747 if (!maybe_clone->ToObject(&clone)) return maybe_clone; 3732 if (!allocation.To(&clone)) return allocation;
3748 } 3733 }
3749 Address clone_address = HeapObject::cast(clone)->address(); 3734 Address clone_address = clone->address();
3750 CopyBlock(clone_address, 3735 CopyBlock(clone_address,
3751 source->address(), 3736 source->address(),
3752 object_size); 3737 object_size);
3753 // Update write barrier for all fields that lie beyond the header. 3738 // Update write barrier for all fields that lie beyond the header.
3754 RecordWrites(clone_address, 3739 RecordWrites(clone_address,
3755 JSObject::kHeaderSize, 3740 JSObject::kHeaderSize,
3756 (object_size - JSObject::kHeaderSize) / kPointerSize); 3741 (object_size - JSObject::kHeaderSize) / kPointerSize);
3757 } else { 3742 } else {
3758 wb_mode = SKIP_WRITE_BARRIER; 3743 wb_mode = SKIP_WRITE_BARRIER;
3759 3744
3760 { int adjusted_object_size = site != NULL 3745 { int adjusted_object_size = site != NULL
3761 ? object_size + AllocationMemento::kSize 3746 ? object_size + AllocationMemento::kSize
3762 : object_size; 3747 : object_size;
3763 MaybeObject* maybe_clone = 3748 AllocationResult allocation =
3764 AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE); 3749 AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE);
3765 if (!maybe_clone->ToObject(&clone)) return maybe_clone; 3750 if (!allocation.To(&clone)) return allocation;
3766 } 3751 }
3767 SLOW_ASSERT(InNewSpace(clone)); 3752 SLOW_ASSERT(InNewSpace(clone));
3768 // Since we know the clone is allocated in new space, we can copy 3753 // Since we know the clone is allocated in new space, we can copy
3769 // the contents without worrying about updating the write barrier. 3754 // the contents without worrying about updating the write barrier.
3770 CopyBlock(HeapObject::cast(clone)->address(), 3755 CopyBlock(clone->address(),
3771 source->address(), 3756 source->address(),
3772 object_size); 3757 object_size);
3773 3758
3774 if (site != NULL) { 3759 if (site != NULL) {
3775 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( 3760 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
3776 reinterpret_cast<Address>(clone) + object_size); 3761 reinterpret_cast<Address>(clone) + object_size);
3777 InitializeAllocationMemento(alloc_memento, site); 3762 InitializeAllocationMemento(alloc_memento, site);
3778 } 3763 }
3779 } 3764 }
3780 3765
3781 SLOW_ASSERT( 3766 SLOW_ASSERT(
3782 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); 3767 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
3783 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); 3768 FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
3784 FixedArray* properties = FixedArray::cast(source->properties()); 3769 FixedArray* properties = FixedArray::cast(source->properties());
3785 // Update elements if necessary. 3770 // Update elements if necessary.
3786 if (elements->length() > 0) { 3771 if (elements->length() > 0) {
3787 Object* elem; 3772 FixedArrayBase* elem;
3788 { MaybeObject* maybe_elem; 3773 { AllocationResult allocation;
3789 if (elements->map() == fixed_cow_array_map()) { 3774 if (elements->map() == fixed_cow_array_map()) {
3790 maybe_elem = FixedArray::cast(elements); 3775 allocation = FixedArray::cast(elements);
3791 } else if (source->HasFastDoubleElements()) { 3776 } else if (source->HasFastDoubleElements()) {
3792 maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); 3777 allocation = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
3793 } else { 3778 } else {
3794 maybe_elem = CopyFixedArray(FixedArray::cast(elements)); 3779 allocation = CopyFixedArray(FixedArray::cast(elements));
3795 } 3780 }
3796 if (!maybe_elem->ToObject(&elem)) return maybe_elem; 3781 if (!allocation.To(&elem)) return allocation;
3797 } 3782 }
3798 JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode); 3783 JSObject::cast(clone)->set_elements(elem, wb_mode);
3799 } 3784 }
3800 // Update properties if necessary. 3785 // Update properties if necessary.
3801 if (properties->length() > 0) { 3786 if (properties->length() > 0) {
3802 Object* prop; 3787 FixedArray* prop;
3803 { MaybeObject* maybe_prop = CopyFixedArray(properties); 3788 { AllocationResult allocation = CopyFixedArray(properties);
3804 if (!maybe_prop->ToObject(&prop)) return maybe_prop; 3789 if (!allocation.To(&prop)) return allocation;
3805 } 3790 }
3806 JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode); 3791 JSObject::cast(clone)->set_properties(prop, wb_mode);
3807 } 3792 }
3808 // Return the new clone. 3793 // Return the new clone.
3809 return clone; 3794 return clone;
3810 } 3795 }
3811 3796
3812 3797
3813 MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string, 3798 AllocationResult Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
3814 int non_ascii_start, 3799 int non_ascii_start,
3815 PretenureFlag pretenure) { 3800 PretenureFlag pretenure) {
3816 // Continue counting the number of characters in the UTF-8 string, starting 3801 // Continue counting the number of characters in the UTF-8 string, starting
3817 // from the first non-ascii character or word. 3802 // from the first non-ascii character or word.
3818 Access<UnicodeCache::Utf8Decoder> 3803 Access<UnicodeCache::Utf8Decoder>
3819 decoder(isolate_->unicode_cache()->utf8_decoder()); 3804 decoder(isolate_->unicode_cache()->utf8_decoder());
3820 decoder->Reset(string.start() + non_ascii_start, 3805 decoder->Reset(string.start() + non_ascii_start,
3821 string.length() - non_ascii_start); 3806 string.length() - non_ascii_start);
3822 int utf16_length = decoder->Utf16Length(); 3807 int utf16_length = decoder->Utf16Length();
3823 ASSERT(utf16_length > 0); 3808 ASSERT(utf16_length > 0);
3824 // Allocate string. 3809 // Allocate string.
3825 Object* result; 3810 HeapObject* result;
3826 { 3811 {
3827 int chars = non_ascii_start + utf16_length; 3812 int chars = non_ascii_start + utf16_length;
3828 MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure); 3813 AllocationResult allocation = AllocateRawTwoByteString(chars, pretenure);
3829 if (!maybe_result->ToObject(&result) || result->IsException()) { 3814 if (!allocation.To(&result) || result->IsException()) {
3830 return maybe_result; 3815 return allocation;
3831 } 3816 }
3832 } 3817 }
3833 // Convert and copy the characters into the new object.
3834 SeqTwoByteString* twobyte = SeqTwoByteString::cast(result);
3835 // Copy ascii portion. 3818 // Copy ascii portion.
3836 uint16_t* data = twobyte->GetChars(); 3819 uint16_t* data = SeqTwoByteString::cast(result)->GetChars();
3837 if (non_ascii_start != 0) { 3820 if (non_ascii_start != 0) {
3838 const char* ascii_data = string.start(); 3821 const char* ascii_data = string.start();
3839 for (int i = 0; i < non_ascii_start; i++) { 3822 for (int i = 0; i < non_ascii_start; i++) {
3840 *data++ = *ascii_data++; 3823 *data++ = *ascii_data++;
3841 } 3824 }
3842 } 3825 }
3843 // Now write the remainder. 3826 // Now write the remainder.
3844 decoder->WriteUtf16(data, utf16_length); 3827 decoder->WriteUtf16(data, utf16_length);
3845 return result; 3828 return result;
3846 } 3829 }
3847 3830
3848 3831
3849 MaybeObject* Heap::AllocateStringFromTwoByte(Vector<const uc16> string, 3832 AllocationResult Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
3850 PretenureFlag pretenure) { 3833 PretenureFlag pretenure) {
3851 // Check if the string is an ASCII string. 3834 // Check if the string is an ASCII string.
3852 Object* result; 3835 HeapObject* result;
3853 int length = string.length(); 3836 int length = string.length();
3854 const uc16* start = string.start(); 3837 const uc16* start = string.start();
3855 3838
3856 if (String::IsOneByte(start, length)) { 3839 if (String::IsOneByte(start, length)) {
3857 MaybeObject* maybe_result = AllocateRawOneByteString(length, pretenure); 3840 AllocationResult allocation = AllocateRawOneByteString(length, pretenure);
3858 if (!maybe_result->ToObject(&result) || result->IsException()) { 3841 if (!allocation.To(&result) || result->IsException()) {
3859 return maybe_result; 3842 return allocation;
3860 } 3843 }
3861 CopyChars(SeqOneByteString::cast(result)->GetChars(), start, length); 3844 CopyChars(SeqOneByteString::cast(result)->GetChars(), start, length);
3862 } else { // It's not a one byte string. 3845 } else { // It's not a one byte string.
3863 MaybeObject* maybe_result = AllocateRawTwoByteString(length, pretenure); 3846 AllocationResult allocation = AllocateRawTwoByteString(length, pretenure);
3864 if (!maybe_result->ToObject(&result) || result->IsException()) { 3847 if (!allocation.To(&result) || result->IsException()) {
3865 return maybe_result; 3848 return allocation;
3866 } 3849 }
3867 CopyChars(SeqTwoByteString::cast(result)->GetChars(), start, length); 3850 CopyChars(SeqTwoByteString::cast(result)->GetChars(), start, length);
3868 } 3851 }
3869 return result; 3852 return result;
3870 } 3853 }
3871 3854
3872 3855
3873 static inline void WriteOneByteData(Vector<const char> vector, 3856 static inline void WriteOneByteData(Vector<const char> vector,
3874 uint8_t* chars, 3857 uint8_t* chars,
3875 int len) { 3858 int len) {
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
3912 } 3895 }
3913 3896
3914 3897
3915 static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) { 3898 static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) {
3916 ASSERT(s->length() == len); 3899 ASSERT(s->length() == len);
3917 String::WriteToFlat(s, chars, 0, len); 3900 String::WriteToFlat(s, chars, 0, len);
3918 } 3901 }
3919 3902
3920 3903
3921 template<bool is_one_byte, typename T> 3904 template<bool is_one_byte, typename T>
3922 MaybeObject* Heap::AllocateInternalizedStringImpl( 3905 AllocationResult Heap::AllocateInternalizedStringImpl(
3923 T t, int chars, uint32_t hash_field) { 3906 T t, int chars, uint32_t hash_field) {
3924 ASSERT(chars >= 0); 3907 ASSERT(chars >= 0);
3925 // Compute map and object size. 3908 // Compute map and object size.
3926 int size; 3909 int size;
3927 Map* map; 3910 Map* map;
3928 3911
3929 if (chars < 0 || chars > String::kMaxLength) { 3912 if (chars < 0 || chars > String::kMaxLength) {
3930 return isolate()->ThrowInvalidStringLength(); 3913 return isolate()->ThrowInvalidStringLength();
3931 } 3914 }
3932 if (is_one_byte) { 3915 if (is_one_byte) {
3933 map = ascii_internalized_string_map(); 3916 map = ascii_internalized_string_map();
3934 size = SeqOneByteString::SizeFor(chars); 3917 size = SeqOneByteString::SizeFor(chars);
3935 } else { 3918 } else {
3936 map = internalized_string_map(); 3919 map = internalized_string_map();
3937 size = SeqTwoByteString::SizeFor(chars); 3920 size = SeqTwoByteString::SizeFor(chars);
3938 } 3921 }
3939 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); 3922 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
3940 3923
3941 // Allocate string. 3924 // Allocate string.
3942 Object* result; 3925 HeapObject* result;
3943 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); 3926 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
3944 if (!maybe_result->ToObject(&result)) return maybe_result; 3927 if (!allocation.To(&result)) return allocation;
3945 } 3928 }
3946 3929
3947 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map); 3930 result->set_map_no_write_barrier(map);
3948 // Set length and hash fields of the allocated string. 3931 // Set length and hash fields of the allocated string.
3949 String* answer = String::cast(result); 3932 String* answer = String::cast(result);
3950 answer->set_length(chars); 3933 answer->set_length(chars);
3951 answer->set_hash_field(hash_field); 3934 answer->set_hash_field(hash_field);
3952 3935
3953 ASSERT_EQ(size, answer->Size()); 3936 ASSERT_EQ(size, answer->Size());
3954 3937
3955 if (is_one_byte) { 3938 if (is_one_byte) {
3956 WriteOneByteData(t, SeqOneByteString::cast(answer)->GetChars(), chars); 3939 WriteOneByteData(t, SeqOneByteString::cast(answer)->GetChars(), chars);
3957 } else { 3940 } else {
3958 WriteTwoByteData(t, SeqTwoByteString::cast(answer)->GetChars(), chars); 3941 WriteTwoByteData(t, SeqTwoByteString::cast(answer)->GetChars(), chars);
3959 } 3942 }
3960 return answer; 3943 return answer;
3961 } 3944 }
3962 3945
3963 3946
3964 // Need explicit instantiations. 3947 // Need explicit instantiations.
3965 template 3948 template
3966 MaybeObject* Heap::AllocateInternalizedStringImpl<true>(String*, int, uint32_t); 3949 AllocationResult Heap::AllocateInternalizedStringImpl<true>(
3967 template
3968 MaybeObject* Heap::AllocateInternalizedStringImpl<false>(
3969 String*, int, uint32_t); 3950 String*, int, uint32_t);
3970 template 3951 template
3971 MaybeObject* Heap::AllocateInternalizedStringImpl<false>( 3952 AllocationResult Heap::AllocateInternalizedStringImpl<false>(
3953 String*, int, uint32_t);
3954 template
3955 AllocationResult Heap::AllocateInternalizedStringImpl<false>(
3972 Vector<const char>, int, uint32_t); 3956 Vector<const char>, int, uint32_t);
3973 3957
3974 3958
3975 MaybeObject* Heap::AllocateRawOneByteString(int length, 3959 AllocationResult Heap::AllocateRawOneByteString(int length,
3976 PretenureFlag pretenure) { 3960 PretenureFlag pretenure) {
3977 if (length < 0 || length > String::kMaxLength) { 3961 if (length < 0 || length > String::kMaxLength) {
3978 return isolate()->ThrowInvalidStringLength(); 3962 return isolate()->ThrowInvalidStringLength();
3979 } 3963 }
3980 int size = SeqOneByteString::SizeFor(length); 3964 int size = SeqOneByteString::SizeFor(length);
3981 ASSERT(size <= SeqOneByteString::kMaxSize); 3965 ASSERT(size <= SeqOneByteString::kMaxSize);
3982 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); 3966 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
3983 3967
3984 Object* result; 3968 HeapObject* result;
3985 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); 3969 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
3986 if (!maybe_result->ToObject(&result)) return maybe_result; 3970 if (!allocation.To(&result)) return allocation;
3987 } 3971 }
3988 3972
3989 // Partially initialize the object. 3973 // Partially initialize the object.
3990 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); 3974 result->set_map_no_write_barrier(ascii_string_map());
3991 String::cast(result)->set_length(length); 3975 String::cast(result)->set_length(length);
3992 String::cast(result)->set_hash_field(String::kEmptyHashField); 3976 String::cast(result)->set_hash_field(String::kEmptyHashField);
3993 ASSERT_EQ(size, HeapObject::cast(result)->Size()); 3977 ASSERT_EQ(size, HeapObject::cast(result)->Size());
3994 3978
3995 return result; 3979 return result;
3996 } 3980 }
3997 3981
3998 3982
3999 MaybeObject* Heap::AllocateRawTwoByteString(int length, 3983 AllocationResult Heap::AllocateRawTwoByteString(int length,
4000 PretenureFlag pretenure) { 3984 PretenureFlag pretenure) {
4001 if (length < 0 || length > String::kMaxLength) { 3985 if (length < 0 || length > String::kMaxLength) {
4002 return isolate()->ThrowInvalidStringLength(); 3986 return isolate()->ThrowInvalidStringLength();
4003 } 3987 }
4004 int size = SeqTwoByteString::SizeFor(length); 3988 int size = SeqTwoByteString::SizeFor(length);
4005 ASSERT(size <= SeqTwoByteString::kMaxSize); 3989 ASSERT(size <= SeqTwoByteString::kMaxSize);
4006 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); 3990 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
4007 3991
4008 Object* result; 3992 HeapObject* result;
4009 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); 3993 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
4010 if (!maybe_result->ToObject(&result)) return maybe_result; 3994 if (!allocation.To(&result)) return allocation;
4011 } 3995 }
4012 3996
4013 // Partially initialize the object. 3997 // Partially initialize the object.
4014 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); 3998 result->set_map_no_write_barrier(string_map());
4015 String::cast(result)->set_length(length); 3999 String::cast(result)->set_length(length);
4016 String::cast(result)->set_hash_field(String::kEmptyHashField); 4000 String::cast(result)->set_hash_field(String::kEmptyHashField);
4017 ASSERT_EQ(size, HeapObject::cast(result)->Size()); 4001 ASSERT_EQ(size, HeapObject::cast(result)->Size());
4018 return result; 4002 return result;
4019 } 4003 }
4020 4004
4021 4005
4022 MaybeObject* Heap::AllocateEmptyFixedArray() { 4006 AllocationResult Heap::AllocateEmptyFixedArray() {
4023 int size = FixedArray::SizeFor(0); 4007 int size = FixedArray::SizeFor(0);
4024 Object* result; 4008 HeapObject* result;
4025 { MaybeObject* maybe_result = 4009 { AllocationResult allocation =
4026 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); 4010 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
4027 if (!maybe_result->ToObject(&result)) return maybe_result; 4011 if (!allocation.To(&result)) return allocation;
4028 } 4012 }
4029 // Initialize the object. 4013 // Initialize the object.
4030 reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier( 4014 result->set_map_no_write_barrier(fixed_array_map());
4031 fixed_array_map()); 4015 FixedArray::cast(result)->set_length(0);
4032 reinterpret_cast<FixedArray*>(result)->set_length(0);
4033 return result; 4016 return result;
4034 } 4017 }
4035 4018
4036 4019
4037 MaybeObject* Heap::AllocateEmptyExternalArray(ExternalArrayType array_type) { 4020 AllocationResult Heap::AllocateEmptyExternalArray(
4021 ExternalArrayType array_type) {
4038 return AllocateExternalArray(0, array_type, NULL, TENURED); 4022 return AllocateExternalArray(0, array_type, NULL, TENURED);
4039 } 4023 }
4040 4024
4041 4025
4042 MaybeObject* Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { 4026 AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) {
4043 if (!InNewSpace(src)) { 4027 if (!InNewSpace(src)) {
4044 return src; 4028 return src;
4045 } 4029 }
4046 4030
4047 int len = src->length(); 4031 int len = src->length();
4048 Object* obj; 4032 HeapObject* obj;
4049 { MaybeObject* maybe_obj = AllocateRawFixedArray(len, TENURED); 4033 { AllocationResult allocation = AllocateRawFixedArray(len, TENURED);
4050 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 4034 if (!allocation.To(&obj)) return allocation;
4051 } 4035 }
4052 HeapObject::cast(obj)->set_map_no_write_barrier(fixed_array_map()); 4036 obj->set_map_no_write_barrier(fixed_array_map());
4053 FixedArray* result = FixedArray::cast(obj); 4037 FixedArray* result = FixedArray::cast(obj);
4054 result->set_length(len); 4038 result->set_length(len);
4055 4039
4056 // Copy the content 4040 // Copy the content
4057 DisallowHeapAllocation no_gc; 4041 DisallowHeapAllocation no_gc;
4058 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); 4042 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
4059 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); 4043 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
4060 4044
4061 // TODO(mvstanton): The map is set twice because of protection against calling 4045 // TODO(mvstanton): The map is set twice because of protection against calling
4062 // set() on a COW FixedArray. Issue v8:3221 created to track this, and 4046 // set() on a COW FixedArray. Issue v8:3221 created to track this, and
4063 // we might then be able to remove this whole method. 4047 // we might then be able to remove this whole method.
4064 HeapObject::cast(obj)->set_map_no_write_barrier(fixed_cow_array_map()); 4048 HeapObject::cast(obj)->set_map_no_write_barrier(fixed_cow_array_map());
4065 return result; 4049 return result;
4066 } 4050 }
4067 4051
4068 4052
4069 MaybeObject* Heap::AllocateEmptyFixedTypedArray(ExternalArrayType array_type) { 4053 AllocationResult Heap::AllocateEmptyFixedTypedArray(
4054 ExternalArrayType array_type) {
4070 return AllocateFixedTypedArray(0, array_type, TENURED); 4055 return AllocateFixedTypedArray(0, array_type, TENURED);
4071 } 4056 }
4072 4057
4073 4058
4074 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { 4059 AllocationResult Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
4075 int len = src->length(); 4060 int len = src->length();
4076 Object* obj; 4061 HeapObject* obj;
4077 { MaybeObject* maybe_obj = AllocateRawFixedArray(len, NOT_TENURED); 4062 { AllocationResult allocation = AllocateRawFixedArray(len, NOT_TENURED);
4078 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 4063 if (!allocation.To(&obj)) return allocation;
4079 } 4064 }
4080 if (InNewSpace(obj)) { 4065 if (InNewSpace(obj)) {
4081 HeapObject* dst = HeapObject::cast(obj); 4066 obj->set_map_no_write_barrier(map);
4082 dst->set_map_no_write_barrier(map); 4067 CopyBlock(obj->address() + kPointerSize,
4083 CopyBlock(dst->address() + kPointerSize,
4084 src->address() + kPointerSize, 4068 src->address() + kPointerSize,
4085 FixedArray::SizeFor(len) - kPointerSize); 4069 FixedArray::SizeFor(len) - kPointerSize);
4086 return obj; 4070 return obj;
4087 } 4071 }
4088 HeapObject::cast(obj)->set_map_no_write_barrier(map); 4072 obj->set_map_no_write_barrier(map);
4089 FixedArray* result = FixedArray::cast(obj); 4073 FixedArray* result = FixedArray::cast(obj);
4090 result->set_length(len); 4074 result->set_length(len);
4091 4075
4092 // Copy the content 4076 // Copy the content
4093 DisallowHeapAllocation no_gc; 4077 DisallowHeapAllocation no_gc;
4094 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); 4078 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
4095 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); 4079 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
4096 return result; 4080 return result;
4097 } 4081 }
4098 4082
4099 4083
4100 MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, 4084 AllocationResult Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
4101 Map* map) { 4085 Map* map) {
4102 int len = src->length(); 4086 int len = src->length();
4103 Object* obj; 4087 HeapObject* obj;
4104 { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(len, NOT_TENURED); 4088 { AllocationResult allocation = AllocateRawFixedDoubleArray(len, NOT_TENURED);
4105 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 4089 if (!allocation.To(&obj)) return allocation;
4106 } 4090 }
4107 HeapObject* dst = HeapObject::cast(obj); 4091 obj->set_map_no_write_barrier(map);
4108 dst->set_map_no_write_barrier(map);
4109 CopyBlock( 4092 CopyBlock(
4110 dst->address() + FixedDoubleArray::kLengthOffset, 4093 obj->address() + FixedDoubleArray::kLengthOffset,
4111 src->address() + FixedDoubleArray::kLengthOffset, 4094 src->address() + FixedDoubleArray::kLengthOffset,
4112 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); 4095 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
4113 return obj; 4096 return obj;
4114 } 4097 }
4115 4098
4116 4099
4117 MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, 4100 AllocationResult Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src,
4118 Map* map) { 4101 Map* map) {
4119 int int64_entries = src->count_of_int64_entries(); 4102 int int64_entries = src->count_of_int64_entries();
4120 int code_ptr_entries = src->count_of_code_ptr_entries(); 4103 int code_ptr_entries = src->count_of_code_ptr_entries();
4121 int heap_ptr_entries = src->count_of_heap_ptr_entries(); 4104 int heap_ptr_entries = src->count_of_heap_ptr_entries();
4122 int int32_entries = src->count_of_int32_entries(); 4105 int int32_entries = src->count_of_int32_entries();
4123 Object* obj; 4106 HeapObject* obj;
4124 { MaybeObject* maybe_obj = 4107 { AllocationResult allocation =
4125 AllocateConstantPoolArray(int64_entries, code_ptr_entries, 4108 AllocateConstantPoolArray(int64_entries, code_ptr_entries,
4126 heap_ptr_entries, int32_entries); 4109 heap_ptr_entries, int32_entries);
4127 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 4110 if (!allocation.To(&obj)) return allocation;
4128 } 4111 }
4129 HeapObject* dst = HeapObject::cast(obj); 4112 obj->set_map_no_write_barrier(map);
4130 dst->set_map_no_write_barrier(map);
4131 int size = ConstantPoolArray::SizeFor( 4113 int size = ConstantPoolArray::SizeFor(
4132 int64_entries, code_ptr_entries, heap_ptr_entries, int32_entries); 4114 int64_entries, code_ptr_entries, heap_ptr_entries, int32_entries);
4133 CopyBlock( 4115 CopyBlock(
4134 dst->address() + ConstantPoolArray::kLengthOffset, 4116 obj->address() + ConstantPoolArray::kLengthOffset,
4135 src->address() + ConstantPoolArray::kLengthOffset, 4117 src->address() + ConstantPoolArray::kLengthOffset,
4136 size - ConstantPoolArray::kLengthOffset); 4118 size - ConstantPoolArray::kLengthOffset);
4137 return obj; 4119 return obj;
4138 } 4120 }
4139 4121
4140 4122
4141 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { 4123 AllocationResult Heap::AllocateRawFixedArray(int length,
4124 PretenureFlag pretenure) {
4142 if (length < 0 || length > FixedArray::kMaxLength) { 4125 if (length < 0 || length > FixedArray::kMaxLength) {
4143 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); 4126 v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
4144 } 4127 }
4145 int size = FixedArray::SizeFor(length); 4128 int size = FixedArray::SizeFor(length);
4146 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); 4129 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
4147 4130
4148 return AllocateRaw(size, space, OLD_POINTER_SPACE); 4131 return AllocateRaw(size, space, OLD_POINTER_SPACE);
4149 } 4132 }
4150 4133
4151 4134
4152 MaybeObject* Heap::AllocateFixedArrayWithFiller(int length, 4135 AllocationResult Heap::AllocateFixedArrayWithFiller(int length,
4153 PretenureFlag pretenure, 4136 PretenureFlag pretenure,
4154 Object* filler) { 4137 Object* filler) {
4155 ASSERT(length >= 0); 4138 ASSERT(length >= 0);
4156 ASSERT(empty_fixed_array()->IsFixedArray()); 4139 ASSERT(empty_fixed_array()->IsFixedArray());
4157 if (length == 0) return empty_fixed_array(); 4140 if (length == 0) return empty_fixed_array();
4158 4141
4159 ASSERT(!InNewSpace(filler)); 4142 ASSERT(!InNewSpace(filler));
4160 Object* result; 4143 HeapObject* result;
4161 { MaybeObject* maybe_result = AllocateRawFixedArray(length, pretenure); 4144 { AllocationResult allocation = AllocateRawFixedArray(length, pretenure);
4162 if (!maybe_result->ToObject(&result)) return maybe_result; 4145 if (!allocation.To(&result)) return allocation;
4163 } 4146 }
4164 4147
4165 HeapObject::cast(result)->set_map_no_write_barrier(fixed_array_map()); 4148 result->set_map_no_write_barrier(fixed_array_map());
4166 FixedArray* array = FixedArray::cast(result); 4149 FixedArray* array = FixedArray::cast(result);
4167 array->set_length(length); 4150 array->set_length(length);
4168 MemsetPointer(array->data_start(), filler, length); 4151 MemsetPointer(array->data_start(), filler, length);
4169 return array; 4152 return array;
4170 } 4153 }
4171 4154
4172 4155
4173 MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) { 4156 AllocationResult Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
4174 return AllocateFixedArrayWithFiller(length, pretenure, undefined_value()); 4157 return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
4175 } 4158 }
4176 4159
4177 4160
// Allocates a FixedArray whose element slots are left uninitialized; the
// caller is responsible for filling every slot before the next GC can see
// the array.  Always allocated NOT_TENURED (new space if it fits).
AllocationResult Heap::AllocateUninitializedFixedArray(int length) {
  if (length == 0) return empty_fixed_array();

  HeapObject* obj;
  { AllocationResult allocation = AllocateRawFixedArray(length, NOT_TENURED);
    if (!allocation.To(&obj)) return allocation;
  }

  // Map must be installed before the object is interpreted as a FixedArray.
  obj->set_map_no_write_barrier(fixed_array_map());
  FixedArray::cast(obj)->set_length(length);
  return obj;
}
4191 4173
4192 4174
// Allocates a FixedDoubleArray with uninitialized element storage.  The
// caller must write every double slot itself.  Length 0 shares the
// canonical empty_fixed_array() sentinel, same as the pointer-array case.
AllocationResult Heap::AllocateUninitializedFixedDoubleArray(
    int length,
    PretenureFlag pretenure) {
  if (length == 0) return empty_fixed_array();

  HeapObject* elements;
  AllocationResult allocation = AllocateRawFixedDoubleArray(length, pretenure);
  if (!allocation.To(&elements)) return allocation;

  // Install the map first, then set the length via the typed view.
  elements->set_map_no_write_barrier(fixed_double_array_map());
  FixedDoubleArray::cast(elements)->set_length(length);
  return elements;
}
4208 4188
4209 4189
// Allocates raw, map-less storage for a FixedDoubleArray of |length|
// elements in OLD_DATA_SPACE (or the space selected for |pretenure|).
// Out-of-range lengths are a fatal OOM, not a normal allocation failure.
AllocationResult Heap::AllocateRawFixedDoubleArray(int length,
                                                   PretenureFlag pretenure) {
  if (length < 0 || length > FixedDoubleArray::kMaxLength) {
    v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
  }
  int size = FixedDoubleArray::SizeFor(length);
#ifndef V8_HOST_ARCH_64_BIT
  // On 32-bit hosts reserve one extra pointer of slack so the payload can
  // be shifted to an 8-byte boundary by EnsureDoubleAligned below.
  size += kPointerSize;
#endif
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);

  HeapObject* object;
  { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&object)) return allocation;
  }

  return EnsureDoubleAligned(this, object, size);
}
4228 4208
4229 4209
// Allocates a tenured ConstantPoolArray with room for the four entry kinds.
// Code-pointer entries are pre-filled with the kIllegal builtin's entry
// address and heap-pointer entries with undefined, so the array is GC-safe
// immediately after allocation.  int64/int32 entries are left raw.
AllocationResult Heap::AllocateConstantPoolArray(int number_of_int64_entries,
                                                 int number_of_code_ptr_entries,
                                                 int number_of_heap_ptr_entries,
                                                 int number_of_int32_entries) {
  CHECK(number_of_int64_entries >= 0 &&
        number_of_int64_entries <= ConstantPoolArray::kMaxEntriesPerType &&
        number_of_code_ptr_entries >= 0 &&
        number_of_code_ptr_entries <= ConstantPoolArray::kMaxEntriesPerType &&
        number_of_heap_ptr_entries >= 0 &&
        number_of_heap_ptr_entries <= ConstantPoolArray::kMaxEntriesPerType &&
        number_of_int32_entries >= 0 &&
        number_of_int32_entries <= ConstantPoolArray::kMaxEntriesPerType);
  int size = ConstantPoolArray::SizeFor(number_of_int64_entries,
                                        number_of_code_ptr_entries,
                                        number_of_heap_ptr_entries,
                                        number_of_int32_entries);
#ifndef V8_HOST_ARCH_64_BIT
  // Extra slack on 32-bit hosts so EnsureDoubleAligned can 8-byte-align
  // the int64 section.
  size += kPointerSize;
#endif
  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);

  HeapObject* object;
  { AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE);
    if (!allocation.To(&object)) return allocation;
  }
  object = EnsureDoubleAligned(this, object, size);
  object->set_map_no_write_barrier(constant_pool_array_map());

  ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
  constant_pool->Init(number_of_int64_entries,
                      number_of_code_ptr_entries,
                      number_of_heap_ptr_entries,
                      number_of_int32_entries);
  if (number_of_code_ptr_entries > 0) {
    int offset =
        constant_pool->OffsetOfElementAt(constant_pool->first_code_ptr_index());
    MemsetPointer(
        reinterpret_cast<Address*>(HeapObject::RawField(constant_pool, offset)),
        isolate()->builtins()->builtin(Builtins::kIllegal)->entry(),
        number_of_code_ptr_entries);
  }
  if (number_of_heap_ptr_entries > 0) {
    int offset =
        constant_pool->OffsetOfElementAt(constant_pool->first_heap_ptr_index());
    MemsetPointer(
        HeapObject::RawField(constant_pool, offset),
        undefined_value(),
        number_of_heap_ptr_entries);
  }
  return constant_pool;
}
4282 4261
4283 4262
// Allocates the canonical empty ConstantPoolArray (zero entries of every
// kind).  With no pointer entries it can live in OLD_DATA_SPACE.
AllocationResult Heap::AllocateEmptyConstantPoolArray() {
  int size = ConstantPoolArray::SizeFor(0, 0, 0, 0);
  HeapObject* result;
  { AllocationResult allocation =
        AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }
  result->set_map_no_write_barrier(constant_pool_array_map());
  ConstantPoolArray::cast(result)->Init(0, 0, 0, 0);
  return result;
}
4295 4274
4296 4275
// Allocates a fresh Symbol in old pointer space with a random non-zero
// hash, an undefined name and cleared flags (hence not private).
AllocationResult Heap::AllocateSymbol() {
  // Statically ensure that it is safe to allocate symbols in paged spaces.
  STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize);

  HeapObject* result;
  AllocationResult allocation =
      AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
  if (!allocation.To(&result)) return allocation;

  result->set_map_no_write_barrier(symbol_map());

  // Generate a random hash value.  Retry a bounded number of times to
  // avoid the reserved value 0; force 1 if we are unlucky 30 times.
  int hash;
  int attempts = 0;
  do {
    hash = isolate()->random_number_generator()->NextInt() & Name::kHashBitMask;
    attempts++;
  } while (hash == 0 && attempts < 30);
  if (hash == 0) hash = 1;  // never return 0

  Symbol::cast(result)->set_hash_field(
      Name::kIsNotArrayIndexMask | (hash << Name::kHashShift));
  Symbol::cast(result)->set_name(undefined_value());
  Symbol::cast(result)->set_flags(Smi::FromInt(0));

  ASSERT(!Symbol::cast(result)->is_private());
  return result;
}
4325 4304
4326 4305
// Allocates a tenured Struct instance for the given STRUCT_LIST instance
// type, mapping the type to its canonical map and zero-initializing the
// body.  Unknown types are a programming error (UNREACHABLE).
AllocationResult Heap::AllocateStruct(InstanceType type) {
  Map* map;
  switch (type) {
#define MAKE_CASE(NAME, Name, name) \
    case NAME##_TYPE: map = name##_map(); break;
STRUCT_LIST(MAKE_CASE)
#undef MAKE_CASE
    default:
      UNREACHABLE();
      return exception();
  }
  int size = map->instance_size();
  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
  Struct* result;
  { AllocationResult allocation = Allocate(map, space);
    if (!allocation.To(&result)) return allocation;
  }
  result->InitializeBody(size);
  return result;
}
4347 4326
4348 4327
4349 bool Heap::IsHeapIterable() { 4328 bool Heap::IsHeapIterable() {
4350 return (!old_pointer_space()->was_swept_conservatively() && 4329 return (!old_pointer_space()->was_swept_conservatively() &&
4351 !old_data_space()->was_swept_conservatively()); 4330 !old_data_space()->was_swept_conservatively());
4352 } 4331 }
4353 4332
4354 4333
(...skipping 278 matching lines...) Expand 10 before | Expand all | Expand 10 after
4633 case CODE_SPACE: 4612 case CODE_SPACE:
4634 return code_space_->Contains(addr); 4613 return code_space_->Contains(addr);
4635 case MAP_SPACE: 4614 case MAP_SPACE:
4636 return map_space_->Contains(addr); 4615 return map_space_->Contains(addr);
4637 case CELL_SPACE: 4616 case CELL_SPACE:
4638 return cell_space_->Contains(addr); 4617 return cell_space_->Contains(addr);
4639 case PROPERTY_CELL_SPACE: 4618 case PROPERTY_CELL_SPACE:
4640 return property_cell_space_->Contains(addr); 4619 return property_cell_space_->Contains(addr);
4641 case LO_SPACE: 4620 case LO_SPACE:
4642 return lo_space_->SlowContains(addr); 4621 return lo_space_->SlowContains(addr);
4622 default:
4623 break;
4643 } 4624 }
4644 4625 UNREACHABLE();
4645 return false; 4626 return false;
4646 } 4627 }
4647 4628
4648 4629
4649 #ifdef VERIFY_HEAP 4630 #ifdef VERIFY_HEAP
4650 void Heap::Verify() { 4631 void Heap::Verify() {
4651 CHECK(HasBeenSetUp()); 4632 CHECK(HasBeenSetUp());
4652 HandleScope scope(isolate()); 4633 HandleScope scope(isolate());
4653 4634
4654 store_buffer()->Verify(); 4635 store_buffer()->Verify();
(...skipping 1837 matching lines...) Expand 10 before | Expand all | Expand 10 after
6492 static_cast<int>(object_sizes_last_time_[index])); 6473 static_cast<int>(object_sizes_last_time_[index]));
6493 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) 6474 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT)
6494 #undef ADJUST_LAST_TIME_OBJECT_COUNT 6475 #undef ADJUST_LAST_TIME_OBJECT_COUNT
6495 6476
6496 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); 6477 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
6497 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); 6478 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
6498 ClearObjectStats(); 6479 ClearObjectStats();
6499 } 6480 }
6500 6481
6501 } } // namespace v8::internal 6482 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698