Chromium Code Reviews

Side by Side Diff: src/heap.cc

Issue 4100005: Version 2.5.2 (Closed)
Patch Set: Created 10 years, 1 month ago
OLD | NEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 38 matching lines...)
49 49
50 50
51 namespace v8 { 51 namespace v8 {
52 namespace internal { 52 namespace internal {
53 53
54 54
55 String* Heap::hidden_symbol_; 55 String* Heap::hidden_symbol_;
56 Object* Heap::roots_[Heap::kRootListLength]; 56 Object* Heap::roots_[Heap::kRootListLength];
57 Object* Heap::global_contexts_list_; 57 Object* Heap::global_contexts_list_;
58 58
+
 NewSpace Heap::new_space_;
 OldSpace* Heap::old_pointer_space_ = NULL;
 OldSpace* Heap::old_data_space_ = NULL;
 OldSpace* Heap::code_space_ = NULL;
 MapSpace* Heap::map_space_ = NULL;
 CellSpace* Heap::cell_space_ = NULL;
 LargeObjectSpace* Heap::lo_space_ = NULL;
 
+static const intptr_t kMinimumPromotionLimit = 2 * MB;
+static const intptr_t kMinimumAllocationLimit = 8 * MB;
+
67 intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit; 71 intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
68 intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit; 72 intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
69 73
70 int Heap::old_gen_exhausted_ = false; 74 int Heap::old_gen_exhausted_ = false;
71 75
72 int Heap::amount_of_external_allocated_memory_ = 0; 76 int Heap::amount_of_external_allocated_memory_ = 0;
73 int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0; 77 int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
74 78
75 // semispace_size_ should be a power of 2 and old_generation_size_ should be 79 // semispace_size_ should be a power of 2 and old_generation_size_ should be
76 // a multiple of Page::kPageSize. 80 // a multiple of Page::kPageSize.
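The comment above states two invariants. As a minimal illustration only (not part of this patch; the helper below is written out just for clarity), they could be checked as:

// Illustrative only. A positive power of two has exactly one bit set, and a
// multiple of Page::kPageSize leaves no remainder.
static inline bool IsPowerOf2(intptr_t x) { return x > 0 && (x & (x - 1)) == 0; }
// ASSERT(IsPowerOf2(semispace_size_));
// ASSERT(old_generation_size_ % Page::kPageSize == 0);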
(...skipping 331 matching lines...)
408 Counters::number_of_symbols.Set(symbol_table()->NumberOfElements()); 412 Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
409 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 413 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
410 ReportStatisticsAfterGC(); 414 ReportStatisticsAfterGC();
411 #endif 415 #endif
412 #ifdef ENABLE_DEBUGGER_SUPPORT 416 #ifdef ENABLE_DEBUGGER_SUPPORT
413 Debug::AfterGarbageCollection(); 417 Debug::AfterGarbageCollection();
414 #endif 418 #endif
415 } 419 }
416 420
417 421
-void Heap::CollectAllGarbage(bool force_compaction,
-                             CollectionPolicy collectionPolicy) {
+void Heap::CollectAllGarbage(bool force_compaction) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
   MarkCompactCollector::SetForceCompaction(force_compaction);
-  CollectGarbage(OLD_POINTER_SPACE, collectionPolicy);
+  CollectGarbage(OLD_POINTER_SPACE);
   MarkCompactCollector::SetForceCompaction(false);
 }
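For reference, a minimal caller-side sketch under the signatures in this patch (hypothetical call sites, not part of this file): the CollectionPolicy argument is gone, so callers pass only the compaction flag, and a space-specific collection no longer takes a policy either.

// Hypothetical call sites, assuming only the signatures shown in this patch.
Heap::CollectAllGarbage(false);           // full GC, compaction left optional
Heap::CollectAllGarbage(true);            // full GC, compaction forced
Heap::CollectGarbage(OLD_POINTER_SPACE);  // collector chosen from target space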
427 430
428 431
-void Heap::CollectAllAvailableGarbage() {
-  CompilationCache::Clear();
-  CollectAllGarbage(true, AGGRESSIVE);
-}
-
-
-void Heap::CollectGarbage(AllocationSpace space,
-                          CollectionPolicy collectionPolicy) {
+void Heap::CollectGarbage(AllocationSpace space) {
437 // The VM is in the GC state until exiting this function. 433 // The VM is in the GC state until exiting this function.
438 VMState state(GC); 434 VMState state(GC);
439 435
440 #ifdef DEBUG 436 #ifdef DEBUG
441 // Reset the allocation timeout to the GC interval, but make sure to 437 // Reset the allocation timeout to the GC interval, but make sure to
442 // allow at least a few allocations after a collection. The reason 438 // allow at least a few allocations after a collection. The reason
443 // for this is that we have a lot of allocation sequences and we 439 // for this is that we have a lot of allocation sequences and we
444 // assume that a garbage collection will allow the subsequent 440 // assume that a garbage collection will allow the subsequent
445 // allocation attempts to go through. 441 // allocation attempts to go through.
446 allocation_timeout_ = Max(6, FLAG_gc_interval); 442 allocation_timeout_ = Max(6, FLAG_gc_interval);
447 #endif 443 #endif
448 444
449 { GCTracer tracer; 445 { GCTracer tracer;
450 GarbageCollectionPrologue(); 446 GarbageCollectionPrologue();
451 // The GC count was incremented in the prologue. Tell the tracer about 447 // The GC count was incremented in the prologue. Tell the tracer about
452 // it. 448 // it.
453 tracer.set_gc_count(gc_count_); 449 tracer.set_gc_count(gc_count_);
454 450
455 GarbageCollector collector = SelectGarbageCollector(space); 451 GarbageCollector collector = SelectGarbageCollector(space);
456 // Tell the tracer which collector we've selected. 452 // Tell the tracer which collector we've selected.
457 tracer.set_collector(collector); 453 tracer.set_collector(collector);
458 454
459 HistogramTimer* rate = (collector == SCAVENGER) 455 HistogramTimer* rate = (collector == SCAVENGER)
460 ? &Counters::gc_scavenger 456 ? &Counters::gc_scavenger
461 : &Counters::gc_compactor; 457 : &Counters::gc_compactor;
462 rate->Start(); 458 rate->Start();
-    PerformGarbageCollection(collector, &tracer, collectionPolicy);
+    PerformGarbageCollection(collector, &tracer);
464 rate->Stop(); 460 rate->Stop();
465 461
466 GarbageCollectionEpilogue(); 462 GarbageCollectionEpilogue();
467 } 463 }
468 464
469 465
470 #ifdef ENABLE_LOGGING_AND_PROFILING 466 #ifdef ENABLE_LOGGING_AND_PROFILING
471 if (FLAG_log_gc) HeapProfiler::WriteSample(); 467 if (FLAG_log_gc) HeapProfiler::WriteSample();
472 if (CpuProfiler::is_profiling()) CpuProfiler::ProcessMovedFunctions(); 468 if (CpuProfiler::is_profiling()) CpuProfiler::ProcessMovedFunctions();
473 #endif 469 #endif
474 } 470 }
475 471
476 472
477 void Heap::PerformScavenge() { 473 void Heap::PerformScavenge() {
478 GCTracer tracer; 474 GCTracer tracer;
-  PerformGarbageCollection(SCAVENGER, &tracer, NORMAL);
+  PerformGarbageCollection(SCAVENGER, &tracer);
480 } 476 }
481 477
482 478
483 #ifdef DEBUG 479 #ifdef DEBUG
484 // Helper class for verifying the symbol table. 480 // Helper class for verifying the symbol table.
485 class SymbolTableVerifier : public ObjectVisitor { 481 class SymbolTableVerifier : public ObjectVisitor {
486 public: 482 public:
487 SymbolTableVerifier() { } 483 SymbolTableVerifier() { }
488 void VisitPointers(Object** start, Object** end) { 484 void VisitPointers(Object** start, Object** end) {
489 // Visit all HeapObject pointers in [start, end). 485 // Visit all HeapObject pointers in [start, end).
(...skipping 164 matching lines...)
654 } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) { 650 } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
655 set_survival_rate_trend(INCREASING); 651 set_survival_rate_trend(INCREASING);
656 } else { 652 } else {
657 set_survival_rate_trend(STABLE); 653 set_survival_rate_trend(STABLE);
658 } 654 }
659 655
660 survival_rate_ = survival_rate; 656 survival_rate_ = survival_rate;
661 } 657 }
662 658
 void Heap::PerformGarbageCollection(GarbageCollector collector,
-                                    GCTracer* tracer,
-                                    CollectionPolicy collectionPolicy) {
+                                    GCTracer* tracer) {
   if (collector != SCAVENGER) {
     PROFILE(CodeMovingGCEvent());
   }
 
   VerifySymbolTable();
   if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
     ASSERT(!allocation_allowed_);
     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
     global_gc_prologue_callback_();
   }
(...skipping 13 matching lines...)
 
   if (collector == MARK_COMPACTOR) {
     // Perform mark-sweep with optional compaction.
     MarkCompact(tracer);
 
     bool high_survival_rate_during_scavenges = IsHighSurvivalRate() &&
         IsStableOrIncreasingSurvivalTrend();
 
     UpdateSurvivalRateTrend(start_new_space_size);
 
-    UpdateOldSpaceLimits();
+    intptr_t old_gen_size = PromotedSpaceSize();
+    old_gen_promotion_limit_ =
+        old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
+    old_gen_allocation_limit_ =
+        old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
 
-    // Major GC would invoke weak handle callbacks on weakly reachable
-    // handles, but won't collect weakly reachable objects until next
-    // major GC. Therefore if we collect aggressively and weak handle callback
-    // has been invoked, we rerun major GC to release objects which become
-    // garbage.
-    if (collectionPolicy == AGGRESSIVE) {
-      // Note: as weak callbacks can execute arbitrary code, we cannot
-      // hope that eventually there will be no weak callbacks invocations.
-      // Therefore stop recollecting after several attempts.
-      const int kMaxNumberOfAttempts = 7;
-      for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
-        { DisableAssertNoAllocation allow_allocation;
-          GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
-          if (!GlobalHandles::PostGarbageCollectionProcessing()) break;
-        }
-        MarkCompact(tracer);
-        // Weak handle callbacks can allocate data, so keep limits correct.
-        UpdateOldSpaceLimits();
-      }
-    } else {
-      if (high_survival_rate_during_scavenges &&
-          IsStableOrIncreasingSurvivalTrend()) {
-        // Stable high survival rates of young objects both during partial and
-        // full collection indicate that mutator is either building or modifying
-        // a structure with a long lifetime.
-        // In this case we aggressively raise old generation memory limits to
-        // postpone subsequent mark-sweep collection and thus trade memory
-        // space for the mutation speed.
-        old_gen_promotion_limit_ *= 2;
-        old_gen_allocation_limit_ *= 2;
-      }
+    if (high_survival_rate_during_scavenges &&
+        IsStableOrIncreasingSurvivalTrend()) {
+      // Stable high survival rates of young objects both during partial and
+      // full collection indicate that mutator is either building or modifying
+      // a structure with a long lifetime.
+      // In this case we aggressively raise old generation memory limits to
+      // postpone subsequent mark-sweep collection and thus trade memory
+      // space for the mutation speed.
+      old_gen_promotion_limit_ *= 2;
+      old_gen_allocation_limit_ *= 2;
     }
 
-    { DisableAssertNoAllocation allow_allocation;
-      GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
-      GlobalHandles::PostGarbageCollectionProcessing();
-    }
+    old_gen_exhausted_ = false;
   } else {
     tracer_ = tracer;
     Scavenge();
     tracer_ = NULL;
 
     UpdateSurvivalRateTrend(start_new_space_size);
   }
 
   Counters::objs_since_last_young.Set(0);
 
+  if (collector == MARK_COMPACTOR) {
+    DisableAssertNoAllocation allow_allocation;
+    GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
+    GlobalHandles::PostGarbageCollectionProcessing();
+  }
+
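For concreteness, the new limit computation above works out as follows (illustrative numbers only):

// Illustrative arithmetic using the constants added in this patch
// (kMinimumPromotionLimit = 2 * MB, kMinimumAllocationLimit = 8 * MB).
// Suppose PromotedSpaceSize() returns 30 MB after mark-sweep:
//   old_gen_promotion_limit_  = 30 MB + Max(2 MB, 30 MB / 3) = 40 MB
//   old_gen_allocation_limit_ = 30 MB + Max(8 MB, 30 MB / 2) = 45 MB
// With a stable, high young-generation survival rate both limits are then
// doubled (80 MB and 90 MB), postponing the next mark-sweep.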
748 // Update relocatables. 729 // Update relocatables.
749 Relocatable::PostGarbageCollectionProcessing(); 730 Relocatable::PostGarbageCollectionProcessing();
750 731
751 if (collector == MARK_COMPACTOR) { 732 if (collector == MARK_COMPACTOR) {
752 // Register the amount of external allocated memory. 733 // Register the amount of external allocated memory.
753 amount_of_external_allocated_memory_at_last_global_gc_ = 734 amount_of_external_allocated_memory_at_last_global_gc_ =
754 amount_of_external_allocated_memory_; 735 amount_of_external_allocated_memory_;
755 } 736 }
756 737
757 GCCallbackFlags callback_flags = tracer->is_compacting() 738 GCCallbackFlags callback_flags = tracer->is_compacting()
(...skipping 56 matching lines...)
814 795
815 CompletelyClearInstanceofCache(); 796 CompletelyClearInstanceofCache();
816 797
817 if (is_compacting) FlushNumberStringCache(); 798 if (is_compacting) FlushNumberStringCache();
818 799
819 ClearNormalizedMapCaches(); 800 ClearNormalizedMapCaches();
820 } 801 }
821 802
822 803
 Object* Heap::FindCodeObject(Address a) {
-  Object* obj = code_space_->FindObject(a);
-  if (obj->IsFailure()) {
-    obj = lo_space_->FindObject(a);
+  Object* obj = NULL;  // Initialization to please compiler.
+  { MaybeObject* maybe_obj = code_space_->FindObject(a);
+    if (!maybe_obj->ToObject(&obj)) {
+      obj = lo_space_->FindObject(a)->ToObjectUnchecked();
+    }
   }
-  ASSERT(!obj->IsFailure());
   return obj;
 }
831 813
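The FindCodeObject change above is the first of many conversions in this file from failure-checked Object* results to MaybeObject*. The recurring caller-side pattern is sketched below, with a hypothetical AllocateSomething() standing in for the real allocators (AllocateRawMap(), AllocateFixedArray(), and so on):

// Sketch of the recurring pattern in this patch; AllocateSomething() is a
// placeholder, not a real heap.cc function.
Object* result;
{ MaybeObject* maybe_result = AllocateSomething();
  // ToObject() fails when the allocation failed; in that case the MaybeObject*
  // itself is returned to the caller, replacing the old
  // "if (result->IsFailure()) return result;" idiom.
  if (!maybe_result->ToObject(&result)) return maybe_result;
}
// Past this point, result is an ordinary Object*, exactly as before.

Where a failure is impossible or must not be propagated, the patch instead calls ToObjectUnchecked(), as in FindCodeObject above.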
832 814
833 // Helper class for copying HeapObjects 815 // Helper class for copying HeapObjects
834 class ScavengeVisitor: public ObjectVisitor { 816 class ScavengeVisitor: public ObjectVisitor {
835 public: 817 public:
836 818
837 void VisitPointer(Object** p) { ScavengePointer(p); } 819 void VisitPointer(Object** p) { ScavengePointer(p); }
838 820
(...skipping 433 matching lines...)
 template<ObjectContents object_contents, SizeRestriction size_restriction>
 static inline void EvacuateObject(Map* map,
                                   HeapObject** slot,
                                   HeapObject* object,
                                   int object_size) {
   ASSERT((size_restriction != SMALL) ||
          (object_size <= Page::kMaxHeapObjectSize));
   ASSERT(object->Size() == object_size);
 
   if (Heap::ShouldBePromoted(object->address(), object_size)) {
-    Object* result;
+    MaybeObject* maybe_result;
 
     if ((size_restriction != SMALL) &&
         (object_size > Page::kMaxHeapObjectSize)) {
-      result = Heap::lo_space()->AllocateRawFixedArray(object_size);
+      maybe_result = Heap::lo_space()->AllocateRawFixedArray(object_size);
     } else {
       if (object_contents == DATA_OBJECT) {
-        result = Heap::old_data_space()->AllocateRaw(object_size);
+        maybe_result = Heap::old_data_space()->AllocateRaw(object_size);
       } else {
-        result = Heap::old_pointer_space()->AllocateRaw(object_size);
+        maybe_result = Heap::old_pointer_space()->AllocateRaw(object_size);
       }
     }
 
-    if (!result->IsFailure()) {
+    Object* result = NULL;  // Initialization to please compiler.
+    if (maybe_result->ToObject(&result)) {
       HeapObject* target = HeapObject::cast(result);
       *slot = MigrateObject(object, target, object_size);
 
       if (object_contents == POINTER_OBJECT) {
         promotion_queue.insert(target, object_size);
       }
 
       Heap::tracer()->increment_promoted_objects_size(object_size);
       return;
     }
   }
-  Object* result = Heap::new_space()->AllocateRaw(object_size);
-  ASSERT(!result->IsFailure());
+  Object* result =
+      Heap::new_space()->AllocateRaw(object_size)->ToObjectUnchecked();
   *slot = MigrateObject(object, HeapObject::cast(result), object_size);
   return;
 }
1312 1295
1313 1296
1314 static inline void EvacuateFixedArray(Map* map, 1297 static inline void EvacuateFixedArray(Map* map,
1315 HeapObject** slot, 1298 HeapObject** slot,
1316 HeapObject* object) { 1299 HeapObject* object) {
1317 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object); 1300 int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
1318 EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, 1301 EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
(...skipping 101 matching lines...)
1420 Map* map = first_word.ToMap(); 1403 Map* map = first_word.ToMap();
1421 ScavengingVisitor::Scavenge(map, p, object); 1404 ScavengingVisitor::Scavenge(map, p, object);
1422 } 1405 }
1423 1406
1424 1407
1425 void Heap::ScavengePointer(HeapObject** p) { 1408 void Heap::ScavengePointer(HeapObject** p) {
1426 ScavengeObject(p, *p); 1409 ScavengeObject(p, *p);
1427 } 1410 }
1428 1411
1429 1412
-Object* Heap::AllocatePartialMap(InstanceType instance_type,
-                                 int instance_size) {
-  Object* result = AllocateRawMap();
-  if (result->IsFailure()) return result;
+MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
+                                      int instance_size) {
+  Object* result;
+  { MaybeObject* maybe_result = AllocateRawMap();
+    if (!maybe_result->ToObject(&result)) return maybe_result;
+  }
 
1435 // Map::cast cannot be used due to uninitialized map field. 1420 // Map::cast cannot be used due to uninitialized map field.
1436 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map()); 1421 reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
1437 reinterpret_cast<Map*>(result)->set_instance_type(instance_type); 1422 reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
1438 reinterpret_cast<Map*>(result)->set_instance_size(instance_size); 1423 reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
1439 reinterpret_cast<Map*>(result)-> 1424 reinterpret_cast<Map*>(result)->
1440 set_visitor_id( 1425 set_visitor_id(
1441 StaticVisitorBase::GetVisitorId(instance_type, instance_size)); 1426 StaticVisitorBase::GetVisitorId(instance_type, instance_size));
1442 reinterpret_cast<Map*>(result)->set_inobject_properties(0); 1427 reinterpret_cast<Map*>(result)->set_inobject_properties(0);
1443 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0); 1428 reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
1444 reinterpret_cast<Map*>(result)->set_unused_property_fields(0); 1429 reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
1445 reinterpret_cast<Map*>(result)->set_bit_field(0); 1430 reinterpret_cast<Map*>(result)->set_bit_field(0);
1446 reinterpret_cast<Map*>(result)->set_bit_field2(0); 1431 reinterpret_cast<Map*>(result)->set_bit_field2(0);
1447 return result; 1432 return result;
1448 } 1433 }
1449 1434
1450 1435
1451 Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) { 1436 MaybeObject* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
1452 Object* result = AllocateRawMap(); 1437 Object* result;
1453 if (result->IsFailure()) return result; 1438 { MaybeObject* maybe_result = AllocateRawMap();
1439 if (!maybe_result->ToObject(&result)) return maybe_result;
1440 }
1454 1441
1455 Map* map = reinterpret_cast<Map*>(result); 1442 Map* map = reinterpret_cast<Map*>(result);
1456 map->set_map(meta_map()); 1443 map->set_map(meta_map());
1457 map->set_instance_type(instance_type); 1444 map->set_instance_type(instance_type);
1458 map->set_visitor_id( 1445 map->set_visitor_id(
1459 StaticVisitorBase::GetVisitorId(instance_type, instance_size)); 1446 StaticVisitorBase::GetVisitorId(instance_type, instance_size));
1460 map->set_prototype(null_value()); 1447 map->set_prototype(null_value());
1461 map->set_constructor(null_value()); 1448 map->set_constructor(null_value());
1462 map->set_instance_size(instance_size); 1449 map->set_instance_size(instance_size);
1463 map->set_inobject_properties(0); 1450 map->set_inobject_properties(0);
1464 map->set_pre_allocated_property_fields(0); 1451 map->set_pre_allocated_property_fields(0);
1465 map->set_instance_descriptors(empty_descriptor_array()); 1452 map->set_instance_descriptors(empty_descriptor_array());
1466 map->set_code_cache(empty_fixed_array()); 1453 map->set_code_cache(empty_fixed_array());
1467 map->set_unused_property_fields(0); 1454 map->set_unused_property_fields(0);
1468 map->set_bit_field(0); 1455 map->set_bit_field(0);
1469 map->set_bit_field2((1 << Map::kIsExtensible) | (1 << Map::kHasFastElements)); 1456 map->set_bit_field2((1 << Map::kIsExtensible) | (1 << Map::kHasFastElements));
1470 1457
1471 // If the map object is aligned fill the padding area with Smi 0 objects. 1458 // If the map object is aligned fill the padding area with Smi 0 objects.
1472 if (Map::kPadStart < Map::kSize) { 1459 if (Map::kPadStart < Map::kSize) {
1473 memset(reinterpret_cast<byte*>(map) + Map::kPadStart - kHeapObjectTag, 1460 memset(reinterpret_cast<byte*>(map) + Map::kPadStart - kHeapObjectTag,
1474 0, 1461 0,
1475 Map::kSize - Map::kPadStart); 1462 Map::kSize - Map::kPadStart);
1476 } 1463 }
1477 return map; 1464 return map;
1478 } 1465 }
1479 1466
1480 1467
1481 Object* Heap::AllocateCodeCache() { 1468 MaybeObject* Heap::AllocateCodeCache() {
1482 Object* result = AllocateStruct(CODE_CACHE_TYPE); 1469 Object* result;
1483 if (result->IsFailure()) return result; 1470 { MaybeObject* maybe_result = AllocateStruct(CODE_CACHE_TYPE);
1471 if (!maybe_result->ToObject(&result)) return maybe_result;
1472 }
1484 CodeCache* code_cache = CodeCache::cast(result); 1473 CodeCache* code_cache = CodeCache::cast(result);
1485 code_cache->set_default_cache(empty_fixed_array()); 1474 code_cache->set_default_cache(empty_fixed_array());
1486 code_cache->set_normal_type_cache(undefined_value()); 1475 code_cache->set_normal_type_cache(undefined_value());
1487 return code_cache; 1476 return code_cache;
1488 } 1477 }
1489 1478
1490 1479
1491 const Heap::StringTypeTable Heap::string_type_table[] = { 1480 const Heap::StringTypeTable Heap::string_type_table[] = {
1492 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \ 1481 #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
1493 {type, size, k##camel_name##MapRootIndex}, 1482 {type, size, k##camel_name##MapRootIndex},
(...skipping 12 matching lines...) Expand all
1506 1495
1507 const Heap::StructTable Heap::struct_table[] = { 1496 const Heap::StructTable Heap::struct_table[] = {
1508 #define STRUCT_TABLE_ELEMENT(NAME, Name, name) \ 1497 #define STRUCT_TABLE_ELEMENT(NAME, Name, name) \
1509 { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex }, 1498 { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex },
1510 STRUCT_LIST(STRUCT_TABLE_ELEMENT) 1499 STRUCT_LIST(STRUCT_TABLE_ELEMENT)
1511 #undef STRUCT_TABLE_ELEMENT 1500 #undef STRUCT_TABLE_ELEMENT
1512 }; 1501 };
1513 1502
1514 1503
1515 bool Heap::CreateInitialMaps() { 1504 bool Heap::CreateInitialMaps() {
1516 Object* obj = AllocatePartialMap(MAP_TYPE, Map::kSize); 1505 Object* obj;
1517 if (obj->IsFailure()) return false; 1506 { MaybeObject* maybe_obj = AllocatePartialMap(MAP_TYPE, Map::kSize);
1507 if (!maybe_obj->ToObject(&obj)) return false;
1508 }
1518 // Map::cast cannot be used due to uninitialized map field. 1509 // Map::cast cannot be used due to uninitialized map field.
1519 Map* new_meta_map = reinterpret_cast<Map*>(obj); 1510 Map* new_meta_map = reinterpret_cast<Map*>(obj);
1520 set_meta_map(new_meta_map); 1511 set_meta_map(new_meta_map);
1521 new_meta_map->set_map(new_meta_map); 1512 new_meta_map->set_map(new_meta_map);
1522 1513
1523 obj = AllocatePartialMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); 1514 { MaybeObject* maybe_obj =
1524 if (obj->IsFailure()) return false; 1515 AllocatePartialMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
1516 if (!maybe_obj->ToObject(&obj)) return false;
1517 }
1525 set_fixed_array_map(Map::cast(obj)); 1518 set_fixed_array_map(Map::cast(obj));
1526 1519
1527 obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize); 1520 { MaybeObject* maybe_obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize);
1528 if (obj->IsFailure()) return false; 1521 if (!maybe_obj->ToObject(&obj)) return false;
1522 }
1529 set_oddball_map(Map::cast(obj)); 1523 set_oddball_map(Map::cast(obj));
1530 1524
1531 // Allocate the empty array. 1525 // Allocate the empty array.
1532 obj = AllocateEmptyFixedArray(); 1526 { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
1533 if (obj->IsFailure()) return false; 1527 if (!maybe_obj->ToObject(&obj)) return false;
1528 }
1534 set_empty_fixed_array(FixedArray::cast(obj)); 1529 set_empty_fixed_array(FixedArray::cast(obj));
1535 1530
1536 obj = Allocate(oddball_map(), OLD_DATA_SPACE); 1531 { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_DATA_SPACE);
1537 if (obj->IsFailure()) return false; 1532 if (!maybe_obj->ToObject(&obj)) return false;
1533 }
1538 set_null_value(obj); 1534 set_null_value(obj);
1539 1535
1540 // Allocate the empty descriptor array. 1536 // Allocate the empty descriptor array.
1541 obj = AllocateEmptyFixedArray(); 1537 { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
1542 if (obj->IsFailure()) return false; 1538 if (!maybe_obj->ToObject(&obj)) return false;
1539 }
1543 set_empty_descriptor_array(DescriptorArray::cast(obj)); 1540 set_empty_descriptor_array(DescriptorArray::cast(obj));
1544 1541
1545 // Fix the instance_descriptors for the existing maps. 1542 // Fix the instance_descriptors for the existing maps.
1546 meta_map()->set_instance_descriptors(empty_descriptor_array()); 1543 meta_map()->set_instance_descriptors(empty_descriptor_array());
1547 meta_map()->set_code_cache(empty_fixed_array()); 1544 meta_map()->set_code_cache(empty_fixed_array());
1548 1545
1549 fixed_array_map()->set_instance_descriptors(empty_descriptor_array()); 1546 fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
1550 fixed_array_map()->set_code_cache(empty_fixed_array()); 1547 fixed_array_map()->set_code_cache(empty_fixed_array());
1551 1548
1552 oddball_map()->set_instance_descriptors(empty_descriptor_array()); 1549 oddball_map()->set_instance_descriptors(empty_descriptor_array());
1553 oddball_map()->set_code_cache(empty_fixed_array()); 1550 oddball_map()->set_code_cache(empty_fixed_array());
1554 1551
1555 // Fix prototype object for existing maps. 1552 // Fix prototype object for existing maps.
1556 meta_map()->set_prototype(null_value()); 1553 meta_map()->set_prototype(null_value());
1557 meta_map()->set_constructor(null_value()); 1554 meta_map()->set_constructor(null_value());
1558 1555
1559 fixed_array_map()->set_prototype(null_value()); 1556 fixed_array_map()->set_prototype(null_value());
1560 fixed_array_map()->set_constructor(null_value()); 1557 fixed_array_map()->set_constructor(null_value());
1561 1558
1562 oddball_map()->set_prototype(null_value()); 1559 oddball_map()->set_prototype(null_value());
1563 oddball_map()->set_constructor(null_value()); 1560 oddball_map()->set_constructor(null_value());
1564 1561
1565 obj = AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); 1562 { MaybeObject* maybe_obj =
1566 if (obj->IsFailure()) return false; 1563 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
1564 if (!maybe_obj->ToObject(&obj)) return false;
1565 }
1567 set_fixed_cow_array_map(Map::cast(obj)); 1566 set_fixed_cow_array_map(Map::cast(obj));
1568 ASSERT(fixed_array_map() != fixed_cow_array_map()); 1567 ASSERT(fixed_array_map() != fixed_cow_array_map());
1569 1568
1570 obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize); 1569 { MaybeObject* maybe_obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
1571 if (obj->IsFailure()) return false; 1570 if (!maybe_obj->ToObject(&obj)) return false;
1571 }
1572 set_heap_number_map(Map::cast(obj)); 1572 set_heap_number_map(Map::cast(obj));
1573 1573
1574 obj = AllocateMap(PROXY_TYPE, Proxy::kSize); 1574 { MaybeObject* maybe_obj = AllocateMap(PROXY_TYPE, Proxy::kSize);
1575 if (obj->IsFailure()) return false; 1575 if (!maybe_obj->ToObject(&obj)) return false;
1576 }
1576 set_proxy_map(Map::cast(obj)); 1577 set_proxy_map(Map::cast(obj));
1577 1578
1578 for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) { 1579 for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
1579 const StringTypeTable& entry = string_type_table[i]; 1580 const StringTypeTable& entry = string_type_table[i];
1580 obj = AllocateMap(entry.type, entry.size); 1581 { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size);
1581 if (obj->IsFailure()) return false; 1582 if (!maybe_obj->ToObject(&obj)) return false;
1583 }
1582 roots_[entry.index] = Map::cast(obj); 1584 roots_[entry.index] = Map::cast(obj);
1583 } 1585 }
1584 1586
1585 obj = AllocateMap(STRING_TYPE, kVariableSizeSentinel); 1587 { MaybeObject* maybe_obj = AllocateMap(STRING_TYPE, kVariableSizeSentinel);
1586 if (obj->IsFailure()) return false; 1588 if (!maybe_obj->ToObject(&obj)) return false;
1589 }
1587 set_undetectable_string_map(Map::cast(obj)); 1590 set_undetectable_string_map(Map::cast(obj));
1588 Map::cast(obj)->set_is_undetectable(); 1591 Map::cast(obj)->set_is_undetectable();
1589 1592
1590 obj = AllocateMap(ASCII_STRING_TYPE, kVariableSizeSentinel); 1593 { MaybeObject* maybe_obj =
1591 if (obj->IsFailure()) return false; 1594 AllocateMap(ASCII_STRING_TYPE, kVariableSizeSentinel);
1595 if (!maybe_obj->ToObject(&obj)) return false;
1596 }
1592 set_undetectable_ascii_string_map(Map::cast(obj)); 1597 set_undetectable_ascii_string_map(Map::cast(obj));
1593 Map::cast(obj)->set_is_undetectable(); 1598 Map::cast(obj)->set_is_undetectable();
1594 1599
1595 obj = AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel); 1600 { MaybeObject* maybe_obj =
1596 if (obj->IsFailure()) return false; 1601 AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel);
1602 if (!maybe_obj->ToObject(&obj)) return false;
1603 }
1597 set_byte_array_map(Map::cast(obj)); 1604 set_byte_array_map(Map::cast(obj));
1598 1605
1599 obj = AllocateMap(PIXEL_ARRAY_TYPE, PixelArray::kAlignedSize); 1606 { MaybeObject* maybe_obj =
1600 if (obj->IsFailure()) return false; 1607 AllocateMap(PIXEL_ARRAY_TYPE, PixelArray::kAlignedSize);
1608 if (!maybe_obj->ToObject(&obj)) return false;
1609 }
1601 set_pixel_array_map(Map::cast(obj)); 1610 set_pixel_array_map(Map::cast(obj));
1602 1611
1603 obj = AllocateMap(EXTERNAL_BYTE_ARRAY_TYPE, 1612 { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_BYTE_ARRAY_TYPE,
1604 ExternalArray::kAlignedSize); 1613 ExternalArray::kAlignedSize);
1605 if (obj->IsFailure()) return false; 1614 if (!maybe_obj->ToObject(&obj)) return false;
1615 }
1606 set_external_byte_array_map(Map::cast(obj)); 1616 set_external_byte_array_map(Map::cast(obj));
1607 1617
1608 obj = AllocateMap(EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE, 1618 { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE,
1609 ExternalArray::kAlignedSize); 1619 ExternalArray::kAlignedSize);
1610 if (obj->IsFailure()) return false; 1620 if (!maybe_obj->ToObject(&obj)) return false;
1621 }
1611 set_external_unsigned_byte_array_map(Map::cast(obj)); 1622 set_external_unsigned_byte_array_map(Map::cast(obj));
1612 1623
1613 obj = AllocateMap(EXTERNAL_SHORT_ARRAY_TYPE, 1624 { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_SHORT_ARRAY_TYPE,
1614 ExternalArray::kAlignedSize); 1625 ExternalArray::kAlignedSize);
1615 if (obj->IsFailure()) return false; 1626 if (!maybe_obj->ToObject(&obj)) return false;
1627 }
1616 set_external_short_array_map(Map::cast(obj)); 1628 set_external_short_array_map(Map::cast(obj));
1617 1629
1618 obj = AllocateMap(EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE, 1630 { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE,
1619 ExternalArray::kAlignedSize); 1631 ExternalArray::kAlignedSize);
1620 if (obj->IsFailure()) return false; 1632 if (!maybe_obj->ToObject(&obj)) return false;
1633 }
1621 set_external_unsigned_short_array_map(Map::cast(obj)); 1634 set_external_unsigned_short_array_map(Map::cast(obj));
1622 1635
1623 obj = AllocateMap(EXTERNAL_INT_ARRAY_TYPE, 1636 { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_INT_ARRAY_TYPE,
1624 ExternalArray::kAlignedSize); 1637 ExternalArray::kAlignedSize);
1625 if (obj->IsFailure()) return false; 1638 if (!maybe_obj->ToObject(&obj)) return false;
1639 }
1626 set_external_int_array_map(Map::cast(obj)); 1640 set_external_int_array_map(Map::cast(obj));
1627 1641
1628 obj = AllocateMap(EXTERNAL_UNSIGNED_INT_ARRAY_TYPE, 1642 { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_UNSIGNED_INT_ARRAY_TYPE,
1629 ExternalArray::kAlignedSize); 1643 ExternalArray::kAlignedSize);
1630 if (obj->IsFailure()) return false; 1644 if (!maybe_obj->ToObject(&obj)) return false;
1645 }
1631 set_external_unsigned_int_array_map(Map::cast(obj)); 1646 set_external_unsigned_int_array_map(Map::cast(obj));
1632 1647
1633 obj = AllocateMap(EXTERNAL_FLOAT_ARRAY_TYPE, 1648 { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_FLOAT_ARRAY_TYPE,
1634 ExternalArray::kAlignedSize); 1649 ExternalArray::kAlignedSize);
1635 if (obj->IsFailure()) return false; 1650 if (!maybe_obj->ToObject(&obj)) return false;
1651 }
1636 set_external_float_array_map(Map::cast(obj)); 1652 set_external_float_array_map(Map::cast(obj));
1637 1653
1638 obj = AllocateMap(CODE_TYPE, kVariableSizeSentinel); 1654 { MaybeObject* maybe_obj = AllocateMap(CODE_TYPE, kVariableSizeSentinel);
1639 if (obj->IsFailure()) return false; 1655 if (!maybe_obj->ToObject(&obj)) return false;
1656 }
1640 set_code_map(Map::cast(obj)); 1657 set_code_map(Map::cast(obj));
1641 1658
1642 obj = AllocateMap(JS_GLOBAL_PROPERTY_CELL_TYPE, 1659 { MaybeObject* maybe_obj = AllocateMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
1643 JSGlobalPropertyCell::kSize); 1660 JSGlobalPropertyCell::kSize);
1644 if (obj->IsFailure()) return false; 1661 if (!maybe_obj->ToObject(&obj)) return false;
1662 }
1645 set_global_property_cell_map(Map::cast(obj)); 1663 set_global_property_cell_map(Map::cast(obj));
1646 1664
1647 obj = AllocateMap(FILLER_TYPE, kPointerSize); 1665 { MaybeObject* maybe_obj = AllocateMap(FILLER_TYPE, kPointerSize);
1648 if (obj->IsFailure()) return false; 1666 if (!maybe_obj->ToObject(&obj)) return false;
1667 }
1649 set_one_pointer_filler_map(Map::cast(obj)); 1668 set_one_pointer_filler_map(Map::cast(obj));
1650 1669
1651 obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize); 1670 { MaybeObject* maybe_obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize);
1652 if (obj->IsFailure()) return false; 1671 if (!maybe_obj->ToObject(&obj)) return false;
1672 }
1653 set_two_pointer_filler_map(Map::cast(obj)); 1673 set_two_pointer_filler_map(Map::cast(obj));
1654 1674
1655 for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) { 1675 for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) {
1656 const StructTable& entry = struct_table[i]; 1676 const StructTable& entry = struct_table[i];
1657 obj = AllocateMap(entry.type, entry.size); 1677 { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size);
1658 if (obj->IsFailure()) return false; 1678 if (!maybe_obj->ToObject(&obj)) return false;
1679 }
1659 roots_[entry.index] = Map::cast(obj); 1680 roots_[entry.index] = Map::cast(obj);
1660 } 1681 }
1661 1682
1662 obj = AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); 1683 { MaybeObject* maybe_obj =
1663 if (obj->IsFailure()) return false; 1684 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
1685 if (!maybe_obj->ToObject(&obj)) return false;
1686 }
1664 set_hash_table_map(Map::cast(obj)); 1687 set_hash_table_map(Map::cast(obj));
1665 1688
1666 obj = AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); 1689 { MaybeObject* maybe_obj =
1667 if (obj->IsFailure()) return false; 1690 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
1691 if (!maybe_obj->ToObject(&obj)) return false;
1692 }
1668 set_context_map(Map::cast(obj)); 1693 set_context_map(Map::cast(obj));
1669 1694
1670 obj = AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); 1695 { MaybeObject* maybe_obj =
1671 if (obj->IsFailure()) return false; 1696 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
1697 if (!maybe_obj->ToObject(&obj)) return false;
1698 }
1672 set_catch_context_map(Map::cast(obj)); 1699 set_catch_context_map(Map::cast(obj));
1673 1700
1674 obj = AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); 1701 { MaybeObject* maybe_obj =
1675 if (obj->IsFailure()) return false; 1702 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
1703 if (!maybe_obj->ToObject(&obj)) return false;
1704 }
1676 Map* global_context_map = Map::cast(obj); 1705 Map* global_context_map = Map::cast(obj);
1677 global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext); 1706 global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext);
1678 set_global_context_map(global_context_map); 1707 set_global_context_map(global_context_map);
1679 1708
1680 obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE, 1709 { MaybeObject* maybe_obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE,
1681 SharedFunctionInfo::kAlignedSize); 1710 SharedFunctionInfo::kAlignedSize);
1682 if (obj->IsFailure()) return false; 1711 if (!maybe_obj->ToObject(&obj)) return false;
1712 }
1683 set_shared_function_info_map(Map::cast(obj)); 1713 set_shared_function_info_map(Map::cast(obj));
1684 1714
1685 ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array())); 1715 ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
1686 return true; 1716 return true;
1687 } 1717 }
1688 1718
1689 1719
1690 Object* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { 1720 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
1691 // Statically ensure that it is safe to allocate heap numbers in paged 1721 // Statically ensure that it is safe to allocate heap numbers in paged
1692 // spaces. 1722 // spaces.
1693 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize); 1723 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
1694 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 1724 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
1695 1725
1696 Object* result = AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE); 1726 Object* result;
1697 if (result->IsFailure()) return result; 1727 { MaybeObject* maybe_result =
1728 AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
1729 if (!maybe_result->ToObject(&result)) return maybe_result;
1730 }
1698 1731
1699 HeapObject::cast(result)->set_map(heap_number_map()); 1732 HeapObject::cast(result)->set_map(heap_number_map());
1700 HeapNumber::cast(result)->set_value(value); 1733 HeapNumber::cast(result)->set_value(value);
1701 return result; 1734 return result;
1702 } 1735 }
1703 1736
1704 1737
1705 Object* Heap::AllocateHeapNumber(double value) { 1738 MaybeObject* Heap::AllocateHeapNumber(double value) {
1706 // Use general version, if we're forced to always allocate. 1739 // Use general version, if we're forced to always allocate.
1707 if (always_allocate()) return AllocateHeapNumber(value, TENURED); 1740 if (always_allocate()) return AllocateHeapNumber(value, TENURED);
1708 1741
1709 // This version of AllocateHeapNumber is optimized for 1742 // This version of AllocateHeapNumber is optimized for
1710 // allocation in new space. 1743 // allocation in new space.
1711 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize); 1744 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
1712 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 1745 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
1713 Object* result = new_space_.AllocateRaw(HeapNumber::kSize); 1746 Object* result;
1714 if (result->IsFailure()) return result; 1747 { MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize);
1748 if (!maybe_result->ToObject(&result)) return maybe_result;
1749 }
1715 HeapObject::cast(result)->set_map(heap_number_map()); 1750 HeapObject::cast(result)->set_map(heap_number_map());
1716 HeapNumber::cast(result)->set_value(value); 1751 HeapNumber::cast(result)->set_value(value);
1717 return result; 1752 return result;
1718 } 1753 }
1719 1754
1720 1755
1721 Object* Heap::AllocateJSGlobalPropertyCell(Object* value) { 1756 MaybeObject* Heap::AllocateJSGlobalPropertyCell(Object* value) {
1722 Object* result = AllocateRawCell(); 1757 Object* result;
1723 if (result->IsFailure()) return result; 1758 { MaybeObject* maybe_result = AllocateRawCell();
1759 if (!maybe_result->ToObject(&result)) return maybe_result;
1760 }
1724 HeapObject::cast(result)->set_map(global_property_cell_map()); 1761 HeapObject::cast(result)->set_map(global_property_cell_map());
1725 JSGlobalPropertyCell::cast(result)->set_value(value); 1762 JSGlobalPropertyCell::cast(result)->set_value(value);
1726 return result; 1763 return result;
1727 } 1764 }
1728 1765
1729 1766
1730 Object* Heap::CreateOddball(const char* to_string, 1767 MaybeObject* Heap::CreateOddball(const char* to_string,
1731 Object* to_number) { 1768 Object* to_number) {
1732 Object* result = Allocate(oddball_map(), OLD_DATA_SPACE); 1769 Object* result;
1733 if (result->IsFailure()) return result; 1770 { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_DATA_SPACE);
1771 if (!maybe_result->ToObject(&result)) return maybe_result;
1772 }
1734 return Oddball::cast(result)->Initialize(to_string, to_number); 1773 return Oddball::cast(result)->Initialize(to_string, to_number);
1735 } 1774 }
1736 1775
1737 1776
1738 bool Heap::CreateApiObjects() { 1777 bool Heap::CreateApiObjects() {
1739 Object* obj; 1778 Object* obj;
1740 1779
1741 obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); 1780 { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
1742 if (obj->IsFailure()) return false; 1781 if (!maybe_obj->ToObject(&obj)) return false;
1782 }
1743 set_neander_map(Map::cast(obj)); 1783 set_neander_map(Map::cast(obj));
1744 1784
1745 obj = Heap::AllocateJSObjectFromMap(neander_map()); 1785 { MaybeObject* maybe_obj = Heap::AllocateJSObjectFromMap(neander_map());
1746 if (obj->IsFailure()) return false; 1786 if (!maybe_obj->ToObject(&obj)) return false;
1747 Object* elements = AllocateFixedArray(2); 1787 }
1748 if (elements->IsFailure()) return false; 1788 Object* elements;
1789 { MaybeObject* maybe_elements = AllocateFixedArray(2);
1790 if (!maybe_elements->ToObject(&elements)) return false;
1791 }
1749 FixedArray::cast(elements)->set(0, Smi::FromInt(0)); 1792 FixedArray::cast(elements)->set(0, Smi::FromInt(0));
1750 JSObject::cast(obj)->set_elements(FixedArray::cast(elements)); 1793 JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
1751 set_message_listeners(JSObject::cast(obj)); 1794 set_message_listeners(JSObject::cast(obj));
1752 1795
1753 return true; 1796 return true;
1754 } 1797 }
1755 1798
1756 1799
1757 void Heap::CreateCEntryStub() { 1800 void Heap::CreateCEntryStub() {
1758 CEntryStub stub(1); 1801 CEntryStub stub(1);
(...skipping 41 matching lines...)
1800 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP 1843 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
1801 Heap::CreateRegExpCEntryStub(); 1844 Heap::CreateRegExpCEntryStub();
1802 #endif 1845 #endif
1803 } 1846 }
1804 1847
1805 1848
1806 bool Heap::CreateInitialObjects() { 1849 bool Heap::CreateInitialObjects() {
1807 Object* obj; 1850 Object* obj;
1808 1851
1809 // The -0 value must be set before NumberFromDouble works. 1852 // The -0 value must be set before NumberFromDouble works.
1810 obj = AllocateHeapNumber(-0.0, TENURED); 1853 { MaybeObject* maybe_obj = AllocateHeapNumber(-0.0, TENURED);
1811 if (obj->IsFailure()) return false; 1854 if (!maybe_obj->ToObject(&obj)) return false;
1855 }
1812 set_minus_zero_value(obj); 1856 set_minus_zero_value(obj);
1813 ASSERT(signbit(minus_zero_value()->Number()) != 0); 1857 ASSERT(signbit(minus_zero_value()->Number()) != 0);
1814 1858
1815 obj = AllocateHeapNumber(OS::nan_value(), TENURED); 1859 { MaybeObject* maybe_obj = AllocateHeapNumber(OS::nan_value(), TENURED);
1816 if (obj->IsFailure()) return false; 1860 if (!maybe_obj->ToObject(&obj)) return false;
1861 }
1817 set_nan_value(obj); 1862 set_nan_value(obj);
1818 1863
1819 obj = Allocate(oddball_map(), OLD_DATA_SPACE); 1864 { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_DATA_SPACE);
1820 if (obj->IsFailure()) return false; 1865 if (!maybe_obj->ToObject(&obj)) return false;
1866 }
1821 set_undefined_value(obj); 1867 set_undefined_value(obj);
1822 ASSERT(!InNewSpace(undefined_value())); 1868 ASSERT(!InNewSpace(undefined_value()));
1823 1869
1824 // Allocate initial symbol table. 1870 // Allocate initial symbol table.
1825 obj = SymbolTable::Allocate(kInitialSymbolTableSize); 1871 { MaybeObject* maybe_obj = SymbolTable::Allocate(kInitialSymbolTableSize);
1826 if (obj->IsFailure()) return false; 1872 if (!maybe_obj->ToObject(&obj)) return false;
1873 }
1827 // Don't use set_symbol_table() due to asserts. 1874 // Don't use set_symbol_table() due to asserts.
1828 roots_[kSymbolTableRootIndex] = obj; 1875 roots_[kSymbolTableRootIndex] = obj;
1829 1876
1830 // Assign the print strings for oddballs after creating symboltable. 1877 // Assign the print strings for oddballs after creating symboltable.
1831 Object* symbol = LookupAsciiSymbol("undefined"); 1878 Object* symbol;
1832 if (symbol->IsFailure()) return false; 1879 { MaybeObject* maybe_symbol = LookupAsciiSymbol("undefined");
1880 if (!maybe_symbol->ToObject(&symbol)) return false;
1881 }
1833 Oddball::cast(undefined_value())->set_to_string(String::cast(symbol)); 1882 Oddball::cast(undefined_value())->set_to_string(String::cast(symbol));
1834 Oddball::cast(undefined_value())->set_to_number(nan_value()); 1883 Oddball::cast(undefined_value())->set_to_number(nan_value());
1835 1884
1836 // Allocate the null_value 1885 // Allocate the null_value
1837 obj = Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0)); 1886 { MaybeObject* maybe_obj =
1838 if (obj->IsFailure()) return false; 1887 Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0));
1888 if (!maybe_obj->ToObject(&obj)) return false;
1889 }
1839 1890
1840 obj = CreateOddball("true", Smi::FromInt(1)); 1891 { MaybeObject* maybe_obj = CreateOddball("true", Smi::FromInt(1));
1841 if (obj->IsFailure()) return false; 1892 if (!maybe_obj->ToObject(&obj)) return false;
1893 }
1842 set_true_value(obj); 1894 set_true_value(obj);
1843 1895
1844 obj = CreateOddball("false", Smi::FromInt(0)); 1896 { MaybeObject* maybe_obj = CreateOddball("false", Smi::FromInt(0));
1845 if (obj->IsFailure()) return false; 1897 if (!maybe_obj->ToObject(&obj)) return false;
1898 }
1846 set_false_value(obj); 1899 set_false_value(obj);
1847 1900
1848 obj = CreateOddball("hole", Smi::FromInt(-1)); 1901 { MaybeObject* maybe_obj = CreateOddball("hole", Smi::FromInt(-1));
1849 if (obj->IsFailure()) return false; 1902 if (!maybe_obj->ToObject(&obj)) return false;
1903 }
1850 set_the_hole_value(obj); 1904 set_the_hole_value(obj);
1851 1905
1852 obj = CreateOddball("no_interceptor_result_sentinel", Smi::FromInt(-2)); 1906 { MaybeObject* maybe_obj =
1853 if (obj->IsFailure()) return false; 1907 CreateOddball("no_interceptor_result_sentinel", Smi::FromInt(-2));
1908 if (!maybe_obj->ToObject(&obj)) return false;
1909 }
1854 set_no_interceptor_result_sentinel(obj); 1910 set_no_interceptor_result_sentinel(obj);
1855 1911
1856 obj = CreateOddball("termination_exception", Smi::FromInt(-3)); 1912 { MaybeObject* maybe_obj =
1857 if (obj->IsFailure()) return false; 1913 CreateOddball("termination_exception", Smi::FromInt(-3));
1914 if (!maybe_obj->ToObject(&obj)) return false;
1915 }
1858 set_termination_exception(obj); 1916 set_termination_exception(obj);
1859 1917
1860 // Allocate the empty string. 1918 // Allocate the empty string.
1861 obj = AllocateRawAsciiString(0, TENURED); 1919 { MaybeObject* maybe_obj = AllocateRawAsciiString(0, TENURED);
1862 if (obj->IsFailure()) return false; 1920 if (!maybe_obj->ToObject(&obj)) return false;
1921 }
1863 set_empty_string(String::cast(obj)); 1922 set_empty_string(String::cast(obj));
1864 1923
1865 for (unsigned i = 0; i < ARRAY_SIZE(constant_symbol_table); i++) { 1924 for (unsigned i = 0; i < ARRAY_SIZE(constant_symbol_table); i++) {
1866 obj = LookupAsciiSymbol(constant_symbol_table[i].contents); 1925 { MaybeObject* maybe_obj =
1867 if (obj->IsFailure()) return false; 1926 LookupAsciiSymbol(constant_symbol_table[i].contents);
1927 if (!maybe_obj->ToObject(&obj)) return false;
1928 }
1868 roots_[constant_symbol_table[i].index] = String::cast(obj); 1929 roots_[constant_symbol_table[i].index] = String::cast(obj);
1869 } 1930 }
1870 1931
1871 // Allocate the hidden symbol which is used to identify the hidden properties 1932 // Allocate the hidden symbol which is used to identify the hidden properties
1872 // in JSObjects. The hash code has a special value so that it will not match 1933 // in JSObjects. The hash code has a special value so that it will not match
1873 // the empty string when searching for the property. It cannot be part of the 1934 // the empty string when searching for the property. It cannot be part of the
1874 // loop above because it needs to be allocated manually with the special 1935 // loop above because it needs to be allocated manually with the special
1875 // hash code in place. The hash code for the hidden_symbol is zero to ensure 1936 // hash code in place. The hash code for the hidden_symbol is zero to ensure
1876 // that it will always be at the first entry in property descriptors. 1937 // that it will always be at the first entry in property descriptors.
1877 obj = AllocateSymbol(CStrVector(""), 0, String::kZeroHash); 1938 { MaybeObject* maybe_obj =
1878 if (obj->IsFailure()) return false; 1939 AllocateSymbol(CStrVector(""), 0, String::kZeroHash);
1940 if (!maybe_obj->ToObject(&obj)) return false;
1941 }
1879 hidden_symbol_ = String::cast(obj); 1942 hidden_symbol_ = String::cast(obj);
1880 1943
1881 // Allocate the proxy for __proto__. 1944 // Allocate the proxy for __proto__.
1882 obj = AllocateProxy((Address) &Accessors::ObjectPrototype); 1945 { MaybeObject* maybe_obj =
1883 if (obj->IsFailure()) return false; 1946 AllocateProxy((Address) &Accessors::ObjectPrototype);
1947 if (!maybe_obj->ToObject(&obj)) return false;
1948 }
1884 set_prototype_accessors(Proxy::cast(obj)); 1949 set_prototype_accessors(Proxy::cast(obj));
1885 1950
1886 // Allocate the code_stubs dictionary. The initial size is set to avoid 1951 // Allocate the code_stubs dictionary. The initial size is set to avoid
1887 // expanding the dictionary during bootstrapping. 1952 // expanding the dictionary during bootstrapping.
1888 obj = NumberDictionary::Allocate(128); 1953 { MaybeObject* maybe_obj = NumberDictionary::Allocate(128);
1889 if (obj->IsFailure()) return false; 1954 if (!maybe_obj->ToObject(&obj)) return false;
1955 }
1890 set_code_stubs(NumberDictionary::cast(obj)); 1956 set_code_stubs(NumberDictionary::cast(obj));
1891 1957
1892 // Allocate the non_monomorphic_cache used in stub-cache.cc. The initial size 1958 // Allocate the non_monomorphic_cache used in stub-cache.cc. The initial size
1893 // is set to avoid expanding the dictionary during bootstrapping. 1959 // is set to avoid expanding the dictionary during bootstrapping.
1894 obj = NumberDictionary::Allocate(64); 1960 { MaybeObject* maybe_obj = NumberDictionary::Allocate(64);
1895 if (obj->IsFailure()) return false; 1961 if (!maybe_obj->ToObject(&obj)) return false;
1962 }
1896 set_non_monomorphic_cache(NumberDictionary::cast(obj)); 1963 set_non_monomorphic_cache(NumberDictionary::cast(obj));
1897 1964
1898 set_instanceof_cache_function(Smi::FromInt(0)); 1965 set_instanceof_cache_function(Smi::FromInt(0));
1899 set_instanceof_cache_map(Smi::FromInt(0)); 1966 set_instanceof_cache_map(Smi::FromInt(0));
1900 set_instanceof_cache_answer(Smi::FromInt(0)); 1967 set_instanceof_cache_answer(Smi::FromInt(0));
1901 1968
1902 CreateFixedStubs(); 1969 CreateFixedStubs();
1903 1970
1904 // Allocate the dictionary of intrinsic function names. 1971 // Allocate the dictionary of intrinsic function names.
1905 obj = StringDictionary::Allocate(Runtime::kNumFunctions); 1972 { MaybeObject* maybe_obj = StringDictionary::Allocate(Runtime::kNumFunctions);
1906 if (obj->IsFailure()) return false; 1973 if (!maybe_obj->ToObject(&obj)) return false;
1907 obj = Runtime::InitializeIntrinsicFunctionNames(obj); 1974 }
1908 if (obj->IsFailure()) return false; 1975 { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(obj);
1976 if (!maybe_obj->ToObject(&obj)) return false;
1977 }
1909 set_intrinsic_function_names(StringDictionary::cast(obj)); 1978 set_intrinsic_function_names(StringDictionary::cast(obj));
1910 1979
1911 if (InitializeNumberStringCache()->IsFailure()) return false; 1980 if (InitializeNumberStringCache()->IsFailure()) return false;
1912 1981
1913 // Allocate cache for single character ASCII strings. 1982 // Allocate cache for single character ASCII strings.
1914 obj = AllocateFixedArray(String::kMaxAsciiCharCode + 1, TENURED); 1983 { MaybeObject* maybe_obj =
1915 if (obj->IsFailure()) return false; 1984 AllocateFixedArray(String::kMaxAsciiCharCode + 1, TENURED);
1985 if (!maybe_obj->ToObject(&obj)) return false;
1986 }
1916 set_single_character_string_cache(FixedArray::cast(obj)); 1987 set_single_character_string_cache(FixedArray::cast(obj));
1917 1988
1918 // Allocate cache for external strings pointing to native source code. 1989 // Allocate cache for external strings pointing to native source code.
1919 obj = AllocateFixedArray(Natives::GetBuiltinsCount()); 1990 { MaybeObject* maybe_obj = AllocateFixedArray(Natives::GetBuiltinsCount());
1920 if (obj->IsFailure()) return false; 1991 if (!maybe_obj->ToObject(&obj)) return false;
1992 }
1921 set_natives_source_cache(FixedArray::cast(obj)); 1993 set_natives_source_cache(FixedArray::cast(obj));
1922 1994
1923 // Handling of script id generation is in Factory::NewScript. 1995 // Handling of script id generation is in Factory::NewScript.
1924 set_last_script_id(undefined_value()); 1996 set_last_script_id(undefined_value());
1925 1997
1926 // Initialize keyed lookup cache. 1998 // Initialize keyed lookup cache.
1927 KeyedLookupCache::Clear(); 1999 KeyedLookupCache::Clear();
1928 2000
1929 // Initialize context slot cache. 2001 // Initialize context slot cache.
1930 ContextSlotCache::Clear(); 2002 ContextSlotCache::Clear();
1931 2003
1932 // Initialize descriptor cache. 2004 // Initialize descriptor cache.
1933 DescriptorLookupCache::Clear(); 2005 DescriptorLookupCache::Clear();
1934 2006
1935 // Initialize compilation cache. 2007 // Initialize compilation cache.
1936 CompilationCache::Clear(); 2008 CompilationCache::Clear();
1937 2009
1938 return true; 2010 return true;
1939 } 2011 }
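Note on the idiom that recurs throughout this patch: allocation functions now return MaybeObject* instead of Object*, and callers unwrap the result with ToObject() instead of testing IsFailure(). A minimal sketch of the caller side, where AllocateSomething stands in for any Heap allocator (the helper name is hypothetical; the shape of the code is taken from the hunks above):

// Sketch only; mirrors the unwrap-or-propagate pattern used above.
Object* obj;
{ MaybeObject* maybe_obj = Heap::AllocateSomething();  // hypothetical allocator
  // ToObject() stores the object in obj and returns true on success; on an
  // allocation failure it returns false and the failure stays wrapped in
  // maybe_obj, which is propagated so the caller can retry after GC.
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
// From here on, obj is a real heap object and can be cast and initialized.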
1940 2012
1941 2013
1942 Object* Heap::InitializeNumberStringCache() { 2014 MaybeObject* Heap::InitializeNumberStringCache() {
1943 // Compute the size of the number string cache based on the max heap size. 2015 // Compute the size of the number string cache based on the max heap size.
1944 // max_semispace_size_ == 512 KB => number_string_cache_size = 32. 2016 // max_semispace_size_ == 512 KB => number_string_cache_size = 32.
1945 // max_semispace_size_ == 8 MB => number_string_cache_size = 16KB. 2017 // max_semispace_size_ == 8 MB => number_string_cache_size = 16KB.
1946 int number_string_cache_size = max_semispace_size_ / 512; 2018 int number_string_cache_size = max_semispace_size_ / 512;
1947 number_string_cache_size = Max(32, Min(16*KB, number_string_cache_size)); 2019 number_string_cache_size = Max(32, Min(16*KB, number_string_cache_size));
1948 Object* obj = AllocateFixedArray(number_string_cache_size * 2, TENURED); 2020 Object* obj;
1949 if (!obj->IsFailure()) set_number_string_cache(FixedArray::cast(obj)); 2021 MaybeObject* maybe_obj =
1950 return obj; 2022 AllocateFixedArray(number_string_cache_size * 2, TENURED);
2023 if (maybe_obj->ToObject(&obj)) set_number_string_cache(FixedArray::cast(obj));
2024 return maybe_obj;
1951 } 2025 }
1952 2026
1953 2027
1954 void Heap::FlushNumberStringCache() { 2028 void Heap::FlushNumberStringCache() {
1955 // Flush the number to string cache. 2029 // Flush the number to string cache.
1956 int len = number_string_cache()->length(); 2030 int len = number_string_cache()->length();
1957 for (int i = 0; i < len; i++) { 2031 for (int i = 0; i < len; i++) {
1958 number_string_cache()->set_undefined(i); 2032 number_string_cache()->set_undefined(i);
1959 } 2033 }
1960 } 2034 }
(...skipping 37 matching lines...)
1998 hash = smi_get_hash(Smi::cast(number)) & mask; 2072 hash = smi_get_hash(Smi::cast(number)) & mask;
1999 number_string_cache()->set(hash * 2, Smi::cast(number)); 2073 number_string_cache()->set(hash * 2, Smi::cast(number));
2000 } else { 2074 } else {
2001 hash = double_get_hash(number->Number()) & mask; 2075 hash = double_get_hash(number->Number()) & mask;
2002 number_string_cache()->set(hash * 2, number); 2076 number_string_cache()->set(hash * 2, number);
2003 } 2077 }
2004 number_string_cache()->set(hash * 2 + 1, string); 2078 number_string_cache()->set(hash * 2 + 1, string);
2005 } 2079 }
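For reference, the cache written to above is a flat FixedArray of key/value pairs: entry h occupies slot 2*h (the number) and slot 2*h + 1 (its string), which is why InitializeNumberStringCache allocates number_string_cache_size * 2 elements. A lookup sketch along the lines of GetNumberStringCache (the mask derivation is an assumption based on the two-slots-per-entry layout; smi_get_hash and double_get_hash are the helpers used above):

// Sketch only, not the exact GetNumberStringCache body.
FixedArray* cache = Heap::number_string_cache();
int mask = (cache->length() >> 1) - 1;          // assumed: pair count minus one
int hash = number->IsSmi()
    ? smi_get_hash(Smi::cast(number)) & mask
    : double_get_hash(number->Number()) & mask;
Object* key = cache->get(hash * 2);
if (key == number ||
    (key->IsHeapNumber() && number->IsHeapNumber() &&
     key->Number() == number->Number())) {
  return cache->get(hash * 2 + 1);              // the cached string
}
return Heap::undefined_value();                 // cache miss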
2006 2080
2007 2081
2008 Object* Heap::NumberToString(Object* number, bool check_number_string_cache) { 2082 MaybeObject* Heap::NumberToString(Object* number,
2083 bool check_number_string_cache) {
2009 Counters::number_to_string_runtime.Increment(); 2084 Counters::number_to_string_runtime.Increment();
2010 if (check_number_string_cache) { 2085 if (check_number_string_cache) {
2011 Object* cached = GetNumberStringCache(number); 2086 Object* cached = GetNumberStringCache(number);
2012 if (cached != undefined_value()) { 2087 if (cached != undefined_value()) {
2013 return cached; 2088 return cached;
2014 } 2089 }
2015 } 2090 }
2016 2091
2017 char arr[100]; 2092 char arr[100];
2018 Vector<char> buffer(arr, ARRAY_SIZE(arr)); 2093 Vector<char> buffer(arr, ARRAY_SIZE(arr));
2019 const char* str; 2094 const char* str;
2020 if (number->IsSmi()) { 2095 if (number->IsSmi()) {
2021 int num = Smi::cast(number)->value(); 2096 int num = Smi::cast(number)->value();
2022 str = IntToCString(num, buffer); 2097 str = IntToCString(num, buffer);
2023 } else { 2098 } else {
2024 double num = HeapNumber::cast(number)->value(); 2099 double num = HeapNumber::cast(number)->value();
2025 str = DoubleToCString(num, buffer); 2100 str = DoubleToCString(num, buffer);
2026 } 2101 }
2027 Object* result = AllocateStringFromAscii(CStrVector(str));
2028 2102
2029 if (!result->IsFailure()) { 2103 Object* js_string;
2030 SetNumberStringCache(number, String::cast(result)); 2104 MaybeObject* maybe_js_string = AllocateStringFromAscii(CStrVector(str));
2105 if (maybe_js_string->ToObject(&js_string)) {
2106 SetNumberStringCache(number, String::cast(js_string));
2031 } 2107 }
2032 return result; 2108 return maybe_js_string;
2033 } 2109 }
2034 2110
2035 2111
2036 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) { 2112 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) {
2037 return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]); 2113 return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]);
2038 } 2114 }
2039 2115
2040 2116
2041 Heap::RootListIndex Heap::RootIndexForExternalArrayType( 2117 Heap::RootListIndex Heap::RootIndexForExternalArrayType(
2042 ExternalArrayType array_type) { 2118 ExternalArrayType array_type) {
(...skipping 12 matching lines...)
2055 return kExternalUnsignedIntArrayMapRootIndex; 2131 return kExternalUnsignedIntArrayMapRootIndex;
2056 case kExternalFloatArray: 2132 case kExternalFloatArray:
2057 return kExternalFloatArrayMapRootIndex; 2133 return kExternalFloatArrayMapRootIndex;
2058 default: 2134 default:
2059 UNREACHABLE(); 2135 UNREACHABLE();
2060 return kUndefinedValueRootIndex; 2136 return kUndefinedValueRootIndex;
2061 } 2137 }
2062 } 2138 }
2063 2139
2064 2140
2065 Object* Heap::NumberFromDouble(double value, PretenureFlag pretenure) { 2141 MaybeObject* Heap::NumberFromDouble(double value, PretenureFlag pretenure) {
2066 // We need to distinguish the minus zero value and this cannot be 2142 // We need to distinguish the minus zero value and this cannot be
2067 // done after conversion to int. Doing this by comparing bit 2143 // done after conversion to int. Doing this by comparing bit
2068 // patterns is faster than using fpclassify() et al. 2144 // patterns is faster than using fpclassify() et al.
2069 static const DoubleRepresentation minus_zero(-0.0); 2145 static const DoubleRepresentation minus_zero(-0.0);
2070 2146
2071 DoubleRepresentation rep(value); 2147 DoubleRepresentation rep(value);
2072 if (rep.bits == minus_zero.bits) { 2148 if (rep.bits == minus_zero.bits) {
2073 return AllocateHeapNumber(-0.0, pretenure); 2149 return AllocateHeapNumber(-0.0, pretenure);
2074 } 2150 }
2075 2151
2076 int int_value = FastD2I(value); 2152 int int_value = FastD2I(value);
2077 if (value == int_value && Smi::IsValid(int_value)) { 2153 if (value == int_value && Smi::IsValid(int_value)) {
2078 return Smi::FromInt(int_value); 2154 return Smi::FromInt(int_value);
2079 } 2155 }
2080 2156
2081 // Materialize the value in the heap. 2157 // Materialize the value in the heap.
2082 return AllocateHeapNumber(value, pretenure); 2158 return AllocateHeapNumber(value, pretenure);
2083 } 2159 }
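Why the bit-pattern comparison above is necessary: under IEEE 754, -0.0 == 0.0 compares equal and both truncate to integer 0, so only the sign bit tells them apart. A self-contained sketch of the same idea (this is not V8's DoubleRepresentation, just an illustration):

#include <cassert>
#include <cstdint>
#include <cstring>

// Returns true only for negative zero; positive zero has all bits clear.
static bool IsMinusZero(double value) {
  uint64_t bits;
  memcpy(&bits, &value, sizeof(bits));       // read the raw IEEE 754 bits
  return bits == 0x8000000000000000ULL;      // sign bit set, everything else zero
}

int main() {
  assert(-0.0 == 0.0);        // equality cannot distinguish the two zeros
  assert(IsMinusZero(-0.0));
  assert(!IsMinusZero(0.0));
  return 0;
}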
2084 2160
2085 2161
2086 Object* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) { 2162 MaybeObject* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) {
2087 // Statically ensure that it is safe to allocate proxies in paged spaces. 2163 // Statically ensure that it is safe to allocate proxies in paged spaces.
2088 STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize); 2164 STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize);
2089 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 2165 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2090 Object* result = Allocate(proxy_map(), space); 2166 Object* result;
2091 if (result->IsFailure()) return result; 2167 { MaybeObject* maybe_result = Allocate(proxy_map(), space);
2168 if (!maybe_result->ToObject(&result)) return maybe_result;
2169 }
2092 2170
2093 Proxy::cast(result)->set_proxy(proxy); 2171 Proxy::cast(result)->set_proxy(proxy);
2094 return result; 2172 return result;
2095 } 2173 }
2096 2174
2097 2175
2098 Object* Heap::AllocateSharedFunctionInfo(Object* name) { 2176 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
2099 Object* result = Allocate(shared_function_info_map(), OLD_POINTER_SPACE); 2177 Object* result;
2100 if (result->IsFailure()) return result; 2178 { MaybeObject* maybe_result =
2179 Allocate(shared_function_info_map(), OLD_POINTER_SPACE);
2180 if (!maybe_result->ToObject(&result)) return maybe_result;
2181 }
2101 2182
2102 SharedFunctionInfo* share = SharedFunctionInfo::cast(result); 2183 SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
2103 share->set_name(name); 2184 share->set_name(name);
2104 Code* illegal = Builtins::builtin(Builtins::Illegal); 2185 Code* illegal = Builtins::builtin(Builtins::Illegal);
2105 share->set_code(illegal); 2186 share->set_code(illegal);
2106 share->set_scope_info(SerializedScopeInfo::Empty()); 2187 share->set_scope_info(SerializedScopeInfo::Empty());
2107 Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric); 2188 Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric);
2108 share->set_construct_stub(construct_stub); 2189 share->set_construct_stub(construct_stub);
2109 share->set_expected_nof_properties(0); 2190 share->set_expected_nof_properties(0);
2110 share->set_length(0); 2191 share->set_length(0);
(...skipping 15 matching lines...)
2126 } 2207 }
2127 2208
2128 2209
2129 // Returns true for a character in a range. Both limits are inclusive. 2210 // Returns true for a character in a range. Both limits are inclusive.
2130 static inline bool Between(uint32_t character, uint32_t from, uint32_t to) { 2211 static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
 2131 // This makes use of the unsigned wraparound. 2212 // This makes use of the unsigned wraparound.
2132 return character - from <= to - from; 2213 return character - from <= to - from;
2133 } 2214 }
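The single unsigned comparison above covers both ends of the range: when character is below from, the subtraction wraps around to a large unsigned value and the <= test fails. A small standalone illustration:

#include <cassert>
#include <cstdint>

// Same trick as Between() above: one comparison instead of two.
static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
  return character - from <= to - from;  // wraps for character < from
}

int main() {
  assert(Between('5', '0', '9'));   // 53 - 48 = 5, and 5 <= 9
  assert(!Between('a', '0', '9'));  // 97 - 48 = 49, and 49 > 9
  assert(!Between('.', '0', '9'));  // 46 - 48 wraps to 0xFFFFFFFE, far above 9
  return 0;
}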
2134 2215
2135 2216
2136 static inline Object* MakeOrFindTwoCharacterString(uint32_t c1, uint32_t c2) { 2217 MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
2218 uint32_t c1,
2219 uint32_t c2) {
2137 String* symbol; 2220 String* symbol;
2138 // Numeric strings have a different hash algorithm not known by 2221 // Numeric strings have a different hash algorithm not known by
2139 // LookupTwoCharsSymbolIfExists, so we skip this step for such strings. 2222 // LookupTwoCharsSymbolIfExists, so we skip this step for such strings.
2140 if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) && 2223 if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) &&
2141 Heap::symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) { 2224 Heap::symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
2142 return symbol; 2225 return symbol;
2143 // Now we know the length is 2, we might as well make use of that fact 2226 // Now we know the length is 2, we might as well make use of that fact
2144 // when building the new string. 2227 // when building the new string.
2145 } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) { // We can do this 2228 } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) { // We can do this
2146 ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1)); // because of this. 2229 ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1)); // because of this.
2147 Object* result = Heap::AllocateRawAsciiString(2); 2230 Object* result;
2148 if (result->IsFailure()) return result; 2231 { MaybeObject* maybe_result = Heap::AllocateRawAsciiString(2);
2232 if (!maybe_result->ToObject(&result)) return maybe_result;
2233 }
2149 char* dest = SeqAsciiString::cast(result)->GetChars(); 2234 char* dest = SeqAsciiString::cast(result)->GetChars();
2150 dest[0] = c1; 2235 dest[0] = c1;
2151 dest[1] = c2; 2236 dest[1] = c2;
2152 return result; 2237 return result;
2153 } else { 2238 } else {
2154 Object* result = Heap::AllocateRawTwoByteString(2); 2239 Object* result;
2155 if (result->IsFailure()) return result; 2240 { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(2);
2241 if (!maybe_result->ToObject(&result)) return maybe_result;
2242 }
2156 uc16* dest = SeqTwoByteString::cast(result)->GetChars(); 2243 uc16* dest = SeqTwoByteString::cast(result)->GetChars();
2157 dest[0] = c1; 2244 dest[0] = c1;
2158 dest[1] = c2; 2245 dest[1] = c2;
2159 return result; 2246 return result;
2160 } 2247 }
2161 } 2248 }
2162 2249
2163 2250
2164 Object* Heap::AllocateConsString(String* first, String* second) { 2251 MaybeObject* Heap::AllocateConsString(String* first, String* second) {
2165 int first_length = first->length(); 2252 int first_length = first->length();
2166 if (first_length == 0) { 2253 if (first_length == 0) {
2167 return second; 2254 return second;
2168 } 2255 }
2169 2256
2170 int second_length = second->length(); 2257 int second_length = second->length();
2171 if (second_length == 0) { 2258 if (second_length == 0) {
2172 return first; 2259 return first;
2173 } 2260 }
2174 2261
(...skipping 29 matching lines...)
2204 if (is_ascii_data_in_two_byte_string) { 2291 if (is_ascii_data_in_two_byte_string) {
2205 Counters::string_add_runtime_ext_to_ascii.Increment(); 2292 Counters::string_add_runtime_ext_to_ascii.Increment();
2206 } 2293 }
2207 } 2294 }
2208 2295
2209 // If the resulting string is small make a flat string. 2296 // If the resulting string is small make a flat string.
2210 if (length < String::kMinNonFlatLength) { 2297 if (length < String::kMinNonFlatLength) {
2211 ASSERT(first->IsFlat()); 2298 ASSERT(first->IsFlat());
2212 ASSERT(second->IsFlat()); 2299 ASSERT(second->IsFlat());
2213 if (is_ascii) { 2300 if (is_ascii) {
2214 Object* result = AllocateRawAsciiString(length); 2301 Object* result;
2215 if (result->IsFailure()) return result; 2302 { MaybeObject* maybe_result = AllocateRawAsciiString(length);
2303 if (!maybe_result->ToObject(&result)) return maybe_result;
2304 }
2216 // Copy the characters into the new object. 2305 // Copy the characters into the new object.
2217 char* dest = SeqAsciiString::cast(result)->GetChars(); 2306 char* dest = SeqAsciiString::cast(result)->GetChars();
2218 // Copy first part. 2307 // Copy first part.
2219 const char* src; 2308 const char* src;
2220 if (first->IsExternalString()) { 2309 if (first->IsExternalString()) {
2221 src = ExternalAsciiString::cast(first)->resource()->data(); 2310 src = ExternalAsciiString::cast(first)->resource()->data();
2222 } else { 2311 } else {
2223 src = SeqAsciiString::cast(first)->GetChars(); 2312 src = SeqAsciiString::cast(first)->GetChars();
2224 } 2313 }
2225 for (int i = 0; i < first_length; i++) *dest++ = src[i]; 2314 for (int i = 0; i < first_length; i++) *dest++ = src[i];
2226 // Copy second part. 2315 // Copy second part.
2227 if (second->IsExternalString()) { 2316 if (second->IsExternalString()) {
2228 src = ExternalAsciiString::cast(second)->resource()->data(); 2317 src = ExternalAsciiString::cast(second)->resource()->data();
2229 } else { 2318 } else {
2230 src = SeqAsciiString::cast(second)->GetChars(); 2319 src = SeqAsciiString::cast(second)->GetChars();
2231 } 2320 }
2232 for (int i = 0; i < second_length; i++) *dest++ = src[i]; 2321 for (int i = 0; i < second_length; i++) *dest++ = src[i];
2233 return result; 2322 return result;
2234 } else { 2323 } else {
2235 if (is_ascii_data_in_two_byte_string) { 2324 if (is_ascii_data_in_two_byte_string) {
2236 Object* result = AllocateRawAsciiString(length); 2325 Object* result;
2237 if (result->IsFailure()) return result; 2326 { MaybeObject* maybe_result = AllocateRawAsciiString(length);
2327 if (!maybe_result->ToObject(&result)) return maybe_result;
2328 }
2238 // Copy the characters into the new object. 2329 // Copy the characters into the new object.
2239 char* dest = SeqAsciiString::cast(result)->GetChars(); 2330 char* dest = SeqAsciiString::cast(result)->GetChars();
2240 String::WriteToFlat(first, dest, 0, first_length); 2331 String::WriteToFlat(first, dest, 0, first_length);
2241 String::WriteToFlat(second, dest + first_length, 0, second_length); 2332 String::WriteToFlat(second, dest + first_length, 0, second_length);
2242 return result; 2333 return result;
2243 } 2334 }
2244 2335
2245 Object* result = AllocateRawTwoByteString(length); 2336 Object* result;
2246 if (result->IsFailure()) return result; 2337 { MaybeObject* maybe_result = AllocateRawTwoByteString(length);
2338 if (!maybe_result->ToObject(&result)) return maybe_result;
2339 }
2247 // Copy the characters into the new object. 2340 // Copy the characters into the new object.
2248 uc16* dest = SeqTwoByteString::cast(result)->GetChars(); 2341 uc16* dest = SeqTwoByteString::cast(result)->GetChars();
2249 String::WriteToFlat(first, dest, 0, first_length); 2342 String::WriteToFlat(first, dest, 0, first_length);
2250 String::WriteToFlat(second, dest + first_length, 0, second_length); 2343 String::WriteToFlat(second, dest + first_length, 0, second_length);
2251 return result; 2344 return result;
2252 } 2345 }
2253 } 2346 }
2254 2347
2255 Map* map = (is_ascii || is_ascii_data_in_two_byte_string) ? 2348 Map* map = (is_ascii || is_ascii_data_in_two_byte_string) ?
2256 cons_ascii_string_map() : cons_string_map(); 2349 cons_ascii_string_map() : cons_string_map();
2257 2350
2258 Object* result = Allocate(map, NEW_SPACE); 2351 Object* result;
2259 if (result->IsFailure()) return result; 2352 { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
2353 if (!maybe_result->ToObject(&result)) return maybe_result;
2354 }
2260 2355
2261 AssertNoAllocation no_gc; 2356 AssertNoAllocation no_gc;
2262 ConsString* cons_string = ConsString::cast(result); 2357 ConsString* cons_string = ConsString::cast(result);
2263 WriteBarrierMode mode = cons_string->GetWriteBarrierMode(no_gc); 2358 WriteBarrierMode mode = cons_string->GetWriteBarrierMode(no_gc);
2264 cons_string->set_length(length); 2359 cons_string->set_length(length);
2265 cons_string->set_hash_field(String::kEmptyHashField); 2360 cons_string->set_hash_field(String::kEmptyHashField);
2266 cons_string->set_first(first, mode); 2361 cons_string->set_first(first, mode);
2267 cons_string->set_second(second, mode); 2362 cons_string->set_second(second, mode);
2268 return result; 2363 return result;
2269 } 2364 }
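A summary of the representation choices made by AllocateConsString above (the constants come from the String/Heap headers; flattening of the cons case happens later, for example via TryFlattenGetString in AllocateSubString below):

//   first is empty                      -> second is returned unchanged
//   second is empty                     -> first is returned unchanged
//   length < String::kMinNonFlatLength  -> a flat SeqAsciiString or
//                                          SeqTwoByteString; characters are
//                                          copied immediately
//   otherwise                           -> a ConsString pointing at first and
//                                          second; no characters are copied
//                                          at allocation time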
2270 2365
2271 2366
2272 Object* Heap::AllocateSubString(String* buffer, 2367 MaybeObject* Heap::AllocateSubString(String* buffer,
2273 int start, 2368 int start,
2274 int end, 2369 int end,
2275 PretenureFlag pretenure) { 2370 PretenureFlag pretenure) {
2276 int length = end - start; 2371 int length = end - start;
2277 2372
2278 if (length == 1) { 2373 if (length == 1) {
2279 return Heap::LookupSingleCharacterStringFromCode( 2374 return Heap::LookupSingleCharacterStringFromCode(
2280 buffer->Get(start)); 2375 buffer->Get(start));
2281 } else if (length == 2) { 2376 } else if (length == 2) {
2282 // Optimization for 2-byte strings often used as keys in a decompression 2377 // Optimization for 2-byte strings often used as keys in a decompression
2283 // dictionary. Check whether we already have the string in the symbol 2378 // dictionary. Check whether we already have the string in the symbol
 2284 // table to prevent creation of many unnecessary strings. 2379 // table to prevent creation of many unnecessary strings.
2285 unsigned c1 = buffer->Get(start); 2380 unsigned c1 = buffer->Get(start);
2286 unsigned c2 = buffer->Get(start + 1); 2381 unsigned c2 = buffer->Get(start + 1);
2287 return MakeOrFindTwoCharacterString(c1, c2); 2382 return MakeOrFindTwoCharacterString(c1, c2);
2288 } 2383 }
2289 2384
2290 // Make an attempt to flatten the buffer to reduce access time. 2385 // Make an attempt to flatten the buffer to reduce access time.
2291 buffer = buffer->TryFlattenGetString(); 2386 buffer = buffer->TryFlattenGetString();
2292 2387
2293 Object* result = buffer->IsAsciiRepresentation() 2388 Object* result;
2294 ? AllocateRawAsciiString(length, pretenure ) 2389 { MaybeObject* maybe_result = buffer->IsAsciiRepresentation()
2295 : AllocateRawTwoByteString(length, pretenure); 2390 ? AllocateRawAsciiString(length, pretenure )
2296 if (result->IsFailure()) return result; 2391 : AllocateRawTwoByteString(length, pretenure);
2392 if (!maybe_result->ToObject(&result)) return maybe_result;
2393 }
2297 String* string_result = String::cast(result); 2394 String* string_result = String::cast(result);
2298 // Copy the characters into the new object. 2395 // Copy the characters into the new object.
2299 if (buffer->IsAsciiRepresentation()) { 2396 if (buffer->IsAsciiRepresentation()) {
2300 ASSERT(string_result->IsAsciiRepresentation()); 2397 ASSERT(string_result->IsAsciiRepresentation());
2301 char* dest = SeqAsciiString::cast(string_result)->GetChars(); 2398 char* dest = SeqAsciiString::cast(string_result)->GetChars();
2302 String::WriteToFlat(buffer, dest, start, end); 2399 String::WriteToFlat(buffer, dest, start, end);
2303 } else { 2400 } else {
2304 ASSERT(string_result->IsTwoByteRepresentation()); 2401 ASSERT(string_result->IsTwoByteRepresentation());
2305 uc16* dest = SeqTwoByteString::cast(string_result)->GetChars(); 2402 uc16* dest = SeqTwoByteString::cast(string_result)->GetChars();
2306 String::WriteToFlat(buffer, dest, start, end); 2403 String::WriteToFlat(buffer, dest, start, end);
2307 } 2404 }
2308 2405
2309 return result; 2406 return result;
2310 } 2407 }
2311 2408
2312 2409
2313 Object* Heap::AllocateExternalStringFromAscii( 2410 MaybeObject* Heap::AllocateExternalStringFromAscii(
2314 ExternalAsciiString::Resource* resource) { 2411 ExternalAsciiString::Resource* resource) {
2315 size_t length = resource->length(); 2412 size_t length = resource->length();
2316 if (length > static_cast<size_t>(String::kMaxLength)) { 2413 if (length > static_cast<size_t>(String::kMaxLength)) {
2317 Top::context()->mark_out_of_memory(); 2414 Top::context()->mark_out_of_memory();
2318 return Failure::OutOfMemoryException(); 2415 return Failure::OutOfMemoryException();
2319 } 2416 }
2320 2417
2321 Map* map = external_ascii_string_map(); 2418 Map* map = external_ascii_string_map();
2322 Object* result = Allocate(map, NEW_SPACE); 2419 Object* result;
2323 if (result->IsFailure()) return result; 2420 { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
2421 if (!maybe_result->ToObject(&result)) return maybe_result;
2422 }
2324 2423
2325 ExternalAsciiString* external_string = ExternalAsciiString::cast(result); 2424 ExternalAsciiString* external_string = ExternalAsciiString::cast(result);
2326 external_string->set_length(static_cast<int>(length)); 2425 external_string->set_length(static_cast<int>(length));
2327 external_string->set_hash_field(String::kEmptyHashField); 2426 external_string->set_hash_field(String::kEmptyHashField);
2328 external_string->set_resource(resource); 2427 external_string->set_resource(resource);
2329 2428
2330 return result; 2429 return result;
2331 } 2430 }
2332 2431
2333 2432
2334 Object* Heap::AllocateExternalStringFromTwoByte( 2433 MaybeObject* Heap::AllocateExternalStringFromTwoByte(
2335 ExternalTwoByteString::Resource* resource) { 2434 ExternalTwoByteString::Resource* resource) {
2336 size_t length = resource->length(); 2435 size_t length = resource->length();
2337 if (length > static_cast<size_t>(String::kMaxLength)) { 2436 if (length > static_cast<size_t>(String::kMaxLength)) {
2338 Top::context()->mark_out_of_memory(); 2437 Top::context()->mark_out_of_memory();
2339 return Failure::OutOfMemoryException(); 2438 return Failure::OutOfMemoryException();
2340 } 2439 }
2341 2440
2342 // For small strings we check whether the resource contains only 2441 // For small strings we check whether the resource contains only
2343 // ascii characters. If yes, we use a different string map. 2442 // ascii characters. If yes, we use a different string map.
2344 bool is_ascii = true; 2443 bool is_ascii = true;
2345 if (length >= static_cast<size_t>(String::kMinNonFlatLength)) { 2444 if (length >= static_cast<size_t>(String::kMinNonFlatLength)) {
2346 is_ascii = false; 2445 is_ascii = false;
2347 } else { 2446 } else {
2348 const uc16* data = resource->data(); 2447 const uc16* data = resource->data();
2349 for (size_t i = 0; i < length; i++) { 2448 for (size_t i = 0; i < length; i++) {
2350 if (data[i] > String::kMaxAsciiCharCode) { 2449 if (data[i] > String::kMaxAsciiCharCode) {
2351 is_ascii = false; 2450 is_ascii = false;
2352 break; 2451 break;
2353 } 2452 }
2354 } 2453 }
2355 } 2454 }
2356 2455
2357 Map* map = is_ascii ? 2456 Map* map = is_ascii ?
2358 Heap::external_string_with_ascii_data_map() : Heap::external_string_map(); 2457 Heap::external_string_with_ascii_data_map() : Heap::external_string_map();
2359 Object* result = Allocate(map, NEW_SPACE); 2458 Object* result;
2360 if (result->IsFailure()) return result; 2459 { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
2460 if (!maybe_result->ToObject(&result)) return maybe_result;
2461 }
2361 2462
2362 ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result); 2463 ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result);
2363 external_string->set_length(static_cast<int>(length)); 2464 external_string->set_length(static_cast<int>(length));
2364 external_string->set_hash_field(String::kEmptyHashField); 2465 external_string->set_hash_field(String::kEmptyHashField);
2365 external_string->set_resource(resource); 2466 external_string->set_resource(resource);
2366 2467
2367 return result; 2468 return result;
2368 } 2469 }
2369 2470
2370 2471
2371 Object* Heap::LookupSingleCharacterStringFromCode(uint16_t code) { 2472 MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
2372 if (code <= String::kMaxAsciiCharCode) { 2473 if (code <= String::kMaxAsciiCharCode) {
2373 Object* value = Heap::single_character_string_cache()->get(code); 2474 Object* value = Heap::single_character_string_cache()->get(code);
2374 if (value != Heap::undefined_value()) return value; 2475 if (value != Heap::undefined_value()) return value;
2375 2476
2376 char buffer[1]; 2477 char buffer[1];
2377 buffer[0] = static_cast<char>(code); 2478 buffer[0] = static_cast<char>(code);
2378 Object* result = LookupSymbol(Vector<const char>(buffer, 1)); 2479 Object* result;
2480 MaybeObject* maybe_result = LookupSymbol(Vector<const char>(buffer, 1));
2379 2481
2380 if (result->IsFailure()) return result; 2482 if (!maybe_result->ToObject(&result)) return maybe_result;
2381 Heap::single_character_string_cache()->set(code, result); 2483 Heap::single_character_string_cache()->set(code, result);
2382 return result; 2484 return result;
2383 } 2485 }
2384 2486
2385 Object* result = Heap::AllocateRawTwoByteString(1); 2487 Object* result;
2386 if (result->IsFailure()) return result; 2488 { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(1);
2489 if (!maybe_result->ToObject(&result)) return maybe_result;
2490 }
2387 String* answer = String::cast(result); 2491 String* answer = String::cast(result);
2388 answer->Set(0, code); 2492 answer->Set(0, code);
2389 return answer; 2493 return answer;
2390 } 2494 }
2391 2495
2392 2496
2393 Object* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { 2497 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
2394 if (length < 0 || length > ByteArray::kMaxLength) { 2498 if (length < 0 || length > ByteArray::kMaxLength) {
2395 return Failure::OutOfMemoryException(); 2499 return Failure::OutOfMemoryException();
2396 } 2500 }
2397 if (pretenure == NOT_TENURED) { 2501 if (pretenure == NOT_TENURED) {
2398 return AllocateByteArray(length); 2502 return AllocateByteArray(length);
2399 } 2503 }
2400 int size = ByteArray::SizeFor(length); 2504 int size = ByteArray::SizeFor(length);
2401 Object* result = (size <= MaxObjectSizeInPagedSpace()) 2505 Object* result;
2402 ? old_data_space_->AllocateRaw(size) 2506 { MaybeObject* maybe_result = (size <= MaxObjectSizeInPagedSpace())
2403 : lo_space_->AllocateRaw(size); 2507 ? old_data_space_->AllocateRaw(size)
2404 if (result->IsFailure()) return result; 2508 : lo_space_->AllocateRaw(size);
2509 if (!maybe_result->ToObject(&result)) return maybe_result;
2510 }
2405 2511
2406 reinterpret_cast<ByteArray*>(result)->set_map(byte_array_map()); 2512 reinterpret_cast<ByteArray*>(result)->set_map(byte_array_map());
2407 reinterpret_cast<ByteArray*>(result)->set_length(length); 2513 reinterpret_cast<ByteArray*>(result)->set_length(length);
2408 return result; 2514 return result;
2409 } 2515 }
2410 2516
2411 2517
2412 Object* Heap::AllocateByteArray(int length) { 2518 MaybeObject* Heap::AllocateByteArray(int length) {
2413 if (length < 0 || length > ByteArray::kMaxLength) { 2519 if (length < 0 || length > ByteArray::kMaxLength) {
2414 return Failure::OutOfMemoryException(); 2520 return Failure::OutOfMemoryException();
2415 } 2521 }
2416 int size = ByteArray::SizeFor(length); 2522 int size = ByteArray::SizeFor(length);
2417 AllocationSpace space = 2523 AllocationSpace space =
2418 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : NEW_SPACE; 2524 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : NEW_SPACE;
2419 Object* result = AllocateRaw(size, space, OLD_DATA_SPACE); 2525 Object* result;
2420 if (result->IsFailure()) return result; 2526 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
2527 if (!maybe_result->ToObject(&result)) return maybe_result;
2528 }
2421 2529
2422 reinterpret_cast<ByteArray*>(result)->set_map(byte_array_map()); 2530 reinterpret_cast<ByteArray*>(result)->set_map(byte_array_map());
2423 reinterpret_cast<ByteArray*>(result)->set_length(length); 2531 reinterpret_cast<ByteArray*>(result)->set_length(length);
2424 return result; 2532 return result;
2425 } 2533 }
2426 2534
2427 2535
2428 void Heap::CreateFillerObjectAt(Address addr, int size) { 2536 void Heap::CreateFillerObjectAt(Address addr, int size) {
2429 if (size == 0) return; 2537 if (size == 0) return;
2430 HeapObject* filler = HeapObject::FromAddress(addr); 2538 HeapObject* filler = HeapObject::FromAddress(addr);
2431 if (size == kPointerSize) { 2539 if (size == kPointerSize) {
2432 filler->set_map(one_pointer_filler_map()); 2540 filler->set_map(one_pointer_filler_map());
2433 } else if (size == 2 * kPointerSize) { 2541 } else if (size == 2 * kPointerSize) {
2434 filler->set_map(two_pointer_filler_map()); 2542 filler->set_map(two_pointer_filler_map());
2435 } else { 2543 } else {
2436 filler->set_map(byte_array_map()); 2544 filler->set_map(byte_array_map());
2437 ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size)); 2545 ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size));
2438 } 2546 }
2439 } 2547 }
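The filler written above keeps the heap iterable: sweepers and heap iterators must always see a valid object in any unused gap. A sketch of a typical call site when an allocation is shrunk in place (object, old_size and new_size are hypothetical names, not taken from this file):

// Sketch only.
Address gap_start = object->address() + new_size;   // first byte no longer used
int gap_size = old_size - new_size;                 // bytes that must be covered
Heap::CreateFillerObjectAt(gap_start, gap_size);
// gap_size == kPointerSize      -> tagged with one_pointer_filler_map
// gap_size == 2 * kPointerSize  -> tagged with two_pointer_filler_map
// larger gaps                   -> a ByteArray sized to span the gap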
2440 2548
2441 2549
2442 Object* Heap::AllocatePixelArray(int length, 2550 MaybeObject* Heap::AllocatePixelArray(int length,
2443 uint8_t* external_pointer, 2551 uint8_t* external_pointer,
2444 PretenureFlag pretenure) { 2552 PretenureFlag pretenure) {
2445 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 2553 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2446 Object* result = AllocateRaw(PixelArray::kAlignedSize, space, OLD_DATA_SPACE); 2554 Object* result;
2447 if (result->IsFailure()) return result; 2555 { MaybeObject* maybe_result =
2556 AllocateRaw(PixelArray::kAlignedSize, space, OLD_DATA_SPACE);
2557 if (!maybe_result->ToObject(&result)) return maybe_result;
2558 }
2448 2559
2449 reinterpret_cast<PixelArray*>(result)->set_map(pixel_array_map()); 2560 reinterpret_cast<PixelArray*>(result)->set_map(pixel_array_map());
2450 reinterpret_cast<PixelArray*>(result)->set_length(length); 2561 reinterpret_cast<PixelArray*>(result)->set_length(length);
2451 reinterpret_cast<PixelArray*>(result)->set_external_pointer(external_pointer); 2562 reinterpret_cast<PixelArray*>(result)->set_external_pointer(external_pointer);
2452 2563
2453 return result; 2564 return result;
2454 } 2565 }
2455 2566
2456 2567
2457 Object* Heap::AllocateExternalArray(int length, 2568 MaybeObject* Heap::AllocateExternalArray(int length,
2458 ExternalArrayType array_type, 2569 ExternalArrayType array_type,
2459 void* external_pointer, 2570 void* external_pointer,
2460 PretenureFlag pretenure) { 2571 PretenureFlag pretenure) {
2461 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 2572 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2462 Object* result = AllocateRaw(ExternalArray::kAlignedSize, 2573 Object* result;
2463 space, 2574 { MaybeObject* maybe_result = AllocateRaw(ExternalArray::kAlignedSize,
2464 OLD_DATA_SPACE); 2575 space,
2465 if (result->IsFailure()) return result; 2576 OLD_DATA_SPACE);
2577 if (!maybe_result->ToObject(&result)) return maybe_result;
2578 }
2466 2579
2467 reinterpret_cast<ExternalArray*>(result)->set_map( 2580 reinterpret_cast<ExternalArray*>(result)->set_map(
2468 MapForExternalArrayType(array_type)); 2581 MapForExternalArrayType(array_type));
2469 reinterpret_cast<ExternalArray*>(result)->set_length(length); 2582 reinterpret_cast<ExternalArray*>(result)->set_length(length);
2470 reinterpret_cast<ExternalArray*>(result)->set_external_pointer( 2583 reinterpret_cast<ExternalArray*>(result)->set_external_pointer(
2471 external_pointer); 2584 external_pointer);
2472 2585
2473 return result; 2586 return result;
2474 } 2587 }
2475 2588
2476 2589
2477 Object* Heap::CreateCode(const CodeDesc& desc, 2590 MaybeObject* Heap::CreateCode(const CodeDesc& desc,
2478 Code::Flags flags, 2591 Code::Flags flags,
2479 Handle<Object> self_reference) { 2592 Handle<Object> self_reference) {
2480 // Allocate ByteArray before the Code object, so that we do not risk 2593 // Allocate ByteArray before the Code object, so that we do not risk
 2481 // leaving an uninitialized Code object (and breaking the heap). 2594 // leaving an uninitialized Code object (and breaking the heap).
2482 Object* reloc_info = AllocateByteArray(desc.reloc_size, TENURED); 2595 Object* reloc_info;
2483 if (reloc_info->IsFailure()) return reloc_info; 2596 { MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED);
2597 if (!maybe_reloc_info->ToObject(&reloc_info)) return maybe_reloc_info;
2598 }
2484 2599
2485 // Compute size 2600 // Compute size
2486 int body_size = RoundUp(desc.instr_size, kObjectAlignment); 2601 int body_size = RoundUp(desc.instr_size, kObjectAlignment);
2487 int obj_size = Code::SizeFor(body_size); 2602 int obj_size = Code::SizeFor(body_size);
2488 ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment)); 2603 ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment));
2489 Object* result; 2604 MaybeObject* maybe_result;
2490 if (obj_size > MaxObjectSizeInPagedSpace()) { 2605 if (obj_size > MaxObjectSizeInPagedSpace()) {
2491 result = lo_space_->AllocateRawCode(obj_size); 2606 maybe_result = lo_space_->AllocateRawCode(obj_size);
2492 } else { 2607 } else {
2493 result = code_space_->AllocateRaw(obj_size); 2608 maybe_result = code_space_->AllocateRaw(obj_size);
2494 } 2609 }
2495 2610
2496 if (result->IsFailure()) return result; 2611 Object* result;
2612 if (!maybe_result->ToObject(&result)) return maybe_result;
2497 2613
2498 // Initialize the object 2614 // Initialize the object
2499 HeapObject::cast(result)->set_map(code_map()); 2615 HeapObject::cast(result)->set_map(code_map());
2500 Code* code = Code::cast(result); 2616 Code* code = Code::cast(result);
2501 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address())); 2617 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
2502 code->set_instruction_size(desc.instr_size); 2618 code->set_instruction_size(desc.instr_size);
2503 code->set_relocation_info(ByteArray::cast(reloc_info)); 2619 code->set_relocation_info(ByteArray::cast(reloc_info));
2504 code->set_flags(flags); 2620 code->set_flags(flags);
2505 // Allow self references to created code object by patching the handle to 2621 // Allow self references to created code object by patching the handle to
2506 // point to the newly allocated Code object. 2622 // point to the newly allocated Code object.
2507 if (!self_reference.is_null()) { 2623 if (!self_reference.is_null()) {
2508 *(self_reference.location()) = code; 2624 *(self_reference.location()) = code;
2509 } 2625 }
2510 // Migrate generated code. 2626 // Migrate generated code.
2511 // The generated code can contain Object** values (typically from handles) 2627 // The generated code can contain Object** values (typically from handles)
2512 // that are dereferenced during the copy to point directly to the actual heap 2628 // that are dereferenced during the copy to point directly to the actual heap
2513 // objects. These pointers can include references to the code object itself, 2629 // objects. These pointers can include references to the code object itself,
2514 // through the self_reference parameter. 2630 // through the self_reference parameter.
2515 code->CopyFrom(desc); 2631 code->CopyFrom(desc);
2516 2632
2517 #ifdef DEBUG 2633 #ifdef DEBUG
2518 code->Verify(); 2634 code->Verify();
2519 #endif 2635 #endif
2520 return code; 2636 return code;
2521 } 2637 }
2522 2638
2523 2639
2524 Object* Heap::CopyCode(Code* code) { 2640 MaybeObject* Heap::CopyCode(Code* code) {
2525 // Allocate an object the same size as the code object. 2641 // Allocate an object the same size as the code object.
2526 int obj_size = code->Size(); 2642 int obj_size = code->Size();
2527 Object* result; 2643 MaybeObject* maybe_result;
2528 if (obj_size > MaxObjectSizeInPagedSpace()) { 2644 if (obj_size > MaxObjectSizeInPagedSpace()) {
2529 result = lo_space_->AllocateRawCode(obj_size); 2645 maybe_result = lo_space_->AllocateRawCode(obj_size);
2530 } else { 2646 } else {
2531 result = code_space_->AllocateRaw(obj_size); 2647 maybe_result = code_space_->AllocateRaw(obj_size);
2532 } 2648 }
2533 2649
2534 if (result->IsFailure()) return result; 2650 Object* result;
2651 if (!maybe_result->ToObject(&result)) return maybe_result;
2535 2652
2536 // Copy code object. 2653 // Copy code object.
2537 Address old_addr = code->address(); 2654 Address old_addr = code->address();
2538 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); 2655 Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
2539 CopyBlock(new_addr, old_addr, obj_size); 2656 CopyBlock(new_addr, old_addr, obj_size);
2540 // Relocate the copy. 2657 // Relocate the copy.
2541 Code* new_code = Code::cast(result); 2658 Code* new_code = Code::cast(result);
2542 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address())); 2659 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
2543 new_code->Relocate(new_addr - old_addr); 2660 new_code->Relocate(new_addr - old_addr);
2544 return new_code; 2661 return new_code;
2545 } 2662 }
2546 2663
2547 2664
2548 Object* Heap::CopyCode(Code* code, Vector<byte> reloc_info) { 2665 MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
2549 // Allocate ByteArray before the Code object, so that we do not risk 2666 // Allocate ByteArray before the Code object, so that we do not risk
 2550 // leaving an uninitialized Code object (and breaking the heap). 2667 // leaving an uninitialized Code object (and breaking the heap).
2551 Object* reloc_info_array = AllocateByteArray(reloc_info.length(), TENURED); 2668 Object* reloc_info_array;
2552 if (reloc_info_array->IsFailure()) return reloc_info_array; 2669 { MaybeObject* maybe_reloc_info_array =
2670 AllocateByteArray(reloc_info.length(), TENURED);
2671 if (!maybe_reloc_info_array->ToObject(&reloc_info_array)) {
2672 return maybe_reloc_info_array;
2673 }
2674 }
2553 2675
2554 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment); 2676 int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);
2555 2677
2556 int new_obj_size = Code::SizeFor(new_body_size); 2678 int new_obj_size = Code::SizeFor(new_body_size);
2557 2679
2558 Address old_addr = code->address(); 2680 Address old_addr = code->address();
2559 2681
2560 size_t relocation_offset = 2682 size_t relocation_offset =
2561 static_cast<size_t>(code->instruction_end() - old_addr); 2683 static_cast<size_t>(code->instruction_end() - old_addr);
2562 2684
2563 Object* result; 2685 MaybeObject* maybe_result;
2564 if (new_obj_size > MaxObjectSizeInPagedSpace()) { 2686 if (new_obj_size > MaxObjectSizeInPagedSpace()) {
2565 result = lo_space_->AllocateRawCode(new_obj_size); 2687 maybe_result = lo_space_->AllocateRawCode(new_obj_size);
2566 } else { 2688 } else {
2567 result = code_space_->AllocateRaw(new_obj_size); 2689 maybe_result = code_space_->AllocateRaw(new_obj_size);
2568 } 2690 }
2569 2691
2570 if (result->IsFailure()) return result; 2692 Object* result;
2693 if (!maybe_result->ToObject(&result)) return maybe_result;
2571 2694
2572 // Copy code object. 2695 // Copy code object.
2573 Address new_addr = reinterpret_cast<HeapObject*>(result)->address(); 2696 Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
2574 2697
2575 // Copy header and instructions. 2698 // Copy header and instructions.
2576 memcpy(new_addr, old_addr, relocation_offset); 2699 memcpy(new_addr, old_addr, relocation_offset);
2577 2700
2578 Code* new_code = Code::cast(result); 2701 Code* new_code = Code::cast(result);
2579 new_code->set_relocation_info(ByteArray::cast(reloc_info_array)); 2702 new_code->set_relocation_info(ByteArray::cast(reloc_info_array));
2580 2703
2581 // Copy patched rinfo. 2704 // Copy patched rinfo.
2582 memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length()); 2705 memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());
2583 2706
2584 // Relocate the copy. 2707 // Relocate the copy.
2585 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address())); 2708 ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
2586 new_code->Relocate(new_addr - old_addr); 2709 new_code->Relocate(new_addr - old_addr);
2587 2710
2588 #ifdef DEBUG 2711 #ifdef DEBUG
2589 code->Verify(); 2712 code->Verify();
2590 #endif 2713 #endif
2591 return new_code; 2714 return new_code;
2592 } 2715 }
2593 2716
2594 2717
2595 Object* Heap::Allocate(Map* map, AllocationSpace space) { 2718 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
2596 ASSERT(gc_state_ == NOT_IN_GC); 2719 ASSERT(gc_state_ == NOT_IN_GC);
2597 ASSERT(map->instance_type() != MAP_TYPE); 2720 ASSERT(map->instance_type() != MAP_TYPE);
2598 // If allocation failures are disallowed, we may allocate in a different 2721 // If allocation failures are disallowed, we may allocate in a different
2599 // space when new space is full and the object is not a large object. 2722 // space when new space is full and the object is not a large object.
2600 AllocationSpace retry_space = 2723 AllocationSpace retry_space =
2601 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); 2724 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
2602 Object* result = 2725 Object* result;
2603 AllocateRaw(map->instance_size(), space, retry_space); 2726 { MaybeObject* maybe_result =
2604 if (result->IsFailure()) return result; 2727 AllocateRaw(map->instance_size(), space, retry_space);
2728 if (!maybe_result->ToObject(&result)) return maybe_result;
2729 }
2605 HeapObject::cast(result)->set_map(map); 2730 HeapObject::cast(result)->set_map(map);
2606 #ifdef ENABLE_LOGGING_AND_PROFILING 2731 #ifdef ENABLE_LOGGING_AND_PROFILING
2607 ProducerHeapProfile::RecordJSObjectAllocation(result); 2732 ProducerHeapProfile::RecordJSObjectAllocation(result);
2608 #endif 2733 #endif
2609 return result; 2734 return result;
2610 } 2735 }
2611 2736
2612 2737
2613 Object* Heap::InitializeFunction(JSFunction* function, 2738 MaybeObject* Heap::InitializeFunction(JSFunction* function,
2614 SharedFunctionInfo* shared, 2739 SharedFunctionInfo* shared,
2615 Object* prototype) { 2740 Object* prototype) {
2616 ASSERT(!prototype->IsMap()); 2741 ASSERT(!prototype->IsMap());
2617 function->initialize_properties(); 2742 function->initialize_properties();
2618 function->initialize_elements(); 2743 function->initialize_elements();
2619 function->set_shared(shared); 2744 function->set_shared(shared);
2620 function->set_code(shared->code()); 2745 function->set_code(shared->code());
2621 function->set_prototype_or_initial_map(prototype); 2746 function->set_prototype_or_initial_map(prototype);
2622 function->set_context(undefined_value()); 2747 function->set_context(undefined_value());
2623 function->set_literals(empty_fixed_array()); 2748 function->set_literals(empty_fixed_array());
2624 return function; 2749 return function;
2625 } 2750 }
2626 2751
2627 2752
2628 Object* Heap::AllocateFunctionPrototype(JSFunction* function) { 2753 MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) {
2629 // Allocate the prototype. Make sure to use the object function 2754 // Allocate the prototype. Make sure to use the object function
2630 // from the function's context, since the function can be from a 2755 // from the function's context, since the function can be from a
2631 // different context. 2756 // different context.
2632 JSFunction* object_function = 2757 JSFunction* object_function =
2633 function->context()->global_context()->object_function(); 2758 function->context()->global_context()->object_function();
2634 Object* prototype = AllocateJSObject(object_function); 2759 Object* prototype;
2635 if (prototype->IsFailure()) return prototype; 2760 { MaybeObject* maybe_prototype = AllocateJSObject(object_function);
2761 if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
2762 }
2636 // When creating the prototype for the function we must set its 2763 // When creating the prototype for the function we must set its
2637 // constructor to the function. 2764 // constructor to the function.
2638 Object* result = 2765 Object* result;
2639 JSObject::cast(prototype)->SetProperty(constructor_symbol(), 2766 { MaybeObject* maybe_result =
2640 function, 2767 JSObject::cast(prototype)->SetProperty(constructor_symbol(),
2641 DONT_ENUM); 2768 function,
2642 if (result->IsFailure()) return result; 2769 DONT_ENUM);
2770 if (!maybe_result->ToObject(&result)) return maybe_result;
2771 }
2643 return prototype; 2772 return prototype;
2644 } 2773 }
2645 2774
2646 2775
2647 Object* Heap::AllocateFunction(Map* function_map, 2776 MaybeObject* Heap::AllocateFunction(Map* function_map,
2648 SharedFunctionInfo* shared, 2777 SharedFunctionInfo* shared,
2649 Object* prototype, 2778 Object* prototype,
2650 PretenureFlag pretenure) { 2779 PretenureFlag pretenure) {
2651 AllocationSpace space = 2780 AllocationSpace space =
2652 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; 2781 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
2653 Object* result = Allocate(function_map, space); 2782 Object* result;
2654 if (result->IsFailure()) return result; 2783 { MaybeObject* maybe_result = Allocate(function_map, space);
2784 if (!maybe_result->ToObject(&result)) return maybe_result;
2785 }
2655 return InitializeFunction(JSFunction::cast(result), shared, prototype); 2786 return InitializeFunction(JSFunction::cast(result), shared, prototype);
2656 } 2787 }
2657 2788
2658 2789
2659 Object* Heap::AllocateArgumentsObject(Object* callee, int length) { 2790 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
2660 // To get fast allocation and map sharing for arguments objects we 2791 // To get fast allocation and map sharing for arguments objects we
2661 // allocate them based on an arguments boilerplate. 2792 // allocate them based on an arguments boilerplate.
2662 2793
2663 // This calls Copy directly rather than using Heap::AllocateRaw so we 2794 // This calls Copy directly rather than using Heap::AllocateRaw so we
2664 // duplicate the check here. 2795 // duplicate the check here.
2665 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 2796 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
2666 2797
2667 JSObject* boilerplate = 2798 JSObject* boilerplate =
2668 Top::context()->global_context()->arguments_boilerplate(); 2799 Top::context()->global_context()->arguments_boilerplate();
2669 2800
2670 // Check that the size of the boilerplate matches our 2801 // Check that the size of the boilerplate matches our
2671 // expectations. The ArgumentsAccessStub::GenerateNewObject relies 2802 // expectations. The ArgumentsAccessStub::GenerateNewObject relies
2672 // on the size being a known constant. 2803 // on the size being a known constant.
2673 ASSERT(kArgumentsObjectSize == boilerplate->map()->instance_size()); 2804 ASSERT(kArgumentsObjectSize == boilerplate->map()->instance_size());
2674 2805
2675 // Do the allocation. 2806 // Do the allocation.
2676 Object* result = 2807 Object* result;
2677 AllocateRaw(kArgumentsObjectSize, NEW_SPACE, OLD_POINTER_SPACE); 2808 { MaybeObject* maybe_result =
2678 if (result->IsFailure()) return result; 2809 AllocateRaw(kArgumentsObjectSize, NEW_SPACE, OLD_POINTER_SPACE);
2810 if (!maybe_result->ToObject(&result)) return maybe_result;
2811 }
2679 2812
2680 // Copy the content. The arguments boilerplate doesn't have any 2813 // Copy the content. The arguments boilerplate doesn't have any
2681 // fields that point to new space so it's safe to skip the write 2814 // fields that point to new space so it's safe to skip the write
2682 // barrier here. 2815 // barrier here.
2683 CopyBlock(HeapObject::cast(result)->address(), 2816 CopyBlock(HeapObject::cast(result)->address(),
2684 boilerplate->address(), 2817 boilerplate->address(),
2685 kArgumentsObjectSize); 2818 kArgumentsObjectSize);
2686 2819
2687 // Set the two properties. 2820 // Set the two properties.
2688 JSObject::cast(result)->InObjectPropertyAtPut(arguments_callee_index, 2821 JSObject::cast(result)->InObjectPropertyAtPut(arguments_callee_index,
(...skipping 17 matching lines...)
2706 for (int i = 1; i != count; i++) { 2839 for (int i = 1; i != count; i++) {
2707 String* current_key = descriptors->GetKey(i); 2840 String* current_key = descriptors->GetKey(i);
2708 if (prev_key == current_key) return true; 2841 if (prev_key == current_key) return true;
2709 prev_key = current_key; 2842 prev_key = current_key;
2710 } 2843 }
2711 } 2844 }
2712 return false; 2845 return false;
2713 } 2846 }
2714 2847
2715 2848
2716 Object* Heap::AllocateInitialMap(JSFunction* fun) { 2849 MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
2717 ASSERT(!fun->has_initial_map()); 2850 ASSERT(!fun->has_initial_map());
2718 2851
2719 // First create a new map with the size and number of in-object properties 2852 // First create a new map with the size and number of in-object properties
2720 // suggested by the function. 2853 // suggested by the function.
2721 int instance_size = fun->shared()->CalculateInstanceSize(); 2854 int instance_size = fun->shared()->CalculateInstanceSize();
2722 int in_object_properties = fun->shared()->CalculateInObjectProperties(); 2855 int in_object_properties = fun->shared()->CalculateInObjectProperties();
2723 Object* map_obj = Heap::AllocateMap(JS_OBJECT_TYPE, instance_size); 2856 Object* map_obj;
2724 if (map_obj->IsFailure()) return map_obj; 2857 { MaybeObject* maybe_map_obj =
2858 Heap::AllocateMap(JS_OBJECT_TYPE, instance_size);
2859 if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;
2860 }
2725 2861
2726 // Fetch or allocate prototype. 2862 // Fetch or allocate prototype.
2727 Object* prototype; 2863 Object* prototype;
2728 if (fun->has_instance_prototype()) { 2864 if (fun->has_instance_prototype()) {
2729 prototype = fun->instance_prototype(); 2865 prototype = fun->instance_prototype();
2730 } else { 2866 } else {
2731 prototype = AllocateFunctionPrototype(fun); 2867 { MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun);
2732 if (prototype->IsFailure()) return prototype; 2868 if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
2869 }
2733 } 2870 }
2734 Map* map = Map::cast(map_obj); 2871 Map* map = Map::cast(map_obj);
2735 map->set_inobject_properties(in_object_properties); 2872 map->set_inobject_properties(in_object_properties);
2736 map->set_unused_property_fields(in_object_properties); 2873 map->set_unused_property_fields(in_object_properties);
2737 map->set_prototype(prototype); 2874 map->set_prototype(prototype);
2738 ASSERT(map->has_fast_elements()); 2875 ASSERT(map->has_fast_elements());
2739 2876
2740 // If the function has only simple this property assignments add 2877 // If the function has only simple this property assignments add
2741 // field descriptors for these to the initial map as the object 2878 // field descriptors for these to the initial map as the object
2742 // cannot be constructed without having these properties. Guard by 2879 // cannot be constructed without having these properties. Guard by
2743 // the inline_new flag so we only change the map if we generate a 2880 // the inline_new flag so we only change the map if we generate a
2744 // specialized construct stub. 2881 // specialized construct stub.
2745 ASSERT(in_object_properties <= Map::kMaxPreAllocatedPropertyFields); 2882 ASSERT(in_object_properties <= Map::kMaxPreAllocatedPropertyFields);
2746 if (fun->shared()->CanGenerateInlineConstructor(prototype)) { 2883 if (fun->shared()->CanGenerateInlineConstructor(prototype)) {
2747 int count = fun->shared()->this_property_assignments_count(); 2884 int count = fun->shared()->this_property_assignments_count();
2748 if (count > in_object_properties) { 2885 if (count > in_object_properties) {
2749 // Inline constructor can only handle inobject properties. 2886 // Inline constructor can only handle inobject properties.
2750 fun->shared()->ForbidInlineConstructor(); 2887 fun->shared()->ForbidInlineConstructor();
2751 } else { 2888 } else {
2752 Object* descriptors_obj = DescriptorArray::Allocate(count); 2889 Object* descriptors_obj;
2753 if (descriptors_obj->IsFailure()) return descriptors_obj; 2890 { MaybeObject* maybe_descriptors_obj = DescriptorArray::Allocate(count);
2891 if (!maybe_descriptors_obj->ToObject(&descriptors_obj)) {
2892 return maybe_descriptors_obj;
2893 }
2894 }
2754 DescriptorArray* descriptors = DescriptorArray::cast(descriptors_obj); 2895 DescriptorArray* descriptors = DescriptorArray::cast(descriptors_obj);
2755 for (int i = 0; i < count; i++) { 2896 for (int i = 0; i < count; i++) {
2756 String* name = fun->shared()->GetThisPropertyAssignmentName(i); 2897 String* name = fun->shared()->GetThisPropertyAssignmentName(i);
2757 ASSERT(name->IsSymbol()); 2898 ASSERT(name->IsSymbol());
2758 FieldDescriptor field(name, i, NONE); 2899 FieldDescriptor field(name, i, NONE);
2759 field.SetEnumerationIndex(i); 2900 field.SetEnumerationIndex(i);
2760 descriptors->Set(i, &field); 2901 descriptors->Set(i, &field);
2761 } 2902 }
2762 descriptors->SetNextEnumerationIndex(count); 2903 descriptors->SetNextEnumerationIndex(count);
2763 descriptors->SortUnchecked(); 2904 descriptors->SortUnchecked();
(...skipping 40 matching lines...)
2804 // We might want to shrink the object later. 2945 // We might want to shrink the object later.
2805 ASSERT(obj->GetInternalFieldCount() == 0); 2946 ASSERT(obj->GetInternalFieldCount() == 0);
2806 filler = Heap::one_pointer_filler_map(); 2947 filler = Heap::one_pointer_filler_map();
2807 } else { 2948 } else {
2808 filler = Heap::undefined_value(); 2949 filler = Heap::undefined_value();
2809 } 2950 }
2810 obj->InitializeBody(map->instance_size(), filler); 2951 obj->InitializeBody(map->instance_size(), filler);
2811 } 2952 }
2812 2953
2813 2954
2814 Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) { 2955 MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
2815 // JSFunctions should be allocated using AllocateFunction to be 2956 // JSFunctions should be allocated using AllocateFunction to be
2816 // properly initialized. 2957 // properly initialized.
2817 ASSERT(map->instance_type() != JS_FUNCTION_TYPE); 2958 ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
2818 2959
2819 // Both types of global objects should be allocated using 2960 // Both types of global objects should be allocated using
2820 // AllocateGlobalObject to be properly initialized. 2961 // AllocateGlobalObject to be properly initialized.
2821 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); 2962 ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
2822 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE); 2963 ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
2823 2964
2824 // Allocate the backing storage for the properties. 2965 // Allocate the backing storage for the properties.
2825 int prop_size = 2966 int prop_size =
2826 map->pre_allocated_property_fields() + 2967 map->pre_allocated_property_fields() +
2827 map->unused_property_fields() - 2968 map->unused_property_fields() -
2828 map->inobject_properties(); 2969 map->inobject_properties();
2829 ASSERT(prop_size >= 0); 2970 ASSERT(prop_size >= 0);
2830 Object* properties = AllocateFixedArray(prop_size, pretenure); 2971 Object* properties;
2831 if (properties->IsFailure()) return properties; 2972 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure);
2973 if (!maybe_properties->ToObject(&properties)) return maybe_properties;
2974 }
2832 2975
2833 // Allocate the JSObject. 2976 // Allocate the JSObject.
2834 AllocationSpace space = 2977 AllocationSpace space =
2835 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; 2978 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
2836 if (map->instance_size() > MaxObjectSizeInPagedSpace()) space = LO_SPACE; 2979 if (map->instance_size() > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
2837 Object* obj = Allocate(map, space); 2980 Object* obj;
2838 if (obj->IsFailure()) return obj; 2981 { MaybeObject* maybe_obj = Allocate(map, space);
2982 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2983 }
2839 2984
2840 // Initialize the JSObject. 2985 // Initialize the JSObject.
2841 InitializeJSObjectFromMap(JSObject::cast(obj), 2986 InitializeJSObjectFromMap(JSObject::cast(obj),
2842 FixedArray::cast(properties), 2987 FixedArray::cast(properties),
2843 map); 2988 map);
2844 ASSERT(JSObject::cast(obj)->HasFastElements()); 2989 ASSERT(JSObject::cast(obj)->HasFastElements());
2845 return obj; 2990 return obj;
2846 } 2991 }
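
The recurring change in this patch is the switch from raw Object* returns to MaybeObject* with the unwrap-or-propagate idiom `{ MaybeObject* maybe_x = ...; if (!maybe_x->ToObject(&x)) return maybe_x; }` seen in AllocateJSObjectFromMap above. A minimal standalone sketch of that control flow, using simplified stand-in types (Value, MaybeValue, AllocateRawStandIn are illustrative, not the real V8 classes):

#include <cstdio>

// Simplified stand-ins for v8::internal Object / MaybeObject; these are
// illustrative types, not the classes touched by the patch.
struct Value { int payload; };

struct MaybeValue {
  Value* value;     // set on success
  bool is_failure;  // true when the allocation signalled a failure/retry

  // Mirrors MaybeObject::ToObject(): false on failure, otherwise the
  // unwrapped value is stored through 'out'.
  bool ToValue(Value** out) {
    if (is_failure) return false;
    *out = value;
    return true;
  }
};

static Value heap_slot = { 42 };

// Stands in for a low-level allocator such as Heap::AllocateRaw.
MaybeValue AllocateRawStandIn(bool fail) {
  MaybeValue result;
  result.value = fail ? NULL : &heap_slot;
  result.is_failure = fail;
  return result;
}

// The unwrap-or-propagate pattern the patch introduces at every call site.
MaybeValue AllocateSomething(bool fail) {
  Value* result;
  { MaybeValue maybe_result = AllocateRawStandIn(fail);
    if (!maybe_result.ToValue(&result)) return maybe_result;  // propagate
  }
  // ... initialization of 'result' would go here ...
  MaybeValue ok = { result, false };
  return ok;
}

int main() {
  Value* v;
  if (AllocateSomething(false).ToValue(&v)) printf("ok: %d\n", v->payload);
  if (!AllocateSomething(true).ToValue(&v)) printf("failure propagated\n");
  return 0;
}
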
2847 2992
2848 2993
2849 Object* Heap::AllocateJSObject(JSFunction* constructor, 2994 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
2850 PretenureFlag pretenure) { 2995 PretenureFlag pretenure) {
2851 // Allocate the initial map if absent. 2996 // Allocate the initial map if absent.
2852 if (!constructor->has_initial_map()) { 2997 if (!constructor->has_initial_map()) {
2853 Object* initial_map = AllocateInitialMap(constructor); 2998 Object* initial_map;
2854 if (initial_map->IsFailure()) return initial_map; 2999 { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor);
3000 if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map;
3001 }
2855 constructor->set_initial_map(Map::cast(initial_map)); 3002 constructor->set_initial_map(Map::cast(initial_map));
2856 Map::cast(initial_map)->set_constructor(constructor); 3003 Map::cast(initial_map)->set_constructor(constructor);
2857 } 3004 }
2858 // Allocate the object based on the constructor's initial map. 3005 // Allocate the object based on the constructor's initial map.
2859 Object* result = 3006 MaybeObject* result =
2860 AllocateJSObjectFromMap(constructor->initial_map(), pretenure); 3007 AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
3008 #ifdef DEBUG
2861 // Make sure result is NOT a global object if valid. 3009 // Make sure result is NOT a global object if valid.
2862 ASSERT(result->IsFailure() || !result->IsGlobalObject()); 3010 Object* non_failure;
3011 ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject());
3012 #endif
2863 return result; 3013 return result;
2864 } 3014 }
2865 3015
2866 3016
2867 Object* Heap::AllocateGlobalObject(JSFunction* constructor) { 3017 MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
2868 ASSERT(constructor->has_initial_map()); 3018 ASSERT(constructor->has_initial_map());
2869 Map* map = constructor->initial_map(); 3019 Map* map = constructor->initial_map();
2870 3020
2871 // Make sure no field properties are described in the initial map. 3021 // Make sure no field properties are described in the initial map.
2872 // This guarantees us that normalizing the properties does not 3022 // This guarantees us that normalizing the properties does not
2873 // require us to change property values to JSGlobalPropertyCells. 3023 // require us to change property values to JSGlobalPropertyCells.
2874 ASSERT(map->NextFreePropertyIndex() == 0); 3024 ASSERT(map->NextFreePropertyIndex() == 0);
2875 3025
2876 // Make sure we don't have a ton of pre-allocated slots in the 3026 // Make sure we don't have a ton of pre-allocated slots in the
2877 // global objects. They will be unused once we normalize the object. 3027 // global objects. They will be unused once we normalize the object.
2878 ASSERT(map->unused_property_fields() == 0); 3028 ASSERT(map->unused_property_fields() == 0);
2879 ASSERT(map->inobject_properties() == 0); 3029 ASSERT(map->inobject_properties() == 0);
2880 3030
2881 // Initial size of the backing store to avoid resize of the storage during 3031 // Initial size of the backing store to avoid resize of the storage during
2882 // bootstrapping. The size differs between the JS global object and the 3032 // bootstrapping. The size differs between the JS global object and the
2883 // builtins object. 3033 // builtins object.
2884 int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512; 3034 int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;
2885 3035
2886 // Allocate a dictionary object for backing storage. 3036 // Allocate a dictionary object for backing storage.
2887 Object* obj = 3037 Object* obj;
2888 StringDictionary::Allocate( 3038 { MaybeObject* maybe_obj =
2889 map->NumberOfDescribedProperties() * 2 + initial_size); 3039 StringDictionary::Allocate(
2890 if (obj->IsFailure()) return obj; 3040 map->NumberOfDescribedProperties() * 2 + initial_size);
3041 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3042 }
2891 StringDictionary* dictionary = StringDictionary::cast(obj); 3043 StringDictionary* dictionary = StringDictionary::cast(obj);
2892 3044
2893 // The global object might be created from an object template with accessors. 3045 // The global object might be created from an object template with accessors.
2894 // Fill these accessors into the dictionary. 3046 // Fill these accessors into the dictionary.
2895 DescriptorArray* descs = map->instance_descriptors(); 3047 DescriptorArray* descs = map->instance_descriptors();
2896 for (int i = 0; i < descs->number_of_descriptors(); i++) { 3048 for (int i = 0; i < descs->number_of_descriptors(); i++) {
2897 PropertyDetails details = descs->GetDetails(i); 3049 PropertyDetails details = descs->GetDetails(i);
2898 ASSERT(details.type() == CALLBACKS); // Only accessors are expected. 3050 ASSERT(details.type() == CALLBACKS); // Only accessors are expected.
2899 PropertyDetails d = 3051 PropertyDetails d =
2900 PropertyDetails(details.attributes(), CALLBACKS, details.index()); 3052 PropertyDetails(details.attributes(), CALLBACKS, details.index());
2901 Object* value = descs->GetCallbacksObject(i); 3053 Object* value = descs->GetCallbacksObject(i);
2902 value = Heap::AllocateJSGlobalPropertyCell(value); 3054 { MaybeObject* maybe_value = Heap::AllocateJSGlobalPropertyCell(value);
2903 if (value->IsFailure()) return value; 3055 if (!maybe_value->ToObject(&value)) return maybe_value;
3056 }
2904 3057
2905 Object* result = dictionary->Add(descs->GetKey(i), value, d); 3058 Object* result;
2906 if (result->IsFailure()) return result; 3059 { MaybeObject* maybe_result = dictionary->Add(descs->GetKey(i), value, d);
3060 if (!maybe_result->ToObject(&result)) return maybe_result;
3061 }
2907 dictionary = StringDictionary::cast(result); 3062 dictionary = StringDictionary::cast(result);
2908 } 3063 }
2909 3064
2910 // Allocate the global object and initialize it with the backing store. 3065 // Allocate the global object and initialize it with the backing store.
2911 obj = Allocate(map, OLD_POINTER_SPACE); 3066 { MaybeObject* maybe_obj = Allocate(map, OLD_POINTER_SPACE);
2912 if (obj->IsFailure()) return obj; 3067 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3068 }
2913 JSObject* global = JSObject::cast(obj); 3069 JSObject* global = JSObject::cast(obj);
2914 InitializeJSObjectFromMap(global, dictionary, map); 3070 InitializeJSObjectFromMap(global, dictionary, map);
2915 3071
2916 // Create a new map for the global object. 3072 // Create a new map for the global object.
2917 obj = map->CopyDropDescriptors(); 3073 { MaybeObject* maybe_obj = map->CopyDropDescriptors();
2918 if (obj->IsFailure()) return obj; 3074 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3075 }
2919 Map* new_map = Map::cast(obj); 3076 Map* new_map = Map::cast(obj);
2920 3077
2921 // Setup the global object as a normalized object. 3078 // Setup the global object as a normalized object.
2922 global->set_map(new_map); 3079 global->set_map(new_map);
2923 global->map()->set_instance_descriptors(Heap::empty_descriptor_array()); 3080 global->map()->set_instance_descriptors(Heap::empty_descriptor_array());
2924 global->set_properties(dictionary); 3081 global->set_properties(dictionary);
2925 3082
2926 // Make sure result is a global object with properties in dictionary. 3083 // Make sure result is a global object with properties in dictionary.
2927 ASSERT(global->IsGlobalObject()); 3084 ASSERT(global->IsGlobalObject());
2928 ASSERT(!global->HasFastProperties()); 3085 ASSERT(!global->HasFastProperties());
2929 return global; 3086 return global;
2930 } 3087 }
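
AllocateGlobalObject sizes the property dictionary up front (64 slots for the JS global object, 512 for the builtins object, plus twice the number of described properties) so the storage never needs to be resized during bootstrapping. A small standalone sketch of that sizing rule; the function name and parameters are illustrative only, since the real code reads these values from the constructor's initial map:

#include <cstdio>

// Illustrative sketch of the dictionary sizing in AllocateGlobalObject.
int InitialDictionaryCapacity(bool is_js_global_object,
                              int number_of_described_properties) {
  // 64 slots for the JS global object, 512 for the builtins object.
  int initial_size = is_js_global_object ? 64 : 512;
  // Room for every accessor copied from the template, with headroom.
  return number_of_described_properties * 2 + initial_size;
}

int main() {
  printf("%d\n", InitialDictionaryCapacity(true, 10));   // JS global object
  printf("%d\n", InitialDictionaryCapacity(false, 10));  // builtins object
  return 0;
}
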
2931 3088
2932 3089
2933 Object* Heap::CopyJSObject(JSObject* source) { 3090 MaybeObject* Heap::CopyJSObject(JSObject* source) {
2934 // Never used to copy functions. If functions need to be copied we 3091 // Never used to copy functions. If functions need to be copied we
2935 // have to be careful to clear the literals array. 3092 // have to be careful to clear the literals array.
2936 ASSERT(!source->IsJSFunction()); 3093 ASSERT(!source->IsJSFunction());
2937 3094
2938 // Make the clone. 3095 // Make the clone.
2939 Map* map = source->map(); 3096 Map* map = source->map();
2940 int object_size = map->instance_size(); 3097 int object_size = map->instance_size();
2941 Object* clone; 3098 Object* clone;
2942 3099
2943 // If we're forced to always allocate, we use the general allocation 3100 // If we're forced to always allocate, we use the general allocation
2944 // functions which may leave us with an object in old space. 3101 // functions which may leave us with an object in old space.
2945 if (always_allocate()) { 3102 if (always_allocate()) {
2946 clone = AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); 3103 { MaybeObject* maybe_clone =
2947 if (clone->IsFailure()) return clone; 3104 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
3105 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
3106 }
2948 Address clone_address = HeapObject::cast(clone)->address(); 3107 Address clone_address = HeapObject::cast(clone)->address();
2949 CopyBlock(clone_address, 3108 CopyBlock(clone_address,
2950 source->address(), 3109 source->address(),
2951 object_size); 3110 object_size);
2952 // Update write barrier for all fields that lie beyond the header. 3111 // Update write barrier for all fields that lie beyond the header.
2953 RecordWrites(clone_address, 3112 RecordWrites(clone_address,
2954 JSObject::kHeaderSize, 3113 JSObject::kHeaderSize,
2955 (object_size - JSObject::kHeaderSize) / kPointerSize); 3114 (object_size - JSObject::kHeaderSize) / kPointerSize);
2956 } else { 3115 } else {
2957 clone = new_space_.AllocateRaw(object_size); 3116 { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
2958 if (clone->IsFailure()) return clone; 3117 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
3118 }
2959 ASSERT(Heap::InNewSpace(clone)); 3119 ASSERT(Heap::InNewSpace(clone));
2960 // Since we know the clone is allocated in new space, we can copy 3120 // Since we know the clone is allocated in new space, we can copy
2961 // the contents without worrying about updating the write barrier. 3121 // the contents without worrying about updating the write barrier.
2962 CopyBlock(HeapObject::cast(clone)->address(), 3122 CopyBlock(HeapObject::cast(clone)->address(),
2963 source->address(), 3123 source->address(),
2964 object_size); 3124 object_size);
2965 } 3125 }
2966 3126
2967 FixedArray* elements = FixedArray::cast(source->elements()); 3127 FixedArray* elements = FixedArray::cast(source->elements());
2968 FixedArray* properties = FixedArray::cast(source->properties()); 3128 FixedArray* properties = FixedArray::cast(source->properties());
2969 // Update elements if necessary. 3129 // Update elements if necessary.
2970 if (elements->length() > 0) { 3130 if (elements->length() > 0) {
2971 Object* elem = 3131 Object* elem;
2972 (elements->map() == fixed_cow_array_map()) ? 3132 { MaybeObject* maybe_elem =
2973 elements : CopyFixedArray(elements); 3133 (elements->map() == fixed_cow_array_map()) ?
2974 if (elem->IsFailure()) return elem; 3134 elements : CopyFixedArray(elements);
3135 if (!maybe_elem->ToObject(&elem)) return maybe_elem;
3136 }
2975 JSObject::cast(clone)->set_elements(FixedArray::cast(elem)); 3137 JSObject::cast(clone)->set_elements(FixedArray::cast(elem));
2976 } 3138 }
2977 // Update properties if necessary. 3139 // Update properties if necessary.
2978 if (properties->length() > 0) { 3140 if (properties->length() > 0) {
2979 Object* prop = CopyFixedArray(properties); 3141 Object* prop;
2980 if (prop->IsFailure()) return prop; 3142 { MaybeObject* maybe_prop = CopyFixedArray(properties);
3143 if (!maybe_prop->ToObject(&prop)) return maybe_prop;
3144 }
2981 JSObject::cast(clone)->set_properties(FixedArray::cast(prop)); 3145 JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
2982 } 3146 }
2983 // Return the new clone. 3147 // Return the new clone.
2984 #ifdef ENABLE_LOGGING_AND_PROFILING 3148 #ifdef ENABLE_LOGGING_AND_PROFILING
2985 ProducerHeapProfile::RecordJSObjectAllocation(clone); 3149 ProducerHeapProfile::RecordJSObjectAllocation(clone);
2986 #endif 3150 #endif
2987 return clone; 3151 return clone;
2988 } 3152 }
2989 3153
2990 3154
2991 Object* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor, 3155 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
2992 JSGlobalProxy* object) { 3156 JSGlobalProxy* object) {
2993 ASSERT(constructor->has_initial_map()); 3157 ASSERT(constructor->has_initial_map());
2994 Map* map = constructor->initial_map(); 3158 Map* map = constructor->initial_map();
2995 3159
2996 // Check that the already allocated object has the same size and type as 3160 // Check that the already allocated object has the same size and type as
2997 // objects allocated using the constructor. 3161 // objects allocated using the constructor.
2998 ASSERT(map->instance_size() == object->map()->instance_size()); 3162 ASSERT(map->instance_size() == object->map()->instance_size());
2999 ASSERT(map->instance_type() == object->map()->instance_type()); 3163 ASSERT(map->instance_type() == object->map()->instance_type());
3000 3164
3001 // Allocate the backing storage for the properties. 3165 // Allocate the backing storage for the properties.
3002 int prop_size = map->unused_property_fields() - map->inobject_properties(); 3166 int prop_size = map->unused_property_fields() - map->inobject_properties();
3003 Object* properties = AllocateFixedArray(prop_size, TENURED); 3167 Object* properties;
3004 if (properties->IsFailure()) return properties; 3168 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, TENURED);
3169 if (!maybe_properties->ToObject(&properties)) return maybe_properties;
3170 }
3005 3171
3006 // Reset the map for the object. 3172 // Reset the map for the object.
3007 object->set_map(constructor->initial_map()); 3173 object->set_map(constructor->initial_map());
3008 3174
3009 // Reinitialize the object from the constructor map. 3175 // Reinitialize the object from the constructor map.
3010 InitializeJSObjectFromMap(object, FixedArray::cast(properties), map); 3176 InitializeJSObjectFromMap(object, FixedArray::cast(properties), map);
3011 return object; 3177 return object;
3012 } 3178 }
3013 3179
3014 3180
3015 Object* Heap::AllocateStringFromAscii(Vector<const char> string, 3181 MaybeObject* Heap::AllocateStringFromAscii(Vector<const char> string,
3016 PretenureFlag pretenure) { 3182 PretenureFlag pretenure) {
3017 Object* result = AllocateRawAsciiString(string.length(), pretenure); 3183 Object* result;
3018 if (result->IsFailure()) return result; 3184 { MaybeObject* maybe_result =
3185 AllocateRawAsciiString(string.length(), pretenure);
3186 if (!maybe_result->ToObject(&result)) return maybe_result;
3187 }
3019 3188
3020 // Copy the characters into the new object. 3189 // Copy the characters into the new object.
3021 SeqAsciiString* string_result = SeqAsciiString::cast(result); 3190 SeqAsciiString* string_result = SeqAsciiString::cast(result);
3022 for (int i = 0; i < string.length(); i++) { 3191 for (int i = 0; i < string.length(); i++) {
3023 string_result->SeqAsciiStringSet(i, string[i]); 3192 string_result->SeqAsciiStringSet(i, string[i]);
3024 } 3193 }
3025 return result; 3194 return result;
3026 } 3195 }
3027 3196
3028 3197
3029 Object* Heap::AllocateStringFromUtf8(Vector<const char> string, 3198 MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> string,
3030 PretenureFlag pretenure) { 3199 PretenureFlag pretenure) {
3031 // V8 only supports characters in the Basic Multilingual Plane. 3200 // V8 only supports characters in the Basic Multilingual Plane.
3032 const uc32 kMaxSupportedChar = 0xFFFF; 3201 const uc32 kMaxSupportedChar = 0xFFFF;
3033 // Count the number of characters in the UTF-8 string and check if 3202 // Count the number of characters in the UTF-8 string and check if
3034 // it is an ASCII string. 3203 // it is an ASCII string.
3035 Access<Scanner::Utf8Decoder> decoder(Scanner::utf8_decoder()); 3204 Access<Scanner::Utf8Decoder> decoder(Scanner::utf8_decoder());
3036 decoder->Reset(string.start(), string.length()); 3205 decoder->Reset(string.start(), string.length());
3037 int chars = 0; 3206 int chars = 0;
3038 bool is_ascii = true; 3207 bool is_ascii = true;
3039 while (decoder->has_more()) { 3208 while (decoder->has_more()) {
3040 uc32 r = decoder->GetNext(); 3209 uc32 r = decoder->GetNext();
3041 if (r > String::kMaxAsciiCharCode) is_ascii = false; 3210 if (r > String::kMaxAsciiCharCode) is_ascii = false;
3042 chars++; 3211 chars++;
3043 } 3212 }
3044 3213
3045 // If the string is ascii, we do not need to convert the characters 3214 // If the string is ascii, we do not need to convert the characters
3046 // since UTF8 is backwards compatible with ascii. 3215 // since UTF8 is backwards compatible with ascii.
3047 if (is_ascii) return AllocateStringFromAscii(string, pretenure); 3216 if (is_ascii) return AllocateStringFromAscii(string, pretenure);
3048 3217
3049 Object* result = AllocateRawTwoByteString(chars, pretenure); 3218 Object* result;
3050 if (result->IsFailure()) return result; 3219 { MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure);
3220 if (!maybe_result->ToObject(&result)) return maybe_result;
3221 }
3051 3222
3052 // Convert and copy the characters into the new object. 3223 // Convert and copy the characters into the new object.
3053 String* string_result = String::cast(result); 3224 String* string_result = String::cast(result);
3054 decoder->Reset(string.start(), string.length()); 3225 decoder->Reset(string.start(), string.length());
3055 for (int i = 0; i < chars; i++) { 3226 for (int i = 0; i < chars; i++) {
3056 uc32 r = decoder->GetNext(); 3227 uc32 r = decoder->GetNext();
3057 if (r > kMaxSupportedChar) { r = unibrow::Utf8::kBadChar; } 3228 if (r > kMaxSupportedChar) { r = unibrow::Utf8::kBadChar; }
3058 string_result->Set(i, r); 3229 string_result->Set(i, r);
3059 } 3230 }
3060 return result; 3231 return result;
3061 } 3232 }
3062 3233
3063 3234
3064 Object* Heap::AllocateStringFromTwoByte(Vector<const uc16> string, 3235 MaybeObject* Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
3065 PretenureFlag pretenure) { 3236 PretenureFlag pretenure) {
3066 // Check if the string is an ASCII string. 3237 // Check if the string is an ASCII string.
3067 int i = 0; 3238 int i = 0;
3068 while (i < string.length() && string[i] <= String::kMaxAsciiCharCode) i++; 3239 while (i < string.length() && string[i] <= String::kMaxAsciiCharCode) i++;
3069 3240
3241 MaybeObject* maybe_result;
3242 if (i == string.length()) { // It's an ASCII string.
3243 maybe_result = AllocateRawAsciiString(string.length(), pretenure);
3244 } else { // It's not an ASCII string.
3245 maybe_result = AllocateRawTwoByteString(string.length(), pretenure);
3246 }
3070 Object* result; 3247 Object* result;
3071 if (i == string.length()) { // It's an ASCII string. 3248 if (!maybe_result->ToObject(&result)) return maybe_result;
3072 result = AllocateRawAsciiString(string.length(), pretenure);
3073 } else { // It's not an ASCII string.
3074 result = AllocateRawTwoByteString(string.length(), pretenure);
3075 }
3076 if (result->IsFailure()) return result;
3077 3249
3078 // Copy the characters into the new object, which may be either ASCII or 3250 // Copy the characters into the new object, which may be either ASCII or
3079 // UTF-16. 3251 // UTF-16.
3080 String* string_result = String::cast(result); 3252 String* string_result = String::cast(result);
3081 for (int i = 0; i < string.length(); i++) { 3253 for (int i = 0; i < string.length(); i++) {
3082 string_result->Set(i, string[i]); 3254 string_result->Set(i, string[i]);
3083 } 3255 }
3084 return result; 3256 return result;
3085 } 3257 }
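
AllocateStringFromTwoByte first scans the input and uses the more compact ASCII representation only when every code unit is within String::kMaxAsciiCharCode. A standalone sketch of that check; the 0x7F limit and the helper name are assumptions made for illustration:

#include <cstdint>
#include <cstdio>
#include <vector>

// Assumed value of String::kMaxAsciiCharCode (one-byte character limit).
const uint16_t kMaxAsciiCharCodeStandIn = 0x7F;

// Mirrors the scan at the top of AllocateStringFromTwoByte: returns true
// when the whole two-byte string can be stored as an ASCII string.
bool FitsInAsciiString(const std::vector<uint16_t>& chars) {
  size_t i = 0;
  while (i < chars.size() && chars[i] <= kMaxAsciiCharCodeStandIn) i++;
  return i == chars.size();
}

int main() {
  std::vector<uint16_t> ascii_only(2);
  ascii_only[0] = 'h'; ascii_only[1] = 'i';
  std::vector<uint16_t> with_wide_char(2);
  with_wide_char[0] = 'h'; with_wide_char[1] = 0x20AC;  // U+20AC EURO SIGN
  printf("%d %d\n", FitsInAsciiString(ascii_only) ? 1 : 0,
         FitsInAsciiString(with_wide_char) ? 1 : 0);
  return 0;
}
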
3086 3258
(...skipping 12 matching lines...)
3099 if (map == external_ascii_string_map()) return external_ascii_symbol_map(); 3271 if (map == external_ascii_string_map()) return external_ascii_symbol_map();
3100 if (map == external_string_with_ascii_data_map()) { 3272 if (map == external_string_with_ascii_data_map()) {
3101 return external_symbol_with_ascii_data_map(); 3273 return external_symbol_with_ascii_data_map();
3102 } 3274 }
3103 3275
3104 // No match found. 3276 // No match found.
3105 return NULL; 3277 return NULL;
3106 } 3278 }
3107 3279
3108 3280
3109 Object* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer, 3281 MaybeObject* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
3110 int chars, 3282 int chars,
3111 uint32_t hash_field) { 3283 uint32_t hash_field) {
3112 ASSERT(chars >= 0); 3284 ASSERT(chars >= 0);
3113 // Ensure that chars matches the number of characters in the buffer. 3285 // Ensure that chars matches the number of characters in the buffer.
3114 ASSERT(static_cast<unsigned>(chars) == buffer->Length()); 3286 ASSERT(static_cast<unsigned>(chars) == buffer->Length());
3115 // Determine whether the string is ascii. 3287 // Determine whether the string is ascii.
3116 bool is_ascii = true; 3288 bool is_ascii = true;
3117 while (buffer->has_more()) { 3289 while (buffer->has_more()) {
3118 if (buffer->GetNext() > unibrow::Utf8::kMaxOneByteChar) { 3290 if (buffer->GetNext() > unibrow::Utf8::kMaxOneByteChar) {
3119 is_ascii = false; 3291 is_ascii = false;
3120 break; 3292 break;
3121 } 3293 }
(...skipping 12 matching lines...)
3134 size = SeqAsciiString::SizeFor(chars); 3306 size = SeqAsciiString::SizeFor(chars);
3135 } else { 3307 } else {
3136 if (chars > SeqTwoByteString::kMaxLength) { 3308 if (chars > SeqTwoByteString::kMaxLength) {
3137 return Failure::OutOfMemoryException(); 3309 return Failure::OutOfMemoryException();
3138 } 3310 }
3139 map = symbol_map(); 3311 map = symbol_map();
3140 size = SeqTwoByteString::SizeFor(chars); 3312 size = SeqTwoByteString::SizeFor(chars);
3141 } 3313 }
3142 3314
3143 // Allocate string. 3315 // Allocate string.
3144 Object* result = (size > MaxObjectSizeInPagedSpace()) 3316 Object* result;
3145 ? lo_space_->AllocateRaw(size) 3317 { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
3146 : old_data_space_->AllocateRaw(size); 3318 ? lo_space_->AllocateRaw(size)
3147 if (result->IsFailure()) return result; 3319 : old_data_space_->AllocateRaw(size);
3320 if (!maybe_result->ToObject(&result)) return maybe_result;
3321 }
3148 3322
3149 reinterpret_cast<HeapObject*>(result)->set_map(map); 3323 reinterpret_cast<HeapObject*>(result)->set_map(map);
3150 // Set length and hash fields of the allocated string. 3324 // Set length and hash fields of the allocated string.
3151 String* answer = String::cast(result); 3325 String* answer = String::cast(result);
3152 answer->set_length(chars); 3326 answer->set_length(chars);
3153 answer->set_hash_field(hash_field); 3327 answer->set_hash_field(hash_field);
3154 3328
3155 ASSERT_EQ(size, answer->Size()); 3329 ASSERT_EQ(size, answer->Size());
3156 3330
3157 // Fill in the characters. 3331 // Fill in the characters.
3158 for (int i = 0; i < chars; i++) { 3332 for (int i = 0; i < chars; i++) {
3159 answer->Set(i, buffer->GetNext()); 3333 answer->Set(i, buffer->GetNext());
3160 } 3334 }
3161 return answer; 3335 return answer;
3162 } 3336 }
3163 3337
3164 3338
3165 Object* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) { 3339 MaybeObject* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
3166 if (length < 0 || length > SeqAsciiString::kMaxLength) { 3340 if (length < 0 || length > SeqAsciiString::kMaxLength) {
3167 return Failure::OutOfMemoryException(); 3341 return Failure::OutOfMemoryException();
3168 } 3342 }
3169 3343
3170 int size = SeqAsciiString::SizeFor(length); 3344 int size = SeqAsciiString::SizeFor(length);
3171 ASSERT(size <= SeqAsciiString::kMaxSize); 3345 ASSERT(size <= SeqAsciiString::kMaxSize);
3172 3346
3173 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 3347 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
3174 AllocationSpace retry_space = OLD_DATA_SPACE; 3348 AllocationSpace retry_space = OLD_DATA_SPACE;
3175 3349
3176 if (space == NEW_SPACE) { 3350 if (space == NEW_SPACE) {
3177 if (size > kMaxObjectSizeInNewSpace) { 3351 if (size > kMaxObjectSizeInNewSpace) {
3178 // Allocate in large object space, retry space will be ignored. 3352 // Allocate in large object space, retry space will be ignored.
3179 space = LO_SPACE; 3353 space = LO_SPACE;
3180 } else if (size > MaxObjectSizeInPagedSpace()) { 3354 } else if (size > MaxObjectSizeInPagedSpace()) {
3181 // Allocate in new space, retry in large object space. 3355 // Allocate in new space, retry in large object space.
3182 retry_space = LO_SPACE; 3356 retry_space = LO_SPACE;
3183 } 3357 }
3184 } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) { 3358 } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) {
3185 space = LO_SPACE; 3359 space = LO_SPACE;
3186 } 3360 }
3187 Object* result = AllocateRaw(size, space, retry_space); 3361 Object* result;
3188 if (result->IsFailure()) return result; 3362 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
3363 if (!maybe_result->ToObject(&result)) return maybe_result;
3364 }
3189 3365
3190 // Partially initialize the object. 3366 // Partially initialize the object.
3191 HeapObject::cast(result)->set_map(ascii_string_map()); 3367 HeapObject::cast(result)->set_map(ascii_string_map());
3192 String::cast(result)->set_length(length); 3368 String::cast(result)->set_length(length);
3193 String::cast(result)->set_hash_field(String::kEmptyHashField); 3369 String::cast(result)->set_hash_field(String::kEmptyHashField);
3194 ASSERT_EQ(size, HeapObject::cast(result)->Size()); 3370 ASSERT_EQ(size, HeapObject::cast(result)->Size());
3195 return result; 3371 return result;
3196 } 3372 }
3197 3373
3198 3374
3199 Object* Heap::AllocateRawTwoByteString(int length, PretenureFlag pretenure) { 3375 MaybeObject* Heap::AllocateRawTwoByteString(int length,
3376 PretenureFlag pretenure) {
3200 if (length < 0 || length > SeqTwoByteString::kMaxLength) { 3377 if (length < 0 || length > SeqTwoByteString::kMaxLength) {
3201 return Failure::OutOfMemoryException(); 3378 return Failure::OutOfMemoryException();
3202 } 3379 }
3203 int size = SeqTwoByteString::SizeFor(length); 3380 int size = SeqTwoByteString::SizeFor(length);
3204 ASSERT(size <= SeqTwoByteString::kMaxSize); 3381 ASSERT(size <= SeqTwoByteString::kMaxSize);
3205 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 3382 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
3206 AllocationSpace retry_space = OLD_DATA_SPACE; 3383 AllocationSpace retry_space = OLD_DATA_SPACE;
3207 3384
3208 if (space == NEW_SPACE) { 3385 if (space == NEW_SPACE) {
3209 if (size > kMaxObjectSizeInNewSpace) { 3386 if (size > kMaxObjectSizeInNewSpace) {
3210 // Allocate in large object space, retry space will be ignored. 3387 // Allocate in large object space, retry space will be ignored.
3211 space = LO_SPACE; 3388 space = LO_SPACE;
3212 } else if (size > MaxObjectSizeInPagedSpace()) { 3389 } else if (size > MaxObjectSizeInPagedSpace()) {
3213 // Allocate in new space, retry in large object space. 3390 // Allocate in new space, retry in large object space.
3214 retry_space = LO_SPACE; 3391 retry_space = LO_SPACE;
3215 } 3392 }
3216 } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) { 3393 } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) {
3217 space = LO_SPACE; 3394 space = LO_SPACE;
3218 } 3395 }
3219 Object* result = AllocateRaw(size, space, retry_space); 3396 Object* result;
3220 if (result->IsFailure()) return result; 3397 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
3398 if (!maybe_result->ToObject(&result)) return maybe_result;
3399 }
3221 3400
3222 // Partially initialize the object. 3401 // Partially initialize the object.
3223 HeapObject::cast(result)->set_map(string_map()); 3402 HeapObject::cast(result)->set_map(string_map());
3224 String::cast(result)->set_length(length); 3403 String::cast(result)->set_length(length);
3225 String::cast(result)->set_hash_field(String::kEmptyHashField); 3404 String::cast(result)->set_hash_field(String::kEmptyHashField);
3226 ASSERT_EQ(size, HeapObject::cast(result)->Size()); 3405 ASSERT_EQ(size, HeapObject::cast(result)->Size());
3227 return result; 3406 return result;
3228 } 3407 }
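
Both raw string allocators above choose the target space from the requested size and the pretenure flag before calling AllocateRaw. A standalone sketch of that decision; the size limits below are placeholder constants standing in for kMaxObjectSizeInNewSpace and Heap::MaxObjectSizeInPagedSpace(), not V8's actual values:

#include <cstdio>

enum AllocationSpace { NEW_SPACE, OLD_DATA_SPACE, LO_SPACE };
enum PretenureFlag { NOT_TENURED, TENURED };

// Placeholder limits; the real values depend on heap configuration.
const int kMaxObjectSizeInNewSpaceStandIn = 512 * 1024;
const int kMaxObjectSizeInPagedSpaceStandIn = 256 * 1024;

// Mirrors the space / retry-space selection in AllocateRawAsciiString and
// AllocateRawTwoByteString.
void ChooseSpaces(int size, PretenureFlag pretenure,
                  AllocationSpace* space, AllocationSpace* retry_space) {
  *space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
  *retry_space = OLD_DATA_SPACE;
  if (*space == NEW_SPACE) {
    if (size > kMaxObjectSizeInNewSpaceStandIn) {
      *space = LO_SPACE;        // too big for new space; retry space ignored
    } else if (size > kMaxObjectSizeInPagedSpaceStandIn) {
      *retry_space = LO_SPACE;  // allocate in new space, retry in LO space
    }
  } else if (size > kMaxObjectSizeInPagedSpaceStandIn) {
    *space = LO_SPACE;          // too big for old data space
  }
}

int main() {
  AllocationSpace space, retry;
  ChooseSpaces(300 * 1024, NOT_TENURED, &space, &retry);
  printf("space=%d retry=%d\n", space, retry);
  return 0;
}
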
3229 3408
3230 3409
3231 Object* Heap::AllocateEmptyFixedArray() { 3410 MaybeObject* Heap::AllocateEmptyFixedArray() {
3232 int size = FixedArray::SizeFor(0); 3411 int size = FixedArray::SizeFor(0);
3233 Object* result = AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); 3412 Object* result;
3234 if (result->IsFailure()) return result; 3413 { MaybeObject* maybe_result =
3414 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
3415 if (!maybe_result->ToObject(&result)) return maybe_result;
3416 }
3235 // Initialize the object. 3417 // Initialize the object.
3236 reinterpret_cast<FixedArray*>(result)->set_map(fixed_array_map()); 3418 reinterpret_cast<FixedArray*>(result)->set_map(fixed_array_map());
3237 reinterpret_cast<FixedArray*>(result)->set_length(0); 3419 reinterpret_cast<FixedArray*>(result)->set_length(0);
3238 return result; 3420 return result;
3239 } 3421 }
3240 3422
3241 3423
3242 Object* Heap::AllocateRawFixedArray(int length) { 3424 MaybeObject* Heap::AllocateRawFixedArray(int length) {
3243 if (length < 0 || length > FixedArray::kMaxLength) { 3425 if (length < 0 || length > FixedArray::kMaxLength) {
3244 return Failure::OutOfMemoryException(); 3426 return Failure::OutOfMemoryException();
3245 } 3427 }
3246 ASSERT(length > 0); 3428 ASSERT(length > 0);
3247 // Use the general function if we're forced to always allocate. 3429 // Use the general function if we're forced to always allocate.
3248 if (always_allocate()) return AllocateFixedArray(length, TENURED); 3430 if (always_allocate()) return AllocateFixedArray(length, TENURED);
3249 // Allocate the raw data for a fixed array. 3431 // Allocate the raw data for a fixed array.
3250 int size = FixedArray::SizeFor(length); 3432 int size = FixedArray::SizeFor(length);
3251 return size <= kMaxObjectSizeInNewSpace 3433 return size <= kMaxObjectSizeInNewSpace
3252 ? new_space_.AllocateRaw(size) 3434 ? new_space_.AllocateRaw(size)
3253 : lo_space_->AllocateRawFixedArray(size); 3435 : lo_space_->AllocateRawFixedArray(size);
3254 } 3436 }
3255 3437
3256 3438
3257 Object* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { 3439 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
3258 int len = src->length(); 3440 int len = src->length();
3259 Object* obj = AllocateRawFixedArray(len); 3441 Object* obj;
3260 if (obj->IsFailure()) return obj; 3442 { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
3443 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3444 }
3261 if (Heap::InNewSpace(obj)) { 3445 if (Heap::InNewSpace(obj)) {
3262 HeapObject* dst = HeapObject::cast(obj); 3446 HeapObject* dst = HeapObject::cast(obj);
3263 dst->set_map(map); 3447 dst->set_map(map);
3264 CopyBlock(dst->address() + kPointerSize, 3448 CopyBlock(dst->address() + kPointerSize,
3265 src->address() + kPointerSize, 3449 src->address() + kPointerSize,
3266 FixedArray::SizeFor(len) - kPointerSize); 3450 FixedArray::SizeFor(len) - kPointerSize);
3267 return obj; 3451 return obj;
3268 } 3452 }
3269 HeapObject::cast(obj)->set_map(map); 3453 HeapObject::cast(obj)->set_map(map);
3270 FixedArray* result = FixedArray::cast(obj); 3454 FixedArray* result = FixedArray::cast(obj);
3271 result->set_length(len); 3455 result->set_length(len);
3272 3456
3273 // Copy the content 3457 // Copy the content
3274 AssertNoAllocation no_gc; 3458 AssertNoAllocation no_gc;
3275 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); 3459 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
3276 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); 3460 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
3277 return result; 3461 return result;
3278 } 3462 }
3279 3463
3280 3464
3281 Object* Heap::AllocateFixedArray(int length) { 3465 MaybeObject* Heap::AllocateFixedArray(int length) {
3282 ASSERT(length >= 0); 3466 ASSERT(length >= 0);
3283 if (length == 0) return empty_fixed_array(); 3467 if (length == 0) return empty_fixed_array();
3284 Object* result = AllocateRawFixedArray(length); 3468 Object* result;
3285 if (!result->IsFailure()) { 3469 { MaybeObject* maybe_result = AllocateRawFixedArray(length);
3286 // Initialize header. 3470 if (!maybe_result->ToObject(&result)) return maybe_result;
3287 FixedArray* array = reinterpret_cast<FixedArray*>(result);
3288 array->set_map(fixed_array_map());
3289 array->set_length(length);
3290 // Initialize body.
3291 ASSERT(!Heap::InNewSpace(undefined_value()));
3292 MemsetPointer(array->data_start(), undefined_value(), length);
3293 } 3471 }
3472 // Initialize header.
3473 FixedArray* array = reinterpret_cast<FixedArray*>(result);
3474 array->set_map(fixed_array_map());
3475 array->set_length(length);
3476 // Initialize body.
3477 ASSERT(!Heap::InNewSpace(undefined_value()));
3478 MemsetPointer(array->data_start(), undefined_value(), length);
3294 return result; 3479 return result;
3295 } 3480 }
3296 3481
3297 3482
3298 Object* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { 3483 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
3299 if (length < 0 || length > FixedArray::kMaxLength) { 3484 if (length < 0 || length > FixedArray::kMaxLength) {
3300 return Failure::OutOfMemoryException(); 3485 return Failure::OutOfMemoryException();
3301 } 3486 }
3302 3487
3303 AllocationSpace space = 3488 AllocationSpace space =
3304 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; 3489 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
3305 int size = FixedArray::SizeFor(length); 3490 int size = FixedArray::SizeFor(length);
3306 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { 3491 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
3307 // Too big for new space. 3492 // Too big for new space.
3308 space = LO_SPACE; 3493 space = LO_SPACE;
3309 } else if (space == OLD_POINTER_SPACE && 3494 } else if (space == OLD_POINTER_SPACE &&
3310 size > MaxObjectSizeInPagedSpace()) { 3495 size > MaxObjectSizeInPagedSpace()) {
3311 // Too big for old pointer space. 3496 // Too big for old pointer space.
3312 space = LO_SPACE; 3497 space = LO_SPACE;
3313 } 3498 }
3314 3499
3315 AllocationSpace retry_space = 3500 AllocationSpace retry_space =
3316 (size <= MaxObjectSizeInPagedSpace()) ? OLD_POINTER_SPACE : LO_SPACE; 3501 (size <= MaxObjectSizeInPagedSpace()) ? OLD_POINTER_SPACE : LO_SPACE;
3317 3502
3318 return AllocateRaw(size, space, retry_space); 3503 return AllocateRaw(size, space, retry_space);
3319 } 3504 }
3320 3505
3321 3506
3322 static Object* AllocateFixedArrayWithFiller(int length, 3507 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
3323 PretenureFlag pretenure, 3508 int length,
3324 Object* filler) { 3509 PretenureFlag pretenure,
3510 Object* filler) {
3325 ASSERT(length >= 0); 3511 ASSERT(length >= 0);
3326 ASSERT(Heap::empty_fixed_array()->IsFixedArray()); 3512 ASSERT(Heap::empty_fixed_array()->IsFixedArray());
3327 if (length == 0) return Heap::empty_fixed_array(); 3513 if (length == 0) return Heap::empty_fixed_array();
3328 3514
3329 ASSERT(!Heap::InNewSpace(filler)); 3515 ASSERT(!Heap::InNewSpace(filler));
3330 Object* result = Heap::AllocateRawFixedArray(length, pretenure); 3516 Object* result;
3331 if (result->IsFailure()) return result; 3517 { MaybeObject* maybe_result = Heap::AllocateRawFixedArray(length, pretenure);
3518 if (!maybe_result->ToObject(&result)) return maybe_result;
3519 }
3332 3520
3333 HeapObject::cast(result)->set_map(Heap::fixed_array_map()); 3521 HeapObject::cast(result)->set_map(Heap::fixed_array_map());
3334 FixedArray* array = FixedArray::cast(result); 3522 FixedArray* array = FixedArray::cast(result);
3335 array->set_length(length); 3523 array->set_length(length);
3336 MemsetPointer(array->data_start(), filler, length); 3524 MemsetPointer(array->data_start(), filler, length);
3337 return array; 3525 return array;
3338 } 3526 }
3339 3527
3340 3528
3341 Object* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) { 3529 MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
3342 return AllocateFixedArrayWithFiller(length, pretenure, undefined_value()); 3530 return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
3343 } 3531 }
3344 3532
3345 3533
3346 Object* Heap::AllocateFixedArrayWithHoles(int length, PretenureFlag pretenure) { 3534 MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
3535 PretenureFlag pretenure) {
3347 return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value()); 3536 return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value());
3348 } 3537 }
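
AllocateFixedArray and AllocateFixedArrayWithHoles are thin wrappers around AllocateFixedArrayWithFiller; the only difference is the sentinel written into every slot (undefined_value versus the_hole_value). A minimal sketch of that shared fill step, using illustrative stand-in types rather than the real FixedArray and MemsetPointer:

#include <cstdio>
#include <vector>

// Illustrative sentinels standing in for undefined_value() / the_hole_value().
struct Sentinel { const char* name; };
static Sentinel kUndefinedStandIn = { "undefined" };
static Sentinel kTheHoleStandIn = { "the_hole" };

// Stand-in for a FixedArray body: a length plus a slot per element.
struct FixedArrayStandIn {
  int length;
  std::vector<Sentinel*> slots;
};

// Mirrors AllocateFixedArrayWithFiller: size the body once, then point
// every slot at the chosen filler (the MemsetPointer step).
FixedArrayStandIn AllocateWithFiller(int length, Sentinel* filler) {
  FixedArrayStandIn array;
  array.length = length;
  array.slots.assign(length, filler);
  return array;
}

int main() {
  FixedArrayStandIn plain = AllocateWithFiller(3, &kUndefinedStandIn);
  FixedArrayStandIn holey = AllocateWithFiller(3, &kTheHoleStandIn);
  printf("%s %s\n", plain.slots[0]->name, holey.slots[0]->name);
  return 0;
}
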
3349 3538
3350 3539
3351 Object* Heap::AllocateUninitializedFixedArray(int length) { 3540 MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {
3352 if (length == 0) return empty_fixed_array(); 3541 if (length == 0) return empty_fixed_array();
3353 3542
3354 Object* obj = AllocateRawFixedArray(length); 3543 Object* obj;
3355 if (obj->IsFailure()) return obj; 3544 { MaybeObject* maybe_obj = AllocateRawFixedArray(length);
3545 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3546 }
3356 3547
3357 reinterpret_cast<FixedArray*>(obj)->set_map(fixed_array_map()); 3548 reinterpret_cast<FixedArray*>(obj)->set_map(fixed_array_map());
3358 FixedArray::cast(obj)->set_length(length); 3549 FixedArray::cast(obj)->set_length(length);
3359 return obj; 3550 return obj;
3360 } 3551 }
3361 3552
3362 3553
3363 Object* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { 3554 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
3364 Object* result = Heap::AllocateFixedArray(length, pretenure); 3555 Object* result;
3365 if (result->IsFailure()) return result; 3556 { MaybeObject* maybe_result = Heap::AllocateFixedArray(length, pretenure);
3557 if (!maybe_result->ToObject(&result)) return maybe_result;
3558 }
3366 reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map()); 3559 reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map());
3367 ASSERT(result->IsHashTable()); 3560 ASSERT(result->IsHashTable());
3368 return result; 3561 return result;
3369 } 3562 }
3370 3563
3371 3564
3372 Object* Heap::AllocateGlobalContext() { 3565 MaybeObject* Heap::AllocateGlobalContext() {
3373 Object* result = Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS); 3566 Object* result;
3374 if (result->IsFailure()) return result; 3567 { MaybeObject* maybe_result =
3568 Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
3569 if (!maybe_result->ToObject(&result)) return maybe_result;
3570 }
3375 Context* context = reinterpret_cast<Context*>(result); 3571 Context* context = reinterpret_cast<Context*>(result);
3376 context->set_map(global_context_map()); 3572 context->set_map(global_context_map());
3377 ASSERT(context->IsGlobalContext()); 3573 ASSERT(context->IsGlobalContext());
3378 ASSERT(result->IsContext()); 3574 ASSERT(result->IsContext());
3379 return result; 3575 return result;
3380 } 3576 }
3381 3577
3382 3578
3383 Object* Heap::AllocateFunctionContext(int length, JSFunction* function) { 3579 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
3384 ASSERT(length >= Context::MIN_CONTEXT_SLOTS); 3580 ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
3385 Object* result = Heap::AllocateFixedArray(length); 3581 Object* result;
3386 if (result->IsFailure()) return result; 3582 { MaybeObject* maybe_result = Heap::AllocateFixedArray(length);
3583 if (!maybe_result->ToObject(&result)) return maybe_result;
3584 }
3387 Context* context = reinterpret_cast<Context*>(result); 3585 Context* context = reinterpret_cast<Context*>(result);
3388 context->set_map(context_map()); 3586 context->set_map(context_map());
3389 context->set_closure(function); 3587 context->set_closure(function);
3390 context->set_fcontext(context); 3588 context->set_fcontext(context);
3391 context->set_previous(NULL); 3589 context->set_previous(NULL);
3392 context->set_extension(NULL); 3590 context->set_extension(NULL);
3393 context->set_global(function->context()->global()); 3591 context->set_global(function->context()->global());
3394 ASSERT(!context->IsGlobalContext()); 3592 ASSERT(!context->IsGlobalContext());
3395 ASSERT(context->is_function_context()); 3593 ASSERT(context->is_function_context());
3396 ASSERT(result->IsContext()); 3594 ASSERT(result->IsContext());
3397 return result; 3595 return result;
3398 } 3596 }
3399 3597
3400 3598
3401 Object* Heap::AllocateWithContext(Context* previous, 3599 MaybeObject* Heap::AllocateWithContext(Context* previous,
3402 JSObject* extension, 3600 JSObject* extension,
3403 bool is_catch_context) { 3601 bool is_catch_context) {
3404 Object* result = Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS); 3602 Object* result;
3405 if (result->IsFailure()) return result; 3603 { MaybeObject* maybe_result =
3604 Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
3605 if (!maybe_result->ToObject(&result)) return maybe_result;
3606 }
3406 Context* context = reinterpret_cast<Context*>(result); 3607 Context* context = reinterpret_cast<Context*>(result);
3407 context->set_map(is_catch_context ? catch_context_map() : context_map()); 3608 context->set_map(is_catch_context ? catch_context_map() : context_map());
3408 context->set_closure(previous->closure()); 3609 context->set_closure(previous->closure());
3409 context->set_fcontext(previous->fcontext()); 3610 context->set_fcontext(previous->fcontext());
3410 context->set_previous(previous); 3611 context->set_previous(previous);
3411 context->set_extension(extension); 3612 context->set_extension(extension);
3412 context->set_global(previous->global()); 3613 context->set_global(previous->global());
3413 ASSERT(!context->IsGlobalContext()); 3614 ASSERT(!context->IsGlobalContext());
3414 ASSERT(!context->is_function_context()); 3615 ASSERT(!context->is_function_context());
3415 ASSERT(result->IsContext()); 3616 ASSERT(result->IsContext());
3416 return result; 3617 return result;
3417 } 3618 }
3418 3619
3419 3620
3420 Object* Heap::AllocateStruct(InstanceType type) { 3621 MaybeObject* Heap::AllocateStruct(InstanceType type) {
3421 Map* map; 3622 Map* map;
3422 switch (type) { 3623 switch (type) {
3423 #define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break; 3624 #define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break;
3424 STRUCT_LIST(MAKE_CASE) 3625 STRUCT_LIST(MAKE_CASE)
3425 #undef MAKE_CASE 3626 #undef MAKE_CASE
3426 default: 3627 default:
3427 UNREACHABLE(); 3628 UNREACHABLE();
3428 return Failure::InternalError(); 3629 return Failure::InternalError();
3429 } 3630 }
3430 int size = map->instance_size(); 3631 int size = map->instance_size();
3431 AllocationSpace space = 3632 AllocationSpace space =
3432 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE; 3633 (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
3433 Object* result = Heap::Allocate(map, space); 3634 Object* result;
3434 if (result->IsFailure()) return result; 3635 { MaybeObject* maybe_result = Heap::Allocate(map, space);
3636 if (!maybe_result->ToObject(&result)) return maybe_result;
3637 }
3435 Struct::cast(result)->InitializeBody(size); 3638 Struct::cast(result)->InitializeBody(size);
3436 return result; 3639 return result;
3437 } 3640 }
3438 3641
3439 3642
3440 bool Heap::IdleNotification() { 3643 bool Heap::IdleNotification() {
3441 static const int kIdlesBeforeScavenge = 4; 3644 static const int kIdlesBeforeScavenge = 4;
3442 static const int kIdlesBeforeMarkSweep = 7; 3645 static const int kIdlesBeforeMarkSweep = 7;
3443 static const int kIdlesBeforeMarkCompact = 8; 3646 static const int kIdlesBeforeMarkCompact = 8;
3444 static int number_idle_notifications = 0; 3647 static int number_idle_notifications = 0;
(...skipping 239 matching lines...)
3684 VerifyPointersVisitor no_dirty_regions_visitor; 3887 VerifyPointersVisitor no_dirty_regions_visitor;
3685 old_data_space_->Verify(&no_dirty_regions_visitor); 3888 old_data_space_->Verify(&no_dirty_regions_visitor);
3686 code_space_->Verify(&no_dirty_regions_visitor); 3889 code_space_->Verify(&no_dirty_regions_visitor);
3687 cell_space_->Verify(&no_dirty_regions_visitor); 3890 cell_space_->Verify(&no_dirty_regions_visitor);
3688 3891
3689 lo_space_->Verify(); 3892 lo_space_->Verify();
3690 } 3893 }
3691 #endif // DEBUG 3894 #endif // DEBUG
3692 3895
3693 3896
3694 Object* Heap::LookupSymbol(Vector<const char> string) { 3897 MaybeObject* Heap::LookupSymbol(Vector<const char> string) {
3695 Object* symbol = NULL; 3898 Object* symbol = NULL;
3696 Object* new_table = symbol_table()->LookupSymbol(string, &symbol); 3899 Object* new_table;
3697 if (new_table->IsFailure()) return new_table; 3900 { MaybeObject* maybe_new_table =
3901 symbol_table()->LookupSymbol(string, &symbol);
3902 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
3903 }
3698 // Can't use set_symbol_table because SymbolTable::cast knows that 3904 // Can't use set_symbol_table because SymbolTable::cast knows that
3699 // SymbolTable is a singleton and checks for identity. 3905 // SymbolTable is a singleton and checks for identity.
3700 roots_[kSymbolTableRootIndex] = new_table; 3906 roots_[kSymbolTableRootIndex] = new_table;
3701 ASSERT(symbol != NULL); 3907 ASSERT(symbol != NULL);
3702 return symbol; 3908 return symbol;
3703 } 3909 }
3704 3910
3705 3911
3706 Object* Heap::LookupSymbol(String* string) { 3912 MaybeObject* Heap::LookupSymbol(String* string) {
3707 if (string->IsSymbol()) return string; 3913 if (string->IsSymbol()) return string;
3708 Object* symbol = NULL; 3914 Object* symbol = NULL;
3709 Object* new_table = symbol_table()->LookupString(string, &symbol); 3915 Object* new_table;
3710 if (new_table->IsFailure()) return new_table; 3916 { MaybeObject* maybe_new_table =
3917 symbol_table()->LookupString(string, &symbol);
3918 if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
3919 }
3711 // Can't use set_symbol_table because SymbolTable::cast knows that 3920 // Can't use set_symbol_table because SymbolTable::cast knows that
3712 // SymbolTable is a singleton and checks for identity. 3921 // SymbolTable is a singleton and checks for identity.
3713 roots_[kSymbolTableRootIndex] = new_table; 3922 roots_[kSymbolTableRootIndex] = new_table;
3714 ASSERT(symbol != NULL); 3923 ASSERT(symbol != NULL);
3715 return symbol; 3924 return symbol;
3716 } 3925 }
3717 3926
3718 3927
3719 bool Heap::LookupSymbolIfExists(String* string, String** symbol) { 3928 bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
3720 if (string->IsSymbol()) { 3929 if (string->IsSymbol()) {
(...skipping 1305 matching lines...)
5026 void ExternalStringTable::TearDown() { 5235 void ExternalStringTable::TearDown() {
5027 new_space_strings_.Free(); 5236 new_space_strings_.Free();
5028 old_space_strings_.Free(); 5237 old_space_strings_.Free();
5029 } 5238 }
5030 5239
5031 5240
5032 List<Object*> ExternalStringTable::new_space_strings_; 5241 List<Object*> ExternalStringTable::new_space_strings_;
5033 List<Object*> ExternalStringTable::old_space_strings_; 5242 List<Object*> ExternalStringTable::old_space_strings_;
5034 5243
5035 } } // namespace v8::internal 5244 } } // namespace v8::internal