| Index: src/heap.cc
 | 
| ===================================================================
 | 
| --- src/heap.cc	(revision 7267)
 | 
| +++ src/heap.cc	(working copy)
 | 
| @@ -50,119 +50,110 @@
 | 
|  #include "arm/regexp-macro-assembler-arm.h"
 | 
|  #endif
 | 
|  
 | 
| -
 | 
|  namespace v8 {
 | 
|  namespace internal {
 | 
|  
 | 
|  
 | 
| -String* Heap::hidden_symbol_;
 | 
| -Object* Heap::roots_[Heap::kRootListLength];
 | 
| -Object* Heap::global_contexts_list_;
 | 
| -
 | 
| -
 | 
| -NewSpace Heap::new_space_;
 | 
| -OldSpace* Heap::old_pointer_space_ = NULL;
 | 
| -OldSpace* Heap::old_data_space_ = NULL;
 | 
| -OldSpace* Heap::code_space_ = NULL;
 | 
| -MapSpace* Heap::map_space_ = NULL;
 | 
| -CellSpace* Heap::cell_space_ = NULL;
 | 
| -LargeObjectSpace* Heap::lo_space_ = NULL;
 | 
| -
 | 
|  static const intptr_t kMinimumPromotionLimit = 2 * MB;
 | 
|  static const intptr_t kMinimumAllocationLimit = 8 * MB;
 | 
|  
 | 
| -intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
 | 
| -intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
 | 
|  
 | 
| -int Heap::old_gen_exhausted_ = false;
 | 
| +static Mutex* gc_initializer_mutex = OS::CreateMutex();
 | 
|  
 | 
| -int Heap::amount_of_external_allocated_memory_ = 0;
 | 
| -int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
 | 
|  
 | 
| +Heap::Heap()
 | 
| +    : isolate_(NULL),
 | 
|  // semispace_size_ should be a power of 2 and old_generation_size_ should be
 | 
|  // a multiple of Page::kPageSize.
 | 
|  #if defined(ANDROID)
 | 
| -static const int default_max_semispace_size_  = 2*MB;
 | 
| -intptr_t Heap::max_old_generation_size_ = 192*MB;
 | 
| -int Heap::initial_semispace_size_ = 128*KB;
 | 
| -intptr_t Heap::code_range_size_ = 0;
 | 
| -intptr_t Heap::max_executable_size_ = max_old_generation_size_;
 | 
| +      reserved_semispace_size_(2*MB),
 | 
| +      max_semispace_size_(2*MB),
 | 
| +      initial_semispace_size_(128*KB),
 | 
| +      max_old_generation_size_(192*MB),
 | 
| +      max_executable_size_(max_old_generation_size_),
 | 
| +      code_range_size_(0),
 | 
|  #elif defined(V8_TARGET_ARCH_X64)
 | 
| -static const int default_max_semispace_size_  = 16*MB;
 | 
| -intptr_t Heap::max_old_generation_size_ = 1*GB;
 | 
| -int Heap::initial_semispace_size_ = 1*MB;
 | 
| -intptr_t Heap::code_range_size_ = 512*MB;
 | 
| -intptr_t Heap::max_executable_size_ = 256*MB;
 | 
| +      reserved_semispace_size_(16*MB),
 | 
| +      max_semispace_size_(16*MB),
 | 
| +      initial_semispace_size_(1*MB),
 | 
| +      max_old_generation_size_(1*GB),
 | 
| +      max_executable_size_(256*MB),
 | 
| +      code_range_size_(512*MB),
 | 
|  #else
 | 
| -static const int default_max_semispace_size_  = 8*MB;
 | 
| -intptr_t Heap::max_old_generation_size_ = 512*MB;
 | 
| -int Heap::initial_semispace_size_ = 512*KB;
 | 
| -intptr_t Heap::code_range_size_ = 0;
 | 
| -intptr_t Heap::max_executable_size_ = 128*MB;
 | 
| +      reserved_semispace_size_(8*MB),
 | 
| +      max_semispace_size_(8*MB),
 | 
| +      initial_semispace_size_(512*KB),
 | 
| +      max_old_generation_size_(512*MB),
 | 
| +      max_executable_size_(128*MB),
 | 
| +      code_range_size_(0),
 | 
|  #endif
 | 
| -
 | 
| -// Allow build-time customization of the max semispace size. Building
 | 
| -// V8 with snapshots and a non-default max semispace size is much
 | 
| -// easier if you can define it as part of the build environment.
 | 
| -#if defined(V8_MAX_SEMISPACE_SIZE)
 | 
| -int Heap::max_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
 | 
| -#else
 | 
| -int Heap::max_semispace_size_ = default_max_semispace_size_;
 | 
| -#endif
 | 
| -
 | 
| -// The snapshot semispace size will be the default semispace size if
 | 
| -// snapshotting is used and will be the requested semispace size as
 | 
| -// set up by ConfigureHeap otherwise.
 | 
| -int Heap::reserved_semispace_size_ = Heap::max_semispace_size_;
 | 
| -
 | 
| -List<Heap::GCPrologueCallbackPair> Heap::gc_prologue_callbacks_;
 | 
| -List<Heap::GCEpilogueCallbackPair> Heap::gc_epilogue_callbacks_;
 | 
| -
 | 
| -GCCallback Heap::global_gc_prologue_callback_ = NULL;
 | 
| -GCCallback Heap::global_gc_epilogue_callback_ = NULL;
 | 
| -HeapObjectCallback Heap::gc_safe_size_of_old_object_ = NULL;
 | 
| -
 | 
|  // Variables set based on semispace_size_ and old_generation_size_ in
 | 
| -// ConfigureHeap.
 | 
| -
 | 
| +// ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
 | 
|  // Will be 4 * reserved_semispace_size_ to ensure that young
 | 
|  // generation can be aligned to its size.
 | 
| -int Heap::survived_since_last_expansion_ = 0;
 | 
| -intptr_t Heap::external_allocation_limit_ = 0;
 | 
| -
 | 
| -Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
 | 
| -
 | 
| -int Heap::mc_count_ = 0;
 | 
| -int Heap::ms_count_ = 0;
 | 
| -unsigned int Heap::gc_count_ = 0;
 | 
| -
 | 
| -GCTracer* Heap::tracer_ = NULL;
 | 
| -
 | 
| -int Heap::unflattened_strings_length_ = 0;
 | 
| -
 | 
| -int Heap::always_allocate_scope_depth_ = 0;
 | 
| -int Heap::linear_allocation_scope_depth_ = 0;
 | 
| -int Heap::contexts_disposed_ = 0;
 | 
| -
 | 
| -int Heap::young_survivors_after_last_gc_ = 0;
 | 
| -int Heap::high_survival_rate_period_length_ = 0;
 | 
| -double Heap::survival_rate_ = 0;
 | 
| -Heap::SurvivalRateTrend Heap::previous_survival_rate_trend_ = Heap::STABLE;
 | 
| -Heap::SurvivalRateTrend Heap::survival_rate_trend_ = Heap::STABLE;
 | 
| -
 | 
| +      survived_since_last_expansion_(0),
 | 
| +      always_allocate_scope_depth_(0),
 | 
| +      linear_allocation_scope_depth_(0),
 | 
| +      contexts_disposed_(0),
 | 
| +      new_space_(this),
 | 
| +      old_pointer_space_(NULL),
 | 
| +      old_data_space_(NULL),
 | 
| +      code_space_(NULL),
 | 
| +      map_space_(NULL),
 | 
| +      cell_space_(NULL),
 | 
| +      lo_space_(NULL),
 | 
| +      gc_state_(NOT_IN_GC),
 | 
| +      mc_count_(0),
 | 
| +      ms_count_(0),
 | 
| +      gc_count_(0),
 | 
| +      unflattened_strings_length_(0),
 | 
|  #ifdef DEBUG
 | 
| -bool Heap::allocation_allowed_ = true;
 | 
| -
 | 
| -int Heap::allocation_timeout_ = 0;
 | 
| -bool Heap::disallow_allocation_failure_ = false;
 | 
| +      allocation_allowed_(true),
 | 
| +      allocation_timeout_(0),
 | 
| +      disallow_allocation_failure_(false),
 | 
| +      debug_utils_(NULL),
 | 
|  #endif  // DEBUG
 | 
| +      old_gen_promotion_limit_(kMinimumPromotionLimit),
 | 
| +      old_gen_allocation_limit_(kMinimumAllocationLimit),
 | 
| +      external_allocation_limit_(0),
 | 
| +      amount_of_external_allocated_memory_(0),
 | 
| +      amount_of_external_allocated_memory_at_last_global_gc_(0),
 | 
| +      old_gen_exhausted_(false),
 | 
| +      hidden_symbol_(NULL),
 | 
| +      global_gc_prologue_callback_(NULL),
 | 
| +      global_gc_epilogue_callback_(NULL),
 | 
| +      gc_safe_size_of_old_object_(NULL),
 | 
| +      tracer_(NULL),
 | 
| +      young_survivors_after_last_gc_(0),
 | 
| +      high_survival_rate_period_length_(0),
 | 
| +      survival_rate_(0),
 | 
| +      previous_survival_rate_trend_(Heap::STABLE),
 | 
| +      survival_rate_trend_(Heap::STABLE),
 | 
| +      max_gc_pause_(0),
 | 
| +      max_alive_after_gc_(0),
 | 
| +      min_in_mutator_(kMaxInt),
 | 
| +      alive_after_last_gc_(0),
 | 
| +      last_gc_end_timestamp_(0.0),
 | 
| +      page_watermark_invalidated_mark_(1 << Page::WATERMARK_INVALIDATED),
 | 
| +      number_idle_notifications_(0),
 | 
| +      last_idle_notification_gc_count_(0),
 | 
| +      last_idle_notification_gc_count_init_(false),
 | 
| +      configured_(false),
 | 
| +      is_safe_to_read_maps_(true) {
 | 
| +  // Allow build-time customization of the max semispace size. Building
 | 
| +  // V8 with snapshots and a non-default max semispace size is much
 | 
| +  // easier if you can define it as part of the build environment.
 | 
| +#if defined(V8_MAX_SEMISPACE_SIZE)
 | 
| +  max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
 | 
| +#endif
 | 
|  
 | 
| -intptr_t GCTracer::alive_after_last_gc_ = 0;
 | 
| -double GCTracer::last_gc_end_timestamp_ = 0.0;
 | 
| -int GCTracer::max_gc_pause_ = 0;
 | 
| -intptr_t GCTracer::max_alive_after_gc_ = 0;
 | 
| -int GCTracer::min_in_mutator_ = kMaxInt;
 | 
| +  memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
 | 
| +  global_contexts_list_ = NULL;
 | 
| +  mark_compact_collector_.heap_ = this;
 | 
| +  external_string_table_.heap_ = this;
 | 
| +}
 | 
|  
 | 
| +
 | 
|  intptr_t Heap::Capacity() {
 | 
|    if (!HasBeenSetup()) return 0;
 | 
|  
 | 
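The hunk above is the heart of the change: every formerly static Heap member becomes an instance field, initialized in the constructor's initializer list, with the per-platform defaults chosen by the same #if blocks that used to guard the file-scope definitions, and the V8_MAX_SEMISPACE_SIZE build-time override applied in the constructor body. A minimal sketch of that pattern follows; all names (MiniHeap, MINIHEAP_MAX_SEMISPACE_SIZE, the field list) are illustrative, not the real V8 members.

#include <cstddef>
#include <cstring>

static const std::size_t KB = 1024;
static const std::size_t MB = 1024 * KB;

// Per-instance heap configuration instead of process-wide statics, so each
// isolate can carry its own heap state.
class MiniHeap {
 public:
  MiniHeap()
#if defined(ANDROID)
      : max_semispace_size_(2 * MB),
        initial_semispace_size_(128 * KB),
        max_old_generation_size_(192 * MB),
#else
      : max_semispace_size_(8 * MB),
        initial_semispace_size_(512 * KB),
        max_old_generation_size_(512 * MB),
#endif
        gc_count_(0),
        configured_(false) {
    // Mirrors the V8_MAX_SEMISPACE_SIZE override handled in the patch above.
#if defined(MINIHEAP_MAX_SEMISPACE_SIZE)
    max_semispace_size_ = MINIHEAP_MAX_SEMISPACE_SIZE;
#endif
    std::memset(roots_, 0, sizeof(roots_));  // root table starts out empty
  }

 private:
  std::size_t max_semispace_size_;
  std::size_t initial_semispace_size_;
  std::size_t max_old_generation_size_;
  unsigned gc_count_;
  bool configured_;
  void* roots_[16];  // stand-in for Heap::roots_[kRootListLength]
};

Keeping the #if blocks inside the initializer list preserves the old per-platform defaults while turning them from per-process constants into per-heap values.
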
| @@ -190,7 +181,7 @@
 | 
|  intptr_t Heap::CommittedMemoryExecutable() {
 | 
|    if (!HasBeenSetup()) return 0;
 | 
|  
 | 
| -  return MemoryAllocator::SizeExecutable();
 | 
| +  return isolate()->memory_allocator()->SizeExecutable();
 | 
|  }
 | 
|  
 | 
|  
 | 
| @@ -217,8 +208,8 @@
 | 
|  
 | 
|  
 | 
|  int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
 | 
| -  ASSERT(!Heap::InNewSpace(object));  // Code only works for old objects.
 | 
| -  ASSERT(!MarkCompactCollector::are_map_pointers_encoded());
 | 
| +  ASSERT(!HEAP->InNewSpace(object));  // Code only works for old objects.
 | 
| +  ASSERT(!HEAP->mark_compact_collector()->are_map_pointers_encoded());
 | 
|    MapWord map_word = object->map_word();
 | 
|    map_word.ClearMark();
 | 
|    map_word.ClearOverflow();
 | 
| @@ -227,8 +218,8 @@
 | 
|  
 | 
|  
 | 
|  int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) {
 | 
| -  ASSERT(!Heap::InNewSpace(object));  // Code only works for old objects.
 | 
| -  ASSERT(MarkCompactCollector::are_map_pointers_encoded());
 | 
| +  ASSERT(!HEAP->InNewSpace(object));  // Code only works for old objects.
 | 
| +  ASSERT(HEAP->mark_compact_collector()->are_map_pointers_encoded());
 | 
|    uint32_t marker = Memory::uint32_at(object->address());
 | 
|    if (marker == MarkCompactCollector::kSingleFreeEncoding) {
 | 
|      return kIntSize;
 | 
| @@ -236,7 +227,7 @@
 | 
|      return Memory::int_at(object->address() + kIntSize);
 | 
|    } else {
 | 
|      MapWord map_word = object->map_word();
 | 
| -    Address map_address = map_word.DecodeMapAddress(Heap::map_space());
 | 
| +    Address map_address = map_word.DecodeMapAddress(HEAP->map_space());
 | 
|      Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_address));
 | 
|      return object->SizeFromMap(map);
 | 
|    }
 | 
| @@ -246,19 +237,20 @@
 | 
|  GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
 | 
|    // Is global GC requested?
 | 
|    if (space != NEW_SPACE || FLAG_gc_global) {
 | 
| -    Counters::gc_compactor_caused_by_request.Increment();
 | 
| +    isolate_->counters()->gc_compactor_caused_by_request()->Increment();
 | 
|      return MARK_COMPACTOR;
 | 
|    }
 | 
|  
 | 
|    // Is enough data promoted to justify a global GC?
 | 
|    if (OldGenerationPromotionLimitReached()) {
 | 
| -    Counters::gc_compactor_caused_by_promoted_data.Increment();
 | 
| +    isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
 | 
|      return MARK_COMPACTOR;
 | 
|    }
 | 
|  
 | 
|    // Have allocation in OLD and LO failed?
 | 
|    if (old_gen_exhausted_) {
 | 
| -    Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
 | 
| +    isolate_->counters()->
 | 
| +        gc_compactor_caused_by_oldspace_exhaustion()->Increment();
 | 
|      return MARK_COMPACTOR;
 | 
|    }
 | 
|  
 | 
| @@ -271,8 +263,9 @@
 | 
|    // and does not count available bytes already in the old space or code
 | 
|    // space.  Undercounting is safe---we may get an unrequested full GC when
 | 
|    // a scavenge would have succeeded.
 | 
| -  if (MemoryAllocator::MaxAvailable() <= new_space_.Size()) {
 | 
| -    Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
 | 
| +  if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) {
 | 
| +    isolate_->counters()->
 | 
| +        gc_compactor_caused_by_oldspace_exhaustion()->Increment();
 | 
|      return MARK_COMPACTOR;
 | 
|    }
 | 
|  
 | 
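SelectGarbageCollector keeps the same decision tree as before; only the counters are now reached through the isolate. A stand-alone sketch of that heuristic, with invented field names in place of the real Heap and MemoryAllocator queries:

#include <cstddef>

enum GarbageCollectorKind { SCAVENGER_GC, MARK_COMPACTOR_GC };

struct GcInputs {
  bool global_gc_requested;        // space != NEW_SPACE or --gc-global
  bool promotion_limit_reached;    // OldGenerationPromotionLimitReached()
  bool old_gen_exhausted;          // allocation in OLD or LO spaces failed
  std::size_t max_available_old_space;
  std::size_t new_space_size;
};

GarbageCollectorKind SelectCollector(const GcInputs& in) {
  if (in.global_gc_requested) return MARK_COMPACTOR_GC;
  if (in.promotion_limit_reached) return MARK_COMPACTOR_GC;
  if (in.old_gen_exhausted) return MARK_COMPACTOR_GC;
  // Undercounting available space is safe: at worst a full GC runs where a
  // scavenge would have succeeded.
  if (in.max_available_old_space <= in.new_space_size) return MARK_COMPACTOR_GC;
  return SCAVENGER_GC;  // default: cheap copying collection of new space
}
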
| @@ -317,8 +310,8 @@
 | 
|    if (!FLAG_trace_gc_verbose) return;
 | 
|    PrintF("Memory allocator,   used: %8" V8_PTR_PREFIX "d"
 | 
|               ", available: %8" V8_PTR_PREFIX "d\n",
 | 
| -         MemoryAllocator::Size(),
 | 
| -         MemoryAllocator::Available());
 | 
| +         isolate_->memory_allocator()->Size(),
 | 
| +         isolate_->memory_allocator()->Available());
 | 
|    PrintF("New space,          used: %8" V8_PTR_PREFIX "d"
 | 
|               ", available: %8" V8_PTR_PREFIX "d\n",
 | 
|           Heap::new_space_.Size(),
 | 
| @@ -383,7 +376,7 @@
 | 
|  
 | 
|  
 | 
|  void Heap::GarbageCollectionPrologue() {
 | 
| -  TranscendentalCache::Clear();
 | 
| +  isolate_->transcendental_cache()->Clear();
 | 
|    ClearJSFunctionResultCaches();
 | 
|    gc_count_++;
 | 
|    unflattened_strings_length_ = 0;
 | 
| @@ -424,21 +417,24 @@
 | 
|      Verify();
 | 
|    }
 | 
|  
 | 
| -  if (FLAG_print_global_handles) GlobalHandles::Print();
 | 
| +  if (FLAG_print_global_handles) isolate_->global_handles()->Print();
 | 
|    if (FLAG_print_handles) PrintHandles();
 | 
|    if (FLAG_gc_verbose) Print();
 | 
|    if (FLAG_code_stats) ReportCodeStatistics("After GC");
 | 
|  #endif
 | 
|  
 | 
| -  Counters::alive_after_last_gc.Set(static_cast<int>(SizeOfObjects()));
 | 
| +  isolate_->counters()->alive_after_last_gc()->Set(
 | 
| +      static_cast<int>(SizeOfObjects()));
 | 
|  
 | 
| -  Counters::symbol_table_capacity.Set(symbol_table()->Capacity());
 | 
| -  Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
 | 
| +  isolate_->counters()->symbol_table_capacity()->Set(
 | 
| +      symbol_table()->Capacity());
 | 
| +  isolate_->counters()->number_of_symbols()->Set(
 | 
| +      symbol_table()->NumberOfElements());
 | 
|  #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
 | 
|    ReportStatisticsAfterGC();
 | 
|  #endif
 | 
|  #ifdef ENABLE_DEBUGGER_SUPPORT
 | 
| -  Debug::AfterGarbageCollection();
 | 
| +  isolate_->debug()->AfterGarbageCollection();
 | 
|  #endif
 | 
|  }
 | 
|  
 | 
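The epilogue hunk shows the mechanical pattern repeated throughout the patch: Counters::x.Set(...) becomes isolate_->counters()->x()->Set(...), and GlobalHandles, Debug, and the various caches are reached the same way. A toy sketch of why that indirection is needed; the types here (ToyIsolate, ToyCounters, ToyHeap) are illustrative only, not the real V8 classes.

struct StatsCounter {
  int value = 0;
  void Increment() { ++value; }
  void Set(int v) { value = v; }
};

struct ToyCounters {
  StatsCounter alive_after_last_gc;
  StatsCounter number_of_symbols;
};

class ToyIsolate {
 public:
  ToyCounters* counters() { return &counters_; }
 private:
  ToyCounters counters_;  // one set of counters per isolate, not per process
};

class ToyHeap {
 public:
  explicit ToyHeap(ToyIsolate* isolate) : isolate_(isolate) {}
  void AfterGc(int live_bytes) {
    // Before the patch this would have been a static, shared by every
    // isolate in the process; now each isolate updates its own copy.
    isolate_->counters()->alive_after_last_gc.Set(live_bytes);
  }
 private:
  ToyIsolate* isolate_;
};
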
| @@ -447,9 +443,9 @@
 | 
|    // Since we are ignoring the return value, the exact choice of space does
 | 
|    // not matter, so long as we do not specify NEW_SPACE, which would not
 | 
|    // cause a full GC.
 | 
| -  MarkCompactCollector::SetForceCompaction(force_compaction);
 | 
| +  mark_compact_collector_.SetForceCompaction(force_compaction);
 | 
|    CollectGarbage(OLD_POINTER_SPACE);
 | 
| -  MarkCompactCollector::SetForceCompaction(false);
 | 
| +  mark_compact_collector_.SetForceCompaction(false);
 | 
|  }
 | 
|  
 | 
|  
 | 
| @@ -457,7 +453,7 @@
 | 
|    // Since we are ignoring the return value, the exact choice of space does
 | 
|    // not matter, so long as we do not specify NEW_SPACE, which would not
 | 
|    // cause a full GC.
 | 
| -  MarkCompactCollector::SetForceCompaction(true);
 | 
| +  mark_compact_collector()->SetForceCompaction(true);
 | 
|  
 | 
|    // Major GC would invoke weak handle callbacks on weakly reachable
 | 
|    // handles, but won't collect weakly reachable objects until next
 | 
| @@ -473,13 +469,13 @@
 | 
|        break;
 | 
|      }
 | 
|    }
 | 
| -  MarkCompactCollector::SetForceCompaction(false);
 | 
| +  mark_compact_collector()->SetForceCompaction(false);
 | 
|  }
 | 
|  
 | 
|  
 | 
|  bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
 | 
|    // The VM is in the GC state until exiting this function.
 | 
| -  VMState state(GC);
 | 
| +  VMState state(isolate_, GC);
 | 
|  
 | 
|  #ifdef DEBUG
 | 
|    // Reset the allocation timeout to the GC interval, but make sure to
 | 
| @@ -492,7 +488,7 @@
 | 
|  
 | 
|    bool next_gc_likely_to_collect_more = false;
 | 
|  
 | 
| -  { GCTracer tracer;
 | 
| +  { GCTracer tracer(this);
 | 
|      GarbageCollectionPrologue();
 | 
|      // The GC count was incremented in the prologue.  Tell the tracer about
 | 
|      // it.
 | 
| @@ -502,8 +498,8 @@
 | 
|      tracer.set_collector(collector);
 | 
|  
 | 
|      HistogramTimer* rate = (collector == SCAVENGER)
 | 
| -        ? &Counters::gc_scavenger
 | 
| -        : &Counters::gc_compactor;
 | 
| +        ? isolate_->counters()->gc_scavenger()
 | 
| +        : isolate_->counters()->gc_compactor();
 | 
|      rate->Start();
 | 
|      next_gc_likely_to_collect_more =
 | 
|          PerformGarbageCollection(collector, &tracer);
 | 
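The hunk above now picks the scavenger or compactor histogram timer from the isolate's counters and brackets PerformGarbageCollection with Start()/Stop(), and the GCTracer is constructed per collection with a Heap pointer. A minimal sketch of that start/stop timing; it assumes nothing about V8's real HistogramTimer beyond the two calls used here.

#include <chrono>
#include <cstdio>

// Illustrative scoped timer: Start() before the collection, Stop() after it.
class SimpleHistogramTimer {
 public:
  explicit SimpleHistogramTimer(const char* name) : name_(name) {}
  void Start() { start_ = Clock::now(); }
  void Stop() {
    long long us = std::chrono::duration_cast<std::chrono::microseconds>(
        Clock::now() - start_).count();
    std::printf("%s took %lld us\n", name_, us);
  }
 private:
  using Clock = std::chrono::steady_clock;
  const char* name_;
  Clock::time_point start_;
};

// Usage mirroring the shape of the patch:
//   SimpleHistogramTimer* rate = is_scavenge ? &gc_scavenger : &gc_compactor;
//   rate->Start();
//   ... perform the collection ...
//   rate->Stop();
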
| @@ -522,7 +518,7 @@
 | 
|  
 | 
|  
 | 
|  void Heap::PerformScavenge() {
 | 
| -  GCTracer tracer;
 | 
| +  GCTracer tracer(this);
 | 
|    PerformGarbageCollection(SCAVENGER, &tracer);
 | 
|  }
 | 
|  
 | 
| @@ -531,7 +527,6 @@
 | 
|  // Helper class for verifying the symbol table.
 | 
|  class SymbolTableVerifier : public ObjectVisitor {
 | 
|   public:
 | 
| -  SymbolTableVerifier() { }
 | 
|    void VisitPointers(Object** start, Object** end) {
 | 
|      // Visit all HeapObject pointers in [start, end).
 | 
|      for (Object** p = start; p < end; p++) {
 | 
| @@ -548,7 +543,7 @@
 | 
|  static void VerifySymbolTable() {
 | 
|  #ifdef DEBUG
 | 
|    SymbolTableVerifier verifier;
 | 
| -  Heap::symbol_table()->IterateElements(&verifier);
 | 
| +  HEAP->symbol_table()->IterateElements(&verifier);
 | 
|  #endif  // DEBUG
 | 
|  }
 | 
|  
 | 
| @@ -633,7 +628,7 @@
 | 
|  
 | 
|  
 | 
|  void Heap::ClearJSFunctionResultCaches() {
 | 
| -  if (Bootstrapper::IsActive()) return;
 | 
| +  if (isolate_->bootstrapper()->IsActive()) return;
 | 
|  
 | 
|    Object* context = global_contexts_list_;
 | 
|    while (!context->IsUndefined()) {
 | 
| @@ -651,8 +646,9 @@
 | 
|  }
 | 
|  
 | 
|  
 | 
| +
 | 
|  void Heap::ClearNormalizedMapCaches() {
 | 
| -  if (Bootstrapper::IsActive()) return;
 | 
| +  if (isolate_->bootstrapper()->IsActive()) return;
 | 
|  
 | 
|    Object* context = global_contexts_list_;
 | 
|    while (!context->IsUndefined()) {
 | 
| @@ -709,7 +705,7 @@
 | 
|    bool next_gc_likely_to_collect_more = false;
 | 
|  
 | 
|    if (collector != SCAVENGER) {
 | 
| -    PROFILE(CodeMovingGCEvent());
 | 
| +    PROFILE(isolate_, CodeMovingGCEvent());
 | 
|    }
 | 
|  
 | 
|    VerifySymbolTable();
 | 
| @@ -768,13 +764,13 @@
 | 
|      UpdateSurvivalRateTrend(start_new_space_size);
 | 
|    }
 | 
|  
 | 
| -  Counters::objs_since_last_young.Set(0);
 | 
| +  isolate_->counters()->objs_since_last_young()->Set(0);
 | 
|  
 | 
|    if (collector == MARK_COMPACTOR) {
 | 
|      DisableAssertNoAllocation allow_allocation;
 | 
|      GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
 | 
|      next_gc_likely_to_collect_more =
 | 
| -        GlobalHandles::PostGarbageCollectionProcessing();
 | 
| +        isolate_->global_handles()->PostGarbageCollectionProcessing();
 | 
|    }
 | 
|  
 | 
|    // Update relocatables.
 | 
| @@ -808,11 +804,11 @@
 | 
|  
 | 
|  void Heap::MarkCompact(GCTracer* tracer) {
 | 
|    gc_state_ = MARK_COMPACT;
 | 
| -  LOG(ResourceEvent("markcompact", "begin"));
 | 
| +  LOG(isolate_, ResourceEvent("markcompact", "begin"));
 | 
|  
 | 
| -  MarkCompactCollector::Prepare(tracer);
 | 
| +  mark_compact_collector_.Prepare(tracer);
 | 
|  
 | 
| -  bool is_compacting = MarkCompactCollector::IsCompacting();
 | 
| +  bool is_compacting = mark_compact_collector_.IsCompacting();
 | 
|  
 | 
|    if (is_compacting) {
 | 
|      mc_count_++;
 | 
| @@ -823,15 +819,17 @@
 | 
|  
 | 
|    MarkCompactPrologue(is_compacting);
 | 
|  
 | 
| -  MarkCompactCollector::CollectGarbage();
 | 
| +  is_safe_to_read_maps_ = false;
 | 
| +  mark_compact_collector_.CollectGarbage();
 | 
| +  is_safe_to_read_maps_ = true;
 | 
|  
 | 
| -  LOG(ResourceEvent("markcompact", "end"));
 | 
| +  LOG(isolate_, ResourceEvent("markcompact", "end"));
 | 
|  
 | 
|    gc_state_ = NOT_IN_GC;
 | 
|  
 | 
|    Shrink();
 | 
|  
 | 
| -  Counters::objs_since_last_full.Set(0);
 | 
| +  isolate_->counters()->objs_since_last_full()->Set(0);
 | 
|  
 | 
|    contexts_disposed_ = 0;
 | 
|  }
 | 
| @@ -840,11 +838,11 @@
 | 
|  void Heap::MarkCompactPrologue(bool is_compacting) {
 | 
|    // At any old GC clear the keyed lookup cache to enable collection of unused
 | 
|    // maps.
 | 
| -  KeyedLookupCache::Clear();
 | 
| -  ContextSlotCache::Clear();
 | 
| -  DescriptorLookupCache::Clear();
 | 
| +  isolate_->keyed_lookup_cache()->Clear();
 | 
| +  isolate_->context_slot_cache()->Clear();
 | 
| +  isolate_->descriptor_lookup_cache()->Clear();
 | 
|  
 | 
| -  CompilationCache::MarkCompactPrologue();
 | 
| +  isolate_->compilation_cache()->MarkCompactPrologue();
 | 
|  
 | 
|    CompletelyClearInstanceofCache();
 | 
|  
 | 
| @@ -868,6 +866,7 @@
 | 
|  // Helper class for copying HeapObjects
 | 
|  class ScavengeVisitor: public ObjectVisitor {
 | 
|   public:
 | 
| +  explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
 | 
|  
 | 
|    void VisitPointer(Object** p) { ScavengePointer(p); }
 | 
|  
 | 
| @@ -879,48 +878,15 @@
 | 
|   private:
 | 
|    void ScavengePointer(Object** p) {
 | 
|      Object* object = *p;
 | 
| -    if (!Heap::InNewSpace(object)) return;
 | 
| +    if (!heap_->InNewSpace(object)) return;
 | 
|      Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
 | 
|                           reinterpret_cast<HeapObject*>(object));
 | 
|    }
 | 
| -};
 | 
|  
 | 
| -
 | 
| -// A queue of objects promoted during scavenge. Each object is accompanied
 | 
| -// by it's size to avoid dereferencing a map pointer for scanning.
 | 
| -class PromotionQueue {
 | 
| - public:
 | 
| -  void Initialize(Address start_address) {
 | 
| -    front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
 | 
| -  }
 | 
| -
 | 
| -  bool is_empty() { return front_ <= rear_; }
 | 
| -
 | 
| -  void insert(HeapObject* target, int size) {
 | 
| -    *(--rear_) = reinterpret_cast<intptr_t>(target);
 | 
| -    *(--rear_) = size;
 | 
| -    // Assert no overflow into live objects.
 | 
| -    ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
 | 
| -  }
 | 
| -
 | 
| -  void remove(HeapObject** target, int* size) {
 | 
| -    *target = reinterpret_cast<HeapObject*>(*(--front_));
 | 
| -    *size = static_cast<int>(*(--front_));
 | 
| -    // Assert no underflow.
 | 
| -    ASSERT(front_ >= rear_);
 | 
| -  }
 | 
| -
 | 
| - private:
 | 
| -  // The front of the queue is higher in memory than the rear.
 | 
| -  intptr_t* front_;
 | 
| -  intptr_t* rear_;
 | 
| +  Heap* heap_;
 | 
|  };
 | 
|  
 | 
|  
 | 
| -// Shared state read by the scavenge collector and set by ScavengeObject.
 | 
| -static PromotionQueue promotion_queue;
 | 
| -
 | 
| -
 | 
|  #ifdef DEBUG
 | 
|  // Visitor class to verify pointers in code or data space do not point into
 | 
|  // new space.
 | 
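The PromotionQueue class and its file-static instance are deleted in the hunk above because the queue becomes a Heap member (promotion_queue_, used further down); the algorithm itself is unchanged. For reference, a self-contained sketch of the same structure: the queue grows downward from the high end of to-space, and each entry is an (object, size) pair so the scavenger never has to re-read a map word to learn an object's size.

#include <cassert>
#include <cstdint>

class SimplePromotionQueue {
 public:
  // start_address is the high end of to-space; the queue grows downward.
  void Initialize(intptr_t* start_address) { front_ = rear_ = start_address; }

  bool is_empty() const { return front_ <= rear_; }

  void insert(void* target, int size) {
    *(--rear_) = reinterpret_cast<intptr_t>(target);
    *(--rear_) = size;
    // The real code also asserts the queue has not grown down into live
    // objects allocated at the bottom of to-space.
  }

  void remove(void** target, int* size) {
    *target = reinterpret_cast<void*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    assert(front_ >= rear_);  // no underflow past the rear
  }

 private:
  // The front of the queue is higher in memory than the rear.
  intptr_t* front_;
  intptr_t* rear_;
};
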
| @@ -929,7 +895,7 @@
 | 
|    void VisitPointers(Object** start, Object**end) {
 | 
|      for (Object** current = start; current < end; current++) {
 | 
|        if ((*current)->IsHeapObject()) {
 | 
| -        ASSERT(!Heap::InNewSpace(HeapObject::cast(*current)));
 | 
| +        ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current)));
 | 
|        }
 | 
|      }
 | 
|    }
 | 
| @@ -940,12 +906,12 @@
 | 
|    // Verify that there are no pointers to new space in spaces where we
 | 
|    // do not expect them.
 | 
|    VerifyNonPointerSpacePointersVisitor v;
 | 
| -  HeapObjectIterator code_it(Heap::code_space());
 | 
| +  HeapObjectIterator code_it(HEAP->code_space());
 | 
|    for (HeapObject* object = code_it.next();
 | 
|         object != NULL; object = code_it.next())
 | 
|      object->Iterate(&v);
 | 
|  
 | 
| -  HeapObjectIterator data_it(Heap::old_data_space());
 | 
| +  HeapObjectIterator data_it(HEAP->old_data_space());
 | 
|    for (HeapObject* object = data_it.next();
 | 
|         object != NULL; object = data_it.next())
 | 
|      object->Iterate(&v);
 | 
| @@ -971,7 +937,7 @@
 | 
|  
 | 
|    gc_state_ = SCAVENGE;
 | 
|  
 | 
| -  Page::FlipMeaningOfInvalidatedWatermarkFlag();
 | 
| +  Page::FlipMeaningOfInvalidatedWatermarkFlag(this);
 | 
|  #ifdef DEBUG
 | 
|    VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID);
 | 
|    VerifyPageWatermarkValidity(map_space_, ALL_VALID);
 | 
| @@ -986,10 +952,10 @@
 | 
|    map_space_->FlushTopPageWatermark();
 | 
|  
 | 
|    // Implements Cheney's copying algorithm
 | 
| -  LOG(ResourceEvent("scavenge", "begin"));
 | 
| +  LOG(isolate_, ResourceEvent("scavenge", "begin"));
 | 
|  
 | 
|    // Clear descriptor cache.
 | 
| -  DescriptorLookupCache::Clear();
 | 
| +  isolate_->descriptor_lookup_cache()->Clear();
 | 
|  
 | 
|    // Used for updating survived_since_last_expansion_ at function end.
 | 
|    intptr_t survived_watermark = PromotedSpaceSize();
 | 
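As the comment above says, the scavenge path implements Cheney's copying algorithm. The sketch below is a deliberately toy, single-cycle version over an ordinary object graph rather than raw semispace memory; it shows the two ideas the following hunks rely on: a forwarding pointer makes copying idempotent, and the copy order itself serves as the breadth-first work list.

#include <vector>

struct Obj {
  Obj* forward = nullptr;      // forwarding pointer, set once evacuated
  std::vector<Obj*> children;  // outgoing references (toy object model)
  int payload = 0;
};

class SemiSpaceGC {
 public:
  // Copies everything reachable from `roots` and rewires the pointers.
  // Toy model: ordinary heap allocation stands in for bump allocation in
  // to-space, and the from-space originals are simply abandoned.
  void Collect(std::vector<Obj*>& roots) {
    to_space_.clear();
    for (Obj*& root : roots) root = Copy(root);
    // Cheney scan: objects between `scan` and the end of to_space_ are gray.
    for (std::size_t scan = 0; scan < to_space_.size(); ++scan) {
      for (Obj*& child : to_space_[scan]->children) child = Copy(child);
    }
  }

 private:
  Obj* Copy(Obj* obj) {
    if (obj == nullptr) return nullptr;
    if (obj->forward != nullptr) return obj->forward;  // already evacuated
    Obj* target = new Obj(*obj);   // "allocate in to-space" and copy payload
    obj->forward = target;         // leave the forwarding address behind
    to_space_.push_back(target);   // implicitly enqueues it for scanning
    return target;
  }

  std::vector<Obj*> to_space_;
};
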
| @@ -1019,16 +985,17 @@
 | 
|    // frees up its size in bytes from the top of the new space, and
 | 
|    // objects are at least one pointer in size.
 | 
|    Address new_space_front = new_space_.ToSpaceLow();
 | 
| -  promotion_queue.Initialize(new_space_.ToSpaceHigh());
 | 
| +  promotion_queue_.Initialize(new_space_.ToSpaceHigh());
 | 
|  
 | 
| -  ScavengeVisitor scavenge_visitor;
 | 
| +  is_safe_to_read_maps_ = false;
 | 
| +  ScavengeVisitor scavenge_visitor(this);
 | 
|    // Copy roots.
 | 
|    IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
 | 
|  
 | 
|    // Copy objects reachable from the old generation.  By definition,
 | 
|    // there are no intergenerational pointers in code or data spaces.
 | 
|    IterateDirtyRegions(old_pointer_space_,
 | 
| -                      &IteratePointersInDirtyRegion,
 | 
| +                      &Heap::IteratePointersInDirtyRegion,
 | 
|                        &ScavengePointer,
 | 
|                        WATERMARK_CAN_BE_INVALID);
 | 
|  
 | 
| @@ -1060,10 +1027,12 @@
 | 
|        &UpdateNewSpaceReferenceInExternalStringTableEntry);
 | 
|  
 | 
|    LiveObjectList::UpdateReferencesForScavengeGC();
 | 
| -  RuntimeProfiler::UpdateSamplesAfterScavenge();
 | 
| +  isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
 | 
|  
 | 
|    ASSERT(new_space_front == new_space_.top());
 | 
|  
 | 
| +  is_safe_to_read_maps_ = true;
 | 
| +
 | 
|    // Set age mark.
 | 
|    new_space_.set_age_mark(new_space_.top());
 | 
|  
 | 
| @@ -1071,18 +1040,19 @@
 | 
|    IncrementYoungSurvivorsCounter(static_cast<int>(
 | 
|        (PromotedSpaceSize() - survived_watermark) + new_space_.Size()));
 | 
|  
 | 
| -  LOG(ResourceEvent("scavenge", "end"));
 | 
| +  LOG(isolate_, ResourceEvent("scavenge", "end"));
 | 
|  
 | 
|    gc_state_ = NOT_IN_GC;
 | 
|  }
 | 
|  
 | 
|  
 | 
| -String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Object** p) {
 | 
| +String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
 | 
| +                                                                Object** p) {
 | 
|    MapWord first_word = HeapObject::cast(*p)->map_word();
 | 
|  
 | 
|    if (!first_word.IsForwardingAddress()) {
 | 
|      // Unreachable external string can be finalized.
 | 
| -    FinalizeExternalString(String::cast(*p));
 | 
| +    heap->FinalizeExternalString(String::cast(*p));
 | 
|      return NULL;
 | 
|    }
 | 
|  
 | 
| @@ -1093,48 +1063,49 @@
 | 
|  
 | 
|  void Heap::UpdateNewSpaceReferencesInExternalStringTable(
 | 
|      ExternalStringTableUpdaterCallback updater_func) {
 | 
| -  ExternalStringTable::Verify();
 | 
| +  external_string_table_.Verify();
 | 
|  
 | 
| -  if (ExternalStringTable::new_space_strings_.is_empty()) return;
 | 
| +  if (external_string_table_.new_space_strings_.is_empty()) return;
 | 
|  
 | 
| -  Object** start = &ExternalStringTable::new_space_strings_[0];
 | 
| -  Object** end = start + ExternalStringTable::new_space_strings_.length();
 | 
| +  Object** start = &external_string_table_.new_space_strings_[0];
 | 
| +  Object** end = start + external_string_table_.new_space_strings_.length();
 | 
|    Object** last = start;
 | 
|  
 | 
|    for (Object** p = start; p < end; ++p) {
 | 
| -    ASSERT(Heap::InFromSpace(*p));
 | 
| -    String* target = updater_func(p);
 | 
| +    ASSERT(InFromSpace(*p));
 | 
| +    String* target = updater_func(this, p);
 | 
|  
 | 
|      if (target == NULL) continue;
 | 
|  
 | 
|      ASSERT(target->IsExternalString());
 | 
|  
 | 
| -    if (Heap::InNewSpace(target)) {
 | 
| +    if (InNewSpace(target)) {
 | 
|        // String is still in new space.  Update the table entry.
 | 
|        *last = target;
 | 
|        ++last;
 | 
|      } else {
 | 
|        // String got promoted.  Move it to the old string list.
 | 
| -      ExternalStringTable::AddOldString(target);
 | 
| +      external_string_table_.AddOldString(target);
 | 
|      }
 | 
|    }
 | 
|  
 | 
|    ASSERT(last <= end);
 | 
| -  ExternalStringTable::ShrinkNewStrings(static_cast<int>(last - start));
 | 
| +  external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
 | 
|  }
 | 
|  
 | 
|  
 | 
| -static Object* ProcessFunctionWeakReferences(Object* function,
 | 
| +static Object* ProcessFunctionWeakReferences(Heap* heap,
 | 
| +                                             Object* function,
 | 
|                                               WeakObjectRetainer* retainer) {
 | 
| -  Object* head = Heap::undefined_value();
 | 
| +  Object* head = heap->undefined_value();
 | 
|    JSFunction* tail = NULL;
 | 
|    Object* candidate = function;
 | 
| -  while (!candidate->IsUndefined()) {
 | 
| +  while (candidate != heap->undefined_value()) {
 | 
|      // Check whether to keep the candidate in the list.
 | 
|      JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate);
 | 
|      Object* retain = retainer->RetainAs(candidate);
 | 
|      if (retain != NULL) {
 | 
| -      if (head->IsUndefined()) {
 | 
| +      if (head == heap->undefined_value()) {
 | 
|          // First element in the list.
 | 
|          head = candidate_function;
 | 
|        } else {
 | 
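Earlier in the hunk above, UpdateNewSpaceReferencesInExternalStringTable filters the new-space string list in place: last acts as a write cursor, dead strings are dropped, promoted strings move to the old-strings list, and the array is shrunk at the end. The same shape with everyday containers; Entry and its flags are toy stand-ins, not V8 types.

#include <cstddef>
#include <vector>

struct Entry { bool alive; bool promoted; };

void CompactNewSpaceEntries(std::vector<Entry*>& new_entries,
                            std::vector<Entry*>& old_entries) {
  std::size_t last = 0;                    // write cursor ("last" in the patch)
  for (std::size_t i = 0; i < new_entries.size(); ++i) {
    Entry* e = new_entries[i];
    if (!e->alive) continue;               // unreachable: dropped entirely
    if (e->promoted) {
      old_entries.push_back(e);            // moved to the old-strings list
    } else {
      new_entries[last++] = e;             // still in new space: keep in place
    }
  }
  new_entries.resize(last);                // ShrinkNewStrings equivalent
}
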
| @@ -1151,7 +1122,7 @@
 | 
|  
 | 
|    // Terminate the list if there is one or more elements.
 | 
|    if (tail != NULL) {
 | 
| -    tail->set_next_function_link(Heap::undefined_value());
 | 
| +    tail->set_next_function_link(heap->undefined_value());
 | 
|    }
 | 
|  
 | 
|    return head;
 | 
| @@ -1162,18 +1133,19 @@
 | 
|    Object* head = undefined_value();
 | 
|    Context* tail = NULL;
 | 
|    Object* candidate = global_contexts_list_;
 | 
| -  while (!candidate->IsUndefined()) {
 | 
| +  while (candidate != undefined_value()) {
 | 
|      // Check whether to keep the candidate in the list.
 | 
|      Context* candidate_context = reinterpret_cast<Context*>(candidate);
 | 
|      Object* retain = retainer->RetainAs(candidate);
 | 
|      if (retain != NULL) {
 | 
| -      if (head->IsUndefined()) {
 | 
| +      if (head == undefined_value()) {
 | 
|          // First element in the list.
 | 
|          head = candidate_context;
 | 
|        } else {
 | 
|          // Subsequent elements in the list.
 | 
|          ASSERT(tail != NULL);
 | 
| -        tail->set_unchecked(Context::NEXT_CONTEXT_LINK,
 | 
| +        tail->set_unchecked(this,
 | 
| +                            Context::NEXT_CONTEXT_LINK,
 | 
|                              candidate_context,
 | 
|                              UPDATE_WRITE_BARRIER);
 | 
|        }
 | 
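ProcessWeakReferences and ProcessFunctionWeakReferences now compare against the heap's own undefined_value and pass the heap into set_unchecked, but the list surgery above is unchanged: walk the chain, keep the entries the retainer vouches for, splice out the rest, and terminate the tail. A plain-pointer sketch of that loop, with a toy node type in place of Context/JSFunction:

// `retained` stands in for the retainer callback's verdict.
struct WeakNode {
  WeakNode* next_link = nullptr;
  bool retained = false;
};

// Returns the new head: the same nodes, minus the ones not retained.
WeakNode* PruneWeakList(WeakNode* first_candidate) {
  WeakNode* head = nullptr;
  WeakNode* tail = nullptr;
  for (WeakNode* candidate = first_candidate; candidate != nullptr;
       candidate = candidate->next_link) {
    if (!candidate->retained) continue;  // spliced out of the list
    if (head == nullptr) {
      head = candidate;                  // first survivor becomes the head
    } else {
      tail->next_link = candidate;       // link after the previous survivor
    }
    tail = candidate;
  }
  if (tail != nullptr) tail->next_link = nullptr;  // terminate the list
  return head;
}
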
| @@ -1183,9 +1155,11 @@
 | 
|        // Process the weak list of optimized functions for the context.
 | 
|        Object* function_list_head =
 | 
|            ProcessFunctionWeakReferences(
 | 
| +              this,
 | 
|                candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
 | 
|                retainer);
 | 
| -      candidate_context->set_unchecked(Context::OPTIMIZED_FUNCTIONS_LIST,
 | 
| +      candidate_context->set_unchecked(this,
 | 
| +                                       Context::OPTIMIZED_FUNCTIONS_LIST,
 | 
|                                         function_list_head,
 | 
|                                         UPDATE_WRITE_BARRIER);
 | 
|      }
 | 
| @@ -1195,21 +1169,22 @@
 | 
|  
 | 
|    // Terminate the list if there is one or more elements.
 | 
|    if (tail != NULL) {
 | 
| -    tail->set_unchecked(Context::NEXT_CONTEXT_LINK,
 | 
| +    tail->set_unchecked(this,
 | 
| +                        Context::NEXT_CONTEXT_LINK,
 | 
|                          Heap::undefined_value(),
 | 
|                          UPDATE_WRITE_BARRIER);
 | 
|    }
 | 
|  
 | 
|    // Update the head of the list of contexts.
 | 
| -  Heap::global_contexts_list_ = head;
 | 
| +  global_contexts_list_ = head;
 | 
|  }
 | 
|  
 | 
|  
 | 
|  class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
 | 
|   public:
 | 
| -  static inline void VisitPointer(Object** p) {
 | 
| +  static inline void VisitPointer(Heap* heap, Object** p) {
 | 
|      Object* object = *p;
 | 
| -    if (!Heap::InNewSpace(object)) return;
 | 
| +    if (!heap->InNewSpace(object)) return;
 | 
|      Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
 | 
|                           reinterpret_cast<HeapObject*>(object));
 | 
|    }
 | 
| @@ -1230,10 +1205,10 @@
 | 
|      }
 | 
|  
 | 
|      // Promote and process all the to-be-promoted objects.
 | 
| -    while (!promotion_queue.is_empty()) {
 | 
| +    while (!promotion_queue_.is_empty()) {
 | 
|        HeapObject* target;
 | 
|        int size;
 | 
| -      promotion_queue.remove(&target, &size);
 | 
| +      promotion_queue_.remove(&target, &size);
 | 
|  
 | 
|        // Promoted object might be already partially visited
 | 
|        // during dirty regions iteration. Thus we search specificly
 | 
| @@ -1303,7 +1278,7 @@
 | 
|    enum SizeRestriction { SMALL, UNKNOWN_SIZE };
 | 
|  
 | 
|  #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
 | 
| -  static void RecordCopiedObject(HeapObject* obj) {
 | 
| +  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
 | 
|      bool should_record = false;
 | 
|  #ifdef DEBUG
 | 
|      should_record = FLAG_heap_stats;
 | 
| @@ -1312,10 +1287,10 @@
 | 
|      should_record = should_record || FLAG_log_gc;
 | 
|  #endif
 | 
|      if (should_record) {
 | 
| -      if (Heap::new_space()->Contains(obj)) {
 | 
| -        Heap::new_space()->RecordAllocation(obj);
 | 
| +      if (heap->new_space()->Contains(obj)) {
 | 
| +        heap->new_space()->RecordAllocation(obj);
 | 
|        } else {
 | 
| -        Heap::new_space()->RecordPromotion(obj);
 | 
| +        heap->new_space()->RecordPromotion(obj);
 | 
|        }
 | 
|      }
 | 
|    }
 | 
| @@ -1324,24 +1299,27 @@
 | 
|    // Helper function used by CopyObject to copy a source object to an
 | 
|    // allocated target object and update the forwarding pointer in the source
 | 
|    // object.  Returns the target object.
 | 
| -  INLINE(static HeapObject* MigrateObject(HeapObject* source,
 | 
| +  INLINE(static HeapObject* MigrateObject(Heap* heap,
 | 
| +                                          HeapObject* source,
 | 
|                                            HeapObject* target,
 | 
|                                            int size)) {
 | 
|      // Copy the content of source to target.
 | 
| -    Heap::CopyBlock(target->address(), source->address(), size);
 | 
| +    heap->CopyBlock(target->address(), source->address(), size);
 | 
|  
 | 
|      // Set the forwarding address.
 | 
|      source->set_map_word(MapWord::FromForwardingAddress(target));
 | 
|  
 | 
|  #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
 | 
|      // Update NewSpace stats if necessary.
 | 
| -    RecordCopiedObject(target);
 | 
| +    RecordCopiedObject(heap, target);
 | 
|  #endif
 | 
| -    HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));
 | 
| +    HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
 | 
|  #if defined(ENABLE_LOGGING_AND_PROFILING)
 | 
| -    if (Logger::is_logging() || CpuProfiler::is_profiling()) {
 | 
| +    Isolate* isolate = heap->isolate();
 | 
| +    if (isolate->logger()->is_logging() ||
 | 
| +        isolate->cpu_profiler()->is_profiling()) {
 | 
|        if (target->IsSharedFunctionInfo()) {
 | 
| -        PROFILE(SharedFunctionInfoMoveEvent(
 | 
| +        PROFILE(isolate, SharedFunctionInfoMoveEvent(
 | 
|              source->address(), target->address()));
 | 
|        }
 | 
|      }
 | 
| @@ -1359,36 +1337,37 @@
 | 
|             (object_size <= Page::kMaxHeapObjectSize));
 | 
|      ASSERT(object->Size() == object_size);
 | 
|  
 | 
| -    if (Heap::ShouldBePromoted(object->address(), object_size)) {
 | 
| +    Heap* heap = map->heap();
 | 
| +    if (heap->ShouldBePromoted(object->address(), object_size)) {
 | 
|        MaybeObject* maybe_result;
 | 
|  
 | 
|        if ((size_restriction != SMALL) &&
 | 
|            (object_size > Page::kMaxHeapObjectSize)) {
 | 
| -        maybe_result = Heap::lo_space()->AllocateRawFixedArray(object_size);
 | 
| +        maybe_result = heap->lo_space()->AllocateRawFixedArray(object_size);
 | 
|        } else {
 | 
|          if (object_contents == DATA_OBJECT) {
 | 
| -          maybe_result = Heap::old_data_space()->AllocateRaw(object_size);
 | 
| +          maybe_result = heap->old_data_space()->AllocateRaw(object_size);
 | 
|          } else {
 | 
| -          maybe_result = Heap::old_pointer_space()->AllocateRaw(object_size);
 | 
| +          maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
 | 
|          }
 | 
|        }
 | 
|  
 | 
|        Object* result = NULL;  // Initialization to please compiler.
 | 
|        if (maybe_result->ToObject(&result)) {
 | 
|          HeapObject* target = HeapObject::cast(result);
 | 
| -        *slot = MigrateObject(object, target, object_size);
 | 
| +        *slot = MigrateObject(heap, object, target, object_size);
 | 
|  
 | 
|          if (object_contents == POINTER_OBJECT) {
 | 
| -          promotion_queue.insert(target, object_size);
 | 
| +          heap->promotion_queue()->insert(target, object_size);
 | 
|          }
 | 
|  
 | 
| -        Heap::tracer()->increment_promoted_objects_size(object_size);
 | 
| +        heap->tracer()->increment_promoted_objects_size(object_size);
 | 
|          return;
 | 
|        }
 | 
|      }
 | 
|      Object* result =
 | 
| -        Heap::new_space()->AllocateRaw(object_size)->ToObjectUnchecked();
 | 
| -    *slot = MigrateObject(object, HeapObject::cast(result), object_size);
 | 
| +        heap->new_space()->AllocateRaw(object_size)->ToObjectUnchecked();
 | 
| +    *slot = MigrateObject(heap, object, HeapObject::cast(result), object_size);
 | 
|      return;
 | 
|    }
 | 
|  
 | 
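EvacuateObject above decides, per surviving object, whether to promote it into old space (old pointer space or old data space, with pointer-carrying objects pushed on the promotion queue for later scanning) or to copy it within new space; either way MigrateObject copies the bytes and leaves a forwarding map word behind. A compressed sketch of that policy with invented helpers; the real code also distinguishes large objects and may fail old-space allocation under memory pressure, which is what the fallback models.

#include <vector>

struct ObjStub {
  ObjStub* forwarding = nullptr;
  int size = 0;
  bool has_pointers = false;
};

class EvacuationSketch {
 public:
  ObjStub* Evacuate(ObjStub* source, bool should_be_promoted) {
    ObjStub* target = nullptr;
    if (should_be_promoted) {
      target = TryAllocateInOldSpace(source->size);  // may fail under pressure
      if (target != nullptr && source->has_pointers) {
        promotion_queue_.push_back(target);          // scanned for children later
      }
    }
    if (target == nullptr) {
      target = AllocateInNewSpace(source->size);     // assumed infallible here
    }
    target->size = source->size;                     // "copy the payload"
    target->has_pointers = source->has_pointers;
    source->forwarding = target;                     // leave forwarding address
    return target;
  }

 private:
  ObjStub* TryAllocateInOldSpace(int) { return new ObjStub(); }
  ObjStub* AllocateInNewSpace(int) { return new ObjStub(); }
  std::vector<ObjStub*> promotion_queue_;
};
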
| @@ -1439,13 +1418,14 @@
 | 
|                                                 HeapObject* object) {
 | 
|      ASSERT(IsShortcutCandidate(map->instance_type()));
 | 
|  
 | 
| -    if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
 | 
| +    if (ConsString::cast(object)->unchecked_second() ==
 | 
| +        map->heap()->empty_string()) {
 | 
|        HeapObject* first =
 | 
|            HeapObject::cast(ConsString::cast(object)->unchecked_first());
 | 
|  
 | 
|        *slot = first;
 | 
|  
 | 
| -      if (!Heap::InNewSpace(first)) {
 | 
| +      if (!map->heap()->InNewSpace(first)) {
 | 
|          object->set_map_word(MapWord::FromForwardingAddress(first));
 | 
|          return;
 | 
|        }
 | 
| @@ -1496,7 +1476,7 @@
 | 
|  
 | 
|  
 | 
|  void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
 | 
| -  ASSERT(InFromSpace(object));
 | 
| +  ASSERT(HEAP->InFromSpace(object));
 | 
|    MapWord first_word = object->map_word();
 | 
|    ASSERT(!first_word.IsForwardingAddress());
 | 
|    Map* map = first_word.ToMap();
 | 
| @@ -1504,11 +1484,6 @@
 | 
|  }
 | 
|  
 | 
|  
 | 
| -void Heap::ScavengePointer(HeapObject** p) {
 | 
| -  ScavengeObject(p, *p);
 | 
| -}
 | 
| -
 | 
| -
 | 
|  MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
 | 
|                                        int instance_size) {
 | 
|    Object* result;
 | 
| @@ -1520,9 +1495,8 @@
 | 
|    reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
 | 
|    reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
 | 
|    reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
 | 
| -  reinterpret_cast<Map*>(result)->
 | 
| -      set_visitor_id(
 | 
| -          StaticVisitorBase::GetVisitorId(instance_type, instance_size));
 | 
| +  reinterpret_cast<Map*>(result)->set_visitor_id(
 | 
| +        StaticVisitorBase::GetVisitorId(instance_type, instance_size));
 | 
|    reinterpret_cast<Map*>(result)->set_inobject_properties(0);
 | 
|    reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
 | 
|    reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
 | 
| @@ -1631,6 +1605,7 @@
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
|    set_null_value(obj);
 | 
| +  Oddball::cast(obj)->set_kind(Oddball::kNull);
 | 
|  
 | 
|    // Allocate the empty descriptor array.
 | 
|    { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
 | 
| @@ -1822,7 +1797,7 @@
 | 
|    }
 | 
|    set_message_object_map(Map::cast(obj));
 | 
|  
 | 
| -  ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
 | 
| +  ASSERT(!InNewSpace(empty_fixed_array()));
 | 
|    return true;
 | 
|  }
 | 
|  
 | 
| @@ -1875,12 +1850,13 @@
 | 
|  
 | 
|  
 | 
|  MaybeObject* Heap::CreateOddball(const char* to_string,
 | 
| -                                 Object* to_number) {
 | 
| +                                 Object* to_number,
 | 
| +                                 byte kind) {
 | 
|    Object* result;
 | 
|    { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_DATA_SPACE);
 | 
|      if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
|    }
 | 
| -  return Oddball::cast(result)->Initialize(to_string, to_number);
 | 
| +  return Oddball::cast(result)->Initialize(to_string, to_number, kind);
 | 
|  }
 | 
|  
 | 
|  
 | 
| @@ -1892,7 +1868,7 @@
 | 
|    }
 | 
|    set_neander_map(Map::cast(obj));
 | 
|  
 | 
| -  { MaybeObject* maybe_obj = Heap::AllocateJSObjectFromMap(neander_map());
 | 
| +  { MaybeObject* maybe_obj = AllocateJSObjectFromMap(neander_map());
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
|    Object* elements;
 | 
| @@ -1957,6 +1933,7 @@
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
|    set_undefined_value(obj);
 | 
| +  Oddball::cast(obj)->set_kind(Oddball::kUndefined);
 | 
|    ASSERT(!InNewSpace(undefined_value()));
 | 
|  
 | 
|    // Allocate initial symbol table.
 | 
| @@ -1976,39 +1953,50 @@
 | 
|  
 | 
|    // Allocate the null_value
 | 
|    { MaybeObject* maybe_obj =
 | 
| -        Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0));
 | 
| +        Oddball::cast(null_value())->Initialize("null",
 | 
| +                                                Smi::FromInt(0),
 | 
| +                                                Oddball::kNull);
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
|  
 | 
| -  { MaybeObject* maybe_obj = CreateOddball("true", Smi::FromInt(1));
 | 
| +  { MaybeObject* maybe_obj = CreateOddball("true",
 | 
| +                                           Smi::FromInt(1),
 | 
| +                                           Oddball::kTrue);
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
|    set_true_value(obj);
 | 
|  
 | 
| -  { MaybeObject* maybe_obj = CreateOddball("false", Smi::FromInt(0));
 | 
| +  { MaybeObject* maybe_obj = CreateOddball("false",
 | 
| +                                           Smi::FromInt(0),
 | 
| +                                           Oddball::kFalse);
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
|    set_false_value(obj);
 | 
|  
 | 
| -  { MaybeObject* maybe_obj = CreateOddball("hole", Smi::FromInt(-1));
 | 
| +  { MaybeObject* maybe_obj = CreateOddball("hole",
 | 
| +                                           Smi::FromInt(-1),
 | 
| +                                           Oddball::kTheHole);
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
|    set_the_hole_value(obj);
 | 
|  
 | 
|    { MaybeObject* maybe_obj = CreateOddball("arguments_marker",
 | 
| -                                           Smi::FromInt(-4));
 | 
| +                                           Smi::FromInt(-4),
 | 
| +                                           Oddball::kArgumentMarker);
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
|    set_arguments_marker(obj);
 | 
|  
 | 
| -  { MaybeObject* maybe_obj =
 | 
| -        CreateOddball("no_interceptor_result_sentinel", Smi::FromInt(-2));
 | 
| +  { MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel",
 | 
| +                                           Smi::FromInt(-2),
 | 
| +                                           Oddball::kOther);
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
|    set_no_interceptor_result_sentinel(obj);
 | 
|  
 | 
| -  { MaybeObject* maybe_obj =
 | 
| -        CreateOddball("termination_exception", Smi::FromInt(-3));
 | 
| +  { MaybeObject* maybe_obj = CreateOddball("termination_exception",
 | 
| +                                           Smi::FromInt(-3),
 | 
| +                                           Oddball::kOther);
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
|    set_termination_exception(obj);
 | 
| @@ -2070,7 +2058,8 @@
 | 
|    { MaybeObject* maybe_obj = StringDictionary::Allocate(Runtime::kNumFunctions);
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
| -  { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(obj);
 | 
| +  { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(this,
 | 
| +                                                                       obj);
 | 
|      if (!maybe_obj->ToObject(&obj)) return false;
 | 
|    }
 | 
|    set_intrinsic_function_names(StringDictionary::cast(obj));
 | 
| @@ -2090,20 +2079,20 @@
 | 
|    }
 | 
|    set_natives_source_cache(FixedArray::cast(obj));
 | 
|  
 | 
| -  // Handling of script id generation is in Factory::NewScript.
 | 
| +  // Handling of script id generation is in FACTORY->NewScript.
 | 
|    set_last_script_id(undefined_value());
 | 
|  
 | 
|    // Initialize keyed lookup cache.
 | 
| -  KeyedLookupCache::Clear();
 | 
| +  isolate_->keyed_lookup_cache()->Clear();
 | 
|  
 | 
|    // Initialize context slot cache.
 | 
| -  ContextSlotCache::Clear();
 | 
| +  isolate_->context_slot_cache()->Clear();
 | 
|  
 | 
|    // Initialize descriptor cache.
 | 
| -  DescriptorLookupCache::Clear();
 | 
| +  isolate_->descriptor_lookup_cache()->Clear();
 | 
|  
 | 
|    // Initialize compilation cache.
 | 
| -  CompilationCache::Clear();
 | 
| +  isolate_->compilation_cache()->Clear();
 | 
|  
 | 
|    return true;
 | 
|  }
 | 
| @@ -2127,7 +2116,7 @@
 | 
|    // Flush the number to string cache.
 | 
|    int len = number_string_cache()->length();
 | 
|    for (int i = 0; i < len; i++) {
 | 
| -    number_string_cache()->set_undefined(i);
 | 
| +    number_string_cache()->set_undefined(this, i);
 | 
|    }
 | 
|  }
 | 
|  
 | 
| @@ -2179,7 +2168,7 @@
 | 
|  
 | 
|  MaybeObject* Heap::NumberToString(Object* number,
 | 
|                                    bool check_number_string_cache) {
 | 
| -  Counters::number_to_string_runtime.Increment();
 | 
| +  isolate_->counters()->number_to_string_runtime()->Increment();
 | 
|    if (check_number_string_cache) {
 | 
|      Object* cached = GetNumberStringCache(number);
 | 
|      if (cached != undefined_value()) {
 | 
| @@ -2282,10 +2271,11 @@
 | 
|  
 | 
|    SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
 | 
|    share->set_name(name);
 | 
| -  Code* illegal = Builtins::builtin(Builtins::Illegal);
 | 
| +  Code* illegal = isolate_->builtins()->builtin(Builtins::Illegal);
 | 
|    share->set_code(illegal);
 | 
|    share->set_scope_info(SerializedScopeInfo::Empty());
 | 
| -  Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric);
 | 
| +  Code* construct_stub = isolate_->builtins()->builtin(
 | 
| +      Builtins::JSConstructStubGeneric);
 | 
|    share->set_construct_stub(construct_stub);
 | 
|    share->set_expected_nof_properties(0);
 | 
|    share->set_length(0);
 | 
| @@ -2343,20 +2333,21 @@
 | 
|  
 | 
|  
 | 
|  MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
 | 
| +    Heap* heap,
 | 
|      uint32_t c1,
 | 
|      uint32_t c2) {
 | 
|    String* symbol;
 | 
|    // Numeric strings have a different hash algorithm not known by
 | 
|    // LookupTwoCharsSymbolIfExists, so we skip this step for such strings.
 | 
|    if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) &&
 | 
| -      Heap::symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
 | 
| +      heap->symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
 | 
|      return symbol;
 | 
|    // Now we know the length is 2, we might as well make use of that fact
 | 
|    // when building the new string.
 | 
|    } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) {  // We can do this
 | 
|      ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1));  // because of this.
 | 
|      Object* result;
 | 
| -    { MaybeObject* maybe_result = Heap::AllocateRawAsciiString(2);
 | 
| +    { MaybeObject* maybe_result = heap->AllocateRawAsciiString(2);
 | 
|        if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
|      }
 | 
|      char* dest = SeqAsciiString::cast(result)->GetChars();
 | 
| @@ -2365,7 +2356,7 @@
 | 
|      return result;
 | 
|    } else {
 | 
|      Object* result;
 | 
| -    { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(2);
 | 
| +    { MaybeObject* maybe_result = heap->AllocateRawTwoByteString(2);
 | 
|        if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
|      }
 | 
|      uc16* dest = SeqTwoByteString::cast(result)->GetChars();
 | 
| @@ -2395,7 +2386,7 @@
 | 
|    if (length == 2) {
 | 
|      unsigned c1 = first->Get(0);
 | 
|      unsigned c2 = second->Get(0);
 | 
| -    return MakeOrFindTwoCharacterString(c1, c2);
 | 
| +    return MakeOrFindTwoCharacterString(this, c1, c2);
 | 
|    }
 | 
|  
 | 
|    bool first_is_ascii = first->IsAsciiRepresentation();
 | 
| @@ -2405,7 +2396,7 @@
 | 
|    // Make sure that an out of memory exception is thrown if the length
 | 
|    // of the new cons string is too large.
 | 
|    if (length > String::kMaxLength || length < 0) {
 | 
| -    Top::context()->mark_out_of_memory();
 | 
| +    isolate()->context()->mark_out_of_memory();
 | 
|      return Failure::OutOfMemoryException();
 | 
|    }
 | 
|  
 | 
| @@ -2417,7 +2408,7 @@
 | 
|      is_ascii_data_in_two_byte_string =
 | 
|          first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars();
 | 
|      if (is_ascii_data_in_two_byte_string) {
 | 
| -      Counters::string_add_runtime_ext_to_ascii.Increment();
 | 
| +      isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
 | 
|      }
 | 
|    }
 | 
|  
 | 
| @@ -2458,6 +2449,7 @@
 | 
|          char* dest = SeqAsciiString::cast(result)->GetChars();
 | 
|          String::WriteToFlat(first, dest, 0, first_length);
 | 
|          String::WriteToFlat(second, dest + first_length, 0, second_length);
 | 
| +        isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
 | 
|          return result;
 | 
|        }
 | 
|  
 | 
| @@ -2499,15 +2491,14 @@
 | 
|    int length = end - start;
 | 
|  
 | 
|    if (length == 1) {
 | 
| -    return Heap::LookupSingleCharacterStringFromCode(
 | 
| -        buffer->Get(start));
 | 
| +    return LookupSingleCharacterStringFromCode(buffer->Get(start));
 | 
|    } else if (length == 2) {
 | 
|      // Optimization for 2-byte strings often used as keys in a decompression
 | 
|      // dictionary.  Check whether we already have the string in the symbol
 | 
|      // table to prevent creation of many unneccesary strings.
 | 
|      unsigned c1 = buffer->Get(start);
 | 
|      unsigned c2 = buffer->Get(start + 1);
 | 
| -    return MakeOrFindTwoCharacterString(c1, c2);
 | 
| +    return MakeOrFindTwoCharacterString(this, c1, c2);
 | 
|    }
 | 
|  
 | 
|    // Make an attempt to flatten the buffer to reduce access time.
 | 
| @@ -2539,7 +2530,7 @@
 | 
|      ExternalAsciiString::Resource* resource) {
 | 
|    size_t length = resource->length();
 | 
|    if (length > static_cast<size_t>(String::kMaxLength)) {
 | 
| -    Top::context()->mark_out_of_memory();
 | 
| +    isolate()->context()->mark_out_of_memory();
 | 
|      return Failure::OutOfMemoryException();
 | 
|    }
 | 
|  
 | 
| @@ -2562,7 +2553,7 @@
 | 
|      ExternalTwoByteString::Resource* resource) {
 | 
|    size_t length = resource->length();
 | 
|    if (length > static_cast<size_t>(String::kMaxLength)) {
 | 
| -    Top::context()->mark_out_of_memory();
 | 
| +    isolate()->context()->mark_out_of_memory();
 | 
|      return Failure::OutOfMemoryException();
 | 
|    }
 | 
|  
 | 
| @@ -2572,7 +2563,7 @@
 | 
|    bool is_ascii = length <= kAsciiCheckLengthLimit &&
 | 
|        String::IsAscii(resource->data(), static_cast<int>(length));
 | 
|    Map* map = is_ascii ?
 | 
| -      Heap::external_string_with_ascii_data_map() : Heap::external_string_map();
 | 
| +      external_string_with_ascii_data_map() : external_string_map();
 | 
|    Object* result;
 | 
|    { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
 | 
|      if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
| @@ -2589,8 +2580,8 @@
 | 
|  
 | 
|  MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
 | 
|    if (code <= String::kMaxAsciiCharCode) {
 | 
| -    Object* value = Heap::single_character_string_cache()->get(code);
 | 
| -    if (value != Heap::undefined_value()) return value;
 | 
| +    Object* value = single_character_string_cache()->get(code);
 | 
| +    if (value != undefined_value()) return value;
 | 
|  
 | 
|      char buffer[1];
 | 
|      buffer[0] = static_cast<char>(code);
 | 
| @@ -2598,12 +2589,12 @@
 | 
|      MaybeObject* maybe_result = LookupSymbol(Vector<const char>(buffer, 1));
 | 
|  
 | 
|      if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
| -    Heap::single_character_string_cache()->set(code, result);
 | 
| +    single_character_string_cache()->set(code, result);
 | 
|      return result;
 | 
|    }
 | 
|  
 | 
|    Object* result;
 | 
| -  { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(1);
 | 
| +  { MaybeObject* maybe_result = AllocateRawTwoByteString(1);
 | 
|      if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
|    }
 | 
|    String* answer = String::cast(result);
 | 
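LookupSingleCharacterStringFromCode is rewritten against instance accessors, but the caching idea is untouched: ASCII codes are served from a lazily filled single_character_string_cache, and anything larger gets a fresh two-byte string. A rough equivalent with standard containers; std::string and std::shared_ptr are stand-ins for the V8 string and handle types, not a claim about the real representation.

#include <array>
#include <cstdint>
#include <memory>
#include <string>

class SingleCharCache {
 public:
  std::shared_ptr<std::string> Lookup(uint16_t code) {
    if (code <= kMaxAsciiChar) {
      if (!cache_[code]) {  // miss: fill the slot once, reuse it afterwards
        cache_[code] =
            std::make_shared<std::string>(1, static_cast<char>(code));
      }
      return cache_[code];
    }
    // Non-ASCII code: build a two-byte (UTF-16-style) string on demand.
    std::string two_byte(2, '\0');
    two_byte[0] = static_cast<char>(code & 0xff);
    two_byte[1] = static_cast<char>(code >> 8);
    return std::make_shared<std::string>(std::move(two_byte));
  }

 private:
  static const uint16_t kMaxAsciiChar = 127;
  std::array<std::shared_ptr<std::string>, kMaxAsciiChar + 1> cache_;
};
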
| @@ -2717,7 +2708,8 @@
 | 
|    // Initialize the object
 | 
|    HeapObject::cast(result)->set_map(code_map());
 | 
|    Code* code = Code::cast(result);
 | 
| -  ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
 | 
| +  ASSERT(!isolate_->code_range()->exists() ||
 | 
| +      isolate_->code_range()->contains(code->address()));
 | 
|    code->set_instruction_size(desc.instr_size);
 | 
|    code->set_relocation_info(ByteArray::cast(reloc_info));
 | 
|    code->set_flags(flags);
 | 
| @@ -2763,7 +2755,8 @@
 | 
|    CopyBlock(new_addr, old_addr, obj_size);
 | 
|    // Relocate the copy.
 | 
|    Code* new_code = Code::cast(result);
 | 
| -  ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
 | 
| +  ASSERT(!isolate_->code_range()->exists() ||
 | 
| +      isolate_->code_range()->contains(code->address()));
 | 
|    new_code->Relocate(new_addr - old_addr);
 | 
|    return new_code;
 | 
|  }
 | 
| @@ -2812,7 +2805,8 @@
 | 
|    memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());
 | 
|  
 | 
|    // Relocate the copy.
 | 
| -  ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
 | 
| +  ASSERT(!isolate_->code_range()->exists() ||
 | 
| +      isolate_->code_range()->contains(code->address()));
 | 
|    new_code->Relocate(new_addr - old_addr);
 | 
|  
 | 
|  #ifdef DEBUG
 | 
| @@ -2836,7 +2830,7 @@
 | 
|    }
 | 
|    HeapObject::cast(result)->set_map(map);
 | 
|  #ifdef ENABLE_LOGGING_AND_PROFILING
 | 
| -  ProducerHeapProfile::RecordJSObjectAllocation(result);
 | 
| +  isolate_->producer_heap_profile()->RecordJSObjectAllocation(result);
 | 
|  #endif
 | 
|    return result;
 | 
|  }
 | 
| @@ -2904,10 +2898,12 @@
 | 
|                              JSFunction::cast(callee)->shared()->strict_mode();
 | 
|    if (strict_mode_callee) {
 | 
|      boilerplate =
 | 
| -        Top::context()->global_context()->strict_mode_arguments_boilerplate();
 | 
| +        isolate()->context()->global_context()->
 | 
| +            strict_mode_arguments_boilerplate();
 | 
|      arguments_object_size = kArgumentsObjectSizeStrict;
 | 
|    } else {
 | 
| -    boilerplate = Top::context()->global_context()->arguments_boilerplate();
 | 
| +    boilerplate =
 | 
| +        isolate()->context()->global_context()->arguments_boilerplate();
 | 
|      arguments_object_size = kArgumentsObjectSize;
 | 
|    }
 | 
|  
 | 
| @@ -2974,8 +2970,7 @@
 | 
|    int instance_size = fun->shared()->CalculateInstanceSize();
 | 
|    int in_object_properties = fun->shared()->CalculateInObjectProperties();
 | 
|    Object* map_obj;
 | 
| -  { MaybeObject* maybe_map_obj =
 | 
| -        Heap::AllocateMap(JS_OBJECT_TYPE, instance_size);
 | 
| +  { MaybeObject* maybe_map_obj = AllocateMap(JS_OBJECT_TYPE, instance_size);
 | 
|      if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;
 | 
|    }
 | 
|  
 | 
| @@ -3171,7 +3166,7 @@
 | 
|      PropertyDetails d =
 | 
|          PropertyDetails(details.attributes(), CALLBACKS, details.index());
 | 
|      Object* value = descs->GetCallbacksObject(i);
 | 
| -    { MaybeObject* maybe_value = Heap::AllocateJSGlobalPropertyCell(value);
 | 
| +    { MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
 | 
|        if (!maybe_value->ToObject(&value)) return maybe_value;
 | 
|      }
 | 
|  
 | 
| @@ -3197,7 +3192,7 @@
 | 
|  
 | 
|    // Setup the global object as a normalized object.
 | 
|    global->set_map(new_map);
 | 
| -  global->map()->set_instance_descriptors(Heap::empty_descriptor_array());
 | 
| +  global->map()->set_instance_descriptors(empty_descriptor_array());
 | 
|    global->set_properties(dictionary);
 | 
|  
 | 
|    // Make sure result is a global object with properties in dictionary.
 | 
| @@ -3236,7 +3231,7 @@
 | 
|      { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
 | 
|        if (!maybe_clone->ToObject(&clone)) return maybe_clone;
 | 
|      }
 | 
| -    ASSERT(Heap::InNewSpace(clone));
 | 
| +    ASSERT(InNewSpace(clone));
 | 
|      // Since we know the clone is allocated in new space, we can copy
 | 
|      // the contents without worrying about updating the write barrier.
 | 
|      CopyBlock(HeapObject::cast(clone)->address(),
 | 
| @@ -3266,7 +3261,7 @@
 | 
|    }
 | 
|    // Return the new clone.
 | 
|  #ifdef ENABLE_LOGGING_AND_PROFILING
 | 
| -  ProducerHeapProfile::RecordJSObjectAllocation(clone);
 | 
| +  isolate_->producer_heap_profile()->RecordJSObjectAllocation(clone);
 | 
|  #endif
 | 
|    return clone;
 | 
|  }
 | 
| @@ -3322,7 +3317,7 @@
 | 
|    // Count the number of characters in the UTF-8 string and check if
 | 
|    // it is an ASCII string.
 | 
|    Access<ScannerConstants::Utf8Decoder>
 | 
| -      decoder(ScannerConstants::utf8_decoder());
 | 
| +      decoder(isolate_->scanner_constants()->utf8_decoder());
 | 
|    decoder->Reset(string.start(), string.length());
 | 
|    int chars = 0;
 | 
|    while (decoder->has_more()) {
 | 
| @@ -3375,12 +3370,24 @@
 | 
|  
 | 
|    // Find the corresponding symbol map for strings.
 | 
|    Map* map = string->map();
 | 
| -  if (map == ascii_string_map()) return ascii_symbol_map();
 | 
| -  if (map == string_map()) return symbol_map();
 | 
| -  if (map == cons_string_map()) return cons_symbol_map();
 | 
| -  if (map == cons_ascii_string_map()) return cons_ascii_symbol_map();
 | 
| -  if (map == external_string_map()) return external_symbol_map();
 | 
| -  if (map == external_ascii_string_map()) return external_ascii_symbol_map();
 | 
| +  if (map == ascii_string_map()) {
 | 
| +    return ascii_symbol_map();
 | 
| +  }
 | 
| +  if (map == string_map()) {
 | 
| +    return symbol_map();
 | 
| +  }
 | 
| +  if (map == cons_string_map()) {
 | 
| +    return cons_symbol_map();
 | 
| +  }
 | 
| +  if (map == cons_ascii_string_map()) {
 | 
| +    return cons_ascii_symbol_map();
 | 
| +  }
 | 
| +  if (map == external_string_map()) {
 | 
| +    return external_symbol_map();
 | 
| +  }
 | 
| +  if (map == external_ascii_string_map()) {
 | 
| +    return external_ascii_symbol_map();
 | 
| +  }
 | 
|    if (map == external_string_with_ascii_data_map()) {
 | 
|      return external_symbol_with_ascii_data_map();
 | 
|    }
 | 
| @@ -3554,7 +3561,7 @@
 | 
|    { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
 | 
|      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
 | 
|    }
 | 
| -  if (Heap::InNewSpace(obj)) {
 | 
| +  if (InNewSpace(obj)) {
 | 
|      HeapObject* dst = HeapObject::cast(obj);
 | 
|      dst->set_map(map);
 | 
|      CopyBlock(dst->address() + kPointerSize,
 | 
| @@ -3586,7 +3593,7 @@
 | 
|    array->set_map(fixed_array_map());
 | 
|    array->set_length(length);
 | 
|    // Initialize body.
 | 
| -  ASSERT(!Heap::InNewSpace(undefined_value()));
 | 
| +  ASSERT(!InNewSpace(undefined_value()));
 | 
|    MemsetPointer(array->data_start(), undefined_value(), length);
 | 
|    return result;
 | 
|  }
 | 
| @@ -3617,20 +3624,21 @@
 | 
|  
 | 
|  
 | 
|  MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
 | 
| +    Heap* heap,
 | 
|      int length,
 | 
|      PretenureFlag pretenure,
 | 
|      Object* filler) {
 | 
|    ASSERT(length >= 0);
 | 
| -  ASSERT(Heap::empty_fixed_array()->IsFixedArray());
 | 
| -  if (length == 0) return Heap::empty_fixed_array();
 | 
| +  ASSERT(heap->empty_fixed_array()->IsFixedArray());
 | 
| +  if (length == 0) return heap->empty_fixed_array();
 | 
|  
 | 
| -  ASSERT(!Heap::InNewSpace(filler));
 | 
| +  ASSERT(!heap->InNewSpace(filler));
 | 
|    Object* result;
 | 
| -  { MaybeObject* maybe_result = Heap::AllocateRawFixedArray(length, pretenure);
 | 
| +  { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
 | 
|      if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
|    }
 | 
|  
 | 
| -  HeapObject::cast(result)->set_map(Heap::fixed_array_map());
 | 
| +  HeapObject::cast(result)->set_map(heap->fixed_array_map());
 | 
|    FixedArray* array = FixedArray::cast(result);
 | 
|    array->set_length(length);
 | 
|    MemsetPointer(array->data_start(), filler, length);
 | 
| @@ -3639,13 +3647,19 @@
 | 
|  
 | 
|  
 | 
|  MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
 | 
| -  return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
 | 
| +  return AllocateFixedArrayWithFiller(this,
 | 
| +                                      length,
 | 
| +                                      pretenure,
 | 
| +                                      undefined_value());
 | 
|  }
 | 
|  
 | 
|  
 | 
|  MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
 | 
|                                                 PretenureFlag pretenure) {
 | 
| -  return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value());
 | 
| +  return AllocateFixedArrayWithFiller(this,
 | 
| +                                      length,
 | 
| +                                      pretenure,
 | 
| +                                      the_hole_value());
 | 
|  }
 | 
|  
 | 
|  
 | 
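| Sketch (simplified types, not V8 code): AllocateFixedArrayWithFiller is a
| file-static helper, so it can no longer reach Heap state through static
| members; both public wrappers now pass the heap and the filler value
| explicitly, as the hunk above shows. A compact stand-alone model of the
| same shape:
| 
|   #include <cstdio>
|   #include <vector>
| 
|   class Heap {
|    public:
|     std::vector<int> AllocateRawFixedArray(int length) {
|       return std::vector<int>(length);
|     }
|     int undefined_value() const { return -1; }
|     int the_hole_value() const { return -2; }
| 
|     std::vector<int> AllocateFixedArray(int length) {
|       return AllocateFixedArrayWithFiller(this, length, undefined_value());
|     }
|     std::vector<int> AllocateFixedArrayWithHoles(int length) {
|       return AllocateFixedArrayWithFiller(this, length, the_hole_value());
|     }
| 
|    private:
|     // Stands in for the file-static helper: it receives the heap as an
|     // explicit argument instead of consulting globals.
|     static std::vector<int> AllocateFixedArrayWithFiller(Heap* heap,
|                                                          int length,
|                                                          int filler) {
|       std::vector<int> array = heap->AllocateRawFixedArray(length);
|       for (int& slot : array) slot = filler;
|       return array;
|     }
|   };
| 
|   int main() {
|     Heap heap;
|     std::printf("%d\n", heap.AllocateFixedArray(3)[0]);  // prints -1
|     return 0;
|   }
 | 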
| @@ -3665,7 +3679,7 @@
 | 
|  
 | 
|  MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
 | 
|    Object* result;
 | 
| -  { MaybeObject* maybe_result = Heap::AllocateFixedArray(length, pretenure);
 | 
| +  { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
 | 
|      if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
|    }
 | 
|    reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map());
 | 
| @@ -3677,7 +3691,7 @@
 | 
|  MaybeObject* Heap::AllocateGlobalContext() {
 | 
|    Object* result;
 | 
|    { MaybeObject* maybe_result =
 | 
| -        Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
 | 
| +        AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
 | 
|      if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
|    }
 | 
|    Context* context = reinterpret_cast<Context*>(result);
 | 
| @@ -3691,7 +3705,7 @@
 | 
|  MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
 | 
|    ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
 | 
|    Object* result;
 | 
| -  { MaybeObject* maybe_result = Heap::AllocateFixedArray(length);
 | 
| +  { MaybeObject* maybe_result = AllocateFixedArray(length);
 | 
|      if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
|    }
 | 
|    Context* context = reinterpret_cast<Context*>(result);
 | 
| @@ -3712,12 +3726,12 @@
 | 
|                                         JSObject* extension,
 | 
|                                         bool is_catch_context) {
 | 
|    Object* result;
 | 
| -  { MaybeObject* maybe_result =
 | 
| -        Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
 | 
| +  { MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
 | 
|      if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
|    }
 | 
|    Context* context = reinterpret_cast<Context*>(result);
 | 
| -  context->set_map(is_catch_context ? catch_context_map() : context_map());
 | 
| +  context->set_map(is_catch_context ? catch_context_map() :
 | 
| +      context_map());
 | 
|    context->set_closure(previous->closure());
 | 
|    context->set_fcontext(previous->fcontext());
 | 
|    context->set_previous(previous);
 | 
| @@ -3733,7 +3747,8 @@
 | 
|  MaybeObject* Heap::AllocateStruct(InstanceType type) {
 | 
|    Map* map;
 | 
|    switch (type) {
 | 
| -#define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break;
 | 
| +#define MAKE_CASE(NAME, Name, name) \
 | 
| +    case NAME##_TYPE: map = name##_map(); break;
 | 
|  STRUCT_LIST(MAKE_CASE)
 | 
|  #undef MAKE_CASE
 | 
|      default:
 | 
| @@ -3744,7 +3759,7 @@
 | 
|    AllocationSpace space =
 | 
|        (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
 | 
|    Object* result;
 | 
| -  { MaybeObject* maybe_result = Heap::Allocate(map, space);
 | 
| +  { MaybeObject* maybe_result = Allocate(map, space);
 | 
|      if (!maybe_result->ToObject(&result)) return maybe_result;
 | 
|    }
 | 
|    Struct::cast(result)->InitializeBody(size);
 | 
| @@ -3758,9 +3773,12 @@
 | 
|    static const int kIdlesBeforeMarkCompact = 8;
 | 
|    static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
 | 
|    static const unsigned int kGCsBetweenCleanup = 4;
 | 
| -  static int number_idle_notifications = 0;
 | 
| -  static unsigned int last_gc_count = gc_count_;
 | 
|  
 | 
| +  if (!last_idle_notification_gc_count_init_) {
 | 
| +    last_idle_notification_gc_count_ = gc_count_;
 | 
| +    last_idle_notification_gc_count_init_ = true;
 | 
| +  }
 | 
| +
 | 
|    bool uncommit = true;
 | 
|    bool finished = false;
 | 
|  
 | 
| @@ -3768,56 +3786,56 @@
 | 
|    // GCs have taken place. This allows another round of cleanup based
 | 
|    // on idle notifications if enough work has been carried out to
 | 
|    // provoke a number of garbage collections.
 | 
| -  if (gc_count_ - last_gc_count < kGCsBetweenCleanup) {
 | 
| -    number_idle_notifications =
 | 
| -        Min(number_idle_notifications + 1, kMaxIdleCount);
 | 
| +  if (gc_count_ - last_idle_notification_gc_count_ < kGCsBetweenCleanup) {
 | 
| +    number_idle_notifications_ =
 | 
| +        Min(number_idle_notifications_ + 1, kMaxIdleCount);
 | 
|    } else {
 | 
| -    number_idle_notifications = 0;
 | 
| -    last_gc_count = gc_count_;
 | 
| +    number_idle_notifications_ = 0;
 | 
| +    last_idle_notification_gc_count_ = gc_count_;
 | 
|    }
 | 
|  
 | 
| -  if (number_idle_notifications == kIdlesBeforeScavenge) {
 | 
| +  if (number_idle_notifications_ == kIdlesBeforeScavenge) {
 | 
|      if (contexts_disposed_ > 0) {
 | 
| -      HistogramTimerScope scope(&Counters::gc_context);
 | 
| +      HistogramTimerScope scope(isolate_->counters()->gc_context());
 | 
|        CollectAllGarbage(false);
 | 
|      } else {
 | 
|        CollectGarbage(NEW_SPACE);
 | 
|      }
 | 
|      new_space_.Shrink();
 | 
| -    last_gc_count = gc_count_;
 | 
| -  } else if (number_idle_notifications == kIdlesBeforeMarkSweep) {
 | 
| +    last_idle_notification_gc_count_ = gc_count_;
 | 
| +  } else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
 | 
|      // Before doing the mark-sweep collections we clear the
 | 
|      // compilation cache to avoid hanging on to source code and
 | 
|      // generated code for cached functions.
 | 
| -    CompilationCache::Clear();
 | 
| +    isolate_->compilation_cache()->Clear();
 | 
|  
 | 
|      CollectAllGarbage(false);
 | 
|      new_space_.Shrink();
 | 
| -    last_gc_count = gc_count_;
 | 
| +    last_idle_notification_gc_count_ = gc_count_;
 | 
|  
 | 
| -  } else if (number_idle_notifications == kIdlesBeforeMarkCompact) {
 | 
| +  } else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
 | 
|      CollectAllGarbage(true);
 | 
|      new_space_.Shrink();
 | 
| -    last_gc_count = gc_count_;
 | 
| +    last_idle_notification_gc_count_ = gc_count_;
 | 
| +    number_idle_notifications_ = 0;
 | 
|      finished = true;
 | 
| -
 | 
|    } else if (contexts_disposed_ > 0) {
 | 
|      if (FLAG_expose_gc) {
 | 
|        contexts_disposed_ = 0;
 | 
|      } else {
 | 
| -      HistogramTimerScope scope(&Counters::gc_context);
 | 
| +      HistogramTimerScope scope(isolate_->counters()->gc_context());
 | 
|        CollectAllGarbage(false);
 | 
| -      last_gc_count = gc_count_;
 | 
| +      last_idle_notification_gc_count_ = gc_count_;
 | 
|      }
 | 
|      // If this is the first idle notification, we reset the
 | 
|      // notification count to avoid letting idle notifications for
 | 
|      // context disposal garbage collections start a potentially too
 | 
|      // aggressive idle GC cycle.
 | 
| -    if (number_idle_notifications <= 1) {
 | 
| -      number_idle_notifications = 0;
 | 
| +    if (number_idle_notifications_ <= 1) {
 | 
| +      number_idle_notifications_ = 0;
 | 
|        uncommit = false;
 | 
|      }
 | 
| -  } else if (number_idle_notifications > kIdlesBeforeMarkCompact) {
 | 
| +  } else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
 | 
|      // If we have received more than kIdlesBeforeMarkCompact idle
 | 
|      // notifications we do not perform any cleanup because we don't
 | 
|      // expect to gain much by doing so.
 | 
| @@ -3827,7 +3845,7 @@
 | 
|    // Make sure that we have no pending context disposals and
 | 
|    // conditionally uncommit from space.
 | 
|    ASSERT(contexts_disposed_ == 0);
 | 
| -  if (uncommit) Heap::UncommitFromSpace();
 | 
| +  if (uncommit) UncommitFromSpace();
 | 
|    return finished;
 | 
|  }
 | 
|  
 | 
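| Sketch (toy example, not V8 code): the function-local statics
| number_idle_notifications and last_gc_count in IdleNotification become Heap
| fields, with last_idle_notification_gc_count_init_ standing in for the
| static's lazy initialization, because a local static is a single counter
| shared by every heap in the process. A small illustration of the difference:
| 
|   #include <cstdio>
| 
|   class Counter {
|    public:
|     int NextShared() {
|       static int shared_count = 0;   // one copy for every Counter instance
|       return ++shared_count;
|     }
|     int NextPerInstance() { return ++per_instance_count_; }
|    private:
|     int per_instance_count_ = 0;
|   };
| 
|   int main() {
|     Counter a, b;
|     a.NextShared();
|     std::printf("b's shared count starts at %d\n", b.NextShared());    // 2
|     a.NextPerInstance();
|     std::printf("b's own count starts at %d\n", b.NextPerInstance());  // 1
|     return 0;
|   }
 | 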
| @@ -3836,7 +3854,7 @@
 | 
|  
 | 
|  void Heap::Print() {
 | 
|    if (!HasBeenSetup()) return;
 | 
| -  Top::PrintStack();
 | 
| +  isolate()->PrintStack();
 | 
|    AllSpaces spaces;
 | 
|    for (Space* space = spaces.next(); space != NULL; space = spaces.next())
 | 
|      space->Print();
 | 
| @@ -3869,11 +3887,11 @@
 | 
|  
 | 
|    PrintF("\n");
 | 
|    PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
 | 
| -  GlobalHandles::PrintStats();
 | 
| +  isolate_->global_handles()->PrintStats();
 | 
|    PrintF("\n");
 | 
|  
 | 
|    PrintF("Heap statistics : ");
 | 
| -  MemoryAllocator::ReportStatistics();
 | 
| +  isolate_->memory_allocator()->ReportStatistics();
 | 
|    PrintF("To space : ");
 | 
|    new_space_.ReportStatistics();
 | 
|    PrintF("Old pointer space : ");
 | 
| @@ -3956,7 +3974,7 @@
 | 
|      Address start = page->ObjectAreaStart();
 | 
|      Address end = page->AllocationWatermark();
 | 
|  
 | 
| -    Heap::IterateDirtyRegions(Page::kAllRegionsDirtyMarks,
 | 
| +    HEAP->IterateDirtyRegions(Page::kAllRegionsDirtyMarks,
 | 
|                                start,
 | 
|                                end,
 | 
|                                visit_dirty_region,
 | 
| @@ -3977,7 +3995,7 @@
 | 
|          // When we are not in GC the Heap::InNewSpace() predicate
 | 
|          // checks that pointers which satisfy predicate point into
 | 
|          // the active semispace.
 | 
| -        Heap::InNewSpace(*slot);
 | 
| +        HEAP->InNewSpace(*slot);
 | 
|          slot_address += kPointerSize;
 | 
|        }
 | 
|      }
 | 
| @@ -4098,7 +4116,8 @@
 | 
|  #endif  // DEBUG
 | 
|  
 | 
|  
 | 
| -bool Heap::IteratePointersInDirtyRegion(Address start,
 | 
| +bool Heap::IteratePointersInDirtyRegion(Heap* heap,
 | 
| +                                        Address start,
 | 
|                                          Address end,
 | 
|                                          ObjectSlotCallback copy_object_func) {
 | 
|    Address slot_address = start;
 | 
| @@ -4106,10 +4125,10 @@
 | 
|  
 | 
|    while (slot_address < end) {
 | 
|      Object** slot = reinterpret_cast<Object**>(slot_address);
 | 
| -    if (Heap::InNewSpace(*slot)) {
 | 
| +    if (heap->InNewSpace(*slot)) {
 | 
|        ASSERT((*slot)->IsHeapObject());
 | 
|        copy_object_func(reinterpret_cast<HeapObject**>(slot));
 | 
| -      if (Heap::InNewSpace(*slot)) {
 | 
| +      if (heap->InNewSpace(*slot)) {
 | 
|          ASSERT((*slot)->IsHeapObject());
 | 
|          pointers_to_new_space_found = true;
 | 
|        }
 | 
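| Sketch (names are illustrative, not the real heap.h declarations): the dirty
| region visitors such as IteratePointersInDirtyRegion now take the Heap as an
| explicit first argument, and IterateDirtyRegions passes the current heap
| ("this") into visit_dirty_region, because the visitors travel as plain
| function pointers and cannot capture per-isolate state. A reduced model of
| that calling convention:
| 
|   #include <cstdio>
| 
|   struct Heap;  // forward declaration for the callback type
|   typedef bool (*DirtyRegionCallback)(Heap* heap, int start, int end);
| 
|   struct Heap {
|     int new_space_floor;
|     bool InNewSpace(int addr) const { return addr >= new_space_floor; }
| 
|     bool IterateDirtyRegions(DirtyRegionCallback visit, int start, int end) {
|       return visit(this, start, end);  // the context travels as an argument
|     }
|   };
| 
|   static bool PointersInRegion(Heap* heap, int start, int end) {
|     bool found = false;
|     for (int addr = start; addr < end; ++addr) {
|       if (heap->InNewSpace(addr)) found = true;
|     }
|     return found;
|   }
| 
|   int main() {
|     Heap heap = {100};
|     std::printf("region touches new space: %d\n",
|                 static_cast<int>(
|                     heap.IterateDirtyRegions(&PointersInRegion, 90, 110)));
|     return 0;
|   }
 | 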
| @@ -4143,14 +4162,16 @@
 | 
|    Address map_address = start;
 | 
|    bool pointers_to_new_space_found = false;
 | 
|  
 | 
| +  Heap* heap = HEAP;
 | 
|    while (map_address < end) {
 | 
| -    ASSERT(!Heap::InNewSpace(Memory::Object_at(map_address)));
 | 
| +    ASSERT(!heap->InNewSpace(Memory::Object_at(map_address)));
 | 
|      ASSERT(Memory::Object_at(map_address)->IsMap());
 | 
|  
 | 
|      Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset;
 | 
|      Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset;
 | 
|  
 | 
| -    if (Heap::IteratePointersInDirtyRegion(pointer_fields_start,
 | 
| +    if (Heap::IteratePointersInDirtyRegion(heap,
 | 
| +                                           pointer_fields_start,
 | 
|                                             pointer_fields_end,
 | 
|                                             copy_object_func)) {
 | 
|        pointers_to_new_space_found = true;
 | 
| @@ -4164,6 +4185,7 @@
 | 
|  
 | 
|  
 | 
|  bool Heap::IteratePointersInDirtyMapsRegion(
 | 
| +    Heap* heap,
 | 
|      Address start,
 | 
|      Address end,
 | 
|      ObjectSlotCallback copy_object_func) {
 | 
| @@ -4183,7 +4205,8 @@
 | 
|          Min(prev_map + Map::kPointerFieldsEndOffset, end);
 | 
|  
 | 
|      contains_pointers_to_new_space =
 | 
| -      IteratePointersInDirtyRegion(pointer_fields_start,
 | 
| +      IteratePointersInDirtyRegion(heap,
 | 
| +                                   pointer_fields_start,
 | 
|                                     pointer_fields_end,
 | 
|                                     copy_object_func)
 | 
|          || contains_pointers_to_new_space;
 | 
| @@ -4205,7 +4228,8 @@
 | 
|          Min(end, map_aligned_end + Map::kPointerFieldsEndOffset);
 | 
|  
 | 
|      contains_pointers_to_new_space =
 | 
| -      IteratePointersInDirtyRegion(pointer_fields_start,
 | 
| +      IteratePointersInDirtyRegion(heap,
 | 
| +                                   pointer_fields_start,
 | 
|                                     pointer_fields_end,
 | 
|                                     copy_object_func)
 | 
|          || contains_pointers_to_new_space;
 | 
| @@ -4225,10 +4249,10 @@
 | 
|  
 | 
|    while (slot_address < end) {
 | 
|      Object** slot = reinterpret_cast<Object**>(slot_address);
 | 
| -    if (Heap::InFromSpace(*slot)) {
 | 
| +    if (InFromSpace(*slot)) {
 | 
|        ASSERT((*slot)->IsHeapObject());
 | 
|        callback(reinterpret_cast<HeapObject**>(slot));
 | 
| -      if (Heap::InNewSpace(*slot)) {
 | 
| +      if (InNewSpace(*slot)) {
 | 
|          ASSERT((*slot)->IsHeapObject());
 | 
|          marks |= page->GetRegionMaskForAddress(slot_address);
 | 
|        }
 | 
| @@ -4267,7 +4291,7 @@
 | 
|    Address region_end = Min(second_region, area_end);
 | 
|  
 | 
|    if (marks & mask) {
 | 
| -    if (visit_dirty_region(region_start, region_end, copy_object_func)) {
 | 
| +    if (visit_dirty_region(this, region_start, region_end, copy_object_func)) {
 | 
|        newmarks |= mask;
 | 
|      }
 | 
|    }
 | 
| @@ -4279,7 +4303,10 @@
 | 
|  
 | 
|    while (region_end <= area_end) {
 | 
|      if (marks & mask) {
 | 
| -      if (visit_dirty_region(region_start, region_end, copy_object_func)) {
 | 
| +      if (visit_dirty_region(this,
 | 
| +                             region_start,
 | 
| +                             region_end,
 | 
| +                             copy_object_func)) {
 | 
|          newmarks |= mask;
 | 
|        }
 | 
|      }
 | 
| @@ -4295,7 +4322,7 @@
 | 
|      // with region end. Check whether region covering last part of area is
 | 
|      // dirty.
 | 
|      if (marks & mask) {
 | 
| -      if (visit_dirty_region(region_start, area_end, copy_object_func)) {
 | 
| +      if (visit_dirty_region(this, region_start, area_end, copy_object_func)) {
 | 
|          newmarks |= mask;
 | 
|        }
 | 
|      }
 | 
| @@ -4361,7 +4388,7 @@
 | 
|    v->Synchronize("symbol_table");
 | 
|    if (mode != VISIT_ALL_IN_SCAVENGE) {
 | 
|      // Scavenge collections have special processing for this.
 | 
| -    ExternalStringTable::Iterate(v);
 | 
| +    external_string_table_.Iterate(v);
 | 
|    }
 | 
|    v->Synchronize("external_string_table");
 | 
|  }
 | 
| @@ -4374,42 +4401,42 @@
 | 
|    v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
 | 
|    v->Synchronize("symbol");
 | 
|  
 | 
| -  Bootstrapper::Iterate(v);
 | 
| +  isolate_->bootstrapper()->Iterate(v);
 | 
|    v->Synchronize("bootstrapper");
 | 
| -  Top::Iterate(v);
 | 
| +  isolate_->Iterate(v);
 | 
|    v->Synchronize("top");
 | 
|    Relocatable::Iterate(v);
 | 
|    v->Synchronize("relocatable");
 | 
|  
 | 
|  #ifdef ENABLE_DEBUGGER_SUPPORT
 | 
| -  Debug::Iterate(v);
 | 
| +  isolate_->debug()->Iterate(v);
 | 
|  #endif
 | 
|    v->Synchronize("debug");
 | 
| -  CompilationCache::Iterate(v);
 | 
| +  isolate_->compilation_cache()->Iterate(v);
 | 
|    v->Synchronize("compilationcache");
 | 
|  
 | 
|    // Iterate over local handles in handle scopes.
 | 
| -  HandleScopeImplementer::Iterate(v);
 | 
| +  isolate_->handle_scope_implementer()->Iterate(v);
 | 
|    v->Synchronize("handlescope");
 | 
|  
 | 
|    // Iterate over the builtin code objects and code stubs in the
 | 
|    // heap. Note that it is not necessary to iterate over code objects
 | 
|    // on scavenge collections.
 | 
|    if (mode != VISIT_ALL_IN_SCAVENGE) {
 | 
| -    Builtins::IterateBuiltins(v);
 | 
| +    isolate_->builtins()->IterateBuiltins(v);
 | 
|    }
 | 
|    v->Synchronize("builtins");
 | 
|  
 | 
|    // Iterate over global handles.
 | 
|    if (mode == VISIT_ONLY_STRONG) {
 | 
| -    GlobalHandles::IterateStrongRoots(v);
 | 
| +    isolate_->global_handles()->IterateStrongRoots(v);
 | 
|    } else {
 | 
| -    GlobalHandles::IterateAllRoots(v);
 | 
| +    isolate_->global_handles()->IterateAllRoots(v);
 | 
|    }
 | 
|    v->Synchronize("globalhandles");
 | 
|  
 | 
|    // Iterate over pointers being held by inactive threads.
 | 
| -  ThreadManager::Iterate(v);
 | 
| +  isolate_->thread_manager()->Iterate(v);
 | 
|    v->Synchronize("threadmanager");
 | 
|  
 | 
|    // Iterate over the pointers the Serialization/Deserialization code is
 | 
| @@ -4428,10 +4455,6 @@
 | 
|  }
 | 
|  
 | 
|  
 | 
| -// Flag is set when the heap has been configured.  The heap can be repeatedly
 | 
| -// configured through the API until it is setup.
 | 
| -static bool heap_configured = false;
 | 
| -
 | 
|  // TODO(1236194): Since the heap size is configurable on the command line
 | 
|  // and through the API, we should gracefully handle the case that the heap
 | 
|  // size is not big enough to fit all the initial objects.
 | 
| @@ -4478,7 +4501,7 @@
 | 
|    // The old generation is paged.
 | 
|    max_old_generation_size_ = RoundUp(max_old_generation_size_, Page::kPageSize);
 | 
|  
 | 
| -  heap_configured = true;
 | 
| +  configured_ = true;
 | 
|    return true;
 | 
|  }
 | 
|  
 | 
| @@ -4506,11 +4529,13 @@
 | 
|    *stats->cell_space_size = cell_space_->Size();
 | 
|    *stats->cell_space_capacity = cell_space_->Capacity();
 | 
|    *stats->lo_space_size = lo_space_->Size();
 | 
| -  GlobalHandles::RecordStats(stats);
 | 
| -  *stats->memory_allocator_size = MemoryAllocator::Size();
 | 
| +  isolate_->global_handles()->RecordStats(stats);
 | 
| +  *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
 | 
|    *stats->memory_allocator_capacity =
 | 
| -      MemoryAllocator::Size() + MemoryAllocator::Available();
 | 
| +      isolate()->memory_allocator()->Size() +
 | 
| +      isolate()->memory_allocator()->Available();
 | 
|    *stats->os_error = OS::GetLastError();
 | 
| +      isolate()->memory_allocator()->Available();

 | 
|    if (take_snapshot) {
 | 
|      HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
 | 
|      for (HeapObject* obj = iterator.next();
 | 
| @@ -4542,8 +4567,177 @@
 | 
|        - amount_of_external_allocated_memory_at_last_global_gc_;
 | 
|  }
 | 
|  
 | 
| +#ifdef DEBUG
 | 
|  
 | 
| +// Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
 | 
| +static const int kMarkTag = 2;
 | 
| +
 | 
| +
 | 
| +class HeapDebugUtils {
 | 
| + public:
 | 
| +  explicit HeapDebugUtils(Heap* heap)
 | 
| +    : search_for_any_global_(false),
 | 
| +      search_target_(NULL),
 | 
| +      found_target_(false),
 | 
| +      object_stack_(20),
 | 
| +      heap_(heap) {
 | 
| +  }
 | 
| +
 | 
| +  class MarkObjectVisitor : public ObjectVisitor {
 | 
| +   public:
 | 
| +    explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
 | 
| +
 | 
| +    void VisitPointers(Object** start, Object** end) {
 | 
| +      // Mark all HeapObject pointers in [start, end)
 | 
| +      for (Object** p = start; p < end; p++) {
 | 
| +        if ((*p)->IsHeapObject())
 | 
| +          utils_->MarkObjectRecursively(p);
 | 
| +      }
 | 
| +    }
 | 
| +
 | 
| +    HeapDebugUtils* utils_;
 | 
| +  };
 | 
| +
 | 
| +  void MarkObjectRecursively(Object** p) {
 | 
| +    if (!(*p)->IsHeapObject()) return;
 | 
| +
 | 
| +    HeapObject* obj = HeapObject::cast(*p);
 | 
| +
 | 
| +    Object* map = obj->map();
 | 
| +
 | 
| +    if (!map->IsHeapObject()) return;  // visited before
 | 
| +
 | 
| +    if (found_target_) return;  // stop if target found
 | 
| +    object_stack_.Add(obj);
 | 
| +    if ((search_for_any_global_ && obj->IsJSGlobalObject()) ||
 | 
| +        (!search_for_any_global_ && (obj == search_target_))) {
 | 
| +      found_target_ = true;
 | 
| +      return;
 | 
| +    }
 | 
| +
 | 
| +    // not visited yet
 | 
| +    Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
 | 
| +
 | 
| +    Address map_addr = map_p->address();
 | 
| +
 | 
| +    obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
 | 
| +
 | 
| +    MarkObjectRecursively(&map);
 | 
| +
 | 
| +    MarkObjectVisitor mark_visitor(this);
 | 
| +
 | 
| +    obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
 | 
| +                     &mark_visitor);
 | 
| +
 | 
| +    if (!found_target_)  // don't pop if found the target
 | 
| +      object_stack_.RemoveLast();
 | 
| +  }
 | 
| +
 | 
| +
 | 
| +  class UnmarkObjectVisitor : public ObjectVisitor {
 | 
| +   public:
 | 
| +    explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
 | 
| +
 | 
| +    void VisitPointers(Object** start, Object** end) {
 | 
| +      // Unmark all HeapObject pointers in [start, end)
 | 
| +      for (Object** p = start; p < end; p++) {
 | 
| +        if ((*p)->IsHeapObject())
 | 
| +          utils_->UnmarkObjectRecursively(p);
 | 
| +      }
 | 
| +    }
 | 
| +
 | 
| +    HeapDebugUtils* utils_;
 | 
| +  };
 | 
| +
 | 
| +
 | 
| +  void UnmarkObjectRecursively(Object** p) {
 | 
| +    if (!(*p)->IsHeapObject()) return;
 | 
| +
 | 
| +    HeapObject* obj = HeapObject::cast(*p);
 | 
| +
 | 
| +    Object* map = obj->map();
 | 
| +
 | 
| +    if (map->IsHeapObject()) return;  // unmarked already
 | 
| +
 | 
| +    Address map_addr = reinterpret_cast<Address>(map);
 | 
| +
 | 
| +    map_addr -= kMarkTag;
 | 
| +
 | 
| +    ASSERT_TAG_ALIGNED(map_addr);
 | 
| +
 | 
| +    HeapObject* map_p = HeapObject::FromAddress(map_addr);
 | 
| +
 | 
| +    obj->set_map(reinterpret_cast<Map*>(map_p));
 | 
| +
 | 
| +    UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
 | 
| +
 | 
| +    UnmarkObjectVisitor unmark_visitor(this);
 | 
| +
 | 
| +    obj->IterateBody(Map::cast(map_p)->instance_type(),
 | 
| +                     obj->SizeFromMap(Map::cast(map_p)),
 | 
| +                     &unmark_visitor);
 | 
| +  }
 | 
| +
 | 
| +
 | 
| +  void MarkRootObjectRecursively(Object** root) {
 | 
| +    if (search_for_any_global_) {
 | 
| +      ASSERT(search_target_ == NULL);
 | 
| +    } else {
 | 
| +      ASSERT(search_target_->IsHeapObject());
 | 
| +    }
 | 
| +    found_target_ = false;
 | 
| +    object_stack_.Clear();
 | 
| +
 | 
| +    MarkObjectRecursively(root);
 | 
| +    UnmarkObjectRecursively(root);
 | 
| +
 | 
| +    if (found_target_) {
 | 
| +      PrintF("=====================================\n");
 | 
| +      PrintF("====        Path to object       ====\n");
 | 
| +      PrintF("=====================================\n\n");
 | 
| +
 | 
| +      ASSERT(!object_stack_.is_empty());
 | 
| +      for (int i = 0; i < object_stack_.length(); i++) {
 | 
| +        if (i > 0) PrintF("\n     |\n     |\n     V\n\n");
 | 
| +        Object* obj = object_stack_[i];
 | 
| +        obj->Print();
 | 
| +      }
 | 
| +      PrintF("=====================================\n");
 | 
| +    }
 | 
| +  }
 | 
| +
 | 
| +  // Helper class for visiting HeapObjects recursively.
 | 
| +  class MarkRootVisitor: public ObjectVisitor {
 | 
| +   public:
 | 
| +    explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { }
 | 
| +
 | 
| +    void VisitPointers(Object** start, Object** end) {
 | 
| +      // Visit all HeapObject pointers in [start, end)
 | 
| +      for (Object** p = start; p < end; p++) {
 | 
| +        if ((*p)->IsHeapObject())
 | 
| +          utils_->MarkRootObjectRecursively(p);
 | 
| +      }
 | 
| +    }
 | 
| +
 | 
| +    HeapDebugUtils* utils_;
 | 
| +  };
 | 
| +
 | 
| +  bool search_for_any_global_;
 | 
| +  Object* search_target_;
 | 
| +  bool found_target_;
 | 
| +  List<Object*> object_stack_;
 | 
| +  Heap* heap_;
 | 
| +
 | 
| +  friend class Heap;
 | 
| +};
 | 
| +
 | 
| +#endif
 | 
| +
 | 
|  bool Heap::Setup(bool create_heap_objects) {
 | 
| +#ifdef DEBUG
 | 
| +  debug_utils_ = new HeapDebugUtils(this);
 | 
| +#endif
 | 
| +
 | 
|    // Initialize heap spaces and initial maps and objects. Whenever something
 | 
|    // goes wrong, just return false. The caller should check the results and
 | 
|    // call Heap::TearDown() to release allocated memory.
 | 
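| Sketch (simplified, not V8 code): the HeapDebugUtils class added above marks
| a visited object by bumping its map word by kMarkTag, relying on the note
| that tags 0, 1 and 3 are already taken, and UnmarkObjectRecursively subtracts
| the tag to restore the original map pointer. The same low-bit tagging trick
| on aligned pointers, stand-alone:
| 
|   #include <cstdint>
|   #include <cstdio>
| 
|   struct Node {
|     Node* next;
|     int payload;
|   };
| 
|   const uintptr_t kMarkTag = 2;  // spare bit pattern, as in the debug code
| 
|   bool IsMarked(const Node* n) {
|     return (reinterpret_cast<uintptr_t>(n->next) & kMarkTag) != 0;
|   }
|   void Mark(Node* n) {   // aligned pointers have zero low bits to borrow
|     n->next = reinterpret_cast<Node*>(
|         reinterpret_cast<uintptr_t>(n->next) + kMarkTag);
|   }
|   void Unmark(Node* n) {
|     n->next = reinterpret_cast<Node*>(
|         reinterpret_cast<uintptr_t>(n->next) - kMarkTag);
|   }
| 
|   int main() {
|     Node b = {nullptr, 2};
|     Node a = {&b, 1};
|     Mark(&a);
|     std::printf("a marked: %d, b marked: %d\n",
|                 IsMarked(&a) ? 1 : 0, IsMarked(&b) ? 1 : 0);
|     Unmark(&a);
|     std::printf("a restored: %d\n", a.next == &b ? 1 : 0);
|     return 0;
|   }
 | 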
| @@ -4552,13 +4746,19 @@
 | 
|    // Configuration is based on the flags new-space-size (really the semispace
 | 
|    // size) and old-space-size if set or the initial values of semispace_size_
 | 
|    // and old_generation_size_ otherwise.
 | 
| -  if (!heap_configured) {
 | 
| +  if (!configured_) {
 | 
|      if (!ConfigureHeapDefault()) return false;
 | 
|    }
 | 
|  
 | 
| -  ScavengingVisitor::Initialize();
 | 
| -  NewSpaceScavenger::Initialize();
 | 
| -  MarkCompactCollector::Initialize();
 | 
| +  gc_initializer_mutex->Lock();
 | 
| +  static bool initialized_gc = false;
 | 
| +  if (!initialized_gc) {
 | 
| +      initialized_gc = true;
 | 
| +      ScavengingVisitor::Initialize();
 | 
| +      NewSpaceScavenger::Initialize();
 | 
| +      MarkCompactCollector::Initialize();
 | 
| +  }
 | 
| +  gc_initializer_mutex->Unlock();
 | 
|  
 | 
|    MarkMapPointersAsEncoded(false);
 | 
|  
 | 
| @@ -4566,9 +4766,11 @@
 | 
|    // space.  The chunk is double the size of the requested reserved
 | 
|    // new space size to ensure that we can find a pair of semispaces that
 | 
|    // are contiguous and aligned to their size.
 | 
| -  if (!MemoryAllocator::Setup(MaxReserved(), MaxExecutableSize())) return false;
 | 
| +  if (!isolate_->memory_allocator()->Setup(MaxReserved(), MaxExecutableSize()))
 | 
| +      return false;
 | 
|    void* chunk =
 | 
| -      MemoryAllocator::ReserveInitialChunk(4 * reserved_semispace_size_);
 | 
| +      isolate_->memory_allocator()->ReserveInitialChunk(
 | 
| +          4 * reserved_semispace_size_);
 | 
|    if (chunk == NULL) return false;
 | 
|  
 | 
|    // Align the pair of semispaces to their size, which must be a power
 | 
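| Sketch (standard-library stand-ins, not V8's OS abstraction): the GC visitor
| tables built by ScavengingVisitor, NewSpaceScavenger and MarkCompactCollector
| are shared by all heaps in the process, so Setup() now builds them only once,
| with gc_initializer_mutex serializing concurrent callers. The same guard with
| std::mutex:
| 
|   #include <cstdio>
|   #include <mutex>
| 
|   static std::mutex gc_initializer_mutex;
| 
|   void InitializeGCOnce() {
|     static bool initialized_gc = false;  // guarded by the mutex, not atomic
|     std::lock_guard<std::mutex> lock(gc_initializer_mutex);
|     if (!initialized_gc) {
|       initialized_gc = true;
|       std::puts("shared visitor tables built");  // stands in for *::Initialize()
|     }
|   }
| 
|   int main() {
|     InitializeGCOnce();
|     InitializeGCOnce();  // a second caller finds the work already done
|     return 0;
|   }
| 
| In modern C++ a function-local static initializer or std::call_once gives the
| same once-only behavior; the explicit mutex mirrors the patch's structure.
 | 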
| @@ -4581,13 +4783,19 @@
 | 
|  
 | 
|    // Initialize old pointer space.
 | 
|    old_pointer_space_ =
 | 
| -      new OldSpace(max_old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
 | 
| +      new OldSpace(this,
 | 
| +                   max_old_generation_size_,
 | 
| +                   OLD_POINTER_SPACE,
 | 
| +                   NOT_EXECUTABLE);
 | 
|    if (old_pointer_space_ == NULL) return false;
 | 
|    if (!old_pointer_space_->Setup(NULL, 0)) return false;
 | 
|  
 | 
|    // Initialize old data space.
 | 
|    old_data_space_ =
 | 
| -      new OldSpace(max_old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
 | 
| +      new OldSpace(this,
 | 
| +                   max_old_generation_size_,
 | 
| +                   OLD_DATA_SPACE,
 | 
| +                   NOT_EXECUTABLE);
 | 
|    if (old_data_space_ == NULL) return false;
 | 
|    if (!old_data_space_->Setup(NULL, 0)) return false;
 | 
|  
 | 
| @@ -4596,18 +4804,18 @@
 | 
|    // On 64-bit platform(s), we put all code objects in a 2 GB range of
 | 
|    // virtual address space, so that they can call each other with near calls.
 | 
|    if (code_range_size_ > 0) {
 | 
| -    if (!CodeRange::Setup(code_range_size_)) {
 | 
| +    if (!isolate_->code_range()->Setup(code_range_size_)) {
 | 
|        return false;
 | 
|      }
 | 
|    }
 | 
|  
 | 
|    code_space_ =
 | 
| -      new OldSpace(max_old_generation_size_, CODE_SPACE, EXECUTABLE);
 | 
| +      new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
 | 
|    if (code_space_ == NULL) return false;
 | 
|    if (!code_space_->Setup(NULL, 0)) return false;
 | 
|  
 | 
|    // Initialize map space.
 | 
| -  map_space_ = new MapSpace(FLAG_use_big_map_space
 | 
| +  map_space_ = new MapSpace(this, FLAG_use_big_map_space
 | 
|        ? max_old_generation_size_
 | 
|        : MapSpace::kMaxMapPageIndex * Page::kPageSize,
 | 
|        FLAG_max_map_space_pages,
 | 
| @@ -4616,14 +4824,14 @@
 | 
|    if (!map_space_->Setup(NULL, 0)) return false;
 | 
|  
 | 
|    // Initialize global property cell space.
 | 
| -  cell_space_ = new CellSpace(max_old_generation_size_, CELL_SPACE);
 | 
| +  cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
 | 
|    if (cell_space_ == NULL) return false;
 | 
|    if (!cell_space_->Setup(NULL, 0)) return false;
 | 
|  
 | 
|    // The large object code space may contain code or data.  We set the memory
 | 
|    // to be non-executable here for safety, but this means we need to enable it
 | 
|    // explicitly when allocating large code objects.
 | 
| -  lo_space_ = new LargeObjectSpace(LO_SPACE);
 | 
| +  lo_space_ = new LargeObjectSpace(this, LO_SPACE);
 | 
|    if (lo_space_ == NULL) return false;
 | 
|    if (!lo_space_->Setup()) return false;
 | 
|  
 | 
| @@ -4638,12 +4846,12 @@
 | 
|      global_contexts_list_ = undefined_value();
 | 
|    }
 | 
|  
 | 
| -  LOG(IntPtrTEvent("heap-capacity", Capacity()));
 | 
| -  LOG(IntPtrTEvent("heap-available", Available()));
 | 
| +  LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
 | 
| +  LOG(isolate_, IntPtrTEvent("heap-available", Available()));
 | 
|  
 | 
|  #ifdef ENABLE_LOGGING_AND_PROFILING
 | 
|    // This should be called only after initial objects have been created.
 | 
| -  ProducerHeapProfile::Setup();
 | 
| +  isolate_->producer_heap_profile()->Setup();
 | 
|  #endif
 | 
|  
 | 
|    return true;
 | 
| @@ -4651,6 +4859,8 @@
 | 
|  
 | 
|  
 | 
|  void Heap::SetStackLimits() {
 | 
| +  ASSERT(isolate_ != NULL);
 | 
| +  ASSERT(isolate_ == isolate());
 | 
|    // On 64 bit machines, pointers are generally out of range of Smis.  We write
 | 
|    // something that looks like an out of range Smi to the GC.
 | 
|  
 | 
| @@ -4658,10 +4868,10 @@
 | 
|    // These are actually addresses, but the tag makes the GC ignore it.
 | 
|    roots_[kStackLimitRootIndex] =
 | 
|        reinterpret_cast<Object*>(
 | 
| -          (StackGuard::jslimit() & ~kSmiTagMask) | kSmiTag);
 | 
| +          (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
 | 
|    roots_[kRealStackLimitRootIndex] =
 | 
|        reinterpret_cast<Object*>(
 | 
| -          (StackGuard::real_jslimit() & ~kSmiTagMask) | kSmiTag);
 | 
| +          (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
 | 
|  }
 | 
|  
 | 
|  
 | 
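| Sketch (made-up address value): SetStackLimits stores the isolate's jslimit
| and real_jslimit in the root list after masking them to look like smis, so
| the GC treats those roots as immediates rather than heap pointers. A tiny
| model of the masking, assuming the usual V8 encoding where kSmiTag is 0 and
| kSmiTagMask is 1:
| 
|   #include <cstdint>
|   #include <cstdio>
| 
|   int main() {
|     const uint64_t kSmiTag = 0;      // assumed: smis carry a clear low bit
|     const uint64_t kSmiTagMask = 1;
|     uint64_t jslimit = 0x7ffe12345679ULL;  // some stack address, low bit set
|     uint64_t disguised = (jslimit & ~kSmiTagMask) | kSmiTag;
|     // Low bit cleared: the root now parses as a smi, so the GC neither
|     // follows nor relocates it, yet it still encodes the limit address.
|     std::printf("looks like a smi: %d\n",
|                 static_cast<int>((disguised & kSmiTagMask) == kSmiTag));
|     return 0;
|   }
 | 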
| @@ -4671,16 +4881,16 @@
 | 
|      PrintF("gc_count=%d ", gc_count_);
 | 
|      PrintF("mark_sweep_count=%d ", ms_count_);
 | 
|      PrintF("mark_compact_count=%d ", mc_count_);
 | 
| -    PrintF("max_gc_pause=%d ", GCTracer::get_max_gc_pause());
 | 
| -    PrintF("min_in_mutator=%d ", GCTracer::get_min_in_mutator());
 | 
| +    PrintF("max_gc_pause=%d ", get_max_gc_pause());
 | 
| +    PrintF("min_in_mutator=%d ", get_min_in_mutator());
 | 
|      PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
 | 
| -           GCTracer::get_max_alive_after_gc());
 | 
| +           get_max_alive_after_gc());
 | 
|      PrintF("\n\n");
 | 
|    }
 | 
|  
 | 
| -  GlobalHandles::TearDown();
 | 
| +  isolate_->global_handles()->TearDown();
 | 
|  
 | 
| -  ExternalStringTable::TearDown();
 | 
| +  external_string_table_.TearDown();
 | 
|  
 | 
|    new_space_.TearDown();
 | 
|  
 | 
| @@ -4720,7 +4930,12 @@
 | 
|      lo_space_ = NULL;
 | 
|    }
 | 
|  
 | 
| -  MemoryAllocator::TearDown();
 | 
| +  isolate_->memory_allocator()->TearDown();
 | 
| +
 | 
| +#ifdef DEBUG
 | 
| +  delete debug_utils_;
 | 
| +  debug_utils_ = NULL;
 | 
| +#endif
 | 
|  }
 | 
|  
 | 
|  
 | 
| @@ -4809,7 +5024,7 @@
 | 
|  void Heap::PrintHandles() {
 | 
|    PrintF("Handles:\n");
 | 
|    PrintHandleVisitor v;
 | 
| -  HandleScopeImplementer::Iterate(&v);
 | 
| +  isolate_->handle_scope_implementer()->Iterate(&v);
 | 
|  }
 | 
|  
 | 
|  #endif
 | 
| @@ -4818,19 +5033,19 @@
 | 
|  Space* AllSpaces::next() {
 | 
|    switch (counter_++) {
 | 
|      case NEW_SPACE:
 | 
| -      return Heap::new_space();
 | 
| +      return HEAP->new_space();
 | 
|      case OLD_POINTER_SPACE:
 | 
| -      return Heap::old_pointer_space();
 | 
| +      return HEAP->old_pointer_space();
 | 
|      case OLD_DATA_SPACE:
 | 
| -      return Heap::old_data_space();
 | 
| +      return HEAP->old_data_space();
 | 
|      case CODE_SPACE:
 | 
| -      return Heap::code_space();
 | 
| +      return HEAP->code_space();
 | 
|      case MAP_SPACE:
 | 
| -      return Heap::map_space();
 | 
| +      return HEAP->map_space();
 | 
|      case CELL_SPACE:
 | 
| -      return Heap::cell_space();
 | 
| +      return HEAP->cell_space();
 | 
|      case LO_SPACE:
 | 
| -      return Heap::lo_space();
 | 
| +      return HEAP->lo_space();
 | 
|      default:
 | 
|        return NULL;
 | 
|    }
 | 
| @@ -4840,15 +5055,15 @@
 | 
|  PagedSpace* PagedSpaces::next() {
 | 
|    switch (counter_++) {
 | 
|      case OLD_POINTER_SPACE:
 | 
| -      return Heap::old_pointer_space();
 | 
| +      return HEAP->old_pointer_space();
 | 
|      case OLD_DATA_SPACE:
 | 
| -      return Heap::old_data_space();
 | 
| +      return HEAP->old_data_space();
 | 
|      case CODE_SPACE:
 | 
| -      return Heap::code_space();
 | 
| +      return HEAP->code_space();
 | 
|      case MAP_SPACE:
 | 
| -      return Heap::map_space();
 | 
| +      return HEAP->map_space();
 | 
|      case CELL_SPACE:
 | 
| -      return Heap::cell_space();
 | 
| +      return HEAP->cell_space();
 | 
|      default:
 | 
|        return NULL;
 | 
|    }
 | 
| @@ -4859,11 +5074,11 @@
 | 
|  OldSpace* OldSpaces::next() {
 | 
|    switch (counter_++) {
 | 
|      case OLD_POINTER_SPACE:
 | 
| -      return Heap::old_pointer_space();
 | 
| +      return HEAP->old_pointer_space();
 | 
|      case OLD_DATA_SPACE:
 | 
| -      return Heap::old_data_space();
 | 
| +      return HEAP->old_data_space();
 | 
|      case CODE_SPACE:
 | 
| -      return Heap::code_space();
 | 
| +      return HEAP->code_space();
 | 
|      default:
 | 
|        return NULL;
 | 
|    }
 | 
| @@ -4918,25 +5133,25 @@
 | 
|  
 | 
|    switch (current_space_) {
 | 
|      case NEW_SPACE:
 | 
| -      iterator_ = new SemiSpaceIterator(Heap::new_space(), size_func_);
 | 
| +      iterator_ = new SemiSpaceIterator(HEAP->new_space(), size_func_);
 | 
|        break;
 | 
|      case OLD_POINTER_SPACE:
 | 
| -      iterator_ = new HeapObjectIterator(Heap::old_pointer_space(), size_func_);
 | 
| +      iterator_ = new HeapObjectIterator(HEAP->old_pointer_space(), size_func_);
 | 
|        break;
 | 
|      case OLD_DATA_SPACE:
 | 
| -      iterator_ = new HeapObjectIterator(Heap::old_data_space(), size_func_);
 | 
| +      iterator_ = new HeapObjectIterator(HEAP->old_data_space(), size_func_);
 | 
|        break;
 | 
|      case CODE_SPACE:
 | 
| -      iterator_ = new HeapObjectIterator(Heap::code_space(), size_func_);
 | 
| +      iterator_ = new HeapObjectIterator(HEAP->code_space(), size_func_);
 | 
|        break;
 | 
|      case MAP_SPACE:
 | 
| -      iterator_ = new HeapObjectIterator(Heap::map_space(), size_func_);
 | 
| +      iterator_ = new HeapObjectIterator(HEAP->map_space(), size_func_);
 | 
|        break;
 | 
|      case CELL_SPACE:
 | 
| -      iterator_ = new HeapObjectIterator(Heap::cell_space(), size_func_);
 | 
| +      iterator_ = new HeapObjectIterator(HEAP->cell_space(), size_func_);
 | 
|        break;
 | 
|      case LO_SPACE:
 | 
| -      iterator_ = new LargeObjectIterator(Heap::lo_space(), size_func_);
 | 
| +      iterator_ = new LargeObjectIterator(HEAP->lo_space(), size_func_);
 | 
|        break;
 | 
|    }
 | 
|  
 | 
| @@ -4970,16 +5185,17 @@
 | 
|  
 | 
|   private:
 | 
|    void MarkFreeListNodes() {
 | 
| -    Heap::old_pointer_space()->MarkFreeListNodes();
 | 
| -    Heap::old_data_space()->MarkFreeListNodes();
 | 
| -    MarkCodeSpaceFreeListNodes();
 | 
| -    Heap::map_space()->MarkFreeListNodes();
 | 
| -    Heap::cell_space()->MarkFreeListNodes();
 | 
| +    Heap* heap = HEAP;
 | 
| +    heap->old_pointer_space()->MarkFreeListNodes();
 | 
| +    heap->old_data_space()->MarkFreeListNodes();
 | 
| +    MarkCodeSpaceFreeListNodes(heap);
 | 
| +    heap->map_space()->MarkFreeListNodes();
 | 
| +    heap->cell_space()->MarkFreeListNodes();
 | 
|    }
 | 
|  
 | 
| -  void MarkCodeSpaceFreeListNodes() {
 | 
| +  void MarkCodeSpaceFreeListNodes(Heap* heap) {
 | 
|      // For code space, using FreeListNode::IsFreeListNode is OK.
 | 
| -    HeapObjectIterator iter(Heap::code_space());
 | 
| +    HeapObjectIterator iter(heap->code_space());
 | 
|      for (HeapObject* obj = iter.next_object();
 | 
|           obj != NULL;
 | 
|           obj = iter.next_object()) {
 | 
| @@ -5041,7 +5257,7 @@
 | 
|        obj->SetMark();
 | 
|      }
 | 
|      UnmarkingVisitor visitor;
 | 
| -    Heap::IterateRoots(&visitor, VISIT_ALL);
 | 
| +    HEAP->IterateRoots(&visitor, VISIT_ALL);
 | 
|      while (visitor.can_process())
 | 
|        visitor.ProcessNext();
 | 
|    }
 | 
| @@ -5344,7 +5560,7 @@
 | 
|  }
 | 
|  
 | 
|  
 | 
| -GCTracer::GCTracer()
 | 
| +GCTracer::GCTracer(Heap* heap)
 | 
|      : start_time_(0.0),
 | 
|        start_size_(0),
 | 
|        gc_count_(0),
 | 
| @@ -5353,14 +5569,16 @@
 | 
|        marked_count_(0),
 | 
|        allocated_since_last_gc_(0),
 | 
|        spent_in_mutator_(0),
 | 
| -      promoted_objects_size_(0) {
 | 
| +      promoted_objects_size_(0),
 | 
| +      heap_(heap) {
 | 
|    // These two fields reflect the state of the previous full collection.
 | 
|    // Set them before they are changed by the collector.
 | 
| -  previous_has_compacted_ = MarkCompactCollector::HasCompacted();
 | 
| -  previous_marked_count_ = MarkCompactCollector::previous_marked_count();
 | 
| +  previous_has_compacted_ = heap_->mark_compact_collector_.HasCompacted();
 | 
| +  previous_marked_count_ =
 | 
| +      heap_->mark_compact_collector_.previous_marked_count();
 | 
|    if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
 | 
|    start_time_ = OS::TimeCurrentMillis();
 | 
| -  start_size_ = Heap::SizeOfObjects();
 | 
| +  start_size_ = heap_->SizeOfObjects();
 | 
|  
 | 
|    for (int i = 0; i < Scope::kNumberOfScopes; i++) {
 | 
|      scopes_[i] = 0;
 | 
| @@ -5368,10 +5586,11 @@
 | 
|  
 | 
|    in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();
 | 
|  
 | 
| -  allocated_since_last_gc_ = Heap::SizeOfObjects() - alive_after_last_gc_;
 | 
| +  allocated_since_last_gc_ =
 | 
| +      heap_->SizeOfObjects() - heap_->alive_after_last_gc_;
 | 
|  
 | 
| -  if (last_gc_end_timestamp_ > 0) {
 | 
| -    spent_in_mutator_ = Max(start_time_ - last_gc_end_timestamp_, 0.0);
 | 
| +  if (heap_->last_gc_end_timestamp_ > 0) {
 | 
| +    spent_in_mutator_ = Max(start_time_ - heap_->last_gc_end_timestamp_, 0.0);
 | 
|    }
 | 
|  }
 | 
|  
 | 
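| Sketch (simplified types, not V8 code): GCTracer now carries a heap_
| back-pointer and records the cumulative pause statistics (max_gc_pause_,
| max_alive_after_gc_, min_in_mutator_) on that heap rather than on its own
| statics. A scoped tracer of the same shape:
| 
|   #include <chrono>
|   #include <cstdio>
| 
|   struct HeapStats {
|     double max_gc_pause_ms = 0.0;  // cumulative statistic owned by the heap
|   };
| 
|   class ScopedTracer {
|    public:
|     explicit ScopedTracer(HeapStats* heap_stats)
|         : heap_stats_(heap_stats), start_(std::chrono::steady_clock::now()) {}
|     ~ScopedTracer() {
|       double pause_ms = std::chrono::duration<double, std::milli>(
|           std::chrono::steady_clock::now() - start_).count();
|       if (pause_ms > heap_stats_->max_gc_pause_ms) {
|         heap_stats_->max_gc_pause_ms = pause_ms;  // written back to the owner
|       }
|     }
|    private:
|     HeapStats* heap_stats_;
|     std::chrono::steady_clock::time_point start_;
|   };
| 
|   int main() {
|     HeapStats stats;
|     { ScopedTracer tracer(&stats); /* collection work happens here */ }
|     std::printf("max pause so far: %.3f ms\n", stats.max_gc_pause_ms);
|     return 0;
|   }
 | 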
| @@ -5380,20 +5599,21 @@
 | 
|    // Printf ONE line iff flag is set.
 | 
|    if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
 | 
|  
 | 
| -  bool first_gc = (last_gc_end_timestamp_ == 0);
 | 
| +  bool first_gc = (heap_->last_gc_end_timestamp_ == 0);
 | 
|  
 | 
| -  alive_after_last_gc_ = Heap::SizeOfObjects();
 | 
| -  last_gc_end_timestamp_ = OS::TimeCurrentMillis();
 | 
| +  heap_->alive_after_last_gc_ = heap_->SizeOfObjects();
 | 
| +  heap_->last_gc_end_timestamp_ = OS::TimeCurrentMillis();
 | 
|  
 | 
| -  int time = static_cast<int>(last_gc_end_timestamp_ - start_time_);
 | 
| +  int time = static_cast<int>(heap_->last_gc_end_timestamp_ - start_time_);
 | 
|  
 | 
|    // Update cumulative GC statistics if required.
 | 
|    if (FLAG_print_cumulative_gc_stat) {
 | 
| -    max_gc_pause_ = Max(max_gc_pause_, time);
 | 
| -    max_alive_after_gc_ = Max(max_alive_after_gc_, alive_after_last_gc_);
 | 
| +    heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
 | 
| +    heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
 | 
| +                                     heap_->alive_after_last_gc_);
 | 
|      if (!first_gc) {
 | 
| -      min_in_mutator_ = Min(min_in_mutator_,
 | 
| -                            static_cast<int>(spent_in_mutator_));
 | 
| +      heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
 | 
| +                                   static_cast<int>(spent_in_mutator_));
 | 
|      }
 | 
|    }
 | 
|  
 | 
| @@ -5418,7 +5638,8 @@
 | 
|          PrintF("s");
 | 
|          break;
 | 
|        case MARK_COMPACTOR:
 | 
| -        PrintF(MarkCompactCollector::HasCompacted() ? "mc" : "ms");
 | 
| +        PrintF("%s",
 | 
| +               heap_->mark_compact_collector_.HasCompacted() ? "mc" : "ms");
 | 
|          break;
 | 
|        default:
 | 
|          UNREACHABLE();
 | 
| @@ -5432,7 +5653,7 @@
 | 
|      PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
 | 
|  
 | 
|      PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_);
 | 
| -    PrintF("total_size_after=%" V8_PTR_PREFIX "d ", Heap::SizeOfObjects());
 | 
| +    PrintF("total_size_after=%" V8_PTR_PREFIX "d ", heap_->SizeOfObjects());
 | 
|      PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
 | 
|             in_free_list_or_wasted_before_gc_);
 | 
|      PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize());
 | 
| @@ -5444,7 +5665,7 @@
 | 
|    }
 | 
|  
 | 
|  #if defined(ENABLE_LOGGING_AND_PROFILING)
 | 
| -  Heap::PrintShortHeapStatistics();
 | 
| +  heap_->PrintShortHeapStatistics();
 | 
|  #endif
 | 
|  }
 | 
|  
 | 
| @@ -5454,8 +5675,8 @@
 | 
|      case SCAVENGER:
 | 
|        return "Scavenge";
 | 
|      case MARK_COMPACTOR:
 | 
| -      return MarkCompactCollector::HasCompacted() ? "Mark-compact"
 | 
| -                                                  : "Mark-sweep";
 | 
| +      return heap_->mark_compact_collector_.HasCompacted() ? "Mark-compact"
 | 
| +                                                           : "Mark-sweep";
 | 
|    }
 | 
|    return "Unknown GC";
 | 
|  }
 | 
| @@ -5475,13 +5696,13 @@
 | 
|    if ((key.map == map) && key.name->Equals(name)) {
 | 
|      return field_offsets_[index];
 | 
|    }
 | 
| -  return -1;
 | 
| +  return kNotFound;
 | 
|  }
 | 
|  
 | 
|  
 | 
|  void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
 | 
|    String* symbol;
 | 
| -  if (Heap::LookupSymbolIfExists(name, &symbol)) {
 | 
| +  if (HEAP->LookupSymbolIfExists(name, &symbol)) {
 | 
|      int index = Hash(map, symbol);
 | 
|      Key& key = keys_[index];
 | 
|      key.map = map;
 | 
| @@ -5496,35 +5717,24 @@
 | 
|  }
 | 
|  
 | 
|  
 | 
| -KeyedLookupCache::Key KeyedLookupCache::keys_[KeyedLookupCache::kLength];
 | 
| -
 | 
| -
 | 
| -int KeyedLookupCache::field_offsets_[KeyedLookupCache::kLength];
 | 
| -
 | 
| -
 | 
|  void DescriptorLookupCache::Clear() {
 | 
|    for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
 | 
|  }
 | 
|  
 | 
|  
 | 
| -DescriptorLookupCache::Key
 | 
| -DescriptorLookupCache::keys_[DescriptorLookupCache::kLength];
 | 
| -
 | 
| -int DescriptorLookupCache::results_[DescriptorLookupCache::kLength];
 | 
| -
 | 
| -
 | 
|  #ifdef DEBUG
 | 
|  void Heap::GarbageCollectionGreedyCheck() {
 | 
|    ASSERT(FLAG_gc_greedy);
 | 
| -  if (Bootstrapper::IsActive()) return;
 | 
| +  if (isolate_->bootstrapper()->IsActive()) return;
 | 
|    if (disallow_allocation_failure()) return;
 | 
|    CollectGarbage(NEW_SPACE);
 | 
|  }
 | 
|  #endif
 | 
|  
 | 
|  
 | 
| -TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t)
 | 
| -  : type_(t) {
 | 
| +TranscendentalCache::SubCache::SubCache(Type t)
 | 
| +  : type_(t),
 | 
| +    isolate_(Isolate::Current()) {
 | 
|    uint32_t in0 = 0xffffffffu;  // Bit-pattern for a NaN that isn't
 | 
|    uint32_t in1 = 0xffffffffu;  // generated by the FPU.
 | 
|    for (int i = 0; i < kCacheSize; i++) {
 | 
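| Sketch (stand-alone check, not V8 code): the SubCache constructor above fills
| the cache keys with the 0xffffffff bit pattern, which the in0/in1 comments
| describe as a NaN the FPU never generates, so an untouched slot can never
| match a real lookup. A quick verification that the pattern decodes to a NaN:
| 
|   #include <cstdint>
|   #include <cstdio>
|   #include <cstring>
| 
|   int main() {
|     const uint32_t bits[2] = {0xffffffffu, 0xffffffffu};  // in0 / in1 above
|     double sentinel;
|     std::memcpy(&sentinel, bits, sizeof sentinel);
|     // A NaN compares unequal to everything, itself included, and no FPU
|     // result carries this exact bit pattern, so empty slots never hit.
|     std::printf("sentinel is NaN: %d\n",
|                 static_cast<int>(sentinel != sentinel));
|     return 0;
|   }
 | 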
| @@ -5535,9 +5745,6 @@
 | 
|  }
 | 
|  
 | 
|  
 | 
| -TranscendentalCache* TranscendentalCache::caches_[kNumberOfCaches];
 | 
| -
 | 
| -
 | 
|  void TranscendentalCache::Clear() {
 | 
|    for (int i = 0; i < kNumberOfCaches; i++) {
 | 
|      if (caches_[i] != NULL) {
 | 
| @@ -5551,8 +5758,8 @@
 | 
|  void ExternalStringTable::CleanUp() {
 | 
|    int last = 0;
 | 
|    for (int i = 0; i < new_space_strings_.length(); ++i) {
 | 
| -    if (new_space_strings_[i] == Heap::raw_unchecked_null_value()) continue;
 | 
| -    if (Heap::InNewSpace(new_space_strings_[i])) {
 | 
| +    if (new_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
 | 
| +    if (heap_->InNewSpace(new_space_strings_[i])) {
 | 
|        new_space_strings_[last++] = new_space_strings_[i];
 | 
|      } else {
 | 
|        old_space_strings_.Add(new_space_strings_[i]);
 | 
| @@ -5561,8 +5768,8 @@
 | 
|    new_space_strings_.Rewind(last);
 | 
|    last = 0;
 | 
|    for (int i = 0; i < old_space_strings_.length(); ++i) {
 | 
| -    if (old_space_strings_[i] == Heap::raw_unchecked_null_value()) continue;
 | 
| -    ASSERT(!Heap::InNewSpace(old_space_strings_[i]));
 | 
| +    if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
 | 
| +    ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
 | 
|      old_space_strings_[last++] = old_space_strings_[i];
 | 
|    }
 | 
|    old_space_strings_.Rewind(last);
 | 
| @@ -5576,7 +5783,4 @@
 | 
|  }
 | 
|  
 | 
|  
 | 
| -List<Object*> ExternalStringTable::new_space_strings_;
 | 
| -List<Object*> ExternalStringTable::old_space_strings_;
 | 
| -
 | 
|  } }  // namespace v8::internal
 | 
| 
 |