| Index: src/objects-inl.h
|
| ===================================================================
|
| --- src/objects-inl.h (revision 7563)
|
| +++ src/objects-inl.h (working copy)
|
| @@ -40,6 +40,7 @@
|
| #include "conversions-inl.h"
|
| #include "heap.h"
|
| #include "memory.h"
|
| +#include "isolate.h"
|
| #include "property.h"
|
| #include "spaces.h"
|
| #include "store-buffer.h"
|
| @@ -81,10 +82,19 @@
|
| type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
|
| void holder::set_##name(type* value, WriteBarrierMode mode) { \
|
| WRITE_FIELD(this, offset, value); \
|
| - WRITE_BARRIER(this, offset, value); \
|
| + WRITE_BARRIER(GetHeap(), this, offset, value); \
|
| }
|
|
|
|
|
| +// GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead.
|
| +#define ACCESSORS_GCSAFE(holder, name, type, offset) \
|
| + type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
|
| + void holder::set_##name(type* value, WriteBarrierMode mode) { \
|
| + WRITE_FIELD(this, offset, value); \
|
| + WRITE_BARRIER(HEAP, this, offset, value); \
|
| + }
|
| +
|
| +
|
| #define SMI_ACCESSORS(holder, name, offset) \
|
| int holder::name() { \
|
| Object* value = READ_FIELD(this, offset); \
|
| @@ -441,7 +451,7 @@
|
|
|
|
|
| bool MaybeObject::IsTheHole() {
|
| - return this == Heap::the_hole_value();
|
| + return !IsFailure() && ToObjectUnchecked()->IsTheHole();
|
| }
|
|
|
|
|
| @@ -509,22 +519,27 @@
|
|
|
|
|
| bool Object::IsContext() {
|
| - return Object::IsHeapObject()
|
| - && (HeapObject::cast(this)->map() == Heap::context_map() ||
|
| - HeapObject::cast(this)->map() == Heap::catch_context_map() ||
|
| - HeapObject::cast(this)->map() == Heap::global_context_map());
|
| + if (Object::IsHeapObject()) {
|
| + Heap* heap = HeapObject::cast(this)->GetHeap();
|
| + return (HeapObject::cast(this)->map() == heap->context_map() ||
|
| + HeapObject::cast(this)->map() == heap->catch_context_map() ||
|
| + HeapObject::cast(this)->map() == heap->global_context_map());
|
| + }
|
| + return false;
|
| }
|
|
|
|
|
| bool Object::IsCatchContext() {
|
| - return Object::IsHeapObject()
|
| - && HeapObject::cast(this)->map() == Heap::catch_context_map();
|
| + return Object::IsHeapObject() &&
|
| + HeapObject::cast(this)->map() ==
|
| + HeapObject::cast(this)->GetHeap()->catch_context_map();
|
| }
|
|
|
|
|
| bool Object::IsGlobalContext() {
|
| - return Object::IsHeapObject()
|
| - && HeapObject::cast(this)->map() == Heap::global_context_map();
|
| + return Object::IsHeapObject() &&
|
| + HeapObject::cast(this)->map() ==
|
| + HeapObject::cast(this)->GetHeap()->global_context_map();
|
| }
|
|
|
|
|
| @@ -590,7 +605,8 @@
|
|
|
|
|
| bool Object::IsBoolean() {
|
| - return IsTrue() || IsFalse();
|
| + return IsOddball() &&
|
| + ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
|
| }
|
|
|
|
|
| @@ -612,18 +628,21 @@
|
|
|
|
|
| bool Object::IsHashTable() {
|
| - return Object::IsHeapObject()
|
| - && HeapObject::cast(this)->map() == Heap::hash_table_map();
|
| + return Object::IsHeapObject() &&
|
| + HeapObject::cast(this)->map() ==
|
| + HeapObject::cast(this)->GetHeap()->hash_table_map();
|
| }
|
|
|
|
|
| bool Object::IsDictionary() {
|
| - return IsHashTable() && this != Heap::symbol_table();
|
| + return IsHashTable() && this !=
|
| + HeapObject::cast(this)->GetHeap()->symbol_table();
|
| }
|
|
|
|
|
| bool Object::IsSymbolTable() {
|
| - return IsHashTable() && this == Heap::raw_unchecked_symbol_table();
|
| + return IsHashTable() && this ==
|
| + HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
|
| }
|
|
|
|
|
| @@ -740,27 +759,32 @@
|
|
|
|
|
| bool Object::IsUndefined() {
|
| - return this == Heap::undefined_value();
|
| + return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
|
| }
|
|
|
|
|
| bool Object::IsNull() {
|
| - return this == Heap::null_value();
|
| + return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
|
| }
|
|
|
|
|
| +bool Object::IsTheHole() {
|
| + return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
|
| +}
|
| +
|
| +
|
| bool Object::IsTrue() {
|
| - return this == Heap::true_value();
|
| + return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
|
| }
|
|
|
|
|
| bool Object::IsFalse() {
|
| - return this == Heap::false_value();
|
| + return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
|
| }
|
|
|
|
|
| bool Object::IsArgumentsMarker() {
|
| - return this == Heap::arguments_marker();
|
| + return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
|
| }
|
|
|
|
|
| @@ -772,7 +796,6 @@
|
| }
|
|
|
|
|
| -
|
| MaybeObject* Object::ToSmi() {
|
| if (IsSmi()) return this;
|
| if (IsHeapNumber()) {
|
| @@ -795,7 +818,7 @@
|
| // GetElement can trigger a getter which can cause allocation.
|
| // This was not always the case. This ASSERT is here to catch
|
| // leftover incorrect uses.
|
| - ASSERT(Heap::IsAllocationAllowed());
|
| + ASSERT(HEAP->IsAllocationAllowed());
|
| return GetElementWithReceiver(this, index);
|
| }
|
|
|
| @@ -829,12 +852,14 @@
|
| #define WRITE_FIELD(p, offset, value) \
|
| (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
|
|
|
| -#define WRITE_BARRIER(object, offset, value) \
|
| - IncrementalMarking::RecordWrite(object, value); \
|
| - if (Heap::InNewSpace(value)) { \
|
| - Heap::RecordWrite(object->address(), offset); \
|
| +#define WRITE_BARRIER(heap, object, offset, value) \
|
| + heap->incremental_marking()->RecordWrite(object, value); \
|
| + if (HEAP->InNewSpace(value)) { \
|
| + heap->RecordWrite(object->address(), offset); \
|
| }
|
|
|
| +// TODO(gc): the InNewSpace check above should use the |heap| parameter, not HEAP.
|
| +
|
| #define READ_DOUBLE_FIELD(p, offset) \
|
| (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
|
|
|
| @@ -1025,6 +1050,22 @@
|
| #endif
|
|
|
|
|
| +Heap* HeapObject::GetHeap() {
|
| + // During GC, the map pointer in HeapObject is used in various ways that
|
| + // prevent us from retrieving Heap from the map.
|
| + // TODO(gc): assert that we are not in GC and implement GC code so that
|
| + // it does not pull the heap from the map.
|
| + return map()->heap();
|
| +}
|
| +
|
| +
|
| +Isolate* HeapObject::GetIsolate() {
|
| + Isolate* i = GetHeap()->isolate();
|
| + ASSERT(i == Isolate::Current());
|
| + return i;
|
| +}
|
| +
|
| +
|
| Map* HeapObject::map() {
|
| return map_word().ToMap();
|
| }
|
| @@ -1032,7 +1073,9 @@
|
|
|
| void HeapObject::set_map(Map* value) {
|
| set_map_word(MapWord::FromMap(value));
|
| - IncrementalMarking::RecordWrite(this, value);
|
| + if (value != NULL) {
|
| + value->heap()->incremental_marking()->RecordWrite(this, value);
|
| + }
|
| }
|
|
|
|
|
| @@ -1109,25 +1152,25 @@
|
|
|
| void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
|
| ASSERT(map()->has_fast_elements() ==
|
| - (value->map() == Heap::fixed_array_map() ||
|
| - value->map() == Heap::fixed_cow_array_map()));
|
| + (value->map() == GetHeap()->fixed_array_map() ||
|
| + value->map() == GetHeap()->fixed_cow_array_map()));
|
| // In the assert below Dictionary is covered under FixedArray.
|
| ASSERT(value->IsFixedArray() || value->IsExternalArray());
|
| WRITE_FIELD(this, kElementsOffset, value);
|
| - WRITE_BARRIER(this, kElementsOffset, value);
|
| + WRITE_BARRIER(GetHeap(), this, kElementsOffset, value);
|
| }
|
|
|
|
|
| void JSObject::initialize_properties() {
|
| - ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
|
| - WRITE_FIELD(this, kPropertiesOffset, Heap::empty_fixed_array());
|
| + ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
|
| + WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
|
| }
|
|
|
|
|
| void JSObject::initialize_elements() {
|
| ASSERT(map()->has_fast_elements());
|
| - ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
|
| - WRITE_FIELD(this, kElementsOffset, Heap::empty_fixed_array());
|
| + ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
|
| + WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
|
| }
|
|
|
|
|
| @@ -1146,6 +1189,16 @@
|
| ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
|
|
|
|
|
| +byte Oddball::kind() {
|
| + return READ_BYTE_FIELD(this, kKindOffset);
|
| +}
|
| +
|
| +
|
| +void Oddball::set_kind(byte value) {
|
| + WRITE_BYTE_FIELD(this, kKindOffset, value);
|
| +}
|
| +
|
| +
|
| Object* JSGlobalPropertyCell::value() {
|
| return READ_FIELD(this, kValueOffset);
|
| }
|
| @@ -1155,7 +1208,8 @@
|
| // The write barrier is not used for global property cells.
|
| ASSERT(!val->IsJSGlobalPropertyCell());
|
| WRITE_FIELD(this, kValueOffset, val);
|
| - IncrementalMarking::RecordWrite(this, val);
|
| + // TODO(gc) ISOLATES MERGE: the cell should have a heap accessor.
|
| + GetHeap()->incremental_marking()->RecordWrite(this, val);
|
| }
|
|
|
|
|
| @@ -1222,7 +1276,7 @@
|
| // to adjust the index here.
|
| int offset = GetHeaderSize() + (kPointerSize * index);
|
| WRITE_FIELD(this, offset, value);
|
| - WRITE_BARRIER(this, offset, value);
|
| + WRITE_BARRIER(GetHeap(), this, offset, value);
|
| }
|
|
|
|
|
| @@ -1248,7 +1302,7 @@
|
| if (index < 0) {
|
| int offset = map()->instance_size() + (index * kPointerSize);
|
| WRITE_FIELD(this, offset, value);
|
| - WRITE_BARRIER(this, offset, value);
|
| + WRITE_BARRIER(GetHeap(), this, offset, value);
|
| } else {
|
| ASSERT(index < properties()->length());
|
| properties()->set(index, value);
|
| @@ -1282,14 +1336,14 @@
|
| ASSERT(index < 0);
|
| int offset = map()->instance_size() + (index * kPointerSize);
|
| WRITE_FIELD(this, offset, value);
|
| - WRITE_BARRIER(this, offset, value);
|
| + WRITE_BARRIER(GetHeap(), this, offset, value);
|
| return value;
|
| }
|
|
|
|
|
|
|
| void JSObject::InitializeBody(int object_size, Object* value) {
|
| - ASSERT(!value->IsHeapObject() || !Heap::InNewSpace(value));
|
| + ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
|
| for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
|
| WRITE_FIELD(this, offset, value);
|
| }
|
| @@ -1312,7 +1366,7 @@
|
|
|
|
|
| void Struct::InitializeBody(int object_size) {
|
| - Object* value = Heap::undefined_value();
|
| + Object* value = GetHeap()->undefined_value();
|
| for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
|
| WRITE_FIELD(this, offset, value);
|
| }
|
| @@ -1358,7 +1412,7 @@
|
|
|
|
|
| void FixedArray::set(int index, Smi* value) {
|
| - ASSERT(map() != Heap::fixed_cow_array_map());
|
| + ASSERT(map() != HEAP->fixed_cow_array_map());
|
| ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
|
| int offset = kHeaderSize + index * kPointerSize;
|
| WRITE_FIELD(this, offset, value);
|
| @@ -1366,16 +1420,16 @@
|
|
|
|
|
| void FixedArray::set(int index, Object* value) {
|
| - ASSERT(map() != Heap::fixed_cow_array_map());
|
| + ASSERT(map() != HEAP->fixed_cow_array_map());
|
| ASSERT(index >= 0 && index < this->length());
|
| int offset = kHeaderSize + index * kPointerSize;
|
| WRITE_FIELD(this, offset, value);
|
| - WRITE_BARRIER(this, offset, value);
|
| + WRITE_BARRIER(GetHeap(), this, offset, value);
|
| }
|
|
|
|
|
| WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
|
| - if (Heap::InNewSpace(this)) return SKIP_WRITE_BARRIER;
|
| + if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
|
| return UPDATE_WRITE_BARRIER;
|
| }
|
|
|
| @@ -1383,45 +1437,56 @@
|
| void FixedArray::set(int index,
|
| Object* value,
|
| WriteBarrierMode mode) {
|
| - ASSERT(map() != Heap::fixed_cow_array_map());
|
| + ASSERT(map() != HEAP->fixed_cow_array_map());
|
| ASSERT(index >= 0 && index < this->length());
|
| int offset = kHeaderSize + index * kPointerSize;
|
| WRITE_FIELD(this, offset, value);
|
| - WRITE_BARRIER(this, offset, value);
|
| + WRITE_BARRIER(GetHeap(), this, offset, value);
|
| }
|
|
|
|
|
| void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
|
| - ASSERT(array->map() != Heap::raw_unchecked_fixed_cow_array_map());
|
| + ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
|
| ASSERT(index >= 0 && index < array->length());
|
| - ASSERT(!Heap::InNewSpace(value));
|
| + ASSERT(!HEAP->InNewSpace(value));
|
| WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
|
| - IncrementalMarking::RecordWrite(array, value);
|
| + array->GetHeap()->incremental_marking()->RecordWrite(array, value);
|
| }
|
|
|
|
|
| void FixedArray::set_undefined(int index) {
|
| - ASSERT(map() != Heap::fixed_cow_array_map());
|
| + ASSERT(map() != HEAP->fixed_cow_array_map());
|
| + set_undefined(GetHeap(), index);
|
| +}
|
| +
|
| +
|
| +void FixedArray::set_undefined(Heap* heap, int index) {
|
| ASSERT(index >= 0 && index < this->length());
|
| - ASSERT(!Heap::InNewSpace(Heap::undefined_value()));
|
| + ASSERT(!heap->InNewSpace(heap->undefined_value()));
|
| WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
|
| - Heap::undefined_value());
|
| + heap->undefined_value());
|
| }
|
|
|
|
|
| void FixedArray::set_null(int index) {
|
| - ASSERT(map() != Heap::fixed_cow_array_map());
|
| + set_null(GetHeap(), index);
|
| +}
|
| +
|
| +
|
| +void FixedArray::set_null(Heap* heap, int index) {
|
| ASSERT(index >= 0 && index < this->length());
|
| - ASSERT(!Heap::InNewSpace(Heap::null_value()));
|
| - WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value());
|
| + ASSERT(!heap->InNewSpace(heap->null_value()));
|
| + WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
|
| }
|
|
|
|
|
| void FixedArray::set_the_hole(int index) {
|
| - ASSERT(map() != Heap::fixed_cow_array_map());
|
| + ASSERT(map() != HEAP->fixed_cow_array_map());
|
| ASSERT(index >= 0 && index < this->length());
|
| - ASSERT(!Heap::InNewSpace(Heap::the_hole_value()));
|
| - WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::the_hole_value());
|
| + ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
|
| + WRITE_FIELD(this,
|
| + kHeaderSize + index * kPointerSize,
|
| + GetHeap()->the_hole_value());
|
| }
|
|
|
|
|
| @@ -1432,19 +1497,20 @@
|
| }
|
|
|
|
|
| -void FixedArray::set_unchecked(int index,
|
| +void FixedArray::set_unchecked(Heap* heap,
|
| + int index,
|
| Object* value,
|
| WriteBarrierMode mode) {
|
| int offset = kHeaderSize + index * kPointerSize;
|
| WRITE_FIELD(this, offset, value);
|
| - WRITE_BARRIER(this, offset, value);
|
| + WRITE_BARRIER(heap, this, offset, value);
|
| }
|
|
|
|
|
| -void FixedArray::set_null_unchecked(int index) {
|
| +void FixedArray::set_null_unchecked(Heap* heap, int index) {
|
| ASSERT(index >= 0 && index < this->length());
|
| - ASSERT(!Heap::InNewSpace(Heap::null_value()));
|
| - WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value());
|
| + ASSERT(!HEAP->InNewSpace(heap->null_value()));
|
| + WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
|
| }
|
|
|
|
|
| @@ -1454,9 +1520,9 @@
|
|
|
|
|
| bool DescriptorArray::IsEmpty() {
|
| - ASSERT(this == Heap::empty_descriptor_array() ||
|
| - this->length() > 2);
|
| - return this == Heap::empty_descriptor_array();
|
| + ASSERT(this->length() > kFirstIndex ||
|
| + this == HEAP->empty_descriptor_array());
|
| + return length() <= kFirstIndex;
|
| }
|
|
|
|
|
| @@ -1486,10 +1552,10 @@
|
|
|
|
|
| int DescriptorArray::SearchWithCache(String* name) {
|
| - int number = DescriptorLookupCache::Lookup(this, name);
|
| + int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
|
| if (number == DescriptorLookupCache::kAbsent) {
|
| number = Search(name);
|
| - DescriptorLookupCache::Update(this, name, number);
|
| + GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
|
| }
|
| return number;
|
| }
|
| @@ -1575,8 +1641,8 @@
|
| ASSERT(descriptor_number < number_of_descriptors());
|
|
|
| // Make sure none of the elements in desc are in new space.
|
| - ASSERT(!Heap::InNewSpace(desc->GetKey()));
|
| - ASSERT(!Heap::InNewSpace(desc->GetValue()));
|
| + ASSERT(!HEAP->InNewSpace(desc->GetKey()));
|
| + ASSERT(!HEAP->InNewSpace(desc->GetValue()));
|
|
|
| fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
|
| FixedArray* content_array = GetContentArray();
|
| @@ -1601,6 +1667,30 @@
|
| }
|
|
|
|
|
| +template<typename Shape, typename Key>
|
| +int HashTable<Shape, Key>::FindEntry(Key key) {
|
| + return FindEntry(GetIsolate(), key);
|
| +}
|
| +
|
| +
|
| +// Find entry for key otherwise return kNotFound.
|
| +template<typename Shape, typename Key>
|
| +int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
|
| + uint32_t capacity = Capacity();
|
| + uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
|
| + uint32_t count = 1;
|
| + // EnsureCapacity will guarantee the hash table is never full.
|
| + while (true) {
|
| + Object* element = KeyAt(entry);
|
| + if (element == isolate->heap()->undefined_value()) break; // Empty entry.
|
| + if (element != isolate->heap()->null_value() &&
|
| + Shape::IsMatch(key, element)) return entry;
|
| + entry = NextProbe(entry, count++, capacity);
|
| + }
|
| + return kNotFound;
|
| +}
|
| +
|
| +
|
| bool NumberDictionary::requires_slow_elements() {
|
| Object* max_index_object = get(kMaxNumberKeyIndex);
|
| if (!max_index_object->IsSmi()) return false;
|
| @@ -1849,7 +1939,7 @@
|
|
|
| void ConsString::set_first(String* value, WriteBarrierMode mode) {
|
| WRITE_FIELD(this, kFirstOffset, value);
|
| - WRITE_BARRIER(this, kFirstOffset, value);
|
| + WRITE_BARRIER(GetHeap(), this, kFirstOffset, value);
|
| }
|
|
|
|
|
| @@ -1865,7 +1955,7 @@
|
|
|
| void ConsString::set_second(String* value, WriteBarrierMode mode) {
|
| WRITE_FIELD(this, kSecondOffset, value);
|
| - WRITE_BARRIER(this, kSecondOffset, value);
|
| + WRITE_BARRIER(GetHeap(), this, kSecondOffset, value);
|
| }
|
|
|
|
|
| @@ -1901,7 +1991,7 @@
|
| int cache_size = size();
|
| Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
|
| MemsetPointer(entries_start,
|
| - Heap::the_hole_value(),
|
| + GetHeap()->the_hole_value(),
|
| cache_size - kEntriesIndex);
|
| MakeZeroSize();
|
| }
|
| @@ -2621,6 +2711,15 @@
|
| }
|
|
|
|
|
| +Heap* Map::heap() {
|
| + // NOTE: the address() helper is deliberately not used here, to save one instruction.
|
| + Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap();
|
| + ASSERT(heap != NULL);
|
| + ASSERT(heap->isolate() == Isolate::Current());
|
| + return heap;
|
| +}
|
| +
|
| +
|
| Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
|
| return HeapObject::
|
| FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
|
| @@ -2635,7 +2734,7 @@
|
| void Map::set_prototype(Object* value, WriteBarrierMode mode) {
|
| ASSERT(value->IsNull() || value->IsJSObject());
|
| WRITE_FIELD(this, kPrototypeOffset, value);
|
| - WRITE_BARRIER(this, kPrototypeOffset, value);
|
| + WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value);
|
| }
|
|
|
|
|
| @@ -2647,7 +2746,7 @@
|
| }
|
| Map* new_map = Map::cast(obj);
|
| new_map->set_has_fast_elements(true);
|
| - Counters::map_slow_to_fast_elements.Increment();
|
| + COUNTERS->map_slow_to_fast_elements()->Increment();
|
| return new_map;
|
| }
|
|
|
| @@ -2660,7 +2759,7 @@
|
| }
|
| Map* new_map = Map::cast(obj);
|
| new_map->set_has_fast_elements(false);
|
| - Counters::map_fast_to_slow_elements.Increment();
|
| + COUNTERS->map_fast_to_slow_elements()->Increment();
|
| return new_map;
|
| }
|
|
|
| @@ -2675,7 +2774,7 @@
|
| Map* new_map = Map::cast(obj);
|
| new_map->set_has_fast_elements(false);
|
| new_map->set_has_external_array_elements(true);
|
| - Counters::map_to_external_array_elements.Increment();
|
| + COUNTERS->map_to_external_array_elements()->Increment();
|
| return new_map;
|
| }
|
|
|
| @@ -2687,7 +2786,10 @@
|
|
|
| ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
|
| ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
|
| -ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
|
| +ACCESSORS_GCSAFE(JSFunction,
|
| + next_function_link,
|
| + Object,
|
| + kNextFunctionLinkOffset)
|
|
|
| ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
|
| ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
|
| @@ -2776,8 +2878,8 @@
|
| #endif
|
|
|
| ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
|
| -ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
|
| -ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
|
| +ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
|
| +ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
|
| ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
|
| kInstanceClassNameOffset)
|
| ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
|
| @@ -2905,7 +3007,7 @@
|
|
|
|
|
| bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
|
| - return initial_map() != Heap::undefined_value();
|
| + return initial_map() != HEAP->undefined_value();
|
| }
|
|
|
|
|
| @@ -2984,7 +3086,8 @@
|
|
|
| void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
|
| WRITE_FIELD(this, kCodeOffset, value);
|
| - WRITE_BARRIER(this, kCodeOffset, value);
|
| + // TODO(gc) ISOLATES MERGE: use GetHeap() here instead of the global HEAP.
|
| + WRITE_BARRIER(HEAP, this, kCodeOffset, value);
|
| }
|
|
|
|
|
| @@ -2997,7 +3100,10 @@
|
| void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
|
| WriteBarrierMode mode) {
|
| WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
|
| - WRITE_BARRIER(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
|
| + WRITE_BARRIER(GetHeap(),
|
| + this,
|
| + kScopeInfoOffset,
|
| + reinterpret_cast<Object*>(value));
|
| }
|
|
|
|
|
| @@ -3012,7 +3118,8 @@
|
|
|
|
|
| bool SharedFunctionInfo::is_compiled() {
|
| - return code() != Builtins::builtin(Builtins::LazyCompile);
|
| + return code() !=
|
| + Isolate::Current()->builtins()->builtin(Builtins::LazyCompile);
|
| }
|
|
|
|
|
| @@ -3072,7 +3179,7 @@
|
|
|
|
|
| bool JSFunction::IsMarkedForLazyRecompilation() {
|
| - return code() == Builtins::builtin(Builtins::LazyRecompile);
|
| + return code() == GetIsolate()->builtins()->builtin(Builtins::LazyRecompile);
|
| }
|
|
|
|
|
| @@ -3089,10 +3196,10 @@
|
|
|
| void JSFunction::set_code(Code* value) {
|
| // Skip the write barrier because code is never in new space.
|
| - ASSERT(!Heap::InNewSpace(value));
|
| + ASSERT(!HEAP->InNewSpace(value));
|
| Address entry = value->entry();
|
| WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
|
| - IncrementalMarking::RecordWrite(this, value);
|
| + GetHeap()->incremental_marking()->RecordWrite(this, value);
|
| }
|
|
|
|
|
| @@ -3130,9 +3237,9 @@
|
|
|
|
|
| void JSFunction::set_context(Object* value) {
|
| - ASSERT(value == Heap::undefined_value() || value->IsContext());
|
| + ASSERT(value->IsUndefined() || value->IsContext());
|
| WRITE_FIELD(this, kContextOffset, value);
|
| - WRITE_BARRIER(this, kContextOffset, value);
|
| + WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
|
| }
|
|
|
| ACCESSORS(JSFunction, prototype_or_initial_map, Object,
|
| @@ -3187,7 +3294,7 @@
|
|
|
|
|
| bool JSFunction::is_compiled() {
|
| - return code() != Builtins::builtin(Builtins::LazyCompile);
|
| + return code() != GetIsolate()->builtins()->builtin(Builtins::LazyCompile);
|
| }
|
|
|
|
|
| @@ -3206,7 +3313,7 @@
|
| Object* value) {
|
| ASSERT(id < kJSBuiltinsCount); // id is unsigned.
|
| WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
|
| - WRITE_BARRIER(this, OffsetOfFunctionWithId(id), value);
|
| + WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
|
| }
|
|
|
|
|
| @@ -3220,7 +3327,7 @@
|
| Code* value) {
|
| ASSERT(id < kJSBuiltinsCount); // id is unsigned.
|
| WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
|
| - ASSERT(!Heap::InNewSpace(value));
|
| + ASSERT(!HEAP->InNewSpace(value));
|
| }
|
|
|
|
|
| @@ -3370,8 +3477,8 @@
|
|
|
| JSObject::ElementsKind JSObject::GetElementsKind() {
|
| if (map()->has_fast_elements()) {
|
| - ASSERT(elements()->map() == Heap::fixed_array_map() ||
|
| - elements()->map() == Heap::fixed_cow_array_map());
|
| + ASSERT(elements()->map() == GetHeap()->fixed_array_map() ||
|
| + elements()->map() == GetHeap()->fixed_cow_array_map());
|
| return FAST_ELEMENTS;
|
| }
|
| HeapObject* array = elements();
|
| @@ -3381,6 +3488,7 @@
|
| ASSERT(array->IsDictionary());
|
| return DICTIONARY_ELEMENTS;
|
| }
|
| + ASSERT(!map()->has_fast_elements());
|
| if (array->IsExternalArray()) {
|
| switch (array->map()->instance_type()) {
|
| case EXTERNAL_BYTE_ARRAY_TYPE:
|
| @@ -3466,16 +3574,17 @@
|
| MaybeObject* JSObject::EnsureWritableFastElements() {
|
| ASSERT(HasFastElements());
|
| FixedArray* elems = FixedArray::cast(elements());
|
| - if (elems->map() != Heap::fixed_cow_array_map()) return elems;
|
| + Isolate* isolate = GetIsolate();
|
| + if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
|
| Object* writable_elems;
|
| - { MaybeObject* maybe_writable_elems =
|
| - Heap::CopyFixedArrayWithMap(elems, Heap::fixed_array_map());
|
| + { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
|
| + elems, isolate->heap()->fixed_array_map());
|
| if (!maybe_writable_elems->ToObject(&writable_elems)) {
|
| return maybe_writable_elems;
|
| }
|
| }
|
| set_elements(FixedArray::cast(writable_elems));
|
| - Counters::cow_arrays_converted.Increment();
|
| + isolate->counters()->cow_arrays_converted()->Increment();
|
| return writable_elems;
|
| }
|
|
|
| @@ -3615,7 +3724,7 @@
|
| Object* JSObject::BypassGlobalProxy() {
|
| if (IsJSGlobalProxy()) {
|
| Object* proto = GetPrototype();
|
| - if (proto->IsNull()) return Heap::undefined_value();
|
| + if (proto->IsNull()) return GetHeap()->undefined_value();
|
| ASSERT(proto->IsJSGlobalObject());
|
| return proto;
|
| }
|
| @@ -3626,7 +3735,7 @@
|
| bool JSObject::HasHiddenPropertiesObject() {
|
| ASSERT(!IsJSGlobalProxy());
|
| return GetPropertyAttributePostInterceptor(this,
|
| - Heap::hidden_symbol(),
|
| + GetHeap()->hidden_symbol(),
|
| false) != ABSENT;
|
| }
|
|
|
| @@ -3639,7 +3748,7 @@
|
| // object.
|
| Object* result =
|
| GetLocalPropertyPostInterceptor(this,
|
| - Heap::hidden_symbol(),
|
| + GetHeap()->hidden_symbol(),
|
| &attributes)->ToObjectUnchecked();
|
| return result;
|
| }
|
| @@ -3647,7 +3756,7 @@
|
|
|
| MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
|
| ASSERT(!IsJSGlobalProxy());
|
| - return SetPropertyPostInterceptor(Heap::hidden_symbol(),
|
| + return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
|
| hidden_obj,
|
| DONT_ENUM,
|
| kNonStrictMode);
|
| @@ -3715,12 +3824,57 @@
|
| }
|
|
|
|
|
| -void Map::ClearCodeCache() {
|
| +bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
|
| + ASSERT(other->IsNumber());
|
| + return key == static_cast<uint32_t>(other->Number());
|
| +}
|
| +
|
| +
|
| +uint32_t NumberDictionaryShape::Hash(uint32_t key) {
|
| + return ComputeIntegerHash(key);
|
| +}
|
| +
|
| +
|
| +uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
|
| + ASSERT(other->IsNumber());
|
| + return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
|
| +}
|
| +
|
| +
|
| +MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
|
| + return Isolate::Current()->heap()->NumberFromUint32(key);
|
| +}
|
| +
|
| +
|
| +bool StringDictionaryShape::IsMatch(String* key, Object* other) {
|
| + // We know that all entries in a hash table had their hash keys created.
|
| + // Use that knowledge to have fast failure.
|
| + if (key->Hash() != String::cast(other)->Hash()) return false;
|
| + return key->Equals(String::cast(other));
|
| +}
|
| +
|
| +
|
| +uint32_t StringDictionaryShape::Hash(String* key) {
|
| + return key->Hash();
|
| +}
|
| +
|
| +
|
| +uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
|
| + return String::cast(other)->Hash();
|
| +}
|
| +
|
| +
|
| +MaybeObject* StringDictionaryShape::AsObject(String* key) {
|
| + return key;
|
| +}
|
| +
|
| +
|
| +void Map::ClearCodeCache(Heap* heap) {
|
| // No write barrier is needed since empty_fixed_array is not in new space.
|
| // Please note this function is used during marking:
|
| // - MarkCompactCollector::MarkUnmarkedObject
|
| - ASSERT(!Heap::InNewSpace(Heap::raw_unchecked_empty_fixed_array()));
|
| - WRITE_FIELD(this, kCodeCacheOffset, Heap::raw_unchecked_empty_fixed_array());
|
| + ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
|
| + WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
|
| }
|
|
|
|
|
| @@ -3733,7 +3887,7 @@
|
| // constantly growing.
|
| Expand(required_size + (required_size >> 3));
|
| // It's a performance benefit to keep a frequently used array in new-space.
|
| - } else if (!Heap::new_space()->Contains(elts) &&
|
| + } else if (!GetHeap()->new_space()->Contains(elts) &&
|
| required_size < kArraySizeThatFitsComfortablyInNewSpace) {
|
| // Expand will allocate a new backing store in new space even if the size
|
| // we asked for isn't larger than what we had before.
|
| @@ -3755,10 +3909,25 @@
|
|
|
| MaybeObject* FixedArray::Copy() {
|
| if (length() == 0) return this;
|
| - return Heap::CopyFixedArray(this);
|
| + return GetHeap()->CopyFixedArray(this);
|
| }
|
|
|
|
|
| +Relocatable::Relocatable(Isolate* isolate) {
|
| + ASSERT(isolate == Isolate::Current());
|
| + isolate_ = isolate;
|
| + prev_ = isolate->relocatable_top();
|
| + isolate->set_relocatable_top(this);
|
| +}
|
| +
|
| +
|
| +Relocatable::~Relocatable() {
|
| + ASSERT(isolate_ == Isolate::Current());
|
| + ASSERT_EQ(isolate_->relocatable_top(), this);
|
| + isolate_->set_relocatable_top(prev_);
|
| +}
|
| +
|
| +
|
| int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
|
| return map->instance_size();
|
| }
|
|
|