| Index: src/objects-inl.h
|
| ===================================================================
|
| --- src/objects-inl.h (revision 9531)
|
| +++ src/objects-inl.h (working copy)
|
| @@ -43,8 +43,11 @@
|
| #include "isolate.h"
|
| #include "property.h"
|
| #include "spaces.h"
|
| +#include "store-buffer.h"
|
| #include "v8memory.h"
|
|
|
| +#include "incremental-marking.h"
|
| +
|
| namespace v8 {
|
| namespace internal {
|
|
|
| @@ -80,19 +83,10 @@
|
| type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
|
| void holder::set_##name(type* value, WriteBarrierMode mode) { \
|
| WRITE_FIELD(this, offset, value); \
|
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode); \
|
| + CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
|
| }
|
|
|
|
|
| -// GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead.
|
| -#define ACCESSORS_GCSAFE(holder, name, type, offset) \
|
| - type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
|
| - void holder::set_##name(type* value, WriteBarrierMode mode) { \
|
| - WRITE_FIELD(this, offset, value); \
|
| - CONDITIONAL_WRITE_BARRIER(HEAP, this, offset, mode); \
|
| - }
|
| -
|
| -
|
| #define SMI_ACCESSORS(holder, name, offset) \
|
| int holder::name() { \
|
| Object* value = READ_FIELD(this, offset); \
|
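| For reference, this is roughly what the patched macro now expands to for a
| concrete accessor such as ACCESSORS(JSFunction, literals, FixedArray,
| kLiteralsOffset) used later in this file (a sketch, not part of the patch):
|
|   FixedArray* JSFunction::literals() {
|     return FixedArray::cast(READ_FIELD(this, kLiteralsOffset));
|   }
|   void JSFunction::set_literals(FixedArray* value, WriteBarrierMode mode) {
|     WRITE_FIELD(this, kLiteralsOffset, value);
|     // The barrier now receives the stored value, so it can test whether
|     // the value is in new space or needs incremental-marking bookkeeping.
|     CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLiteralsOffset, value, mode);
|   }
|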
| @@ -147,6 +141,12 @@
|
| }
|
|
|
|
|
| +bool Object::NonFailureIsHeapObject() {
|
| + ASSERT(!this->IsFailure());
|
| + return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
|
| +}
|
| +
|
| +
|
| bool Object::IsHeapNumber() {
|
| return Object::IsHeapObject()
|
| && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
|
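| The new predicate relies on V8's pointer tagging: a small integer (smi) has
| tag 0 in its low bit, while heap pointers (and Failure values, hence the
| assert) have the tag bit set. A sketch of the encoding check:
|
|   intptr_t word = reinterpret_cast<intptr_t>(obj);
|   bool is_smi = (word & kSmiTagMask) == kSmiTag;           // kSmiTag == 0
|   bool is_heap_object = (word & kSmiTagMask) != kSmiTag;   // tag bit set
|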
| @@ -165,6 +165,13 @@
|
| }
|
|
|
|
|
| +bool Object::IsSpecFunction() {
|
| + if (!Object::IsHeapObject()) return false;
|
| + InstanceType type = HeapObject::cast(this)->map()->instance_type();
|
| + return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
|
| +}
|
| +
|
| +
|
| bool Object::IsSymbol() {
|
| if (!this->IsHeapObject()) return false;
|
| uint32_t type = HeapObject::cast(this)->map()->instance_type();
|
| @@ -402,6 +409,19 @@
|
| }
|
|
|
|
|
| +bool Object::IsFreeSpace() {
|
| + return Object::IsHeapObject()
|
| + && HeapObject::cast(this)->map()->instance_type() == FREE_SPACE_TYPE;
|
| +}
|
| +
|
| +
|
| +bool Object::IsFiller() {
|
| + if (!Object::IsHeapObject()) return false;
|
| + InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
|
| + return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
|
| +}
|
| +
|
| +
|
| bool Object::IsExternalPixelArray() {
|
| return Object::IsHeapObject() &&
|
| HeapObject::cast(this)->map()->instance_type() ==
|
| @@ -509,20 +529,23 @@
|
|
|
|
|
| bool Object::IsJSReceiver() {
|
| + STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
|
| return IsHeapObject() &&
|
| HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
|
| }
|
|
|
|
|
| bool Object::IsJSObject() {
|
| - return IsJSReceiver() && !IsJSProxy();
|
| + STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
|
| + return IsHeapObject() &&
|
| + HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
|
| }
|
|
|
|
|
| bool Object::IsJSProxy() {
|
| - return Object::IsHeapObject() &&
|
| - (HeapObject::cast(this)->map()->instance_type() == JS_PROXY_TYPE ||
|
| - HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_PROXY_TYPE);
|
| + if (!Object::IsHeapObject()) return false;
|
| + InstanceType type = HeapObject::cast(this)->map()->instance_type();
|
| + return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
|
| }
|
|
|
|
|
| @@ -642,7 +665,6 @@
|
|
|
|
|
| bool Object::IsOddball() {
|
| - ASSERT(HEAP->is_safe_to_read_maps());
|
| return Object::IsHeapObject()
|
| && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
|
| }
|
| @@ -939,21 +961,20 @@
|
| #define WRITE_FIELD(p, offset, value) \
|
| (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
|
|
|
| -// TODO(isolates): Pass heap in to these macros.
|
| -#define WRITE_BARRIER(object, offset) \
|
| - object->GetHeap()->RecordWrite(object->address(), offset);
|
| +#define WRITE_BARRIER(heap, object, offset, value) \
|
| + heap->incremental_marking()->RecordWrite( \
|
| + object, HeapObject::RawField(object, offset), value); \
|
| + if (heap->InNewSpace(value)) { \
|
| + heap->RecordWrite(object->address(), offset); \
|
| + }
|
|
|
| -// CONDITIONAL_WRITE_BARRIER must be issued after the actual
|
| -// write due to the assert validating the written value.
|
| -#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, mode) \
|
| - if (mode == UPDATE_WRITE_BARRIER) { \
|
| - heap->RecordWrite(object->address(), offset); \
|
| - } else { \
|
| - ASSERT(mode == SKIP_WRITE_BARRIER); \
|
| - ASSERT(heap->InNewSpace(object) || \
|
| - !heap->InNewSpace(READ_FIELD(object, offset)) || \
|
| - Page::FromAddress(object->address())-> \
|
| - IsRegionDirty(object->address() + offset)); \
|
| +#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
|
| + if (mode == UPDATE_WRITE_BARRIER) { \
|
| + heap->incremental_marking()->RecordWrite( \
|
| + object, HeapObject::RawField(object, offset), value); \
|
| + if (heap->InNewSpace(value)) { \
|
| + heap->RecordWrite(object->address(), offset); \
|
| + } \
|
| }
|
|
|
| #ifndef V8_TARGET_ARCH_MIPS
|
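| This macro rewrite is the heart of the patch: a pointer store now feeds two
| barriers. The incremental-marking barrier preserves the tri-color invariant
| while marking is in progress, and the store buffer (heap->RecordWrite)
| tracks old-to-new pointers for the scavenger. Expanded for a single store,
| the sequence is roughly:
|
|   WRITE_FIELD(object, offset, value);                      // the raw store
|   heap->incremental_marking()->RecordWrite(                // marking barrier
|       object, HeapObject::RawField(object, offset), value);
|   if (heap->InNewSpace(value)) {                           // generational barrier:
|     heap->RecordWrite(object->address(), offset);          // store buffer entry
|   }
|
| Note that SKIP_WRITE_BARRIER in CONDITIONAL_WRITE_BARRIER now skips both
| halves, so it is only safe when the caller knows neither one applies.
|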
| @@ -974,7 +995,6 @@
|
| #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
|
| #endif // V8_TARGET_ARCH_MIPS
|
|
|
| -
|
| #ifndef V8_TARGET_ARCH_MIPS
|
| #define WRITE_DOUBLE_FIELD(p, offset, value) \
|
| (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
|
| @@ -1169,91 +1189,6 @@
|
| }
|
|
|
|
|
| -bool MapWord::IsMarked() {
|
| - return (value_ & kMarkingMask) == 0;
|
| -}
|
| -
|
| -
|
| -void MapWord::SetMark() {
|
| - value_ &= ~kMarkingMask;
|
| -}
|
| -
|
| -
|
| -void MapWord::ClearMark() {
|
| - value_ |= kMarkingMask;
|
| -}
|
| -
|
| -
|
| -bool MapWord::IsOverflowed() {
|
| - return (value_ & kOverflowMask) != 0;
|
| -}
|
| -
|
| -
|
| -void MapWord::SetOverflow() {
|
| - value_ |= kOverflowMask;
|
| -}
|
| -
|
| -
|
| -void MapWord::ClearOverflow() {
|
| - value_ &= ~kOverflowMask;
|
| -}
|
| -
|
| -
|
| -MapWord MapWord::EncodeAddress(Address map_address, int offset) {
|
| - // Offset is the distance in live bytes from the first live object in the
|
| - // same page. The offset between two objects in the same page should not
|
| - // exceed the object area size of a page.
|
| - ASSERT(0 <= offset && offset < Page::kObjectAreaSize);
|
| -
|
| - uintptr_t compact_offset = offset >> kObjectAlignmentBits;
|
| - ASSERT(compact_offset < (1 << kForwardingOffsetBits));
|
| -
|
| - Page* map_page = Page::FromAddress(map_address);
|
| - ASSERT_MAP_PAGE_INDEX(map_page->mc_page_index);
|
| -
|
| - uintptr_t map_page_offset =
|
| - map_page->Offset(map_address) >> kMapAlignmentBits;
|
| -
|
| - uintptr_t encoding =
|
| - (compact_offset << kForwardingOffsetShift) |
|
| - (map_page_offset << kMapPageOffsetShift) |
|
| - (map_page->mc_page_index << kMapPageIndexShift);
|
| - return MapWord(encoding);
|
| -}
|
| -
|
| -
|
| -Address MapWord::DecodeMapAddress(MapSpace* map_space) {
|
| - int map_page_index =
|
| - static_cast<int>((value_ & kMapPageIndexMask) >> kMapPageIndexShift);
|
| - ASSERT_MAP_PAGE_INDEX(map_page_index);
|
| -
|
| - int map_page_offset = static_cast<int>(
|
| - ((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift) <<
|
| - kMapAlignmentBits);
|
| -
|
| - return (map_space->PageAddress(map_page_index) + map_page_offset);
|
| -}
|
| -
|
| -
|
| -int MapWord::DecodeOffset() {
|
| - // The offset field is represented in the kForwardingOffsetBits
|
| - // most-significant bits.
|
| - uintptr_t offset = (value_ >> kForwardingOffsetShift) << kObjectAlignmentBits;
|
| - ASSERT(offset < static_cast<uintptr_t>(Page::kObjectAreaSize));
|
| - return static_cast<int>(offset);
|
| -}
|
| -
|
| -
|
| -MapWord MapWord::FromEncodedAddress(Address address) {
|
| - return MapWord(reinterpret_cast<uintptr_t>(address));
|
| -}
|
| -
|
| -
|
| -Address MapWord::ToEncodedAddress() {
|
| - return reinterpret_cast<Address>(value_);
|
| -}
|
| -
|
| -
|
| #ifdef DEBUG
|
| void HeapObject::VerifyObjectField(int offset) {
|
| VerifyPointer(READ_FIELD(this, offset));
|
| @@ -1266,12 +1201,11 @@
|
|
|
|
|
| Heap* HeapObject::GetHeap() {
|
| - // During GC, the map pointer in HeapObject is used in various ways that
|
| - // prevent us from retrieving Heap from the map.
|
| - // Assert that we are not in GC, implement GC code in a way that it doesn't
|
| - // pull heap from the map.
|
| - ASSERT(HEAP->is_safe_to_read_maps());
|
| - return map()->heap();
|
| + Heap* heap =
|
| + MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
|
| + ASSERT(heap != NULL);
|
| + ASSERT(heap->isolate() == Isolate::Current());
|
| + return heap;
|
| }
|
|
|
|
|
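| GetHeap() no longer goes through the map, whose slot is reused during GC;
| instead every page header carries a back pointer to its heap, and
| MemoryChunk::FromAddress presumably just masks off the low bits of the
| object address. A sketch of that lookup (the exact mask constant lives in
| spaces.h; kPageAlignmentMask here is illustrative):
|
|   MemoryChunk* chunk = reinterpret_cast<MemoryChunk*>(
|       reinterpret_cast<uintptr_t>(object->address()) & ~kPageAlignmentMask);
|   Heap* heap = chunk->heap();
|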
| @@ -1287,9 +1221,20 @@
|
|
|
| void HeapObject::set_map(Map* value) {
|
| set_map_word(MapWord::FromMap(value));
|
| + if (value != NULL) {
|
| + // TODO(1600) We are passing NULL as a slot because maps can never be on
|
| + // an evacuation candidate.
|
| + value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
|
| + }
|
| }
|
|
|
|
|
| +// Unsafe accessor omitting write barrier.
|
| +void HeapObject::set_map_unsafe(Map* value) {
|
| + set_map_word(MapWord::FromMap(value));
|
| +}
|
| +
|
| +
|
| MapWord HeapObject::map_word() {
|
| return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
|
| }
|
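| The map slot gets its own barrier because a black object may be given a
| white map while incremental marking runs; the slot argument is NULL since
| map space is never an evacuation candidate. The unsafe variant exists for
| GC-internal writes that must not re-enter the marking machinery, along the
| lines of this sketch (using the filler map introduced by this patch):
|
|   // E.g. when the sweeper turns a dead object into filler:
|   dead_object->set_map_unsafe(heap->free_space_map());
|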
| @@ -1329,47 +1274,6 @@
|
| }
|
|
|
|
|
| -bool HeapObject::IsMarked() {
|
| - return map_word().IsMarked();
|
| -}
|
| -
|
| -
|
| -void HeapObject::SetMark() {
|
| - ASSERT(!IsMarked());
|
| - MapWord first_word = map_word();
|
| - first_word.SetMark();
|
| - set_map_word(first_word);
|
| -}
|
| -
|
| -
|
| -void HeapObject::ClearMark() {
|
| - ASSERT(IsMarked());
|
| - MapWord first_word = map_word();
|
| - first_word.ClearMark();
|
| - set_map_word(first_word);
|
| -}
|
| -
|
| -
|
| -bool HeapObject::IsOverflowed() {
|
| - return map_word().IsOverflowed();
|
| -}
|
| -
|
| -
|
| -void HeapObject::SetOverflow() {
|
| - MapWord first_word = map_word();
|
| - first_word.SetOverflow();
|
| - set_map_word(first_word);
|
| -}
|
| -
|
| -
|
| -void HeapObject::ClearOverflow() {
|
| - ASSERT(IsOverflowed());
|
| - MapWord first_word = map_word();
|
| - first_word.ClearOverflow();
|
| - set_map_word(first_word);
|
| -}
|
| -
|
| -
|
| double HeapNumber::value() {
|
| return READ_DOUBLE_FIELD(this, kValueOffset);
|
| }
|
| @@ -1400,16 +1304,84 @@
|
| return static_cast<FixedArrayBase*>(array);
|
| }
|
|
|
| +void JSObject::ValidateSmiOnlyElements() {
|
| +#if DEBUG
|
| + if (FLAG_smi_only_arrays &&
|
| + map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
|
| + Heap* heap = GetHeap();
|
| + // Don't use elements, since integrity checks will fail if there
|
| + // are filler pointers in the array.
|
| + FixedArray* fixed_array =
|
| + reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
|
| + Map* map = fixed_array->map();
|
| + // Arrays that have been shifted in place can't be verified.
|
| + if (map != heap->raw_unchecked_one_pointer_filler_map() &&
|
| + map != heap->raw_unchecked_two_pointer_filler_map() &&
|
| + map != heap->free_space_map()) {
|
| + for (int i = 0; i < fixed_array->length(); i++) {
|
| + Object* current = fixed_array->get(i);
|
| + ASSERT(current->IsSmi() || current == heap->the_hole_value());
|
| + }
|
| + }
|
| + }
|
| +#endif
|
| +}
|
|
|
| +
|
| +MaybeObject* JSObject::EnsureCanContainNonSmiElements() {
|
| +#if DEBUG
|
| + ValidateSmiOnlyElements();
|
| +#endif
|
| + if (FLAG_smi_only_arrays &&
|
| + (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS)) {
|
| + Object* obj;
|
| + MaybeObject* maybe_obj = GetElementsTransitionMap(FAST_ELEMENTS);
|
| + if (!maybe_obj->ToObject(&obj)) return maybe_obj;
|
| + set_map(Map::cast(obj));
|
| + }
|
| + return this;
|
| +}
|
| +
|
| +
|
| +MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
|
| + uint32_t count) {
|
| + if (FLAG_smi_only_arrays &&
|
| + map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
|
| + for (uint32_t i = 0; i < count; ++i) {
|
| + Object* current = *objects++;
|
| + if (!current->IsSmi() && current != GetHeap()->the_hole_value()) {
|
| + return EnsureCanContainNonSmiElements();
|
| + }
|
| + }
|
| + }
|
| + return this;
|
| +}
|
| +
|
| +
|
| +MaybeObject* JSObject::EnsureCanContainElements(FixedArray* elements) {
|
| + if (FLAG_smi_only_arrays) {
|
| + Object** objects = reinterpret_cast<Object**>(
|
| + FIELD_ADDR(elements, elements->OffsetOfElementAt(0)));
|
| + return EnsureCanContainElements(objects, elements->length());
|
| + } else {
|
| + return this;
|
| + }
|
| +}
|
| +
|
| +
|
| void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
|
| - ASSERT(map()->has_fast_elements() ==
|
| + ASSERT((map()->has_fast_elements() ||
|
| + map()->has_fast_smi_only_elements()) ==
|
| (value->map() == GetHeap()->fixed_array_map() ||
|
| value->map() == GetHeap()->fixed_cow_array_map()));
|
| ASSERT(map()->has_fast_double_elements() ==
|
| value->IsFixedDoubleArray());
|
| ASSERT(value->HasValidElements());
|
| +#ifdef DEBUG
|
| + ValidateSmiOnlyElements();
|
| +#endif
|
| WRITE_FIELD(this, kElementsOffset, value);
|
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode);
|
| + CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
|
| }
|
|
|
|
|
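| Smi-only arrays in a nutshell: an object starts with FAST_SMI_ONLY_ELEMENTS
| and must transition to FAST_ELEMENTS before the first non-smi value (other
| than the hole) is stored. A store site is expected to use the helpers above
| along the lines of this sketch:
|
|   MaybeObject* maybe = array->EnsureCanContainElements(&value, 1);
|   if (maybe->IsFailure()) return maybe;  // allocating the transition map failed
|   FixedArray::cast(array->elements())->set(index, value);
|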
| @@ -1420,7 +1392,7 @@
|
|
|
|
|
| void JSObject::initialize_elements() {
|
| - ASSERT(map()->has_fast_elements());
|
| + ASSERT(map()->has_fast_elements() || map()->has_fast_smi_only_elements());
|
| ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
|
| WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
|
| }
|
| @@ -1428,9 +1400,11 @@
|
|
|
| MaybeObject* JSObject::ResetElements() {
|
| Object* obj;
|
| - { MaybeObject* maybe_obj = map()->GetFastElementsMap();
|
| - if (!maybe_obj->ToObject(&obj)) return maybe_obj;
|
| - }
|
| + ElementsKind elements_kind = FLAG_smi_only_arrays
|
| + ? FAST_SMI_ONLY_ELEMENTS
|
| + : FAST_ELEMENTS;
|
| + MaybeObject* maybe_obj = GetElementsTransitionMap(elements_kind);
|
| + if (!maybe_obj->ToObject(&obj)) return maybe_obj;
|
| set_map(Map::cast(obj));
|
| initialize_elements();
|
| return this;
|
| @@ -1442,12 +1416,12 @@
|
|
|
|
|
| byte Oddball::kind() {
|
| - return READ_BYTE_FIELD(this, kKindOffset);
|
| + return Smi::cast(READ_FIELD(this, kKindOffset))->value();
|
| }
|
|
|
|
|
| void Oddball::set_kind(byte value) {
|
| - WRITE_BYTE_FIELD(this, kKindOffset, value);
|
| + WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
|
| }
|
|
|
|
|
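| The kind field switches from a raw byte to a tagged Smi so that every word
| of an Oddball is a valid tagged value, which the new collector's heap
| iteration and verification code assume; the Smi round trip costs little
| beyond a shift. Sketch of the encoding:
|
|   WRITE_FIELD(oddball, kKindOffset, Smi::FromInt(kind));       // store: tag
|   kind = Smi::cast(READ_FIELD(oddball, kKindOffset))->value(); // load: untag
|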
| @@ -1460,6 +1434,8 @@
|
| // The write barrier is not used for global property cells.
|
| ASSERT(!val->IsJSGlobalPropertyCell());
|
| WRITE_FIELD(this, kValueOffset, val);
|
| + GetHeap()->incremental_marking()->RecordWrite(
|
| + this, HeapObject::RawField(this, kValueOffset), val);
|
| }
|
|
|
|
|
| @@ -1528,7 +1504,7 @@
|
| // to adjust the index here.
|
| int offset = GetHeaderSize() + (kPointerSize * index);
|
| WRITE_FIELD(this, offset, value);
|
| - WRITE_BARRIER(this, offset);
|
| + WRITE_BARRIER(GetHeap(), this, offset, value);
|
| }
|
|
|
|
|
| @@ -1554,7 +1530,7 @@
|
| if (index < 0) {
|
| int offset = map()->instance_size() + (index * kPointerSize);
|
| WRITE_FIELD(this, offset, value);
|
| - WRITE_BARRIER(this, offset);
|
| + WRITE_BARRIER(GetHeap(), this, offset, value);
|
| } else {
|
| ASSERT(index < properties()->length());
|
| properties()->set(index, value);
|
| @@ -1588,17 +1564,33 @@
|
| ASSERT(index < 0);
|
| int offset = map()->instance_size() + (index * kPointerSize);
|
| WRITE_FIELD(this, offset, value);
|
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
|
| + CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
|
| return value;
|
| }
|
|
|
|
|
|
|
| -void JSObject::InitializeBody(int object_size, Object* value) {
|
| - ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
|
| - for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
|
| - WRITE_FIELD(this, offset, value);
|
| +void JSObject::InitializeBody(Map* map,
|
| + Object* pre_allocated_value,
|
| + Object* filler_value) {
|
| + ASSERT(!filler_value->IsHeapObject() ||
|
| + !GetHeap()->InNewSpace(filler_value));
|
| + ASSERT(!pre_allocated_value->IsHeapObject() ||
|
| + !GetHeap()->InNewSpace(pre_allocated_value));
|
| + int size = map->instance_size();
|
| + int offset = kHeaderSize;
|
| + if (filler_value != pre_allocated_value) {
|
| + int pre_allocated = map->pre_allocated_property_fields();
|
| + ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
|
| + for (int i = 0; i < pre_allocated; i++) {
|
| + WRITE_FIELD(this, offset, pre_allocated_value);
|
| + offset += kPointerSize;
|
| + }
|
| }
|
| + while (offset < size) {
|
| + WRITE_FIELD(this, offset, filler_value);
|
| + offset += kPointerSize;
|
| + }
|
| }
|
|
|
|
|
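| InitializeBody now distinguishes the pre-allocated in-object property
| fields from the remaining slack. A sketch of the intended call from the
| allocator (the actual call site is in heap.cc, not part of this file):
|
|   // In-object properties get undefined; while construction-slack tracking
|   // is in progress the unused tail gets a filler so the heap stays iterable.
|   object->InitializeBody(map,
|                          heap->undefined_value(),
|                          heap->one_pointer_filler_map());
|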
| @@ -1683,7 +1675,7 @@
|
| ASSERT(index >= 0 && index < this->length());
|
| int offset = kHeaderSize + index * kPointerSize;
|
| WRITE_FIELD(this, offset, value);
|
| - WRITE_BARRIER(this, offset);
|
| + WRITE_BARRIER(GetHeap(), this, offset, value);
|
| }
|
|
|
|
|
| @@ -1768,7 +1760,7 @@
|
|
|
| void FixedDoubleArray::Initialize(FixedArray* from) {
|
| int old_length = from->length();
|
| - ASSERT(old_length < length());
|
| + ASSERT(old_length <= length());
|
| for (int i = 0; i < old_length; i++) {
|
| Object* hole_or_object = from->get(i);
|
| if (hole_or_object->IsTheHole()) {
|
| @@ -1802,7 +1794,9 @@
|
|
|
|
|
| WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
|
| - if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
|
| + Heap* heap = GetHeap();
|
| + if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
|
| + if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
|
| return UPDATE_WRITE_BARRIER;
|
| }
|
|
|
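| GetWriteBarrierMode may no longer return SKIP_WRITE_BARRIER for new-space
| objects while incremental marking is active, because the marking barrier
| applies regardless of the object's space. The usual batching idiom is
| unchanged (sketch):
|
|   AssertNoAllocation no_gc;
|   WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
|   for (int i = 0; i < count; i++) {
|     array->set(i, values[i], mode);  // mode stays valid while no_gc lives
|   }
|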
| @@ -1814,7 +1808,7 @@
|
| ASSERT(index >= 0 && index < this->length());
|
| int offset = kHeaderSize + index * kPointerSize;
|
| WRITE_FIELD(this, offset, value);
|
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
|
| + CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
|
| }
|
|
|
|
|
| @@ -1823,6 +1817,10 @@
|
| ASSERT(index >= 0 && index < array->length());
|
| ASSERT(!HEAP->InNewSpace(value));
|
| WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
|
| + array->GetHeap()->incremental_marking()->RecordWrite(
|
| + array,
|
| + HeapObject::RawField(array, kHeaderSize + index * kPointerSize),
|
| + value);
|
| }
|
|
|
|
|
| @@ -1875,7 +1873,7 @@
|
| WriteBarrierMode mode) {
|
| int offset = kHeaderSize + index * kPointerSize;
|
| WRITE_FIELD(this, offset, value);
|
| - CONDITIONAL_WRITE_BARRIER(heap, this, offset, mode);
|
| + CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
|
| }
|
|
|
|
|
| @@ -2154,6 +2152,7 @@
|
| CAST_ACCESSOR(JSWeakMap)
|
| CAST_ACCESSOR(Foreign)
|
| CAST_ACCESSOR(ByteArray)
|
| +CAST_ACCESSOR(FreeSpace)
|
| CAST_ACCESSOR(ExternalArray)
|
| CAST_ACCESSOR(ExternalByteArray)
|
| CAST_ACCESSOR(ExternalUnsignedByteArray)
|
| @@ -2180,6 +2179,7 @@
|
|
|
|
|
| SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
|
| +SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
|
|
|
| SMI_ACCESSORS(String, length, kLengthOffset)
|
|
|
| @@ -2336,7 +2336,7 @@
|
|
|
|
|
| void SlicedString::set_parent(String* parent) {
|
| - ASSERT(parent->IsSeqString());
|
| + ASSERT(parent->IsSeqString() || parent->IsExternalString());
|
| WRITE_FIELD(this, kParentOffset, parent);
|
| }
|
|
|
| @@ -2356,7 +2356,7 @@
|
|
|
| void ConsString::set_first(String* value, WriteBarrierMode mode) {
|
| WRITE_FIELD(this, kFirstOffset, value);
|
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, mode);
|
| + CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
|
| }
|
|
|
|
|
| @@ -2372,29 +2372,31 @@
|
|
|
| void ConsString::set_second(String* value, WriteBarrierMode mode) {
|
| WRITE_FIELD(this, kSecondOffset, value);
|
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, mode);
|
| + CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
|
| }
|
|
|
|
|
| -ExternalAsciiString::Resource* ExternalAsciiString::resource() {
|
| +const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
|
| return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
|
| }
|
|
|
|
|
| void ExternalAsciiString::set_resource(
|
| - ExternalAsciiString::Resource* resource) {
|
| - *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
|
| + const ExternalAsciiString::Resource* resource) {
|
| + *reinterpret_cast<const Resource**>(
|
| + FIELD_ADDR(this, kResourceOffset)) = resource;
|
| }
|
|
|
|
|
| -ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
|
| +const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
|
| return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
|
| }
|
|
|
|
|
| void ExternalTwoByteString::set_resource(
|
| - ExternalTwoByteString::Resource* resource) {
|
| - *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
|
| + const ExternalTwoByteString::Resource* resource) {
|
| + *reinterpret_cast<const Resource**>(
|
| + FIELD_ADDR(this, kResourceOffset)) = resource;
|
| }
|
|
|
|
|
| @@ -2694,6 +2696,9 @@
|
| if (instance_type == BYTE_ARRAY_TYPE) {
|
| return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
|
| }
|
| + if (instance_type == FREE_SPACE_TYPE) {
|
| + return reinterpret_cast<FreeSpace*>(this)->size();
|
| + }
|
| if (instance_type == STRING_TYPE) {
|
| return SeqTwoByteString::SizeFor(
|
| reinterpret_cast<SeqTwoByteString*>(this)->length());
|
| @@ -2855,12 +2860,6 @@
|
| }
|
|
|
|
|
| -FixedArray* Map::unchecked_prototype_transitions() {
|
| - return reinterpret_cast<FixedArray*>(
|
| - READ_FIELD(this, kPrototypeTransitionsOffset));
|
| -}
|
| -
|
| -
|
| Code::Flags Code::flags() {
|
| return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
|
| }
|
| @@ -2932,6 +2931,19 @@
|
| }
|
|
|
|
|
| +bool Code::is_pregenerated() {
|
| + return kind() == STUB && IsPregeneratedField::decode(flags());
|
| +}
|
| +
|
| +
|
| +void Code::set_is_pregenerated(bool value) {
|
| + ASSERT(kind() == STUB);
|
| + Flags f = flags();
|
| + f = static_cast<Flags>(IsPregeneratedField::update(f, value));
|
| + set_flags(f);
|
| +}
|
| +
|
| +
|
| bool Code::optimizable() {
|
| ASSERT(kind() == FUNCTION);
|
| return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
|
| @@ -3097,6 +3109,19 @@
|
| WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
|
| }
|
|
|
| +
|
| +bool Code::has_function_cache() {
|
| + ASSERT(kind() == STUB);
|
| + return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
|
| +}
|
| +
|
| +
|
| +void Code::set_has_function_cache(bool flag) {
|
| + ASSERT(kind() == STUB);
|
| + WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
|
| +}
|
| +
|
| +
|
| bool Code::is_inline_cache_stub() {
|
| Kind kind = this->kind();
|
| return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
|
| @@ -3182,48 +3207,6 @@
|
| }
|
|
|
|
|
| -Isolate* Map::isolate() {
|
| - return heap()->isolate();
|
| -}
|
| -
|
| -
|
| -Heap* Map::heap() {
|
| - // NOTE: address() helper is not used to save one instruction.
|
| - Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
|
| - ASSERT(heap != NULL);
|
| - ASSERT(heap->isolate() == Isolate::Current());
|
| - return heap;
|
| -}
|
| -
|
| -
|
| -Heap* Code::heap() {
|
| - // NOTE: address() helper is not used to save one instruction.
|
| - Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
|
| - ASSERT(heap != NULL);
|
| - ASSERT(heap->isolate() == Isolate::Current());
|
| - return heap;
|
| -}
|
| -
|
| -
|
| -Isolate* Code::isolate() {
|
| - return heap()->isolate();
|
| -}
|
| -
|
| -
|
| -Heap* JSGlobalPropertyCell::heap() {
|
| - // NOTE: address() helper is not used to save one instruction.
|
| - Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
|
| - ASSERT(heap != NULL);
|
| - ASSERT(heap->isolate() == Isolate::Current());
|
| - return heap;
|
| -}
|
| -
|
| -
|
| -Isolate* JSGlobalPropertyCell::isolate() {
|
| - return heap()->isolate();
|
| -}
|
| -
|
| -
|
| Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
|
| return HeapObject::
|
| FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
|
| @@ -3238,49 +3221,10 @@
|
| void Map::set_prototype(Object* value, WriteBarrierMode mode) {
|
| ASSERT(value->IsNull() || value->IsJSReceiver());
|
| WRITE_FIELD(this, kPrototypeOffset, value);
|
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, mode);
|
| + CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
|
| }
|
|
|
|
|
| -MaybeObject* Map::GetFastElementsMap() {
|
| - if (has_fast_elements()) return this;
|
| - Object* obj;
|
| - { MaybeObject* maybe_obj = CopyDropTransitions();
|
| - if (!maybe_obj->ToObject(&obj)) return maybe_obj;
|
| - }
|
| - Map* new_map = Map::cast(obj);
|
| - new_map->set_elements_kind(FAST_ELEMENTS);
|
| - isolate()->counters()->map_to_fast_elements()->Increment();
|
| - return new_map;
|
| -}
|
| -
|
| -
|
| -MaybeObject* Map::GetFastDoubleElementsMap() {
|
| - if (has_fast_double_elements()) return this;
|
| - Object* obj;
|
| - { MaybeObject* maybe_obj = CopyDropTransitions();
|
| - if (!maybe_obj->ToObject(&obj)) return maybe_obj;
|
| - }
|
| - Map* new_map = Map::cast(obj);
|
| - new_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
|
| - isolate()->counters()->map_to_fast_double_elements()->Increment();
|
| - return new_map;
|
| -}
|
| -
|
| -
|
| -MaybeObject* Map::GetSlowElementsMap() {
|
| - if (!has_fast_elements() && !has_fast_double_elements()) return this;
|
| - Object* obj;
|
| - { MaybeObject* maybe_obj = CopyDropTransitions();
|
| - if (!maybe_obj->ToObject(&obj)) return maybe_obj;
|
| - }
|
| - Map* new_map = Map::cast(obj);
|
| - new_map->set_elements_kind(DICTIONARY_ELEMENTS);
|
| - isolate()->counters()->map_to_slow_elements()->Increment();
|
| - return new_map;
|
| -}
|
| -
|
| -
|
| DescriptorArray* Map::instance_descriptors() {
|
| Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
|
| if (object->IsSmi()) {
|
| @@ -3312,7 +3256,8 @@
|
| WriteBarrierMode mode) {
|
| Object* object = READ_FIELD(this,
|
| kInstanceDescriptorsOrBitField3Offset);
|
| - if (value == isolate()->heap()->empty_descriptor_array()) {
|
| + Heap* heap = GetHeap();
|
| + if (value == heap->empty_descriptor_array()) {
|
| clear_instance_descriptors();
|
| return;
|
| } else {
|
| @@ -3325,10 +3270,8 @@
|
| }
|
| ASSERT(!is_shared());
|
| WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
|
| - CONDITIONAL_WRITE_BARRIER(GetHeap(),
|
| - this,
|
| - kInstanceDescriptorsOrBitField3Offset,
|
| - mode);
|
| + CONDITIONAL_WRITE_BARRIER(
|
| + heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
|
| }
|
|
|
|
|
| @@ -3357,14 +3300,22 @@
|
| }
|
|
|
|
|
| +FixedArray* Map::unchecked_prototype_transitions() {
|
| + return reinterpret_cast<FixedArray*>(
|
| + READ_FIELD(this, kPrototypeTransitionsOffset));
|
| +}
|
| +
|
| +
|
| ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
|
| ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
|
| ACCESSORS(Map, constructor, Object, kConstructorOffset)
|
|
|
| ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
|
| ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
|
| -ACCESSORS_GCSAFE(JSFunction, next_function_link, Object,
|
| - kNextFunctionLinkOffset)
|
| +ACCESSORS(JSFunction,
|
| + next_function_link,
|
| + Object,
|
| + kNextFunctionLinkOffset)
|
|
|
| ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
|
| ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
|
| @@ -3453,8 +3404,8 @@
|
| #endif
|
|
|
| ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
|
| -ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
|
| -ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
|
| +ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
|
| +ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
|
| ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
|
| kInstanceClassNameOffset)
|
| ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
|
| @@ -3660,7 +3611,7 @@
|
|
|
| void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
|
| WRITE_FIELD(this, kCodeOffset, value);
|
| - ASSERT(!Isolate::Current()->heap()->InNewSpace(value));
|
| + CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
|
| }
|
|
|
|
|
| @@ -3673,7 +3624,11 @@
|
| void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
|
| WriteBarrierMode mode) {
|
| WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
|
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset, mode);
|
| + CONDITIONAL_WRITE_BARRIER(GetHeap(),
|
| + this,
|
| + kScopeInfoOffset,
|
| + reinterpret_cast<Object*>(value),
|
| + mode);
|
| }
|
|
|
|
|
| @@ -3770,10 +3725,13 @@
|
|
|
|
|
| void JSFunction::set_code(Code* value) {
|
| - // Skip the write barrier because code is never in new space.
|
| ASSERT(!HEAP->InNewSpace(value));
|
| Address entry = value->entry();
|
| WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
|
| + GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
|
| + this,
|
| + HeapObject::RawField(this, kCodeEntryOffset),
|
| + value);
|
| }
|
|
|
|
|
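| kCodeEntryOffset holds a raw instruction address (value->entry()) rather
| than a tagged Code pointer, so the store cannot use the ordinary barrier;
| RecordWriteOfCodeEntry is the variant that understands the slot encoding.
| The tagged object can be recovered from the slot with the helper defined
| earlier in this file (sketch):
|
|   Object* code = Code::GetObjectFromEntryAddress(
|       FIELD_ADDR(function, kCodeEntryOffset));
|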
| @@ -3813,7 +3771,7 @@
|
| void JSFunction::set_context(Object* value) {
|
| ASSERT(value->IsUndefined() || value->IsContext());
|
| WRITE_FIELD(this, kContextOffset, value);
|
| - WRITE_BARRIER(this, kContextOffset);
|
| + WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
|
| }
|
|
|
| ACCESSORS(JSFunction, prototype_or_initial_map, Object,
|
| @@ -3887,7 +3845,7 @@
|
| Object* value) {
|
| ASSERT(id < kJSBuiltinsCount); // id is unsigned.
|
| WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
|
| - WRITE_BARRIER(this, OffsetOfFunctionWithId(id));
|
| + WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
|
| }
|
|
|
|
|
| @@ -3906,6 +3864,7 @@
|
|
|
|
|
| ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
|
| +ACCESSORS(JSProxy, hash, Object, kHashOffset)
|
| ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
|
| ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
|
|
|
| @@ -3918,8 +3877,8 @@
|
| }
|
|
|
|
|
| -ACCESSORS(JSWeakMap, table, ObjectHashTable, kTableOffset)
|
| -ACCESSORS_GCSAFE(JSWeakMap, next, Object, kNextOffset)
|
| +ACCESSORS(JSWeakMap, table, Object, kTableOffset)
|
| +ACCESSORS(JSWeakMap, next, Object, kNextOffset)
|
|
|
|
|
| ObjectHashTable* JSWeakMap::unchecked_table() {
|
| @@ -4011,9 +3970,8 @@
|
| }
|
|
|
|
|
| -bool Code::contains(byte* pc) {
|
| - return (instruction_start() <= pc) &&
|
| - (pc <= instruction_start() + instruction_size());
|
| +bool Code::contains(byte* inner_pointer) {
|
| + return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
|
| }
|
|
|
|
|
| @@ -4092,6 +4050,7 @@
|
| if (value->IsSmi()) {
|
| fa->set_unchecked(index, Smi::cast(value));
|
| } else {
|
| + // We only do this during GC, so we don't need to notify the write barrier.
|
| fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
|
| }
|
| }
|
| @@ -4099,15 +4058,20 @@
|
|
|
| ElementsKind JSObject::GetElementsKind() {
|
| ElementsKind kind = map()->elements_kind();
|
| - ASSERT((kind == FAST_ELEMENTS &&
|
| - (elements()->map() == GetHeap()->fixed_array_map() ||
|
| - elements()->map() == GetHeap()->fixed_cow_array_map())) ||
|
| +#if DEBUG
|
| + FixedArrayBase* fixed_array =
|
| + reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
|
| + Map* map = fixed_array->map();
|
| + ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
|
| + (map == GetHeap()->fixed_array_map() ||
|
| + map == GetHeap()->fixed_cow_array_map())) ||
|
| (kind == FAST_DOUBLE_ELEMENTS &&
|
| - elements()->IsFixedDoubleArray()) ||
|
| + fixed_array->IsFixedDoubleArray()) ||
|
| (kind == DICTIONARY_ELEMENTS &&
|
| - elements()->IsFixedArray() &&
|
| - elements()->IsDictionary()) ||
|
| + fixed_array->IsFixedArray() &&
|
| + fixed_array->IsDictionary()) ||
|
| (kind > DICTIONARY_ELEMENTS));
|
| +#endif
|
| return kind;
|
| }
|
|
|
| @@ -4122,6 +4086,18 @@
|
| }
|
|
|
|
|
| +bool JSObject::HasFastSmiOnlyElements() {
|
| + return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
|
| +}
|
| +
|
| +
|
| +bool JSObject::HasFastTypeElements() {
|
| + ElementsKind elements_kind = GetElementsKind();
|
| + return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
|
| + elements_kind == FAST_ELEMENTS;
|
| +}
|
| +
|
| +
|
| bool JSObject::HasFastDoubleElements() {
|
| return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
|
| }
|
| @@ -4132,6 +4108,11 @@
|
| }
|
|
|
|
|
| +bool JSObject::HasNonStrictArgumentsElements() {
|
| + return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
|
| +}
|
| +
|
| +
|
| bool JSObject::HasExternalArrayElements() {
|
| HeapObject* array = elements();
|
| ASSERT(array != NULL);
|
| @@ -4183,7 +4164,7 @@
|
|
|
|
|
| MaybeObject* JSObject::EnsureWritableFastElements() {
|
| - ASSERT(HasFastElements());
|
| + ASSERT(HasFastTypeElements());
|
| FixedArray* elems = FixedArray::cast(elements());
|
| Isolate* isolate = GetIsolate();
|
| if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
|
| @@ -4359,47 +4340,21 @@
|
| }
|
|
|
|
|
| -bool JSObject::HasHiddenPropertiesObject() {
|
| - ASSERT(!IsJSGlobalProxy());
|
| - return GetPropertyAttributePostInterceptor(this,
|
| - GetHeap()->hidden_symbol(),
|
| - false) != ABSENT;
|
| +MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
|
| + return IsJSProxy()
|
| + ? JSProxy::cast(this)->GetIdentityHash(flag)
|
| + : JSObject::cast(this)->GetIdentityHash(flag);
|
| }
|
|
|
|
|
| -Object* JSObject::GetHiddenPropertiesObject() {
|
| - ASSERT(!IsJSGlobalProxy());
|
| - PropertyAttributes attributes;
|
| - // You can't install a getter on a property indexed by the hidden symbol,
|
| - // so we can be sure that GetLocalPropertyPostInterceptor returns a real
|
| - // object.
|
| - Object* result =
|
| - GetLocalPropertyPostInterceptor(this,
|
| - GetHeap()->hidden_symbol(),
|
| - &attributes)->ToObjectUnchecked();
|
| - return result;
|
| +bool JSReceiver::HasElement(uint32_t index) {
|
| + if (IsJSProxy()) {
|
| + return JSProxy::cast(this)->HasElementWithHandler(index);
|
| + }
|
| + return JSObject::cast(this)->HasElementWithReceiver(this, index);
|
| }
|
|
|
|
|
| -MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
|
| - ASSERT(!IsJSGlobalProxy());
|
| - return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
|
| - hidden_obj,
|
| - DONT_ENUM,
|
| - kNonStrictMode);
|
| -}
|
| -
|
| -
|
| -bool JSObject::HasHiddenProperties() {
|
| - return !GetHiddenProperties(OMIT_CREATION)->ToObjectChecked()->IsUndefined();
|
| -}
|
| -
|
| -
|
| -bool JSObject::HasElement(uint32_t index) {
|
| - return HasElementWithReceiver(this, index);
|
| -}
|
| -
|
| -
|
| bool AccessorInfo::all_can_read() {
|
| return BooleanBit::get(flag(), kAllCanReadBit);
|
| }
|
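| Identity hashes now dispatch on the receiver type so that proxies can act
| as hash table (e.g. weak map) keys. The table shape below consumes them as
| in this sketch, where the key is known to already have a hash:
|
|   MaybeObject* maybe_hash = receiver->GetIdentityHash(OMIT_CREATION);
|   ASSERT(!maybe_hash->IsFailure());  // OMIT_CREATION never allocates
|   int hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value();
|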
| @@ -4508,27 +4463,27 @@
|
| }
|
|
|
|
|
| -bool ObjectHashTableShape::IsMatch(JSObject* key, Object* other) {
|
| - return key == JSObject::cast(other);
|
| +bool ObjectHashTableShape::IsMatch(JSReceiver* key, Object* other) {
|
| + return key == JSReceiver::cast(other);
|
| }
|
|
|
|
|
| -uint32_t ObjectHashTableShape::Hash(JSObject* key) {
|
| - MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::OMIT_CREATION);
|
| +uint32_t ObjectHashTableShape::Hash(JSReceiver* key) {
|
| + MaybeObject* maybe_hash = key->GetIdentityHash(OMIT_CREATION);
|
| ASSERT(!maybe_hash->IsFailure());
|
| return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
|
| }
|
|
|
|
|
| -uint32_t ObjectHashTableShape::HashForObject(JSObject* key, Object* other) {
|
| - MaybeObject* maybe_hash = JSObject::cast(other)->GetIdentityHash(
|
| - JSObject::OMIT_CREATION);
|
| +uint32_t ObjectHashTableShape::HashForObject(JSReceiver* key, Object* other) {
|
| + MaybeObject* maybe_hash =
|
| + JSReceiver::cast(other)->GetIdentityHash(OMIT_CREATION);
|
| ASSERT(!maybe_hash->IsFailure());
|
| return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
|
| }
|
|
|
|
|
| -MaybeObject* ObjectHashTableShape::AsObject(JSObject* key) {
|
| +MaybeObject* ObjectHashTableShape::AsObject(JSReceiver* key) {
|
| return key;
|
| }
|
|
|
| @@ -4548,7 +4503,7 @@
|
|
|
|
|
| void JSArray::EnsureSize(int required_size) {
|
| - ASSERT(HasFastElements());
|
| + ASSERT(HasFastTypeElements());
|
| FixedArray* elts = FixedArray::cast(elements());
|
| const int kArraySizeThatFitsComfortablyInNewSpace = 128;
|
| if (elts->length() < required_size) {
|
| @@ -4566,13 +4521,17 @@
|
|
|
|
|
| void JSArray::set_length(Smi* length) {
|
| + // Don't need a write barrier for a Smi.
|
| set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
|
| }
|
|
|
|
|
| -void JSArray::SetContent(FixedArray* storage) {
|
| +MaybeObject* JSArray::SetContent(FixedArray* storage) {
|
| + MaybeObject* maybe_object = EnsureCanContainElements(storage);
|
| + if (maybe_object->IsFailure()) return maybe_object;
|
| set_length(Smi::FromInt(storage->length()));
|
| set_elements(storage);
|
| + return this;
|
| }
|
|
|
|
|
|
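| Since SetContent can now allocate (the elements-kind transition map), it
| returns a MaybeObject and callers must propagate failure, following the
| standard idiom used throughout this file (sketch):
|
|   Object* result;
|   { MaybeObject* maybe = array->SetContent(storage);
|     if (!maybe->ToObject(&result)) return maybe;  // caller retries after GC
|   }
|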
|