Chromium Code Reviews| Index: src/objects-inl.h |
| diff --git a/src/objects-inl.h b/src/objects-inl.h |
| index de77da0fc5039b4a0e546f556ffdd61783a1a75c..2b3300857c48fed887a9889f54b007fec10199bd 100644 |
| --- a/src/objects-inl.h |
| +++ b/src/objects-inl.h |
| @@ -65,52 +65,53 @@ PropertyDetails PropertyDetails::AsDeleted() const { |
| #define INT_ACCESSORS(holder, name, offset) \ |
| - int holder::name() { return READ_INT_FIELD(this, offset); } \ |
| - void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); } |
| + int holder::name() { return Heap::read_int_field(this, offset); } \ |
| + void holder::set_##name(int value) { \ |
| + Heap::write_int_field(this, offset, value); } |
| -#define ACCESSORS(holder, name, type, offset) \ |
| - type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \ |
| - void holder::set_##name(type* value, WriteBarrierMode mode) { \ |
| - WRITE_FIELD(this, offset, value); \ |
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \ |
| +#define ACCESSORS(holder, name, type, offset) \ |
| + type* holder::name() { return type::cast(Heap::read_field(this, offset)); } \ |
| + void holder::set_##name(type* value, WriteBarrierMode mode) { \ |
| + Heap::write_field(this, offset, value, mode); \ |
| } |
| // Getter that returns a tagged Smi and setter that writes a tagged Smi. |
| -#define ACCESSORS_TO_SMI(holder, name, offset) \ |
| - Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \ |
| - void holder::set_##name(Smi* value, WriteBarrierMode mode) { \ |
| - WRITE_FIELD(this, offset, value); \ |
| +#define ACCESSORS_TO_SMI(holder, name, offset) \ |
| + Smi* holder::name() { return Smi::cast(Heap::read_field(this, offset)); } \ |
| + void holder::set_##name(Smi* value, WriteBarrierMode mode) { \ |
| + Heap::write_field(this, offset, value, SKIP_WRITE_BARRIER); \ |
| } |
| // Getter that returns a Smi as an int and writes an int as a Smi. |
| #define SMI_ACCESSORS(holder, name, offset) \ |
| int holder::name() { \ |
| - Object* value = READ_FIELD(this, offset); \ |
| + Object* value = Heap::read_field(this, offset); \ |
| return Smi::cast(value)->value(); \ |
| } \ |
| void holder::set_##name(int value) { \ |
| - WRITE_FIELD(this, offset, Smi::FromInt(value)); \ |
| + Heap::write_field(this, offset, \ |
| + Smi::FromInt(value), SKIP_WRITE_BARRIER); \ |
| } |
| #define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset) \ |
|
Hannes Payer (out of office)
2014/04/30 07:47:25
The synchronized accessors should also live in heap-inl.h. [text truncated in extraction; completion inferred from review context]
|
| int holder::synchronized_##name() { \ |
| - Object* value = ACQUIRE_READ_FIELD(this, offset); \ |
| + Object* value = Heap::acquire_read_field(this, offset); \ |
| return Smi::cast(value)->value(); \ |
| } \ |
| void holder::synchronized_set_##name(int value) { \ |
| - RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \ |
| + Heap::release_write_field(this, offset, Smi::FromInt(value)); \ |
| } |
| #define NOBARRIER_SMI_ACCESSORS(holder, name, offset) \ |
| int holder::nobarrier_##name() { \ |
| - Object* value = NOBARRIER_READ_FIELD(this, offset); \ |
| + Object* value = Heap::nobarrier_read_field(this, offset); \ |
| return Smi::cast(value)->value(); \ |
| } \ |
| void holder::nobarrier_set_##name(int value) { \ |
| - NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \ |
| + Heap::nobarrier_write_field(this, offset, Smi::FromInt(value)); \ |
| } |
| #define BOOL_GETTER(holder, field, name, offset) \ |
| @@ -1129,138 +1130,8 @@ bool JSProxy::HasElementWithHandler(Handle<JSProxy> proxy, uint32_t index) { |
| } |
| -#define FIELD_ADDR(p, offset) \ |
| - (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag) |
| - |
| -#define READ_FIELD(p, offset) \ |
| - (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset))) |
| - |
| -#define ACQUIRE_READ_FIELD(p, offset) \ |
| - reinterpret_cast<Object*>( \ |
| - Acquire_Load(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset)))) |
| - |
| -#define NOBARRIER_READ_FIELD(p, offset) \ |
| - reinterpret_cast<Object*>( \ |
| - NoBarrier_Load(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset)))) |
| - |
| -#define WRITE_FIELD(p, offset, value) \ |
| - (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value) |
| - |
| -#define RELEASE_WRITE_FIELD(p, offset, value) \ |
| - Release_Store(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset)), \ |
| - reinterpret_cast<AtomicWord>(value)); |
| - |
| -#define NOBARRIER_WRITE_FIELD(p, offset, value) \ |
| - NoBarrier_Store(reinterpret_cast<AtomicWord*>(FIELD_ADDR(p, offset)), \ |
| - reinterpret_cast<AtomicWord>(value)); |
| - |
| -#define WRITE_BARRIER(heap, object, offset, value) \ |
| - heap->incremental_marking()->RecordWrite( \ |
| - object, HeapObject::RawField(object, offset), value); \ |
| - if (heap->InNewSpace(value)) { \ |
| - heap->RecordWrite(object->address(), offset); \ |
| - } |
| - |
| -#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \ |
| - if (mode == UPDATE_WRITE_BARRIER) { \ |
| - heap->incremental_marking()->RecordWrite( \ |
| - object, HeapObject::RawField(object, offset), value); \ |
| - if (heap->InNewSpace(value)) { \ |
| - heap->RecordWrite(object->address(), offset); \ |
| - } \ |
| - } |
| - |
| -#ifndef V8_TARGET_ARCH_MIPS |
| - #define READ_DOUBLE_FIELD(p, offset) \ |
| - (*reinterpret_cast<double*>(FIELD_ADDR(p, offset))) |
| -#else // V8_TARGET_ARCH_MIPS |
| - // Prevent gcc from using load-double (mips ldc1) on (possibly) |
| - // non-64-bit aligned HeapNumber::value. |
| - static inline double read_double_field(void* p, int offset) { |
| - union conversion { |
| - double d; |
| - uint32_t u[2]; |
| - } c; |
| - c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))); |
| - c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))); |
| - return c.d; |
| - } |
| - #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset) |
| -#endif // V8_TARGET_ARCH_MIPS |
| - |
| -#ifndef V8_TARGET_ARCH_MIPS |
| - #define WRITE_DOUBLE_FIELD(p, offset, value) \ |
| - (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value) |
| -#else // V8_TARGET_ARCH_MIPS |
| - // Prevent gcc from using store-double (mips sdc1) on (possibly) |
| - // non-64-bit aligned HeapNumber::value. |
| - static inline void write_double_field(void* p, int offset, |
| - double value) { |
| - union conversion { |
| - double d; |
| - uint32_t u[2]; |
| - } c; |
| - c.d = value; |
| - (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0]; |
| - (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1]; |
| - } |
| - #define WRITE_DOUBLE_FIELD(p, offset, value) \ |
| - write_double_field(p, offset, value) |
| -#endif // V8_TARGET_ARCH_MIPS |
| - |
| - |
| -#define READ_INT_FIELD(p, offset) \ |
| - (*reinterpret_cast<int*>(FIELD_ADDR(p, offset))) |
| - |
| -#define WRITE_INT_FIELD(p, offset, value) \ |
| - (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value) |
| - |
| -#define READ_INTPTR_FIELD(p, offset) \ |
| - (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset))) |
| - |
| -#define WRITE_INTPTR_FIELD(p, offset, value) \ |
| - (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value) |
| - |
| -#define READ_UINT32_FIELD(p, offset) \ |
| - (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) |
| - |
| -#define WRITE_UINT32_FIELD(p, offset, value) \ |
| - (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value) |
| - |
| -#define READ_INT32_FIELD(p, offset) \ |
| - (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset))) |
| - |
| -#define WRITE_INT32_FIELD(p, offset, value) \ |
| - (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value) |
| - |
| -#define READ_INT64_FIELD(p, offset) \ |
| - (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset))) |
| - |
| -#define WRITE_INT64_FIELD(p, offset, value) \ |
| - (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value) |
| - |
| -#define READ_SHORT_FIELD(p, offset) \ |
| - (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset))) |
| - |
| -#define WRITE_SHORT_FIELD(p, offset, value) \ |
| - (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value) |
| - |
| -#define READ_BYTE_FIELD(p, offset) \ |
| - (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset))) |
| - |
| -#define NOBARRIER_READ_BYTE_FIELD(p, offset) \ |
| - static_cast<byte>(NoBarrier_Load( \ |
| - reinterpret_cast<Atomic8*>(FIELD_ADDR(p, offset))) ) |
| - |
| -#define WRITE_BYTE_FIELD(p, offset, value) \ |
| - (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value) |
| - |
| -#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \ |
| - NoBarrier_Store(reinterpret_cast<Atomic8*>(FIELD_ADDR(p, offset)), \ |
| - static_cast<Atomic8>(value)); |
| - |
| Object** HeapObject::RawField(HeapObject* obj, int byte_offset) { |
| - return &READ_FIELD(obj, byte_offset); |
| + return reinterpret_cast<Object**>(Heap::get_field_address(obj, byte_offset)); |
| } |
| @@ -1359,11 +1230,11 @@ HeapObject* MapWord::ToForwardingAddress() { |
| #ifdef VERIFY_HEAP |
| void HeapObject::VerifyObjectField(int offset) { |
| - VerifyPointer(READ_FIELD(this, offset)); |
| + VerifyPointer(Heap::read_field(this, offset)); |
| } |
| void HeapObject::VerifySmiField(int offset) { |
| - CHECK(READ_FIELD(this, offset)->IsSmi()); |
| + CHECK(Heap::read_field(this, offset)->IsSmi()); |
| } |
| #endif |
| @@ -1423,25 +1294,25 @@ void HeapObject::set_map_no_write_barrier(Map* value) { |
| MapWord HeapObject::map_word() { |
| - return MapWord( |
| - reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset))); |
| + return MapWord(reinterpret_cast<uintptr_t>( |
| + Heap::nobarrier_read_field(this, kMapOffset))); |
| } |
| void HeapObject::set_map_word(MapWord map_word) { |
| - NOBARRIER_WRITE_FIELD( |
| + Heap::nobarrier_write_field( |
| this, kMapOffset, reinterpret_cast<Object*>(map_word.value_)); |
| } |
| MapWord HeapObject::synchronized_map_word() { |
| return MapWord( |
| - reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset))); |
| + reinterpret_cast<uintptr_t>(Heap::acquire_read_field(this, kMapOffset))); |
| } |
| void HeapObject::synchronized_set_map_word(MapWord map_word) { |
| - RELEASE_WRITE_FIELD( |
| + Heap::release_write_field( |
| this, kMapOffset, reinterpret_cast<Object*>(map_word.value_)); |
| } |
| @@ -1463,39 +1334,42 @@ int HeapObject::Size() { |
| void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) { |
| - v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)), |
| - reinterpret_cast<Object**>(FIELD_ADDR(this, end))); |
| + v->VisitPointers( |
| + reinterpret_cast<Object**>(Heap::get_field_address(this, start)), |
| + reinterpret_cast<Object**>(Heap::get_field_address(this, end))); |
| } |
| void HeapObject::IteratePointer(ObjectVisitor* v, int offset) { |
| - v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset))); |
| + v->VisitPointer( |
| + reinterpret_cast<Object**>(Heap::get_field_address(this, offset))); |
| } |
| void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) { |
| - v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset))); |
| + v->VisitNextCodeLink( |
| + reinterpret_cast<Object**>(Heap::get_field_address(this, offset))); |
| } |
| double HeapNumber::value() { |
| - return READ_DOUBLE_FIELD(this, kValueOffset); |
| + return Heap::read_double_field(this, kValueOffset); |
| } |
| void HeapNumber::set_value(double value) { |
| - WRITE_DOUBLE_FIELD(this, kValueOffset, value); |
| + Heap::write_double_field(this, kValueOffset, value); |
| } |
| int HeapNumber::get_exponent() { |
| - return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >> |
| + return ((Heap::read_int_field(this, kExponentOffset) & kExponentMask) >> |
| kExponentShift) - kExponentBias; |
| } |
| int HeapNumber::get_sign() { |
| - return READ_INT_FIELD(this, kExponentOffset) & kSignMask; |
| + return Heap::read_int_field(this, kExponentOffset) & kSignMask; |
| } |
| @@ -1503,7 +1377,8 @@ ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset) |
| Object** FixedArray::GetFirstElementAddress() { |
| - return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0))); |
| + return reinterpret_cast<Object**>( |
| + Heap::get_field_address(this, OffsetOfElementAt(0))); |
| } |
| @@ -1519,7 +1394,7 @@ bool FixedArray::ContainsOnlySmisOrHoles() { |
| FixedArrayBase* JSObject::elements() { |
| - Object* array = READ_FIELD(this, kElementsOffset); |
| + Object* array = Heap::read_field(this, kElementsOffset); |
| return static_cast<FixedArrayBase*>(array); |
| } |
| @@ -1772,20 +1647,22 @@ void JSObject::SetMapAndElements(Handle<JSObject> object, |
| void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) { |
| - WRITE_FIELD(this, kElementsOffset, value); |
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode); |
| + Heap::write_field(this, kElementsOffset, value, mode); |
| } |
| void JSObject::initialize_properties() { |
| ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array())); |
| - WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array()); |
| + Heap::write_field(this, |
| + kPropertiesOffset, |
| + GetHeap()->empty_fixed_array(), |
| + SKIP_WRITE_BARRIER); |
| } |
| void JSObject::initialize_elements() { |
| FixedArrayBase* elements = map()->GetInitialElements(); |
| - WRITE_FIELD(this, kElementsOffset, elements); |
| + Heap::write_field(this, kElementsOffset, elements, SKIP_WRITE_BARRIER); |
| } |
| @@ -1829,35 +1706,35 @@ ACCESSORS(Oddball, to_number, Object, kToNumberOffset) |
| byte Oddball::kind() { |
| - return Smi::cast(READ_FIELD(this, kKindOffset))->value(); |
| + return Smi::cast(Heap::read_field(this, kKindOffset))->value(); |
| } |
| void Oddball::set_kind(byte value) { |
| - WRITE_FIELD(this, kKindOffset, Smi::FromInt(value)); |
| + Heap::write_field(this, kKindOffset, Smi::FromInt(value), SKIP_WRITE_BARRIER); |
| } |
| Object* Cell::value() { |
| - return READ_FIELD(this, kValueOffset); |
| + return Heap::read_field(this, kValueOffset); |
| } |
| void Cell::set_value(Object* val, WriteBarrierMode ignored) { |
| // The write barrier is not used for global property cells. |
| ASSERT(!val->IsPropertyCell() && !val->IsCell()); |
| - WRITE_FIELD(this, kValueOffset, val); |
| + Heap::write_field(this, kValueOffset, val, SKIP_WRITE_BARRIER); |
| } |
| ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset) |
| Object* PropertyCell::type_raw() { |
| - return READ_FIELD(this, kTypeOffset); |
| + return Heap::read_field(this, kTypeOffset); |
| } |
| void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) { |
| - WRITE_FIELD(this, kTypeOffset, val); |
| + Heap::write_field(this, kTypeOffset, val, SKIP_WRITE_BARRIER); |
| } |
| @@ -1939,7 +1816,7 @@ Object* JSObject::GetInternalField(int index) { |
| // Internal objects do follow immediately after the header, whereas in-object |
| // properties are at the end of the object. Therefore there is no need |
| // to adjust the index here. |
| - return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index)); |
| + return Heap::read_field(this, GetHeaderSize() + (kPointerSize * index)); |
| } |
| @@ -1949,8 +1826,7 @@ void JSObject::SetInternalField(int index, Object* value) { |
| // properties are at the end of the object. Therefore there is no need |
| // to adjust the index here. |
| int offset = GetHeaderSize() + (kPointerSize * index); |
| - WRITE_FIELD(this, offset, value); |
| - WRITE_BARRIER(GetHeap(), this, offset, value); |
| + Heap::write_field(this, offset, value, UPDATE_WRITE_BARRIER); |
| } |
| @@ -1960,7 +1836,7 @@ void JSObject::SetInternalField(int index, Smi* value) { |
| // properties are at the end of the object. Therefore there is no need |
| // to adjust the index here. |
| int offset = GetHeaderSize() + (kPointerSize * index); |
| - WRITE_FIELD(this, offset, value); |
| + Heap::write_field(this, offset, value, SKIP_WRITE_BARRIER); |
| } |
| @@ -1972,7 +1848,7 @@ Object* JSObject::RawFastPropertyAt(int index) { |
| index -= map()->inobject_properties(); |
| if (index < 0) { |
| int offset = map()->instance_size() + (index * kPointerSize); |
| - return READ_FIELD(this, offset); |
| + return Heap::read_field(this, offset); |
| } else { |
| ASSERT(index < properties()->length()); |
| return properties()->get(index); |
| @@ -1985,8 +1861,7 @@ void JSObject::FastPropertyAtPut(int index, Object* value) { |
| index -= map()->inobject_properties(); |
| if (index < 0) { |
| int offset = map()->instance_size() + (index * kPointerSize); |
| - WRITE_FIELD(this, offset, value); |
| - WRITE_BARRIER(GetHeap(), this, offset, value); |
| + Heap::write_field(this, offset, value, UPDATE_WRITE_BARRIER); |
| } else { |
| ASSERT(index < properties()->length()); |
| properties()->set(index, value); |
| @@ -2001,7 +1876,7 @@ int JSObject::GetInObjectPropertyOffset(int index) { |
| Object* JSObject::InObjectPropertyAt(int index) { |
| int offset = GetInObjectPropertyOffset(index); |
| - return READ_FIELD(this, offset); |
| + return Heap::read_field(this, offset); |
| } |
| @@ -2010,8 +1885,7 @@ Object* JSObject::InObjectPropertyAtPut(int index, |
| WriteBarrierMode mode) { |
| // Adjust for the number of properties stored in the object. |
| int offset = GetInObjectPropertyOffset(index); |
| - WRITE_FIELD(this, offset, value); |
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); |
| + Heap::write_field(this, offset, value, mode); |
| return value; |
| } |
| @@ -2030,12 +1904,12 @@ void JSObject::InitializeBody(Map* map, |
| int pre_allocated = map->pre_allocated_property_fields(); |
| ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size); |
| for (int i = 0; i < pre_allocated; i++) { |
| - WRITE_FIELD(this, offset, pre_allocated_value); |
| + Heap::write_field(this, offset, pre_allocated_value, SKIP_WRITE_BARRIER); |
| offset += kPointerSize; |
| } |
| } |
| while (offset < size) { |
| - WRITE_FIELD(this, offset, filler_value); |
| + Heap::write_field(this, offset, filler_value, SKIP_WRITE_BARRIER); |
| offset += kPointerSize; |
| } |
| } |
| @@ -2070,7 +1944,7 @@ bool JSObject::TooManyFastProperties(StoreFromKeyed store_mode) { |
| void Struct::InitializeBody(int object_size) { |
| Object* value = GetHeap()->undefined_value(); |
| for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { |
| - WRITE_FIELD(this, offset, value); |
| + Heap::write_field(this, offset, value, SKIP_WRITE_BARRIER); |
| } |
| } |
| @@ -2132,7 +2006,7 @@ FixedArrayBase* FixedArrayBase::cast(Object* object) { |
| Object* FixedArray::get(int index) { |
| SLOW_ASSERT(index >= 0 && index < this->length()); |
| - return READ_FIELD(this, kHeaderSize + index * kPointerSize); |
| + return Heap::read_field(this, kHeaderSize + index * kPointerSize); |
| } |
| @@ -2151,7 +2025,7 @@ void FixedArray::set(int index, Smi* value) { |
| ASSERT(index >= 0 && index < this->length()); |
| ASSERT(reinterpret_cast<Object*>(value)->IsSmi()); |
| int offset = kHeaderSize + index * kPointerSize; |
| - WRITE_FIELD(this, offset, value); |
| + Heap::write_field(this, offset, value, SKIP_WRITE_BARRIER); |
| } |
| @@ -2159,8 +2033,7 @@ void FixedArray::set(int index, Object* value) { |
| ASSERT(map() != GetHeap()->fixed_cow_array_map()); |
| ASSERT(index >= 0 && index < this->length()); |
| int offset = kHeaderSize + index * kPointerSize; |
| - WRITE_FIELD(this, offset, value); |
| - WRITE_BARRIER(GetHeap(), this, offset, value); |
| + Heap::write_field(this, offset, value, UPDATE_WRITE_BARRIER); |
| } |
| @@ -2185,7 +2058,8 @@ double FixedDoubleArray::get_scalar(int index) { |
| ASSERT(map() != GetHeap()->fixed_cow_array_map() && |
| map() != GetHeap()->fixed_array_map()); |
| ASSERT(index >= 0 && index < this->length()); |
| - double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize); |
| + double result = |
| + Heap::read_double_field(this, kHeaderSize + index * kDoubleSize); |
| ASSERT(!is_the_hole_nan(result)); |
| return result; |
| } |
| @@ -2194,7 +2068,7 @@ int64_t FixedDoubleArray::get_representation(int index) { |
| ASSERT(map() != GetHeap()->fixed_cow_array_map() && |
| map() != GetHeap()->fixed_array_map()); |
| ASSERT(index >= 0 && index < this->length()); |
| - return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize); |
| + return Heap::read_int64_field(this, kHeaderSize + index * kDoubleSize); |
| } |
| @@ -2213,7 +2087,7 @@ void FixedDoubleArray::set(int index, double value) { |
| map() != GetHeap()->fixed_array_map()); |
| int offset = kHeaderSize + index * kDoubleSize; |
| if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double(); |
| - WRITE_DOUBLE_FIELD(this, offset, value); |
| + Heap::write_double_field(this, offset, value); |
| } |
| @@ -2221,18 +2095,18 @@ void FixedDoubleArray::set_the_hole(int index) { |
| ASSERT(map() != GetHeap()->fixed_cow_array_map() && |
| map() != GetHeap()->fixed_array_map()); |
| int offset = kHeaderSize + index * kDoubleSize; |
| - WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double()); |
| + Heap::write_double_field(this, offset, hole_nan_as_double()); |
| } |
| bool FixedDoubleArray::is_the_hole(int index) { |
| int offset = kHeaderSize + index * kDoubleSize; |
| - return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset)); |
| + return is_the_hole_nan(Heap::read_double_field(this, offset)); |
| } |
| double* FixedDoubleArray::data_start() { |
| - return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize)); |
| + return reinterpret_cast<double*>(Heap::get_field_address(this, kHeaderSize)); |
| } |
| @@ -2245,14 +2119,14 @@ void FixedDoubleArray::FillWithHoles(int from, int to) { |
| void ConstantPoolArray::set_weak_object_state( |
| ConstantPoolArray::WeakObjectState state) { |
| - int old_layout_field = READ_INT_FIELD(this, kArrayLayoutOffset); |
| + int old_layout_field = Heap::read_int_field(this, kArrayLayoutOffset); |
| int new_layout_field = WeakObjectStateField::update(old_layout_field, state); |
| - WRITE_INT_FIELD(this, kArrayLayoutOffset, new_layout_field); |
| + Heap::write_int_field(this, kArrayLayoutOffset, new_layout_field); |
| } |
| ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() { |
| - int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset); |
| + int layout_field = Heap::read_int_field(this, kArrayLayoutOffset); |
| return WeakObjectStateField::decode(layout_field); |
| } |
| @@ -2263,21 +2137,21 @@ int ConstantPoolArray::first_int64_index() { |
| int ConstantPoolArray::first_code_ptr_index() { |
| - int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset); |
| + int layout_field = Heap::read_int_field(this, kArrayLayoutOffset); |
| return first_int64_index() + |
| NumberOfInt64EntriesField::decode(layout_field); |
| } |
| int ConstantPoolArray::first_heap_ptr_index() { |
| - int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset); |
| + int layout_field = Heap::read_int_field(this, kArrayLayoutOffset); |
| return first_code_ptr_index() + |
| NumberOfCodePtrEntriesField::decode(layout_field); |
| } |
| int ConstantPoolArray::first_int32_index() { |
| - int layout_field = READ_INT_FIELD(this, kArrayLayoutOffset); |
| + int layout_field = Heap::read_int_field(this, kArrayLayoutOffset); |
| return first_heap_ptr_index() + |
| NumberOfHeapPtrEntriesField::decode(layout_field); |
| } |
| @@ -2316,64 +2190,70 @@ void ConstantPoolArray::Init(int number_of_int64_entries, |
| NumberOfCodePtrEntriesField::encode(number_of_code_ptr_entries) | |
| NumberOfHeapPtrEntriesField::encode(number_of_heap_ptr_entries) | |
| WeakObjectStateField::encode(NO_WEAK_OBJECTS); |
| - WRITE_INT_FIELD(this, kArrayLayoutOffset, layout_field); |
| + Heap::write_int_field(this, kArrayLayoutOffset, layout_field); |
| } |
| int64_t ConstantPoolArray::get_int64_entry(int index) { |
| ASSERT(map() == GetHeap()->constant_pool_array_map()); |
| ASSERT(index >= 0 && index < first_code_ptr_index()); |
| - return READ_INT64_FIELD(this, OffsetOfElementAt(index)); |
| + return Heap::read_int64_field(this, OffsetOfElementAt(index)); |
| } |
| double ConstantPoolArray::get_int64_entry_as_double(int index) { |
| STATIC_ASSERT(kDoubleSize == kInt64Size); |
| ASSERT(map() == GetHeap()->constant_pool_array_map()); |
| ASSERT(index >= 0 && index < first_code_ptr_index()); |
| - return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index)); |
| + return Heap::read_double_field(this, OffsetOfElementAt(index)); |
| } |
| Address ConstantPoolArray::get_code_ptr_entry(int index) { |
| ASSERT(map() == GetHeap()->constant_pool_array_map()); |
| ASSERT(index >= first_code_ptr_index() && index < first_heap_ptr_index()); |
| - return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index))); |
| + return reinterpret_cast<Address>( |
| + Heap::read_field(this, OffsetOfElementAt(index))); |
| } |
| Object* ConstantPoolArray::get_heap_ptr_entry(int index) { |
| ASSERT(map() == GetHeap()->constant_pool_array_map()); |
| ASSERT(index >= first_heap_ptr_index() && index < first_int32_index()); |
| - return READ_FIELD(this, OffsetOfElementAt(index)); |
| + return Heap::read_field(this, OffsetOfElementAt(index)); |
| } |
| int32_t ConstantPoolArray::get_int32_entry(int index) { |
| ASSERT(map() == GetHeap()->constant_pool_array_map()); |
| ASSERT(index >= first_int32_index() && index < length()); |
| - return READ_INT32_FIELD(this, OffsetOfElementAt(index)); |
| + return Heap::read_int32_field(this, OffsetOfElementAt(index)); |
| } |
| void ConstantPoolArray::set(int index, Address value) { |
| ASSERT(map() == GetHeap()->constant_pool_array_map()); |
| ASSERT(index >= first_code_ptr_index() && index < first_heap_ptr_index()); |
| - WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value)); |
| + Heap::write_field(this, |
| + OffsetOfElementAt(index), |
| + reinterpret_cast<Object*>(value), |
| + SKIP_WRITE_BARRIER); |
| } |
| void ConstantPoolArray::set(int index, Object* value) { |
| ASSERT(map() == GetHeap()->constant_pool_array_map()); |
| ASSERT(index >= first_code_ptr_index() && index < first_int32_index()); |
| - WRITE_FIELD(this, OffsetOfElementAt(index), value); |
| - WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value); |
| + Heap::write_field(this, |
| + OffsetOfElementAt(index), |
| + value, |
| + UPDATE_WRITE_BARRIER); |
| } |
| void ConstantPoolArray::set(int index, int64_t value) { |
| ASSERT(map() == GetHeap()->constant_pool_array_map()); |
| ASSERT(index >= first_int64_index() && index < first_code_ptr_index()); |
| - WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value); |
| + Heap::write_int64_field(this, OffsetOfElementAt(index), value); |
| } |
| @@ -2381,14 +2261,14 @@ void ConstantPoolArray::set(int index, double value) { |
| STATIC_ASSERT(kDoubleSize == kInt64Size); |
| ASSERT(map() == GetHeap()->constant_pool_array_map()); |
| ASSERT(index >= first_int64_index() && index < first_code_ptr_index()); |
| - WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value); |
| + Heap::write_double_field(this, OffsetOfElementAt(index), value); |
| } |
| void ConstantPoolArray::set(int index, int32_t value) { |
| ASSERT(map() == GetHeap()->constant_pool_array_map()); |
| ASSERT(index >= this->first_int32_index() && index < length()); |
| - WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value); |
| + Heap::write_int32_field(this, OffsetOfElementAt(index), value); |
| } |
| @@ -2407,8 +2287,7 @@ void FixedArray::set(int index, |
| ASSERT(map() != GetHeap()->fixed_cow_array_map()); |
| ASSERT(index >= 0 && index < this->length()); |
| int offset = kHeaderSize + index * kPointerSize; |
| - WRITE_FIELD(this, offset, value); |
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); |
| + Heap::write_field(this, offset, value, mode); |
| } |
| @@ -2418,7 +2297,7 @@ void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array, |
| ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map()); |
| ASSERT(index >= 0 && index < array->length()); |
| int offset = kHeaderSize + index * kPointerSize; |
| - WRITE_FIELD(array, offset, value); |
| + Heap::write_field(array, offset, value, SKIP_WRITE_BARRIER); |
| Heap* heap = array->GetHeap(); |
| if (heap->InNewSpace(value)) { |
| heap->RecordWrite(array->address(), offset); |
| @@ -2432,7 +2311,10 @@ void FixedArray::NoWriteBarrierSet(FixedArray* array, |
| ASSERT(array->map() != array->GetHeap()->fixed_cow_array_map()); |
| ASSERT(index >= 0 && index < array->length()); |
| ASSERT(!array->GetHeap()->InNewSpace(value)); |
| - WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value); |
| + Heap::write_field(array, |
| + kHeaderSize + index * kPointerSize, |
| + value, |
| + SKIP_WRITE_BARRIER); |
| } |
| @@ -2440,18 +2322,20 @@ void FixedArray::set_undefined(int index) { |
| ASSERT(map() != GetHeap()->fixed_cow_array_map()); |
| ASSERT(index >= 0 && index < this->length()); |
| ASSERT(!GetHeap()->InNewSpace(GetHeap()->undefined_value())); |
| - WRITE_FIELD(this, |
| + Heap::write_field(this, |
| kHeaderSize + index * kPointerSize, |
| - GetHeap()->undefined_value()); |
| + GetHeap()->undefined_value(), |
| + SKIP_WRITE_BARRIER); |
| } |
| void FixedArray::set_null(int index) { |
| ASSERT(index >= 0 && index < this->length()); |
| ASSERT(!GetHeap()->InNewSpace(GetHeap()->null_value())); |
| - WRITE_FIELD(this, |
| + Heap::write_field(this, |
| kHeaderSize + index * kPointerSize, |
| - GetHeap()->null_value()); |
| + GetHeap()->null_value(), |
| + SKIP_WRITE_BARRIER); |
| } |
| @@ -2459,9 +2343,10 @@ void FixedArray::set_the_hole(int index) { |
| ASSERT(map() != GetHeap()->fixed_cow_array_map()); |
| ASSERT(index >= 0 && index < this->length()); |
| ASSERT(!GetHeap()->InNewSpace(GetHeap()->the_hole_value())); |
| - WRITE_FIELD(this, |
| + Heap::write_field(this, |
| kHeaderSize + index * kPointerSize, |
| - GetHeap()->the_hole_value()); |
| + GetHeap()->the_hole_value(), |
| + SKIP_WRITE_BARRIER); |
| } |
| @@ -2485,8 +2370,11 @@ bool DescriptorArray::IsEmpty() { |
| void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) { |
| - WRITE_FIELD( |
| - this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors)); |
| + Heap::write_field( |
| + this, |
| + kDescriptorLengthOffset, |
| + Smi::FromInt(number_of_descriptors), |
| + SKIP_WRITE_BARRIER); |
| } |
| @@ -3013,14 +2901,14 @@ SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset) |
| uint32_t Name::hash_field() { |
| - return READ_UINT32_FIELD(this, kHashFieldOffset); |
| + return Heap::read_uint32_field(this, kHashFieldOffset); |
| } |
| void Name::set_hash_field(uint32_t value) { |
| - WRITE_UINT32_FIELD(this, kHashFieldOffset, value); |
| + Heap::write_uint32_field(this, kHashFieldOffset, value); |
| #if V8_HOST_ARCH_64_BIT |
| - WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0); |
| + Heap::write_uint32_field(this, kHashFieldOffset + kIntSize, 0); |
| #endif |
| } |
| @@ -3126,7 +3014,7 @@ String* String::GetUnderlying() { |
| ASSERT(StringShape(this).IsIndirect()); |
| STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset); |
| const int kUnderlyingOffset = SlicedString::kParentOffset; |
| - return String::cast(READ_FIELD(this, kUnderlyingOffset)); |
| + return String::cast(Heap::read_field(this, kUnderlyingOffset)); |
| } |
| @@ -3186,19 +3074,19 @@ ConsString* String::VisitFlat(Visitor* visitor, |
| uint16_t SeqOneByteString::SeqOneByteStringGet(int index) { |
| ASSERT(index >= 0 && index < length()); |
| - return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize); |
| + return Heap::read_byte_field(this, kHeaderSize + index * kCharSize); |
| } |
| void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) { |
| ASSERT(index >= 0 && index < length() && value <= kMaxOneByteCharCode); |
| - WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, |
| + Heap::write_byte_field(this, kHeaderSize + index * kCharSize, |
| static_cast<byte>(value)); |
| } |
| Address SeqOneByteString::GetCharsAddress() { |
| - return FIELD_ADDR(this, kHeaderSize); |
| + return Heap::get_field_address(this, kHeaderSize); |
| } |
| @@ -3208,24 +3096,24 @@ uint8_t* SeqOneByteString::GetChars() { |
| Address SeqTwoByteString::GetCharsAddress() { |
| - return FIELD_ADDR(this, kHeaderSize); |
| + return Heap::get_field_address(this, kHeaderSize); |
| } |
| uc16* SeqTwoByteString::GetChars() { |
| - return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize)); |
| + return reinterpret_cast<uc16*>(Heap::get_field_address(this, kHeaderSize)); |
| } |
| uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) { |
| ASSERT(index >= 0 && index < length()); |
| - return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize); |
| + return Heap::read_short_field(this, kHeaderSize + index * kShortSize); |
| } |
| void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) { |
| ASSERT(index >= 0 && index < length()); |
| - WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value); |
| + Heap::write_short_field(this, kHeaderSize + index * kShortSize, value); |
| } |
| @@ -3240,14 +3128,13 @@ int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) { |
| String* SlicedString::parent() { |
| - return String::cast(READ_FIELD(this, kParentOffset)); |
| + return String::cast(Heap::read_field(this, kParentOffset)); |
| } |
| void SlicedString::set_parent(String* parent, WriteBarrierMode mode) { |
| ASSERT(parent->IsSeqString() || parent->IsExternalString()); |
| - WRITE_FIELD(this, kParentOffset, parent); |
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode); |
| + Heap::write_field(this, kParentOffset, parent, mode); |
| } |
| @@ -3255,34 +3142,32 @@ SMI_ACCESSORS(SlicedString, offset, kOffsetOffset) |
| String* ConsString::first() { |
| - return String::cast(READ_FIELD(this, kFirstOffset)); |
| + return String::cast(Heap::read_field(this, kFirstOffset)); |
| } |
| Object* ConsString::unchecked_first() { |
| - return READ_FIELD(this, kFirstOffset); |
| + return Heap::read_field(this, kFirstOffset); |
| } |
| void ConsString::set_first(String* value, WriteBarrierMode mode) { |
| - WRITE_FIELD(this, kFirstOffset, value); |
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode); |
| + Heap::write_field(this, kFirstOffset, value, mode); |
| } |
| String* ConsString::second() { |
| - return String::cast(READ_FIELD(this, kSecondOffset)); |
| + return String::cast(Heap::read_field(this, kSecondOffset)); |
| } |
| Object* ConsString::unchecked_second() { |
| - return READ_FIELD(this, kSecondOffset); |
| + return Heap::read_field(this, kSecondOffset); |
| } |
| void ConsString::set_second(String* value, WriteBarrierMode mode) { |
| - WRITE_FIELD(this, kSecondOffset, value); |
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode); |
| + Heap::write_field(this, kSecondOffset, value, mode); |
| } |
| @@ -3293,14 +3178,15 @@ bool ExternalString::is_short() { |
| const ExternalAsciiString::Resource* ExternalAsciiString::resource() { |
| - return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)); |
| + return *reinterpret_cast<Resource**>( |
| + Heap::get_field_address(this, kResourceOffset)); |
| } |
| void ExternalAsciiString::update_data_cache() { |
| if (is_short()) return; |
| - const char** data_field = |
| - reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset)); |
| + const char** data_field = reinterpret_cast<const char**>( |
| + Heap::get_field_address(this, kResourceDataOffset)); |
| *data_field = resource()->data(); |
| } |
| @@ -3309,7 +3195,7 @@ void ExternalAsciiString::set_resource( |
| const ExternalAsciiString::Resource* resource) { |
| ASSERT(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize)); |
| *reinterpret_cast<const Resource**>( |
| - FIELD_ADDR(this, kResourceOffset)) = resource; |
| + Heap::get_field_address(this, kResourceOffset)) = resource; |
| if (resource != NULL) update_data_cache(); |
| } |
| @@ -3326,14 +3212,15 @@ uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) { |
| const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() { |
| - return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)); |
| + return *reinterpret_cast<Resource**>( |
| + Heap::get_field_address(this, kResourceOffset)); |
| } |
| void ExternalTwoByteString::update_data_cache() { |
| if (is_short()) return; |
| - const uint16_t** data_field = |
| - reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset)); |
| + const uint16_t** data_field = reinterpret_cast<const uint16_t**>( |
| + Heap::get_field_address(this, kResourceDataOffset)); |
| *data_field = resource()->data(); |
| } |
| @@ -3341,7 +3228,7 @@ void ExternalTwoByteString::update_data_cache() { |
| void ExternalTwoByteString::set_resource( |
| const ExternalTwoByteString::Resource* resource) { |
| *reinterpret_cast<const Resource**>( |
| - FIELD_ADDR(this, kResourceOffset)) = resource; |
| + Heap::get_field_address(this, kResourceOffset)) = resource; |
| if (resource != NULL) update_data_cache(); |
| } |
| @@ -3487,19 +3374,19 @@ void JSFunctionResultCache::set_finger_index(int finger_index) { |
| byte ByteArray::get(int index) { |
| ASSERT(index >= 0 && index < this->length()); |
| - return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize); |
| + return Heap::read_byte_field(this, kHeaderSize + index * kCharSize); |
| } |
| void ByteArray::set(int index, byte value) { |
| ASSERT(index >= 0 && index < this->length()); |
| - WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value); |
| + Heap::write_byte_field(this, kHeaderSize + index * kCharSize, value); |
| } |
| int ByteArray::get_int(int index) { |
| ASSERT(index >= 0 && (index * kIntSize) < this->length()); |
| - return READ_INT_FIELD(this, kHeaderSize + index * kIntSize); |
| + return Heap::read_int_field(this, kHeaderSize + index * kIntSize); |
| } |
| @@ -3542,14 +3429,14 @@ void ExternalUint8ClampedArray::set(int index, uint8_t value) { |
| void* ExternalArray::external_pointer() { |
| - intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset); |
| + intptr_t ptr = Heap::read_intptr_field(this, kExternalPointerOffset); |
| return reinterpret_cast<void*>(ptr); |
| } |
| void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) { |
| intptr_t ptr = reinterpret_cast<intptr_t>(value); |
| - WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr); |
| + Heap::write_intptr_field(this, kExternalPointerOffset, ptr); |
| } |
| @@ -3720,7 +3607,7 @@ void ExternalFloat64Array::set(int index, double value) { |
| void* FixedTypedArrayBase::DataPtr() { |
| - return FIELD_ADDR(this, kDataOffset); |
| + return Heap::get_field_address(this, kDataOffset); |
| } |
| @@ -3781,7 +3668,7 @@ template <class Traits> |
| typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) { |
| ASSERT((index >= 0) && (index < this->length())); |
| ElementType* ptr = reinterpret_cast<ElementType*>( |
| - FIELD_ADDR(this, kDataOffset)); |
| + Heap::get_field_address(this, kDataOffset)); |
| return ptr[index]; |
| } |
| @@ -3790,7 +3677,7 @@ template<> inline |
| FixedTypedArray<Float64ArrayTraits>::ElementType |
| FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) { |
| ASSERT((index >= 0) && (index < this->length())); |
| - return READ_DOUBLE_FIELD(this, ElementOffset(index)); |
| + return Heap::read_double_field(this, ElementOffset(index)); |
| } |
| @@ -3798,7 +3685,7 @@ template <class Traits> |
| void FixedTypedArray<Traits>::set(int index, ElementType value) { |
| ASSERT((index >= 0) && (index < this->length())); |
| ElementType* ptr = reinterpret_cast<ElementType*>( |
| - FIELD_ADDR(this, kDataOffset)); |
| + Heap::get_field_address(this, kDataOffset)); |
| ptr[index] = value; |
| } |
| @@ -3807,7 +3694,7 @@ template<> inline |
| void FixedTypedArray<Float64ArrayTraits>::set( |
| int index, Float64ArrayTraits::ElementType value) { |
| ASSERT((index >= 0) && (index < this->length())); |
| - WRITE_DOUBLE_FIELD(this, ElementOffset(index), value); |
| + Heap::write_double_field(this, ElementOffset(index), value); |
| } |
| @@ -3931,29 +3818,29 @@ Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) { |
| int Map::visitor_id() { |
| - return READ_BYTE_FIELD(this, kVisitorIdOffset); |
| + return Heap::read_byte_field(this, kVisitorIdOffset); |
| } |
| void Map::set_visitor_id(int id) { |
| ASSERT(0 <= id && id < 256); |
| - WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id)); |
| + Heap::write_byte_field(this, kVisitorIdOffset, static_cast<byte>(id)); |
| } |
| int Map::instance_size() { |
| - return NOBARRIER_READ_BYTE_FIELD( |
| + return Heap::nobarrier_read_byte_field( |
| this, kInstanceSizeOffset) << kPointerSizeLog2; |
| } |
| int Map::inobject_properties() { |
| - return READ_BYTE_FIELD(this, kInObjectPropertiesOffset); |
| + return Heap::read_byte_field(this, kInObjectPropertiesOffset); |
| } |
| int Map::pre_allocated_property_fields() { |
| - return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset); |
| + return Heap::read_byte_field(this, kPreAllocatedPropertyFieldsOffset); |
| } |
| @@ -4013,62 +3900,66 @@ void Map::set_instance_size(int value) { |
| ASSERT_EQ(0, value & (kPointerSize - 1)); |
| value >>= kPointerSizeLog2; |
| ASSERT(0 <= value && value < 256); |
| - NOBARRIER_WRITE_BYTE_FIELD( |
| + Heap::nobarrier_write_byte_field( |
| this, kInstanceSizeOffset, static_cast<byte>(value)); |
| } |
| void Map::set_inobject_properties(int value) { |
| ASSERT(0 <= value && value < 256); |
| - WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value)); |
| + Heap::write_byte_field( |
| + this, |
| + kInObjectPropertiesOffset, |
| + static_cast<byte>(value)); |
| } |
| void Map::set_pre_allocated_property_fields(int value) { |
| ASSERT(0 <= value && value < 256); |
| - WRITE_BYTE_FIELD(this, |
| + Heap::write_byte_field(this, |
| kPreAllocatedPropertyFieldsOffset, |
| static_cast<byte>(value)); |
| } |
| InstanceType Map::instance_type() { |
| - return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset)); |
| + return static_cast<InstanceType>( |
| + Heap::read_byte_field(this, kInstanceTypeOffset)); |
| } |
| void Map::set_instance_type(InstanceType value) { |
| - WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value); |
| + Heap::write_byte_field(this, kInstanceTypeOffset, value); |
| } |
| int Map::unused_property_fields() { |
| - return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset); |
| + return Heap::read_byte_field(this, kUnusedPropertyFieldsOffset); |
| } |
| void Map::set_unused_property_fields(int value) { |
| - WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255)); |
| + Heap::write_byte_field(this, kUnusedPropertyFieldsOffset, Min(value, 255)); |
| } |
| byte Map::bit_field() { |
| - return READ_BYTE_FIELD(this, kBitFieldOffset); |
| + return Heap::read_byte_field(this, kBitFieldOffset); |
| } |
| void Map::set_bit_field(byte value) { |
| - WRITE_BYTE_FIELD(this, kBitFieldOffset, value); |
| + Heap::write_byte_field(this, kBitFieldOffset, value); |
| } |
| byte Map::bit_field2() { |
| - return READ_BYTE_FIELD(this, kBitField2Offset); |
| + return Heap::read_byte_field(this, kBitField2Offset); |
| } |
| void Map::set_bit_field2(byte value) { |
| - WRITE_BYTE_FIELD(this, kBitField2Offset, value); |
| + Heap::write_byte_field(this, kBitField2Offset, value); |
| } |
| @@ -4158,7 +4049,7 @@ bool Map::is_dictionary_map() { |
| Code::Flags Code::flags() { |
| - return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset)); |
| + return static_cast<Flags>(Heap::read_int_field(this, kFlagsOffset)); |
| } |
| @@ -4319,7 +4210,7 @@ void DependentCode::ExtendGroup(DependencyGroup group) { |
| void Code::set_flags(Code::Flags flags) { |
| STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1); |
| - WRITE_INT_FIELD(this, kFlagsOffset, flags); |
| + Heap::write_int_field(this, kFlagsOffset, flags); |
| } |
| @@ -4353,41 +4244,41 @@ Code::StubType Code::type() { |
| // For initialization. |
| void Code::set_raw_kind_specific_flags1(int value) { |
| - WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value); |
| + Heap::write_int_field(this, kKindSpecificFlags1Offset, value); |
| } |
| void Code::set_raw_kind_specific_flags2(int value) { |
| - WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value); |
| + Heap::write_int_field(this, kKindSpecificFlags2Offset, value); |
| } |
| inline bool Code::is_crankshafted() { |
| return IsCrankshaftedField::decode( |
| - READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)); |
| + Heap::read_uint32_field(this, kKindSpecificFlags2Offset)); |
| } |
| inline void Code::set_is_crankshafted(bool value) { |
| - int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); |
| + int previous = Heap::read_uint32_field(this, kKindSpecificFlags2Offset); |
| int updated = IsCrankshaftedField::update(previous, value); |
| - WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); |
| + Heap::write_uint32_field(this, kKindSpecificFlags2Offset, updated); |
| } |
| int Code::major_key() { |
| ASSERT(has_major_key()); |
| return StubMajorKeyField::decode( |
| - READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)); |
| + Heap::read_uint32_field(this, kKindSpecificFlags2Offset)); |
| } |
| void Code::set_major_key(int major) { |
| ASSERT(has_major_key()); |
| ASSERT(0 <= major && major < 256); |
| - int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); |
| + int previous = Heap::read_uint32_field(this, kKindSpecificFlags2Offset); |
| int updated = StubMajorKeyField::update(previous, major); |
| - WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); |
| + Heap::write_uint32_field(this, kKindSpecificFlags2Offset, updated); |
| } |
| @@ -4407,107 +4298,107 @@ bool Code::has_major_key() { |
| bool Code::optimizable() { |
| ASSERT_EQ(FUNCTION, kind()); |
| - return READ_BYTE_FIELD(this, kOptimizableOffset) == 1; |
| + return Heap::read_byte_field(this, kOptimizableOffset) == 1; |
| } |
| void Code::set_optimizable(bool value) { |
| ASSERT_EQ(FUNCTION, kind()); |
| - WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0); |
| + Heap::write_byte_field(this, kOptimizableOffset, value ? 1 : 0); |
| } |
| bool Code::has_deoptimization_support() { |
| ASSERT_EQ(FUNCTION, kind()); |
| - byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); |
| + byte flags = Heap::read_byte_field(this, kFullCodeFlags); |
| return FullCodeFlagsHasDeoptimizationSupportField::decode(flags); |
| } |
| void Code::set_has_deoptimization_support(bool value) { |
| ASSERT_EQ(FUNCTION, kind()); |
| - byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); |
| + byte flags = Heap::read_byte_field(this, kFullCodeFlags); |
| flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value); |
| - WRITE_BYTE_FIELD(this, kFullCodeFlags, flags); |
| + Heap::write_byte_field(this, kFullCodeFlags, flags); |
| } |
| bool Code::has_debug_break_slots() { |
| ASSERT_EQ(FUNCTION, kind()); |
| - byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); |
| + byte flags = Heap::read_byte_field(this, kFullCodeFlags); |
| return FullCodeFlagsHasDebugBreakSlotsField::decode(flags); |
| } |
| void Code::set_has_debug_break_slots(bool value) { |
| ASSERT_EQ(FUNCTION, kind()); |
| - byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); |
| + byte flags = Heap::read_byte_field(this, kFullCodeFlags); |
| flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value); |
| - WRITE_BYTE_FIELD(this, kFullCodeFlags, flags); |
| + Heap::write_byte_field(this, kFullCodeFlags, flags); |
| } |
| bool Code::is_compiled_optimizable() { |
| ASSERT_EQ(FUNCTION, kind()); |
| - byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); |
| + byte flags = Heap::read_byte_field(this, kFullCodeFlags); |
| return FullCodeFlagsIsCompiledOptimizable::decode(flags); |
| } |
| void Code::set_compiled_optimizable(bool value) { |
| ASSERT_EQ(FUNCTION, kind()); |
| - byte flags = READ_BYTE_FIELD(this, kFullCodeFlags); |
| + byte flags = Heap::read_byte_field(this, kFullCodeFlags); |
| flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value); |
| - WRITE_BYTE_FIELD(this, kFullCodeFlags, flags); |
| + Heap::write_byte_field(this, kFullCodeFlags, flags); |
| } |
| int Code::allow_osr_at_loop_nesting_level() { |
| ASSERT_EQ(FUNCTION, kind()); |
| - return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset); |
| + return Heap::read_byte_field(this, kAllowOSRAtLoopNestingLevelOffset); |
| } |
| void Code::set_allow_osr_at_loop_nesting_level(int level) { |
| ASSERT_EQ(FUNCTION, kind()); |
| ASSERT(level >= 0 && level <= kMaxLoopNestingMarker); |
| - WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level); |
| + Heap::write_byte_field(this, kAllowOSRAtLoopNestingLevelOffset, level); |
| } |
| int Code::profiler_ticks() { |
| ASSERT_EQ(FUNCTION, kind()); |
| - return READ_BYTE_FIELD(this, kProfilerTicksOffset); |
| + return Heap::read_byte_field(this, kProfilerTicksOffset); |
| } |
| void Code::set_profiler_ticks(int ticks) { |
| ASSERT_EQ(FUNCTION, kind()); |
| ASSERT(ticks < 256); |
| - WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks); |
| + Heap::write_byte_field(this, kProfilerTicksOffset, ticks); |
| } |
| unsigned Code::stack_slots() { |
| ASSERT(is_crankshafted()); |
| return StackSlotsField::decode( |
| - READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); |
| + Heap::read_uint32_field(this, kKindSpecificFlags1Offset)); |
| } |
| void Code::set_stack_slots(unsigned slots) { |
| CHECK(slots <= (1 << kStackSlotsBitCount)); |
| ASSERT(is_crankshafted()); |
| - int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); |
| + int previous = Heap::read_uint32_field(this, kKindSpecificFlags1Offset); |
| int updated = StackSlotsField::update(previous, slots); |
| - WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); |
| + Heap::write_uint32_field(this, kKindSpecificFlags1Offset, updated); |
| } |
| unsigned Code::safepoint_table_offset() { |
| ASSERT(is_crankshafted()); |
| return SafepointTableOffsetField::decode( |
| - READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)); |
| + Heap::read_uint32_field(this, kKindSpecificFlags2Offset)); |
| } |
| @@ -4515,40 +4406,40 @@ void Code::set_safepoint_table_offset(unsigned offset) { |
| CHECK(offset <= (1 << kSafepointTableOffsetBitCount)); |
| ASSERT(is_crankshafted()); |
| ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize))); |
| - int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); |
| + int previous = Heap::read_uint32_field(this, kKindSpecificFlags2Offset); |
| int updated = SafepointTableOffsetField::update(previous, offset); |
| - WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); |
| + Heap::write_uint32_field(this, kKindSpecificFlags2Offset, updated); |
| } |
| unsigned Code::back_edge_table_offset() { |
| ASSERT_EQ(FUNCTION, kind()); |
| return BackEdgeTableOffsetField::decode( |
| - READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)); |
| + Heap::read_uint32_field(this, kKindSpecificFlags2Offset)); |
| } |
| void Code::set_back_edge_table_offset(unsigned offset) { |
| ASSERT_EQ(FUNCTION, kind()); |
| ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize))); |
| - int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); |
| + int previous = Heap::read_uint32_field(this, kKindSpecificFlags2Offset); |
| int updated = BackEdgeTableOffsetField::update(previous, offset); |
| - WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); |
| + Heap::write_uint32_field(this, kKindSpecificFlags2Offset, updated); |
| } |
| bool Code::back_edges_patched_for_osr() { |
| ASSERT_EQ(FUNCTION, kind()); |
| return BackEdgesPatchedForOSRField::decode( |
| - READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)); |
| + Heap::read_uint32_field(this, kKindSpecificFlags2Offset)); |
| } |
| void Code::set_back_edges_patched_for_osr(bool value) { |
| ASSERT_EQ(FUNCTION, kind()); |
| - int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); |
| + int previous = Heap::read_uint32_field(this, kKindSpecificFlags2Offset); |
| int updated = BackEdgesPatchedForOSRField::update(previous, value); |
| - WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); |
| + Heap::write_uint32_field(this, kKindSpecificFlags2Offset, updated); |
| } |
| @@ -4561,59 +4452,59 @@ byte Code::to_boolean_state() { |
| bool Code::has_function_cache() { |
| ASSERT(kind() == STUB); |
| return HasFunctionCacheField::decode( |
| - READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); |
| + Heap::read_uint32_field(this, kKindSpecificFlags1Offset)); |
| } |
| void Code::set_has_function_cache(bool flag) { |
| ASSERT(kind() == STUB); |
| - int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); |
| + int previous = Heap::read_uint32_field(this, kKindSpecificFlags1Offset); |
| int updated = HasFunctionCacheField::update(previous, flag); |
| - WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); |
| + Heap::write_uint32_field(this, kKindSpecificFlags1Offset, updated); |
| } |
| bool Code::marked_for_deoptimization() { |
| ASSERT(kind() == OPTIMIZED_FUNCTION); |
| return MarkedForDeoptimizationField::decode( |
| - READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); |
| + Heap::read_uint32_field(this, kKindSpecificFlags1Offset)); |
| } |
| void Code::set_marked_for_deoptimization(bool flag) { |
| ASSERT(kind() == OPTIMIZED_FUNCTION); |
| ASSERT(!flag || AllowDeoptimization::IsAllowed(GetIsolate())); |
| - int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); |
| + int previous = Heap::read_uint32_field(this, kKindSpecificFlags1Offset); |
| int updated = MarkedForDeoptimizationField::update(previous, flag); |
| - WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); |
| + Heap::write_uint32_field(this, kKindSpecificFlags1Offset, updated); |
| } |
| bool Code::is_weak_stub() { |
| return CanBeWeakStub() && WeakStubField::decode( |
| - READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); |
| + Heap::read_uint32_field(this, kKindSpecificFlags1Offset)); |
| } |
| void Code::mark_as_weak_stub() { |
| ASSERT(CanBeWeakStub()); |
| - int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); |
| + int previous = Heap::read_uint32_field(this, kKindSpecificFlags1Offset); |
| int updated = WeakStubField::update(previous, true); |
| - WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); |
| + Heap::write_uint32_field(this, kKindSpecificFlags1Offset, updated); |
| } |
| bool Code::is_invalidated_weak_stub() { |
| return is_weak_stub() && InvalidatedWeakStubField::decode( |
| - READ_UINT32_FIELD(this, kKindSpecificFlags1Offset)); |
| + Heap::read_uint32_field(this, kKindSpecificFlags1Offset)); |
| } |
| void Code::mark_as_invalidated_weak_stub() { |
| ASSERT(is_inline_cache_stub()); |
| - int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset); |
| + int previous = Heap::read_uint32_field(this, kKindSpecificFlags1Offset); |
| int updated = InvalidatedWeakStubField::update(previous, true); |
| - WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated); |
| + Heap::write_uint32_field(this, kKindSpecificFlags1Offset, updated); |
| } |
| @@ -4639,14 +4530,13 @@ bool Code::is_debug_stub() { |
| ConstantPoolArray* Code::constant_pool() { |
| - return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset)); |
| + return ConstantPoolArray::cast(Heap::read_field(this, kConstantPoolOffset)); |
| } |
| void Code::set_constant_pool(Object* value) { |
| ASSERT(value->IsConstantPoolArray()); |
| - WRITE_FIELD(this, kConstantPoolOffset, value); |
| - WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value); |
| + Heap::write_field(this, kConstantPoolOffset, value, UPDATE_WRITE_BARRIER); |
| } |
| @@ -4768,14 +4658,13 @@ bool Code::IsWeakObjectInIC(Object* object) { |
| Object* Map::prototype() { |
| - return READ_FIELD(this, kPrototypeOffset); |
| + return Heap::read_field(this, kPrototypeOffset); |
| } |
| void Map::set_prototype(Object* value, WriteBarrierMode mode) { |
| ASSERT(value->IsNull() || value->IsJSReceiver()); |
| - WRITE_FIELD(this, kPrototypeOffset, value); |
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode); |
| + Heap::write_field(this, kPrototypeOffset, value, mode); |
| } |
| @@ -4809,12 +4698,15 @@ void Map::set_bit_field3(uint32_t bits) { |
| // Ensure the upper 2 bits have the same value by sign extending it. This is |
| // necessary to be able to use the 31st bit. |
| int value = bits << 1; |
| - WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1)); |
| + Heap::write_field(this, |
| + kBitField3Offset, |
| + Smi::FromInt(value >> 1), |
| + SKIP_WRITE_BARRIER); |
| } |
| uint32_t Map::bit_field3() { |
| - Object* value = READ_FIELD(this, kBitField3Offset); |
| + Object* value = Heap::read_field(this, kBitField3Offset); |
| return Smi::cast(value)->value(); |
| } |
| @@ -4829,7 +4721,7 @@ void Map::AppendDescriptor(Descriptor* desc) { |
| Object* Map::GetBackPointer() { |
| - Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); |
| + Object* object = Heap::read_field(this, kTransitionsOrBackPointerOffset); |
| if (object->IsDescriptorArray()) { |
| return TransitionArray::cast(object)->back_pointer_storage(); |
| } else { |
| @@ -4845,7 +4737,7 @@ bool Map::HasElementsTransition() { |
| bool Map::HasTransitionArray() { |
| - Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); |
| + Object* object = Heap::read_field(this, kTransitionsOrBackPointerOffset); |
| return object->IsTransitionArray(); |
| } |
| @@ -4906,7 +4798,7 @@ bool Map::HasPrototypeTransitions() { |
| TransitionArray* Map::transitions() { |
| ASSERT(HasTransitionArray()); |
| - Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); |
| + Object* object = Heap::read_field(this, kTransitionsOrBackPointerOffset); |
| return TransitionArray::cast(object); |
| } |
| @@ -4933,15 +4825,19 @@ void Map::set_transitions(TransitionArray* transition_array, |
| ZapTransitions(); |
| } |
| - WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array); |
| - CONDITIONAL_WRITE_BARRIER( |
| - GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode); |
| + Heap::write_field(this, |
| + kTransitionsOrBackPointerOffset, |
| + transition_array, |
| + mode); |
| } |
| void Map::init_back_pointer(Object* undefined) { |
| ASSERT(undefined->IsUndefined()); |
| - WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined); |
| + Heap::write_field(this, |
| + kTransitionsOrBackPointerOffset, |
| + undefined, |
| + SKIP_WRITE_BARRIER); |
| } |
| @@ -4949,13 +4845,11 @@ void Map::SetBackPointer(Object* value, WriteBarrierMode mode) { |
| ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE); |
| ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) || |
| (value->IsMap() && GetBackPointer()->IsUndefined())); |
| - Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); |
| + Object* object = Heap::read_field(this, kTransitionsOrBackPointerOffset); |
| if (object->IsTransitionArray()) { |
| TransitionArray::cast(object)->set_back_pointer_storage(value); |
| } else { |
| - WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value); |
| - CONDITIONAL_WRITE_BARRIER( |
| - GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode); |
| + Heap::write_field(this, kTransitionsOrBackPointerOffset, value, mode); |
| } |
| } |
| @@ -5168,7 +5062,7 @@ SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset) |
| #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \ |
| STATIC_ASSERT(holder::offset % kPointerSize == 0); \ |
| int holder::name() { \ |
| - int value = READ_INT_FIELD(this, offset); \ |
| + int value = Heap::read_int_field(this, offset); \ |
| ASSERT(kHeapObjectTag == 1); \ |
| ASSERT((value & kHeapObjectTag) == 0); \ |
| return value >> 1; \ |
| @@ -5177,7 +5071,7 @@ SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset) |
| ASSERT(kHeapObjectTag == 1); \ |
| ASSERT((value & 0xC0000000) == 0xC0000000 || \ |
| (value & 0xC0000000) == 0x000000000); \ |
| - WRITE_INT_FIELD(this, \ |
| + Heap::write_int_field(this, \ |
| offset, \ |
| (value << 1) & ~kHeapObjectTag); \ |
| } |
| @@ -5225,13 +5119,16 @@ PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, |
| int SharedFunctionInfo::construction_count() { |
| - return READ_BYTE_FIELD(this, kConstructionCountOffset); |
| + return Heap::read_byte_field(this, kConstructionCountOffset); |
| } |
| void SharedFunctionInfo::set_construction_count(int value) { |
| ASSERT(0 <= value && value < 256); |
| - WRITE_BYTE_FIELD(this, kConstructionCountOffset, static_cast<byte>(value)); |
| + Heap::write_byte_field( |
| + this, |
| + kConstructionCountOffset, |
| + static_cast<byte>(value)); |
| } |
| @@ -5337,14 +5234,13 @@ void SharedFunctionInfo::set_start_position(int start_position) { |
| Code* SharedFunctionInfo::code() { |
| - return Code::cast(READ_FIELD(this, kCodeOffset)); |
| + return Code::cast(Heap::read_field(this, kCodeOffset)); |
| } |
| void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) { |
| ASSERT(value->kind() != Code::OPTIMIZED_FUNCTION); |
| - WRITE_FIELD(this, kCodeOffset, value); |
| - CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode); |
| + Heap::write_field(this, kCodeOffset, value, mode); |
| } |
| @@ -5362,18 +5258,16 @@ void SharedFunctionInfo::ReplaceCode(Code* value) { |
| ScopeInfo* SharedFunctionInfo::scope_info() { |
| - return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset)); |
| + return reinterpret_cast<ScopeInfo*>(Heap::read_field(this, kScopeInfoOffset)); |
| } |
| void SharedFunctionInfo::set_scope_info(ScopeInfo* value, |
| WriteBarrierMode mode) { |
| - WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value)); |
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), |
| - this, |
| - kScopeInfoOffset, |
| - reinterpret_cast<Object*>(value), |
| - mode); |
| + Heap::write_field(this, |
| + kScopeInfoOffset, |
| + reinterpret_cast<Object*>(value), |
| + mode); |
| } |
| @@ -5521,15 +5415,18 @@ bool JSFunction::IsInOptimizationQueue() { |
| Code* JSFunction::code() { |
| - return Code::cast( |
| - Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset))); |
| + return Code::cast(Code::GetObjectFromEntryAddress( |
| + Heap::get_field_address(this, kCodeEntryOffset))); |
| } |
| void JSFunction::set_code(Code* value) { |
| ASSERT(!GetHeap()->InNewSpace(value)); |
| Address entry = value->entry(); |
| - WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); |
| + Heap::write_intptr_field( |
| + this, |
| + kCodeEntryOffset, |
| + reinterpret_cast<intptr_t>(entry)); |
| GetHeap()->incremental_marking()->RecordWriteOfCodeEntry( |
| this, |
| HeapObject::RawField(this, kCodeEntryOffset), |
| @@ -5540,7 +5437,10 @@ void JSFunction::set_code(Code* value) { |
| void JSFunction::set_code_no_write_barrier(Code* value) { |
| ASSERT(!GetHeap()->InNewSpace(value)); |
| Address entry = value->entry(); |
| - WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); |
| + Heap::write_intptr_field( |
| + this, |
| + kCodeEntryOffset, |
| + reinterpret_cast<intptr_t>(entry)); |
| } |
| @@ -5568,14 +5468,13 @@ void JSFunction::ReplaceCode(Code* code) { |
| Context* JSFunction::context() { |
| - return Context::cast(READ_FIELD(this, kContextOffset)); |
| + return Context::cast(Heap::read_field(this, kContextOffset)); |
| } |
| void JSFunction::set_context(Object* value) { |
| ASSERT(value->IsUndefined() || value->IsContext()); |
| - WRITE_FIELD(this, kContextOffset, value); |
| - WRITE_BARRIER(GetHeap(), this, kContextOffset, value); |
| + Heap::write_field(this, kContextOffset, value, UPDATE_WRITE_BARRIER); |
| } |
| ACCESSORS(JSFunction, prototype_or_initial_map, Object, |
| @@ -5672,28 +5571,30 @@ int JSFunction::NumberOfLiterals() { |
| Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) { |
| ASSERT(id < kJSBuiltinsCount); // id is unsigned. |
| - return READ_FIELD(this, OffsetOfFunctionWithId(id)); |
| + return Heap::read_field(this, OffsetOfFunctionWithId(id)); |
| } |
| void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id, |
| Object* value) { |
| ASSERT(id < kJSBuiltinsCount); // id is unsigned. |
| - WRITE_FIELD(this, OffsetOfFunctionWithId(id), value); |
| - WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value); |
| + Heap::write_field(this, |
| + OffsetOfFunctionWithId(id), |
| + value, |
| + UPDATE_WRITE_BARRIER); |
| } |
| Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) { |
| ASSERT(id < kJSBuiltinsCount); // id is unsigned. |
| - return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id))); |
| + return Code::cast(Heap::read_field(this, OffsetOfCodeWithId(id))); |
| } |
| void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id, |
| Code* value) { |
| ASSERT(id < kJSBuiltinsCount); // id is unsigned. |
| - WRITE_FIELD(this, OffsetOfCodeWithId(id), value); |
| + Heap::write_field(this, OffsetOfCodeWithId(id), value, UPDATE_WRITE_BARRIER); |
| ASSERT(!GetHeap()->InNewSpace(value)); |
| } |
| @@ -5707,7 +5608,7 @@ ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset) |
| void JSProxy::InitializeBody(int object_size, Object* value) { |
| ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value)); |
| for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { |
| - WRITE_FIELD(this, offset, value); |
| + Heap::write_field(this, offset, value, SKIP_WRITE_BARRIER); |
| } |
| } |
| @@ -5719,13 +5620,12 @@ ACCESSORS(JSMap, table, Object, kTableOffset) |
| #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset) \ |
| template<class Derived, class TableType> \ |
| type* OrderedHashTableIterator<Derived, TableType>::name() { \ |
| - return type::cast(READ_FIELD(this, offset)); \ |
| + return type::cast(Heap::read_field(this, offset)); \ |
| } \ |
| template<class Derived, class TableType> \ |
| void OrderedHashTableIterator<Derived, TableType>::set_##name( \ |
| type* value, WriteBarrierMode mode) { \ |
| - WRITE_FIELD(this, offset, value); \ |
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \ |
| + Heap::write_field(this, offset, value, mode); \ |
| } |
| ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset) |
| @@ -5745,12 +5645,13 @@ ACCESSORS(JSWeakCollection, next, Object, kNextOffset) |
| Address Foreign::foreign_address() { |
| - return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset)); |
| + return AddressFrom<Address>( |
| + Heap::read_intptr_field(this, kForeignAddressOffset)); |
| } |
| void Foreign::set_foreign_address(Address value) { |
| - WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value)); |
| + Heap::write_intptr_field(this, kForeignAddressOffset, OffsetFrom(value)); |
| } |
| @@ -5833,13 +5734,13 @@ ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset) |
| void Code::WipeOutHeader() { |
| - WRITE_FIELD(this, kRelocationInfoOffset, NULL); |
| - WRITE_FIELD(this, kHandlerTableOffset, NULL); |
| - WRITE_FIELD(this, kDeoptimizationDataOffset, NULL); |
| - WRITE_FIELD(this, kConstantPoolOffset, NULL); |
| + Heap::write_field(this, kRelocationInfoOffset, NULL, SKIP_WRITE_BARRIER); |
| + Heap::write_field(this, kHandlerTableOffset, NULL, SKIP_WRITE_BARRIER); |
| + Heap::write_field(this, kDeoptimizationDataOffset, NULL, SKIP_WRITE_BARRIER); |
| + Heap::write_field(this, kConstantPoolOffset, NULL, SKIP_WRITE_BARRIER); |
| // Do not wipe out e.g. a minor key. |
| - if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) { |
| - WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL); |
| + if (!Heap::read_field(this, kTypeFeedbackInfoOffset)->IsSmi()) { |
| + Heap::write_field(this, kTypeFeedbackInfoOffset, NULL, SKIP_WRITE_BARRIER); |
| } |
| } |
| @@ -5853,8 +5754,6 @@ Object* Code::type_feedback_info() { |
| void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) { |
| ASSERT(kind() == FUNCTION); |
| set_raw_type_feedback_info(value, mode); |
| - CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset, |
| - value, mode); |
| } |
| @@ -5883,7 +5782,7 @@ INT_ACCESSORS(Code, ic_age, kICAgeOffset) |
| byte* Code::instruction_start() { |
| - return FIELD_ADDR(this, kHeaderSize); |
| + return Heap::get_field_address(this, kHeaderSize); |
| } |
| @@ -5898,7 +5797,8 @@ int Code::body_size() { |
| ByteArray* Code::unchecked_relocation_info() { |
| - return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset)); |
| + return reinterpret_cast<ByteArray*>( |
| + Heap::read_field(this, kRelocationInfoOffset)); |
| } |
| @@ -5926,14 +5826,14 @@ ACCESSORS(JSArray, length, Object, kLengthOffset) |
| void* JSArrayBuffer::backing_store() { |
| - intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset); |
| + intptr_t ptr = Heap::read_intptr_field(this, kBackingStoreOffset); |
| return reinterpret_cast<void*>(ptr); |
| } |
| void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) { |
| intptr_t ptr = reinterpret_cast<intptr_t>(value); |
| - WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr); |
| + Heap::write_intptr_field(this, kBackingStoreOffset, ptr); |
| } |
| @@ -6028,7 +5928,8 @@ ElementsKind JSObject::GetElementsKind() { |
| ElementsKind kind = map()->elements_kind(); |
| #if DEBUG |
| FixedArrayBase* fixed_array = |
| - reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset)); |
| + reinterpret_cast<FixedArrayBase*>(Heap::read_field(this, |
| + kElementsOffset)); |
| // If a GC was caused while constructing this object, the elements |
| // pointer may point to a one pointer filler map. |
| @@ -6639,7 +6540,10 @@ void Map::ClearCodeCache(Heap* heap) { |
| // - MarkCompactCollector::MarkUnmarkedObject |
| // - IncrementalMarking::Step |
| ASSERT(!heap->InNewSpace(heap->empty_fixed_array())); |
| - WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array()); |
| + Heap::write_field(this, |
| + kCodeCacheOffset, |
| + heap->empty_fixed_array(), |
| + SKIP_WRITE_BARRIER); |
| } |
| @@ -6712,27 +6616,30 @@ Object* TypeFeedbackInfo::RawUninitializedSentinel(Heap* heap) { |
| int TypeFeedbackInfo::ic_total_count() { |
| - int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value(); |
| + int current = Smi::cast(Heap::read_field(this, kStorage1Offset))->value(); |
| return ICTotalCountField::decode(current); |
| } |
| void TypeFeedbackInfo::set_ic_total_count(int count) { |
| - int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value(); |
| + int value = Smi::cast(Heap::read_field(this, kStorage1Offset))->value(); |
| value = ICTotalCountField::update(value, |
| ICTotalCountField::decode(count)); |
| - WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value)); |
| + Heap::write_field(this, |
| + kStorage1Offset, |
| + Smi::FromInt(value), |
| + SKIP_WRITE_BARRIER); |
| } |
| int TypeFeedbackInfo::ic_with_type_info_count() { |
| - int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value(); |
| + int current = Smi::cast(Heap::read_field(this, kStorage2Offset))->value(); |
| return ICsWithTypeInfoCountField::decode(current); |
| } |
| void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) { |
| - int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value(); |
| + int value = Smi::cast(Heap::read_field(this, kStorage2Offset))->value(); |
| int new_count = ICsWithTypeInfoCountField::decode(value) + delta; |
| // We can get negative count here when the type-feedback info is |
| // shared between two code objects. The can only happen when |
| @@ -6742,48 +6649,57 @@ void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) { |
| if (new_count >= 0) { |
| new_count &= ICsWithTypeInfoCountField::kMask; |
| value = ICsWithTypeInfoCountField::update(value, new_count); |
| - WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value)); |
| + Heap::write_field(this, |
| + kStorage2Offset, |
| + Smi::FromInt(value), |
| + SKIP_WRITE_BARRIER); |
| } |
| } |
| void TypeFeedbackInfo::initialize_storage() { |
| - WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0)); |
| - WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0)); |
| + Heap::write_field(this, kStorage1Offset, Smi::FromInt(0), SKIP_WRITE_BARRIER); |
| + Heap::write_field(this, kStorage2Offset, Smi::FromInt(0), SKIP_WRITE_BARRIER); |
| } |
| void TypeFeedbackInfo::change_own_type_change_checksum() { |
| - int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value(); |
| + int value = Smi::cast(Heap::read_field(this, kStorage1Offset))->value(); |
| int checksum = OwnTypeChangeChecksum::decode(value); |
| checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits); |
| value = OwnTypeChangeChecksum::update(value, checksum); |
| // Ensure packed bit field is in Smi range. |
| if (value > Smi::kMaxValue) value |= Smi::kMinValue; |
| if (value < Smi::kMinValue) value &= ~Smi::kMinValue; |
| - WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value)); |
| + Heap::write_field(this, |
| + kStorage1Offset, |
| + Smi::FromInt(value), |
| + SKIP_WRITE_BARRIER); |
| } |
| void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) { |
| - int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value(); |
| + int value = Smi::cast(Heap::read_field(this, kStorage2Offset))->value(); |
| int mask = (1 << kTypeChangeChecksumBits) - 1; |
| value = InlinedTypeChangeChecksum::update(value, checksum & mask); |
| // Ensure packed bit field is in Smi range. |
| if (value > Smi::kMaxValue) value |= Smi::kMinValue; |
| if (value < Smi::kMinValue) value &= ~Smi::kMinValue; |
| - WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value)); |
| + Heap::write_field(this, |
| + kStorage2Offset, |
| + Smi::FromInt(value), |
| + SKIP_WRITE_BARRIER); |
| } |
| int TypeFeedbackInfo::own_type_change_checksum() { |
| - int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value(); |
| + int value = Smi::cast(Heap::read_field(this, kStorage1Offset))->value(); |
| return OwnTypeChangeChecksum::decode(value); |
| } |
| bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) { |
| - int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value(); |
| + int value = Smi::cast(Heap::read_field(this, kStorage2Offset))->value(); |
| int mask = (1 << kTypeChangeChecksumBits) - 1; |
| return InlinedTypeChangeChecksum::decode(value) == (checksum & mask); |
| } |
| @@ -6816,44 +6732,46 @@ int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) { |
| void Foreign::ForeignIterateBody(ObjectVisitor* v) { |
| v->VisitExternalReference( |
| - reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset))); |
| + reinterpret_cast<Address*>( |
| + Heap::get_field_address(this, kForeignAddressOffset))); |
| } |
| template<typename StaticVisitor> |
| void Foreign::ForeignIterateBody() { |
| StaticVisitor::VisitExternalReference( |
| - reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset))); |
| + reinterpret_cast<Address*>( |
| + Heap::get_field_address(this, kForeignAddressOffset))); |
| } |
| void ExternalAsciiString::ExternalAsciiStringIterateBody(ObjectVisitor* v) { |
| typedef v8::String::ExternalAsciiStringResource Resource; |
| - v->VisitExternalAsciiString( |
| - reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); |
| + v->VisitExternalAsciiString(reinterpret_cast<Resource**>( |
| + Heap::get_field_address(this, kResourceOffset))); |
| } |
| template<typename StaticVisitor> |
| void ExternalAsciiString::ExternalAsciiStringIterateBody() { |
| typedef v8::String::ExternalAsciiStringResource Resource; |
| - StaticVisitor::VisitExternalAsciiString( |
| - reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); |
| + StaticVisitor::VisitExternalAsciiString(reinterpret_cast<Resource**>( |
| + Heap::get_field_address(this, kResourceOffset))); |
| } |
| void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) { |
| typedef v8::String::ExternalStringResource Resource; |
| - v->VisitExternalTwoByteString( |
| - reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); |
| + v->VisitExternalTwoByteString(reinterpret_cast<Resource**>( |
| + Heap::get_field_address(this, kResourceOffset))); |
| } |
| template<typename StaticVisitor> |
| void ExternalTwoByteString::ExternalTwoByteStringIterateBody() { |
| typedef v8::String::ExternalStringResource Resource; |
| - StaticVisitor::VisitExternalTwoByteString( |
| - reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); |
| + StaticVisitor::VisitExternalTwoByteString(reinterpret_cast<Resource**>( |
| + Heap::get_field_address(this, kResourceOffset))); |
| } |
| @@ -6885,27 +6803,6 @@ void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj, |
| #undef NOBARRIER_SMI_ACCESSORS |
| #undef BOOL_GETTER |
| #undef BOOL_ACCESSORS |
| -#undef FIELD_ADDR |
| -#undef READ_FIELD |
| -#undef NOBARRIER_READ_FIELD |
| -#undef WRITE_FIELD |
| -#undef NOBARRIER_WRITE_FIELD |
| -#undef WRITE_BARRIER |
| -#undef CONDITIONAL_WRITE_BARRIER |
| -#undef READ_DOUBLE_FIELD |
| -#undef WRITE_DOUBLE_FIELD |
| -#undef READ_INT_FIELD |
| -#undef WRITE_INT_FIELD |
| -#undef READ_INTPTR_FIELD |
| -#undef WRITE_INTPTR_FIELD |
| -#undef READ_UINT32_FIELD |
| -#undef WRITE_UINT32_FIELD |
| -#undef READ_SHORT_FIELD |
| -#undef WRITE_SHORT_FIELD |
| -#undef READ_BYTE_FIELD |
| -#undef WRITE_BYTE_FIELD |
| -#undef NOBARRIER_READ_BYTE_FIELD |
| -#undef NOBARRIER_WRITE_BYTE_FIELD |
| } } // namespace v8::internal |