OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 // | 4 // |
5 // Review notes: | 5 // Review notes: |
6 // | 6 // |
7 // - The use of macros in these inline functions may seem superfluous | 7 // - The use of macros in these inline functions may seem superfluous |
8 // but it is absolutely needed to make sure gcc generates optimal | 8 // but it is absolutely needed to make sure gcc generates optimal |
9 // code. gcc is not happy when attempting to inline too deep. | 9 // code. gcc is not happy when attempting to inline too deep. |
10 // | 10 // |
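The note above about macro-heavy inline functions is easier to follow with a concrete picture of the pattern it describes: a field-address macro strips the heap-object tag from a pointer, and typed read/write macros cast the resulting raw address. The sketch below is a minimal standalone illustration of that pattern; macros with these names do appear later in this file, but the bodies here are simplified stand-ins written for the example, not copied from V8.

// Sketch of tag-stripping field access, with simplified stand-in macro bodies.
#include <cstdint>
#include <cstdio>

static const intptr_t kHeapObjectTag = 1;  // low bit set marks a heap pointer

// Compute the untagged address of a field at a byte offset from a tagged pointer.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<char*>(p) + (offset) - kHeapObjectTag)

#define READ_DOUBLE_FIELD(p, offset) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))

#define WRITE_DOUBLE_FIELD(p, offset, value) \
  (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = (value))

int main() {
  // Pretend this buffer is a heap object whose double payload sits at offset 8.
  alignas(8) char backing[16] = {};
  void* tagged = backing + kHeapObjectTag;   // a "tagged pointer" into the buffer
  WRITE_DOUBLE_FIELD(tagged, 8, 3.5);
  std::printf("%f\n", READ_DOUBLE_FIELD(tagged, 8));  // prints 3.500000
}

Because the macro bodies are plain expressions, the compiler sees the address arithmetic directly instead of a chain of nested inline calls, which is the point of the review note.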
(...skipping 1512 matching lines...)
1523 } else { | 1523 } else { |
1524 if (FLAG_unbox_double_fields) { | 1524 if (FLAG_unbox_double_fields) { |
1525 LayoutDescriptorHelper helper(map()); | 1525 LayoutDescriptorHelper helper(map()); |
1526 if (!helper.all_fields_tagged()) return HeapObjectContents::kMixedValues; | 1526 if (!helper.all_fields_tagged()) return HeapObjectContents::kMixedValues; |
1527 } | 1527 } |
1528 return HeapObjectContents::kTaggedValues; | 1528 return HeapObjectContents::kTaggedValues; |
1529 } | 1529 } |
1530 } | 1530 } |
1531 | 1531 |
1532 | 1532 |
1533 void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) { | |
1534 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)), | |
1535 reinterpret_cast<Object**>(FIELD_ADDR(this, end))); | |
1536 } | |
1537 | |
1538 | |
1539 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) { | |
1540 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset))); | |
1541 } | |
1542 | |
1543 | |
1544 void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) { | |
1545 v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset))); | |
1546 } | |
1547 | |
1548 | |
1549 double HeapNumber::value() const { | 1533 double HeapNumber::value() const { |
1550 return READ_DOUBLE_FIELD(this, kValueOffset); | 1534 return READ_DOUBLE_FIELD(this, kValueOffset); |
1551 } | 1535 } |
1552 | 1536 |
1553 | 1537 |
1554 void HeapNumber::set_value(double value) { | 1538 void HeapNumber::set_value(double value) { |
1555 WRITE_DOUBLE_FIELD(this, kValueOffset, value); | 1539 WRITE_DOUBLE_FIELD(this, kValueOffset, value); |
1556 } | 1540 } |
1557 | 1541 |
1558 | 1542 |
(...skipping 2607 matching lines...)
4166 | 4150 |
4167 | 4151 |
4168 int ByteArray::ByteArraySize() { return SizeFor(this->length()); } | 4152 int ByteArray::ByteArraySize() { return SizeFor(this->length()); } |
4169 | 4153 |
4170 | 4154 |
4171 Address ByteArray::GetDataStartAddress() { | 4155 Address ByteArray::GetDataStartAddress() { |
4172 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize; | 4156 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize; |
4173 } | 4157 } |
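As a worked reading of GetDataStartAddress above, the arithmetic with concrete numbers looks as follows. The 8-byte header size is an assumption made for this example (the real ByteArray::kHeaderSize depends on the pointer size of the build); kHeapObjectTag being 1 matches V8's usual tagging convention.

#include <cassert>
#include <cstdint>

int main() {
  // Illustrative values only: a ByteArray whose untagged start is 0x1000,
  // with kHeapObjectTag == 1 and an assumed 8-byte header (map + length).
  const intptr_t kHeapObjectTag = 1;
  const intptr_t kHeaderSize = 8;
  intptr_t tagged_this = 0x1000 + kHeapObjectTag;  // what `this` holds
  intptr_t data_start = tagged_this - kHeapObjectTag + kHeaderSize;
  assert(data_start == 0x1008);  // first byte of the array's payload
  (void)data_start;
}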
4174 | 4158 |
4175 | 4159 |
4176 void BytecodeArray::BytecodeArrayIterateBody(ObjectVisitor* v) { | |
4177 IteratePointer(v, kConstantPoolOffset); | |
4178 } | |
4179 | |
4180 | |
4181 byte BytecodeArray::get(int index) { | 4160 byte BytecodeArray::get(int index) { |
4182 DCHECK(index >= 0 && index < this->length()); | 4161 DCHECK(index >= 0 && index < this->length()); |
4183 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize); | 4162 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize); |
4184 } | 4163 } |
4185 | 4164 |
4186 | 4165 |
4187 void BytecodeArray::set(int index, byte value) { | 4166 void BytecodeArray::set(int index, byte value) { |
4188 DCHECK(index >= 0 && index < this->length()); | 4167 DCHECK(index >= 0 && index < this->length()); |
4189 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value); | 4168 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value); |
4190 } | 4169 } |
(...skipping 340 matching lines...)
4531 layout_descriptor); | 4510 layout_descriptor); |
4532 } | 4511 } |
4533 | 4512 |
4534 | 4513 |
4535 int HeapObject::SizeFromMap(Map* map) { | 4514 int HeapObject::SizeFromMap(Map* map) { |
4536 int instance_size = map->instance_size(); | 4515 int instance_size = map->instance_size(); |
4537 if (instance_size != kVariableSizeSentinel) return instance_size; | 4516 if (instance_size != kVariableSizeSentinel) return instance_size; |
4538 // Only inline the most frequent cases. | 4517 // Only inline the most frequent cases. |
4539 InstanceType instance_type = map->instance_type(); | 4518 InstanceType instance_type = map->instance_type(); |
4540 if (instance_type == FIXED_ARRAY_TYPE) { | 4519 if (instance_type == FIXED_ARRAY_TYPE) { |
4541 return FixedArray::BodyDescriptor::SizeOf(map, this); | 4520 return FixedArray::SizeFor( |
| 4521 reinterpret_cast<FixedArray*>(this)->synchronized_length()); |
4542 } | 4522 } |
4543 if (instance_type == ONE_BYTE_STRING_TYPE || | 4523 if (instance_type == ONE_BYTE_STRING_TYPE || |
4544 instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) { | 4524 instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) { |
4545 // Strings may get concurrently truncated, hence we have to access its | 4525 // Strings may get concurrently truncated, hence we have to access its |
4546 // length synchronized. | 4526 // length synchronized. |
4547 return SeqOneByteString::SizeFor( | 4527 return SeqOneByteString::SizeFor( |
4548 reinterpret_cast<SeqOneByteString*>(this)->synchronized_length()); | 4528 reinterpret_cast<SeqOneByteString*>(this)->synchronized_length()); |
4549 } | 4529 } |
4550 if (instance_type == BYTE_ARRAY_TYPE) { | 4530 if (instance_type == BYTE_ARRAY_TYPE) { |
4551 return reinterpret_cast<ByteArray*>(this)->ByteArraySize(); | 4531 return reinterpret_cast<ByteArray*>(this)->ByteArraySize(); |
(...skipping 2125 matching lines...)
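The synchronized_length() calls in SizeFromMap above exist because another thread can shrink an object in place (the comment calls out concurrent string truncation) while this thread is computing its size, so the length must be read with acquire semantics rather than as a plain field. Below is a minimal standalone sketch of that idea using std::atomic instead of the acquire/release field macros V8 uses; FakeArray, Truncate, and the header/element sizes are invented for the illustration.

#include <atomic>
#include <cstdio>

// Invented stand-in for an array-like heap object whose length can be
// shrunk by one thread while another thread computes its size.
struct FakeArray {
  std::atomic<int> length{16};

  // Truncation publishes the new length with release semantics.
  void Truncate(int new_length) {
    length.store(new_length, std::memory_order_release);
  }

  // Size computation reads the length with acquire semantics, mirroring
  // the role of the synchronized_length() accessors above.
  int SizeFor() const {
    int len = length.load(std::memory_order_acquire);
    const int kHeaderSize = 16;   // invented header size for the sketch
    const int kElementSize = 8;   // invented element size for the sketch
    return kHeaderSize + len * kElementSize;
  }
};

int main() {
  FakeArray a;
  std::printf("%d\n", a.SizeFor());  // 16 + 16*8 = 144
  a.Truncate(4);
  std::printf("%d\n", a.SizeFor());  // 16 + 4*8 = 48
}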
6677 | 6657 |
6678 | 6658 |
6679 bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); } | 6659 bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); } |
6680 | 6660 |
6681 | 6661 |
6682 void JSArrayBuffer::set_is_shared(bool value) { | 6662 void JSArrayBuffer::set_is_shared(bool value) { |
6683 set_bit_field(IsShared::update(bit_field(), value)); | 6663 set_bit_field(IsShared::update(bit_field(), value)); |
6684 } | 6664 } |
6685 | 6665 |
6686 | 6666 |
6687 // static | |
6688 template <typename StaticVisitor> | |
6689 void JSArrayBuffer::JSArrayBufferIterateBody(Heap* heap, HeapObject* obj) { | |
6690 StaticVisitor::VisitPointers( | |
6691 heap, obj, | |
6692 HeapObject::RawField(obj, JSArrayBuffer::BodyDescriptor::kStartOffset), | |
6693 HeapObject::RawField(obj, | |
6694 JSArrayBuffer::kByteLengthOffset + kPointerSize)); | |
6695 StaticVisitor::VisitPointers( | |
6696 heap, obj, HeapObject::RawField(obj, JSArrayBuffer::kSize), | |
6697 HeapObject::RawField(obj, JSArrayBuffer::kSizeWithInternalFields)); | |
6698 } | |
6699 | |
6700 | |
6701 void JSArrayBuffer::JSArrayBufferIterateBody(HeapObject* obj, | |
6702 ObjectVisitor* v) { | |
6703 v->VisitPointers( | |
6704 HeapObject::RawField(obj, JSArrayBuffer::BodyDescriptor::kStartOffset), | |
6705 HeapObject::RawField(obj, | |
6706 JSArrayBuffer::kByteLengthOffset + kPointerSize)); | |
6707 v->VisitPointers( | |
6708 HeapObject::RawField(obj, JSArrayBuffer::kSize), | |
6709 HeapObject::RawField(obj, JSArrayBuffer::kSizeWithInternalFields)); | |
6710 } | |
6711 | |
6712 | |
6713 Object* JSArrayBufferView::byte_offset() const { | 6667 Object* JSArrayBufferView::byte_offset() const { |
6714 if (WasNeutered()) return Smi::FromInt(0); | 6668 if (WasNeutered()) return Smi::FromInt(0); |
6715 return Object::cast(READ_FIELD(this, kByteOffsetOffset)); | 6669 return Object::cast(READ_FIELD(this, kByteOffsetOffset)); |
6716 } | 6670 } |
6717 | 6671 |
6718 | 6672 |
6719 void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) { | 6673 void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) { |
6720 WRITE_FIELD(this, kByteOffsetOffset, value); | 6674 WRITE_FIELD(this, kByteOffsetOffset, value); |
6721 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode); | 6675 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode); |
6722 } | 6676 } |
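The setter above follows the store-then-conditional-write-barrier pattern: the raw field write is followed by a barrier that, unless the caller opted out via WriteBarrierMode, records pointers the generational GC needs to track. The sketch below shows the shape of that pattern; Heap, InNewSpace, RecordWrite, and the remembered-set container are invented stand-ins, not V8's real GC interface.

#include <cstdio>
#include <unordered_set>

// Minimal sketch of "raw store, then conditional write barrier", the pattern
// behind setters such as JSArrayBufferView::set_byte_offset above.
enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };

struct Heap {
  std::unordered_set<void**> remembered_slots;  // stand-in remembered set

  static bool InNewSpace(void* value) {
    // A real heap would test the page the value lives on; here we pretend.
    return value != nullptr;
  }

  void RecordWrite(void** slot) { remembered_slots.insert(slot); }
};

// Store the value, then inform the GC unless the caller promised the barrier
// is unnecessary (e.g. during object initialization).
void ConditionalWriteBarrier(Heap* heap, void** slot, void* value,
                             WriteBarrierMode mode) {
  *slot = value;  // the raw WRITE_FIELD part
  if (mode == UPDATE_WRITE_BARRIER && Heap::InNewSpace(value)) {
    heap->RecordWrite(slot);
  }
}

int main() {
  Heap heap;
  void* field = nullptr;
  int young_object = 42;
  ConditionalWriteBarrier(&heap, &field, &young_object, UPDATE_WRITE_BARRIER);
  std::printf("remembered slots: %zu\n", heap.remembered_slots.size());  // 1
}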
(...skipping 1112 matching lines...)
7835 isolate->set_relocatable_top(this); | 7789 isolate->set_relocatable_top(this); |
7836 } | 7790 } |
7837 | 7791 |
7838 | 7792 |
7839 Relocatable::~Relocatable() { | 7793 Relocatable::~Relocatable() { |
7840 DCHECK_EQ(isolate_->relocatable_top(), this); | 7794 DCHECK_EQ(isolate_->relocatable_top(), this); |
7841 isolate_->set_relocatable_top(prev_); | 7795 isolate_->set_relocatable_top(prev_); |
7842 } | 7796 } |
7843 | 7797 |
7844 | 7798 |
7845 // static | |
7846 template <int start_offset> | |
7847 int FlexibleBodyDescriptor<start_offset>::SizeOf(Map* map, HeapObject* object) { | |
7848 return map->instance_size(); | |
7849 } | |
7850 | |
7851 | |
7852 // static | |
7853 int FixedArray::BodyDescriptor::SizeOf(Map* map, HeapObject* object) { | |
7854 return SizeFor(reinterpret_cast<FixedArray*>(object)->synchronized_length()); | |
7855 } | |
7856 | |
7857 | |
7858 void Foreign::ForeignIterateBody(ObjectVisitor* v) { | |
7859 v->VisitExternalReference( | |
7860 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset))); | |
7861 } | |
7862 | |
7863 | |
7864 template<typename StaticVisitor> | |
7865 void Foreign::ForeignIterateBody() { | |
7866 StaticVisitor::VisitExternalReference( | |
7867 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset))); | |
7868 } | |
7869 | |
7870 | |
7871 void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody(ObjectVisitor* v) { | |
7872 v->VisitPointer( | |
7873 reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset))); | |
7874 } | |
7875 | |
7876 | |
7877 template <typename StaticVisitor> | |
7878 void FixedTypedArrayBase::FixedTypedArrayBaseIterateBody() { | |
7879 StaticVisitor::VisitPointer( | |
7880 reinterpret_cast<Object**>(FIELD_ADDR(this, kBasePointerOffset))); | |
7881 } | |
7882 | |
7883 | |
7884 void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) { | |
7885 typedef v8::String::ExternalOneByteStringResource Resource; | |
7886 v->VisitExternalOneByteString( | |
7887 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); | |
7888 } | |
7889 | |
7890 | |
7891 template <typename StaticVisitor> | |
7892 void ExternalOneByteString::ExternalOneByteStringIterateBody() { | |
7893 typedef v8::String::ExternalOneByteStringResource Resource; | |
7894 StaticVisitor::VisitExternalOneByteString( | |
7895 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); | |
7896 } | |
7897 | |
7898 | |
7899 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) { | |
7900 typedef v8::String::ExternalStringResource Resource; | |
7901 v->VisitExternalTwoByteString( | |
7902 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); | |
7903 } | |
7904 | |
7905 | |
7906 template<typename StaticVisitor> | |
7907 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() { | |
7908 typedef v8::String::ExternalStringResource Resource; | |
7909 StaticVisitor::VisitExternalTwoByteString( | |
7910 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); | |
7911 } | |
7912 | |
7913 | |
7914 void BodyDescriptorBase::IterateBodyImpl(HeapObject* obj, int start_offset, | |
7915 int end_offset, ObjectVisitor* v) { | |
7916 if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) { | |
7917 IteratePointers(obj, start_offset, end_offset, v); | |
7918 } else { | |
7919 DCHECK(FLAG_unbox_double_fields); | |
7920 DCHECK(IsAligned(start_offset, kPointerSize) && | |
7921 IsAligned(end_offset, kPointerSize)); | |
7922 | |
7923 LayoutDescriptorHelper helper(obj->map()); | |
7924 DCHECK(!helper.all_fields_tagged()); | |
7925 for (int offset = start_offset; offset < end_offset;) { | |
7926 int end_of_region_offset; | |
7927 if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) { | |
7928 IteratePointers(obj, offset, end_of_region_offset, v); | |
7929 } | |
7930 offset = end_of_region_offset; | |
7931 } | |
7932 } | |
7933 } | |
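IterateBodyImpl above visits only the tagged regions of an object body: when unboxed double fields may be present, the layout descriptor is consulted to split the body into alternating tagged and raw regions, and the visitor runs over the tagged ones only. Here is a standalone sketch of that region-splitting loop, driven by a simple per-field bitmap instead of V8's LayoutDescriptorHelper; FakeLayout and its IsTagged signature are modeled on the helper but invented for this illustration.

#include <cstdio>
#include <vector>

// Invented stand-in for LayoutDescriptorHelper: answers whether the field at
// a given pointer-sized offset holds a tagged pointer or a raw double.
struct FakeLayout {
  std::vector<bool> tagged;  // one entry per pointer-sized field

  // Like helper.IsTagged(offset, end, &end_of_region): reports whether the
  // field at `offset` is tagged and how far the homogeneous region extends.
  bool IsTagged(int offset, int end_offset, int* end_of_region_offset) const {
    const int kPointerSize = 8;  // invented for the sketch
    int i = offset / kPointerSize;
    bool is_tagged = tagged[i];
    int j = i;
    while (j < static_cast<int>(tagged.size()) && tagged[j] == is_tagged &&
           j * kPointerSize < end_offset) {
      ++j;
    }
    *end_of_region_offset = j * kPointerSize;
    return is_tagged;
  }
};

int main() {
  // Fields: [tagged, tagged, raw double, tagged] -> visit [0,16) and [24,32).
  FakeLayout layout{{true, true, false, true}};
  int start = 0, end = 32;
  for (int offset = start; offset < end;) {
    int end_of_region;
    if (layout.IsTagged(offset, end, &end_of_region)) {
      std::printf("visit pointers in [%d, %d)\n", offset, end_of_region);
    }
    offset = end_of_region;
  }
}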
7934 | |
7935 | |
7936 template <typename StaticVisitor> | |
7937 void BodyDescriptorBase::IterateBodyImpl(Heap* heap, HeapObject* obj, | |
7938 int start_offset, int end_offset) { | |
7939 if (!FLAG_unbox_double_fields || obj->map()->HasFastPointerLayout()) { | |
7940 IteratePointers<StaticVisitor>(heap, obj, start_offset, end_offset); | |
7941 } else { | |
7942 DCHECK(FLAG_unbox_double_fields); | |
7943 DCHECK(IsAligned(start_offset, kPointerSize) && | |
7944 IsAligned(end_offset, kPointerSize)); | |
7945 | |
7946 LayoutDescriptorHelper helper(obj->map()); | |
7947 DCHECK(!helper.all_fields_tagged()); | |
7948 for (int offset = start_offset; offset < end_offset;) { | |
7949 int end_of_region_offset; | |
7950 if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) { | |
7951 IteratePointers<StaticVisitor>(heap, obj, offset, end_of_region_offset); | |
7952 } | |
7953 offset = end_of_region_offset; | |
7954 } | |
7955 } | |
7956 } | |
7957 | |
7958 | |
7959 void BodyDescriptorBase::IteratePointers(HeapObject* obj, int start_offset, | |
7960 int end_offset, ObjectVisitor* v) { | |
7961 v->VisitPointers(HeapObject::RawField(obj, start_offset), | |
7962 HeapObject::RawField(obj, end_offset)); | |
7963 } | |
7964 | |
7965 | |
7966 template <typename StaticVisitor> | |
7967 void BodyDescriptorBase::IteratePointers(Heap* heap, HeapObject* obj, | |
7968 int start_offset, int end_offset) { | |
7969 StaticVisitor::VisitPointers(heap, obj, | |
7970 HeapObject::RawField(obj, start_offset), | |
7971 HeapObject::RawField(obj, end_offset)); | |
7972 } | |
7973 | |
7974 | |
7975 // Iterates the function object according to the visiting policy. | |
7976 template <JSFunction::BodyVisitingPolicy body_visiting_policy> | |
7977 class JSFunction::BodyDescriptorImpl : public BodyDescriptorBase { | |
7978 public: | |
7979 STATIC_ASSERT(kNonWeakFieldsEndOffset == kCodeEntryOffset); | |
7980 STATIC_ASSERT(kCodeEntryOffset + kPointerSize == kNextFunctionLinkOffset); | |
7981 STATIC_ASSERT(kNextFunctionLinkOffset + kPointerSize == kSize); | |
7982 | |
7983 static inline void IterateBody(HeapObject* obj, int object_size, | |
7984 ObjectVisitor* v) { | |
7985 IteratePointers(obj, kPropertiesOffset, kNonWeakFieldsEndOffset, v); | |
7986 | |
7987 if (body_visiting_policy & kVisitCodeEntry) { | |
7988 v->VisitCodeEntry(obj->address() + kCodeEntryOffset); | |
7989 } | |
7990 | |
7991 if (body_visiting_policy & kVisitNextFunction) { | |
7992 IteratePointers(obj, kNextFunctionLinkOffset, kSize, v); | |
7993 } | |
7994 | |
7995 // TODO(ishell): v8:4531, fix when JFunctions are allowed to have in-object | |
7996 // properties | |
7997 // IterateBodyImpl(obj, kSize, object_size, v); | |
7998 } | |
7999 | |
8000 template <typename StaticVisitor> | |
8001 static inline void IterateBody(HeapObject* obj, int object_size) { | |
8002 Heap* heap = obj->GetHeap(); | |
8003 IteratePointers<StaticVisitor>(heap, obj, kPropertiesOffset, | |
8004 kNonWeakFieldsEndOffset); | |
8005 | |
8006 if (body_visiting_policy & kVisitCodeEntry) { | |
8007 StaticVisitor::VisitCodeEntry(heap, obj, | |
8008 obj->address() + kCodeEntryOffset); | |
8009 } | |
8010 | |
8011 if (body_visiting_policy & kVisitNextFunction) { | |
8012 IteratePointers<StaticVisitor>(heap, obj, kNextFunctionLinkOffset, kSize); | |
8013 } | |
8014 | |
8015 // TODO(ishell): v8:4531, fix when JFunctions are allowed to have in-object | |
8016 // properties | |
8017 // IterateBodyImpl<StaticVisitor>(heap, obj, kSize, object_size); | |
8018 } | |
8019 | |
8020 static inline int SizeOf(Map* map, HeapObject* object) { | |
8021 // TODO(ishell): v8:4531, fix when JFunctions are allowed to have in-object | |
8022 // properties | |
8023 return JSFunction::kSize; | |
8024 } | |
8025 }; | |
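BodyDescriptorImpl above decides at compile time which of the weak-ish JSFunction fields (the code entry and the next-function link) a visitor touches, by testing bits of the body_visiting_policy template parameter. Below is a minimal sketch of that compile-time policy-flag pattern; all names are invented for the illustration.

#include <cstdio>

enum VisitingPolicy {
  kVisitNothing = 0,
  kVisitCodeEntry = 1 << 0,
  kVisitNextFunction = 1 << 1,
  kVisitEverything = kVisitCodeEntry | kVisitNextFunction,
};

template <int policy>
struct BodyVisitor {
  static void IterateBody() {
    std::printf("strong fields\n");  // always visited
    // The policy bits are compile-time constants, so untaken branches can be
    // removed entirely by the compiler.
    if (policy & kVisitCodeEntry) std::printf("code entry\n");
    if (policy & kVisitNextFunction) std::printf("next function link\n");
  }
};

int main() {
  BodyVisitor<kVisitNothing>::IterateBody();     // strong fields only
  BodyVisitor<kVisitEverything>::IterateBody();  // all three lines
}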
8026 | |
8027 | |
8028 template<class Derived, class TableType> | 7799 template<class Derived, class TableType> |
8029 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() { | 7800 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() { |
8030 TableType* table(TableType::cast(this->table())); | 7801 TableType* table(TableType::cast(this->table())); |
8031 int index = Smi::cast(this->index())->value(); | 7802 int index = Smi::cast(this->index())->value(); |
8032 Object* key = table->KeyAt(index); | 7803 Object* key = table->KeyAt(index); |
8033 DCHECK(!key->IsTheHole()); | 7804 DCHECK(!key->IsTheHole()); |
8034 return key; | 7805 return key; |
8035 } | 7806 } |
8036 | 7807 |
8037 | 7808 |
(...skipping 113 matching lines...)
8151 #undef WRITE_INT64_FIELD | 7922 #undef WRITE_INT64_FIELD |
8152 #undef READ_BYTE_FIELD | 7923 #undef READ_BYTE_FIELD |
8153 #undef WRITE_BYTE_FIELD | 7924 #undef WRITE_BYTE_FIELD |
8154 #undef NOBARRIER_READ_BYTE_FIELD | 7925 #undef NOBARRIER_READ_BYTE_FIELD |
8155 #undef NOBARRIER_WRITE_BYTE_FIELD | 7926 #undef NOBARRIER_WRITE_BYTE_FIELD |
8156 | 7927 |
8157 } // namespace internal | 7928 } // namespace internal |
8158 } // namespace v8 | 7929 } // namespace v8 |
8159 | 7930 |
8160 #endif // V8_OBJECTS_INL_H_ | 7931 #endif // V8_OBJECTS_INL_H_ |