Chromium Code Reviews

Side by Side Diff: src/objects-inl.h

Issue 1286403002: Make object.h usable without object-inl.h header. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix typo. Created 5 years, 4 months ago
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 // 4 //
5 // Review notes: 5 // Review notes:
6 // 6 //
7 // - The use of macros in these inline functions may seem superfluous 7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal 8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep. 9 // code. gcc is not happy when attempting to inline too deep.
10 // 10 //
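To make the note above concrete, here is a minimal, self-contained sketch of the accessor-macro pattern it refers to. The struct, field offsets, and READ/WRITE helpers below are invented for illustration and are not the actual V8 macros; the point is only that one macro stamps out identical one-line getter/setter pairs that the compiler can keep small and inline.

#include <cstdio>

// Hypothetical raw-field helpers, standing in for V8's READ_*/WRITE_* macros.
#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(reinterpret_cast<const char*>(p) + (offset)))
#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(reinterpret_cast<char*>(p) + (offset)) = (value))

// One macro generates a matching getter/setter pair per field.
#define INT_ACCESSORS(holder, name, offset)                           \
  int holder::name() const { return READ_INT_FIELD(this, offset); }   \
  void holder::set_##name(int v) { WRITE_INT_FIELD(this, offset, v); }

struct Box {
  int raw[2];  // backing storage; fields live at byte offsets 0 and sizeof(int)
  int width() const;
  void set_width(int value);
  int height() const;
  void set_height(int value);
};

INT_ACCESSORS(Box, width, 0)
INT_ACCESSORS(Box, height, sizeof(int))

int main() {
  Box b{};
  b.set_width(3);
  b.set_height(4);
  std::printf("%d x %d\n", b.width(), b.height());
  return 0;
}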
(...skipping 1089 matching lines...)
1100 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value()); 1100 return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
1101 } 1101 }
1102 1102
1103 1103
1104 bool Object::IsMinusZero() const { 1104 bool Object::IsMinusZero() const {
1105 return this->IsHeapNumber() && 1105 return this->IsHeapNumber() &&
1106 i::IsMinusZero(HeapNumber::cast(this)->value()); 1106 i::IsMinusZero(HeapNumber::cast(this)->value());
1107 } 1107 }
1108 1108
1109 1109
1110 Representation Object::OptimalRepresentation() {
1111 if (!FLAG_track_fields) return Representation::Tagged();
1112 if (IsSmi()) {
1113 return Representation::Smi();
1114 } else if (FLAG_track_double_fields && IsHeapNumber()) {
1115 return Representation::Double();
1116 } else if (FLAG_track_computed_fields && IsUninitialized()) {
1117 return Representation::None();
1118 } else if (FLAG_track_heap_object_fields) {
1119 DCHECK(IsHeapObject());
1120 return Representation::HeapObject();
1121 } else {
1122 return Representation::Tagged();
1123 }
1124 }
1125
1126
1127 ElementsKind Object::OptimalElementsKind() {
1128 if (IsSmi()) return FAST_SMI_ELEMENTS;
1129 if (IsNumber()) return FAST_DOUBLE_ELEMENTS;
1130 return FAST_ELEMENTS;
1131 }
1132
1133
1134 bool Object::FitsRepresentation(Representation representation) {
1135 if (FLAG_track_fields && representation.IsNone()) {
1136 return false;
1137 } else if (FLAG_track_fields && representation.IsSmi()) {
1138 return IsSmi();
1139 } else if (FLAG_track_double_fields && representation.IsDouble()) {
1140 return IsMutableHeapNumber() || IsNumber();
1141 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
1142 return IsHeapObject();
1143 }
1144 return true;
1145 }
1146
1147
1110 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate, 1148 MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
1111 Handle<Object> object) { 1149 Handle<Object> object) {
1112 return ToObject( 1150 return ToObject(
1113 isolate, object, handle(isolate->context()->native_context(), isolate)); 1151 isolate, object, handle(isolate->context()->native_context(), isolate));
1114 } 1152 }
1115 1153
1116 1154
1117 bool Object::HasSpecificClassOf(String* name) { 1155 bool Object::HasSpecificClassOf(String* name) {
1118 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name); 1156 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1119 } 1157 }
(...skipping 165 matching lines...)
1285 #define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \ 1323 #define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value) \
1286 base::NoBarrier_Store( \ 1324 base::NoBarrier_Store( \
1287 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \ 1325 reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
1288 static_cast<base::Atomic8>(value)); 1326 static_cast<base::Atomic8>(value));
1289 1327
1290 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) { 1328 Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
1291 return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset)); 1329 return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
1292 } 1330 }
1293 1331
1294 1332
1295 int Smi::value() const {
1296 return Internals::SmiValue(this);
1297 }
1298
1299
1300 Smi* Smi::FromInt(int value) {
1301 DCHECK(Smi::IsValid(value));
1302 return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
1303 }
1304
1305
1306 Smi* Smi::FromIntptr(intptr_t value) {
1307 DCHECK(Smi::IsValid(value));
1308 int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
1309 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
1310 }
1311
1312
1313 bool Smi::IsValid(intptr_t value) {
1314 bool result = Internals::IsValidSmi(value);
1315 DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
1316 return result;
1317 }
1318
1319
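The Smi helpers above encode an integer by shifting it up and tagging the low bits. A standalone sketch of that round trip follows; the kSmiTag/kSmiTagSize/kSmiShiftSize values are assumed for a 32-bit-style layout (the real constants are platform-dependent and defined elsewhere in V8).

#include <cassert>
#include <cstdint>

constexpr intptr_t kSmiTag = 0;   // assumed tag value for illustration
constexpr int kSmiTagSize = 1;    // assumed: one tag bit
constexpr int kSmiShiftSize = 0;  // assumed: no extra shift
constexpr int kSmiShiftBits = kSmiTagSize + kSmiShiftSize;

// Encode: shift the payload up and OR in the tag, as Smi::FromIntptr does.
intptr_t ToSmiWord(intptr_t value) {
  return static_cast<intptr_t>(static_cast<uintptr_t>(value) << kSmiShiftBits) |
         kSmiTag;
}

// Decode: arithmetic shift back down recovers the signed payload.
intptr_t SmiValue(intptr_t word) { return word >> kSmiShiftBits; }

int main() {
  const intptr_t samples[] = {0, 1, -1, 42, -12345};
  for (intptr_t v : samples) {
    intptr_t w = ToSmiWord(v);
    assert((w & ((1 << kSmiTagSize) - 1)) == kSmiTag);  // low bit is the tag
    assert(SmiValue(w) == v);                           // value round-trips
  }
  return 0;
}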
1320 MapWord MapWord::FromMap(const Map* map) { 1333 MapWord MapWord::FromMap(const Map* map) {
1321 return MapWord(reinterpret_cast<uintptr_t>(map)); 1334 return MapWord(reinterpret_cast<uintptr_t>(map));
1322 } 1335 }
1323 1336
1324 1337
1325 Map* MapWord::ToMap() { 1338 Map* MapWord::ToMap() {
1326 return reinterpret_cast<Map*>(value_); 1339 return reinterpret_cast<Map*>(value_);
1327 } 1340 }
1328 1341
1329 1342
(...skipping 103 matching lines...)
1433 reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset))); 1446 reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
1434 } 1447 }
1435 1448
1436 1449
1437 void HeapObject::synchronized_set_map_word(MapWord map_word) { 1450 void HeapObject::synchronized_set_map_word(MapWord map_word) {
1438 RELEASE_WRITE_FIELD( 1451 RELEASE_WRITE_FIELD(
1439 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_)); 1452 this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
1440 } 1453 }
1441 1454
1442 1455
1443 HeapObject* HeapObject::FromAddress(Address address) {
1444 DCHECK_TAG_ALIGNED(address);
1445 return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
1446 }
1447
1448
1449 Address HeapObject::address() {
1450 return reinterpret_cast<Address>(this) - kHeapObjectTag;
1451 }
1452
1453
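FromAddress and address() above simply add and subtract the heap-object tag from an aligned pointer. A tiny self-contained sketch of that round trip; the tag value 1 is an assumption made only so the example stands alone.

#include <cassert>
#include <cstdint>

constexpr uintptr_t kHeapObjectTag = 1;  // assumed low-bit tag for illustration

// Tag an aligned address so the low bit marks "heap pointer, not Smi".
uintptr_t Tag(uintptr_t address) { return address + kHeapObjectTag; }

// Strip the tag again to get back the real object start address.
uintptr_t Untag(uintptr_t tagged) { return tagged - kHeapObjectTag; }

int main() {
  alignas(8) static char object[16];  // stand-in for a heap object
  uintptr_t address = reinterpret_cast<uintptr_t>(object);
  uintptr_t tagged = Tag(address);
  assert(tagged & kHeapObjectTag);    // tag bit is set on the tagged pointer
  assert(Untag(tagged) == address);   // round-trips back to the raw address
  return 0;
}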
1454 int HeapObject::Size() { 1456 int HeapObject::Size() {
1455 return SizeFromMap(map()); 1457 return SizeFromMap(map());
1456 } 1458 }
1457 1459
1458 1460
1459 HeapObjectContents HeapObject::ContentType() { 1461 HeapObjectContents HeapObject::ContentType() {
1460 InstanceType type = map()->instance_type(); 1462 InstanceType type = map()->instance_type();
1461 if (type <= LAST_NAME_TYPE) { 1463 if (type <= LAST_NAME_TYPE) {
1462 if (type == SYMBOL_TYPE) { 1464 if (type == SYMBOL_TYPE) {
1463 return HeapObjectContents::kTaggedValues; 1465 return HeapObjectContents::kTaggedValues;
(...skipping 274 matching lines...)
1738 set_transition_info(Smi::FromInt(0)); 1740 set_transition_info(Smi::FromInt(0));
1739 SetElementsKind(GetInitialFastElementsKind()); 1741 SetElementsKind(GetInitialFastElementsKind());
1740 set_nested_site(Smi::FromInt(0)); 1742 set_nested_site(Smi::FromInt(0));
1741 set_pretenure_data(Smi::FromInt(0)); 1743 set_pretenure_data(Smi::FromInt(0));
1742 set_pretenure_create_count(Smi::FromInt(0)); 1744 set_pretenure_create_count(Smi::FromInt(0));
1743 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()), 1745 set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
1744 SKIP_WRITE_BARRIER); 1746 SKIP_WRITE_BARRIER);
1745 } 1747 }
1746 1748
1747 1749
1750 bool AllocationSite::IsZombie() { return pretenure_decision() == kZombie; }
1751
1752
1753 bool AllocationSite::IsMaybeTenure() {
1754 return pretenure_decision() == kMaybeTenure;
1755 }
1756
1757
1758 bool AllocationSite::PretenuringDecisionMade() {
1759 return pretenure_decision() != kUndecided;
1760 }
1761
1762
1748 void AllocationSite::MarkZombie() { 1763 void AllocationSite::MarkZombie() {
1749 DCHECK(!IsZombie()); 1764 DCHECK(!IsZombie());
1750 Initialize(); 1765 Initialize();
1751 set_pretenure_decision(kZombie); 1766 set_pretenure_decision(kZombie);
1752 } 1767 }
1753 1768
1754 1769
1770 ElementsKind AllocationSite::GetElementsKind() {
1771 DCHECK(!SitePointsToLiteral());
1772 int value = Smi::cast(transition_info())->value();
1773 return ElementsKindBits::decode(value);
1774 }
1775
1776
1777 void AllocationSite::SetElementsKind(ElementsKind kind) {
1778 int value = Smi::cast(transition_info())->value();
1779 set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
1780 SKIP_WRITE_BARRIER);
1781 }
1782
1783
1784 bool AllocationSite::CanInlineCall() {
1785 int value = Smi::cast(transition_info())->value();
1786 return DoNotInlineBit::decode(value) == 0;
1787 }
1788
1789
1790 void AllocationSite::SetDoNotInlineCall() {
1791 int value = Smi::cast(transition_info())->value();
1792 set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
1793 SKIP_WRITE_BARRIER);
1794 }
1795
1796
1797 bool AllocationSite::SitePointsToLiteral() {
1798 // If transition_info is a smi, then it represents an ElementsKind
1799 // for a constructed array. Otherwise, it must be a boilerplate
1800 // for an object or array literal.
1801 return transition_info()->IsJSArray() || transition_info()->IsJSObject();
1802 }
1803
1804
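The accessors above pack several small values (the elements kind and the do-not-inline flag) into a single Smi via bit fields. A standalone sketch of that encode/update/decode pattern follows; the BitField stand-in and the field widths are chosen for illustration and are not V8's actual definitions.

#include <cassert>
#include <cstdint>

// Minimal stand-in for V8's BitField<T, shift, size> helper.
template <typename T, int kShift, int kSize>
struct BitField {
  static constexpr uint32_t kMask = ((1u << kSize) - 1u) << kShift;
  static constexpr uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  static constexpr T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> kShift);
  }
  static constexpr uint32_t update(uint32_t word, T value) {
    return (word & ~kMask) | encode(value);
  }
};

// Illustrative layout: 5 bits of elements kind, then one "do not inline" bit.
using ElementsKindBits = BitField<int, 0, 5>;
using DoNotInlineBit = BitField<bool, 5, 1>;

int main() {
  uint32_t info = 0;
  info = ElementsKindBits::update(info, 3);     // store some elements kind
  info = DoNotInlineBit::update(info, true);    // set the flag bit
  assert(ElementsKindBits::decode(info) == 3);  // fields stay independent
  assert(DoNotInlineBit::decode(info) == true);
  return 0;
}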
1755 // Heuristic: We only need to create allocation site info if the boilerplate 1805 // Heuristic: We only need to create allocation site info if the boilerplate
1756 // elements kind is the initial elements kind. 1806 // elements kind is the initial elements kind.
1757 AllocationSiteMode AllocationSite::GetMode( 1807 AllocationSiteMode AllocationSite::GetMode(
1758 ElementsKind boilerplate_elements_kind) { 1808 ElementsKind boilerplate_elements_kind) {
1759 if (FLAG_pretenuring_call_new || 1809 if (FLAG_pretenuring_call_new ||
1760 IsFastSmiElementsKind(boilerplate_elements_kind)) { 1810 IsFastSmiElementsKind(boilerplate_elements_kind)) {
1761 return TRACK_ALLOCATION_SITE; 1811 return TRACK_ALLOCATION_SITE;
1762 } 1812 }
1763 1813
1764 return DONT_TRACK_ALLOCATION_SITE; 1814 return DONT_TRACK_ALLOCATION_SITE;
(...skipping 15 matching lines...)
1780 inline bool AllocationSite::CanTrack(InstanceType type) { 1830 inline bool AllocationSite::CanTrack(InstanceType type) {
1781 if (FLAG_allocation_site_pretenuring) { 1831 if (FLAG_allocation_site_pretenuring) {
1782 return type == JS_ARRAY_TYPE || 1832 return type == JS_ARRAY_TYPE ||
1783 type == JS_OBJECT_TYPE || 1833 type == JS_OBJECT_TYPE ||
1784 type < FIRST_NONSTRING_TYPE; 1834 type < FIRST_NONSTRING_TYPE;
1785 } 1835 }
1786 return type == JS_ARRAY_TYPE; 1836 return type == JS_ARRAY_TYPE;
1787 } 1837 }
1788 1838
1789 1839
1840 AllocationSite::PretenureDecision AllocationSite::pretenure_decision() {
1841 int value = pretenure_data()->value();
1842 return PretenureDecisionBits::decode(value);
1843 }
1844
1845
1846 void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
1847 int value = pretenure_data()->value();
1848 set_pretenure_data(
1849 Smi::FromInt(PretenureDecisionBits::update(value, decision)),
1850 SKIP_WRITE_BARRIER);
1851 }
1852
1853
1854 bool AllocationSite::deopt_dependent_code() {
1855 int value = pretenure_data()->value();
1856 return DeoptDependentCodeBit::decode(value);
1857 }
1858
1859
1860 void AllocationSite::set_deopt_dependent_code(bool deopt) {
1861 int value = pretenure_data()->value();
1862 set_pretenure_data(Smi::FromInt(DeoptDependentCodeBit::update(value, deopt)),
1863 SKIP_WRITE_BARRIER);
1864 }
1865
1866
1867 int AllocationSite::memento_found_count() {
1868 int value = pretenure_data()->value();
1869 return MementoFoundCountBits::decode(value);
1870 }
1871
1872
1790 inline void AllocationSite::set_memento_found_count(int count) { 1873 inline void AllocationSite::set_memento_found_count(int count) {
1791 int value = pretenure_data()->value(); 1874 int value = pretenure_data()->value();
1792 // Verify that we can count more mementos than we can possibly find in one 1875 // Verify that we can count more mementos than we can possibly find in one
1793 // new space collection. 1876 // new space collection.
1794 DCHECK((GetHeap()->MaxSemiSpaceSize() / 1877 DCHECK((GetHeap()->MaxSemiSpaceSize() /
1795 (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize + 1878 (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
1796 AllocationMemento::kSize)) < MementoFoundCountBits::kMax); 1879 AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
1797 DCHECK(count < MementoFoundCountBits::kMax); 1880 DCHECK(count < MementoFoundCountBits::kMax);
1798 set_pretenure_data( 1881 set_pretenure_data(
1799 Smi::FromInt(MementoFoundCountBits::update(value, count)), 1882 Smi::FromInt(MementoFoundCountBits::update(value, count)),
1800 SKIP_WRITE_BARRIER); 1883 SKIP_WRITE_BARRIER);
1801 } 1884 }
1802 1885
1886
1887 int AllocationSite::memento_create_count() {
1888 return pretenure_create_count()->value();
1889 }
1890
1891
1892 void AllocationSite::set_memento_create_count(int count) {
1893 set_pretenure_create_count(Smi::FromInt(count), SKIP_WRITE_BARRIER);
1894 }
1895
1896
1803 inline bool AllocationSite::IncrementMementoFoundCount() { 1897 inline bool AllocationSite::IncrementMementoFoundCount() {
1804 if (IsZombie()) return false; 1898 if (IsZombie()) return false;
1805 1899
1806 int value = memento_found_count(); 1900 int value = memento_found_count();
1807 set_memento_found_count(value + 1); 1901 set_memento_found_count(value + 1);
1808 return memento_found_count() == kPretenureMinimumCreated; 1902 return memento_found_count() == kPretenureMinimumCreated;
1809 } 1903 }
1810 1904
1811 1905
1812 inline void AllocationSite::IncrementMementoCreateCount() { 1906 inline void AllocationSite::IncrementMementoCreateCount() {
(...skipping 53 matching lines...)
1866 PretenureDecisionName(pretenure_decision())); 1960 PretenureDecisionName(pretenure_decision()));
1867 } 1961 }
1868 1962
1869 // Clear feedback calculation fields until the next gc. 1963 // Clear feedback calculation fields until the next gc.
1870 set_memento_found_count(0); 1964 set_memento_found_count(0);
1871 set_memento_create_count(0); 1965 set_memento_create_count(0);
1872 return deopt; 1966 return deopt;
1873 } 1967 }
1874 1968
1875 1969
1970 bool AllocationMemento::IsValid() {
1971 return allocation_site()->IsAllocationSite() &&
1972 !AllocationSite::cast(allocation_site())->IsZombie();
1973 }
1974
1975
1976 AllocationSite* AllocationMemento::GetAllocationSite() {
1977 DCHECK(IsValid());
1978 return AllocationSite::cast(allocation_site());
1979 }
1980
1981
1876 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) { 1982 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1877 JSObject::ValidateElements(object); 1983 JSObject::ValidateElements(object);
1878 ElementsKind elements_kind = object->map()->elements_kind(); 1984 ElementsKind elements_kind = object->map()->elements_kind();
1879 if (!IsFastObjectElementsKind(elements_kind)) { 1985 if (!IsFastObjectElementsKind(elements_kind)) {
1880 if (IsFastHoleyElementsKind(elements_kind)) { 1986 if (IsFastHoleyElementsKind(elements_kind)) {
1881 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS); 1987 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1882 } else { 1988 } else {
1883 TransitionElementsKind(object, FAST_ELEMENTS); 1989 TransitionElementsKind(object, FAST_ELEMENTS);
1884 } 1990 }
1885 } 1991 }
(...skipping 122 matching lines...)
2008 void Oddball::set_kind(byte value) { 2114 void Oddball::set_kind(byte value) {
2009 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value)); 2115 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
2010 } 2116 }
2011 2117
2012 2118
2013 ACCESSORS(Cell, value, Object, kValueOffset) 2119 ACCESSORS(Cell, value, Object, kValueOffset)
2014 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset) 2120 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
2015 ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset) 2121 ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
2016 ACCESSORS(PropertyCell, value, Object, kValueOffset) 2122 ACCESSORS(PropertyCell, value, Object, kValueOffset)
2017 2123
2124
2125 PropertyDetails PropertyCell::property_details() {
2126 return PropertyDetails(Smi::cast(property_details_raw()));
2127 }
2128
2129
2130 void PropertyCell::set_property_details(PropertyDetails details) {
2131 set_property_details_raw(details.AsSmi());
2132 }
2133
2134
2018 Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); } 2135 Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }
2019 2136
2020 2137
2021 void WeakCell::clear() { 2138 void WeakCell::clear() {
2022 DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT); 2139 DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
2023 WRITE_FIELD(this, kValueOffset, Smi::FromInt(0)); 2140 WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
2024 } 2141 }
2025 2142
2026 2143
2027 void WeakCell::initialize(HeapObject* val) { 2144 void WeakCell::initialize(HeapObject* val) {
(...skipping 593 matching lines...)
2621 set_the_hole(i); 2738 set_the_hole(i);
2622 } 2739 }
2623 } 2740 }
2624 2741
2625 2742
2626 Object** FixedArray::data_start() { 2743 Object** FixedArray::data_start() {
2627 return HeapObject::RawField(this, kHeaderSize); 2744 return HeapObject::RawField(this, kHeaderSize);
2628 } 2745 }
2629 2746
2630 2747
2748 Object** FixedArray::RawFieldOfElementAt(int index) {
2749 return HeapObject::RawField(this, OffsetOfElementAt(index));
2750 }
2751
2752
2631 bool DescriptorArray::IsEmpty() { 2753 bool DescriptorArray::IsEmpty() {
2632 DCHECK(length() >= kFirstIndex || 2754 DCHECK(length() >= kFirstIndex ||
2633 this == GetHeap()->empty_descriptor_array()); 2755 this == GetHeap()->empty_descriptor_array());
2634 return length() < kFirstIndex; 2756 return length() < kFirstIndex;
2635 } 2757 }
2636 2758
2637 2759
2760 int DescriptorArray::number_of_descriptors() {
2761 DCHECK(length() >= kFirstIndex || IsEmpty());
2762 int len = length();
2763 return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
2764 }
2765
2766
2767 int DescriptorArray::number_of_descriptors_storage() {
2768 int len = length();
2769 return len == 0 ? 0 : (len - kFirstIndex) / kDescriptorSize;
2770 }
2771
2772
2773 int DescriptorArray::NumberOfSlackDescriptors() {
2774 return number_of_descriptors_storage() - number_of_descriptors();
2775 }
2776
2777
2638 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) { 2778 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
2639 WRITE_FIELD( 2779 WRITE_FIELD(
2640 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors)); 2780 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
2641 } 2781 }
2642 2782
2643 2783
2784 inline int DescriptorArray::number_of_entries() {
2785 return number_of_descriptors();
2786 }
2787
2788
2789 bool DescriptorArray::HasEnumCache() {
2790 return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
2791 }
2792
2793
2794 void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
2795 set(kEnumCacheIndex, array->get(kEnumCacheIndex));
2796 }
2797
2798
2799 FixedArray* DescriptorArray::GetEnumCache() {
2800 DCHECK(HasEnumCache());
2801 FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2802 return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
2803 }
2804
2805
2806 bool DescriptorArray::HasEnumIndicesCache() {
2807 if (IsEmpty()) return false;
2808 Object* object = get(kEnumCacheIndex);
2809 if (object->IsSmi()) return false;
2810 FixedArray* bridge = FixedArray::cast(object);
2811 return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
2812 }
2813
2814
2815 FixedArray* DescriptorArray::GetEnumIndicesCache() {
2816 DCHECK(HasEnumIndicesCache());
2817 FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2818 return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
2819 }
2820
2821
2822 Object** DescriptorArray::GetEnumCacheSlot() {
2823 DCHECK(HasEnumCache());
2824 return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
2825 kEnumCacheOffset);
2826 }
2827
2828
2644 // Perform a binary search in a fixed array. Low and high are entry indices. If 2829 // Perform a binary search in a fixed array. Low and high are entry indices. If
2645 // there are three entries in this array it should be called with low=0 and 2830 // there are three entries in this array it should be called with low=0 and
2646 // high=2. 2831 // high=2.
2647 template <SearchMode search_mode, typename T> 2832 template <SearchMode search_mode, typename T>
2648 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries, 2833 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
2649 int* out_insertion_index) { 2834 int* out_insertion_index) {
2650 DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL); 2835 DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
2651 uint32_t hash = name->Hash(); 2836 uint32_t hash = name->Hash();
2652 int limit = high; 2837 int limit = high;
2653 2838
(...skipping 115 matching lines...)
2769 2954
2770 return number; 2955 return number;
2771 } 2956 }
2772 2957
2773 2958
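The body of BinarySearch above is elided, but its contract is stated in the comment: entries are sorted by name hash, and low/high are inclusive entry indices (three entries means low = 0, high = 2). Here is a self-contained sketch of a search with that shape, using a toy entry type rather than V8's descriptor arrays.

#include <cstdint>
#include <cstdio>
#include <vector>

// Toy entry: a name represented only by its precomputed hash.
struct Entry {
  uint32_t hash;
  int payload;
};

// Binary search with inclusive entry indices. Returns the index of the first
// entry whose hash is >= the probe hash, or high + 1 if every hash is smaller.
int BinarySearchByHash(const std::vector<Entry>& entries, uint32_t hash,
                       int low, int high) {
  while (low != high) {
    int mid = low + (high - low) / 2;
    if (entries[mid].hash < hash) {
      low = mid + 1;
    } else {
      high = mid;
    }
  }
  return entries[low].hash >= hash ? low : low + 1;
}

int main() {
  std::vector<Entry> entries = {{10, 0}, {20, 1}, {40, 2}};    // sorted by hash
  std::printf("%d\n", BinarySearchByHash(entries, 20, 0, 2));  // prints 1
  std::printf("%d\n", BinarySearchByHash(entries, 30, 0, 2));  // prints 2
  return 0;
}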
2774 PropertyDetails Map::GetLastDescriptorDetails() { 2959 PropertyDetails Map::GetLastDescriptorDetails() {
2775 return instance_descriptors()->GetDetails(LastAdded()); 2960 return instance_descriptors()->GetDetails(LastAdded());
2776 } 2961 }
2777 2962
2778 2963
2964 int Map::LastAdded() {
2965 int number_of_own_descriptors = NumberOfOwnDescriptors();
2966 DCHECK(number_of_own_descriptors > 0);
2967 return number_of_own_descriptors - 1;
2968 }
2969
2970
2971 int Map::NumberOfOwnDescriptors() {
2972 return NumberOfOwnDescriptorsBits::decode(bit_field3());
2973 }
2974
2975
2976 void Map::SetNumberOfOwnDescriptors(int number) {
2977 DCHECK(number <= instance_descriptors()->number_of_descriptors());
2978 set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
2979 }
2980
2981
2982 int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }
2983
2984
2985 void Map::SetEnumLength(int length) {
2986 if (length != kInvalidEnumCacheSentinel) {
2987 DCHECK(length >= 0);
2988 DCHECK(length == 0 || instance_descriptors()->HasEnumCache());
2989 DCHECK(length <= NumberOfOwnDescriptors());
2990 }
2991 set_bit_field3(EnumLengthBits::update(bit_field3(), length));
2992 }
2993
2994
2779 FixedArrayBase* Map::GetInitialElements() { 2995 FixedArrayBase* Map::GetInitialElements() {
2780 if (has_fast_smi_or_object_elements() || 2996 if (has_fast_smi_or_object_elements() ||
2781 has_fast_double_elements()) { 2997 has_fast_double_elements()) {
2782 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array())); 2998 DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
2783 return GetHeap()->empty_fixed_array(); 2999 return GetHeap()->empty_fixed_array();
2784 } else if (has_fixed_typed_array_elements()) { 3000 } else if (has_fixed_typed_array_elements()) {
2785 FixedTypedArrayBase* empty_array = 3001 FixedTypedArrayBase* empty_array =
2786 GetHeap()->EmptyFixedTypedArrayForMap(this); 3002 GetHeap()->EmptyFixedTypedArrayForMap(this);
2787 DCHECK(!GetHeap()->InNewSpace(empty_array)); 3003 DCHECK(!GetHeap()->InNewSpace(empty_array));
2788 return empty_array; 3004 return empty_array;
(...skipping 187 matching lines...)
2976 DCHECK(!marking_->IsMarking() || 3192 DCHECK(!marking_->IsMarking() ||
2977 Marking::Color(array) == Marking::WHITE_OBJECT); 3193 Marking::Color(array) == Marking::WHITE_OBJECT);
2978 } 3194 }
2979 3195
2980 3196
2981 DescriptorArray::WhitenessWitness::~WhitenessWitness() { 3197 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
2982 marking_->LeaveNoMarkingScope(); 3198 marking_->LeaveNoMarkingScope();
2983 } 3199 }
2984 3200
2985 3201
3202 PropertyType DescriptorArray::Entry::type() { return descs_->GetType(index_); }
3203
3204
3205 Object* DescriptorArray::Entry::GetCallbackObject() {
3206 return descs_->GetValue(index_);
3207 }
3208
3209
3210 int HashTableBase::NumberOfElements() {
3211 return Smi::cast(get(kNumberOfElementsIndex))->value();
3212 }
3213
3214
3215 int HashTableBase::NumberOfDeletedElements() {
3216 return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
3217 }
3218
3219
3220 int HashTableBase::Capacity() {
3221 return Smi::cast(get(kCapacityIndex))->value();
3222 }
3223
3224
3225 void HashTableBase::ElementAdded() {
3226 SetNumberOfElements(NumberOfElements() + 1);
3227 }
3228
3229
3230 void HashTableBase::ElementRemoved() {
3231 SetNumberOfElements(NumberOfElements() - 1);
3232 SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
3233 }
3234
3235
3236 void HashTableBase::ElementsRemoved(int n) {
3237 SetNumberOfElements(NumberOfElements() - n);
3238 SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
3239 }
3240
3241
3242 // static
2986 int HashTableBase::ComputeCapacity(int at_least_space_for) { 3243 int HashTableBase::ComputeCapacity(int at_least_space_for) {
2987 const int kMinCapacity = 4; 3244 const int kMinCapacity = 4;
2988 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2); 3245 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
2989 return Max(capacity, kMinCapacity); 3246 return Max(capacity, kMinCapacity);
2990 } 3247 }
2991 3248
2992 3249
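ComputeCapacity above doubles the requested size, rounds up to a power of two, and clamps to a minimum of 4. A standalone sketch with a local round-up helper (not V8's base::bits implementation) and a few worked values:

#include <cassert>
#include <cstdint>

// Round a 32-bit value up to the next power of two (local helper for the
// sketch; V8 uses base::bits::RoundUpToPowerOfTwo32).
uint32_t RoundUpToPowerOfTwo32(uint32_t value) {
  if (value <= 1) return 1;
  --value;
  value |= value >> 1;
  value |= value >> 2;
  value |= value >> 4;
  value |= value >> 8;
  value |= value >> 16;
  return value + 1;
}

int ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 4;
  int capacity =
      static_cast<int>(RoundUpToPowerOfTwo32(at_least_space_for * 2));
  return capacity > kMinCapacity ? capacity : kMinCapacity;
}

int main() {
  assert(ComputeCapacity(0) == 4);    // clamped to the minimum capacity
  assert(ComputeCapacity(3) == 8);    // 3 * 2 = 6, rounded up to 8
  assert(ComputeCapacity(17) == 64);  // 17 * 2 = 34, rounded up to 64
  return 0;
}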
3250 bool HashTableBase::IsKey(Object* k) {
3251 return !k->IsTheHole() && !k->IsUndefined();
3252 }
3253
3254
3255 void HashTableBase::SetNumberOfElements(int nof) {
3256 set(kNumberOfElementsIndex, Smi::FromInt(nof));
3257 }
3258
3259
3260 void HashTableBase::SetNumberOfDeletedElements(int nod) {
3261 set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
3262 }
3263
3264
2993 template <typename Derived, typename Shape, typename Key> 3265 template <typename Derived, typename Shape, typename Key>
2994 int HashTable<Derived, Shape, Key>::FindEntry(Key key) { 3266 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
2995 return FindEntry(GetIsolate(), key); 3267 return FindEntry(GetIsolate(), key);
2996 } 3268 }
2997 3269
2998 3270
2999 template<typename Derived, typename Shape, typename Key> 3271 template<typename Derived, typename Shape, typename Key>
3000 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) { 3272 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3001 return FindEntry(isolate, key, HashTable::Hash(key)); 3273 return FindEntry(isolate, key, HashTable::Hash(key));
3002 } 3274 }
(...skipping 153 matching lines...)
3156 template <class Traits> 3428 template <class Traits>
3157 const FixedTypedArray<Traits>* 3429 const FixedTypedArray<Traits>*
3158 FixedTypedArray<Traits>::cast(const Object* object) { 3430 FixedTypedArray<Traits>::cast(const Object* object) {
3159 SLOW_DCHECK(object->IsHeapObject() && 3431 SLOW_DCHECK(object->IsHeapObject() &&
3160 HeapObject::cast(object)->map()->instance_type() == 3432 HeapObject::cast(object)->map()->instance_type() ==
3161 Traits::kInstanceType); 3433 Traits::kInstanceType);
3162 return reinterpret_cast<FixedTypedArray<Traits>*>(object); 3434 return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3163 } 3435 }
3164 3436
3165 3437
3438 #define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type) \
3439 type* DeoptimizationInputData::name() { \
3440 return type::cast(get(k##name##Index)); \
3441 } \
3442 void DeoptimizationInputData::Set##name(type* value) { \
3443 set(k##name##Index, value); \
3444 }
3445
3446 DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
3447 DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
3448 DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
3449 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi)
3450 DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
3451 DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
3452 DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
3453 DEFINE_DEOPT_ELEMENT_ACCESSORS(WeakCellCache, Object)
3454
3455 #undef DEFINE_DEOPT_ELEMENT_ACCESSORS
3456
3457
3458 #define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type) \
3459 type* DeoptimizationInputData::name(int i) { \
3460 return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
3461 } \
3462 void DeoptimizationInputData::Set##name(int i, type* value) { \
3463 set(IndexForEntry(i) + k##name##Offset, value); \
3464 }
3465
3466 DEFINE_DEOPT_ENTRY_ACCESSORS(AstIdRaw, Smi)
3467 DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
3468 DEFINE_DEOPT_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
3469 DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)
3470
3471 #undef DEFINE_DEOPT_ENTRY_ACCESSORS
3472
3473
3474 BailoutId DeoptimizationInputData::AstId(int i) {
3475 return BailoutId(AstIdRaw(i)->value());
3476 }
3477
3478
3479 void DeoptimizationInputData::SetAstId(int i, BailoutId value) {
3480 SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
3481 }
3482
3483
3484 int DeoptimizationInputData::DeoptCount() {
3485 return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
3486 }
3487
3488
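DeoptCount above implies the table layout: a fixed block of header slots (the per-table accessors defined earlier), followed by fixed-size per-entry records, so entry i starts at kFirstDeoptEntryIndex + i * kDeoptEntrySize. A sketch of that index arithmetic; the constants 8 and 4 below are assumptions made for the example, not the values from objects.h.

#include <cassert>
#include <vector>

// Illustrative constants; the real kFirstDeoptEntryIndex / kDeoptEntrySize
// come from the DeoptimizationInputData definition in objects.h.
constexpr int kFirstDeoptEntryIndex = 8;  // header slots before the entries
constexpr int kDeoptEntrySize = 4;        // AstId, TranslationIndex,
                                          // ArgumentsStackHeight, Pc

int IndexForEntry(int i) { return kFirstDeoptEntryIndex + i * kDeoptEntrySize; }

int DeoptCount(const std::vector<int>& slots) {
  return (static_cast<int>(slots.size()) - kFirstDeoptEntryIndex) /
         kDeoptEntrySize;
}

int main() {
  // A table with the header block plus two 4-slot entries.
  std::vector<int> slots(kFirstDeoptEntryIndex + 2 * kDeoptEntrySize, 0);
  assert(DeoptCount(slots) == 2);
  assert(IndexForEntry(0) == 8);   // first entry starts right after the header
  assert(IndexForEntry(1) == 12);  // next entry one record later
  return 0;
}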
3489 int DeoptimizationOutputData::DeoptPoints() { return length() / 2; }
3490
3491
3492 BailoutId DeoptimizationOutputData::AstId(int index) {
3493 return BailoutId(Smi::cast(get(index * 2))->value());
3494 }
3495
3496
3497 void DeoptimizationOutputData::SetAstId(int index, BailoutId id) {
3498 set(index * 2, Smi::FromInt(id.ToInt()));
3499 }
3500
3501
3502 Smi* DeoptimizationOutputData::PcAndState(int index) {
3503 return Smi::cast(get(1 + index * 2));
3504 }
3505
3506
3507 void DeoptimizationOutputData::SetPcAndState(int index, Smi* offset) {
3508 set(1 + index * 2, offset);
3509 }
3510
3511
3512 void HandlerTable::SetRangeStart(int index, int value) {
3513 set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
3514 }
3515
3516
3517 void HandlerTable::SetRangeEnd(int index, int value) {
3518 set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
3519 }
3520
3521
3522 void HandlerTable::SetRangeHandler(int index, int offset,
3523 CatchPrediction prediction) {
3524 int value = HandlerOffsetField::encode(offset) |
3525 HandlerPredictionField::encode(prediction);
3526 set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
3527 }
3528
3529
3530 void HandlerTable::SetRangeDepth(int index, int value) {
3531 set(index * kRangeEntrySize + kRangeDepthIndex, Smi::FromInt(value));
3532 }
3533
3534
3535 void HandlerTable::SetReturnOffset(int index, int value) {
3536 set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
3537 }
3538
3539
3540 void HandlerTable::SetReturnHandler(int index, int offset,
3541 CatchPrediction prediction) {
3542 int value = HandlerOffsetField::encode(offset) |
3543 HandlerPredictionField::encode(prediction);
3544 set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
3545 }
3546
3547
3166 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name) 3548 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
3167 STRUCT_LIST(MAKE_STRUCT_CAST) 3549 STRUCT_LIST(MAKE_STRUCT_CAST)
3168 #undef MAKE_STRUCT_CAST 3550 #undef MAKE_STRUCT_CAST
3169 3551
3170 3552
3171 template <typename Derived, typename Shape, typename Key> 3553 template <typename Derived, typename Shape, typename Key>
3172 HashTable<Derived, Shape, Key>* 3554 HashTable<Derived, Shape, Key>*
3173 HashTable<Derived, Shape, Key>::cast(Object* obj) { 3555 HashTable<Derived, Shape, Key>::cast(Object* obj) {
3174 SLOW_DCHECK(obj->IsHashTable()); 3556 SLOW_DCHECK(obj->IsHashTable());
3175 return reinterpret_cast<HashTable*>(obj); 3557 return reinterpret_cast<HashTable*>(obj);
(...skipping 11 matching lines...)
3187 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset) 3569 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3188 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset) 3570 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
3189 3571
3190 SMI_ACCESSORS(FreeSpace, size, kSizeOffset) 3572 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3191 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset) 3573 NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
3192 3574
3193 SMI_ACCESSORS(String, length, kLengthOffset) 3575 SMI_ACCESSORS(String, length, kLengthOffset)
3194 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset) 3576 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3195 3577
3196 3578
3579 int FreeSpace::Size() { return size(); }
3580
3581
3197 FreeSpace* FreeSpace::next() { 3582 FreeSpace* FreeSpace::next() {
3198 DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() || 3583 DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
3199 (!GetHeap()->deserialization_complete() && map() == NULL)); 3584 (!GetHeap()->deserialization_complete() && map() == NULL));
3200 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size()); 3585 DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
3201 return reinterpret_cast<FreeSpace*>( 3586 return reinterpret_cast<FreeSpace*>(
3202 Memory::Address_at(address() + kNextOffset)); 3587 Memory::Address_at(address() + kNextOffset));
3203 } 3588 }
3204 3589
3205 3590
3206 FreeSpace** FreeSpace::next_address() { 3591 FreeSpace** FreeSpace::next_address() {
(...skipping 471 matching lines...)
3678 4063
3679 4064
3680 void StringCharacterStream::VisitTwoByteString( 4065 void StringCharacterStream::VisitTwoByteString(
3681 const uint16_t* chars, int length) { 4066 const uint16_t* chars, int length) {
3682 is_one_byte_ = false; 4067 is_one_byte_ = false;
3683 buffer16_ = chars; 4068 buffer16_ = chars;
3684 end_ = reinterpret_cast<const uint8_t*>(chars + length); 4069 end_ = reinterpret_cast<const uint8_t*>(chars + length);
3685 } 4070 }
3686 4071
3687 4072
4073 int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }
4074
4075
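ByteArray::Size above rounds the payload plus header up to a pointer-size multiple so the object keeps the heap's alignment. A standalone worked sketch; the 8-byte pointer size and 16-byte header used here are assumptions for the example only.

#include <cassert>

constexpr int kPointerSize = 8;  // assumed 64-bit pointer size
constexpr int kHeaderSize = 16;  // assumed ByteArray header size (map + length)

// Round up to the next multiple of a power-of-two alignment.
constexpr int RoundUp(int value, int alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}

constexpr int ByteArraySizeFor(int length) {
  return RoundUp(length + kHeaderSize, kPointerSize);
}

int main() {
  static_assert(ByteArraySizeFor(0) == 16, "header only, already aligned");
  static_assert(ByteArraySizeFor(1) == 24, "17 bytes rounds up to 24");
  static_assert(ByteArraySizeFor(9) == 32, "25 bytes rounds up to 32");
  assert(RoundUp(25, 8) == 32);
  return 0;
}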
3688 byte ByteArray::get(int index) { 4076 byte ByteArray::get(int index) {
3689 DCHECK(index >= 0 && index < this->length()); 4077 DCHECK(index >= 0 && index < this->length());
3690 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize); 4078 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3691 } 4079 }
3692 4080
3693 4081
3694 void ByteArray::set(int index, byte value) { 4082 void ByteArray::set(int index, byte value) {
3695 DCHECK(index >= 0 && index < this->length()); 4083 DCHECK(index >= 0 && index < this->length());
3696 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value); 4084 WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3697 } 4085 }
3698 4086
3699 4087
3700 int ByteArray::get_int(int index) { 4088 int ByteArray::get_int(int index) {
3701 DCHECK(index >= 0 && (index * kIntSize) < this->length()); 4089 DCHECK(index >= 0 && (index * kIntSize) < this->length());
3702 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize); 4090 return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3703 } 4091 }
3704 4092
3705 4093
3706 ByteArray* ByteArray::FromDataStartAddress(Address address) { 4094 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3707 DCHECK_TAG_ALIGNED(address); 4095 DCHECK_TAG_ALIGNED(address);
3708 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag); 4096 return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
3709 } 4097 }
3710 4098
3711 4099
4100 int ByteArray::ByteArraySize() { return SizeFor(this->length()); }
4101
4102
3712 Address ByteArray::GetDataStartAddress() { 4103 Address ByteArray::GetDataStartAddress() {
3713 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize; 4104 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3714 } 4105 }
3715 4106
3716 4107
3717 byte BytecodeArray::get(int index) { 4108 byte BytecodeArray::get(int index) {
3718 DCHECK(index >= 0 && index < this->length()); 4109 DCHECK(index >= 0 && index < this->length());
3719 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize); 4110 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3720 } 4111 }
3721 4112
(...skipping 14 matching lines...)
3736 int BytecodeArray::frame_size() const { 4127 int BytecodeArray::frame_size() const {
3737 return READ_INT_FIELD(this, kFrameSizeOffset); 4128 return READ_INT_FIELD(this, kFrameSizeOffset);
3738 } 4129 }
3739 4130
3740 4131
3741 Address BytecodeArray::GetFirstBytecodeAddress() { 4132 Address BytecodeArray::GetFirstBytecodeAddress() {
3742 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize; 4133 return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
3743 } 4134 }
3744 4135
3745 4136
4137 int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
4138
4139
3746 ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset) 4140 ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)
3747 4141
3748 4142
3749 void* FixedTypedArrayBase::external_pointer() const { 4143 void* FixedTypedArrayBase::external_pointer() const {
3750 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset); 4144 intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
3751 return reinterpret_cast<void*>(ptr); 4145 return reinterpret_cast<void*>(ptr);
3752 } 4146 }
3753 4147
3754 4148
3755 void FixedTypedArrayBase::set_external_pointer(void* value, 4149 void FixedTypedArrayBase::set_external_pointer(void* value,
(...skipping 395 matching lines...)
4151 void Map::set_function_with_prototype(bool value) { 4545 void Map::set_function_with_prototype(bool value) {
4152 set_bit_field(FunctionWithPrototype::update(bit_field(), value)); 4546 set_bit_field(FunctionWithPrototype::update(bit_field(), value));
4153 } 4547 }
4154 4548
4155 4549
4156 bool Map::function_with_prototype() { 4550 bool Map::function_with_prototype() {
4157 return FunctionWithPrototype::decode(bit_field()); 4551 return FunctionWithPrototype::decode(bit_field());
4158 } 4552 }
4159 4553
4160 4554
4555 void Map::set_is_hidden_prototype() {
4556 set_bit_field(bit_field() | (1 << kIsHiddenPrototype));
4557 }
4558
4559
4560 bool Map::is_hidden_prototype() {
4561 return ((1 << kIsHiddenPrototype) & bit_field()) != 0;
4562 }
4563
4564
4565 void Map::set_has_indexed_interceptor() {
4566 set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
4567 }
4568
4569
4570 bool Map::has_indexed_interceptor() {
4571 return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
4572 }
4573
4574
4575 void Map::set_is_undetectable() {
4576 set_bit_field(bit_field() | (1 << kIsUndetectable));
4577 }
4578
4579
4580 bool Map::is_undetectable() {
4581 return ((1 << kIsUndetectable) & bit_field()) != 0;
4582 }
4583
4584
4585 void Map::set_is_observed() { set_bit_field(bit_field() | (1 << kIsObserved)); }
4586
4587 bool Map::is_observed() { return ((1 << kIsObserved) & bit_field()) != 0; }
4588
4589
4590 void Map::set_has_named_interceptor() {
4591 set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
4592 }
4593
4594
4595 bool Map::has_named_interceptor() {
4596 return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
4597 }
4598
4599
4161 void Map::set_is_access_check_needed(bool access_check_needed) { 4600 void Map::set_is_access_check_needed(bool access_check_needed) {
4162 if (access_check_needed) { 4601 if (access_check_needed) {
4163 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded)); 4602 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4164 } else { 4603 } else {
4165 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded)); 4604 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4166 } 4605 }
4167 } 4606 }
4168 4607
4169 4608
4170 bool Map::is_access_check_needed() { 4609 bool Map::is_access_check_needed() {
(...skipping 16 matching lines...)
4187 4626
4188 void Map::set_is_prototype_map(bool value) { 4627 void Map::set_is_prototype_map(bool value) {
4189 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value)); 4628 set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4190 } 4629 }
4191 4630
4192 bool Map::is_prototype_map() const { 4631 bool Map::is_prototype_map() const {
4193 return IsPrototypeMapBits::decode(bit_field2()); 4632 return IsPrototypeMapBits::decode(bit_field2());
4194 } 4633 }
4195 4634
4196 4635
4636 void Map::set_elements_kind(ElementsKind elements_kind) {
4637 DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
4638 DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
4639 set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
4640 DCHECK(this->elements_kind() == elements_kind);
4641 }
4642
4643
4644 ElementsKind Map::elements_kind() {
4645 return Map::ElementsKindBits::decode(bit_field2());
4646 }
4647
4648
4649 bool Map::has_fast_smi_elements() {
4650 return IsFastSmiElementsKind(elements_kind());
4651 }
4652
4653 bool Map::has_fast_object_elements() {
4654 return IsFastObjectElementsKind(elements_kind());
4655 }
4656
4657 bool Map::has_fast_smi_or_object_elements() {
4658 return IsFastSmiOrObjectElementsKind(elements_kind());
4659 }
4660
4661 bool Map::has_fast_double_elements() {
4662 return IsFastDoubleElementsKind(elements_kind());
4663 }
4664
4665 bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }
4666
4667 bool Map::has_sloppy_arguments_elements() {
4668 return IsSloppyArgumentsElements(elements_kind());
4669 }
4670
4671 bool Map::has_fixed_typed_array_elements() {
4672 return IsFixedTypedArrayElementsKind(elements_kind());
4673 }
4674
4675 bool Map::has_dictionary_elements() {
4676 return IsDictionaryElementsKind(elements_kind());
4677 }
4678
4679
4197 void Map::set_dictionary_map(bool value) { 4680 void Map::set_dictionary_map(bool value) {
4198 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value); 4681 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4199 new_bit_field3 = IsUnstable::update(new_bit_field3, value); 4682 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4200 set_bit_field3(new_bit_field3); 4683 set_bit_field3(new_bit_field3);
4201 } 4684 }
4202 4685
4203 4686
4204 bool Map::is_dictionary_map() { 4687 bool Map::is_dictionary_map() {
4205 return DictionaryMap::decode(bit_field3()); 4688 return DictionaryMap::decode(bit_field3());
4206 } 4689 }
(...skipping 94 matching lines...)
4301 void Map::NotifyLeafMapLayoutChange() { 4784 void Map::NotifyLeafMapLayoutChange() {
4302 if (is_stable()) { 4785 if (is_stable()) {
4303 mark_unstable(); 4786 mark_unstable();
4304 dependent_code()->DeoptimizeDependentCodeGroup( 4787 dependent_code()->DeoptimizeDependentCodeGroup(
4305 GetIsolate(), 4788 GetIsolate(),
4306 DependentCode::kPrototypeCheckGroup); 4789 DependentCode::kPrototypeCheckGroup);
4307 } 4790 }
4308 } 4791 }
4309 4792
4310 4793
4794 bool Map::CanTransition() {
4795 // Only JSObject and subtypes have map transitions and back pointers.
4796 STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
4797 return instance_type() >= FIRST_JS_OBJECT_TYPE;
4798 }
4799
4800
4801 bool Map::IsPrimitiveMap() {
4802 STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
4803 return instance_type() <= LAST_PRIMITIVE_TYPE;
4804 }
4805 bool Map::IsJSObjectMap() {
4806 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
4807 return instance_type() >= FIRST_JS_OBJECT_TYPE;
4808 }
4809 bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
4810 bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
4811 bool Map::IsJSProxyMap() {
4812 InstanceType type = instance_type();
4813 return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
4814 }
4815 bool Map::IsJSGlobalProxyMap() {
4816 return instance_type() == JS_GLOBAL_PROXY_TYPE;
4817 }
4818 bool Map::IsJSGlobalObjectMap() {
4819 return instance_type() == JS_GLOBAL_OBJECT_TYPE;
4820 }
4821 bool Map::IsGlobalObjectMap() {
4822 const InstanceType type = instance_type();
4823 return type == JS_GLOBAL_OBJECT_TYPE || type == JS_BUILTINS_OBJECT_TYPE;
4824 }
4825
4826
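The map predicates above are plain range checks over InstanceType, with STATIC_ASSERTs pinning the enum ordering they depend on. A self-contained sketch of that pattern over a made-up enum:

#include <cassert>

// Made-up instance-type enum; only the ordering matters for range checks.
enum InstanceType {
  FIRST_TYPE,
  STRING_TYPE = FIRST_TYPE,
  SYMBOL_TYPE,
  HEAP_NUMBER_TYPE,
  FIRST_JS_OBJECT_TYPE,
  JS_OBJECT_TYPE = FIRST_JS_OBJECT_TYPE,
  JS_ARRAY_TYPE,
  LAST_JS_OBJECT_TYPE = JS_ARRAY_TYPE,
  LAST_TYPE = LAST_JS_OBJECT_TYPE
};

// The range check is valid only while JS object types stay at the end of the
// enum; the static_assert documents (and enforces) that assumption.
bool IsJSObjectType(InstanceType type) {
  static_assert(LAST_JS_OBJECT_TYPE == LAST_TYPE,
                "JS object types must come last");
  return type >= FIRST_JS_OBJECT_TYPE;
}

int main() {
  assert(IsJSObjectType(JS_ARRAY_TYPE));
  assert(!IsJSObjectType(HEAP_NUMBER_TYPE));
  return 0;
}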
4311 bool Map::CanOmitMapChecks() { 4827 bool Map::CanOmitMapChecks() {
4312 return is_stable() && FLAG_omit_map_checks_for_leaf_maps; 4828 return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
4313 } 4829 }
4314 4830
4315 4831
4316 int DependentCode::number_of_entries(DependencyGroup group) { 4832 int DependentCode::number_of_entries(DependencyGroup group) {
4317 if (length() == 0) return 0; 4833 if (length() == 0) return 0;
4318 return Smi::cast(get(group))->value(); 4834 return Smi::cast(get(group))->value();
4319 } 4835 }
4320 4836
(...skipping 319 matching lines...)
4640 default: return false; 5156 default: return false;
4641 } 5157 }
4642 } 5158 }
4643 5159
4644 5160
4645 bool Code::is_keyed_stub() { 5161 bool Code::is_keyed_stub() {
4646 return is_keyed_load_stub() || is_keyed_store_stub(); 5162 return is_keyed_load_stub() || is_keyed_store_stub();
4647 } 5163 }
4648 5164
4649 5165
4650 bool Code::is_debug_stub() { 5166 bool Code::is_debug_stub() { return ic_state() == DEBUG_STUB; }
4651 return ic_state() == DEBUG_STUB; 5167 bool Code::is_handler() { return kind() == HANDLER; }
5168 bool Code::is_load_stub() { return kind() == LOAD_IC; }
5169 bool Code::is_keyed_load_stub() { return kind() == KEYED_LOAD_IC; }
5170 bool Code::is_store_stub() { return kind() == STORE_IC; }
5171 bool Code::is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
5172 bool Code::is_call_stub() { return kind() == CALL_IC; }
5173 bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
5174 bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
5175 bool Code::is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; }
5176 bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
5177 bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
5178
5179
5180 bool Code::embeds_maps_weakly() {
5181 Kind k = kind();
5182 return (k == LOAD_IC || k == STORE_IC || k == KEYED_LOAD_IC ||
5183 k == KEYED_STORE_IC || k == COMPARE_NIL_IC) &&
5184 ic_state() == MONOMORPHIC;
4652 } 5185 }
4653 5186
4654 5187
4655 Address Code::constant_pool() { 5188 Address Code::constant_pool() {
4656 Address constant_pool = NULL; 5189 Address constant_pool = NULL;
4657 if (FLAG_enable_embedded_constant_pool) { 5190 if (FLAG_enable_embedded_constant_pool) {
4658 int offset = constant_pool_offset(); 5191 int offset = constant_pool_offset();
4659 if (offset < instruction_size()) { 5192 if (offset < instruction_size()) {
4660 constant_pool = FIELD_ADDR(this, kHeaderSize + offset); 5193 constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
4661 } 5194 }
(...skipping 76 matching lines...)
4738 return result; 5271 return result;
4739 } 5272 }
4740 5273
4741 5274
4742 Object* Code::GetObjectFromEntryAddress(Address location_of_address) { 5275 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
4743 return HeapObject:: 5276 return HeapObject::
4744 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize); 5277 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
4745 } 5278 }
4746 5279
4747 5280
5281 bool Code::CanContainWeakObjects() {
5282 // is_turbofanned() implies !can_have_weak_objects().
5283 DCHECK(!is_optimized_code() || !is_turbofanned() || !can_have_weak_objects());
5284 return is_optimized_code() && can_have_weak_objects();
5285 }
5286
5287
5288 bool Code::IsWeakObject(Object* object) {
5289 return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
5290 }
5291
5292
4748 bool Code::IsWeakObjectInOptimizedCode(Object* object) { 5293 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
4749 if (object->IsMap()) { 5294 if (object->IsMap()) {
4750 return Map::cast(object)->CanTransition() && 5295 return Map::cast(object)->CanTransition() &&
4751 FLAG_weak_embedded_maps_in_optimized_code; 5296 FLAG_weak_embedded_maps_in_optimized_code;
4752 } 5297 }
4753 if (object->IsCell()) { 5298 if (object->IsCell()) {
4754 object = Cell::cast(object)->value(); 5299 object = Cell::cast(object)->value();
4755 } else if (object->IsPropertyCell()) { 5300 } else if (object->IsPropertyCell()) {
4756 object = PropertyCell::cast(object)->value(); 5301 object = PropertyCell::cast(object)->value();
4757 } 5302 }
(...skipping 765 matching lines...)
5523 // We reenable optimization whenever the number of tries is a large 6068 // We reenable optimization whenever the number of tries is a large
5524 // enough power of 2. 6069 // enough power of 2.
5525 if (tries >= 16 && (((tries - 1) & tries) == 0)) { 6070 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
5526 set_optimization_disabled(false); 6071 set_optimization_disabled(false);
5527 set_opt_count(0); 6072 set_opt_count(0);
5528 set_deopt_count(0); 6073 set_deopt_count(0);
5529 } 6074 }
5530 } 6075 }
5531 6076
5532 6077
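The reenable-optimization condition above relies on the classic bit trick that, for n > 0, n is a power of two exactly when ((n - 1) & n) == 0. A tiny standalone illustration:

#include <cassert>

// True for 1, 2, 4, 8, ...: clearing the lowest set bit of a power of two
// leaves nothing behind.
bool IsPowerOfTwo(int n) { return n > 0 && ((n - 1) & n) == 0; }

int main() {
  assert(IsPowerOfTwo(16) && IsPowerOfTwo(32));
  assert(!IsPowerOfTwo(24) && !IsPowerOfTwo(0));
  // The code above re-enables optimization at tries = 16, 32, 64, ...
  for (int tries = 16; tries <= 64; ++tries) {
    bool reenable = tries >= 16 && (((tries - 1) & tries) == 0);
    assert(reenable == (IsPowerOfTwo(tries) && tries >= 16));
  }
  return 0;
}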
6078 void SharedFunctionInfo::set_disable_optimization_reason(BailoutReason reason) {
6079 set_opt_count_and_bailout_reason(DisabledOptimizationReasonBits::update(
6080 opt_count_and_bailout_reason(), reason));
6081 }
6082
6083
5533 bool SharedFunctionInfo::IsSubjectToDebugging() { 6084 bool SharedFunctionInfo::IsSubjectToDebugging() {
5534 Object* script_obj = script(); 6085 Object* script_obj = script();
5535 if (script_obj->IsUndefined()) return false; 6086 if (script_obj->IsUndefined()) return false;
5536 Script* script = Script::cast(script_obj); 6087 Script* script = Script::cast(script_obj);
5537 Script::Type type = static_cast<Script::Type>(script->type()->value()); 6088 Script::Type type = static_cast<Script::Type>(script->type()->value());
5538 return type == Script::TYPE_NORMAL; 6089 return type == Script::TYPE_NORMAL;
5539 } 6090 }
5540 6091
5541 6092
5542 bool JSFunction::IsBuiltin() { 6093 bool JSFunction::IsBuiltin() {
(...skipping 410 matching lines...)
5953 byte* Code::entry() { 6504 byte* Code::entry() {
5954 return instruction_start(); 6505 return instruction_start();
5955 } 6506 }
5956 6507
5957 6508
5958 bool Code::contains(byte* inner_pointer) { 6509 bool Code::contains(byte* inner_pointer) {
5959 return (address() <= inner_pointer) && (inner_pointer <= address() + Size()); 6510 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
5960 } 6511 }
5961 6512
5962 6513
6514 int Code::ExecutableSize() {
6515 // Check that the assumptions about the layout of the code object holds.
6516 DCHECK_EQ(static_cast<int>(instruction_start() - address()),
6517 Code::kHeaderSize);
6518 return instruction_size() + Code::kHeaderSize;
6519 }
6520
6521
6522 int Code::CodeSize() { return SizeFor(body_size()); }
6523
6524
5963 ACCESSORS(JSArray, length, Object, kLengthOffset) 6525 ACCESSORS(JSArray, length, Object, kLengthOffset)
5964 6526
5965 6527
5966 void* JSArrayBuffer::backing_store() const { 6528 void* JSArrayBuffer::backing_store() const {
5967 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset); 6529 intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
5968 return reinterpret_cast<void*>(ptr); 6530 return reinterpret_cast<void*>(ptr);
5969 } 6531 }
5970 6532
5971 6533
5972 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) { 6534 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
(...skipping 434 matching lines...)
6407 template <typename schar> 6969 template <typename schar>
6408 uint32_t StringHasher::HashSequentialString(const schar* chars, 6970 uint32_t StringHasher::HashSequentialString(const schar* chars,
6409 int length, 6971 int length,
6410 uint32_t seed) { 6972 uint32_t seed) {
6411 StringHasher hasher(length, seed); 6973 StringHasher hasher(length, seed);
6412 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length); 6974 if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6413 return hasher.GetHashField(); 6975 return hasher.GetHashField();
6414 } 6976 }
6415 6977
6416 6978
6979 IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
6980 : StringHasher(len, seed) {}
6981
6982
6417 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) { 6983 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
6418 IteratingStringHasher hasher(string->length(), seed); 6984 IteratingStringHasher hasher(string->length(), seed);
6419 // Nothing to do. 6985 // Nothing to do.
6420 if (hasher.has_trivial_hash()) return hasher.GetHashField(); 6986 if (hasher.has_trivial_hash()) return hasher.GetHashField();
6421 ConsString* cons_string = String::VisitFlat(&hasher, string); 6987 ConsString* cons_string = String::VisitFlat(&hasher, string);
6422 if (cons_string == nullptr) return hasher.GetHashField(); 6988 if (cons_string == nullptr) return hasher.GetHashField();
6423 hasher.VisitConsString(cons_string); 6989 hasher.VisitConsString(cons_string);
6424 return hasher.GetHashField(); 6990 return hasher.GetHashField();
6425 } 6991 }
6426 6992
(...skipping 213 matching lines...)
6640 7206
6641 7207
6642 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) { 7208 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
6643 if (!HasExpectedReceiverType()) return true; 7209 if (!HasExpectedReceiverType()) return true;
6644 if (!receiver->IsJSObject()) return false; 7210 if (!receiver->IsJSObject()) return false;
6645 return FunctionTemplateInfo::cast(expected_receiver_type()) 7211 return FunctionTemplateInfo::cast(expected_receiver_type())
6646 ->IsTemplateFor(JSObject::cast(receiver)->map()); 7212 ->IsTemplateFor(JSObject::cast(receiver)->map());
6647 } 7213 }
6648 7214
6649 7215
7216 bool AccessorInfo::HasExpectedReceiverType() {
7217 return expected_receiver_type()->IsFunctionTemplateInfo();
7218 }
7219
7220
7221 Object* AccessorPair::get(AccessorComponent component) {
7222 return component == ACCESSOR_GETTER ? getter() : setter();
7223 }
7224
7225
7226 void AccessorPair::set(AccessorComponent component, Object* value) {
7227 if (component == ACCESSOR_GETTER) {
7228 set_getter(value);
7229 } else {
7230 set_setter(value);
7231 }
7232 }
7233
7234
7235 void AccessorPair::SetComponents(Object* getter, Object* setter) {
7236 if (!getter->IsNull()) set_getter(getter);
7237 if (!setter->IsNull()) set_setter(setter);
7238 }
7239
7240
7241 bool AccessorPair::Equals(AccessorPair* pair) {
7242 return (this == pair) || pair->Equals(getter(), setter());
7243 }
7244
7245
7246 bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
7247 return (getter() == getter_value) && (setter() == setter_value);
7248 }
7249
7250
7251 bool AccessorPair::ContainsAccessor() {
7252 return IsJSAccessor(getter()) || IsJSAccessor(setter());
7253 }
7254
7255
7256 bool AccessorPair::IsJSAccessor(Object* obj) {
7257 return obj->IsSpecFunction() || obj->IsUndefined();
7258 }
7259
7260
6650 template<typename Derived, typename Shape, typename Key> 7261 template<typename Derived, typename Shape, typename Key>
6651 void Dictionary<Derived, Shape, Key>::SetEntry(int entry, 7262 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
6652 Handle<Object> key, 7263 Handle<Object> key,
6653 Handle<Object> value) { 7264 Handle<Object> value) {
6654 this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0))); 7265 this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
6655 } 7266 }
6656 7267
6657 7268
6658 template<typename Derived, typename Shape, typename Key> 7269 template<typename Derived, typename Shape, typename Key>
6659 void Dictionary<Derived, Shape, Key>::SetEntry(int entry, 7270 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
(...skipping 153 matching lines...)
6813 return key; 7424 return key;
6814 } 7425 }
6815 7426
6816 7427
6817 Handle<ObjectHashTable> ObjectHashTable::Shrink( 7428 Handle<ObjectHashTable> ObjectHashTable::Shrink(
6818 Handle<ObjectHashTable> table, Handle<Object> key) { 7429 Handle<ObjectHashTable> table, Handle<Object> key) {
6819 return DerivedHashTable::Shrink(table, key); 7430 return DerivedHashTable::Shrink(table, key);
6820 } 7431 }
6821 7432
6822 7433
7434 Object* OrderedHashMap::ValueAt(int entry) {
7435 return get(EntryToIndex(entry) + kValueOffset);
7436 }
7437
7438
6823 template <int entrysize> 7439 template <int entrysize>
6824 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) { 7440 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
6825 if (other->IsWeakCell()) other = WeakCell::cast(other)->value(); 7441 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
6826 return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other 7442 return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
6827 : *key == other; 7443 : *key == other;
6828 } 7444 }
6829 7445
6830 7446
6831 template <int entrysize> 7447 template <int entrysize>
6832 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) { 7448 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
(...skipping 14 matching lines...)
6847 } 7463 }
6848 7464
6849 7465
6850 template <int entrysize> 7466 template <int entrysize>
6851 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate, 7467 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
6852 Handle<Object> key) { 7468 Handle<Object> key) {
6853 return key; 7469 return key;
6854 } 7470 }
6855 7471
6856 7472
7473 bool ScopeInfo::IsAsmModule() { return AsmModuleField::decode(Flags()); }
7474
7475
7476 bool ScopeInfo::IsAsmFunction() { return AsmFunctionField::decode(Flags()); }
7477
7478
7479 bool ScopeInfo::HasSimpleParameters() {
7480 return HasSimpleParametersField::decode(Flags());
7481 }
7482
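These predicates each decode one bit out of the packed Flags() value; a standalone sketch of the same BitField pattern, using purely illustrative bit positions rather than the real ScopeInfo layout, would look like:

// Standalone illustration of the BitField pattern used by the ScopeInfo
// predicates; bit positions here are hypothetical, not the real layout.
class DemoFlags {
 public:
  typedef BitField<bool, 0, 1> AsmModuleBit;
  typedef BitField<bool, 1, 1> AsmFunctionBit;
  explicit DemoFlags(uint32_t flags) : flags_(flags) {}
  bool IsAsmModule() const { return AsmModuleBit::decode(flags_); }
  bool IsAsmFunction() const { return AsmFunctionBit::decode(flags_); }
 private:
  uint32_t flags_;
};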
7483
7484 #define SCOPE_INFO_FIELD_ACCESSORS(name) \
7485 void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
7486 int ScopeInfo::name() { \
7487 if (length() > 0) { \
7488 return Smi::cast(get(k##name))->value(); \
7489 } else { \
7490 return 0; \
7491 } \
7492 }
7493 FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(SCOPE_INFO_FIELD_ACCESSORS)
7494 #undef SCOPE_INFO_FIELD_ACCESSORS
7495
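To make the generated accessors concrete, the expansion of the macro above for a hypothetical numeric field named Foo (with slot index kFoo supplied by FOR_EACH_SCOPE_INFO_NUMERIC_FIELD) reads roughly as follows:

// Approximate expansion for an illustrative field "Foo".
void ScopeInfo::SetFoo(int value) { set(kFoo, Smi::FromInt(value)); }
int ScopeInfo::Foo() {
  // An empty ScopeInfo (length() == 0) reports 0 for every numeric field.
  if (length() > 0) {
    return Smi::cast(get(kFoo))->value();
  } else {
    return 0;
  }
}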
7496
6857 void Map::ClearCodeCache(Heap* heap) { 7497 void Map::ClearCodeCache(Heap* heap) {
6858 // No write barrier is needed since empty_fixed_array is not in new space. 7498 // No write barrier is needed since empty_fixed_array is not in new space.
6859 // Please note this function is used during marking: 7499 // Please note this function is used during marking:
6860 // - MarkCompactCollector::MarkUnmarkedObject 7500 // - MarkCompactCollector::MarkUnmarkedObject
6861 // - IncrementalMarking::Step 7501 // - IncrementalMarking::Step
6862 DCHECK(!heap->InNewSpace(heap->empty_fixed_array())); 7502 DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
6863 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array()); 7503 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
6864 } 7504 }
6865 7505
6866 7506
(...skipping 150 matching lines...)
7017 isolate->set_relocatable_top(this); 7657 isolate->set_relocatable_top(this);
7018 } 7658 }
7019 7659
7020 7660
7021 Relocatable::~Relocatable() { 7661 Relocatable::~Relocatable() {
7022 DCHECK_EQ(isolate_->relocatable_top(), this); 7662 DCHECK_EQ(isolate_->relocatable_top(), this);
7023 isolate_->set_relocatable_top(prev_); 7663 isolate_->set_relocatable_top(prev_);
7024 } 7664 }
7025 7665
7026 7666
7667 // static
7027 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) { 7668 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7028 return map->instance_size(); 7669 return map->instance_size();
7029 } 7670 }
7030 7671
7031 7672
7673 // static
7674 int FixedArray::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7675 return SizeFor(reinterpret_cast<FixedArray*>(object)->synchronized_length());
7676 }
7677
7678
7679 // static
7680 int StructBodyDescriptor::SizeOf(Map* map, HeapObject* object) {
7681 return map->instance_size();
7682 }
7683
7684
7032 void Foreign::ForeignIterateBody(ObjectVisitor* v) { 7685 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7033 v->VisitExternalReference( 7686 v->VisitExternalReference(
7034 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset))); 7687 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7035 } 7688 }
7036 7689
7037 7690
7038 template<typename StaticVisitor> 7691 template<typename StaticVisitor>
7039 void Foreign::ForeignIterateBody() { 7692 void Foreign::ForeignIterateBody() {
7040 StaticVisitor::VisitExternalReference( 7693 StaticVisitor::VisitExternalReference(
7041 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset))); 7694 reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
(...skipping 112 matching lines...)
7154 7807
7155 Object* JSMapIterator::CurrentValue() { 7808 Object* JSMapIterator::CurrentValue() {
7156 OrderedHashMap* table(OrderedHashMap::cast(this->table())); 7809 OrderedHashMap* table(OrderedHashMap::cast(this->table()));
7157 int index = Smi::cast(this->index())->value(); 7810 int index = Smi::cast(this->index())->value();
7158 Object* value = table->ValueAt(index); 7811 Object* value = table->ValueAt(index);
7159 DCHECK(!value->IsTheHole()); 7812 DCHECK(!value->IsTheHole());
7160 return value; 7813 return value;
7161 } 7814 }
7162 7815
7163 7816
7817 String::SubStringRange::SubStringRange(String* string, int first, int length)
7818 : string_(string),
7819 first_(first),
7820 length_(length == -1 ? string->length() : length) {}
7821
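A hedged usage sketch for the range constructor above, assuming SubStringRange exposes begin()/end() in objects.h and that the caller holds a flat string under a DisallowHeapAllocation scope; the local `str` (a String*) and the index/length values are illustrative:

// Illustrative only: walk the code units of characters [2, 2 + 5) of 'str'.
DisallowHeapAllocation no_gc;
String::SubStringRange range(str, 2, 5);
for (uc16 c : range) {
  USE(c);  // consume the character
}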
7822
7164 class String::SubStringRange::iterator final { 7823 class String::SubStringRange::iterator final {
7165 public: 7824 public:
7166 typedef std::forward_iterator_tag iterator_category; 7825 typedef std::forward_iterator_tag iterator_category;
7167 typedef int difference_type; 7826 typedef int difference_type;
7168 typedef uc16 value_type; 7827 typedef uc16 value_type;
7169 typedef uc16* pointer; 7828 typedef uc16* pointer;
7170 typedef uc16& reference; 7829 typedef uc16& reference;
7171 7830
7172 iterator(const iterator& other) 7831 iterator(const iterator& other)
7173 : content_(other.content_), offset_(other.offset_) {} 7832 : content_(other.content_), offset_(other.offset_) {}
(...skipping 69 matching lines...)
7243 #undef READ_INT64_FIELD 7902 #undef READ_INT64_FIELD
7244 #undef WRITE_INT64_FIELD 7903 #undef WRITE_INT64_FIELD
7245 #undef READ_BYTE_FIELD 7904 #undef READ_BYTE_FIELD
7246 #undef WRITE_BYTE_FIELD 7905 #undef WRITE_BYTE_FIELD
7247 #undef NOBARRIER_READ_BYTE_FIELD 7906 #undef NOBARRIER_READ_BYTE_FIELD
7248 #undef NOBARRIER_WRITE_BYTE_FIELD 7907 #undef NOBARRIER_WRITE_BYTE_FIELD
7249 7908
7250 } } // namespace v8::internal 7909 } } // namespace v8::internal
7251 7910
7252 #endif // V8_OBJECTS_INL_H_ 7911 #endif // V8_OBJECTS_INL_H_