Chromium Code Reviews

Diff: src/objects-inl.h

Issue 391693002: In-object double fields unboxing (for 64-bit only). (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed Toon's comments (created 6 years, 1 month ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 //
 // Review notes:
 //
 // - The use of macros in these inline functions may seem superfluous
 // but it is absolutely needed to make sure gcc generates optimal
 // code. gcc is not happy when attempting to inline too deep.
 //

 #ifndef V8_OBJECTS_INL_H_
 #define V8_OBJECTS_INL_H_

 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/contexts.h"
 #include "src/conversions-inl.h"
 #include "src/elements.h"
 #include "src/factory.h"
 #include "src/field-index-inl.h"
 #include "src/heap/heap-inl.h"
 #include "src/heap/heap.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/objects-visiting.h"
 #include "src/heap/spaces.h"
 #include "src/heap/store-buffer.h"
 #include "src/isolate.h"
+#include "src/layout-descriptor-inl.h"
 #include "src/lookup.h"
 #include "src/objects.h"
 #include "src/property.h"
 #include "src/prototype.h"
 #include "src/transitions-inl.h"
 #include "src/type-feedback-vector-inl.h"
 #include "src/v8memory.h"

 namespace v8 {
 namespace internal {
(...skipping 10 matching lines...)
   return Smi::FromInt(value >> 1);
 }


 PropertyDetails PropertyDetails::AsDeleted() const {
   Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
   return PropertyDetails(smi);
 }


+int PropertyDetails::field_width_in_words() const {
+  DCHECK(type() == FIELD);
+  if (!FLAG_unbox_double_fields) return 1;
+  if (kDoubleSize == kPointerSize) return 1;
+  return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
+}
+
+
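A standalone sketch (my own naming, not part of the patch) of what field_width_in_words() evaluates to. On the 64-bit configurations this CL targets, kDoubleSize == kPointerSize, so even an unboxed double field occupies a single word; the final clause would only matter on a hypothetical layout where doubles are wider than pointers.

// Hedged illustration only; mirrors the logic of the patch's
// PropertyDetails::field_width_in_words() with plain ints so it can be
// checked at compile time outside of V8.
constexpr int FieldWidthInWords(int double_size, int pointer_size,
                                bool is_double_field, bool unbox_doubles) {
  return (!unbox_doubles || double_size == pointer_size || !is_double_field)
             ? 1
             : double_size / pointer_size;
}
static_assert(FieldWidthInWords(8, 8, true, true) == 1,
              "64-bit: an unboxed double still fits in one word");
static_assert(FieldWidthInWords(8, 4, true, true) == 2,
              "hypothetical 32-bit layout: a double would span two words");
static_assert(FieldWidthInWords(8, 4, false, true) == 1,
              "tagged fields are always one word");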
 #define TYPE_CHECKER(type, instancetype)                                    \
   bool Object::Is##type() const {                                           \
     return Object::IsHeapObject() &&                                        \
            HeapObject::cast(this)->map()->instance_type() == instancetype;  \
   }


 #define CAST_ACCESSOR(type)                       \
   type* type::cast(Object* object) {              \
     SLOW_DCHECK(object->Is##type());              \
(...skipping 628 matching lines...)
 bool Object::IsJSWeakCollection() const {
   return IsJSWeakMap() || IsJSWeakSet();
 }


 bool Object::IsDescriptorArray() const {
   return IsFixedArray();
 }


+bool Object::IsLayoutDescriptor() const {
+  return IsSmi() || IsFixedTypedArrayBase();
+}
+
+
 bool Object::IsTransitionArray() const {
   return IsFixedArray();
 }


 bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }


 bool Object::IsDeoptimizationInputData() const {
   // Must be a fixed array.
(...skipping 1338 matching lines...)
 void JSObject::SetInternalField(int index, Smi* value) {
   DCHECK(index < GetInternalFieldCount() && index >= 0);
   // Internal objects do follow immediately after the header, whereas in-object
   // properties are at the end of the object. Therefore there is no need
   // to adjust the index here.
   int offset = GetHeaderSize() + (kPointerSize * index);
   WRITE_FIELD(this, offset, value);
 }


+bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
+  if (!FLAG_unbox_double_fields) return false;
+  if (index.is_hidden_field() || !index.is_inobject()) return false;
+  return !map()->layout_descriptor()->IsTagged(index.property_index());
Hannes Payer (out of office) 2014/11/06 12:29:46: You could call IsUnboxedDoubleField from map.
Igor Sheludko 2014/11/07 08:03:52: I made two versions intentionally, to postpone calling map() until it is actually needed.
Hannes Payer (out of office) 2014/11/10 15:26:08: You are already calling map() here. You could call map()->IsUnboxedDoubleField(index).
Igor Sheludko 2014/11/10 15:43:54: Ok, let's make it look cleaner. Done.
+}
+
+
+bool Map::IsUnboxedDoubleField(FieldIndex index) {
+  if (!FLAG_unbox_double_fields) return false;
+  if (index.is_hidden_field() || !index.is_inobject()) return false;
+  return !layout_descriptor()->IsTagged(index.property_index());
+}
+
+
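Per the review thread above, the JSObject version can defer everything past the flag check to the Map version. A minimal sketch of that delegation (assuming a later patch set adopts it; this is not the code of this patch set):

bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  // Reuse the Map logic instead of duplicating the hidden-field and
  // in-object checks here; map() is only loaded once the flag is on.
  return map()->IsUnboxedDoubleField(index);
}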
 // Access fast-case object properties at index. The use of these routines
 // is needed to correctly distinguish between properties stored in-object and
 // properties stored in the properties array.
 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
+  DCHECK(!IsUnboxedDoubleField(index));
   if (index.is_inobject()) {
     return READ_FIELD(this, index.offset());
   } else {
     return properties()->get(index.outobject_array_index());
   }
 }


-void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
+double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
+  DCHECK(IsUnboxedDoubleField(index));
+  return READ_DOUBLE_FIELD(this, index.offset());
+}
+
+
+void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
   if (index.is_inobject()) {
     int offset = index.offset();
     WRITE_FIELD(this, offset, value);
     WRITE_BARRIER(GetHeap(), this, offset, value);
   } else {
     properties()->set(index.outobject_array_index(), value);
   }
 }


+void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
+  WRITE_DOUBLE_FIELD(this, index.offset(), value);
+}
+
+
+void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
+  if (IsUnboxedDoubleField(index)) {
+    DCHECK(value->IsMutableHeapNumber());
+    RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
+  } else {
+    RawFastPropertyAtPut(index, value);
+  }
+}
+
+
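To see how the split accessors above fit together, here is a hedged usage sketch (NumberValueOfFastProperty is a hypothetical helper, not V8 API) of reading a fast property that may be stored either as a tagged pointer or as a raw unboxed double:

double NumberValueOfFastProperty(JSObject* object, FieldIndex index) {
  if (object->IsUnboxedDoubleField(index)) {
    // The in-object slot holds raw IEEE-754 bits, not a tagged pointer,
    // so it must be read with the double accessor.
    return object->RawFastDoublePropertyAt(index);
  }
  // Otherwise the slot holds a tagged Object*; assume it is a Smi or
  // HeapNumber for this illustration.
  return object->RawFastPropertyAt(index)->Number();
}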
 int JSObject::GetInObjectPropertyOffset(int index) {
   return map()->GetInObjectPropertyOffset(index);
 }


 Object* JSObject::InObjectPropertyAt(int index) {
   int offset = GetInObjectPropertyOffset(index);
   return READ_FIELD(this, offset);
 }

(...skipping 977 matching lines...)
                           const WhitenessWitness&) {
   // Range check.
   DCHECK(descriptor_number < number_of_descriptors());

   NoIncrementalWriteBarrierSet(this,
                                ToKeyIndex(descriptor_number),
                                *desc->GetKey());
   NoIncrementalWriteBarrierSet(this,
                                ToValueIndex(descriptor_number),
                                *desc->GetValue());
-  NoIncrementalWriteBarrierSet(this,
-                               ToDetailsIndex(descriptor_number),
-                               desc->GetDetails().AsSmi());
+  NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
+                               desc->GetDetails().AsSmi());
 }


 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
   // Range check.
   DCHECK(descriptor_number < number_of_descriptors());

   set(ToKeyIndex(descriptor_number), *desc->GetKey());
   set(ToValueIndex(descriptor_number), *desc->GetValue());
(...skipping 154 matching lines...)
 CAST_ACCESSOR(JSObject)
 CAST_ACCESSOR(JSProxy)
 CAST_ACCESSOR(JSReceiver)
 CAST_ACCESSOR(JSRegExp)
 CAST_ACCESSOR(JSSet)
 CAST_ACCESSOR(JSSetIterator)
 CAST_ACCESSOR(JSTypedArray)
 CAST_ACCESSOR(JSValue)
 CAST_ACCESSOR(JSWeakMap)
 CAST_ACCESSOR(JSWeakSet)
+CAST_ACCESSOR(LayoutDescriptor)
 CAST_ACCESSOR(Map)
 CAST_ACCESSOR(MapCache)
 CAST_ACCESSOR(Name)
 CAST_ACCESSOR(NameDictionary)
 CAST_ACCESSOR(NormalizedMapCache)
 CAST_ACCESSOR(Object)
 CAST_ACCESSOR(ObjectHashTable)
 CAST_ACCESSOR(Oddball)
 CAST_ACCESSOR(OrderedHashMap)
 CAST_ACCESSOR(OrderedHashSet)
(...skipping 1863 matching lines...)
     transitions->set_back_pointer_storage(map->GetBackPointer());
   } else if (!map->transitions()->IsFullTransitionArray()) {
     transitions = TransitionArray::ExtendToFullTransitionArray(map);
   } else {
     return;
   }
   map->set_transitions(*transitions);
 }


-void Map::InitializeDescriptors(DescriptorArray* descriptors) {
+LayoutDescriptor* Map::layout_descriptor_gc_safe() {
+  Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
+  return LayoutDescriptor::cast_gc_safe(layout_desc);
+}
+
+
+void Map::UpdateDescriptors(DescriptorArray* descriptors,
+                            LayoutDescriptor* layout_desc) {
+  set_instance_descriptors(descriptors);
+  if (FLAG_unbox_double_fields) {
+    if (layout_descriptor()->IsSlowLayout()) {
+      set_layout_descriptor(layout_desc);
+    }
+    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
+    DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
+  }
+}
+
+
+void Map::InitializeDescriptors(DescriptorArray* descriptors,
+                                LayoutDescriptor* layout_desc) {
   int len = descriptors->number_of_descriptors();
   set_instance_descriptors(descriptors);
   SetNumberOfOwnDescriptors(len);
+
+  if (FLAG_unbox_double_fields) {
+    set_layout_descriptor(layout_desc);
+    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
+    set_visitor_id(StaticVisitorBase::GetVisitorId(this));
+  }
 }


 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
+ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)


 void Map::set_bit_field3(uint32_t bits) {
   if (kInt32Size != kPointerSize) {
     WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
   }
   WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
 }


 uint32_t Map::bit_field3() {
   return READ_UINT32_FIELD(this, kBitField3Offset);
 }


+Handle<LayoutDescriptor> Map::GetLayoutDescriptor() {
+  LayoutDescriptor* layout_desc = FLAG_unbox_double_fields
+                                      ? layout_descriptor()
+                                      : LayoutDescriptor::FastPointerLayout();
+  return handle(layout_desc, GetIsolate());
+}
+
+
 void Map::AppendDescriptor(Descriptor* desc) {
   DescriptorArray* descriptors = instance_descriptors();
   int number_of_own_descriptors = NumberOfOwnDescriptors();
   DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
   descriptors->Append(desc);
   SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
+
+  // This function does not support appending double field descriptors and
+  // it should never try to (otherwise, layout descriptor must be updated too).
+#ifdef DEBUG
+  PropertyDetails details = desc->GetDetails();
+  CHECK(details.type() != FIELD || !details.representation().IsDouble());
+#endif
 }


 Object* Map::GetBackPointer() {
   Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
   if (object->IsDescriptorArray()) {
     return TransitionArray::cast(object)->back_pointer_storage();
   } else {
     DCHECK(object->IsMap() || object->IsUndefined());
     return object;
(...skipping 2011 matching lines...)


 template<typename StaticVisitor>
 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
   typedef v8::String::ExternalStringResource Resource;
   StaticVisitor::VisitExternalTwoByteString(
       reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
 }


+static void IterateBodyUsingLayoutDescriptor(HeapObject* object,
+                                             int start_offset, int end_offset,
+                                             ObjectVisitor* v) {
+  DCHECK(FLAG_unbox_double_fields);
+  DCHECK(IsAligned(start_offset, kPointerSize) &&
+         IsAligned(end_offset, kPointerSize));
+
+  InobjectPropertiesHelper helper(object->map());
+  DCHECK(!helper.all_fields_tagged());
+
+  for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
+    // Visit all tagged fields.
+    if (helper.IsTagged(offset)) {
+      v->VisitPointer(HeapObject::RawField(object, offset));
+    }
+  }
+}
+
+
 template<int start_offset, int end_offset, int size>
 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
     HeapObject* obj,
     ObjectVisitor* v) {
-  v->VisitPointers(HeapObject::RawField(obj, start_offset),
-                   HeapObject::RawField(obj, end_offset));
+  if (!FLAG_unbox_double_fields ||
+      obj->map()->layout_descriptor()->IsFastPointerLayout()) {
+    v->VisitPointers(HeapObject::RawField(obj, start_offset),
+                     HeapObject::RawField(obj, end_offset));
+  } else {
+    IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
+  }
 }


 template<int start_offset>
 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
                                                        int object_size,
                                                        ObjectVisitor* v) {
-  v->VisitPointers(HeapObject::RawField(obj, start_offset),
-                   HeapObject::RawField(obj, object_size));
+  if (!FLAG_unbox_double_fields ||
+      obj->map()->layout_descriptor()->IsFastPointerLayout()) {
+    v->VisitPointers(HeapObject::RawField(obj, start_offset),
+                     HeapObject::RawField(obj, object_size));
+  } else {
+    IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
+  }
 }


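The two IterateBody specializations above fall back to IterateBodyUsingLayoutDescriptor only when a map actually has unboxed fields. A standalone sketch of the underlying idea (plain C++, with a simple bitmap standing in for the real LayoutDescriptor / InobjectPropertiesHelper) might look like:

#include <cstdint>
#include <vector>

// Hypothetical stand-ins for illustration only: bit i of 'tagged_bits' set
// means the i-th pointer-sized word of the object body holds a tagged
// pointer; cleared means it holds raw unboxed-double data.
void IterateBodyWithBitmap(uint32_t tagged_bits, int start_offset,
                           int end_offset, int pointer_size,
                           std::vector<int>* visited_offsets) {
  for (int offset = start_offset; offset < end_offset;
       offset += pointer_size) {
    int word = (offset - start_offset) / pointer_size;
    // Only tagged words are reported to the visitor; unboxed double words
    // are skipped, analogous to the helper.IsTagged(offset) check above.
    if (tagged_bits & (1u << word)) visited_offsets->push_back(offset);
  }
}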
 template<class Derived, class TableType>
 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
   TableType* table(TableType::cast(this->table()));
   int index = Smi::cast(this->index())->value();
   Object* key = table->KeyAt(index);
   DCHECK(!key->IsTheHole());
   return key;
(...skipping 49 matching lines...)
 #undef READ_SHORT_FIELD
 #undef WRITE_SHORT_FIELD
 #undef READ_BYTE_FIELD
 #undef WRITE_BYTE_FIELD
 #undef NOBARRIER_READ_BYTE_FIELD
 #undef NOBARRIER_WRITE_BYTE_FIELD

 } }  // namespace v8::internal

 #endif  // V8_OBJECTS_INL_H_
