OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 // | 4 // |
5 // Review notes: | 5 // Review notes: |
6 // | 6 // |
7 // - The use of macros in these inline functions may seem superfluous | 7 // - The use of macros in these inline functions may seem superfluous |
8 // but it is absolutely needed to make sure gcc generates optimal | 8 // but it is absolutely needed to make sure gcc generates optimal |
9 // code. gcc is not happy when attempting to inline too deep. | 9 // code. gcc is not happy when attempting to inline too deep. |
10 // | 10 // |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
49 return Smi::FromInt(value >> 1); | 49 return Smi::FromInt(value >> 1); |
50 } | 50 } |
51 | 51 |
52 | 52 |
53 PropertyDetails PropertyDetails::AsDeleted() const { | 53 PropertyDetails PropertyDetails::AsDeleted() const { |
54 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1)); | 54 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1)); |
55 return PropertyDetails(smi); | 55 return PropertyDetails(smi); |
56 } | 56 } |
57 | 57 |
58 | 58 |
59 int PropertyDetails::field_width_in_words() const { | |
60 DCHECK(type() == FIELD); | |
61 if (!FLAG_unbox_double_fields) return 1; | |
62 if (kDoubleSize == kPointerSize) return 1; | |
63 return representation().IsDouble() ? kDoubleSize / kPointerSize : 1; | |
64 } | |
65 | |
66 | |
59 #define TYPE_CHECKER(type, instancetype) \ | 67 #define TYPE_CHECKER(type, instancetype) \ |
60 bool Object::Is##type() const { \ | 68 bool Object::Is##type() const { \ |
61 return Object::IsHeapObject() && \ | 69 return Object::IsHeapObject() && \ |
62 HeapObject::cast(this)->map()->instance_type() == instancetype; \ | 70 HeapObject::cast(this)->map()->instance_type() == instancetype; \ |
63 } | 71 } |
64 | 72 |
65 | 73 |
66 #define CAST_ACCESSOR(type) \ | 74 #define CAST_ACCESSOR(type) \ |
67 type* type::cast(Object* object) { \ | 75 type* type::cast(Object* object) { \ |
68 SLOW_DCHECK(object->Is##type()); \ | 76 SLOW_DCHECK(object->Is##type()); \ |
(...skipping 628 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
697 bool Object::IsJSWeakCollection() const { | 705 bool Object::IsJSWeakCollection() const { |
698 return IsJSWeakMap() || IsJSWeakSet(); | 706 return IsJSWeakMap() || IsJSWeakSet(); |
699 } | 707 } |
700 | 708 |
701 | 709 |
702 bool Object::IsDescriptorArray() const { | 710 bool Object::IsDescriptorArray() const { |
703 return IsFixedArray(); | 711 return IsFixedArray(); |
704 } | 712 } |
705 | 713 |
706 | 714 |
715 bool Object::IsLayoutDescriptor() const { | |
716 return IsSmi() || IsFixedTypedArrayBase(); | |
717 } | |
718 | |
719 | |
707 bool Object::IsTransitionArray() const { | 720 bool Object::IsTransitionArray() const { |
708 return IsFixedArray(); | 721 return IsFixedArray(); |
709 } | 722 } |
710 | 723 |
711 | 724 |
712 bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); } | 725 bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); } |
713 | 726 |
714 | 727 |
715 bool Object::IsDeoptimizationInputData() const { | 728 bool Object::IsDeoptimizationInputData() const { |
716 // Must be a fixed array. | 729 // Must be a fixed array. |
(...skipping 1338 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2055 void JSObject::SetInternalField(int index, Smi* value) { | 2068 void JSObject::SetInternalField(int index, Smi* value) { |
2056 DCHECK(index < GetInternalFieldCount() && index >= 0); | 2069 DCHECK(index < GetInternalFieldCount() && index >= 0); |
2057 // Internal objects do follow immediately after the header, whereas in-object | 2070 // Internal objects do follow immediately after the header, whereas in-object |
2058 // properties are at the end of the object. Therefore there is no need | 2071 // properties are at the end of the object. Therefore there is no need |
2059 // to adjust the index here. | 2072 // to adjust the index here. |
2060 int offset = GetHeaderSize() + (kPointerSize * index); | 2073 int offset = GetHeaderSize() + (kPointerSize * index); |
2061 WRITE_FIELD(this, offset, value); | 2074 WRITE_FIELD(this, offset, value); |
2062 } | 2075 } |
2063 | 2076 |
2064 | 2077 |
2078 bool JSObject::IsUnboxedDoubleField(FieldIndex index) { | |
2079 if (!FLAG_unbox_double_fields) return false; | |
2080 if (index.is_hidden_field() || !index.is_inobject()) return false; | |
2081 return !map()->layout_descriptor()->IsTagged(index.property_index()); | |
2082 } | |
2083 | |
2084 | |
2085 bool Map::IsUnboxedDoubleField(FieldIndex index) { | |
2086 if (!FLAG_unbox_double_fields) return false; | |
2087 if (index.is_hidden_field() || !index.is_inobject()) return false; | |
2088 return !layout_descriptor()->IsTagged(index.property_index()); | |
2089 } | |
2090 | |
2091 | |
2065 // Access fast-case object properties at index. The use of these routines | 2092 // Access fast-case object properties at index. The use of these routines |
2066 // is needed to correctly distinguish between properties stored in-object and | 2093 // is needed to correctly distinguish between properties stored in-object and |
2067 // properties stored in the properties array. | 2094 // properties stored in the properties array. |
2068 Object* JSObject::RawFastPropertyAt(FieldIndex index) { | 2095 Object* JSObject::RawFastPropertyAt(FieldIndex index) { |
2096 DCHECK(!IsUnboxedDoubleField(index)); | |
2069 if (index.is_inobject()) { | 2097 if (index.is_inobject()) { |
2070 return READ_FIELD(this, index.offset()); | 2098 return READ_FIELD(this, index.offset()); |
2071 } else { | 2099 } else { |
2072 return properties()->get(index.outobject_array_index()); | 2100 return properties()->get(index.outobject_array_index()); |
2073 } | 2101 } |
2074 } | 2102 } |
2075 | 2103 |
2076 | 2104 |
2077 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) { | 2105 double JSObject::RawFastDoublePropertyAt(FieldIndex index) { |
2106 DCHECK(IsUnboxedDoubleField(index)); | |
2107 return READ_DOUBLE_FIELD(this, index.offset()); | |
2108 } | |
2109 | |
2110 | |
2111 void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) { | |
2078 if (index.is_inobject()) { | 2112 if (index.is_inobject()) { |
2079 int offset = index.offset(); | 2113 int offset = index.offset(); |
2080 WRITE_FIELD(this, offset, value); | 2114 WRITE_FIELD(this, offset, value); |
2081 WRITE_BARRIER(GetHeap(), this, offset, value); | 2115 WRITE_BARRIER(GetHeap(), this, offset, value); |
2082 } else { | 2116 } else { |
2083 properties()->set(index.outobject_array_index(), value); | 2117 properties()->set(index.outobject_array_index(), value); |
2084 } | 2118 } |
2085 } | 2119 } |
2086 | 2120 |
2087 | 2121 |
2122 void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) { | |
2123 WRITE_DOUBLE_FIELD(this, index.offset(), value); | |
2124 } | |
2125 | |
2126 | |
2127 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) { | |
2128 if (IsUnboxedDoubleField(index)) { | |
2129 DCHECK(value->IsMutableHeapNumber()); | |
2130 RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value()); | |
2131 } else { | |
2132 RawFastPropertyAtPut(index, value); | |
2133 } | |
2134 } | |
2135 | |
2136 | |
2088 int JSObject::GetInObjectPropertyOffset(int index) { | 2137 int JSObject::GetInObjectPropertyOffset(int index) { |
2089 return map()->GetInObjectPropertyOffset(index); | 2138 return map()->GetInObjectPropertyOffset(index); |
2090 } | 2139 } |
2091 | 2140 |
2092 | 2141 |
2093 Object* JSObject::InObjectPropertyAt(int index) { | 2142 Object* JSObject::InObjectPropertyAt(int index) { |
2094 int offset = GetInObjectPropertyOffset(index); | 2143 int offset = GetInObjectPropertyOffset(index); |
2095 return READ_FIELD(this, offset); | 2144 return READ_FIELD(this, offset); |
2096 } | 2145 } |
2097 | 2146 |
(...skipping 977 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3075 const WhitenessWitness&) { | 3124 const WhitenessWitness&) { |
3076 // Range check. | 3125 // Range check. |
3077 DCHECK(descriptor_number < number_of_descriptors()); | 3126 DCHECK(descriptor_number < number_of_descriptors()); |
3078 | 3127 |
3079 NoIncrementalWriteBarrierSet(this, | 3128 NoIncrementalWriteBarrierSet(this, |
3080 ToKeyIndex(descriptor_number), | 3129 ToKeyIndex(descriptor_number), |
3081 *desc->GetKey()); | 3130 *desc->GetKey()); |
3082 NoIncrementalWriteBarrierSet(this, | 3131 NoIncrementalWriteBarrierSet(this, |
3083 ToValueIndex(descriptor_number), | 3132 ToValueIndex(descriptor_number), |
3084 *desc->GetValue()); | 3133 *desc->GetValue()); |
3085 NoIncrementalWriteBarrierSet(this, | 3134 NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number), |
3086 ToDetailsIndex(descriptor_number), | |
3087 desc->GetDetails().AsSmi()); | 3135 desc->GetDetails().AsSmi()); |
3088 } | 3136 } |
3089 | 3137 |
3090 | 3138 |
3091 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { | 3139 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { |
3092 // Range check. | 3140 // Range check. |
3093 DCHECK(descriptor_number < number_of_descriptors()); | 3141 DCHECK(descriptor_number < number_of_descriptors()); |
3094 | 3142 |
3095 set(ToKeyIndex(descriptor_number), *desc->GetKey()); | 3143 set(ToKeyIndex(descriptor_number), *desc->GetKey()); |
3096 set(ToValueIndex(descriptor_number), *desc->GetValue()); | 3144 set(ToValueIndex(descriptor_number), *desc->GetValue()); |
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3131 DCHECK(!marking_->IsMarking() || | 3179 DCHECK(!marking_->IsMarking() || |
3132 Marking::Color(array) == Marking::WHITE_OBJECT); | 3180 Marking::Color(array) == Marking::WHITE_OBJECT); |
3133 } | 3181 } |
3134 | 3182 |
3135 | 3183 |
3136 DescriptorArray::WhitenessWitness::~WhitenessWitness() { | 3184 DescriptorArray::WhitenessWitness::~WhitenessWitness() { |
3137 marking_->LeaveNoMarkingScope(); | 3185 marking_->LeaveNoMarkingScope(); |
3138 } | 3186 } |
3139 | 3187 |
3140 | 3188 |
3189 LayoutDescriptor* LayoutDescriptor::FromSmi(Smi* smi) { | |
3190 return LayoutDescriptor::cast(smi); | |
3191 } | |
3192 | |
3193 | |
3194 Handle<LayoutDescriptor> LayoutDescriptor::New(Isolate* isolate, int length) { | |
3195 if (length <= kSmiValueSize) { | |
3196 // The whole bit vector fits into a smi. | |
3197 return handle(LayoutDescriptor::FromSmi(Smi::FromInt(0)), isolate); | |
3198 } | |
3199 | |
3200 length = (length + kNumberOfBits - 1) / kNumberOfBits; | |
3201 DCHECK(length > 0); | |
3202 | |
Toon Verwaest
2014/10/31 16:23:41
Add a comment why this is a good idea (not wasting
Igor Sheludko
2014/11/03 12:47:23
Done.
| |
3203 if (SmiValuesAre32Bits() && (length & 1)) { | |
 3204 ++length; // Pad to an even number of 32-bit words for kPointerSize alignment. | |
3205 } | |
3206 return Handle<LayoutDescriptor>::cast( | |
3207 isolate->factory()->NewFixedTypedArray(length, kExternalUint32Array)); | |
3208 } | |
3209 | |
3210 | |
3211 bool LayoutDescriptor::InobjectUnboxedField(int inobject_properties, | |
3212 PropertyDetails details) { | |
3213 if (details.type() != FIELD || !details.representation().IsDouble()) { | |
3214 return false; | |
3215 } | |
3216 int field_index = details.field_index(); | |
3217 // We care only about in-object properties. | |
3218 if (field_index >= inobject_properties) { | |
3219 return false; | |
3220 } | |
3221 return true; | |
3222 } | |
3223 | |
3224 | |
3225 LayoutDescriptor* LayoutDescriptor::FastPointerLayout() { | |
3226 return LayoutDescriptor::FromSmi(Smi::FromInt(0)); | |
3227 } | |
3228 | |
3229 | |
3230 bool LayoutDescriptor::GetIndexes(int field_index, int* layout_word_index, | |
3231 uint32_t* layout_mask) { | |
3232 if (field_index >= capacity()) return false; | |
3233 | |
3234 *layout_word_index = field_index / kNumberOfBits; | |
3235 CHECK((!IsSmi() && (*layout_word_index < length())) || | |
3236 (IsSmi() && (*layout_word_index < 1))); | |
3237 | |
3238 int layout_bit_index = field_index % kNumberOfBits; | |
3239 *layout_mask = static_cast<uint32_t>(1) << layout_bit_index; | |
3240 return true; | |
3241 } | |
3242 | |
3243 | |
3244 LayoutDescriptor* LayoutDescriptor::SetTagged(int field_index, bool tagged) { | |
3245 int layout_word_index; | |
3246 uint32_t layout_mask; | |
3247 | |
3248 if (!GetIndexes(field_index, &layout_word_index, &layout_mask)) { | |
3249 CHECK(!"Bad index"); | |
3250 return this; | |
3251 } | |
3252 | |
3253 if (IsSlowLayout()) { | |
3254 uint32_t value = get_scalar(layout_word_index); | |
3255 if (tagged) { | |
3256 value &= ~layout_mask; | |
3257 } else { | |
3258 value |= layout_mask; | |
3259 } | |
3260 set(layout_word_index, value); | |
3261 return this; | |
3262 } else { | |
3263 uint32_t value = static_cast<uint32_t>(Smi::cast(this)->value()); | |
3264 if (tagged) { | |
3265 value &= ~layout_mask; | |
3266 } else { | |
3267 value |= layout_mask; | |
3268 } | |
3269 return LayoutDescriptor::FromSmi(Smi::FromInt(static_cast<int>(value))); | |
3270 } | |
3271 } | |
3272 | |
3273 | |
3274 bool LayoutDescriptor::IsTagged(int field_index) { | |
3275 if (IsFastPointerLayout()) return true; | |
3276 | |
3277 int layout_word_index; | |
3278 uint32_t layout_mask; | |
3279 | |
3280 if (!GetIndexes(field_index, &layout_word_index, &layout_mask)) { | |
 3281 // Out of bounds queries (bits beyond the capacity) are treated as tagged. | |
3282 return true; | |
3283 } | |
3284 | |
3285 if (IsSlowLayout()) { | |
3286 uint32_t value = get_scalar(layout_word_index); | |
3287 return (value & layout_mask) == 0; | |
3288 } else { | |
3289 uint32_t value = static_cast<uint32_t>(Smi::cast(this)->value()); | |
3290 return (value & layout_mask) == 0; | |
3291 } | |
3292 } | |
3293 | |
3294 | |
3295 bool LayoutDescriptor::IsFastPointerLayout() { | |
3296 return IsSmi() && (Smi::cast(this)->value() == 0); | |
3297 } | |
3298 | |
3299 | |
3300 bool LayoutDescriptor::IsSlowLayout() { return !IsSmi(); } | |
3301 | |
3302 | |
3303 int LayoutDescriptor::capacity() { | |
3304 return IsSlowLayout() ? (length() * kNumberOfBits) : kSmiValueSize; | |
3305 } | |
3306 | |
3307 | |
3308 LayoutDescriptor* LayoutDescriptor::cast_gc_safe(Object* object) { | |
3309 if (object->IsSmi()) { | |
3310 // Either fast mode or forwarding pointer. | |
3311 LayoutDescriptor* layout_desc = reinterpret_cast<LayoutDescriptor*>(object); | |
3312 return layout_desc; | |
3313 } | |
3314 | |
3315 // This is a mixed descriptor which is a fixed typed array. | |
3316 MapWord map_word = reinterpret_cast<HeapObject*>(object)->map_word(); | |
3317 if (map_word.IsForwardingAddress()) { | |
3318 // Mark-compact has already moved layout descriptor. | |
3319 object = map_word.ToForwardingAddress(); | |
3320 } | |
3321 return LayoutDescriptor::cast(object); | |
3322 } | |
3323 | |
3324 | |
 3325 // InobjectPropertiesHelper is a helper class for querying whether the | |
 3326 // in-object property at a given offset is a Double field or not. | |
3327 InobjectPropertiesHelper::InobjectPropertiesHelper(Map* map) | |
3328 : all_fields_tagged_(true), | |
3329 header_size_(0), | |
3330 inobject_properties_count_(0), | |
3331 layout_descriptor_(LayoutDescriptor::FastPointerLayout()) { | |
3332 if (!FLAG_unbox_double_fields) return; | |
3333 | |
3334 layout_descriptor_ = map->layout_descriptor_gc_safe(); | |
3335 if (layout_descriptor_->IsFastPointerLayout()) { | |
3336 return; | |
3337 } | |
3338 | |
3339 int inobject_properties = map->inobject_properties(); | |
3340 DCHECK(inobject_properties > 0); | |
3341 header_size_ = map->instance_size() - (inobject_properties * kPointerSize); | |
3342 DCHECK(header_size_ >= 0); | |
3343 | |
3344 all_fields_tagged_ = false; | |
3345 } | |
3346 | |
3347 | |
3348 bool InobjectPropertiesHelper::IsTagged(int offset_in_bytes) { | |
3349 DCHECK(IsAligned(offset_in_bytes, kPointerSize)); | |
3350 if (all_fields_tagged_) return true; | |
3351 // Object headers do not contain non-tagged fields. | |
3352 if (offset_in_bytes < header_size_) return true; | |
3353 int field_index = (offset_in_bytes - header_size_) / kPointerSize; | |
3354 | |
3355 return layout_descriptor_->IsTagged(field_index); | |
3356 } | |
3357 | |
3358 | |
3141 template<typename Derived, typename Shape, typename Key> | 3359 template<typename Derived, typename Shape, typename Key> |
3142 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) { | 3360 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) { |
3143 const int kMinCapacity = 32; | 3361 const int kMinCapacity = 32; |
3144 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2); | 3362 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2); |
3145 if (capacity < kMinCapacity) { | 3363 if (capacity < kMinCapacity) { |
3146 capacity = kMinCapacity; // Guarantee min capacity. | 3364 capacity = kMinCapacity; // Guarantee min capacity. |
3147 } | 3365 } |
3148 return capacity; | 3366 return capacity; |
3149 } | 3367 } |
3150 | 3368 |
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3251 CAST_ACCESSOR(JSObject) | 3469 CAST_ACCESSOR(JSObject) |
3252 CAST_ACCESSOR(JSProxy) | 3470 CAST_ACCESSOR(JSProxy) |
3253 CAST_ACCESSOR(JSReceiver) | 3471 CAST_ACCESSOR(JSReceiver) |
3254 CAST_ACCESSOR(JSRegExp) | 3472 CAST_ACCESSOR(JSRegExp) |
3255 CAST_ACCESSOR(JSSet) | 3473 CAST_ACCESSOR(JSSet) |
3256 CAST_ACCESSOR(JSSetIterator) | 3474 CAST_ACCESSOR(JSSetIterator) |
3257 CAST_ACCESSOR(JSTypedArray) | 3475 CAST_ACCESSOR(JSTypedArray) |
3258 CAST_ACCESSOR(JSValue) | 3476 CAST_ACCESSOR(JSValue) |
3259 CAST_ACCESSOR(JSWeakMap) | 3477 CAST_ACCESSOR(JSWeakMap) |
3260 CAST_ACCESSOR(JSWeakSet) | 3478 CAST_ACCESSOR(JSWeakSet) |
3479 CAST_ACCESSOR(LayoutDescriptor) | |
3261 CAST_ACCESSOR(Map) | 3480 CAST_ACCESSOR(Map) |
3262 CAST_ACCESSOR(MapCache) | 3481 CAST_ACCESSOR(MapCache) |
3263 CAST_ACCESSOR(Name) | 3482 CAST_ACCESSOR(Name) |
3264 CAST_ACCESSOR(NameDictionary) | 3483 CAST_ACCESSOR(NameDictionary) |
3265 CAST_ACCESSOR(NormalizedMapCache) | 3484 CAST_ACCESSOR(NormalizedMapCache) |
3266 CAST_ACCESSOR(Object) | 3485 CAST_ACCESSOR(Object) |
3267 CAST_ACCESSOR(ObjectHashTable) | 3486 CAST_ACCESSOR(ObjectHashTable) |
3268 CAST_ACCESSOR(Oddball) | 3487 CAST_ACCESSOR(Oddball) |
3269 CAST_ACCESSOR(OrderedHashMap) | 3488 CAST_ACCESSOR(OrderedHashMap) |
3270 CAST_ACCESSOR(OrderedHashSet) | 3489 CAST_ACCESSOR(OrderedHashSet) |
(...skipping 1863 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
5134 transitions->set_back_pointer_storage(map->GetBackPointer()); | 5353 transitions->set_back_pointer_storage(map->GetBackPointer()); |
5135 } else if (!map->transitions()->IsFullTransitionArray()) { | 5354 } else if (!map->transitions()->IsFullTransitionArray()) { |
5136 transitions = TransitionArray::ExtendToFullTransitionArray(map); | 5355 transitions = TransitionArray::ExtendToFullTransitionArray(map); |
5137 } else { | 5356 } else { |
5138 return; | 5357 return; |
5139 } | 5358 } |
5140 map->set_transitions(*transitions); | 5359 map->set_transitions(*transitions); |
5141 } | 5360 } |
5142 | 5361 |
5143 | 5362 |
5144 void Map::InitializeDescriptors(DescriptorArray* descriptors) { | 5363 LayoutDescriptor* Map::layout_descriptor_gc_safe() { |
5364 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset); | |
5365 return LayoutDescriptor::cast_gc_safe(layout_desc); | |
5366 } | |
5367 | |
5368 | |
5369 void Map::UpdateDescriptors(DescriptorArray* descriptors, | |
5370 LayoutDescriptor* layout_desc) { | |
5371 set_instance_descriptors(descriptors); | |
5372 if (FLAG_unbox_double_fields) { | |
5373 if (layout_descriptor()->IsSlowLayout()) { | |
5374 set_layout_descriptor(layout_desc); | |
5375 } | |
5376 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this)); | |
5377 DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this)); | |
5378 } | |
5379 } | |
5380 | |
5381 | |
5382 void Map::InitializeDescriptors(DescriptorArray* descriptors, | |
5383 LayoutDescriptor* layout_desc) { | |
5145 int len = descriptors->number_of_descriptors(); | 5384 int len = descriptors->number_of_descriptors(); |
5146 set_instance_descriptors(descriptors); | 5385 set_instance_descriptors(descriptors); |
5147 SetNumberOfOwnDescriptors(len); | 5386 SetNumberOfOwnDescriptors(len); |
5387 | |
5388 if (FLAG_unbox_double_fields) { | |
5389 set_layout_descriptor(layout_desc); | |
5390 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this)); | |
5391 set_visitor_id(StaticVisitorBase::GetVisitorId(this)); | |
5392 } | |
5148 } | 5393 } |
5149 | 5394 |
5150 | 5395 |
5151 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset) | 5396 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset) |
5397 ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset) | |
5152 | 5398 |
5153 | 5399 |
5154 void Map::set_bit_field3(uint32_t bits) { | 5400 void Map::set_bit_field3(uint32_t bits) { |
5155 if (kInt32Size != kPointerSize) { | 5401 if (kInt32Size != kPointerSize) { |
5156 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0); | 5402 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0); |
5157 } | 5403 } |
5158 WRITE_UINT32_FIELD(this, kBitField3Offset, bits); | 5404 WRITE_UINT32_FIELD(this, kBitField3Offset, bits); |
5159 } | 5405 } |
5160 | 5406 |
5161 | 5407 |
5162 uint32_t Map::bit_field3() { | 5408 uint32_t Map::bit_field3() { |
5163 return READ_UINT32_FIELD(this, kBitField3Offset); | 5409 return READ_UINT32_FIELD(this, kBitField3Offset); |
5164 } | 5410 } |
5165 | 5411 |
5166 | 5412 |
5413 Handle<LayoutDescriptor> Map::GetLayoutDescriptor() { | |
5414 LayoutDescriptor* layout_desc = FLAG_unbox_double_fields | |
5415 ? layout_descriptor() | |
5416 : LayoutDescriptor::FastPointerLayout(); | |
5417 return handle(layout_desc, GetIsolate()); | |
5418 } | |
5419 | |
5420 | |
5167 void Map::AppendDescriptor(Descriptor* desc) { | 5421 void Map::AppendDescriptor(Descriptor* desc) { |
5168 DescriptorArray* descriptors = instance_descriptors(); | 5422 DescriptorArray* descriptors = instance_descriptors(); |
5169 int number_of_own_descriptors = NumberOfOwnDescriptors(); | 5423 int number_of_own_descriptors = NumberOfOwnDescriptors(); |
5170 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors); | 5424 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors); |
5171 descriptors->Append(desc); | 5425 descriptors->Append(desc); |
5172 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1); | 5426 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1); |
5427 | |
5428 // This function does not support appending double field descriptors and | |
5429 // it should never try to (otherwise, layout descriptor must be updated too). | |
5430 #ifdef DEBUG | |
5431 PropertyDetails details = desc->GetDetails(); | |
5432 CHECK(details.type() != FIELD || !details.representation().IsDouble()); | |
5433 #endif | |
5173 } | 5434 } |
5174 | 5435 |
5175 | 5436 |
5176 Object* Map::GetBackPointer() { | 5437 Object* Map::GetBackPointer() { |
5177 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); | 5438 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); |
5178 if (object->IsDescriptorArray()) { | 5439 if (object->IsDescriptorArray()) { |
5179 return TransitionArray::cast(object)->back_pointer_storage(); | 5440 return TransitionArray::cast(object)->back_pointer_storage(); |
5180 } else { | 5441 } else { |
5181 DCHECK(object->IsMap() || object->IsUndefined()); | 5442 DCHECK(object->IsMap() || object->IsUndefined()); |
5182 return object; | 5443 return object; |
(...skipping 2011 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
7194 | 7455 |
7195 | 7456 |
7196 template<typename StaticVisitor> | 7457 template<typename StaticVisitor> |
7197 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() { | 7458 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() { |
7198 typedef v8::String::ExternalStringResource Resource; | 7459 typedef v8::String::ExternalStringResource Resource; |
7199 StaticVisitor::VisitExternalTwoByteString( | 7460 StaticVisitor::VisitExternalTwoByteString( |
7200 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); | 7461 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); |
7201 } | 7462 } |
7202 | 7463 |
7203 | 7464 |
7465 static void IterateBodyUsingLayoutDescriptor(HeapObject* object, | |
7466 int start_offset, int end_offset, | |
7467 ObjectVisitor* v) { | |
7468 DCHECK(FLAG_unbox_double_fields); | |
7469 DCHECK(IsAligned(start_offset, kPointerSize) && | |
7470 IsAligned(end_offset, kPointerSize)); | |
7471 | |
7472 InobjectPropertiesHelper helper(object->map()); | |
7473 DCHECK(!helper.all_fields_tagged()); | |
7474 | |
7475 for (int offset = start_offset; offset < end_offset; offset += kPointerSize) { | |
7476 // Visit all tagged fields. | |
7477 if (helper.IsTagged(offset)) { | |
7478 v->VisitPointer(HeapObject::RawField(object, offset)); | |
7479 } | |
7480 } | |
7481 } | |
7482 | |
7483 | |
7204 template<int start_offset, int end_offset, int size> | 7484 template<int start_offset, int end_offset, int size> |
7205 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody( | 7485 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody( |
7206 HeapObject* obj, | 7486 HeapObject* obj, |
7207 ObjectVisitor* v) { | 7487 ObjectVisitor* v) { |
7488 if (!FLAG_unbox_double_fields || | |
7489 obj->map()->layout_descriptor()->IsFastPointerLayout()) { | |
7208 v->VisitPointers(HeapObject::RawField(obj, start_offset), | 7490 v->VisitPointers(HeapObject::RawField(obj, start_offset), |
7209 HeapObject::RawField(obj, end_offset)); | 7491 HeapObject::RawField(obj, end_offset)); |
7492 } else { | |
7493 IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v); | |
7494 } | |
7210 } | 7495 } |
7211 | 7496 |
7212 | 7497 |
7213 template<int start_offset> | 7498 template<int start_offset> |
7214 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj, | 7499 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj, |
7215 int object_size, | 7500 int object_size, |
7216 ObjectVisitor* v) { | 7501 ObjectVisitor* v) { |
7217 v->VisitPointers(HeapObject::RawField(obj, start_offset), | 7502 if (!FLAG_unbox_double_fields || |
7218 HeapObject::RawField(obj, object_size)); | 7503 obj->map()->layout_descriptor()->IsFastPointerLayout()) { |
7504 v->VisitPointers(HeapObject::RawField(obj, start_offset), | |
7505 HeapObject::RawField(obj, object_size)); | |
7506 } else { | |
7507 IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v); | |
7508 } | |
7219 } | 7509 } |
7220 | 7510 |
7221 | 7511 |
7222 template<class Derived, class TableType> | 7512 template<class Derived, class TableType> |
7223 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() { | 7513 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() { |
7224 TableType* table(TableType::cast(this->table())); | 7514 TableType* table(TableType::cast(this->table())); |
7225 int index = Smi::cast(this->index())->value(); | 7515 int index = Smi::cast(this->index())->value(); |
7226 Object* key = table->KeyAt(index); | 7516 Object* key = table->KeyAt(index); |
7227 DCHECK(!key->IsTheHole()); | 7517 DCHECK(!key->IsTheHole()); |
7228 return key; | 7518 return key; |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
7278 #undef READ_SHORT_FIELD | 7568 #undef READ_SHORT_FIELD |
7279 #undef WRITE_SHORT_FIELD | 7569 #undef WRITE_SHORT_FIELD |
7280 #undef READ_BYTE_FIELD | 7570 #undef READ_BYTE_FIELD |
7281 #undef WRITE_BYTE_FIELD | 7571 #undef WRITE_BYTE_FIELD |
7282 #undef NOBARRIER_READ_BYTE_FIELD | 7572 #undef NOBARRIER_READ_BYTE_FIELD |
7283 #undef NOBARRIER_WRITE_BYTE_FIELD | 7573 #undef NOBARRIER_WRITE_BYTE_FIELD |
7284 | 7574 |
7285 } } // namespace v8::internal | 7575 } } // namespace v8::internal |
7286 | 7576 |
7287 #endif // V8_OBJECTS_INL_H_ | 7577 #endif // V8_OBJECTS_INL_H_ |
OLD | NEW |