Chromium Code Reviews

Side by Side Diff: src/objects-inl.h

Issue 391693002: In-object double fields unboxing (for 64-bit only). (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressing comments Created 6 years, 1 month ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 // 4 //
5 // Review notes: 5 // Review notes:
6 // 6 //
7 // - The use of macros in these inline functions may seem superfluous 7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal 8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep. 9 // code. gcc is not happy when attempting to inline too deep.
10 // 10 //
(...skipping 38 matching lines...)
49 return Smi::FromInt(value >> 1); 49 return Smi::FromInt(value >> 1);
50 } 50 }
51 51
52 52
53 PropertyDetails PropertyDetails::AsDeleted() const { 53 PropertyDetails PropertyDetails::AsDeleted() const {
54 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1)); 54 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
55 return PropertyDetails(smi); 55 return PropertyDetails(smi);
56 } 56 }
57 57
58 58
59 int PropertyDetails::field_width_in_words() const {
60 DCHECK(type() == FIELD);
61 if (!FLAG_unbox_double_fields) return 1;
62 if (kDoubleSize == kPointerSize) return 1;
63 return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
64 }
65
66
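The new PropertyDetails::field_width_in_words() above decides how many pointer-sized words an in-object field occupies. Below is a minimal standalone sketch of the same decision, assuming the usual constants (kDoubleSize == 8, kPointerSize == 8 on x64 and 4 on ia32); the function name is hypothetical and only mirrors the logic in the patch.

// Illustrative sketch only; mirrors field_width_in_words() above.
int FieldWidthInWords(bool unbox_double_fields, bool is_double_field,
                      int double_size, int pointer_size) {
  if (!unbox_double_fields) return 1;          // boxed: the field is one tagged slot
  if (double_size == pointer_size) return 1;   // x64: an unboxed double fits in one word
  // A 32-bit target would need kDoubleSize / kPointerSize == 2 words per unboxed double.
  return is_double_field ? double_size / pointer_size : 1;
}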
59 #define TYPE_CHECKER(type, instancetype) \ 67 #define TYPE_CHECKER(type, instancetype) \
60 bool Object::Is##type() const { \ 68 bool Object::Is##type() const { \
61 return Object::IsHeapObject() && \ 69 return Object::IsHeapObject() && \
62 HeapObject::cast(this)->map()->instance_type() == instancetype; \ 70 HeapObject::cast(this)->map()->instance_type() == instancetype; \
63 } 71 }
64 72
65 73
66 #define CAST_ACCESSOR(type) \ 74 #define CAST_ACCESSOR(type) \
67 type* type::cast(Object* object) { \ 75 type* type::cast(Object* object) { \
68 SLOW_DCHECK(object->Is##type()); \ 76 SLOW_DCHECK(object->Is##type()); \
(...skipping 628 matching lines...)
697 bool Object::IsJSWeakCollection() const { 705 bool Object::IsJSWeakCollection() const {
698 return IsJSWeakMap() || IsJSWeakSet(); 706 return IsJSWeakMap() || IsJSWeakSet();
699 } 707 }
700 708
701 709
702 bool Object::IsDescriptorArray() const { 710 bool Object::IsDescriptorArray() const {
703 return IsFixedArray(); 711 return IsFixedArray();
704 } 712 }
705 713
706 714
715 bool Object::IsLayoutDescriptor() const {
716 return IsSmi() || IsFixedTypedArrayBase();
717 }
718
719
707 bool Object::IsTransitionArray() const { 720 bool Object::IsTransitionArray() const {
708 return IsFixedArray(); 721 return IsFixedArray();
709 } 722 }
710 723
711 724
712 bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); } 725 bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }
713 726
714 727
715 bool Object::IsDeoptimizationInputData() const { 728 bool Object::IsDeoptimizationInputData() const {
716 // Must be a fixed array. 729 // Must be a fixed array.
(...skipping 1338 matching lines...)
2055 void JSObject::SetInternalField(int index, Smi* value) { 2068 void JSObject::SetInternalField(int index, Smi* value) {
2056 DCHECK(index < GetInternalFieldCount() && index >= 0); 2069 DCHECK(index < GetInternalFieldCount() && index >= 0);
2057 // Internal objects do follow immediately after the header, whereas in-object 2070 // Internal objects do follow immediately after the header, whereas in-object
2058 // properties are at the end of the object. Therefore there is no need 2071 // properties are at the end of the object. Therefore there is no need
2059 // to adjust the index here. 2072 // to adjust the index here.
2060 int offset = GetHeaderSize() + (kPointerSize * index); 2073 int offset = GetHeaderSize() + (kPointerSize * index);
2061 WRITE_FIELD(this, offset, value); 2074 WRITE_FIELD(this, offset, value);
2062 } 2075 }
2063 2076
2064 2077
2078 bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
2079 if (!FLAG_unbox_double_fields) return false;
2080 if (index.is_hidden_field() || !index.is_inobject()) return false;
2081 return !map()->layout_descriptor()->IsTagged(index.property_index());
2082 }
2083
2084
2085 bool Map::IsUnboxedDoubleField(FieldIndex index) {
2086 if (!FLAG_unbox_double_fields) return false;
2087 if (index.is_hidden_field() || !index.is_inobject()) return false;
2088 return !layout_descriptor()->IsTagged(index.property_index());
2089 }
2090
2091
2065 // Access fast-case object properties at index. The use of these routines 2092 // Access fast-case object properties at index. The use of these routines
2066 // is needed to correctly distinguish between properties stored in-object and 2093 // is needed to correctly distinguish between properties stored in-object and
2067 // properties stored in the properties array. 2094 // properties stored in the properties array.
2068 Object* JSObject::RawFastPropertyAt(FieldIndex index) { 2095 Object* JSObject::RawFastPropertyAt(FieldIndex index) {
2096 DCHECK(!IsUnboxedDoubleField(index));
2069 if (index.is_inobject()) { 2097 if (index.is_inobject()) {
2070 return READ_FIELD(this, index.offset()); 2098 return READ_FIELD(this, index.offset());
2071 } else { 2099 } else {
2072 return properties()->get(index.outobject_array_index()); 2100 return properties()->get(index.outobject_array_index());
2073 } 2101 }
2074 } 2102 }
2075 2103
2076 2104
2077 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) { 2105 double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
2106 DCHECK(IsUnboxedDoubleField(index));
2107 return READ_DOUBLE_FIELD(this, index.offset());
2108 }
2109
2110
2111 void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
2078 if (index.is_inobject()) { 2112 if (index.is_inobject()) {
2079 int offset = index.offset(); 2113 int offset = index.offset();
2080 WRITE_FIELD(this, offset, value); 2114 WRITE_FIELD(this, offset, value);
2081 WRITE_BARRIER(GetHeap(), this, offset, value); 2115 WRITE_BARRIER(GetHeap(), this, offset, value);
2082 } else { 2116 } else {
2083 properties()->set(index.outobject_array_index(), value); 2117 properties()->set(index.outobject_array_index(), value);
2084 } 2118 }
2085 } 2119 }
2086 2120
2087 2121
2122 void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
2123 WRITE_DOUBLE_FIELD(this, index.offset(), value);
2124 }
2125
2126
2127 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2128 if (IsUnboxedDoubleField(index)) {
2129 DCHECK(value->IsMutableHeapNumber());
2130 RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
2131 } else {
2132 RawFastPropertyAtPut(index, value);
2133 }
2134 }
2135
2136
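The accessors above split fast-property access into a raw tagged path and a raw double path. A hedged usage sketch follows (the helper is hypothetical, not part of the patch) of how a caller might read a numeric fast property, assuming the field actually holds a number:

// Hypothetical helper, for illustration only.
double ReadNumericFastProperty(JSObject* object, FieldIndex index) {
  if (object->IsUnboxedDoubleField(index)) {
    // The IEEE-754 bits are stored in place; no HeapNumber is involved.
    return object->RawFastDoublePropertyAt(index);
  }
  // Tagged path: the slot holds a Smi or a (mutable) HeapNumber.
  Object* value = object->RawFastPropertyAt(index);
  return value->Number();
}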
2088 int JSObject::GetInObjectPropertyOffset(int index) { 2137 int JSObject::GetInObjectPropertyOffset(int index) {
2089 return map()->GetInObjectPropertyOffset(index); 2138 return map()->GetInObjectPropertyOffset(index);
2090 } 2139 }
2091 2140
2092 2141
2093 Object* JSObject::InObjectPropertyAt(int index) { 2142 Object* JSObject::InObjectPropertyAt(int index) {
2094 int offset = GetInObjectPropertyOffset(index); 2143 int offset = GetInObjectPropertyOffset(index);
2095 return READ_FIELD(this, offset); 2144 return READ_FIELD(this, offset);
2096 } 2145 }
2097 2146
(...skipping 977 matching lines...)
3075 const WhitenessWitness&) { 3124 const WhitenessWitness&) {
3076 // Range check. 3125 // Range check.
3077 DCHECK(descriptor_number < number_of_descriptors()); 3126 DCHECK(descriptor_number < number_of_descriptors());
3078 3127
3079 NoIncrementalWriteBarrierSet(this, 3128 NoIncrementalWriteBarrierSet(this,
3080 ToKeyIndex(descriptor_number), 3129 ToKeyIndex(descriptor_number),
3081 *desc->GetKey()); 3130 *desc->GetKey());
3082 NoIncrementalWriteBarrierSet(this, 3131 NoIncrementalWriteBarrierSet(this,
3083 ToValueIndex(descriptor_number), 3132 ToValueIndex(descriptor_number),
3084 *desc->GetValue()); 3133 *desc->GetValue());
3085 NoIncrementalWriteBarrierSet(this, 3134 NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
3086 ToDetailsIndex(descriptor_number),
3087 desc->GetDetails().AsSmi()); 3135 desc->GetDetails().AsSmi());
3088 } 3136 }
3089 3137
3090 3138
3091 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { 3139 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3092 // Range check. 3140 // Range check.
3093 DCHECK(descriptor_number < number_of_descriptors()); 3141 DCHECK(descriptor_number < number_of_descriptors());
3094 3142
3095 set(ToKeyIndex(descriptor_number), *desc->GetKey()); 3143 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3096 set(ToValueIndex(descriptor_number), *desc->GetValue()); 3144 set(ToValueIndex(descriptor_number), *desc->GetValue());
(...skipping 34 matching lines...)
3131 DCHECK(!marking_->IsMarking() || 3179 DCHECK(!marking_->IsMarking() ||
3132 Marking::Color(array) == Marking::WHITE_OBJECT); 3180 Marking::Color(array) == Marking::WHITE_OBJECT);
3133 } 3181 }
3134 3182
3135 3183
3136 DescriptorArray::WhitenessWitness::~WhitenessWitness() { 3184 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3137 marking_->LeaveNoMarkingScope(); 3185 marking_->LeaveNoMarkingScope();
3138 } 3186 }
3139 3187
3140 3188
3189 LayoutDescriptor* LayoutDescriptor::FromSmi(Smi* smi) {
3190 return LayoutDescriptor::cast(smi);
3191 }
3192
3193
3194 Handle<LayoutDescriptor> LayoutDescriptor::New(Isolate* isolate, int length) {
3195 if (length <= kSmiValueSize) {
3196 // The whole bit vector fits into a smi.
3197 return handle(LayoutDescriptor::FromSmi(Smi::FromInt(0)), isolate);
3198 }
3199
3200 length = (length + kNumberOfBits - 1) / kNumberOfBits;
3201 DCHECK(length > 0);
3202
3203 if (SmiValuesAre32Bits() && (length & 1)) {
3204 // On 64-bit systems, if the length is odd, then the half-word space would be
3205 // lost anyway (due to alignment and the fact that we are allocating a
3206 // uint32-typed array), so we increase the length of the allocated array
3207 // to utilize that "lost" space, which could also help to avoid layout
3208 // descriptor reallocations.
3209 ++length;
3210 }
3211 return Handle<LayoutDescriptor>::cast(
3212 isolate->factory()->NewFixedTypedArray(length, kExternalUint32Array));
3213 }
3214
3215
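A worked example of the sizing in LayoutDescriptor::New(), for the slow case where the field count exceeds kSmiValueSize. It assumes kNumberOfBits == 32 (one layout bit per field, packed into uint32 entries); the helper name is hypothetical.

// Illustrative only; mirrors the rounding above.
// E.g. 40 fields -> 2 uint32 words; 70 fields -> 3 words, bumped to 4 on x64.
int LayoutBackingStoreWords(int field_count, int bits_per_word,
                            bool smi_values_are_32_bits) {
  int words = (field_count + bits_per_word - 1) / bits_per_word;  // round up to whole words
  // On 64-bit targets an odd word count would waste half of the last
  // pointer-sized slot, so grow into it up front; this also postpones
  // future layout descriptor reallocations.
  if (smi_values_are_32_bits && (words & 1)) ++words;
  return words;
}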
3216 bool LayoutDescriptor::InobjectUnboxedField(int inobject_properties,
3217 PropertyDetails details) {
3218 if (details.type() != FIELD || !details.representation().IsDouble()) {
3219 return false;
3220 }
3221 // We care only about in-object properties.
3222 return details.field_index() < inobject_properties;
3223 }
3224
3225
3226 LayoutDescriptor* LayoutDescriptor::FastPointerLayout() {
3227 return LayoutDescriptor::FromSmi(Smi::FromInt(0));
3228 }
3229
3230
3231 bool LayoutDescriptor::GetIndexes(int field_index, int* layout_word_index,
3232 uint32_t* layout_mask) {
3233 if (static_cast<unsigned>(field_index) >= static_cast<unsigned>(capacity())) {
3234 return false;
3235 }
3236
3237 *layout_word_index = field_index / kNumberOfBits;
3238 CHECK((!IsSmi() && (*layout_word_index < length())) ||
3239 (IsSmi() && (*layout_word_index < 1)));
3240
3241 int layout_bit_index = field_index % kNumberOfBits;
3242 *layout_mask = static_cast<uint32_t>(1) << layout_bit_index;
3243 return true;
3244 }
3245
3246
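A worked example of the index/mask arithmetic in GetIndexes(), assuming kNumberOfBits == 32: field index 37 falls into layout word 1, bit 5.

#include <cstdint>
#include <cstdio>

// Illustrative only, assuming kNumberOfBits == 32.
int main() {
  const int kNumberOfBits = 32;
  const int field_index = 37;
  int layout_word_index = field_index / kNumberOfBits;            // 1
  uint32_t layout_mask =
      static_cast<uint32_t>(1) << (field_index % kNumberOfBits);  // 1 << 5 == 0x20
  std::printf("word %d, mask 0x%x\n", layout_word_index, layout_mask);
  return 0;
}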
3247 LayoutDescriptor* LayoutDescriptor::SetTagged(int field_index, bool tagged) {
3248 int layout_word_index;
3249 uint32_t layout_mask;
3250
3251 if (!GetIndexes(field_index, &layout_word_index, &layout_mask)) {
3252 CHECK(false);
3253 return this;
3254 }
3255
3256 if (IsSlowLayout()) {
3257 uint32_t value = get_scalar(layout_word_index);
3258 if (tagged) {
3259 value &= ~layout_mask;
3260 } else {
3261 value |= layout_mask;
3262 }
3263 set(layout_word_index, value);
3264 return this;
3265 } else {
3266 uint32_t value = static_cast<uint32_t>(Smi::cast(this)->value());
3267 if (tagged) {
3268 value &= ~layout_mask;
3269 } else {
3270 value |= layout_mask;
3271 }
3272 return LayoutDescriptor::FromSmi(Smi::FromInt(static_cast<int>(value)));
3273 }
3274 }
3275
3276
3277 bool LayoutDescriptor::IsTagged(int field_index) {
3278 if (IsFastPointerLayout()) return true;
3279
3280 int layout_word_index;
3281 uint32_t layout_mask;
3282
3283 if (!GetIndexes(field_index, &layout_word_index, &layout_mask)) {
3284 // Out of bounds queries are considered tagged.
3285 return true;
3286 }
3287
3288 if (IsSlowLayout()) {
3289 uint32_t value = get_scalar(layout_word_index);
3290 return (value & layout_mask) == 0;
3291 } else {
3292 uint32_t value = static_cast<uint32_t>(Smi::cast(this)->value());
3293 return (value & layout_mask) == 0;
3294 }
3295 }
3296
3297
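The bit semantics used by SetTagged()/IsTagged() above, in a self-contained sketch (illustrative only): a clear bit means a tagged field, a set bit marks an unboxed double, so the all-zero smi returned by FastPointerLayout() stands for "every field is tagged".

#include <cstdint>

// Illustrative only: the encoding, detached from the heap objects.
void LayoutBitsExample() {
  uint32_t layout_bits = 0;                             // FastPointerLayout(): all tagged
  layout_bits |= 1u << 2;                               // SetTagged(2, false): double at field 2
  bool field2_tagged = (layout_bits & (1u << 2)) == 0;  // false: unboxed double
  bool field5_tagged = (layout_bits & (1u << 5)) == 0;  // true: ordinary tagged slot
  (void)field2_tagged;
  (void)field5_tagged;
}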
3298 bool LayoutDescriptor::IsFastPointerLayout() {
3299 return IsSmi() && (Smi::cast(this)->value() == 0);
3300 }
3301
3302
3303 bool LayoutDescriptor::IsSlowLayout() { return !IsSmi(); }
3304
3305
3306 int LayoutDescriptor::capacity() {
3307 return IsSlowLayout() ? (length() * kNumberOfBits) : kSmiValueSize;
3308 }
3309
3310
3311 LayoutDescriptor* LayoutDescriptor::cast_gc_safe(Object* object) {
3312 if (object->IsSmi()) {
3313 // Fast mode layout descriptor.
3314 return reinterpret_cast<LayoutDescriptor*>(object);
3315 }
3316
3317 // This is a mixed descriptor which is a fixed typed array.
3318 MapWord map_word = reinterpret_cast<HeapObject*>(object)->map_word();
3319 if (map_word.IsForwardingAddress()) {
3320 // Mark-compact has already moved layout descriptor.
3321 object = map_word.ToForwardingAddress();
3322 }
3323 return LayoutDescriptor::cast(object);
3324 }
3325
3326
3327 // InobjectPropertiesHelper is a helper class for querying whether the
3328 // in-object property at the given offset is an unboxed double or not.
3329 InobjectPropertiesHelper::InobjectPropertiesHelper(Map* map)
3330 : all_fields_tagged_(true),
3331 header_size_(0),
3332 inobject_properties_count_(0),
3333 layout_descriptor_(LayoutDescriptor::FastPointerLayout()) {
3334 if (!FLAG_unbox_double_fields) return;
3335
3336 layout_descriptor_ = map->layout_descriptor_gc_safe();
3337 if (layout_descriptor_->IsFastPointerLayout()) {
3338 return;
3339 }
3340
3341 int inobject_properties = map->inobject_properties();
3342 DCHECK(inobject_properties > 0);
3343 header_size_ = map->instance_size() - (inobject_properties * kPointerSize);
3344 DCHECK(header_size_ >= 0);
3345
3346 all_fields_tagged_ = false;
3347 }
3348
3349
3350 bool InobjectPropertiesHelper::IsTagged(int offset_in_bytes) {
3351 DCHECK(IsAligned(offset_in_bytes, kPointerSize));
3352 if (all_fields_tagged_) return true;
3353 // Object headers do not contain non-tagged fields.
3354 if (offset_in_bytes < header_size_) return true;
3355 int field_index = (offset_in_bytes - header_size_) / kPointerSize;
3356
3357 return layout_descriptor_->IsTagged(field_index);
3358 }
3359
3360
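A worked example (illustrative numbers, x64 with kPointerSize == 8) of the offset translation done by InobjectPropertiesHelper::IsTagged(): for a map with instance_size == 64 and 4 in-object properties, header_size_ is 64 - 4 * 8 == 32, so byte offset 48 maps to in-object field index (48 - 32) / 8 == 2, which is the index handed to LayoutDescriptor::IsTagged().

// Illustrative numbers only; mirrors the translation above.
void OffsetToFieldIndexExample() {
  const int pointer_size = 8;                        // x64
  int instance_size = 64, inobject_properties = 4;   // hypothetical map
  int header_size = instance_size - inobject_properties * pointer_size;       // 32
  int field_index = (48 /* offset_in_bytes */ - header_size) / pointer_size;  // 2
  (void)field_index;  // this is the index passed to LayoutDescriptor::IsTagged()
}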
3141 template<typename Derived, typename Shape, typename Key> 3361 template<typename Derived, typename Shape, typename Key>
3142 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) { 3362 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
3143 const int kMinCapacity = 32; 3363 const int kMinCapacity = 32;
3144 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2); 3364 int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
3145 if (capacity < kMinCapacity) { 3365 if (capacity < kMinCapacity) {
3146 capacity = kMinCapacity; // Guarantee min capacity. 3366 capacity = kMinCapacity; // Guarantee min capacity.
3147 } 3367 }
3148 return capacity; 3368 return capacity;
3149 } 3369 }
3150 3370
(...skipping 100 matching lines...)
3251 CAST_ACCESSOR(JSObject) 3471 CAST_ACCESSOR(JSObject)
3252 CAST_ACCESSOR(JSProxy) 3472 CAST_ACCESSOR(JSProxy)
3253 CAST_ACCESSOR(JSReceiver) 3473 CAST_ACCESSOR(JSReceiver)
3254 CAST_ACCESSOR(JSRegExp) 3474 CAST_ACCESSOR(JSRegExp)
3255 CAST_ACCESSOR(JSSet) 3475 CAST_ACCESSOR(JSSet)
3256 CAST_ACCESSOR(JSSetIterator) 3476 CAST_ACCESSOR(JSSetIterator)
3257 CAST_ACCESSOR(JSTypedArray) 3477 CAST_ACCESSOR(JSTypedArray)
3258 CAST_ACCESSOR(JSValue) 3478 CAST_ACCESSOR(JSValue)
3259 CAST_ACCESSOR(JSWeakMap) 3479 CAST_ACCESSOR(JSWeakMap)
3260 CAST_ACCESSOR(JSWeakSet) 3480 CAST_ACCESSOR(JSWeakSet)
3481 CAST_ACCESSOR(LayoutDescriptor)
3261 CAST_ACCESSOR(Map) 3482 CAST_ACCESSOR(Map)
3262 CAST_ACCESSOR(MapCache) 3483 CAST_ACCESSOR(MapCache)
3263 CAST_ACCESSOR(Name) 3484 CAST_ACCESSOR(Name)
3264 CAST_ACCESSOR(NameDictionary) 3485 CAST_ACCESSOR(NameDictionary)
3265 CAST_ACCESSOR(NormalizedMapCache) 3486 CAST_ACCESSOR(NormalizedMapCache)
3266 CAST_ACCESSOR(Object) 3487 CAST_ACCESSOR(Object)
3267 CAST_ACCESSOR(ObjectHashTable) 3488 CAST_ACCESSOR(ObjectHashTable)
3268 CAST_ACCESSOR(Oddball) 3489 CAST_ACCESSOR(Oddball)
3269 CAST_ACCESSOR(OrderedHashMap) 3490 CAST_ACCESSOR(OrderedHashMap)
3270 CAST_ACCESSOR(OrderedHashSet) 3491 CAST_ACCESSOR(OrderedHashSet)
(...skipping 1863 matching lines...)
5134 transitions->set_back_pointer_storage(map->GetBackPointer()); 5355 transitions->set_back_pointer_storage(map->GetBackPointer());
5135 } else if (!map->transitions()->IsFullTransitionArray()) { 5356 } else if (!map->transitions()->IsFullTransitionArray()) {
5136 transitions = TransitionArray::ExtendToFullTransitionArray(map); 5357 transitions = TransitionArray::ExtendToFullTransitionArray(map);
5137 } else { 5358 } else {
5138 return; 5359 return;
5139 } 5360 }
5140 map->set_transitions(*transitions); 5361 map->set_transitions(*transitions);
5141 } 5362 }
5142 5363
5143 5364
5144 void Map::InitializeDescriptors(DescriptorArray* descriptors) { 5365 LayoutDescriptor* Map::layout_descriptor_gc_safe() {
5366 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5367 return LayoutDescriptor::cast_gc_safe(layout_desc);
5368 }
5369
5370
5371 void Map::UpdateDescriptors(DescriptorArray* descriptors,
5372 LayoutDescriptor* layout_desc) {
5373 set_instance_descriptors(descriptors);
5374 if (FLAG_unbox_double_fields) {
5375 if (layout_descriptor()->IsSlowLayout()) {
5376 set_layout_descriptor(layout_desc);
5377 }
5378 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5379 DCHECK(visitor_id() == StaticVisitorBase::GetVisitorId(this));
5380 }
5381 }
5382
5383
5384 void Map::InitializeDescriptors(DescriptorArray* descriptors,
5385 LayoutDescriptor* layout_desc) {
5145 int len = descriptors->number_of_descriptors(); 5386 int len = descriptors->number_of_descriptors();
5146 set_instance_descriptors(descriptors); 5387 set_instance_descriptors(descriptors);
5147 SetNumberOfOwnDescriptors(len); 5388 SetNumberOfOwnDescriptors(len);
5389
5390 if (FLAG_unbox_double_fields) {
5391 set_layout_descriptor(layout_desc);
5392 SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
5393 set_visitor_id(StaticVisitorBase::GetVisitorId(this));
5394 }
5148 } 5395 }
5149 5396
5150 5397
5151 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset) 5398 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
5399 ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)
5152 5400
5153 5401
5154 void Map::set_bit_field3(uint32_t bits) { 5402 void Map::set_bit_field3(uint32_t bits) {
5155 if (kInt32Size != kPointerSize) { 5403 if (kInt32Size != kPointerSize) {
5156 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0); 5404 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
5157 } 5405 }
5158 WRITE_UINT32_FIELD(this, kBitField3Offset, bits); 5406 WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
5159 } 5407 }
5160 5408
5161 5409
5162 uint32_t Map::bit_field3() { 5410 uint32_t Map::bit_field3() {
5163 return READ_UINT32_FIELD(this, kBitField3Offset); 5411 return READ_UINT32_FIELD(this, kBitField3Offset);
5164 } 5412 }
5165 5413
5166 5414
5415 Handle<LayoutDescriptor> Map::GetLayoutDescriptor() {
5416 LayoutDescriptor* layout_desc = FLAG_unbox_double_fields
5417 ? layout_descriptor()
5418 : LayoutDescriptor::FastPointerLayout();
5419 return handle(layout_desc, GetIsolate());
5420 }
5421
5422
5167 void Map::AppendDescriptor(Descriptor* desc) { 5423 void Map::AppendDescriptor(Descriptor* desc) {
5168 DescriptorArray* descriptors = instance_descriptors(); 5424 DescriptorArray* descriptors = instance_descriptors();
5169 int number_of_own_descriptors = NumberOfOwnDescriptors(); 5425 int number_of_own_descriptors = NumberOfOwnDescriptors();
5170 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors); 5426 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
5171 descriptors->Append(desc); 5427 descriptors->Append(desc);
5172 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1); 5428 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
5429
5430 // This function does not support appending double field descriptors and
5431 // it should never be used to do so (otherwise the layout descriptor must be updated too).
5432 #ifdef DEBUG
5433 PropertyDetails details = desc->GetDetails();
5434 CHECK(details.type() != FIELD || !details.representation().IsDouble());
5435 #endif
5173 } 5436 }
5174 5437
5175 5438
5176 Object* Map::GetBackPointer() { 5439 Object* Map::GetBackPointer() {
5177 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset); 5440 Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
5178 if (object->IsDescriptorArray()) { 5441 if (object->IsDescriptorArray()) {
5179 return TransitionArray::cast(object)->back_pointer_storage(); 5442 return TransitionArray::cast(object)->back_pointer_storage();
5180 } else { 5443 } else {
5181 DCHECK(object->IsMap() || object->IsUndefined()); 5444 DCHECK(object->IsMap() || object->IsUndefined());
5182 return object; 5445 return object;
(...skipping 2011 matching lines...)
7194 7457
7195 7458
7196 template<typename StaticVisitor> 7459 template<typename StaticVisitor>
7197 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() { 7460 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7198 typedef v8::String::ExternalStringResource Resource; 7461 typedef v8::String::ExternalStringResource Resource;
7199 StaticVisitor::VisitExternalTwoByteString( 7462 StaticVisitor::VisitExternalTwoByteString(
7200 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); 7463 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7201 } 7464 }
7202 7465
7203 7466
7467 static void IterateBodyUsingLayoutDescriptor(HeapObject* object,
7468 int start_offset, int end_offset,
7469 ObjectVisitor* v) {
7470 DCHECK(FLAG_unbox_double_fields);
7471 DCHECK(IsAligned(start_offset, kPointerSize) &&
7472 IsAligned(end_offset, kPointerSize));
7473
7474 InobjectPropertiesHelper helper(object->map());
7475 DCHECK(!helper.all_fields_tagged());
7476
7477 for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
7478 // Visit all tagged fields.
7479 if (helper.IsTagged(offset)) {
7480 v->VisitPointer(HeapObject::RawField(object, offset));
7481 }
7482 }
7483 }
7484
7485
7204 template<int start_offset, int end_offset, int size> 7486 template<int start_offset, int end_offset, int size>
7205 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody( 7487 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
7206 HeapObject* obj, 7488 HeapObject* obj,
7207 ObjectVisitor* v) { 7489 ObjectVisitor* v) {
7490 if (!FLAG_unbox_double_fields ||
7491 obj->map()->layout_descriptor()->IsFastPointerLayout()) {
7208 v->VisitPointers(HeapObject::RawField(obj, start_offset), 7492 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7209 HeapObject::RawField(obj, end_offset)); 7493 HeapObject::RawField(obj, end_offset));
7494 } else {
7495 IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
7496 }
7210 } 7497 }
7211 7498
7212 7499
7213 template<int start_offset> 7500 template<int start_offset>
7214 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj, 7501 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7215 int object_size, 7502 int object_size,
7216 ObjectVisitor* v) { 7503 ObjectVisitor* v) {
7217 v->VisitPointers(HeapObject::RawField(obj, start_offset), 7504 if (!FLAG_unbox_double_fields ||
7218 HeapObject::RawField(obj, object_size)); 7505 obj->map()->layout_descriptor()->IsFastPointerLayout()) {
7506 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7507 HeapObject::RawField(obj, object_size));
7508 } else {
7509 IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
7510 }
7219 } 7511 }
7220 7512
7221 7513
7222 template<class Derived, class TableType> 7514 template<class Derived, class TableType>
7223 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() { 7515 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7224 TableType* table(TableType::cast(this->table())); 7516 TableType* table(TableType::cast(this->table()));
7225 int index = Smi::cast(this->index())->value(); 7517 int index = Smi::cast(this->index())->value();
7226 Object* key = table->KeyAt(index); 7518 Object* key = table->KeyAt(index);
7227 DCHECK(!key->IsTheHole()); 7519 DCHECK(!key->IsTheHole());
7228 return key; 7520 return key;
(...skipping 49 matching lines...)
7278 #undef READ_SHORT_FIELD 7570 #undef READ_SHORT_FIELD
7279 #undef WRITE_SHORT_FIELD 7571 #undef WRITE_SHORT_FIELD
7280 #undef READ_BYTE_FIELD 7572 #undef READ_BYTE_FIELD
7281 #undef WRITE_BYTE_FIELD 7573 #undef WRITE_BYTE_FIELD
7282 #undef NOBARRIER_READ_BYTE_FIELD 7574 #undef NOBARRIER_READ_BYTE_FIELD
7283 #undef NOBARRIER_WRITE_BYTE_FIELD 7575 #undef NOBARRIER_WRITE_BYTE_FIELD
7284 7576
7285 } } // namespace v8::internal 7577 } } // namespace v8::internal
7286 7578
7287 #endif // V8_OBJECTS_INL_H_ 7579 #endif // V8_OBJECTS_INL_H_