Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(567)

Side by Side Diff: src/objects-inl.h

Issue 391693002: In-object double fields unboxing (for 64-bit only). (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 // 4 //
5 // Review notes: 5 // Review notes:
6 // 6 //
7 // - The use of macros in these inline functions may seem superfluous 7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal 8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep. 9 // code. gcc is not happy when attempting to inline too deep.
10 // 10 //
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
46 return Smi::FromInt(value >> 1); 46 return Smi::FromInt(value >> 1);
47 } 47 }
48 48
49 49
// Returns a copy of these property details with the DeletedField bit set.
50 PropertyDetails PropertyDetails::AsDeleted() const { 50 PropertyDetails PropertyDetails::AsDeleted() const {
51 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1)); 51 Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1)); 
52 return PropertyDetails(smi); 52 return PropertyDetails(smi); 
53 } 53 } 
54 54
55 55
// Number of kPointerSize-sized words this FIELD property occupies in-object:
// kDoubleSize / kPointerSize words for an unboxed double when doubles are
// wider than pointers (i.e. 32-bit targets), otherwise exactly one word.
56 int PropertyDetails::field_width_in_words() const {
57 ASSERT(type() == FIELD);
// With unboxing disabled every field is a single tagged-pointer slot.
58 if (!FLAG_unbox_double_fields) return 1;
// On targets where a double fits in one pointer-sized slot, width is 1.
59 if (kDoubleSize == kPointerSize) return 1;
60 return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
61 }
62
63
// Expands to Object::Is<type>(): true iff the receiver is a heap object
// whose map's instance type equals |instancetype|.
56 #define TYPE_CHECKER(type, instancetype) \ 64 #define TYPE_CHECKER(type, instancetype) \
57 bool Object::Is##type() const { \ 65 bool Object::Is##type() const { \
58 return Object::IsHeapObject() && \ 66 return Object::IsHeapObject() && \
59 HeapObject::cast(this)->map()->instance_type() == instancetype; \ 67 HeapObject::cast(this)->map()->instance_type() == instancetype; \
60 } 68 } 
61 69
62 70
63 #define CAST_ACCESSOR(type) \ 71 #define CAST_ACCESSOR(type) \
64 type* type::cast(Object* object) { \ 72 type* type::cast(Object* object) { \
65 SLOW_ASSERT(object->Is##type()); \ 73 SLOW_ASSERT(object->Is##type()); \
(...skipping 642 matching lines...) Expand 10 before | Expand all | Expand 10 after
// A weak collection is either a JSWeakMap or a JSWeakSet.
708 bool Object::IsJSWeakCollection() const { 716 bool Object::IsJSWeakCollection() const { 
709 return IsJSWeakMap() || IsJSWeakSet(); 717 return IsJSWeakMap() || IsJSWeakSet(); 
710 } 718 } 
711 719
712 720
// DescriptorArrays have no distinct instance type; any FixedArray passes.
// This is a structural (not exact) check.
713 bool Object::IsDescriptorArray() const { 721 bool Object::IsDescriptorArray() const { 
714 return IsFixedArray(); 722 return IsFixedArray(); 
715 } 723 } 
716 724
717 725
// A LayoutDescriptor has two representations: a Smi bit vector for the
// fast case, or a fixed typed array for the slow (many-fields) case.
// See LayoutDescriptor::IsSlowLayout below.
726 bool Object::IsLayoutDescriptor() const {
727 return IsSmi() || IsFixedTypedArrayBase();
728 }
729
730
// Structural check only: transition arrays are stored as FixedArrays.
718 bool Object::IsTransitionArray() const { 731 bool Object::IsTransitionArray() const { 
719 return IsFixedArray(); 732 return IsFixedArray(); 
720 } 733 } 
721 734
722 735
723 bool Object::IsDeoptimizationInputData() const { 736 bool Object::IsDeoptimizationInputData() const {
724 // Must be a fixed array. 737 // Must be a fixed array.
725 if (!IsFixedArray()) return false; 738 if (!IsFixedArray()) return false;
726 739
727 // There's no sure way to detect the difference between a fixed array and 740 // There's no sure way to detect the difference between a fixed array and
(...skipping 1268 matching lines...) Expand 10 before | Expand all | Expand 10 after
// Stores a Smi into internal field |index|. Since the value is a Smi,
// no write barrier is emitted after WRITE_FIELD.
1996 void JSObject::SetInternalField(int index, Smi* value) { 2009 void JSObject::SetInternalField(int index, Smi* value) { 
1997 ASSERT(index < GetInternalFieldCount() && index >= 0); 2010 ASSERT(index < GetInternalFieldCount() && index >= 0); 
1998 // Internal objects do follow immediately after the header, whereas in-object 2011 // Internal objects do follow immediately after the header, whereas in-object 
1999 // properties are at the end of the object. Therefore there is no need 2012 // properties are at the end of the object. Therefore there is no need 
2000 // to adjust the index here. 2013 // to adjust the index here. 
2001 int offset = GetHeaderSize() + (kPointerSize * index); 2014 int offset = GetHeaderSize() + (kPointerSize * index); 
2002 WRITE_FIELD(this, offset, value); 2015 WRITE_FIELD(this, offset, value); 
2003 } 2016 } 
2004 2017
2005 2018
// True iff the in-object field at |index| stores a raw (unboxed) double
// rather than a tagged pointer. Only non-hidden, in-object fields can be
// unboxed; the authoritative bit lives in the map's layout descriptor.
2019 bool Map::IsUnboxedDoubleField(FieldIndex index) {
2020 if (!FLAG_unbox_double_fields) return false;
2021 if (index.is_hidden_field() || !index.is_inobject()) return false;
// Untagged bit set in the layout descriptor => raw double storage.
2022 return !layout_descriptor()->IsTagged(index.property_index());
2023 }
2024
2025
2006 // Access fast-case object properties at index. The use of these routines 2026 // Access fast-case object properties at index. The use of these routines
2007 // is needed to correctly distinguish between properties stored in-object and 2027 // is needed to correctly distinguish between properties stored in-object and
2008 // properties stored in the properties array. 2028 // properties stored in the properties array.
// Reads a fast property as a tagged Object*. The new ASSERT documents the
// contract: callers must not use this accessor on unboxed double fields
// (use RawFastDoublePropertyAt for those).
2009 Object* JSObject::RawFastPropertyAt(FieldIndex index) { 2029 Object* JSObject::RawFastPropertyAt(FieldIndex index) { 
2010 if (index.is_inobject()) { 2030 if (index.is_inobject()) { 
2031 ASSERT(!map()->IsUnboxedDoubleField(index));
2011 return READ_FIELD(this, index.offset()); 2032 return READ_FIELD(this, index.offset()); 
2012 } else { 2033 } else { 
2013 return properties()->get(index.outobject_array_index()); 2034 return properties()->get(index.outobject_array_index()); 
2014 } 2035 } 
2015 } 2036 } 
2016 2037
2017 2038
2018 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) { 2039 // Access fast-case object properties at index. The use of these routines
2040 // is needed to correctly distinguish between properties stored in-object and
2041 // properties stored in the properties array.
// Handle-based variant that always yields a boxed value: an unboxed double
// field is materialized as a fresh MUTABLE HeapNumber. NOTE: this can
// allocate (NewHeapNumber), hence the static signature taking a Handle.
2042 Handle<Object> JSObject::RawFastBoxedPropertyAt(Handle<JSObject> object,
2043 FieldIndex index) {
2044 Isolate* isolate = object->GetIsolate();
2045 if (index.is_inobject()) {
2046 Map* map = object->map();
2047 if (map->IsUnboxedDoubleField(index)) {
// Box the raw double into a new mutable HeapNumber.
2048 double value = READ_DOUBLE_FIELD(*object, index.offset());
2049 return isolate->factory()->NewHeapNumber(value, MUTABLE);
Toon Verwaest 2014/07/29 15:02:09 Should not call this for unboxed doubles. We shoul
Igor Sheludko 2014/10/30 14:23:43 Done.
2050 
2051 } else {
2052 return handle(READ_FIELD(*object, index.offset()), isolate);
2053 }
2054 } else {
// Out-of-object properties are always tagged; read from the backing array.
2055 ASSERT(index.outobject_array_index() < object->properties()->length());
2056 return handle(object->properties()->get(index.outobject_array_index()),
2057 isolate);
2058 }
2059 }
2060
2061
// Reads the raw double stored in an unboxed in-object field. Caller must
// have checked Map::IsUnboxedDoubleField for this index (see ASSERT).
2062 double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
2063 ASSERT(map()->IsUnboxedDoubleField(index));
2064 return READ_DOUBLE_FIELD(this, index.offset());
2065 }
2066
2067
// Stores a fast property, using the explicitly supplied |map| to decide the
// field's representation. For an unboxed double field the HeapNumber's raw
// double payload is written in place — no write barrier is needed because
// no pointer is stored; tagged fields get WRITE_FIELD plus a write barrier.
2068 void JSObject::FastPropertyAtPut(Map* map, FieldIndex index, Object* value) {
2019 if (index.is_inobject()) { 2069 if (index.is_inobject()) { 
2020 int offset = index.offset(); 2070 int offset = index.offset(); 
2021 WRITE_FIELD(this, offset, value); 2071 if (map->IsUnboxedDoubleField(index)) { 
Toon Verwaest 2014/07/29 15:02:08 Always go through FastDoublePropertyAtPut? I would
Igor Sheludko 2014/10/30 14:23:43 I fixed callers of this methods where it makes sen
2022 WRITE_BARRIER(GetHeap(), this, offset, value); 2072 ASSERT(value->IsMutableHeapNumber()); 
2073 WRITE_DOUBLE_FIELD(this, offset, HeapNumber::cast(value)->value());
2074 } else {
2075 WRITE_FIELD(this, offset, value);
2076 WRITE_BARRIER(GetHeap(), this, offset, value);
2077 }
2023 } else { 2078 } else { 
2024 properties()->set(index.outobject_array_index(), value); 2079 properties()->set(index.outobject_array_index(), value); 
2025 } 2080 } 
2026 } 2081 } 
2027 2082
2028 2083
// Convenience overload: uses the object's current map to decide whether the
// target field is an unboxed double (see the Map*-taking overload above).
2084 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2085 FastPropertyAtPut(map(), index, value);
2086 }
2087
2088
// Writes a raw double directly into an unboxed in-object field. No write
// barrier: the stored bits are not a pointer.
2089 void JSObject::FastDoublePropertyAtPut(FieldIndex index, double value) {
2090 ASSERT(map()->IsUnboxedDoubleField(index));
2091 WRITE_DOUBLE_FIELD(this, index.offset(), value);
2092 }
2093
2094
2029 int JSObject::GetInObjectPropertyOffset(int index) { 2095 int JSObject::GetInObjectPropertyOffset(int index) {
2030 return map()->GetInObjectPropertyOffset(index); 2096 return map()->GetInObjectPropertyOffset(index);
2031 } 2097 }
2032 2098
2033 2099
2034 Object* JSObject::InObjectPropertyAt(int index) { 2100 Object* JSObject::InObjectPropertyAt(int index) {
2035 int offset = GetInObjectPropertyOffset(index); 2101 int offset = GetInObjectPropertyOffset(index);
2036 return READ_FIELD(this, offset); 2102 return READ_FIELD(this, offset);
2037 } 2103 }
2038 2104
(...skipping 697 matching lines...) Expand 10 before | Expand all | Expand 10 after
// Empty descriptor arrays are shorter than kFirstIndex; anything shorter
// must be the canonical empty_descriptor_array (checked in the ASSERT).
2736 bool DescriptorArray::IsEmpty() { 2802 bool DescriptorArray::IsEmpty() { 
2737 ASSERT(length() >= kFirstIndex || 2803 ASSERT(length() >= kFirstIndex || 
2738 this == GetHeap()->empty_descriptor_array()); 2804 this == GetHeap()->empty_descriptor_array()); 
2739 return length() < kFirstIndex; 2805 return length() < kFirstIndex; 
2740 } 2806 } 
2741 2807
2742 2808
// Updates the stored descriptor count. The patch also invalidates the
// cached layout descriptor here since the set of fields may change; the
// reviewer flagged this placement (see thread below) and it was later moved.
2743 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) { 2809 void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) { 
2744 WRITE_FIELD( 2810 WRITE_FIELD( 
2745 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors)); 2811 this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors)); 
2812 if (FLAG_unbox_double_fields) drop_cached_layout_descriptor();
Toon Verwaest 2014/07/29 15:02:08 This doesn't belong here
Igor Sheludko 2014/10/30 14:23:43 Done. I removed the whole layout descriptor cache 
2746 } 2813 } 
2747 2814
2748 2815
// Returns the cached layout descriptor, or the canonical fast-pointer
// layout for the zero-length (empty) array which has no cache slot.
// Return type is Object* because the slot may hold undefined (= dropped).
2816 Object* DescriptorArray::cached_layout_descriptor() {
2817 ASSERT(FLAG_unbox_double_fields);
2818 ASSERT(length() >= kFirstIndex || IsEmpty());
2819 if (length() == 0) return LayoutDescriptor::FastPointerLayout();
2820 return get(kLayoutDescriptorCacheIndex);
2821 }
2822
2823
// Caches |cached| in the layout-descriptor slot. Silently a no-op for the
// empty array, which has no slot to write.
2824 void DescriptorArray::set_cached_layout_descriptor(LayoutDescriptor* cached) {
2825 ASSERT(FLAG_unbox_double_fields);
2826 ASSERT(length() >= kFirstIndex || IsEmpty());
2827 if (length() > 0) set(kLayoutDescriptorCacheIndex, cached);
2828 }
2829
2830
// Invalidates the cache by overwriting the slot with undefined; a later
// cached_layout_descriptor() read then returns undefined, which callers
// must treat as "no cached value". No-op for the empty array.
2831 void DescriptorArray::drop_cached_layout_descriptor() {
2832 ASSERT(FLAG_unbox_double_fields);
2833 ASSERT(length() >= kFirstIndex || IsEmpty());
2834 if (length() > 0) {
2835 set(kLayoutDescriptorCacheIndex, GetHeap()->undefined_value());
2836 }
2837 }
2838
2839
2749 // Perform a binary search in a fixed array. Low and high are entry indices. If 2840 // Perform a binary search in a fixed array. Low and high are entry indices. If
2750 // there are three entries in this array it should be called with low=0 and 2841 // there are three entries in this array it should be called with low=0 and
2751 // high=2. 2842 // high=2.
2752 template<SearchMode search_mode, typename T> 2843 template<SearchMode search_mode, typename T>
2753 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) { 2844 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
2754 uint32_t hash = name->Hash(); 2845 uint32_t hash = name->Hash();
2755 int limit = high; 2846 int limit = high;
2756 2847
2757 ASSERT(low <= high); 2848 ASSERT(low <= high);
2758 2849
(...skipping 262 matching lines...) Expand 10 before | Expand all | Expand 10 after
3021 const WhitenessWitness&) { 3112 const WhitenessWitness&) {
3022 // Range check. 3113 // Range check.
3023 ASSERT(descriptor_number < number_of_descriptors()); 3114 ASSERT(descriptor_number < number_of_descriptors());
3024 3115
3025 NoIncrementalWriteBarrierSet(this, 3116 NoIncrementalWriteBarrierSet(this,
3026 ToKeyIndex(descriptor_number), 3117 ToKeyIndex(descriptor_number),
3027 *desc->GetKey()); 3118 *desc->GetKey());
3028 NoIncrementalWriteBarrierSet(this, 3119 NoIncrementalWriteBarrierSet(this,
3029 ToValueIndex(descriptor_number), 3120 ToValueIndex(descriptor_number),
3030 *desc->GetValue()); 3121 *desc->GetValue());
3031 NoIncrementalWriteBarrierSet(this, 3122 NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number),
3032 ToDetailsIndex(descriptor_number),
3033 desc->GetDetails().AsSmi()); 3123 desc->GetDetails().AsSmi());
3124 if (FLAG_unbox_double_fields) drop_cached_layout_descriptor();
Toon Verwaest 2014/07/29 15:02:08 This doesn't belong here
Igor Sheludko 2014/10/30 14:23:43 Done.
3034 } 3125 }
3035 3126
3036 3127
// Writes key/value/details of |desc| into slot |descriptor_number| (with
// write barriers, unlike the WhitenessWitness overload above). The cache
// drop added here was flagged by review and later removed.
3037 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { 3128 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { 
3038 // Range check. 3129 // Range check. 
3039 ASSERT(descriptor_number < number_of_descriptors()); 3130 ASSERT(descriptor_number < number_of_descriptors()); 
3040 3131 
3041 set(ToKeyIndex(descriptor_number), *desc->GetKey()); 3132 set(ToKeyIndex(descriptor_number), *desc->GetKey()); 
3042 set(ToValueIndex(descriptor_number), *desc->GetValue()); 3133 set(ToValueIndex(descriptor_number), *desc->GetValue()); 
3043 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi()); 3134 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi()); 
3135 if (FLAG_unbox_double_fields) drop_cached_layout_descriptor();
Toon Verwaest 2014/07/29 15:02:09 This doesn't belong here
Igor Sheludko 2014/10/30 14:23:43 Done.
3044 } 3136 } 
3045 3137
3046 3138
3047 void DescriptorArray::Append(Descriptor* desc, 3139 void DescriptorArray::Append(Descriptor* desc,
3048 const WhitenessWitness& witness) { 3140 const WhitenessWitness& witness) {
3049 DisallowHeapAllocation no_gc; 3141 DisallowHeapAllocation no_gc;
3050 int descriptor_number = number_of_descriptors(); 3142 int descriptor_number = number_of_descriptors();
3051 SetNumberOfDescriptors(descriptor_number + 1); 3143 SetNumberOfDescriptors(descriptor_number + 1);
3052 Set(descriptor_number, desc, witness); 3144 Set(descriptor_number, desc, witness);
3053 3145
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
3098 ASSERT(!marking_->IsMarking() || 3190 ASSERT(!marking_->IsMarking() ||
3099 Marking::Color(array) == Marking::WHITE_OBJECT); 3191 Marking::Color(array) == Marking::WHITE_OBJECT);
3100 } 3192 }
3101 3193
3102 3194
3103 DescriptorArray::WhitenessWitness::~WhitenessWitness() { 3195 DescriptorArray::WhitenessWitness::~WhitenessWitness() {
3104 marking_->LeaveNoMarkingScope(); 3196 marking_->LeaveNoMarkingScope();
3105 } 3197 }
3106 3198
3107 3199
// Reinterprets a Smi as a fast-mode LayoutDescriptor (the Smi's payload is
// the bit vector; see Object::IsLayoutDescriptor).
3200 LayoutDescriptor* LayoutDescriptor::FromSmi(Smi* smi) {
3201 return LayoutDescriptor::cast(smi);
3202 }
3203
3204
// Allocates a layout descriptor able to hold |length| field bits: a Smi
// (all-tagged, all bits zero) when the bit vector fits in a Smi payload,
// otherwise a Uint32 fixed typed array ("slow" layout).
3205 Handle<LayoutDescriptor> LayoutDescriptor::New(Isolate* isolate, int length) {
3206 if (length <= kSmiValueSize) {
3207 // The whole bit vector fits into a smi.
3208 return handle(LayoutDescriptor::FromSmi(Smi::FromInt(0)), isolate);
3209 }
3210 
// Convert bit count to a count of 32-bit words, rounding up.
// (kNumberOfBits is presumably 32 to match kExternalUint32Array —
// TODO confirm against the class definition.)
3211 length = (length + kNumberOfBits - 1) / kNumberOfBits;
3212 ASSERT(length > 0);
3213 
3214 if (SmiValuesAre32Bits() && (length & 1)) {
3215 ++length; // Make kPtrSize aligned
3216 }
3217 return Handle<LayoutDescriptor>::cast(
3218 isolate->factory()->NewFixedTypedArray(length, kExternalUint32Array));
3219 }
3220
3221
// Canonical "every field is tagged" layout: the Smi zero (no bits set).
3222 LayoutDescriptor* LayoutDescriptor::FastPointerLayout() {
3223 return LayoutDescriptor::FromSmi(Smi::FromInt(0));
3224 }
3225
3226
// Maps a field index to its word index and single-bit mask within this
// descriptor's bit vector. The ASSERT bounds-checks against the typed-array
// length (slow layout) or 32 bits (Smi layout).
3227 void LayoutDescriptor::GetIndexes(int field_index, int* layout_word_index,
3228 uint32_t* layout_mask) {
3229 *layout_word_index = field_index / kNumberOfBits;
3230 ASSERT((!IsSmi() && (*layout_word_index < length())) ||
3231 (IsSmi() && (*layout_word_index < 32)));
3232 
3233 int layout_bit_index = field_index % kNumberOfBits;
3234 *layout_mask = static_cast<uint32_t>(1) << layout_bit_index;
3235 }
3236
3237
// Sets (tagged == false) or clears (tagged == true) the "unboxed" bit for
// |field_index|. A bit value of 1 means raw double, 0 means tagged pointer.
// Slow layout mutates in place and returns |this|; Smi layout is immutable,
// so a new Smi-encoded descriptor is returned — callers must use the result.
3238 LayoutDescriptor* LayoutDescriptor::SetTagged(int field_index, bool tagged) {
3239 int layout_word_index;
3240 uint32_t layout_mask;
3241 
3242 GetIndexes(field_index, &layout_word_index, &layout_mask);
3243 
3244 if (IsSlowLayout()) {
3245 uint32_t value = get_scalar(layout_word_index);
3246 if (tagged) {
3247 value &= ~layout_mask;
3248 } else {
3249 value |= layout_mask;
3250 }
3251 set(layout_word_index, value);
3252 return this;
3253 } else {
3254 uint32_t value = static_cast<uint32_t>(Smi::cast(this)->value());
3255 if (tagged) {
3256 value &= ~layout_mask;
3257 } else {
3258 value |= layout_mask;
3259 }
3260 return LayoutDescriptor::FromSmi(Smi::FromInt(static_cast<int>(value)));
3261 }
3262 }
3263
3264
// True iff field |field_index| holds a tagged pointer (bit clear). The
// all-zero fast layout short-circuits; otherwise the bit is read from the
// Smi payload or the typed-array word as appropriate.
3265 bool LayoutDescriptor::IsTagged(int field_index) {
3266 if (IsFastPointerLayout()) return true;
3267 
3268 int layout_word_index;
3269 uint32_t layout_mask;
3270 
3271 GetIndexes(field_index, &layout_word_index, &layout_mask);
3272 
3273 if (IsSlowLayout()) {
3274 uint32_t value = get_scalar(layout_word_index);
3275 return (value & layout_mask) == 0;
3276 } else {
3277 uint32_t value = static_cast<uint32_t>(Smi::cast(this)->value());
3278 return (value & layout_mask) == 0;
3279 }
3280 }
3281
3282
// The canonical all-tagged layout is exactly the Smi zero.
3283 bool LayoutDescriptor::IsFastPointerLayout() {
3284 return IsSmi() && (Smi::cast(this)->value() == 0);
3285 }
3286
3287
3288 bool LayoutDescriptor::IsSlowLayout() { return !IsSmi(); }
3289
3290
// Like cast(), but safe to call during GC: a slow-layout descriptor may
// already have been evacuated, in which case its map word holds the
// forwarding address, which is followed before casting.
3291 LayoutDescriptor* LayoutDescriptor::cast_gc_safe(Object* object) {
3292 if (object->IsSmi()) {
3293 // Either fast mode or forwarding pointer.
3294 LayoutDescriptor* layout_desc = reinterpret_cast<LayoutDescriptor*>(object);
3295 return layout_desc;
3296 }
3297 
3298 // This is a mixed descriptor which is a fixed typed array.
3299 MapWord map_word = reinterpret_cast<HeapObject*>(object)->map_word();
3300 if (map_word.IsForwardingAddress()) {
3301 // Mark-compact has already moved layout descriptor.
3302 object = map_word.ToForwardingAddress();
3303 }
3304 return LayoutDescriptor::cast(object);
3305 }
3306
3307
// Returns a (possibly smaller) descriptor restricted to the in-object
// fields |map| actually uses: if none of the used fields are unboxed, the
// canonical FastPointerLayout() is returned; if the used bits fit in a Smi
// they are repacked into a fast-mode descriptor; otherwise |this| is kept.
3308 LayoutDescriptor* LayoutDescriptor::OptimizeFor(Map* map) {
Toon Verwaest 2014/07/29 15:02:08 You shouldn't need to "undo" installing an expensi
Igor Sheludko 2014/10/30 14:23:43 Done.
3309 if (IsFastPointerLayout()) return this;
3310 
3311 // Try to "take" only a part of the descriptor that corresponds to
3312 // used in-object fields of given map.
3313 if (map->inobject_properties() == 0) return FastPointerLayout();
3314 
3315 int last_used_inobject_field_index =
3316 Min(map->inobject_properties(), map->NextFreePropertyIndex()) - 1;
3317 if (last_used_inobject_field_index < 0) return FastPointerLayout();
3318 
3319 // Check if all fields are tagged.
3320 int layout_word_index;
3321 uint32_t mask;
3322 GetIndexes(last_used_inobject_field_index, &layout_word_index, &mask);
3323 // Calculate |mask| for all the bits that correspond to |inobject| fields
3324 // of given |map|.
3325 mask = (mask << 1) - 1;
3326 
3327 if (IsSlowLayout()) {
// NOTE(review): this branch reads word 0 only — it assumes
// kSmiValueSize <= kNumberOfBits so that an index below kSmiValueSize
// always lands in word 0; confirm against the constants' definitions.
3328 if (last_used_inobject_field_index < kSmiValueSize) {
3329 // "Take" a subset of bits from the first word.
3330 uint32_t value = get_scalar(0) & mask;
3331 return LayoutDescriptor::FromSmi(Smi::FromInt(static_cast<int>(value)));
3332 }
3333 
3334 // All the bit-words but the last one should be zero.
3335 for (int i = 0; i < layout_word_index; i++) {
3336 uint32_t value = get_scalar(i);
3337 if (value != 0) return this;
3338 }
3339 // Now check part of the last word we are interested in.
3340 uint32_t value = get_scalar(layout_word_index) & mask;
3341 if (value != 0) return this;
3342 
3343 // All the fields are tagged.
3344 return FastPointerLayout();
3345 
3346 } else {
3347 // "Take" a subset of bits from the value.
3348 uint32_t value = static_cast<uint32_t>(Smi::cast(this)->value()) & mask;
3349 return LayoutDescriptor::FromSmi(Smi::FromInt(static_cast<int>(value)));
3350 }
3351 }
3352
3353
3354 // InobjectPropertiesHelper is a helper class for querying whether inobject
3355 // property at offset is Double or not.
// Snapshots the map's layout descriptor (GC-safely) once, so repeated
// IsTagged() queries during body iteration are cheap. When unboxing is
// disabled or the layout is all-tagged, all_fields_tagged_ stays true and
// IsTagged() short-circuits without consulting the descriptor.
3356 InobjectPropertiesHelper::InobjectPropertiesHelper(Map* map)
3357 : all_fields_tagged_(true),
3358 header_size_(0),
3359 inobject_properties_count_(0),
3360 layout_descriptor_(LayoutDescriptor::FastPointerLayout()) {
3361 if (!FLAG_unbox_double_fields) return;
3362 
// Use the GC-safe read: this runs inside the GC's body visitors.
3363 layout_descriptor_ = map->layout_descriptor_gc_safe();
3364 if (layout_descriptor_->IsFastPointerLayout()) {
3365 return;
3366 }
3367 
// Field index 0 starts right after the header, which occupies everything
// before the in-object property area.
3368 int inobject_properties = map->inobject_properties();
3369 ASSERT(inobject_properties > 0);
3370 header_size_ = map->instance_size() - (inobject_properties * kPointerSize);
3371 ASSERT(header_size_ >= 0);
3372 
// NOTE(review): inobject_properties_count_ is initialized but never
// assigned here — verify it is set or used elsewhere.
3373 all_fields_tagged_ = false;
3374 }
3375
3376
// True iff the pointer-aligned slot at |offset_in_bytes| holds a tagged
// value: header slots are always tagged; property slots defer to the
// cached layout descriptor.
3377 bool InobjectPropertiesHelper::IsTagged(int offset_in_bytes) {
3378 ASSERT(IsAligned(offset_in_bytes, kPointerSize));
3379 if (all_fields_tagged_) return true;
3380 // Object headers do not contain non-tagged fields.
3381 if (offset_in_bytes < header_size_) return true;
3382 int field_index = (offset_in_bytes - header_size_) / kPointerSize;
3383 
3384 return layout_descriptor_->IsTagged(field_index);
3385 }
3386
3387
// Capacity is the next power of two of at least twice the requested space,
// clamped below by kMinCapacity (keeps load factor <= 50%).
3108 template<typename Derived, typename Shape, typename Key> 3388 template<typename Derived, typename Shape, typename Key> 
3109 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) { 3389 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) { 
3110 const int kMinCapacity = 32; 3390 const int kMinCapacity = 32; 
3111 int capacity = RoundUpToPowerOf2(at_least_space_for * 2); 3391 int capacity = RoundUpToPowerOf2(at_least_space_for * 2); 
3112 if (capacity < kMinCapacity) { 3392 if (capacity < kMinCapacity) { 
3113 capacity = kMinCapacity; // Guarantee min capacity. 3393 capacity = kMinCapacity; // Guarantee min capacity. 
3114 } 3394 } 
3115 return capacity; 3395 return capacity; 
3116 } 3396 } 
3117 3397
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after
3218 CAST_ACCESSOR(JSObject) 3498 CAST_ACCESSOR(JSObject)
3219 CAST_ACCESSOR(JSProxy) 3499 CAST_ACCESSOR(JSProxy)
3220 CAST_ACCESSOR(JSReceiver) 3500 CAST_ACCESSOR(JSReceiver)
3221 CAST_ACCESSOR(JSRegExp) 3501 CAST_ACCESSOR(JSRegExp)
3222 CAST_ACCESSOR(JSSet) 3502 CAST_ACCESSOR(JSSet)
3223 CAST_ACCESSOR(JSSetIterator) 3503 CAST_ACCESSOR(JSSetIterator)
3224 CAST_ACCESSOR(JSTypedArray) 3504 CAST_ACCESSOR(JSTypedArray)
3225 CAST_ACCESSOR(JSValue) 3505 CAST_ACCESSOR(JSValue)
3226 CAST_ACCESSOR(JSWeakMap) 3506 CAST_ACCESSOR(JSWeakMap)
3227 CAST_ACCESSOR(JSWeakSet) 3507 CAST_ACCESSOR(JSWeakSet)
3508 CAST_ACCESSOR(LayoutDescriptor)
3228 CAST_ACCESSOR(Map) 3509 CAST_ACCESSOR(Map)
3229 CAST_ACCESSOR(MapCache) 3510 CAST_ACCESSOR(MapCache)
3230 CAST_ACCESSOR(Name) 3511 CAST_ACCESSOR(Name)
3231 CAST_ACCESSOR(NameDictionary) 3512 CAST_ACCESSOR(NameDictionary)
3232 CAST_ACCESSOR(NormalizedMapCache) 3513 CAST_ACCESSOR(NormalizedMapCache)
3233 CAST_ACCESSOR(Object) 3514 CAST_ACCESSOR(Object)
3234 CAST_ACCESSOR(ObjectHashTable) 3515 CAST_ACCESSOR(ObjectHashTable)
3235 CAST_ACCESSOR(Oddball) 3516 CAST_ACCESSOR(Oddball)
3236 CAST_ACCESSOR(OrderedHashMap) 3517 CAST_ACCESSOR(OrderedHashMap)
3237 CAST_ACCESSOR(OrderedHashSet) 3518 CAST_ACCESSOR(OrderedHashSet)
(...skipping 1854 matching lines...) Expand 10 before | Expand all | Expand 10 after
5092 transitions->set_back_pointer_storage(map->GetBackPointer()); 5373 transitions->set_back_pointer_storage(map->GetBackPointer());
5093 } else if (!map->transitions()->IsFullTransitionArray()) { 5374 } else if (!map->transitions()->IsFullTransitionArray()) {
5094 transitions = TransitionArray::ExtendToFullTransitionArray(map); 5375 transitions = TransitionArray::ExtendToFullTransitionArray(map);
5095 } else { 5376 } else {
5096 return; 5377 return;
5097 } 5378 }
5098 map->set_transitions(*transitions); 5379 map->set_transitions(*transitions);
5099 } 5380 }
5100 5381
5101 5382
// GC-safe read of the map's layout descriptor: tolerates a forwarding
// pointer in a moved slow-layout descriptor (see cast_gc_safe).
// NOTE(review): offset constant is spelled kLayoutDecriptorOffset
// ("Decriptor") — matches the ACCESSORS declaration below, but verify the
// intended spelling at the declaration site.
5102 void Map::InitializeDescriptors(DescriptorArray* descriptors) { 5383 LayoutDescriptor* Map::layout_descriptor_gc_safe() { 
5384 Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
5385 return LayoutDescriptor::cast_gc_safe(layout_desc);
5386 }
5387
5388
5389 // Rebuilds layout descriptor. Must be called after map layout parameters
5390 // (such as |instance_type|, |instance_size|, |instance_descriptors| and
5391 // |inobject_properties|) are fully initialized.
5392 // Note: |unused_property_fields| is allowed to be in inconsistent state.
5393 void Map::RebuildLayoutDescriptor(Handle<Map> map) {
Toon Verwaest 2014/07/29 15:02:08 Try to avoid introducing this method
Igor Sheludko 2014/10/30 14:23:43 Done.
// Presumably a LayoutDescriptor::New(Handle<DescriptorArray>) overload
// declared elsewhere — only New(Isolate*, int) is visible in this file;
// verify against the header.
5394 Handle<DescriptorArray> descriptors(map->instance_descriptors());
5395 Handle<LayoutDescriptor> layout_desc = LayoutDescriptor::New(descriptors);
5396 
5397 map->InitializeDescriptors(*descriptors, *layout_desc);
// When unboxing is off InitializeDescriptors skips the visitor-id update,
// so it is refreshed explicitly here.
5398 if (!FLAG_unbox_double_fields) {
5399 map->set_visitor_id(StaticVisitorBase::GetVisitorId(*map));
5400 }
5401 }
5402
5403
// Installs |descriptors| and, when unboxing is enabled, a layout descriptor
// trimmed for this map (OptimizeFor); the visitor id is refreshed because
// the body-visiting strategy depends on whether any field is unboxed.
5404 void Map::InitializeDescriptors(DescriptorArray* descriptors,
5405 LayoutDescriptor* layout_desc) {
5406 set_instance_descriptors(descriptors);
5407 if (FLAG_unbox_double_fields) {
5408 set_layout_descriptor(layout_desc->OptimizeFor(this));
Toon Verwaest 2014/07/29 15:02:08 Only overwrite the layout descriptor if the curren
Igor Sheludko 2014/10/30 14:23:43 Done.
5409 SLOW_ASSERT(layout_descriptor()->IsConsistentWithMap(this));
5410 set_visitor_id(StaticVisitorBase::GetVisitorId(this));
5411 }
5412 }
5413
5414
// Like InitializeDescriptors, but also records the descriptor count as this
// map's own descriptors (SetNumberOfOwnDescriptors).
5415 void Map::InitializeOwnDescriptors(DescriptorArray* descriptors,
5416 LayoutDescriptor* layout_desc) {
5103 int len = descriptors->number_of_descriptors(); 5417 int len = descriptors->number_of_descriptors(); 
5104 set_instance_descriptors(descriptors); 5418 set_instance_descriptors(descriptors); 
5105 SetNumberOfOwnDescriptors(len); 5419 SetNumberOfOwnDescriptors(len); 
5420 
5421 if (FLAG_unbox_double_fields) {
5422 set_layout_descriptor(layout_desc->OptimizeFor(this));
Toon Verwaest 2014/07/29 15:02:09 Make sure the layout_desc that comes into this fun
Igor Sheludko 2014/10/30 14:23:43 Done.
5423 SLOW_ASSERT(layout_descriptor()->IsConsistentWithMap(this));
5424 set_visitor_id(StaticVisitorBase::GetVisitorId(this));
5425 }
5106 } 5426 } 
5107 5427
5108 5428
5109 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset) 5429 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
5430 ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)
5110 5431
5111 5432
// Writes bit_field3; on 64-bit targets (pointer wider than int32) the
// upper half of the word is zeroed first so the whole slot is defined.
5112 void Map::set_bit_field3(uint32_t bits) { 5433 void Map::set_bit_field3(uint32_t bits) { 
5113 if (kInt32Size != kPointerSize) { 5434 if (kInt32Size != kPointerSize) { 
5114 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0); 5435 WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0); 
5115 } 5436 } 
5116 WRITE_UINT32_FIELD(this, kBitField3Offset, bits); 5437 WRITE_UINT32_FIELD(this, kBitField3Offset, bits); 
5117 } 5438 } 
5119 5440
(...skipping 1966 matching lines...) Expand 10 before | Expand all | Expand 10 after
7086 7407
7087 7408
// Static-visitor variant: passes the address of the external resource slot
// so the visitor can update the resource pointer.
7088 template<typename StaticVisitor> 7409 template<typename StaticVisitor> 
7089 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() { 7410 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() { 
7090 typedef v8::String::ExternalStringResource Resource; 7411 typedef v8::String::ExternalStringResource Resource; 
7091 StaticVisitor::VisitExternalTwoByteString( 7412 StaticVisitor::VisitExternalTwoByteString( 
7092 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); 7413 reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset))); 
7093 } 7414 } 
7094 7415
7095 7416
// Visits only the tagged slots of [start_offset, end_offset), skipping
// unboxed double fields, as determined by the map's layout descriptor.
// Called only when the layout is not all-tagged (see ASSERT below and the
// callers' fast-path check).
7417 static void IterateBodyUsingLayoutDescriptor(HeapObject* object,
7418 int start_offset, int end_offset,
7419 ObjectVisitor* v) {
7420 ASSERT(FLAG_unbox_double_fields);
7421 ASSERT(IsAligned(start_offset, kPointerSize) &&
7422 IsAligned(end_offset, kPointerSize));
7423 
7424 InobjectPropertiesHelper helper(object->map());
7425 ASSERT(!helper.all_fields_tagged());
7426 
7427 for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
7428 // Visit all tagged fields.
7429 if (helper.IsTagged(offset)) {
// Visits one slot at a time (a [offset, offset + kPointerSize) range).
7430 v->VisitPointers(HeapObject::RawField(object, offset),
Toon Verwaest 2014/07/29 15:02:08 VisitPointer?
Igor Sheludko 2014/10/30 14:23:43 Done.
7431 HeapObject::RawField(object, offset + kPointerSize));
7432 }
7433 }
7434 }
7435
7436
// Fast path: when unboxing is off or the map's layout is all-tagged, visit
// the whole fixed range in one call; otherwise fall back to the per-slot
// layout-descriptor walk above.
7096 template<int start_offset, int end_offset, int size> 7437 template<int start_offset, int end_offset, int size> 
7097 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody( 7438 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody( 
7098 HeapObject* obj, 7439 HeapObject* obj, 
7099 ObjectVisitor* v) { 7440 ObjectVisitor* v) { 
7441 if (!FLAG_unbox_double_fields ||
7442 obj->map()->layout_descriptor()->IsFastPointerLayout()) {
7100 v->VisitPointers(HeapObject::RawField(obj, start_offset), 7443 v->VisitPointers(HeapObject::RawField(obj, start_offset), 
7101 HeapObject::RawField(obj, end_offset)); 7444 HeapObject::RawField(obj, end_offset)); 
7445 } else {
7446 IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v);
7447 }
7102 } 7448 } 
7103 7449
7104 7450
7105 template<int start_offset> 7451 template<int start_offset>
7106 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj, 7452 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7107 int object_size, 7453 int object_size,
7108 ObjectVisitor* v) { 7454 ObjectVisitor* v) {
7109 v->VisitPointers(HeapObject::RawField(obj, start_offset), 7455 if (!FLAG_unbox_double_fields ||
7110 HeapObject::RawField(obj, object_size)); 7456 obj->map()->layout_descriptor()->IsFastPointerLayout()) {
7457 v->VisitPointers(HeapObject::RawField(obj, start_offset),
7458 HeapObject::RawField(obj, object_size));
7459 } else {
7460 IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v);
7461 }
7111 } 7462 }
7112 7463
7113 7464
7114 template<class Derived, class TableType> 7465 template<class Derived, class TableType>
7115 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() { 7466 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7116 TableType* table(TableType::cast(this->table())); 7467 TableType* table(TableType::cast(this->table()));
7117 int index = Smi::cast(this->index())->value(); 7468 int index = Smi::cast(this->index())->value();
7118 Object* key = table->KeyAt(index); 7469 Object* key = table->KeyAt(index);
7119 ASSERT(!key->IsTheHole()); 7470 ASSERT(!key->IsTheHole());
7120 return key; 7471 return key;
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
7170 #undef READ_SHORT_FIELD 7521 #undef READ_SHORT_FIELD
7171 #undef WRITE_SHORT_FIELD 7522 #undef WRITE_SHORT_FIELD
7172 #undef READ_BYTE_FIELD 7523 #undef READ_BYTE_FIELD
7173 #undef WRITE_BYTE_FIELD 7524 #undef WRITE_BYTE_FIELD
7174 #undef NOBARRIER_READ_BYTE_FIELD 7525 #undef NOBARRIER_READ_BYTE_FIELD
7175 #undef NOBARRIER_WRITE_BYTE_FIELD 7526 #undef NOBARRIER_WRITE_BYTE_FIELD
7176 7527
7177 } } // namespace v8::internal 7528 } } // namespace v8::internal
7178 7529
7179 #endif // V8_OBJECTS_INL_H_ 7530 #endif // V8_OBJECTS_INL_H_
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698