Index: src/objects-inl.h |
diff --git a/src/objects-inl.h b/src/objects-inl.h |
index 7fa2a0e0cfd45c007182aa181523905b87b8af62..0486eab368030f016bc874579e0c6758eeab9486 100644 |
--- a/src/objects-inl.h |
+++ b/src/objects-inl.h |
@@ -53,6 +53,14 @@ PropertyDetails PropertyDetails::AsDeleted() const { |
} |
+int PropertyDetails::field_width_in_words() const { |
+ ASSERT(type() == FIELD); |
+ if (!FLAG_unbox_double_fields) return 1; |
+ if (kDoubleSize == kPointerSize) return 1; |
+ return representation().IsDouble() ? kDoubleSize / kPointerSize : 1; |
+} |
+ |
+ |
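For context, a minimal standalone sketch (not part of this patch) of what field_width_in_words() computes: with double unboxing enabled on a 32-bit target, a double-representation field spans two pointer-sized words, otherwise one. The constants below are illustrative assumptions, not the V8 definitions.

    #include <cassert>

    // Illustrative constants for a 32-bit configuration; not the V8 values.
    const int kPointerSize = 4;
    const int kDoubleSize = 8;

    // Mirrors the logic of PropertyDetails::field_width_in_words() above.
    int FieldWidthInWords(bool unbox_double_fields, bool is_double) {
      if (!unbox_double_fields) return 1;         // doubles stay boxed as HeapNumbers
      if (kDoubleSize == kPointerSize) return 1;  // 64-bit: a double fits in one word
      return is_double ? kDoubleSize / kPointerSize : 1;  // 32-bit: two words
    }

    int main() {
      assert(FieldWidthInWords(true, true) == 2);   // unboxed double, 32-bit
      assert(FieldWidthInWords(true, false) == 1);  // tagged field
      assert(FieldWidthInWords(false, true) == 1);  // unboxing disabled
      return 0;
    }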
#define TYPE_CHECKER(type, instancetype) \ |
bool Object::Is##type() const { \ |
return Object::IsHeapObject() && \ |
@@ -715,6 +723,11 @@ bool Object::IsDescriptorArray() const { |
} |
+bool Object::IsLayoutDescriptor() const { |
+ return IsSmi() || IsFixedTypedArrayBase(); |
+} |
+ |
+ |
bool Object::IsTransitionArray() const { |
return IsFixedArray(); |
} |
@@ -2003,11 +2016,19 @@ void JSObject::SetInternalField(int index, Smi* value) { |
} |
+bool Map::IsUnboxedDoubleField(FieldIndex index) { |
+ if (!FLAG_unbox_double_fields) return false; |
+ if (index.is_hidden_field() || !index.is_inobject()) return false; |
+ return !layout_descriptor()->IsTagged(index.property_index()); |
+} |
+ |
+ |
// Access fast-case object properties at index. The use of these routines |
// is needed to correctly distinguish between properties stored in-object and |
// properties stored in the properties array. |
Object* JSObject::RawFastPropertyAt(FieldIndex index) { |
if (index.is_inobject()) { |
+ ASSERT(!map()->IsUnboxedDoubleField(index)); |
return READ_FIELD(this, index.offset()); |
} else { |
return properties()->get(index.outobject_array_index()); |
@@ -2015,17 +2036,62 @@ Object* JSObject::RawFastPropertyAt(FieldIndex index) { |
} |
-void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) { |
+// Access fast-case object properties at index. The use of these routines |
+// is needed to correctly distinguish between properties stored in-object and |
+// properties stored in the properties array. |
+Handle<Object> JSObject::RawFastBoxedPropertyAt(Handle<JSObject> object, |
+ FieldIndex index) { |
+ Isolate* isolate = object->GetIsolate(); |
+ if (index.is_inobject()) { |
+ Map* map = object->map(); |
+ if (map->IsUnboxedDoubleField(index)) { |
+ double value = READ_DOUBLE_FIELD(*object, index.offset()); |
+ return isolate->factory()->NewHeapNumber(value, MUTABLE); |
  [Toon Verwaest, 2014/07/29 15:02:09] Should not call this for unboxed doubles. We shoul…
  [Igor Sheludko, 2014/10/30 14:23:43] Done.
+ |
+ } else { |
+ return handle(READ_FIELD(*object, index.offset()), isolate); |
+ } |
+ } else { |
+ ASSERT(index.outobject_array_index() < object->properties()->length()); |
+ return handle(object->properties()->get(index.outobject_array_index()), |
+ isolate); |
+ } |
+} |
+ |
+ |
+double JSObject::RawFastDoublePropertyAt(FieldIndex index) { |
+ ASSERT(map()->IsUnboxedDoubleField(index)); |
+ return READ_DOUBLE_FIELD(this, index.offset()); |
+} |
+ |
+ |
+void JSObject::FastPropertyAtPut(Map* map, FieldIndex index, Object* value) { |
if (index.is_inobject()) { |
int offset = index.offset(); |
- WRITE_FIELD(this, offset, value); |
- WRITE_BARRIER(GetHeap(), this, offset, value); |
+ if (map->IsUnboxedDoubleField(index)) { |
  [Toon Verwaest, 2014/07/29 15:02:08] Always go through FastDoublePropertyAtPut? I would…
  [Igor Sheludko, 2014/10/30 14:23:43] I fixed callers of this method where it makes sen…
+ ASSERT(value->IsMutableHeapNumber()); |
+ WRITE_DOUBLE_FIELD(this, offset, HeapNumber::cast(value)->value()); |
+ } else { |
+ WRITE_FIELD(this, offset, value); |
+ WRITE_BARRIER(GetHeap(), this, offset, value); |
+ } |
} else { |
properties()->set(index.outobject_array_index(), value); |
} |
} |
+void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) { |
+ FastPropertyAtPut(map(), index, value); |
+} |
+ |
+ |
+void JSObject::FastDoublePropertyAtPut(FieldIndex index, double value) { |
+ ASSERT(map()->IsUnboxedDoubleField(index)); |
+ WRITE_DOUBLE_FIELD(this, index.offset(), value); |
+} |
+ |
+ |
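As a rough illustration of what the READ_DOUBLE_FIELD / WRITE_DOUBLE_FIELD accesses above amount to, here is a standalone sketch (not the V8 macros) that reads and writes a double at a byte offset inside a raw buffer; the buffer and offsets are made up for the example.

    #include <cstdio>
    #include <cstring>

    // Read/write a double at a byte offset, using memcpy to avoid aliasing issues.
    double ReadDoubleField(const unsigned char* object, int offset) {
      double value;
      std::memcpy(&value, object + offset, sizeof(value));
      return value;
    }

    void WriteDoubleField(unsigned char* object, int offset, double value) {
      std::memcpy(object + offset, &value, sizeof(value));
    }

    int main() {
      unsigned char object[32] = {0};     // stand-in for the in-object property area
      WriteDoubleField(object, 8, 1.5);   // unboxed double stored at offset 8
      std::printf("%g\n", ReadDoubleField(object, 8));  // prints 1.5
      return 0;
    }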
int JSObject::GetInObjectPropertyOffset(int index) { |
return map()->GetInObjectPropertyOffset(index); |
} |
@@ -2743,6 +2809,31 @@ bool DescriptorArray::IsEmpty() { |
void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) { |
WRITE_FIELD( |
this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors)); |
+ if (FLAG_unbox_double_fields) drop_cached_layout_descriptor(); |
  [Toon Verwaest, 2014/07/29 15:02:08] This doesn't belong here
  [Igor Sheludko, 2014/10/30 14:23:43] Done. I removed the whole layout descriptor cache…
+} |
+ |
+ |
+Object* DescriptorArray::cached_layout_descriptor() { |
+ ASSERT(FLAG_unbox_double_fields); |
+ ASSERT(length() >= kFirstIndex || IsEmpty()); |
+ if (length() == 0) return LayoutDescriptor::FastPointerLayout(); |
+ return get(kLayoutDescriptorCacheIndex); |
+} |
+ |
+ |
+void DescriptorArray::set_cached_layout_descriptor(LayoutDescriptor* cached) { |
+ ASSERT(FLAG_unbox_double_fields); |
+ ASSERT(length() >= kFirstIndex || IsEmpty()); |
+ if (length() > 0) set(kLayoutDescriptorCacheIndex, cached); |
+} |
+ |
+ |
+void DescriptorArray::drop_cached_layout_descriptor() { |
+ ASSERT(FLAG_unbox_double_fields); |
+ ASSERT(length() >= kFirstIndex || IsEmpty()); |
+ if (length() > 0) { |
+ set(kLayoutDescriptorCacheIndex, GetHeap()->undefined_value()); |
+ } |
} |
@@ -3028,9 +3119,9 @@ void DescriptorArray::Set(int descriptor_number, |
NoIncrementalWriteBarrierSet(this, |
ToValueIndex(descriptor_number), |
*desc->GetValue()); |
- NoIncrementalWriteBarrierSet(this, |
- ToDetailsIndex(descriptor_number), |
+ NoIncrementalWriteBarrierSet(this, ToDetailsIndex(descriptor_number), |
desc->GetDetails().AsSmi()); |
+ if (FLAG_unbox_double_fields) drop_cached_layout_descriptor(); |
  [Toon Verwaest, 2014/07/29 15:02:08] This doesn't belong here
  [Igor Sheludko, 2014/10/30 14:23:43] Done.
} |
@@ -3041,6 +3132,7 @@ void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { |
set(ToKeyIndex(descriptor_number), *desc->GetKey()); |
set(ToValueIndex(descriptor_number), *desc->GetValue()); |
set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi()); |
+ if (FLAG_unbox_double_fields) drop_cached_layout_descriptor(); |
  [Toon Verwaest, 2014/07/29 15:02:09] This doesn't belong here
  [Igor Sheludko, 2014/10/30 14:23:43] Done.
} |
@@ -3105,6 +3197,194 @@ DescriptorArray::WhitenessWitness::~WhitenessWitness() { |
} |
+LayoutDescriptor* LayoutDescriptor::FromSmi(Smi* smi) { |
+ return LayoutDescriptor::cast(smi); |
+} |
+ |
+ |
+Handle<LayoutDescriptor> LayoutDescriptor::New(Isolate* isolate, int length) { |
+ if (length <= kSmiValueSize) { |
+ // The whole bit vector fits into a smi. |
+ return handle(LayoutDescriptor::FromSmi(Smi::FromInt(0)), isolate); |
+ } |
+ |
+ length = (length + kNumberOfBits - 1) / kNumberOfBits; |
+ ASSERT(length > 0); |
+ |
+ if (SmiValuesAre32Bits() && (length & 1)) { |
+    ++length;  // Make the length kPointerSize-aligned. |
+ } |
+ return Handle<LayoutDescriptor>::cast( |
+ isolate->factory()->NewFixedTypedArray(length, kExternalUint32Array)); |
+} |
+ |
+ |
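A standalone sketch of the sizing arithmetic in LayoutDescriptor::New(): layouts with at most kSmiValueSize fields are encoded in a smi, larger ones get a backing store of 32-bit words, rounded up to an even word count when smi payloads are 32 bits wide. The constants are assumptions for illustration, not the V8 definitions.

    #include <cassert>

    const int kNumberOfBits = 32;   // bits per backing-store element
    const int kSmiValueSize = 31;   // assumption: 31-bit smi payload

    // Returns 0 when the layout fits in a smi, otherwise the element count of
    // the fixed typed array backing store.
    int BackingStoreLength(int field_count, bool smi_values_are_32_bits) {
      if (field_count <= kSmiValueSize) return 0;
      int length = (field_count + kNumberOfBits - 1) / kNumberOfBits;  // round up
      if (smi_values_are_32_bits && (length & 1)) {
        ++length;  // keep the payload kPointerSize-aligned
      }
      return length;
    }

    int main() {
      assert(BackingStoreLength(10, false) == 0);  // fast (smi) layout
      assert(BackingStoreLength(40, false) == 2);  // 40 bits -> two 32-bit words
      assert(BackingStoreLength(70, true) == 4);   // 3 words padded to an even count
      return 0;
    }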
+LayoutDescriptor* LayoutDescriptor::FastPointerLayout() { |
+ return LayoutDescriptor::FromSmi(Smi::FromInt(0)); |
+} |
+ |
+ |
+void LayoutDescriptor::GetIndexes(int field_index, int* layout_word_index, |
+ uint32_t* layout_mask) { |
+ *layout_word_index = field_index / kNumberOfBits; |
+ ASSERT((!IsSmi() && (*layout_word_index < length())) || |
+ (IsSmi() && (*layout_word_index < 32))); |
+ |
+ int layout_bit_index = field_index % kNumberOfBits; |
+ *layout_mask = static_cast<uint32_t>(1) << layout_bit_index; |
+} |
+ |
+ |
+LayoutDescriptor* LayoutDescriptor::SetTagged(int field_index, bool tagged) { |
+ int layout_word_index; |
+ uint32_t layout_mask; |
+ |
+ GetIndexes(field_index, &layout_word_index, &layout_mask); |
+ |
+ if (IsSlowLayout()) { |
+ uint32_t value = get_scalar(layout_word_index); |
+ if (tagged) { |
+ value &= ~layout_mask; |
+ } else { |
+ value |= layout_mask; |
+ } |
+ set(layout_word_index, value); |
+ return this; |
+ } else { |
+ uint32_t value = static_cast<uint32_t>(Smi::cast(this)->value()); |
+ if (tagged) { |
+ value &= ~layout_mask; |
+ } else { |
+ value |= layout_mask; |
+ } |
+ return LayoutDescriptor::FromSmi(Smi::FromInt(static_cast<int>(value))); |
+ } |
+} |
+ |
+ |
+bool LayoutDescriptor::IsTagged(int field_index) { |
+ if (IsFastPointerLayout()) return true; |
+ |
+ int layout_word_index; |
+ uint32_t layout_mask; |
+ |
+ GetIndexes(field_index, &layout_word_index, &layout_mask); |
+ |
+ if (IsSlowLayout()) { |
+ uint32_t value = get_scalar(layout_word_index); |
+ return (value & layout_mask) == 0; |
+ } else { |
+ uint32_t value = static_cast<uint32_t>(Smi::cast(this)->value()); |
+ return (value & layout_mask) == 0; |
+ } |
+} |
+ |
+ |
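GetIndexes(), SetTagged() and IsTagged() above implement a plain bit vector: one bit per pointer-sized field, where 0 means "tagged pointer" and 1 means "untagged double". A standalone sketch of that encoding over a vector of uint32_t (a stand-in for the smi / fixed typed array backing, not the V8 classes):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    const int kNumberOfBits = 32;

    // Map a field index to its backing-store word and bit mask.
    void GetIndexes(int field_index, int* word_index, uint32_t* mask) {
      *word_index = field_index / kNumberOfBits;
      *mask = static_cast<uint32_t>(1) << (field_index % kNumberOfBits);
    }

    // Clear the bit for tagged fields, set it for untagged (double) fields.
    void SetTagged(std::vector<uint32_t>* layout, int field_index, bool tagged) {
      int word_index;
      uint32_t mask;
      GetIndexes(field_index, &word_index, &mask);
      if (tagged) {
        (*layout)[word_index] &= ~mask;
      } else {
        (*layout)[word_index] |= mask;
      }
    }

    bool IsTagged(const std::vector<uint32_t>& layout, int field_index) {
      int word_index;
      uint32_t mask;
      GetIndexes(field_index, &word_index, &mask);
      return (layout[word_index] & mask) == 0;
    }

    int main() {
      std::vector<uint32_t> layout(2, 0);  // all fields tagged initially
      SetTagged(&layout, 35, false);       // field 35 holds an unboxed double
      assert(!IsTagged(layout, 35));
      assert(IsTagged(layout, 3));
      return 0;
    }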
+bool LayoutDescriptor::IsFastPointerLayout() { |
+ return IsSmi() && (Smi::cast(this)->value() == 0); |
+} |
+ |
+ |
+bool LayoutDescriptor::IsSlowLayout() { return !IsSmi(); } |
+ |
+ |
+LayoutDescriptor* LayoutDescriptor::cast_gc_safe(Object* object) { |
+ if (object->IsSmi()) { |
+ // Either fast mode or forwarding pointer. |
+ LayoutDescriptor* layout_desc = reinterpret_cast<LayoutDescriptor*>(object); |
+ return layout_desc; |
+ } |
+ |
+ // This is a mixed descriptor which is a fixed typed array. |
+ MapWord map_word = reinterpret_cast<HeapObject*>(object)->map_word(); |
+ if (map_word.IsForwardingAddress()) { |
+ // Mark-compact has already moved layout descriptor. |
+ object = map_word.ToForwardingAddress(); |
+ } |
+ return LayoutDescriptor::cast(object); |
+} |
+ |
+ |
+LayoutDescriptor* LayoutDescriptor::OptimizeFor(Map* map) { |
  [Toon Verwaest, 2014/07/29 15:02:08] You shouldn't need to "undo" installing an expensi…
  [Igor Sheludko, 2014/10/30 14:23:43] Done.
+ if (IsFastPointerLayout()) return this; |
+ |
+  // Try to "take" only the part of the descriptor that corresponds to the |
+  // used in-object fields of the given map. |
+ if (map->inobject_properties() == 0) return FastPointerLayout(); |
+ |
+ int last_used_inobject_field_index = |
+ Min(map->inobject_properties(), map->NextFreePropertyIndex()) - 1; |
+ if (last_used_inobject_field_index < 0) return FastPointerLayout(); |
+ |
+ // Check if all fields are tagged. |
+ int layout_word_index; |
+ uint32_t mask; |
+ GetIndexes(last_used_inobject_field_index, &layout_word_index, &mask); |
+  // Calculate |mask| for all the bits that correspond to the in-object fields |
+  // of the given |map|. |
+ mask = (mask << 1) - 1; |
+ |
+ if (IsSlowLayout()) { |
+ if (last_used_inobject_field_index < kSmiValueSize) { |
+ // "Take" a subset of bits from the first word. |
+ uint32_t value = get_scalar(0) & mask; |
+ return LayoutDescriptor::FromSmi(Smi::FromInt(static_cast<int>(value))); |
+ } |
+ |
+ // All the bit-words but the last one should be zero. |
+ for (int i = 0; i < layout_word_index; i++) { |
+ uint32_t value = get_scalar(i); |
+ if (value != 0) return this; |
+ } |
+ // Now check part of the last word we are interested in. |
+ uint32_t value = get_scalar(layout_word_index) & mask; |
+ if (value != 0) return this; |
+ |
+ // All the fields are tagged. |
+ return FastPointerLayout(); |
+ |
+ } else { |
+ // "Take" a subset of bits from the value. |
+ uint32_t value = static_cast<uint32_t>(Smi::cast(this)->value()) & mask; |
+ return LayoutDescriptor::FromSmi(Smi::FromInt(static_cast<int>(value))); |
+ } |
+} |
+ |
+ |
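The trimming in OptimizeFor() hinges on one piece of bit arithmetic: take the mask of the last used in-object field and compute (mask << 1) - 1, which covers every field up to and including it. A standalone sketch of just that step, assuming the field indices fit in a single 32-bit layout word:

    #include <cassert>
    #include <cstdint>

    // Mask covering bits 0..last_used_field_index of a single 32-bit layout word.
    uint32_t UsedFieldsMask(int last_used_field_index) {
      uint32_t last_bit = static_cast<uint32_t>(1) << (last_used_field_index % 32);
      return (last_bit << 1) - 1;
    }

    int main() {
      assert(UsedFieldsMask(0) == 0x1u);
      assert(UsedFieldsMask(3) == 0xFu);
      // Doubles only at fields 4..7: restricted to fields 0..3, all used fields
      // are tagged, so the fast pointer layout can be used instead.
      uint32_t layout_word = 0xF0u;
      assert((layout_word & UsedFieldsMask(3)) == 0);
      return 0;
    }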
+// InobjectPropertiesHelper is a helper class for querying whether the |
+// in-object property at a given offset is a double or not. |
+InobjectPropertiesHelper::InobjectPropertiesHelper(Map* map) |
+ : all_fields_tagged_(true), |
+ header_size_(0), |
+ inobject_properties_count_(0), |
+ layout_descriptor_(LayoutDescriptor::FastPointerLayout()) { |
+ if (!FLAG_unbox_double_fields) return; |
+ |
+ layout_descriptor_ = map->layout_descriptor_gc_safe(); |
+ if (layout_descriptor_->IsFastPointerLayout()) { |
+ return; |
+ } |
+ |
+ int inobject_properties = map->inobject_properties(); |
+ ASSERT(inobject_properties > 0); |
+ header_size_ = map->instance_size() - (inobject_properties * kPointerSize); |
+ ASSERT(header_size_ >= 0); |
+ |
+ all_fields_tagged_ = false; |
+} |
+ |
+ |
+bool InobjectPropertiesHelper::IsTagged(int offset_in_bytes) { |
+ ASSERT(IsAligned(offset_in_bytes, kPointerSize)); |
+ if (all_fields_tagged_) return true; |
+ // Object headers do not contain non-tagged fields. |
+ if (offset_in_bytes < header_size_) return true; |
+ int field_index = (offset_in_bytes - header_size_) / kPointerSize; |
+ |
+ return layout_descriptor_->IsTagged(field_index); |
+} |
+ |
+ |
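A standalone sketch of the offset-to-field-index mapping used by InobjectPropertiesHelper::IsTagged(): offsets inside the object header are always treated as tagged, and each pointer-sized slot above the header maps to one layout-descriptor bit. The sizes below are illustrative assumptions.

    #include <cassert>

    const int kPointerSize = 8;  // assumption: 64-bit build

    // Returns -1 for header offsets (always tagged), otherwise the field index.
    int FieldIndexForOffset(int offset_in_bytes, int header_size) {
      assert(offset_in_bytes % kPointerSize == 0);
      if (offset_in_bytes < header_size) return -1;
      return (offset_in_bytes - header_size) / kPointerSize;
    }

    int main() {
      const int header_size = 24;  // e.g. map word plus two more header slots
      assert(FieldIndexForOffset(8, header_size) == -1);   // still inside the header
      assert(FieldIndexForOffset(24, header_size) == 0);   // first in-object property
      assert(FieldIndexForOffset(40, header_size) == 2);
      return 0;
    }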
template<typename Derived, typename Shape, typename Key> |
int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) { |
const int kMinCapacity = 32; |
@@ -3225,6 +3505,7 @@ CAST_ACCESSOR(JSTypedArray) |
CAST_ACCESSOR(JSValue) |
CAST_ACCESSOR(JSWeakMap) |
CAST_ACCESSOR(JSWeakSet) |
+CAST_ACCESSOR(LayoutDescriptor) |
CAST_ACCESSOR(Map) |
CAST_ACCESSOR(MapCache) |
CAST_ACCESSOR(Name) |
@@ -5099,14 +5380,54 @@ static void EnsureHasTransitionArray(Handle<Map> map) { |
} |
-void Map::InitializeDescriptors(DescriptorArray* descriptors) { |
+LayoutDescriptor* Map::layout_descriptor_gc_safe() { |
+ Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset); |
+ return LayoutDescriptor::cast_gc_safe(layout_desc); |
+} |
+ |
+ |
+// Rebuilds the layout descriptor. Must be called after map layout parameters |
+// (such as |instance_type|, |instance_size|, |instance_descriptors| and |
+// |inobject_properties|) are fully initialized. |
+// Note: |unused_property_fields| is allowed to be in an inconsistent state. |
+void Map::RebuildLayoutDescriptor(Handle<Map> map) { |
  [Toon Verwaest, 2014/07/29 15:02:08] Try to avoid introducing this method
  [Igor Sheludko, 2014/10/30 14:23:43] Done.
+ Handle<DescriptorArray> descriptors(map->instance_descriptors()); |
+ Handle<LayoutDescriptor> layout_desc = LayoutDescriptor::New(descriptors); |
+ |
+ map->InitializeDescriptors(*descriptors, *layout_desc); |
+ if (!FLAG_unbox_double_fields) { |
+ map->set_visitor_id(StaticVisitorBase::GetVisitorId(*map)); |
+ } |
+} |
+ |
+ |
+void Map::InitializeDescriptors(DescriptorArray* descriptors, |
+ LayoutDescriptor* layout_desc) { |
+ set_instance_descriptors(descriptors); |
+ if (FLAG_unbox_double_fields) { |
+ set_layout_descriptor(layout_desc->OptimizeFor(this)); |
  [Toon Verwaest, 2014/07/29 15:02:08] Only overwrite the layout descriptor if the curren…
  [Igor Sheludko, 2014/10/30 14:23:43] Done.
+ SLOW_ASSERT(layout_descriptor()->IsConsistentWithMap(this)); |
+ set_visitor_id(StaticVisitorBase::GetVisitorId(this)); |
+ } |
+} |
+ |
+ |
+void Map::InitializeOwnDescriptors(DescriptorArray* descriptors, |
+ LayoutDescriptor* layout_desc) { |
int len = descriptors->number_of_descriptors(); |
set_instance_descriptors(descriptors); |
SetNumberOfOwnDescriptors(len); |
+ |
+ if (FLAG_unbox_double_fields) { |
+ set_layout_descriptor(layout_desc->OptimizeFor(this)); |
  [Toon Verwaest, 2014/07/29 15:02:09] Make sure the layout_desc that comes into this fun…
  [Igor Sheludko, 2014/10/30 14:23:43] Done.
+ SLOW_ASSERT(layout_descriptor()->IsConsistentWithMap(this)); |
+ set_visitor_id(StaticVisitorBase::GetVisitorId(this)); |
+ } |
} |
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset) |
+ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset) |
void Map::set_bit_field3(uint32_t bits) { |
@@ -7093,12 +7414,37 @@ void ExternalTwoByteString::ExternalTwoByteStringIterateBody() { |
} |
+static void IterateBodyUsingLayoutDescriptor(HeapObject* object, |
+ int start_offset, int end_offset, |
+ ObjectVisitor* v) { |
+ ASSERT(FLAG_unbox_double_fields); |
+ ASSERT(IsAligned(start_offset, kPointerSize) && |
+ IsAligned(end_offset, kPointerSize)); |
+ |
+ InobjectPropertiesHelper helper(object->map()); |
+ ASSERT(!helper.all_fields_tagged()); |
+ |
+ for (int offset = start_offset; offset < end_offset; offset += kPointerSize) { |
+ // Visit all tagged fields. |
+ if (helper.IsTagged(offset)) { |
+ v->VisitPointers(HeapObject::RawField(object, offset), |
  [Toon Verwaest, 2014/07/29 15:02:08] VisitPointer?
  [Igor Sheludko, 2014/10/30 14:23:43] Done.
+ HeapObject::RawField(object, offset + kPointerSize)); |
+ } |
+ } |
+} |
+ |
+ |
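The helper above turns the visitor's single pointer range into per-slot visits so that raw double words are skipped. A standalone sketch of that visiting pattern (the visitor and layout are stand-ins, not the V8 types):

    #include <cstdio>
    #include <vector>

    const int kPointerSize = 8;

    struct Visitor {
      void VisitPointer(int offset) {
        std::printf("visiting tagged slot at offset %d\n", offset);
      }
    };

    // Walk the body one pointer-sized slot at a time; only tagged slots are visited.
    void IterateBody(int start_offset, int end_offset,
                     const std::vector<bool>& slot_is_tagged, Visitor* visitor) {
      for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
        if (slot_is_tagged[offset / kPointerSize]) visitor->VisitPointer(offset);
      }
    }

    int main() {
      // Slots 0, 1 and 3 are tagged; slot 2 holds an unboxed double.
      std::vector<bool> slot_is_tagged = {true, true, false, true};
      Visitor visitor;
      IterateBody(0, 4 * kPointerSize, slot_is_tagged, &visitor);  // skips offset 16
      return 0;
    }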
template<int start_offset, int end_offset, int size> |
void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody( |
HeapObject* obj, |
ObjectVisitor* v) { |
+ if (!FLAG_unbox_double_fields || |
+ obj->map()->layout_descriptor()->IsFastPointerLayout()) { |
v->VisitPointers(HeapObject::RawField(obj, start_offset), |
HeapObject::RawField(obj, end_offset)); |
+ } else { |
+ IterateBodyUsingLayoutDescriptor(obj, start_offset, end_offset, v); |
+ } |
} |
@@ -7106,8 +7452,13 @@ template<int start_offset> |
void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj, |
int object_size, |
ObjectVisitor* v) { |
- v->VisitPointers(HeapObject::RawField(obj, start_offset), |
- HeapObject::RawField(obj, object_size)); |
+ if (!FLAG_unbox_double_fields || |
+ obj->map()->layout_descriptor()->IsFastPointerLayout()) { |
+ v->VisitPointers(HeapObject::RawField(obj, start_offset), |
+ HeapObject::RawField(obj, object_size)); |
+ } else { |
+ IterateBodyUsingLayoutDescriptor(obj, start_offset, object_size, v); |
+ } |
} |