Index: src/code-stubs-hydrogen.cc
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index f717d75c192423425453b25f4da792b281e83662..16a94527a6cffefb5f0ab7adc91d626f969f1563 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -1328,7 +1328,11 @@ HValue* CodeStubGraphBuilder<KeyedLoadDictionaryElementStub>::BuildCodeStub() {
   Add<HCheckSmi>(key);
-  return BuildUncheckedDictionaryElementLoad(receiver, key);
+  HValue* elements = AddLoadElements(receiver);
[Toon Verwaest, 2013/12/04 17:29:26] We need at least a BuildCheckHeapObject(receiver) […]
[danno, 2014/06/06 15:43:50] Why is that needed? We already did a map check in […]
+
+  HValue* hash = BuildElementIndexHash(key);
+
+  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
 }
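For context on the exchange above, a minimal sketch of the patched stub body with the explicit receiver check Toon asks for; everything except the BuildCheckHeapObject call is taken from the hunk above, and danno's reply is that an earlier map check already covers it.

  // Sketch only: the hunk above plus the receiver check under discussion.
  Add<HCheckSmi>(key);
  BuildCheckHeapObject(receiver);  // hypothetical addition from the review comment

  HValue* elements = AddLoadElements(receiver);
  HValue* hash = BuildElementIndexHash(key);
  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);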
@@ -1337,4 +1341,275 @@ Handle<Code> KeyedLoadDictionaryElementStub::GenerateCode(Isolate* isolate) {
 }
+template <>
+class CodeStubGraphBuilder<KeyedLoadGenericElementStub>
+    : public CodeStubGraphBuilderBase {
+ public:
+  CodeStubGraphBuilder(Isolate* isolate,
+                         KeyedLoadGenericElementStub* stub)
[Toon Verwaest, 2013/12/04 17:29:26] nit: Weird indentation
[danno, 2014/06/06 15:43:50] Done.
+      : CodeStubGraphBuilderBase(isolate, stub) {}
+
+ protected:
+  virtual HValue* BuildCodeStub();
+
+  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
+                            HValue* receiver,
+                            HValue* key,
+                            HValue* instance_type,
+                            HValue* bit_field2,
+                            ElementsKind next_kind,
+                            ElementsKind handle_as_kind);
+
+  KeyedLoadGenericElementStub* casted_stub() {
+    return static_cast<KeyedLoadGenericElementStub*>(stub());
+  }
+};
+
+
+void CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildFastElementLoad(
+    HGraphBuilder::IfBuilder* if_builder,
+    HValue* receiver,
+    HValue* key,
+    HValue* instance_type,
+    HValue* bit_field2,
+    ElementsKind next_kind,
+    ElementsKind handle_as_kind) {
[Toon Verwaest, 2013/12/04 17:29:26] In isolation it's very hard to see what this metho[d] […]
[danno, 2014/06/06 15:43:50] Done.
+  HValue* next_kind_value =
+      Add<HConstant>(next_kind << Map::kElementsKindShift);
+  if_builder->If<HCompareNumericAndBranch>(bit_field2, next_kind_value,
+                                           Token::LT);
+  if_builder->Then();
+
+  IfBuilder js_array_check(this);
+
+  if (!IsExternalArrayElementsKind(handle_as_kind)) {
+    js_array_check.If<HCompareNumericAndBranch>(
+        instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
+  }
+
+  js_array_check.Then();
+  if (!IsExternalArrayElementsKind(handle_as_kind)) {
[Toon Verwaest, 2013/12/04 17:29:26] It's hard to understand what kind of graph is crea[ted] […]
[danno, 2014/06/06 15:43:50] Done.
+    Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
+                                                true, handle_as_kind,
+                                                false, NEVER_RETURN_HOLE,
+                                                STANDARD_STORE));
+  } else {
+    Push(graph()->GetConstant0());
+  }
+
+  js_array_check.Else();
+  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
+                                              false, handle_as_kind,
+                                              false, NEVER_RETURN_HOLE,
+                                              STANDARD_STORE));
+}
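Since this helper is hard to read in isolation (the concern raised above), a rough summary: each call emits one arm of the caller's IfBuilder that claims every elements kind strictly below next_kind (via the bit_field2 comparison against next_kind << Map::kElementsKindShift) and loads the elements as handle_as_kind; for non-external kinds it additionally branches on JS_ARRAY_TYPE so the bounds check uses the appropriate length. The Else arm is left open on purpose, so successive calls chain into one decision tree. A short excerpt of that chaining, taken from BuildCodeStub below:

  // Excerpt of the chaining pattern in BuildCodeStub (first two arms only).
  IfBuilder kind_if(this);
  BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                       FAST_DOUBLE_ELEMENTS, FAST_HOLEY_ELEMENTS);
  kind_if.Else();
  BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                       DICTIONARY_ELEMENTS, FAST_HOLEY_DOUBLE_ELEMENTS);
  // ... further arms follow, ending with kind_if.ElseDeopt(...) and kind_if.End().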
+
+
+HValue* CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildCodeStub() {
+  HValue* receiver = GetParameter(0);
+  HValue* key = GetParameter(1);
+
+  // Split into a smi/integer case and unique string case.
+  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
+                                                graph()->CreateBasicBlock());
+
+  BuildKeyedIndexCheck(key, &index_name_split_continuation);
+
+  IfBuilder index_name_split(this, &index_name_split_continuation);
+  index_name_split.Then();
+  {
+    // Key is an index (number)
+    key = Pop();
+
+    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
+                         (1 << Map::kHasIndexedInterceptor);
+    BuildReceiverCheck(receiver, bit_field_mask);
[Toon Verwaest, 2013/12/04 17:29:26] Can we pull these 2 checks out of the continuation […]
[danno, 2014/06/06 15:43:50] No, these can't be moved since they are different […]
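danno's reply appears to refer to the masks themselves: the two branches guard different interceptor bits, so the receiver check cannot simply be hoisted above the index/name split; only the access-check bit is shared. The two masks, copied from this branch and from the Else branch further down:

  // Index branch (here):
  int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                       (1 << Map::kHasIndexedInterceptor);
  // Name branch (below):
  int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                       (1 << Map::kHasNamedInterceptor);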
+
+    HValue* map = Add<HLoadNamedField>(receiver, HObjectAccess::ForMap());
+
+    HValue* instance_type =
+        Add<HLoadNamedField>(map, HObjectAccess::ForMapInstanceType());
+
+    HValue* bit_field2 = Add<HLoadNamedField>(map,
+        HObjectAccess::ForMapBitField2());
+
+    IfBuilder kind_if(this);
+    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                         FAST_DOUBLE_ELEMENTS, FAST_HOLEY_ELEMENTS);
+
+    kind_if.Else();
+    {
+      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                           DICTIONARY_ELEMENTS, FAST_HOLEY_DOUBLE_ELEMENTS);
+    }
+    kind_if.Else();
+    {
+      int non_strict_elements_kind =
+          NON_STRICT_ARGUMENTS_ELEMENTS << Map::kElementsKindShift;
+      HValue* dictionary_limit = Add<HConstant>(non_strict_elements_kind);
+      kind_if.If<HCompareNumericAndBranch>(bit_field2, dictionary_limit,
+                                           Token::LT);
+    }
+    kind_if.Then();
+    {
+      HValue* elements = AddLoadElements(receiver);
+
+      HValue* hash = BuildElementIndexHash(key);
+
+      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
+    }
+    // Non-strict elements are not handled.
+    kind_if.Else();
+
+    HValue* non_strict_limit =
+        Add<HConstant>(EXTERNAL_BYTE_ELEMENTS << Map::kElementsKindShift);
+    DeoptimizeIf<HCompareNumericAndBranch>(
+        bit_field2, non_strict_limit, Token::LT,
+        "non-strict argument elements in KeyedLoadGenericElementStub");
+
+    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                         EXTERNAL_UNSIGNED_BYTE_ELEMENTS,
+                         EXTERNAL_BYTE_ELEMENTS);
+
+    kind_if.Else();
+    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                         EXTERNAL_SHORT_ELEMENTS,
+                         EXTERNAL_UNSIGNED_BYTE_ELEMENTS);
+
+    kind_if.Else();
+    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                         EXTERNAL_UNSIGNED_SHORT_ELEMENTS,
+                         EXTERNAL_SHORT_ELEMENTS);
+
+    kind_if.Else();
+    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                         EXTERNAL_INT_ELEMENTS,
+                         EXTERNAL_UNSIGNED_SHORT_ELEMENTS);
+
+    kind_if.Else();
+    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                         EXTERNAL_UNSIGNED_INT_ELEMENTS,
+                         EXTERNAL_INT_ELEMENTS);
+
+    kind_if.Else();
+    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                         EXTERNAL_FLOAT_ELEMENTS,
+                         EXTERNAL_UNSIGNED_INT_ELEMENTS);
+
+    kind_if.Else();
+    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                         EXTERNAL_DOUBLE_ELEMENTS, EXTERNAL_FLOAT_ELEMENTS);
+
+    kind_if.Else();
+    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                         EXTERNAL_PIXEL_ELEMENTS, EXTERNAL_DOUBLE_ELEMENTS);
+
+    kind_if.Else();
+    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                         static_cast<ElementsKind>(EXTERNAL_PIXEL_ELEMENTS + 1),
+                         EXTERNAL_PIXEL_ELEMENTS);
+
+    kind_if.ElseDeopt("ElementsKind unhandled in KeyedLoadGenericElementStub");
+
+    kind_if.End();
+  }
+  index_name_split.Else();
+  {
+    // Key is a unique string.
+    key = Pop();
+
+    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
+                         (1 << Map::kHasNamedInterceptor);
+    BuildReceiverCheck(receiver, bit_field_mask);
+
+    HIfContinuation continuation;
+    BuildCheckForDictionaryProperties(receiver, &continuation);
+    IfBuilder if_dict_properties(this, &continuation);
+    if_dict_properties.Then();
+    {
+      // Key is string, properties are dictionary mode
+      BuildGlobalInstanceTypeCheck(receiver);
+
+      HValue* properties = Add<HLoadNamedField>(
+          receiver, HObjectAccess::ForPropertiesPointer());
+
+      HValue* hash =
+          Add<HLoadNamedField>(key, HObjectAccess::ForNameHashField());
+
+      HValue* value = BuildUncheckedDictionaryElementLoad(receiver,
+                                                          properties,
+                                                          key,
+                                                          hash);
+      Push(value);
+    }
+    if_dict_properties.Else();
+    {
+      // Key is string, properties are fast mode
+      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);
+
+      ExternalReference cache_keys_ref =
+          ExternalReference::keyed_lookup_cache_keys(isolate());
+      HValue* cache_keys = Add<HConstant>(cache_keys_ref);
+
+      HValue* map = Add<HLoadNamedField>(receiver, HObjectAccess::ForMap());
+      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
+      base_index->ClearFlag(HValue::kCanOverflow);
+
+      IfBuilder lookup_if(this);
+      for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
+           ++probe) {
+        HValue* map_index = AddUncasted<HAdd>(base_index,
+                                              Add<HConstant>(probe * 2));
+        map_index->ClearFlag(HValue::kCanOverflow);
+        HValue* key_index = AddUncasted<HAdd>(base_index,
+                                              Add<HConstant>(probe * 2 + 1));
+        key_index->ClearFlag(HValue::kCanOverflow);
+        HValue* map_to_check = Add<HLoadKeyed>(cache_keys,
+                                               map_index,
+                                               static_cast<HValue*>(NULL),
+                                               FAST_ELEMENTS,
+                                               NEVER_RETURN_HOLE, 0);
+        lookup_if.If<HCompareObjectEqAndBranch>(map_to_check, map);
+        lookup_if.And();
+        HValue* key_to_check = Add<HLoadKeyed>(cache_keys,
+                                               key_index,
+                                               static_cast<HValue*>(NULL),
+                                               FAST_ELEMENTS,
+                                               NEVER_RETURN_HOLE, 0);
+        lookup_if.If<HCompareObjectEqAndBranch>(key_to_check, key);
+        lookup_if.Then();
+        {
+          ExternalReference cache_field_offsets_ref =
+              ExternalReference::keyed_lookup_cache_field_offsets(isolate());
+          HValue* cache_field_offsets = Add<HConstant>(cache_field_offsets_ref);
+          HValue* index = AddUncasted<HAdd>(hash,
+                                            Add<HConstant>(probe));
+          index->ClearFlag(HValue::kCanOverflow);
+          HValue* property_index = Add<HLoadKeyed>(cache_field_offsets,
+                                                   index,
+                                                   static_cast<HValue*>(NULL),
+                                                   EXTERNAL_INT_ELEMENTS,
+                                                   NEVER_RETURN_HOLE, 0);
+          Push(property_index);
+        }
+        lookup_if.Else();
+      }
+      Add<HDeoptimize>("KeyedLoad fall-back", Deoptimizer::EAGER);
+      Push(graph()->GetConstant0());
+      lookup_if.End();
+      Push(Add<HLoadFieldByIndex>(receiver, Pop()));
+    }
+    if_dict_properties.End();
+  }
+  index_name_split.End();
+
+  return Pop();
+}
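An aside on the fast-properties path above: the probing loop walks one bucket of the KeyedLookupCache, where cache_keys holds (map, name) pairs and cache_field_offsets holds one field index per entry; on a hit the stub pushes that index and loads the property with HLoadFieldByIndex, otherwise it deoptimizes with "KeyedLoad fall-back". A small standalone sketch of the index arithmetic the loop emits (plain C++, not Hydrogen; the bucket size and example hash are illustrative assumptions, not values taken from this CL):

  #include <cstdio>

  int main() {
    const int kEntriesPerBucket = 4;  // assumed bucket size, stands in for KeyedLookupCache::kEntriesPerBucket
    const int hash = 7;               // hypothetical result of BuildKeyedLookupCacheHash
    const int base_index = hash * 2;  // cache_keys stores pairs, so a bucket starts at 2 * hash
    for (int probe = 0; probe < kEntriesPerBucket; ++probe) {
      int map_index = base_index + probe * 2;      // cache_keys[map_index]  -> map to compare
      int key_index = base_index + probe * 2 + 1;  // cache_keys[key_index]  -> name to compare
      int field_index = hash + probe;              // cache_field_offsets[field_index] -> field offset
      std::printf("probe %d: map@%d key@%d field_offset@%d\n",
                  probe, map_index, key_index, field_index);
    }
    return 0;
  }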
+
+
+Handle<Code> KeyedLoadGenericElementStub::GenerateCode(Isolate* isolate) {
+  return DoGenerateCode(isolate, this);
+}
+
+
 } } // namespace v8::internal