Index: src/code-stubs-hydrogen.cc
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index 6f86778f997e33a09a223d8b955908b765674937..82714af40534b207c78a7ae7ce4da879d7e7d7c2 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -1654,9 +1654,13 @@ HValue* CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildCodeStub() {
       HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
       base_index->ClearFlag(HValue::kCanOverflow);
-      IfBuilder lookup_if(this);
+      HIfContinuation inline_or_runtime_continuation(
+          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
+      IfBuilder* lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
       for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
            ++probe) {
+        IfBuilder* lookup_if = new IfBuilder(this);
Inline review comments on the "IfBuilder* lookup_if = new IfBuilder(this);" line:

danno (2014/07/07 07:59:06):
    Shouldn't you just Zone allocate this?
    void* buff

Jakob Kummerow (2014/07/07 11:52:10):
    I'm not sure. In what way would that be better? We

danno (2014/07/07 12:08:45):
    Well, since we generally avoid allocating from the
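The Zone allocation danno suggests is V8's usual pattern of placement-new into the compilation Zone, which removes the need for matching delete calls. A minimal sketch of that alternative, assuming IfBuilder can be placed into the builder's zone() via the operator new(size_t, Zone*) overload from zone.h, and that skipping its destructor is acceptable (neither is verified here):

    // Hypothetical zone-allocated variant of the line discussed above.
    // Memory is reclaimed when the compilation Zone dies, so there is no
    // matching delete; whether IfBuilder tolerates a skipped destructor
    // is an assumption.
    IfBuilder* lookup_if = new (zone()) IfBuilder(this);
    lookup_ifs[probe] = lookup_if;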
+        lookup_ifs[probe] = lookup_if;
         int probe_base = probe * KeyedLookupCache::kEntryLength;
         HValue* map_index = AddUncasted<HAdd>(base_index,
             Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
@@ -1669,15 +1673,15 @@ HValue* CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildCodeStub() {
                                                static_cast<HValue*>(NULL),
                                                FAST_ELEMENTS,
                                                NEVER_RETURN_HOLE, 0);
-        lookup_if.If<HCompareObjectEqAndBranch>(map_to_check, map);
-        lookup_if.And();
+        lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
+        lookup_if->And();
         HValue* key_to_check = Add<HLoadKeyed>(cache_keys,
                                                key_index,
                                                static_cast<HValue*>(NULL),
                                                FAST_ELEMENTS,
                                                NEVER_RETURN_HOLE, 0);
-        lookup_if.If<HCompareObjectEqAndBranch>(key_to_check, key);
-        lookup_if.Then();
+        lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
+        lookup_if->Then();
         {
           ExternalReference cache_field_offsets_ref =
               ExternalReference::keyed_lookup_cache_field_offsets(isolate());
@@ -1692,12 +1696,28 @@ HValue* CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildCodeStub() {
                                                     NEVER_RETURN_HOLE, 0);
           Push(property_index);
         }
-        lookup_if.Else();
+        lookup_if->Else();
       }
-      Add<HDeoptimize>("KeyedLoad fall-back", Deoptimizer::EAGER);
-      Push(graph()->GetConstant0());
-      lookup_if.End();
-      Push(Add<HLoadFieldByIndex>(receiver, Pop()));
+      for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
+        lookup_ifs[i]->JoinContinuation(&inline_or_runtime_continuation);
+        delete lookup_ifs[i];
+      }
+
+      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
+      inline_or_runtime.Then();
+      {
+        // Found a cached index, load property inline.
+        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
+      }
+      inline_or_runtime.Else();
+      {
+        // KeyedLookupCache miss; call runtime.
+        Add<HPushArguments>(receiver, key);
+        Push(Add<HCallRuntime>(
+            isolate()->factory()->empty_string(),
+            Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
+      }
+      inline_or_runtime.End();
     }
     if_dict_properties.End();
   }
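Reduced to its essentials, the control-flow pattern introduced above looks roughly like this (a sketch assembled only from the builder calls visible in the patch; a, b, c and d stand for arbitrary HValue*s, and the fragment is assumed to sit inside a CodeStubGraphBuilder method so that graph(), IfBuilder and friends resolve as in the stub above):

    // Several checks whose outcomes merge at one decision point instead of
    // each ending in its own End() or a deopt.
    HIfContinuation done(graph()->CreateBasicBlock(),
                         graph()->CreateBasicBlock());

    IfBuilder* first = new IfBuilder(this);
    first->If<HCompareObjectEqAndBranch>(a, b);
    first->Then();
    // ... push the result for the first hit ...
    first->Else();

    // Created while first's Else branch is open, so it nests inside it.
    IfBuilder* second = new IfBuilder(this);
    second->If<HCompareObjectEqAndBranch>(c, d);
    second->Then();
    // ... push the result for the second hit ...
    second->Else();

    // Instead of End(), route each builder into the shared continuation,
    // outermost first, mirroring the loop in the patch.
    first->JoinContinuation(&done);
    second->JoinContinuation(&done);
    delete first;
    delete second;

    // One merge point then picks between the fast path and the fallback.
    IfBuilder hit_or_miss(this, &done);
    hit_or_miss.Then();
    // ... fast path: consume the pushed value ...
    hit_or_miss.Else();
    // ... fallback: e.g. the HCallRuntime above ...
    hit_or_miss.End();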