Index: src/builtins/arm64/builtins-arm64.cc
diff --git a/src/builtins/arm64/builtins-arm64.cc b/src/builtins/arm64/builtins-arm64.cc
index 48551dea0003dba9bb713db36bf7733dd001c19f..552c0a51ee533572d51ce489c5858bbf83145ccb 100644
--- a/src/builtins/arm64/builtins-arm64.cc
+++ b/src/builtins/arm64/builtins-arm64.cc
@@ -1381,7 +1381,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// -----------------------------------
// First lookup code, maybe we don't need to compile!
Label gotta_call_runtime;
- Label maybe_call_runtime;
Label try_shared;
Label loop_top, loop_bottom;
@@ -1439,7 +1438,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousCachedCode));
__ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
- __ JumpIfSmi(entry, &maybe_call_runtime);
+ __ JumpIfSmi(entry, &try_shared);
// Found literals and code. Get them into the closure and return.
__ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -1476,18 +1475,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// We found neither literals nor code.
__ B(&gotta_call_runtime);
- __ Bind(&maybe_call_runtime);
-
- // Last possibility. Check the context free optimized code map entry.
- __ Ldr(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
- SharedFunctionInfo::kSharedCodeIndex));
- __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
- __ JumpIfSmi(entry, &try_shared);
-
- // Store code entry in the closure.
- __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ B(&install_optimized_code_and_tailcall);
-
__ Bind(&try_shared);
// Is the full code valid?
__ Ldr(entry,