Index: src/builtins/arm64/builtins-arm64.cc
diff --git a/src/builtins/arm64/builtins-arm64.cc b/src/builtins/arm64/builtins-arm64.cc
index 13a2e33f506336da9d49409561f92dbdc6b2cc78..de227f266d01a6dc6cf7fd14964012ebeeede19e 100644
--- a/src/builtins/arm64/builtins-arm64.cc
+++ b/src/builtins/arm64/builtins-arm64.cc
@@ -1435,14 +1435,14 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
   __ TestAndBranchIfAnySet(
       temp, 1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte,
       &gotta_call_runtime);
-  // Is the full code valid?
+
+  // If SFI points to anything other than CompileLazy, install that.
   __ Ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
-  __ Ldr(x5, FieldMemOperand(entry, Code::kFlagsOffset));
-  __ and_(x5, x5, Operand(Code::KindField::kMask));
-  __ Mov(x5, Operand(x5, LSR, Code::KindField::kShift));
-  __ Cmp(x5, Operand(Code::BUILTIN));
+  __ Move(temp, masm->CodeObject());
+  __ Cmp(entry, temp);
   __ B(eq, &gotta_call_runtime);
-  // Yes, install the full code.
+
+  // Install the SFI's code entry.
   __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
   __ RecordWriteCodeEntryField(closure, entry, x5);