Index: src/builtins/x64/builtins-x64.cc
diff --git a/src/builtins/x64/builtins-x64.cc b/src/builtins/x64/builtins-x64.cc
index 0f524e656fb552415157c256201fedbb8777cffb..87dfc7d3a6b6305a4d7ec63c05c36e47b5418a7c 100644
--- a/src/builtins/x64/builtins-x64.cc
+++ b/src/builtins/x64/builtins-x64.cc
@@ -1076,14 +1076,14 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
   __ testb(FieldOperand(entry, SharedFunctionInfo::kMarkedForTierUpByteOffset),
            Immediate(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
   __ j(not_zero, &gotta_call_runtime);
-  // Is the full code valid?
+
+  // If SFI points to anything other than CompileLazy, install that.
   __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
-  __ movl(rbx, FieldOperand(entry, Code::kFlagsOffset));
-  __ andl(rbx, Immediate(Code::KindField::kMask));
-  __ shrl(rbx, Immediate(Code::KindField::kShift));
-  __ cmpl(rbx, Immediate(Code::BUILTIN));
+  __ Move(rbx, masm->CodeObject());
+  __ cmpp(entry, rbx);
   __ j(equal, &gotta_call_runtime);
-  // Yes, install the full code.
+
+  // Install the SFI's code entry.
   __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
   __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
   __ RecordWriteCodeEntryField(closure, entry, r15);
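Note: the sketch below restates the amended fast path in plain C++ for readers less familiar with the macro assembler. The struct names and fields are hypothetical stand-ins, not V8 API; the real builtin works on tagged heap objects at raw offsets (SharedFunctionInfo::kCodeOffset, Code::kHeaderSize), and masm->CodeObject() yields the CompileLazy builtin's own code object. This is an illustration of the control flow only, under those assumptions.

// Minimal, self-contained sketch (hypothetical types, not V8 code).
#include <cstdint>

struct Code {
  uint8_t* instruction_start;  // stands in for entry + Code::kHeaderSize
};

struct SharedFunctionInfo {
  bool marked_for_tier_up;     // stands in for the kMarkedForTierUp bit
  Code* code;                  // stands in for SharedFunctionInfo::kCodeOffset
};

struct JSFunction {
  SharedFunctionInfo* shared;
  uint8_t* code_entry;         // stands in for JSFunction::kCodeEntryOffset
};

// Returns true if the closure was wired up without calling the runtime.
bool TryInstallSharedCode(JSFunction* closure, const Code* compile_lazy_builtin) {
  SharedFunctionInfo* sfi = closure->shared;
  if (sfi->marked_for_tier_up) return false;       // gotta_call_runtime
  Code* code = sfi->code;
  // Old check: was sfi->code of kind BUILTIN? New check: only bail out when the
  // SFI still points at CompileLazy itself; any other code can be installed.
  if (code == compile_lazy_builtin) return false;  // gotta_call_runtime
  closure->code_entry = code->instruction_start;   // install the SFI's code entry
  // (the real builtin also emits a write barrier via RecordWriteCodeEntryField)
  return true;
}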