Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index f7ded184ecc386ee47777caf4bd4e079ffcc7a58..1bfb9bdafbc77b83677724821ea7603bd7d9f3f2 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -287,8 +287,8 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
   // The optimized code map must never be empty, so check the first elements.
   Label install_optimized;
   // Speculatively move code object into rdx.
-  __ movq(rdx, FieldOperand(rbx, FixedArray::kHeaderSize + kPointerSize));
-  __ cmpq(rcx, FieldOperand(rbx, FixedArray::kHeaderSize));
+  __ movq(rdx, FieldOperand(rbx, SharedFunctionInfo::kFirstCodeSlot));
+  __ cmpq(rcx, FieldOperand(rbx, SharedFunctionInfo::kFirstContextSlot));
   __ j(equal, &install_optimized);
   // Iterate through the rest of the map backwards. rdx holds an index.
@@ -298,7 +298,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
   __ SmiToInteger32(rdx, rdx);
   __ bind(&loop);
   // Do not double-check the first entry.
-  __ cmpq(rdx, Immediate(SharedFunctionInfo::kEntryLength));
+  __ cmpq(rdx, Immediate(SharedFunctionInfo::kSecondEntryIndex));
   __ j(equal, &restore);
   __ subq(rdx, Immediate(SharedFunctionInfo::kEntryLength));  // Skip an entry.
   __ cmpq(rcx, FieldOperand(rbx,
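
For readers outside the V8 tree: the constants introduced by this patch name
slots in the optimized code map, a FixedArray attached to a
SharedFunctionInfo that caches optimized code per native context. Below is a
minimal C++ model of the lookup loop the stub emits, provided to clarify the
control flow; the Entry struct, the LookupOptimizedCode function, and the use
of std::vector are illustrative assumptions, not V8's actual definitions.

#include <cstddef>
#include <vector>

// Hypothetical model of the optimized code map searched by the stub.
// In V8 this is a flat FixedArray indexed via kFirstContextSlot,
// kFirstCodeSlot, kEntryLength, and kSecondEntryIndex (defined in
// src/objects.h of the corresponding revision); here each entry is
// modeled as a (context, code) pair for readability.
struct Entry {
  const void* context;  // native context the code was compiled for
  const void* code;     // optimized code object for that context
};

// Mirrors the stub's control flow: speculatively check the first entry,
// then walk the remaining entries backwards, stopping before the first
// entry so it is not compared twice.
const void* LookupOptimizedCode(const std::vector<Entry>& map,
                                const void* native_context) {
  if (map.empty()) return nullptr;  // the stub assumes a non-empty map
  // Fast path: the first entry, checked before entering the loop.
  if (map.front().context == native_context) return map.front().code;
  // Slow path: iterate from the last entry down to (but not including)
  // index 0, like the stub's index counting down to kSecondEntryIndex.
  for (size_t i = map.size(); i-- > 1;) {
    if (map[i].context == native_context) return map[i].code;
  }
  return nullptr;  // no cached optimized code for this context
}

Walking backwards presumably finds the most recently cached entry first,
while the speculative first-entry check keeps the common single-context case
cheap; the patch itself only replaces the raw FixedArray offsets with the
named slot constants without changing this control flow.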