Index: src/x64/stub-cache-x64.cc
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 0994230073cba9ff05367bbd028908bae0e50cb1..58a3e0f6a2f4466f5ed21eb376e3842d20518882 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -47,19 +47,24 @@ static void ProbeTable(MacroAssembler* masm,
                        StubCache::Table table,
                        Register name,
                        Register offset) {
-  // The offset register must hold a *positive* smi.
+  ASSERT_EQ(8, kPointerSize);
+  ASSERT_EQ(16, sizeof(StubCache::Entry));
+  // The offset register holds the entry offset times four (due to masking
+  // and shifting optimizations).
   ExternalReference key_offset(SCTableReference::keyReference(table));
   Label miss;
 
   __ movq(kScratchRegister, key_offset);
-  SmiIndex index = masm->SmiToIndex(offset, offset, kPointerSizeLog2);
   // Check that the key in the entry matches the name.
-  __ cmpl(name, Operand(kScratchRegister, index.reg, index.scale, 0));
+  // Multiply entry offset by 16 to get the entry address. Since the
+  // offset register already holds the entry offset times four, multiply
+  // by a further four.
+  __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0));
   __ j(not_equal, &miss);
   // Get the code entry from the cache.
   // Use key_offset + kPointerSize, rather than loading value_offset.
   __ movq(kScratchRegister,
-          Operand(kScratchRegister, index.reg, index.scale, kPointerSize));
+          Operand(kScratchRegister, offset, times_4, kPointerSize));
   // Check that the flags match what we're looking for.
   __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
   __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
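A standalone sketch of the addressing arithmetic the new operands rely on may help
(illustrative C++, not V8 code; the Entry struct below is an assumption mirroring
the 16-byte key/value layout that the patch asserts via ASSERT_EQ): with the offset
register holding entry_index * 4, the times_4 scale lands exactly on 16-byte entry
boundaries, and adding kPointerSize (8) reaches the value slot.

// Sketch only: Entry is a hypothetical stand-in for StubCache::Entry,
// assumed to be two 8-byte pointers (key, value) as the patch asserts.
#include <cassert>
#include <cstdint>
#include <cstdio>

struct Entry {
  void* key;    // 8 bytes on x64
  void* value;  // 8 bytes on x64
};              // sizeof(Entry) == 16

int main() {
  static_assert(sizeof(void*) == 8, "sketch assumes x64 pointers");
  static_assert(sizeof(Entry) == 16, "sketch assumes 16-byte entries");

  Entry table[8] = {};
  uintptr_t base = reinterpret_cast<uintptr_t>(table);

  for (uint32_t entry_index = 0; entry_index < 8; ++entry_index) {
    // The offset register already holds entry_index * 4 when ProbeTable
    // runs, thanks to the earlier masking and shifting.
    uint32_t offset = entry_index * 4;

    // Operand(kScratchRegister, offset, times_4, 0): scaling by a further
    // four yields entry_index * 16, the byte offset of the entry's key.
    uintptr_t key_addr = base + static_cast<uintptr_t>(offset) * 4;

    // Operand(..., times_4, kPointerSize): the same address plus 8 bytes
    // reaches the value slot without loading value_offset separately.
    uintptr_t value_addr = key_addr + sizeof(void*);

    assert(key_addr == reinterpret_cast<uintptr_t>(&table[entry_index].key));
    assert(value_addr == reinterpret_cast<uintptr_t>(&table[entry_index].value));
  }
  std::printf("probe addresses line up with the entry layout\n");
  return 0;
}

Folding the multiply-by-16 into the operand's scale factor is what lets the patch
drop the SmiToIndex conversion: the offset register arrives pre-scaled by four from
the hash computation, and the addressing mode supplies the remaining factor of four
in the same instruction.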