| Index: src/arm/code-stubs-arm.cc |
| =================================================================== |
| --- src/arm/code-stubs-arm.cc (revision 6297) |
| +++ src/arm/code-stubs-arm.cc (working copy) |
| @@ -2895,13 +2895,25 @@ |
| // r1 (or at sp), depending on whether or not |
| // args_in_registers() is true. |
| void InstanceofStub::Generate(MacroAssembler* masm) { |
| + // Call site inlining and patching implies arguments in registers. |
| + ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck()); |
| + // ReturnTrueFalse is only implemented for inlined call sites. |
| + ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck()); |
| + |
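The two ASSERTs encode the stub's valid flag combinations: an inlined call-site check requires the arguments in registers, and returning true/false objects requires an inlined check. A compile-time restatement as a sketch (the names are stand-ins, not V8 API):

    // Sketch: restates the two ASSERTs above as a compile-time predicate.
    constexpr bool ValidFlags(bool args_in_regs, bool inline_check,
                              bool return_bool_object) {
      return (args_in_regs || !inline_check) &&      // inlining => args in regs
             (inline_check || !return_bool_object);  // bool result => inlining
    }
    static_assert(ValidFlags(true, true, true), "fully inlined variant is valid");
    static_assert(!ValidFlags(false, true, false), "inlining needs register args");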
| // Fixed register usage throughout the stub: |
| const Register object = r0; // Object (lhs). |
| const Register map = r3; // Map of the object. |
| const Register function = r1; // Function (rhs). |
| const Register prototype = r4; // Prototype of the function. |
| + const Register inline_site = r9; |
| const Register scratch = r2; |
| + |
| + const int32_t kDeltaToLoadBoolResult = 3 * kPointerSize; |
| + const uint32_t kLdrOffsetMask = (1 << 12) - 1; |
| + const int32_t kPCRegOffset = 2 * kPointerSize; |
| + |
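A note on these constants (an illustrative sketch, not part of the patch): an ARM ldr rd, [pc, #imm] instruction keeps its unsigned 12-bit immediate in the low bits, and reading pc yields the instruction's own address plus 8 (two words), which is where kLdrOffsetMask and kPCRegOffset (2 * kPointerSize on 32-bit ARM) come from. kDeltaToLoadBoolResult assumes the boolean-result load sits three words past the map-check load. The address arithmetic, standalone:

    #include <cstdint>

    // Standalone model of the address arithmetic emitted below; 4 == kPointerSize.
    uint32_t ConstantPoolSlot(uint32_t load_address, uint32_t instruction) {
      const uint32_t kLdrOffsetMask = (1 << 12) - 1;  // imm12 of ldr rd, [pc, #imm]
      const uint32_t kPCRegOffset = 2 * 4;            // pc reads 8 bytes ahead
      uint32_t load_offset = instruction & kLdrOffsetMask;
      // constant_address = (load_address + actual_pc_offset) + load_offset
      return load_address + kPCRegOffset + load_offset;
    }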
| Label slow, loop, is_instance, is_not_instance, not_js_object; |
| + |
| if (!HasArgsInRegisters()) { |
| __ ldr(object, MemOperand(sp, 1 * kPointerSize)); |
| __ ldr(function, MemOperand(sp, 0)); |
| @@ -2911,50 +2923,143 @@ |
| __ BranchOnSmi(object, &not_js_object); |
| __ IsObjectJSObjectType(object, map, scratch, &not_js_object); |
| - // Look up the function and the map in the instanceof cache. |
| - Label miss; |
| - __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex); |
| - __ cmp(function, ip); |
| - __ b(ne, &miss); |
| - __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex); |
| - __ cmp(map, ip); |
| - __ b(ne, &miss); |
| - __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); |
| - __ Ret(HasArgsInRegisters() ? 0 : 2); |
| + // If there is a call site cache, don't look in the global cache, but do |
| + // the real lookup and update the call site cache. |
| + if (!HasCallSiteInlineCheck()) { |
| + Label miss; |
| + __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex); |
| + __ cmp(function, ip); |
| + __ b(ne, &miss); |
| + __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex); |
| + __ cmp(map, ip); |
| + __ b(ne, &miss); |
| + __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); |
| + __ Ret(HasArgsInRegisters() ? 0 : 2); |
| - __ bind(&miss); |
| + __ bind(&miss); |
| + } |
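The block above is the global instanceof cache fast path: three heap roots remember the last (function, map) pair and its answer. A minimal C++ model of that check, with void* standing in for heap objects (not V8's actual types):

    // Minimal model of the three cache roots checked above.
    struct InstanceofCache {
      void* function;  // Heap::kInstanceofCacheFunctionRootIndex
      void* map;       // Heap::kInstanceofCacheMapRootIndex
      void* answer;    // Heap::kInstanceofCacheAnswerRootIndex
    };

    // Mirrors the cmp / b(ne, &miss) sequence: hit only if both keys match.
    bool TryCachedAnswer(const InstanceofCache& cache, void* function, void* map,
                         void** result) {
      if (cache.function != function || cache.map != map) return false;  // miss
      *result = cache.answer;  // LoadRoot(r0, ...Answer...)
      return true;
    }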
| + |
| + // Get the prototype of the function. |
| __ TryGetFunctionPrototype(function, prototype, scratch, &slow); |
| // Check that the function prototype is a JS object. |
| __ BranchOnSmi(prototype, &slow); |
| __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); |
| - __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); |
| - __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); |
| + // Update the global instanceof or call site inlined cache with the current |
| + // map and function. The cached answer will be set when it is known below. |
| + if (!HasCallSiteInlineCheck()) { |
| + __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); |
| + __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); |
| + } else { |
| + ASSERT(HasArgsInRegisters()); |
| + // Patch the (relocated) inlined map check. |
| + // Get the load instruction. |
| + // The offset was stored in r4 slot on the stack. |
| + // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal) |
| + __ ldr(scratch, MemOperand(sp, r4.code() * kPointerSize)); |
| + __ sub(inline_site, lr, scratch); |
| + __ ldr(scratch, MemOperand(inline_site)); |
|
Søren Thygesen Gjesse
2011/01/13 15:14:36
This code pattern (the patching of the constant pool…
Alexandre
2011/01/14 17:53:40
Introduced a new GetRelocatedValueLocation MacroAssembler…
|
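To make the lr arithmetic above concrete: the optimized call site stores, in the stack slot indexed by r4.code(), the distance from the stub call's return address back to the inlined map-check load. A hedged standalone model of that recovery (the reply above mentions a GetRelocatedValueLocation MacroAssembler helper; the names and signature here are assumptions):

    #include <cstdint>

    // Models "ldr scratch, [sp, #r4.code() * 4]; sub inline_site, lr, scratch".
    // stack points at sp; r4_code is the register's slot index; lr is the
    // return address into the optimized code.
    uint32_t InlineSiteAddress(const uint32_t* stack, int r4_code, uint32_t lr) {
      uint32_t delta = stack[r4_code];  // pushed by the deferred call site
      return lr - delta;                // address of the inlined ldr instruction
    }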
| + if (FLAG_debug_code) { |
| + // Check that the instruction is a ldr reg, [pc + offset]. |
| + __ and_(scratch, scratch, Operand(kLdrPCPattern)); |
| + __ cmp(scratch, Operand(kLdrPCPattern)); |
| + __ Check(eq, "The instruction to patch should be a load from pc."); |
| + // scratch was clobbered. Restore it. |
| + __ ldr(scratch, MemOperand(inline_site)); |
| + } |
| + // Retrieve the address of the constant. |
| + // constant_address = (load_address + actual_pc_offset) + load_offset |
| + __ and_(scratch, scratch, Operand(kLdrOffsetMask)); |
| + __ add(scratch, inline_site, Operand(scratch)); |
| + __ add(scratch, scratch, Operand(kPCRegOffset)); |
| + |
| + // scratch: address of the map to patch (in the constant pool). |
| + // inline_site: address of the inlined 'ldr reg, map' instruction. |
| + // Preserve this register as we will use it again. |
| + |
| + // Patch the constant in the constant pool. |
| + __ str(map, MemOperand(scratch)); |
| + } |
| + |
| // Register mapping: r3 is object map and r4 is function prototype. |
| // Get prototype of object into r2. |
| __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset)); |
| + // We don't need map any more. Use it as a scratch register. |
| + Register scratch3 = map; |
|
Søren Thygesen Gjesse
2011/01/13 15:14:36
Please add
map = no_reg;
as well.
Alexandre
2011/01/14 17:53:40
Done.
|
| + |
| // Loop through the prototype chain looking for the function prototype. |
| + __ LoadRoot(scratch3, Heap::kNullValueRootIndex); |
| __ bind(&loop); |
| __ cmp(scratch, Operand(prototype)); |
| __ b(eq, &is_instance); |
| - __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| - __ cmp(scratch, ip); |
| + __ cmp(scratch, scratch3); |
| __ b(eq, &is_not_instance); |
| __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset)); |
| __ jmp(&loop); |
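The loop is the actual instanceof semantics: follow the prototype chain from the object's map until it reaches either the function's prototype (instance) or null (not an instance). The same walk as a C++ sketch, with stand-in types:

    // Stand-in for a heap object; the prototype field models map()->prototype.
    struct HeapObj {
      HeapObj* prototype;
    };

    // Walks the chain exactly like the loop above; null_value plays the role
    // of the Heap::kNullValueRootIndex sentinel loaded into scratch3.
    bool HasInPrototypeChain(HeapObj* object, HeapObj* target_prototype,
                             HeapObj* null_value) {
      for (HeapObj* p = object->prototype; ; p = p->prototype) {
        if (p == target_prototype) return true;  // is_instance
        if (p == null_value) return false;       // is_not_instance
      }
    }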
| __ bind(&is_instance); |
| + if (!HasCallSiteInlineCheck()) { |
| __ mov(r0, Operand(Smi::FromInt(0))); |
| __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); |
| + } else { |
| + // Patch the call site to return true (relocated boolean constant). |
| + __ ldr(scratch, MemOperand(inline_site, kDeltaToLoadBoolResult)); |
| + if (FLAG_debug_code) { |
| + // Check that the instruction is a ldr reg, [pc + offset]. |
| + __ and_(scratch, scratch, Operand(kLdrPCPattern)); |
| + __ cmp(scratch, Operand(kLdrPCPattern)); |
| + __ Check(eq, "The instruction to patch should be a load from pc."); |
| + // Scratch was clobbered. Restore it. |
| + __ ldr(scratch, MemOperand(inline_site, kDeltaToLoadBoolResult)); |
| + } |
| + // Get the address of the constant. |
| + __ and_(scratch, scratch, Operand(kLdrOffsetMask)); |
| + __ add(scratch, inline_site, Operand(scratch)); |
| + __ add(scratch, scratch, Operand(kPCRegOffset)); |
| + |
| + // Patch the constant in the constant pool. |
| + __ LoadRoot(r0, Heap::kTrueValueRootIndex); |
| + __ str(r0, MemOperand(scratch)); |
| + |
| + if (!ReturnTrueFalseObject()) { |
| + __ mov(r0, Operand(Smi::FromInt(0))); |
| + } |
| + } |
| __ Ret(HasArgsInRegisters() ? 0 : 2); |
| __ bind(&is_not_instance); |
| + if (!HasCallSiteInlineCheck()) { |
| __ mov(r0, Operand(Smi::FromInt(1))); |
| __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); |
| + } else { |
| + // Patch the call site to return false. |
| + __ ldr(scratch, MemOperand(inline_site, kDeltaToLoadBoolResult)); |
| + if (FLAG_debug_code) { |
| + // Check that the instruction is a ldr reg, [pc + offset]. |
| + __ and_(scratch, scratch, Operand(kLdrPCPattern)); |
| + __ cmp(scratch, Operand(kLdrPCPattern)); |
| + __ Check(eq, "The instruction to patch should be a load from pc."); |
| + // Scratch was clobbered. Restore it. |
| + __ ldr(scratch, MemOperand(inline_site, kDeltaToLoadBoolResult)); |
| + } |
| + // Get the address of the constant. |
| + __ and_(scratch, scratch, Operand(kLdrOffsetMask)); |
| + __ add(scratch, inline_site, Operand(scratch)); |
| + __ add(scratch, scratch, Operand(kPCRegOffset)); |
| + |
| + // Patch the constant in the constant pool. |
| + // Use r0 as we need the result in it. |
| + __ LoadRoot(r0, Heap::kFalseValueRootIndex); |
| + __ str(r0, MemOperand(scratch)); |
| + |
| + if (!ReturnTrueFalseObject()) { |
| + __ mov(r0, Operand(Smi::FromInt(1))); |
| + } |
| + } |
| __ Ret(HasArgsInRegisters() ? 0 : 2); |
| Label object_not_null, object_not_null_or_smi; |
| @@ -2985,13 +3090,30 @@ |
| // Slow-case. Tail call builtin. |
| __ bind(&slow); |
| - if (HasArgsInRegisters()) { |
| + if (!ReturnTrueFalseObject()) { |
| + if (HasArgsInRegisters()) { |
| + __ Push(r0, r1); |
| + } |
| + __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS); |
| + } else { |
| + __ EnterInternalFrame(); |
| __ Push(r0, r1); |
| + __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_JS); |
| + __ LeaveInternalFrame(); |
| + __ tst(r0, r0); |
| + __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq); |
| + __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne); |
| + __ Ret(HasArgsInRegisters() ? 0 : 2); |
| } |
| - __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS); |
| } |
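For the slow path above: the INSTANCE_OF builtin follows the same Smi convention as the fast-path returns (Smi 0 means instance, Smi 1 means not), so the ReturnTrueFalseObject variant only has to map zero/non-zero onto the boolean heap objects after leaving the internal frame. As a one-line sketch:

    // smi_result is the builtin's untagged answer; true_value / false_value
    // stand in for the kTrueValueRootIndex / kFalseValueRootIndex roots.
    void* ToBooleanObject(int smi_result, void* true_value, void* false_value) {
      return smi_result == 0 ? true_value : false_value;  // tst r0, r0 + cond loads
    }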
| +Register InstanceofStub::left() { return r0; } |
| + |
| + |
| +Register InstanceofStub::right() { return r1; } |
| + |
| + |
| void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { |
| // The displacement is the offset of the last parameter (if any) |
| // relative to the frame pointer. |