| Index: src/arm/code-stubs-arm.cc
|
| ===================================================================
|
| --- src/arm/code-stubs-arm.cc (revision 6398)
|
| +++ src/arm/code-stubs-arm.cc (working copy)
|
| @@ -2890,18 +2890,33 @@
|
| }
|
|
|
|
|
| -// Uses registers r0 to r4. Expected input is
|
| -// object in r0 (or at sp+1*kPointerSize) and function in
|
| -// r1 (or at sp), depending on whether or not
|
| -// args_in_registers() is true.
|
| +// Uses registers r0 to r4.
|
| +// Expected input (depending on whether args are in registers or on the stack):
|
| +// * object: r0 or at sp + 1 * kPointerSize.
|
| +// * function: r1 or at sp.
|
| +//
|
| +// An inlined call site may have been generated before calling this stub.
|
| +// In this case the offset to the inline site to patch is passed on the stack,
|
| +// in the safepoint slot for register r4.
|
| +// (See LCodeGen::DoInstanceOfKnownGlobal)
|
| void InstanceofStub::Generate(MacroAssembler* masm) {
|
| + // Call site inlining and patching implies arguments in registers.
|
| + ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
|
| + // ReturnTrueFalse is only implemented for inlined call sites.
|
| + ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
|
| +
|
| // Fixed register usage throughout the stub:
|
| const Register object = r0; // Object (lhs).
|
| - const Register map = r3; // Map of the object.
|
| + Register map = r3; // Map of the object.
|
| const Register function = r1; // Function (rhs).
|
| const Register prototype = r4; // Prototype of the function.
|
| + const Register inline_site = r9;
|
| const Register scratch = r2;
|
| +
|
| + const int32_t kDeltaToLoadBoolResult = 3 * kPointerSize;
|
| +
|
| Label slow, loop, is_instance, is_not_instance, not_js_object;
|
| +
|
| if (!HasArgsInRegisters()) {
|
| __ ldr(object, MemOperand(sp, 1 * kPointerSize));
|
| __ ldr(function, MemOperand(sp, 0));
|
| @@ -2911,50 +2926,100 @@
|
| __ BranchOnSmi(object, &not_js_object);
|
| __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
|
|
|
| - // Look up the function and the map in the instanceof cache.
|
| - Label miss;
|
| - __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex);
|
| - __ cmp(function, ip);
|
| - __ b(ne, &miss);
|
| - __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
|
| - __ cmp(map, ip);
|
| - __ b(ne, &miss);
|
| - __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
|
| - __ Ret(HasArgsInRegisters() ? 0 : 2);
|
| + // If there is a call site cache don't look in the global cache, but do the
|
| + // real lookup and update the call site cache.
|
| + if (!HasCallSiteInlineCheck()) {
|
| + Label miss;
|
| + __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex);
|
| + __ cmp(function, ip);
|
| + __ b(ne, &miss);
|
| + __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
|
| + __ cmp(map, ip);
|
| + __ b(ne, &miss);
|
| + __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
|
| + __ Ret(HasArgsInRegisters() ? 0 : 2);
|
|
|
| - __ bind(&miss);
|
| + __ bind(&miss);
|
| + }
|
| +
|
| + // Get the prototype of the function.
|
| __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
|
|
|
| // Check that the function prototype is a JS object.
|
| __ BranchOnSmi(prototype, &slow);
|
| __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
|
|
|
| - __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
|
| - __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
|
| + // Update the global instanceof or call site inlined cache with the current
|
| + // map and function. The cached answer will be set when it is known below.
|
| + if (!HasCallSiteInlineCheck()) {
|
| + __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
|
| + __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
|
| + } else {
|
| + ASSERT(HasArgsInRegisters());
|
| + // Patch the (relocated) inlined map check.
|
|
|
| + // The offset was stored in r4 safepoint slot.
|
| + // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal)
|
| + __ ldr(scratch, MacroAssembler::SafepointRegisterSlot(r4));
|
| + __ sub(inline_site, lr, scratch);
|
| + // Get the map location in scratch and patch it.
|
| + __ GetRelocatedValueLocation(inline_site, scratch);
|
| + __ str(map, MemOperand(scratch));
|
| + }
|
| +
|
| // Register mapping: r3 is object map and r4 is function prototype.
|
| // Get prototype of object into r2.
|
| __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset));
|
|
|
| + // We don't need map any more. Use it as a scratch register.
|
| + Register scratch2 = map;
|
| + map = no_reg;
|
| +
|
| // Loop through the prototype chain looking for the function prototype.
|
| + __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
|
| __ bind(&loop);
|
| __ cmp(scratch, Operand(prototype));
|
| __ b(eq, &is_instance);
|
| - __ LoadRoot(ip, Heap::kNullValueRootIndex);
|
| - __ cmp(scratch, ip);
|
| + __ cmp(scratch, scratch2);
|
| __ b(eq, &is_not_instance);
|
| __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
|
| __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
|
| __ jmp(&loop);
|
|
|
| __ bind(&is_instance);
|
| - __ mov(r0, Operand(Smi::FromInt(0)));
|
| - __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
|
| + if (!HasCallSiteInlineCheck()) {
|
| + __ mov(r0, Operand(Smi::FromInt(0)));
|
| + __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
|
| + } else {
|
| + // Patch the call site to return true.
|
| + __ LoadRoot(r0, Heap::kTrueValueRootIndex);
|
| + __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
|
| + // Get the boolean result location in scratch and patch it.
|
| + __ GetRelocatedValueLocation(inline_site, scratch);
|
| + __ str(r0, MemOperand(scratch));
|
| +
|
| + if (!ReturnTrueFalseObject()) {
|
| + __ mov(r0, Operand(Smi::FromInt(0)));
|
| + }
|
| + }
|
| __ Ret(HasArgsInRegisters() ? 0 : 2);
|
|
|
| __ bind(&is_not_instance);
|
| - __ mov(r0, Operand(Smi::FromInt(1)));
|
| - __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
|
| + if (!HasCallSiteInlineCheck()) {
|
| + __ mov(r0, Operand(Smi::FromInt(1)));
|
| + __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
|
| + } else {
|
| + // Patch the call site to return false.
|
| + __ LoadRoot(r0, Heap::kFalseValueRootIndex);
|
| + __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
|
| + // Get the boolean result location in scratch and patch it.
|
| + __ GetRelocatedValueLocation(inline_site, scratch);
|
| + __ str(r0, MemOperand(scratch));
|
| +
|
| + if (!ReturnTrueFalseObject()) {
|
| + __ mov(r0, Operand(Smi::FromInt(1)));
|
| + }
|
| + }
|
| __ Ret(HasArgsInRegisters() ? 0 : 2);
|
|
|
| Label object_not_null, object_not_null_or_smi;
|
| @@ -2962,7 +3027,7 @@
|
| // Before null, smi and string value checks, check that the rhs is a function
|
| // as for a non-function rhs an exception needs to be thrown.
|
| __ BranchOnSmi(function, &slow);
|
| - __ CompareObjectType(function, map, scratch, JS_FUNCTION_TYPE);
|
| + __ CompareObjectType(function, scratch2, scratch, JS_FUNCTION_TYPE);
|
| __ b(ne, &slow);
|
|
|
| // Null is not instance of anything.
|
| @@ -2985,13 +3050,30 @@
|
|
|
| // Slow-case. Tail call builtin.
|
| __ bind(&slow);
|
| - if (HasArgsInRegisters()) {
|
| + if (!ReturnTrueFalseObject()) {
|
| + if (HasArgsInRegisters()) {
|
| + __ Push(r0, r1);
|
| + }
|
| + __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
|
| + } else {
|
| + __ EnterInternalFrame();
|
| __ Push(r0, r1);
|
| + __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_JS);
|
| + __ LeaveInternalFrame();
|
| + __ cmp(r0, Operand(0));
|
| + __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
|
| + __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
|
| + __ Ret(HasArgsInRegisters() ? 0 : 2);
|
| }
|
| - __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
|
| }
|
|
|
|
|
| +Register InstanceofStub::left() { return r0; }
|
| +
|
| +
|
| +Register InstanceofStub::right() { return r1; }
|
| +
|
| +
|
| void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
|
| // The displacement is the offset of the last parameter (if any)
|
| // relative to the frame pointer.
|
|
|