| Index: src/arm/lithium-codegen-arm.cc
|
| ===================================================================
|
| --- src/arm/lithium-codegen-arm.cc (revision 6398)
|
| +++ src/arm/lithium-codegen-arm.cc (working copy)
|
| @@ -1949,10 +1949,122 @@
|
|
|
|
|
| void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
|
| - Abort("DoInstanceOfKnownGlobal unimplemented.");
|
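| + // Deferred code that falls back to the InstanceofStub when the inlined
|
| + // call site cache below misses; map_check_ marks the patch site used to
|
| + // locate the inlined check.
|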
| + class DeferredInstanceOfKnownGlobal: public LDeferredCode {
|
| + public:
|
| + DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
|
| + LInstanceOfKnownGlobal* instr)
|
| + : LDeferredCode(codegen), instr_(instr) { }
|
| + virtual void Generate() {
|
| + codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
|
| + }
|
| +
|
| + Label* map_check() { return &map_check_; }
|
| +
|
| + private:
|
| + LInstanceOfKnownGlobal* instr_;
|
| + Label map_check_;
|
| + };
|
| +
|
| + DeferredInstanceOfKnownGlobal* deferred;
|
| + deferred = new DeferredInstanceOfKnownGlobal(this, instr);
|
| +
|
| + Label done, false_result;
|
| + Register object = ToRegister(instr->input());
|
| + Register temp = ToRegister(instr->temp());
|
| + Register result = ToRegister(instr->result());
|
| +
|
| + ASSERT(object.is(r0));
|
| + ASSERT(result.is(r0));
|
| +
|
| + // A Smi is not an instance of anything.
|
| + __ BranchOnSmi(object, &false_result);
|
| +
|
| + // This is the inlined call site instanceof cache. The two occurrences of the
|
| + // hole value will be patched to the last map/result pair generated by the
|
| + // instanceof stub.
|
| + Label cache_miss;
|
| + Register map = temp;
|
| + __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
|
| + __ bind(deferred->map_check()); // Label for calculating code patching.
|
| + // We use Factory::the_hole_value() on purpose instead of loading from the
|
| + // root array; this forces relocation so that the instruction can later be
|
| + // patched with the cached map.
|
| + __ mov(ip, Operand(Factory::the_hole_value()));
|
| + __ cmp(map, Operand(ip));
|
| + __ b(ne, &cache_miss);
|
| + // We use Factory::the_hole_value() on purpose instead of loading from the
|
| + // root array; this forces relocation so that the instruction can later be
|
| + // patched with the true or false object.
|
| + __ mov(result, Operand(Factory::the_hole_value()));
|
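| + // On a cache hit the patched result (the true or false object) is already
|
| + // in the result register.
|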
| + __ b(&done);
|
| +
|
| + // The inlined call site cache did not match. Check null and string before
|
| + // calling the deferred code.
|
| + __ bind(&cache_miss);
|
| + // Null is not an instance of anything.
|
| + __ LoadRoot(ip, Heap::kNullValueRootIndex);
|
| + __ cmp(object, Operand(ip));
|
| + __ b(eq, &false_result);
|
| +
|
| + // String values are not instances of anything.
|
| + Condition is_string = masm_->IsObjectStringType(object, temp);
|
| + __ b(is_string, &false_result);
|
| +
|
| + // Go to the deferred code.
|
| + __ b(deferred->entry());
|
| +
|
| + __ bind(&false_result);
|
| + __ LoadRoot(result, Heap::kFalseValueRootIndex);
|
| +
|
| + // Here result holds either the true or the false object. The deferred code
|
| + // also produces a true or false object.
|
| + __ bind(deferred->exit());
|
| + __ bind(&done);
|
| }
|
|
|
|
|
| +void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
|
| + Label* map_check) {
|
| + Register result = ToRegister(instr->result());
|
| + ASSERT(result.is(r0));
|
| +
|
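| + // Configure the stub to take its arguments in registers, to patch the
|
| + // inlined map check at this call site, and to return the true/false objects
|
| + // as its result.
|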
| + InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
|
| + flags = static_cast<InstanceofStub::Flags>(
|
| + flags | InstanceofStub::kArgsInRegisters);
|
| + flags = static_cast<InstanceofStub::Flags>(
|
| + flags | InstanceofStub::kCallSiteInlineCheck);
|
| + flags = static_cast<InstanceofStub::Flags>(
|
| + flags | InstanceofStub::kReturnTrueFalseObject);
|
| + InstanceofStub stub(flags);
|
| +
|
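| + // Save all registers so the stub call below can be recorded as a safepoint
|
| + // with registers; the slots for temp and result are overwritten below to
|
| + // pass the patch offset and to preserve the stub result across the restore.
|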
| + __ PushSafepointRegisters();
|
| +
|
| + // Get the temp register reserved by the instruction. This needs to be r4 as
|
| + // its slot among the pushed safepoint registers is used to communicate the
|
| + // offset to the location of the map check.
|
| + Register temp = ToRegister(instr->temp());
|
| + ASSERT(temp.is(r4));
|
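| + // Load the function to check against into the register the stub expects as
|
| + // the right-hand side of instanceof.
|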
| + __ mov(InstanceofStub::right(), Operand(instr->function()));
|
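| + // delta is the distance, in instructions, from the map check to the
|
| + // instruction following the stub call; kAdditionalDelta is assumed to cover
|
| + // exactly the instructions emitted between before_push_delta and the end of
|
| + // the call (the ASSERT_EQ below checks that count).
|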
| + static const int kAdditionalDelta = 4;
|
| + int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
|
| + Label before_push_delta;
|
| + __ bind(&before_push_delta);
|
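| + // Keep the assembler from emitting a constant pool within the next
|
| + // kAdditionalDelta instructions; an unexpected pool here would change the
|
| + // instruction count and invalidate delta.
|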
| + __ BlockConstPoolFor(kAdditionalDelta);
|
| + __ mov(temp, Operand(delta * kPointerSize));
|
| + __ StoreToSafepointRegisterSlot(temp);
|
| + __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
|
| + ASSERT_EQ(kAdditionalDelta,
|
| + masm_->InstructionsGeneratedSince(&before_push_delta));
|
| + RecordSafepointWithRegisters(
|
| + instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
|
| + // Put the result value into the result register slot and
|
| + // restore all registers.
|
| + __ StoreToSafepointRegisterSlot(result);
|
| +
|
| + __ PopSafepointRegisters();
|
| +}
|
| +
|
| +
|
| static Condition ComputeCompareCondition(Token::Value op) {
|
| switch (op) {
|
| case Token::EQ_STRICT:
|
|
|