| Index: src/a64/lithium-codegen-a64.cc
|
| diff --git a/src/a64/lithium-codegen-a64.cc b/src/a64/lithium-codegen-a64.cc
|
| index 56b29ceb09e0d5d9622517e584476bdc2a57e702..dd3cec2a11fa01980b9fbad907306100bfc7e5f7 100644
|
| --- a/src/a64/lithium-codegen-a64.cc
|
| +++ b/src/a64/lithium-codegen-a64.cc
|
| @@ -2724,21 +2724,22 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
|
| LInstanceOfKnownGlobal* instr)
|
| : LDeferredCode(codegen), instr_(instr) { }
|
| virtual void Generate() {
|
| - codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
|
| + codegen()->DoDeferredInstanceOfKnownGlobal(instr_);
|
| }
|
| virtual LInstruction* instr() { return instr_; }
|
| - Label* map_check() { return &map_check_; }
|
| private:
|
| LInstanceOfKnownGlobal* instr_;
|
| - Label map_check_;
|
| };
|
|
|
| DeferredInstanceOfKnownGlobal* deferred =
|
| new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
|
|
|
| - Label return_false, cache_miss;
|
| + Label map_check, return_false, cache_miss, done;
|
| Register object = ToRegister(instr->value());
|
| Register result = ToRegister(instr->result());
|
| + // x4 is expected in the associated deferred code and stub.
|
| + Register map_check_site = x4;
|
| + Register map = x5;
|
|
|
| // This instruction is marked as call. We can clobber any register.
|
| ASSERT(instr->IsMarkedAsCall());
|
| @@ -2750,11 +2751,36 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
|
| // A Smi is not instance of anything.
|
| __ JumpIfSmi(object, &return_false);
|
|
|
| - TODO_UNIMPLEMENTED("patchable inline check");
|
| + // This is the inlined call site instanceof cache. The two occurrences of the
|
| + // hole value will be patched to the last map/result pair generated by the
|
| + // instanceof stub.
|
| + __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
|
| + {
|
| + // Below we use Factory::the_hole_value() on purpose instead of loading from
|
| + // the root array to force relocation and later be able to patch with a
|
| + // custom value.
|
| + InstructionAccurateScope scope(masm(), 5);
|
| + __ bind(&map_check);
|
| + // Will be patched with the cached map.
|
| + Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
|
| + __ LoadRelocated(scratch, Operand(Handle<Object>(cell)));
|
| + __ ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
|
| + __ cmp(map, Operand(scratch));
|
| + __ b(&cache_miss, ne);
|
| + // The address of this instruction is computed relative to the map check
|
| + // above, so check the size of the code generated.
|
| + ASSERT(masm()->InstructionsGeneratedSince(&map_check) == 4);
|
| + // Will be patched with the cached result.
|
| + __ LoadRelocated(result, Operand(factory()->the_hole_value()));
|
| + }
|
| + __ B(&done);
|
|
|
| // The inlined call site cache did not match.
|
| // Check null and string before calling the deferred code.
|
| __ Bind(&cache_miss);
|
| + // Compute the address of the map check. It must not be clobbered until the
|
| + // InstanceOfStub has used it.
|
| + __ Adr(map_check_site, &map_check);
|
| // Null is not instance of anything.
|
| __ JumpIfRoot(object, Heap::kNullValueRootIndex, &return_false);
|
|
|
| @@ -2772,6 +2798,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
|
|
|
| // Here result is either true or false.
|
| __ Bind(deferred->exit());
|
| + __ Bind(&done);
|
| }
|
|
|
|
|
| @@ -2783,11 +2810,16 @@ void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
|
| }
|
|
|
|
|
| -void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
|
| - Label* map_check) {
|
| +void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
|
| Register result = ToRegister(instr->result());
|
| ASSERT(result.Is(x0)); // InstanceofStub returns its result in x0.
|
| - InstanceofStub::Flags flags = InstanceofStub::kArgsInRegisters;
|
| + InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
|
| + flags = static_cast<InstanceofStub::Flags>(
|
| + flags | InstanceofStub::kArgsInRegisters);
|
| + flags = static_cast<InstanceofStub::Flags>(
|
| + flags | InstanceofStub::kReturnTrueFalseObject);
|
| + flags = static_cast<InstanceofStub::Flags>(
|
| + flags | InstanceofStub::kCallSiteInlineCheck);
|
|
|
| PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
|
|
|
| @@ -2803,12 +2835,6 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
|
| LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
|
| safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
|
|
|
| - // TODO(all): This could be integrated into InstanceofStub.
|
| - __ LoadTrueFalseRoots(x1, x2);
|
| - ASSERT(Smi::FromInt(0) == 0);
|
| - __ Cmp(result, 0);
|
| - __ Csel(result, x1, x2, eq);
|
| -
|
| // Put the result value into the result register slot.
|
| __ StoreToSafepointRegisterSlot(result, result);
|
| }
|
|
|