Chromium Code Reviews

Unified Diff: src/ppc/lithium-codegen-ppc.cc

Issue 1314263002: PPC: Correctify instanceof and make it optimizable. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Rebase (created 5 years, 4 months ago)
Index: src/ppc/lithium-codegen-ppc.cc
diff --git a/src/ppc/lithium-codegen-ppc.cc b/src/ppc/lithium-codegen-ppc.cc
index 1c2dcd8e52a3485f0444536c86f02408b379d3a8..c996eca42129ca52b166eb10ccd0d059fd57a131 100644
--- a/src/ppc/lithium-codegen-ppc.cc
+++ b/src/ppc/lithium-codegen-ppc.cc
@@ -2180,6 +2180,13 @@ void LCodeGen::EmitBranch(InstrType instr, Condition cond, CRegister cr) {
template <class InstrType>
+void LCodeGen::EmitTrueBranch(InstrType instr, Condition cond, CRegister cr) {
+ int true_block = instr->TrueDestination(chunk_);
+ __ b(cond, chunk_->GetAssemblyLabel(true_block), cr);
+}
+
+
+template <class InstrType>
void LCodeGen::EmitFalseBranch(InstrType instr, Condition cond, CRegister cr) {
int false_block = instr->FalseDestination(chunk_);
__ b(cond, chunk_->GetAssemblyLabel(false_block), cr);
@@ -2759,157 +2766,42 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
- DCHECK(ToRegister(instr->left()).is(r3)); // Object is in r3.
- DCHECK(ToRegister(instr->right()).is(r4)); // Function is in r4.
-
- InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
+ DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
+ DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
+ DCHECK(ToRegister(instr->result()).is(r3));
+ InstanceOfStub stub(isolate());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-
- if (CpuFeatures::IsSupported(ISELECT)) {
- __ mov(r4, Operand(factory()->true_value()));
- __ mov(r5, Operand(factory()->false_value()));
- __ cmpi(r3, Operand::Zero());
- __ isel(eq, r3, r4, r5);
- } else {
- Label equal, done;
- __ cmpi(r3, Operand::Zero());
- __ beq(&equal);
- __ mov(r3, Operand(factory()->false_value()));
- __ b(&done);
-
- __ bind(&equal);
- __ mov(r3, Operand(factory()->true_value()));
- __ bind(&done);
- }
}
-void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
- class DeferredInstanceOfKnownGlobal final : public LDeferredCode {
- public:
- DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
- LInstanceOfKnownGlobal* instr)
- : LDeferredCode(codegen), instr_(instr) {}
- void Generate() override {
- codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_,
- &load_bool_);
- }
- LInstruction* instr() override { return instr_; }
- Label* map_check() { return &map_check_; }
- Label* load_bool() { return &load_bool_; }
-
- private:
- LInstanceOfKnownGlobal* instr_;
- Label map_check_;
- Label load_bool_;
- };
-
- DeferredInstanceOfKnownGlobal* deferred;
- deferred = new (zone()) DeferredInstanceOfKnownGlobal(this, instr);
+void LCodeGen::DoHasInPrototypeChainAndBranch(
+ LHasInPrototypeChainAndBranch* instr) {
+ Register const object = ToRegister(instr->object());
+ Register const object_map = scratch0();
+ Register const object_prototype = object_map;
+ Register const prototype = ToRegister(instr->prototype());
- Label done, false_result;
- Register object = ToRegister(instr->value());
- Register temp = ToRegister(instr->temp());
- Register result = ToRegister(instr->result());
-
- // A Smi is not instance of anything.
- __ JumpIfSmi(object, &false_result);
-
- // This is the inlined call site instanceof cache. The two occurences of the
- // hole value will be patched to the last map/result pair generated by the
- // instanceof stub.
- Label cache_miss;
- Register map = temp;
- __ LoadP(map, FieldMemOperand(object, HeapObject::kMapOffset));
- {
- // Block trampoline emission to ensure the positions of instructions are
- // as expected by the patcher. See InstanceofStub::Generate().
- Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
- __ bind(deferred->map_check()); // Label for calculating code patching.
- // We use Factory::the_hole_value() on purpose instead of loading from the
- // root array to force relocation to be able to later patch with
- // the cached map.
- Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
- __ mov(ip, Operand(cell));
- __ LoadP(ip, FieldMemOperand(ip, Cell::kValueOffset));
- __ cmp(map, ip);
- __ bc_short(ne, &cache_miss);
- __ bind(deferred->load_bool()); // Label for calculating code patching.
- // We use Factory::the_hole_value() on purpose instead of loading from the
- // root array to force relocation to be able to later patch
- // with true or false.
- __ mov(result, Operand(factory()->the_hole_value()));
+ // The {object} must be a spec object. It's sufficient to know that {object}
+ // is not a smi, since all other non-spec objects have {null} prototypes and
+ // will be ruled out below.
+ if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
+ __ TestIfSmi(object, r0);
+ EmitFalseBranch(instr, eq, cr0);
}
- __ b(&done);
-
- // The inlined call site cache did not match. Check null and string before
- // calling the deferred code.
- __ bind(&cache_miss);
- // Null is not instance of anything.
- __ LoadRoot(ip, Heap::kNullValueRootIndex);
- __ cmp(object, ip);
- __ beq(&false_result);
-
- // String values is not instance of anything.
- Condition is_string = masm_->IsObjectStringType(object, temp);
- __ b(is_string, &false_result, cr0);
- // Go to the deferred code.
- __ b(deferred->entry());
-
- __ bind(&false_result);
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
-
- // Here result has either true or false. Deferred code also produces true or
- // false object.
- __ bind(deferred->exit());
- __ bind(&done);
-}
-
-
-void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check,
- Label* bool_load) {
- InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
- flags = static_cast<InstanceofStub::Flags>(flags |
- InstanceofStub::kArgsInRegisters);
- flags = static_cast<InstanceofStub::Flags>(
- flags | InstanceofStub::kCallSiteInlineCheck);
- flags = static_cast<InstanceofStub::Flags>(
- flags | InstanceofStub::kReturnTrueFalseObject);
- InstanceofStub stub(isolate(), flags);
-
- PushSafepointRegistersScope scope(this);
- LoadContextFromDeferred(instr->context());
-
- __ Move(InstanceofStub::right(), instr->function());
- {
- Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
- Handle<Code> code = stub.GetCode();
- // Include instructions below in delta: bitwise_mov32 + li + call
- int additional_delta = 3 * Instruction::kInstrSize + masm_->CallSize(code);
- // The labels must be already bound since the code has predictabel size up
- // to the call instruction.
- DCHECK(map_check->is_bound());
- DCHECK(bool_load->is_bound());
- int map_check_delta =
- masm_->InstructionsGeneratedSince(map_check) * Instruction::kInstrSize;
- int bool_load_delta =
- masm_->InstructionsGeneratedSince(bool_load) * Instruction::kInstrSize;
- // r8 is the delta from our callee's lr to the location of the map check.
- __ bitwise_mov32(r8, map_check_delta + additional_delta);
- // r9 is the delta from map check to bool load.
- __ li(r9, Operand(map_check_delta - bool_load_delta));
- CallCodeGeneric(code, RelocInfo::CODE_TARGET, instr,
- RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- DCHECK_EQ((map_check_delta + additional_delta) / Instruction::kInstrSize,
- masm_->InstructionsGeneratedSince(map_check));
- }
- LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
- safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
- // Put the result value (r3) into the result register slot and
- // restore all registers.
- __ StoreToSafepointRegisterSlot(r3, ToRegister(instr->result()));
+ // Loop through the {object}s prototype chain looking for the {prototype}.
+ __ LoadP(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
+ Label loop;
+ __ bind(&loop);
+ __ LoadP(object_prototype,
+ FieldMemOperand(object_map, Map::kPrototypeOffset));
+ __ cmp(object_prototype, prototype);
+ EmitTrueBranch(instr, eq);
+ __ CompareRoot(object_prototype, Heap::kNullValueRootIndex);
+ EmitFalseBranch(instr, eq);
+ __ LoadP(object_map,
+ FieldMemOperand(object_prototype, HeapObject::kMapOffset));
+ __ b(&loop);
}
