Index: src/arm/full-codegen-arm.cc
===================================================================
--- src/arm/full-codegen-arm.cc  (revision 6683)
+++ src/arm/full-codegen-arm.cc  (working copy)
@@ -45,6 +45,67 @@
 #define __ ACCESS_MASM(masm_)
+
+// A patch site is a location in the code which it is possible to patch. This
+// class has a number of methods the emit the code which is patchable and the
Alexandre, 2011/02/09 13:57:16:
typo: "the emit the code" -> "to emit the code" ?

Søren Thygesen Gjesse, 2011/02/09 14:48:28:
Done.
+// method EmitPatchInfo to record a marker back to the patchable code. This
+// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
+// immediate value is used) is the delta from the pc to the first instruction of
+// the patchable code.
+class JumpPatchSite BASE_EMBEDDED {
+ public:
+  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
+#ifdef DEBUG
+    info_emitted_ = false;
+#endif
+  }
+
+  ~JumpPatchSite() {
+    ASSERT(patch_site_.is_bound() == info_emitted_);
+  }
+
+  // When initially emitting this ensure that a jump is always generated to skip
+  // the inlined smi code.
+  void EmitJumpIfNotSmi(Register reg, Label* target) {
+    ASSERT(!patch_site_.is_bound() && !info_emitted_);
+    __ bind(&patch_site_);
+    __ cmp(reg, Operand(reg));
+    // Don't use b(al, ...) as that might emit the constant pool right after the
+    // branch. After patching when the branch is no longer unconditional
+    // execution can continue into the constant pool.
+    __ b(eq, target);  // Always taken before patched.
+  }
+
+  // When initially emitting this ensure that a jump is never generated to skip
+  // the inlined smi code.
+  void EmitJumpIfSmi(Register reg, Label* target) {
+    ASSERT(!patch_site_.is_bound() && !info_emitted_);
+    __ bind(&patch_site_);
+    __ cmp(reg, Operand(reg));
+    __ b(ne, target);  // Never taken before patched.
+  }
+
+  void EmitPatchInfo() {
+    int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
+    Register reg;
+    reg.set_code(delta_to_patch_site / kOff12Mask);
+    __ cmp_raw_immediate(r0, delta_to_patch_site % kOff12Mask);
Karl Klose, 2011/02/09 14:35:30:
The destination register of the cmp should be reg.

Søren Thygesen Gjesse, 2011/02/09 14:48:28:
Good catch, fixed.
+#ifdef DEBUG
+    info_emitted_ = true;
+#endif
+  }
+
+  bool is_bound() const { return patch_site_.is_bound(); }
+
+ private:
+  MacroAssembler* masm_;
+  Label patch_site_;
+#ifdef DEBUG
+  bool info_emitted_;
+#endif
+};
+
+
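For concreteness, the arithmetic behind EmitPatchInfo can be exercised in a
small standalone program. This is a sketch, not part of the patch: it assumes
kOff12Mask == 0xfff (the raw 12-bit immediate mask from the ARM assembler
headers), uses a made-up delta, and assumes the corrected marker form noted in
the review comment above, where the register operand carries the quotient.

    #include <cassert>
    #include <cstdio>

    const int kOff12Mask = 0xfff;  // Assumed raw 12-bit ARM immediate mask.

    int main() {
      // Hypothetical pc-relative distance (in instructions) to the patch site.
      int delta_to_patch_site = 5000;
      // EmitPatchInfo encodes this as "cmp rx, #yyy" with
      // delta == x * kOff12Mask + yyy.
      int x = delta_to_patch_site / kOff12Mask;    // Register code, here 1.
      int yyy = delta_to_patch_site % kOff12Mask;  // Raw immediate, here 905.
      assert(x * kOff12Mask + yyy == delta_to_patch_site);
      printf("marker: cmp r%d, #%d\n", x, yyy);
      return 0;
    }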
 // Generate code for a JS function.  On entry to the function the receiver
 // and arguments have been pushed on the stack left to right.  The actual
 // argument count matches the formal parameter count expected by the
@@ -752,24 +813,25 @@
     // Perform the comparison as if via '==='.
     __ ldr(r1, MemOperand(sp, 0));  // Switch value.
     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
+    JumpPatchSite patch_site(masm_);
     if (inline_smi_code) {
       Label slow_case;
       __ orr(r2, r1, r0);
-      __ tst(r2, Operand(kSmiTagMask));
-      __ b(ne, &slow_case);
+      patch_site.EmitJumpIfNotSmi(r2, &slow_case);
+
       __ cmp(r1, r0);
       __ b(ne, &next_test);
       __ Drop(1);  // Switch value is no longer needed.
       __ b(clause->body_target()->entry_label());
-      __ bind(&slow_case);
+      __ bind(&slow_case);
     }
-    CompareFlags flags = inline_smi_code
-        ? NO_SMI_COMPARE_IN_STUB
-        : NO_COMPARE_FLAGS;
-    CompareStub stub(eq, true, flags, r1, r0);
-    __ CallStub(&stub);
-    __ cmp(r0, Operand(0, RelocInfo::NONE));
+    // Record position before stub call for type feedback.
+    SetSourcePosition(clause->position());
+    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
+    EmitCallIC(ic, &patch_site);
+
+    __ cmp(r0, Operand(0));
     __ b(ne, &next_test);
     __ Drop(1);  // Switch value is no longer needed.
     __ b(clause->body_target()->entry_label());
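Until it is patched, the marker pair emitted by EmitJumpIfNotSmi above makes
the branch to slow_case effectively unconditional, so every clause comparison
is handled by the CompareIC; the inline smi path only runs once the IC has
patched the site. The following standalone C++ model (not V8 code) illustrates
the two states. It assumes kSmiTagMask == 1 (smis carry a clear low tag bit on
32-bit V8) and assumes the patcher rewrites "cmp r2, r2 / beq slow_case" into
"tst r2, #kSmiTagMask / bne slow_case"; the patching side is not part of this
diff.

    #include <cstdint>
    #include <cstdio>

    const intptr_t kSmiTagMask = 1;  // Assumed: low bit set means heap object.

    // Disabled state: "cmp r2, r2" always sets eq, so "beq slow_case" is
    // always taken and the inlined smi comparison never runs.
    bool JumpToSlowCaseBeforePatch(intptr_t /* r2 */) { return true; }

    // Enabled state (assumed patched form): "tst r2, #kSmiTagMask" followed
    // by "bne slow_case" jumps only if either or'ed operand is a heap object.
    bool JumpToSlowCaseAfterPatch(intptr_t r2) {
      return (r2 & kSmiTagMask) != 0;
    }

    int main() {
      intptr_t two_smis = 0x8 | 0x4;        // Both low tag bits clear.
      intptr_t smi_and_object = 0x8 | 0x5;  // One low tag bit set.
      printf("before patch: %d %d\n",
             JumpToSlowCaseBeforePatch(two_smis),
             JumpToSlowCaseBeforePatch(smi_and_object));  // 1 1
      printf("after patch:  %d %d\n",
             JumpToSlowCaseAfterPatch(two_smis),
             JumpToSlowCaseAfterPatch(smi_and_object));   // 0 1
      return 0;
    }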
@@ -3510,21 +3572,22 @@
       }
       bool inline_smi_code = ShouldInlineSmiCase(op);
+      JumpPatchSite patch_site(masm_);
       if (inline_smi_code) {
         Label slow_case;
         __ orr(r2, r0, Operand(r1));
-        __ JumpIfNotSmi(r2, &slow_case);
+        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
         __ cmp(r1, r0);
         Split(cond, if_true, if_false, NULL);
         __ bind(&slow_case);
       }
-      CompareFlags flags = inline_smi_code
-          ? NO_SMI_COMPARE_IN_STUB
-          : NO_COMPARE_FLAGS;
-      CompareStub stub(cond, strict, flags, r1, r0);
-      __ CallStub(&stub);
+
+      // Record position and call the compare IC.
+      SetSourcePosition(expr->position());
+      Handle<Code> ic = CompareIC::GetUninitialized(op);
+      EmitCallIC(ic, &patch_site);
       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
-      __ cmp(r0, Operand(0, RelocInfo::NONE));
+      __ cmp(r0, Operand(0));
       Split(cond, if_true, if_false, fall_through);
     }
   }
@@ -3591,6 +3654,16 @@
 }
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+  __ Call(ic, RelocInfo::CODE_TARGET);
+  if (patch_site != NULL && patch_site->is_bound()) {
+    patch_site->EmitPatchInfo();
+  } else {
+    __ nop();  // Signals no inlined code.
+  }
+}
+
+
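The nop after the IC call thus signals "no inlined smi code at this call
site", while a bound patch site leaves the cmp marker described at the top of
the patch. As a hedged sketch of the other direction, the following standalone
helper recovers the delta from a marker word. The bit positions follow the ARM
cmp-immediate encoding (register code in the Rn field, bits 16..19; raw
immediate in bits 0..11); DeltaFromCmpMarker is illustrative, not the actual
V8 patching API, and it assumes the corrected marker form where reg carries
the register code.

    #include <cstdint>
    #include <cstdio>

    const int kOff12Mask = 0xfff;  // Assumed raw 12-bit ARM immediate mask.

    // Recover the pc-relative delta from a "cmp rx, #yyy" marker word.
    int DeltaFromCmpMarker(uint32_t instr) {
      int reg_code = (instr >> 16) & 0xf;  // x in "cmp rx, #yyy" (Rn field).
      int raw_imm12 = instr & kOff12Mask;  // yyy, the raw 12-bit value.
      return reg_code * kOff12Mask + raw_imm12;
    }

    int main() {
      // "cmp r1, #905" with condition AL: cond 0xE, I = 1, opcode CMP, S = 1.
      uint32_t marker = 0xE3510389;
      printf("delta = %d instructions\n", DeltaFromCmpMarker(marker));  // 5000
      return 0;
    }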
 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
   ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
   __ str(value, MemOperand(fp, frame_offset));