| Index: src/x64/full-codegen-x64.cc
|
| diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
|
| index 9412d38b823a6c7487a0388fcca8a27086dbeec3..b0fa5cf003a6709c60854963c5fdb413f3692d52 100644
|
| --- a/src/x64/full-codegen-x64.cc
|
| +++ b/src/x64/full-codegen-x64.cc
|
| @@ -43,6 +43,58 @@ namespace internal {
|
|
|
| #define __ ACCESS_MASM(masm_)
|
|
|
| +
|
| +class JumpPatchSite BASE_EMBEDDED {  // Records a patchable inlined smi-check site for an IC call.
|
| + public:
|
| +  explicit JumpPatchSite(MacroAssembler* masm)
|
| +      : masm_(masm) {
|
| +#ifdef DEBUG
|
| +    info_emitted_ = false;
|
| +#endif
|
| +  }
|
| +
|
| +  ~JumpPatchSite() {
|
| +    ASSERT(patch_site_.is_bound() == info_emitted_);  // Patch info must be emitted iff a jump was bound.
|
| +  }
|
| +
|
| +  void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
|
| +    __ testb(reg, Immediate(kSmiTagMask));
|
| +    EmitJump(not_carry, target);   // Always taken before patched.
|
| +  }
|
| +
|
| +  void EmitJumpIfSmi(Register reg, NearLabel* target) {
|
| +    __ testb(reg, Immediate(kSmiTagMask));
|
| +    EmitJump(carry, target);  // Never taken before patched.
|
| +  }
|
| +
|
| +  void EmitPatchInfo() {  // Emit after the IC call; encodes where the jump to patch lives.
|
| +    int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
|
| +    ASSERT(is_int8(delta_to_patch_site));
|
| +    __ testl(rax, Immediate(delta_to_patch_site));  // Delta is carried in the test's immediate for the patcher to read.
|
| +#ifdef DEBUG
|
| +    info_emitted_ = true;
|
| +#endif
|
| +  }
|
| +
|
| +  bool is_bound() const { return patch_site_.is_bound(); }
|
| +
|
| + private:
|
| +  // jc will be patched with jz, jnc will become jnz.
|
| +  void EmitJump(Condition cc, NearLabel* target) {
|
| +    ASSERT(!patch_site_.is_bound() && !info_emitted_);
|
| +    ASSERT(cc == carry || cc == not_carry);
|
| +    __ bind(&patch_site_);
|
| +    __ j(cc, target);
|
| +  }
|
| +
|
| +  MacroAssembler* masm_;
|
| +  Label patch_site_;
|
| +#ifdef DEBUG
|
| +  bool info_emitted_;  // Debug-only: tracks that EmitPatchInfo() ran before destruction.
|
| +#endif
|
| +};
|
| +
|
| +
|
| // Generate code for a JS function. On entry to the function the receiver
|
| // and arguments have been pushed on the stack left to right, with the
|
| // return address on top of them. The actual argument count matches the
|
| @@ -728,21 +780,25 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
|
| // Perform the comparison as if via '==='.
|
| __ movq(rdx, Operand(rsp, 0)); // Switch value.
|
| bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
|
| + JumpPatchSite patch_site(masm_);
|
| if (inline_smi_code) {
|
| - Label slow_case;
|
| - __ JumpIfNotBothSmi(rdx, rax, &slow_case);
|
| - __ SmiCompare(rdx, rax);
|
| + NearLabel slow_case;
|
| + __ movq(rcx, rdx);
|
| + __ or_(rcx, rax);
|
| + patch_site.EmitJumpIfNotSmi(rcx, &slow_case);
|
| +
|
| + __ cmpq(rdx, rax);
|
| __ j(not_equal, &next_test);
|
| __ Drop(1); // Switch value is no longer needed.
|
| __ jmp(clause->body_target()->entry_label());
|
| __ bind(&slow_case);
|
| }
|
|
|
| - CompareFlags flags = inline_smi_code
|
| - ? NO_SMI_COMPARE_IN_STUB
|
| - : NO_COMPARE_FLAGS;
|
| - CompareStub stub(equal, true, flags);
|
| - __ CallStub(&stub);
|
| + // Record position before stub call for type feedback.
|
| + SetSourcePosition(clause->position());
|
| + Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
|
| + EmitCallIC(ic, &patch_site);
|
| +
|
| __ testq(rax, rax);
|
| __ j(not_equal, &next_test);
|
| __ Drop(1); // Switch value is no longer needed.
|
| @@ -1522,16 +1578,17 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
|
| // Do combined smi check of the operands. Left operand is on the
|
| // stack (popped into rdx). Right operand is in rax but moved into
|
| // rcx to make the shifts easier.
|
| - Label done, stub_call, smi_case;
|
| + NearLabel done, stub_call, smi_case;
|
| __ pop(rdx);
|
| __ movq(rcx, rax);
|
| - Condition smi = masm()->CheckBothSmi(rdx, rax);
|
| - __ j(smi, &smi_case);
|
| + __ or_(rax, rdx);
|
| + JumpPatchSite patch_site(masm_);
|
| + patch_site.EmitJumpIfSmi(rax, &smi_case);
|
|
|
| __ bind(&stub_call);
|
| - TypeRecordingBinaryOpStub stub(op, mode);
|
| __ movq(rax, rcx);
|
| - __ CallStub(&stub);
|
| + TypeRecordingBinaryOpStub stub(op, mode);
|
| + EmitCallIC(stub.GetCode(), &patch_site);
|
| __ jmp(&done);
|
|
|
| __ bind(&smi_case);
|
| @@ -3197,7 +3254,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| }
|
|
|
| // Inline smi case if we are in a loop.
|
| - Label stub_call, done;
|
| + NearLabel stub_call, done;
|
| + JumpPatchSite patch_site(masm_);
|
| +
|
| if (ShouldInlineSmiCase(expr->op())) {
|
| if (expr->op() == Token::INC) {
|
| __ SmiAddConstant(rax, rax, Smi::FromInt(1));
|
| @@ -3207,8 +3266,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| __ j(overflow, &stub_call);
|
| // We could eliminate this smi check if we split the code at
|
| // the first smi check before calling ToNumber.
|
| - is_smi = masm_->CheckSmi(rax);
|
| - __ j(is_smi, &done);
|
| + patch_site.EmitJumpIfSmi(rax, &done);
|
|
|
| __ bind(&stub_call);
|
| // Call stub. Undo operation first.
|
| @@ -3230,9 +3288,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| __ movq(rdx, rax);
|
| __ Move(rax, Smi::FromInt(1));
|
| }
|
| - __ CallStub(&stub);
|
| -
|
| + EmitCallIC(stub.GetCode(), &patch_site);
|
| __ bind(&done);
|
| +
|
| // Store the value returned in rax.
|
| switch (assign_type) {
|
| case VARIABLE:
|
| @@ -3500,19 +3558,21 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
|
| }
|
|
|
| bool inline_smi_code = ShouldInlineSmiCase(op);
|
| + JumpPatchSite patch_site(masm_);
|
| if (inline_smi_code) {
|
| - Label slow_case;
|
| - __ JumpIfNotBothSmi(rax, rdx, &slow_case);
|
| - __ SmiCompare(rdx, rax);
|
| + NearLabel slow_case;
|
| + __ movq(rcx, rdx);
|
| + __ or_(rcx, rax);
|
| + patch_site.EmitJumpIfNotSmi(rcx, &slow_case);
|
| + __ cmpq(rdx, rax);
|
| Split(cc, if_true, if_false, NULL);
|
| __ bind(&slow_case);
|
| }
|
|
|
| - CompareFlags flags = inline_smi_code
|
| - ? NO_SMI_COMPARE_IN_STUB
|
| - : NO_COMPARE_FLAGS;
|
| - CompareStub stub(cc, strict, flags);
|
| - __ CallStub(&stub);
|
| + // Record position and call the compare IC.
|
| + SetSourcePosition(expr->position());
|
| + Handle<Code> ic = CompareIC::GetUninitialized(op);
|
| + EmitCallIC(ic, &patch_site);
|
|
|
| PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
|
| __ testq(rax, rax);
|
| @@ -3617,6 +3677,16 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
|
| }
|
|
|
|
|
| +void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {  // Call |ic|, then emit smi-check patch info (or a nop) right after the call.
|
| +  __ call(ic, RelocInfo::CODE_TARGET);
|
| +  if (patch_site != NULL && patch_site->is_bound()) {
|
| +    patch_site->EmitPatchInfo();  // Inlined smi code was emitted; tell the patcher where.
|
| +  } else {
|
| +    __ nop();  // Signals no inlined code.
|
| +  }
|
| +}
|
| +
|
| +
|
| void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
|
| ASSERT(IsAligned(frame_offset, kPointerSize));
|
| __ movq(Operand(rbp, frame_offset), value);
|
|
|