Index: src/ia32/full-codegen-ia32.cc
===================================================================
--- src/ia32/full-codegen-ia32.cc (revision 6095)
+++ src/ia32/full-codegen-ia32.cc (working copy)
@@ -41,8 +41,61 @@
 namespace v8 {
 namespace internal {
 
+
 #define __ ACCESS_MASM(masm_)
 
+
+class JumpPatchSite BASE_EMBEDDED {
+ public:
+  explicit JumpPatchSite(MacroAssembler* masm)
+      : masm_(masm) {
+#ifdef DEBUG
+    info_emitted_ = false;
+#endif
+  }
+
+  ~JumpPatchSite() {
+    ASSERT(patch_site_.is_bound() == info_emitted_);
+  }
+
+  void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
+    __ test(reg, Immediate(kSmiTagMask));
+    EmitJump(not_carry, target);  // Always taken before patched.
+  }
+
+  void EmitJumpIfSmi(Register reg, NearLabel* target) {
+    __ test(reg, Immediate(kSmiTagMask));
+    EmitJump(carry, target);  // Never taken before patched.
+  }
+
+  void EmitPatchInfo() {
+    int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
+    ASSERT(is_int8(delta_to_patch_site));
+    __ test(eax, Immediate(delta_to_patch_site));
+#ifdef DEBUG
+    info_emitted_ = true;
+#endif
+  }
+
+  bool is_bound() const { return patch_site_.is_bound(); }
+
+ private:
+  // jc will be patched with jz, jnc will become jnz.
+  void EmitJump(Condition cc, NearLabel* target) {
+    ASSERT(!patch_site_.is_bound() && !info_emitted_);
+    ASSERT(cc == carry || cc == not_carry);
+    __ bind(&patch_site_);
+    __ j(cc, target);
+  }
+
+  MacroAssembler* masm_;
+  Label patch_site_;
+#ifdef DEBUG
+  bool info_emitted_;
+#endif
+};
+
+
 // Generate code for a JS function. On entry to the function the receiver
 // and arguments have been pushed on the stack left to right, with the
 // return address on top of them. The actual argument count matches the
@@ -200,6 +253,11 @@
 }
 
 
+void FullCodeGenerator::ClearAccumulator() {
+  __ Set(eax, Immediate(Smi::FromInt(0)));
+}
+
+
 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
   Comment cmnt(masm_, "[ Stack check");
   NearLabel ok;
@@ -694,10 +752,9 @@
   Breakable nested_statement(this, stmt);
   SetStatementPosition(stmt);
 
-  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
-
   // Keep the switch value on the stack until a case matches.
   VisitForStackValue(stmt->tag());
+  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
 
   ZoneList<CaseClause*>* clauses = stmt->cases();
   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
@@ -722,12 +779,13 @@
     // Perform the comparison as if via '==='.
     __ mov(edx, Operand(esp, 0));  // Switch value.
     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
+    JumpPatchSite patch_site(masm_);
     if (inline_smi_code) {
       NearLabel slow_case;
       __ mov(ecx, edx);
       __ or_(ecx, Operand(eax));
-      __ test(ecx, Immediate(kSmiTagMask));
-      __ j(not_zero, &slow_case, not_taken);
+      patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
+
       __ cmp(edx, Operand(eax));
       __ j(not_equal, &next_test);
       __ Drop(1);  // Switch value is no longer needed.
@@ -737,9 +795,8 @@
 
     // Record position before stub call for type feedback.
     SetSourcePosition(clause->position());
-
     Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
-    __ call(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, &patch_site);
 
     __ test(eax, Operand(eax));
     __ j(not_equal, &next_test);
@@ -1565,12 +1622,11 @@
                                            OverwriteMode mode,
                                            bool left_is_constant_smi,
                                            Smi* value) {
-  NearLabel call_stub;
-  Label done;
+  NearLabel call_stub, done;
   __ add(Operand(eax), Immediate(value));
   __ j(overflow, &call_stub);
-  __ test(eax, Immediate(kSmiTagMask));
-  __ j(zero, &done);
+  JumpPatchSite patch_site(masm_);
+  patch_site.EmitJumpIfSmi(eax, &done);
 
   // Undo the optimistic add operation and call the shared stub.
   __ bind(&call_stub);
@@ -1583,7 +1639,8 @@
     __ mov(edx, eax);
     __ mov(eax, Immediate(value));
   }
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
+
   __ bind(&done);
   context()->Plug(eax);
 }
@@ -1593,7 +1650,7 @@
                                            OverwriteMode mode,
                                            bool left_is_constant_smi,
                                            Smi* value) {
-  Label call_stub, done;
+  NearLabel call_stub, done;
   if (left_is_constant_smi) {
     __ mov(ecx, eax);
     __ mov(eax, Immediate(value));
@@ -1602,8 +1659,8 @@
     __ sub(Operand(eax), Immediate(value));
   }
   __ j(overflow, &call_stub);
-  __ test(eax, Immediate(kSmiTagMask));
-  __ j(zero, &done);
+  JumpPatchSite patch_site(masm_);
+  patch_site.EmitJumpIfSmi(eax, &done);
 
   __ bind(&call_stub);
   if (left_is_constant_smi) {
@@ -1616,7 +1673,8 @@
   }
   Token::Value op = Token::SUB;
   TypeRecordingBinaryOpStub stub(op, mode);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
+
   __ bind(&done);
   context()->Plug(eax);
 }
@@ -1626,19 +1684,21 @@
                                                Token::Value op,
                                                OverwriteMode mode,
                                                Smi* value) {
-  Label call_stub, smi_case, done;
+  NearLabel call_stub, smi_case, done;
   int shift_value = value->value() & 0x1f;
 
-  __ test(eax, Immediate(kSmiTagMask));
-  __ j(zero, &smi_case);
+  JumpPatchSite patch_site(masm_);
+  patch_site.EmitJumpIfSmi(eax, &smi_case);
 
+  // Call stub.
   __ bind(&call_stub);
   __ mov(edx, eax);
   __ mov(eax, Immediate(value));
   TypeRecordingBinaryOpStub stub(op, mode);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
   __ jmp(&done);
 
+  // Smi case.
   __ bind(&smi_case);
   switch (op) {
     case Token::SHL:
@@ -1688,17 +1748,19 @@
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Smi* value) {
-  Label smi_case, done;
-  __ test(eax, Immediate(kSmiTagMask));
-  __ j(zero, &smi_case);
+  NearLabel smi_case, done;
 
+  JumpPatchSite patch_site(masm_);
+  patch_site.EmitJumpIfSmi(eax, &smi_case);
+
   // The order of the arguments does not matter for bit-ops with a
   // constant operand.
   __ mov(edx, Immediate(value));
   TypeRecordingBinaryOpStub stub(op, mode);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
   __ jmp(&done);
 
+  // Smi case.
   __ bind(&smi_case);
   switch (op) {
     case Token::BIT_OR:
@@ -1766,19 +1828,20 @@
 
   // Do combined smi check of the operands. Left operand is on the
   // stack. Right operand is in eax.
-  Label done, stub_call, smi_case;
+  NearLabel done, smi_case, stub_call;
   __ pop(edx);
   __ mov(ecx, eax);
   __ or_(eax, Operand(edx));
-  __ test(eax, Immediate(kSmiTagMask));
-  __ j(zero, &smi_case);
+  JumpPatchSite patch_site(masm_);
+  patch_site.EmitJumpIfSmi(eax, &smi_case);
 
   __ bind(&stub_call);
   __ mov(eax, ecx);
   TypeRecordingBinaryOpStub stub(op, mode);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
   __ jmp(&done);
 
+  // Smi case.
   __ bind(&smi_case);
   __ mov(eax, edx);  // Copy left operand in case of a stub call.
 
@@ -1857,7 +1920,7 @@
                                      OverwriteMode mode) {
   __ pop(edx);
   TypeRecordingBinaryOpStub stub(op, mode);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), NULL);  // NULL signals no inlined smi code.
   context()->Plug(eax);
 }
 
@@ -3019,7 +3082,8 @@
 
 void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
   // Load the argument on the stack and call the stub.
-  TranscendentalCacheStub stub(TranscendentalCache::SIN);
+  TranscendentalCacheStub stub(TranscendentalCache::SIN,
+                               TranscendentalCacheStub::TAGGED);
   ASSERT(args->length() == 1);
   VisitForStackValue(args->at(0));
   __ CallStub(&stub);
@@ -3029,7 +3093,8 @@
 
 void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
   // Load the argument on the stack and call the stub.
-  TranscendentalCacheStub stub(TranscendentalCache::COS);
+  TranscendentalCacheStub stub(TranscendentalCache::COS,
+                               TranscendentalCacheStub::TAGGED);
   ASSERT(args->length() == 1);
   VisitForStackValue(args->at(0));
   __ CallStub(&stub);
@@ -3039,7 +3104,8 @@
 
 void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
   // Load the argument on the stack and call the stub.
-  TranscendentalCacheStub stub(TranscendentalCache::LOG);
+  TranscendentalCacheStub stub(TranscendentalCache::LOG,
+                               TranscendentalCacheStub::TAGGED);
   ASSERT(args->length() == 1);
   VisitForStackValue(args->at(0));
   __ CallStub(&stub);
@@ -3123,6 +3189,13 @@
   __ test(temp, Immediate(kSmiTagMask));
   __ j(not_zero, &slow_case);
 
+  // Check that both indices are valid.
+  __ mov(temp, FieldOperand(object, JSArray::kLengthOffset));
+  __ cmp(temp, Operand(index_1));
+  __ j(below_equal, &slow_case);
+  __ cmp(temp, Operand(index_2));
+  __ j(below_equal, &slow_case);
+
   // Bring addresses into index1 and index2.
   __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1));
   __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2));
@@ -3731,8 +3804,9 @@
   }
 
   // Inline smi case if we are in a loop.
-  NearLabel stub_call;
-  Label done;
+  NearLabel stub_call, done;
+  JumpPatchSite patch_site(masm_);
+
   if (ShouldInlineSmiCase(expr->op())) {
     if (expr->op() == Token::INC) {
       __ add(Operand(eax), Immediate(Smi::FromInt(1)));
@@ -3742,8 +3816,8 @@
     __ j(overflow, &stub_call);
     // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
-    __ test(eax, Immediate(kSmiTagMask));
-    __ j(zero, &done);
+    patch_site.EmitJumpIfSmi(eax, &done);
+
    __ bind(&stub_call);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
@@ -3761,7 +3835,7 @@
   __ mov(eax, Immediate(Smi::FromInt(1)));
   TypeRecordingBinaryOpStub stub(expr->binary_op(),
                                  NO_OVERWRITE);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
   __ bind(&done);
 
   // Store the value returned in eax.
@@ -3986,7 +4060,8 @@
 
     case Token::INSTANCEOF: {
       VisitForStackValue(expr->right());
-      InstanceofStub stub;
+      __ IncrementCounter(&Counters::instance_of_full, 1);
+      InstanceofStub stub(InstanceofStub::kNoFlags);
       __ CallStub(&stub);
       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
       __ test(eax, Operand(eax));
@@ -4034,21 +4109,22 @@
       }
 
       bool inline_smi_code = ShouldInlineSmiCase(op);
+      JumpPatchSite patch_site(masm_);
       if (inline_smi_code) {
         NearLabel slow_case;
         __ mov(ecx, Operand(edx));
         __ or_(ecx, Operand(eax));
-        __ test(ecx, Immediate(kSmiTagMask));
-        __ j(not_zero, &slow_case, not_taken);
+        patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
         __ cmp(edx, Operand(eax));
         Split(cc, if_true, if_false, NULL);
         __ bind(&slow_case);
       }
 
       // Record position and call the compare IC.
+      SetSourcePosition(expr->position());
       Handle<Code> ic = CompareIC::GetUninitialized(op);
-      SetSourcePosition(expr->position());
-      __ call(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, &patch_site);
+
       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
       __ test(eax, Operand(eax));
       Split(cc, if_true, if_false, fall_through);
@@ -4152,6 +4228,16 @@
 }
 
 
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+  __ call(ic, RelocInfo::CODE_TARGET);
+  if (patch_site != NULL && patch_site->is_bound()) {
+    patch_site->EmitPatchInfo();
+  } else {
+    __ nop();  // Signals no inlined code.
+  }
+}
+
+
 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
   ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
   __ mov(Operand(ebp, frame_offset), value);