Index: src/ia32/lithium-codegen-ia32.cc
===================================================================
--- src/ia32/lithium-codegen-ia32.cc	(revision 7254)
+++ src/ia32/lithium-codegen-ia32.cc	(working copy)
@@ -893,7 +893,49 @@
   }

   if (right->IsConstantOperand()) {
-    __ imul(left, left, ToInteger32(LConstantOperand::cast(right)));
+    // Try strength reductions on the multiplication.
+    // All replacement instructions are at most as long as the imul
+    // and have better latency.
+    int constant = ToInteger32(LConstantOperand::cast(right));
+    if (constant == -1) {
+      __ neg(left);
+    } else if (constant == 0) {
+      __ xor_(left, Operand(left));
+    } else if (constant == 2) {
+      __ add(left, Operand(left));
+    } else if (!instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
+      // If we know that the multiplication can't overflow, it's safe to
+      // use instructions that don't set the overflow flag for the
+      // multiplication.
+      switch (constant) {
+        case 1:
+          // Do nothing.
+          break;
+        case 3:
+          __ lea(left, Operand(left, left, times_2, 0));
+          break;
+        case 4:
+          __ shl(left, 2);
+          break;
+        case 5:
+          __ lea(left, Operand(left, left, times_4, 0));
+          break;
+        case 8:
+          __ shl(left, 3);
+          break;
+        case 9:
+          __ lea(left, Operand(left, left, times_8, 0));
+          break;
+        case 16:
+          __ shl(left, 4);
+          break;
+        default:
+          __ imul(left, left, constant);
+          break;
+      }
+    } else {
+      __ imul(left, left, constant);
+    }
   } else {
     __ imul(left, ToOperand(right));
   }
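A minimal sketch (standalone C++, not part of the patch) of what each replacement
above computes: neg gives x * -1, xor gives x * 0, add gives x * 2, lea with scale
factor s gives x + x * s (covering x*3, x*5, x*9), and shl by k gives x * 2^k. The
shl and lea variants are only reached when the kCanOverflow check above guarantees
the product fits, since unlike imul they don't set the overflow flag.

    #include <cassert>
    #include <cstdint>

    // Models of the replacement instructions for left = left * constant.
    int32_t by_neg(int32_t x) { return -x; }       // neg left        (* -1)
    int32_t by_xor(int32_t x) { return x ^ x; }    // xor_ left, left (* 0)
    int32_t by_add(int32_t x) { return x + x; }    // add left, left  (* 2)
    int32_t lea3(int32_t x) { return x + x * 2; }  // lea left, [left + left*2]
    int32_t lea5(int32_t x) { return x + x * 4; }  // lea left, [left + left*4]
    int32_t lea9(int32_t x) { return x + x * 8; }  // lea left, [left + left*8]
    int32_t shl2(int32_t x) { return x << 2; }     // shl left, 2     (* 4)

    int main() {
      int32_t x = 7;
      assert(by_neg(x) == x * -1);
      assert(by_xor(x) == 0);
      assert(by_add(x) == x * 2);
      assert(lea3(x) == x * 3);
      assert(lea5(x) == x * 5);
      assert(lea9(x) == x * 9);
      assert(shl2(x) == x * 4);
      return 0;
    }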
@@ -3239,10 +3281,8 @@
   DeoptimizeIf(not_equal, env);

   // Convert undefined to NaN.
-  __ push(input_reg);
-  __ mov(input_reg, factory()->nan_value());
-  __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
-  __ pop(input_reg);
+  ExternalReference nan = ExternalReference::address_of_nan();
+  __ movdbl(result_reg, Operand::StaticVariable(nan));
  __ jmp(&done);

  // Heap number to XMM conversion.
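A short sketch (plain C++; the static double below stands in for whatever address
ExternalReference::address_of_nan resolves to, which is an assumption about that
helper) of the effect: the old sequence borrowed input_reg to reach the NaN
HeapNumber's value field and therefore had to save and restore it around the load,
while the new code is a single 8-byte load from a fixed address that clobbers
nothing.

    #include <cassert>
    #include <cmath>
    #include <limits>

    // Fixed-address NaN, analogous to the external reference in the hunk.
    static const double kNaNValue = std::numeric_limits<double>::quiet_NaN();

    int main() {
      double result_reg = kNaNValue;  // models: movdbl result_reg, [address_of_nan]
      assert(std::isnan(result_reg));
      return 0;
    }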
@@ -3502,12 +3542,18 @@

 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
   LOperand* input = instr->InputAt(0);
-  ASSERT(input->IsRegister());
   __ test(ToRegister(input), Immediate(kSmiTagMask));
-  DeoptimizeIf(instr->condition(), instr->environment());
+  DeoptimizeIf(not_zero, instr->environment());
 }


+void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
+  LOperand* input = instr->InputAt(0);
+  __ test(ToRegister(input), Immediate(kSmiTagMask));
+  DeoptimizeIf(zero, instr->environment());
+}
+
+
 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
   Register input = ToRegister(instr->InputAt(0));
   Register temp = ToRegister(instr->TempAt(0));
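A minimal sketch (standalone C++, with kSmiTagMask hard-coded to its ia32 value
of 1) of the tag test both handlers share: smis carry a 0 in the low bit and heap
object pointers a 1, so "test value, 1" sets the zero flag exactly for smis.
DoCheckSmi then deoptimizes on not_zero (the value is a heap object) and the new
DoCheckNonSmi on zero (the value is a smi).

    #include <cassert>
    #include <cstdint>

    const intptr_t kSmiTagMask = 1;  // low tag bit on ia32

    bool IsSmi(intptr_t value) { return (value & kSmiTagMask) == 0; }

    int main() {
      intptr_t smi = 42 << 1;         // smis store the integer shifted left by one
      intptr_t heap_object = 0x1001;  // pointers are odd thanks to the tag bit
      assert(IsSmi(smi));             // DoCheckSmi passes, DoCheckNonSmi deopts
      assert(!IsSmi(heap_object));    // DoCheckNonSmi passes, DoCheckSmi deopts
      return 0;
    }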
@@ -3698,8 +3744,9 @@
   // space for nested functions that don't need literals cloning.
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   bool pretenure = instr->hydrogen()->pretenure();
-  if (shared_info->num_literals() == 0 && !pretenure) {
-    FastNewClosureStub stub;
+  if (!pretenure && shared_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ push(Immediate(shared_info));
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
   } else {
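A sketch (hypothetical struct and function names; only the condition and the mode
selection mirror the hunk) of the control flow above: closures that need no
literal cloning and no pretenuring take the fast stub, which is now specialized
on the function's language mode; everything else falls back to the slow path.

    #include <cstdio>

    enum StrictModeFlag { kNonStrictMode, kStrictMode };

    struct SharedInfo {  // hypothetical stand-in for SharedFunctionInfo
      int num_literals;
      bool strict_mode;
    };

    // Hypothetical selector mirroring the fast/slow split in the hunk.
    const char* ClosurePath(const SharedInfo& shared, bool pretenure) {
      if (!pretenure && shared.num_literals == 0) {
        StrictModeFlag mode = shared.strict_mode ? kStrictMode : kNonStrictMode;
        return mode == kStrictMode ? "FastNewClosureStub (strict)"
                                   : "FastNewClosureStub (non-strict)";
      }
      return "runtime fallback";  // slow path, not shown in this hunk
    }

    int main() {
      SharedInfo f = { 0, true };
      std::printf("%s\n", ClosurePath(f, false));  // fast stub, strict mode
      std::printf("%s\n", ClosurePath(f, true));   // pretenured: runtime fallback
      return 0;
    }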