Index: src/x64/lithium-codegen-x64.cc
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 6f4d3e309f49a43dc5b4ff789eec3ee3fd8a93e5..9dca6b3e20a4fccd4f006cf20c110784eb44d538 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -3520,7 +3520,7 @@ void LCodeGen::DoMathFloor(LMathFloor* instr) {
     __ bind(&negative_sign);
     // Truncate, then compare and compensate.
     __ cvttsd2si(output_reg, input_reg);
-    __ Cvtlsi2sd(xmm_scratch, output_reg);
+    __ cvtlsi2sd(xmm_scratch, output_reg);
     __ ucomisd(input_reg, xmm_scratch);
     __ j(equal, &done, Label::kNear);
     __ subl(output_reg, Immediate(1));
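Together with the later hunks touching DoMathRound, DoInteger32ToDouble, and the smi-to-double path, this hunk replaces the MacroAssembler helper Cvtlsi2sd with the raw cvtlsi2sd instruction. The sketch below is not V8 code: it assumes Cvtlsi2sd is the usual wrapper that zeroes the destination XMM register before converting, whereas the bare instruction writes only the low 64 bits and leaves the upper lane as it was. The SSE2 intrinsic _mm_cvtsi32_sd models that merge behaviour:

#include <emmintrin.h>
#include <cstdio>

int main() {
  __m128d stale = _mm_set_pd(123.5, 456.5);             // pretend leftover data in the register
  __m128d raw   = _mm_cvtsi32_sd(stale, 7);             // low lane = 7.0, high lane keeps 123.5
  __m128d safe  = _mm_cvtsi32_sd(_mm_setzero_pd(), 7);  // clear first, as the wrapper presumably does

  double r[2], s[2];
  _mm_storeu_pd(r, raw);
  _mm_storeu_pd(s, safe);
  std::printf("raw:  low=%g high=%g\n", r[0], r[1]);    // raw:  low=7 high=123.5
  std::printf("safe: low=%g high=%g\n", s[0], s[1]);    // safe: low=7 high=0
  return 0;
}

Any x86-64 compiler accepts this as-is, since SSE2 is part of the baseline ISA; the point is only that the raw conversion preserves whatever happens to be in the upper lane of the destination.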
@@ -3569,7 +3569,7 @@ void LCodeGen::DoMathRound(LMathRound* instr) {
   __ RecordComment("D2I conversion overflow");
   DeoptimizeIf(equal, instr->environment());
 
-  __ Cvtlsi2sd(xmm_scratch, output_reg);
+  __ cvtlsi2sd(xmm_scratch, output_reg);
   __ ucomisd(input_reg, xmm_scratch);
   __ j(equal, &restore, Label::kNear);
   __ subl(output_reg, Immediate(1));
@@ -4449,9 +4449,9 @@ void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
   LOperand* output = instr->result();
   ASSERT(output->IsDoubleRegister());
   if (input->IsRegister()) {
-    __ Cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
+    __ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
   } else {
-    __ Cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
+    __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
   }
 }
 
@@ -4623,7 +4623,7 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
                                 bool deoptimize_on_minus_zero,
                                 LEnvironment* env,
                                 NumberUntagDMode mode) {
-  Label convert, load_smi, done;
+  Label load_smi, done;
 
   if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
     // Smi check.
@@ -4632,17 +4632,25 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
     // Heap number map check.
     __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                    Heap::kHeapNumberMapRootIndex);
-
-    // On x64 it is safe to load at heap number offset before evaluating the map
-    // check, since all heap objects are at least two words long.
-    __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
-
-    if (can_convert_undefined_to_nan) {
-      __ j(not_equal, &convert);
+    if (!can_convert_undefined_to_nan) {
+      DeoptimizeIf(not_equal, env);
     } else {
+      Label heap_number, convert;
+      __ j(equal, &heap_number, Label::kNear);
+
+      // Convert undefined (and hole) to NaN. Compute NaN as 0/0.
+      __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
       DeoptimizeIf(not_equal, env);
-    }
 
+      __ bind(&convert);
+      __ xorps(result_reg, result_reg);
+      __ divsd(result_reg, result_reg);
+      __ jmp(&done, Label::kNear);
+
+      __ bind(&heap_number);
+    }
+    // Heap number to XMM conversion.
+    __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
     if (deoptimize_on_minus_zero) {
       XMMRegister xmm_scratch = xmm0;
       __ xorps(xmm_scratch, xmm_scratch);
@@ -4653,18 +4661,6 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
       DeoptimizeIf(not_zero, env);
     }
     __ jmp(&done, Label::kNear);
-
-    if (can_convert_undefined_to_nan) {
-      __ bind(&convert);
-
-      // Convert undefined (and hole) to NaN. Compute NaN as 0/0.
-      __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
-      DeoptimizeIf(not_equal, env);
-
-      __ xorps(result_reg, result_reg);
-      __ divsd(result_reg, result_reg);
-      __ jmp(&done, Label::kNear);
-    }
   } else {
     ASSERT(mode == NUMBER_CANDIDATE_IS_SMI);
   }
@@ -4672,7 +4668,7 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
   // Smi to XMM conversion
   __ bind(&load_smi);
   __ SmiToInteger32(kScratchRegister, input_reg);
-  __ Cvtlsi2sd(result_reg, kScratchRegister);
+  __ cvtlsi2sd(result_reg, kScratchRegister);
   __ bind(&done);
 }
 
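In the rewritten EmitNumberUntagD above, a genuine heap number branches to heap_number and is loaded with movsd; when undefined-to-NaN conversion is allowed, undefined falls through to the convert path, where NaN is materialized as 0/0 (xorps then divsd on result_reg); anything else deoptimizes. A standalone sketch of that 0/0 idiom, not V8 code and assuming IEEE-754 double semantics (which the generated x64 code relies on):

#include <cmath>
#include <cstdio>

int main() {
  volatile double zero = 0.0;                    // volatile keeps the division at runtime
  double nan = zero / zero;                      // same effect as xorps + divsd in the patch
  std::printf("isnan = %d\n", std::isnan(nan));  // prints: isnan = 1
  return 0;
}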