| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 417 matching lines...) |
| 428 return ToDoubleRegister(op->index()); | 428 return ToDoubleRegister(op->index()); |
| 429 } | 429 } |
| 430 | 430 |
| 431 | 431 |
| 432 bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const { | 432 bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const { |
| 433 return op->IsConstantOperand() && | 433 return op->IsConstantOperand() && |
| 434 chunk_->LookupLiteralRepresentation(op).IsInteger32(); | 434 chunk_->LookupLiteralRepresentation(op).IsInteger32(); |
| 435 } | 435 } |
| 436 | 436 |
| 437 | 437 |
| 438 bool LCodeGen::IsSmiConstant(LConstantOperand* op) const { |
| 439 return op->IsConstantOperand() && |
| 440 chunk_->LookupLiteralRepresentation(op).IsSmi(); |
| 441 } |
| 442 |
| 443 |
| 438 bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const { | 444 bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const { |
| 439 return op->IsConstantOperand() && | 445 return op->IsConstantOperand() && |
| 440 chunk_->LookupLiteralRepresentation(op).IsTagged(); | 446 chunk_->LookupLiteralRepresentation(op).IsTagged(); |
| 441 } | 447 } |
| 442 | 448 |
| 443 | 449 |
| 444 int LCodeGen::ToInteger32(LConstantOperand* op) const { | 450 int LCodeGen::ToInteger32(LConstantOperand* op) const { |
| 445 HConstant* constant = chunk_->LookupConstant(op); | 451 HConstant* constant = chunk_->LookupConstant(op); |
| 446 return constant->Integer32Value(); | 452 return constant->Integer32Value(); |
| 447 } | 453 } |
| 448 | 454 |
| 449 | 455 |
| 450 double LCodeGen::ToDouble(LConstantOperand* op) const { | 456 double LCodeGen::ToDouble(LConstantOperand* op) const { |
| 451 HConstant* constant = chunk_->LookupConstant(op); | 457 HConstant* constant = chunk_->LookupConstant(op); |
| 452 ASSERT(constant->HasDoubleValue()); | 458 ASSERT(constant->HasDoubleValue()); |
| 453 return constant->DoubleValue(); | 459 return constant->DoubleValue(); |
| 454 } | 460 } |
| 455 | 461 |
| 456 | 462 |
| 457 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { | 463 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { |
| 458 HConstant* constant = chunk_->LookupConstant(op); | 464 HConstant* constant = chunk_->LookupConstant(op); |
| 459 ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged()); | 465 ASSERT(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged()); |
| 460 return constant->handle(); | 466 return constant->handle(); |
| 461 } | 467 } |
| 462 | 468 |
| 463 | 469 |
| 464 Operand LCodeGen::ToOperand(LOperand* op) const { | 470 Operand LCodeGen::ToOperand(LOperand* op) const { |
| 465 // Does not handle registers. In X64 assembler, plain registers are not | 471 // Does not handle registers. In X64 assembler, plain registers are not |
| 466 // representable as an Operand. | 472 // representable as an Operand. |
| 467 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); | 473 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); |
| 468 return Operand(rbp, StackSlotOffset(op->index())); | 474 return Operand(rbp, StackSlotOffset(op->index())); |
| 469 } | 475 } |
| (...skipping 1400 matching lines...) |
| 1870 | 1876 |
| 1871 void LCodeGen::DoBranch(LBranch* instr) { | 1877 void LCodeGen::DoBranch(LBranch* instr) { |
| 1872 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1878 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1873 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1879 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1874 | 1880 |
| 1875 Representation r = instr->hydrogen()->value()->representation(); | 1881 Representation r = instr->hydrogen()->value()->representation(); |
| 1876 if (r.IsInteger32()) { | 1882 if (r.IsInteger32()) { |
| 1877 Register reg = ToRegister(instr->value()); | 1883 Register reg = ToRegister(instr->value()); |
| 1878 __ testl(reg, reg); | 1884 __ testl(reg, reg); |
| 1879 EmitBranch(true_block, false_block, not_zero); | 1885 EmitBranch(true_block, false_block, not_zero); |
| 1886 } else if (r.IsSmi()) { |
| 1887 Register reg = ToRegister(instr->value()); |
| 1888 __ testq(reg, reg); |
| 1889 EmitBranch(true_block, false_block, not_zero); |
| 1880 } else if (r.IsDouble()) { | 1890 } else if (r.IsDouble()) { |
| 1881 XMMRegister reg = ToDoubleRegister(instr->value()); | 1891 XMMRegister reg = ToDoubleRegister(instr->value()); |
| 1882 __ xorps(xmm0, xmm0); | 1892 __ xorps(xmm0, xmm0); |
| 1883 __ ucomisd(reg, xmm0); | 1893 __ ucomisd(reg, xmm0); |
| 1884 EmitBranch(true_block, false_block, not_equal); | 1894 EmitBranch(true_block, false_block, not_equal); |
| 1885 } else { | 1895 } else { |
| 1886 ASSERT(r.IsTagged()); | 1896 ASSERT(r.IsTagged()); |
| 1887 Register reg = ToRegister(instr->value()); | 1897 Register reg = ToRegister(instr->value()); |
| 1888 HType type = instr->hydrogen()->value()->type(); | 1898 HType type = instr->hydrogen()->value()->type(); |
| 1889 if (type.IsBoolean()) { | 1899 if (type.IsBoolean()) { |
| (...skipping 2008 matching lines...) |
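Note on the new IsSmi() branch in DoBranch above: a minimal sketch of why a full-width `testq` is used there instead of `testl`. It assumes the classic x64 Smi layout (kSmiShift == 32, no pointer compression), where the 32-bit payload sits in the upper half of the word and the low half is always zero, so a 32-bit test would see zero for every Smi. The constant and helper names below are illustrative, not the real V8 API.

    #include <cstdint>

    // Illustrative layout only; kSmiShift/IntToSmi mirror the assumed x64 scheme.
    constexpr int kSmiShift = 32;

    inline int64_t IntToSmi(int32_t value) {
      // Tagging places the payload in the upper 32 bits; the low bits stay 0.
      // Shift in unsigned arithmetic to keep the sketch free of signed-shift UB.
      return static_cast<int64_t>(static_cast<uint64_t>(value) << kSmiShift);
    }

    inline bool SmiIsNonZero(int64_t smi) {
      // Equivalent of `testq reg, reg` + not_zero: only a full 64-bit test can
      // distinguish Smi(0) from other Smis, since the low 32 bits are always zero.
      return smi != 0;
    }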
| 3898 | 3908 |
| 3899 Register object = ToRegister(instr->object()); | 3909 Register object = ToRegister(instr->object()); |
| 3900 | 3910 |
| 3901 int offset = instr->offset(); | 3911 int offset = instr->offset(); |
| 3902 | 3912 |
| 3903 Handle<Map> transition = instr->transition(); | 3913 Handle<Map> transition = instr->transition(); |
| 3904 | 3914 |
| 3905 if (FLAG_track_fields && representation.IsSmi()) { | 3915 if (FLAG_track_fields && representation.IsSmi()) { |
| 3906 if (instr->value()->IsConstantOperand()) { | 3916 if (instr->value()->IsConstantOperand()) { |
| 3907 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 3917 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
| 3908 if (!IsInteger32Constant(operand_value)) { | 3918 if (!IsSmiConstant(operand_value)) { |
| 3909 DeoptimizeIf(no_condition, instr->environment()); | 3919 DeoptimizeIf(no_condition, instr->environment()); |
| 3910 } | 3920 } |
| 3911 } else { | |
| 3912 Register value = ToRegister(instr->value()); | |
| 3913 __ Integer32ToSmi(value, value); | |
| 3914 } | 3921 } |
| 3915 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { | 3922 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { |
| 3916 if (instr->value()->IsConstantOperand()) { | 3923 if (instr->value()->IsConstantOperand()) { |
| 3917 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 3924 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
| 3918 if (IsInteger32Constant(operand_value)) { | 3925 if (IsInteger32Constant(operand_value)) { |
| 3919 DeoptimizeIf(no_condition, instr->environment()); | 3926 DeoptimizeIf(no_condition, instr->environment()); |
| 3920 } | 3927 } |
| 3921 } else { | 3928 } else { |
| 3922 if (!instr->hydrogen()->value()->type().IsHeapObject()) { | 3929 if (!instr->hydrogen()->value()->type().IsHeapObject()) { |
| 3923 Register value = ToRegister(instr->value()); | 3930 Register value = ToRegister(instr->value()); |
| (...skipping 37 matching lines...) |
| 3961 type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 3968 type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 3962 | 3969 |
| 3963 Register write_register = object; | 3970 Register write_register = object; |
| 3964 if (!instr->is_in_object()) { | 3971 if (!instr->is_in_object()) { |
| 3965 write_register = ToRegister(instr->temp()); | 3972 write_register = ToRegister(instr->temp()); |
| 3966 __ movq(write_register, FieldOperand(object, JSObject::kPropertiesOffset)); | 3973 __ movq(write_register, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 3967 } | 3974 } |
| 3968 | 3975 |
| 3969 if (instr->value()->IsConstantOperand()) { | 3976 if (instr->value()->IsConstantOperand()) { |
| 3970 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); | 3977 LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
| 3971 if (IsInteger32Constant(operand_value)) { | 3978 if (operand_value->IsRegister()) { |
| 3972 // In lithium register preparation, we made sure that the constant integer | |
| 3973 // operand fits into smi range. | |
| 3974 Smi* smi_value = Smi::FromInt(ToInteger32(operand_value)); | |
| 3975 __ Move(FieldOperand(write_register, offset), smi_value); | |
| 3976 } else if (operand_value->IsRegister()) { | |
| 3977 __ movq(FieldOperand(write_register, offset), | 3979 __ movq(FieldOperand(write_register, offset), |
| 3978 ToRegister(operand_value)); | 3980 ToRegister(operand_value)); |
| 3979 } else { | 3981 } else { |
| 3980 Handle<Object> handle_value = ToHandle(operand_value); | 3982 Handle<Object> handle_value = ToHandle(operand_value); |
| 3981 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 3983 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); |
| 3982 __ Move(FieldOperand(write_register, offset), handle_value); | 3984 __ Move(FieldOperand(write_register, offset), handle_value); |
| 3983 } | 3985 } |
| 3984 } else { | 3986 } else { |
| 3985 __ movq(FieldOperand(write_register, offset), ToRegister(instr->value())); | 3987 __ movq(FieldOperand(write_register, offset), ToRegister(instr->value())); |
| 3986 } | 3988 } |
| (...skipping 440 matching lines...) |
| 4427 LOperand* output = instr->result(); | 4429 LOperand* output = instr->result(); |
| 4428 ASSERT(output->IsDoubleRegister()); | 4430 ASSERT(output->IsDoubleRegister()); |
| 4429 if (input->IsRegister()) { | 4431 if (input->IsRegister()) { |
| 4430 __ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input)); | 4432 __ cvtlsi2sd(ToDoubleRegister(output), ToRegister(input)); |
| 4431 } else { | 4433 } else { |
| 4432 __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input)); | 4434 __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input)); |
| 4433 } | 4435 } |
| 4434 } | 4436 } |
| 4435 | 4437 |
| 4436 | 4438 |
| 4439 void LCodeGen::DoInteger32ToSmi(LInteger32ToSmi* instr) { |
| 4440 LOperand* input = instr->value(); |
| 4441 ASSERT(input->IsRegister()); |
| 4442 LOperand* output = instr->result(); |
| 4443 __ Integer32ToSmi(ToRegister(output), ToRegister(input)); |
| 4444 if (!instr->hydrogen()->value()->HasRange() || |
| 4445 !instr->hydrogen()->value()->range()->IsInSmiRange()) { |
| 4446 DeoptimizeIf(overflow, instr->environment()); |
| 4447 } |
| 4448 } |
| 4449 |
| 4450 |
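Note on DoInteger32ToSmi above: a minimal sketch of the deopt-guard logic, using a hypothetical Range type and IsInSmiRange helper as stand-ins for Hydrogen's range analysis (not the real API); the tagging step itself is elided. The point is only that the overflow deopt is emitted when the range is unknown or may exceed the target's Smi range.

    #include <cstdint>
    #include <optional>

    // Illustrative stand-in for Hydrogen's range analysis.
    struct Range {
      int64_t lower;
      int64_t upper;
      bool IsInSmiRange(int64_t smi_min, int64_t smi_max) const {
        return lower >= smi_min && upper <= smi_max;
      }
    };

    // True when an overflow deopt must be emitted after tagging: either no
    // range information exists, or the proven range may fall outside Smi range.
    inline bool NeedsOverflowDeopt(const std::optional<Range>& range,
                                   int64_t smi_min, int64_t smi_max) {
      return !range.has_value() || !range->IsInSmiRange(smi_min, smi_max);
    }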
| 4437 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) { | 4451 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) { |
| 4438 LOperand* input = instr->value(); | 4452 LOperand* input = instr->value(); |
| 4439 LOperand* output = instr->result(); | 4453 LOperand* output = instr->result(); |
| 4440 LOperand* temp = instr->temp(); | 4454 LOperand* temp = instr->temp(); |
| 4441 | 4455 |
| 4442 __ LoadUint32(ToDoubleRegister(output), | 4456 __ LoadUint32(ToDoubleRegister(output), |
| 4443 ToRegister(input), | 4457 ToRegister(input), |
| 4444 ToDoubleRegister(temp)); | 4458 ToDoubleRegister(temp)); |
| 4445 } | 4459 } |
| 4446 | 4460 |
| (...skipping 372 matching lines...) |
| 4819 // If input was positive, we are ok and return 0, otherwise | 4833 // If input was positive, we are ok and return 0, otherwise |
| 4820 // deoptimize. | 4834 // deoptimize. |
| 4821 __ andl(result_reg, Immediate(1)); | 4835 __ andl(result_reg, Immediate(1)); |
| 4822 DeoptimizeIf(not_zero, instr->environment()); | 4836 DeoptimizeIf(not_zero, instr->environment()); |
| 4823 __ bind(&done); | 4837 __ bind(&done); |
| 4824 } | 4838 } |
| 4825 } | 4839 } |
| 4826 } | 4840 } |
| 4827 | 4841 |
| 4828 | 4842 |
| 4843 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
| 4844 LOperand* input = instr->value(); |
| 4845 ASSERT(input->IsDoubleRegister()); |
| 4846 LOperand* result = instr->result(); |
| 4847 ASSERT(result->IsRegister()); |
| 4848 CpuFeatureScope scope(masm(), SSE2); |
| 4849 |
| 4850 XMMRegister input_reg = ToDoubleRegister(input); |
| 4851 Register result_reg = ToRegister(result); |
| 4852 |
| 4853 Label done; |
| 4854 __ cvttsd2si(result_reg, input_reg); |
| 4855 __ cvtlsi2sd(xmm0, result_reg); |
| 4856 __ ucomisd(xmm0, input_reg); |
| 4857 DeoptimizeIf(not_equal, instr->environment()); |
| 4858 DeoptimizeIf(parity_even, instr->environment()); // NaN. |
| 4859 |
| 4860 // The integer converted back is equal to the original. We |
| 4861 // only have to test if we got -0 as an input. |
| 4862 __ testl(result_reg, result_reg); |
| 4863 __ j(not_zero, &done, Label::kNear); |
| 4864 __ movmskpd(result_reg, input_reg); |
| 4865 // Bit 0 contains the sign of the double in input_reg. |
| 4866 // If input was positive, we are ok and return 0, otherwise |
| 4867 // deoptimize. |
| 4868 __ andl(result_reg, Immediate(1)); |
| 4869 DeoptimizeIf(not_zero, instr->environment()); |
| 4870 __ bind(&done); |
| 4871 __ Integer32ToSmi(result_reg, result_reg); |
| 4872 DeoptimizeIf(overflow, instr->environment()); |
| 4873 } |
| 4874 |
| 4875 |
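Note on DoDoubleToSmi above: a scalar C++ sketch of the exactness check the emitted code performs (cvttsd2si, convert back, compare, then a sign test for -0). It assumes IEEE-754 doubles and two's-complement int32; the final Integer32ToSmi tagging step is omitted, and the function name is illustrative.

    #include <cmath>
    #include <cstdint>
    #include <optional>

    // Returns the int32 value only when `input` is an exact 32-bit integer and
    // is not negative zero; otherwise the conversion should deoptimize.
    inline std::optional<int32_t> DoubleToInt32Exact(double input) {
      // Reject NaN and values whose truncation cannot fit in int32 before the
      // cast (unlike cvttsd2si, a C++ cast of such values is undefined).
      if (!(input > -2147483649.0 && input < 2147483648.0)) return std::nullopt;
      int32_t truncated = static_cast<int32_t>(input);                   // cvttsd2si
      if (static_cast<double>(truncated) != input) return std::nullopt;  // inexact
      if (truncated == 0 && std::signbit(input)) return std::nullopt;    // -0 input
      return truncated;
    }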
| 4829 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { | 4876 void LCodeGen::DoCheckSmi(LCheckSmi* instr) { |
| 4830 LOperand* input = instr->value(); | 4877 LOperand* input = instr->value(); |
| 4831 Condition cc = masm()->CheckSmi(ToRegister(input)); | 4878 Condition cc = masm()->CheckSmi(ToRegister(input)); |
| 4832 DeoptimizeIf(NegateCondition(cc), instr->environment()); | 4879 DeoptimizeIf(NegateCondition(cc), instr->environment()); |
| 4833 } | 4880 } |
| 4834 | 4881 |
| 4835 | 4882 |
| 4836 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { | 4883 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { |
| 4837 LOperand* input = instr->value(); | 4884 LOperand* input = instr->value(); |
| 4838 Condition cc = masm()->CheckSmi(ToRegister(input)); | 4885 Condition cc = masm()->CheckSmi(ToRegister(input)); |
| (...skipping 766 matching lines...) |
| 5605 FixedArray::kHeaderSize - kPointerSize)); | 5652 FixedArray::kHeaderSize - kPointerSize)); |
| 5606 __ bind(&done); | 5653 __ bind(&done); |
| 5607 } | 5654 } |
| 5608 | 5655 |
| 5609 | 5656 |
| 5610 #undef __ | 5657 #undef __ |
| 5611 | 5658 |
| 5612 } } // namespace v8::internal | 5659 } } // namespace v8::internal |
| 5613 | 5660 |
| 5614 #endif // V8_TARGET_ARCH_X64 | 5661 #endif // V8_TARGET_ARCH_X64 |