Index: src/x64/lithium-codegen-x64.cc
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 475c405d0d67823ca5c7c2279f054b78f683123f..7f0ed3dbaeaafe00f3924c307a96a6e431d4d7d9 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -410,7 +410,7 @@ bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const {
 }
 
 
-int LCodeGen::ToInteger32(LConstantOperand* op) const {
+int32_t LCodeGen::ToInteger32(LConstantOperand* op) const {
   HConstant* constant = chunk_->LookupConstant(op);
   return constant->Integer32Value();
 }
@@ -697,12 +697,6 @@ void LCodeGen::DeoptimizeIf(Condition cc,
 }
 
 
-void LCodeGen::SoftDeoptimize(LEnvironment* environment) {
-  ASSERT(!info()->IsStub());
-  DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT);
-}
-
-
 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
   ZoneList<Handle<Map> > maps(1, zone());
   int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
@@ -1305,9 +1299,19 @@ void LCodeGen::DoMulI(LMulI* instr) {
       __ imull(left, left, Immediate(right_value));
     }
   } else if (right->IsStackSlot()) {
-    __ imull(left, ToOperand(right));
+    if (instr->hydrogen_value()->representation().IsSmi()) {
+      __ SmiToInteger32(left, left);
+      __ imul(left, ToOperand(right));
+    } else {
+      __ imull(left, ToOperand(right));
+    }
   } else {
-    __ imull(left, ToRegister(right));
+    if (instr->hydrogen_value()->representation().IsSmi()) {
+      __ SmiToInteger32(left, left);
+      __ imul(left, ToRegister(right));
+    } else {
+      __ imull(left, ToRegister(right));
+    }
   }
 
   if (can_overflow) {
@@ -1364,13 +1368,13 @@ void LCodeGen::DoBitI(LBitI* instr) {
   } else if (right->IsStackSlot()) {
     switch (instr->op()) {
       case Token::BIT_AND:
-        __ andl(ToRegister(left), ToOperand(right));
+        __ and_(ToRegister(left), ToOperand(right));
         break;
       case Token::BIT_OR:
-        __ orl(ToRegister(left), ToOperand(right));
+        __ or_(ToRegister(left), ToOperand(right));
         break;
      case Token::BIT_XOR:
-        __ xorl(ToRegister(left), ToOperand(right));
+        __ xor_(ToRegister(left), ToOperand(right));
         break;
       default:
         UNREACHABLE();
@@ -1380,13 +1384,13 @@ void LCodeGen::DoBitI(LBitI* instr) {
     ASSERT(right->IsRegister());
     switch (instr->op()) {
       case Token::BIT_AND:
-        __ andl(ToRegister(left), ToRegister(right));
+        __ and_(ToRegister(left), ToRegister(right));
         break;
       case Token::BIT_OR:
-        __ orl(ToRegister(left), ToRegister(right));
+        __ or_(ToRegister(left), ToRegister(right));
         break;
       case Token::BIT_XOR:
-        __ xorl(ToRegister(left), ToRegister(right));
+        __ xor_(ToRegister(left), ToRegister(right));
         break;
       default:
         UNREACHABLE();
@@ -1449,7 +1453,11 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
         break;
       case Token::SHL:
         if (shift_count != 0) {
-          __ shll(ToRegister(left), Immediate(shift_count));
+          if (instr->hydrogen_value()->representation().IsSmi()) {
+            __ shl(ToRegister(left), Immediate(shift_count));
+          } else {
+            __ shll(ToRegister(left), Immediate(shift_count));
+          }
         }
         break;
       default:
@@ -1469,9 +1477,17 @@ void LCodeGen::DoSubI(LSubI* instr) {
     __ subl(ToRegister(left),
             Immediate(ToInteger32(LConstantOperand::cast(right))));
   } else if (right->IsRegister()) {
-    __ subl(ToRegister(left), ToRegister(right));
+    if (instr->hydrogen_value()->representation().IsSmi()) {
+      __ subq(ToRegister(left), ToRegister(right));
+    } else {
+      __ subl(ToRegister(left), ToRegister(right));
+    }
   } else {
-    __ subl(ToRegister(left), ToOperand(right));
+    if (instr->hydrogen_value()->representation().IsSmi()) {
+      __ subq(ToRegister(left), ToOperand(right));
+    } else {
+      __ subl(ToRegister(left), ToOperand(right));
+    }
   }
 
   if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
@@ -1510,12 +1526,7 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
 void LCodeGen::DoConstantT(LConstantT* instr) {
   Handle<Object> value = instr->value();
   AllowDeferredHandleDereference smi_check;
-  if (value->IsSmi()) {
-    __ Move(ToRegister(instr->result()), value);
-  } else {
-    __ LoadHeapObject(ToRegister(instr->result()),
-                      Handle<HeapObject>::cast(value));
-  }
+  __ LoadObject(ToRegister(instr->result()), value);
 }
 
 
@@ -1657,16 +1668,28 @@ void LCodeGen::DoAddI(LAddI* instr) {
               MemOperand(ToRegister(left), offset));
     } else {
       Operand address(ToRegister(left), ToRegister(right), times_1, 0);
-      __ leal(ToRegister(instr->result()), address);
+      if (instr->hydrogen()->representation().IsSmi()) {
+        __ lea(ToRegister(instr->result()), address);
+      } else {
+        __ leal(ToRegister(instr->result()), address);
+      }
     }
   } else {
     if (right->IsConstantOperand()) {
       __ addl(ToRegister(left),
               Immediate(ToInteger32(LConstantOperand::cast(right))));
     } else if (right->IsRegister()) {
-      __ addl(ToRegister(left), ToRegister(right));
+      if (instr->hydrogen_value()->representation().IsSmi()) {
+        __ addq(ToRegister(left), ToRegister(right));
+      } else {
+        __ addl(ToRegister(left), ToRegister(right));
+      }
     } else {
-      __ addl(ToRegister(left), ToOperand(right));
+      if (instr->hydrogen_value()->representation().IsSmi()) {
+        __ addq(ToRegister(left), ToOperand(right));
+      } else {
+        __ addl(ToRegister(left), ToOperand(right));
+      }
     }
     if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
       DeoptimizeIf(overflow, instr->environment());
@@ -1680,7 +1703,7 @@ void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
   LOperand* right = instr->right();
   ASSERT(left->Equals(instr->result()));
   HMathMinMax::Operation operation = instr->hydrogen()->operation();
-  if (instr->hydrogen()->representation().IsInteger32()) {
+  if (instr->hydrogen()->representation().IsSmiOrInteger32()) {
     Label return_left;
     Condition condition = (operation == HMathMinMax::kMathMin)
         ? less_equal
@@ -1689,17 +1712,26 @@ void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
     if (right->IsConstantOperand()) {
       Immediate right_imm =
           Immediate(ToInteger32(LConstantOperand::cast(right)));
+      ASSERT(!instr->hydrogen_value()->representation().IsSmi());
       __ cmpl(left_reg, right_imm);
       __ j(condition, &return_left, Label::kNear);
       __ movq(left_reg, right_imm);
     } else if (right->IsRegister()) {
       Register right_reg = ToRegister(right);
-      __ cmpl(left_reg, right_reg);
+      if (instr->hydrogen_value()->representation().IsSmi()) {
+        __ cmpq(left_reg, right_reg);
+      } else {
+        __ cmpl(left_reg, right_reg);
+      }
       __ j(condition, &return_left, Label::kNear);
       __ movq(left_reg, right_reg);
     } else {
       Operand right_op = ToOperand(right);
-      __ cmpl(left_reg, right_op);
+      if (instr->hydrogen_value()->representation().IsSmi()) {
+        __ cmpq(left_reg, right_op);
+      } else {
+        __ cmpl(left_reg, right_op);
+      }
       __ j(condition, &return_left, Label::kNear);
       __ movq(left_reg, right_op);
     }
@@ -2087,14 +2119,6 @@ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
 }
 
 
-void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
-  Register left = ToRegister(instr->left());
-
-  __ cmpq(left, Immediate(instr->hydrogen()->right()));
-  EmitBranch(instr, equal);
-}
-
-
 Condition LCodeGen::EmitIsObject(Register input,
                                  Label* is_not_object,
                                  Label* is_object) {
@@ -2703,9 +2727,9 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
       __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
       __ movq(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
     }
-  } else if (lookup.IsConstantFunction()) {
-    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
-    __ LoadHeapObject(result, function);
+  } else if (lookup.IsConstant()) {
+    Handle<Object> constant(lookup.GetConstantFromMap(*type), isolate());
+    __ LoadObject(result, constant);
   } else {
     // Negative lookup.
     // Check prototypes.
@@ -2736,7 +2760,7 @@ static bool CompactEmit(SmallMapList* list,
   if (map->HasElementsTransition()) return false;
   LookupResult lookup(isolate);
   map->LookupDescriptor(NULL, *name, &lookup);
-  return lookup.IsField() || lookup.IsConstantFunction();
+  return lookup.IsField() || lookup.IsConstant();
 }
 
 
@@ -2886,9 +2910,7 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
     // gets replaced during bound check elimination with the index argument
     // to the bounds check, which can be tagged, so that case must be
     // handled here, too.
-    if (instr->hydrogen()->key()->representation().IsSmi()) {
-      __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
+    if (instr->hydrogen()->IsDehoisted()) {
      // Sign extend key because it could be a 32 bit negative value
      // and the dehoisted address computation happens in 64 bits
      __ movsxlq(key_reg, key_reg);
@@ -2959,9 +2981,7 @@ void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
     // representation for the key to be an integer, the input gets replaced
     // during bound check elimination with the index argument to the bounds
     // check, which can be tagged, so that case must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsSmi()) {
-      __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
+    if (instr->hydrogen()->IsDehoisted()) {
      // Sign extend key because it could be a 32 bit negative value
      // and the dehoisted address computation happens in 64 bits
      __ movsxlq(key_reg, key_reg);
@@ -3001,9 +3021,7 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
     // gets replaced during bound check elimination with the index
     // argument to the bounds check, which can be tagged, so that
     // case must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsSmi()) {
-      __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
+    if (instr->hydrogen()->IsDehoisted()) {
      // Sign extend key because it could be a 32 bit negative value
      // and the dehoisted address computation happens in 64 bits
      __ movsxlq(key_reg, key_reg);
@@ -3343,38 +3361,29 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
                  Heap::kHeapNumberMapRootIndex);
   DeoptimizeIf(not_equal, instr->environment());
 
-  Label done;
+  Label slow, allocated, done;
   Register tmp = input_reg.is(rax) ? rcx : rax;
   Register tmp2 = tmp.is(rcx) ? rdx : input_reg.is(rcx) ? rdx : rcx;
 
   // Preserve the value of all registers.
   PushSafepointRegistersScope scope(this);
 
-  Label negative;
   __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
   // Check the sign of the argument. If the argument is positive, just
   // return it. We do not need to patch the stack since |input| and
   // |result| are the same register and |input| will be restored
   // unchanged by popping safepoint registers.
   __ testl(tmp, Immediate(HeapNumber::kSignMask));
-  __ j(not_zero, &negative);
-  __ jmp(&done);
+  __ j(zero, &done);
 
-  __ bind(&negative);
-
-  Label allocated, slow;
   __ AllocateHeapNumber(tmp, tmp2, &slow);
-  __ jmp(&allocated);
+  __ jmp(&allocated, Label::kNear);
 
   // Slow case: Call the runtime system to do the number allocation.
   __ bind(&slow);
-
   CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
   // Set the pointer to the new heap number in tmp.
-  if (!tmp.is(rax)) {
-    __ movq(tmp, rax);
-  }
-
+  if (!tmp.is(rax)) __ movq(tmp, rax);
   // Restore input_reg after call to runtime.
   __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
 
@@ -3393,7 +3402,7 @@ void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) {
   Register input_reg = ToRegister(instr->value());
   __ testl(input_reg, input_reg);
   Label is_positive;
-  __ j(not_sign, &is_positive);
+  __ j(not_sign, &is_positive, Label::kNear);
   __ negl(input_reg);  // Sets flags.
   DeoptimizeIf(negative, instr->environment());
   __ bind(&is_positive);
@@ -4023,6 +4032,18 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
 }
 
 
+void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) {
+  if (FLAG_debug_code && check->hydrogen()->skip_check()) {
+    Label done;
+    __ j(NegateCondition(cc), &done, Label::kNear);
+    __ int3();
+    __ bind(&done);
+  } else {
+    DeoptimizeIf(cc, check->environment());
+  }
+}
+
+
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
   if (instr->hydrogen()->skip_check()) return;
 
@@ -4060,7 +4081,9 @@ void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
       __ cmpq(length, ToRegister(instr->index()));
     }
   }
-  DeoptimizeIf(below_equal, instr->environment());
+  Condition condition =
+      instr->hydrogen()->allow_equality() ? below : below_equal;
+  ApplyCheckIf(condition, instr);
 }
 
 
@@ -4074,9 +4097,7 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
     // gets replaced during bound check elimination with the index
     // argument to the bounds check, which can be tagged, so that case
     // must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsSmi()) {
-      __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
+    if (instr->hydrogen()->IsDehoisted()) {
      // Sign extend key because it could be a 32 bit negative value
      // and the dehoisted address computation happens in 64 bits
      __ movsxlq(key_reg, key_reg);
@@ -4138,9 +4159,7 @@ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
     // input gets replaced during bound check elimination with the index
     // argument to the bounds check, which can be tagged, so that case
     // must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsSmi()) {
-      __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
+    if (instr->hydrogen()->IsDehoisted()) {
      // Sign extend key because it could be a 32 bit negative value
      // and the dehoisted address computation happens in 64 bits
      __ movsxlq(key_reg, key_reg);
@@ -4181,9 +4200,7 @@ void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
     // input gets replaced during bound check elimination with the index
     // argument to the bounds check, which can be tagged, so that case
     // must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsSmi()) {
-      __ SmiToInteger64(key_reg, key_reg);
-    } else if (instr->hydrogen()->IsDehoisted()) {
+    if (instr->hydrogen()->IsDehoisted()) {
      // Sign extend key because it could be a 32 bit negative value
      // and the dehoisted address computation happens in 64 bits
      __ movsxlq(key_reg, key_reg);
@@ -4271,7 +4288,7 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
     ASSERT_NE(instr->temp(), NULL);
     __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
                         ToRegister(instr->temp()), kDontSaveFPRegs);
-  } else if (FLAG_compiled_transitions) {
+  } else {
     PushSafepointRegistersScope scope(this);
     if (!object_reg.is(rax)) {
       __ movq(rax, object_reg);
@@ -4281,28 +4298,6 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
     __ CallStub(&stub);
     RecordSafepointWithRegisters(
         instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
-  } else if (IsFastSmiElementsKind(from_kind) &&
-             IsFastDoubleElementsKind(to_kind)) {
-    Register fixed_object_reg = ToRegister(instr->temp());
-    ASSERT(fixed_object_reg.is(rdx));
-    Register new_map_reg = ToRegister(instr->new_map_temp());
-    ASSERT(new_map_reg.is(rbx));
-    __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
-    __ movq(fixed_object_reg, object_reg);
-    CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
-             RelocInfo::CODE_TARGET, instr);
-  } else if (IsFastDoubleElementsKind(from_kind) &&
-             IsFastObjectElementsKind(to_kind)) {
-    Register fixed_object_reg = ToRegister(instr->temp());
-    ASSERT(fixed_object_reg.is(rdx));
-    Register new_map_reg = ToRegister(instr->new_map_temp());
-    ASSERT(new_map_reg.is(rbx));
-    __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
-    __ movq(fixed_object_reg, object_reg);
-    CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
-             RelocInfo::CODE_TARGET, instr);
-  } else {
-    UNREACHABLE();
   }
   __ bind(&not_applicable);
 }
@@ -4954,6 +4949,7 @@ void LCodeGen::DoCheckMapCommon(Register reg,
 
 
 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+  if (instr->hydrogen()->CanOmitMapChecks()) return;
   LOperand* input = instr->value();
   ASSERT(input->IsRegister());
   Register reg = ToRegister(input);
@@ -5021,6 +5017,7 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
 
 
 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
+  if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
   Register reg = ToRegister(instr->temp());
 
   ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
@@ -5028,11 +5025,9 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
 
   ASSERT(prototypes->length() == maps->length());
 
-  if (!instr->hydrogen()->CanOmitPrototypeChecks()) {
-    for (int i = 0; i < prototypes->length(); i++) {
-      __ LoadHeapObject(reg, prototypes->at(i));
-      DoCheckMapCommon(reg, maps->at(i), instr);
-    }
+  for (int i = 0; i < prototypes->length(); i++) {
+    __ LoadHeapObject(reg, prototypes->at(i));
+    DoCheckMapCommon(reg, maps->at(i), instr);
   }
 }
 
@@ -5364,11 +5359,15 @@ void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
 
 
 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
-  if (instr->hydrogen_value()->IsSoftDeoptimize()) {
-    SoftDeoptimize(instr->environment());
-  } else {
-    DeoptimizeIf(no_condition, instr->environment());
-  }
+  Deoptimizer::BailoutType type = instr->hydrogen()->type();
+  // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the
+  // needed return address), even though the implementation of LAZY and EAGER is
+  // now identical. When LAZY is eventually completely folded into EAGER, remove
+  // the special case below.
+  if (info()->IsStub() && type == Deoptimizer::EAGER) {
+    type = Deoptimizer::LAZY;
+  }
+  DeoptimizeIf(no_condition, instr->environment(), type);
 }
 
 