Index: src/ia32/lithium-codegen-ia32.cc
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 38d2011d0b736badcf452b9254febc7ef6d1fa38..4ce7fd6067d106d92209b07a5f17d272bd15b292 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -656,9 +656,18 @@ XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
 }


-int LCodeGen::ToInteger32(LConstantOperand* op) const {
+int32_t LCodeGen::ToInteger32(LConstantOperand* op) const {
+  return ToRepresentation(op, Representation::Integer32());
+}
+
+
+int32_t LCodeGen::ToRepresentation(LConstantOperand* op,
+                                   const Representation& r) const {
   HConstant* constant = chunk_->LookupConstant(op);
-  return constant->Integer32Value();
+  int32_t value = constant->Integer32Value();
+  if (r.IsInteger32()) return value;
+  ASSERT(r.IsSmiOrTagged());
+  return reinterpret_cast<int32_t>(Smi::FromInt(value));
 }
@@ -1003,12 +1012,6 @@ void LCodeGen::DeoptimizeIf(Condition cc,
 }


-void LCodeGen::SoftDeoptimize(LEnvironment* environment) {
-  ASSERT(!info()->IsStub());
-  DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT);
-}
-
-
 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
   ZoneList<Handle<Map> > maps(1, zone());
   int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
@@ -1625,6 +1628,9 @@ void LCodeGen::DoMulI(LMulI* instr) {
       __ imul(left, left, constant);
     }
   } else {
+    if (instr->hydrogen()->representation().IsSmi()) {
+      __ SmiUntag(left);
+    }
     __ imul(left, ToOperand(right));
   }
@@ -1661,7 +1667,8 @@ void LCodeGen::DoBitI(LBitI* instr) {
   ASSERT(left->IsRegister());

   if (right->IsConstantOperand()) {
-    int right_operand = ToInteger32(LConstantOperand::cast(right));
+    int right_operand = ToRepresentation(LConstantOperand::cast(right),
+                                         instr->hydrogen()->representation());
     switch (instr->op()) {
       case Token::BIT_AND:
         __ and_(ToRegister(left), right_operand);
@@ -1755,7 +1762,14 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
         break;
       case Token::SHL:
         if (shift_count != 0) {
-          __ shl(ToRegister(left), shift_count);
+          if (instr->hydrogen_value()->representation().IsSmi() &&
+              instr->can_deopt()) {
+            __ shl(ToRegister(left), shift_count - 1);
+            __ SmiTag(ToRegister(left));
+            DeoptimizeIf(overflow, instr->environment());
+          } else {
+            __ shl(ToRegister(left), shift_count);
+          }
         }
         break;
       default:
@@ -1772,7 +1786,8 @@ void LCodeGen::DoSubI(LSubI* instr) {
   ASSERT(left->Equals(instr->result()));

   if (right->IsConstantOperand()) {
-    __ sub(ToOperand(left), ToInteger32Immediate(right));
+    __ sub(ToOperand(left),
+           ToImmediate(right, instr->hydrogen()->representation()));
   } else {
     __ sub(ToRegister(left), ToOperand(right));
   }
@@ -1842,11 +1857,7 @@ void LCodeGen::DoConstantT(LConstantT* instr) {
   Register reg = ToRegister(instr->result());
   Handle<Object> handle = instr->value();
   AllowDeferredHandleDereference smi_check;
-  if (handle->IsHeapObject()) {
-    __ LoadHeapObject(reg, Handle<HeapObject>::cast(handle));
-  } else {
-    __ Set(reg, Immediate(handle));
-  }
+  __ LoadObject(reg, handle);
 }
@@ -1985,7 +1996,8 @@ void LCodeGen::DoAddI(LAddI* instr) {
   if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) {
     if (right->IsConstantOperand()) {
-      int32_t offset = ToInteger32(LConstantOperand::cast(right));
+      int32_t offset = ToRepresentation(LConstantOperand::cast(right),
+                                        instr->hydrogen()->representation());
       __ lea(ToRegister(instr->result()), MemOperand(ToRegister(left), offset));
     } else {
       Operand address(ToRegister(left), ToRegister(right), times_1, 0);
@@ -1993,7 +2005,8 @@ void LCodeGen::DoAddI(LAddI* instr) {
     }
   } else {
     if (right->IsConstantOperand()) {
-      __ add(ToOperand(left), ToInteger32Immediate(right));
+      __ add(ToOperand(left),
+             ToImmediate(right, instr->hydrogen()->representation()));
     } else {
       __ add(ToRegister(left), ToOperand(right));
     }
@@ -2010,17 +2023,18 @@ void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
   LOperand* right = instr->right();
   ASSERT(left->Equals(instr->result()));
   HMathMinMax::Operation operation = instr->hydrogen()->operation();
-  if (instr->hydrogen()->representation().IsInteger32()) {
+  if (instr->hydrogen()->representation().IsSmiOrInteger32()) {
     Label return_left;
     Condition condition = (operation == HMathMinMax::kMathMin)
         ? less_equal
        : greater_equal;
     if (right->IsConstantOperand()) {
       Operand left_op = ToOperand(left);
-      Immediate right_imm = ToInteger32Immediate(right);
-      __ cmp(left_op, right_imm);
+      Immediate immediate = ToImmediate(LConstantOperand::cast(instr->right()),
+                                        instr->hydrogen()->representation());
+      __ cmp(left_op, immediate);
       __ j(condition, &return_left, Label::kNear);
-      __ mov(left_op, right_imm);
+      __ mov(left_op, immediate);
     } else {
       Register left_reg = ToRegister(left);
       Operand right_op = ToOperand(right);
@@ -2388,19 +2402,11 @@ void LCodeGen::DoCompareNumericAndBranch(LCompareNumericAndBranch* instr) {
     __ j(parity_even, instr->FalseLabel(chunk_));
   } else {
     if (right->IsConstantOperand()) {
-      int32_t const_value = ToInteger32(LConstantOperand::cast(right));
-      if (instr->hydrogen_value()->representation().IsSmi()) {
-        __ cmp(ToOperand(left), Immediate(Smi::FromInt(const_value)));
-      } else {
-        __ cmp(ToOperand(left), Immediate(const_value));
-      }
+      __ cmp(ToOperand(left),
+             ToImmediate(right, instr->hydrogen()->representation()));
     } else if (left->IsConstantOperand()) {
-      int32_t const_value = ToInteger32(LConstantOperand::cast(left));
-      if (instr->hydrogen_value()->representation().IsSmi()) {
-        __ cmp(ToOperand(right), Immediate(Smi::FromInt(const_value)));
-      } else {
-        __ cmp(ToOperand(right), Immediate(const_value));
-      }
+      __ cmp(ToOperand(right),
+             ToImmediate(left, instr->hydrogen()->representation()));
       // We transposed the operands. Reverse the condition.
       cc = ReverseCondition(cc);
     } else {
@@ -2426,14 +2432,6 @@ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
 }


-void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
-  Register left = ToRegister(instr->left());
-
-  __ cmp(left, instr->hydrogen()->right());
-  EmitBranch(instr, equal);
-}
-
-
 Condition LCodeGen::EmitIsObject(Register input,
                                  Register temp1,
                                  Label* is_not_object,
@@ -3074,11 +3072,11 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
 }


-void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
-                                               Register object,
-                                               Handle<Map> type,
-                                               Handle<String> name,
-                                               LEnvironment* env) {
+void LCodeGen::EmitLoadFieldOrConstant(Register result,
+                                       Register object,
+                                       Handle<Map> type,
+                                       Handle<String> name,
+                                       LEnvironment* env) {
   LookupResult lookup(isolate());
   type->LookupDescriptor(NULL, *name, &lookup);
   ASSERT(lookup.IsFound() || lookup.IsCacheable());
@@ -3094,9 +3092,9 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
       __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
       __ mov(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
     }
-  } else if (lookup.IsConstantFunction()) {
-    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
-    __ LoadHeapObject(result, function);
+  } else if (lookup.IsConstant()) {
+    Handle<Object> constant(lookup.GetConstantFromMap(*type), isolate());
+    __ LoadObject(result, constant);
   } else {
     // Negative lookup.
     // Check prototypes.
@@ -3145,7 +3143,7 @@ static bool CompactEmit(SmallMapList* list,
   if (map->HasElementsTransition()) return false;
   LookupResult lookup(isolate);
   map->LookupDescriptor(NULL, *name, &lookup);
-  return lookup.IsField() || lookup.IsConstantFunction();
+  return lookup.IsField() || lookup.IsConstant();
 }
@@ -3177,16 +3175,14 @@ void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
     if (last && !need_generic) {
       DeoptimizeIf(not_equal, instr->environment());
       __ bind(&check_passed);
-      EmitLoadFieldOrConstantFunction(
-          result, object, map, name, instr->environment());
+      EmitLoadFieldOrConstant(result, object, map, name, instr->environment());
     } else {
       Label next;
       bool compact = all_are_compact ? true :
           CompactEmit(instr->hydrogen()->types(), name, i, isolate());
       __ j(not_equal, &next, compact ? Label::kNear : Label::kFar);
       __ bind(&check_passed);
-      EmitLoadFieldOrConstantFunction(
-          result, object, map, name, instr->environment());
+      EmitLoadFieldOrConstant(result, object, map, name, instr->environment());
       __ jmp(&done, all_are_compact ? Label::kNear : Label::kFar);
       __ bind(&next);
     }
@@ -3736,38 +3732,30 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
          factory()->heap_number_map());
   DeoptimizeIf(not_equal, instr->environment());

-  Label done;
+  Label slow, allocated, done;
   Register tmp = input_reg.is(eax) ? ecx : eax;
   Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;

   // Preserve the value of all registers.
   PushSafepointRegistersScope scope(this);

-  Label negative;
   __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
   // Check the sign of the argument. If the argument is positive, just
   // return it. We do not need to patch the stack since |input| and
   // |result| are the same register and |input| will be restored
   // unchanged by popping safepoint registers.
   __ test(tmp, Immediate(HeapNumber::kSignMask));
-  __ j(not_zero, &negative);
-  __ jmp(&done);
+  __ j(zero, &done);

-  __ bind(&negative);
-
-  Label allocated, slow;
   __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
-  __ jmp(&allocated);
+  __ jmp(&allocated, Label::kNear);

   // Slow case: Call the runtime system to do the number allocation.
   __ bind(&slow);
-
   CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0,
                           instr, instr->context());
-
   // Set the pointer to the new heap number in tmp.
   if (!tmp.is(eax)) __ mov(tmp, eax);
-
   // Restore input_reg after call to runtime.
   __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
@@ -3787,9 +3775,8 @@ void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) {
   Register input_reg = ToRegister(instr->value());
   __ test(input_reg, Operand(input_reg));
   Label is_positive;
-  __ j(not_sign, &is_positive);
-  __ neg(input_reg);
-  __ test(input_reg, Operand(input_reg));
+  __ j(not_sign, &is_positive, Label::kNear);
+  __ neg(input_reg);  // Sets flags.
   DeoptimizeIf(negative, instr->environment());
   __ bind(&is_positive);
 }
@@ -4454,22 +4441,34 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
 }


+void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) {
+  if (FLAG_debug_code && check->hydrogen()->skip_check()) {
+    Label done;
+    __ j(NegateCondition(cc), &done, Label::kNear);
+    __ int3();
+    __ bind(&done);
+  } else {
+    DeoptimizeIf(cc, check->environment());
+  }
+}
+
+
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
-  if (instr->hydrogen()->skip_check()) return;
+  if (instr->hydrogen()->skip_check() && !FLAG_debug_code) return;
   if (instr->index()->IsConstantOperand()) {
-    int constant_index =
-        ToInteger32(LConstantOperand::cast(instr->index()));
-    if (instr->hydrogen()->length()->representation().IsSmi()) {
-      __ cmp(ToOperand(instr->length()),
-             Immediate(Smi::FromInt(constant_index)));
-    } else {
-      __ cmp(ToOperand(instr->length()), Immediate(constant_index));
-    }
-    DeoptimizeIf(below_equal, instr->environment());
+    Immediate immediate =
+        ToImmediate(LConstantOperand::cast(instr->index()),
+                    instr->hydrogen()->length()->representation());
+    __ cmp(ToOperand(instr->length()), immediate);
+    Condition condition =
+        instr->hydrogen()->allow_equality() ? below : below_equal;
+    ApplyCheckIf(condition, instr);
   } else {
     __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
-    DeoptimizeIf(above_equal, instr->environment());
+    Condition condition =
+        instr->hydrogen()->allow_equality() ? above : above_equal;
+    ApplyCheckIf(condition, instr);
   }
 }
@@ -4629,10 +4628,11 @@ void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
     __ mov(operand, ToRegister(instr->value()));
   } else {
     LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
-    if (IsInteger32(operand_value)) {
-      Smi* smi_value = Smi::FromInt(ToInteger32(operand_value));
-      __ mov(operand, Immediate(smi_value));
+    if (IsSmi(operand_value)) {
+      Immediate immediate = ToImmediate(operand_value, Representation::Smi());
+      __ mov(operand, immediate);
     } else {
+      ASSERT(!IsInteger32(operand_value));
       Handle<Object> handle_value = ToHandle(operand_value);
       __ mov(operand, handle_value);
     }
@@ -4715,7 +4715,7 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
     __ RecordWriteForMap(object_reg, to_map, new_map_reg,
                          ToRegister(instr->temp()),
                          kDontSaveFPRegs);
-  } else if (FLAG_compiled_transitions) {
+  } else {
     PushSafepointRegistersScope scope(this);
     if (!object_reg.is(eax)) {
       __ push(object_reg);
@@ -4729,28 +4729,6 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
     __ CallStub(&stub);
     RecordSafepointWithRegisters(
         instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
-  } else if (IsFastSmiElementsKind(from_kind) &&
-             IsFastDoubleElementsKind(to_kind)) {
-    Register new_map_reg = ToRegister(instr->new_map_temp());
-    __ mov(new_map_reg, to_map);
-    Register fixed_object_reg = ToRegister(instr->temp());
-    ASSERT(fixed_object_reg.is(edx));
-    ASSERT(new_map_reg.is(ebx));
-    __ mov(fixed_object_reg, object_reg);
-    CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
-             RelocInfo::CODE_TARGET, instr);
-  } else if (IsFastDoubleElementsKind(from_kind) &&
-             IsFastObjectElementsKind(to_kind)) {
-    Register new_map_reg = ToRegister(instr->new_map_temp());
-    __ mov(new_map_reg, to_map);
-    Register fixed_object_reg = ToRegister(instr->temp());
-    ASSERT(fixed_object_reg.is(edx));
-    ASSERT(new_map_reg.is(ebx));
-    __ mov(fixed_object_reg, object_reg);
-    CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
-             RelocInfo::CODE_TARGET, instr);
-  } else {
-    UNREACHABLE();
   }
   __ bind(&not_applicable);
 }
@@ -4795,8 +4773,9 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
   // DoStringCharCodeAt above.
   STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
   if (instr->index()->IsConstantOperand()) {
-    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
-    __ push(Immediate(Smi::FromInt(const_index)));
+    Immediate immediate = ToImmediate(LConstantOperand::cast(instr->index()),
+                                      Representation::Smi());
+    __ push(immediate);
   } else {
     Register index = ToRegister(instr->index());
     __ SmiTag(index);
@@ -5802,6 +5781,7 @@ void LCodeGen::DoCheckMapCommon(Register reg,
 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+  if (instr->hydrogen()->CanOmitMapChecks()) return;
   LOperand* input = instr->value();
   ASSERT(input->IsRegister());
   Register reg = ToRegister(input);
@@ -5992,6 +5972,7 @@ void LCodeGen::DoClampTToUint8NoSSE2(LClampTToUint8NoSSE2* instr) {
 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
+  if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
   Register reg = ToRegister(instr->temp());

   ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
@@ -5999,11 +5980,9 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
   ASSERT(prototypes->length() == maps->length());

-  if (!instr->hydrogen()->CanOmitPrototypeChecks()) {
-    for (int i = 0; i < prototypes->length(); i++) {
-      __ LoadHeapObject(reg, prototypes->at(i));
-      DoCheckMapCommon(reg, maps->at(i), instr);
-    }
+  for (int i = 0; i < prototypes->length(); i++) {
+    __ LoadHeapObject(reg, prototypes->at(i));
+    DoCheckMapCommon(reg, maps->at(i), instr);
   }
 }
@@ -6323,11 +6302,15 @@ void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
-  if (instr->hydrogen_value()->IsSoftDeoptimize()) {
-    SoftDeoptimize(instr->environment());
-  } else {
-    DeoptimizeIf(no_condition, instr->environment());
-  }
+  Deoptimizer::BailoutType type = instr->hydrogen()->type();
+  // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the
+  // needed return address), even though the implementation of LAZY and EAGER is
+  // now identical. When LAZY is eventually completely folded into EAGER, remove
+  // the special case below.
+  if (info()->IsStub() && type == Deoptimizer::EAGER) {
+    type = Deoptimizer::LAZY;
+  }
+  DeoptimizeIf(no_condition, instr->environment(), type);
 }