Chromium Code Reviews

Unified Diff: src/x64/lithium-codegen-x64.cc

Issue 892843007: Revert of Externalize deoptimization reasons. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 10 months ago
Index: src/x64/lithium-codegen-x64.cc
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 1af400138692a7a37c5b37a9b951f7d17756ae49..ff128605e6618bf7b215e321ee82af04d5b35f30 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -723,7 +723,7 @@
void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
- Deoptimizer::DeoptReason deopt_reason,
+ const char* detail,
Deoptimizer::BailoutType bailout_type) {
LEnvironment* environment = instr->environment();
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
@@ -769,7 +769,7 @@
}
Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
- instr->Mnemonic(), deopt_reason);
+ instr->Mnemonic(), detail);
DCHECK(info()->IsStub() || frame_is_built_);
// Go through jump table if we need to handle condition, build frame, or
// restore caller doubles.
@@ -796,11 +796,11 @@
void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
- Deoptimizer::DeoptReason deopt_reason) {
+ const char* detail) {
Deoptimizer::BailoutType bailout_type = info()->IsStub()
? Deoptimizer::LAZY
: Deoptimizer::EAGER;
- DeoptimizeIf(cc, instr, deopt_reason, bailout_type);
+ DeoptimizeIf(cc, instr, detail, bailout_type);
}
@@ -1032,7 +1032,7 @@
__ andl(dividend, Immediate(mask));
__ negl(dividend);
if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
- DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(zero, instr, "minus zero");
}
__ jmp(&done, Label::kNear);
}
@@ -1049,7 +1049,7 @@
DCHECK(ToRegister(instr->result()).is(rax));
if (divisor == 0) {
- DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero);
+ DeoptimizeIf(no_condition, instr, "division by zero");
return;
}
@@ -1064,7 +1064,7 @@
Label remainder_not_zero;
__ j(not_zero, &remainder_not_zero, Label::kNear);
__ cmpl(dividend, Immediate(0));
- DeoptimizeIf(less, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(less, instr, "minus zero");
__ bind(&remainder_not_zero);
}
}
@@ -1086,7 +1086,7 @@
// deopt in this case because we can't return a NaN.
if (hmod->CheckFlag(HValue::kCanBeDivByZero)) {
__ testl(right_reg, right_reg);
- DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero);
+ DeoptimizeIf(zero, instr, "division by zero");
}
// Check for kMinInt % -1, idiv would signal a divide error. We
@@ -1097,7 +1097,7 @@
__ j(not_zero, &no_overflow_possible, Label::kNear);
__ cmpl(right_reg, Immediate(-1));
if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
- DeoptimizeIf(equal, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(equal, instr, "minus zero");
} else {
__ j(not_equal, &no_overflow_possible, Label::kNear);
__ Set(result_reg, 0);
@@ -1117,7 +1117,7 @@
__ j(not_sign, &positive_left, Label::kNear);
__ idivl(right_reg);
__ testl(result_reg, result_reg);
- DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(zero, instr, "minus zero");
__ jmp(&done, Label::kNear);
__ bind(&positive_left);
}
@@ -1143,13 +1143,13 @@
// If the divisor is negative, we have to negate and handle edge cases.
__ negl(dividend);
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(zero, instr, "minus zero");
}
// Dividing by -1 is basically negation, unless we overflow.
if (divisor == -1) {
if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) {
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
}
return;
}
@@ -1176,7 +1176,7 @@
DCHECK(ToRegister(instr->result()).is(rdx));
if (divisor == 0) {
- DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero);
+ DeoptimizeIf(no_condition, instr, "division by zero");
return;
}
@@ -1184,7 +1184,7 @@
HMathFloorOfDiv* hdiv = instr->hydrogen();
if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
__ testl(dividend, dividend);
- DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(zero, instr, "minus zero");
}
// Easy case: We need no dynamic check for the dividend and the flooring
@@ -1231,7 +1231,7 @@
// Check for x / 0.
if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
__ testl(divisor, divisor);
- DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero);
+ DeoptimizeIf(zero, instr, "division by zero");
}
// Check for (0 / -x) that will produce negative zero.
@@ -1240,7 +1240,7 @@
__ testl(dividend, dividend);
__ j(not_zero, &dividend_not_zero, Label::kNear);
__ testl(divisor, divisor);
- DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(sign, instr, "minus zero");
__ bind(&dividend_not_zero);
}
@@ -1250,7 +1250,7 @@
__ cmpl(dividend, Immediate(kMinInt));
__ j(not_zero, &dividend_not_min_int, Label::kNear);
__ cmpl(divisor, Immediate(-1));
- DeoptimizeIf(zero, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(zero, instr, "overflow");
__ bind(&dividend_not_min_int);
}
@@ -1279,19 +1279,19 @@
HDiv* hdiv = instr->hydrogen();
if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
__ testl(dividend, dividend);
- DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(zero, instr, "minus zero");
}
// Check for (kMinInt / -1).
if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) {
__ cmpl(dividend, Immediate(kMinInt));
- DeoptimizeIf(zero, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(zero, instr, "overflow");
}
// Deoptimize if remainder will not be 0.
if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
divisor != 1 && divisor != -1) {
int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
__ testl(dividend, Immediate(mask));
- DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision);
+ DeoptimizeIf(not_zero, instr, "lost precision");
}
__ Move(result, dividend);
int32_t shift = WhichPowerOf2Abs(divisor);
@@ -1312,7 +1312,7 @@
DCHECK(ToRegister(instr->result()).is(rdx));
if (divisor == 0) {
- DeoptimizeIf(no_condition, instr, Deoptimizer::kDivisionByZero);
+ DeoptimizeIf(no_condition, instr, "division by zero");
return;
}
@@ -1320,7 +1320,7 @@
HDiv* hdiv = instr->hydrogen();
if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
__ testl(dividend, dividend);
- DeoptimizeIf(zero, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(zero, instr, "minus zero");
}
__ TruncatingDiv(dividend, Abs(divisor));
@@ -1330,7 +1330,7 @@
__ movl(rax, rdx);
__ imull(rax, rax, Immediate(divisor));
__ subl(rax, dividend);
- DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision);
+ DeoptimizeIf(not_equal, instr, "lost precision");
}
}
@@ -1350,7 +1350,7 @@
// Check for x / 0.
if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
__ testl(divisor, divisor);
- DeoptimizeIf(zero, instr, Deoptimizer::kDivisionByZero);
+ DeoptimizeIf(zero, instr, "division by zero");
}
// Check for (0 / -x) that will produce negative zero.
@@ -1359,7 +1359,7 @@
__ testl(dividend, dividend);
__ j(not_zero, &dividend_not_zero, Label::kNear);
__ testl(divisor, divisor);
- DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(sign, instr, "minus zero");
__ bind(&dividend_not_zero);
}
@@ -1369,7 +1369,7 @@
__ cmpl(dividend, Immediate(kMinInt));
__ j(not_zero, &dividend_not_min_int, Label::kNear);
__ cmpl(divisor, Immediate(-1));
- DeoptimizeIf(zero, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(zero, instr, "overflow");
__ bind(&dividend_not_min_int);
}
@@ -1380,7 +1380,7 @@
if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) {
// Deoptimize if remainder is not 0.
__ testl(remainder, remainder);
- DeoptimizeIf(not_zero, instr, Deoptimizer::kLostPrecision);
+ DeoptimizeIf(not_zero, instr, "lost precision");
}
}
@@ -1457,7 +1457,7 @@
}
if (can_overflow) {
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
}
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
@@ -1476,10 +1476,10 @@
? !instr->hydrogen_value()->representation().IsSmi()
: SmiValuesAre31Bits());
if (ToInteger32(LConstantOperand::cast(right)) < 0) {
- DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(no_condition, instr, "minus zero");
} else if (ToInteger32(LConstantOperand::cast(right)) == 0) {
__ cmpl(kScratchRegister, Immediate(0));
- DeoptimizeIf(less, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(less, instr, "minus zero");
}
} else if (right->IsStackSlot()) {
if (instr->hydrogen_value()->representation().IsSmi()) {
@@ -1487,7 +1487,7 @@
} else {
__ orl(kScratchRegister, ToOperand(right));
}
- DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(sign, instr, "minus zero");
} else {
// Test the non-zero operand for negative sign.
if (instr->hydrogen_value()->representation().IsSmi()) {
@@ -1495,7 +1495,7 @@
} else {
__ orl(kScratchRegister, ToRegister(right));
}
- DeoptimizeIf(sign, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(sign, instr, "minus zero");
}
__ bind(&done);
}
@@ -1608,7 +1608,7 @@
__ shrl_cl(ToRegister(left));
if (instr->can_deopt()) {
__ testl(ToRegister(left), ToRegister(left));
- DeoptimizeIf(negative, instr, Deoptimizer::kNegativeValue);
+ DeoptimizeIf(negative, instr, "negative value");
}
break;
case Token::SHL:
@@ -1637,7 +1637,7 @@
__ shrl(ToRegister(left), Immediate(shift_count));
} else if (instr->can_deopt()) {
__ testl(ToRegister(left), ToRegister(left));
- DeoptimizeIf(negative, instr, Deoptimizer::kNegativeValue);
+ DeoptimizeIf(negative, instr, "negative value");
}
break;
case Token::SHL:
@@ -1652,7 +1652,7 @@
__ shll(ToRegister(left), Immediate(shift_count - 1));
}
__ Integer32ToSmi(ToRegister(left), ToRegister(left));
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
} else {
__ shll(ToRegister(left), Immediate(shift_count));
}
@@ -1695,7 +1695,7 @@
}
if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
}
}
@@ -1748,9 +1748,9 @@
DCHECK(object.is(rax));
Condition cc = masm()->CheckSmi(object);
- DeoptimizeIf(cc, instr, Deoptimizer::kSmi);
+ DeoptimizeIf(cc, instr, "Smi");
__ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister);
- DeoptimizeIf(not_equal, instr, Deoptimizer::kNotADateObject);
+ DeoptimizeIf(not_equal, instr, "not a date object");
if (index->value() == 0) {
__ movp(result, FieldOperand(object, JSDate::kValueOffset));
@@ -1914,7 +1914,7 @@
}
}
if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
}
}
}
@@ -2181,7 +2181,7 @@
} else if (expected.NeedsMap()) {
// If we need a map later and have a Smi -> deopt.
__ testb(reg, Immediate(kSmiTagMask));
- DeoptimizeIf(zero, instr, Deoptimizer::kSmi);
+ DeoptimizeIf(zero, instr, "Smi");
}
const Register map = kScratchRegister;
@@ -2235,7 +2235,7 @@
if (!expected.IsGeneric()) {
// We've seen something for the first time -> deopt.
// This can only happen if we are not generic already.
- DeoptimizeIf(no_condition, instr, Deoptimizer::kUnexpectedObject);
+ DeoptimizeIf(no_condition, instr, "unexpected object");
}
}
}
@@ -2852,7 +2852,7 @@
__ LoadGlobalCell(result, instr->hydrogen()->cell().handle());
if (instr->hydrogen()->RequiresHoleCheck()) {
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(equal, instr, Deoptimizer::kHole);
+ DeoptimizeIf(equal, instr, "hole");
}
}
@@ -2905,7 +2905,7 @@
DCHECK(!value.is(cell));
__ Move(cell, cell_handle, RelocInfo::CELL);
__ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(equal, instr, Deoptimizer::kHole);
+ DeoptimizeIf(equal, instr, "hole");
// Store the value.
__ movp(Operand(cell, 0), value);
} else {
@@ -2924,7 +2924,7 @@
if (instr->hydrogen()->RequiresHoleCheck()) {
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
if (instr->hydrogen()->DeoptimizesOnHole()) {
- DeoptimizeIf(equal, instr, Deoptimizer::kHole);
+ DeoptimizeIf(equal, instr, "hole");
} else {
Label is_not_hole;
__ j(not_equal, &is_not_hole, Label::kNear);
@@ -2945,7 +2945,7 @@
if (instr->hydrogen()->RequiresHoleCheck()) {
__ CompareRoot(target, Heap::kTheHoleValueRootIndex);
if (instr->hydrogen()->DeoptimizesOnHole()) {
- DeoptimizeIf(equal, instr, Deoptimizer::kHole);
+ DeoptimizeIf(equal, instr, "hole");
} else {
__ j(not_equal, &skip_assignment);
}
@@ -3045,7 +3045,7 @@
// Check that the function has a prototype or an initial map.
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(equal, instr, Deoptimizer::kHole);
+ DeoptimizeIf(equal, instr, "hole");
// If the function does not have an initial map, we're done.
Label done;
@@ -3157,7 +3157,7 @@
__ movl(result, operand);
if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
__ testl(result, result);
- DeoptimizeIf(negative, instr, Deoptimizer::kNegativeValue);
+ DeoptimizeIf(negative, instr, "negative value");
}
break;
case EXTERNAL_FLOAT32_ELEMENTS:
@@ -3196,7 +3196,7 @@
FAST_DOUBLE_ELEMENTS,
instr->base_offset() + sizeof(kHoleNanLower32));
__ cmpl(hole_check_operand, Immediate(kHoleNanUpper32));
- DeoptimizeIf(equal, instr, Deoptimizer::kHole);
+ DeoptimizeIf(equal, instr, "hole");
}
Operand double_load_operand = BuildFastArrayOperand(
@@ -3253,10 +3253,10 @@
if (requires_hole_check) {
if (IsFastSmiElementsKind(hinstr->elements_kind())) {
Condition smi = __ CheckSmi(result);
- DeoptimizeIf(NegateCondition(smi), instr, Deoptimizer::kNotASmi);
+ DeoptimizeIf(NegateCondition(smi), instr, "not a Smi");
} else {
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(equal, instr, Deoptimizer::kHole);
+ DeoptimizeIf(equal, instr, "hole");
}
}
}
@@ -3403,9 +3403,9 @@
// The receiver should be a JS object.
Condition is_smi = __ CheckSmi(receiver);
- DeoptimizeIf(is_smi, instr, Deoptimizer::kSmi);
+ DeoptimizeIf(is_smi, instr, "Smi");
__ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
- DeoptimizeIf(below, instr, Deoptimizer::kNotAJavaScriptObject);
+ DeoptimizeIf(below, instr, "not a JavaScript object");
__ jmp(&receiver_ok, Label::kNear);
__ bind(&global_object);
@@ -3432,7 +3432,7 @@
// adaptor frame below it.
const uint32_t kArgumentsLimit = 1 * KB;
__ cmpp(length, Immediate(kArgumentsLimit));
- DeoptimizeIf(above, instr, Deoptimizer::kTooManyArguments);
+ DeoptimizeIf(above, instr, "too many arguments");
__ Push(receiver);
__ movp(receiver, length);
@@ -3646,7 +3646,7 @@
Register input_reg = ToRegister(instr->value());
__ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex);
- DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber);
+ DeoptimizeIf(not_equal, instr, "not a heap number");
Label slow, allocated, done;
Register tmp = input_reg.is(rax) ? rcx : rax;
@@ -3692,7 +3692,7 @@
Label is_positive;
__ j(not_sign, &is_positive, Label::kNear);
__ negl(input_reg); // Sets flags.
- DeoptimizeIf(negative, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(negative, instr, "overflow");
__ bind(&is_positive);
}
@@ -3703,7 +3703,7 @@
Label is_positive;
__ j(not_sign, &is_positive, Label::kNear);
__ negp(input_reg); // Sets flags.
- DeoptimizeIf(negative, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(negative, instr, "overflow");
__ bind(&is_positive);
}
@@ -3759,18 +3759,18 @@
// Deoptimize if minus zero.
__ movq(output_reg, input_reg);
__ subq(output_reg, Immediate(1));
- DeoptimizeIf(overflow, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(overflow, instr, "minus zero");
}
__ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
__ cvttsd2si(output_reg, xmm_scratch);
__ cmpl(output_reg, Immediate(0x1));
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
} else {
Label negative_sign, done;
// Deoptimize on unordered.
__ xorps(xmm_scratch, xmm_scratch); // Zero the register.
__ ucomisd(input_reg, xmm_scratch);
- DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN);
+ DeoptimizeIf(parity_even, instr, "NaN");
__ j(below, &negative_sign, Label::kNear);
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
@@ -3779,7 +3779,7 @@
__ j(above, &positive_sign, Label::kNear);
__ movmskpd(output_reg, input_reg);
__ testq(output_reg, Immediate(1));
- DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(not_zero, instr, "minus zero");
__ Set(output_reg, 0);
__ jmp(&done);
__ bind(&positive_sign);
@@ -3789,7 +3789,7 @@
__ cvttsd2si(output_reg, input_reg);
// Overflow is signalled with minint.
__ cmpl(output_reg, Immediate(0x1));
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
__ jmp(&done, Label::kNear);
// Non-zero negative reaches here.
@@ -3800,7 +3800,7 @@
__ ucomisd(input_reg, xmm_scratch);
__ j(equal, &done, Label::kNear);
__ subl(output_reg, Immediate(1));
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
__ bind(&done);
}
@@ -3827,7 +3827,7 @@
__ cvttsd2si(output_reg, xmm_scratch);
// Overflow is signalled with minint.
__ cmpl(output_reg, Immediate(0x1));
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
__ jmp(&done, dist);
__ bind(&below_one_half);
@@ -3843,7 +3843,7 @@
__ cvttsd2si(output_reg, input_temp);
// Catch minint due to overflow, and to prevent overflow when compensating.
__ cmpl(output_reg, Immediate(0x1));
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
__ Cvtlsi2sd(xmm_scratch, output_reg);
__ ucomisd(xmm_scratch, input_temp);
@@ -3858,7 +3858,7 @@
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
__ movq(output_reg, input_reg);
__ testq(output_reg, output_reg);
- DeoptimizeIf(negative, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(negative, instr, "minus zero");
}
__ Set(output_reg, 0);
__ bind(&done);
@@ -3937,7 +3937,7 @@
Label no_deopt;
__ JumpIfSmi(tagged_exponent, &no_deopt, Label::kNear);
__ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, rcx);
- DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber);
+ DeoptimizeIf(not_equal, instr, "not a heap number");
__ bind(&no_deopt);
MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
@@ -4331,7 +4331,7 @@
__ int3();
__ bind(&done);
} else {
- DeoptimizeIf(cc, instr, Deoptimizer::kOutOfBounds);
+ DeoptimizeIf(cc, instr, "out of bounds");
}
}
@@ -4572,7 +4572,7 @@
Register temp = ToRegister(instr->temp());
Label no_memento_found;
__ TestJSArrayForAllocationMemento(object, temp, &no_memento_found);
- DeoptimizeIf(equal, instr, Deoptimizer::kMementoFound);
+ DeoptimizeIf(equal, instr, "memento found");
__ bind(&no_memento_found);
}
@@ -4892,12 +4892,12 @@
if (hchange->CheckFlag(HValue::kCanOverflow) &&
hchange->value()->CheckFlag(HValue::kUint32)) {
Condition is_smi = __ CheckUInteger32ValidSmiValue(input);
- DeoptimizeIf(NegateCondition(is_smi), instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(NegateCondition(is_smi), instr, "overflow");
}
__ Integer32ToSmi(output, input);
if (hchange->CheckFlag(HValue::kCanOverflow) &&
!hchange->value()->CheckFlag(HValue::kUint32)) {
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
}
}
@@ -4907,7 +4907,7 @@
Register input = ToRegister(instr->value());
if (instr->needs_check()) {
Condition is_smi = __ CheckSmi(input);
- DeoptimizeIf(NegateCondition(is_smi), instr, Deoptimizer::kNotASmi);
+ DeoptimizeIf(NegateCondition(is_smi), instr, "not a Smi");
} else {
__ AssertSmi(input);
}
@@ -4938,7 +4938,7 @@
if (can_convert_undefined_to_nan) {
__ j(not_equal, &convert, Label::kNear);
} else {
- DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber);
+ DeoptimizeIf(not_equal, instr, "not a heap number");
}
if (deoptimize_on_minus_zero) {
@@ -4948,7 +4948,7 @@
__ j(not_equal, &done, Label::kNear);
__ movmskpd(kScratchRegister, result_reg);
__ testq(kScratchRegister, Immediate(1));
- DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(not_zero, instr, "minus zero");
}
__ jmp(&done, Label::kNear);
@@ -4957,7 +4957,7 @@
// Convert undefined (and hole) to NaN. Compute NaN as 0/0.
__ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
- DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined);
+ DeoptimizeIf(not_equal, instr, "not a heap number/undefined");
__ pcmpeqd(result_reg, result_reg);
__ jmp(&done, Label::kNear);
@@ -5003,27 +5003,26 @@
__ bind(&check_false);
__ CompareRoot(input_reg, Heap::kFalseValueRootIndex);
- DeoptimizeIf(not_equal, instr,
- Deoptimizer::kNotAHeapNumberUndefinedBoolean);
+ DeoptimizeIf(not_equal, instr, "not a heap number/undefined/true/false");
__ Set(input_reg, 0);
} else {
XMMRegister scratch = ToDoubleRegister(instr->temp());
DCHECK(!scratch.is(xmm0));
__ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex);
- DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber);
+ DeoptimizeIf(not_equal, instr, "not a heap number");
__ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
__ cvttsd2si(input_reg, xmm0);
__ Cvtlsi2sd(scratch, input_reg);
__ ucomisd(xmm0, scratch);
- DeoptimizeIf(not_equal, instr, Deoptimizer::kLostPrecision);
- DeoptimizeIf(parity_even, instr, Deoptimizer::kNaN);
+ DeoptimizeIf(not_equal, instr, "lost precision");
+ DeoptimizeIf(parity_even, instr, "NaN");
if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) {
__ testl(input_reg, input_reg);
__ j(not_zero, done);
__ movmskpd(input_reg, xmm0);
__ andl(input_reg, Immediate(1));
- DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(not_zero, instr, "minus zero");
}
}
}
@@ -5094,11 +5093,11 @@
&is_nan, &minus_zero, dist);
__ jmp(&done, dist);
__ bind(&lost_precision);
- DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision);
+ DeoptimizeIf(no_condition, instr, "lost precision");
__ bind(&is_nan);
- DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN);
+ DeoptimizeIf(no_condition, instr, "NaN");
__ bind(&minus_zero);
- DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(no_condition, instr, "minus zero");
__ bind(&done);
}
}
@@ -5121,21 +5120,21 @@
&minus_zero, dist);
__ jmp(&done, dist);
__ bind(&lost_precision);
- DeoptimizeIf(no_condition, instr, Deoptimizer::kLostPrecision);
+ DeoptimizeIf(no_condition, instr, "lost precision");
__ bind(&is_nan);
- DeoptimizeIf(no_condition, instr, Deoptimizer::kNaN);
+ DeoptimizeIf(no_condition, instr, "NaN");
__ bind(&minus_zero);
- DeoptimizeIf(no_condition, instr, Deoptimizer::kMinusZero);
+ DeoptimizeIf(no_condition, instr, "minus zero");
__ bind(&done);
__ Integer32ToSmi(result_reg, result_reg);
- DeoptimizeIf(overflow, instr, Deoptimizer::kOverflow);
+ DeoptimizeIf(overflow, instr, "overflow");
}
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
LOperand* input = instr->value();
Condition cc = masm()->CheckSmi(ToRegister(input));
- DeoptimizeIf(NegateCondition(cc), instr, Deoptimizer::kNotASmi);
+ DeoptimizeIf(NegateCondition(cc), instr, "not a Smi");
}
@@ -5143,7 +5142,7 @@
if (!instr->hydrogen()->value()->type().IsHeapObject()) {
LOperand* input = instr->value();
Condition cc = masm()->CheckSmi(ToRegister(input));
- DeoptimizeIf(cc, instr, Deoptimizer::kSmi);
+ DeoptimizeIf(cc, instr, "Smi");
}
}
@@ -5163,14 +5162,14 @@
// If there is only one type in the interval check for equality.
if (first == last) {
- DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType);
+ DeoptimizeIf(not_equal, instr, "wrong instance type");
} else {
- DeoptimizeIf(below, instr, Deoptimizer::kWrongInstanceType);
+ DeoptimizeIf(below, instr, "wrong instance type");
// Omit check for the last type.
if (last != LAST_TYPE) {
__ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
Immediate(static_cast<int8_t>(last)));
- DeoptimizeIf(above, instr, Deoptimizer::kWrongInstanceType);
+ DeoptimizeIf(above, instr, "wrong instance type");
}
}
} else {
@@ -5182,14 +5181,13 @@
DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag));
__ testb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
Immediate(mask));
- DeoptimizeIf(tag == 0 ? not_zero : zero, instr,
- Deoptimizer::kWrongInstanceType);
+ DeoptimizeIf(tag == 0 ? not_zero : zero, instr, "wrong instance type");
} else {
__ movzxbl(kScratchRegister,
FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
__ andb(kScratchRegister, Immediate(mask));
__ cmpb(kScratchRegister, Immediate(tag));
- DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType);
+ DeoptimizeIf(not_equal, instr, "wrong instance type");
}
}
}
@@ -5198,7 +5196,7 @@
void LCodeGen::DoCheckValue(LCheckValue* instr) {
Register reg = ToRegister(instr->value());
__ Cmp(reg, instr->hydrogen()->object().handle());
- DeoptimizeIf(not_equal, instr, Deoptimizer::kValueMismatch);
+ DeoptimizeIf(not_equal, instr, "value mismatch");
}
@@ -5213,7 +5211,7 @@
__ testp(rax, Immediate(kSmiTagMask));
}
- DeoptimizeIf(zero, instr, Deoptimizer::kInstanceMigrationFailed);
+ DeoptimizeIf(zero, instr, "instance migration failed");
}
@@ -5267,7 +5265,7 @@
if (instr->hydrogen()->HasMigrationTarget()) {
__ j(not_equal, deferred->entry());
} else {
- DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap);
+ DeoptimizeIf(not_equal, instr, "wrong map");
}
__ bind(&success);
@@ -5306,7 +5304,7 @@
// Check for undefined. Undefined is converted to zero for clamping
// conversions.
__ Cmp(input_reg, factory()->undefined_value());
- DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined);
+ DeoptimizeIf(not_equal, instr, "not a heap number/undefined");
__ xorl(input_reg, input_reg);
__ jmp(&done, Label::kNear);
@@ -5785,19 +5783,19 @@
void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
DCHECK(ToRegister(instr->context()).is(rsi));
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
- DeoptimizeIf(equal, instr, Deoptimizer::kUndefined);
+ DeoptimizeIf(equal, instr, "undefined");
Register null_value = rdi;
__ LoadRoot(null_value, Heap::kNullValueRootIndex);
__ cmpp(rax, null_value);
- DeoptimizeIf(equal, instr, Deoptimizer::kNull);
+ DeoptimizeIf(equal, instr, "null");
Condition cc = masm()->CheckSmi(rax);
- DeoptimizeIf(cc, instr, Deoptimizer::kSmi);
+ DeoptimizeIf(cc, instr, "Smi");
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
__ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
- DeoptimizeIf(below_equal, instr, Deoptimizer::kWrongInstanceType);
+ DeoptimizeIf(below_equal, instr, "wrong instance type");
Label use_cache, call_runtime;
__ CheckEnumCache(null_value, &call_runtime);
@@ -5812,7 +5810,7 @@
__ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
Heap::kMetaMapRootIndex);
- DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap);
+ DeoptimizeIf(not_equal, instr, "wrong map");
__ bind(&use_cache);
}
@@ -5834,7 +5832,7 @@
FieldOperand(result, FixedArray::SizeFor(instr->idx())));
__ bind(&done);
Condition cc = masm()->CheckSmi(result);
- DeoptimizeIf(cc, instr, Deoptimizer::kNoCache);
+ DeoptimizeIf(cc, instr, "no cache");
}
@@ -5842,7 +5840,7 @@
Register object = ToRegister(instr->value());
__ cmpp(ToRegister(instr->map()),
FieldOperand(object, HeapObject::kMapOffset));
- DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongMap);
+ DeoptimizeIf(not_equal, instr, "wrong map");
}
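
For context, the net effect of this revert on the x64 code generator is that DeoptimizeIf goes back to taking a free-form const char* detail string instead of a Deoptimizer::DeoptReason enum value, and every call site passes a literal such as "minus zero" or "overflow". A minimal sketch of the two overloads as they read once this patch lands (condensed from the hunks above; the surrounding V8 declarations are assumed from context, this is not part of the patch itself):

// Sketch only: DeoptimizeIf after the revert, per the hunks above.
void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
                            const char* detail,
                            Deoptimizer::BailoutType bailout_type);

void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
                            const char* detail) {
  // Stubs deoptimize lazily, optimized code eagerly (as in the patch).
  Deoptimizer::BailoutType bailout_type =
      info()->IsStub() ? Deoptimizer::LAZY : Deoptimizer::EAGER;
  DeoptimizeIf(cc, instr, detail, bailout_type);
}

// A typical call site then passes a literal reason string, e.g.:
//   DeoptimizeIf(zero, instr, "division by zero");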