Chromium Code Reviews

Side by Side Diff: src/mips/lithium-codegen-mips.cc

Issue 131363008: A64: Synchronize with r15922. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 389 matching lines...)
400 if (op->IsRegister()) { 400 if (op->IsRegister()) {
401 return ToRegister(op->index()); 401 return ToRegister(op->index());
402 } else if (op->IsConstantOperand()) { 402 } else if (op->IsConstantOperand()) {
403 LConstantOperand* const_op = LConstantOperand::cast(op); 403 LConstantOperand* const_op = LConstantOperand::cast(op);
404 HConstant* constant = chunk_->LookupConstant(const_op); 404 HConstant* constant = chunk_->LookupConstant(const_op);
405 Handle<Object> literal = constant->handle(); 405 Handle<Object> literal = constant->handle();
406 Representation r = chunk_->LookupLiteralRepresentation(const_op); 406 Representation r = chunk_->LookupLiteralRepresentation(const_op);
407 if (r.IsInteger32()) { 407 if (r.IsInteger32()) {
408 ASSERT(literal->IsNumber()); 408 ASSERT(literal->IsNumber());
409 __ li(scratch, Operand(static_cast<int32_t>(literal->Number()))); 409 __ li(scratch, Operand(static_cast<int32_t>(literal->Number())));
410 } else if (r.IsSmi()) {
411 ASSERT(constant->HasSmiValue());
412 __ li(scratch, Operand(Smi::FromInt(constant->Integer32Value())));
410 } else if (r.IsDouble()) { 413 } else if (r.IsDouble()) {
411 Abort("EmitLoadRegister: Unsupported double immediate."); 414 Abort("EmitLoadRegister: Unsupported double immediate.");
412 } else { 415 } else {
413 ASSERT(r.IsTagged()); 416 ASSERT(r.IsTagged());
414 if (literal->IsSmi()) { 417 __ LoadObject(scratch, literal);
415 __ li(scratch, Operand(literal));
416 } else {
417 __ LoadHeapObject(scratch, Handle<HeapObject>::cast(literal));
418 }
419 } 418 }
420 return scratch; 419 return scratch;
421 } else if (op->IsStackSlot() || op->IsArgument()) { 420 } else if (op->IsStackSlot() || op->IsArgument()) {
422 __ lw(scratch, ToMemOperand(op)); 421 __ lw(scratch, ToMemOperand(op));
423 return scratch; 422 return scratch;
424 } 423 }
425 UNREACHABLE(); 424 UNREACHABLE();
426 return scratch; 425 return scratch;
427 } 426 }
428 427
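
Note on the new r.IsSmi() branch in EmitLoadRegister above: Smi::FromInt() yields a tagged small integer, which on 32-bit targets such as MIPS is the payload shifted left by one with a zero tag bit, so it can be materialized with a plain li just like an int32 immediate. The standalone sketch below models that encoding; SmiFromIntModel/SmiToIntModel are illustrative names, not V8 helpers, and the layout assumed is the usual 32-bit one (31-bit payload, low tag bit 0).

    #include <cassert>
    #include <cstdint>

    // Illustrative model of the 32-bit Smi encoding assumed above: the
    // payload occupies the upper 31 bits and the low tag bit is 0.
    static inline int32_t SmiFromIntModel(int32_t value) {
      // Shift through uint32_t to avoid undefined behaviour on negatives.
      return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
    }

    static inline int32_t SmiToIntModel(int32_t smi) {
      return smi >> 1;  // arithmetic shift drops the tag bit
    }

    int main() {
      assert(SmiFromIntModel(3) == 6);   // a tagged Smi is the value doubled
      assert(SmiToIntModel(SmiFromIntModel(-7)) == -7);
      return 0;
    }

This is also why ToRepresentation() in the next hunk can reinterpret_cast Smi::FromInt(value) straight to an int32_t operand.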
(...skipping 45 matching lines...)
474 bool LCodeGen::IsInteger32(LConstantOperand* op) const { 473 bool LCodeGen::IsInteger32(LConstantOperand* op) const {
475 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32(); 474 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
476 } 475 }
477 476
478 477
479 bool LCodeGen::IsSmi(LConstantOperand* op) const { 478 bool LCodeGen::IsSmi(LConstantOperand* op) const {
480 return chunk_->LookupLiteralRepresentation(op).IsSmi(); 479 return chunk_->LookupLiteralRepresentation(op).IsSmi();
481 } 480 }
482 481
483 482
484 int LCodeGen::ToInteger32(LConstantOperand* op) const { 483 int32_t LCodeGen::ToInteger32(LConstantOperand* op) const {
485 HConstant* constant = chunk_->LookupConstant(op); 484 return ToRepresentation(op, Representation::Integer32());
486 return constant->Integer32Value();
487 } 485 }
488 486
489 487
488 int32_t LCodeGen::ToRepresentation(LConstantOperand* op,
489 const Representation& r) const {
490 HConstant* constant = chunk_->LookupConstant(op);
491 int32_t value = constant->Integer32Value();
492 if (r.IsInteger32()) return value;
493 ASSERT(r.IsSmiOrTagged());
494 return reinterpret_cast<int32_t>(Smi::FromInt(value));
495 }
496
497
490 Smi* LCodeGen::ToSmi(LConstantOperand* op) const { 498 Smi* LCodeGen::ToSmi(LConstantOperand* op) const {
491 HConstant* constant = chunk_->LookupConstant(op); 499 HConstant* constant = chunk_->LookupConstant(op);
492 return Smi::FromInt(constant->Integer32Value()); 500 return Smi::FromInt(constant->Integer32Value());
493 } 501 }
494 502
495 503
496 double LCodeGen::ToDouble(LConstantOperand* op) const { 504 double LCodeGen::ToDouble(LConstantOperand* op) const {
497 HConstant* constant = chunk_->LookupConstant(op); 505 HConstant* constant = chunk_->LookupConstant(op);
498 ASSERT(constant->HasDoubleValue()); 506 ASSERT(constant->HasDoubleValue());
499 return constant->DoubleValue(); 507 return constant->DoubleValue();
500 } 508 }
501 509
502 510
503 Operand LCodeGen::ToOperand(LOperand* op) { 511 Operand LCodeGen::ToOperand(LOperand* op) {
504 if (op->IsConstantOperand()) { 512 if (op->IsConstantOperand()) {
505 LConstantOperand* const_op = LConstantOperand::cast(op); 513 LConstantOperand* const_op = LConstantOperand::cast(op);
506 HConstant* constant = chunk()->LookupConstant(const_op); 514 HConstant* constant = chunk()->LookupConstant(const_op);
507 Representation r = chunk_->LookupLiteralRepresentation(const_op); 515 Representation r = chunk_->LookupLiteralRepresentation(const_op);
508 if (r.IsInteger32()) { 516 if (r.IsSmi()) {
517 ASSERT(constant->HasSmiValue());
518 return Operand(Smi::FromInt(constant->Integer32Value()));
519 } else if (r.IsInteger32()) {
509 ASSERT(constant->HasInteger32Value()); 520 ASSERT(constant->HasInteger32Value());
510 return Operand(constant->Integer32Value()); 521 return Operand(constant->Integer32Value());
511 } else if (r.IsDouble()) { 522 } else if (r.IsDouble()) {
512 Abort("ToOperand Unsupported double immediate."); 523 Abort("ToOperand Unsupported double immediate.");
513 } 524 }
514 ASSERT(r.IsTagged()); 525 ASSERT(r.IsTagged());
515 return Operand(constant->handle()); 526 return Operand(constant->handle());
516 } else if (op->IsRegister()) { 527 } else if (op->IsRegister()) {
517 return Operand(ToRegister(op)); 528 return Operand(ToRegister(op));
518 } else if (op->IsDoubleRegister()) { 529 } else if (op->IsDoubleRegister()) {
(...skipping 264 matching lines...)
783 LEnvironment* environment, 794 LEnvironment* environment,
784 Register src1, 795 Register src1,
785 const Operand& src2) { 796 const Operand& src2) {
786 Deoptimizer::BailoutType bailout_type = info()->IsStub() 797 Deoptimizer::BailoutType bailout_type = info()->IsStub()
787 ? Deoptimizer::LAZY 798 ? Deoptimizer::LAZY
788 : Deoptimizer::EAGER; 799 : Deoptimizer::EAGER;
789 DeoptimizeIf(cc, environment, bailout_type, src1, src2); 800 DeoptimizeIf(cc, environment, bailout_type, src1, src2);
790 } 801 }
791 802
792 803
793 void LCodeGen::SoftDeoptimize(LEnvironment* environment,
794 Register src1,
795 const Operand& src2) {
796 ASSERT(!info()->IsStub());
797 DeoptimizeIf(al, environment, Deoptimizer::SOFT, src1, src2);
798 }
799
800
801 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { 804 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
802 ZoneList<Handle<Map> > maps(1, zone()); 805 ZoneList<Handle<Map> > maps(1, zone());
803 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); 806 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
804 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { 807 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
805 RelocInfo::Mode mode = it.rinfo()->rmode(); 808 RelocInfo::Mode mode = it.rinfo()->rmode();
806 if (mode == RelocInfo::EMBEDDED_OBJECT && 809 if (mode == RelocInfo::EMBEDDED_OBJECT &&
807 it.rinfo()->target_object()->IsMap()) { 810 it.rinfo()->target_object()->IsMap()) {
808 Handle<Map> map(Map::cast(it.rinfo()->target_object())); 811 Handle<Map> map(Map::cast(it.rinfo()->target_object()));
809 if (map->CanTransition()) { 812 if (map->CanTransition()) {
810 maps.Add(map, zone()); 813 maps.Add(map, zone());
(...skipping 561 matching lines...)
1372 // Note that result may alias left. 1375 // Note that result may alias left.
1373 Register left = ToRegister(instr->left()); 1376 Register left = ToRegister(instr->left());
1374 LOperand* right_op = instr->right(); 1377 LOperand* right_op = instr->right();
1375 1378
1376 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); 1379 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1377 bool bailout_on_minus_zero = 1380 bool bailout_on_minus_zero =
1378 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero); 1381 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero);
1379 1382
1380 if (right_op->IsConstantOperand() && !can_overflow) { 1383 if (right_op->IsConstantOperand() && !can_overflow) {
1381 // Use optimized code for specific constants. 1384 // Use optimized code for specific constants.
1382 int32_t constant = ToInteger32(LConstantOperand::cast(right_op)); 1385 int32_t constant = ToRepresentation(
1386 LConstantOperand::cast(right_op),
1387 instr->hydrogen()->right()->representation());
1383 1388
1384 if (bailout_on_minus_zero && (constant < 0)) { 1389 if (bailout_on_minus_zero && (constant < 0)) {
1385 // The case of a null constant will be handled separately. 1390 // The case of a null constant will be handled separately.
1386 // If constant is negative and left is null, the result should be -0. 1391 // If constant is negative and left is null, the result should be -0.
1387 DeoptimizeIf(eq, instr->environment(), left, Operand(zero_reg)); 1392 DeoptimizeIf(eq, instr->environment(), left, Operand(zero_reg));
1388 } 1393 }
1389 1394
1390 switch (constant) { 1395 switch (constant) {
1391 case -1: 1396 case -1:
1392 __ Subu(result, zero_reg, left); 1397 __ Subu(result, zero_reg, left);
(...skipping 46 matching lines...)
1439 } 1444 }
1440 1445
1441 } else { 1446 } else {
1442 Register right = EmitLoadRegister(right_op, scratch); 1447 Register right = EmitLoadRegister(right_op, scratch);
1443 if (bailout_on_minus_zero) { 1448 if (bailout_on_minus_zero) {
1444 __ Or(ToRegister(instr->temp()), left, right); 1449 __ Or(ToRegister(instr->temp()), left, right);
1445 } 1450 }
1446 1451
1447 if (can_overflow) { 1452 if (can_overflow) {
1448 // hi:lo = left * right. 1453 // hi:lo = left * right.
1449 __ mult(left, right); 1454 if (instr->hydrogen()->representation().IsSmi()) {
1450 __ mfhi(scratch); 1455 __ SmiUntag(result, left);
1451 __ mflo(result); 1456 __ mult(result, right);
1457 __ mfhi(scratch);
1458 __ mflo(result);
1459 } else {
1460 __ mult(left, right);
1461 __ mfhi(scratch);
1462 __ mflo(result);
1463 }
1452 __ sra(at, result, 31); 1464 __ sra(at, result, 31);
1453 DeoptimizeIf(ne, instr->environment(), scratch, Operand(at)); 1465 DeoptimizeIf(ne, instr->environment(), scratch, Operand(at));
1454 } else { 1466 } else {
1455 __ Mul(result, left, right); 1467 if (instr->hydrogen()->representation().IsSmi()) {
1468 __ SmiUntag(result, left);
1469 __ Mul(result, result, right);
1470 } else {
1471 __ Mul(result, left, right);
1472 }
1456 } 1473 }
1457 1474
1458 if (bailout_on_minus_zero) { 1475 if (bailout_on_minus_zero) {
1459 // Bail out if the result is supposed to be negative zero. 1476 // Bail out if the result is supposed to be negative zero.
1460 Label done; 1477 Label done;
1461 __ Branch(&done, ne, result, Operand(zero_reg)); 1478 __ Branch(&done, ne, result, Operand(zero_reg));
1462 DeoptimizeIf(lt, 1479 DeoptimizeIf(lt,
1463 instr->environment(), 1480 instr->environment(),
1464 ToRegister(instr->temp()), 1481 ToRegister(instr->temp()),
1465 Operand(zero_reg)); 1482 Operand(zero_reg));
(...skipping 34 matching lines...)
1500 } 1517 }
1501 } 1518 }
1502 1519
1503 1520
1504 void LCodeGen::DoShiftI(LShiftI* instr) { 1521 void LCodeGen::DoShiftI(LShiftI* instr) {
1505 // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so 1522 // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so
1506 // result may alias either of them. 1523 // result may alias either of them.
1507 LOperand* right_op = instr->right(); 1524 LOperand* right_op = instr->right();
1508 Register left = ToRegister(instr->left()); 1525 Register left = ToRegister(instr->left());
1509 Register result = ToRegister(instr->result()); 1526 Register result = ToRegister(instr->result());
1527 Register scratch = scratch0();
1510 1528
1511 if (right_op->IsRegister()) { 1529 if (right_op->IsRegister()) {
1512 // No need to mask the right operand on MIPS, it is built into the variable 1530 // No need to mask the right operand on MIPS, it is built into the variable
1513 // shift instructions. 1531 // shift instructions.
1514 switch (instr->op()) { 1532 switch (instr->op()) {
1515 case Token::ROR: 1533 case Token::ROR:
1516 __ Ror(result, left, Operand(ToRegister(right_op))); 1534 __ Ror(result, left, Operand(ToRegister(right_op)));
1517 break; 1535 break;
1518 case Token::SAR: 1536 case Token::SAR:
1519 __ srav(result, left, ToRegister(right_op)); 1537 __ srav(result, left, ToRegister(right_op));
(...skipping 36 matching lines...)
1556 } else { 1574 } else {
1557 if (instr->can_deopt()) { 1575 if (instr->can_deopt()) {
1558 __ And(at, left, Operand(0x80000000)); 1576 __ And(at, left, Operand(0x80000000));
1559 DeoptimizeIf(ne, instr->environment(), at, Operand(zero_reg)); 1577 DeoptimizeIf(ne, instr->environment(), at, Operand(zero_reg));
1560 } 1578 }
1561 __ Move(result, left); 1579 __ Move(result, left);
1562 } 1580 }
1563 break; 1581 break;
1564 case Token::SHL: 1582 case Token::SHL:
1565 if (shift_count != 0) { 1583 if (shift_count != 0) {
1566 __ sll(result, left, shift_count); 1584 if (instr->hydrogen_value()->representation().IsSmi() &&
1585 instr->can_deopt()) {
1586 __ sll(result, left, shift_count - 1);
1587 __ SmiTagCheckOverflow(result, result, scratch);
1588 DeoptimizeIf(lt, instr->environment(), scratch, Operand(zero_reg));
1589 } else {
1590 __ sll(result, left, shift_count);
1591 }
1567 } else { 1592 } else {
1568 __ Move(result, left); 1593 __ Move(result, left);
1569 } 1594 }
1570 break; 1595 break;
1571 default: 1596 default:
1572 UNREACHABLE(); 1597 UNREACHABLE();
1573 break; 1598 break;
1574 } 1599 }
1575 } 1600 }
1576 } 1601 }
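
A note on the DoMulI hunks above: untagging one operand before the multiply keeps a Smi result correctly tagged, since (2a) * b = 2(ab), and the mult/mfhi/mflo sequence detects 32-bit overflow by checking that the high word of the 64-bit product equals the sign extension of the low word (the sra result, 31 / DeoptimizeIf(ne, ...) pair). A portable sketch of that check, using an illustrative helper name rather than anything from the V8 tree:

    #include <cstdint>

    // True when left * right does not fit in 32 bits; *low receives the
    // truncated product (the mflo value).
    static inline bool MulOverflows32(int32_t left, int32_t right, int32_t* low) {
      int64_t product = static_cast<int64_t>(left) * static_cast<int64_t>(right);
      int32_t lo = static_cast<int32_t>(product);        // mflo
      int32_t hi = static_cast<int32_t>(product >> 32);  // mfhi
      *low = lo;
      // Sign extension of lo, i.e. what `sra result, 31` leaves in `at`.
      int32_t sign = lo < 0 ? -1 : 0;
      return hi != sign;
    }

The new SHL path in DoShiftI works along the same lines: it shifts by shift_count - 1 and lets SmiTagCheckOverflow perform the final doubling, so the tagging step doubles as the overflow check.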
(...skipping 52 matching lines...)
1629 ASSERT(instr->result()->IsDoubleRegister()); 1654 ASSERT(instr->result()->IsDoubleRegister());
1630 DoubleRegister result = ToDoubleRegister(instr->result()); 1655 DoubleRegister result = ToDoubleRegister(instr->result());
1631 double v = instr->value(); 1656 double v = instr->value();
1632 __ Move(result, v); 1657 __ Move(result, v);
1633 } 1658 }
1634 1659
1635 1660
1636 void LCodeGen::DoConstantT(LConstantT* instr) { 1661 void LCodeGen::DoConstantT(LConstantT* instr) {
1637 Handle<Object> value = instr->value(); 1662 Handle<Object> value = instr->value();
1638 AllowDeferredHandleDereference smi_check; 1663 AllowDeferredHandleDereference smi_check;
1639 if (value->IsSmi()) { 1664 __ LoadObject(ToRegister(instr->result()), value);
1640 __ li(ToRegister(instr->result()), Operand(value));
1641 } else {
1642 __ LoadHeapObject(ToRegister(instr->result()),
1643 Handle<HeapObject>::cast(value));
1644 }
1645 } 1665 }
1646 1666
1647 1667
1648 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { 1668 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
1649 Register result = ToRegister(instr->result()); 1669 Register result = ToRegister(instr->result());
1650 Register map = ToRegister(instr->value()); 1670 Register map = ToRegister(instr->value());
1651 __ EnumLength(result, map); 1671 __ EnumLength(result, map);
1652 } 1672 }
1653 1673
1654 1674
(...skipping 158 matching lines...)
1813 DeoptimizeIf(lt, instr->environment(), overflow, Operand(zero_reg)); 1833 DeoptimizeIf(lt, instr->environment(), overflow, Operand(zero_reg));
1814 } 1834 }
1815 } 1835 }
1816 1836
1817 1837
1818 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { 1838 void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
1819 LOperand* left = instr->left(); 1839 LOperand* left = instr->left();
1820 LOperand* right = instr->right(); 1840 LOperand* right = instr->right();
1821 HMathMinMax::Operation operation = instr->hydrogen()->operation(); 1841 HMathMinMax::Operation operation = instr->hydrogen()->operation();
1822 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; 1842 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge;
1823 if (instr->hydrogen()->representation().IsInteger32()) { 1843 if (instr->hydrogen()->representation().IsSmiOrInteger32()) {
1824 Register left_reg = ToRegister(left); 1844 Register left_reg = ToRegister(left);
1825 Operand right_op = (right->IsRegister() || right->IsConstantOperand()) 1845 Operand right_op = (right->IsRegister() || right->IsConstantOperand())
1826 ? ToOperand(right) 1846 ? ToOperand(right)
1827 : Operand(EmitLoadRegister(right, at)); 1847 : Operand(EmitLoadRegister(right, at));
1828 Register result_reg = ToRegister(instr->result()); 1848 Register result_reg = ToRegister(instr->result());
1829 Label return_right, done; 1849 Label return_right, done;
1830 if (!result_reg.is(left_reg)) { 1850 if (!result_reg.is(left_reg)) {
1831 __ Branch(&return_right, NegateCondition(condition), left_reg, right_op); 1851 __ Branch(&return_right, NegateCondition(condition), left_reg, right_op);
1832 __ mov(result_reg, left_reg); 1852 __ mov(result_reg, left_reg);
1833 __ Branch(&done); 1853 __ Branch(&done);
(...skipping 399 matching lines...)
2233 2253
2234 2254
2235 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) { 2255 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
2236 Register left = ToRegister(instr->left()); 2256 Register left = ToRegister(instr->left());
2237 Register right = ToRegister(instr->right()); 2257 Register right = ToRegister(instr->right());
2238 2258
2239 EmitBranch(instr, eq, left, Operand(right)); 2259 EmitBranch(instr, eq, left, Operand(right));
2240 } 2260 }
2241 2261
2242 2262
2243 void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
2244 Register left = ToRegister(instr->left());
2245
2246 EmitBranch(instr, eq, left, Operand(instr->hydrogen()->right()));
2247 }
2248
2249
2250 Condition LCodeGen::EmitIsObject(Register input, 2263 Condition LCodeGen::EmitIsObject(Register input,
2251 Register temp1, 2264 Register temp1,
2252 Register temp2, 2265 Register temp2,
2253 Label* is_not_object, 2266 Label* is_not_object,
2254 Label* is_object) { 2267 Label* is_object) {
2255 __ JumpIfSmi(input, is_not_object); 2268 __ JumpIfSmi(input, is_not_object);
2256 2269
2257 __ LoadRoot(temp2, Heap::kNullValueRootIndex); 2270 __ LoadRoot(temp2, Heap::kNullValueRootIndex);
2258 __ Branch(is_object, eq, input, Operand(temp2)); 2271 __ Branch(is_object, eq, input, Operand(temp2));
2259 2272
(...skipping 634 matching lines...)
2894 int offset = index * kPointerSize; 2907 int offset = index * kPointerSize;
2895 if (index < 0) { 2908 if (index < 0) {
2896 // Negative property indices are in-object properties, indexed 2909 // Negative property indices are in-object properties, indexed
2897 // from the end of the fixed part of the object. 2910 // from the end of the fixed part of the object.
2898 __ lw(result, FieldMemOperand(object, offset + type->instance_size())); 2911 __ lw(result, FieldMemOperand(object, offset + type->instance_size()));
2899 } else { 2912 } else {
2900 // Non-negative property indices are in the properties array. 2913 // Non-negative property indices are in the properties array.
2901 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); 2914 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
2902 __ lw(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize)); 2915 __ lw(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
2903 } 2916 }
2904 } else if (lookup.IsConstantFunction()) { 2917 } else if (lookup.IsConstant()) {
2905 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type)); 2918 Handle<Object> constant(lookup.GetConstantFromMap(*type), isolate());
2906 __ LoadHeapObject(result, function); 2919 __ LoadObject(result, constant);
2907 } else { 2920 } else {
2908 // Negative lookup. 2921 // Negative lookup.
2909 // Check prototypes. 2922 // Check prototypes.
2910 Handle<HeapObject> current(HeapObject::cast((*type)->prototype())); 2923 Handle<HeapObject> current(HeapObject::cast((*type)->prototype()));
2911 Heap* heap = type->GetHeap(); 2924 Heap* heap = type->GetHeap();
2912 while (*current != heap->null_value()) { 2925 while (*current != heap->null_value()) {
2913 __ LoadHeapObject(result, current); 2926 __ LoadHeapObject(result, current);
2914 __ lw(result, FieldMemOperand(result, HeapObject::kMapOffset)); 2927 __ lw(result, FieldMemOperand(result, HeapObject::kMapOffset));
2915 DeoptimizeIf(ne, env, result, Operand(Handle<Map>(current->map()))); 2928 DeoptimizeIf(ne, env, result, Operand(Handle<Map>(current->map())));
2916 current = 2929 current =
(...skipping 1263 matching lines...)
4180 4193
4181 // Name is always in a2. 4194 // Name is always in a2.
4182 __ li(a2, Operand(instr->name())); 4195 __ li(a2, Operand(instr->name()));
4183 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) 4196 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
4184 ? isolate()->builtins()->StoreIC_Initialize_Strict() 4197 ? isolate()->builtins()->StoreIC_Initialize_Strict()
4185 : isolate()->builtins()->StoreIC_Initialize(); 4198 : isolate()->builtins()->StoreIC_Initialize();
4186 CallCode(ic, RelocInfo::CODE_TARGET, instr); 4199 CallCode(ic, RelocInfo::CODE_TARGET, instr);
4187 } 4200 }
4188 4201
4189 4202
4203 void LCodeGen::ApplyCheckIf(Condition cc,
4204 LBoundsCheck* check,
4205 Register src1,
4206 const Operand& src2) {
4207 if (FLAG_debug_code && check->hydrogen()->skip_check()) {
4208 Label done;
4209 __ Branch(&done, NegateCondition(cc), src1, src2);
4210 __ stop("eliminated bounds check failed");
4211 __ bind(&done);
4212 } else {
4213 DeoptimizeIf(cc, check->environment(), src1, src2);
4214 }
4215 }
4216
4217
4190 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { 4218 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
4191 if (instr->hydrogen()->skip_check()) return; 4219 if (instr->hydrogen()->skip_check()) return;
4192 4220
4221 Condition condition = instr->hydrogen()->allow_equality() ? hi : hs;
4193 if (instr->index()->IsConstantOperand()) { 4222 if (instr->index()->IsConstantOperand()) {
4194 int constant_index = 4223 int constant_index =
4195 ToInteger32(LConstantOperand::cast(instr->index())); 4224 ToInteger32(LConstantOperand::cast(instr->index()));
4196 if (instr->hydrogen()->length()->representation().IsSmi()) { 4225 if (instr->hydrogen()->length()->representation().IsSmi()) {
4197 __ li(at, Operand(Smi::FromInt(constant_index))); 4226 __ li(at, Operand(Smi::FromInt(constant_index)));
4198 } else { 4227 } else {
4199 __ li(at, Operand(constant_index)); 4228 __ li(at, Operand(constant_index));
4200 } 4229 }
4201 DeoptimizeIf(hs, 4230 ApplyCheckIf(condition,
4202 instr->environment(), 4231 instr,
4203 at, 4232 at,
4204 Operand(ToRegister(instr->length()))); 4233 Operand(ToRegister(instr->length())));
4205 } else { 4234 } else {
4206 DeoptimizeIf(hs, 4235 ApplyCheckIf(condition,
4207 instr->environment(), 4236 instr,
4208 ToRegister(instr->index()), 4237 ToRegister(instr->index()),
4209 Operand(ToRegister(instr->length()))); 4238 Operand(ToRegister(instr->length())));
4210 } 4239 }
4211 } 4240 }
4212 4241
4213 4242
4214 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) { 4243 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
4215 Register external_pointer = ToRegister(instr->elements()); 4244 Register external_pointer = ToRegister(instr->elements());
4216 Register key = no_reg; 4245 Register key = no_reg;
4217 ElementsKind elements_kind = instr->elements_kind(); 4246 ElementsKind elements_kind = instr->elements_kind();
(...skipping 201 matching lines...)
4419 __ lw(scratch, FieldMemOperand(object_reg, HeapObject::kMapOffset)); 4448 __ lw(scratch, FieldMemOperand(object_reg, HeapObject::kMapOffset));
4420 __ Branch(&not_applicable, ne, scratch, Operand(from_map)); 4449 __ Branch(&not_applicable, ne, scratch, Operand(from_map));
4421 4450
4422 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { 4451 if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
4423 Register new_map_reg = ToRegister(instr->new_map_temp()); 4452 Register new_map_reg = ToRegister(instr->new_map_temp());
4424 __ li(new_map_reg, Operand(to_map)); 4453 __ li(new_map_reg, Operand(to_map));
4425 __ sw(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset)); 4454 __ sw(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
4426 // Write barrier. 4455 // Write barrier.
4427 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, 4456 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
4428 scratch, GetRAState(), kDontSaveFPRegs); 4457 scratch, GetRAState(), kDontSaveFPRegs);
4429 } else if (FLAG_compiled_transitions) { 4458 } else {
4430 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 4459 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4431 __ mov(a0, object_reg); 4460 __ mov(a0, object_reg);
4432 __ li(a1, Operand(to_map)); 4461 __ li(a1, Operand(to_map));
4433 TransitionElementsKindStub stub(from_kind, to_kind); 4462 TransitionElementsKindStub stub(from_kind, to_kind);
4434 __ CallStub(&stub); 4463 __ CallStub(&stub);
4435 RecordSafepointWithRegisters( 4464 RecordSafepointWithRegisters(
4436 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); 4465 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4437 } else if (IsFastSmiElementsKind(from_kind) &&
4438 IsFastDoubleElementsKind(to_kind)) {
4439 Register fixed_object_reg = ToRegister(instr->temp());
4440 ASSERT(fixed_object_reg.is(a2));
4441 Register new_map_reg = ToRegister(instr->new_map_temp());
4442 ASSERT(new_map_reg.is(a3));
4443 __ li(new_map_reg, Operand(to_map));
4444 __ mov(fixed_object_reg, object_reg);
4445 CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
4446 RelocInfo::CODE_TARGET, instr);
4447 } else if (IsFastDoubleElementsKind(from_kind) &&
4448 IsFastObjectElementsKind(to_kind)) {
4449 Register fixed_object_reg = ToRegister(instr->temp());
4450 ASSERT(fixed_object_reg.is(a2));
4451 Register new_map_reg = ToRegister(instr->new_map_temp());
4452 ASSERT(new_map_reg.is(a3));
4453 __ li(new_map_reg, Operand(to_map));
4454 __ mov(fixed_object_reg, object_reg);
4455 CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
4456 RelocInfo::CODE_TARGET, instr);
4457 } else {
4458 UNREACHABLE();
4459 } 4466 }
4460 __ bind(&not_applicable); 4467 __ bind(&not_applicable);
4461 } 4468 }
4462 4469
4463 4470
4464 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { 4471 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4465 Register object = ToRegister(instr->object()); 4472 Register object = ToRegister(instr->object());
4466 Register temp = ToRegister(instr->temp()); 4473 Register temp = ToRegister(instr->temp());
4467 Label fail; 4474 Label fail;
4468 __ TestJSArrayForAllocationMemento(object, temp, ne, &fail); 4475 __ TestJSArrayForAllocationMemento(object, temp, ne, &fail);
(...skipping 719 matching lines...)
5188 Handle<Map> map, 5195 Handle<Map> map,
5189 LEnvironment* env) { 5196 LEnvironment* env) {
5190 Label success; 5197 Label success;
5191 __ CompareMapAndBranch(map_reg, map, &success, eq, &success); 5198 __ CompareMapAndBranch(map_reg, map, &success, eq, &success);
5192 DeoptimizeIf(al, env); 5199 DeoptimizeIf(al, env);
5193 __ bind(&success); 5200 __ bind(&success);
5194 } 5201 }
5195 5202
5196 5203
5197 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { 5204 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
5205 if (instr->hydrogen()->CanOmitMapChecks()) return;
5198 Register map_reg = scratch0(); 5206 Register map_reg = scratch0();
5199 LOperand* input = instr->value(); 5207 LOperand* input = instr->value();
5200 ASSERT(input->IsRegister()); 5208 ASSERT(input->IsRegister());
5201 Register reg = ToRegister(input); 5209 Register reg = ToRegister(input);
5202 Label success; 5210 Label success;
5203 SmallMapList* map_set = instr->hydrogen()->map_set(); 5211 SmallMapList* map_set = instr->hydrogen()->map_set();
5204 __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); 5212 __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
5205 for (int i = 0; i < map_set->length() - 1; i++) { 5213 for (int i = 0; i < map_set->length() - 1; i++) {
5206 Handle<Map> map = map_set->at(i); 5214 Handle<Map> map = map_set->at(i);
5207 __ CompareMapAndBranch(map_reg, map, &success, eq, &success); 5215 __ CompareMapAndBranch(map_reg, map, &success, eq, &success);
(...skipping 48 matching lines...)
5256 __ jmp(&done); 5264 __ jmp(&done);
5257 5265
5258 __ bind(&is_smi); 5266 __ bind(&is_smi);
5259 __ ClampUint8(result_reg, scratch); 5267 __ ClampUint8(result_reg, scratch);
5260 5268
5261 __ bind(&done); 5269 __ bind(&done);
5262 } 5270 }
5263 5271
5264 5272
5265 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { 5273 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
5274 if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
5275
5266 Register prototype_reg = ToRegister(instr->temp()); 5276 Register prototype_reg = ToRegister(instr->temp());
5267 Register map_reg = ToRegister(instr->temp2()); 5277 Register map_reg = ToRegister(instr->temp2());
5268 5278
5269 ZoneList<Handle<JSObject> >* prototypes = instr->prototypes(); 5279 ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
5270 ZoneList<Handle<Map> >* maps = instr->maps(); 5280 ZoneList<Handle<Map> >* maps = instr->maps();
5271 5281
5272 ASSERT(prototypes->length() == maps->length()); 5282 ASSERT(prototypes->length() == maps->length());
5273 5283
5274 if (!instr->hydrogen()->CanOmitPrototypeChecks()) { 5284 for (int i = 0; i < prototypes->length(); i++) {
5275 for (int i = 0; i < prototypes->length(); i++) { 5285 __ LoadHeapObject(prototype_reg, prototypes->at(i));
5276 __ LoadHeapObject(prototype_reg, prototypes->at(i)); 5286 __ lw(map_reg, FieldMemOperand(prototype_reg, HeapObject::kMapOffset));
5277 __ lw(map_reg, FieldMemOperand(prototype_reg, HeapObject::kMapOffset)); 5287 DoCheckMapCommon(map_reg, maps->at(i), instr->environment());
5278 DoCheckMapCommon(map_reg, maps->at(i), instr->environment());
5279 }
5280 } 5288 }
5281 } 5289 }
5282 5290
5283 5291
5284 void LCodeGen::DoAllocate(LAllocate* instr) { 5292 void LCodeGen::DoAllocate(LAllocate* instr) {
5285 class DeferredAllocate: public LDeferredCode { 5293 class DeferredAllocate: public LDeferredCode {
5286 public: 5294 public:
5287 DeferredAllocate(LCodeGen* codegen, LAllocate* instr) 5295 DeferredAllocate(LCodeGen* codegen, LAllocate* instr)
5288 : LDeferredCode(codegen), instr_(instr) { } 5296 : LDeferredCode(codegen), instr_(instr) { }
5289 virtual void Generate() { codegen()->DoDeferredAllocate(instr_); } 5297 virtual void Generate() { codegen()->DoDeferredAllocate(instr_); }
(...skipping 352 matching lines...)
5642 EnsureSpaceForLazyDeopt(); 5650 EnsureSpaceForLazyDeopt();
5643 last_lazy_deopt_pc_ = masm()->pc_offset(); 5651 last_lazy_deopt_pc_ = masm()->pc_offset();
5644 ASSERT(instr->HasEnvironment()); 5652 ASSERT(instr->HasEnvironment());
5645 LEnvironment* env = instr->environment(); 5653 LEnvironment* env = instr->environment();
5646 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 5654 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5647 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 5655 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5648 } 5656 }
5649 5657
5650 5658
5651 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { 5659 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
5652 if (instr->hydrogen_value()->IsSoftDeoptimize()) { 5660 Deoptimizer::BailoutType type = instr->hydrogen()->type();
5653 SoftDeoptimize(instr->environment(), zero_reg, Operand(zero_reg)); 5661 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the
5654 } else { 5662 // needed return address), even though the implementation of LAZY and EAGER is
5655 DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg)); 5663 // now identical. When LAZY is eventually completely folded into EAGER, remove
5664 // the special case below.
5665 if (info()->IsStub() && type == Deoptimizer::EAGER) {
5666 type = Deoptimizer::LAZY;
5656 } 5667 }
5668 DeoptimizeIf(al, instr->environment(), type, zero_reg, Operand(zero_reg));
5657 } 5669 }
5658 5670
5659 5671
5660 void LCodeGen::DoDummyUse(LDummyUse* instr) { 5672 void LCodeGen::DoDummyUse(LDummyUse* instr) {
5661 // Nothing to see here, move on! 5673 // Nothing to see here, move on!
5662 } 5674 }
5663 5675
5664 5676
5665 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { 5677 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
5666 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 5678 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
(...skipping 157 matching lines...)
5824 __ Subu(scratch, result, scratch); 5836 __ Subu(scratch, result, scratch);
5825 __ lw(result, FieldMemOperand(scratch, 5837 __ lw(result, FieldMemOperand(scratch,
5826 FixedArray::kHeaderSize - kPointerSize)); 5838 FixedArray::kHeaderSize - kPointerSize));
5827 __ bind(&done); 5839 __ bind(&done);
5828 } 5840 }
5829 5841
5830 5842
5831 #undef __ 5843 #undef __
5832 5844
5833 } } // namespace v8::internal 5845 } } // namespace v8::internal
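
Closing note on the DoBoundsCheck/ApplyCheckIf hunk above: the deopt condition is hs (unsigned >=) by default and hi (unsigned >) when the hydrogen instruction allows index == length; because the comparison is unsigned, a negative int32 index fails it as well. A minimal model of that predicate, with an illustrative helper name rather than V8 code:

    #include <cstdint>

    // True when the bounds check should bail out, mirroring the hs/hi
    // condition chosen in DoBoundsCheck.  The unsigned comparison also
    // rejects negative indices.
    static inline bool BoundsCheckFails(int32_t index, int32_t length,
                                        bool allow_equality) {
      uint32_t u_index = static_cast<uint32_t>(index);
      uint32_t u_length = static_cast<uint32_t>(length);
      return allow_equality ? (u_index > u_length)    // hi
                            : (u_index >= u_length);  // hs
    }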