Chromium Code Reviews

Diff: src/ia32/lithium-codegen-ia32.cc

Issue 131363008: A64: Synchronize with r15922. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 638 matching lines...)
   return ToX87Register(op->index());
 }


 XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
   ASSERT(op->IsDoubleRegister());
   return ToDoubleRegister(op->index());
 }


-int LCodeGen::ToInteger32(LConstantOperand* op) const {
-  HConstant* constant = chunk_->LookupConstant(op);
-  return constant->Integer32Value();
+int32_t LCodeGen::ToInteger32(LConstantOperand* op) const {
+  return ToRepresentation(op, Representation::Integer32());
 }


+int32_t LCodeGen::ToRepresentation(LConstantOperand* op,
+                                   const Representation& r) const {
+  HConstant* constant = chunk_->LookupConstant(op);
+  int32_t value = constant->Integer32Value();
+  if (r.IsInteger32()) return value;
+  ASSERT(r.IsSmiOrTagged());
+  return reinterpret_cast<int32_t>(Smi::FromInt(value));
+}
+
+
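The new ToRepresentation helper hands back either the raw 32-bit value or, when the target representation is Smi or tagged, the Smi bit pattern for that value. On ia32 a Smi keeps its payload shifted left past a one-bit tag of zero, so the reinterpret_cast above simply produces value * 2. A minimal host-side sketch of that layout, for illustration only (it assumes V8's 32-bit Smi encoding, i.e. kSmiTagSize == 1 and kSmiTag == 0):

#include <cassert>
#include <cstdint>

// Tagging: shift the payload past the tag bit. The tag itself is 0, so the
// resulting bit pattern is simply value * 2. The shift is done in unsigned
// space so it is well-defined in C++ even for negative payloads.
int32_t SmiBitsFromInt(int32_t value) {
  return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
}

// Untagging: an arithmetic shift right restores the original value.
int32_t IntFromSmiBits(int32_t bits) { return bits >> 1; }

int main() {
  assert(SmiBitsFromInt(21) == 42);                  // Smi(21) is the bits 42.
  assert(IntFromSmiBits(SmiBitsFromInt(-7)) == -7);  // Round-trips negatives.
  return 0;
}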
 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
   HConstant* constant = chunk_->LookupConstant(op);
   ASSERT(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
   return constant->handle();
 }


 double LCodeGen::ToDouble(LConstantOperand* op) const {
   HConstant* constant = chunk_->LookupConstant(op);
   ASSERT(constant->HasDoubleValue());
(...skipping 321 matching lines...)

 void LCodeGen::DeoptimizeIf(Condition cc,
                             LEnvironment* environment) {
   Deoptimizer::BailoutType bailout_type = info()->IsStub()
       ? Deoptimizer::LAZY
       : Deoptimizer::EAGER;
   DeoptimizeIf(cc, environment, bailout_type);
 }


-void LCodeGen::SoftDeoptimize(LEnvironment* environment) {
-  ASSERT(!info()->IsStub());
-  DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT);
-}
-
-
 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) {
   ZoneList<Handle<Map> > maps(1, zone());
   int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
   for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
     RelocInfo::Mode mode = it.rinfo()->rmode();
     if (mode == RelocInfo::EMBEDDED_OBJECT &&
         it.rinfo()->target_object()->IsMap()) {
       Handle<Map> map(Map::cast(it.rinfo()->target_object()));
       if (map->CanTransition()) {
         maps.Add(map, zone());
(...skipping 596 matching lines...)
           __ shl(left, 4);
           break;
         default:
           __ imul(left, left, constant);
           break;
       }
     } else {
       __ imul(left, left, constant);
     }
   } else {
+    if (instr->hydrogen()->representation().IsSmi()) {
+      __ SmiUntag(left);
+    }
     __ imul(left, ToOperand(right));
   }

   if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
     DeoptimizeIf(overflow, instr->environment());
   }

   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
     // Bail out if the result is supposed to be negative zero.
     Label done;
(...skipping 16 matching lines...)
 }


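The SmiUntag added to the register-operand path of DoMulI is there because both inputs are tagged: a Smi holds value << 1, and (a << 1) * (b << 1) == (a * b) << 2, one tag shift too many. Untagging one operand first leaves the product correctly tagged. A small sketch of the arithmetic, illustration only, using the 32-bit Smi layout sketched earlier:

#include <cassert>
#include <cstdint>

int main() {
  int32_t a = 6, b = 7;
  int32_t smi_a = a << 1;  // Tagged operands, as DoMulI sees them.
  int32_t smi_b = b << 1;
  // Untag one side (SmiUntag), then multiply: the single remaining tag
  // shift leaves the result correctly tagged as Smi(a * b).
  int32_t product_bits = (smi_a >> 1) * smi_b;
  assert(product_bits == (a * b) << 1);
  return 0;
}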
 void LCodeGen::DoBitI(LBitI* instr) {
   LOperand* left = instr->left();
   LOperand* right = instr->right();
   ASSERT(left->Equals(instr->result()));
   ASSERT(left->IsRegister());

   if (right->IsConstantOperand()) {
-    int right_operand = ToInteger32(LConstantOperand::cast(right));
+    int right_operand = ToRepresentation(LConstantOperand::cast(right),
+                                         instr->hydrogen()->representation());
     switch (instr->op()) {
       case Token::BIT_AND:
         __ and_(ToRegister(left), right_operand);
         break;
       case Token::BIT_OR:
         __ or_(ToRegister(left), right_operand);
         break;
       case Token::BIT_XOR:
         __ xor_(ToRegister(left), right_operand);
         break;
(...skipping 73 matching lines...)
       case Token::SHR:
         if (shift_count == 0 && instr->can_deopt()) {
           __ test(ToRegister(left), Immediate(0x80000000));
           DeoptimizeIf(not_zero, instr->environment());
         } else {
           __ shr(ToRegister(left), shift_count);
         }
         break;
       case Token::SHL:
         if (shift_count != 0) {
-          __ shl(ToRegister(left), shift_count);
+          if (instr->hydrogen_value()->representation().IsSmi() &&
+              instr->can_deopt()) {
+            __ shl(ToRegister(left), shift_count - 1);
+            __ SmiTag(ToRegister(left));
+            DeoptimizeIf(overflow, instr->environment());
+          } else {
+            __ shl(ToRegister(left), shift_count);
+          }
         }
         break;
       default:
         UNREACHABLE();
         break;
     }
   }
 }


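The new Smi branch of Token::SHL shifts by shift_count - 1 and performs the final doubling via SmiTag (which, as far as I can tell, the ia32 macro assembler implements as an add of the register to itself); that addition sets the overflow flag that the following DeoptimizeIf consumes. A hedged host-side sketch of the same idea, using a compiler builtin in place of the CPU flags (assumes GCC/Clang's __builtin_add_overflow, and shift_count >= 1, which the shift_count != 0 guard ensures):

#include <cstdint>
#include <cstdio>

// Returns false when the final doubling overflows, mirroring the
// shl(count - 1) / SmiTag / DeoptimizeIf(overflow) sequence above.
bool ShiftLeftSmiChecked(int32_t smi_bits, int shift_count, int32_t* out) {
  // Shift all but the last bit position, in unsigned space so the
  // intermediate wrap is well-defined in C++.
  int32_t partial = static_cast<int32_t>(
      static_cast<uint32_t>(smi_bits) << (shift_count - 1));
  // The last shift step is done as an addition so the overflow is observable.
  return !__builtin_add_overflow(partial, partial, out);
}

int main() {
  int32_t result;
  printf("%d\n", ShiftLeftSmiChecked(21 << 1, 3, &result));  // 1: fits.
  printf("%d\n", ShiftLeftSmiChecked(1 << 29, 3, &result));  // 0: overflows.
  return 0;
}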
 void LCodeGen::DoSubI(LSubI* instr) {
   LOperand* left = instr->left();
   LOperand* right = instr->right();
   ASSERT(left->Equals(instr->result()));

   if (right->IsConstantOperand()) {
-    __ sub(ToOperand(left), ToInteger32Immediate(right));
+    __ sub(ToOperand(left),
+           ToImmediate(right, instr->hydrogen()->representation()));
   } else {
     __ sub(ToRegister(left), ToOperand(right));
   }
   if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
     DeoptimizeIf(overflow, instr->environment());
   }
 }


 void LCodeGen::DoConstantI(LConstantI* instr) {
(...skipping 49 matching lines...)
       }
     }
   }
 }


 void LCodeGen::DoConstantT(LConstantT* instr) {
   Register reg = ToRegister(instr->result());
   Handle<Object> handle = instr->value();
   AllowDeferredHandleDereference smi_check;
-  if (handle->IsHeapObject()) {
-    __ LoadHeapObject(reg, Handle<HeapObject>::cast(handle));
-  } else {
-    __ Set(reg, Immediate(handle));
-  }
+  __ LoadObject(reg, handle);
 }


 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
   Register result = ToRegister(instr->result());
   Register map = ToRegister(instr->value());
   __ EnumLength(result, map);
 }


(...skipping 118 matching lines...)
   }
 }


 void LCodeGen::DoAddI(LAddI* instr) {
   LOperand* left = instr->left();
   LOperand* right = instr->right();

   if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) {
     if (right->IsConstantOperand()) {
-      int32_t offset = ToInteger32(LConstantOperand::cast(right));
+      int32_t offset = ToRepresentation(LConstantOperand::cast(right),
+                                        instr->hydrogen()->representation());
       __ lea(ToRegister(instr->result()), MemOperand(ToRegister(left), offset));
     } else {
       Operand address(ToRegister(left), ToRegister(right), times_1, 0);
       __ lea(ToRegister(instr->result()), address);
     }
   } else {
     if (right->IsConstantOperand()) {
-      __ add(ToOperand(left), ToInteger32Immediate(right));
+      __ add(ToOperand(left),
+             ToImmediate(right, instr->hydrogen()->representation()));
     } else {
       __ add(ToRegister(left), ToOperand(right));
     }
     if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
       DeoptimizeIf(overflow, instr->environment());
     }
   }
 }


 void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
   CpuFeatureScope scope(masm(), SSE2);
   LOperand* left = instr->left();
   LOperand* right = instr->right();
   ASSERT(left->Equals(instr->result()));
   HMathMinMax::Operation operation = instr->hydrogen()->operation();
-  if (instr->hydrogen()->representation().IsInteger32()) {
+  if (instr->hydrogen()->representation().IsSmiOrInteger32()) {
     Label return_left;
     Condition condition = (operation == HMathMinMax::kMathMin)
         ? less_equal
         : greater_equal;
     if (right->IsConstantOperand()) {
       Operand left_op = ToOperand(left);
-      Immediate right_imm = ToInteger32Immediate(right);
-      __ cmp(left_op, right_imm);
+      Immediate immediate = ToImmediate(LConstantOperand::cast(instr->right()),
+                                        instr->hydrogen()->representation());
+      __ cmp(left_op, immediate);
       __ j(condition, &return_left, Label::kNear);
-      __ mov(left_op, right_imm);
+      __ mov(left_op, immediate);
     } else {
       Register left_reg = ToRegister(left);
       Operand right_op = ToOperand(right);
       __ cmp(left_reg, right_op);
       __ j(condition, &return_left, Label::kNear);
       __ mov(left_reg, right_op);
     }
     __ bind(&return_left);
   } else {
     ASSERT(instr->hydrogen()->representation().IsDouble());
(...skipping 347 matching lines...)
     EmitGoto(next_block);
   } else {
     if (instr->is_double()) {
       CpuFeatureScope scope(masm(), SSE2);
       // Don't base result on EFLAGS when a NaN is involved. Instead
       // jump to the false block.
       __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
       __ j(parity_even, instr->FalseLabel(chunk_));
     } else {
       if (right->IsConstantOperand()) {
-        int32_t const_value = ToInteger32(LConstantOperand::cast(right));
-        if (instr->hydrogen_value()->representation().IsSmi()) {
-          __ cmp(ToOperand(left), Immediate(Smi::FromInt(const_value)));
-        } else {
-          __ cmp(ToOperand(left), Immediate(const_value));
-        }
+        __ cmp(ToOperand(left),
+               ToImmediate(right, instr->hydrogen()->representation()));
       } else if (left->IsConstantOperand()) {
-        int32_t const_value = ToInteger32(LConstantOperand::cast(left));
-        if (instr->hydrogen_value()->representation().IsSmi()) {
-          __ cmp(ToOperand(right), Immediate(Smi::FromInt(const_value)));
-        } else {
-          __ cmp(ToOperand(right), Immediate(const_value));
-        }
+        __ cmp(ToOperand(right),
+               ToImmediate(left, instr->hydrogen()->representation()));
         // We transposed the operands. Reverse the condition.
         cc = ReverseCondition(cc);
       } else {
         __ cmp(ToRegister(left), ToOperand(right));
       }
     }
     EmitBranch(instr, cc);
   }
 }


 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
   Register left = ToRegister(instr->left());

   if (instr->right()->IsConstantOperand()) {
     Handle<Object> right = ToHandle(LConstantOperand::cast(instr->right()));
     __ CmpObject(left, right);
   } else {
     Operand right = ToOperand(instr->right());
     __ cmp(left, right);
   }
   EmitBranch(instr, equal);
 }


-void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
-  Register left = ToRegister(instr->left());
-
-  __ cmp(left, instr->hydrogen()->right());
-  EmitBranch(instr, equal);
-}
-
-
 Condition LCodeGen::EmitIsObject(Register input,
                                  Register temp1,
                                  Label* is_not_object,
                                  Label* is_object) {
   __ JumpIfSmi(input, is_not_object);

   __ cmp(input, isolate()->factory()->null_value());
   __ j(equal, is_object);

   __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
(...skipping 620 matching lines...)
   Register result = ToRegister(instr->result());
   if (access.IsInobject()) {
     __ mov(result, FieldOperand(object, offset));
   } else {
     __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
     __ mov(result, FieldOperand(result, offset));
   }
 }


-void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
-                                               Register object,
-                                               Handle<Map> type,
-                                               Handle<String> name,
-                                               LEnvironment* env) {
+void LCodeGen::EmitLoadFieldOrConstant(Register result,
+                                       Register object,
+                                       Handle<Map> type,
+                                       Handle<String> name,
+                                       LEnvironment* env) {
   LookupResult lookup(isolate());
   type->LookupDescriptor(NULL, *name, &lookup);
   ASSERT(lookup.IsFound() || lookup.IsCacheable());
   if (lookup.IsField()) {
     int index = lookup.GetLocalFieldIndexFromMap(*type);
     int offset = index * kPointerSize;
     if (index < 0) {
       // Negative property indices are in-object properties, indexed
       // from the end of the fixed part of the object.
       __ mov(result, FieldOperand(object, offset + type->instance_size()));
     } else {
       // Non-negative property indices are in the properties array.
       __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
       __ mov(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
     }
-  } else if (lookup.IsConstantFunction()) {
-    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
-    __ LoadHeapObject(result, function);
+  } else if (lookup.IsConstant()) {
+    Handle<Object> constant(lookup.GetConstantFromMap(*type), isolate());
+    __ LoadObject(result, constant);
   } else {
     // Negative lookup.
     // Check prototypes.
     Handle<HeapObject> current(HeapObject::cast((*type)->prototype()));
     Heap* heap = type->GetHeap();
     while (*current != heap->null_value()) {
       __ LoadHeapObject(result, current);
       __ cmp(FieldOperand(result, HeapObject::kMapOffset),
              Handle<Map>(current->map()));
       DeoptimizeIf(not_equal, env);
(...skipping 28 matching lines...)
 static bool CompactEmit(SmallMapList* list,
                         Handle<String> name,
                         int i,
                         Isolate* isolate) {
   Handle<Map> map = list->at(i);
   // If the map has ElementsKind transitions, we will generate map checks
   // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
   if (map->HasElementsTransition()) return false;
   LookupResult lookup(isolate);
   map->LookupDescriptor(NULL, *name, &lookup);
-  return lookup.IsField() || lookup.IsConstantFunction();
+  return lookup.IsField() || lookup.IsConstant();
 }


 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
   Register object = ToRegister(instr->object());
   Register result = ToRegister(instr->result());

   int map_count = instr->hydrogen()->types()->length();
   bool need_generic = instr->hydrogen()->need_generic();

(...skipping 11 matching lines...)
     }
   }
   for (int i = 0; i < map_count; ++i) {
     bool last = (i == map_count - 1);
     Handle<Map> map = instr->hydrogen()->types()->at(i);
     Label check_passed;
     __ CompareMap(object, map, &check_passed);
     if (last && !need_generic) {
       DeoptimizeIf(not_equal, instr->environment());
       __ bind(&check_passed);
-      EmitLoadFieldOrConstantFunction(
-          result, object, map, name, instr->environment());
+      EmitLoadFieldOrConstant(result, object, map, name, instr->environment());
     } else {
       Label next;
       bool compact = all_are_compact ? true :
           CompactEmit(instr->hydrogen()->types(), name, i, isolate());
       __ j(not_equal, &next, compact ? Label::kNear : Label::kFar);
       __ bind(&check_passed);
-      EmitLoadFieldOrConstantFunction(
-          result, object, map, name, instr->environment());
+      EmitLoadFieldOrConstant(result, object, map, name, instr->environment());
       __ jmp(&done, all_are_compact ? Label::kNear : Label::kFar);
       __ bind(&next);
     }
   }
   if (need_generic) {
     __ mov(ecx, name);
     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
     CallCode(ic, RelocInfo::CODE_TARGET, instr);
   }
   __ bind(&done);
(...skipping 529 matching lines...)
                           EDI_UNINITIALIZED);
 }


 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
   Register input_reg = ToRegister(instr->value());
   __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
          factory()->heap_number_map());
   DeoptimizeIf(not_equal, instr->environment());

-  Label done;
+  Label slow, allocated, done;
   Register tmp = input_reg.is(eax) ? ecx : eax;
   Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;

   // Preserve the value of all registers.
   PushSafepointRegistersScope scope(this);

-  Label negative;
   __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
   // Check the sign of the argument. If the argument is positive, just
   // return it. We do not need to patch the stack since |input| and
   // |result| are the same register and |input| will be restored
   // unchanged by popping safepoint registers.
   __ test(tmp, Immediate(HeapNumber::kSignMask));
-  __ j(not_zero, &negative);
-  __ jmp(&done);
+  __ j(zero, &done);

-  __ bind(&negative);
-
-  Label allocated, slow;
   __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
-  __ jmp(&allocated);
+  __ jmp(&allocated, Label::kNear);

   // Slow case: Call the runtime system to do the number allocation.
   __ bind(&slow);
-
   CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0,
                           instr, instr->context());
-
   // Set the pointer to the new heap number in tmp.
   if (!tmp.is(eax)) __ mov(tmp, eax);
-
   // Restore input_reg after call to runtime.
   __ LoadFromSafepointRegisterSlot(input_reg, input_reg);

   __ bind(&allocated);
   __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
   __ and_(tmp2, ~HeapNumber::kSignMask);
   __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
   __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
   __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
   __ StoreToSafepointRegisterSlot(input_reg, tmp);

   __ bind(&done);
 }


 void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) {
   Register input_reg = ToRegister(instr->value());
   __ test(input_reg, Operand(input_reg));
   Label is_positive;
-  __ j(not_sign, &is_positive);
-  __ neg(input_reg);
-  __ test(input_reg, Operand(input_reg));
+  __ j(not_sign, &is_positive, Label::kNear);
+  __ neg(input_reg);  // Sets flags.
   DeoptimizeIf(negative, instr->environment());
   __ bind(&is_positive);
 }


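EmitIntegerMathAbs can drop the redundant test after neg because neg already sets the flags, and exactly one input leaves the sign flag set afterwards: INT32_MIN, whose negation wraps back to itself and so has no valid int32 absolute value, hence the deopt. A quick demonstration of that corner case:

#include <cstdint>
#include <cstdio>

int main() {
  int32_t v = INT32_MIN;
  // Negate in unsigned space so the wraparound is well-defined in C++.
  int32_t negated = static_cast<int32_t>(0u - static_cast<uint32_t>(v));
  // Prints 1: -INT32_MIN is INT32_MIN again, so abs() must bail out.
  printf("%d\n", negated == INT32_MIN);
  return 0;
}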
 void LCodeGen::DoMathAbs(LMathAbs* instr) {
   // Class for deferred case.
   class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
    public:
     DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr)
(...skipping 644 matching lines...)
   ASSERT(ToRegister(instr->value()).is(eax));

   __ mov(ecx, instr->name());
   Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
       ? isolate()->builtins()->StoreIC_Initialize_Strict()
       : isolate()->builtins()->StoreIC_Initialize();
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 }


+void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) {
+  if (FLAG_debug_code && check->hydrogen()->skip_check()) {
+    Label done;
+    __ j(NegateCondition(cc), &done, Label::kNear);
+    __ int3();
+    __ bind(&done);
+  } else {
+    DeoptimizeIf(cc, check->environment());
+  }
+}
+
+
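ApplyCheckIf gives statically eliminated bounds checks a safety net: when the optimizer proved a check redundant (skip_check()), debug builds still emit the comparison and trap via int3 if the supposedly impossible condition fires, while the normal path keeps deoptimizing. A hypothetical host-side analogue of that control flow (the names here are illustrative, not V8 API):

#include <cstdio>
#include <cstdlib>

void Deoptimize() { std::puts("deopt"); }  // Stand-in for DeoptimizeIf.

void ApplyCheckIf(bool failure_condition, bool skip_check, bool debug_code) {
  if (debug_code && skip_check) {
    // The check was statically eliminated; re-verify the proof and trap
    // hard (the int3 above) if it does not hold.
    if (failure_condition) std::abort();
  } else if (failure_condition) {
    Deoptimize();
  }
}

int main() {
  ApplyCheckIf(false, true, true);   // Eliminated check holds: no-op.
  ApplyCheckIf(true, false, false);  // Normal path: deoptimize.
  return 0;
}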
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
-  if (instr->hydrogen()->skip_check()) return;
+  if (instr->hydrogen()->skip_check() && !FLAG_debug_code) return;

   if (instr->index()->IsConstantOperand()) {
-    int constant_index =
-        ToInteger32(LConstantOperand::cast(instr->index()));
-    if (instr->hydrogen()->length()->representation().IsSmi()) {
-      __ cmp(ToOperand(instr->length()),
-             Immediate(Smi::FromInt(constant_index)));
-    } else {
-      __ cmp(ToOperand(instr->length()), Immediate(constant_index));
-    }
-    DeoptimizeIf(below_equal, instr->environment());
+    Immediate immediate =
+        ToImmediate(LConstantOperand::cast(instr->index()),
+                    instr->hydrogen()->length()->representation());
+    __ cmp(ToOperand(instr->length()), immediate);
+    Condition condition =
+        instr->hydrogen()->allow_equality() ? below : below_equal;
+    ApplyCheckIf(condition, instr);
   } else {
     __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
-    DeoptimizeIf(above_equal, instr->environment());
+    Condition condition =
+        instr->hydrogen()->allow_equality() ? above : above_equal;
+    ApplyCheckIf(condition, instr);
   }
 }


 void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
   ElementsKind elements_kind = instr->elements_kind();
   LOperand* key = instr->key();
   if (!key->IsConstantOperand() &&
       ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(),
                                   elements_kind)) {
(...skipping 139 matching lines...)
       instr->elements(),
       instr->key(),
       instr->hydrogen()->key()->representation(),
       FAST_ELEMENTS,
       FixedArray::kHeaderSize - kHeapObjectTag,
       instr->additional_index());
   if (instr->value()->IsRegister()) {
     __ mov(operand, ToRegister(instr->value()));
   } else {
     LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
-    if (IsInteger32(operand_value)) {
-      Smi* smi_value = Smi::FromInt(ToInteger32(operand_value));
-      __ mov(operand, Immediate(smi_value));
+    if (IsSmi(operand_value)) {
+      Immediate immediate = ToImmediate(operand_value, Representation::Smi());
+      __ mov(operand, immediate);
     } else {
+      ASSERT(!IsInteger32(operand_value));
       Handle<Object> handle_value = ToHandle(operand_value);
       __ mov(operand, handle_value);
     }
   }

   if (instr->hydrogen()->NeedsWriteBarrier()) {
     ASSERT(instr->value()->IsRegister());
     Register value = ToRegister(instr->value());
     ASSERT(!instr->key()->IsConstantOperand());
     SmiCheck check_needed =
(...skipping 62 matching lines...)
   if (is_simple_map_transition) {
     Register new_map_reg = ToRegister(instr->new_map_temp());
     Handle<Map> map = instr->hydrogen()->transitioned_map();
     __ mov(FieldOperand(object_reg, HeapObject::kMapOffset),
            Immediate(map));
     // Write barrier.
     ASSERT_NE(instr->temp(), NULL);
     __ RecordWriteForMap(object_reg, to_map, new_map_reg,
                          ToRegister(instr->temp()),
                          kDontSaveFPRegs);
-  } else if (FLAG_compiled_transitions) {
+  } else {
     PushSafepointRegistersScope scope(this);
     if (!object_reg.is(eax)) {
       __ push(object_reg);
     }
     LoadContextFromDeferred(instr->context());
     if (!object_reg.is(eax)) {
       __ pop(eax);
     }
     __ mov(ebx, to_map);
     TransitionElementsKindStub stub(from_kind, to_kind);
     __ CallStub(&stub);
     RecordSafepointWithRegisters(
         instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
-  } else if (IsFastSmiElementsKind(from_kind) &&
-             IsFastDoubleElementsKind(to_kind)) {
-    Register new_map_reg = ToRegister(instr->new_map_temp());
-    __ mov(new_map_reg, to_map);
-    Register fixed_object_reg = ToRegister(instr->temp());
-    ASSERT(fixed_object_reg.is(edx));
-    ASSERT(new_map_reg.is(ebx));
-    __ mov(fixed_object_reg, object_reg);
-    CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
-             RelocInfo::CODE_TARGET, instr);
-  } else if (IsFastDoubleElementsKind(from_kind) &&
-             IsFastObjectElementsKind(to_kind)) {
-    Register new_map_reg = ToRegister(instr->new_map_temp());
-    __ mov(new_map_reg, to_map);
-    Register fixed_object_reg = ToRegister(instr->temp());
-    ASSERT(fixed_object_reg.is(edx));
-    ASSERT(new_map_reg.is(ebx));
-    __ mov(fixed_object_reg, object_reg);
-    CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
-             RelocInfo::CODE_TARGET, instr);
-  } else {
-    UNREACHABLE();
   }
   __ bind(&not_applicable);
 }


 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
   class DeferredStringCharCodeAt: public LDeferredCode {
    public:
     DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
         : LDeferredCode(codegen), instr_(instr) { }
(...skipping 24 matching lines...)
   // result register contain a valid pointer because it is already
   // contained in the register pointer map.
   __ Set(result, Immediate(0));

   PushSafepointRegistersScope scope(this);
   __ push(string);
   // Push the index as a smi. This is safe because of the checks in
   // DoStringCharCodeAt above.
   STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
   if (instr->index()->IsConstantOperand()) {
-    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
-    __ push(Immediate(Smi::FromInt(const_index)));
+    Immediate immediate = ToImmediate(LConstantOperand::cast(instr->index()),
+                                      Representation::Smi());
+    __ push(immediate);
   } else {
     Register index = ToRegister(instr->index());
     __ SmiTag(index);
     __ push(index);
   }
   CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2,
                           instr, instr->context());
   __ AssertSmi(eax);
   __ SmiUntag(eax);
   __ StoreToSafepointRegisterSlot(result, eax);
(...skipping 985 matching lines...)
                                Handle<Map> map,
                                LInstruction* instr) {
   Label success;
   __ CompareMap(reg, map, &success);
   DeoptimizeIf(not_equal, instr->environment());
   __ bind(&success);
 }


 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+  if (instr->hydrogen()->CanOmitMapChecks()) return;
   LOperand* input = instr->value();
   ASSERT(input->IsRegister());
   Register reg = ToRegister(input);

   Label success;
   SmallMapList* map_set = instr->hydrogen()->map_set();
   for (int i = 0; i < map_set->length() - 1; i++) {
     Handle<Map> map = map_set->at(i);
     __ CompareMap(reg, map, &success);
     __ j(equal, &success);
(...skipping 170 matching lines...)
   if (!input_reg.is(result_reg)) {
     __ mov(result_reg, input_reg);
   }
   __ SmiUntag(result_reg);
   __ ClampUint8(result_reg);
   __ bind(&done);
 }


 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
+  if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
   Register reg = ToRegister(instr->temp());

   ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
   ZoneList<Handle<Map> >* maps = instr->maps();

   ASSERT(prototypes->length() == maps->length());

-  if (!instr->hydrogen()->CanOmitPrototypeChecks()) {
-    for (int i = 0; i < prototypes->length(); i++) {
-      __ LoadHeapObject(reg, prototypes->at(i));
-      DoCheckMapCommon(reg, maps->at(i), instr);
-    }
+  for (int i = 0; i < prototypes->length(); i++) {
+    __ LoadHeapObject(reg, prototypes->at(i));
+    DoCheckMapCommon(reg, maps->at(i), instr);
   }
 }


 void LCodeGen::DoAllocate(LAllocate* instr) {
   class DeferredAllocate: public LDeferredCode {
    public:
     DeferredAllocate(LCodeGen* codegen, LAllocate* instr)
         : LDeferredCode(codegen), instr_(instr) { }
     virtual void Generate() { codegen()->DoDeferredAllocate(instr_); }
(...skipping 299 matching lines...)
 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
   EnsureSpaceForLazyDeopt();
   ASSERT(instr->HasEnvironment());
   LEnvironment* env = instr->environment();
   RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
   safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
 }


 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
-  if (instr->hydrogen_value()->IsSoftDeoptimize()) {
-    SoftDeoptimize(instr->environment());
-  } else {
-    DeoptimizeIf(no_condition, instr->environment());
-  }
+  Deoptimizer::BailoutType type = instr->hydrogen()->type();
+  // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the
+  // needed return address), even though the implementation of LAZY and EAGER is
+  // now identical. When LAZY is eventually completely folded into EAGER, remove
+  // the special case below.
+  if (info()->IsStub() && type == Deoptimizer::EAGER) {
+    type = Deoptimizer::LAZY;
+  }
+  DeoptimizeIf(no_condition, instr->environment(), type);
 }


 void LCodeGen::DoDummyUse(LDummyUse* instr) {
   // Nothing to see here, move on!
 }


 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
   PushSafepointRegistersScope scope(this);
(...skipping 160 matching lines...)
                      FixedArray::kHeaderSize - kPointerSize));
   __ bind(&done);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_IA32