Chromium Code Reviews

Side by Side Diff: src/x64/lithium-codegen-x64.cc

Issue 8139027: Version 3.6.5 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 63 matching lines...)
74 int deoptimization_index_; 74 int deoptimization_index_;
75 }; 75 };
76 76
77 77
78 #define __ masm()-> 78 #define __ masm()->
79 79
80 bool LCodeGen::GenerateCode() { 80 bool LCodeGen::GenerateCode() {
81 HPhase phase("Code generation", chunk()); 81 HPhase phase("Code generation", chunk());
82 ASSERT(is_unused()); 82 ASSERT(is_unused());
83 status_ = GENERATING; 83 status_ = GENERATING;
84
85 // Open a frame scope to indicate that there is a frame on the stack. The
86 // MANUAL indicates that the scope shouldn't actually generate code to set up
87 // the frame (that is done in GeneratePrologue).
88 FrameScope frame_scope(masm_, StackFrame::MANUAL);
89
84 return GeneratePrologue() && 90 return GeneratePrologue() &&
85 GenerateBody() && 91 GenerateBody() &&
86 GenerateDeferredCode() && 92 GenerateDeferredCode() &&
87 GenerateJumpTable() && 93 GenerateJumpTable() &&
88 GenerateSafepointTable(); 94 GenerateSafepointTable();
89 } 95 }
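
Note on the FrameScope added above: with StackFrame::MANUAL it acts purely as an RAII marker telling the assembler that a frame will exist while code is generated, without emitting any frame setup itself (the prologue does that). A minimal standalone sketch of that pattern, using hypothetical names (Assembler, FrameMarker) rather than V8's real classes:

#include <cassert>

// Hypothetical stand-ins for illustration only; not V8's real classes.
struct Assembler {
  bool has_frame = false;
};

// RAII marker: records that a frame exists for the duration of the scope,
// without emitting any frame-construction code itself.
class FrameMarker {
 public:
  explicit FrameMarker(Assembler* masm) : masm_(masm) {
    assert(!masm_->has_frame);
    masm_->has_frame = true;   // bookkeeping only, no code generated
  }
  ~FrameMarker() { masm_->has_frame = false; }
 private:
  Assembler* masm_;
};

int main() {
  Assembler masm;
  {
    FrameMarker scope(&masm);  // analogous to FrameScope(masm_, StackFrame::MANUAL)
    assert(masm.has_frame);    // code emitted here may assume a frame exists
  }
  assert(!masm.has_frame);
}
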
90 96
91 97
92 void LCodeGen::FinishCode(Handle<Code> code) { 98 void LCodeGen::FinishCode(Handle<Code> code) {
93 ASSERT(is_done()); 99 ASSERT(is_done());
(...skipping 116 matching lines...)
210 for (int i = 0; i < num_parameters; i++) { 216 for (int i = 0; i < num_parameters; i++) {
211 Variable* var = scope()->parameter(i); 217 Variable* var = scope()->parameter(i);
212 if (var->IsContextSlot()) { 218 if (var->IsContextSlot()) {
213 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 219 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
214 (num_parameters - 1 - i) * kPointerSize; 220 (num_parameters - 1 - i) * kPointerSize;
215 // Load parameter from stack. 221 // Load parameter from stack.
216 __ movq(rax, Operand(rbp, parameter_offset)); 222 __ movq(rax, Operand(rbp, parameter_offset));
217 // Store it in the context. 223 // Store it in the context.
218 int context_offset = Context::SlotOffset(var->index()); 224 int context_offset = Context::SlotOffset(var->index());
219 __ movq(Operand(rsi, context_offset), rax); 225 __ movq(Operand(rsi, context_offset), rax);
220 // Update the write barrier. This clobbers all involved 226 // Update the write barrier. This clobbers rax and rbx.
221 // registers, so we have use a third register to avoid 227 __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs);
222 // clobbering rsi.
223 __ movq(rcx, rsi);
224 __ RecordWrite(rcx, context_offset, rax, rbx);
225 } 228 }
226 } 229 }
227 Comment(";;; End allocate local context"); 230 Comment(";;; End allocate local context");
228 } 231 }
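
For readers unfamiliar with the RecordWrite/RecordWriteContextSlot calls in this hunk: they implement a write barrier, so that after a heap pointer is stored into the context the garbage collector is told about the store and does not miss the new reference. A minimal card-marking sketch of the general idea follows; the card size, table layout, and names are assumptions for illustration, not V8's actual barrier:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

// Illustrative card-marking write barrier; sizes and layout are assumptions.
constexpr std::size_t kHeapSize = 1 << 20;   // bytes of "heap"
constexpr std::size_t kCardSize = 512;       // bytes covered by one card
static std::uint8_t heap[kHeapSize];
static std::uint8_t card_table[kHeapSize / kCardSize];

// Store a pointer-sized value into a heap slot and mark the covering card so
// the collector later rescans that region for recorded pointer stores.
void StoreWithWriteBarrier(std::uint8_t* slot, std::uintptr_t value) {
  std::memcpy(slot, &value, sizeof(value));        // the store itself
  std::size_t card = static_cast<std::size_t>(slot - heap) / kCardSize;
  card_table[card] = 1;                            // remember the store for the GC
}

int main() {
  StoreWithWriteBarrier(heap + 1000, 0xdeadbeef);
  assert(card_table[1000 / kCardSize] == 1);
}
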
229 232
230 // Trace the call. 233 // Trace the call.
231 if (FLAG_trace) { 234 if (FLAG_trace) {
232 __ CallRuntime(Runtime::kTraceEnter, 0); 235 __ CallRuntime(Runtime::kTraceEnter, 0);
233 } 236 }
234 return !is_aborted(); 237 return !is_aborted();
(...skipping 38 matching lines...)
273 return !is_aborted(); 276 return !is_aborted();
274 } 277 }
275 278
276 279
277 bool LCodeGen::GenerateDeferredCode() { 280 bool LCodeGen::GenerateDeferredCode() {
278 ASSERT(is_generating()); 281 ASSERT(is_generating());
279 if (deferred_.length() > 0) { 282 if (deferred_.length() > 0) {
280 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { 283 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
281 LDeferredCode* code = deferred_[i]; 284 LDeferredCode* code = deferred_[i];
282 __ bind(code->entry()); 285 __ bind(code->entry());
286 Comment(";;; Deferred code @%d: %s.",
287 code->instruction_index(),
288 code->instr()->Mnemonic());
283 code->Generate(); 289 code->Generate();
284 __ jmp(code->exit()); 290 __ jmp(code->exit());
285 } 291 }
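
The added Comment() call annotates each out-of-line block in the generated code. As a rough model of the deferred-code pattern used in this loop, the sketch below registers slow-path blocks and emits them after the main body; all names (DeferredBlock, DeferredExample) are illustrative, not V8's:

#include <cstdio>
#include <vector>

// Illustrative model of the deferred-code pattern; names are not V8's.
struct DeferredBlock {
  int instruction_index = 0;
  const char* mnemonic = "";
  virtual void Generate() = 0;   // emits the out-of-line slow path
  virtual ~DeferredBlock() = default;
};

struct DeferredExample : DeferredBlock {
  void Generate() override { std::puts("  ...slow-path code..."); }
};

// Emitted after the main body: bind the entry label, annotate, generate the
// block, then jump back to the exit label (labels elided in this sketch).
void GenerateDeferredCode(const std::vector<DeferredBlock*>& deferred) {
  for (DeferredBlock* code : deferred) {
    std::printf(";;; Deferred code @%d: %s.\n",
                code->instruction_index, code->mnemonic);
    code->Generate();
  }
}

int main() {
  DeferredExample d;
  d.instruction_index = 17;      // hypothetical instruction index
  d.mnemonic = "number-tag-d";   // hypothetical mnemonic
  GenerateDeferredCode({&d});
}
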
286 292
287 // Pad code to ensure that the last piece of deferred code have 293 // Pad code to ensure that the last piece of deferred code have
288 // room for lazy bailout. 294 // room for lazy bailout.
289 while ((masm()->pc_offset() - LastSafepointEnd()) 295 while ((masm()->pc_offset() - LastSafepointEnd())
290 < Deoptimizer::patch_size()) { 296 < Deoptimizer::patch_size()) {
291 int padding = masm()->pc_offset() - LastSafepointEnd(); 297 int padding = masm()->pc_offset() - LastSafepointEnd();
292 if (padding > 9) { 298 if (padding > 9) {
(...skipping 367 matching lines...)
660 } 666 }
661 667
662 668
663 void LCodeGen::RecordSafepoint( 669 void LCodeGen::RecordSafepoint(
664 LPointerMap* pointers, 670 LPointerMap* pointers,
665 Safepoint::Kind kind, 671 Safepoint::Kind kind,
666 int arguments, 672 int arguments,
667 int deoptimization_index) { 673 int deoptimization_index) {
668 ASSERT(kind == expected_safepoint_kind_); 674 ASSERT(kind == expected_safepoint_kind_);
669 675
670 const ZoneList<LOperand*>* operands = pointers->operands(); 676 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
671 677
672 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), 678 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
673 kind, arguments, deoptimization_index); 679 kind, arguments, deoptimization_index);
674 for (int i = 0; i < operands->length(); i++) { 680 for (int i = 0; i < operands->length(); i++) {
675 LOperand* pointer = operands->at(i); 681 LOperand* pointer = operands->at(i);
676 if (pointer->IsStackSlot()) { 682 if (pointer->IsStackSlot()) {
677 safepoint.DefinePointerSlot(pointer->index()); 683 safepoint.DefinePointerSlot(pointer->index());
678 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { 684 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
679 safepoint.DefinePointerRegister(ToRegister(pointer)); 685 safepoint.DefinePointerRegister(ToRegister(pointer));
680 } 686 }
(...skipping 889 matching lines...)
1570 void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) { 1576 void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
1571 Register left = ToRegister(instr->InputAt(0)); 1577 Register left = ToRegister(instr->InputAt(0));
1572 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1578 int true_block = chunk_->LookupDestination(instr->true_block_id());
1573 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1579 int false_block = chunk_->LookupDestination(instr->false_block_id());
1574 1580
1575 __ cmpq(left, Immediate(instr->hydrogen()->right())); 1581 __ cmpq(left, Immediate(instr->hydrogen()->right()));
1576 EmitBranch(true_block, false_block, equal); 1582 EmitBranch(true_block, false_block, equal);
1577 } 1583 }
1578 1584
1579 1585
1580 void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) { 1586 void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
1581 Register reg = ToRegister(instr->InputAt(0)); 1587 Register reg = ToRegister(instr->InputAt(0));
1582
1583 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1588 int false_block = chunk_->LookupDestination(instr->false_block_id());
1584 1589
1590 // If the expression is known to be untagged or a smi, then it's definitely
1591 // not null, and it can't be a an undetectable object.
1585 if (instr->hydrogen()->representation().IsSpecialization() || 1592 if (instr->hydrogen()->representation().IsSpecialization() ||
1586 instr->hydrogen()->type().IsSmi()) { 1593 instr->hydrogen()->type().IsSmi()) {
1587 // If the expression is known to untagged or smi, then it's definitely
1588 // not null, and it can't be a an undetectable object.
1589 // Jump directly to the false block.
1590 EmitGoto(false_block); 1594 EmitGoto(false_block);
1591 return; 1595 return;
1592 } 1596 }
1593 1597
1594 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1598 int true_block = chunk_->LookupDestination(instr->true_block_id());
1595 1599 Heap::RootListIndex nil_value = instr->nil() == kNullValue ?
1596 __ CompareRoot(reg, Heap::kNullValueRootIndex); 1600 Heap::kNullValueRootIndex :
1597 if (instr->is_strict()) { 1601 Heap::kUndefinedValueRootIndex;
1602 __ CompareRoot(reg, nil_value);
1603 if (instr->kind() == kStrictEquality) {
1598 EmitBranch(true_block, false_block, equal); 1604 EmitBranch(true_block, false_block, equal);
1599 } else { 1605 } else {
1606 Heap::RootListIndex other_nil_value = instr->nil() == kNullValue ?
1607 Heap::kUndefinedValueRootIndex :
1608 Heap::kNullValueRootIndex;
1600 Label* true_label = chunk_->GetAssemblyLabel(true_block); 1609 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1601 Label* false_label = chunk_->GetAssemblyLabel(false_block); 1610 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1602 __ j(equal, true_label); 1611 __ j(equal, true_label);
1603 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); 1612 __ CompareRoot(reg, other_nil_value);
1604 __ j(equal, true_label); 1613 __ j(equal, true_label);
1605 __ JumpIfSmi(reg, false_label); 1614 __ JumpIfSmi(reg, false_label);
1606 // Check for undetectable objects by looking in the bit field in 1615 // Check for undetectable objects by looking in the bit field in
1607 // the map. The object has already been smi checked. 1616 // the map. The object has already been smi checked.
1608 Register scratch = ToRegister(instr->TempAt(0)); 1617 Register scratch = ToRegister(instr->TempAt(0));
1609 __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset)); 1618 __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1610 __ testb(FieldOperand(scratch, Map::kBitFieldOffset), 1619 __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
1611 Immediate(1 << Map::kIsUndetectable)); 1620 Immediate(1 << Map::kIsUndetectable));
1612 EmitBranch(true_block, false_block, not_zero); 1621 EmitBranch(true_block, false_block, not_zero);
1613 } 1622 }
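
The rewritten DoIsNilAndBranch generalizes the old null-only test: strict equality matches only the requested nil value (null or undefined), while the non-strict form also accepts the other nil value and undetectable objects. A small C++ model of that decision logic, with an assumed value representation purely for illustration:

#include <cassert>

// Assumed value model, purely for illustration of the branch logic above.
enum class Kind { kSmi, kNull, kUndefined, kObject };
struct Value {
  Kind kind;
  bool undetectable;   // e.g. document.all-style objects
};

// Strict equality matches only the requested nil value; the non-strict form
// also matches the other nil value and undetectable heap objects.
bool IsNil(const Value& v, Kind nil, bool strict) {
  if (v.kind == nil) return true;
  if (strict) return false;
  Kind other_nil = (nil == Kind::kNull) ? Kind::kUndefined : Kind::kNull;
  if (v.kind == other_nil) return true;
  if (v.kind == Kind::kSmi) return false;   // smis are never nil
  return v.undetectable;
}

int main() {
  assert(IsNil({Kind::kUndefined, false}, Kind::kNull, /*strict=*/false));
  assert(!IsNil({Kind::kUndefined, false}, Kind::kNull, /*strict=*/true));
  assert(IsNil({Kind::kObject, true}, Kind::kNull, /*strict=*/false));
  assert(!IsNil({Kind::kObject, false}, Kind::kNull, /*strict=*/false));
}
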
(...skipping 131 matching lines...)
1745 EmitBranch(true_block, false_block, equal); 1754 EmitBranch(true_block, false_block, equal);
1746 } 1755 }
1747 1756
1748 1757
1749 // Branches to a label or falls through with the answer in the z flag. 1758 // Branches to a label or falls through with the answer in the z flag.
1750 // Trashes the temp register and possibly input (if it and temp are aliased). 1759 // Trashes the temp register and possibly input (if it and temp are aliased).
1751 void LCodeGen::EmitClassOfTest(Label* is_true, 1760 void LCodeGen::EmitClassOfTest(Label* is_true,
1752 Label* is_false, 1761 Label* is_false,
1753 Handle<String> class_name, 1762 Handle<String> class_name,
1754 Register input, 1763 Register input,
1755 Register temp) { 1764 Register temp,
1765 Register scratch) {
1756 __ JumpIfSmi(input, is_false); 1766 __ JumpIfSmi(input, is_false);
1757 __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
1758 __ j(below, is_false);
1759 1767
1760 // Map is now in temp.
1761 // Functions have class 'Function'.
1762 __ CmpInstanceType(temp, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
1763 if (class_name->IsEqualTo(CStrVector("Function"))) { 1768 if (class_name->IsEqualTo(CStrVector("Function"))) {
1764 __ j(above_equal, is_true); 1769 // Assuming the following assertions, we can use the same compares to test
1770 // for both being a function type and being in the object type range.
1771 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
1772 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
1773 FIRST_SPEC_OBJECT_TYPE + 1);
1774 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
1775 LAST_SPEC_OBJECT_TYPE - 1);
1776 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1777 __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
1778 __ j(below, is_false);
1779 __ j(equal, is_true);
1780 __ CmpInstanceType(temp, LAST_SPEC_OBJECT_TYPE);
1781 __ j(equal, is_true);
1765 } else { 1782 } else {
1766 __ j(above_equal, is_false); 1783 // Faster code path to avoid two compares: subtract lower bound from the
1784 // actual type and do a signed compare with the width of the type range.
1785 __ movq(temp, FieldOperand(input, HeapObject::kMapOffset));
1786 __ movq(scratch, FieldOperand(temp, Map::kInstanceTypeOffset));
1787 __ subb(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
1788 __ cmpb(scratch,
1789 Immediate(static_cast<int8_t>(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
1790 FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)));
1791 __ j(above, is_false);
1767 } 1792 }
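
The "faster code path" in the else branch is the classic single-compare range check: subtract the lower bound, then compare the result as an unsigned byte against the width of the range, so values below the lower bound wrap around and fail the same comparison. A standalone sketch of the trick (the bounds in the test are arbitrary examples):

#include <cassert>
#include <cstdint>

// Single-compare range check: t is in [lo, hi] exactly when the unsigned
// difference (t - lo) is no larger than (hi - lo), because values below lo
// wrap around to large unsigned numbers and fail the same comparison.
bool InRange(std::uint8_t t, std::uint8_t lo, std::uint8_t hi) {
  return static_cast<std::uint8_t>(t - lo) <=
         static_cast<std::uint8_t>(hi - lo);
}

int main() {
  const std::uint8_t lo = 0x40, hi = 0x4f;   // arbitrary example bounds
  for (int t = 0; t < 256; ++t) {
    bool expected = (t >= lo && t <= hi);
    assert(InRange(static_cast<std::uint8_t>(t), lo, hi) == expected);
  }
}
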
1768 1793
1794 // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range.
1769 // Check if the constructor in the map is a function. 1795 // Check if the constructor in the map is a function.
1770 __ movq(temp, FieldOperand(temp, Map::kConstructorOffset)); 1796 __ movq(temp, FieldOperand(temp, Map::kConstructorOffset));
1771 1797
1772 // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last type and
1773 // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
1774 // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
1775 STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
1776 STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
1777 LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
1778
1779 // Objects with a non-function constructor have class 'Object'. 1798 // Objects with a non-function constructor have class 'Object'.
1780 __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister); 1799 __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister);
1781 if (class_name->IsEqualTo(CStrVector("Object"))) { 1800 if (class_name->IsEqualTo(CStrVector("Object"))) {
1782 __ j(not_equal, is_true); 1801 __ j(not_equal, is_true);
1783 } else { 1802 } else {
1784 __ j(not_equal, is_false); 1803 __ j(not_equal, is_false);
1785 } 1804 }
1786 1805
1787 // temp now contains the constructor function. Grab the 1806 // temp now contains the constructor function. Grab the
1788 // instance class name from there. 1807 // instance class name from there.
1789 __ movq(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset)); 1808 __ movq(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1790 __ movq(temp, FieldOperand(temp, 1809 __ movq(temp, FieldOperand(temp,
1791 SharedFunctionInfo::kInstanceClassNameOffset)); 1810 SharedFunctionInfo::kInstanceClassNameOffset));
1792 // The class name we are testing against is a symbol because it's a literal. 1811 // The class name we are testing against is a symbol because it's a literal.
1793 // The name in the constructor is a symbol because of the way the context is 1812 // The name in the constructor is a symbol because of the way the context is
1794 // booted. This routine isn't expected to work for random API-created 1813 // booted. This routine isn't expected to work for random API-created
1795 // classes and it doesn't have to because you can't access it with natives 1814 // classes and it doesn't have to because you can't access it with natives
1796 // syntax. Since both sides are symbols it is sufficient to use an identity 1815 // syntax. Since both sides are symbols it is sufficient to use an identity
1797 // comparison. 1816 // comparison.
1798 ASSERT(class_name->IsSymbol()); 1817 ASSERT(class_name->IsSymbol());
1799 __ Cmp(temp, class_name); 1818 __ Cmp(temp, class_name);
1800 // End with the answer in the z flag. 1819 // End with the answer in the z flag.
1801 } 1820 }
1802 1821
1803 1822
1804 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) { 1823 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1805 Register input = ToRegister(instr->InputAt(0)); 1824 Register input = ToRegister(instr->InputAt(0));
1806 Register temp = ToRegister(instr->TempAt(0)); 1825 Register temp = ToRegister(instr->TempAt(0));
1826 Register temp2 = ToRegister(instr->TempAt(1));
1807 Handle<String> class_name = instr->hydrogen()->class_name(); 1827 Handle<String> class_name = instr->hydrogen()->class_name();
1808 1828
1809 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1829 int true_block = chunk_->LookupDestination(instr->true_block_id());
1810 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1830 int false_block = chunk_->LookupDestination(instr->false_block_id());
1811 1831
1812 Label* true_label = chunk_->GetAssemblyLabel(true_block); 1832 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1813 Label* false_label = chunk_->GetAssemblyLabel(false_block); 1833 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1814 1834
1815 EmitClassOfTest(true_label, false_label, class_name, input, temp); 1835 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1816 1836
1817 EmitBranch(true_block, false_block, equal); 1837 EmitBranch(true_block, false_block, equal);
1818 } 1838 }
1819 1839
1820 1840
1821 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { 1841 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
1822 Register reg = ToRegister(instr->InputAt(0)); 1842 Register reg = ToRegister(instr->InputAt(0));
1823 int true_block = instr->true_block_id(); 1843 int true_block = instr->true_block_id();
1824 int false_block = instr->false_block_id(); 1844 int false_block = instr->false_block_id();
1825 1845
(...skipping 20 matching lines...)
1846 1866
1847 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { 1867 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1848 class DeferredInstanceOfKnownGlobal: public LDeferredCode { 1868 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1849 public: 1869 public:
1850 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, 1870 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1851 LInstanceOfKnownGlobal* instr) 1871 LInstanceOfKnownGlobal* instr)
1852 : LDeferredCode(codegen), instr_(instr) { } 1872 : LDeferredCode(codegen), instr_(instr) { }
1853 virtual void Generate() { 1873 virtual void Generate() {
1854 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); 1874 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
1855 } 1875 }
1856 1876 virtual LInstruction* instr() { return instr_; }
1857 Label* map_check() { return &map_check_; } 1877 Label* map_check() { return &map_check_; }
1858
1859 private: 1878 private:
1860 LInstanceOfKnownGlobal* instr_; 1879 LInstanceOfKnownGlobal* instr_;
1861 Label map_check_; 1880 Label map_check_;
1862 }; 1881 };
1863 1882
1864 1883
1865 DeferredInstanceOfKnownGlobal* deferred; 1884 DeferredInstanceOfKnownGlobal* deferred;
1866 deferred = new DeferredInstanceOfKnownGlobal(this, instr); 1885 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
1867 1886
1868 Label done, false_result; 1887 Label done, false_result;
(...skipping 120 matching lines...)
1989 2008
1990 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { 2009 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
1991 Register result = ToRegister(instr->result()); 2010 Register result = ToRegister(instr->result());
1992 if (result.is(rax)) { 2011 if (result.is(rax)) {
1993 __ load_rax(instr->hydrogen()->cell().location(), 2012 __ load_rax(instr->hydrogen()->cell().location(),
1994 RelocInfo::GLOBAL_PROPERTY_CELL); 2013 RelocInfo::GLOBAL_PROPERTY_CELL);
1995 } else { 2014 } else {
1996 __ movq(result, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL); 2015 __ movq(result, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL);
1997 __ movq(result, Operand(result, 0)); 2016 __ movq(result, Operand(result, 0));
1998 } 2017 }
1999 if (instr->hydrogen()->check_hole_value()) { 2018 if (instr->hydrogen()->RequiresHoleCheck()) {
2000 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); 2019 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2001 DeoptimizeIf(equal, instr->environment()); 2020 DeoptimizeIf(equal, instr->environment());
2002 } 2021 }
2003 } 2022 }
2004 2023
2005 2024
2006 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { 2025 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2007 ASSERT(ToRegister(instr->global_object()).is(rax)); 2026 ASSERT(ToRegister(instr->global_object()).is(rax));
2008 ASSERT(ToRegister(instr->result()).is(rax)); 2027 ASSERT(ToRegister(instr->result()).is(rax));
2009 2028
2010 __ Move(rcx, instr->name()); 2029 __ Move(rcx, instr->name());
2011 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : 2030 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
2012 RelocInfo::CODE_TARGET_CONTEXT; 2031 RelocInfo::CODE_TARGET_CONTEXT;
2013 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2032 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2014 CallCode(ic, mode, instr); 2033 CallCode(ic, mode, instr);
2015 } 2034 }
2016 2035
2017 2036
2018 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { 2037 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
2038 Register object = ToRegister(instr->TempAt(0));
2039 Register address = ToRegister(instr->TempAt(1));
2019 Register value = ToRegister(instr->InputAt(0)); 2040 Register value = ToRegister(instr->InputAt(0));
2020 Register temp = ToRegister(instr->TempAt(0)); 2041 ASSERT(!value.is(object));
2021 ASSERT(!value.is(temp)); 2042 Handle<JSGlobalPropertyCell> cell_handle(instr->hydrogen()->cell());
2022 bool check_hole = instr->hydrogen()->check_hole_value(); 2043
2023 if (!check_hole && value.is(rax)) { 2044 __ movq(address, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL);
2024 __ store_rax(instr->hydrogen()->cell().location(), 2045
2025 RelocInfo::GLOBAL_PROPERTY_CELL);
2026 return;
2027 }
2028 // If the cell we are storing to contains the hole it could have 2046 // If the cell we are storing to contains the hole it could have
2029 // been deleted from the property dictionary. In that case, we need 2047 // been deleted from the property dictionary. In that case, we need
2030 // to update the property details in the property dictionary to mark 2048 // to update the property details in the property dictionary to mark
2031 // it as no longer deleted. We deoptimize in that case. 2049 // it as no longer deleted. We deoptimize in that case.
2032 __ movq(temp, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL); 2050 if (instr->hydrogen()->RequiresHoleCheck()) {
2033 if (check_hole) { 2051 __ CompareRoot(Operand(address, 0), Heap::kTheHoleValueRootIndex);
2034 __ CompareRoot(Operand(temp, 0), Heap::kTheHoleValueRootIndex);
2035 DeoptimizeIf(equal, instr->environment()); 2052 DeoptimizeIf(equal, instr->environment());
2036 } 2053 }
2037 __ movq(Operand(temp, 0), value); 2054
2055 // Store the value.
2056 __ movq(Operand(address, 0), value);
2057
2058 Label smi_store;
2059 __ JumpIfSmi(value, &smi_store, Label::kNear);
2060
2061 int offset = JSGlobalPropertyCell::kValueOffset - kHeapObjectTag;
2062 __ lea(object, Operand(address, -offset));
2063 // Cells are always in the remembered set.
2064 __ RecordWrite(object,
2065 address,
2066 value,
2067 kSaveFPRegs,
2068 OMIT_REMEMBERED_SET,
2069 OMIT_SMI_CHECK);
2070 __ bind(&smi_store);
2038 } 2071 }
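
The new DoStoreGlobalCell always performs the store, then skips the write barrier for smis via JumpIfSmi, since a smi carries no heap pointer. The sketch below illustrates the tagging assumption behind that fast path; the tag values follow the usual V8 convention (low bit 0 for smis, 1 for heap objects) but are assumptions of this sketch:

#include <cassert>
#include <cstdint>

// Assumed tagging scheme: smis carry a 0 in the low bit, heap object pointers
// a 1, so a store of a smi never needs a write barrier.
constexpr std::uintptr_t kTagMask = 1;
constexpr std::uintptr_t kSmiTag = 0;
constexpr std::uintptr_t kHeapObjectTag = 1;

bool IsSmi(std::uintptr_t tagged) { return (tagged & kTagMask) == kSmiTag; }

// The store always happens; the barrier (modelled as a counter) is only
// recorded for heap object values, mirroring the JumpIfSmi fast path above.
void StoreCell(std::uintptr_t* cell, std::uintptr_t value, int* barriers) {
  *cell = value;
  if (IsSmi(value)) return;
  ++*barriers;   // stand-in for RecordWrite(...)
}

int main() {
  std::uintptr_t cell = 0;
  int barriers = 0;
  StoreCell(&cell, (42u << 1) | kSmiTag, &barriers);       // smi store
  StoreCell(&cell, 0x1000 | kHeapObjectTag, &barriers);    // heap pointer store
  assert(barriers == 1);
}
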
2039 2072
2040 2073
2041 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { 2074 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2042 ASSERT(ToRegister(instr->global_object()).is(rdx)); 2075 ASSERT(ToRegister(instr->global_object()).is(rdx));
2043 ASSERT(ToRegister(instr->value()).is(rax)); 2076 ASSERT(ToRegister(instr->value()).is(rax));
2044 2077
2045 __ Move(rcx, instr->name()); 2078 __ Move(rcx, instr->name());
2046 Handle<Code> ic = instr->strict_mode() 2079 Handle<Code> ic = instr->strict_mode()
2047 ? isolate()->builtins()->StoreIC_Initialize_Strict() 2080 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2048 : isolate()->builtins()->StoreIC_Initialize(); 2081 : isolate()->builtins()->StoreIC_Initialize();
2049 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); 2082 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2050 } 2083 }
2051 2084
2052 2085
2053 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { 2086 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2054 Register context = ToRegister(instr->context()); 2087 Register context = ToRegister(instr->context());
2055 Register result = ToRegister(instr->result()); 2088 Register result = ToRegister(instr->result());
2056 __ movq(result, ContextOperand(context, instr->slot_index())); 2089 __ movq(result, ContextOperand(context, instr->slot_index()));
2057 } 2090 }
2058 2091
2059 2092
2060 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { 2093 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2061 Register context = ToRegister(instr->context()); 2094 Register context = ToRegister(instr->context());
2062 Register value = ToRegister(instr->value()); 2095 Register value = ToRegister(instr->value());
2063 __ movq(ContextOperand(context, instr->slot_index()), value); 2096 __ movq(ContextOperand(context, instr->slot_index()), value);
2064 if (instr->needs_write_barrier()) { 2097 if (instr->needs_write_barrier()) {
2065 int offset = Context::SlotOffset(instr->slot_index()); 2098 int offset = Context::SlotOffset(instr->slot_index());
2066 Register scratch = ToRegister(instr->TempAt(0)); 2099 Register scratch = ToRegister(instr->TempAt(0));
2067 __ RecordWrite(context, offset, value, scratch); 2100 __ RecordWriteContextSlot(context, offset, value, scratch, kSaveFPRegs);
2068 } 2101 }
2069 } 2102 }
2070 2103
2071 2104
2072 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { 2105 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
2073 Register object = ToRegister(instr->InputAt(0)); 2106 Register object = ToRegister(instr->InputAt(0));
2074 Register result = ToRegister(instr->result()); 2107 Register result = ToRegister(instr->result());
2075 if (instr->hydrogen()->is_in_object()) { 2108 if (instr->hydrogen()->is_in_object()) {
2076 __ movq(result, FieldOperand(object, instr->hydrogen()->offset())); 2109 __ movq(result, FieldOperand(object, instr->hydrogen()->offset()));
2077 } else { 2110 } else {
(...skipping 198 matching lines...)
2276 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); 2309 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2277 DeoptimizeIf(equal, instr->environment()); 2310 DeoptimizeIf(equal, instr->environment());
2278 } 2311 }
2279 } 2312 }
2280 2313
2281 2314
2282 void LCodeGen::DoLoadKeyedFastDoubleElement( 2315 void LCodeGen::DoLoadKeyedFastDoubleElement(
2283 LLoadKeyedFastDoubleElement* instr) { 2316 LLoadKeyedFastDoubleElement* instr) {
2284 XMMRegister result(ToDoubleRegister(instr->result())); 2317 XMMRegister result(ToDoubleRegister(instr->result()));
2285 2318
2286 if (instr->hydrogen()->RequiresHoleCheck()) { 2319 int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
2287 int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag + 2320 sizeof(kHoleNanLower32);
2288 sizeof(kHoleNanLower32); 2321 Operand hole_check_operand = BuildFastArrayOperand(
2289 Operand hole_check_operand = BuildFastArrayOperand( 2322 instr->elements(),
2290 instr->elements(), 2323 instr->key(),
2291 instr->key(), 2324 FAST_DOUBLE_ELEMENTS,
2292 FAST_DOUBLE_ELEMENTS, 2325 offset);
2293 offset); 2326 __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32));
2294 __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32)); 2327 DeoptimizeIf(equal, instr->environment());
2295 DeoptimizeIf(equal, instr->environment());
2296 }
2297 2328
2298 Operand double_load_operand = BuildFastArrayOperand( 2329 Operand double_load_operand = BuildFastArrayOperand(
2299 instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS, 2330 instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
2300 FixedDoubleArray::kHeaderSize - kHeapObjectTag); 2331 FixedDoubleArray::kHeaderSize - kHeapObjectTag);
2301 __ movsd(result, double_load_operand); 2332 __ movsd(result, double_load_operand);
2302 } 2333 }
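
The hole check that is now performed unconditionally relies on "the hole" being one specific NaN bit pattern in a fast double array, so inspecting only the upper 32 bits of the stored double is enough to detect it. A small sketch of that encoding; the exact sentinel bit pattern used here is an assumption for illustration, not necessarily V8's constant:

#include <cassert>
#include <cstdint>
#include <cstring>

// Assumed sentinel bit pattern; the real kHoleNan constants may differ.
constexpr std::uint64_t kHoleNanBits = 0x7FF7FFFFFFFFFFFFull;

// "The hole" is one specific NaN, so comparing only the upper 32 bits of the
// stored double against the sentinel's upper 32 bits identifies it.
bool IsTheHole(double d) {
  std::uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  return static_cast<std::uint32_t>(bits >> 32) ==
         static_cast<std::uint32_t>(kHoleNanBits >> 32);
}

int main() {
  double hole;
  std::memcpy(&hole, &kHoleNanBits, sizeof(hole));
  assert(IsTheHole(hole));
  assert(!IsTheHole(1.5));
  assert(!IsTheHole(0.0));
}
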
2303 2334
2304 2335
2305 Operand LCodeGen::BuildFastArrayOperand( 2336 Operand LCodeGen::BuildFastArrayOperand(
2306 LOperand* elements_pointer, 2337 LOperand* elements_pointer,
(...skipping 51 matching lines...)
2358 __ movl(result, operand); 2389 __ movl(result, operand);
2359 __ testl(result, result); 2390 __ testl(result, result);
2360 // TODO(danno): we could be more clever here, perhaps having a special 2391 // TODO(danno): we could be more clever here, perhaps having a special
2361 // version of the stub that detects if the overflow case actually 2392 // version of the stub that detects if the overflow case actually
2362 // happens, and generate code that returns a double rather than int. 2393 // happens, and generate code that returns a double rather than int.
2363 DeoptimizeIf(negative, instr->environment()); 2394 DeoptimizeIf(negative, instr->environment());
2364 break; 2395 break;
2365 case EXTERNAL_FLOAT_ELEMENTS: 2396 case EXTERNAL_FLOAT_ELEMENTS:
2366 case EXTERNAL_DOUBLE_ELEMENTS: 2397 case EXTERNAL_DOUBLE_ELEMENTS:
2367 case FAST_ELEMENTS: 2398 case FAST_ELEMENTS:
2399 case FAST_SMI_ONLY_ELEMENTS:
2368 case FAST_DOUBLE_ELEMENTS: 2400 case FAST_DOUBLE_ELEMENTS:
2369 case DICTIONARY_ELEMENTS: 2401 case DICTIONARY_ELEMENTS:
2370 case NON_STRICT_ARGUMENTS_ELEMENTS: 2402 case NON_STRICT_ARGUMENTS_ELEMENTS:
2371 UNREACHABLE(); 2403 UNREACHABLE();
2372 break; 2404 break;
2373 } 2405 }
2374 } 2406 }
2375 } 2407 }
2376 2408
2377 2409
(...skipping 296 matching lines...)
2674 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) { 2706 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2675 // Class for deferred case. 2707 // Class for deferred case.
2676 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode { 2708 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
2677 public: 2709 public:
2678 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, 2710 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2679 LUnaryMathOperation* instr) 2711 LUnaryMathOperation* instr)
2680 : LDeferredCode(codegen), instr_(instr) { } 2712 : LDeferredCode(codegen), instr_(instr) { }
2681 virtual void Generate() { 2713 virtual void Generate() {
2682 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_); 2714 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2683 } 2715 }
2716 virtual LInstruction* instr() { return instr_; }
2684 private: 2717 private:
2685 LUnaryMathOperation* instr_; 2718 LUnaryMathOperation* instr_;
2686 }; 2719 };
2687 2720
2688 ASSERT(instr->InputAt(0)->Equals(instr->result())); 2721 ASSERT(instr->InputAt(0)->Equals(instr->result()));
2689 Representation r = instr->hydrogen()->value()->representation(); 2722 Representation r = instr->hydrogen()->value()->representation();
2690 2723
2691 if (r.IsDouble()) { 2724 if (r.IsDouble()) {
2692 XMMRegister scratch = xmm0; 2725 XMMRegister scratch = xmm0;
2693 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); 2726 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
(...skipping 276 matching lines...)
2970 __ Move(rcx, instr->name()); 3003 __ Move(rcx, instr->name());
2971 CallCode(ic, mode, instr); 3004 CallCode(ic, mode, instr);
2972 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 3005 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2973 } 3006 }
2974 3007
2975 3008
2976 void LCodeGen::DoCallFunction(LCallFunction* instr) { 3009 void LCodeGen::DoCallFunction(LCallFunction* instr) {
2977 ASSERT(ToRegister(instr->result()).is(rax)); 3010 ASSERT(ToRegister(instr->result()).is(rax));
2978 3011
2979 int arity = instr->arity(); 3012 int arity = instr->arity();
2980 CallFunctionStub stub(arity, RECEIVER_MIGHT_BE_IMPLICIT); 3013 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
2981 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3014 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2982 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 3015 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2983 __ Drop(1); 3016 __ Drop(1);
2984 } 3017 }
2985 3018
2986 3019
2987 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 3020 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
2988 ASSERT(ToRegister(instr->result()).is(rax)); 3021 ASSERT(ToRegister(instr->result()).is(rax));
2989 int arity = instr->arity(); 3022 int arity = instr->arity();
2990 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; 3023 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
(...skipping 35 matching lines...)
3026 if (!instr->transition().is_null()) { 3059 if (!instr->transition().is_null()) {
3027 __ Move(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); 3060 __ Move(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
3028 } 3061 }
3029 3062
3030 // Do the store. 3063 // Do the store.
3031 if (instr->is_in_object()) { 3064 if (instr->is_in_object()) {
3032 __ movq(FieldOperand(object, offset), value); 3065 __ movq(FieldOperand(object, offset), value);
3033 if (instr->needs_write_barrier()) { 3066 if (instr->needs_write_barrier()) {
3034 Register temp = ToRegister(instr->TempAt(0)); 3067 Register temp = ToRegister(instr->TempAt(0));
3035 // Update the write barrier for the object for in-object properties. 3068 // Update the write barrier for the object for in-object properties.
3036 __ RecordWrite(object, offset, value, temp); 3069 __ RecordWriteField(object, offset, value, temp, kSaveFPRegs);
3037 } 3070 }
3038 } else { 3071 } else {
3039 Register temp = ToRegister(instr->TempAt(0)); 3072 Register temp = ToRegister(instr->TempAt(0));
3040 __ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset)); 3073 __ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset));
3041 __ movq(FieldOperand(temp, offset), value); 3074 __ movq(FieldOperand(temp, offset), value);
3042 if (instr->needs_write_barrier()) { 3075 if (instr->needs_write_barrier()) {
3043 // Update the write barrier for the properties array. 3076 // Update the write barrier for the properties array.
3044 // object is used as a scratch register. 3077 // object is used as a scratch register.
3045 __ RecordWrite(temp, offset, value, object); 3078 __ RecordWriteField(temp, offset, value, object, kSaveFPRegs);
3046 } 3079 }
3047 } 3080 }
3048 } 3081 }
3049 3082
3050 3083
3051 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 3084 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3052 ASSERT(ToRegister(instr->object()).is(rdx)); 3085 ASSERT(ToRegister(instr->object()).is(rdx));
3053 ASSERT(ToRegister(instr->value()).is(rax)); 3086 ASSERT(ToRegister(instr->value()).is(rax));
3054 3087
3055 __ Move(rcx, instr->hydrogen()->name()); 3088 __ Move(rcx, instr->hydrogen()->name());
(...skipping 27 matching lines...)
3083 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 3116 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3084 __ movw(operand, value); 3117 __ movw(operand, value);
3085 break; 3118 break;
3086 case EXTERNAL_INT_ELEMENTS: 3119 case EXTERNAL_INT_ELEMENTS:
3087 case EXTERNAL_UNSIGNED_INT_ELEMENTS: 3120 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3088 __ movl(operand, value); 3121 __ movl(operand, value);
3089 break; 3122 break;
3090 case EXTERNAL_FLOAT_ELEMENTS: 3123 case EXTERNAL_FLOAT_ELEMENTS:
3091 case EXTERNAL_DOUBLE_ELEMENTS: 3124 case EXTERNAL_DOUBLE_ELEMENTS:
3092 case FAST_ELEMENTS: 3125 case FAST_ELEMENTS:
3126 case FAST_SMI_ONLY_ELEMENTS:
3093 case FAST_DOUBLE_ELEMENTS: 3127 case FAST_DOUBLE_ELEMENTS:
3094 case DICTIONARY_ELEMENTS: 3128 case DICTIONARY_ELEMENTS:
3095 case NON_STRICT_ARGUMENTS_ELEMENTS: 3129 case NON_STRICT_ARGUMENTS_ELEMENTS:
3096 UNREACHABLE(); 3130 UNREACHABLE();
3097 break; 3131 break;
3098 } 3132 }
3099 } 3133 }
3100 } 3134 }
3101 3135
3102 3136
(...skipping 15 matching lines...)
3118 } 3152 }
3119 DeoptimizeIf(below_equal, instr->environment()); 3153 DeoptimizeIf(below_equal, instr->environment());
3120 } 3154 }
3121 3155
3122 3156
3123 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) { 3157 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3124 Register value = ToRegister(instr->value()); 3158 Register value = ToRegister(instr->value());
3125 Register elements = ToRegister(instr->object()); 3159 Register elements = ToRegister(instr->object());
3126 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg; 3160 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
3127 3161
3162 // This instruction cannot handle the FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
3163 // conversion, so it deopts in that case.
3164 if (instr->hydrogen()->ValueNeedsSmiCheck()) {
3165 Condition cc = masm()->CheckSmi(value);
3166 DeoptimizeIf(NegateCondition(cc), instr->environment());
3167 }
3168
3128 // Do the store. 3169 // Do the store.
3129 if (instr->key()->IsConstantOperand()) { 3170 if (instr->key()->IsConstantOperand()) {
3130 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); 3171 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
3131 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); 3172 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
3132 int offset = 3173 int offset =
3133 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize; 3174 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
3134 __ movq(FieldOperand(elements, offset), value); 3175 __ movq(FieldOperand(elements, offset), value);
3135 } else { 3176 } else {
3136 __ movq(FieldOperand(elements, 3177 __ movq(FieldOperand(elements,
3137 key, 3178 key,
3138 times_pointer_size, 3179 times_pointer_size,
3139 FixedArray::kHeaderSize), 3180 FixedArray::kHeaderSize),
3140 value); 3181 value);
3141 } 3182 }
3142 3183
3143 if (instr->hydrogen()->NeedsWriteBarrier()) { 3184 if (instr->hydrogen()->NeedsWriteBarrier()) {
3144 // Compute address of modified element and store it into key register. 3185 // Compute address of modified element and store it into key register.
3145 __ lea(key, FieldOperand(elements, 3186 __ lea(key, FieldOperand(elements,
3146 key, 3187 key,
3147 times_pointer_size, 3188 times_pointer_size,
3148 FixedArray::kHeaderSize)); 3189 FixedArray::kHeaderSize));
3149 __ RecordWrite(elements, key, value); 3190 __ RecordWrite(elements, key, value, kSaveFPRegs);
3150 } 3191 }
3151 } 3192 }
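
The added ValueNeedsSmiCheck/deopt path reflects that this instruction cannot transition a FAST_SMI_ONLY_ELEMENTS backing store to FAST_ELEMENTS, so storing a non-smi bails out to generic code instead. A simplified model of that decision; the tagging scheme and type names here are illustrative assumptions:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative model: a smi-only backing store can take smi stores directly,
// but a non-smi value would need an elements-kind transition that this fast
// path cannot perform, so it signals a bailout instead.
enum class ElementsKind { kFastSmiOnly, kFast };

bool IsSmi(std::uintptr_t tagged) { return (tagged & 1) == 0; }

// Returns false to mean "deoptimize": redo the store in generic code that can
// transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
bool StoreKeyedFast(std::vector<std::uintptr_t>* elements, ElementsKind kind,
                    std::size_t key, std::uintptr_t value) {
  if (kind == ElementsKind::kFastSmiOnly && !IsSmi(value)) return false;
  (*elements)[key] = value;
  return true;
}

int main() {
  std::vector<std::uintptr_t> backing(4, 0);
  assert(StoreKeyedFast(&backing, ElementsKind::kFastSmiOnly, 0, 42u << 1));
  assert(!StoreKeyedFast(&backing, ElementsKind::kFastSmiOnly, 1, 0x1001));
  assert(StoreKeyedFast(&backing, ElementsKind::kFast, 1, 0x1001));
}
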
3152 3193
3153 3194
3154 void LCodeGen::DoStoreKeyedFastDoubleElement( 3195 void LCodeGen::DoStoreKeyedFastDoubleElement(
3155 LStoreKeyedFastDoubleElement* instr) { 3196 LStoreKeyedFastDoubleElement* instr) {
3156 XMMRegister value = ToDoubleRegister(instr->value()); 3197 XMMRegister value = ToDoubleRegister(instr->value());
3157 Label have_value; 3198 Label have_value;
3158 3199
3159 __ ucomisd(value, value); 3200 __ ucomisd(value, value);
(...skipping 29 matching lines...)
3189 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3230 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3190 } 3231 }
3191 3232
3192 3233
3193 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { 3234 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3194 class DeferredStringCharCodeAt: public LDeferredCode { 3235 class DeferredStringCharCodeAt: public LDeferredCode {
3195 public: 3236 public:
3196 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) 3237 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3197 : LDeferredCode(codegen), instr_(instr) { } 3238 : LDeferredCode(codegen), instr_(instr) { }
3198 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } 3239 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3240 virtual LInstruction* instr() { return instr_; }
3199 private: 3241 private:
3200 LStringCharCodeAt* instr_; 3242 LStringCharCodeAt* instr_;
3201 }; 3243 };
3202 3244
3203 Register string = ToRegister(instr->string()); 3245 Register string = ToRegister(instr->string());
3204 Register index = ToRegister(instr->index()); 3246 Register index = ToRegister(instr->index());
3205 Register result = ToRegister(instr->result()); 3247 Register result = ToRegister(instr->result());
3206 3248
3207 DeferredStringCharCodeAt* deferred = 3249 DeferredStringCharCodeAt* deferred =
3208 new DeferredStringCharCodeAt(this, instr); 3250 new DeferredStringCharCodeAt(this, instr);
(...skipping 100 matching lines...)
3309 __ StoreToSafepointRegisterSlot(result, rax); 3351 __ StoreToSafepointRegisterSlot(result, rax);
3310 } 3352 }
3311 3353
3312 3354
3313 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { 3355 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3314 class DeferredStringCharFromCode: public LDeferredCode { 3356 class DeferredStringCharFromCode: public LDeferredCode {
3315 public: 3357 public:
3316 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) 3358 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3317 : LDeferredCode(codegen), instr_(instr) { } 3359 : LDeferredCode(codegen), instr_(instr) { }
3318 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); } 3360 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
3361 virtual LInstruction* instr() { return instr_; }
3319 private: 3362 private:
3320 LStringCharFromCode* instr_; 3363 LStringCharFromCode* instr_;
3321 }; 3364 };
3322 3365
3323 DeferredStringCharFromCode* deferred = 3366 DeferredStringCharFromCode* deferred =
3324 new DeferredStringCharFromCode(this, instr); 3367 new DeferredStringCharFromCode(this, instr);
3325 3368
3326 ASSERT(instr->hydrogen()->value()->representation().IsInteger32()); 3369 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3327 Register char_code = ToRegister(instr->char_code()); 3370 Register char_code = ToRegister(instr->char_code());
3328 Register result = ToRegister(instr->result()); 3371 Register result = ToRegister(instr->result());
(...skipping 56 matching lines...)
3385 __ Integer32ToSmi(reg, reg); 3428 __ Integer32ToSmi(reg, reg);
3386 } 3429 }
3387 3430
3388 3431
3389 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { 3432 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3390 class DeferredNumberTagD: public LDeferredCode { 3433 class DeferredNumberTagD: public LDeferredCode {
3391 public: 3434 public:
3392 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) 3435 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3393 : LDeferredCode(codegen), instr_(instr) { } 3436 : LDeferredCode(codegen), instr_(instr) { }
3394 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); } 3437 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3438 virtual LInstruction* instr() { return instr_; }
3395 private: 3439 private:
3396 LNumberTagD* instr_; 3440 LNumberTagD* instr_;
3397 }; 3441 };
3398 3442
3399 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); 3443 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3400 Register reg = ToRegister(instr->result()); 3444 Register reg = ToRegister(instr->result());
3401 Register tmp = ToRegister(instr->TempAt(0)); 3445 Register tmp = ToRegister(instr->TempAt(0));
3402 3446
3403 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr); 3447 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
3404 if (FLAG_inline_new) { 3448 if (FLAG_inline_new) {
(...skipping 75 matching lines...)
3480 __ jmp(&done, Label::kNear); 3524 __ jmp(&done, Label::kNear);
3481 3525
3482 // Smi to XMM conversion 3526 // Smi to XMM conversion
3483 __ bind(&load_smi); 3527 __ bind(&load_smi);
3484 __ SmiToInteger32(kScratchRegister, input_reg); 3528 __ SmiToInteger32(kScratchRegister, input_reg);
3485 __ cvtlsi2sd(result_reg, kScratchRegister); 3529 __ cvtlsi2sd(result_reg, kScratchRegister);
3486 __ bind(&done); 3530 __ bind(&done);
3487 } 3531 }
3488 3532
3489 3533
3490 class DeferredTaggedToI: public LDeferredCode {
3491 public:
3492 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3493 : LDeferredCode(codegen), instr_(instr) { }
3494 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3495 private:
3496 LTaggedToI* instr_;
3497 };
3498
3499
3500 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { 3534 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3501 Label done, heap_number; 3535 Label done, heap_number;
3502 Register input_reg = ToRegister(instr->InputAt(0)); 3536 Register input_reg = ToRegister(instr->InputAt(0));
3503 3537
3504 // Heap number map check. 3538 // Heap number map check.
3505 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), 3539 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
3506 Heap::kHeapNumberMapRootIndex); 3540 Heap::kHeapNumberMapRootIndex);
3507 3541
3508 if (instr->truncating()) { 3542 if (instr->truncating()) {
3509 __ j(equal, &heap_number, Label::kNear); 3543 __ j(equal, &heap_number, Label::kNear);
(...skipping 28 matching lines...)
3538 __ movmskpd(input_reg, xmm0); 3572 __ movmskpd(input_reg, xmm0);
3539 __ andl(input_reg, Immediate(1)); 3573 __ andl(input_reg, Immediate(1));
3540 DeoptimizeIf(not_zero, instr->environment()); 3574 DeoptimizeIf(not_zero, instr->environment());
3541 } 3575 }
3542 } 3576 }
3543 __ bind(&done); 3577 __ bind(&done);
3544 } 3578 }
3545 3579
3546 3580
3547 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { 3581 void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
3582 class DeferredTaggedToI: public LDeferredCode {
3583 public:
3584 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3585 : LDeferredCode(codegen), instr_(instr) { }
3586 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3587 virtual LInstruction* instr() { return instr_; }
3588 private:
3589 LTaggedToI* instr_;
3590 };
3591
3548 LOperand* input = instr->InputAt(0); 3592 LOperand* input = instr->InputAt(0);
3549 ASSERT(input->IsRegister()); 3593 ASSERT(input->IsRegister());
3550 ASSERT(input->Equals(instr->result())); 3594 ASSERT(input->Equals(instr->result()));
3551 3595
3552 Register input_reg = ToRegister(input); 3596 Register input_reg = ToRegister(input);
3553 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr); 3597 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
3554 __ JumpIfNotSmi(input_reg, deferred->entry()); 3598 __ JumpIfNotSmi(input_reg, deferred->entry());
3555 __ SmiToInteger32(input_reg, input_reg); 3599 __ SmiToInteger32(input_reg, input_reg);
3556 __ bind(deferred->exit()); 3600 __ bind(deferred->exit());
3557 } 3601 }
(...skipping 416 matching lines...)
3974 __ CompareRoot(input, Heap::kUndefinedValueRootIndex); 4018 __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
3975 __ j(equal, true_label); 4019 __ j(equal, true_label);
3976 __ JumpIfSmi(input, false_label); 4020 __ JumpIfSmi(input, false_label);
3977 // Check for undetectable objects => true. 4021 // Check for undetectable objects => true.
3978 __ movq(input, FieldOperand(input, HeapObject::kMapOffset)); 4022 __ movq(input, FieldOperand(input, HeapObject::kMapOffset));
3979 __ testb(FieldOperand(input, Map::kBitFieldOffset), 4023 __ testb(FieldOperand(input, Map::kBitFieldOffset),
3980 Immediate(1 << Map::kIsUndetectable)); 4024 Immediate(1 << Map::kIsUndetectable));
3981 final_branch_condition = not_zero; 4025 final_branch_condition = not_zero;
3982 4026
3983 } else if (type_name->Equals(heap()->function_symbol())) { 4027 } else if (type_name->Equals(heap()->function_symbol())) {
4028 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3984 __ JumpIfSmi(input, false_label); 4029 __ JumpIfSmi(input, false_label);
3985 __ CmpObjectType(input, FIRST_CALLABLE_SPEC_OBJECT_TYPE, input); 4030 __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
3986 final_branch_condition = above_equal; 4031 __ j(equal, true_label);
4032 __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE);
4033 final_branch_condition = equal;
3987 4034
3988 } else if (type_name->Equals(heap()->object_symbol())) { 4035 } else if (type_name->Equals(heap()->object_symbol())) {
3989 __ JumpIfSmi(input, false_label); 4036 __ JumpIfSmi(input, false_label);
3990 if (!FLAG_harmony_typeof) { 4037 if (!FLAG_harmony_typeof) {
3991 __ CompareRoot(input, Heap::kNullValueRootIndex); 4038 __ CompareRoot(input, Heap::kNullValueRootIndex);
3992 __ j(equal, true_label); 4039 __ j(equal, true_label);
3993 } 4040 }
3994 __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input); 4041 __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
3995 __ j(below, false_label); 4042 __ j(below, false_label);
3996 __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); 4043 __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
(...skipping 105 matching lines...)
4102 safepoints_.SetPcAfterGap(pc); 4149 safepoints_.SetPcAfterGap(pc);
4103 } 4150 }
4104 4151
4105 4152
4106 void LCodeGen::DoStackCheck(LStackCheck* instr) { 4153 void LCodeGen::DoStackCheck(LStackCheck* instr) {
4107 class DeferredStackCheck: public LDeferredCode { 4154 class DeferredStackCheck: public LDeferredCode {
4108 public: 4155 public:
4109 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) 4156 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
4110 : LDeferredCode(codegen), instr_(instr) { } 4157 : LDeferredCode(codegen), instr_(instr) { }
4111 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } 4158 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
4159 virtual LInstruction* instr() { return instr_; }
4112 private: 4160 private:
4113 LStackCheck* instr_; 4161 LStackCheck* instr_;
4114 }; 4162 };
4115 4163
4116 if (instr->hydrogen()->is_function_entry()) { 4164 if (instr->hydrogen()->is_function_entry()) {
4117 // Perform stack overflow check. 4165 // Perform stack overflow check.
4118 Label done; 4166 Label done;
4119 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 4167 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
4120 __ j(above_equal, &done, Label::kNear); 4168 __ j(above_equal, &done, Label::kNear);
4121 StackCheckStub stub; 4169 StackCheckStub stub;
(...skipping 26 matching lines...)
4148 RegisterEnvironmentForDeoptimization(environment); 4196 RegisterEnvironmentForDeoptimization(environment);
4149 ASSERT(osr_pc_offset_ == -1); 4197 ASSERT(osr_pc_offset_ == -1);
4150 osr_pc_offset_ = masm()->pc_offset(); 4198 osr_pc_offset_ = masm()->pc_offset();
4151 } 4199 }
4152 4200
4153 #undef __ 4201 #undef __
4154 4202
4155 } } // namespace v8::internal 4203 } } // namespace v8::internal
4156 4204
4157 #endif // V8_TARGET_ARCH_X64 4205 #endif // V8_TARGET_ARCH_X64