OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 248 matching lines...)
259 Comment(";;; jump table entry %d.", i); | 259 Comment(";;; jump table entry %d.", i); |
260 } else { | 260 } else { |
261 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 261 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
262 } | 262 } |
263 if (jump_table_[i].needs_frame) { | 263 if (jump_table_[i].needs_frame) { |
264 __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); | 264 __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); |
265 if (needs_frame.is_bound()) { | 265 if (needs_frame.is_bound()) { |
266 __ jmp(&needs_frame); | 266 __ jmp(&needs_frame); |
267 } else { | 267 } else { |
268 __ bind(&needs_frame); | 268 __ bind(&needs_frame); |
| 269 __ movq(rsi, MemOperand(rbp, StandardFrameConstants::kContextOffset)); |
269 __ push(rbp); | 270 __ push(rbp); |
270 __ movq(rbp, rsp); | 271 __ movq(rbp, rsp); |
271 __ push(rsi); | 272 __ push(rsi); |
272 // This variant of deopt can only be used with stubs. Since we don't | 273 // This variant of deopt can only be used with stubs. Since we don't |
273 // have a function pointer to install in the stack frame that we're | 274 // have a function pointer to install in the stack frame that we're |
274 // building, install a special marker there instead. | 275 // building, install a special marker there instead. |
275 ASSERT(info()->IsStub()); | 276 ASSERT(info()->IsStub()); |
276 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); | 277 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); |
277 __ push(rsi); | 278 __ push(rsi); |
278 __ movq(rsi, MemOperand(rsp, kPointerSize)); | 279 __ movq(rsi, MemOperand(rsp, kPointerSize)); |
(...skipping 303 matching lines...)
582 SaveFPRegsMode save_doubles) { | 583 SaveFPRegsMode save_doubles) { |
583 ASSERT(instr != NULL); | 584 ASSERT(instr != NULL); |
584 ASSERT(instr->HasPointerMap()); | 585 ASSERT(instr->HasPointerMap()); |
585 | 586 |
586 __ CallRuntime(function, num_arguments, save_doubles); | 587 __ CallRuntime(function, num_arguments, save_doubles); |
587 | 588 |
588 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); | 589 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); |
589 } | 590 } |
590 | 591 |
591 | 592 |
| 593 void LCodeGen::LoadContextFromDeferred(LOperand* context) { |
| 594 if (context->IsRegister()) { |
| 595 if (!ToRegister(context).is(rsi)) { |
| 596 __ movq(rsi, ToRegister(context)); |
| 597 } |
| 598 } else if (context->IsStackSlot()) { |
| 599 __ movq(rsi, ToOperand(context)); |
| 600 } else if (context->IsConstantOperand()) { |
| 601 HConstant* constant = |
| 602 chunk_->LookupConstant(LConstantOperand::cast(context)); |
| 603 __ Move(rsi, Handle<Object>::cast(constant->handle(isolate()))); |
| 604 } else { |
| 605 UNREACHABLE(); |
| 606 } |
| 607 } |
| 608 |
| 609 |
| 610 |
592 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, | 611 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, |
593 int argc, | 612 int argc, |
594 LInstruction* instr) { | 613 LInstruction* instr, |
595 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 614 LOperand* context) { |
| 615 LoadContextFromDeferred(context); |
| 616 |
596 __ CallRuntimeSaveDoubles(id); | 617 __ CallRuntimeSaveDoubles(id); |
597 RecordSafepointWithRegisters( | 618 RecordSafepointWithRegisters( |
598 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); | 619 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); |
599 } | 620 } |
600 | 621 |
601 | 622 |
602 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, | 623 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, |
603 Safepoint::DeoptMode mode) { | 624 Safepoint::DeoptMode mode) { |
604 if (!environment->HasBeenRegistered()) { | 625 if (!environment->HasBeenRegistered()) { |
605 // Physical stack frame layout: | 626 // Physical stack frame layout: |
(...skipping 203 matching lines...)
809 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 830 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
810 kind, arguments, deopt_mode); | 831 kind, arguments, deopt_mode); |
811 for (int i = 0; i < operands->length(); i++) { | 832 for (int i = 0; i < operands->length(); i++) { |
812 LOperand* pointer = operands->at(i); | 833 LOperand* pointer = operands->at(i); |
813 if (pointer->IsStackSlot()) { | 834 if (pointer->IsStackSlot()) { |
814 safepoint.DefinePointerSlot(pointer->index(), zone()); | 835 safepoint.DefinePointerSlot(pointer->index(), zone()); |
815 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 836 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
816 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); | 837 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); |
817 } | 838 } |
818 } | 839 } |
819 if (kind & Safepoint::kWithRegisters) { | |
820 // Register rsi always contains a pointer to the context. | |
821 safepoint.DefinePointerRegister(rsi, zone()); | |
822 } | |
823 } | 840 } |
824 | 841 |
825 | 842 |
826 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 843 void LCodeGen::RecordSafepoint(LPointerMap* pointers, |
827 Safepoint::DeoptMode deopt_mode) { | 844 Safepoint::DeoptMode deopt_mode) { |
828 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode); | 845 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode); |
829 } | 846 } |
830 | 847 |
831 | 848 |
832 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) { | 849 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) { |
(...skipping 55 matching lines...)
888 DoGap(instr); | 905 DoGap(instr); |
889 } | 906 } |
890 | 907 |
891 | 908 |
892 void LCodeGen::DoParameter(LParameter* instr) { | 909 void LCodeGen::DoParameter(LParameter* instr) { |
893 // Nothing to do. | 910 // Nothing to do. |
894 } | 911 } |
895 | 912 |
896 | 913 |
897 void LCodeGen::DoCallStub(LCallStub* instr) { | 914 void LCodeGen::DoCallStub(LCallStub* instr) { |
| 915 ASSERT(ToRegister(instr->context()).is(rsi)); |
898 ASSERT(ToRegister(instr->result()).is(rax)); | 916 ASSERT(ToRegister(instr->result()).is(rax)); |
899 switch (instr->hydrogen()->major_key()) { | 917 switch (instr->hydrogen()->major_key()) { |
900 case CodeStub::RegExpConstructResult: { | 918 case CodeStub::RegExpConstructResult: { |
901 RegExpConstructResultStub stub; | 919 RegExpConstructResultStub stub; |
902 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 920 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
903 break; | 921 break; |
904 } | 922 } |
905 case CodeStub::RegExpExec: { | 923 case CodeStub::RegExpExec: { |
906 RegExpExecStub stub; | 924 RegExpExecStub stub; |
907 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 925 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
(...skipping 711 matching lines...)
1619 __ j(not_equal, &runtime, Label::kNear); | 1637 __ j(not_equal, &runtime, Label::kNear); |
1620 __ movq(result, FieldOperand(object, JSDate::kValueOffset + | 1638 __ movq(result, FieldOperand(object, JSDate::kValueOffset + |
1621 kPointerSize * index->value())); | 1639 kPointerSize * index->value())); |
1622 __ jmp(&done); | 1640 __ jmp(&done); |
1623 } | 1641 } |
1624 __ bind(&runtime); | 1642 __ bind(&runtime); |
1625 __ PrepareCallCFunction(2); | 1643 __ PrepareCallCFunction(2); |
1626 __ movq(arg_reg_1, object); | 1644 __ movq(arg_reg_1, object); |
1627 __ movq(arg_reg_2, index, RelocInfo::NONE64); | 1645 __ movq(arg_reg_2, index, RelocInfo::NONE64); |
1628 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 1646 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
1629 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
1630 __ bind(&done); | 1647 __ bind(&done); |
1631 } | 1648 } |
1632 } | 1649 } |
1633 | 1650 |
1634 | 1651 |
1635 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { | 1652 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { |
1636 Register string = ToRegister(instr->string()); | 1653 Register string = ToRegister(instr->string()); |
1637 Register index = ToRegister(instr->index()); | 1654 Register index = ToRegister(instr->index()); |
1638 Register value = ToRegister(instr->value()); | 1655 Register value = ToRegister(instr->value()); |
1639 String::Encoding encoding = instr->encoding(); | 1656 String::Encoding encoding = instr->encoding(); |
(...skipping 17 matching lines...)
1657 value); | 1674 value); |
1658 } else { | 1675 } else { |
1659 __ movw(FieldOperand(string, index, times_2, SeqString::kHeaderSize), | 1676 __ movw(FieldOperand(string, index, times_2, SeqString::kHeaderSize), |
1660 value); | 1677 value); |
1661 } | 1678 } |
1662 } | 1679 } |
1663 | 1680 |
1664 | 1681 |
1665 void LCodeGen::DoThrow(LThrow* instr) { | 1682 void LCodeGen::DoThrow(LThrow* instr) { |
1666 __ push(ToRegister(instr->value())); | 1683 __ push(ToRegister(instr->value())); |
| 1684 ASSERT(ToRegister(instr->context()).is(rsi)); |
1667 CallRuntime(Runtime::kThrow, 1, instr); | 1685 CallRuntime(Runtime::kThrow, 1, instr); |
1668 | 1686 |
1669 if (FLAG_debug_code) { | 1687 if (FLAG_debug_code) { |
1670 Comment("Unreachable code."); | 1688 Comment("Unreachable code."); |
1671 __ int3(); | 1689 __ int3(); |
1672 } | 1690 } |
1673 } | 1691 } |
1674 | 1692 |
1675 | 1693 |
1676 void LCodeGen::DoAddI(LAddI* instr) { | 1694 void LCodeGen::DoAddI(LAddI* instr) { |
(...skipping 134 matching lines...)
1811 // when there is a mulsd depending on the result | 1829 // when there is a mulsd depending on the result |
1812 __ movaps(left, left); | 1830 __ movaps(left, left); |
1813 break; | 1831 break; |
1814 case Token::MOD: { | 1832 case Token::MOD: { |
1815 XMMRegister xmm_scratch = double_scratch0(); | 1833 XMMRegister xmm_scratch = double_scratch0(); |
1816 __ PrepareCallCFunction(2); | 1834 __ PrepareCallCFunction(2); |
1817 __ movaps(xmm_scratch, left); | 1835 __ movaps(xmm_scratch, left); |
1818 ASSERT(right.is(xmm1)); | 1836 ASSERT(right.is(xmm1)); |
1819 __ CallCFunction( | 1837 __ CallCFunction( |
1820 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2); | 1838 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2); |
1821 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
1822 __ movaps(result, xmm_scratch); | 1839 __ movaps(result, xmm_scratch); |
1823 break; | 1840 break; |
1824 } | 1841 } |
1825 default: | 1842 default: |
1826 UNREACHABLE(); | 1843 UNREACHABLE(); |
1827 break; | 1844 break; |
1828 } | 1845 } |
1829 } | 1846 } |
1830 | 1847 |
1831 | 1848 |
1832 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1849 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 1850 ASSERT(ToRegister(instr->context()).is(rsi)); |
1833 ASSERT(ToRegister(instr->left()).is(rdx)); | 1851 ASSERT(ToRegister(instr->left()).is(rdx)); |
1834 ASSERT(ToRegister(instr->right()).is(rax)); | 1852 ASSERT(ToRegister(instr->right()).is(rax)); |
1835 ASSERT(ToRegister(instr->result()).is(rax)); | 1853 ASSERT(ToRegister(instr->result()).is(rax)); |
1836 | 1854 |
1837 BinaryOpStub stub(instr->op(), NO_OVERWRITE); | 1855 BinaryOpStub stub(instr->op(), NO_OVERWRITE); |
1838 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1856 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
1839 __ nop(); // Signals no inlined code. | 1857 __ nop(); // Signals no inlined code. |
1840 } | 1858 } |
1841 | 1859 |
1842 | 1860 |
(...skipping 394 matching lines...)
2237 __ JumpIfSmi(input, instr->FalseLabel(chunk_)); | 2255 __ JumpIfSmi(input, instr->FalseLabel(chunk_)); |
2238 } | 2256 } |
2239 __ movq(temp, FieldOperand(input, HeapObject::kMapOffset)); | 2257 __ movq(temp, FieldOperand(input, HeapObject::kMapOffset)); |
2240 __ testb(FieldOperand(temp, Map::kBitFieldOffset), | 2258 __ testb(FieldOperand(temp, Map::kBitFieldOffset), |
2241 Immediate(1 << Map::kIsUndetectable)); | 2259 Immediate(1 << Map::kIsUndetectable)); |
2242 EmitBranch(instr, not_zero); | 2260 EmitBranch(instr, not_zero); |
2243 } | 2261 } |
2244 | 2262 |
2245 | 2263 |
2246 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { | 2264 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { |
| 2265 ASSERT(ToRegister(instr->context()).is(rsi)); |
2247 Token::Value op = instr->op(); | 2266 Token::Value op = instr->op(); |
2248 | 2267 |
2249 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); | 2268 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); |
2250 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2269 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
2251 | 2270 |
2252 Condition condition = TokenToCondition(op, false); | 2271 Condition condition = TokenToCondition(op, false); |
2253 __ testq(rax, rax); | 2272 __ testq(rax, rax); |
2254 | 2273 |
2255 EmitBranch(instr, condition); | 2274 EmitBranch(instr, condition); |
2256 } | 2275 } |
(...skipping 136 matching lines...)
2393 | 2412 |
2394 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { | 2413 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { |
2395 Register reg = ToRegister(instr->value()); | 2414 Register reg = ToRegister(instr->value()); |
2396 | 2415 |
2397 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); | 2416 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); |
2398 EmitBranch(instr, equal); | 2417 EmitBranch(instr, equal); |
2399 } | 2418 } |
2400 | 2419 |
2401 | 2420 |
2402 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2421 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 2422 ASSERT(ToRegister(instr->context()).is(rsi)); |
2403 InstanceofStub stub(InstanceofStub::kNoFlags); | 2423 InstanceofStub stub(InstanceofStub::kNoFlags); |
2404 __ push(ToRegister(instr->left())); | 2424 __ push(ToRegister(instr->left())); |
2405 __ push(ToRegister(instr->right())); | 2425 __ push(ToRegister(instr->right())); |
2406 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2426 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
2407 Label true_value, done; | 2427 Label true_value, done; |
2408 __ testq(rax, rax); | 2428 __ testq(rax, rax); |
2409 __ j(zero, &true_value, Label::kNear); | 2429 __ j(zero, &true_value, Label::kNear); |
2410 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); | 2430 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); |
2411 __ jmp(&done, Label::kNear); | 2431 __ jmp(&done, Label::kNear); |
2412 __ bind(&true_value); | 2432 __ bind(&true_value); |
(...skipping 11 matching lines...)
2424 virtual void Generate() V8_OVERRIDE { | 2444 virtual void Generate() V8_OVERRIDE { |
2425 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); | 2445 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); |
2426 } | 2446 } |
2427 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 2447 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
2428 Label* map_check() { return &map_check_; } | 2448 Label* map_check() { return &map_check_; } |
2429 private: | 2449 private: |
2430 LInstanceOfKnownGlobal* instr_; | 2450 LInstanceOfKnownGlobal* instr_; |
2431 Label map_check_; | 2451 Label map_check_; |
2432 }; | 2452 }; |
2433 | 2453 |
2434 | 2454 ASSERT(ToRegister(instr->context()).is(rsi)); |
2435 DeferredInstanceOfKnownGlobal* deferred; | 2455 DeferredInstanceOfKnownGlobal* deferred; |
2436 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr); | 2456 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr); |
2437 | 2457 |
2438 Label done, false_result; | 2458 Label done, false_result; |
2439 Register object = ToRegister(instr->value()); | 2459 Register object = ToRegister(instr->value()); |
2440 | 2460 |
2441 // A Smi is not an instance of anything. | 2461 // A Smi is not an instance of anything. |
2442 __ JumpIfSmi(object, &false_result); | 2462 __ JumpIfSmi(object, &false_result); |
2443 | 2463 |
2444 // This is the inlined call site instanceof cache. The two occurences of the | 2464 // This is the inlined call site instanceof cache. The two occurences of the |
(...skipping 74 matching lines...)
2519 __ j(not_zero, &load_false); | 2539 __ j(not_zero, &load_false); |
2520 __ LoadRoot(rax, Heap::kTrueValueRootIndex); | 2540 __ LoadRoot(rax, Heap::kTrueValueRootIndex); |
2521 __ jmp(&done); | 2541 __ jmp(&done); |
2522 __ bind(&load_false); | 2542 __ bind(&load_false); |
2523 __ LoadRoot(rax, Heap::kFalseValueRootIndex); | 2543 __ LoadRoot(rax, Heap::kFalseValueRootIndex); |
2524 __ bind(&done); | 2544 __ bind(&done); |
2525 } | 2545 } |
2526 | 2546 |
2527 | 2547 |
2528 void LCodeGen::DoCmpT(LCmpT* instr) { | 2548 void LCodeGen::DoCmpT(LCmpT* instr) { |
| 2549 ASSERT(ToRegister(instr->context()).is(rsi)); |
2529 Token::Value op = instr->op(); | 2550 Token::Value op = instr->op(); |
2530 | 2551 |
2531 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); | 2552 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); |
2532 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2553 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
2533 | 2554 |
2534 Condition condition = TokenToCondition(op, false); | 2555 Condition condition = TokenToCondition(op, false); |
2535 Label true_value, done; | 2556 Label true_value, done; |
2536 __ testq(rax, rax); | 2557 __ testq(rax, rax); |
2537 __ j(condition, &true_value, Label::kNear); | 2558 __ j(condition, &true_value, Label::kNear); |
2538 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); | 2559 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); |
2539 __ jmp(&done, Label::kNear); | 2560 __ jmp(&done, Label::kNear); |
2540 __ bind(&true_value); | 2561 __ bind(&true_value); |
2541 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); | 2562 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); |
2542 __ bind(&done); | 2563 __ bind(&done); |
2543 } | 2564 } |
2544 | 2565 |
2545 | 2566 |
2546 void LCodeGen::DoReturn(LReturn* instr) { | 2567 void LCodeGen::DoReturn(LReturn* instr) { |
2547 if (FLAG_trace && info()->IsOptimizing()) { | 2568 if (FLAG_trace && info()->IsOptimizing()) { |
2548 // Preserve the return value on the stack and rely on the runtime | 2569 // Preserve the return value on the stack and rely on the runtime call |
2549 // call to return the value in the same register. | 2570 // to return the value in the same register. We're leaving the code |
| 2571 // managed by the register allocator and tearing down the frame, it's |
| 2572 // safe to write to the context register. |
2550 __ push(rax); | 2573 __ push(rax); |
| 2574 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
2551 __ CallRuntime(Runtime::kTraceExit, 1); | 2575 __ CallRuntime(Runtime::kTraceExit, 1); |
2552 } | 2576 } |
2553 if (info()->saves_caller_doubles()) { | 2577 if (info()->saves_caller_doubles()) { |
2554 ASSERT(NeedsEagerFrame()); | 2578 ASSERT(NeedsEagerFrame()); |
2555 BitVector* doubles = chunk()->allocated_double_registers(); | 2579 BitVector* doubles = chunk()->allocated_double_registers(); |
2556 BitVector::Iterator save_iterator(doubles); | 2580 BitVector::Iterator save_iterator(doubles); |
2557 int count = 0; | 2581 int count = 0; |
2558 while (!save_iterator.Done()) { | 2582 while (!save_iterator.Done()) { |
2559 __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()), | 2583 __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()), |
2560 MemOperand(rsp, count * kDoubleSize)); | 2584 MemOperand(rsp, count * kDoubleSize)); |
(...skipping 30 matching lines...)
2591 Register result = ToRegister(instr->result()); | 2615 Register result = ToRegister(instr->result()); |
2592 __ LoadGlobalCell(result, instr->hydrogen()->cell().handle()); | 2616 __ LoadGlobalCell(result, instr->hydrogen()->cell().handle()); |
2593 if (instr->hydrogen()->RequiresHoleCheck()) { | 2617 if (instr->hydrogen()->RequiresHoleCheck()) { |
2594 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 2618 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
2595 DeoptimizeIf(equal, instr->environment()); | 2619 DeoptimizeIf(equal, instr->environment()); |
2596 } | 2620 } |
2597 } | 2621 } |
2598 | 2622 |
2599 | 2623 |
2600 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { | 2624 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { |
| 2625 ASSERT(ToRegister(instr->context()).is(rsi)); |
2601 ASSERT(ToRegister(instr->global_object()).is(rax)); | 2626 ASSERT(ToRegister(instr->global_object()).is(rax)); |
2602 ASSERT(ToRegister(instr->result()).is(rax)); | 2627 ASSERT(ToRegister(instr->result()).is(rax)); |
2603 | 2628 |
2604 __ Move(rcx, instr->name()); | 2629 __ Move(rcx, instr->name()); |
2605 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : | 2630 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : |
2606 RelocInfo::CODE_TARGET_CONTEXT; | 2631 RelocInfo::CODE_TARGET_CONTEXT; |
2607 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2632 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
2608 CallCode(ic, mode, instr); | 2633 CallCode(ic, mode, instr); |
2609 } | 2634 } |
2610 | 2635 |
(...skipping 18 matching lines...)
2629 } else { | 2654 } else { |
2630 // Store the value. | 2655 // Store the value. |
2631 __ movq(kScratchRegister, cell_handle, RelocInfo::CELL); | 2656 __ movq(kScratchRegister, cell_handle, RelocInfo::CELL); |
2632 __ movq(Operand(kScratchRegister, 0), value); | 2657 __ movq(Operand(kScratchRegister, 0), value); |
2633 } | 2658 } |
2634 // Cells are always rescanned, so no write barrier here. | 2659 // Cells are always rescanned, so no write barrier here. |
2635 } | 2660 } |
2636 | 2661 |
2637 | 2662 |
2638 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { | 2663 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { |
| 2664 ASSERT(ToRegister(instr->context()).is(rsi)); |
2639 ASSERT(ToRegister(instr->global_object()).is(rdx)); | 2665 ASSERT(ToRegister(instr->global_object()).is(rdx)); |
2640 ASSERT(ToRegister(instr->value()).is(rax)); | 2666 ASSERT(ToRegister(instr->value()).is(rax)); |
2641 | 2667 |
2642 __ Move(rcx, instr->name()); | 2668 __ Move(rcx, instr->name()); |
2643 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 2669 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
2644 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 2670 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
2645 : isolate()->builtins()->StoreIC_Initialize(); | 2671 : isolate()->builtins()->StoreIC_Initialize(); |
2646 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); | 2672 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); |
2647 } | 2673 } |
2648 | 2674 |
(...skipping 79 matching lines...)
2728 Register result = ToRegister(instr->result()); | 2754 Register result = ToRegister(instr->result()); |
2729 if (!access.IsInobject()) { | 2755 if (!access.IsInobject()) { |
2730 __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset)); | 2756 __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset)); |
2731 object = result; | 2757 object = result; |
2732 } | 2758 } |
2733 __ Load(result, FieldOperand(object, offset), access.representation()); | 2759 __ Load(result, FieldOperand(object, offset), access.representation()); |
2734 } | 2760 } |
2735 | 2761 |
2736 | 2762 |
2737 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 2763 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
| 2764 ASSERT(ToRegister(instr->context()).is(rsi)); |
2738 ASSERT(ToRegister(instr->object()).is(rax)); | 2765 ASSERT(ToRegister(instr->object()).is(rax)); |
2739 ASSERT(ToRegister(instr->result()).is(rax)); | 2766 ASSERT(ToRegister(instr->result()).is(rax)); |
2740 | 2767 |
2741 __ Move(rcx, instr->name()); | 2768 __ Move(rcx, instr->name()); |
2742 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2769 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
2743 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2770 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
2744 } | 2771 } |
2745 | 2772 |
2746 | 2773 |
2747 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { | 2774 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { |
(...skipping 259 matching lines...)
3007 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size); | 3034 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size); |
3008 return Operand(elements_pointer_reg, | 3035 return Operand(elements_pointer_reg, |
3009 ToRegister(key), | 3036 ToRegister(key), |
3010 scale_factor, | 3037 scale_factor, |
3011 offset + (additional_index << shift_size)); | 3038 offset + (additional_index << shift_size)); |
3012 } | 3039 } |
3013 } | 3040 } |
3014 | 3041 |
3015 | 3042 |
3016 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 3043 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
| 3044 ASSERT(ToRegister(instr->context()).is(rsi)); |
3017 ASSERT(ToRegister(instr->object()).is(rdx)); | 3045 ASSERT(ToRegister(instr->object()).is(rdx)); |
3018 ASSERT(ToRegister(instr->key()).is(rax)); | 3046 ASSERT(ToRegister(instr->key()).is(rax)); |
3019 | 3047 |
3020 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); | 3048 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); |
3021 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3049 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
3022 } | 3050 } |
3023 | 3051 |
3024 | 3052 |
3025 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 3053 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
3026 Register result = ToRegister(instr->result()); | 3054 Register result = ToRegister(instr->result()); |
(...skipping 82 matching lines...)
3109 Condition is_smi = __ CheckSmi(receiver); | 3137 Condition is_smi = __ CheckSmi(receiver); |
3110 DeoptimizeIf(is_smi, instr->environment()); | 3138 DeoptimizeIf(is_smi, instr->environment()); |
3111 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); | 3139 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); |
3112 DeoptimizeIf(below, instr->environment()); | 3140 DeoptimizeIf(below, instr->environment()); |
3113 __ jmp(&receiver_ok, Label::kNear); | 3141 __ jmp(&receiver_ok, Label::kNear); |
3114 | 3142 |
3115 __ bind(&global_object); | 3143 __ bind(&global_object); |
3116 // TODO(kmillikin): We have a hydrogen value for the global object. See | 3144 // TODO(kmillikin): We have a hydrogen value for the global object. See |
3117 // if it's better to use it than to explicitly fetch it from the context | 3145 // if it's better to use it than to explicitly fetch it from the context |
3118 // here. | 3146 // here. |
3119 __ movq(receiver, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX)); | 3147 __ movq(receiver, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 3148 __ movq(receiver, ContextOperand(receiver, Context::GLOBAL_OBJECT_INDEX)); |
3120 __ movq(receiver, | 3149 __ movq(receiver, |
3121 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset)); | 3150 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset)); |
3122 __ bind(&receiver_ok); | 3151 __ bind(&receiver_ok); |
3123 } | 3152 } |
3124 | 3153 |
3125 | 3154 |
3126 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 3155 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
3127 Register receiver = ToRegister(instr->receiver()); | 3156 Register receiver = ToRegister(instr->receiver()); |
3128 Register function = ToRegister(instr->function()); | 3157 Register function = ToRegister(instr->function()); |
3129 Register length = ToRegister(instr->length()); | 3158 Register length = ToRegister(instr->length()); |
(...skipping 26 matching lines...)
3156 | 3185 |
3157 // Invoke the function. | 3186 // Invoke the function. |
3158 __ bind(&invoke); | 3187 __ bind(&invoke); |
3159 ASSERT(instr->HasPointerMap()); | 3188 ASSERT(instr->HasPointerMap()); |
3160 LPointerMap* pointers = instr->pointer_map(); | 3189 LPointerMap* pointers = instr->pointer_map(); |
3161 SafepointGenerator safepoint_generator( | 3190 SafepointGenerator safepoint_generator( |
3162 this, pointers, Safepoint::kLazyDeopt); | 3191 this, pointers, Safepoint::kLazyDeopt); |
3163 ParameterCount actual(rax); | 3192 ParameterCount actual(rax); |
3164 __ InvokeFunction(function, actual, CALL_FUNCTION, | 3193 __ InvokeFunction(function, actual, CALL_FUNCTION, |
3165 safepoint_generator, CALL_AS_METHOD); | 3194 safepoint_generator, CALL_AS_METHOD); |
3166 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
3167 } | 3195 } |
3168 | 3196 |
3169 | 3197 |
3170 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 3198 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
3171 LOperand* argument = instr->value(); | 3199 LOperand* argument = instr->value(); |
3172 EmitPushTaggedOperand(argument); | 3200 EmitPushTaggedOperand(argument); |
3173 } | 3201 } |
3174 | 3202 |
3175 | 3203 |
3176 void LCodeGen::DoDrop(LDrop* instr) { | 3204 void LCodeGen::DoDrop(LDrop* instr) { |
3177 __ Drop(instr->count()); | 3205 __ Drop(instr->count()); |
3178 } | 3206 } |
3179 | 3207 |
3180 | 3208 |
3181 void LCodeGen::DoThisFunction(LThisFunction* instr) { | 3209 void LCodeGen::DoThisFunction(LThisFunction* instr) { |
3182 Register result = ToRegister(instr->result()); | 3210 Register result = ToRegister(instr->result()); |
3183 __ movq(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 3211 __ movq(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
3184 } | 3212 } |
3185 | 3213 |
3186 | 3214 |
3187 void LCodeGen::DoContext(LContext* instr) { | 3215 void LCodeGen::DoContext(LContext* instr) { |
3188 Register result = ToRegister(instr->result()); | 3216 Register result = ToRegister(instr->result()); |
3189 __ movq(result, rsi); | 3217 if (info()->IsOptimizing()) { |
| 3218 __ movq(result, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 3219 } else { |
| 3220 // If there is no frame, the context must be in rsi. |
| 3221 ASSERT(result.is(rsi)); |
| 3222 } |
3190 } | 3223 } |
3191 | 3224 |
3192 | 3225 |
3193 void LCodeGen::DoOuterContext(LOuterContext* instr) { | 3226 void LCodeGen::DoOuterContext(LOuterContext* instr) { |
3194 Register context = ToRegister(instr->context()); | 3227 Register context = ToRegister(instr->context()); |
3195 Register result = ToRegister(instr->result()); | 3228 Register result = ToRegister(instr->result()); |
3196 __ movq(result, | 3229 __ movq(result, |
3197 Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 3230 Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
3198 } | 3231 } |
3199 | 3232 |
3200 | 3233 |
3201 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { | 3234 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { |
| 3235 ASSERT(ToRegister(instr->context()).is(rsi)); |
3202 __ push(rsi); // The context is the first argument. | 3236 __ push(rsi); // The context is the first argument. |
3203 __ Push(instr->hydrogen()->pairs()); | 3237 __ Push(instr->hydrogen()->pairs()); |
3204 __ Push(Smi::FromInt(instr->hydrogen()->flags())); | 3238 __ Push(Smi::FromInt(instr->hydrogen()->flags())); |
3205 CallRuntime(Runtime::kDeclareGlobals, 3, instr); | 3239 CallRuntime(Runtime::kDeclareGlobals, 3, instr); |
3206 } | 3240 } |
3207 | 3241 |
3208 | 3242 |
3209 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { | 3243 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { |
| 3244 Register context = ToRegister(instr->context()); |
3210 Register result = ToRegister(instr->result()); | 3245 Register result = ToRegister(instr->result()); |
3211 __ movq(result, GlobalObjectOperand()); | 3246 __ movq(result, |
| 3247 Operand(context, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
3212 } | 3248 } |
3213 | 3249 |
3214 | 3250 |
3215 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { | 3251 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { |
3216 Register global = ToRegister(instr->global()); | 3252 Register global = ToRegister(instr->global()); |
3217 Register result = ToRegister(instr->result()); | 3253 Register result = ToRegister(instr->result()); |
3218 __ movq(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset)); | 3254 __ movq(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset)); |
3219 } | 3255 } |
3220 | 3256 |
3221 | 3257 |
(...skipping 36 matching lines...)
3258 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); | 3294 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); |
3259 } else { | 3295 } else { |
3260 // We need to adapt arguments. | 3296 // We need to adapt arguments. |
3261 SafepointGenerator generator( | 3297 SafepointGenerator generator( |
3262 this, pointers, Safepoint::kLazyDeopt); | 3298 this, pointers, Safepoint::kLazyDeopt); |
3263 ParameterCount count(arity); | 3299 ParameterCount count(arity); |
3264 ParameterCount expected(formal_parameter_count); | 3300 ParameterCount expected(formal_parameter_count); |
3265 __ InvokeFunction( | 3301 __ InvokeFunction( |
3266 function, expected, count, CALL_FUNCTION, generator, call_kind); | 3302 function, expected, count, CALL_FUNCTION, generator, call_kind); |
3267 } | 3303 } |
3268 | |
3269 // Restore context. | |
3270 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
3271 } | 3304 } |
3272 | 3305 |
3273 | 3306 |
3274 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 3307 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
3275 ASSERT(ToRegister(instr->result()).is(rax)); | 3308 ASSERT(ToRegister(instr->result()).is(rax)); |
3276 CallKnownFunction(instr->hydrogen()->function(), | 3309 CallKnownFunction(instr->hydrogen()->function(), |
3277 instr->hydrogen()->formal_parameter_count(), | 3310 instr->hydrogen()->formal_parameter_count(), |
3278 instr->arity(), | 3311 instr->arity(), |
3279 instr, | 3312 instr, |
3280 CALL_AS_METHOD, | 3313 CALL_AS_METHOD, |
(...skipping 20 matching lines...)
3301 // |result| are the same register and |input| will be restored | 3334 // |result| are the same register and |input| will be restored |
3302 // unchanged by popping safepoint registers. | 3335 // unchanged by popping safepoint registers. |
3303 __ testl(tmp, Immediate(HeapNumber::kSignMask)); | 3336 __ testl(tmp, Immediate(HeapNumber::kSignMask)); |
3304 __ j(zero, &done); | 3337 __ j(zero, &done); |
3305 | 3338 |
3306 __ AllocateHeapNumber(tmp, tmp2, &slow); | 3339 __ AllocateHeapNumber(tmp, tmp2, &slow); |
3307 __ jmp(&allocated, Label::kNear); | 3340 __ jmp(&allocated, Label::kNear); |
3308 | 3341 |
3309 // Slow case: Call the runtime system to do the number allocation. | 3342 // Slow case: Call the runtime system to do the number allocation. |
3310 __ bind(&slow); | 3343 __ bind(&slow); |
3311 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); | 3344 CallRuntimeFromDeferred( |
| 3345 Runtime::kAllocateHeapNumber, 0, instr, instr->context()); |
3312 // Set the pointer to the new heap number in tmp. | 3346 // Set the pointer to the new heap number in tmp. |
3313 if (!tmp.is(rax)) __ movq(tmp, rax); | 3347 if (!tmp.is(rax)) __ movq(tmp, rax); |
3314 // Restore input_reg after call to runtime. | 3348 // Restore input_reg after call to runtime. |
3315 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); | 3349 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); |
3316 | 3350 |
3317 __ bind(&allocated); | 3351 __ bind(&allocated); |
3318 __ MoveDouble(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 3352 __ MoveDouble(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
3319 __ shl(tmp2, Immediate(1)); | 3353 __ shl(tmp2, Immediate(1)); |
3320 __ shr(tmp2, Immediate(1)); | 3354 __ shr(tmp2, Immediate(1)); |
3321 __ MoveDouble(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); | 3355 __ MoveDouble(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); |
(...skipping 358 matching lines...)
3680 __ fyl2x(); | 3714 __ fyl2x(); |
3681 __ fstp_d(Operand(rsp, 0)); | 3715 __ fstp_d(Operand(rsp, 0)); |
3682 __ movsd(input_reg, Operand(rsp, 0)); | 3716 __ movsd(input_reg, Operand(rsp, 0)); |
3683 __ addq(rsp, Immediate(kDoubleSize)); | 3717 __ addq(rsp, Immediate(kDoubleSize)); |
3684 __ bind(&done); | 3718 __ bind(&done); |
3685 } | 3719 } |
3686 | 3720 |
3687 | 3721 |
3688 void LCodeGen::DoMathTan(LMathTan* instr) { | 3722 void LCodeGen::DoMathTan(LMathTan* instr) { |
3689 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 3723 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
| 3724 // Set the context register to a GC-safe fake value. Clobbering it is |
| 3725 // OK because this instruction is marked as a call. |
| 3726 __ Set(rsi, 0); |
3690 TranscendentalCacheStub stub(TranscendentalCache::TAN, | 3727 TranscendentalCacheStub stub(TranscendentalCache::TAN, |
3691 TranscendentalCacheStub::UNTAGGED); | 3728 TranscendentalCacheStub::UNTAGGED); |
3692 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3729 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
3693 } | 3730 } |
3694 | 3731 |
3695 | 3732 |
3696 void LCodeGen::DoMathCos(LMathCos* instr) { | 3733 void LCodeGen::DoMathCos(LMathCos* instr) { |
3697 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 3734 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
| 3735 // Set the context register to a GC-safe fake value. Clobbering it is |
| 3736 // OK because this instruction is marked as a call. |
| 3737 __ Set(rsi, 0); |
3698 TranscendentalCacheStub stub(TranscendentalCache::COS, | 3738 TranscendentalCacheStub stub(TranscendentalCache::COS, |
3699 TranscendentalCacheStub::UNTAGGED); | 3739 TranscendentalCacheStub::UNTAGGED); |
3700 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3740 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
3701 } | 3741 } |
3702 | 3742 |
3703 | 3743 |
3704 void LCodeGen::DoMathSin(LMathSin* instr) { | 3744 void LCodeGen::DoMathSin(LMathSin* instr) { |
3705 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 3745 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
| 3746 // Set the context register to a GC-safe fake value. Clobbering it is |
| 3747 // OK because this instruction is marked as a call. |
| 3748 __ Set(rsi, 0); |
3706 TranscendentalCacheStub stub(TranscendentalCache::SIN, | 3749 TranscendentalCacheStub stub(TranscendentalCache::SIN, |
3707 TranscendentalCacheStub::UNTAGGED); | 3750 TranscendentalCacheStub::UNTAGGED); |
3708 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3751 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
3709 } | 3752 } |
3710 | 3753 |
3711 | 3754 |
3712 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3755 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
| 3756 ASSERT(ToRegister(instr->context()).is(rsi)); |
3713 ASSERT(ToRegister(instr->function()).is(rdi)); | 3757 ASSERT(ToRegister(instr->function()).is(rdi)); |
3714 ASSERT(instr->HasPointerMap()); | 3758 ASSERT(instr->HasPointerMap()); |
3715 | 3759 |
3716 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); | 3760 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); |
3717 if (known_function.is_null()) { | 3761 if (known_function.is_null()) { |
3718 LPointerMap* pointers = instr->pointer_map(); | 3762 LPointerMap* pointers = instr->pointer_map(); |
3719 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3763 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
3720 ParameterCount count(instr->arity()); | 3764 ParameterCount count(instr->arity()); |
3721 __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD); | 3765 __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD); |
3722 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
3723 } else { | 3766 } else { |
3724 CallKnownFunction(known_function, | 3767 CallKnownFunction(known_function, |
3725 instr->hydrogen()->formal_parameter_count(), | 3768 instr->hydrogen()->formal_parameter_count(), |
3726 instr->arity(), | 3769 instr->arity(), |
3727 instr, | 3770 instr, |
3728 CALL_AS_METHOD, | 3771 CALL_AS_METHOD, |
3729 RDI_CONTAINS_TARGET); | 3772 RDI_CONTAINS_TARGET); |
3730 } | 3773 } |
3731 } | 3774 } |
3732 | 3775 |
3733 | 3776 |
3734 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 3777 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { |
| 3778 ASSERT(ToRegister(instr->context()).is(rsi)); |
3735 ASSERT(ToRegister(instr->key()).is(rcx)); | 3779 ASSERT(ToRegister(instr->key()).is(rcx)); |
3736 ASSERT(ToRegister(instr->result()).is(rax)); | 3780 ASSERT(ToRegister(instr->result()).is(rax)); |
3737 | 3781 |
3738 int arity = instr->arity(); | 3782 int arity = instr->arity(); |
3739 Handle<Code> ic = | 3783 Handle<Code> ic = |
3740 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity); | 3784 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity); |
3741 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3785 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
3742 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
3743 } | 3786 } |
3744 | 3787 |
3745 | 3788 |
3746 void LCodeGen::DoCallNamed(LCallNamed* instr) { | 3789 void LCodeGen::DoCallNamed(LCallNamed* instr) { |
| 3790 ASSERT(ToRegister(instr->context()).is(rsi)); |
3747 ASSERT(ToRegister(instr->result()).is(rax)); | 3791 ASSERT(ToRegister(instr->result()).is(rax)); |
3748 | 3792 |
3749 int arity = instr->arity(); | 3793 int arity = instr->arity(); |
3750 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; | 3794 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; |
3751 Handle<Code> ic = | 3795 Handle<Code> ic = |
3752 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); | 3796 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); |
3753 __ Move(rcx, instr->name()); | 3797 __ Move(rcx, instr->name()); |
3754 CallCode(ic, mode, instr); | 3798 CallCode(ic, mode, instr); |
3755 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
3756 } | 3799 } |
3757 | 3800 |
3758 | 3801 |
3759 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 3802 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
| 3803 ASSERT(ToRegister(instr->context()).is(rsi)); |
3760 ASSERT(ToRegister(instr->function()).is(rdi)); | 3804 ASSERT(ToRegister(instr->function()).is(rdi)); |
3761 ASSERT(ToRegister(instr->result()).is(rax)); | 3805 ASSERT(ToRegister(instr->result()).is(rax)); |
3762 | 3806 |
3763 int arity = instr->arity(); | 3807 int arity = instr->arity(); |
3764 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); | 3808 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); |
3765 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3809 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
3766 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
3767 } | 3810 } |
3768 | 3811 |
3769 | 3812 |
3770 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { | 3813 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { |
| 3814 ASSERT(ToRegister(instr->context()).is(rsi)); |
3771 ASSERT(ToRegister(instr->result()).is(rax)); | 3815 ASSERT(ToRegister(instr->result()).is(rax)); |
3772 int arity = instr->arity(); | 3816 int arity = instr->arity(); |
3773 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; | 3817 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; |
3774 Handle<Code> ic = | 3818 Handle<Code> ic = |
3775 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); | 3819 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); |
3776 __ Move(rcx, instr->name()); | 3820 __ Move(rcx, instr->name()); |
3777 CallCode(ic, mode, instr); | 3821 CallCode(ic, mode, instr); |
3778 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
3779 } | 3822 } |
3780 | 3823 |
3781 | 3824 |
3782 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { | 3825 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { |
3783 ASSERT(ToRegister(instr->result()).is(rax)); | 3826 ASSERT(ToRegister(instr->result()).is(rax)); |
3784 CallKnownFunction(instr->hydrogen()->target(), | 3827 CallKnownFunction(instr->hydrogen()->target(), |
3785 instr->hydrogen()->formal_parameter_count(), | 3828 instr->hydrogen()->formal_parameter_count(), |
3786 instr->arity(), | 3829 instr->arity(), |
3787 instr, | 3830 instr, |
3788 CALL_AS_FUNCTION, | 3831 CALL_AS_FUNCTION, |
3789 RDI_UNINITIALIZED); | 3832 RDI_UNINITIALIZED); |
3790 } | 3833 } |
3791 | 3834 |
3792 | 3835 |
3793 void LCodeGen::DoCallNew(LCallNew* instr) { | 3836 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 3837 ASSERT(ToRegister(instr->context()).is(rsi)); |
3794 ASSERT(ToRegister(instr->constructor()).is(rdi)); | 3838 ASSERT(ToRegister(instr->constructor()).is(rdi)); |
3795 ASSERT(ToRegister(instr->result()).is(rax)); | 3839 ASSERT(ToRegister(instr->result()).is(rax)); |
3796 | 3840 |
3797 __ Set(rax, instr->arity()); | 3841 __ Set(rax, instr->arity()); |
3798 // No cell in ebx for construct type feedback in optimized code | 3842 // No cell in ebx for construct type feedback in optimized code |
3799 Handle<Object> undefined_value(isolate()->factory()->undefined_value()); | 3843 Handle<Object> undefined_value(isolate()->factory()->undefined_value()); |
3800 __ Move(rbx, undefined_value); | 3844 __ Move(rbx, undefined_value); |
3801 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); | 3845 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); |
3802 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 3846 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
3803 } | 3847 } |
3804 | 3848 |
3805 | 3849 |
3806 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { | 3850 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { |
| 3851 ASSERT(ToRegister(instr->context()).is(rsi)); |
3807 ASSERT(ToRegister(instr->constructor()).is(rdi)); | 3852 ASSERT(ToRegister(instr->constructor()).is(rdi)); |
3808 ASSERT(ToRegister(instr->result()).is(rax)); | 3853 ASSERT(ToRegister(instr->result()).is(rax)); |
3809 | 3854 |
3810 __ Set(rax, instr->arity()); | 3855 __ Set(rax, instr->arity()); |
3811 __ Move(rbx, instr->hydrogen()->property_cell()); | 3856 __ Move(rbx, instr->hydrogen()->property_cell()); |
3812 ElementsKind kind = instr->hydrogen()->elements_kind(); | 3857 ElementsKind kind = instr->hydrogen()->elements_kind(); |
3813 AllocationSiteOverrideMode override_mode = | 3858 AllocationSiteOverrideMode override_mode = |
3814 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) | 3859 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) |
3815 ? DISABLE_ALLOCATION_SITES | 3860 ? DISABLE_ALLOCATION_SITES |
3816 : DONT_OVERRIDE; | 3861 : DONT_OVERRIDE; |
(...skipping 24 matching lines...) Expand all Loading... |
3841 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 3886 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
3842 __ bind(&done); | 3887 __ bind(&done); |
3843 } else { | 3888 } else { |
3844 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode); | 3889 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode); |
3845 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 3890 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
3846 } | 3891 } |
3847 } | 3892 } |
3848 | 3893 |
3849 | 3894 |
3850 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 3895 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
| 3896 ASSERT(ToRegister(instr->context()).is(rsi)); |
3851 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles()); | 3897 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles()); |
3852 } | 3898 } |
3853 | 3899 |
3854 | 3900 |
3855 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { | 3901 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { |
3856 Register function = ToRegister(instr->function()); | 3902 Register function = ToRegister(instr->function()); |
3857 Register code_object = ToRegister(instr->code_object()); | 3903 Register code_object = ToRegister(instr->code_object()); |
3858 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); | 3904 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); |
3859 __ movq(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); | 3905 __ movq(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); |
3860 } | 3906 } |
(...skipping 112 matching lines...)
3973 value, | 4019 value, |
3974 temp, | 4020 temp, |
3975 kSaveFPRegs, | 4021 kSaveFPRegs, |
3976 EMIT_REMEMBERED_SET, | 4022 EMIT_REMEMBERED_SET, |
3977 check_needed); | 4023 check_needed); |
3978 } | 4024 } |
3979 } | 4025 } |
3980 | 4026 |
3981 | 4027 |
3982 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { | 4028 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { |
| 4029 ASSERT(ToRegister(instr->context()).is(rsi)); |
3983 ASSERT(ToRegister(instr->object()).is(rdx)); | 4030 ASSERT(ToRegister(instr->object()).is(rdx)); |
3984 ASSERT(ToRegister(instr->value()).is(rax)); | 4031 ASSERT(ToRegister(instr->value()).is(rax)); |
3985 | 4032 |
3986 __ Move(rcx, instr->hydrogen()->name()); | 4033 __ Move(rcx, instr->hydrogen()->name()); |
3987 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 4034 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
3988 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 4035 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
3989 : isolate()->builtins()->StoreIC_Initialize(); | 4036 : isolate()->builtins()->StoreIC_Initialize(); |
3990 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4037 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
3991 } | 4038 } |
3992 | 4039 |
(...skipping 217 matching lines...)
4210 DoStoreKeyedExternalArray(instr); | 4257 DoStoreKeyedExternalArray(instr); |
4211 } else if (instr->hydrogen()->value()->representation().IsDouble()) { | 4258 } else if (instr->hydrogen()->value()->representation().IsDouble()) { |
4212 DoStoreKeyedFixedDoubleArray(instr); | 4259 DoStoreKeyedFixedDoubleArray(instr); |
4213 } else { | 4260 } else { |
4214 DoStoreKeyedFixedArray(instr); | 4261 DoStoreKeyedFixedArray(instr); |
4215 } | 4262 } |
4216 } | 4263 } |
4217 | 4264 |
4218 | 4265 |
4219 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 4266 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
| 4267 ASSERT(ToRegister(instr->context()).is(rsi)); |
4220 ASSERT(ToRegister(instr->object()).is(rdx)); | 4268 ASSERT(ToRegister(instr->object()).is(rdx)); |
4221 ASSERT(ToRegister(instr->key()).is(rcx)); | 4269 ASSERT(ToRegister(instr->key()).is(rcx)); |
4222 ASSERT(ToRegister(instr->value()).is(rax)); | 4270 ASSERT(ToRegister(instr->value()).is(rax)); |
4223 | 4271 |
4224 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) | 4272 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) |
4225 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() | 4273 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
4226 : isolate()->builtins()->KeyedStoreIC_Initialize(); | 4274 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
4227 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 4275 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
4228 } | 4276 } |
4229 | 4277 |
(...skipping 15 matching lines...)
4245 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); | 4293 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); |
4246 // Write barrier. | 4294 // Write barrier. |
4247 ASSERT_NE(instr->temp(), NULL); | 4295 ASSERT_NE(instr->temp(), NULL); |
4248 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, | 4296 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, |
4249 ToRegister(instr->temp()), kDontSaveFPRegs); | 4297 ToRegister(instr->temp()), kDontSaveFPRegs); |
4250 } else { | 4298 } else { |
4251 PushSafepointRegistersScope scope(this); | 4299 PushSafepointRegistersScope scope(this); |
4252 if (!object_reg.is(rax)) { | 4300 if (!object_reg.is(rax)) { |
4253 __ movq(rax, object_reg); | 4301 __ movq(rax, object_reg); |
4254 } | 4302 } |
| 4303 LoadContextFromDeferred(instr->context()); |
4255 __ Move(rbx, to_map); | 4304 __ Move(rbx, to_map); |
4256 TransitionElementsKindStub stub(from_kind, to_kind); | 4305 TransitionElementsKindStub stub(from_kind, to_kind); |
4257 __ CallStub(&stub); | 4306 __ CallStub(&stub); |
4258 RecordSafepointWithRegisters( | 4307 RecordSafepointWithRegisters( |
4259 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4308 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
4260 } | 4309 } |
4261 __ bind(¬_applicable); | 4310 __ bind(¬_applicable); |
4262 } | 4311 } |
4263 | 4312 |
4264 | 4313 |
4265 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 4314 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
4266 Register object = ToRegister(instr->object()); | 4315 Register object = ToRegister(instr->object()); |
4267 Register temp = ToRegister(instr->temp()); | 4316 Register temp = ToRegister(instr->temp()); |
4268 Label no_memento_found; | 4317 Label no_memento_found; |
4269 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); | 4318 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found); |
4270 DeoptimizeIf(equal, instr->environment()); | 4319 DeoptimizeIf(equal, instr->environment()); |
4271 __ bind(&no_memento_found); | 4320 __ bind(&no_memento_found); |
4272 } | 4321 } |
4273 | 4322 |
4274 | 4323 |
4275 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 4324 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
| 4325 ASSERT(ToRegister(instr->context()).is(rsi)); |
4276 EmitPushTaggedOperand(instr->left()); | 4326 EmitPushTaggedOperand(instr->left()); |
4277 EmitPushTaggedOperand(instr->right()); | 4327 EmitPushTaggedOperand(instr->right()); |
4278 StringAddStub stub(instr->hydrogen()->flags()); | 4328 StringAddStub stub(instr->hydrogen()->flags()); |
4279 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 4329 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
4280 } | 4330 } |
4281 | 4331 |
4282 | 4332 |
4283 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 4333 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
4284 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { | 4334 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { |
4285 public: | 4335 public: |
(...skipping 34 matching lines...) |
4320 // DoStringCharCodeAt above. | 4370 // DoStringCharCodeAt above. |
4321 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); | 4371 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); |
4322 if (instr->index()->IsConstantOperand()) { | 4372 if (instr->index()->IsConstantOperand()) { |
4323 int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index())); | 4373 int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index())); |
4324 __ Push(Smi::FromInt(const_index)); | 4374 __ Push(Smi::FromInt(const_index)); |
4325 } else { | 4375 } else { |
4326 Register index = ToRegister(instr->index()); | 4376 Register index = ToRegister(instr->index()); |
4327 __ Integer32ToSmi(index, index); | 4377 __ Integer32ToSmi(index, index); |
4328 __ push(index); | 4378 __ push(index); |
4329 } | 4379 } |
4330 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr); | 4380 CallRuntimeFromDeferred( |
| 4381 Runtime::kStringCharCodeAt, 2, instr, instr->context()); |
4331 __ AssertSmi(rax); | 4382 __ AssertSmi(rax); |
4332 __ SmiToInteger32(rax, rax); | 4383 __ SmiToInteger32(rax, rax); |
4333 __ StoreToSafepointRegisterSlot(result, rax); | 4384 __ StoreToSafepointRegisterSlot(result, rax); |
4334 } | 4385 } |
4335 | 4386 |
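[Reviewer note, not part of the CL] CallRuntimeFromDeferred has grown a fourth parameter carrying the context operand, and the deferred call sites in this file now pass instr->context() (or load the context by hand, as in the NumberTagU/NumberTagD hunks below). The new overload's body is not in this hunk; judging from those hand-expanded call sites, it is presumably equivalent to the following sketch, using only helper names that already appear elsewhere in this file:

    void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                           int argc,
                                           LInstruction* instr,
                                           LOperand* context) {
      // Restore rsi from the instruction's context operand.
      LoadContextFromDeferred(context);
      // Call the runtime function, preserving double registers.
      __ CallRuntimeSaveDoubles(id);
      // Deferred code runs with registers pushed, so record a register
      // safepoint rather than a lazy-deopt safepoint.
      RecordSafepointWithRegisters(
          instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
    }

This is a reading of the pattern, offered as an assumption for review context rather than as the definitive implementation.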
4336 | 4387 |
4337 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { | 4388 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { |
4338 class DeferredStringCharFromCode V8_FINAL : public LDeferredCode { | 4389 class DeferredStringCharFromCode V8_FINAL : public LDeferredCode { |
4339 public: | 4390 public: |
4340 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) | 4391 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) |
(...skipping 32 matching lines...) |
4373 Register result = ToRegister(instr->result()); | 4424 Register result = ToRegister(instr->result()); |
4374 | 4425 |
4375 // TODO(3095996): Get rid of this. For now, we need to make the | 4426 // TODO(3095996): Get rid of this. For now, we need to make the |
4376 // result register contain a valid pointer because it is already | 4427 // result register contain a valid pointer because it is already |
4377 // contained in the register pointer map. | 4428 // contained in the register pointer map. |
4378 __ Set(result, 0); | 4429 __ Set(result, 0); |
4379 | 4430 |
4380 PushSafepointRegistersScope scope(this); | 4431 PushSafepointRegistersScope scope(this); |
4381 __ Integer32ToSmi(char_code, char_code); | 4432 __ Integer32ToSmi(char_code, char_code); |
4382 __ push(char_code); | 4433 __ push(char_code); |
4383 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr); | 4434 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context()); |
4384 __ StoreToSafepointRegisterSlot(result, rax); | 4435 __ StoreToSafepointRegisterSlot(result, rax); |
4385 } | 4436 } |
4386 | 4437 |
4387 | 4438 |
4388 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { | 4439 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { |
4389 LOperand* input = instr->value(); | 4440 LOperand* input = instr->value(); |
4390 ASSERT(input->IsRegister() || input->IsStackSlot()); | 4441 ASSERT(input->IsRegister() || input->IsStackSlot()); |
4391 LOperand* output = instr->result(); | 4442 LOperand* output = instr->result(); |
4392 ASSERT(output->IsDoubleRegister()); | 4443 ASSERT(output->IsDoubleRegister()); |
4393 if (input->IsRegister()) { | 4444 if (input->IsRegister()) { |
(...skipping 83 matching lines...) |
4477 } | 4528 } |
4478 | 4529 |
4479 // Slow case: Call the runtime system to do the number allocation. | 4530 // Slow case: Call the runtime system to do the number allocation. |
4480 __ bind(&slow); | 4531 __ bind(&slow); |
4481 | 4532 |
4482 // Put a valid pointer value in the stack slot where the result | 4533 // Put a valid pointer value in the stack slot where the result |
4483 // register is stored, as this register is in the pointer map, but contains an | 4534 // register is stored, as this register is in the pointer map, but contains an |
4484 // integer value. | 4535 // integer value. |
4485 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); | 4536 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); |
4486 | 4537 |
4487 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); | 4538 // NumberTagU uses the context from the frame, rather than |
| 4539 // the environment's HContext or HInlinedContext value. |
| 4540 // They only call Runtime::kAllocateHeapNumber. |
| 4541 // The corresponding HChange instructions are added in a phase that does |
| 4542 // not have easy access to the local context. |
| 4543 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 4544 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 4545 RecordSafepointWithRegisters( |
| 4546 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4547 |
4488 if (!reg.is(rax)) __ movq(reg, rax); | 4548 if (!reg.is(rax)) __ movq(reg, rax); |
4489 | 4549 |
4490 // Done. Put the value in temp_xmm into the value of the allocated heap | 4550 // Done. Put the value in temp_xmm into the value of the allocated heap |
4491 // number. | 4551 // number. |
4492 __ bind(&done); | 4552 __ bind(&done); |
4493 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm); | 4553 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm); |
4494 __ StoreToSafepointRegisterSlot(reg, reg); | 4554 __ StoreToSafepointRegisterSlot(reg, reg); |
4495 } | 4555 } |
4496 | 4556 |
4497 | 4557 |
(...skipping 27 matching lines...) |
4525 | 4585 |
4526 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { | 4586 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { |
4527 // TODO(3095996): Get rid of this. For now, we need to make the | 4587 // TODO(3095996): Get rid of this. For now, we need to make the |
4528 // result register contain a valid pointer because it is already | 4588 // result register contain a valid pointer because it is already |
4529 // contained in the register pointer map. | 4589 // contained in the register pointer map. |
4530 Register reg = ToRegister(instr->result()); | 4590 Register reg = ToRegister(instr->result()); |
4531 __ Move(reg, Smi::FromInt(0)); | 4591 __ Move(reg, Smi::FromInt(0)); |
4532 | 4592 |
4533 { | 4593 { |
4534 PushSafepointRegistersScope scope(this); | 4594 PushSafepointRegistersScope scope(this); |
4535 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); | 4595 // NumberTagD uses the context from the frame, rather than |
4536 // Ensure that value in rax survives popping registers. | 4596 // the environment's HContext or HInlinedContext value. |
| 4597 // They only call Runtime::kAllocateHeapNumber. |
| 4598 // The corresponding HChange instructions are added in a phase that does |
| 4599 // not have easy access to the local context. |
| 4600 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 4601 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 4602 RecordSafepointWithRegisters( |
| 4603 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
4537 __ movq(kScratchRegister, rax); | 4604 __ movq(kScratchRegister, rax); |
4538 } | 4605 } |
4539 __ movq(reg, kScratchRegister); | 4606 __ movq(reg, kScratchRegister); |
4540 } | 4607 } |
4541 | 4608 |
4542 | 4609 |
4543 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4610 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
4544 ASSERT(instr->value()->Equals(instr->result())); | 4611 ASSERT(instr->value()->Equals(instr->result())); |
4545 Register input = ToRegister(instr->value()); | 4612 Register input = ToRegister(instr->value()); |
4546 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); | 4613 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); |
(...skipping 285 matching lines...) |
4832 Register reg = ToRegister(instr->value()); | 4899 Register reg = ToRegister(instr->value()); |
4833 __ Cmp(reg, instr->hydrogen()->object().handle()); | 4900 __ Cmp(reg, instr->hydrogen()->object().handle()); |
4834 DeoptimizeIf(not_equal, instr->environment()); | 4901 DeoptimizeIf(not_equal, instr->environment()); |
4835 } | 4902 } |
4836 | 4903 |
4837 | 4904 |
4838 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 4905 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { |
4839 { | 4906 { |
4840 PushSafepointRegistersScope scope(this); | 4907 PushSafepointRegistersScope scope(this); |
4841 __ push(object); | 4908 __ push(object); |
4842 CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr); | 4909 __ Set(rsi, 0); |
| 4910 __ CallRuntimeSaveDoubles(Runtime::kMigrateInstance); |
| 4911 RecordSafepointWithRegisters( |
| 4912 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); |
| 4913 |
4843 __ testq(rax, Immediate(kSmiTagMask)); | 4914 __ testq(rax, Immediate(kSmiTagMask)); |
4844 } | 4915 } |
4845 DeoptimizeIf(zero, instr->environment()); | 4916 DeoptimizeIf(zero, instr->environment()); |
4846 } | 4917 } |
4847 | 4918 |
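[Reviewer note, not part of the CL] DoDeferredInstanceMigration is the one deferred call site above that does not pass a real context: it clears rsi with __ Set(rsi, 0) before calling Runtime::kMigrateInstance, presumably because that runtime function does not use the context, and the safepoint records one argument for the pushed object. The failure protocol is unchanged: a Smi return value means migration failed, so the code smi-tests rax inside the scope and deoptimizes on the zero flag once the safepoint registers have been restored. A condensed reading, assuming the standard V8 tag constants (kSmiTag == 0):

    __ testq(rax, Immediate(kSmiTagMask));     // ZF set exactly when rax is a Smi
    // ... PushSafepointRegistersScope ends, saved registers are popped ...
    DeoptimizeIf(zero, instr->environment());  // bail out if migration failed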
4848 | 4919 |
4849 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { | 4920 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { |
4850 class DeferredCheckMaps V8_FINAL : public LDeferredCode { | 4921 class DeferredCheckMaps V8_FINAL : public LDeferredCode { |
4851 public: | 4922 public: |
4852 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) | 4923 DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) |
(...skipping 171 matching lines...) |
5024 __ Integer32ToSmi(size, size); | 5095 __ Integer32ToSmi(size, size); |
5025 __ push(size); | 5096 __ push(size); |
5026 } else { | 5097 } else { |
5027 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 5098 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
5028 __ Push(Smi::FromInt(size)); | 5099 __ Push(Smi::FromInt(size)); |
5029 } | 5100 } |
5030 | 5101 |
5031 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { | 5102 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { |
5032 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); | 5103 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); |
5033 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5104 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
5034 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr); | 5105 CallRuntimeFromDeferred( |
| 5106 Runtime::kAllocateInOldPointerSpace, 1, instr, instr->context()); |
5035 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { | 5107 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
5036 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5108 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
5037 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr); | 5109 CallRuntimeFromDeferred( |
| 5110 Runtime::kAllocateInOldDataSpace, 1, instr, instr->context()); |
5038 } else { | 5111 } else { |
5039 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); | 5112 CallRuntimeFromDeferred( |
| 5113 Runtime::kAllocateInNewSpace, 1, instr, instr->context()); |
5040 } | 5114 } |
5041 __ StoreToSafepointRegisterSlot(result, rax); | 5115 __ StoreToSafepointRegisterSlot(result, rax); |
5042 } | 5116 } |
5043 | 5117 |
5044 | 5118 |
5045 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { | 5119 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { |
5046 ASSERT(ToRegister(instr->value()).is(rax)); | 5120 ASSERT(ToRegister(instr->value()).is(rax)); |
5047 __ push(rax); | 5121 __ push(rax); |
5048 CallRuntime(Runtime::kToFastProperties, 1, instr); | 5122 CallRuntime(Runtime::kToFastProperties, 1, instr); |
5049 } | 5123 } |
5050 | 5124 |
5051 | 5125 |
5052 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { | 5126 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { |
| 5127 ASSERT(ToRegister(instr->context()).is(rsi)); |
5053 Label materialized; | 5128 Label materialized; |
5054 // Registers will be used as follows: | 5129 // Registers will be used as follows: |
5055 // rcx = literals array. | 5130 // rcx = literals array. |
5056 // rbx = regexp literal. | 5131 // rbx = regexp literal. |
5057 // rax = regexp literal clone. | 5132 // rax = regexp literal clone. |
5058 int literal_offset = | 5133 int literal_offset = |
5059 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); | 5134 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); |
5060 __ Move(rcx, instr->hydrogen()->literals()); | 5135 __ Move(rcx, instr->hydrogen()->literals()); |
5061 __ movq(rbx, FieldOperand(rcx, literal_offset)); | 5136 __ movq(rbx, FieldOperand(rcx, literal_offset)); |
5062 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 5137 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
(...skipping 30 matching lines...) Expand all Loading... |
5093 __ movq(FieldOperand(rax, i + kPointerSize), rcx); | 5168 __ movq(FieldOperand(rax, i + kPointerSize), rcx); |
5094 } | 5169 } |
5095 if ((size % (2 * kPointerSize)) != 0) { | 5170 if ((size % (2 * kPointerSize)) != 0) { |
5096 __ movq(rdx, FieldOperand(rbx, size - kPointerSize)); | 5171 __ movq(rdx, FieldOperand(rbx, size - kPointerSize)); |
5097 __ movq(FieldOperand(rax, size - kPointerSize), rdx); | 5172 __ movq(FieldOperand(rax, size - kPointerSize), rdx); |
5098 } | 5173 } |
5099 } | 5174 } |
5100 | 5175 |
5101 | 5176 |
5102 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 5177 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
| 5178 ASSERT(ToRegister(instr->context()).is(rsi)); |
5103 // Use the fast case closure allocation code that allocates in new | 5179 // Use the fast case closure allocation code that allocates in new |
5104 // space for nested functions that don't need literals cloning. | 5180 // space for nested functions that don't need literals cloning. |
5105 bool pretenure = instr->hydrogen()->pretenure(); | 5181 bool pretenure = instr->hydrogen()->pretenure(); |
5106 if (!pretenure && instr->hydrogen()->has_no_literals()) { | 5182 if (!pretenure && instr->hydrogen()->has_no_literals()) { |
5107 FastNewClosureStub stub(instr->hydrogen()->language_mode(), | 5183 FastNewClosureStub stub(instr->hydrogen()->language_mode(), |
5108 instr->hydrogen()->is_generator()); | 5184 instr->hydrogen()->is_generator()); |
5109 __ Move(rbx, instr->hydrogen()->shared_info()); | 5185 __ Move(rbx, instr->hydrogen()->shared_info()); |
5110 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5186 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
5111 } else { | 5187 } else { |
5112 __ push(rsi); | 5188 __ push(rsi); |
5113 __ Push(instr->hydrogen()->shared_info()); | 5189 __ Push(instr->hydrogen()->shared_info()); |
5114 __ PushRoot(pretenure ? Heap::kTrueValueRootIndex : | 5190 __ PushRoot(pretenure ? Heap::kTrueValueRootIndex : |
5115 Heap::kFalseValueRootIndex); | 5191 Heap::kFalseValueRootIndex); |
5116 CallRuntime(Runtime::kNewClosure, 3, instr); | 5192 CallRuntime(Runtime::kNewClosure, 3, instr); |
5117 } | 5193 } |
5118 } | 5194 } |
5119 | 5195 |
5120 | 5196 |
5121 void LCodeGen::DoTypeof(LTypeof* instr) { | 5197 void LCodeGen::DoTypeof(LTypeof* instr) { |
| 5198 ASSERT(ToRegister(instr->context()).is(rsi)); |
5122 LOperand* input = instr->value(); | 5199 LOperand* input = instr->value(); |
5123 EmitPushTaggedOperand(input); | 5200 EmitPushTaggedOperand(input); |
5124 CallRuntime(Runtime::kTypeof, 1, instr); | 5201 CallRuntime(Runtime::kTypeof, 1, instr); |
5125 } | 5202 } |
5126 | 5203 |
5127 | 5204 |
5128 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) { | 5205 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) { |
5129 ASSERT(!operand->IsDoubleRegister()); | 5206 ASSERT(!operand->IsDoubleRegister()); |
5130 if (operand->IsConstantOperand()) { | 5207 if (operand->IsConstantOperand()) { |
5131 __ Push(ToHandle(LConstantOperand::cast(operand))); | 5208 __ Push(ToHandle(LConstantOperand::cast(operand))); |
(...skipping 187 matching lines...) |
5319 | 5396 |
5320 ASSERT(instr->HasEnvironment()); | 5397 ASSERT(instr->HasEnvironment()); |
5321 LEnvironment* env = instr->environment(); | 5398 LEnvironment* env = instr->environment(); |
5322 // There is no LLazyBailout instruction for stack-checks. We have to | 5399 // There is no LLazyBailout instruction for stack-checks. We have to |
5323 // prepare for lazy deoptimization explicitly here. | 5400 // prepare for lazy deoptimization explicitly here. |
5324 if (instr->hydrogen()->is_function_entry()) { | 5401 if (instr->hydrogen()->is_function_entry()) { |
5325 // Perform stack overflow check. | 5402 // Perform stack overflow check. |
5326 Label done; | 5403 Label done; |
5327 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 5404 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
5328 __ j(above_equal, &done, Label::kNear); | 5405 __ j(above_equal, &done, Label::kNear); |
| 5406 |
| 5407 ASSERT(instr->context()->IsRegister()); |
| 5408 ASSERT(ToRegister(instr->context()).is(rsi)); |
5329 CallCode(isolate()->builtins()->StackCheck(), | 5409 CallCode(isolate()->builtins()->StackCheck(), |
5330 RelocInfo::CODE_TARGET, | 5410 RelocInfo::CODE_TARGET, |
5331 instr); | 5411 instr); |
5332 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5412 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
5333 last_lazy_deopt_pc_ = masm()->pc_offset(); | 5413 last_lazy_deopt_pc_ = masm()->pc_offset(); |
5334 __ bind(&done); | 5414 __ bind(&done); |
5335 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5415 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
5336 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5416 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
5337 } else { | 5417 } else { |
5338 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5418 ASSERT(instr->hydrogen()->is_backwards_branch()); |
(...skipping 23 matching lines...) |
5362 // If the environment were already registered, we would have no way of | 5442 // If the environment were already registered, we would have no way of |
5363 // backpatching it with the spill slot operands. | 5443 // backpatching it with the spill slot operands. |
5364 ASSERT(!environment->HasBeenRegistered()); | 5444 ASSERT(!environment->HasBeenRegistered()); |
5365 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 5445 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
5366 | 5446 |
5367 GenerateOsrPrologue(); | 5447 GenerateOsrPrologue(); |
5368 } | 5448 } |
5369 | 5449 |
5370 | 5450 |
5371 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { | 5451 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { |
| 5452 ASSERT(ToRegister(instr->context()).is(rsi)); |
5372 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); | 5453 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); |
5373 DeoptimizeIf(equal, instr->environment()); | 5454 DeoptimizeIf(equal, instr->environment()); |
5374 | 5455 |
5375 Register null_value = rdi; | 5456 Register null_value = rdi; |
5376 __ LoadRoot(null_value, Heap::kNullValueRootIndex); | 5457 __ LoadRoot(null_value, Heap::kNullValueRootIndex); |
5377 __ cmpq(rax, null_value); | 5458 __ cmpq(rax, null_value); |
5378 DeoptimizeIf(equal, instr->environment()); | 5459 DeoptimizeIf(equal, instr->environment()); |
5379 | 5460 |
5380 Condition cc = masm()->CheckSmi(rax); | 5461 Condition cc = masm()->CheckSmi(rax); |
5381 DeoptimizeIf(cc, instr->environment()); | 5462 DeoptimizeIf(cc, instr->environment()); |
(...skipping 73 matching lines...) |
5455 FixedArray::kHeaderSize - kPointerSize)); | 5536 FixedArray::kHeaderSize - kPointerSize)); |
5456 __ bind(&done); | 5537 __ bind(&done); |
5457 } | 5538 } |
5458 | 5539 |
5459 | 5540 |
5460 #undef __ | 5541 #undef __ |
5461 | 5542 |
5462 } } // namespace v8::internal | 5543 } } // namespace v8::internal |
5463 | 5544 |
5464 #endif // V8_TARGET_ARCH_X64 | 5545 #endif // V8_TARGET_ARCH_X64 |