Chromium Code Reviews

Side by Side Diff: src/ia32/lithium-codegen-ia32.cc

Issue 6452001: Allow esi to be an allocatable register on IA32. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 10 months ago
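Overview: the point of this patch is that esi stops being a reserved context register and becomes available to the register allocator. As a consequence, no call site may assume esi still holds the current context; wherever that assumption existed, the code now reloads the context from its home slot in the stack frame. A minimal sketch of the recurring idiom, using the macro-assembler conventions from the diff below (the new `adjusted` flag is explained at the CallCode/CallRuntime changes):

    if (!adjusted) {
      // esi may hold an arbitrary allocated value here; fetch the
      // context from its canonical home in the frame.
      __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    }
    __ call(code, mode);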
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 139 matching lines...)
150 offset > 0; 150 offset > 0;
151 offset -= kPageSize) { 151 offset -= kPageSize) {
152 __ mov(Operand(esp, offset), eax); 152 __ mov(Operand(esp, offset), eax);
153 } 153 }
154 #endif 154 #endif
155 } 155 }
156 } 156 }
157 157
158 // Trace the call. 158 // Trace the call.
159 if (FLAG_trace) { 159 if (FLAG_trace) {
160 // We have not executed any compiled code yet, so esi still holds the
161 // incoming context.
160 __ CallRuntime(Runtime::kTraceEnter, 0); 162 __ CallRuntime(Runtime::kTraceEnter, 0);
161 } 163 }
162 return !is_aborted(); 164 return !is_aborted();
163 } 165 }
164 166
165 167
166 bool LCodeGen::GenerateBody() { 168 bool LCodeGen::GenerateBody() {
167 ASSERT(is_generating()); 169 ASSERT(is_generating());
168 bool emit_instructions = true; 170 bool emit_instructions = true;
169 for (current_instruction_ = 0; 171 for (current_instruction_ = 0;
(...skipping 190 matching lines...)
360 int src_index = DefineDeoptimizationLiteral(literal); 362 int src_index = DefineDeoptimizationLiteral(literal);
361 translation->StoreLiteral(src_index); 363 translation->StoreLiteral(src_index);
362 } else { 364 } else {
363 UNREACHABLE(); 365 UNREACHABLE();
364 } 366 }
365 } 367 }
366 368
367 369
368 void LCodeGen::CallCode(Handle<Code> code, 370 void LCodeGen::CallCode(Handle<Code> code,
369 RelocInfo::Mode mode, 371 RelocInfo::Mode mode,
370 LInstruction* instr) { 372 LInstruction* instr,
373 bool adjusted) {
371 ASSERT(instr != NULL); 374 ASSERT(instr != NULL);
372 LPointerMap* pointers = instr->pointer_map(); 375 LPointerMap* pointers = instr->pointer_map();
373 RecordPosition(pointers->position()); 376 RecordPosition(pointers->position());
377 if (!adjusted) {
fschneider 2011/02/09 13:41:53 Is there a convenient way to assert that "!adjus
378 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
379 }
374 __ call(code, mode); 380 __ call(code, mode);
375 RegisterLazyDeoptimization(instr); 381 RegisterLazyDeoptimization(instr);
376 382
377 // Signal that we don't inline smi code before these stubs in the 383 // Signal that we don't inline smi code before these stubs in the
378 // optimizing code generator. 384 // optimizing code generator.
379 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || 385 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
380 code->kind() == Code::COMPARE_IC) { 386 code->kind() == Code::COMPARE_IC) {
381 __ nop(); 387 __ nop();
382 } 388 }
383 } 389 }
384 390
385 391
386 void LCodeGen::CallRuntime(Runtime::Function* function, 392 void LCodeGen::CallRuntime(Runtime::Function* fun,
387 int num_arguments, 393 int argc,
388 LInstruction* instr) { 394 LInstruction* instr,
395 bool adjusted) {
389 ASSERT(instr != NULL); 396 ASSERT(instr != NULL);
390 ASSERT(instr->HasPointerMap()); 397 ASSERT(instr->HasPointerMap());
391 LPointerMap* pointers = instr->pointer_map(); 398 LPointerMap* pointers = instr->pointer_map();
392 RecordPosition(pointers->position()); 399 RecordPosition(pointers->position());
393 400
394 __ CallRuntime(function, num_arguments); 401 if (!adjusted) {
402 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
403 }
404 __ CallRuntime(fun, argc);
395 RegisterLazyDeoptimization(instr); 405 RegisterLazyDeoptimization(instr);
396 } 406 }
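The new `adjusted` parameter on CallCode/CallRuntime means "esi already contains the current context". Passing false makes the helper reload esi from the frame slot before calling out; the three-argument call sites that remain in this diff suggest the parameter defaults to true in the header, which is not part of this file. A usage sketch taken from DoThrow below:

    __ push(ToOperand(instr->InputAt(0)));
    // Nothing has put the context in esi at this point, so ask the
    // helper to reload it before entering the runtime.
    CallRuntime(Runtime::kThrow, 1, instr, false);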
397 407
398 408
399 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { 409 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
400 // Create the environment to bailout to. If the call has side effects 410 // Create the environment to bailout to. If the call has side effects
401 // execution has to continue after the call otherwise execution can continue 411 // execution has to continue after the call otherwise execution can continue
402 // from a previous bailout point repeating the call. 412 // from a previous bailout point repeating the call.
403 LEnvironment* deoptimization_environment; 413 LEnvironment* deoptimization_environment;
404 if (instr->HasDeoptimizationEnvironment()) { 414 if (instr->HasDeoptimizationEnvironment()) {
(...skipping 156 matching lines...)
561 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), 571 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
562 kind, arguments, deoptimization_index); 572 kind, arguments, deoptimization_index);
563 for (int i = 0; i < operands->length(); i++) { 573 for (int i = 0; i < operands->length(); i++) {
564 LOperand* pointer = operands->at(i); 574 LOperand* pointer = operands->at(i);
565 if (pointer->IsStackSlot()) { 575 if (pointer->IsStackSlot()) {
566 safepoint.DefinePointerSlot(pointer->index()); 576 safepoint.DefinePointerSlot(pointer->index());
567 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { 577 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
568 safepoint.DefinePointerRegister(ToRegister(pointer)); 578 safepoint.DefinePointerRegister(ToRegister(pointer));
569 } 579 }
570 } 580 }
571 if (kind & Safepoint::kWithRegisters) {
572 // Register esi always contains a pointer to the context.
573 safepoint.DefinePointerRegister(esi);
574 }
575 } 581 }
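The four lines deleted above unconditionally recorded esi as a pointer register at every safepoint-with-registers, which was correct only while esi was pinned to the context. With esi allocatable it can hold untagged data at a safepoint, so it must not be blanket-recorded; when it does hold a tagged value, it presumably appears in the pointer map like any other allocated register.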
576 582
577 583
578 void LCodeGen::RecordSafepoint(LPointerMap* pointers, 584 void LCodeGen::RecordSafepoint(LPointerMap* pointers,
579 int deoptimization_index) { 585 int deoptimization_index) {
580 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); 586 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
581 } 587 }
582 588
583 589
584 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, 590 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
(...skipping 43 matching lines...)
628 } 634 }
629 } 635 }
630 636
631 637
632 void LCodeGen::DoParameter(LParameter* instr) { 638 void LCodeGen::DoParameter(LParameter* instr) {
633 // Nothing to do. 639 // Nothing to do.
634 } 640 }
635 641
636 642
637 void LCodeGen::DoCallStub(LCallStub* instr) { 643 void LCodeGen::DoCallStub(LCallStub* instr) {
644 ASSERT(ToRegister(instr->context()).is(esi));
638 ASSERT(ToRegister(instr->result()).is(eax)); 645 ASSERT(ToRegister(instr->result()).is(eax));
639 switch (instr->hydrogen()->major_key()) { 646 switch (instr->hydrogen()->major_key()) {
640 case CodeStub::RegExpConstructResult: { 647 case CodeStub::RegExpConstructResult: {
641 RegExpConstructResultStub stub; 648 RegExpConstructResultStub stub;
642 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 649 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
643 break; 650 break;
644 } 651 }
645 case CodeStub::RegExpExec: { 652 case CodeStub::RegExpExec: {
646 RegExpExecStub stub; 653 RegExpExecStub stub;
647 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 654 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
(...skipping 356 matching lines...)
1004 1011
1005 void LCodeGen::DoBitNotI(LBitNotI* instr) { 1012 void LCodeGen::DoBitNotI(LBitNotI* instr) {
1006 LOperand* input = instr->InputAt(0); 1013 LOperand* input = instr->InputAt(0);
1007 ASSERT(input->Equals(instr->result())); 1014 ASSERT(input->Equals(instr->result()));
1008 __ not_(ToRegister(input)); 1015 __ not_(ToRegister(input));
1009 } 1016 }
1010 1017
1011 1018
1012 void LCodeGen::DoThrow(LThrow* instr) { 1019 void LCodeGen::DoThrow(LThrow* instr) {
1013 __ push(ToOperand(instr->InputAt(0))); 1020 __ push(ToOperand(instr->InputAt(0)));
1014 CallRuntime(Runtime::kThrow, 1, instr); 1021 CallRuntime(Runtime::kThrow, 1, instr, false);
1015 1022
1016 if (FLAG_debug_code) { 1023 if (FLAG_debug_code) {
1017 Comment("Unreachable code."); 1024 Comment("Unreachable code.");
1018 __ int3(); 1025 __ int3();
1019 } 1026 }
1020 } 1027 }
1021 1028
1022 1029
1023 void LCodeGen::DoAddI(LAddI* instr) { 1030 void LCodeGen::DoAddI(LAddI* instr) {
1024 LOperand* left = instr->InputAt(0); 1031 LOperand* left = instr->InputAt(0);
(...skipping 51 matching lines...)
1076 } 1083 }
1077 } 1084 }
1078 1085
1079 1086
1080 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { 1087 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1081 ASSERT(ToRegister(instr->InputAt(0)).is(edx)); 1088 ASSERT(ToRegister(instr->InputAt(0)).is(edx));
1082 ASSERT(ToRegister(instr->InputAt(1)).is(eax)); 1089 ASSERT(ToRegister(instr->InputAt(1)).is(eax));
1083 ASSERT(ToRegister(instr->result()).is(eax)); 1090 ASSERT(ToRegister(instr->result()).is(eax));
1084 1091
1085 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); 1092 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
1086 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1093 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
1087 } 1094 }
1088 1095
1089 1096
1090 int LCodeGen::GetNextEmittedBlock(int block) { 1097 int LCodeGen::GetNextEmittedBlock(int block) {
1091 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { 1098 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1092 LLabel* label = chunk_->GetLabel(i); 1099 LLabel* label = chunk_->GetLabel(i);
1093 if (!label->HasReplacement()) return i; 1100 if (!label->HasReplacement()) return i;
1094 } 1101 }
1095 return -1; 1102 return -1;
1096 } 1103 }
(...skipping 92 matching lines...)
1189 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); 1196 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
1190 } else { 1197 } else {
1191 __ jmp(chunk_->GetAssemblyLabel(block)); 1198 __ jmp(chunk_->GetAssemblyLabel(block));
1192 } 1199 }
1193 } 1200 }
1194 } 1201 }
1195 1202
1196 1203
1197 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { 1204 void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
1198 __ pushad(); 1205 __ pushad();
1206 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1199 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); 1207 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
1200 RecordSafepointWithRegisters( 1208 RecordSafepointWithRegisters(
1201 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); 1209 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
1202 __ popad(); 1210 __ popad();
1203 } 1211 }
1204 1212
1205 void LCodeGen::DoGoto(LGoto* instr) { 1213 void LCodeGen::DoGoto(LGoto* instr) {
1206 class DeferredStackCheck: public LDeferredCode { 1214 class DeferredStackCheck: public LDeferredCode {
1207 public: 1215 public:
1208 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) 1216 DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
(...skipping 470 matching lines...)
1679 int true_block = instr->true_block_id(); 1687 int true_block = instr->true_block_id();
1680 int false_block = instr->false_block_id(); 1688 int false_block = instr->false_block_id();
1681 1689
1682 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); 1690 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1683 EmitBranch(true_block, false_block, equal); 1691 EmitBranch(true_block, false_block, equal);
1684 } 1692 }
1685 1693
1686 1694
1687 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { 1695 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1688 // Object and function are in fixed registers defined by the stub. 1696 // Object and function are in fixed registers defined by the stub.
1697 ASSERT(ToRegister(instr->context()).is(esi));
1689 InstanceofStub stub(InstanceofStub::kArgsInRegisters); 1698 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1690 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1699 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1691 1700
1692 NearLabel true_value, done; 1701 NearLabel true_value, done;
1693 __ test(eax, Operand(eax)); 1702 __ test(eax, Operand(eax));
1694 __ j(zero, &true_value); 1703 __ j(zero, &true_value);
1695 __ mov(ToRegister(instr->result()), Factory::false_value()); 1704 __ mov(ToRegister(instr->result()), Factory::false_value());
1696 __ jmp(&done); 1705 __ jmp(&done);
1697 __ bind(&true_value); 1706 __ bind(&true_value);
1698 __ mov(ToRegister(instr->result()), Factory::true_value()); 1707 __ mov(ToRegister(instr->result()), Factory::true_value());
1699 __ bind(&done); 1708 __ bind(&done);
1700 } 1709 }
1701 1710
1702 1711
1703 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { 1712 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1713 ASSERT(ToRegister(instr->context()).is(esi));
1704 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1714 int true_block = chunk_->LookupDestination(instr->true_block_id());
1705 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1715 int false_block = chunk_->LookupDestination(instr->false_block_id());
1706 1716
1707 InstanceofStub stub(InstanceofStub::kArgsInRegisters); 1717 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1708 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1718 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1709 __ test(eax, Operand(eax)); 1719 __ test(eax, Operand(eax));
1710 EmitBranch(true_block, false_block, zero); 1720 EmitBranch(true_block, false_block, zero);
1711 } 1721 }
1712 1722
1713 1723
(...skipping 73 matching lines...)
1787 flags = static_cast<InstanceofStub::Flags>( 1797 flags = static_cast<InstanceofStub::Flags>(
1788 flags | InstanceofStub::kReturnTrueFalseObject); 1798 flags | InstanceofStub::kReturnTrueFalseObject);
1789 InstanceofStub stub(flags); 1799 InstanceofStub stub(flags);
1790 1800
1791 // Get the temp register reserved by the instruction. This needs to be edi as 1801 // Get the temp register reserved by the instruction. This needs to be edi as
1792 // its slot of the pushing of safepoint registers is used to communicate the 1802 // its slot of the pushing of safepoint registers is used to communicate the
1793 // offset to the location of the map check. 1803 // offset to the location of the map check.
1794 Register temp = ToRegister(instr->TempAt(0)); 1804 Register temp = ToRegister(instr->TempAt(0));
1795 ASSERT(temp.is(edi)); 1805 ASSERT(temp.is(edi));
1796 __ mov(InstanceofStub::right(), Immediate(instr->function())); 1806 __ mov(InstanceofStub::right(), Immediate(instr->function()));
1797 static const int kAdditionalDelta = 13; 1807 static const int kAdditionalDelta = 16;
1798 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; 1808 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
1799 Label before_push_delta; 1809 Label before_push_delta;
1800 __ bind(&before_push_delta); 1810 __ bind(&before_push_delta);
1801 __ mov(temp, Immediate(delta)); 1811 __ mov(temp, Immediate(delta));
1802 __ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp); 1812 __ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp);
1813 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1803 __ call(stub.GetCode(), RelocInfo::CODE_TARGET); 1814 __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
1804 ASSERT_EQ(kAdditionalDelta, 1815 ASSERT_EQ(kAdditionalDelta,
1805 masm_->SizeOfCodeGeneratedSince(&before_push_delta)); 1816 masm_->SizeOfCodeGeneratedSince(&before_push_delta));
1806 RecordSafepointWithRegisters( 1817 RecordSafepointWithRegisters(
1807 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); 1818 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
1808 // Put the result value into the eax slot and restore all registers. 1819 // Put the result value into the eax slot and restore all registers.
1809 __ mov(Operand(esp, EspIndexForPushAll(eax) * kPointerSize), eax); 1820 __ mov(Operand(esp, EspIndexForPushAll(eax) * kPointerSize), eax);
1810 1821
1811 __ PopSafepointRegisters(); 1822 __ PopSafepointRegisters();
1812 } 1823 }
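Why kAdditionalDelta grows from 13 to 16: the constant is the number of bytes emitted between before_push_delta and the return address of the stub call, and the patch adds exactly one instruction in that window, the reload of esi from the frame's context slot. On ia32 that mov assembles to three bytes (opcode, ModR/M, 8-bit displacement, assuming kContextOffset fits in a disp8), and the ASSERT_EQ after the call checks that the hard-coded constant still matches the emitted code.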
(...skipping 16 matching lines...)
1829 UNREACHABLE(); 1840 UNREACHABLE();
1830 return no_condition; 1841 return no_condition;
1831 } 1842 }
1832 } 1843 }
1833 1844
1834 1845
1835 void LCodeGen::DoCmpT(LCmpT* instr) { 1846 void LCodeGen::DoCmpT(LCmpT* instr) {
1836 Token::Value op = instr->op(); 1847 Token::Value op = instr->op();
1837 1848
1838 Handle<Code> ic = CompareIC::GetUninitialized(op); 1849 Handle<Code> ic = CompareIC::GetUninitialized(op);
1839 CallCode(ic, RelocInfo::CODE_TARGET, instr); 1850 CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
1840 1851
1841 Condition condition = ComputeCompareCondition(op); 1852 Condition condition = ComputeCompareCondition(op);
1842 if (op == Token::GT || op == Token::LTE) { 1853 if (op == Token::GT || op == Token::LTE) {
1843 condition = ReverseCondition(condition); 1854 condition = ReverseCondition(condition);
1844 } 1855 }
1845 NearLabel true_value, done; 1856 NearLabel true_value, done;
1846 __ test(eax, Operand(eax)); 1857 __ test(eax, Operand(eax));
1847 __ j(condition, &true_value); 1858 __ j(condition, &true_value);
1848 __ mov(ToRegister(instr->result()), Factory::false_value()); 1859 __ mov(ToRegister(instr->result()), Factory::false_value());
1849 __ jmp(&done); 1860 __ jmp(&done);
1850 __ bind(&true_value); 1861 __ bind(&true_value);
1851 __ mov(ToRegister(instr->result()), Factory::true_value()); 1862 __ mov(ToRegister(instr->result()), Factory::true_value());
1852 __ bind(&done); 1863 __ bind(&done);
1853 } 1864 }
1854 1865
1855 1866
1856 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { 1867 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
1857 Token::Value op = instr->op(); 1868 Token::Value op = instr->op();
1858 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1869 int true_block = chunk_->LookupDestination(instr->true_block_id());
1859 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1870 int false_block = chunk_->LookupDestination(instr->false_block_id());
1860 1871
1861 Handle<Code> ic = CompareIC::GetUninitialized(op); 1872 Handle<Code> ic = CompareIC::GetUninitialized(op);
1862 CallCode(ic, RelocInfo::CODE_TARGET, instr); 1873 CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
1863 1874
1864 // The compare stub expects compare condition and the input operands 1875 // The compare stub expects compare condition and the input operands
1865 // reversed for GT and LTE. 1876 // reversed for GT and LTE.
1866 Condition condition = ComputeCompareCondition(op); 1877 Condition condition = ComputeCompareCondition(op);
1867 if (op == Token::GT || op == Token::LTE) { 1878 if (op == Token::GT || op == Token::LTE) {
1868 condition = ReverseCondition(condition); 1879 condition = ReverseCondition(condition);
1869 } 1880 }
1870 __ test(eax, Operand(eax)); 1881 __ test(eax, Operand(eax));
1871 EmitBranch(true_block, false_block, condition); 1882 EmitBranch(true_block, false_block, condition);
1872 } 1883 }
1873 1884
1874 1885
1875 void LCodeGen::DoReturn(LReturn* instr) { 1886 void LCodeGen::DoReturn(LReturn* instr) {
1876 if (FLAG_trace) { 1887 if (FLAG_trace) {
1877 // Preserve the return value on the stack and rely on the runtime 1888 // Preserve the return value on the stack and rely on the runtime call
1878 // call to return the value in the same register. 1889 // to return the value in the same register. We're leaving the code
1890 // managed by the register allocator and tearing down the frame, it's
1891 // safe to write to the context register.
1879 __ push(eax); 1892 __ push(eax);
1893 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1880 __ CallRuntime(Runtime::kTraceExit, 1); 1894 __ CallRuntime(Runtime::kTraceExit, 1);
1881 } 1895 }
1882 __ mov(esp, ebp); 1896 __ mov(esp, ebp);
1883 __ pop(ebp); 1897 __ pop(ebp);
1884 __ ret((ParameterCount() + 1) * kPointerSize); 1898 __ ret((ParameterCount() + 1) * kPointerSize);
1885 } 1899 }
1886 1900
1887 1901
1888 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) { 1902 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
1889 Register result = ToRegister(instr->result()); 1903 Register result = ToRegister(instr->result());
(...skipping 48 matching lines...)
1938 if (instr->hydrogen()->is_in_object()) { 1952 if (instr->hydrogen()->is_in_object()) {
1939 __ mov(result, FieldOperand(object, instr->hydrogen()->offset())); 1953 __ mov(result, FieldOperand(object, instr->hydrogen()->offset()));
1940 } else { 1954 } else {
1941 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); 1955 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
1942 __ mov(result, FieldOperand(result, instr->hydrogen()->offset())); 1956 __ mov(result, FieldOperand(result, instr->hydrogen()->offset()));
1943 } 1957 }
1944 } 1958 }
1945 1959
1946 1960
1947 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { 1961 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
1962 ASSERT(ToRegister(instr->context()).is(esi));
1948 ASSERT(ToRegister(instr->object()).is(eax)); 1963 ASSERT(ToRegister(instr->object()).is(eax));
1949 ASSERT(ToRegister(instr->result()).is(eax)); 1964 ASSERT(ToRegister(instr->result()).is(eax));
1950 1965
1951 __ mov(ecx, instr->name()); 1966 __ mov(ecx, instr->name());
1952 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); 1967 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
1953 CallCode(ic, RelocInfo::CODE_TARGET, instr); 1968 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1954 } 1969 }
1955 1970
1956 1971
1957 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { 1972 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
(...skipping 82 matching lines...)
2040 times_pointer_size, 2055 times_pointer_size,
2041 FixedArray::kHeaderSize)); 2056 FixedArray::kHeaderSize));
2042 2057
2043 // Check for the hole value. 2058 // Check for the hole value.
2044 __ cmp(result, Factory::the_hole_value()); 2059 __ cmp(result, Factory::the_hole_value());
2045 DeoptimizeIf(equal, instr->environment()); 2060 DeoptimizeIf(equal, instr->environment());
2046 } 2061 }
2047 2062
2048 2063
2049 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { 2064 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2065 ASSERT(ToRegister(instr->context()).is(esi));
2050 ASSERT(ToRegister(instr->object()).is(edx)); 2066 ASSERT(ToRegister(instr->object()).is(edx));
2051 ASSERT(ToRegister(instr->key()).is(eax)); 2067 ASSERT(ToRegister(instr->key()).is(eax));
2052 2068
2053 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); 2069 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2054 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2070 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2055 } 2071 }
2056 2072
2057 2073
2058 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 2074 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2059 Register result = ToRegister(instr->result()); 2075 Register result = ToRegister(instr->result());
(...skipping 48 matching lines...)
2108 ASSERT(ToRegister(instr->result()).is(eax)); 2124 ASSERT(ToRegister(instr->result()).is(eax));
2109 2125
2110 // If the receiver is null or undefined, we have to pass the 2126 // If the receiver is null or undefined, we have to pass the
2111 // global object as a receiver. 2127 // global object as a receiver.
2112 NearLabel global_receiver, receiver_ok; 2128 NearLabel global_receiver, receiver_ok;
2113 __ cmp(receiver, Factory::null_value()); 2129 __ cmp(receiver, Factory::null_value());
2114 __ j(equal, &global_receiver); 2130 __ j(equal, &global_receiver);
2115 __ cmp(receiver, Factory::undefined_value()); 2131 __ cmp(receiver, Factory::undefined_value());
2116 __ j(not_equal, &receiver_ok); 2132 __ j(not_equal, &receiver_ok);
2117 __ bind(&global_receiver); 2133 __ bind(&global_receiver);
2118 __ mov(receiver, GlobalObjectOperand()); 2134 // TODO(kmillikin): We have a hydrogen value for the global object. See
2135 // if it's better to use it than to explicitly fetch it from the context
2136 // here.
2137 __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset));
2138 __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
2119 __ bind(&receiver_ok); 2139 __ bind(&receiver_ok);
2120 2140
2121 Register length = ToRegister(instr->length()); 2141 Register length = ToRegister(instr->length());
2122 Register elements = ToRegister(instr->elements()); 2142 Register elements = ToRegister(instr->elements());
2123 2143
2124 Label invoke; 2144 Label invoke;
2125 2145
2126 // Copy the arguments to this function possibly from the 2146 // Copy the arguments to this function possibly from the
2127 // adaptor frame below it. 2147 // adaptor frame below it.
2128 const uint32_t kArgumentsLimit = 1 * KB; 2148 const uint32_t kArgumentsLimit = 1 * KB;
(...skipping 20 matching lines...)
2149 LPointerMap* pointers = instr->pointer_map(); 2169 LPointerMap* pointers = instr->pointer_map();
2150 LEnvironment* env = instr->deoptimization_environment(); 2170 LEnvironment* env = instr->deoptimization_environment();
2151 RecordPosition(pointers->position()); 2171 RecordPosition(pointers->position());
2152 RegisterEnvironmentForDeoptimization(env); 2172 RegisterEnvironmentForDeoptimization(env);
2153 SafepointGenerator safepoint_generator(this, 2173 SafepointGenerator safepoint_generator(this,
2154 pointers, 2174 pointers,
2155 env->deoptimization_index()); 2175 env->deoptimization_index());
2156 ASSERT(receiver.is(eax)); 2176 ASSERT(receiver.is(eax));
2157 v8::internal::ParameterCount actual(eax); 2177 v8::internal::ParameterCount actual(eax);
2158 __ InvokeFunction(edi, actual, CALL_FUNCTION, &safepoint_generator); 2178 __ InvokeFunction(edi, actual, CALL_FUNCTION, &safepoint_generator);
2159
2160 // Restore context.
2161 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2162 } 2179 }
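Fetching the global receiver changes shape here because GlobalObjectOperand() presumably reads the global object through esi; since esi can no longer be trusted to hold the context, the code first loads the context from the frame slot into the receiver register and then indexes it with ContextOperand(receiver, Context::GLOBAL_INDEX). The trailing "Restore context" mov after InvokeFunction is dropped for the same reason: callers that need the context afterwards reload it from the frame themselves.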
2163 2180
2164 2181
2165 void LCodeGen::DoPushArgument(LPushArgument* instr) { 2182 void LCodeGen::DoPushArgument(LPushArgument* instr) {
2166 LOperand* argument = instr->InputAt(0); 2183 LOperand* argument = instr->InputAt(0);
2167 if (argument->IsConstantOperand()) { 2184 if (argument->IsConstantOperand()) {
2168 __ push(ToImmediate(argument)); 2185 __ push(ToImmediate(argument));
2169 } else { 2186 } else {
2170 __ push(ToOperand(argument)); 2187 __ push(ToOperand(argument));
2171 } 2188 }
2172 } 2189 }
2173 2190
2174 2191
2175 void LCodeGen::DoContext(LContext* instr) { 2192 void LCodeGen::DoContext(LContext* instr) {
2176 Register result = ToRegister(instr->result()); 2193 Register result = ToRegister(instr->result());
2177 __ mov(result, esi); 2194 __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
2178 } 2195 }
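DoContext used to copy esi into the result register; with esi reallocatable mid-function, the context value is instead read from its canonical home at StandardFrameConstants::kContextOffset, which is valid regardless of what the allocator has currently placed in esi.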
2179 2196
2180 2197
2181 void LCodeGen::DoOuterContext(LOuterContext* instr) { 2198 void LCodeGen::DoOuterContext(LOuterContext* instr) {
2182 Register context = ToRegister(instr->context()); 2199 Register context = ToRegister(instr->context());
2183 Register result = ToRegister(instr->result()); 2200 Register result = ToRegister(instr->result());
2184 __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX))); 2201 __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2185 __ mov(result, FieldOperand(result, JSFunction::kContextOffset)); 2202 __ mov(result, FieldOperand(result, JSFunction::kContextOffset));
2186 } 2203 }
2187 2204
(...skipping 15 matching lines...)
2203 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, 2220 void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2204 int arity, 2221 int arity,
2205 LInstruction* instr) { 2222 LInstruction* instr) {
2206 // Change context if needed. 2223 // Change context if needed.
2207 bool change_context = 2224 bool change_context =
2208 (graph()->info()->closure()->context() != function->context()) || 2225 (graph()->info()->closure()->context() != function->context()) ||
2209 scope()->contains_with() || 2226 scope()->contains_with() ||
2210 (scope()->num_heap_slots() > 0); 2227 (scope()->num_heap_slots() > 0);
2211 if (change_context) { 2228 if (change_context) {
2212 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); 2229 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2230 } else {
2231 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
fschneider 2011/02/09 15:23:31 Should this function for now also take an addition
2213 } 2232 }
2214 2233
2215 // Set eax to arguments count if adaption is not needed. Assumes that eax 2234 // Set eax to arguments count if adaption is not needed. Assumes that eax
2216 // is available to write to at this point. 2235 // is available to write to at this point.
2217 if (!function->NeedsArgumentsAdaption()) { 2236 if (!function->NeedsArgumentsAdaption()) {
2218 __ mov(eax, arity); 2237 __ mov(eax, arity);
2219 } 2238 }
2220 2239
2221 LPointerMap* pointers = instr->pointer_map(); 2240 LPointerMap* pointers = instr->pointer_map();
2222 RecordPosition(pointers->position()); 2241 RecordPosition(pointers->position());
2223 2242
2224 // Invoke function. 2243 // Invoke function.
2225 if (*function == *graph()->info()->closure()) { 2244 if (*function == *graph()->info()->closure()) {
2226 __ CallSelf(); 2245 __ CallSelf();
2227 } else { 2246 } else {
2228 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); 2247 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
2229 } 2248 }
2230 2249
2231 // Setup deoptimization. 2250 // Setup deoptimization.
2232 RegisterLazyDeoptimization(instr); 2251 RegisterLazyDeoptimization(instr);
2233
2234 // Restore context.
2235 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2236 } 2252 }
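CallKnownFunction now establishes the callee's context in esi before the call in both branches: either the callee's own context taken from its function object, or, when no change is needed, the caller's context taken from the frame slot. Correspondingly, the post-call "Restore context" mov is deleted; any code after the call that needs the context reloads it from the frame.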
2237 2253
2238 2254
2239 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { 2255 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2240 ASSERT(ToRegister(instr->result()).is(eax)); 2256 ASSERT(ToRegister(instr->result()).is(eax));
2241 __ mov(edi, instr->function()); 2257 __ mov(edi, instr->function());
2242 CallKnownFunction(instr->function(), instr->arity(), instr); 2258 CallKnownFunction(instr->function(), instr->arity(), instr);
2243 } 2259 }
2244 2260
2245 2261
(...skipping 22 matching lines...)
2268 2284
2269 __ bind(&negative); 2285 __ bind(&negative);
2270 2286
2271 Label allocated, slow; 2287 Label allocated, slow;
2272 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); 2288 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
2273 __ jmp(&allocated); 2289 __ jmp(&allocated);
2274 2290
2275 // Slow case: Call the runtime system to do the number allocation. 2291 // Slow case: Call the runtime system to do the number allocation.
2276 __ bind(&slow); 2292 __ bind(&slow);
2277 2293
2294 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2278 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); 2295 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2279 RecordSafepointWithRegisters( 2296 RecordSafepointWithRegisters(
2280 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); 2297 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2281 // Set the pointer to the new heap number in tmp. 2298 // Set the pointer to the new heap number in tmp.
2282 if (!tmp.is(eax)) __ mov(tmp, eax); 2299 if (!tmp.is(eax)) __ mov(tmp, eax);
2283 2300
2284 // Restore input_reg after call to runtime. 2301 // Restore input_reg after call to runtime.
2285 __ mov(input_reg, Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize)); 2302 __ mov(input_reg, Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize));
2286 2303
2287 __ bind(&allocated); 2304 __ bind(&allocated);
(...skipping 191 matching lines...)
2479 __ fstp_d(Operand(esp, 0)); 2496 __ fstp_d(Operand(esp, 0));
2480 __ movdbl(result_reg, Operand(esp, 0)); 2497 __ movdbl(result_reg, Operand(esp, 0));
2481 __ add(Operand(esp), Immediate(kDoubleSize)); 2498 __ add(Operand(esp), Immediate(kDoubleSize));
2482 } 2499 }
2483 2500
2484 2501
2485 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { 2502 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
2486 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); 2503 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2487 TranscendentalCacheStub stub(TranscendentalCache::LOG, 2504 TranscendentalCacheStub stub(TranscendentalCache::LOG,
2488 TranscendentalCacheStub::UNTAGGED); 2505 TranscendentalCacheStub::UNTAGGED);
2489 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 2506 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
2490 } 2507 }
2491 2508
2492 2509
2493 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { 2510 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2494 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); 2511 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2495 TranscendentalCacheStub stub(TranscendentalCache::COS, 2512 TranscendentalCacheStub stub(TranscendentalCache::COS,
2496 TranscendentalCacheStub::UNTAGGED); 2513 TranscendentalCacheStub::UNTAGGED);
2497 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 2514 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
2498 } 2515 }
2499 2516
2500 2517
2501 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { 2518 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2502 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); 2519 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2503 TranscendentalCacheStub stub(TranscendentalCache::SIN, 2520 TranscendentalCacheStub stub(TranscendentalCache::SIN,
2504 TranscendentalCacheStub::UNTAGGED); 2521 TranscendentalCacheStub::UNTAGGED);
2505 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 2522 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
2506 } 2523 }
2507 2524
2508 2525
2509 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { 2526 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2510 switch (instr->op()) { 2527 switch (instr->op()) {
2511 case kMathAbs: 2528 case kMathAbs:
2512 DoMathAbs(instr); 2529 DoMathAbs(instr);
2513 break; 2530 break;
2514 case kMathFloor: 2531 case kMathFloor:
2515 DoMathFloor(instr); 2532 DoMathFloor(instr);
(...skipping 17 matching lines...)
2533 DoMathLog(instr); 2550 DoMathLog(instr);
2534 break; 2551 break;
2535 2552
2536 default: 2553 default:
2537 UNREACHABLE(); 2554 UNREACHABLE();
2538 } 2555 }
2539 } 2556 }
2540 2557
2541 2558
2542 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 2559 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
2560 ASSERT(ToRegister(instr->context()).is(esi));
2561 ASSERT(ToRegister(instr->key()).is(ecx));
2543 ASSERT(ToRegister(instr->result()).is(eax)); 2562 ASSERT(ToRegister(instr->result()).is(eax));
2544 ASSERT(ToRegister(instr->InputAt(0)).is(ecx));
2545 2563
2546 int arity = instr->arity(); 2564 int arity = instr->arity();
2547 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP); 2565 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2548 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2566 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2549 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2550 } 2567 }
2551 2568
2552 2569
2553 void LCodeGen::DoCallNamed(LCallNamed* instr) { 2570 void LCodeGen::DoCallNamed(LCallNamed* instr) {
2571 ASSERT(ToRegister(instr->context()).is(esi));
2554 ASSERT(ToRegister(instr->result()).is(eax)); 2572 ASSERT(ToRegister(instr->result()).is(eax));
2555 2573
2556 int arity = instr->arity(); 2574 int arity = instr->arity();
2557 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP); 2575 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2558 __ mov(ecx, instr->name()); 2576 __ mov(ecx, instr->name());
2559 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2577 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2560 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2561 } 2578 }
2562 2579
2563 2580
2564 void LCodeGen::DoCallFunction(LCallFunction* instr) { 2581 void LCodeGen::DoCallFunction(LCallFunction* instr) {
2582 ASSERT(ToRegister(instr->context()).is(esi));
2565 ASSERT(ToRegister(instr->result()).is(eax)); 2583 ASSERT(ToRegister(instr->result()).is(eax));
2566 2584
2567 int arity = instr->arity(); 2585 int arity = instr->arity();
2568 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); 2586 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2569 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 2587 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2570 __ Drop(1); 2588 __ Drop(1);
2571 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2572 } 2589 }
2573 2590
2574 2591
2575 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 2592 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
2593 ASSERT(ToRegister(instr->context()).is(esi));
2576 ASSERT(ToRegister(instr->result()).is(eax)); 2594 ASSERT(ToRegister(instr->result()).is(eax));
2577 2595
2578 int arity = instr->arity(); 2596 int arity = instr->arity();
2579 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP); 2597 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2580 __ mov(ecx, instr->name()); 2598 __ mov(ecx, instr->name());
2581 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); 2599 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2582 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2583 } 2600 }
2584 2601
2585 2602
2586 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { 2603 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2587 ASSERT(ToRegister(instr->result()).is(eax)); 2604 ASSERT(ToRegister(instr->result()).is(eax));
2588 __ mov(edi, instr->target()); 2605 __ mov(edi, instr->target());
2589 CallKnownFunction(instr->target(), instr->arity(), instr); 2606 CallKnownFunction(instr->target(), instr->arity(), instr);
2590 } 2607 }
2591 2608
2592 2609
2593 void LCodeGen::DoCallNew(LCallNew* instr) { 2610 void LCodeGen::DoCallNew(LCallNew* instr) {
2594 ASSERT(ToRegister(instr->InputAt(0)).is(edi)); 2611 ASSERT(ToRegister(instr->context()).is(esi));
2612 ASSERT(ToRegister(instr->constructor()).is(edi));
2595 ASSERT(ToRegister(instr->result()).is(eax)); 2613 ASSERT(ToRegister(instr->result()).is(eax));
2596 2614
2597 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall)); 2615 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
2598 __ Set(eax, Immediate(instr->arity())); 2616 __ Set(eax, Immediate(instr->arity()));
2599 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); 2617 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
2600 } 2618 }
2601 2619
2602 2620
2603 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { 2621 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2604 CallRuntime(instr->function(), instr->arity(), instr); 2622 CallRuntime(instr->function(), instr->arity(), instr, false);
2605 } 2623 }
2606 2624
2607 2625
2608 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { 2626 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
2609 Register object = ToRegister(instr->object()); 2627 Register object = ToRegister(instr->object());
2610 Register value = ToRegister(instr->value()); 2628 Register value = ToRegister(instr->value());
2611 int offset = instr->offset(); 2629 int offset = instr->offset();
2612 2630
2613 if (!instr->transition().is_null()) { 2631 if (!instr->transition().is_null()) {
2614 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); 2632 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
(...skipping 14 matching lines...)
2629 if (instr->needs_write_barrier()) { 2647 if (instr->needs_write_barrier()) {
2630 // Update the write barrier for the properties array. 2648 // Update the write barrier for the properties array.
2631 // object is used as a scratch register. 2649 // object is used as a scratch register.
2632 __ RecordWrite(temp, offset, value, object); 2650 __ RecordWrite(temp, offset, value, object);
2633 } 2651 }
2634 } 2652 }
2635 } 2653 }
2636 2654
2637 2655
2638 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 2656 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
2657 ASSERT(ToRegister(instr->context()).is(esi));
2639 ASSERT(ToRegister(instr->object()).is(edx)); 2658 ASSERT(ToRegister(instr->object()).is(edx));
2640 ASSERT(ToRegister(instr->value()).is(eax)); 2659 ASSERT(ToRegister(instr->value()).is(eax));
2641 2660
2642 __ mov(ecx, instr->name()); 2661 __ mov(ecx, instr->name());
2643 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); 2662 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
2644 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2663 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2645 } 2664 }
2646 2665
2647 2666
2648 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { 2667 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
(...skipping 28 matching lines...)
2677 FieldOperand(elements, 2696 FieldOperand(elements,
2678 key, 2697 key,
2679 times_pointer_size, 2698 times_pointer_size,
2680 FixedArray::kHeaderSize)); 2699 FixedArray::kHeaderSize));
2681 __ RecordWrite(elements, key, value); 2700 __ RecordWrite(elements, key, value);
2682 } 2701 }
2683 } 2702 }
2684 2703
2685 2704
2686 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 2705 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
2706 ASSERT(ToRegister(instr->context()).is(esi));
2687 ASSERT(ToRegister(instr->object()).is(edx)); 2707 ASSERT(ToRegister(instr->object()).is(edx));
2688 ASSERT(ToRegister(instr->key()).is(ecx)); 2708 ASSERT(ToRegister(instr->key()).is(ecx));
2689 ASSERT(ToRegister(instr->value()).is(eax)); 2709 ASSERT(ToRegister(instr->value()).is(eax));
2690 2710
2691 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); 2711 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
2692 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2712 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2693 } 2713 }
2694 2714
2695 2715
2696 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { 2716 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
(...skipping 115 matching lines...)
2812 // DoStringCharCodeAt above. 2832 // DoStringCharCodeAt above.
2813 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); 2833 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2814 if (instr->index()->IsConstantOperand()) { 2834 if (instr->index()->IsConstantOperand()) {
2815 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); 2835 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2816 __ push(Immediate(Smi::FromInt(const_index))); 2836 __ push(Immediate(Smi::FromInt(const_index)));
2817 } else { 2837 } else {
2818 Register index = ToRegister(instr->index()); 2838 Register index = ToRegister(instr->index());
2819 __ SmiTag(index); 2839 __ SmiTag(index);
2820 __ push(index); 2840 __ push(index);
2821 } 2841 }
2842 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2822 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt); 2843 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
2823 RecordSafepointWithRegisters( 2844 RecordSafepointWithRegisters(
2824 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex); 2845 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
2825 if (FLAG_debug_code) { 2846 if (FLAG_debug_code) {
2826 __ AbortIfNotSmi(eax); 2847 __ AbortIfNotSmi(eax);
2827 } 2848 }
2828 __ SmiUntag(eax); 2849 __ SmiUntag(eax);
2829 __ mov(Operand(esp, EspIndexForPushAll(result) * kPointerSize), eax); 2850 __ mov(Operand(esp, EspIndexForPushAll(result) * kPointerSize), eax);
2830 __ PopSafepointRegisters(); 2851 __ PopSafepointRegisters();
2831 } 2852 }
(...skipping 57 matching lines...)
2889 } 2910 }
2890 2911
2891 // Slow case: Call the runtime system to do the number allocation. 2912 // Slow case: Call the runtime system to do the number allocation.
2892 __ bind(&slow); 2913 __ bind(&slow);
2893 2914
2894 // TODO(3095996): Put a valid pointer value in the stack slot where the result 2915 // TODO(3095996): Put a valid pointer value in the stack slot where the result
2895 // register is stored, as this register is in the pointer map, but contains an 2916 // register is stored, as this register is in the pointer map, but contains an
2896 // integer value. 2917 // integer value.
2897 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), Immediate(0)); 2918 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), Immediate(0));
2898 2919
2920 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2899 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); 2921 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2900 RecordSafepointWithRegisters( 2922 RecordSafepointWithRegisters(
2901 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); 2923 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2902 if (!reg.is(eax)) __ mov(reg, eax); 2924 if (!reg.is(eax)) __ mov(reg, eax);
2903 2925
2904 // Done. Put the value in xmm0 into the value of the allocated heap 2926 // Done. Put the value in xmm0 into the value of the allocated heap
2905 // number. 2927 // number.
2906 __ bind(&done); 2928 __ bind(&done);
2907 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); 2929 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
2908 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), reg); 2930 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), reg);
(...skipping 27 matching lines...)
2936 2958
2937 2959
2938 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { 2960 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
2939 // TODO(3095996): Get rid of this. For now, we need to make the 2961 // TODO(3095996): Get rid of this. For now, we need to make the
2940 // result register contain a valid pointer because it is already 2962 // result register contain a valid pointer because it is already
2941 // contained in the register pointer map. 2963 // contained in the register pointer map.
2942 Register reg = ToRegister(instr->result()); 2964 Register reg = ToRegister(instr->result());
2943 __ Set(reg, Immediate(0)); 2965 __ Set(reg, Immediate(0));
2944 2966
2945 __ PushSafepointRegisters(); 2967 __ PushSafepointRegisters();
2968 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2946 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); 2969 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2947 RecordSafepointWithRegisters( 2970 RecordSafepointWithRegisters(
2948 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); 2971 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2949 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), eax); 2972 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), eax);
2950 __ PopSafepointRegisters(); 2973 __ PopSafepointRegisters();
2951 } 2974 }
2952 2975
2953 2976
2954 void LCodeGen::DoSmiTag(LSmiTag* instr) { 2977 void LCodeGen::DoSmiTag(LSmiTag* instr) {
2955 LOperand* input = instr->InputAt(0); 2978 LOperand* input = instr->InputAt(0);
(...skipping 395 matching lines...)
3351 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3374 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3352 __ push(Immediate(instr->hydrogen()->constant_elements())); 3375 __ push(Immediate(instr->hydrogen()->constant_elements()));
3353 3376
3354 // Pick the right runtime function or stub to call. 3377 // Pick the right runtime function or stub to call.
3355 int length = instr->hydrogen()->length(); 3378 int length = instr->hydrogen()->length();
3356 if (instr->hydrogen()->IsCopyOnWrite()) { 3379 if (instr->hydrogen()->IsCopyOnWrite()) {
3357 ASSERT(instr->hydrogen()->depth() == 1); 3380 ASSERT(instr->hydrogen()->depth() == 1);
3358 FastCloneShallowArrayStub::Mode mode = 3381 FastCloneShallowArrayStub::Mode mode =
3359 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; 3382 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
3360 FastCloneShallowArrayStub stub(mode, length); 3383 FastCloneShallowArrayStub stub(mode, length);
3361 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3384 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
3362 } else if (instr->hydrogen()->depth() > 1) { 3385 } else if (instr->hydrogen()->depth() > 1) {
3363 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); 3386 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false);
3364 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { 3387 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
3365 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); 3388 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false);
3366 } else { 3389 } else {
3367 FastCloneShallowArrayStub::Mode mode = 3390 FastCloneShallowArrayStub::Mode mode =
3368 FastCloneShallowArrayStub::CLONE_ELEMENTS; 3391 FastCloneShallowArrayStub::CLONE_ELEMENTS;
3369 FastCloneShallowArrayStub stub(mode, length); 3392 FastCloneShallowArrayStub stub(mode, length);
3370 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3393 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
3371 } 3394 }
3372 } 3395 }
3373 3396
3374 3397
3375 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { 3398 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
3399 ASSERT(ToRegister(instr->context()).is(esi));
3376 // Setup the parameters to the stub/runtime call. 3400 // Setup the parameters to the stub/runtime call.
3377 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 3401 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3378 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); 3402 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3379 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3403 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3380 __ push(Immediate(instr->hydrogen()->constant_properties())); 3404 __ push(Immediate(instr->hydrogen()->constant_properties()));
3381 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0))); 3405 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
3382 3406
3383 // Pick the right runtime function to call. 3407 // Pick the right runtime function to call.
3384 if (instr->hydrogen()->depth() > 1) { 3408 if (instr->hydrogen()->depth() > 1) {
3385 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); 3409 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
(...skipping 17 matching lines...)
3403 __ mov(ebx, FieldOperand(ecx, literal_offset)); 3427 __ mov(ebx, FieldOperand(ecx, literal_offset));
3404 __ cmp(ebx, Factory::undefined_value()); 3428 __ cmp(ebx, Factory::undefined_value());
3405 __ j(not_equal, &materialized); 3429 __ j(not_equal, &materialized);
3406 3430
3407 // Create regexp literal using runtime function 3431 // Create regexp literal using runtime function
3408 // Result will be in eax. 3432 // Result will be in eax.
3409 __ push(ecx); 3433 __ push(ecx);
3410 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3434 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3411 __ push(Immediate(instr->hydrogen()->pattern())); 3435 __ push(Immediate(instr->hydrogen()->pattern()));
3412 __ push(Immediate(instr->hydrogen()->flags())); 3436 __ push(Immediate(instr->hydrogen()->flags()));
3413 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); 3437 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false);
3414 __ mov(ebx, eax); 3438 __ mov(ebx, eax);
3415 3439
3416 __ bind(&materialized); 3440 __ bind(&materialized);
3417 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 3441 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3418 Label allocated, runtime_allocate; 3442 Label allocated, runtime_allocate;
3419 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); 3443 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
3420 __ jmp(&allocated); 3444 __ jmp(&allocated);
3421 3445
3422 __ bind(&runtime_allocate); 3446 __ bind(&runtime_allocate);
3423 __ push(ebx); 3447 __ push(ebx);
3424 __ push(Immediate(Smi::FromInt(size))); 3448 __ push(Immediate(Smi::FromInt(size)));
3425 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); 3449 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false);
3426 __ pop(ebx); 3450 __ pop(ebx);
3427 3451
3428 __ bind(&allocated); 3452 __ bind(&allocated);
3429 // Copy the content into the newly allocated memory. 3453 // Copy the content into the newly allocated memory.
3430 // (Unroll copy loop once for better throughput). 3454 // (Unroll copy loop once for better throughput).
3431 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { 3455 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3432 __ mov(edx, FieldOperand(ebx, i)); 3456 __ mov(edx, FieldOperand(ebx, i));
3433 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); 3457 __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
3434 __ mov(FieldOperand(eax, i), edx); 3458 __ mov(FieldOperand(eax, i), edx);
3435 __ mov(FieldOperand(eax, i + kPointerSize), ecx); 3459 __ mov(FieldOperand(eax, i + kPointerSize), ecx);
3436 } 3460 }
3437 if ((size % (2 * kPointerSize)) != 0) { 3461 if ((size % (2 * kPointerSize)) != 0) {
3438 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); 3462 __ mov(edx, FieldOperand(ebx, size - kPointerSize));
3439 __ mov(FieldOperand(eax, size - kPointerSize), edx); 3463 __ mov(FieldOperand(eax, size - kPointerSize), edx);
3440 } 3464 }
3441 } 3465 }
3442 3466
3443 3467
3444 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { 3468 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
3445 // Use the fast case closure allocation code that allocates in new 3469 // Use the fast case closure allocation code that allocates in new
3446 // space for nested functions that don't need literals cloning. 3470 // space for nested functions that don't need literals cloning.
3447 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); 3471 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
3448 bool pretenure = instr->hydrogen()->pretenure(); 3472 bool pretenure = instr->hydrogen()->pretenure();
3449 if (shared_info->num_literals() == 0 && !pretenure) { 3473 if (shared_info->num_literals() == 0 && !pretenure) {
3450 FastNewClosureStub stub; 3474 FastNewClosureStub stub;
3451 __ push(Immediate(shared_info)); 3475 __ push(Immediate(shared_info));
3452 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3476 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
3453 } else { 3477 } else {
3454 __ push(esi); 3478 __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
3455 __ push(Immediate(shared_info)); 3479 __ push(Immediate(shared_info));
3456 __ push(Immediate(pretenure 3480 __ push(Immediate(pretenure
3457 ? Factory::true_value() 3481 ? Factory::true_value()
3458 : Factory::false_value())); 3482 : Factory::false_value()));
3459 CallRuntime(Runtime::kNewClosure, 3, instr); 3483 CallRuntime(Runtime::kNewClosure, 3, instr, false);
3460 } 3484 }
3461 } 3485 }
3462 3486
3463 3487
3464 void LCodeGen::DoTypeof(LTypeof* instr) { 3488 void LCodeGen::DoTypeof(LTypeof* instr) {
3465 LOperand* input = instr->InputAt(0); 3489 LOperand* input = instr->InputAt(0);
3466 if (input->IsConstantOperand()) { 3490 if (input->IsConstantOperand()) {
3467 __ push(ToImmediate(input)); 3491 __ push(ToImmediate(input));
3468 } else { 3492 } else {
3469 __ push(ToOperand(input)); 3493 __ push(ToOperand(input));
3470 } 3494 }
3471 CallRuntime(Runtime::kTypeof, 1, instr); 3495 CallRuntime(Runtime::kTypeof, 1, instr, false);
3472 } 3496 }
3473 3497
3474 3498
3475 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { 3499 void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
3476 Register input = ToRegister(instr->InputAt(0)); 3500 Register input = ToRegister(instr->InputAt(0));
3477 Register result = ToRegister(instr->result()); 3501 Register result = ToRegister(instr->result());
3478 Label true_label; 3502 Label true_label;
3479 Label false_label; 3503 Label false_label;
3480 NearLabel done; 3504 NearLabel done;
3481 3505
(...skipping 173 matching lines...)
3655 __ push(ToOperand(key)); 3679 __ push(ToOperand(key));
3656 } 3680 }
3657 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 3681 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
3658 LPointerMap* pointers = instr->pointer_map(); 3682 LPointerMap* pointers = instr->pointer_map();
3659 LEnvironment* env = instr->deoptimization_environment(); 3683 LEnvironment* env = instr->deoptimization_environment();
3660 RecordPosition(pointers->position()); 3684 RecordPosition(pointers->position());
3661 RegisterEnvironmentForDeoptimization(env); 3685 RegisterEnvironmentForDeoptimization(env);
3662 SafepointGenerator safepoint_generator(this, 3686 SafepointGenerator safepoint_generator(this,
3663 pointers, 3687 pointers,
3664 env->deoptimization_index()); 3688 env->deoptimization_index());
3689 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3665 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator); 3690 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
3666 } 3691 }
3667 3692
3668 3693
3669 void LCodeGen::DoStackCheck(LStackCheck* instr) { 3694 void LCodeGen::DoStackCheck(LStackCheck* instr) {
3670 // Perform stack overflow check. 3695 // Perform stack overflow check.
3671 NearLabel done; 3696 NearLabel done;
3672 ExternalReference stack_limit = ExternalReference::address_of_stack_limit(); 3697 ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
3673 __ cmp(esp, Operand::StaticVariable(stack_limit)); 3698 __ cmp(esp, Operand::StaticVariable(stack_limit));
3674 __ j(above_equal, &done); 3699 __ j(above_equal, &done);
3675 3700
3676 StackCheckStub stub; 3701 StackCheckStub stub;
3677 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3702 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
3678 __ bind(&done); 3703 __ bind(&done);
3679 } 3704 }
3680 3705
3681 3706
3682 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { 3707 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
3683 // This is a pseudo-instruction that ensures that the environment here is 3708 // This is a pseudo-instruction that ensures that the environment here is
3684 // properly registered for deoptimization and records the assembler's PC 3709 // properly registered for deoptimization and records the assembler's PC
3685 // offset. 3710 // offset.
3686 LEnvironment* environment = instr->environment(); 3711 LEnvironment* environment = instr->environment();
3687 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), 3712 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
3688 instr->SpilledDoubleRegisterArray()); 3713 instr->SpilledDoubleRegisterArray());
3689 3714
3690 // If the environment were already registered, we would have no way of 3715 // If the environment were already registered, we would have no way of
3691 // backpatching it with the spill slot operands. 3716 // backpatching it with the spill slot operands.
3692 ASSERT(!environment->HasBeenRegistered()); 3717 ASSERT(!environment->HasBeenRegistered());
3693 RegisterEnvironmentForDeoptimization(environment); 3718 RegisterEnvironmentForDeoptimization(environment);
3694 ASSERT(osr_pc_offset_ == -1); 3719 ASSERT(osr_pc_offset_ == -1);
3695 osr_pc_offset_ = masm()->pc_offset(); 3720 osr_pc_offset_ = masm()->pc_offset();
3696 } 3721 }
3697 3722
3698 3723
3699 #undef __ 3724 #undef __
3700 3725
3701 } } // namespace v8::internal 3726 } } // namespace v8::internal
3702 3727
3703 #endif // V8_TARGET_ARCH_IA32 3728 #endif // V8_TARGET_ARCH_IA32
