Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(452)

Side by Side Diff: src/ia32/lithium-codegen-ia32.cc

Issue 6815010: Merge r7516, r7541 into 3.1 branch. (Closed) Base URL: http://v8.googlecode.com/svn/branches/3.1/
Patch Set: Created 9 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/ia32/lithium-codegen-ia32.h ('k') | src/ia32/macro-assembler-ia32.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 400 matching lines...) Expand 10 before | Expand all | Expand 10 after
411 } else if (op->IsConstantOperand()) { 411 } else if (op->IsConstantOperand()) {
412 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op)); 412 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
413 int src_index = DefineDeoptimizationLiteral(literal); 413 int src_index = DefineDeoptimizationLiteral(literal);
414 translation->StoreLiteral(src_index); 414 translation->StoreLiteral(src_index);
415 } else { 415 } else {
416 UNREACHABLE(); 416 UNREACHABLE();
417 } 417 }
418 } 418 }
419 419
420 420
421 void LCodeGen::CallCode(Handle<Code> code, 421 void LCodeGen::CallCodeGeneric(Handle<Code> code,
422 RelocInfo::Mode mode, 422 RelocInfo::Mode mode,
423 LInstruction* instr, 423 LInstruction* instr,
424 bool adjusted) { 424 ContextMode context_mode,
425 SafepointMode safepoint_mode) {
425 ASSERT(instr != NULL); 426 ASSERT(instr != NULL);
426 LPointerMap* pointers = instr->pointer_map(); 427 LPointerMap* pointers = instr->pointer_map();
427 RecordPosition(pointers->position()); 428 RecordPosition(pointers->position());
428 429
429 if (!adjusted) { 430 if (context_mode == RESTORE_CONTEXT) {
430 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 431 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
431 } 432 }
432 __ call(code, mode); 433 __ call(code, mode);
433 434
434 RegisterLazyDeoptimization(instr); 435 RegisterLazyDeoptimization(instr, safepoint_mode);
435 436
436 // Signal that we don't inline smi code before these stubs in the 437 // Signal that we don't inline smi code before these stubs in the
437 // optimizing code generator. 438 // optimizing code generator.
438 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || 439 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
439 code->kind() == Code::COMPARE_IC) { 440 code->kind() == Code::COMPARE_IC) {
440 __ nop(); 441 __ nop();
441 } 442 }
442 } 443 }
443 444
444 445
446 void LCodeGen::CallCode(Handle<Code> code,
447 RelocInfo::Mode mode,
448 LInstruction* instr,
449 ContextMode context_mode) {
450 CallCodeGeneric(code, mode, instr, context_mode, RECORD_SIMPLE_SAFEPOINT);
451 }
452
453
445 void LCodeGen::CallRuntime(Runtime::Function* fun, 454 void LCodeGen::CallRuntime(Runtime::Function* fun,
446 int argc, 455 int argc,
447 LInstruction* instr, 456 LInstruction* instr,
448 bool adjusted) { 457 ContextMode context_mode) {
449 ASSERT(instr != NULL); 458 ASSERT(instr != NULL);
450 ASSERT(instr->HasPointerMap()); 459 ASSERT(instr->HasPointerMap());
451 LPointerMap* pointers = instr->pointer_map(); 460 LPointerMap* pointers = instr->pointer_map();
452 RecordPosition(pointers->position()); 461 RecordPosition(pointers->position());
453 462
454 if (!adjusted) { 463 if (context_mode == RESTORE_CONTEXT) {
455 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 464 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
456 } 465 }
457 __ CallRuntime(fun, argc); 466 __ CallRuntime(fun, argc);
458 467
459 RegisterLazyDeoptimization(instr); 468 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
460 } 469 }
461 470
462 471
463 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { 472 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
473 int argc,
474 LInstruction* instr) {
475 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
476 __ CallRuntimeSaveDoubles(id);
477 RecordSafepointWithRegisters(
478 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
479 }
480
481
482 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
483 SafepointMode safepoint_mode) {
464 // Create the environment to bailout to. If the call has side effects 484 // Create the environment to bailout to. If the call has side effects
465 // execution has to continue after the call otherwise execution can continue 485 // execution has to continue after the call otherwise execution can continue
466 // from a previous bailout point repeating the call. 486 // from a previous bailout point repeating the call.
467 LEnvironment* deoptimization_environment; 487 LEnvironment* deoptimization_environment;
468 if (instr->HasDeoptimizationEnvironment()) { 488 if (instr->HasDeoptimizationEnvironment()) {
469 deoptimization_environment = instr->deoptimization_environment(); 489 deoptimization_environment = instr->deoptimization_environment();
470 } else { 490 } else {
471 deoptimization_environment = instr->environment(); 491 deoptimization_environment = instr->environment();
472 } 492 }
473 493
474 RegisterEnvironmentForDeoptimization(deoptimization_environment); 494 RegisterEnvironmentForDeoptimization(deoptimization_environment);
475 RecordSafepoint(instr->pointer_map(), 495 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
476 deoptimization_environment->deoptimization_index()); 496 RecordSafepoint(instr->pointer_map(),
497 deoptimization_environment->deoptimization_index());
498 } else {
499 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
500 RecordSafepointWithRegisters(
501 instr->pointer_map(),
502 0,
503 deoptimization_environment->deoptimization_index());
504 }
477 } 505 }
478 506
479 507
480 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { 508 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
481 if (!environment->HasBeenRegistered()) { 509 if (!environment->HasBeenRegistered()) {
482 // Physical stack frame layout: 510 // Physical stack frame layout:
483 // -x ............. -4 0 ..................................... y 511 // -x ............. -4 0 ..................................... y
484 // [incoming arguments] [spill slots] [pushed outgoing arguments] 512 // [incoming arguments] [spill slots] [pushed outgoing arguments]
485 513
486 // Layout of the environment: 514 // Layout of the environment:
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after
615 643
616 inlined_function_count_ = deoptimization_literals_.length(); 644 inlined_function_count_ = deoptimization_literals_.length();
617 } 645 }
618 646
619 647
620 void LCodeGen::RecordSafepoint( 648 void LCodeGen::RecordSafepoint(
621 LPointerMap* pointers, 649 LPointerMap* pointers,
622 Safepoint::Kind kind, 650 Safepoint::Kind kind,
623 int arguments, 651 int arguments,
624 int deoptimization_index) { 652 int deoptimization_index) {
653 ASSERT(kind == expected_safepoint_kind_);
625 const ZoneList<LOperand*>* operands = pointers->operands(); 654 const ZoneList<LOperand*>* operands = pointers->operands();
626 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), 655 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
627 kind, arguments, deoptimization_index); 656 kind, arguments, deoptimization_index);
628 for (int i = 0; i < operands->length(); i++) { 657 for (int i = 0; i < operands->length(); i++) {
629 LOperand* pointer = operands->at(i); 658 LOperand* pointer = operands->at(i);
630 if (pointer->IsStackSlot()) { 659 if (pointer->IsStackSlot()) {
631 safepoint.DefinePointerSlot(pointer->index()); 660 safepoint.DefinePointerSlot(pointer->index());
632 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { 661 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
633 safepoint.DefinePointerRegister(ToRegister(pointer)); 662 safepoint.DefinePointerRegister(ToRegister(pointer));
634 } 663 }
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
700 // Nothing to do. 729 // Nothing to do.
701 } 730 }
702 731
703 732
704 void LCodeGen::DoCallStub(LCallStub* instr) { 733 void LCodeGen::DoCallStub(LCallStub* instr) {
705 ASSERT(ToRegister(instr->context()).is(esi)); 734 ASSERT(ToRegister(instr->context()).is(esi));
706 ASSERT(ToRegister(instr->result()).is(eax)); 735 ASSERT(ToRegister(instr->result()).is(eax));
707 switch (instr->hydrogen()->major_key()) { 736 switch (instr->hydrogen()->major_key()) {
708 case CodeStub::RegExpConstructResult: { 737 case CodeStub::RegExpConstructResult: {
709 RegExpConstructResultStub stub; 738 RegExpConstructResultStub stub;
710 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 739 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
711 break; 740 break;
712 } 741 }
713 case CodeStub::RegExpExec: { 742 case CodeStub::RegExpExec: {
714 RegExpExecStub stub; 743 RegExpExecStub stub;
715 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 744 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
716 break; 745 break;
717 } 746 }
718 case CodeStub::SubString: { 747 case CodeStub::SubString: {
719 SubStringStub stub; 748 SubStringStub stub;
720 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 749 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
721 break; 750 break;
722 } 751 }
723 case CodeStub::StringCharAt: { 752 case CodeStub::StringCharAt: {
724 StringCharAtStub stub; 753 StringCharAtStub stub;
725 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 754 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
726 break; 755 break;
727 } 756 }
728 case CodeStub::MathPow: { 757 case CodeStub::MathPow: {
729 MathPowStub stub; 758 MathPowStub stub;
730 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 759 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
731 break; 760 break;
732 } 761 }
733 case CodeStub::NumberToString: { 762 case CodeStub::NumberToString: {
734 NumberToStringStub stub; 763 NumberToStringStub stub;
735 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 764 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
736 break; 765 break;
737 } 766 }
738 case CodeStub::StringAdd: { 767 case CodeStub::StringAdd: {
739 StringAddStub stub(NO_STRING_ADD_FLAGS); 768 StringAddStub stub(NO_STRING_ADD_FLAGS);
740 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 769 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
741 break; 770 break;
742 } 771 }
743 case CodeStub::StringCompare: { 772 case CodeStub::StringCompare: {
744 StringCompareStub stub; 773 StringCompareStub stub;
745 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 774 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
746 break; 775 break;
747 } 776 }
748 case CodeStub::TranscendentalCache: { 777 case CodeStub::TranscendentalCache: {
749 TranscendentalCacheStub stub(instr->transcendental_type(), 778 TranscendentalCacheStub stub(instr->transcendental_type(),
750 TranscendentalCacheStub::TAGGED); 779 TranscendentalCacheStub::TAGGED);
751 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 780 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
752 break; 781 break;
753 } 782 }
754 default: 783 default:
755 UNREACHABLE(); 784 UNREACHABLE();
756 } 785 }
757 } 786 }
758 787
759 788
760 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { 789 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
761 // Nothing to do. 790 // Nothing to do.
(...skipping 329 matching lines...) Expand 10 before | Expand all | Expand 10 after
1091 1120
1092 void LCodeGen::DoBitNotI(LBitNotI* instr) { 1121 void LCodeGen::DoBitNotI(LBitNotI* instr) {
1093 LOperand* input = instr->InputAt(0); 1122 LOperand* input = instr->InputAt(0);
1094 ASSERT(input->Equals(instr->result())); 1123 ASSERT(input->Equals(instr->result()));
1095 __ not_(ToRegister(input)); 1124 __ not_(ToRegister(input));
1096 } 1125 }
1097 1126
1098 1127
1099 void LCodeGen::DoThrow(LThrow* instr) { 1128 void LCodeGen::DoThrow(LThrow* instr) {
1100 __ push(ToOperand(instr->InputAt(0))); 1129 __ push(ToOperand(instr->InputAt(0)));
1101 CallRuntime(Runtime::kThrow, 1, instr, false); 1130 CallRuntime(Runtime::kThrow, 1, instr, RESTORE_CONTEXT);
1102 1131
1103 if (FLAG_debug_code) { 1132 if (FLAG_debug_code) {
1104 Comment("Unreachable code."); 1133 Comment("Unreachable code.");
1105 __ int3(); 1134 __ int3();
1106 } 1135 }
1107 } 1136 }
1108 1137
1109 1138
1110 void LCodeGen::DoAddI(LAddI* instr) { 1139 void LCodeGen::DoAddI(LAddI* instr) {
1111 LOperand* left = instr->InputAt(0); 1140 LOperand* left = instr->InputAt(0);
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after
1163 } 1192 }
1164 } 1193 }
1165 1194
1166 1195
1167 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { 1196 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1168 ASSERT(ToRegister(instr->InputAt(0)).is(edx)); 1197 ASSERT(ToRegister(instr->InputAt(0)).is(edx));
1169 ASSERT(ToRegister(instr->InputAt(1)).is(eax)); 1198 ASSERT(ToRegister(instr->InputAt(1)).is(eax));
1170 ASSERT(ToRegister(instr->result()).is(eax)); 1199 ASSERT(ToRegister(instr->result()).is(eax));
1171 1200
1172 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); 1201 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
1173 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 1202 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
1174 } 1203 }
1175 1204
1176 1205
1177 int LCodeGen::GetNextEmittedBlock(int block) { 1206 int LCodeGen::GetNextEmittedBlock(int block) {
1178 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { 1207 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1179 LLabel* label = chunk_->GetLabel(i); 1208 LLabel* label = chunk_->GetLabel(i);
1180 if (!label->HasReplacement()) return i; 1209 if (!label->HasReplacement()) return i;
1181 } 1210 }
1182 return -1; 1211 return -1;
1183 } 1212 }
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after
1275 __ jmp(deferred_stack_check->entry()); 1304 __ jmp(deferred_stack_check->entry());
1276 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); 1305 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
1277 } else { 1306 } else {
1278 __ jmp(chunk_->GetAssemblyLabel(block)); 1307 __ jmp(chunk_->GetAssemblyLabel(block));
1279 } 1308 }
1280 } 1309 }
1281 } 1310 }
1282 1311
1283 1312
1284 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { 1313 void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
1285 __ pushad(); 1314 PushSafepointRegistersScope scope(this);
1286 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 1315 CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
1287 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
1288 RecordSafepointWithRegisters(
1289 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
1290 __ popad();
1291 } 1316 }
1292 1317
1293 void LCodeGen::DoGoto(LGoto* instr) { 1318 void LCodeGen::DoGoto(LGoto* instr) {
1294 class DeferredStackCheck: public LDeferredCode { 1319 class DeferredStackCheck: public LDeferredCode {
1295 public: 1320 public:
1296 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) 1321 DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
1297 : LDeferredCode(codegen), instr_(instr) { } 1322 : LDeferredCode(codegen), instr_(instr) { }
1298 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } 1323 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
1299 private: 1324 private:
1300 LGoto* instr_; 1325 LGoto* instr_;
(...skipping 468 matching lines...) Expand 10 before | Expand all | Expand 10 after
1769 1794
1770 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); 1795 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1771 EmitBranch(true_block, false_block, equal); 1796 EmitBranch(true_block, false_block, equal);
1772 } 1797 }
1773 1798
1774 1799
1775 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { 1800 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1776 // Object and function are in fixed registers defined by the stub. 1801 // Object and function are in fixed registers defined by the stub.
1777 ASSERT(ToRegister(instr->context()).is(esi)); 1802 ASSERT(ToRegister(instr->context()).is(esi));
1778 InstanceofStub stub(InstanceofStub::kArgsInRegisters); 1803 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1779 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1804 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
1780 1805
1781 NearLabel true_value, done; 1806 NearLabel true_value, done;
1782 __ test(eax, Operand(eax)); 1807 __ test(eax, Operand(eax));
1783 __ j(zero, &true_value); 1808 __ j(zero, &true_value);
1784 __ mov(ToRegister(instr->result()), Factory::false_value()); 1809 __ mov(ToRegister(instr->result()), Factory::false_value());
1785 __ jmp(&done); 1810 __ jmp(&done);
1786 __ bind(&true_value); 1811 __ bind(&true_value);
1787 __ mov(ToRegister(instr->result()), Factory::true_value()); 1812 __ mov(ToRegister(instr->result()), Factory::true_value());
1788 __ bind(&done); 1813 __ bind(&done);
1789 } 1814 }
1790 1815
1791 1816
1792 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { 1817 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1793 ASSERT(ToRegister(instr->context()).is(esi)); 1818 ASSERT(ToRegister(instr->context()).is(esi));
1794 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1819 int true_block = chunk_->LookupDestination(instr->true_block_id());
1795 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1820 int false_block = chunk_->LookupDestination(instr->false_block_id());
1796 1821
1797 InstanceofStub stub(InstanceofStub::kArgsInRegisters); 1822 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1798 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1823 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
1799 __ test(eax, Operand(eax)); 1824 __ test(eax, Operand(eax));
1800 EmitBranch(true_block, false_block, zero); 1825 EmitBranch(true_block, false_block, zero);
1801 } 1826 }
1802 1827
1803 1828
1804 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { 1829 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1805 class DeferredInstanceOfKnownGlobal: public LDeferredCode { 1830 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1806 public: 1831 public:
1807 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, 1832 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1808 LInstanceOfKnownGlobal* instr) 1833 LInstanceOfKnownGlobal* instr)
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after
1860 1885
1861 // Here result has either true or false. Deferred code also produces true or 1886 // Here result has either true or false. Deferred code also produces true or
1862 // false object. 1887 // false object.
1863 __ bind(deferred->exit()); 1888 __ bind(deferred->exit());
1864 __ bind(&done); 1889 __ bind(&done);
1865 } 1890 }
1866 1891
1867 1892
1868 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, 1893 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1869 Label* map_check) { 1894 Label* map_check) {
1870 __ PushSafepointRegisters(); 1895 PushSafepointRegistersScope scope(this);
1871 1896
1872 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; 1897 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
1873 flags = static_cast<InstanceofStub::Flags>( 1898 flags = static_cast<InstanceofStub::Flags>(
1874 flags | InstanceofStub::kArgsInRegisters); 1899 flags | InstanceofStub::kArgsInRegisters);
1875 flags = static_cast<InstanceofStub::Flags>( 1900 flags = static_cast<InstanceofStub::Flags>(
1876 flags | InstanceofStub::kCallSiteInlineCheck); 1901 flags | InstanceofStub::kCallSiteInlineCheck);
1877 flags = static_cast<InstanceofStub::Flags>( 1902 flags = static_cast<InstanceofStub::Flags>(
1878 flags | InstanceofStub::kReturnTrueFalseObject); 1903 flags | InstanceofStub::kReturnTrueFalseObject);
1879 InstanceofStub stub(flags); 1904 InstanceofStub stub(flags);
1880 1905
1881 // Get the temp register reserved by the instruction. This needs to be edi as 1906 // Get the temp register reserved by the instruction. This needs to be a
1882 // its slot of the pushing of safepoint registers is used to communicate the 1907 // register which is pushed last by PushSafepointRegisters as top of the
1883 // offset to the location of the map check. 1908 // stack is used to pass the offset to the location of the map check to
1909 // the stub.
1884 Register temp = ToRegister(instr->TempAt(0)); 1910 Register temp = ToRegister(instr->TempAt(0));
1885 ASSERT(temp.is(edi)); 1911 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
1886 __ mov(InstanceofStub::right(), Immediate(instr->function())); 1912 __ mov(InstanceofStub::right(), Immediate(instr->function()));
1887 static const int kAdditionalDelta = 16; 1913 static const int kAdditionalDelta = 16;
1888 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; 1914 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
1889 Label before_push_delta; 1915 Label before_push_delta;
1890 __ bind(&before_push_delta); 1916 __ bind(&before_push_delta);
1891 __ mov(temp, Immediate(delta)); 1917 __ mov(temp, Immediate(delta));
1892 __ StoreToSafepointRegisterSlot(temp, temp); 1918 __ StoreToSafepointRegisterSlot(temp, temp);
1893 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 1919 CallCodeGeneric(stub.GetCode(),
1920 RelocInfo::CODE_TARGET,
1921 instr,
1922 RESTORE_CONTEXT,
1923 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
1894 // Put the result value into the eax slot and restore all registers. 1924 // Put the result value into the eax slot and restore all registers.
1895 __ StoreToSafepointRegisterSlot(eax, eax); 1925 __ StoreToSafepointRegisterSlot(eax, eax);
1896 __ PopSafepointRegisters();
1897 } 1926 }
1898 1927
1899 1928
1900 static Condition ComputeCompareCondition(Token::Value op) { 1929 static Condition ComputeCompareCondition(Token::Value op) {
1901 switch (op) { 1930 switch (op) {
1902 case Token::EQ_STRICT: 1931 case Token::EQ_STRICT:
1903 case Token::EQ: 1932 case Token::EQ:
1904 return equal; 1933 return equal;
1905 case Token::LT: 1934 case Token::LT:
1906 return less; 1935 return less;
1907 case Token::GT: 1936 case Token::GT:
1908 return greater; 1937 return greater;
1909 case Token::LTE: 1938 case Token::LTE:
1910 return less_equal; 1939 return less_equal;
1911 case Token::GTE: 1940 case Token::GTE:
1912 return greater_equal; 1941 return greater_equal;
1913 default: 1942 default:
1914 UNREACHABLE(); 1943 UNREACHABLE();
1915 return no_condition; 1944 return no_condition;
1916 } 1945 }
1917 } 1946 }
1918 1947
1919 1948
1920 void LCodeGen::DoCmpT(LCmpT* instr) { 1949 void LCodeGen::DoCmpT(LCmpT* instr) {
1921 Token::Value op = instr->op(); 1950 Token::Value op = instr->op();
1922 1951
1923 Handle<Code> ic = CompareIC::GetUninitialized(op); 1952 Handle<Code> ic = CompareIC::GetUninitialized(op);
1924 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); 1953 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
1925 1954
1926 Condition condition = ComputeCompareCondition(op); 1955 Condition condition = ComputeCompareCondition(op);
1927 if (op == Token::GT || op == Token::LTE) { 1956 if (op == Token::GT || op == Token::LTE) {
1928 condition = ReverseCondition(condition); 1957 condition = ReverseCondition(condition);
1929 } 1958 }
1930 NearLabel true_value, done; 1959 NearLabel true_value, done;
1931 __ test(eax, Operand(eax)); 1960 __ test(eax, Operand(eax));
1932 __ j(condition, &true_value); 1961 __ j(condition, &true_value);
1933 __ mov(ToRegister(instr->result()), Factory::false_value()); 1962 __ mov(ToRegister(instr->result()), Factory::false_value());
1934 __ jmp(&done); 1963 __ jmp(&done);
1935 __ bind(&true_value); 1964 __ bind(&true_value);
1936 __ mov(ToRegister(instr->result()), Factory::true_value()); 1965 __ mov(ToRegister(instr->result()), Factory::true_value());
1937 __ bind(&done); 1966 __ bind(&done);
1938 } 1967 }
1939 1968
1940 1969
1941 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { 1970 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
1942 Token::Value op = instr->op(); 1971 Token::Value op = instr->op();
1943 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1972 int true_block = chunk_->LookupDestination(instr->true_block_id());
1944 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1973 int false_block = chunk_->LookupDestination(instr->false_block_id());
1945 1974
1946 Handle<Code> ic = CompareIC::GetUninitialized(op); 1975 Handle<Code> ic = CompareIC::GetUninitialized(op);
1947 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); 1976 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
1948 1977
1949 // The compare stub expects compare condition and the input operands 1978 // The compare stub expects compare condition and the input operands
1950 // reversed for GT and LTE. 1979 // reversed for GT and LTE.
1951 Condition condition = ComputeCompareCondition(op); 1980 Condition condition = ComputeCompareCondition(op);
1952 if (op == Token::GT || op == Token::LTE) { 1981 if (op == Token::GT || op == Token::LTE) {
1953 condition = ReverseCondition(condition); 1982 condition = ReverseCondition(condition);
1954 } 1983 }
1955 __ test(eax, Operand(eax)); 1984 __ test(eax, Operand(eax));
1956 EmitBranch(true_block, false_block, condition); 1985 EmitBranch(true_block, false_block, condition);
1957 } 1986 }
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after
2032 } 2061 }
2033 2062
2034 2063
2035 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { 2064 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2036 ASSERT(ToRegister(instr->context()).is(esi)); 2065 ASSERT(ToRegister(instr->context()).is(esi));
2037 ASSERT(ToRegister(instr->object()).is(eax)); 2066 ASSERT(ToRegister(instr->object()).is(eax));
2038 ASSERT(ToRegister(instr->result()).is(eax)); 2067 ASSERT(ToRegister(instr->result()).is(eax));
2039 2068
2040 __ mov(ecx, instr->name()); 2069 __ mov(ecx, instr->name());
2041 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); 2070 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
2042 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2071 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2043 } 2072 }
2044 2073
2045 2074
2046 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { 2075 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2047 Register function = ToRegister(instr->function()); 2076 Register function = ToRegister(instr->function());
2048 Register temp = ToRegister(instr->TempAt(0)); 2077 Register temp = ToRegister(instr->TempAt(0));
2049 Register result = ToRegister(instr->result()); 2078 Register result = ToRegister(instr->result());
2050 2079
2051 // Check that the function really is a function. 2080 // Check that the function really is a function.
2052 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); 2081 __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
(...skipping 103 matching lines...) Expand 10 before | Expand all | Expand 10 after
2156 __ movzx_b(result, Operand(external_pointer, key, times_1, 0)); 2185 __ movzx_b(result, Operand(external_pointer, key, times_1, 0));
2157 } 2186 }
2158 2187
2159 2188
2160 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { 2189 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2161 ASSERT(ToRegister(instr->context()).is(esi)); 2190 ASSERT(ToRegister(instr->context()).is(esi));
2162 ASSERT(ToRegister(instr->object()).is(edx)); 2191 ASSERT(ToRegister(instr->object()).is(edx));
2163 ASSERT(ToRegister(instr->key()).is(eax)); 2192 ASSERT(ToRegister(instr->key()).is(eax));
2164 2193
2165 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); 2194 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2166 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2195 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2167 } 2196 }
2168 2197
2169 2198
2170 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 2199 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2171 Register result = ToRegister(instr->result()); 2200 Register result = ToRegister(instr->result());
2172 2201
2173 // Check for arguments adapter frame. 2202 // Check for arguments adapter frame.
2174 NearLabel done, adapted; 2203 NearLabel done, adapted;
2175 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); 2204 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2176 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset)); 2205 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
(...skipping 167 matching lines...) Expand 10 before | Expand all | Expand 10 after
2344 RecordPosition(pointers->position()); 2373 RecordPosition(pointers->position());
2345 2374
2346 // Invoke function. 2375 // Invoke function.
2347 if (*function == *graph()->info()->closure()) { 2376 if (*function == *graph()->info()->closure()) {
2348 __ CallSelf(); 2377 __ CallSelf();
2349 } else { 2378 } else {
2350 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); 2379 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
2351 } 2380 }
2352 2381
2353 // Setup deoptimization. 2382 // Setup deoptimization.
2354 RegisterLazyDeoptimization(instr); 2383 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
2355 } 2384 }
2356 2385
2357 2386
2358 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { 2387 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2359 ASSERT(ToRegister(instr->result()).is(eax)); 2388 ASSERT(ToRegister(instr->result()).is(eax));
2360 __ mov(edi, instr->function()); 2389 __ mov(edi, instr->function());
2361 CallKnownFunction(instr->function(), instr->arity(), instr); 2390 CallKnownFunction(instr->function(), instr->arity(), instr);
2362 } 2391 }
2363 2392
2364 2393
2365 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { 2394 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2366 Register input_reg = ToRegister(instr->InputAt(0)); 2395 Register input_reg = ToRegister(instr->InputAt(0));
2367 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 2396 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2368 Factory::heap_number_map()); 2397 Factory::heap_number_map());
2369 DeoptimizeIf(not_equal, instr->environment()); 2398 DeoptimizeIf(not_equal, instr->environment());
2370 2399
2371 Label done; 2400 Label done;
2372 Register tmp = input_reg.is(eax) ? ecx : eax; 2401 Register tmp = input_reg.is(eax) ? ecx : eax;
2373 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx; 2402 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
2374 2403
2375 // Preserve the value of all registers. 2404 // Preserve the value of all registers.
2376 __ PushSafepointRegisters(); 2405 PushSafepointRegistersScope scope(this);
2377 2406
2378 Label negative; 2407 Label negative;
2379 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); 2408 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2380 // Check the sign of the argument. If the argument is positive, just 2409 // Check the sign of the argument. If the argument is positive, just
2381 // return it. We do not need to patch the stack since |input| and 2410 // return it. We do not need to patch the stack since |input| and
2382 // |result| are the same register and |input| will be restored 2411 // |result| are the same register and |input| will be restored
2383 // unchanged by popping safepoint registers. 2412 // unchanged by popping safepoint registers.
2384 __ test(tmp, Immediate(HeapNumber::kSignMask)); 2413 __ test(tmp, Immediate(HeapNumber::kSignMask));
2385 __ j(not_zero, &negative); 2414 __ j(not_zero, &negative);
2386 __ jmp(&done); 2415 __ jmp(&done);
2387 2416
2388 __ bind(&negative); 2417 __ bind(&negative);
2389 2418
2390 Label allocated, slow; 2419 Label allocated, slow;
2391 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); 2420 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
2392 __ jmp(&allocated); 2421 __ jmp(&allocated);
2393 2422
2394 // Slow case: Call the runtime system to do the number allocation. 2423 // Slow case: Call the runtime system to do the number allocation.
2395 __ bind(&slow); 2424 __ bind(&slow);
2396 2425
2397 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 2426 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
2398 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); 2427
2399 RecordSafepointWithRegisters(
2400 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2401 // Set the pointer to the new heap number in tmp. 2428 // Set the pointer to the new heap number in tmp.
2402 if (!tmp.is(eax)) __ mov(tmp, eax); 2429 if (!tmp.is(eax)) __ mov(tmp, eax);
2403 2430
2404 // Restore input_reg after call to runtime. 2431 // Restore input_reg after call to runtime.
2405 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); 2432 __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
2406 2433
2407 __ bind(&allocated); 2434 __ bind(&allocated);
2408 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset)); 2435 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2409 __ and_(tmp2, ~HeapNumber::kSignMask); 2436 __ and_(tmp2, ~HeapNumber::kSignMask);
2410 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2); 2437 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
2411 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); 2438 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
2412 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2); 2439 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
2413 __ StoreToSafepointRegisterSlot(input_reg, tmp); 2440 __ StoreToSafepointRegisterSlot(input_reg, tmp);
2414 2441
2415 __ bind(&done); 2442 __ bind(&done);
2416 __ PopSafepointRegisters();
2417 } 2443 }
2418 2444
2419 2445
2420 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) { 2446 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2421 Register input_reg = ToRegister(instr->InputAt(0)); 2447 Register input_reg = ToRegister(instr->InputAt(0));
2422 __ test(input_reg, Operand(input_reg)); 2448 __ test(input_reg, Operand(input_reg));
2423 Label is_positive; 2449 Label is_positive;
2424 __ j(not_sign, &is_positive); 2450 __ j(not_sign, &is_positive);
2425 __ neg(input_reg); 2451 __ neg(input_reg);
2426 __ test(input_reg, Operand(input_reg)); 2452 __ test(input_reg, Operand(input_reg));
(...skipping 167 matching lines...) Expand 10 before | Expand all | Expand 10 after
2594 __ fstp_d(Operand(esp, 0)); 2620 __ fstp_d(Operand(esp, 0));
2595 __ movdbl(result_reg, Operand(esp, 0)); 2621 __ movdbl(result_reg, Operand(esp, 0));
2596 __ add(Operand(esp), Immediate(kDoubleSize)); 2622 __ add(Operand(esp), Immediate(kDoubleSize));
2597 } 2623 }
2598 2624
2599 2625
2600 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { 2626 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
2601 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); 2627 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2602 TranscendentalCacheStub stub(TranscendentalCache::LOG, 2628 TranscendentalCacheStub stub(TranscendentalCache::LOG,
2603 TranscendentalCacheStub::UNTAGGED); 2629 TranscendentalCacheStub::UNTAGGED);
2604 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 2630 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
2605 } 2631 }
2606 2632
2607 2633
2608 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { 2634 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2609 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); 2635 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2610 TranscendentalCacheStub stub(TranscendentalCache::COS, 2636 TranscendentalCacheStub stub(TranscendentalCache::COS,
2611 TranscendentalCacheStub::UNTAGGED); 2637 TranscendentalCacheStub::UNTAGGED);
2612 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 2638 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
2613 } 2639 }
2614 2640
2615 2641
2616 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { 2642 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2617 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); 2643 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2618 TranscendentalCacheStub stub(TranscendentalCache::SIN, 2644 TranscendentalCacheStub stub(TranscendentalCache::SIN,
2619 TranscendentalCacheStub::UNTAGGED); 2645 TranscendentalCacheStub::UNTAGGED);
2620 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 2646 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
2621 } 2647 }
2622 2648
2623 2649
2624 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { 2650 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2625 switch (instr->op()) { 2651 switch (instr->op()) {
2626 case kMathAbs: 2652 case kMathAbs:
2627 DoMathAbs(instr); 2653 DoMathAbs(instr);
2628 break; 2654 break;
2629 case kMathFloor: 2655 case kMathFloor:
2630 DoMathFloor(instr); 2656 DoMathFloor(instr);
(...skipping 23 matching lines...) Expand all
2654 } 2680 }
2655 2681
2656 2682
2657 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 2683 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
2658 ASSERT(ToRegister(instr->context()).is(esi)); 2684 ASSERT(ToRegister(instr->context()).is(esi));
2659 ASSERT(ToRegister(instr->key()).is(ecx)); 2685 ASSERT(ToRegister(instr->key()).is(ecx));
2660 ASSERT(ToRegister(instr->result()).is(eax)); 2686 ASSERT(ToRegister(instr->result()).is(eax));
2661 2687
2662 int arity = instr->arity(); 2688 int arity = instr->arity();
2663 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP); 2689 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2664 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2690 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2665 } 2691 }
2666 2692
2667 2693
2668 void LCodeGen::DoCallNamed(LCallNamed* instr) { 2694 void LCodeGen::DoCallNamed(LCallNamed* instr) {
2669 ASSERT(ToRegister(instr->context()).is(esi)); 2695 ASSERT(ToRegister(instr->context()).is(esi));
2670 ASSERT(ToRegister(instr->result()).is(eax)); 2696 ASSERT(ToRegister(instr->result()).is(eax));
2671 2697
2672 int arity = instr->arity(); 2698 int arity = instr->arity();
2673 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP); 2699 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2674 __ mov(ecx, instr->name()); 2700 __ mov(ecx, instr->name());
2675 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2701 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2676 } 2702 }
2677 2703
2678 2704
2679 void LCodeGen::DoCallFunction(LCallFunction* instr) { 2705 void LCodeGen::DoCallFunction(LCallFunction* instr) {
2680 ASSERT(ToRegister(instr->context()).is(esi)); 2706 ASSERT(ToRegister(instr->context()).is(esi));
2681 ASSERT(ToRegister(instr->result()).is(eax)); 2707 ASSERT(ToRegister(instr->result()).is(eax));
2682 2708
2683 int arity = instr->arity(); 2709 int arity = instr->arity();
2684 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); 2710 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2685 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 2711 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2686 __ Drop(1); 2712 __ Drop(1);
2687 } 2713 }
2688 2714
2689 2715
2690 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 2716 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
2691 ASSERT(ToRegister(instr->context()).is(esi)); 2717 ASSERT(ToRegister(instr->context()).is(esi));
2692 ASSERT(ToRegister(instr->result()).is(eax)); 2718 ASSERT(ToRegister(instr->result()).is(eax));
2693 2719
2694 int arity = instr->arity(); 2720 int arity = instr->arity();
2695 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP); 2721 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2696 __ mov(ecx, instr->name()); 2722 __ mov(ecx, instr->name());
2697 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); 2723 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr, CONTEXT_ADJUSTED);
2698 } 2724 }
2699 2725
2700 2726
2701 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { 2727 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2702 ASSERT(ToRegister(instr->result()).is(eax)); 2728 ASSERT(ToRegister(instr->result()).is(eax));
2703 __ mov(edi, instr->target()); 2729 __ mov(edi, instr->target());
2704 CallKnownFunction(instr->target(), instr->arity(), instr); 2730 CallKnownFunction(instr->target(), instr->arity(), instr);
2705 } 2731 }
2706 2732
2707 2733
2708 void LCodeGen::DoCallNew(LCallNew* instr) { 2734 void LCodeGen::DoCallNew(LCallNew* instr) {
2709 ASSERT(ToRegister(instr->context()).is(esi)); 2735 ASSERT(ToRegister(instr->context()).is(esi));
2710 ASSERT(ToRegister(instr->constructor()).is(edi)); 2736 ASSERT(ToRegister(instr->constructor()).is(edi));
2711 ASSERT(ToRegister(instr->result()).is(eax)); 2737 ASSERT(ToRegister(instr->result()).is(eax));
2712 2738
2713 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall)); 2739 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
2714 __ Set(eax, Immediate(instr->arity())); 2740 __ Set(eax, Immediate(instr->arity()));
2715 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); 2741 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr, CONTEXT_ADJUSTED);
2716 } 2742 }
2717 2743
2718 2744
2719 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { 2745 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2720 CallRuntime(instr->function(), instr->arity(), instr, false); 2746 CallRuntime(instr->function(), instr->arity(), instr, RESTORE_CONTEXT);
2721 } 2747 }
2722 2748
2723 2749
2724 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { 2750 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
2725 Register object = ToRegister(instr->object()); 2751 Register object = ToRegister(instr->object());
2726 Register value = ToRegister(instr->value()); 2752 Register value = ToRegister(instr->value());
2727 int offset = instr->offset(); 2753 int offset = instr->offset();
2728 2754
2729 if (!instr->transition().is_null()) { 2755 if (!instr->transition().is_null()) {
2730 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); 2756 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
(...skipping 22 matching lines...) Expand all
2753 2779
2754 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 2780 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
2755 ASSERT(ToRegister(instr->context()).is(esi)); 2781 ASSERT(ToRegister(instr->context()).is(esi));
2756 ASSERT(ToRegister(instr->object()).is(edx)); 2782 ASSERT(ToRegister(instr->object()).is(edx));
2757 ASSERT(ToRegister(instr->value()).is(eax)); 2783 ASSERT(ToRegister(instr->value()).is(eax));
2758 2784
2759 __ mov(ecx, instr->name()); 2785 __ mov(ecx, instr->name());
2760 Handle<Code> ic(Builtins::builtin( 2786 Handle<Code> ic(Builtins::builtin(
2761 info_->is_strict() ? Builtins::StoreIC_Initialize_Strict 2787 info_->is_strict() ? Builtins::StoreIC_Initialize_Strict
2762 : Builtins::StoreIC_Initialize)); 2788 : Builtins::StoreIC_Initialize));
2763 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2789 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2764 } 2790 }
2765 2791
2766 2792
2767 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { 2793 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
2768 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); 2794 __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
2769 DeoptimizeIf(above_equal, instr->environment()); 2795 DeoptimizeIf(above_equal, instr->environment());
2770 } 2796 }
2771 2797
2772 2798
2773 void LCodeGen::DoStorePixelArrayElement(LStorePixelArrayElement* instr) { 2799 void LCodeGen::DoStorePixelArrayElement(LStorePixelArrayElement* instr) {
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
2823 2849
2824 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 2850 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
2825 ASSERT(ToRegister(instr->context()).is(esi)); 2851 ASSERT(ToRegister(instr->context()).is(esi));
2826 ASSERT(ToRegister(instr->object()).is(edx)); 2852 ASSERT(ToRegister(instr->object()).is(edx));
2827 ASSERT(ToRegister(instr->key()).is(ecx)); 2853 ASSERT(ToRegister(instr->key()).is(ecx));
2828 ASSERT(ToRegister(instr->value()).is(eax)); 2854 ASSERT(ToRegister(instr->value()).is(eax));
2829 2855
2830 Handle<Code> ic(Builtins::builtin( 2856 Handle<Code> ic(Builtins::builtin(
2831 info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict 2857 info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
2832 : Builtins::KeyedStoreIC_Initialize)); 2858 : Builtins::KeyedStoreIC_Initialize));
2833 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2859 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2834 } 2860 }
2835 2861
2836 2862
2837 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { 2863 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
2838 class DeferredStringCharCodeAt: public LDeferredCode { 2864 class DeferredStringCharCodeAt: public LDeferredCode {
2839 public: 2865 public:
2840 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) 2866 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
2841 : LDeferredCode(codegen), instr_(instr) { } 2867 : LDeferredCode(codegen), instr_(instr) { }
2842 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } 2868 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
2843 private: 2869 private:
(...skipping 97 matching lines...) Expand 10 before | Expand all | Expand 10 after
2941 2967
2942 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) { 2968 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
2943 Register string = ToRegister(instr->string()); 2969 Register string = ToRegister(instr->string());
2944 Register result = ToRegister(instr->result()); 2970 Register result = ToRegister(instr->result());
2945 2971
2946 // TODO(3095996): Get rid of this. For now, we need to make the 2972 // TODO(3095996): Get rid of this. For now, we need to make the
2947 // result register contain a valid pointer because it is already 2973 // result register contain a valid pointer because it is already
2948 // contained in the register pointer map. 2974 // contained in the register pointer map.
2949 __ Set(result, Immediate(0)); 2975 __ Set(result, Immediate(0));
2950 2976
2951 __ PushSafepointRegisters(); 2977 PushSafepointRegistersScope scope(this);
2952 __ push(string); 2978 __ push(string);
2953 // Push the index as a smi. This is safe because of the checks in 2979 // Push the index as a smi. This is safe because of the checks in
2954 // DoStringCharCodeAt above. 2980 // DoStringCharCodeAt above.
2955 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); 2981 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2956 if (instr->index()->IsConstantOperand()) { 2982 if (instr->index()->IsConstantOperand()) {
2957 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); 2983 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2958 __ push(Immediate(Smi::FromInt(const_index))); 2984 __ push(Immediate(Smi::FromInt(const_index)));
2959 } else { 2985 } else {
2960 Register index = ToRegister(instr->index()); 2986 Register index = ToRegister(instr->index());
2961 __ SmiTag(index); 2987 __ SmiTag(index);
2962 __ push(index); 2988 __ push(index);
2963 } 2989 }
2964 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 2990 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
2965 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
2966 RecordSafepointWithRegisters(
2967 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
2968 if (FLAG_debug_code) { 2991 if (FLAG_debug_code) {
2969 __ AbortIfNotSmi(eax); 2992 __ AbortIfNotSmi(eax);
2970 } 2993 }
2971 __ SmiUntag(eax); 2994 __ SmiUntag(eax);
2972 __ StoreToSafepointRegisterSlot(result, eax); 2995 __ StoreToSafepointRegisterSlot(result, eax);
2973 __ PopSafepointRegisters();
2974 } 2996 }
2975 2997
2976 2998
2977 void LCodeGen::DoStringLength(LStringLength* instr) { 2999 void LCodeGen::DoStringLength(LStringLength* instr) {
2978 Register string = ToRegister(instr->string()); 3000 Register string = ToRegister(instr->string());
2979 Register result = ToRegister(instr->result()); 3001 Register result = ToRegister(instr->result());
2980 __ mov(result, FieldOperand(string, String::kLengthOffset)); 3002 __ mov(result, FieldOperand(string, String::kLengthOffset));
2981 } 3003 }
2982 3004
2983 3005
(...skipping 26 matching lines...) Expand all
3010 __ bind(deferred->exit()); 3032 __ bind(deferred->exit());
3011 } 3033 }
3012 3034
3013 3035
3014 void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) { 3036 void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3015 Label slow; 3037 Label slow;
3016 Register reg = ToRegister(instr->InputAt(0)); 3038 Register reg = ToRegister(instr->InputAt(0));
3017 Register tmp = reg.is(eax) ? ecx : eax; 3039 Register tmp = reg.is(eax) ? ecx : eax;
3018 3040
3019 // Preserve the value of all registers. 3041 // Preserve the value of all registers.
3020 __ PushSafepointRegisters(); 3042 PushSafepointRegistersScope scope(this);
3021 3043
3022 // There was overflow, so bits 30 and 31 of the original integer 3044 // There was overflow, so bits 30 and 31 of the original integer
3023 // disagree. Try to allocate a heap number in new space and store 3045 // disagree. Try to allocate a heap number in new space and store
3024 // the value in there. If that fails, call the runtime system. 3046 // the value in there. If that fails, call the runtime system.
3025 NearLabel done; 3047 NearLabel done;
3026 __ SmiUntag(reg); 3048 __ SmiUntag(reg);
3027 __ xor_(reg, 0x80000000); 3049 __ xor_(reg, 0x80000000);
3028 __ cvtsi2sd(xmm0, Operand(reg)); 3050 __ cvtsi2sd(xmm0, Operand(reg));
3029 if (FLAG_inline_new) { 3051 if (FLAG_inline_new) {
3030 __ AllocateHeapNumber(reg, tmp, no_reg, &slow); 3052 __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
3031 __ jmp(&done); 3053 __ jmp(&done);
3032 } 3054 }
3033 3055
3034 // Slow case: Call the runtime system to do the number allocation. 3056 // Slow case: Call the runtime system to do the number allocation.
3035 __ bind(&slow); 3057 __ bind(&slow);
3036 3058
3037 // TODO(3095996): Put a valid pointer value in the stack slot where the result 3059 // TODO(3095996): Put a valid pointer value in the stack slot where the result
3038 // register is stored, as this register is in the pointer map, but contains an 3060 // register is stored, as this register is in the pointer map, but contains an
3039 // integer value. 3061 // integer value.
3040 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); 3062 __ StoreToSafepointRegisterSlot(reg, Immediate(0));
3041 3063
3042 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 3064 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
3043 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3044 RecordSafepointWithRegisters(
3045 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
3046 if (!reg.is(eax)) __ mov(reg, eax); 3065 if (!reg.is(eax)) __ mov(reg, eax);
3047 3066
3048 // Done. Put the value in xmm0 into the value of the allocated heap 3067 // Done. Put the value in xmm0 into the value of the allocated heap
3049 // number. 3068 // number.
3050 __ bind(&done); 3069 __ bind(&done);
3051 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); 3070 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
3052 __ StoreToSafepointRegisterSlot(reg, reg); 3071 __ StoreToSafepointRegisterSlot(reg, reg);
3053 __ PopSafepointRegisters();
3054 } 3072 }
3055 3073
3056 3074
3057 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { 3075 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3058 class DeferredNumberTagD: public LDeferredCode { 3076 class DeferredNumberTagD: public LDeferredCode {
3059 public: 3077 public:
3060 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) 3078 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3061 : LDeferredCode(codegen), instr_(instr) { } 3079 : LDeferredCode(codegen), instr_(instr) { }
3062 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); } 3080 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3063 private: 3081 private:
(...skipping 15 matching lines...) Expand all
3079 } 3097 }
3080 3098
3081 3099
3082 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { 3100 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3083 // TODO(3095996): Get rid of this. For now, we need to make the 3101 // TODO(3095996): Get rid of this. For now, we need to make the
3084 // result register contain a valid pointer because it is already 3102 // result register contain a valid pointer because it is already
3085 // contained in the register pointer map. 3103 // contained in the register pointer map.
3086 Register reg = ToRegister(instr->result()); 3104 Register reg = ToRegister(instr->result());
3087 __ Set(reg, Immediate(0)); 3105 __ Set(reg, Immediate(0));
3088 3106
3089 __ PushSafepointRegisters(); 3107 PushSafepointRegistersScope scope(this);
3090 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 3108 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
3091 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3092 RecordSafepointWithRegisters(
3093 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
3094 __ StoreToSafepointRegisterSlot(reg, eax); 3109 __ StoreToSafepointRegisterSlot(reg, eax);
3095 __ PopSafepointRegisters();
3096 } 3110 }
3097 3111
3098 3112
3099 void LCodeGen::DoSmiTag(LSmiTag* instr) { 3113 void LCodeGen::DoSmiTag(LSmiTag* instr) {
3100 LOperand* input = instr->InputAt(0); 3114 LOperand* input = instr->InputAt(0);
3101 ASSERT(input->IsRegister() && input->Equals(instr->result())); 3115 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3102 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); 3116 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3103 __ SmiTag(ToRegister(input)); 3117 __ SmiTag(ToRegister(input));
3104 } 3118 }
3105 3119
(...skipping 390 matching lines...) Expand 10 before | Expand all | Expand 10 after
3496 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3510 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3497 __ push(Immediate(instr->hydrogen()->constant_elements())); 3511 __ push(Immediate(instr->hydrogen()->constant_elements()));
3498 3512
3499 // Pick the right runtime function or stub to call. 3513 // Pick the right runtime function or stub to call.
3500 int length = instr->hydrogen()->length(); 3514 int length = instr->hydrogen()->length();
3501 if (instr->hydrogen()->IsCopyOnWrite()) { 3515 if (instr->hydrogen()->IsCopyOnWrite()) {
3502 ASSERT(instr->hydrogen()->depth() == 1); 3516 ASSERT(instr->hydrogen()->depth() == 1);
3503 FastCloneShallowArrayStub::Mode mode = 3517 FastCloneShallowArrayStub::Mode mode =
3504 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; 3518 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
3505 FastCloneShallowArrayStub stub(mode, length); 3519 FastCloneShallowArrayStub stub(mode, length);
3506 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 3520 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
3507 } else if (instr->hydrogen()->depth() > 1) { 3521 } else if (instr->hydrogen()->depth() > 1) {
3508 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false); 3522 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, RESTORE_CONTEXT);
3509 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { 3523 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
3510 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false); 3524 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, RESTORE_CONTEXT);
3511 } else { 3525 } else {
3512 FastCloneShallowArrayStub::Mode mode = 3526 FastCloneShallowArrayStub::Mode mode =
3513 FastCloneShallowArrayStub::CLONE_ELEMENTS; 3527 FastCloneShallowArrayStub::CLONE_ELEMENTS;
3514 FastCloneShallowArrayStub stub(mode, length); 3528 FastCloneShallowArrayStub stub(mode, length);
3515 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 3529 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
3516 } 3530 }
3517 } 3531 }
3518 3532
3519 3533
3520 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { 3534 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
3521 ASSERT(ToRegister(instr->context()).is(esi)); 3535 ASSERT(ToRegister(instr->context()).is(esi));
3522 // Setup the parameters to the stub/runtime call. 3536 // Setup the parameters to the stub/runtime call.
3523 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 3537 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3524 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); 3538 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3525 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3539 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3526 __ push(Immediate(instr->hydrogen()->constant_properties())); 3540 __ push(Immediate(instr->hydrogen()->constant_properties()));
3527 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0))); 3541 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
3528 3542
3529 // Pick the right runtime function to call. 3543 // Pick the right runtime function to call.
3530 if (instr->hydrogen()->depth() > 1) { 3544 if (instr->hydrogen()->depth() > 1) {
3531 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); 3545 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr, CONTEXT_ADJUSTED);
3532 } else { 3546 } else {
3533 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); 3547 CallRuntime(Runtime::kCreateObjectLiteralShallow,
3548 4,
3549 instr,
3550 CONTEXT_ADJUSTED);
3534 } 3551 }
3535 } 3552 }
3536 3553
3537 3554
3538 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { 3555 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
3539 NearLabel materialized; 3556 NearLabel materialized;
3540 // Registers will be used as follows: 3557 // Registers will be used as follows:
3541 // edi = JS function. 3558 // edi = JS function.
3542 // ecx = literals array. 3559 // ecx = literals array.
3543 // ebx = regexp literal. 3560 // ebx = regexp literal.
3544 // eax = regexp literal clone. 3561 // eax = regexp literal clone.
3545 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 3562 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3546 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset)); 3563 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
3547 int literal_offset = FixedArray::kHeaderSize + 3564 int literal_offset = FixedArray::kHeaderSize +
3548 instr->hydrogen()->literal_index() * kPointerSize; 3565 instr->hydrogen()->literal_index() * kPointerSize;
3549 __ mov(ebx, FieldOperand(ecx, literal_offset)); 3566 __ mov(ebx, FieldOperand(ecx, literal_offset));
3550 __ cmp(ebx, Factory::undefined_value()); 3567 __ cmp(ebx, Factory::undefined_value());
3551 __ j(not_equal, &materialized); 3568 __ j(not_equal, &materialized);
3552 3569
3553 // Create regexp literal using runtime function 3570 // Create regexp literal using runtime function
3554 // Result will be in eax. 3571 // Result will be in eax.
3555 __ push(ecx); 3572 __ push(ecx);
3556 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3573 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3557 __ push(Immediate(instr->hydrogen()->pattern())); 3574 __ push(Immediate(instr->hydrogen()->pattern()));
3558 __ push(Immediate(instr->hydrogen()->flags())); 3575 __ push(Immediate(instr->hydrogen()->flags()));
3559 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false); 3576 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, RESTORE_CONTEXT);
3560 __ mov(ebx, eax); 3577 __ mov(ebx, eax);
3561 3578
3562 __ bind(&materialized); 3579 __ bind(&materialized);
3563 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 3580 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3564 Label allocated, runtime_allocate; 3581 Label allocated, runtime_allocate;
3565 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); 3582 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
3566 __ jmp(&allocated); 3583 __ jmp(&allocated);
3567 3584
3568 __ bind(&runtime_allocate); 3585 __ bind(&runtime_allocate);
3569 __ push(ebx); 3586 __ push(ebx);
3570 __ push(Immediate(Smi::FromInt(size))); 3587 __ push(Immediate(Smi::FromInt(size)));
3571 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false); 3588 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, RESTORE_CONTEXT);
3572 __ pop(ebx); 3589 __ pop(ebx);
3573 3590
3574 __ bind(&allocated); 3591 __ bind(&allocated);
3575 // Copy the content into the newly allocated memory. 3592 // Copy the content into the newly allocated memory.
3576 // (Unroll copy loop once for better throughput). 3593 // (Unroll copy loop once for better throughput).
3577 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { 3594 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3578 __ mov(edx, FieldOperand(ebx, i)); 3595 __ mov(edx, FieldOperand(ebx, i));
3579 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); 3596 __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
3580 __ mov(FieldOperand(eax, i), edx); 3597 __ mov(FieldOperand(eax, i), edx);
3581 __ mov(FieldOperand(eax, i + kPointerSize), ecx); 3598 __ mov(FieldOperand(eax, i + kPointerSize), ecx);
3582 } 3599 }
3583 if ((size % (2 * kPointerSize)) != 0) { 3600 if ((size % (2 * kPointerSize)) != 0) {
3584 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); 3601 __ mov(edx, FieldOperand(ebx, size - kPointerSize));
3585 __ mov(FieldOperand(eax, size - kPointerSize), edx); 3602 __ mov(FieldOperand(eax, size - kPointerSize), edx);
3586 } 3603 }
3587 } 3604 }
3588 3605
3589 3606
// Emits code that materializes a closure for a function literal.
// Fast path: when the function has no literals and is not pretenured,
// a FastNewClosureStub allocates the closure in new space; otherwise
// the generic Runtime::kNewClosure call is used. Result follows the
// usual call convention (eax), per the surrounding codegen.
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (shared_info->num_literals() == 0 && !pretenure) {
    FastNewClosureStub stub;
    __ push(Immediate(shared_info));
    // RESTORE_CONTEXT: reload esi after the call (replaces the old
    // bool flag in this revision) — NOTE(review): confirm against
    // CallCode's declaration in lithium-codegen-ia32.h.
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
  } else {
    // Slow path: pass the current context, the shared function info,
    // and the pretenure flag to the runtime.
    __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
    __ push(Immediate(shared_info));
    __ push(Immediate(pretenure
                      ? Factory::true_value()
                      : Factory::false_value()));
    CallRuntime(Runtime::kNewClosure, 3, instr, RESTORE_CONTEXT);
  }
}
3608 3625
3609 3626
// Emits code for the typeof operator by pushing the operand (as an
// immediate when it is a compile-time constant, otherwise as a stack
// or register operand) and calling the Runtime::kTypeof function.
void LCodeGen::DoTypeof(LTypeof* instr) {
  LOperand* input = instr->InputAt(0);
  if (input->IsConstantOperand()) {
    __ push(ToImmediate(input));
  } else {
    __ push(ToOperand(input));
  }
  // RESTORE_CONTEXT: restore the context register after the runtime
  // call (replaces the old bool argument) — NOTE(review): confirm
  // against CallRuntime's declaration.
  CallRuntime(Runtime::kTypeof, 1, instr, RESTORE_CONTEXT);
}
3619 3636
3620 3637
3621 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { 3638 void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
3622 Register input = ToRegister(instr->InputAt(0)); 3639 Register input = ToRegister(instr->InputAt(0));
3623 Register result = ToRegister(instr->result()); 3640 Register result = ToRegister(instr->result());
3624 Label true_label; 3641 Label true_label;
3625 Label false_label; 3642 Label false_label;
3626 NearLabel done; 3643 NearLabel done;
3627 3644
(...skipping 190 matching lines...) Expand 10 before | Expand all | Expand 10 after
3818 3835
3819 3836
// Emits an interrupt/stack-overflow check: compares esp against the
// global stack limit and, when the limit has been reached (esp below
// the limit), calls the StackCheckStub; otherwise falls through.
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.
  NearLabel done;
  ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  // Unsigned compare: stack grows downward, so esp >= limit means OK.
  __ j(above_equal, &done);

  StackCheckStub stub;
  // RESTORE_CONTEXT: reload the context register after the stub call
  // (replaces the old bool flag) — NOTE(review): confirm semantics
  // against CallCode's declaration.
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
  __ bind(&done);
}
3831 3848
3832 3849
// Records the on-stack-replacement entry point. Emits no machine code;
// it only registers the deopt environment (with spilled-register info)
// and captures the current assembler PC offset as the OSR entry.
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  // Only one OSR entry may exist per compiled function.
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}
3848 3865
3849 3866
3850 #undef __ 3867 #undef __
3851 3868
3852 } } // namespace v8::internal 3869 } } // namespace v8::internal
3853 3870
3854 #endif // V8_TARGET_ARCH_IA32 3871 #endif // V8_TARGET_ARCH_IA32
OLDNEW
« no previous file with comments | « src/ia32/lithium-codegen-ia32.h ('k') | src/ia32/macro-assembler-ia32.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698