OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 390 matching lines...) | |
401 } else if (op->IsConstantOperand()) { | 401 } else if (op->IsConstantOperand()) { |
402 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op)); | 402 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op)); |
403 int src_index = DefineDeoptimizationLiteral(literal); | 403 int src_index = DefineDeoptimizationLiteral(literal); |
404 translation->StoreLiteral(src_index); | 404 translation->StoreLiteral(src_index); |
405 } else { | 405 } else { |
406 UNREACHABLE(); | 406 UNREACHABLE(); |
407 } | 407 } |
408 } | 408 } |
409 | 409 |
410 | 410 |
411 void LCodeGen::CallCode(Handle<Code> code, | 411 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
412 RelocInfo::Mode mode, | 412 RelocInfo::Mode mode, |
413 LInstruction* instr, | 413 LInstruction* instr, |
414 bool adjusted) { | 414 ContextMode context_mode, |
415 SafepointMode safepoint_mode) { | |
415 ASSERT(instr != NULL); | 416 ASSERT(instr != NULL); |
416 LPointerMap* pointers = instr->pointer_map(); | 417 LPointerMap* pointers = instr->pointer_map(); |
417 RecordPosition(pointers->position()); | 418 RecordPosition(pointers->position()); |
418 | 419 |
419 if (!adjusted) { | 420 if (context_mode == RESTORE_CONTEXT) { |
420 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 421 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
421 } | 422 } |
422 __ call(code, mode); | 423 __ call(code, mode); |
423 | 424 |
424 RegisterLazyDeoptimization(instr); | 425 RegisterLazyDeoptimization(instr, safepoint_mode); |
425 | 426 |
426 // Signal that we don't inline smi code before these stubs in the | 427 // Signal that we don't inline smi code before these stubs in the |
427 // optimizing code generator. | 428 // optimizing code generator. |
428 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || | 429 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || |
429 code->kind() == Code::COMPARE_IC) { | 430 code->kind() == Code::COMPARE_IC) { |
430 __ nop(); | 431 __ nop(); |
431 } | 432 } |
432 } | 433 } |
433 | 434 |
434 | 435 |
436 void LCodeGen::CallCode(Handle<Code> code, | |
437 RelocInfo::Mode mode, | |
438 LInstruction* instr, | |
439 ContextMode context_mode) { | |
440 CallCodeGeneric(code, mode, instr, context_mode, RECORD_SIMPLE_SAFEPOINT); | |
441 } | |
442 | |
443 | |
435 void LCodeGen::CallRuntime(const Runtime::Function* fun, | 444 void LCodeGen::CallRuntime(const Runtime::Function* fun, |
436 int argc, | 445 int argc, |
437 LInstruction* instr, | 446 LInstruction* instr, |
438 bool adjusted) { | 447 ContextMode context_mode) { |
439 ASSERT(instr != NULL); | 448 ASSERT(instr != NULL); |
440 ASSERT(instr->HasPointerMap()); | 449 ASSERT(instr->HasPointerMap()); |
441 LPointerMap* pointers = instr->pointer_map(); | 450 LPointerMap* pointers = instr->pointer_map(); |
442 RecordPosition(pointers->position()); | 451 RecordPosition(pointers->position()); |
443 | 452 |
444 if (!adjusted) { | 453 if (context_mode == RESTORE_CONTEXT) { |
445 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 454 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
446 } | 455 } |
447 __ CallRuntime(fun, argc); | 456 __ CallRuntime(fun, argc); |
448 | 457 |
449 RegisterLazyDeoptimization(instr); | 458 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); |
450 } | 459 } |
451 | 460 |
452 | 461 |
453 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 462 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, |
463 int argc, | |
464 LInstruction* instr) { | |
465 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | |
466 __ CallRuntimeSaveDoubles(id); | |
467 RecordSafepointWithRegisters( | |
468 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); | |
469 } | |
470 | |
471 | |
472 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, | |
473 SafepointMode safepoint_mode) { | |
454 // Create the environment to bailout to. If the call has side effects | 474 // Create the environment to bailout to. If the call has side effects |
455 // execution has to continue after the call otherwise execution can continue | 475 // execution has to continue after the call otherwise execution can continue |
456 // from a previous bailout point repeating the call. | 476 // from a previous bailout point repeating the call. |
457 LEnvironment* deoptimization_environment; | 477 LEnvironment* deoptimization_environment; |
458 if (instr->HasDeoptimizationEnvironment()) { | 478 if (instr->HasDeoptimizationEnvironment()) { |
459 deoptimization_environment = instr->deoptimization_environment(); | 479 deoptimization_environment = instr->deoptimization_environment(); |
460 } else { | 480 } else { |
461 deoptimization_environment = instr->environment(); | 481 deoptimization_environment = instr->environment(); |
462 } | 482 } |
463 | 483 |
464 RegisterEnvironmentForDeoptimization(deoptimization_environment); | 484 RegisterEnvironmentForDeoptimization(deoptimization_environment); |
465 RecordSafepoint(instr->pointer_map(), | 485 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { |
466 deoptimization_environment->deoptimization_index()); | 486 RecordSafepoint(instr->pointer_map(), |
487 deoptimization_environment->deoptimization_index()); | |
488 } else { | |
489 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
490 RecordSafepointWithRegisters( | |
491 instr->pointer_map(), | |
492 0, | |
493 deoptimization_environment->deoptimization_index()); | |
494 } | |
467 } | 495 } |
468 | 496 |
469 | 497 |
470 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { | 498 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { |
471 if (!environment->HasBeenRegistered()) { | 499 if (!environment->HasBeenRegistered()) { |
472 // Physical stack frame layout: | 500 // Physical stack frame layout: |
473 // -x ............. -4 0 ..................................... y | 501 // -x ............. -4 0 ..................................... y |
474 // [incoming arguments] [spill slots] [pushed outgoing arguments] | 502 // [incoming arguments] [spill slots] [pushed outgoing arguments] |
475 | 503 |
476 // Layout of the environment: | 504 // Layout of the environment: |
(...skipping 128 matching lines...) | |
605 | 633 |
606 inlined_function_count_ = deoptimization_literals_.length(); | 634 inlined_function_count_ = deoptimization_literals_.length(); |
607 } | 635 } |
608 | 636 |
609 | 637 |
610 void LCodeGen::RecordSafepoint( | 638 void LCodeGen::RecordSafepoint( |
611 LPointerMap* pointers, | 639 LPointerMap* pointers, |
612 Safepoint::Kind kind, | 640 Safepoint::Kind kind, |
613 int arguments, | 641 int arguments, |
614 int deoptimization_index) { | 642 int deoptimization_index) { |
643 ASSERT(kind == Safepoint::kWithRegisters || !safepoint_registers_pushed_); | |
fschneider, 2011/04/04 14:56:34:
You could assert stronger:
ASSERT((kind == Safepo… | |
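(The suggestion above is cut off in this view, so the exact condition fschneider had in mind is not recoverable. Purely as a hedged sketch — using only names that already appear in this patch (kind, Safepoint::kWithRegisters, safepoint_registers_pushed_) and assuming safepoint_registers_pushed_ is a plain bool flag — a stronger check could tie the safepoint kind to the pushed-registers state in both directions:)

    // Hypothetical stronger assertion, not the reviewer's literal text:
    // a register-saving safepoint is recorded if and only if the safepoint
    // registers have actually been pushed (e.g. by PushSafepointRegistersScope).
    ASSERT((kind == Safepoint::kWithRegisters) == safepoint_registers_pushed_);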
615 const ZoneList<LOperand*>* operands = pointers->operands(); | 644 const ZoneList<LOperand*>* operands = pointers->operands(); |
616 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 645 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
617 kind, arguments, deoptimization_index); | 646 kind, arguments, deoptimization_index); |
618 for (int i = 0; i < operands->length(); i++) { | 647 for (int i = 0; i < operands->length(); i++) { |
619 LOperand* pointer = operands->at(i); | 648 LOperand* pointer = operands->at(i); |
620 if (pointer->IsStackSlot()) { | 649 if (pointer->IsStackSlot()) { |
621 safepoint.DefinePointerSlot(pointer->index()); | 650 safepoint.DefinePointerSlot(pointer->index()); |
622 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 651 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
623 safepoint.DefinePointerRegister(ToRegister(pointer)); | 652 safepoint.DefinePointerRegister(ToRegister(pointer)); |
624 } | 653 } |
(...skipping 65 matching lines...) | |
690 // Nothing to do. | 719 // Nothing to do. |
691 } | 720 } |
692 | 721 |
693 | 722 |
694 void LCodeGen::DoCallStub(LCallStub* instr) { | 723 void LCodeGen::DoCallStub(LCallStub* instr) { |
695 ASSERT(ToRegister(instr->context()).is(esi)); | 724 ASSERT(ToRegister(instr->context()).is(esi)); |
696 ASSERT(ToRegister(instr->result()).is(eax)); | 725 ASSERT(ToRegister(instr->result()).is(eax)); |
697 switch (instr->hydrogen()->major_key()) { | 726 switch (instr->hydrogen()->major_key()) { |
698 case CodeStub::RegExpConstructResult: { | 727 case CodeStub::RegExpConstructResult: { |
699 RegExpConstructResultStub stub; | 728 RegExpConstructResultStub stub; |
700 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 729 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
701 break; | 730 break; |
702 } | 731 } |
703 case CodeStub::RegExpExec: { | 732 case CodeStub::RegExpExec: { |
704 RegExpExecStub stub; | 733 RegExpExecStub stub; |
705 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 734 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
706 break; | 735 break; |
707 } | 736 } |
708 case CodeStub::SubString: { | 737 case CodeStub::SubString: { |
709 SubStringStub stub; | 738 SubStringStub stub; |
710 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 739 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
711 break; | 740 break; |
712 } | 741 } |
713 case CodeStub::NumberToString: { | 742 case CodeStub::NumberToString: { |
714 NumberToStringStub stub; | 743 NumberToStringStub stub; |
715 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 744 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
716 break; | 745 break; |
717 } | 746 } |
718 case CodeStub::StringAdd: { | 747 case CodeStub::StringAdd: { |
719 StringAddStub stub(NO_STRING_ADD_FLAGS); | 748 StringAddStub stub(NO_STRING_ADD_FLAGS); |
720 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 749 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
721 break; | 750 break; |
722 } | 751 } |
723 case CodeStub::StringCompare: { | 752 case CodeStub::StringCompare: { |
724 StringCompareStub stub; | 753 StringCompareStub stub; |
725 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 754 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
726 break; | 755 break; |
727 } | 756 } |
728 case CodeStub::TranscendentalCache: { | 757 case CodeStub::TranscendentalCache: { |
729 TranscendentalCacheStub stub(instr->transcendental_type(), | 758 TranscendentalCacheStub stub(instr->transcendental_type(), |
730 TranscendentalCacheStub::TAGGED); | 759 TranscendentalCacheStub::TAGGED); |
731 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 760 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
732 break; | 761 break; |
733 } | 762 } |
734 default: | 763 default: |
735 UNREACHABLE(); | 764 UNREACHABLE(); |
736 } | 765 } |
737 } | 766 } |
738 | 767 |
739 | 768 |
740 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { | 769 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { |
741 // Nothing to do. | 770 // Nothing to do. |
(...skipping 394 matching lines...) | |
1136 | 1165 |
1137 void LCodeGen::DoBitNotI(LBitNotI* instr) { | 1166 void LCodeGen::DoBitNotI(LBitNotI* instr) { |
1138 LOperand* input = instr->InputAt(0); | 1167 LOperand* input = instr->InputAt(0); |
1139 ASSERT(input->Equals(instr->result())); | 1168 ASSERT(input->Equals(instr->result())); |
1140 __ not_(ToRegister(input)); | 1169 __ not_(ToRegister(input)); |
1141 } | 1170 } |
1142 | 1171 |
1143 | 1172 |
1144 void LCodeGen::DoThrow(LThrow* instr) { | 1173 void LCodeGen::DoThrow(LThrow* instr) { |
1145 __ push(ToOperand(instr->InputAt(0))); | 1174 __ push(ToOperand(instr->InputAt(0))); |
1146 CallRuntime(Runtime::kThrow, 1, instr, false); | 1175 CallRuntime(Runtime::kThrow, 1, instr, RESTORE_CONTEXT); |
1147 | 1176 |
1148 if (FLAG_debug_code) { | 1177 if (FLAG_debug_code) { |
1149 Comment("Unreachable code."); | 1178 Comment("Unreachable code."); |
1150 __ int3(); | 1179 __ int3(); |
1151 } | 1180 } |
1152 } | 1181 } |
1153 | 1182 |
1154 | 1183 |
1155 void LCodeGen::DoAddI(LAddI* instr) { | 1184 void LCodeGen::DoAddI(LAddI* instr) { |
1156 LOperand* left = instr->InputAt(0); | 1185 LOperand* left = instr->InputAt(0); |
(...skipping 54 matching lines...) | |
1211 } | 1240 } |
1212 } | 1241 } |
1213 | 1242 |
1214 | 1243 |
1215 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1244 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
1216 ASSERT(ToRegister(instr->InputAt(0)).is(edx)); | 1245 ASSERT(ToRegister(instr->InputAt(0)).is(edx)); |
1217 ASSERT(ToRegister(instr->InputAt(1)).is(eax)); | 1246 ASSERT(ToRegister(instr->InputAt(1)).is(eax)); |
1218 ASSERT(ToRegister(instr->result()).is(eax)); | 1247 ASSERT(ToRegister(instr->result()).is(eax)); |
1219 | 1248 |
1220 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); | 1249 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); |
1221 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 1250 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
1222 } | 1251 } |
1223 | 1252 |
1224 | 1253 |
1225 int LCodeGen::GetNextEmittedBlock(int block) { | 1254 int LCodeGen::GetNextEmittedBlock(int block) { |
1226 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { | 1255 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { |
1227 LLabel* label = chunk_->GetLabel(i); | 1256 LLabel* label = chunk_->GetLabel(i); |
1228 if (!label->HasReplacement()) return i; | 1257 if (!label->HasReplacement()) return i; |
1229 } | 1258 } |
1230 return -1; | 1259 return -1; |
1231 } | 1260 } |
(...skipping 91 matching lines...) | |
1323 __ jmp(deferred_stack_check->entry()); | 1352 __ jmp(deferred_stack_check->entry()); |
1324 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); | 1353 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); |
1325 } else { | 1354 } else { |
1326 __ jmp(chunk_->GetAssemblyLabel(block)); | 1355 __ jmp(chunk_->GetAssemblyLabel(block)); |
1327 } | 1356 } |
1328 } | 1357 } |
1329 } | 1358 } |
1330 | 1359 |
1331 | 1360 |
1332 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { | 1361 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { |
1333 __ pushad(); | 1362 PushSafepointRegistersScope scope(this); |
1334 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 1363 CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr); |
1335 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | |
1336 RecordSafepointWithRegisters( | |
1337 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | |
1338 __ popad(); | |
1339 } | 1364 } |
1340 | 1365 |
1341 void LCodeGen::DoGoto(LGoto* instr) { | 1366 void LCodeGen::DoGoto(LGoto* instr) { |
1342 class DeferredStackCheck: public LDeferredCode { | 1367 class DeferredStackCheck: public LDeferredCode { |
1343 public: | 1368 public: |
1344 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) | 1369 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) |
1345 : LDeferredCode(codegen), instr_(instr) { } | 1370 : LDeferredCode(codegen), instr_(instr) { } |
1346 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } | 1371 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } |
1347 private: | 1372 private: |
1348 LGoto* instr_; | 1373 LGoto* instr_; |
(...skipping 481 matching lines...) | |
1830 | 1855 |
1831 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); | 1856 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); |
1832 EmitBranch(true_block, false_block, equal); | 1857 EmitBranch(true_block, false_block, equal); |
1833 } | 1858 } |
1834 | 1859 |
1835 | 1860 |
1836 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 1861 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
1837 // Object and function are in fixed registers defined by the stub. | 1862 // Object and function are in fixed registers defined by the stub. |
1838 ASSERT(ToRegister(instr->context()).is(esi)); | 1863 ASSERT(ToRegister(instr->context()).is(esi)); |
1839 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 1864 InstanceofStub stub(InstanceofStub::kArgsInRegisters); |
1840 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1865 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
1841 | 1866 |
1842 NearLabel true_value, done; | 1867 NearLabel true_value, done; |
1843 __ test(eax, Operand(eax)); | 1868 __ test(eax, Operand(eax)); |
1844 __ j(zero, &true_value); | 1869 __ j(zero, &true_value); |
1845 __ mov(ToRegister(instr->result()), factory()->false_value()); | 1870 __ mov(ToRegister(instr->result()), factory()->false_value()); |
1846 __ jmp(&done); | 1871 __ jmp(&done); |
1847 __ bind(&true_value); | 1872 __ bind(&true_value); |
1848 __ mov(ToRegister(instr->result()), factory()->true_value()); | 1873 __ mov(ToRegister(instr->result()), factory()->true_value()); |
1849 __ bind(&done); | 1874 __ bind(&done); |
1850 } | 1875 } |
1851 | 1876 |
1852 | 1877 |
1853 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { | 1878 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { |
1854 ASSERT(ToRegister(instr->context()).is(esi)); | 1879 ASSERT(ToRegister(instr->context()).is(esi)); |
1855 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1880 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
1856 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1881 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
1857 | 1882 |
1858 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 1883 InstanceofStub stub(InstanceofStub::kArgsInRegisters); |
1859 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1884 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
1860 __ test(eax, Operand(eax)); | 1885 __ test(eax, Operand(eax)); |
1861 EmitBranch(true_block, false_block, zero); | 1886 EmitBranch(true_block, false_block, zero); |
1862 } | 1887 } |
1863 | 1888 |
1864 | 1889 |
1865 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 1890 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
1866 class DeferredInstanceOfKnownGlobal: public LDeferredCode { | 1891 class DeferredInstanceOfKnownGlobal: public LDeferredCode { |
1867 public: | 1892 public: |
1868 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 1893 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, |
1869 LInstanceOfKnownGlobal* instr) | 1894 LInstanceOfKnownGlobal* instr) |
(...skipping 51 matching lines...) | |
1921 | 1946 |
1922 // Here result has either true or false. Deferred code also produces true or | 1947 // Here result has either true or false. Deferred code also produces true or |
1923 // false object. | 1948 // false object. |
1924 __ bind(deferred->exit()); | 1949 __ bind(deferred->exit()); |
1925 __ bind(&done); | 1950 __ bind(&done); |
1926 } | 1951 } |
1927 | 1952 |
1928 | 1953 |
1929 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 1954 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
1930 Label* map_check) { | 1955 Label* map_check) { |
1931 __ PushSafepointRegisters(); | 1956 PushSafepointRegistersScope scope(this); |
1932 | 1957 |
1933 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | 1958 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
1934 flags = static_cast<InstanceofStub::Flags>( | 1959 flags = static_cast<InstanceofStub::Flags>( |
1935 flags | InstanceofStub::kArgsInRegisters); | 1960 flags | InstanceofStub::kArgsInRegisters); |
1936 flags = static_cast<InstanceofStub::Flags>( | 1961 flags = static_cast<InstanceofStub::Flags>( |
1937 flags | InstanceofStub::kCallSiteInlineCheck); | 1962 flags | InstanceofStub::kCallSiteInlineCheck); |
1938 flags = static_cast<InstanceofStub::Flags>( | 1963 flags = static_cast<InstanceofStub::Flags>( |
1939 flags | InstanceofStub::kReturnTrueFalseObject); | 1964 flags | InstanceofStub::kReturnTrueFalseObject); |
1940 InstanceofStub stub(flags); | 1965 InstanceofStub stub(flags); |
1941 | 1966 |
1942 // Get the temp register reserved by the instruction. This needs to be edi as | 1967 // Get the temp register reserved by the instruction. This needs to be edi as |
1943 // its slot of the pushing of safepoint registers is used to communicate the | 1968 // its slot of the pushing of safepoint registers is used to communicate the |
1944 // offset to the location of the map check. | 1969 // offset to the location of the map check. |
1945 Register temp = ToRegister(instr->TempAt(0)); | 1970 Register temp = ToRegister(instr->TempAt(0)); |
1946 ASSERT(temp.is(edi)); | 1971 ASSERT(temp.is(edi)); |
1947 __ mov(InstanceofStub::right(), Immediate(instr->function())); | 1972 __ mov(InstanceofStub::right(), Immediate(instr->function())); |
1948 static const int kAdditionalDelta = 16; | 1973 static const int kAdditionalDelta = 16; |
1949 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | 1974 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; |
1950 __ mov(temp, Immediate(delta)); | 1975 __ mov(temp, Immediate(delta)); |
1951 __ StoreToSafepointRegisterSlot(temp, temp); | 1976 __ StoreToSafepointRegisterSlot(temp, temp); |
1952 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 1977 CallCodeGeneric(stub.GetCode(), |
1978 RelocInfo::CODE_TARGET, | |
1979 instr, | |
1980 RESTORE_CONTEXT, | |
1981 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
1953 // Put the result value into the eax slot and restore all registers. | 1982 // Put the result value into the eax slot and restore all registers. |
1954 __ StoreToSafepointRegisterSlot(eax, eax); | 1983 __ StoreToSafepointRegisterSlot(eax, eax); |
1955 __ PopSafepointRegisters(); | |
1956 } | 1984 } |
1957 | 1985 |
1958 | 1986 |
1959 static Condition ComputeCompareCondition(Token::Value op) { | 1987 static Condition ComputeCompareCondition(Token::Value op) { |
1960 switch (op) { | 1988 switch (op) { |
1961 case Token::EQ_STRICT: | 1989 case Token::EQ_STRICT: |
1962 case Token::EQ: | 1990 case Token::EQ: |
1963 return equal; | 1991 return equal; |
1964 case Token::LT: | 1992 case Token::LT: |
1965 return less; | 1993 return less; |
1966 case Token::GT: | 1994 case Token::GT: |
1967 return greater; | 1995 return greater; |
1968 case Token::LTE: | 1996 case Token::LTE: |
1969 return less_equal; | 1997 return less_equal; |
1970 case Token::GTE: | 1998 case Token::GTE: |
1971 return greater_equal; | 1999 return greater_equal; |
1972 default: | 2000 default: |
1973 UNREACHABLE(); | 2001 UNREACHABLE(); |
1974 return no_condition; | 2002 return no_condition; |
1975 } | 2003 } |
1976 } | 2004 } |
1977 | 2005 |
1978 | 2006 |
1979 void LCodeGen::DoCmpT(LCmpT* instr) { | 2007 void LCodeGen::DoCmpT(LCmpT* instr) { |
1980 Token::Value op = instr->op(); | 2008 Token::Value op = instr->op(); |
1981 | 2009 |
1982 Handle<Code> ic = CompareIC::GetUninitialized(op); | 2010 Handle<Code> ic = CompareIC::GetUninitialized(op); |
1983 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); | 2011 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
1984 | 2012 |
1985 Condition condition = ComputeCompareCondition(op); | 2013 Condition condition = ComputeCompareCondition(op); |
1986 if (op == Token::GT || op == Token::LTE) { | 2014 if (op == Token::GT || op == Token::LTE) { |
1987 condition = ReverseCondition(condition); | 2015 condition = ReverseCondition(condition); |
1988 } | 2016 } |
1989 NearLabel true_value, done; | 2017 NearLabel true_value, done; |
1990 __ test(eax, Operand(eax)); | 2018 __ test(eax, Operand(eax)); |
1991 __ j(condition, &true_value); | 2019 __ j(condition, &true_value); |
1992 __ mov(ToRegister(instr->result()), factory()->false_value()); | 2020 __ mov(ToRegister(instr->result()), factory()->false_value()); |
1993 __ jmp(&done); | 2021 __ jmp(&done); |
1994 __ bind(&true_value); | 2022 __ bind(&true_value); |
1995 __ mov(ToRegister(instr->result()), factory()->true_value()); | 2023 __ mov(ToRegister(instr->result()), factory()->true_value()); |
1996 __ bind(&done); | 2024 __ bind(&done); |
1997 } | 2025 } |
1998 | 2026 |
1999 | 2027 |
2000 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { | 2028 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { |
2001 Token::Value op = instr->op(); | 2029 Token::Value op = instr->op(); |
2002 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 2030 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
2003 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 2031 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
2004 | 2032 |
2005 Handle<Code> ic = CompareIC::GetUninitialized(op); | 2033 Handle<Code> ic = CompareIC::GetUninitialized(op); |
2006 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); | 2034 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2007 | 2035 |
2008 // The compare stub expects compare condition and the input operands | 2036 // The compare stub expects compare condition and the input operands |
2009 // reversed for GT and LTE. | 2037 // reversed for GT and LTE. |
2010 Condition condition = ComputeCompareCondition(op); | 2038 Condition condition = ComputeCompareCondition(op); |
2011 if (op == Token::GT || op == Token::LTE) { | 2039 if (op == Token::GT || op == Token::LTE) { |
2012 condition = ReverseCondition(condition); | 2040 condition = ReverseCondition(condition); |
2013 } | 2041 } |
2014 __ test(eax, Operand(eax)); | 2042 __ test(eax, Operand(eax)); |
2015 EmitBranch(true_block, false_block, condition); | 2043 EmitBranch(true_block, false_block, condition); |
2016 } | 2044 } |
(...skipping 27 matching lines...) | |
2044 | 2072 |
2045 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { | 2073 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { |
2046 ASSERT(ToRegister(instr->context()).is(esi)); | 2074 ASSERT(ToRegister(instr->context()).is(esi)); |
2047 ASSERT(ToRegister(instr->global_object()).is(eax)); | 2075 ASSERT(ToRegister(instr->global_object()).is(eax)); |
2048 ASSERT(ToRegister(instr->result()).is(eax)); | 2076 ASSERT(ToRegister(instr->result()).is(eax)); |
2049 | 2077 |
2050 __ mov(ecx, instr->name()); | 2078 __ mov(ecx, instr->name()); |
2051 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : | 2079 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : |
2052 RelocInfo::CODE_TARGET_CONTEXT; | 2080 RelocInfo::CODE_TARGET_CONTEXT; |
2053 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2081 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
2054 CallCode(ic, mode, instr); | 2082 CallCode(ic, mode, instr, CONTEXT_ADJUSTED); |
2055 } | 2083 } |
2056 | 2084 |
2057 | 2085 |
2058 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) { | 2086 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) { |
2059 Register value = ToRegister(instr->InputAt(0)); | 2087 Register value = ToRegister(instr->InputAt(0)); |
2060 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell()); | 2088 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell()); |
2061 | 2089 |
2062 // If the cell we are storing to contains the hole it could have | 2090 // If the cell we are storing to contains the hole it could have |
2063 // been deleted from the property dictionary. In that case, we need | 2091 // been deleted from the property dictionary. In that case, we need |
2064 // to update the property details in the property dictionary to mark | 2092 // to update the property details in the property dictionary to mark |
(...skipping 63 matching lines...) | |
2128 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { | 2156 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { |
2129 Register object = ToRegister(instr->object()); | 2157 Register object = ToRegister(instr->object()); |
2130 Register result = ToRegister(instr->result()); | 2158 Register result = ToRegister(instr->result()); |
2131 | 2159 |
2132 int map_count = instr->hydrogen()->types()->length(); | 2160 int map_count = instr->hydrogen()->types()->length(); |
2133 Handle<String> name = instr->hydrogen()->name(); | 2161 Handle<String> name = instr->hydrogen()->name(); |
2134 if (map_count == 0) { | 2162 if (map_count == 0) { |
2135 ASSERT(instr->hydrogen()->need_generic()); | 2163 ASSERT(instr->hydrogen()->need_generic()); |
2136 __ mov(ecx, name); | 2164 __ mov(ecx, name); |
2137 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2165 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
2138 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); | 2166 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2139 } else { | 2167 } else { |
2140 NearLabel done; | 2168 NearLabel done; |
2141 for (int i = 0; i < map_count - 1; ++i) { | 2169 for (int i = 0; i < map_count - 1; ++i) { |
2142 Handle<Map> map = instr->hydrogen()->types()->at(i); | 2170 Handle<Map> map = instr->hydrogen()->types()->at(i); |
2143 NearLabel next; | 2171 NearLabel next; |
2144 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); | 2172 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); |
2145 __ j(not_equal, &next); | 2173 __ j(not_equal, &next); |
2146 EmitLoadField(result, object, map, name); | 2174 EmitLoadField(result, object, map, name); |
2147 __ jmp(&done); | 2175 __ jmp(&done); |
2148 __ bind(&next); | 2176 __ bind(&next); |
2149 } | 2177 } |
2150 Handle<Map> map = instr->hydrogen()->types()->last(); | 2178 Handle<Map> map = instr->hydrogen()->types()->last(); |
2151 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); | 2179 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); |
2152 if (instr->hydrogen()->need_generic()) { | 2180 if (instr->hydrogen()->need_generic()) { |
2153 NearLabel generic; | 2181 NearLabel generic; |
2154 __ j(not_equal, &generic); | 2182 __ j(not_equal, &generic); |
2155 EmitLoadField(result, object, map, name); | 2183 EmitLoadField(result, object, map, name); |
2156 __ jmp(&done); | 2184 __ jmp(&done); |
2157 __ bind(&generic); | 2185 __ bind(&generic); |
2158 __ mov(ecx, name); | 2186 __ mov(ecx, name); |
2159 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2187 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
2160 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); | 2188 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2161 } else { | 2189 } else { |
2162 DeoptimizeIf(not_equal, instr->environment()); | 2190 DeoptimizeIf(not_equal, instr->environment()); |
2163 EmitLoadField(result, object, map, name); | 2191 EmitLoadField(result, object, map, name); |
2164 } | 2192 } |
2165 __ bind(&done); | 2193 __ bind(&done); |
2166 } | 2194 } |
2167 } | 2195 } |
2168 | 2196 |
2169 | 2197 |
2170 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 2198 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
2171 ASSERT(ToRegister(instr->context()).is(esi)); | 2199 ASSERT(ToRegister(instr->context()).is(esi)); |
2172 ASSERT(ToRegister(instr->object()).is(eax)); | 2200 ASSERT(ToRegister(instr->object()).is(eax)); |
2173 ASSERT(ToRegister(instr->result()).is(eax)); | 2201 ASSERT(ToRegister(instr->result()).is(eax)); |
2174 | 2202 |
2175 __ mov(ecx, instr->name()); | 2203 __ mov(ecx, instr->name()); |
2176 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2204 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
2177 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2205 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
2178 } | 2206 } |
2179 | 2207 |
2180 | 2208 |
2181 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { | 2209 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { |
2182 Register function = ToRegister(instr->function()); | 2210 Register function = ToRegister(instr->function()); |
2183 Register temp = ToRegister(instr->TempAt(0)); | 2211 Register temp = ToRegister(instr->TempAt(0)); |
2184 Register result = ToRegister(instr->result()); | 2212 Register result = ToRegister(instr->result()); |
2185 | 2213 |
2186 // Check that the function really is a function. | 2214 // Check that the function really is a function. |
2187 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); | 2215 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); |
(...skipping 142 matching lines...) | |
2330 } | 2358 } |
2331 } | 2359 } |
2332 | 2360 |
2333 | 2361 |
2334 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 2362 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
2335 ASSERT(ToRegister(instr->context()).is(esi)); | 2363 ASSERT(ToRegister(instr->context()).is(esi)); |
2336 ASSERT(ToRegister(instr->object()).is(edx)); | 2364 ASSERT(ToRegister(instr->object()).is(edx)); |
2337 ASSERT(ToRegister(instr->key()).is(eax)); | 2365 ASSERT(ToRegister(instr->key()).is(eax)); |
2338 | 2366 |
2339 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); | 2367 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); |
2340 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2368 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
2341 } | 2369 } |
2342 | 2370 |
2343 | 2371 |
2344 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 2372 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
2345 Register result = ToRegister(instr->result()); | 2373 Register result = ToRegister(instr->result()); |
2346 | 2374 |
2347 // Check for arguments adapter frame. | 2375 // Check for arguments adapter frame. |
2348 NearLabel done, adapted; | 2376 NearLabel done, adapted; |
2349 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 2377 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
2350 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset)); | 2378 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset)); |
(...skipping 167 matching lines...) | |
2518 RecordPosition(pointers->position()); | 2546 RecordPosition(pointers->position()); |
2519 | 2547 |
2520 // Invoke function. | 2548 // Invoke function. |
2521 if (*function == *info()->closure()) { | 2549 if (*function == *info()->closure()) { |
2522 __ CallSelf(); | 2550 __ CallSelf(); |
2523 } else { | 2551 } else { |
2524 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 2552 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); |
2525 } | 2553 } |
2526 | 2554 |
2527 // Setup deoptimization. | 2555 // Setup deoptimization. |
2528 RegisterLazyDeoptimization(instr); | 2556 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); |
2529 } | 2557 } |
2530 | 2558 |
2531 | 2559 |
2532 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2560 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
2533 ASSERT(ToRegister(instr->result()).is(eax)); | 2561 ASSERT(ToRegister(instr->result()).is(eax)); |
2534 __ mov(edi, instr->function()); | 2562 __ mov(edi, instr->function()); |
2535 CallKnownFunction(instr->function(), instr->arity(), instr); | 2563 CallKnownFunction(instr->function(), instr->arity(), instr); |
2536 } | 2564 } |
2537 | 2565 |
2538 | 2566 |
2539 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { | 2567 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { |
2540 Register input_reg = ToRegister(instr->InputAt(0)); | 2568 Register input_reg = ToRegister(instr->InputAt(0)); |
2541 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 2569 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
2542 factory()->heap_number_map()); | 2570 factory()->heap_number_map()); |
2543 DeoptimizeIf(not_equal, instr->environment()); | 2571 DeoptimizeIf(not_equal, instr->environment()); |
2544 | 2572 |
2545 Label done; | 2573 Label done; |
2546 Register tmp = input_reg.is(eax) ? ecx : eax; | 2574 Register tmp = input_reg.is(eax) ? ecx : eax; |
2547 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx; | 2575 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx; |
2548 | 2576 |
2549 // Preserve the value of all registers. | 2577 // Preserve the value of all registers. |
2550 __ PushSafepointRegisters(); | 2578 PushSafepointRegistersScope scope(this); |
2551 | 2579 |
2552 Label negative; | 2580 Label negative; |
2553 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 2581 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
2554 // Check the sign of the argument. If the argument is positive, just | 2582 // Check the sign of the argument. If the argument is positive, just |
2555 // return it. We do not need to patch the stack since |input| and | 2583 // return it. We do not need to patch the stack since |input| and |
2556 // |result| are the same register and |input| will be restored | 2584 // |result| are the same register and |input| will be restored |
2557 // unchanged by popping safepoint registers. | 2585 // unchanged by popping safepoint registers. |
2558 __ test(tmp, Immediate(HeapNumber::kSignMask)); | 2586 __ test(tmp, Immediate(HeapNumber::kSignMask)); |
2559 __ j(not_zero, &negative); | 2587 __ j(not_zero, &negative); |
2560 __ jmp(&done); | 2588 __ jmp(&done); |
2561 | 2589 |
2562 __ bind(&negative); | 2590 __ bind(&negative); |
2563 | 2591 |
2564 Label allocated, slow; | 2592 Label allocated, slow; |
2565 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); | 2593 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); |
2566 __ jmp(&allocated); | 2594 __ jmp(&allocated); |
2567 | 2595 |
2568 // Slow case: Call the runtime system to do the number allocation. | 2596 // Slow case: Call the runtime system to do the number allocation. |
2569 __ bind(&slow); | 2597 __ bind(&slow); |
2570 | 2598 |
2571 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 2599 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); |
2572 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 2600 |
2573 RecordSafepointWithRegisters( | |
2574 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | |
2575 // Set the pointer to the new heap number in tmp. | 2601 // Set the pointer to the new heap number in tmp. |
2576 if (!tmp.is(eax)) __ mov(tmp, eax); | 2602 if (!tmp.is(eax)) __ mov(tmp, eax); |
2577 | 2603 |
2578 // Restore input_reg after call to runtime. | 2604 // Restore input_reg after call to runtime. |
2579 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); | 2605 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); |
2580 | 2606 |
2581 __ bind(&allocated); | 2607 __ bind(&allocated); |
2582 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 2608 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
2583 __ and_(tmp2, ~HeapNumber::kSignMask); | 2609 __ and_(tmp2, ~HeapNumber::kSignMask); |
2584 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2); | 2610 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2); |
2585 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); | 2611 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); |
2586 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2); | 2612 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2); |
2587 __ StoreToSafepointRegisterSlot(input_reg, tmp); | 2613 __ StoreToSafepointRegisterSlot(input_reg, tmp); |
2588 | 2614 |
2589 __ bind(&done); | 2615 __ bind(&done); |
2590 __ PopSafepointRegisters(); | |
2591 } | 2616 } |
2592 | 2617 |
2593 | 2618 |
2594 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) { | 2619 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) { |
2595 Register input_reg = ToRegister(instr->InputAt(0)); | 2620 Register input_reg = ToRegister(instr->InputAt(0)); |
2596 __ test(input_reg, Operand(input_reg)); | 2621 __ test(input_reg, Operand(input_reg)); |
2597 Label is_positive; | 2622 Label is_positive; |
2598 __ j(not_sign, &is_positive); | 2623 __ j(not_sign, &is_positive); |
2599 __ neg(input_reg); | 2624 __ neg(input_reg); |
2600 __ test(input_reg, Operand(input_reg)); | 2625 __ test(input_reg, Operand(input_reg)); |
(...skipping 171 matching lines...) | |
2772 __ fstp_d(Operand(esp, 0)); | 2797 __ fstp_d(Operand(esp, 0)); |
2773 __ movdbl(result_reg, Operand(esp, 0)); | 2798 __ movdbl(result_reg, Operand(esp, 0)); |
2774 __ add(Operand(esp), Immediate(kDoubleSize)); | 2799 __ add(Operand(esp), Immediate(kDoubleSize)); |
2775 } | 2800 } |
2776 | 2801 |
2777 | 2802 |
2778 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { | 2803 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { |
2779 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2804 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
2780 TranscendentalCacheStub stub(TranscendentalCache::LOG, | 2805 TranscendentalCacheStub stub(TranscendentalCache::LOG, |
2781 TranscendentalCacheStub::UNTAGGED); | 2806 TranscendentalCacheStub::UNTAGGED); |
2782 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 2807 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2783 } | 2808 } |
2784 | 2809 |
2785 | 2810 |
2786 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { | 2811 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { |
2787 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2812 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
2788 TranscendentalCacheStub stub(TranscendentalCache::COS, | 2813 TranscendentalCacheStub stub(TranscendentalCache::COS, |
2789 TranscendentalCacheStub::UNTAGGED); | 2814 TranscendentalCacheStub::UNTAGGED); |
2790 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 2815 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2791 } | 2816 } |
2792 | 2817 |
2793 | 2818 |
2794 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { | 2819 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { |
2795 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2820 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
2796 TranscendentalCacheStub stub(TranscendentalCache::SIN, | 2821 TranscendentalCacheStub stub(TranscendentalCache::SIN, |
2797 TranscendentalCacheStub::UNTAGGED); | 2822 TranscendentalCacheStub::UNTAGGED); |
2798 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 2823 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
2799 } | 2824 } |
2800 | 2825 |
2801 | 2826 |
2802 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { | 2827 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { |
2803 switch (instr->op()) { | 2828 switch (instr->op()) { |
2804 case kMathAbs: | 2829 case kMathAbs: |
2805 DoMathAbs(instr); | 2830 DoMathAbs(instr); |
2806 break; | 2831 break; |
2807 case kMathFloor: | 2832 case kMathFloor: |
2808 DoMathFloor(instr); | 2833 DoMathFloor(instr); |
(...skipping 24 matching lines...) | |
2833 | 2858 |
2834 | 2859 |
2835 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { | 2860 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { |
2836 ASSERT(ToRegister(instr->context()).is(esi)); | 2861 ASSERT(ToRegister(instr->context()).is(esi)); |
2837 ASSERT(ToRegister(instr->key()).is(ecx)); | 2862 ASSERT(ToRegister(instr->key()).is(ecx)); |
2838 ASSERT(ToRegister(instr->result()).is(eax)); | 2863 ASSERT(ToRegister(instr->result()).is(eax)); |
2839 | 2864 |
2840 int arity = instr->arity(); | 2865 int arity = instr->arity(); |
2841 Handle<Code> ic = isolate()->stub_cache()-> | 2866 Handle<Code> ic = isolate()->stub_cache()-> |
2842 ComputeKeyedCallInitialize(arity, NOT_IN_LOOP); | 2867 ComputeKeyedCallInitialize(arity, NOT_IN_LOOP); |
2843 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2868 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
2844 } | 2869 } |
2845 | 2870 |
2846 | 2871 |
2847 void LCodeGen::DoCallNamed(LCallNamed* instr) { | 2872 void LCodeGen::DoCallNamed(LCallNamed* instr) { |
2848 ASSERT(ToRegister(instr->context()).is(esi)); | 2873 ASSERT(ToRegister(instr->context()).is(esi)); |
2849 ASSERT(ToRegister(instr->result()).is(eax)); | 2874 ASSERT(ToRegister(instr->result()).is(eax)); |
2850 | 2875 |
2851 int arity = instr->arity(); | 2876 int arity = instr->arity(); |
2852 Handle<Code> ic = isolate()->stub_cache()-> | 2877 Handle<Code> ic = isolate()->stub_cache()-> |
2853 ComputeCallInitialize(arity, NOT_IN_LOOP); | 2878 ComputeCallInitialize(arity, NOT_IN_LOOP); |
2854 __ mov(ecx, instr->name()); | 2879 __ mov(ecx, instr->name()); |
2855 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2880 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
2856 } | 2881 } |
2857 | 2882 |
2858 | 2883 |
2859 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 2884 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
2860 ASSERT(ToRegister(instr->context()).is(esi)); | 2885 ASSERT(ToRegister(instr->context()).is(esi)); |
2861 ASSERT(ToRegister(instr->result()).is(eax)); | 2886 ASSERT(ToRegister(instr->result()).is(eax)); |
2862 | 2887 |
2863 int arity = instr->arity(); | 2888 int arity = instr->arity(); |
2864 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); | 2889 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); |
2865 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2890 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
2866 __ Drop(1); | 2891 __ Drop(1); |
2867 } | 2892 } |
2868 | 2893 |
2869 | 2894 |
2870 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { | 2895 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { |
2871 ASSERT(ToRegister(instr->context()).is(esi)); | 2896 ASSERT(ToRegister(instr->context()).is(esi)); |
2872 ASSERT(ToRegister(instr->result()).is(eax)); | 2897 ASSERT(ToRegister(instr->result()).is(eax)); |
2873 | 2898 |
2874 int arity = instr->arity(); | 2899 int arity = instr->arity(); |
2875 Handle<Code> ic = isolate()->stub_cache()-> | 2900 Handle<Code> ic = isolate()->stub_cache()-> |
2876 ComputeCallInitialize(arity, NOT_IN_LOOP); | 2901 ComputeCallInitialize(arity, NOT_IN_LOOP); |
2877 __ mov(ecx, instr->name()); | 2902 __ mov(ecx, instr->name()); |
2878 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); | 2903 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr, CONTEXT_ADJUSTED); |
2879 } | 2904 } |
2880 | 2905 |
2881 | 2906 |
2882 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { | 2907 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { |
2883 ASSERT(ToRegister(instr->result()).is(eax)); | 2908 ASSERT(ToRegister(instr->result()).is(eax)); |
2884 __ mov(edi, instr->target()); | 2909 __ mov(edi, instr->target()); |
2885 CallKnownFunction(instr->target(), instr->arity(), instr); | 2910 CallKnownFunction(instr->target(), instr->arity(), instr); |
2886 } | 2911 } |
2887 | 2912 |
2888 | 2913 |
2889 void LCodeGen::DoCallNew(LCallNew* instr) { | 2914 void LCodeGen::DoCallNew(LCallNew* instr) { |
2890 ASSERT(ToRegister(instr->context()).is(esi)); | 2915 ASSERT(ToRegister(instr->context()).is(esi)); |
2891 ASSERT(ToRegister(instr->constructor()).is(edi)); | 2916 ASSERT(ToRegister(instr->constructor()).is(edi)); |
2892 ASSERT(ToRegister(instr->result()).is(eax)); | 2917 ASSERT(ToRegister(instr->result()).is(eax)); |
2893 | 2918 |
2894 Handle<Code> builtin = isolate()->builtins()->JSConstructCall(); | 2919 Handle<Code> builtin = isolate()->builtins()->JSConstructCall(); |
2895 __ Set(eax, Immediate(instr->arity())); | 2920 __ Set(eax, Immediate(instr->arity())); |
2896 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); | 2921 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr, CONTEXT_ADJUSTED); |
2897 } | 2922 } |
2898 | 2923 |
2899 | 2924 |
2900 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 2925 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
2901 CallRuntime(instr->function(), instr->arity(), instr, false); | 2926 CallRuntime(instr->function(), instr->arity(), instr, RESTORE_CONTEXT); |
2902 } | 2927 } |
2903 | 2928 |
2904 | 2929 |
2905 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { | 2930 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { |
2906 Register object = ToRegister(instr->object()); | 2931 Register object = ToRegister(instr->object()); |
2907 Register value = ToRegister(instr->value()); | 2932 Register value = ToRegister(instr->value()); |
2908 int offset = instr->offset(); | 2933 int offset = instr->offset(); |
2909 | 2934 |
2910 if (!instr->transition().is_null()) { | 2935 if (!instr->transition().is_null()) { |
2911 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); | 2936 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); |
(...skipping 22 matching lines...) Expand all Loading... | |
2934 | 2959 |
2935 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { | 2960 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { |
2936 ASSERT(ToRegister(instr->context()).is(esi)); | 2961 ASSERT(ToRegister(instr->context()).is(esi)); |
2937 ASSERT(ToRegister(instr->object()).is(edx)); | 2962 ASSERT(ToRegister(instr->object()).is(edx)); |
2938 ASSERT(ToRegister(instr->value()).is(eax)); | 2963 ASSERT(ToRegister(instr->value()).is(eax)); |
2939 | 2964 |
2940 __ mov(ecx, instr->name()); | 2965 __ mov(ecx, instr->name()); |
2941 Handle<Code> ic = info_->is_strict() | 2966 Handle<Code> ic = info_->is_strict() |
2942 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 2967 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
2943 : isolate()->builtins()->StoreIC_Initialize(); | 2968 : isolate()->builtins()->StoreIC_Initialize(); |
2944 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2969 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
2945 } | 2970 } |
2946 | 2971 |
2947 | 2972 |
2948 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { | 2973 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { |
2949 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); | 2974 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); |
2950 DeoptimizeIf(above_equal, instr->environment()); | 2975 DeoptimizeIf(above_equal, instr->environment()); |
2951 } | 2976 } |
2952 | 2977 |
2953 | 2978 |
2954 void LCodeGen::DoStoreKeyedSpecializedArrayElement( | 2979 void LCodeGen::DoStoreKeyedSpecializedArrayElement( |
(...skipping 79 matching lines...) | |
3034 | 3059 |
3035 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 3060 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
3036 ASSERT(ToRegister(instr->context()).is(esi)); | 3061 ASSERT(ToRegister(instr->context()).is(esi)); |
3037 ASSERT(ToRegister(instr->object()).is(edx)); | 3062 ASSERT(ToRegister(instr->object()).is(edx)); |
3038 ASSERT(ToRegister(instr->key()).is(ecx)); | 3063 ASSERT(ToRegister(instr->key()).is(ecx)); |
3039 ASSERT(ToRegister(instr->value()).is(eax)); | 3064 ASSERT(ToRegister(instr->value()).is(eax)); |
3040 | 3065 |
3041 Handle<Code> ic = info_->is_strict() | 3066 Handle<Code> ic = info_->is_strict() |
3042 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() | 3067 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
3043 : isolate()->builtins()->KeyedStoreIC_Initialize(); | 3068 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
3044 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3069 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); |
3045 } | 3070 } |
3046 | 3071 |
3047 | 3072 |
3048 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 3073 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
3049 class DeferredStringCharCodeAt: public LDeferredCode { | 3074 class DeferredStringCharCodeAt: public LDeferredCode { |
3050 public: | 3075 public: |
3051 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) | 3076 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) |
3052 : LDeferredCode(codegen), instr_(instr) { } | 3077 : LDeferredCode(codegen), instr_(instr) { } |
3053 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } | 3078 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } |
3054 private: | 3079 private: |
(...skipping 97 matching lines...) | |
3152 | 3177 |
3153 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) { | 3178 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) { |
3154 Register string = ToRegister(instr->string()); | 3179 Register string = ToRegister(instr->string()); |
3155 Register result = ToRegister(instr->result()); | 3180 Register result = ToRegister(instr->result()); |
3156 | 3181 |
3157 // TODO(3095996): Get rid of this. For now, we need to make the | 3182 // TODO(3095996): Get rid of this. For now, we need to make the |
3158 // result register contain a valid pointer because it is already | 3183 // result register contain a valid pointer because it is already |
3159 // contained in the register pointer map. | 3184 // contained in the register pointer map. |
3160 __ Set(result, Immediate(0)); | 3185 __ Set(result, Immediate(0)); |
3161 | 3186 |
3162 __ PushSafepointRegisters(); | 3187 PushSafepointRegistersScope scope(this); |
3163 __ push(string); | 3188 __ push(string); |
3164 // Push the index as a smi. This is safe because of the checks in | 3189 // Push the index as a smi. This is safe because of the checks in |
3165 // DoStringCharCodeAt above. | 3190 // DoStringCharCodeAt above. |
3166 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); | 3191 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); |
3167 if (instr->index()->IsConstantOperand()) { | 3192 if (instr->index()->IsConstantOperand()) { |
3168 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); | 3193 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); |
3169 __ push(Immediate(Smi::FromInt(const_index))); | 3194 __ push(Immediate(Smi::FromInt(const_index))); |
3170 } else { | 3195 } else { |
3171 Register index = ToRegister(instr->index()); | 3196 Register index = ToRegister(instr->index()); |
3172 __ SmiTag(index); | 3197 __ SmiTag(index); |
3173 __ push(index); | 3198 __ push(index); |
3174 } | 3199 } |
3175 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 3200 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr); |
3176 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt); | |
3177 RecordSafepointWithRegisters( | |
3178 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex); | |
3179 if (FLAG_debug_code) { | 3201 if (FLAG_debug_code) { |
3180 __ AbortIfNotSmi(eax); | 3202 __ AbortIfNotSmi(eax); |
3181 } | 3203 } |
3182 __ SmiUntag(eax); | 3204 __ SmiUntag(eax); |
3183 __ StoreToSafepointRegisterSlot(result, eax); | 3205 __ StoreToSafepointRegisterSlot(result, eax); |
3184 __ PopSafepointRegisters(); | |
3185 } | 3206 } |
3186 | 3207 |
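The deferred-code blocks in this hunk switch from an explicit PushSafepointRegisters ... PopSafepointRegisters pair plus a hand-rolled context reload, CallRuntimeSaveDoubles and RecordSafepointWithRegisters sequence to a PushSafepointRegistersScope RAII object and a single CallRuntimeFromDeferred helper. A minimal sketch of that pattern under assumed bodies; only the two names come from the patch, and the implementations below are illustrative (the real helper also takes the Runtime::FunctionId and the LInstruction, simplified here):

  #include <cstdio>

  // Stand-ins for the macro-assembler hooks the deferred code used to call
  // directly.
  static void PushSafepointRegisters() { std::puts("push safepoint registers"); }
  static void PopSafepointRegisters()  { std::puts("pop safepoint registers"); }

  // RAII wrapper: registers are popped on every exit path from the deferred
  // block, so call sites can no longer forget the matching Pop.
  class PushSafepointRegistersScope {
   public:
    PushSafepointRegistersScope()  { PushSafepointRegisters(); }
    ~PushSafepointRegistersScope() { PopSafepointRegisters(); }
  };

  // Stand-in for CallRuntimeFromDeferred: restore the context, make the
  // runtime call, and record the safepoint in one place instead of
  // repeating the three steps at every deferred call site.
  static void CallRuntimeFromDeferred(const char* function, int argc) {
    std::printf("restore esi; call %s with %d args; record safepoint\n",
                function, argc);
  }

  int main() {
    PushSafepointRegistersScope scope;          // was: __ PushSafepointRegisters()
    CallRuntimeFromDeferred("Runtime::kStringCharCodeAt", 2);
    return 0;                                   // scope dtor: __ PopSafepointRegisters()
  }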
3187 | 3208 |
3188 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { | 3209 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { |
3189 class DeferredStringCharFromCode: public LDeferredCode { | 3210 class DeferredStringCharFromCode: public LDeferredCode { |
3190 public: | 3211 public: |
3191 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) | 3212 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) |
3192 : LDeferredCode(codegen), instr_(instr) { } | 3213 : LDeferredCode(codegen), instr_(instr) { } |
3193 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); } | 3214 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); } |
3194 private: | 3215 private: |
(...skipping 22 matching lines...) | |
3217 | 3238 |
3218 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) { | 3239 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) { |
3219 Register char_code = ToRegister(instr->char_code()); | 3240 Register char_code = ToRegister(instr->char_code()); |
3220 Register result = ToRegister(instr->result()); | 3241 Register result = ToRegister(instr->result()); |
3221 | 3242 |
3222 // TODO(3095996): Get rid of this. For now, we need to make the | 3243 // TODO(3095996): Get rid of this. For now, we need to make the |
3223 // result register contain a valid pointer because it is already | 3244 // result register contain a valid pointer because it is already |
3224 // contained in the register pointer map. | 3245 // contained in the register pointer map. |
3225 __ Set(result, Immediate(0)); | 3246 __ Set(result, Immediate(0)); |
3226 | 3247 |
3227 __ PushSafepointRegisters(); | 3248 PushSafepointRegistersScope scope(this); |
3228 __ SmiTag(char_code); | 3249 __ SmiTag(char_code); |
3229 __ push(char_code); | 3250 __ push(char_code); |
3230 __ CallRuntimeSaveDoubles(Runtime::kCharFromCode); | 3251 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr); |
3231 RecordSafepointWithRegisters( | |
3232 instr->pointer_map(), 1, Safepoint::kNoDeoptimizationIndex); | |
3233 __ StoreToSafepointRegisterSlot(result, eax); | 3252 __ StoreToSafepointRegisterSlot(result, eax); |
3234 __ PopSafepointRegisters(); | |
3235 } | 3253 } |
3236 | 3254 |
3237 | 3255 |
3238 void LCodeGen::DoStringLength(LStringLength* instr) { | 3256 void LCodeGen::DoStringLength(LStringLength* instr) { |
3239 Register string = ToRegister(instr->string()); | 3257 Register string = ToRegister(instr->string()); |
3240 Register result = ToRegister(instr->result()); | 3258 Register result = ToRegister(instr->result()); |
3241 __ mov(result, FieldOperand(string, String::kLengthOffset)); | 3259 __ mov(result, FieldOperand(string, String::kLengthOffset)); |
3242 } | 3260 } |
3243 | 3261 |
3244 | 3262 |
(...skipping 26 matching lines...) | |
3271 __ bind(deferred->exit()); | 3289 __ bind(deferred->exit()); |
3272 } | 3290 } |
3273 | 3291 |
3274 | 3292 |
3275 void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) { | 3293 void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) { |
3276 Label slow; | 3294 Label slow; |
3277 Register reg = ToRegister(instr->InputAt(0)); | 3295 Register reg = ToRegister(instr->InputAt(0)); |
3278 Register tmp = reg.is(eax) ? ecx : eax; | 3296 Register tmp = reg.is(eax) ? ecx : eax; |
3279 | 3297 |
3280 // Preserve the value of all registers. | 3298 // Preserve the value of all registers. |
3281 __ PushSafepointRegisters(); | 3299 PushSafepointRegistersScope scope(this); |
3282 | 3300 |
3283 // There was overflow, so bits 30 and 31 of the original integer | 3301 // There was overflow, so bits 30 and 31 of the original integer |
3284 // disagree. Try to allocate a heap number in new space and store | 3302 // disagree. Try to allocate a heap number in new space and store |
3285 // the value in there. If that fails, call the runtime system. | 3303 // the value in there. If that fails, call the runtime system. |
3286 NearLabel done; | 3304 NearLabel done; |
3287 __ SmiUntag(reg); | 3305 __ SmiUntag(reg); |
3288 __ xor_(reg, 0x80000000); | 3306 __ xor_(reg, 0x80000000); |
3289 __ cvtsi2sd(xmm0, Operand(reg)); | 3307 __ cvtsi2sd(xmm0, Operand(reg)); |
3290 if (FLAG_inline_new) { | 3308 if (FLAG_inline_new) { |
3291 __ AllocateHeapNumber(reg, tmp, no_reg, &slow); | 3309 __ AllocateHeapNumber(reg, tmp, no_reg, &slow); |
3292 __ jmp(&done); | 3310 __ jmp(&done); |
3293 } | 3311 } |
3294 | 3312 |
3295 // Slow case: Call the runtime system to do the number allocation. | 3313 // Slow case: Call the runtime system to do the number allocation. |
3296 __ bind(&slow); | 3314 __ bind(&slow); |
3297 | 3315 |
3298 // TODO(3095996): Put a valid pointer value in the stack slot where the result | 3316 // TODO(3095996): Put a valid pointer value in the stack slot where the result |
3299 // register is stored, as this register is in the pointer map, but contains an | 3317 // register is stored, as this register is in the pointer map, but contains an |
3300 // integer value. | 3318 // integer value. |
3301 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); | 3319 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); |
3302 | 3320 |
3303 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 3321 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); |
3304 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | |
3305 RecordSafepointWithRegisters( | |
3306 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | |
3307 if (!reg.is(eax)) __ mov(reg, eax); | 3322 if (!reg.is(eax)) __ mov(reg, eax); |
3308 | 3323 |
3309 // Done. Put the value in xmm0 into the value of the allocated heap | 3324 // Done. Put the value in xmm0 into the value of the allocated heap |
3310 // number. | 3325 // number. |
3311 __ bind(&done); | 3326 __ bind(&done); |
3312 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); | 3327 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); |
3313 __ StoreToSafepointRegisterSlot(reg, reg); | 3328 __ StoreToSafepointRegisterSlot(reg, reg); |
3314 __ PopSafepointRegisters(); | |
3315 } | 3329 } |
3316 | 3330 |
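The SmiUntag plus xor 0x80000000 sequence above recovers the original 32-bit integer after a failed SmiTag: on ia32 a smi is the value shifted left by one, and when bits 30 and 31 of the original disagree the shift drops bit 31 but keeps its complement in bit 30, so an arithmetic shift right by one followed by flipping bit 31 restores the value. A small self-contained check of that arithmetic in plain C++ (not V8 code; the helper name is made up):

  #include <cassert>
  #include <cstdint>

  // Emulates 'sar reg, 1' followed by 'xor reg, 0x80000000' on the value
  // left behind by an overflowing SmiTag (the original shifted left by one,
  // with the top bit lost).
  static uint32_t RecoverFromSmiOverflow(uint32_t tagged) {
    uint32_t untagged = (tagged >> 1) | (tagged & 0x80000000u);  // arithmetic shift
    return untagged ^ 0x80000000u;                               // flip bit 31 back
  }

  int main() {
    const uint32_t inputs[] = {0x40000000u, 0x7FFFFFFFu, 0x80000000u, 0xBFFFFFFFu};
    for (uint32_t value : inputs) {
      // Bits 30 and 31 disagree in each input, i.e. SmiTag overflows on it.
      assert((((value >> 30) ^ (value >> 31)) & 1u) == 1u);
      uint32_t tagged = value << 1;  // what the overflowing SmiTag leaves behind
      assert(RecoverFromSmiOverflow(tagged) == value);
    }
    return 0;
  }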
3317 | 3331 |
3318 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { | 3332 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { |
3319 class DeferredNumberTagD: public LDeferredCode { | 3333 class DeferredNumberTagD: public LDeferredCode { |
3320 public: | 3334 public: |
3321 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) | 3335 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) |
3322 : LDeferredCode(codegen), instr_(instr) { } | 3336 : LDeferredCode(codegen), instr_(instr) { } |
3323 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); } | 3337 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); } |
3324 private: | 3338 private: |
(...skipping 15 matching lines...) | |
3340 } | 3354 } |
3341 | 3355 |
3342 | 3356 |
3343 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { | 3357 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { |
3344 // TODO(3095996): Get rid of this. For now, we need to make the | 3358 // TODO(3095996): Get rid of this. For now, we need to make the |
3345 // result register contain a valid pointer because it is already | 3359 // result register contain a valid pointer because it is already |
3346 // contained in the register pointer map. | 3360 // contained in the register pointer map. |
3347 Register reg = ToRegister(instr->result()); | 3361 Register reg = ToRegister(instr->result()); |
3348 __ Set(reg, Immediate(0)); | 3362 __ Set(reg, Immediate(0)); |
3349 | 3363 |
3350 __ PushSafepointRegisters(); | 3364 PushSafepointRegistersScope scope(this); |
3351 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 3365 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); |
3352 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | |
3353 RecordSafepointWithRegisters( | |
3354 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | |
3355 __ StoreToSafepointRegisterSlot(reg, eax); | 3366 __ StoreToSafepointRegisterSlot(reg, eax); |
3356 __ PopSafepointRegisters(); | |
3357 } | 3367 } |
3358 | 3368 |
3359 | 3369 |
3360 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 3370 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
3361 LOperand* input = instr->InputAt(0); | 3371 LOperand* input = instr->InputAt(0); |
3362 ASSERT(input->IsRegister() && input->Equals(instr->result())); | 3372 ASSERT(input->IsRegister() && input->Equals(instr->result())); |
3363 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); | 3373 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); |
3364 __ SmiTag(ToRegister(input)); | 3374 __ SmiTag(ToRegister(input)); |
3365 } | 3375 } |
3366 | 3376 |
(...skipping 394 matching lines...) | |
3761 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3771 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
3762 __ push(Immediate(instr->hydrogen()->constant_elements())); | 3772 __ push(Immediate(instr->hydrogen()->constant_elements())); |
3763 | 3773 |
3764 // Pick the right runtime function or stub to call. | 3774 // Pick the right runtime function or stub to call. |
3765 int length = instr->hydrogen()->length(); | 3775 int length = instr->hydrogen()->length(); |
3766 if (instr->hydrogen()->IsCopyOnWrite()) { | 3776 if (instr->hydrogen()->IsCopyOnWrite()) { |
3767 ASSERT(instr->hydrogen()->depth() == 1); | 3777 ASSERT(instr->hydrogen()->depth() == 1); |
3768 FastCloneShallowArrayStub::Mode mode = | 3778 FastCloneShallowArrayStub::Mode mode = |
3769 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; | 3779 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; |
3770 FastCloneShallowArrayStub stub(mode, length); | 3780 FastCloneShallowArrayStub stub(mode, length); |
3771 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 3781 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
3772 } else if (instr->hydrogen()->depth() > 1) { | 3782 } else if (instr->hydrogen()->depth() > 1) { |
3773 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false); | 3783 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, RESTORE_CONTEXT); |
3774 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 3784 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
3775 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false); | 3785 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, RESTORE_CONTEXT); |
3776 } else { | 3786 } else { |
3777 FastCloneShallowArrayStub::Mode mode = | 3787 FastCloneShallowArrayStub::Mode mode = |
3778 FastCloneShallowArrayStub::CLONE_ELEMENTS; | 3788 FastCloneShallowArrayStub::CLONE_ELEMENTS; |
3779 FastCloneShallowArrayStub stub(mode, length); | 3789 FastCloneShallowArrayStub stub(mode, length); |
3780 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 3790 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
3781 } | 3791 } |
3782 } | 3792 } |
3783 | 3793 |
3784 | 3794 |
3785 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 3795 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
3786 ASSERT(ToRegister(instr->context()).is(esi)); | 3796 ASSERT(ToRegister(instr->context()).is(esi)); |
3787 // Setup the parameters to the stub/runtime call. | 3797 // Setup the parameters to the stub/runtime call. |
3788 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 3798 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); |
3789 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); | 3799 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); |
3790 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3800 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
3791 __ push(Immediate(instr->hydrogen()->constant_properties())); | 3801 __ push(Immediate(instr->hydrogen()->constant_properties())); |
3792 int flags = instr->hydrogen()->fast_elements() | 3802 int flags = instr->hydrogen()->fast_elements() |
3793 ? ObjectLiteral::kFastElements | 3803 ? ObjectLiteral::kFastElements |
3794 : ObjectLiteral::kNoFlags; | 3804 : ObjectLiteral::kNoFlags; |
3795 flags |= instr->hydrogen()->has_function() | 3805 flags |= instr->hydrogen()->has_function() |
3796 ? ObjectLiteral::kHasFunction | 3806 ? ObjectLiteral::kHasFunction |
3797 : ObjectLiteral::kNoFlags; | 3807 : ObjectLiteral::kNoFlags; |
3798 __ push(Immediate(Smi::FromInt(flags))); | 3808 __ push(Immediate(Smi::FromInt(flags))); |
3799 | 3809 |
3800 // Pick the right runtime function to call. | 3810 // Pick the right runtime function to call. |
3801 if (instr->hydrogen()->depth() > 1) { | 3811 if (instr->hydrogen()->depth() > 1) { |
3802 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); | 3812 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr, CONTEXT_ADJUSTED); |
3803 } else { | 3813 } else { |
3804 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); | 3814 CallRuntime(Runtime::kCreateObjectLiteralShallow, |
3815 4, | |
3816 instr, | |
3817 CONTEXT_ADJUSTED); | |
3805 } | 3818 } |
3806 } | 3819 } |
3807 | 3820 |
3808 | 3821 |
3809 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { | 3822 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { |
3810 ASSERT(ToRegister(instr->InputAt(0)).is(eax)); | 3823 ASSERT(ToRegister(instr->InputAt(0)).is(eax)); |
3811 __ push(eax); | 3824 __ push(eax); |
3812 CallRuntime(Runtime::kToFastProperties, 1, instr); | 3825 CallRuntime(Runtime::kToFastProperties, 1, instr, CONTEXT_ADJUSTED); |
3813 } | 3826 } |
3814 | 3827 |
3815 | 3828 |
3816 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { | 3829 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { |
3817 NearLabel materialized; | 3830 NearLabel materialized; |
3818 // Registers will be used as follows: | 3831 // Registers will be used as follows: |
3819 // edi = JS function. | 3832 // edi = JS function. |
3820 // ecx = literals array. | 3833 // ecx = literals array. |
3821 // ebx = regexp literal. | 3834 // ebx = regexp literal. |
3822 // eax = regexp literal clone. | 3835 // eax = regexp literal clone. |
3823 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 3836 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); |
3824 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset)); | 3837 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset)); |
3825 int literal_offset = FixedArray::kHeaderSize + | 3838 int literal_offset = FixedArray::kHeaderSize + |
3826 instr->hydrogen()->literal_index() * kPointerSize; | 3839 instr->hydrogen()->literal_index() * kPointerSize; |
3827 __ mov(ebx, FieldOperand(ecx, literal_offset)); | 3840 __ mov(ebx, FieldOperand(ecx, literal_offset)); |
3828 __ cmp(ebx, factory()->undefined_value()); | 3841 __ cmp(ebx, factory()->undefined_value()); |
3829 __ j(not_equal, &materialized); | 3842 __ j(not_equal, &materialized); |
3830 | 3843 |
3831 // Create regexp literal using runtime function | 3844 // Create regexp literal using runtime function |
3832 // Result will be in eax. | 3845 // Result will be in eax. |
3833 __ push(ecx); | 3846 __ push(ecx); |
3834 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3847 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
3835 __ push(Immediate(instr->hydrogen()->pattern())); | 3848 __ push(Immediate(instr->hydrogen()->pattern())); |
3836 __ push(Immediate(instr->hydrogen()->flags())); | 3849 __ push(Immediate(instr->hydrogen()->flags())); |
3837 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false); | 3850 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, RESTORE_CONTEXT); |
3838 __ mov(ebx, eax); | 3851 __ mov(ebx, eax); |
3839 | 3852 |
3840 __ bind(&materialized); | 3853 __ bind(&materialized); |
3841 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 3854 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
3842 Label allocated, runtime_allocate; | 3855 Label allocated, runtime_allocate; |
3843 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); | 3856 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); |
3844 __ jmp(&allocated); | 3857 __ jmp(&allocated); |
3845 | 3858 |
3846 __ bind(&runtime_allocate); | 3859 __ bind(&runtime_allocate); |
3847 __ push(ebx); | 3860 __ push(ebx); |
3848 __ push(Immediate(Smi::FromInt(size))); | 3861 __ push(Immediate(Smi::FromInt(size))); |
3849 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false); | 3862 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, RESTORE_CONTEXT); |
3850 __ pop(ebx); | 3863 __ pop(ebx); |
3851 | 3864 |
3852 __ bind(&allocated); | 3865 __ bind(&allocated); |
3853 // Copy the content into the newly allocated memory. | 3866 // Copy the content into the newly allocated memory. |
3854 // (Unroll copy loop once for better throughput). | 3867 // (Unroll copy loop once for better throughput). |
3855 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { | 3868 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { |
3856 __ mov(edx, FieldOperand(ebx, i)); | 3869 __ mov(edx, FieldOperand(ebx, i)); |
3857 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); | 3870 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); |
3858 __ mov(FieldOperand(eax, i), edx); | 3871 __ mov(FieldOperand(eax, i), edx); |
3859 __ mov(FieldOperand(eax, i + kPointerSize), ecx); | 3872 __ mov(FieldOperand(eax, i + kPointerSize), ecx); |
3860 } | 3873 } |
3861 if ((size % (2 * kPointerSize)) != 0) { | 3874 if ((size % (2 * kPointerSize)) != 0) { |
3862 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); | 3875 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); |
3863 __ mov(FieldOperand(eax, size - kPointerSize), edx); | 3876 __ mov(FieldOperand(eax, size - kPointerSize), edx); |
3864 } | 3877 } |
3865 } | 3878 } |
3866 | 3879 |
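The copy loop at the end of DoRegExpLiteral moves two pointer-sized words per iteration and then patches up a single trailing word when the object size is an odd number of words, roughly halving the loop overhead of a word-at-a-time copy. A plain C++ rendering of the same unrolling under made-up names (illustrative, not V8 code):

  #include <cassert>
  #include <cstdint>

  const int kPointerSize = 4;  // ia32

  // Copy size_in_bytes bytes, two words per iteration, with a one-word tail
  // fix-up, mirroring the unrolled loop emitted for the regexp literal clone.
  static void UnrolledCopy(uint32_t* dst, const uint32_t* src, int size_in_bytes) {
    int i = 0;
    for (; i < size_in_bytes - kPointerSize; i += 2 * kPointerSize) {
      dst[i / kPointerSize] = src[i / kPointerSize];          // word i
      dst[i / kPointerSize + 1] = src[i / kPointerSize + 1];  // word i + 1
    }
    if ((size_in_bytes % (2 * kPointerSize)) != 0) {
      int last = (size_in_bytes - kPointerSize) / kPointerSize;
      dst[last] = src[last];                                  // odd trailing word
    }
  }

  int main() {
    const int kWords = 5;  // odd word count exercises the tail fix-up
    uint32_t src[kWords] = {1, 2, 3, 4, 5};
    uint32_t dst[kWords] = {0, 0, 0, 0, 0};
    UnrolledCopy(dst, src, kWords * kPointerSize);
    for (int w = 0; w < kWords; ++w) assert(dst[w] == src[w]);
    return 0;
  }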
3867 | 3880 |
3868 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 3881 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
3869 // Use the fast case closure allocation code that allocates in new | 3882 // Use the fast case closure allocation code that allocates in new |
3870 // space for nested functions that don't need literals cloning. | 3883 // space for nested functions that don't need literals cloning. |
3871 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); | 3884 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); |
3872 bool pretenure = instr->hydrogen()->pretenure(); | 3885 bool pretenure = instr->hydrogen()->pretenure(); |
3873 if (!pretenure && shared_info->num_literals() == 0) { | 3886 if (!pretenure && shared_info->num_literals() == 0) { |
3874 FastNewClosureStub stub( | 3887 FastNewClosureStub stub( |
3875 shared_info->strict_mode() ? kStrictMode : kNonStrictMode); | 3888 shared_info->strict_mode() ? kStrictMode : kNonStrictMode); |
3876 __ push(Immediate(shared_info)); | 3889 __ push(Immediate(shared_info)); |
3877 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 3890 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
3878 } else { | 3891 } else { |
3879 __ push(Operand(ebp, StandardFrameConstants::kContextOffset)); | 3892 __ push(Operand(ebp, StandardFrameConstants::kContextOffset)); |
3880 __ push(Immediate(shared_info)); | 3893 __ push(Immediate(shared_info)); |
3881 __ push(Immediate(pretenure | 3894 __ push(Immediate(pretenure |
3882 ? factory()->true_value() | 3895 ? factory()->true_value() |
3883 : factory()->false_value())); | 3896 : factory()->false_value())); |
3884 CallRuntime(Runtime::kNewClosure, 3, instr, false); | 3897 CallRuntime(Runtime::kNewClosure, 3, instr, RESTORE_CONTEXT); |
3885 } | 3898 } |
3886 } | 3899 } |
3887 | 3900 |
3888 | 3901 |
3889 void LCodeGen::DoTypeof(LTypeof* instr) { | 3902 void LCodeGen::DoTypeof(LTypeof* instr) { |
3890 LOperand* input = instr->InputAt(0); | 3903 LOperand* input = instr->InputAt(0); |
3891 if (input->IsConstantOperand()) { | 3904 if (input->IsConstantOperand()) { |
3892 __ push(ToImmediate(input)); | 3905 __ push(ToImmediate(input)); |
3893 } else { | 3906 } else { |
3894 __ push(ToOperand(input)); | 3907 __ push(ToOperand(input)); |
3895 } | 3908 } |
3896 CallRuntime(Runtime::kTypeof, 1, instr, false); | 3909 CallRuntime(Runtime::kTypeof, 1, instr, RESTORE_CONTEXT); |
3897 } | 3910 } |
3898 | 3911 |
3899 | 3912 |
3900 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { | 3913 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { |
3901 Register input = ToRegister(instr->InputAt(0)); | 3914 Register input = ToRegister(instr->InputAt(0)); |
3902 Register result = ToRegister(instr->result()); | 3915 Register result = ToRegister(instr->result()); |
3903 Label true_label; | 3916 Label true_label; |
3904 Label false_label; | 3917 Label false_label; |
3905 NearLabel done; | 3918 NearLabel done; |
3906 | 3919 |
(...skipping 182 matching lines...) | |
4089 | 4102 |
4090 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 4103 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
4091 // Perform stack overflow check. | 4104 // Perform stack overflow check. |
4092 NearLabel done; | 4105 NearLabel done; |
4093 ExternalReference stack_limit = | 4106 ExternalReference stack_limit = |
4094 ExternalReference::address_of_stack_limit(isolate()); | 4107 ExternalReference::address_of_stack_limit(isolate()); |
4095 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 4108 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
4096 __ j(above_equal, &done); | 4109 __ j(above_equal, &done); |
4097 | 4110 |
4098 StackCheckStub stub; | 4111 StackCheckStub stub; |
4099 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); | 4112 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); |
4100 __ bind(&done); | 4113 __ bind(&done); |
4101 } | 4114 } |
4102 | 4115 |
4103 | 4116 |
4104 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 4117 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
4105 // This is a pseudo-instruction that ensures that the environment here is | 4118 // This is a pseudo-instruction that ensures that the environment here is |
4106 // properly registered for deoptimization and records the assembler's PC | 4119 // properly registered for deoptimization and records the assembler's PC |
4107 // offset. | 4120 // offset. |
4108 LEnvironment* environment = instr->environment(); | 4121 LEnvironment* environment = instr->environment(); |
4109 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), | 4122 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), |
4110 instr->SpilledDoubleRegisterArray()); | 4123 instr->SpilledDoubleRegisterArray()); |
4111 | 4124 |
4112 // If the environment were already registered, we would have no way of | 4125 // If the environment were already registered, we would have no way of |
4113 // backpatching it with the spill slot operands. | 4126 // backpatching it with the spill slot operands. |
4114 ASSERT(!environment->HasBeenRegistered()); | 4127 ASSERT(!environment->HasBeenRegistered()); |
4115 RegisterEnvironmentForDeoptimization(environment); | 4128 RegisterEnvironmentForDeoptimization(environment); |
4116 ASSERT(osr_pc_offset_ == -1); | 4129 ASSERT(osr_pc_offset_ == -1); |
4117 osr_pc_offset_ = masm()->pc_offset(); | 4130 osr_pc_offset_ = masm()->pc_offset(); |
4118 } | 4131 } |
4119 | 4132 |
4120 | 4133 |
4121 #undef __ | 4134 #undef __ |
4122 | 4135 |
4123 } } // namespace v8::internal | 4136 } } // namespace v8::internal |
4124 | 4137 |
4125 #endif // V8_TARGET_ARCH_IA32 | 4138 #endif // V8_TARGET_ARCH_IA32 |