Chromium Code Reviews

Side by Side Diff: src/ia32/lithium-codegen-ia32.cc

Issue 6793017: In LCodeGen::DoDeferredLInstanceOfKnownGlobal, emit a safepoint with registers for the call to the stub. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: port to arm and x64 (created 9 years, 8 months ago)
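
The patch replaces the old boolean "adjusted" flag on CallCode/CallRuntime with a named ContextMode, threads a new SafepointMode through the call helpers so deferred code can record safepoints with registers, and adds the CallCodeGeneric/CallRuntimeFromDeferred helpers plus a PushSafepointRegistersScope used in the deferred paths. A minimal sketch of the two mode enums as they are presumably declared in lithium-codegen-ia32.h (the header is not part of this diff, so only the enumerator names below are taken from the code; the rest is an assumption):

  // Sketch only; the actual declarations live in the (unshown) header.
  enum ContextMode {
    CONTEXT_ADJUSTED,  // esi already holds the context this code expects
    RESTORE_CONTEXT    // reload esi from the frame before making the call
  };

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };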
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 390 matching lines...)
401 } else if (op->IsConstantOperand()) { 401 } else if (op->IsConstantOperand()) {
402 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op)); 402 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
403 int src_index = DefineDeoptimizationLiteral(literal); 403 int src_index = DefineDeoptimizationLiteral(literal);
404 translation->StoreLiteral(src_index); 404 translation->StoreLiteral(src_index);
405 } else { 405 } else {
406 UNREACHABLE(); 406 UNREACHABLE();
407 } 407 }
408 } 408 }
409 409
410 410
411 void LCodeGen::CallCode(Handle<Code> code, 411 void LCodeGen::CallCodeGeneric(Handle<Code> code,
412 RelocInfo::Mode mode, 412 RelocInfo::Mode mode,
413 LInstruction* instr, 413 LInstruction* instr,
414 bool adjusted) { 414 ContextMode context_mode,
415 SafepointMode safepoint_mode) {
415 ASSERT(instr != NULL); 416 ASSERT(instr != NULL);
416 LPointerMap* pointers = instr->pointer_map(); 417 LPointerMap* pointers = instr->pointer_map();
417 RecordPosition(pointers->position()); 418 RecordPosition(pointers->position());
418 419
419 if (!adjusted) { 420 if (context_mode == RESTORE_CONTEXT) {
420 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 421 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
421 } 422 }
422 __ call(code, mode); 423 __ call(code, mode);
423 424
424 RegisterLazyDeoptimization(instr); 425 RegisterLazyDeoptimization(instr, safepoint_mode);
425 426
426 // Signal that we don't inline smi code before these stubs in the 427 // Signal that we don't inline smi code before these stubs in the
427 // optimizing code generator. 428 // optimizing code generator.
428 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || 429 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
429 code->kind() == Code::COMPARE_IC) { 430 code->kind() == Code::COMPARE_IC) {
430 __ nop(); 431 __ nop();
431 } 432 }
432 } 433 }
433 434
434 435
436 void LCodeGen::CallCode(Handle<Code> code,
437 RelocInfo::Mode mode,
438 LInstruction* instr,
439 ContextMode context_mode) {
440 CallCodeGeneric(code, mode, instr, context_mode, RECORD_SIMPLE_SAFEPOINT);
441 }
442
443
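
Most of the remaining hunks are a mechanical rewrite of CallCode/CallRuntime call sites from the old boolean flag to the new enum; schematically:

  // Before this patch:
  CallCode(ic, RelocInfo::CODE_TARGET, instr);         // context already in esi
  CallCode(ic, RelocInfo::CODE_TARGET, instr, false);  // restore esi first
  // After this patch:
  CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
  CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);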
435 void LCodeGen::CallRuntime(const Runtime::Function* fun, 444 void LCodeGen::CallRuntime(const Runtime::Function* fun,
436 int argc, 445 int argc,
437 LInstruction* instr, 446 LInstruction* instr,
438 bool adjusted) { 447 ContextMode context_mode) {
439 ASSERT(instr != NULL); 448 ASSERT(instr != NULL);
440 ASSERT(instr->HasPointerMap()); 449 ASSERT(instr->HasPointerMap());
441 LPointerMap* pointers = instr->pointer_map(); 450 LPointerMap* pointers = instr->pointer_map();
442 RecordPosition(pointers->position()); 451 RecordPosition(pointers->position());
443 452
444 if (!adjusted) { 453 if (context_mode == RESTORE_CONTEXT) {
445 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 454 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
446 } 455 }
447 __ CallRuntime(fun, argc); 456 __ CallRuntime(fun, argc);
448 457
449 RegisterLazyDeoptimization(instr); 458 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
450 } 459 }
451 460
452 461
453 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { 462 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
463 int argc,
464 LInstruction* instr) {
465 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
466 __ CallRuntimeSaveDoubles(id);
467 RecordSafepointWithRegisters(
468 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
469 }
470
471
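
CallRuntimeFromDeferred bundles the restore-context, CallRuntimeSaveDoubles and RecordSafepointWithRegisters steps that the deferred handlers below used to spell out by hand, and it assumes the caller has already pushed the safepoint registers. The expected pattern, mirroring the deferred handlers later in this file (arg and result stand in for whatever registers a particular handler uses):

  PushSafepointRegistersScope scope(this);
  __ push(arg);                                  // arguments, if any, pushed by the caller
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  __ StoreToSafepointRegisterSlot(result, eax);  // pick up the runtime result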
472 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
473 SafepointMode safepoint_mode) {
454 // Create the environment to bailout to. If the call has side effects 474 // Create the environment to bailout to. If the call has side effects
455 // execution has to continue after the call otherwise execution can continue 475 // execution has to continue after the call otherwise execution can continue
456 // from a previous bailout point repeating the call. 476 // from a previous bailout point repeating the call.
457 LEnvironment* deoptimization_environment; 477 LEnvironment* deoptimization_environment;
458 if (instr->HasDeoptimizationEnvironment()) { 478 if (instr->HasDeoptimizationEnvironment()) {
459 deoptimization_environment = instr->deoptimization_environment(); 479 deoptimization_environment = instr->deoptimization_environment();
460 } else { 480 } else {
461 deoptimization_environment = instr->environment(); 481 deoptimization_environment = instr->environment();
462 } 482 }
463 483
464 RegisterEnvironmentForDeoptimization(deoptimization_environment); 484 RegisterEnvironmentForDeoptimization(deoptimization_environment);
465 RecordSafepoint(instr->pointer_map(), 485 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
466 deoptimization_environment->deoptimization_index()); 486 RecordSafepoint(instr->pointer_map(),
487 deoptimization_environment->deoptimization_index());
488 } else {
489 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
490 RecordSafepointWithRegisters(
491 instr->pointer_map(),
492 0,
493 deoptimization_environment->deoptimization_index());
494 }
467 } 495 }
468 496
469 497
470 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { 498 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
471 if (!environment->HasBeenRegistered()) { 499 if (!environment->HasBeenRegistered()) {
472 // Physical stack frame layout: 500 // Physical stack frame layout:
473 // -x ............. -4 0 ..................................... y 501 // -x ............. -4 0 ..................................... y
474 // [incoming arguments] [spill slots] [pushed outgoing arguments] 502 // [incoming arguments] [spill slots] [pushed outgoing arguments]
475 503
476 // Layout of the environment: 504 // Layout of the environment:
(...skipping 128 matching lines...)
605 633
606 inlined_function_count_ = deoptimization_literals_.length(); 634 inlined_function_count_ = deoptimization_literals_.length();
607 } 635 }
608 636
609 637
610 void LCodeGen::RecordSafepoint( 638 void LCodeGen::RecordSafepoint(
611 LPointerMap* pointers, 639 LPointerMap* pointers,
612 Safepoint::Kind kind, 640 Safepoint::Kind kind,
613 int arguments, 641 int arguments,
614 int deoptimization_index) { 642 int deoptimization_index) {
643 ASSERT(kind == expected_safepoint_kind_);
615 const ZoneList<LOperand*>* operands = pointers->operands(); 644 const ZoneList<LOperand*>* operands = pointers->operands();
616 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), 645 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
617 kind, arguments, deoptimization_index); 646 kind, arguments, deoptimization_index);
618 for (int i = 0; i < operands->length(); i++) { 647 for (int i = 0; i < operands->length(); i++) {
619 LOperand* pointer = operands->at(i); 648 LOperand* pointer = operands->at(i);
620 if (pointer->IsStackSlot()) { 649 if (pointer->IsStackSlot()) {
621 safepoint.DefinePointerSlot(pointer->index()); 650 safepoint.DefinePointerSlot(pointer->index());
622 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { 651 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
623 safepoint.DefinePointerRegister(ToRegister(pointer)); 652 safepoint.DefinePointerRegister(ToRegister(pointer));
624 } 653 }
(...skipping 65 matching lines...)
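
The new ASSERT(kind == expected_safepoint_kind_) means the code generator now tracks which kind of safepoint may be recorded at the current point. The PushSafepointRegistersScope used by the deferred code below presumably flips that field while the safepoint registers are on the stack. A rough sketch of such a scope (the real class is declared in the header, which is not shown here, so the member details are assumptions):

  class LCodeGen::PushSafepointRegistersScope {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
    }
    ~PushSafepointRegistersScope() {
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }
   private:
    LCodeGen* codegen_;
  };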
690 // Nothing to do. 719 // Nothing to do.
691 } 720 }
692 721
693 722
694 void LCodeGen::DoCallStub(LCallStub* instr) { 723 void LCodeGen::DoCallStub(LCallStub* instr) {
695 ASSERT(ToRegister(instr->context()).is(esi)); 724 ASSERT(ToRegister(instr->context()).is(esi));
696 ASSERT(ToRegister(instr->result()).is(eax)); 725 ASSERT(ToRegister(instr->result()).is(eax));
697 switch (instr->hydrogen()->major_key()) { 726 switch (instr->hydrogen()->major_key()) {
698 case CodeStub::RegExpConstructResult: { 727 case CodeStub::RegExpConstructResult: {
699 RegExpConstructResultStub stub; 728 RegExpConstructResultStub stub;
700 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 729 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
701 break; 730 break;
702 } 731 }
703 case CodeStub::RegExpExec: { 732 case CodeStub::RegExpExec: {
704 RegExpExecStub stub; 733 RegExpExecStub stub;
705 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 734 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
706 break; 735 break;
707 } 736 }
708 case CodeStub::SubString: { 737 case CodeStub::SubString: {
709 SubStringStub stub; 738 SubStringStub stub;
710 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 739 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
711 break; 740 break;
712 } 741 }
713 case CodeStub::NumberToString: { 742 case CodeStub::NumberToString: {
714 NumberToStringStub stub; 743 NumberToStringStub stub;
715 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 744 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
716 break; 745 break;
717 } 746 }
718 case CodeStub::StringAdd: { 747 case CodeStub::StringAdd: {
719 StringAddStub stub(NO_STRING_ADD_FLAGS); 748 StringAddStub stub(NO_STRING_ADD_FLAGS);
720 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 749 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
721 break; 750 break;
722 } 751 }
723 case CodeStub::StringCompare: { 752 case CodeStub::StringCompare: {
724 StringCompareStub stub; 753 StringCompareStub stub;
725 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 754 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
726 break; 755 break;
727 } 756 }
728 case CodeStub::TranscendentalCache: { 757 case CodeStub::TranscendentalCache: {
729 TranscendentalCacheStub stub(instr->transcendental_type(), 758 TranscendentalCacheStub stub(instr->transcendental_type(),
730 TranscendentalCacheStub::TAGGED); 759 TranscendentalCacheStub::TAGGED);
731 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 760 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
732 break; 761 break;
733 } 762 }
734 default: 763 default:
735 UNREACHABLE(); 764 UNREACHABLE();
736 } 765 }
737 } 766 }
738 767
739 768
740 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { 769 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
741 // Nothing to do. 770 // Nothing to do.
(...skipping 394 matching lines...)
1136 1165
1137 void LCodeGen::DoBitNotI(LBitNotI* instr) { 1166 void LCodeGen::DoBitNotI(LBitNotI* instr) {
1138 LOperand* input = instr->InputAt(0); 1167 LOperand* input = instr->InputAt(0);
1139 ASSERT(input->Equals(instr->result())); 1168 ASSERT(input->Equals(instr->result()));
1140 __ not_(ToRegister(input)); 1169 __ not_(ToRegister(input));
1141 } 1170 }
1142 1171
1143 1172
1144 void LCodeGen::DoThrow(LThrow* instr) { 1173 void LCodeGen::DoThrow(LThrow* instr) {
1145 __ push(ToOperand(instr->InputAt(0))); 1174 __ push(ToOperand(instr->InputAt(0)));
1146 CallRuntime(Runtime::kThrow, 1, instr, false); 1175 CallRuntime(Runtime::kThrow, 1, instr, RESTORE_CONTEXT);
1147 1176
1148 if (FLAG_debug_code) { 1177 if (FLAG_debug_code) {
1149 Comment("Unreachable code."); 1178 Comment("Unreachable code.");
1150 __ int3(); 1179 __ int3();
1151 } 1180 }
1152 } 1181 }
1153 1182
1154 1183
1155 void LCodeGen::DoAddI(LAddI* instr) { 1184 void LCodeGen::DoAddI(LAddI* instr) {
1156 LOperand* left = instr->InputAt(0); 1185 LOperand* left = instr->InputAt(0);
(...skipping 54 matching lines...)
1211 } 1240 }
1212 } 1241 }
1213 1242
1214 1243
1215 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { 1244 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1216 ASSERT(ToRegister(instr->InputAt(0)).is(edx)); 1245 ASSERT(ToRegister(instr->InputAt(0)).is(edx));
1217 ASSERT(ToRegister(instr->InputAt(1)).is(eax)); 1246 ASSERT(ToRegister(instr->InputAt(1)).is(eax));
1218 ASSERT(ToRegister(instr->result()).is(eax)); 1247 ASSERT(ToRegister(instr->result()).is(eax));
1219 1248
1220 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE); 1249 TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
1221 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 1250 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
1222 } 1251 }
1223 1252
1224 1253
1225 int LCodeGen::GetNextEmittedBlock(int block) { 1254 int LCodeGen::GetNextEmittedBlock(int block) {
1226 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { 1255 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1227 LLabel* label = chunk_->GetLabel(i); 1256 LLabel* label = chunk_->GetLabel(i);
1228 if (!label->HasReplacement()) return i; 1257 if (!label->HasReplacement()) return i;
1229 } 1258 }
1230 return -1; 1259 return -1;
1231 } 1260 }
(...skipping 91 matching lines...)
1323 __ jmp(deferred_stack_check->entry()); 1352 __ jmp(deferred_stack_check->entry());
1324 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); 1353 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
1325 } else { 1354 } else {
1326 __ jmp(chunk_->GetAssemblyLabel(block)); 1355 __ jmp(chunk_->GetAssemblyLabel(block));
1327 } 1356 }
1328 } 1357 }
1329 } 1358 }
1330 1359
1331 1360
1332 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { 1361 void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
1333 __ pushad(); 1362 PushSafepointRegistersScope scope(this);
1334 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 1363 CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
1335 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
1336 RecordSafepointWithRegisters(
1337 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
1338 __ popad();
1339 } 1364 }
1340 1365
1341 void LCodeGen::DoGoto(LGoto* instr) { 1366 void LCodeGen::DoGoto(LGoto* instr) {
1342 class DeferredStackCheck: public LDeferredCode { 1367 class DeferredStackCheck: public LDeferredCode {
1343 public: 1368 public:
1344 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) 1369 DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
1345 : LDeferredCode(codegen), instr_(instr) { } 1370 : LDeferredCode(codegen), instr_(instr) { }
1346 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } 1371 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
1347 private: 1372 private:
1348 LGoto* instr_; 1373 LGoto* instr_;
(...skipping 481 matching lines...)
1830 1855
1831 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); 1856 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1832 EmitBranch(true_block, false_block, equal); 1857 EmitBranch(true_block, false_block, equal);
1833 } 1858 }
1834 1859
1835 1860
1836 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { 1861 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1837 // Object and function are in fixed registers defined by the stub. 1862 // Object and function are in fixed registers defined by the stub.
1838 ASSERT(ToRegister(instr->context()).is(esi)); 1863 ASSERT(ToRegister(instr->context()).is(esi));
1839 InstanceofStub stub(InstanceofStub::kArgsInRegisters); 1864 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1840 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1865 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
1841 1866
1842 NearLabel true_value, done; 1867 NearLabel true_value, done;
1843 __ test(eax, Operand(eax)); 1868 __ test(eax, Operand(eax));
1844 __ j(zero, &true_value); 1869 __ j(zero, &true_value);
1845 __ mov(ToRegister(instr->result()), factory()->false_value()); 1870 __ mov(ToRegister(instr->result()), factory()->false_value());
1846 __ jmp(&done); 1871 __ jmp(&done);
1847 __ bind(&true_value); 1872 __ bind(&true_value);
1848 __ mov(ToRegister(instr->result()), factory()->true_value()); 1873 __ mov(ToRegister(instr->result()), factory()->true_value());
1849 __ bind(&done); 1874 __ bind(&done);
1850 } 1875 }
1851 1876
1852 1877
1853 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { 1878 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1854 ASSERT(ToRegister(instr->context()).is(esi)); 1879 ASSERT(ToRegister(instr->context()).is(esi));
1855 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1880 int true_block = chunk_->LookupDestination(instr->true_block_id());
1856 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1881 int false_block = chunk_->LookupDestination(instr->false_block_id());
1857 1882
1858 InstanceofStub stub(InstanceofStub::kArgsInRegisters); 1883 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1859 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1884 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
1860 __ test(eax, Operand(eax)); 1885 __ test(eax, Operand(eax));
1861 EmitBranch(true_block, false_block, zero); 1886 EmitBranch(true_block, false_block, zero);
1862 } 1887 }
1863 1888
1864 1889
1865 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { 1890 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1866 class DeferredInstanceOfKnownGlobal: public LDeferredCode { 1891 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1867 public: 1892 public:
1868 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, 1893 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1869 LInstanceOfKnownGlobal* instr) 1894 LInstanceOfKnownGlobal* instr)
(...skipping 51 matching lines...)
1921 1946
1922 // Here result has either true or false. Deferred code also produces true or 1947 // Here result has either true or false. Deferred code also produces true or
1923 // false object. 1948 // false object.
1924 __ bind(deferred->exit()); 1949 __ bind(deferred->exit());
1925 __ bind(&done); 1950 __ bind(&done);
1926 } 1951 }
1927 1952
1928 1953
1929 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, 1954 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1930 Label* map_check) { 1955 Label* map_check) {
1931 __ PushSafepointRegisters(); 1956 PushSafepointRegistersScope scope(this);
1932 1957
1933 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; 1958 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
1934 flags = static_cast<InstanceofStub::Flags>( 1959 flags = static_cast<InstanceofStub::Flags>(
1935 flags | InstanceofStub::kArgsInRegisters); 1960 flags | InstanceofStub::kArgsInRegisters);
1936 flags = static_cast<InstanceofStub::Flags>( 1961 flags = static_cast<InstanceofStub::Flags>(
1937 flags | InstanceofStub::kCallSiteInlineCheck); 1962 flags | InstanceofStub::kCallSiteInlineCheck);
1938 flags = static_cast<InstanceofStub::Flags>( 1963 flags = static_cast<InstanceofStub::Flags>(
1939 flags | InstanceofStub::kReturnTrueFalseObject); 1964 flags | InstanceofStub::kReturnTrueFalseObject);
1940 InstanceofStub stub(flags); 1965 InstanceofStub stub(flags);
1941 1966
1942 // Get the temp register reserved by the instruction. This needs to be edi as 1967 // Get the temp register reserved by the instruction. This needs to be a
1943 // its slot of the pushing of safepoint registers is used to communicate the 1968 // register which is pushed last by PushSafepointRegisters as top of the
1944 // offset to the location of the map check. 1969 // stack is used to pass the offset to the location of the map check to
1970 // the stub.
1945 Register temp = ToRegister(instr->TempAt(0)); 1971 Register temp = ToRegister(instr->TempAt(0));
1946 ASSERT(temp.is(edi)); 1972 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
1947 __ mov(InstanceofStub::right(), Immediate(instr->function())); 1973 __ mov(InstanceofStub::right(), Immediate(instr->function()));
1948 static const int kAdditionalDelta = 16; 1974 static const int kAdditionalDelta = 16;
1949 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; 1975 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
1950 __ mov(temp, Immediate(delta)); 1976 __ mov(temp, Immediate(delta));
1951 __ StoreToSafepointRegisterSlot(temp, temp); 1977 __ StoreToSafepointRegisterSlot(temp, temp);
1952 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 1978 CallCodeGeneric(stub.GetCode(),
1979 RelocInfo::CODE_TARGET,
1980 instr,
1981 RESTORE_CONTEXT,
1982 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
1953 // Put the result value into the eax slot and restore all registers. 1983 // Put the result value into the eax slot and restore all registers.
1954 __ StoreToSafepointRegisterSlot(eax, eax); 1984 __ StoreToSafepointRegisterSlot(eax, eax);
1955 __ PopSafepointRegisters();
1956 } 1985 }
1957 1986
1958 1987
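
The replaced assert here is the core of the port: on ia32 the safepoint registers are pushed with pushad, which pushes eax, ecx, edx, ebx, esp, ebp, esi and edi in that order, so edi ends up at safepoint stack slot 0. ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0) therefore states the same constraint as the old ASSERT(temp.is(edi)), but without naming an ia32 register, which is what lets the ARM and x64 ports in this patch set keep the same code shape. A sketch of the index mapping the assert relies on (illustrative only; the real helper lives in the ia32 MacroAssembler):

  // The register pushed last by pushad sits at the lowest address, i.e. slot 0.
  static int SafepointRegisterStackIndex(int reg_code) {
    return kNumSafepointRegisters - reg_code - 1;  // edi has code 7, so 8 - 7 - 1 == 0
  }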
1959 static Condition ComputeCompareCondition(Token::Value op) { 1988 static Condition ComputeCompareCondition(Token::Value op) {
1960 switch (op) { 1989 switch (op) {
1961 case Token::EQ_STRICT: 1990 case Token::EQ_STRICT:
1962 case Token::EQ: 1991 case Token::EQ:
1963 return equal; 1992 return equal;
1964 case Token::LT: 1993 case Token::LT:
1965 return less; 1994 return less;
1966 case Token::GT: 1995 case Token::GT:
1967 return greater; 1996 return greater;
1968 case Token::LTE: 1997 case Token::LTE:
1969 return less_equal; 1998 return less_equal;
1970 case Token::GTE: 1999 case Token::GTE:
1971 return greater_equal; 2000 return greater_equal;
1972 default: 2001 default:
1973 UNREACHABLE(); 2002 UNREACHABLE();
1974 return no_condition; 2003 return no_condition;
1975 } 2004 }
1976 } 2005 }
1977 2006
1978 2007
1979 void LCodeGen::DoCmpT(LCmpT* instr) { 2008 void LCodeGen::DoCmpT(LCmpT* instr) {
1980 Token::Value op = instr->op(); 2009 Token::Value op = instr->op();
1981 2010
1982 Handle<Code> ic = CompareIC::GetUninitialized(op); 2011 Handle<Code> ic = CompareIC::GetUninitialized(op);
1983 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); 2012 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
1984 2013
1985 Condition condition = ComputeCompareCondition(op); 2014 Condition condition = ComputeCompareCondition(op);
1986 if (op == Token::GT || op == Token::LTE) { 2015 if (op == Token::GT || op == Token::LTE) {
1987 condition = ReverseCondition(condition); 2016 condition = ReverseCondition(condition);
1988 } 2017 }
1989 NearLabel true_value, done; 2018 NearLabel true_value, done;
1990 __ test(eax, Operand(eax)); 2019 __ test(eax, Operand(eax));
1991 __ j(condition, &true_value); 2020 __ j(condition, &true_value);
1992 __ mov(ToRegister(instr->result()), factory()->false_value()); 2021 __ mov(ToRegister(instr->result()), factory()->false_value());
1993 __ jmp(&done); 2022 __ jmp(&done);
1994 __ bind(&true_value); 2023 __ bind(&true_value);
1995 __ mov(ToRegister(instr->result()), factory()->true_value()); 2024 __ mov(ToRegister(instr->result()), factory()->true_value());
1996 __ bind(&done); 2025 __ bind(&done);
1997 } 2026 }
1998 2027
1999 2028
2000 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { 2029 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
2001 Token::Value op = instr->op(); 2030 Token::Value op = instr->op();
2002 int true_block = chunk_->LookupDestination(instr->true_block_id()); 2031 int true_block = chunk_->LookupDestination(instr->true_block_id());
2003 int false_block = chunk_->LookupDestination(instr->false_block_id()); 2032 int false_block = chunk_->LookupDestination(instr->false_block_id());
2004 2033
2005 Handle<Code> ic = CompareIC::GetUninitialized(op); 2034 Handle<Code> ic = CompareIC::GetUninitialized(op);
2006 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); 2035 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
2007 2036
2008 // The compare stub expects compare condition and the input operands 2037 // The compare stub expects compare condition and the input operands
2009 // reversed for GT and LTE. 2038 // reversed for GT and LTE.
2010 Condition condition = ComputeCompareCondition(op); 2039 Condition condition = ComputeCompareCondition(op);
2011 if (op == Token::GT || op == Token::LTE) { 2040 if (op == Token::GT || op == Token::LTE) {
2012 condition = ReverseCondition(condition); 2041 condition = ReverseCondition(condition);
2013 } 2042 }
2014 __ test(eax, Operand(eax)); 2043 __ test(eax, Operand(eax));
2015 EmitBranch(true_block, false_block, condition); 2044 EmitBranch(true_block, false_block, condition);
2016 } 2045 }
(...skipping 27 matching lines...)
2044 2073
2045 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { 2074 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2046 ASSERT(ToRegister(instr->context()).is(esi)); 2075 ASSERT(ToRegister(instr->context()).is(esi));
2047 ASSERT(ToRegister(instr->global_object()).is(eax)); 2076 ASSERT(ToRegister(instr->global_object()).is(eax));
2048 ASSERT(ToRegister(instr->result()).is(eax)); 2077 ASSERT(ToRegister(instr->result()).is(eax));
2049 2078
2050 __ mov(ecx, instr->name()); 2079 __ mov(ecx, instr->name());
2051 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : 2080 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
2052 RelocInfo::CODE_TARGET_CONTEXT; 2081 RelocInfo::CODE_TARGET_CONTEXT;
2053 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2082 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2054 CallCode(ic, mode, instr); 2083 CallCode(ic, mode, instr, CONTEXT_ADJUSTED);
2055 } 2084 }
2056 2085
2057 2086
2058 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { 2087 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
2059 Register value = ToRegister(instr->InputAt(0)); 2088 Register value = ToRegister(instr->InputAt(0));
2060 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell()); 2089 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell());
2061 2090
2062 // If the cell we are storing to contains the hole it could have 2091 // If the cell we are storing to contains the hole it could have
2063 // been deleted from the property dictionary. In that case, we need 2092 // been deleted from the property dictionary. In that case, we need
2064 // to update the property details in the property dictionary to mark 2093 // to update the property details in the property dictionary to mark
2065 // it as no longer deleted. We deoptimize in that case. 2094 // it as no longer deleted. We deoptimize in that case.
2066 if (instr->hydrogen()->check_hole_value()) { 2095 if (instr->hydrogen()->check_hole_value()) {
2067 __ cmp(cell_operand, factory()->the_hole_value()); 2096 __ cmp(cell_operand, factory()->the_hole_value());
2068 DeoptimizeIf(equal, instr->environment()); 2097 DeoptimizeIf(equal, instr->environment());
2069 } 2098 }
2070 2099
2071 // Store the value. 2100 // Store the value.
2072 __ mov(cell_operand, value); 2101 __ mov(cell_operand, value);
2073 } 2102 }
2074 2103
2075 2104
2076 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { 2105 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2077 ASSERT(ToRegister(instr->context()).is(esi)); 2106 ASSERT(ToRegister(instr->context()).is(esi));
2078 ASSERT(ToRegister(instr->global_object()).is(edx)); 2107 ASSERT(ToRegister(instr->global_object()).is(edx));
2079 ASSERT(ToRegister(instr->value()).is(eax)); 2108 ASSERT(ToRegister(instr->value()).is(eax));
2080 2109
2081 __ mov(ecx, instr->name()); 2110 __ mov(ecx, instr->name());
2082 Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize(); 2111 Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
2083 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); 2112 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr, CONTEXT_ADJUSTED);
2084 } 2113 }
2085 2114
2086 2115
2087 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { 2116 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2088 Register context = ToRegister(instr->context()); 2117 Register context = ToRegister(instr->context());
2089 Register result = ToRegister(instr->result()); 2118 Register result = ToRegister(instr->result());
2090 __ mov(result, ContextOperand(context, instr->slot_index())); 2119 __ mov(result, ContextOperand(context, instr->slot_index()));
2091 } 2120 }
2092 2121
2093 2122
(...skipping 45 matching lines...)
2139 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { 2168 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
2140 Register object = ToRegister(instr->object()); 2169 Register object = ToRegister(instr->object());
2141 Register result = ToRegister(instr->result()); 2170 Register result = ToRegister(instr->result());
2142 2171
2143 int map_count = instr->hydrogen()->types()->length(); 2172 int map_count = instr->hydrogen()->types()->length();
2144 Handle<String> name = instr->hydrogen()->name(); 2173 Handle<String> name = instr->hydrogen()->name();
2145 if (map_count == 0) { 2174 if (map_count == 0) {
2146 ASSERT(instr->hydrogen()->need_generic()); 2175 ASSERT(instr->hydrogen()->need_generic());
2147 __ mov(ecx, name); 2176 __ mov(ecx, name);
2148 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2177 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2149 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); 2178 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
2150 } else { 2179 } else {
2151 NearLabel done; 2180 NearLabel done;
2152 for (int i = 0; i < map_count - 1; ++i) { 2181 for (int i = 0; i < map_count - 1; ++i) {
2153 Handle<Map> map = instr->hydrogen()->types()->at(i); 2182 Handle<Map> map = instr->hydrogen()->types()->at(i);
2154 NearLabel next; 2183 NearLabel next;
2155 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); 2184 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
2156 __ j(not_equal, &next); 2185 __ j(not_equal, &next);
2157 EmitLoadField(result, object, map, name); 2186 EmitLoadField(result, object, map, name);
2158 __ jmp(&done); 2187 __ jmp(&done);
2159 __ bind(&next); 2188 __ bind(&next);
2160 } 2189 }
2161 Handle<Map> map = instr->hydrogen()->types()->last(); 2190 Handle<Map> map = instr->hydrogen()->types()->last();
2162 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); 2191 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
2163 if (instr->hydrogen()->need_generic()) { 2192 if (instr->hydrogen()->need_generic()) {
2164 NearLabel generic; 2193 NearLabel generic;
2165 __ j(not_equal, &generic); 2194 __ j(not_equal, &generic);
2166 EmitLoadField(result, object, map, name); 2195 EmitLoadField(result, object, map, name);
2167 __ jmp(&done); 2196 __ jmp(&done);
2168 __ bind(&generic); 2197 __ bind(&generic);
2169 __ mov(ecx, name); 2198 __ mov(ecx, name);
2170 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2199 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2171 CallCode(ic, RelocInfo::CODE_TARGET, instr, false); 2200 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
2172 } else { 2201 } else {
2173 DeoptimizeIf(not_equal, instr->environment()); 2202 DeoptimizeIf(not_equal, instr->environment());
2174 EmitLoadField(result, object, map, name); 2203 EmitLoadField(result, object, map, name);
2175 } 2204 }
2176 __ bind(&done); 2205 __ bind(&done);
2177 } 2206 }
2178 } 2207 }
2179 2208
2180 2209
2181 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { 2210 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2182 ASSERT(ToRegister(instr->context()).is(esi)); 2211 ASSERT(ToRegister(instr->context()).is(esi));
2183 ASSERT(ToRegister(instr->object()).is(eax)); 2212 ASSERT(ToRegister(instr->object()).is(eax));
2184 ASSERT(ToRegister(instr->result()).is(eax)); 2213 ASSERT(ToRegister(instr->result()).is(eax));
2185 2214
2186 __ mov(ecx, instr->name()); 2215 __ mov(ecx, instr->name());
2187 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2216 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2188 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2217 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2189 } 2218 }
2190 2219
2191 2220
2192 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { 2221 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2193 Register function = ToRegister(instr->function()); 2222 Register function = ToRegister(instr->function());
2194 Register temp = ToRegister(instr->TempAt(0)); 2223 Register temp = ToRegister(instr->TempAt(0));
2195 Register result = ToRegister(instr->result()); 2224 Register result = ToRegister(instr->result());
2196 2225
2197 // Check that the function really is a function. 2226 // Check that the function really is a function.
2198 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); 2227 __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
(...skipping 142 matching lines...)
2341 } 2370 }
2342 } 2371 }
2343 2372
2344 2373
2345 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { 2374 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2346 ASSERT(ToRegister(instr->context()).is(esi)); 2375 ASSERT(ToRegister(instr->context()).is(esi));
2347 ASSERT(ToRegister(instr->object()).is(edx)); 2376 ASSERT(ToRegister(instr->object()).is(edx));
2348 ASSERT(ToRegister(instr->key()).is(eax)); 2377 ASSERT(ToRegister(instr->key()).is(eax));
2349 2378
2350 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 2379 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2351 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2380 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2352 } 2381 }
2353 2382
2354 2383
2355 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 2384 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2356 Register result = ToRegister(instr->result()); 2385 Register result = ToRegister(instr->result());
2357 2386
2358 // Check for arguments adapter frame. 2387 // Check for arguments adapter frame.
2359 NearLabel done, adapted; 2388 NearLabel done, adapted;
2360 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); 2389 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2361 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset)); 2390 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
(...skipping 167 matching lines...)
2529 RecordPosition(pointers->position()); 2558 RecordPosition(pointers->position());
2530 2559
2531 // Invoke function. 2560 // Invoke function.
2532 if (*function == *info()->closure()) { 2561 if (*function == *info()->closure()) {
2533 __ CallSelf(); 2562 __ CallSelf();
2534 } else { 2563 } else {
2535 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); 2564 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
2536 } 2565 }
2537 2566
2538 // Setup deoptimization. 2567 // Setup deoptimization.
2539 RegisterLazyDeoptimization(instr); 2568 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
2540 } 2569 }
2541 2570
2542 2571
2543 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { 2572 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2544 ASSERT(ToRegister(instr->result()).is(eax)); 2573 ASSERT(ToRegister(instr->result()).is(eax));
2545 __ mov(edi, instr->function()); 2574 __ mov(edi, instr->function());
2546 CallKnownFunction(instr->function(), instr->arity(), instr); 2575 CallKnownFunction(instr->function(), instr->arity(), instr);
2547 } 2576 }
2548 2577
2549 2578
2550 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { 2579 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2551 Register input_reg = ToRegister(instr->InputAt(0)); 2580 Register input_reg = ToRegister(instr->InputAt(0));
2552 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 2581 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2553 factory()->heap_number_map()); 2582 factory()->heap_number_map());
2554 DeoptimizeIf(not_equal, instr->environment()); 2583 DeoptimizeIf(not_equal, instr->environment());
2555 2584
2556 Label done; 2585 Label done;
2557 Register tmp = input_reg.is(eax) ? ecx : eax; 2586 Register tmp = input_reg.is(eax) ? ecx : eax;
2558 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx; 2587 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
2559 2588
2560 // Preserve the value of all registers. 2589 // Preserve the value of all registers.
2561 __ PushSafepointRegisters(); 2590 PushSafepointRegistersScope scope(this);
2562 2591
2563 Label negative; 2592 Label negative;
2564 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); 2593 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2565 // Check the sign of the argument. If the argument is positive, just 2594 // Check the sign of the argument. If the argument is positive, just
2566 // return it. We do not need to patch the stack since |input| and 2595 // return it. We do not need to patch the stack since |input| and
2567 // |result| are the same register and |input| will be restored 2596 // |result| are the same register and |input| will be restored
2568 // unchanged by popping safepoint registers. 2597 // unchanged by popping safepoint registers.
2569 __ test(tmp, Immediate(HeapNumber::kSignMask)); 2598 __ test(tmp, Immediate(HeapNumber::kSignMask));
2570 __ j(not_zero, &negative); 2599 __ j(not_zero, &negative);
2571 __ jmp(&done); 2600 __ jmp(&done);
2572 2601
2573 __ bind(&negative); 2602 __ bind(&negative);
2574 2603
2575 Label allocated, slow; 2604 Label allocated, slow;
2576 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); 2605 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
2577 __ jmp(&allocated); 2606 __ jmp(&allocated);
2578 2607
2579 // Slow case: Call the runtime system to do the number allocation. 2608 // Slow case: Call the runtime system to do the number allocation.
2580 __ bind(&slow); 2609 __ bind(&slow);
2581 2610
2582 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 2611 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
2583 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); 2612
2584 RecordSafepointWithRegisters(
2585 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2586 // Set the pointer to the new heap number in tmp. 2613 // Set the pointer to the new heap number in tmp.
2587 if (!tmp.is(eax)) __ mov(tmp, eax); 2614 if (!tmp.is(eax)) __ mov(tmp, eax);
2588 2615
2589 // Restore input_reg after call to runtime. 2616 // Restore input_reg after call to runtime.
2590 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); 2617 __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
2591 2618
2592 __ bind(&allocated); 2619 __ bind(&allocated);
2593 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset)); 2620 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2594 __ and_(tmp2, ~HeapNumber::kSignMask); 2621 __ and_(tmp2, ~HeapNumber::kSignMask);
2595 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2); 2622 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
2596 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); 2623 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
2597 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2); 2624 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
2598 __ StoreToSafepointRegisterSlot(input_reg, tmp); 2625 __ StoreToSafepointRegisterSlot(input_reg, tmp);
2599 2626
2600 __ bind(&done); 2627 __ bind(&done);
2601 __ PopSafepointRegisters();
2602 } 2628 }
2603 2629
2604 2630
2605 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) { 2631 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2606 Register input_reg = ToRegister(instr->InputAt(0)); 2632 Register input_reg = ToRegister(instr->InputAt(0));
2607 __ test(input_reg, Operand(input_reg)); 2633 __ test(input_reg, Operand(input_reg));
2608 Label is_positive; 2634 Label is_positive;
2609 __ j(not_sign, &is_positive); 2635 __ j(not_sign, &is_positive);
2610 __ neg(input_reg); 2636 __ neg(input_reg);
2611 __ test(input_reg, Operand(input_reg)); 2637 __ test(input_reg, Operand(input_reg));
(...skipping 201 matching lines...)
2813 __ movdbl(input_reg, Operand(esp, 0)); 2839 __ movdbl(input_reg, Operand(esp, 0));
2814 __ add(Operand(esp), Immediate(kDoubleSize)); 2840 __ add(Operand(esp), Immediate(kDoubleSize));
2815 __ bind(&done); 2841 __ bind(&done);
2816 } 2842 }
2817 2843
2818 2844
2819 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { 2845 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2820 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); 2846 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2821 TranscendentalCacheStub stub(TranscendentalCache::COS, 2847 TranscendentalCacheStub stub(TranscendentalCache::COS,
2822 TranscendentalCacheStub::UNTAGGED); 2848 TranscendentalCacheStub::UNTAGGED);
2823 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 2849 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
2824 } 2850 }
2825 2851
2826 2852
2827 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { 2853 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2828 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); 2854 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2829 TranscendentalCacheStub stub(TranscendentalCache::SIN, 2855 TranscendentalCacheStub stub(TranscendentalCache::SIN,
2830 TranscendentalCacheStub::UNTAGGED); 2856 TranscendentalCacheStub::UNTAGGED);
2831 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 2857 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
2832 } 2858 }
2833 2859
2834 2860
2835 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { 2861 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2836 switch (instr->op()) { 2862 switch (instr->op()) {
2837 case kMathAbs: 2863 case kMathAbs:
2838 DoMathAbs(instr); 2864 DoMathAbs(instr);
2839 break; 2865 break;
2840 case kMathFloor: 2866 case kMathFloor:
2841 DoMathFloor(instr); 2867 DoMathFloor(instr);
(...skipping 24 matching lines...)
2866 2892
2867 2893
2868 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 2894 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
2869 ASSERT(ToRegister(instr->context()).is(esi)); 2895 ASSERT(ToRegister(instr->context()).is(esi));
2870 ASSERT(ToRegister(instr->key()).is(ecx)); 2896 ASSERT(ToRegister(instr->key()).is(ecx));
2871 ASSERT(ToRegister(instr->result()).is(eax)); 2897 ASSERT(ToRegister(instr->result()).is(eax));
2872 2898
2873 int arity = instr->arity(); 2899 int arity = instr->arity();
2874 Handle<Code> ic = isolate()->stub_cache()-> 2900 Handle<Code> ic = isolate()->stub_cache()->
2875 ComputeKeyedCallInitialize(arity, NOT_IN_LOOP); 2901 ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2876 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2902 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2877 } 2903 }
2878 2904
2879 2905
2880 void LCodeGen::DoCallNamed(LCallNamed* instr) { 2906 void LCodeGen::DoCallNamed(LCallNamed* instr) {
2881 ASSERT(ToRegister(instr->context()).is(esi)); 2907 ASSERT(ToRegister(instr->context()).is(esi));
2882 ASSERT(ToRegister(instr->result()).is(eax)); 2908 ASSERT(ToRegister(instr->result()).is(eax));
2883 2909
2884 int arity = instr->arity(); 2910 int arity = instr->arity();
2885 Handle<Code> ic = isolate()->stub_cache()-> 2911 Handle<Code> ic = isolate()->stub_cache()->
2886 ComputeCallInitialize(arity, NOT_IN_LOOP); 2912 ComputeCallInitialize(arity, NOT_IN_LOOP);
2887 __ mov(ecx, instr->name()); 2913 __ mov(ecx, instr->name());
2888 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2914 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2889 } 2915 }
2890 2916
2891 2917
2892 void LCodeGen::DoCallFunction(LCallFunction* instr) { 2918 void LCodeGen::DoCallFunction(LCallFunction* instr) {
2893 ASSERT(ToRegister(instr->context()).is(esi)); 2919 ASSERT(ToRegister(instr->context()).is(esi));
2894 ASSERT(ToRegister(instr->result()).is(eax)); 2920 ASSERT(ToRegister(instr->result()).is(eax));
2895 2921
2896 int arity = instr->arity(); 2922 int arity = instr->arity();
2897 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); 2923 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2898 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 2924 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2899 __ Drop(1); 2925 __ Drop(1);
2900 } 2926 }
2901 2927
2902 2928
2903 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 2929 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
2904 ASSERT(ToRegister(instr->context()).is(esi)); 2930 ASSERT(ToRegister(instr->context()).is(esi));
2905 ASSERT(ToRegister(instr->result()).is(eax)); 2931 ASSERT(ToRegister(instr->result()).is(eax));
2906 2932
2907 int arity = instr->arity(); 2933 int arity = instr->arity();
2908 Handle<Code> ic = isolate()->stub_cache()-> 2934 Handle<Code> ic = isolate()->stub_cache()->
2909 ComputeCallInitialize(arity, NOT_IN_LOOP); 2935 ComputeCallInitialize(arity, NOT_IN_LOOP);
2910 __ mov(ecx, instr->name()); 2936 __ mov(ecx, instr->name());
2911 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); 2937 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr, CONTEXT_ADJUSTED);
2912 } 2938 }
2913 2939
2914 2940
2915 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { 2941 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2916 ASSERT(ToRegister(instr->result()).is(eax)); 2942 ASSERT(ToRegister(instr->result()).is(eax));
2917 __ mov(edi, instr->target()); 2943 __ mov(edi, instr->target());
2918 CallKnownFunction(instr->target(), instr->arity(), instr); 2944 CallKnownFunction(instr->target(), instr->arity(), instr);
2919 } 2945 }
2920 2946
2921 2947
2922 void LCodeGen::DoCallNew(LCallNew* instr) { 2948 void LCodeGen::DoCallNew(LCallNew* instr) {
2923 ASSERT(ToRegister(instr->context()).is(esi)); 2949 ASSERT(ToRegister(instr->context()).is(esi));
2924 ASSERT(ToRegister(instr->constructor()).is(edi)); 2950 ASSERT(ToRegister(instr->constructor()).is(edi));
2925 ASSERT(ToRegister(instr->result()).is(eax)); 2951 ASSERT(ToRegister(instr->result()).is(eax));
2926 2952
2927 Handle<Code> builtin = isolate()->builtins()->JSConstructCall(); 2953 Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
2928 __ Set(eax, Immediate(instr->arity())); 2954 __ Set(eax, Immediate(instr->arity()));
2929 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); 2955 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr, CONTEXT_ADJUSTED);
2930 } 2956 }
2931 2957
2932 2958
2933 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { 2959 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2934 CallRuntime(instr->function(), instr->arity(), instr, false); 2960 CallRuntime(instr->function(), instr->arity(), instr, RESTORE_CONTEXT);
2935 } 2961 }
2936 2962
2937 2963
2938 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { 2964 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
2939 Register object = ToRegister(instr->object()); 2965 Register object = ToRegister(instr->object());
2940 Register value = ToRegister(instr->value()); 2966 Register value = ToRegister(instr->value());
2941 int offset = instr->offset(); 2967 int offset = instr->offset();
2942 2968
2943 if (!instr->transition().is_null()) { 2969 if (!instr->transition().is_null()) {
2944 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); 2970 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
(...skipping 22 matching lines...)
2967 2993
2968 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 2994 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
2969 ASSERT(ToRegister(instr->context()).is(esi)); 2995 ASSERT(ToRegister(instr->context()).is(esi));
2970 ASSERT(ToRegister(instr->object()).is(edx)); 2996 ASSERT(ToRegister(instr->object()).is(edx));
2971 ASSERT(ToRegister(instr->value()).is(eax)); 2997 ASSERT(ToRegister(instr->value()).is(eax));
2972 2998
2973 __ mov(ecx, instr->name()); 2999 __ mov(ecx, instr->name());
2974 Handle<Code> ic = info_->is_strict() 3000 Handle<Code> ic = info_->is_strict()
2975 ? isolate()->builtins()->StoreIC_Initialize_Strict() 3001 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2976 : isolate()->builtins()->StoreIC_Initialize(); 3002 : isolate()->builtins()->StoreIC_Initialize();
2977 CallCode(ic, RelocInfo::CODE_TARGET, instr); 3003 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
2978 } 3004 }
2979 3005
2980 3006
2981 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { 3007 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
2982 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); 3008 __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
2983 DeoptimizeIf(above_equal, instr->environment()); 3009 DeoptimizeIf(above_equal, instr->environment());
2984 } 3010 }
2985 3011
2986 3012
2987 void LCodeGen::DoStoreKeyedSpecializedArrayElement( 3013 void LCodeGen::DoStoreKeyedSpecializedArrayElement(
(...skipping 79 matching lines...)
3067 3093
3068 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 3094 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3069 ASSERT(ToRegister(instr->context()).is(esi)); 3095 ASSERT(ToRegister(instr->context()).is(esi));
3070 ASSERT(ToRegister(instr->object()).is(edx)); 3096 ASSERT(ToRegister(instr->object()).is(edx));
3071 ASSERT(ToRegister(instr->key()).is(ecx)); 3097 ASSERT(ToRegister(instr->key()).is(ecx));
3072 ASSERT(ToRegister(instr->value()).is(eax)); 3098 ASSERT(ToRegister(instr->value()).is(eax));
3073 3099
3074 Handle<Code> ic = info_->is_strict() 3100 Handle<Code> ic = info_->is_strict()
3075 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() 3101 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3076 : isolate()->builtins()->KeyedStoreIC_Initialize(); 3102 : isolate()->builtins()->KeyedStoreIC_Initialize();
3077 CallCode(ic, RelocInfo::CODE_TARGET, instr); 3103 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
3078 } 3104 }
3079 3105
3080 3106
3081 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { 3107 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3082 class DeferredStringCharCodeAt: public LDeferredCode { 3108 class DeferredStringCharCodeAt: public LDeferredCode {
3083 public: 3109 public:
3084 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) 3110 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3085 : LDeferredCode(codegen), instr_(instr) { } 3111 : LDeferredCode(codegen), instr_(instr) { }
3086 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } 3112 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3087 private: 3113 private:
(...skipping 97 matching lines...)
3185 3211
3186 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) { 3212 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3187 Register string = ToRegister(instr->string()); 3213 Register string = ToRegister(instr->string());
3188 Register result = ToRegister(instr->result()); 3214 Register result = ToRegister(instr->result());
3189 3215
3190 // TODO(3095996): Get rid of this. For now, we need to make the 3216 // TODO(3095996): Get rid of this. For now, we need to make the
3191 // result register contain a valid pointer because it is already 3217 // result register contain a valid pointer because it is already
3192 // contained in the register pointer map. 3218 // contained in the register pointer map.
3193 __ Set(result, Immediate(0)); 3219 __ Set(result, Immediate(0));
3194 3220
3195 __ PushSafepointRegisters(); 3221 PushSafepointRegistersScope scope(this);
3196 __ push(string); 3222 __ push(string);
3197 // Push the index as a smi. This is safe because of the checks in 3223 // Push the index as a smi. This is safe because of the checks in
3198 // DoStringCharCodeAt above. 3224 // DoStringCharCodeAt above.
3199 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); 3225 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
3200 if (instr->index()->IsConstantOperand()) { 3226 if (instr->index()->IsConstantOperand()) {
3201 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); 3227 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3202 __ push(Immediate(Smi::FromInt(const_index))); 3228 __ push(Immediate(Smi::FromInt(const_index)));
3203 } else { 3229 } else {
3204 Register index = ToRegister(instr->index()); 3230 Register index = ToRegister(instr->index());
3205 __ SmiTag(index); 3231 __ SmiTag(index);
3206 __ push(index); 3232 __ push(index);
3207 } 3233 }
3208 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 3234 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
3209 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
3210 RecordSafepointWithRegisters(
3211 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
3212 if (FLAG_debug_code) { 3235 if (FLAG_debug_code) {
3213 __ AbortIfNotSmi(eax); 3236 __ AbortIfNotSmi(eax);
3214 } 3237 }
3215 __ SmiUntag(eax); 3238 __ SmiUntag(eax);
3216 __ StoreToSafepointRegisterSlot(result, eax); 3239 __ StoreToSafepointRegisterSlot(result, eax);
3217 __ PopSafepointRegisters();
3218 } 3240 }
3219 3241
3220 3242
3221 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { 3243 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3222 class DeferredStringCharFromCode: public LDeferredCode { 3244 class DeferredStringCharFromCode: public LDeferredCode {
3223 public: 3245 public:
3224 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) 3246 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3225 : LDeferredCode(codegen), instr_(instr) { } 3247 : LDeferredCode(codegen), instr_(instr) { }
3226 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); } 3248 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
3227 private: 3249 private:
(...skipping 22 matching lines...)
3250 3272
3251 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) { 3273 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
3252 Register char_code = ToRegister(instr->char_code()); 3274 Register char_code = ToRegister(instr->char_code());
3253 Register result = ToRegister(instr->result()); 3275 Register result = ToRegister(instr->result());
3254 3276
3255 // TODO(3095996): Get rid of this. For now, we need to make the 3277 // TODO(3095996): Get rid of this. For now, we need to make the
3256 // result register contain a valid pointer because it is already 3278 // result register contain a valid pointer because it is already
3257 // contained in the register pointer map. 3279 // contained in the register pointer map.
3258 __ Set(result, Immediate(0)); 3280 __ Set(result, Immediate(0));
3259 3281
3260 __ PushSafepointRegisters(); 3282 PushSafepointRegistersScope scope(this);
3261 __ SmiTag(char_code); 3283 __ SmiTag(char_code);
3262 __ push(char_code); 3284 __ push(char_code);
3263 __ CallRuntimeSaveDoubles(Runtime::kCharFromCode); 3285 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
3264 RecordSafepointWithRegisters(
3265 instr->pointer_map(), 1, Safepoint::kNoDeoptimizationIndex);
3266 __ StoreToSafepointRegisterSlot(result, eax); 3286 __ StoreToSafepointRegisterSlot(result, eax);
3267 __ PopSafepointRegisters();
3268 } 3287 }
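
The same deferred paths also drop the paired PushSafepointRegisters / PopSafepointRegisters calls in favour of a PushSafepointRegistersScope. An RAII wrapper along these lines would do it; this is a sketch, not necessarily the exact class in the patch:

    class LCodeGen::PushSafepointRegistersScope {
     public:
      explicit PushSafepointRegistersScope(LCodeGen* codegen)
          : codegen_(codegen) {
        codegen_->masm()->PushSafepointRegisters();
      }
      ~PushSafepointRegistersScope() {
        codegen_->masm()->PopSafepointRegisters();
      }
     private:
      LCodeGen* codegen_;
    };

The scope ties the push and its matching pop to the lifetime of the deferred block, so a call site can no longer emit one without the other.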
3269 3288
3270 3289
3271 void LCodeGen::DoStringLength(LStringLength* instr) { 3290 void LCodeGen::DoStringLength(LStringLength* instr) {
3272 Register string = ToRegister(instr->string()); 3291 Register string = ToRegister(instr->string());
3273 Register result = ToRegister(instr->result()); 3292 Register result = ToRegister(instr->result());
3274 __ mov(result, FieldOperand(string, String::kLengthOffset)); 3293 __ mov(result, FieldOperand(string, String::kLengthOffset));
3275 } 3294 }
3276 3295
3277 3296
(...skipping 26 matching lines...)
3304 __ bind(deferred->exit()); 3323 __ bind(deferred->exit());
3305 } 3324 }
3306 3325
3307 3326
3308 void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) { 3327 void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3309 Label slow; 3328 Label slow;
3310 Register reg = ToRegister(instr->InputAt(0)); 3329 Register reg = ToRegister(instr->InputAt(0));
3311 Register tmp = reg.is(eax) ? ecx : eax; 3330 Register tmp = reg.is(eax) ? ecx : eax;
3312 3331
3313 // Preserve the value of all registers. 3332 // Preserve the value of all registers.
3314 __ PushSafepointRegisters(); 3333 PushSafepointRegistersScope scope(this);
3315 3334
3316 // There was overflow, so bits 30 and 31 of the original integer 3335 // There was overflow, so bits 30 and 31 of the original integer
3317 // disagree. Try to allocate a heap number in new space and store 3336 // disagree. Try to allocate a heap number in new space and store
3318 // the value in there. If that fails, call the runtime system. 3337 // the value in there. If that fails, call the runtime system.
3319 NearLabel done; 3338 NearLabel done;
3320 __ SmiUntag(reg); 3339 __ SmiUntag(reg);
3321 __ xor_(reg, 0x80000000); 3340 __ xor_(reg, 0x80000000);
3322 __ cvtsi2sd(xmm0, Operand(reg)); 3341 __ cvtsi2sd(xmm0, Operand(reg));
3323 if (FLAG_inline_new) { 3342 if (FLAG_inline_new) {
3324 __ AllocateHeapNumber(reg, tmp, no_reg, &slow); 3343 __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
3325 __ jmp(&done); 3344 __ jmp(&done);
3326 } 3345 }
3327 3346
3328 // Slow case: Call the runtime system to do the number allocation. 3347 // Slow case: Call the runtime system to do the number allocation.
3329 __ bind(&slow); 3348 __ bind(&slow);
3330 3349
3331 // TODO(3095996): Put a valid pointer value in the stack slot where the result 3350 // TODO(3095996): Put a valid pointer value in the stack slot where the result
3332 // register is stored, as this register is in the pointer map, but contains an 3351 // register is stored, as this register is in the pointer map, but contains an
3333 // integer value. 3352 // integer value.
3334 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); 3353 __ StoreToSafepointRegisterSlot(reg, Immediate(0));
3335 3354
3336 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 3355 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
3337 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3338 RecordSafepointWithRegisters(
3339 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
3340 if (!reg.is(eax)) __ mov(reg, eax); 3356 if (!reg.is(eax)) __ mov(reg, eax);
3341 3357
3342 // Done. Put the value in xmm0 into the value of the allocated heap 3358 // Done. Put the value in xmm0 into the value of the allocated heap
3343 // number. 3359 // number.
3344 __ bind(&done); 3360 __ bind(&done);
3345 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); 3361 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
3346 __ StoreToSafepointRegisterSlot(reg, reg); 3362 __ StoreToSafepointRegisterSlot(reg, reg);
3347 __ PopSafepointRegisters();
3348 } 3363 }
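
On ia32 a smi is the 31-bit value shifted left by one, so the SmiTag in the fast path overflows exactly when bits 30 and 31 of the original integer disagree, as the comment above notes. The deferred code recovers the original value with an arithmetic shift right followed by xor 0x80000000 before converting it to a double. The same arithmetic in standalone, portable C++ (illustrative only; the function name is made up):

    #include <cassert>
    #include <cstdint>

    // Recover the original int32 after a smi tag (value + value) wrapped around.
    int32_t RecoverAfterOverflowedSmiTag(int32_t v) {
      uint32_t tagged = static_cast<uint32_t>(v) << 1;             // SmiTag, wraps.
      uint32_t untagged = (tagged >> 1) | (tagged & 0x80000000u);  // arithmetic shift right by 1.
      return static_cast<int32_t>(untagged ^ 0x80000000u);         // xor_(reg, 0x80000000).
    }

    int main() {
      // Values outside the 31-bit smi range, i.e. exactly the overflow cases.
      assert(RecoverAfterOverflowedSmiTag(0x40000000) == 0x40000000);
      assert(RecoverAfterOverflowedSmiTag(-0x40000001) == -0x40000001);
      assert(RecoverAfterOverflowedSmiTag(INT32_MIN) == INT32_MIN);
      return 0;
    }

The identity only holds when the tag really did overflow, which is the only situation in which this deferred path runs.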
3349 3364
3350 3365
3351 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { 3366 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3352 class DeferredNumberTagD: public LDeferredCode { 3367 class DeferredNumberTagD: public LDeferredCode {
3353 public: 3368 public:
3354 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) 3369 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3355 : LDeferredCode(codegen), instr_(instr) { } 3370 : LDeferredCode(codegen), instr_(instr) { }
3356 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); } 3371 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3357 private: 3372 private:
(...skipping 15 matching lines...)
3373 } 3388 }
3374 3389
3375 3390
3376 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { 3391 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3377 // TODO(3095996): Get rid of this. For now, we need to make the 3392 // TODO(3095996): Get rid of this. For now, we need to make the
3378 // result register contain a valid pointer because it is already 3393 // result register contain a valid pointer because it is already
3379 // contained in the register pointer map. 3394 // contained in the register pointer map.
3380 Register reg = ToRegister(instr->result()); 3395 Register reg = ToRegister(instr->result());
3381 __ Set(reg, Immediate(0)); 3396 __ Set(reg, Immediate(0));
3382 3397
3383 __ PushSafepointRegisters(); 3398 PushSafepointRegistersScope scope(this);
3384 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 3399 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
3385 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3386 RecordSafepointWithRegisters(
3387 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
3388 __ StoreToSafepointRegisterSlot(reg, eax); 3400 __ StoreToSafepointRegisterSlot(reg, eax);
3389 __ PopSafepointRegisters();
3390 } 3401 }
3391 3402
3392 3403
3393 void LCodeGen::DoSmiTag(LSmiTag* instr) { 3404 void LCodeGen::DoSmiTag(LSmiTag* instr) {
3394 LOperand* input = instr->InputAt(0); 3405 LOperand* input = instr->InputAt(0);
3395 ASSERT(input->IsRegister() && input->Equals(instr->result())); 3406 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3396 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); 3407 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3397 __ SmiTag(ToRegister(input)); 3408 __ SmiTag(ToRegister(input));
3398 } 3409 }
3399 3410
(...skipping 394 matching lines...)
3794 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3805 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3795 __ push(Immediate(instr->hydrogen()->constant_elements())); 3806 __ push(Immediate(instr->hydrogen()->constant_elements()));
3796 3807
3797 // Pick the right runtime function or stub to call. 3808 // Pick the right runtime function or stub to call.
3798 int length = instr->hydrogen()->length(); 3809 int length = instr->hydrogen()->length();
3799 if (instr->hydrogen()->IsCopyOnWrite()) { 3810 if (instr->hydrogen()->IsCopyOnWrite()) {
3800 ASSERT(instr->hydrogen()->depth() == 1); 3811 ASSERT(instr->hydrogen()->depth() == 1);
3801 FastCloneShallowArrayStub::Mode mode = 3812 FastCloneShallowArrayStub::Mode mode =
3802 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; 3813 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
3803 FastCloneShallowArrayStub stub(mode, length); 3814 FastCloneShallowArrayStub stub(mode, length);
3804 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 3815 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
3805 } else if (instr->hydrogen()->depth() > 1) { 3816 } else if (instr->hydrogen()->depth() > 1) {
3806 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false); 3817 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, RESTORE_CONTEXT);
3807 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { 3818 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
3808 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false); 3819 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, RESTORE_CONTEXT);
3809 } else { 3820 } else {
3810 FastCloneShallowArrayStub::Mode mode = 3821 FastCloneShallowArrayStub::Mode mode =
3811 FastCloneShallowArrayStub::CLONE_ELEMENTS; 3822 FastCloneShallowArrayStub::CLONE_ELEMENTS;
3812 FastCloneShallowArrayStub stub(mode, length); 3823 FastCloneShallowArrayStub stub(mode, length);
3813 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 3824 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
3814 } 3825 }
3815 } 3826 }
3816 3827
3817 3828
3818 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { 3829 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
3819 ASSERT(ToRegister(instr->context()).is(esi)); 3830 ASSERT(ToRegister(instr->context()).is(esi));
3820 // Setup the parameters to the stub/runtime call. 3831 // Setup the parameters to the stub/runtime call.
3821 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 3832 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3822 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); 3833 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3823 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3834 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3824 __ push(Immediate(instr->hydrogen()->constant_properties())); 3835 __ push(Immediate(instr->hydrogen()->constant_properties()));
3825 int flags = instr->hydrogen()->fast_elements() 3836 int flags = instr->hydrogen()->fast_elements()
3826 ? ObjectLiteral::kFastElements 3837 ? ObjectLiteral::kFastElements
3827 : ObjectLiteral::kNoFlags; 3838 : ObjectLiteral::kNoFlags;
3828 flags |= instr->hydrogen()->has_function() 3839 flags |= instr->hydrogen()->has_function()
3829 ? ObjectLiteral::kHasFunction 3840 ? ObjectLiteral::kHasFunction
3830 : ObjectLiteral::kNoFlags; 3841 : ObjectLiteral::kNoFlags;
3831 __ push(Immediate(Smi::FromInt(flags))); 3842 __ push(Immediate(Smi::FromInt(flags)));
3832 3843
3833 // Pick the right runtime function to call. 3844 // Pick the right runtime function to call.
3834 if (instr->hydrogen()->depth() > 1) { 3845 if (instr->hydrogen()->depth() > 1) {
3835 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); 3846 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr, CONTEXT_ADJUSTED);
3836 } else { 3847 } else {
3837 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); 3848 CallRuntime(Runtime::kCreateObjectLiteralShallow,
3849 4,
3850 instr,
3851 CONTEXT_ADJUSTED);
3838 } 3852 }
3839 } 3853 }
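
Throughout the file the trailing bool that the old call sites pass to CallCode / CallRuntime is replaced by named values, so the two literal generators above now read RESTORE_CONTEXT or CONTEXT_ADJUSTED instead of a bare false or an implicit default. The enum definition is not part of this hunk; a plausible shape, with the intended meaning of each value stated as an assumption in the comments:

    // Presumed definition; it lives in the header, outside this hunk.
    enum ContextMode {
      // esi is assumed to already hold the context this code should run in.
      CONTEXT_ADJUSTED,
      // Reload esi from the frame (StandardFrameConstants::kContextOffset)
      // before making the call.
      RESTORE_CONTEXT
    };

A small readability win: the reader no longer has to remember which sense the old boolean had at each call site.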
3840 3854
3841 3855
3842 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { 3856 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
3843 ASSERT(ToRegister(instr->InputAt(0)).is(eax)); 3857 ASSERT(ToRegister(instr->InputAt(0)).is(eax));
3844 __ push(eax); 3858 __ push(eax);
3845 CallRuntime(Runtime::kToFastProperties, 1, instr); 3859 CallRuntime(Runtime::kToFastProperties, 1, instr, CONTEXT_ADJUSTED);
3846 } 3860 }
3847 3861
3848 3862
3849 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { 3863 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
3850 NearLabel materialized; 3864 NearLabel materialized;
3851 // Registers will be used as follows: 3865 // Registers will be used as follows:
3852 // edi = JS function. 3866 // edi = JS function.
3853 // ecx = literals array. 3867 // ecx = literals array.
3854 // ebx = regexp literal. 3868 // ebx = regexp literal.
3855 // eax = regexp literal clone. 3869 // eax = regexp literal clone.
3856 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 3870 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3857 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset)); 3871 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
3858 int literal_offset = FixedArray::kHeaderSize + 3872 int literal_offset = FixedArray::kHeaderSize +
3859 instr->hydrogen()->literal_index() * kPointerSize; 3873 instr->hydrogen()->literal_index() * kPointerSize;
3860 __ mov(ebx, FieldOperand(ecx, literal_offset)); 3874 __ mov(ebx, FieldOperand(ecx, literal_offset));
3861 __ cmp(ebx, factory()->undefined_value()); 3875 __ cmp(ebx, factory()->undefined_value());
3862 __ j(not_equal, &materialized); 3876 __ j(not_equal, &materialized);
3863 3877
3864 // Create regexp literal using runtime function 3878 // Create regexp literal using runtime function
3865 // Result will be in eax. 3879 // Result will be in eax.
3866 __ push(ecx); 3880 __ push(ecx);
3867 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3881 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3868 __ push(Immediate(instr->hydrogen()->pattern())); 3882 __ push(Immediate(instr->hydrogen()->pattern()));
3869 __ push(Immediate(instr->hydrogen()->flags())); 3883 __ push(Immediate(instr->hydrogen()->flags()));
3870 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false); 3884 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, RESTORE_CONTEXT);
3871 __ mov(ebx, eax); 3885 __ mov(ebx, eax);
3872 3886
3873 __ bind(&materialized); 3887 __ bind(&materialized);
3874 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 3888 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3875 Label allocated, runtime_allocate; 3889 Label allocated, runtime_allocate;
3876 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); 3890 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
3877 __ jmp(&allocated); 3891 __ jmp(&allocated);
3878 3892
3879 __ bind(&runtime_allocate); 3893 __ bind(&runtime_allocate);
3880 __ push(ebx); 3894 __ push(ebx);
3881 __ push(Immediate(Smi::FromInt(size))); 3895 __ push(Immediate(Smi::FromInt(size)));
3882 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false); 3896 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, RESTORE_CONTEXT);
3883 __ pop(ebx); 3897 __ pop(ebx);
3884 3898
3885 __ bind(&allocated); 3899 __ bind(&allocated);
3886 // Copy the content into the newly allocated memory. 3900 // Copy the content into the newly allocated memory.
3887 // (Unroll copy loop once for better throughput). 3901 // (Unroll copy loop once for better throughput).
3888 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { 3902 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3889 __ mov(edx, FieldOperand(ebx, i)); 3903 __ mov(edx, FieldOperand(ebx, i));
3890 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); 3904 __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
3891 __ mov(FieldOperand(eax, i), edx); 3905 __ mov(FieldOperand(eax, i), edx);
3892 __ mov(FieldOperand(eax, i + kPointerSize), ecx); 3906 __ mov(FieldOperand(eax, i + kPointerSize), ecx);
3893 } 3907 }
3894 if ((size % (2 * kPointerSize)) != 0) { 3908 if ((size % (2 * kPointerSize)) != 0) {
3895 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); 3909 __ mov(edx, FieldOperand(ebx, size - kPointerSize));
3896 __ mov(FieldOperand(eax, size - kPointerSize), edx); 3910 __ mov(FieldOperand(eax, size - kPointerSize), edx);
3897 } 3911 }
3898 } 3912 }
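
The clone copy at the end of DoRegExpLiteral moves two pointer-sized words per step, with a one-word fixup when the object size is an odd number of words; because the size is a compile-time constant, the generator loop emits straight-line mov pairs rather than a runtime loop. The same shape in standalone C++ (a hypothetical helper, not V8 code), which makes the boundary condition easy to check:

    #include <cstddef>

    // Copy word_count pointer-sized words, two per iteration, mirroring the
    // unrolled copy above; an odd trailing word is copied separately.
    void CopyWordsUnrolled(void** dst, void** src, size_t word_count) {
      size_t i = 0;
      for (; i + 1 < word_count; i += 2) {
        dst[i] = src[i];
        dst[i + 1] = src[i + 1];
      }
      if (i < word_count) {
        dst[i] = src[i];  // The size % (2 * kPointerSize) != 0 case.
      }
    }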
3899 3913
3900 3914
3901 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { 3915 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
3902 // Use the fast case closure allocation code that allocates in new 3916 // Use the fast case closure allocation code that allocates in new
3903 // space for nested functions that don't need literals cloning. 3917 // space for nested functions that don't need literals cloning.
3904 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); 3918 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
3905 bool pretenure = instr->hydrogen()->pretenure(); 3919 bool pretenure = instr->hydrogen()->pretenure();
3906 if (!pretenure && shared_info->num_literals() == 0) { 3920 if (!pretenure && shared_info->num_literals() == 0) {
3907 FastNewClosureStub stub( 3921 FastNewClosureStub stub(
3908 shared_info->strict_mode() ? kStrictMode : kNonStrictMode); 3922 shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
3909 __ push(Immediate(shared_info)); 3923 __ push(Immediate(shared_info));
3910 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 3924 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
3911 } else { 3925 } else {
3912 __ push(Operand(ebp, StandardFrameConstants::kContextOffset)); 3926 __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
3913 __ push(Immediate(shared_info)); 3927 __ push(Immediate(shared_info));
3914 __ push(Immediate(pretenure 3928 __ push(Immediate(pretenure
3915 ? factory()->true_value() 3929 ? factory()->true_value()
3916 : factory()->false_value())); 3930 : factory()->false_value()));
3917 CallRuntime(Runtime::kNewClosure, 3, instr, false); 3931 CallRuntime(Runtime::kNewClosure, 3, instr, RESTORE_CONTEXT);
3918 } 3932 }
3919 } 3933 }
3920 3934
3921 3935
3922 void LCodeGen::DoTypeof(LTypeof* instr) { 3936 void LCodeGen::DoTypeof(LTypeof* instr) {
3923 LOperand* input = instr->InputAt(0); 3937 LOperand* input = instr->InputAt(0);
3924 if (input->IsConstantOperand()) { 3938 if (input->IsConstantOperand()) {
3925 __ push(ToImmediate(input)); 3939 __ push(ToImmediate(input));
3926 } else { 3940 } else {
3927 __ push(ToOperand(input)); 3941 __ push(ToOperand(input));
3928 } 3942 }
3929 CallRuntime(Runtime::kTypeof, 1, instr, false); 3943 CallRuntime(Runtime::kTypeof, 1, instr, RESTORE_CONTEXT);
3930 } 3944 }
3931 3945
3932 3946
3933 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { 3947 void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
3934 Register input = ToRegister(instr->InputAt(0)); 3948 Register input = ToRegister(instr->InputAt(0));
3935 Register result = ToRegister(instr->result()); 3949 Register result = ToRegister(instr->result());
3936 Label true_label; 3950 Label true_label;
3937 Label false_label; 3951 Label false_label;
3938 NearLabel done; 3952 NearLabel done;
3939 3953
(...skipping 182 matching lines...)
4122 4136
4123 void LCodeGen::DoStackCheck(LStackCheck* instr) { 4137 void LCodeGen::DoStackCheck(LStackCheck* instr) {
4124 // Perform stack overflow check. 4138 // Perform stack overflow check.
4125 NearLabel done; 4139 NearLabel done;
4126 ExternalReference stack_limit = 4140 ExternalReference stack_limit =
4127 ExternalReference::address_of_stack_limit(isolate()); 4141 ExternalReference::address_of_stack_limit(isolate());
4128 __ cmp(esp, Operand::StaticVariable(stack_limit)); 4142 __ cmp(esp, Operand::StaticVariable(stack_limit));
4129 __ j(above_equal, &done); 4143 __ j(above_equal, &done);
4130 4144
4131 StackCheckStub stub; 4145 StackCheckStub stub;
4132 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false); 4146 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
4133 __ bind(&done); 4147 __ bind(&done);
4134 } 4148 }
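
The stack check is a single unsigned compare of esp against the isolate's stack limit, with the StackCheckStub call only on the slow path. The guard shape, reduced to standalone C++ (names invented for illustration; only the comparison mirrors the generated code):

    #include <cstdint>

    // Mirrors "cmp esp, [stack_limit]; j above_equal &done": the slow path is
    // taken only when the stack pointer has dropped below the limit, which the
    // runtime can also force by raising the limit when it wants a callback.
    inline bool NeedsStackCheckSlowPath(uintptr_t stack_pointer,
                                        uintptr_t stack_limit) {
      return stack_pointer < stack_limit;
    }

On the fast path this costs one compare against memory and an untaken branch per check.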
4135 4149
4136 4150
4137 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { 4151 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
4138 // This is a pseudo-instruction that ensures that the environment here is 4152 // This is a pseudo-instruction that ensures that the environment here is
4139 // properly registered for deoptimization and records the assembler's PC 4153 // properly registered for deoptimization and records the assembler's PC
4140 // offset. 4154 // offset.
4141 LEnvironment* environment = instr->environment(); 4155 LEnvironment* environment = instr->environment();
4142 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), 4156 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
4143 instr->SpilledDoubleRegisterArray()); 4157 instr->SpilledDoubleRegisterArray());
4144 4158
4145 // If the environment were already registered, we would have no way of 4159 // If the environment were already registered, we would have no way of
4146 // backpatching it with the spill slot operands. 4160 // backpatching it with the spill slot operands.
4147 ASSERT(!environment->HasBeenRegistered()); 4161 ASSERT(!environment->HasBeenRegistered());
4148 RegisterEnvironmentForDeoptimization(environment); 4162 RegisterEnvironmentForDeoptimization(environment);
4149 ASSERT(osr_pc_offset_ == -1); 4163 ASSERT(osr_pc_offset_ == -1);
4150 osr_pc_offset_ = masm()->pc_offset(); 4164 osr_pc_offset_ = masm()->pc_offset();
4151 } 4165 }
4152 4166
4153 4167
4154 #undef __ 4168 #undef __
4155 4169
4156 } } // namespace v8::internal 4170 } } // namespace v8::internal
4157 4171
4158 #endif // V8_TARGET_ARCH_IA32 4172 #endif // V8_TARGET_ARCH_IA32