Chromium Code Reviews

Side by Side Diff: src/ia32/lithium-codegen-ia32.cc

Issue 7132002: Remove RESTORE_CONTEXT flag from ia32 crankshaft codegen. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: last linting changes, created 9 years, 5 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 410 matching lines...)
421 translation->StoreLiteral(src_index); 421 translation->StoreLiteral(src_index);
422 } else { 422 } else {
423 UNREACHABLE(); 423 UNREACHABLE();
424 } 424 }
425 } 425 }
426 426
427 427
428 void LCodeGen::CallCodeGeneric(Handle<Code> code, 428 void LCodeGen::CallCodeGeneric(Handle<Code> code,
429 RelocInfo::Mode mode, 429 RelocInfo::Mode mode,
430 LInstruction* instr, 430 LInstruction* instr,
431 ContextMode context_mode,
432 SafepointMode safepoint_mode) { 431 SafepointMode safepoint_mode) {
433 ASSERT(instr != NULL); 432 ASSERT(instr != NULL);
434 LPointerMap* pointers = instr->pointer_map(); 433 LPointerMap* pointers = instr->pointer_map();
435 RecordPosition(pointers->position()); 434 RecordPosition(pointers->position());
436 435
437 if (context_mode == RESTORE_CONTEXT) {
438 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
439 }
440 __ call(code, mode); 436 __ call(code, mode);
441 437
442 RegisterLazyDeoptimization(instr, safepoint_mode); 438 RegisterLazyDeoptimization(instr, safepoint_mode);
443 439
444 // Signal that we don't inline smi code before these stubs in the 440 // Signal that we don't inline smi code before these stubs in the
445 // optimizing code generator. 441 // optimizing code generator.
446 if (code->kind() == Code::BINARY_OP_IC || 442 if (code->kind() == Code::BINARY_OP_IC ||
447 code->kind() == Code::COMPARE_IC) { 443 code->kind() == Code::COMPARE_IC) {
448 __ nop(); 444 __ nop();
449 } 445 }
450 } 446 }
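
The chunk above is the heart of the patch: CallCodeGeneric no longer takes a ContextMode argument and no longer reloads esi from the frame; callers are now expected to have the context in esi already (typically asserted via instr->context()). A minimal standalone sketch of that before/after shape, using illustrative types rather than the real V8 declarations:

#include <cassert>

struct MacroAssemblerModel {
  bool context_in_esi = false;
  void LoadContextFromFrame() { context_in_esi = true; }  // mov esi, [ebp + kContextOffset]
  void Call() { assert(context_in_esi); }                 // callee needs the context in esi
};

enum ContextMode { RESTORE_CONTEXT, CONTEXT_ADJUSTED };   // the flag this patch removes

// Old shape: the helper decides whether to reload the context.
void CallCodeGenericOld(MacroAssemblerModel* masm, ContextMode mode) {
  if (mode == RESTORE_CONTEXT) masm->LoadContextFromFrame();
  masm->Call();
}

// New shape: no flag; the caller established esi before reaching here.
void CallCodeGenericNew(MacroAssemblerModel* masm) {
  masm->Call();
}

int main() {
  MacroAssemblerModel before;
  CallCodeGenericOld(&before, RESTORE_CONTEXT);  // helper reloads esi itself
  MacroAssemblerModel after;
  after.LoadContextFromFrame();                  // now the caller's responsibility
  CallCodeGenericNew(&after);
  return 0;
}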
451 447
452 448
453 void LCodeGen::CallCode(Handle<Code> code, 449 void LCodeGen::CallCode(Handle<Code> code,
454 RelocInfo::Mode mode, 450 RelocInfo::Mode mode,
455 LInstruction* instr, 451 LInstruction* instr) {
456 ContextMode context_mode) { 452 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
457 CallCodeGeneric(code, mode, instr, context_mode, RECORD_SIMPLE_SAFEPOINT);
458 } 453 }
459 454
460 455
461 void LCodeGen::CallRuntime(const Runtime::Function* fun, 456 void LCodeGen::CallRuntime(const Runtime::Function* fun,
462 int argc, 457 int argc,
463 LInstruction* instr, 458 LInstruction* instr) {
464 ContextMode context_mode) {
465 ASSERT(instr != NULL); 459 ASSERT(instr != NULL);
466 ASSERT(instr->HasPointerMap()); 460 ASSERT(instr->HasPointerMap());
467 LPointerMap* pointers = instr->pointer_map(); 461 LPointerMap* pointers = instr->pointer_map();
468 RecordPosition(pointers->position()); 462 RecordPosition(pointers->position());
469 463
470 if (context_mode == RESTORE_CONTEXT) {
471 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
472 }
473 __ CallRuntime(fun, argc); 464 __ CallRuntime(fun, argc);
474 465
475 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); 466 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
476 } 467 }
477 468
478 469
479 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, 470 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
480 int argc, 471 int argc,
481 LInstruction* instr) { 472 LInstruction* instr,
482 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 473 LOperand* context) {
474 ASSERT(context->IsRegister() || context->IsStackSlot());
475 if (context->IsRegister()) {
476 if (!ToRegister(context).is(esi)) {
477 __ mov(esi, ToRegister(context));
478 }
479 } else {
480 // Context is stack slot.
481 __ mov(esi, ToOperand(context));
482 }
483
483 __ CallRuntimeSaveDoubles(id); 484 __ CallRuntimeSaveDoubles(id);
484 RecordSafepointWithRegisters( 485 RecordSafepointWithRegisters(
485 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); 486 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
486 } 487 }
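
CallRuntimeFromDeferred now receives the context as an explicit LOperand instead of always reloading it from the frame; the operand may be a register or a stack slot, and esi is written only when it is not already the context. A standalone sketch of that dispatch (illustrative code, not the V8 implementation):

#include <iostream>
#include <string>

struct LOperandModel {
  enum Kind { kRegister, kStackSlot } kind;
  std::string name;  // register name or a stack-slot operand string
};

void MoveContextToEsi(const LOperandModel& context) {
  if (context.kind == LOperandModel::kRegister) {
    if (context.name != "esi") {
      std::cout << "mov esi, " << context.name << "\n";
    }
    // Already in esi: emit nothing.
  } else {
    std::cout << "mov esi, " << context.name << "\n";  // load from the stack slot
  }
}

int main() {
  MoveContextToEsi({LOperandModel::kRegister, "esi"});        // emits no code
  MoveContextToEsi({LOperandModel::kRegister, "eax"});        // mov esi, eax
  MoveContextToEsi({LOperandModel::kStackSlot, "[ebp-12]"});  // mov esi, [ebp-12]
  return 0;
}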
487 488
488 489
489 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, 490 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
490 SafepointMode safepoint_mode) { 491 SafepointMode safepoint_mode) {
491 // Create the environment to bailout to. If the call has side effects 492 // Create the environment to bailout to. If the call has side effects
492 // execution has to continue after the call otherwise execution can continue 493 // execution has to continue after the call otherwise execution can continue
(...skipping 248 matching lines...)
741 // Nothing to do. 742 // Nothing to do.
742 } 743 }
743 744
744 745
745 void LCodeGen::DoCallStub(LCallStub* instr) { 746 void LCodeGen::DoCallStub(LCallStub* instr) {
746 ASSERT(ToRegister(instr->context()).is(esi)); 747 ASSERT(ToRegister(instr->context()).is(esi));
747 ASSERT(ToRegister(instr->result()).is(eax)); 748 ASSERT(ToRegister(instr->result()).is(eax));
748 switch (instr->hydrogen()->major_key()) { 749 switch (instr->hydrogen()->major_key()) {
749 case CodeStub::RegExpConstructResult: { 750 case CodeStub::RegExpConstructResult: {
750 RegExpConstructResultStub stub; 751 RegExpConstructResultStub stub;
751 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 752 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
752 break; 753 break;
753 } 754 }
754 case CodeStub::RegExpExec: { 755 case CodeStub::RegExpExec: {
755 RegExpExecStub stub; 756 RegExpExecStub stub;
756 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 757 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
757 break; 758 break;
758 } 759 }
759 case CodeStub::SubString: { 760 case CodeStub::SubString: {
760 SubStringStub stub; 761 SubStringStub stub;
761 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 762 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
762 break; 763 break;
763 } 764 }
764 case CodeStub::NumberToString: { 765 case CodeStub::NumberToString: {
765 NumberToStringStub stub; 766 NumberToStringStub stub;
766 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 767 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
767 break; 768 break;
768 } 769 }
769 case CodeStub::StringAdd: { 770 case CodeStub::StringAdd: {
770 StringAddStub stub(NO_STRING_ADD_FLAGS); 771 StringAddStub stub(NO_STRING_ADD_FLAGS);
771 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 772 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
772 break; 773 break;
773 } 774 }
774 case CodeStub::StringCompare: { 775 case CodeStub::StringCompare: {
775 StringCompareStub stub; 776 StringCompareStub stub;
776 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 777 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
777 break; 778 break;
778 } 779 }
779 case CodeStub::TranscendentalCache: { 780 case CodeStub::TranscendentalCache: {
780 TranscendentalCacheStub stub(instr->transcendental_type(), 781 TranscendentalCacheStub stub(instr->transcendental_type(),
781 TranscendentalCacheStub::TAGGED); 782 TranscendentalCacheStub::TAGGED);
782 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 783 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
783 break; 784 break;
784 } 785 }
785 default: 786 default:
786 UNREACHABLE(); 787 UNREACHABLE();
787 } 788 }
788 } 789 }
789 790
790 791
791 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { 792 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
792 // Nothing to do. 793 // Nothing to do.
(...skipping 456 matching lines...)
1249 1250
1250 1251
1251 void LCodeGen::DoBitNotI(LBitNotI* instr) { 1252 void LCodeGen::DoBitNotI(LBitNotI* instr) {
1252 LOperand* input = instr->InputAt(0); 1253 LOperand* input = instr->InputAt(0);
1253 ASSERT(input->Equals(instr->result())); 1254 ASSERT(input->Equals(instr->result()));
1254 __ not_(ToRegister(input)); 1255 __ not_(ToRegister(input));
1255 } 1256 }
1256 1257
1257 1258
1258 void LCodeGen::DoThrow(LThrow* instr) { 1259 void LCodeGen::DoThrow(LThrow* instr) {
1259 __ push(ToOperand(instr->InputAt(0))); 1260 __ push(ToOperand(instr->value()));
1260 CallRuntime(Runtime::kThrow, 1, instr, RESTORE_CONTEXT); 1261 ASSERT(ToRegister(instr->context()).is(esi));
1262 CallRuntime(Runtime::kThrow, 1, instr);
1261 1263
1262 if (FLAG_debug_code) { 1264 if (FLAG_debug_code) {
1263 Comment("Unreachable code."); 1265 Comment("Unreachable code.");
1264 __ int3(); 1266 __ int3();
1265 } 1267 }
1266 } 1268 }
1267 1269
1268 1270
1269 void LCodeGen::DoAddI(LAddI* instr) { 1271 void LCodeGen::DoAddI(LAddI* instr) {
1270 LOperand* left = instr->InputAt(0); 1272 LOperand* left = instr->InputAt(0);
(...skipping 49 matching lines...)
1320 break; 1322 break;
1321 } 1323 }
1322 default: 1324 default:
1323 UNREACHABLE(); 1325 UNREACHABLE();
1324 break; 1326 break;
1325 } 1327 }
1326 } 1328 }
1327 1329
1328 1330
1329 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { 1331 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1330 ASSERT(ToRegister(instr->InputAt(0)).is(edx)); 1332 ASSERT(ToRegister(instr->context()).is(esi));
1331 ASSERT(ToRegister(instr->InputAt(1)).is(eax)); 1333 ASSERT(ToRegister(instr->left()).is(edx));
1334 ASSERT(ToRegister(instr->right()).is(eax));
1332 ASSERT(ToRegister(instr->result()).is(eax)); 1335 ASSERT(ToRegister(instr->result()).is(eax));
1333 1336
1334 BinaryOpStub stub(instr->op(), NO_OVERWRITE); 1337 BinaryOpStub stub(instr->op(), NO_OVERWRITE);
1335 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); 1338 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1336 } 1339 }
1337 1340
1338 1341
1339 int LCodeGen::GetNextEmittedBlock(int block) { 1342 int LCodeGen::GetNextEmittedBlock(int block) {
1340 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { 1343 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1341 LLabel* label = chunk_->GetLabel(i); 1344 LLabel* label = chunk_->GetLabel(i);
1342 if (!label->HasReplacement()) return i; 1345 if (!label->HasReplacement()) return i;
1343 } 1346 }
1344 return -1; 1347 return -1;
1345 } 1348 }
(...skipping 414 matching lines...)
1760 1763
1761 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); 1764 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1762 EmitBranch(true_block, false_block, equal); 1765 EmitBranch(true_block, false_block, equal);
1763 } 1766 }
1764 1767
1765 1768
1766 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { 1769 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1767 // Object and function are in fixed registers defined by the stub. 1770 // Object and function are in fixed registers defined by the stub.
1768 ASSERT(ToRegister(instr->context()).is(esi)); 1771 ASSERT(ToRegister(instr->context()).is(esi));
1769 InstanceofStub stub(InstanceofStub::kArgsInRegisters); 1772 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1770 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 1773 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1771 1774
1772 Label true_value, done; 1775 Label true_value, done;
1773 __ test(eax, Operand(eax)); 1776 __ test(eax, Operand(eax));
1774 __ j(zero, &true_value, Label::kNear); 1777 __ j(zero, &true_value, Label::kNear);
1775 __ mov(ToRegister(instr->result()), factory()->false_value()); 1778 __ mov(ToRegister(instr->result()), factory()->false_value());
1776 __ jmp(&done, Label::kNear); 1779 __ jmp(&done, Label::kNear);
1777 __ bind(&true_value); 1780 __ bind(&true_value);
1778 __ mov(ToRegister(instr->result()), factory()->true_value()); 1781 __ mov(ToRegister(instr->result()), factory()->true_value());
1779 __ bind(&done); 1782 __ bind(&done);
1780 } 1783 }
(...skipping 13 matching lines...)
1794 1797
1795 private: 1798 private:
1796 LInstanceOfKnownGlobal* instr_; 1799 LInstanceOfKnownGlobal* instr_;
1797 Label map_check_; 1800 Label map_check_;
1798 }; 1801 };
1799 1802
1800 DeferredInstanceOfKnownGlobal* deferred; 1803 DeferredInstanceOfKnownGlobal* deferred;
1801 deferred = new DeferredInstanceOfKnownGlobal(this, instr); 1804 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
1802 1805
1803 Label done, false_result; 1806 Label done, false_result;
1804 Register object = ToRegister(instr->InputAt(0)); 1807 Register object = ToRegister(instr->InputAt(1));
1805 Register temp = ToRegister(instr->TempAt(0)); 1808 Register temp = ToRegister(instr->TempAt(0));
1806 1809
1807 // A Smi is not an instance of anything. 1810 // A Smi is not an instance of anything.
1808 __ JumpIfSmi(object, &false_result); 1811 __ JumpIfSmi(object, &false_result);
1809 1812
1810 // This is the inlined call site instanceof cache. The two occurrences of the 1813 // This is the inlined call site instanceof cache. The two occurrences of the
1811 // hole value will be patched to the last map/result pair generated by the 1814 // hole value will be patched to the last map/result pair generated by the
1812 // instanceof stub. 1815 // instanceof stub.
1813 Label cache_miss; 1816 Label cache_miss;
1814 Register map = ToRegister(instr->TempAt(0)); 1817 Register map = ToRegister(instr->TempAt(0));
(...skipping 41 matching lines...)
1856 flags | InstanceofStub::kReturnTrueFalseObject); 1859 flags | InstanceofStub::kReturnTrueFalseObject);
1857 InstanceofStub stub(flags); 1860 InstanceofStub stub(flags);
1858 1861
1859 // Get the temp register reserved by the instruction. This needs to be a 1862 // Get the temp register reserved by the instruction. This needs to be a
1860 // register which is pushed last by PushSafepointRegisters as top of the 1863 // register which is pushed last by PushSafepointRegisters as top of the
1861 // stack is used to pass the offset to the location of the map check to 1864 // stack is used to pass the offset to the location of the map check to
1862 // the stub. 1865 // the stub.
1863 Register temp = ToRegister(instr->TempAt(0)); 1866 Register temp = ToRegister(instr->TempAt(0));
1864 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0); 1867 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
1865 __ mov(InstanceofStub::right(), Immediate(instr->function())); 1868 __ mov(InstanceofStub::right(), Immediate(instr->function()));
1866 static const int kAdditionalDelta = 16; 1869 static const int kAdditionalDelta = 13;
1867 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; 1870 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
1868 __ mov(temp, Immediate(delta)); 1871 __ mov(temp, Immediate(delta));
1869 __ StoreToSafepointRegisterSlot(temp, temp); 1872 __ StoreToSafepointRegisterSlot(temp, temp);
1870 CallCodeGeneric(stub.GetCode(), 1873 CallCodeGeneric(stub.GetCode(),
1871 RelocInfo::CODE_TARGET, 1874 RelocInfo::CODE_TARGET,
1872 instr, 1875 instr,
1873 RESTORE_CONTEXT,
1874 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 1876 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
1875 // Put the result value into the eax slot and restore all registers. 1877 // Put the result value into the eax slot and restore all registers.
1876 __ StoreToSafepointRegisterSlot(eax, eax); 1878 __ StoreToSafepointRegisterSlot(eax, eax);
1877 } 1879 }
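
kAdditionalDelta drops from 16 to 13 here, which is consistent with CallCodeGeneric no longer emitting the context reload between the map check and the call: on ia32, "mov esi, [ebp + kContextOffset]" encodes to 3 bytes (8B 75 disp8). The byte count is an assumption of this note, not quoted from the patch; a tiny consistency check:

#include <cassert>
int main() {
  const int kOldAdditionalDelta = 16;
  const int kRemovedMovSize = 3;  // assumed encoding size of the dropped mov
  assert(kOldAdditionalDelta - kRemovedMovSize == 13);  // new kAdditionalDelta
  return 0;
}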
1878 1880
1879 1881
1880 static Condition ComputeCompareCondition(Token::Value op) { 1882 static Condition ComputeCompareCondition(Token::Value op) {
1881 switch (op) { 1883 switch (op) {
1882 case Token::EQ_STRICT: 1884 case Token::EQ_STRICT:
1883 case Token::EQ: 1885 case Token::EQ:
(...skipping 10 matching lines...)
1894 UNREACHABLE(); 1896 UNREACHABLE();
1895 return no_condition; 1897 return no_condition;
1896 } 1898 }
1897 } 1899 }
1898 1900
1899 1901
1900 void LCodeGen::DoCmpT(LCmpT* instr) { 1902 void LCodeGen::DoCmpT(LCmpT* instr) {
1901 Token::Value op = instr->op(); 1903 Token::Value op = instr->op();
1902 1904
1903 Handle<Code> ic = CompareIC::GetUninitialized(op); 1905 Handle<Code> ic = CompareIC::GetUninitialized(op);
1904 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); 1906 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1905 1907
1906 Condition condition = ComputeCompareCondition(op); 1908 Condition condition = ComputeCompareCondition(op);
1907 if (op == Token::GT || op == Token::LTE) { 1909 if (op == Token::GT || op == Token::LTE) {
1908 condition = ReverseCondition(condition); 1910 condition = ReverseCondition(condition);
1909 } 1911 }
1910 Label true_value, done; 1912 Label true_value, done;
1911 __ test(eax, Operand(eax)); 1913 __ test(eax, Operand(eax));
1912 __ j(condition, &true_value, Label::kNear); 1914 __ j(condition, &true_value, Label::kNear);
1913 __ mov(ToRegister(instr->result()), factory()->false_value()); 1915 __ mov(ToRegister(instr->result()), factory()->false_value());
1914 __ jmp(&done, Label::kNear); 1916 __ jmp(&done, Label::kNear);
(...skipping 31 matching lines...)
1946 1948
1947 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { 1949 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
1948 ASSERT(ToRegister(instr->context()).is(esi)); 1950 ASSERT(ToRegister(instr->context()).is(esi));
1949 ASSERT(ToRegister(instr->global_object()).is(eax)); 1951 ASSERT(ToRegister(instr->global_object()).is(eax));
1950 ASSERT(ToRegister(instr->result()).is(eax)); 1952 ASSERT(ToRegister(instr->result()).is(eax));
1951 1953
1952 __ mov(ecx, instr->name()); 1954 __ mov(ecx, instr->name());
1953 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : 1955 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
1954 RelocInfo::CODE_TARGET_CONTEXT; 1956 RelocInfo::CODE_TARGET_CONTEXT;
1955 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1957 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1956 CallCode(ic, mode, instr, CONTEXT_ADJUSTED); 1958 CallCode(ic, mode, instr);
1957 } 1959 }
1958 1960
1959 1961
1960 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { 1962 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
1961 Register value = ToRegister(instr->InputAt(0)); 1963 Register value = ToRegister(instr->InputAt(0));
1962 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell()); 1964 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell());
1963 1965
1964 // If the cell we are storing to contains the hole it could have 1966 // If the cell we are storing to contains the hole it could have
1965 // been deleted from the property dictionary. In that case, we need 1967 // been deleted from the property dictionary. In that case, we need
1966 // to update the property details in the property dictionary to mark 1968 // to update the property details in the property dictionary to mark
(...skipping 10 matching lines...)
1977 1979
1978 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { 1980 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
1979 ASSERT(ToRegister(instr->context()).is(esi)); 1981 ASSERT(ToRegister(instr->context()).is(esi));
1980 ASSERT(ToRegister(instr->global_object()).is(edx)); 1982 ASSERT(ToRegister(instr->global_object()).is(edx));
1981 ASSERT(ToRegister(instr->value()).is(eax)); 1983 ASSERT(ToRegister(instr->value()).is(eax));
1982 1984
1983 __ mov(ecx, instr->name()); 1985 __ mov(ecx, instr->name());
1984 Handle<Code> ic = instr->strict_mode() 1986 Handle<Code> ic = instr->strict_mode()
1985 ? isolate()->builtins()->StoreIC_Initialize_Strict() 1987 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1986 : isolate()->builtins()->StoreIC_Initialize(); 1988 : isolate()->builtins()->StoreIC_Initialize();
1987 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr, CONTEXT_ADJUSTED); 1989 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
1988 } 1990 }
1989 1991
1990 1992
1991 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { 1993 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
1992 Register context = ToRegister(instr->context()); 1994 Register context = ToRegister(instr->context());
1993 Register result = ToRegister(instr->result()); 1995 Register result = ToRegister(instr->result());
1994 __ mov(result, ContextOperand(context, instr->slot_index())); 1996 __ mov(result, ContextOperand(context, instr->slot_index()));
1995 } 1997 }
1996 1998
1997 1999
(...skipping 51 matching lines...)
2049 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { 2051 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
2050 Register object = ToRegister(instr->object()); 2052 Register object = ToRegister(instr->object());
2051 Register result = ToRegister(instr->result()); 2053 Register result = ToRegister(instr->result());
2052 2054
2053 int map_count = instr->hydrogen()->types()->length(); 2055 int map_count = instr->hydrogen()->types()->length();
2054 Handle<String> name = instr->hydrogen()->name(); 2056 Handle<String> name = instr->hydrogen()->name();
2055 if (map_count == 0) { 2057 if (map_count == 0) {
2056 ASSERT(instr->hydrogen()->need_generic()); 2058 ASSERT(instr->hydrogen()->need_generic());
2057 __ mov(ecx, name); 2059 __ mov(ecx, name);
2058 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2060 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2059 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); 2061 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2060 } else { 2062 } else {
2061 Label done; 2063 Label done;
2062 for (int i = 0; i < map_count - 1; ++i) { 2064 for (int i = 0; i < map_count - 1; ++i) {
2063 Handle<Map> map = instr->hydrogen()->types()->at(i); 2065 Handle<Map> map = instr->hydrogen()->types()->at(i);
2064 Label next; 2066 Label next;
2065 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); 2067 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
2066 __ j(not_equal, &next, Label::kNear); 2068 __ j(not_equal, &next, Label::kNear);
2067 EmitLoadFieldOrConstantFunction(result, object, map, name); 2069 EmitLoadFieldOrConstantFunction(result, object, map, name);
2068 __ jmp(&done, Label::kNear); 2070 __ jmp(&done, Label::kNear);
2069 __ bind(&next); 2071 __ bind(&next);
2070 } 2072 }
2071 Handle<Map> map = instr->hydrogen()->types()->last(); 2073 Handle<Map> map = instr->hydrogen()->types()->last();
2072 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map); 2074 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
2073 if (instr->hydrogen()->need_generic()) { 2075 if (instr->hydrogen()->need_generic()) {
2074 Label generic; 2076 Label generic;
2075 __ j(not_equal, &generic, Label::kNear); 2077 __ j(not_equal, &generic, Label::kNear);
2076 EmitLoadFieldOrConstantFunction(result, object, map, name); 2078 EmitLoadFieldOrConstantFunction(result, object, map, name);
2077 __ jmp(&done, Label::kNear); 2079 __ jmp(&done, Label::kNear);
2078 __ bind(&generic); 2080 __ bind(&generic);
2079 __ mov(ecx, name); 2081 __ mov(ecx, name);
2080 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2082 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2081 CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); 2083 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2082 } else { 2084 } else {
2083 DeoptimizeIf(not_equal, instr->environment()); 2085 DeoptimizeIf(not_equal, instr->environment());
2084 EmitLoadFieldOrConstantFunction(result, object, map, name); 2086 EmitLoadFieldOrConstantFunction(result, object, map, name);
2085 } 2087 }
2086 __ bind(&done); 2088 __ bind(&done);
2087 } 2089 }
2088 } 2090 }
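
DoLoadNamedFieldPolymorphic compares the receiver's map against each expected map in turn, inlines the load on a hit, and otherwise either falls through to the generic LoadIC or deoptimizes. A standalone sketch of that dispatch shape, with illustrative types rather than V8's:

#include <cassert>
#include <stdexcept>
#include <vector>

struct ObjectModel { int map; int field; };

int LoadNamedFieldPolymorphic(const ObjectModel& obj,
                              const std::vector<int>& expected_maps,
                              bool need_generic) {
  for (int map : expected_maps) {
    if (obj.map == map) return obj.field;       // inlined fast-path load
  }
  if (need_generic) return -1;                  // stand-in for the generic LoadIC
  throw std::runtime_error("deopt");            // stand-in for DeoptimizeIf
}

int main() {
  ObjectModel o{2, 42};
  assert(LoadNamedFieldPolymorphic(o, {1, 2, 3}, false) == 42);
  assert(LoadNamedFieldPolymorphic(o, {7}, true) == -1);  // generic fallback
  return 0;
}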
2089 2091
2090 2092
2091 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { 2093 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2092 ASSERT(ToRegister(instr->context()).is(esi)); 2094 ASSERT(ToRegister(instr->context()).is(esi));
2093 ASSERT(ToRegister(instr->object()).is(eax)); 2095 ASSERT(ToRegister(instr->object()).is(eax));
2094 ASSERT(ToRegister(instr->result()).is(eax)); 2096 ASSERT(ToRegister(instr->result()).is(eax));
2095 2097
2096 __ mov(ecx, instr->name()); 2098 __ mov(ecx, instr->name());
2097 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2099 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2098 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 2100 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2099 } 2101 }
2100 2102
2101 2103
2102 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { 2104 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2103 Register function = ToRegister(instr->function()); 2105 Register function = ToRegister(instr->function());
2104 Register temp = ToRegister(instr->TempAt(0)); 2106 Register temp = ToRegister(instr->TempAt(0));
2105 Register result = ToRegister(instr->result()); 2107 Register result = ToRegister(instr->result());
2106 2108
2107 // Check that the function really is a function. 2109 // Check that the function really is a function.
2108 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); 2110 __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
(...skipping 178 matching lines...)
2287 } 2289 }
2288 } 2290 }
2289 2291
2290 2292
2291 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { 2293 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2292 ASSERT(ToRegister(instr->context()).is(esi)); 2294 ASSERT(ToRegister(instr->context()).is(esi));
2293 ASSERT(ToRegister(instr->object()).is(edx)); 2295 ASSERT(ToRegister(instr->object()).is(edx));
2294 ASSERT(ToRegister(instr->key()).is(eax)); 2296 ASSERT(ToRegister(instr->key()).is(eax));
2295 2297
2296 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 2298 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2297 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 2299 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2298 } 2300 }
2299 2301
2300 2302
2301 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 2303 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2302 Register result = ToRegister(instr->result()); 2304 Register result = ToRegister(instr->result());
2303 2305
2304 // Check for arguments adapter frame. 2306 // Check for arguments adapter frame.
2305 Label done, adapted; 2307 Label done, adapted;
2306 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); 2308 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2307 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset)); 2309 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
(...skipping 209 matching lines...)
2517 ASSERT(ToRegister(instr->result()).is(eax)); 2519 ASSERT(ToRegister(instr->result()).is(eax));
2518 __ mov(edi, instr->function()); 2520 __ mov(edi, instr->function());
2519 CallKnownFunction(instr->function(), 2521 CallKnownFunction(instr->function(),
2520 instr->arity(), 2522 instr->arity(),
2521 instr, 2523 instr,
2522 CALL_AS_METHOD); 2524 CALL_AS_METHOD);
2523 } 2525 }
2524 2526
2525 2527
2526 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { 2528 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2527 Register input_reg = ToRegister(instr->InputAt(0)); 2529 Register input_reg = ToRegister(instr->value());
2528 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 2530 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2529 factory()->heap_number_map()); 2531 factory()->heap_number_map());
2530 DeoptimizeIf(not_equal, instr->environment()); 2532 DeoptimizeIf(not_equal, instr->environment());
2531 2533
2532 Label done; 2534 Label done;
2533 Register tmp = input_reg.is(eax) ? ecx : eax; 2535 Register tmp = input_reg.is(eax) ? ecx : eax;
2534 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx; 2536 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
2535 2537
2536 // Preserve the value of all registers. 2538 // Preserve the value of all registers.
2537 PushSafepointRegistersScope scope(this); 2539 PushSafepointRegistersScope scope(this);
(...skipping 10 matching lines...)
2548 2550
2549 __ bind(&negative); 2551 __ bind(&negative);
2550 2552
2551 Label allocated, slow; 2553 Label allocated, slow;
2552 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); 2554 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
2553 __ jmp(&allocated); 2555 __ jmp(&allocated);
2554 2556
2555 // Slow case: Call the runtime system to do the number allocation. 2557 // Slow case: Call the runtime system to do the number allocation.
2556 __ bind(&slow); 2558 __ bind(&slow);
2557 2559
2558 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); 2560 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0,
2561 instr, instr->context());
2559 2562
2560 // Set the pointer to the new heap number in tmp. 2563 // Set the pointer to the new heap number in tmp.
2561 if (!tmp.is(eax)) __ mov(tmp, eax); 2564 if (!tmp.is(eax)) __ mov(tmp, eax);
2562 2565
2563 // Restore input_reg after call to runtime. 2566 // Restore input_reg after call to runtime.
2564 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); 2567 __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
2565 2568
2566 __ bind(&allocated); 2569 __ bind(&allocated);
2567 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset)); 2570 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2568 __ and_(tmp2, ~HeapNumber::kSignMask); 2571 __ and_(tmp2, ~HeapNumber::kSignMask);
2569 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2); 2572 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
2570 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); 2573 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
2571 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2); 2574 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
2572 __ StoreToSafepointRegisterSlot(input_reg, tmp); 2575 __ StoreToSafepointRegisterSlot(input_reg, tmp);
2573 2576
2574 __ bind(&done); 2577 __ bind(&done);
2575 } 2578 }
2576 2579
2577 2580
2578 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) { 2581 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2579 Register input_reg = ToRegister(instr->InputAt(0)); 2582 Register input_reg = ToRegister(instr->value());
2580 __ test(input_reg, Operand(input_reg)); 2583 __ test(input_reg, Operand(input_reg));
2581 Label is_positive; 2584 Label is_positive;
2582 __ j(not_sign, &is_positive); 2585 __ j(not_sign, &is_positive);
2583 __ neg(input_reg); 2586 __ neg(input_reg);
2584 __ test(input_reg, Operand(input_reg)); 2587 __ test(input_reg, Operand(input_reg));
2585 DeoptimizeIf(negative, instr->environment()); 2588 DeoptimizeIf(negative, instr->environment());
2586 __ bind(&is_positive); 2589 __ bind(&is_positive);
2587 } 2590 }
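
EmitIntegerMathAbs negates the value and then deoptimizes if the result is still negative; the only input with that property is INT_MIN, whose two's-complement negation wraps back to itself. A small standalone check of that edge case (illustrative, not V8 code):

#include <cassert>
#include <cstdint>
int main() {
  uint32_t int_min = 0x80000000u;
  uint32_t negated = 0u - int_min;       // what "neg" computes, mod 2^32
  assert(negated == 0x80000000u);        // sign bit still set: the deopt case
  uint32_t minus_eleven = 0xFFFFFFF5u;   // bit pattern of -11
  assert(0u - minus_eleven == 11u);      // ordinary case: result is positive
  return 0;
}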
2588 2591
2589 2592
2590 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) { 2593 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2591 // Class for deferred case. 2594 // Class for deferred case.
2592 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode { 2595 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
2593 public: 2596 public:
2594 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, 2597 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2595 LUnaryMathOperation* instr) 2598 LUnaryMathOperation* instr)
2596 : LDeferredCode(codegen), instr_(instr) { } 2599 : LDeferredCode(codegen), instr_(instr) { }
2597 virtual void Generate() { 2600 virtual void Generate() {
2598 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_); 2601 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2599 } 2602 }
2600 private: 2603 private:
2601 LUnaryMathOperation* instr_; 2604 LUnaryMathOperation* instr_;
2602 }; 2605 };
2603 2606
2604 ASSERT(instr->InputAt(0)->Equals(instr->result())); 2607 ASSERT(instr->value()->Equals(instr->result()));
2605 Representation r = instr->hydrogen()->value()->representation(); 2608 Representation r = instr->hydrogen()->value()->representation();
2606 2609
2607 if (r.IsDouble()) { 2610 if (r.IsDouble()) {
2608 XMMRegister scratch = xmm0; 2611 XMMRegister scratch = xmm0;
2609 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); 2612 XMMRegister input_reg = ToDoubleRegister(instr->value());
2610 __ xorps(scratch, scratch); 2613 __ xorps(scratch, scratch);
2611 __ subsd(scratch, input_reg); 2614 __ subsd(scratch, input_reg);
2612 __ pand(input_reg, scratch); 2615 __ pand(input_reg, scratch);
2613 } else if (r.IsInteger32()) { 2616 } else if (r.IsInteger32()) {
2614 EmitIntegerMathAbs(instr); 2617 EmitIntegerMathAbs(instr);
2615 } else { // Tagged case. 2618 } else { // Tagged case.
2616 DeferredMathAbsTaggedHeapNumber* deferred = 2619 DeferredMathAbsTaggedHeapNumber* deferred =
2617 new DeferredMathAbsTaggedHeapNumber(this, instr); 2620 new DeferredMathAbsTaggedHeapNumber(this, instr);
2618 Register input_reg = ToRegister(instr->InputAt(0)); 2621 Register input_reg = ToRegister(instr->value());
2619 // Smi check. 2622 // Smi check.
2620 __ JumpIfNotSmi(input_reg, deferred->entry()); 2623 __ JumpIfNotSmi(input_reg, deferred->entry());
2621 EmitIntegerMathAbs(instr); 2624 EmitIntegerMathAbs(instr);
2622 __ bind(deferred->exit()); 2625 __ bind(deferred->exit());
2623 } 2626 }
2624 } 2627 }
2625 2628
2626 2629
2627 void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) { 2630 void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2628 XMMRegister xmm_scratch = xmm0; 2631 XMMRegister xmm_scratch = xmm0;
2629 Register output_reg = ToRegister(instr->result()); 2632 Register output_reg = ToRegister(instr->result());
2630 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); 2633 XMMRegister input_reg = ToDoubleRegister(instr->value());
2631 __ xorps(xmm_scratch, xmm_scratch); // Zero the register. 2634 __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
2632 __ ucomisd(input_reg, xmm_scratch); 2635 __ ucomisd(input_reg, xmm_scratch);
2633 2636
2634 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 2637 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2635 DeoptimizeIf(below_equal, instr->environment()); 2638 DeoptimizeIf(below_equal, instr->environment());
2636 } else { 2639 } else {
2637 DeoptimizeIf(below, instr->environment()); 2640 DeoptimizeIf(below, instr->environment());
2638 } 2641 }
2639 2642
2640 // Use truncating instruction (OK because input is positive). 2643 // Use truncating instruction (OK because input is positive).
2641 __ cvttsd2si(output_reg, Operand(input_reg)); 2644 __ cvttsd2si(output_reg, Operand(input_reg));
2642 2645
2643 // Overflow is signalled with minint. 2646 // Overflow is signalled with minint.
2644 __ cmp(output_reg, 0x80000000u); 2647 __ cmp(output_reg, 0x80000000u);
2645 DeoptimizeIf(equal, instr->environment()); 2648 DeoptimizeIf(equal, instr->environment());
2646 } 2649 }
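
DoMathFloor truncates with cvttsd2si and treats a result of 0x80000000 as overflow, because that is the "integer indefinite" value the instruction produces when the double does not fit in a signed 32-bit integer. A small SSE2 check of that behaviour (illustrative, assumes an x86 host):

#include <emmintrin.h>
#include <cassert>
#include <cstdint>

int main() {
  assert(_mm_cvttsd_si32(_mm_set_sd(123.9)) == 123);  // plain truncation
  uint32_t sentinel =
      static_cast<uint32_t>(_mm_cvttsd_si32(_mm_set_sd(4.0e9)));  // out of int32 range
  assert(sentinel == 0x80000000u);  // the value the cmp/DeoptimizeIf pair looks for
  return 0;
}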
2647 2650
2648 2651
2649 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) { 2652 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2650 XMMRegister xmm_scratch = xmm0; 2653 XMMRegister xmm_scratch = xmm0;
2651 Register output_reg = ToRegister(instr->result()); 2654 Register output_reg = ToRegister(instr->result());
2652 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); 2655 XMMRegister input_reg = ToDoubleRegister(instr->value());
2653 2656
2654 Label below_half, done; 2657 Label below_half, done;
2655 // xmm_scratch = 0.5 2658 // xmm_scratch = 0.5
2656 ExternalReference one_half = ExternalReference::address_of_one_half(); 2659 ExternalReference one_half = ExternalReference::address_of_one_half();
2657 __ movdbl(xmm_scratch, Operand::StaticVariable(one_half)); 2660 __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
2658 2661
2659 __ ucomisd(xmm_scratch, input_reg); 2662 __ ucomisd(xmm_scratch, input_reg);
2660 __ j(above, &below_half); 2663 __ j(above, &below_half);
2661 // input = input + 0.5 2664 // input = input + 0.5
2662 __ addsd(input_reg, xmm_scratch); 2665 __ addsd(input_reg, xmm_scratch);
(...skipping 24 matching lines...)
2687 __ cvtss2sd(xmm_scratch, xmm_scratch); 2690 __ cvtss2sd(xmm_scratch, xmm_scratch);
2688 __ ucomisd(input_reg, xmm_scratch); 2691 __ ucomisd(input_reg, xmm_scratch);
2689 DeoptimizeIf(below, instr->environment()); 2692 DeoptimizeIf(below, instr->environment());
2690 } 2693 }
2691 __ Set(output_reg, Immediate(0)); 2694 __ Set(output_reg, Immediate(0));
2692 __ bind(&done); 2695 __ bind(&done);
2693 } 2696 }
2694 2697
2695 2698
2696 void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) { 2699 void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
2697 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); 2700 XMMRegister input_reg = ToDoubleRegister(instr->value());
2698 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); 2701 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2699 __ sqrtsd(input_reg, input_reg); 2702 __ sqrtsd(input_reg, input_reg);
2700 } 2703 }
2701 2704
2702 2705
2703 void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) { 2706 void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2704 XMMRegister xmm_scratch = xmm0; 2707 XMMRegister xmm_scratch = xmm0;
2705 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); 2708 XMMRegister input_reg = ToDoubleRegister(instr->value());
2706 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); 2709 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2707 __ xorps(xmm_scratch, xmm_scratch); 2710 __ xorps(xmm_scratch, xmm_scratch);
2708 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0. 2711 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
2709 __ sqrtsd(input_reg, input_reg); 2712 __ sqrtsd(input_reg, input_reg);
2710 } 2713 }
2711 2714
2712 2715
2713 void LCodeGen::DoPower(LPower* instr) { 2716 void LCodeGen::DoPower(LPower* instr) {
2714 LOperand* left = instr->InputAt(0); 2717 LOperand* left = instr->InputAt(0);
2715 LOperand* right = instr->InputAt(1); 2718 LOperand* right = instr->InputAt(1);
(...skipping 47 matching lines...)
2763 // Return value is in st(0) on ia32. 2766 // Return value is in st(0) on ia32.
2764 // Store it into the (fixed) result register. 2767 // Store it into the (fixed) result register.
2765 __ sub(Operand(esp), Immediate(kDoubleSize)); 2768 __ sub(Operand(esp), Immediate(kDoubleSize));
2766 __ fstp_d(Operand(esp, 0)); 2769 __ fstp_d(Operand(esp, 0));
2767 __ movdbl(result_reg, Operand(esp, 0)); 2770 __ movdbl(result_reg, Operand(esp, 0));
2768 __ add(Operand(esp), Immediate(kDoubleSize)); 2771 __ add(Operand(esp), Immediate(kDoubleSize));
2769 } 2772 }
2770 2773
2771 2774
2772 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { 2775 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
2773 ASSERT(instr->InputAt(0)->Equals(instr->result())); 2776 ASSERT(instr->value()->Equals(instr->result()));
2774 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); 2777 XMMRegister input_reg = ToDoubleRegister(instr->value());
2775 Label positive, done, zero; 2778 Label positive, done, zero;
2776 __ xorps(xmm0, xmm0); 2779 __ xorps(xmm0, xmm0);
2777 __ ucomisd(input_reg, xmm0); 2780 __ ucomisd(input_reg, xmm0);
2778 __ j(above, &positive, Label::kNear); 2781 __ j(above, &positive, Label::kNear);
2779 __ j(equal, &zero, Label::kNear); 2782 __ j(equal, &zero, Label::kNear);
2780 ExternalReference nan = ExternalReference::address_of_nan(); 2783 ExternalReference nan = ExternalReference::address_of_nan();
2781 __ movdbl(input_reg, Operand::StaticVariable(nan)); 2784 __ movdbl(input_reg, Operand::StaticVariable(nan));
2782 __ jmp(&done, Label::kNear); 2785 __ jmp(&done, Label::kNear);
2783 __ bind(&zero); 2786 __ bind(&zero);
2784 __ push(Immediate(0xFFF00000)); 2787 __ push(Immediate(0xFFF00000));
(...skipping 11 matching lines...)
2796 __ movdbl(input_reg, Operand(esp, 0)); 2799 __ movdbl(input_reg, Operand(esp, 0));
2797 __ add(Operand(esp), Immediate(kDoubleSize)); 2800 __ add(Operand(esp), Immediate(kDoubleSize));
2798 __ bind(&done); 2801 __ bind(&done);
2799 } 2802 }
2800 2803
2801 2804
2802 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) { 2805 void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2803 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); 2806 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2804 TranscendentalCacheStub stub(TranscendentalCache::COS, 2807 TranscendentalCacheStub stub(TranscendentalCache::COS,
2805 TranscendentalCacheStub::UNTAGGED); 2808 TranscendentalCacheStub::UNTAGGED);
2806 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); 2809 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2807 } 2810 }
2808 2811
2809 2812
2810 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) { 2813 void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2811 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); 2814 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2812 TranscendentalCacheStub stub(TranscendentalCache::SIN, 2815 TranscendentalCacheStub stub(TranscendentalCache::SIN,
2813 TranscendentalCacheStub::UNTAGGED); 2816 TranscendentalCacheStub::UNTAGGED);
2814 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); 2817 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2815 } 2818 }
2816 2819
2817 2820
2818 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { 2821 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2819 switch (instr->op()) { 2822 switch (instr->op()) {
2820 case kMathAbs: 2823 case kMathAbs:
2821 DoMathAbs(instr); 2824 DoMathAbs(instr);
2822 break; 2825 break;
2823 case kMathFloor: 2826 case kMathFloor:
2824 DoMathFloor(instr); 2827 DoMathFloor(instr);
(...skipping 39 matching lines...)
2864 2867
2865 2868
2866 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 2869 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
2867 ASSERT(ToRegister(instr->context()).is(esi)); 2870 ASSERT(ToRegister(instr->context()).is(esi));
2868 ASSERT(ToRegister(instr->key()).is(ecx)); 2871 ASSERT(ToRegister(instr->key()).is(ecx));
2869 ASSERT(ToRegister(instr->result()).is(eax)); 2872 ASSERT(ToRegister(instr->result()).is(eax));
2870 2873
2871 int arity = instr->arity(); 2874 int arity = instr->arity();
2872 Handle<Code> ic = isolate()->stub_cache()-> 2875 Handle<Code> ic = isolate()->stub_cache()->
2873 ComputeKeyedCallInitialize(arity, NOT_IN_LOOP); 2876 ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2874 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 2877 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2875 } 2878 }
2876 2879
2877 2880
2878 void LCodeGen::DoCallNamed(LCallNamed* instr) { 2881 void LCodeGen::DoCallNamed(LCallNamed* instr) {
2879 ASSERT(ToRegister(instr->context()).is(esi)); 2882 ASSERT(ToRegister(instr->context()).is(esi));
2880 ASSERT(ToRegister(instr->result()).is(eax)); 2883 ASSERT(ToRegister(instr->result()).is(eax));
2881 2884
2882 int arity = instr->arity(); 2885 int arity = instr->arity();
2883 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; 2886 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
2884 Handle<Code> ic = 2887 Handle<Code> ic =
2885 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode); 2888 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
2886 __ mov(ecx, instr->name()); 2889 __ mov(ecx, instr->name());
2887 CallCode(ic, mode, instr, CONTEXT_ADJUSTED); 2890 CallCode(ic, mode, instr);
2888 } 2891 }
2889 2892
2890 2893
2891 void LCodeGen::DoCallFunction(LCallFunction* instr) { 2894 void LCodeGen::DoCallFunction(LCallFunction* instr) {
2892 ASSERT(ToRegister(instr->context()).is(esi)); 2895 ASSERT(ToRegister(instr->context()).is(esi));
2893 ASSERT(ToRegister(instr->result()).is(eax)); 2896 ASSERT(ToRegister(instr->result()).is(eax));
2894 2897
2895 int arity = instr->arity(); 2898 int arity = instr->arity();
2896 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_IMPLICIT); 2899 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_IMPLICIT);
2897 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 2900 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2898 __ Drop(1); 2901 __ Drop(1);
2899 } 2902 }
2900 2903
2901 2904
2902 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 2905 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
2903 ASSERT(ToRegister(instr->context()).is(esi)); 2906 ASSERT(ToRegister(instr->context()).is(esi));
2904 ASSERT(ToRegister(instr->result()).is(eax)); 2907 ASSERT(ToRegister(instr->result()).is(eax));
2905 2908
2906 int arity = instr->arity(); 2909 int arity = instr->arity();
2907 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; 2910 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
2908 Handle<Code> ic = 2911 Handle<Code> ic =
2909 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode); 2912 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
2910 __ mov(ecx, instr->name()); 2913 __ mov(ecx, instr->name());
2911 CallCode(ic, mode, instr, CONTEXT_ADJUSTED); 2914 CallCode(ic, mode, instr);
2912 } 2915 }
2913 2916
2914 2917
2915 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { 2918 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2916 ASSERT(ToRegister(instr->result()).is(eax)); 2919 ASSERT(ToRegister(instr->result()).is(eax));
2917 __ mov(edi, instr->target()); 2920 __ mov(edi, instr->target());
2918 CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION); 2921 CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
2919 } 2922 }
2920 2923
2921 2924
2922 void LCodeGen::DoCallNew(LCallNew* instr) { 2925 void LCodeGen::DoCallNew(LCallNew* instr) {
2923 ASSERT(ToRegister(instr->context()).is(esi)); 2926 ASSERT(ToRegister(instr->context()).is(esi));
2924 ASSERT(ToRegister(instr->constructor()).is(edi)); 2927 ASSERT(ToRegister(instr->constructor()).is(edi));
2925 ASSERT(ToRegister(instr->result()).is(eax)); 2928 ASSERT(ToRegister(instr->result()).is(eax));
2926 2929
2927 Handle<Code> builtin = isolate()->builtins()->JSConstructCall(); 2930 Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
2928 __ Set(eax, Immediate(instr->arity())); 2931 __ Set(eax, Immediate(instr->arity()));
2929 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr, CONTEXT_ADJUSTED); 2932 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
2930 } 2933 }
2931 2934
2932 2935
2933 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { 2936 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2934 CallRuntime(instr->function(), instr->arity(), instr, RESTORE_CONTEXT); 2937 CallRuntime(instr->function(), instr->arity(), instr);
2935 } 2938 }
2936 2939
2937 2940
2938 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { 2941 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
2939 Register object = ToRegister(instr->object()); 2942 Register object = ToRegister(instr->object());
2940 Register value = ToRegister(instr->value()); 2943 Register value = ToRegister(instr->value());
2941 int offset = instr->offset(); 2944 int offset = instr->offset();
2942 2945
2943 if (!instr->transition().is_null()) { 2946 if (!instr->transition().is_null()) {
2944 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); 2947 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
(...skipping 22 matching lines...) Expand all
2967 2970
2968 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 2971 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
2969 ASSERT(ToRegister(instr->context()).is(esi)); 2972 ASSERT(ToRegister(instr->context()).is(esi));
2970 ASSERT(ToRegister(instr->object()).is(edx)); 2973 ASSERT(ToRegister(instr->object()).is(edx));
2971 ASSERT(ToRegister(instr->value()).is(eax)); 2974 ASSERT(ToRegister(instr->value()).is(eax));
2972 2975
2973 __ mov(ecx, instr->name()); 2976 __ mov(ecx, instr->name());
2974 Handle<Code> ic = instr->strict_mode() 2977 Handle<Code> ic = instr->strict_mode()
2975 ? isolate()->builtins()->StoreIC_Initialize_Strict() 2978 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2976 : isolate()->builtins()->StoreIC_Initialize(); 2979 : isolate()->builtins()->StoreIC_Initialize();
2977 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 2980 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2978 } 2981 }
2979 2982
2980 2983
2981 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { 2984 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
2982 __ cmp(ToRegister(instr->index()), ToOperand(instr->length())); 2985 __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
2983 DeoptimizeIf(above_equal, instr->environment()); 2986 DeoptimizeIf(above_equal, instr->environment());
2984 } 2987 }
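
DoBoundsCheck emits a single cmp followed by a deopt on above_equal; since that condition is unsigned, a negative index reinterpreted as a large unsigned value fails the same test as index >= length. A standalone sketch of the equivalence (illustrative, not V8 code):

#include <cassert>
#include <cstdint>

bool InBounds(int32_t index, int32_t length) {
  // Mirrors "cmp index, length" followed by a deopt on above_equal.
  return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
}

int main() {
  assert(InBounds(0, 10));
  assert(InBounds(9, 10));
  assert(!InBounds(10, 10));  // index == length: out of bounds
  assert(!InBounds(-1, 10));  // negative index caught by the unsigned compare
  return 0;
}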
2985 2988
2986 2989
2987 void LCodeGen::DoStoreKeyedSpecializedArrayElement( 2990 void LCodeGen::DoStoreKeyedSpecializedArrayElement(
(...skipping 69 matching lines...)
3057 3060
3058 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 3061 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3059 ASSERT(ToRegister(instr->context()).is(esi)); 3062 ASSERT(ToRegister(instr->context()).is(esi));
3060 ASSERT(ToRegister(instr->object()).is(edx)); 3063 ASSERT(ToRegister(instr->object()).is(edx));
3061 ASSERT(ToRegister(instr->key()).is(ecx)); 3064 ASSERT(ToRegister(instr->key()).is(ecx));
3062 ASSERT(ToRegister(instr->value()).is(eax)); 3065 ASSERT(ToRegister(instr->value()).is(eax));
3063 3066
3064 Handle<Code> ic = instr->strict_mode() 3067 Handle<Code> ic = instr->strict_mode()
3065 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() 3068 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3066 : isolate()->builtins()->KeyedStoreIC_Initialize(); 3069 : isolate()->builtins()->KeyedStoreIC_Initialize();
3067 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED); 3070 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3068 } 3071 }
3069 3072
3070 3073
3071 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { 3074 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3072 class DeferredStringCharCodeAt: public LDeferredCode { 3075 class DeferredStringCharCodeAt: public LDeferredCode {
3073 public: 3076 public:
3074 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) 3077 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3075 : LDeferredCode(codegen), instr_(instr) { } 3078 : LDeferredCode(codegen), instr_(instr) { }
3076 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } 3079 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3077 private: 3080 private:
(...skipping 110 matching lines...)
3188 // DoStringCharCodeAt above. 3191 // DoStringCharCodeAt above.
3189 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); 3192 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
3190 if (instr->index()->IsConstantOperand()) { 3193 if (instr->index()->IsConstantOperand()) {
3191 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); 3194 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3192 __ push(Immediate(Smi::FromInt(const_index))); 3195 __ push(Immediate(Smi::FromInt(const_index)));
3193 } else { 3196 } else {
3194 Register index = ToRegister(instr->index()); 3197 Register index = ToRegister(instr->index());
3195 __ SmiTag(index); 3198 __ SmiTag(index);
3196 __ push(index); 3199 __ push(index);
3197 } 3200 }
3198 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr); 3201 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2,
3202 instr, instr->context());
3199 if (FLAG_debug_code) { 3203 if (FLAG_debug_code) {
3200 __ AbortIfNotSmi(eax); 3204 __ AbortIfNotSmi(eax);
3201 } 3205 }
3202 __ SmiUntag(eax); 3206 __ SmiUntag(eax);
3203 __ StoreToSafepointRegisterSlot(result, eax); 3207 __ StoreToSafepointRegisterSlot(result, eax);
3204 } 3208 }
3205 3209
3206 3210
3207 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { 3211 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3208 class DeferredStringCharFromCode: public LDeferredCode { 3212 class DeferredStringCharFromCode: public LDeferredCode {
(...skipping 30 matching lines...)
3239 Register result = ToRegister(instr->result()); 3243 Register result = ToRegister(instr->result());
3240 3244
3241 // TODO(3095996): Get rid of this. For now, we need to make the 3245 // TODO(3095996): Get rid of this. For now, we need to make the
3242 // result register contain a valid pointer because it is already 3246 // result register contain a valid pointer because it is already
3243 // contained in the register pointer map. 3247 // contained in the register pointer map.
3244 __ Set(result, Immediate(0)); 3248 __ Set(result, Immediate(0));
3245 3249
3246 PushSafepointRegistersScope scope(this); 3250 PushSafepointRegistersScope scope(this);
3247 __ SmiTag(char_code); 3251 __ SmiTag(char_code);
3248 __ push(char_code); 3252 __ push(char_code);
3249 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr); 3253 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
3250 __ StoreToSafepointRegisterSlot(result, eax); 3254 __ StoreToSafepointRegisterSlot(result, eax);
3251 } 3255 }
3252 3256
3253 3257
3254 void LCodeGen::DoStringLength(LStringLength* instr) { 3258 void LCodeGen::DoStringLength(LStringLength* instr) {
3255 Register string = ToRegister(instr->string()); 3259 Register string = ToRegister(instr->string());
3256 Register result = ToRegister(instr->result()); 3260 Register result = ToRegister(instr->result());
3257 __ mov(result, FieldOperand(string, String::kLengthOffset)); 3261 __ mov(result, FieldOperand(string, String::kLengthOffset));
3258 } 3262 }
3259 3263
3260 3264
3261 void LCodeGen::DoStringAdd(LStringAdd* instr) { 3265 void LCodeGen::DoStringAdd(LStringAdd* instr) {
3262 if (instr->left()->IsConstantOperand()) { 3266 if (instr->left()->IsConstantOperand()) {
3263 __ push(ToImmediate(instr->left())); 3267 __ push(ToImmediate(instr->left()));
3264 } else { 3268 } else {
3265 __ push(ToOperand(instr->left())); 3269 __ push(ToOperand(instr->left()));
3266 } 3270 }
3267 if (instr->right()->IsConstantOperand()) { 3271 if (instr->right()->IsConstantOperand()) {
3268 __ push(ToImmediate(instr->right())); 3272 __ push(ToImmediate(instr->right()));
3269 } else { 3273 } else {
3270 __ push(ToOperand(instr->right())); 3274 __ push(ToOperand(instr->right()));
3271 } 3275 }
3272 StringAddStub stub(NO_STRING_CHECK_IN_STUB); 3276 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
3273 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); 3277 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3274 } 3278 }
3275 3279
3276 3280
3277 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { 3281 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
3278 LOperand* input = instr->InputAt(0); 3282 LOperand* input = instr->InputAt(0);
3279 ASSERT(input->IsRegister() || input->IsStackSlot()); 3283 ASSERT(input->IsRegister() || input->IsStackSlot());
3280 LOperand* output = instr->result(); 3284 LOperand* output = instr->result();
3281 ASSERT(output->IsDoubleRegister()); 3285 ASSERT(output->IsDoubleRegister());
3282 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input)); 3286 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
3283 } 3287 }
(...skipping 40 matching lines...)
3324 __ jmp(&done, Label::kNear); 3328 __ jmp(&done, Label::kNear);
3325 } 3329 }
3326 3330
3327 // Slow case: Call the runtime system to do the number allocation. 3331 // Slow case: Call the runtime system to do the number allocation.
3328 __ bind(&slow); 3332 __ bind(&slow);
3329 3333
3330 // TODO(3095996): Put a valid pointer value in the stack slot where the result 3334 // TODO(3095996): Put a valid pointer value in the stack slot where the result
3331 // register is stored, as this register is in the pointer map, but contains an 3335 // register is stored, as this register is in the pointer map, but contains an
3332 // integer value. 3336 // integer value.
3333 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); 3337 __ StoreToSafepointRegisterSlot(reg, Immediate(0));
3334 3338 // NumberTagI and NumberTagD use the context from the frame, rather than
3335 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); 3339 // the environment's HContext or HInlinedContext value.
3340 // They only call Runtime::kAllocateHeapNumber.
3341 // The corresponding HChange instructions are added in a phase that does
3342 // not have easy access to the local context.
3343 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3344 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3345 RecordSafepointWithRegisters(
3346 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
3336 if (!reg.is(eax)) __ mov(reg, eax); 3347 if (!reg.is(eax)) __ mov(reg, eax);
3337 3348
3338 // Done. Put the value in xmm0 into the value of the allocated heap 3349 // Done. Put the value in xmm0 into the value of the allocated heap
3339 // number. 3350 // number.
3340 __ bind(&done); 3351 __ bind(&done);
3341 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0); 3352 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
3342 __ StoreToSafepointRegisterSlot(reg, reg); 3353 __ StoreToSafepointRegisterSlot(reg, reg);
3343 } 3354 }
3344 3355
3345 3356
(...skipping 23 matching lines...)
3369 3380
3370 3381
3371 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { 3382 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3372 // TODO(3095996): Get rid of this. For now, we need to make the 3383 // TODO(3095996): Get rid of this. For now, we need to make the
3373 // result register contain a valid pointer because it is already 3384 // result register contain a valid pointer because it is already
3374 // contained in the register pointer map. 3385 // contained in the register pointer map.
3375 Register reg = ToRegister(instr->result()); 3386 Register reg = ToRegister(instr->result());
3376 __ Set(reg, Immediate(0)); 3387 __ Set(reg, Immediate(0));
3377 3388
3378 PushSafepointRegistersScope scope(this); 3389 PushSafepointRegistersScope scope(this);
3379 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); 3390 // NumberTagI and NumberTagD use the context from the frame, rather than
3391 // the environment's HContext or HInlinedContext value.
3392 // They only call Runtime::kAllocateHeapNumber.
3393 // The corresponding HChange instructions are added in a phase that does
3394 // not have easy access to the local context.
3395 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3396 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3397 RecordSafepointWithRegisters(instr->pointer_map(), 0,
3398 Safepoint::kNoDeoptimizationIndex);
3380 __ StoreToSafepointRegisterSlot(reg, eax); 3399 __ StoreToSafepointRegisterSlot(reg, eax);
3381 } 3400 }
3382 3401
3383 3402
3384 void LCodeGen::DoSmiTag(LSmiTag* instr) { 3403 void LCodeGen::DoSmiTag(LSmiTag* instr) {
3385 LOperand* input = instr->InputAt(0); 3404 LOperand* input = instr->InputAt(0);
3386 ASSERT(input->IsRegister() && input->Equals(instr->result())); 3405 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3387 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); 3406 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3388 __ SmiTag(ToRegister(input)); 3407 __ SmiTag(ToRegister(input));
3389 } 3408 }
(...skipping 449 matching lines...)
3839 } 3858 }
3840 3859
3841 // Check the holder map. 3860 // Check the holder map.
3842 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), 3861 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3843 Handle<Map>(current_prototype->map())); 3862 Handle<Map>(current_prototype->map()));
3844 DeoptimizeIf(not_equal, instr->environment()); 3863 DeoptimizeIf(not_equal, instr->environment());
3845 } 3864 }
3846 3865
3847 3866
3848 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { 3867 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
3868 ASSERT(ToRegister(instr->context()).is(esi));
3849 // Setup the parameters to the stub/runtime call. 3869 // Setup the parameters to the stub/runtime call.
3850 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 3870 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3851 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); 3871 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3852 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3872 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3853 __ push(Immediate(instr->hydrogen()->constant_elements())); 3873 __ push(Immediate(instr->hydrogen()->constant_elements()));
3854 3874
3855 // Pick the right runtime function or stub to call. 3875 // Pick the right runtime function or stub to call.
3856 int length = instr->hydrogen()->length(); 3876 int length = instr->hydrogen()->length();
3857 if (instr->hydrogen()->IsCopyOnWrite()) { 3877 if (instr->hydrogen()->IsCopyOnWrite()) {
3858 ASSERT(instr->hydrogen()->depth() == 1); 3878 ASSERT(instr->hydrogen()->depth() == 1);
3859 FastCloneShallowArrayStub::Mode mode = 3879 FastCloneShallowArrayStub::Mode mode =
3860 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; 3880 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
3861 FastCloneShallowArrayStub stub(mode, length); 3881 FastCloneShallowArrayStub stub(mode, length);
3862 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); 3882 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3863 } else if (instr->hydrogen()->depth() > 1) { 3883 } else if (instr->hydrogen()->depth() > 1) {
3864 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, RESTORE_CONTEXT); 3884 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
3865 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { 3885 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
3866 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, RESTORE_CONTEXT); 3886 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
3867 } else { 3887 } else {
3868 FastCloneShallowArrayStub::Mode mode = 3888 FastCloneShallowArrayStub::Mode mode =
3869 FastCloneShallowArrayStub::CLONE_ELEMENTS; 3889 FastCloneShallowArrayStub::CLONE_ELEMENTS;
3870 FastCloneShallowArrayStub stub(mode, length); 3890 FastCloneShallowArrayStub stub(mode, length);
3871 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); 3891 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3872 } 3892 }
3873 } 3893 }
3874 3894
3875 3895
3876 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { 3896 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
3877 ASSERT(ToRegister(instr->context()).is(esi)); 3897 ASSERT(ToRegister(instr->context()).is(esi));
3878 // Setup the parameters to the stub/runtime call. 3898 // Setup the parameters to the stub/runtime call.
3879 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 3899 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3880 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); 3900 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3881 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3901 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3882 __ push(Immediate(instr->hydrogen()->constant_properties())); 3902 __ push(Immediate(instr->hydrogen()->constant_properties()));
3883 int flags = instr->hydrogen()->fast_elements() 3903 int flags = instr->hydrogen()->fast_elements()
3884 ? ObjectLiteral::kFastElements 3904 ? ObjectLiteral::kFastElements
3885 : ObjectLiteral::kNoFlags; 3905 : ObjectLiteral::kNoFlags;
3886 flags |= instr->hydrogen()->has_function() 3906 flags |= instr->hydrogen()->has_function()
3887 ? ObjectLiteral::kHasFunction 3907 ? ObjectLiteral::kHasFunction
3888 : ObjectLiteral::kNoFlags; 3908 : ObjectLiteral::kNoFlags;
3889 __ push(Immediate(Smi::FromInt(flags))); 3909 __ push(Immediate(Smi::FromInt(flags)));
3890 3910
3891 // Pick the right runtime function to call. 3911 // Pick the right runtime function to call.
3892 if (instr->hydrogen()->depth() > 1) { 3912 if (instr->hydrogen()->depth() > 1) {
3893 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr, CONTEXT_ADJUSTED); 3913 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
3894 } else { 3914 } else {
3895 CallRuntime(Runtime::kCreateObjectLiteralShallow, 3915 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
3896 4,
3897 instr,
3898 CONTEXT_ADJUSTED);
3899 } 3916 }
3900 } 3917 }
3901 3918
3902 3919
3903 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { 3920 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
3904 ASSERT(ToRegister(instr->InputAt(0)).is(eax)); 3921 ASSERT(ToRegister(instr->InputAt(0)).is(eax));
3905 __ push(eax); 3922 __ push(eax);
3906 CallRuntime(Runtime::kToFastProperties, 1, instr, CONTEXT_ADJUSTED); 3923 CallRuntime(Runtime::kToFastProperties, 1, instr);
3907 } 3924 }
3908 3925
3909 3926
3910 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { 3927 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
3928 ASSERT(ToRegister(instr->context()).is(esi));
3911 Label materialized; 3929 Label materialized;
3912 // Registers will be used as follows: 3930 // Registers will be used as follows:
3913 // edi = JS function. 3931 // edi = JS function.
3914 // ecx = literals array. 3932 // ecx = literals array.
3915 // ebx = regexp literal. 3933 // ebx = regexp literal.
3916 // eax = regexp literal clone. 3934 // eax = regexp literal clone.
3935 // esi = context.
3917 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 3936 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3918 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset)); 3937 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
3919 int literal_offset = FixedArray::kHeaderSize + 3938 int literal_offset = FixedArray::kHeaderSize +
3920 instr->hydrogen()->literal_index() * kPointerSize; 3939 instr->hydrogen()->literal_index() * kPointerSize;
3921 __ mov(ebx, FieldOperand(ecx, literal_offset)); 3940 __ mov(ebx, FieldOperand(ecx, literal_offset));
3922 __ cmp(ebx, factory()->undefined_value()); 3941 __ cmp(ebx, factory()->undefined_value());
3923 __ j(not_equal, &materialized, Label::kNear); 3942 __ j(not_equal, &materialized, Label::kNear);
3924 3943
3925 // Create regexp literal using runtime function 3944 // Create regexp literal using runtime function
3926 // Result will be in eax. 3945 // Result will be in eax.
3927 __ push(ecx); 3946 __ push(ecx);
3928 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 3947 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3929 __ push(Immediate(instr->hydrogen()->pattern())); 3948 __ push(Immediate(instr->hydrogen()->pattern()));
3930 __ push(Immediate(instr->hydrogen()->flags())); 3949 __ push(Immediate(instr->hydrogen()->flags()));
3931 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, RESTORE_CONTEXT); 3950 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
3932 __ mov(ebx, eax); 3951 __ mov(ebx, eax);
3933 3952
3934 __ bind(&materialized); 3953 __ bind(&materialized);
3935 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 3954 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3936 Label allocated, runtime_allocate; 3955 Label allocated, runtime_allocate;
3937 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); 3956 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
3938 __ jmp(&allocated); 3957 __ jmp(&allocated);
3939 3958
3940 __ bind(&runtime_allocate); 3959 __ bind(&runtime_allocate);
3941 __ push(ebx); 3960 __ push(ebx);
3942 __ push(Immediate(Smi::FromInt(size))); 3961 __ push(Immediate(Smi::FromInt(size)));
3943 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, RESTORE_CONTEXT); 3962 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
3944 __ pop(ebx); 3963 __ pop(ebx);
3945 3964
3946 __ bind(&allocated); 3965 __ bind(&allocated);
3947 // Copy the content into the newly allocated memory. 3966 // Copy the content into the newly allocated memory.
3948 // (Unroll copy loop once for better throughput). 3967 // (Unroll copy loop once for better throughput).
3949 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { 3968 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3950 __ mov(edx, FieldOperand(ebx, i)); 3969 __ mov(edx, FieldOperand(ebx, i));
3951 __ mov(ecx, FieldOperand(ebx, i + kPointerSize)); 3970 __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
3952 __ mov(FieldOperand(eax, i), edx); 3971 __ mov(FieldOperand(eax, i), edx);
3953 __ mov(FieldOperand(eax, i + kPointerSize), ecx); 3972 __ mov(FieldOperand(eax, i + kPointerSize), ecx);
3954 } 3973 }
3955 if ((size % (2 * kPointerSize)) != 0) { 3974 if ((size % (2 * kPointerSize)) != 0) {
3956 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); 3975 __ mov(edx, FieldOperand(ebx, size - kPointerSize));
3957 __ mov(FieldOperand(eax, size - kPointerSize), edx); 3976 __ mov(FieldOperand(eax, size - kPointerSize), edx);
3958 } 3977 }
3959 } 3978 }
3960 3979
3961 3980
3962 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { 3981 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
3982 ASSERT(ToRegister(instr->context()).is(esi));
3963 // Use the fast case closure allocation code that allocates in new 3983 // Use the fast case closure allocation code that allocates in new
3964 // space for nested functions that don't need literals cloning. 3984 // space for nested functions that don't need literals cloning.
3965 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); 3985 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
3966 bool pretenure = instr->hydrogen()->pretenure(); 3986 bool pretenure = instr->hydrogen()->pretenure();
3967 if (!pretenure && shared_info->num_literals() == 0) { 3987 if (!pretenure && shared_info->num_literals() == 0) {
3968 FastNewClosureStub stub( 3988 FastNewClosureStub stub(
3969 shared_info->strict_mode() ? kStrictMode : kNonStrictMode); 3989 shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
3970 __ push(Immediate(shared_info)); 3990 __ push(Immediate(shared_info));
3971 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); 3991 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3972 } else { 3992 } else {
3973 __ push(Operand(ebp, StandardFrameConstants::kContextOffset)); 3993 __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
3974 __ push(Immediate(shared_info)); 3994 __ push(Immediate(shared_info));
3975 __ push(Immediate(pretenure 3995 __ push(Immediate(pretenure
3976 ? factory()->true_value() 3996 ? factory()->true_value()
3977 : factory()->false_value())); 3997 : factory()->false_value()));
3978 CallRuntime(Runtime::kNewClosure, 3, instr, RESTORE_CONTEXT); 3998 CallRuntime(Runtime::kNewClosure, 3, instr);
3979 } 3999 }
3980 } 4000 }
3981 4001
3982 4002
3983 void LCodeGen::DoTypeof(LTypeof* instr) { 4003 void LCodeGen::DoTypeof(LTypeof* instr) {
3984 LOperand* input = instr->InputAt(0); 4004 LOperand* input = instr->InputAt(1);
3985 if (input->IsConstantOperand()) { 4005 if (input->IsConstantOperand()) {
3986 __ push(ToImmediate(input)); 4006 __ push(ToImmediate(input));
3987 } else { 4007 } else {
3988 __ push(ToOperand(input)); 4008 __ push(ToOperand(input));
3989 } 4009 }
3990 CallRuntime(Runtime::kTypeof, 1, instr, RESTORE_CONTEXT); 4010 CallRuntime(Runtime::kTypeof, 1, instr);
3991 } 4011 }
3992 4012
3993 4013
3994 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { 4014 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
3995 Register input = ToRegister(instr->InputAt(0)); 4015 Register input = ToRegister(instr->InputAt(0));
3996 int true_block = chunk_->LookupDestination(instr->true_block_id()); 4016 int true_block = chunk_->LookupDestination(instr->true_block_id());
3997 int false_block = chunk_->LookupDestination(instr->false_block_id()); 4017 int false_block = chunk_->LookupDestination(instr->false_block_id());
3998 Label* true_label = chunk_->GetAssemblyLabel(true_block); 4018 Label* true_label = chunk_->GetAssemblyLabel(true_block);
3999 Label* false_label = chunk_->GetAssemblyLabel(false_block); 4019 Label* false_label = chunk_->GetAssemblyLabel(false_block);
4000 4020
(...skipping 122 matching lines...)
4123 LPointerMap* pointers = instr->pointer_map(); 4143 LPointerMap* pointers = instr->pointer_map();
4124 LEnvironment* env = instr->deoptimization_environment(); 4144 LEnvironment* env = instr->deoptimization_environment();
4125 RecordPosition(pointers->position()); 4145 RecordPosition(pointers->position());
4126 RegisterEnvironmentForDeoptimization(env); 4146 RegisterEnvironmentForDeoptimization(env);
4127 // Create safepoint generator that will also ensure enough space in the 4147 // Create safepoint generator that will also ensure enough space in the
4128 // reloc info for patching in deoptimization (since this is invoking a 4148 // reloc info for patching in deoptimization (since this is invoking a
4129 // builtin) 4149 // builtin)
4130 SafepointGenerator safepoint_generator(this, 4150 SafepointGenerator safepoint_generator(this,
4131 pointers, 4151 pointers,
4132 env->deoptimization_index()); 4152 env->deoptimization_index());
4133 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4134 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); 4153 __ push(Immediate(Smi::FromInt(strict_mode_flag())));
4135 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator); 4154 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
4136 } 4155 }
4137 4156
4138 4157
4139 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { 4158 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
4140 PushSafepointRegistersScope scope(this); 4159 PushSafepointRegistersScope scope(this);
4141 CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr); 4160 CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr, instr->context());
4142 } 4161 }
4143 4162
4144 4163
4145 void LCodeGen::DoStackCheck(LStackCheck* instr) { 4164 void LCodeGen::DoStackCheck(LStackCheck* instr) {
4146 class DeferredStackCheck: public LDeferredCode { 4165 class DeferredStackCheck: public LDeferredCode {
4147 public: 4166 public:
4148 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) 4167 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
4149 : LDeferredCode(codegen), instr_(instr) { } 4168 : LDeferredCode(codegen), instr_(instr) { }
4150 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } 4169 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
4151 private: 4170 private:
4152 LStackCheck* instr_; 4171 LStackCheck* instr_;
4153 }; 4172 };
4154 4173
4155 if (instr->hydrogen()->is_function_entry()) { 4174 if (instr->hydrogen()->is_function_entry()) {
4156 // Perform stack overflow check. 4175 // Perform stack overflow check.
4157 Label done; 4176 Label done;
4158 ExternalReference stack_limit = 4177 ExternalReference stack_limit =
4159 ExternalReference::address_of_stack_limit(isolate()); 4178 ExternalReference::address_of_stack_limit(isolate());
4160 __ cmp(esp, Operand::StaticVariable(stack_limit)); 4179 __ cmp(esp, Operand::StaticVariable(stack_limit));
4161 __ j(above_equal, &done, Label::kNear); 4180 __ j(above_equal, &done, Label::kNear);
4162 4181
4182 ASSERT(instr->context()->IsRegister());
4183 ASSERT(ToRegister(instr->context()).is(esi));
4163 StackCheckStub stub; 4184 StackCheckStub stub;
4164 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT); 4185 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4165 __ bind(&done); 4186 __ bind(&done);
4166 } else { 4187 } else {
4167 ASSERT(instr->hydrogen()->is_backwards_branch()); 4188 ASSERT(instr->hydrogen()->is_backwards_branch());
4168 // Perform stack overflow check if this goto needs it before jumping. 4189 // Perform stack overflow check if this goto needs it before jumping.
4169 DeferredStackCheck* deferred_stack_check = 4190 DeferredStackCheck* deferred_stack_check =
4170 new DeferredStackCheck(this, instr); 4191 new DeferredStackCheck(this, instr);
4171 ExternalReference stack_limit = 4192 ExternalReference stack_limit =
4172 ExternalReference::address_of_stack_limit(isolate()); 4193 ExternalReference::address_of_stack_limit(isolate());
4173 __ cmp(esp, Operand::StaticVariable(stack_limit)); 4194 __ cmp(esp, Operand::StaticVariable(stack_limit));
4174 __ j(below, deferred_stack_check->entry()); 4195 __ j(below, deferred_stack_check->entry());
(...skipping 37 matching lines...)
4212 LPointerMap* pointers = instr->pointer_map(); 4233 LPointerMap* pointers = instr->pointer_map();
4213 LEnvironment* env = instr->deoptimization_environment(); 4234 LEnvironment* env = instr->deoptimization_environment();
4214 RecordPosition(pointers->position()); 4235 RecordPosition(pointers->position());
4215 RegisterEnvironmentForDeoptimization(env); 4236 RegisterEnvironmentForDeoptimization(env);
4216 // Create safepoint generator that will also ensure enough space in the 4237 // Create safepoint generator that will also ensure enough space in the
4217 // reloc info for patching in deoptimization (since this is invoking a 4238 // reloc info for patching in deoptimization (since this is invoking a
4218 // builtin) 4239 // builtin)
4219 SafepointGenerator safepoint_generator(this, 4240 SafepointGenerator safepoint_generator(this,
4220 pointers, 4241 pointers,
4221 env->deoptimization_index()); 4242 env->deoptimization_index());
4222 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4223 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); 4243 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
4224 } 4244 }
4225 4245
4226 4246
4227 #undef __ 4247 #undef __
4228 4248
4229 } } // namespace v8::internal 4249 } } // namespace v8::internal
4230 4250
4231 #endif // V8_TARGET_ARCH_IA32 4251 #endif // V8_TARGET_ARCH_IA32