Chromium Code Reviews

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 6713074: Require an isolate parameter for most external reference creation to (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Further cleanup Created 9 years, 9 months ago
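
The pattern this patch applies throughout the file: call sites stop relying on the thread-local Isolate::Current() lookup and instead pass the isolate explicitly, typically as masm->isolate(), when constructing an ExternalReference or querying CPU features (e.g. ExternalReference(IC_Utility(IC::kBinaryOp_Patch), masm->isolate()) and masm->isolate()->cpu_features()->IsSupported(SSE2) in the diff below). The toy snippet that follows is not V8 code; all types are simplified stand-ins, shown only to sketch what threading the isolate through as an explicit parameter looks like.

#include <cassert>

// Simplified stand-in for v8::internal::Isolate (per-thread VM state).
struct Isolate {};

// Stand-in for ExternalReference. Before this patch the constructor had no
// isolate parameter and looked up Isolate::Current() internally; the patched
// form takes the isolate explicitly.
struct ExternalReference {
  ExternalReference(void* address, Isolate* isolate)
      : address(address), isolate(isolate) {
    assert(isolate != nullptr);
  }
  void* address;
  Isolate* isolate;
};

// Stand-in for MacroAssembler, which already carries the isolate, so code
// generators can write masm->isolate() instead of Isolate::Current().
class MacroAssembler {
 public:
  explicit MacroAssembler(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate() const { return isolate_; }
 private:
  Isolate* isolate_;
};

static int dummy_ic_entry;  // placeholder target address for the reference

int main() {
  Isolate isolate;
  MacroAssembler masm(&isolate);
  // Patched style: the isolate is passed in by the caller.
  ExternalReference ref(&dummy_ic_entry, masm.isolate());
  return ref.isolate == &isolate ? 0 : 1;
}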
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 745 matching lines...)
756 // overflowed the smi range). 756 // overflowed the smi range).
757 switch (op_) { 757 switch (op_) {
758 case Token::SHL: { 758 case Token::SHL: {
759 Comment perform_float(masm, "-- Perform float operation on smis"); 759 Comment perform_float(masm, "-- Perform float operation on smis");
760 __ bind(&use_fp_on_smis); 760 __ bind(&use_fp_on_smis);
761 if (runtime_operands_type_ != BinaryOpIC::UNINIT_OR_SMI) { 761 if (runtime_operands_type_ != BinaryOpIC::UNINIT_OR_SMI) {
762 // Result we want is in left == edx, so we can put the allocated heap 762 // Result we want is in left == edx, so we can put the allocated heap
763 // number in eax. 763 // number in eax.
764 __ AllocateHeapNumber(eax, ecx, ebx, slow); 764 __ AllocateHeapNumber(eax, ecx, ebx, slow);
765 // Store the result in the HeapNumber and return. 765 // Store the result in the HeapNumber and return.
766 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 766 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
767 CpuFeatures::Scope use_sse2(SSE2); 767 CpuFeatures::Scope use_sse2(SSE2);
768 __ cvtsi2sd(xmm0, Operand(left)); 768 __ cvtsi2sd(xmm0, Operand(left));
769 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 769 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
770 } else { 770 } else {
771 // It's OK to overwrite the right argument on the stack because we 771 // It's OK to overwrite the right argument on the stack because we
772 // are about to return. 772 // are about to return.
773 __ mov(Operand(esp, 1 * kPointerSize), left); 773 __ mov(Operand(esp, 1 * kPointerSize), left);
774 __ fild_s(Operand(esp, 1 * kPointerSize)); 774 __ fild_s(Operand(esp, 1 * kPointerSize));
775 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); 775 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
776 } 776 }
(...skipping 29 matching lines...)
806 // Left was clobbered but a copy is in edi. Right is in ebx for 806 // Left was clobbered but a copy is in edi. Right is in ebx for
807 // division. 807 // division.
808 __ mov(edx, edi); 808 __ mov(edx, edi);
809 __ mov(eax, right); 809 __ mov(eax, right);
810 break; 810 break;
811 default: UNREACHABLE(); 811 default: UNREACHABLE();
812 break; 812 break;
813 } 813 }
814 if (runtime_operands_type_ != BinaryOpIC::UNINIT_OR_SMI) { 814 if (runtime_operands_type_ != BinaryOpIC::UNINIT_OR_SMI) {
815 __ AllocateHeapNumber(ecx, ebx, no_reg, slow); 815 __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
816 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 816 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
817 CpuFeatures::Scope use_sse2(SSE2); 817 CpuFeatures::Scope use_sse2(SSE2);
818 FloatingPointHelper::LoadSSE2Smis(masm, ebx); 818 FloatingPointHelper::LoadSSE2Smis(masm, ebx);
819 switch (op_) { 819 switch (op_) {
820 case Token::ADD: __ addsd(xmm0, xmm1); break; 820 case Token::ADD: __ addsd(xmm0, xmm1); break;
821 case Token::SUB: __ subsd(xmm0, xmm1); break; 821 case Token::SUB: __ subsd(xmm0, xmm1); break;
822 case Token::MUL: __ mulsd(xmm0, xmm1); break; 822 case Token::MUL: __ mulsd(xmm0, xmm1); break;
823 case Token::DIV: __ divsd(xmm0, xmm1); break; 823 case Token::DIV: __ divsd(xmm0, xmm1); break;
824 default: UNREACHABLE(); 824 default: UNREACHABLE();
825 } 825 }
826 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0); 826 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
(...skipping 83 matching lines...)
910 // Execution reaches this point when the first non-smi argument occurs 910 // Execution reaches this point when the first non-smi argument occurs
911 // (and only if smi code is generated). This is the right moment to 911 // (and only if smi code is generated). This is the right moment to
912 // patch to HEAP_NUMBERS state. The transition is attempted only for 912 // patch to HEAP_NUMBERS state. The transition is attempted only for
913 // the four basic operations. The stub stays in the DEFAULT state 913 // the four basic operations. The stub stays in the DEFAULT state
914 // forever for all other operations (also if smi code is skipped). 914 // forever for all other operations (also if smi code is skipped).
915 GenerateTypeTransition(masm); 915 GenerateTypeTransition(masm);
916 break; 916 break;
917 } 917 }
918 918
919 Label not_floats; 919 Label not_floats;
920 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 920 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
921 CpuFeatures::Scope use_sse2(SSE2); 921 CpuFeatures::Scope use_sse2(SSE2);
922 if (static_operands_type_.IsNumber()) { 922 if (static_operands_type_.IsNumber()) {
923 if (FLAG_debug_code) { 923 if (FLAG_debug_code) {
924 // Assert at runtime that inputs are only numbers. 924 // Assert at runtime that inputs are only numbers.
925 __ AbortIfNotNumber(edx); 925 __ AbortIfNotNumber(edx);
926 __ AbortIfNotNumber(eax); 926 __ AbortIfNotNumber(eax);
927 } 927 }
928 if (static_operands_type_.IsSmi()) { 928 if (static_operands_type_.IsSmi()) {
929 if (FLAG_debug_code) { 929 if (FLAG_debug_code) {
930 __ AbortIfNotSmi(edx); 930 __ AbortIfNotSmi(edx);
(...skipping 113 matching lines...)
1044 __ test(eax, Immediate(kSmiTagMask)); 1044 __ test(eax, Immediate(kSmiTagMask));
1045 __ j(not_zero, &skip_allocation, not_taken); 1045 __ j(not_zero, &skip_allocation, not_taken);
1046 // Fall through! 1046 // Fall through!
1047 case NO_OVERWRITE: 1047 case NO_OVERWRITE:
1048 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); 1048 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
1049 __ bind(&skip_allocation); 1049 __ bind(&skip_allocation);
1050 break; 1050 break;
1051 default: UNREACHABLE(); 1051 default: UNREACHABLE();
1052 } 1052 }
1053 // Store the result in the HeapNumber and return. 1053 // Store the result in the HeapNumber and return.
1054 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 1054 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
1055 CpuFeatures::Scope use_sse2(SSE2); 1055 CpuFeatures::Scope use_sse2(SSE2);
1056 __ cvtsi2sd(xmm0, Operand(ebx)); 1056 __ cvtsi2sd(xmm0, Operand(ebx));
1057 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 1057 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1058 } else { 1058 } else {
1059 __ mov(Operand(esp, 1 * kPointerSize), ebx); 1059 __ mov(Operand(esp, 1 * kPointerSize), ebx);
1060 __ fild_s(Operand(esp, 1 * kPointerSize)); 1060 __ fild_s(Operand(esp, 1 * kPointerSize));
1061 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); 1061 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1062 } 1062 }
1063 GenerateReturn(masm); 1063 GenerateReturn(masm);
1064 } 1064 }
(...skipping 197 matching lines...)
1262 // encoded into the key, the encoding is opaque, so push them too. 1262 // encoded into the key, the encoding is opaque, so push them too.
1263 __ push(Immediate(Smi::FromInt(MinorKey()))); 1263 __ push(Immediate(Smi::FromInt(MinorKey())));
1264 __ push(Immediate(Smi::FromInt(op_))); 1264 __ push(Immediate(Smi::FromInt(op_)));
1265 __ push(Immediate(Smi::FromInt(runtime_operands_type_))); 1265 __ push(Immediate(Smi::FromInt(runtime_operands_type_)));
1266 1266
1267 __ push(ecx); // Push return address. 1267 __ push(ecx); // Push return address.
1268 1268
1269 // Patch the caller to an appropriate specialized stub and return the 1269 // Patch the caller to an appropriate specialized stub and return the
1270 // operation result to the caller of the stub. 1270 // operation result to the caller of the stub.
1271 __ TailCallExternalReference( 1271 __ TailCallExternalReference(
1272 ExternalReference(IC_Utility(IC::kBinaryOp_Patch)), 1272 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), masm->isolate()),
1273 5, 1273 5,
1274 1); 1274 1);
1275 } 1275 }
1276 1276
1277 1277
1278 Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) { 1278 Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
1279 GenericBinaryOpStub stub(key, type_info); 1279 GenericBinaryOpStub stub(key, type_info);
1280 return stub.GetCode(); 1280 return stub.GetCode();
1281 } 1281 }
1282 1282
(...skipping 15 matching lines...)
1298 // encoded into the key, the encoding is opaque, so push them too. 1298 // encoded into the key, the encoding is opaque, so push them too.
1299 __ push(Immediate(Smi::FromInt(MinorKey()))); 1299 __ push(Immediate(Smi::FromInt(MinorKey())));
1300 __ push(Immediate(Smi::FromInt(op_))); 1300 __ push(Immediate(Smi::FromInt(op_)));
1301 __ push(Immediate(Smi::FromInt(operands_type_))); 1301 __ push(Immediate(Smi::FromInt(operands_type_)));
1302 1302
1303 __ push(ecx); // Push return address. 1303 __ push(ecx); // Push return address.
1304 1304
1305 // Patch the caller to an appropriate specialized stub and return the 1305 // Patch the caller to an appropriate specialized stub and return the
1306 // operation result to the caller of the stub. 1306 // operation result to the caller of the stub.
1307 __ TailCallExternalReference( 1307 __ TailCallExternalReference(
1308 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)), 1308 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch),
1309 masm->isolate()),
1309 5, 1310 5,
1310 1); 1311 1);
1311 } 1312 }
1312 1313
1313 1314
1314 // Prepare for a type transition runtime call when the args are already on 1315 // Prepare for a type transition runtime call when the args are already on
1315 // the stack, under the return address. 1316 // the stack, under the return address.
1316 void TypeRecordingBinaryOpStub::GenerateTypeTransitionWithSavedArgs( 1317 void TypeRecordingBinaryOpStub::GenerateTypeTransitionWithSavedArgs(
1317 MacroAssembler* masm) { 1318 MacroAssembler* masm) {
1318 __ pop(ecx); // Save return address. 1319 __ pop(ecx); // Save return address.
1319 // Left and right arguments are already on top of the stack. 1320 // Left and right arguments are already on top of the stack.
1320 // Push this stub's key. Although the operation and the type info are 1321 // Push this stub's key. Although the operation and the type info are
1321 // encoded into the key, the encoding is opaque, so push them too. 1322 // encoded into the key, the encoding is opaque, so push them too.
1322 __ push(Immediate(Smi::FromInt(MinorKey()))); 1323 __ push(Immediate(Smi::FromInt(MinorKey())));
1323 __ push(Immediate(Smi::FromInt(op_))); 1324 __ push(Immediate(Smi::FromInt(op_)));
1324 __ push(Immediate(Smi::FromInt(operands_type_))); 1325 __ push(Immediate(Smi::FromInt(operands_type_)));
1325 1326
1326 __ push(ecx); // Push return address. 1327 __ push(ecx); // Push return address.
1327 1328
1328 // Patch the caller to an appropriate specialized stub and return the 1329 // Patch the caller to an appropriate specialized stub and return the
1329 // operation result to the caller of the stub. 1330 // operation result to the caller of the stub.
1330 __ TailCallExternalReference( 1331 __ TailCallExternalReference(
1331 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)), 1332 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch),
1333 masm->isolate()),
1332 5, 1334 5,
1333 1); 1335 1);
1334 } 1336 }
1335 1337
1336 1338
1337 void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) { 1339 void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) {
1338 switch (operands_type_) { 1340 switch (operands_type_) {
1339 case TRBinaryOpIC::UNINITIALIZED: 1341 case TRBinaryOpIC::UNINITIALIZED:
1340 GenerateTypeTransition(masm); 1342 GenerateTypeTransition(masm);
1341 break; 1343 break;
(...skipping 297 matching lines...)
1639 } else { 1641 } else {
1640 ASSERT(allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS); 1642 ASSERT(allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS);
1641 switch (op_) { 1643 switch (op_) {
1642 case Token::SHL: { 1644 case Token::SHL: {
1643 Comment perform_float(masm, "-- Perform float operation on smis"); 1645 Comment perform_float(masm, "-- Perform float operation on smis");
1644 __ bind(&use_fp_on_smis); 1646 __ bind(&use_fp_on_smis);
1645 // Result we want is in left == edx, so we can put the allocated heap 1647 // Result we want is in left == edx, so we can put the allocated heap
1646 // number in eax. 1648 // number in eax.
1647 __ AllocateHeapNumber(eax, ecx, ebx, slow); 1649 __ AllocateHeapNumber(eax, ecx, ebx, slow);
1648 // Store the result in the HeapNumber and return. 1650 // Store the result in the HeapNumber and return.
1649 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 1651 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
1650 CpuFeatures::Scope use_sse2(SSE2); 1652 CpuFeatures::Scope use_sse2(SSE2);
1651 __ cvtsi2sd(xmm0, Operand(left)); 1653 __ cvtsi2sd(xmm0, Operand(left));
1652 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 1654 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1653 } else { 1655 } else {
1654 // It's OK to overwrite the right argument on the stack because we 1656 // It's OK to overwrite the right argument on the stack because we
1655 // are about to return. 1657 // are about to return.
1656 __ mov(Operand(esp, 1 * kPointerSize), left); 1658 __ mov(Operand(esp, 1 * kPointerSize), left);
1657 __ fild_s(Operand(esp, 1 * kPointerSize)); 1659 __ fild_s(Operand(esp, 1 * kPointerSize));
1658 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); 1660 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1659 } 1661 }
(...skipping 24 matching lines...)
1684 case Token::DIV: 1686 case Token::DIV:
1685 // Left was clobbered but a copy is in edi. Right is in ebx for 1687 // Left was clobbered but a copy is in edi. Right is in ebx for
1686 // division. 1688 // division.
1687 __ mov(edx, edi); 1689 __ mov(edx, edi);
1688 __ mov(eax, right); 1690 __ mov(eax, right);
1689 break; 1691 break;
1690 default: UNREACHABLE(); 1692 default: UNREACHABLE();
1691 break; 1693 break;
1692 } 1694 }
1693 __ AllocateHeapNumber(ecx, ebx, no_reg, slow); 1695 __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
1694 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 1696 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
1695 CpuFeatures::Scope use_sse2(SSE2); 1697 CpuFeatures::Scope use_sse2(SSE2);
1696 FloatingPointHelper::LoadSSE2Smis(masm, ebx); 1698 FloatingPointHelper::LoadSSE2Smis(masm, ebx);
1697 switch (op_) { 1699 switch (op_) {
1698 case Token::ADD: __ addsd(xmm0, xmm1); break; 1700 case Token::ADD: __ addsd(xmm0, xmm1); break;
1699 case Token::SUB: __ subsd(xmm0, xmm1); break; 1701 case Token::SUB: __ subsd(xmm0, xmm1); break;
1700 case Token::MUL: __ mulsd(xmm0, xmm1); break; 1702 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1701 case Token::DIV: __ divsd(xmm0, xmm1); break; 1703 case Token::DIV: __ divsd(xmm0, xmm1); break;
1702 default: UNREACHABLE(); 1704 default: UNREACHABLE();
1703 } 1705 }
1704 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0); 1706 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
(...skipping 111 matching lines...)
1816 ASSERT(operands_type_ == TRBinaryOpIC::INT32); 1818 ASSERT(operands_type_ == TRBinaryOpIC::INT32);
1817 1819
1818 // Floating point case. 1820 // Floating point case.
1819 switch (op_) { 1821 switch (op_) {
1820 case Token::ADD: 1822 case Token::ADD:
1821 case Token::SUB: 1823 case Token::SUB:
1822 case Token::MUL: 1824 case Token::MUL:
1823 case Token::DIV: { 1825 case Token::DIV: {
1824 Label not_floats; 1826 Label not_floats;
1825 Label not_int32; 1827 Label not_int32;
1826 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 1828 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
1827 CpuFeatures::Scope use_sse2(SSE2); 1829 CpuFeatures::Scope use_sse2(SSE2);
1828 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats); 1830 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
1829 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx); 1831 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
1830 switch (op_) { 1832 switch (op_) {
1831 case Token::ADD: __ addsd(xmm0, xmm1); break; 1833 case Token::ADD: __ addsd(xmm0, xmm1); break;
1832 case Token::SUB: __ subsd(xmm0, xmm1); break; 1834 case Token::SUB: __ subsd(xmm0, xmm1); break;
1833 case Token::MUL: __ mulsd(xmm0, xmm1); break; 1835 case Token::MUL: __ mulsd(xmm0, xmm1); break;
1834 case Token::DIV: __ divsd(xmm0, xmm1); break; 1836 case Token::DIV: __ divsd(xmm0, xmm1); break;
1835 default: UNREACHABLE(); 1837 default: UNREACHABLE();
1836 } 1838 }
(...skipping 100 matching lines...)
1937 __ test(eax, Immediate(kSmiTagMask)); 1939 __ test(eax, Immediate(kSmiTagMask));
1938 __ j(not_zero, &skip_allocation, not_taken); 1940 __ j(not_zero, &skip_allocation, not_taken);
1939 // Fall through! 1941 // Fall through!
1940 case NO_OVERWRITE: 1942 case NO_OVERWRITE:
1941 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); 1943 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
1942 __ bind(&skip_allocation); 1944 __ bind(&skip_allocation);
1943 break; 1945 break;
1944 default: UNREACHABLE(); 1946 default: UNREACHABLE();
1945 } 1947 }
1946 // Store the result in the HeapNumber and return. 1948 // Store the result in the HeapNumber and return.
1947 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 1949 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
1948 CpuFeatures::Scope use_sse2(SSE2); 1950 CpuFeatures::Scope use_sse2(SSE2);
1949 __ cvtsi2sd(xmm0, Operand(ebx)); 1951 __ cvtsi2sd(xmm0, Operand(ebx));
1950 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 1952 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1951 } else { 1953 } else {
1952 __ mov(Operand(esp, 1 * kPointerSize), ebx); 1954 __ mov(Operand(esp, 1 * kPointerSize), ebx);
1953 __ fild_s(Operand(esp, 1 * kPointerSize)); 1955 __ fild_s(Operand(esp, 1 * kPointerSize));
1954 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); 1956 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1955 } 1957 }
1956 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. 1958 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1957 } 1959 }
(...skipping 59 matching lines...)
2017 Label call_runtime; 2019 Label call_runtime;
2018 ASSERT(operands_type_ == TRBinaryOpIC::HEAP_NUMBER); 2020 ASSERT(operands_type_ == TRBinaryOpIC::HEAP_NUMBER);
2019 2021
2020 // Floating point case. 2022 // Floating point case.
2021 switch (op_) { 2023 switch (op_) {
2022 case Token::ADD: 2024 case Token::ADD:
2023 case Token::SUB: 2025 case Token::SUB:
2024 case Token::MUL: 2026 case Token::MUL:
2025 case Token::DIV: { 2027 case Token::DIV: {
2026 Label not_floats; 2028 Label not_floats;
2027 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 2029 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
2028 CpuFeatures::Scope use_sse2(SSE2); 2030 CpuFeatures::Scope use_sse2(SSE2);
2029 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats); 2031 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
2030 2032
2031 switch (op_) { 2033 switch (op_) {
2032 case Token::ADD: __ addsd(xmm0, xmm1); break; 2034 case Token::ADD: __ addsd(xmm0, xmm1); break;
2033 case Token::SUB: __ subsd(xmm0, xmm1); break; 2035 case Token::SUB: __ subsd(xmm0, xmm1); break;
2034 case Token::MUL: __ mulsd(xmm0, xmm1); break; 2036 case Token::MUL: __ mulsd(xmm0, xmm1); break;
2035 case Token::DIV: __ divsd(xmm0, xmm1); break; 2037 case Token::DIV: __ divsd(xmm0, xmm1); break;
2036 default: UNREACHABLE(); 2038 default: UNREACHABLE();
2037 } 2039 }
(...skipping 82 matching lines...)
2120 __ test(eax, Immediate(kSmiTagMask)); 2122 __ test(eax, Immediate(kSmiTagMask));
2121 __ j(not_zero, &skip_allocation, not_taken); 2123 __ j(not_zero, &skip_allocation, not_taken);
2122 // Fall through! 2124 // Fall through!
2123 case NO_OVERWRITE: 2125 case NO_OVERWRITE:
2124 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); 2126 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
2125 __ bind(&skip_allocation); 2127 __ bind(&skip_allocation);
2126 break; 2128 break;
2127 default: UNREACHABLE(); 2129 default: UNREACHABLE();
2128 } 2130 }
2129 // Store the result in the HeapNumber and return. 2131 // Store the result in the HeapNumber and return.
2130 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 2132 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
2131 CpuFeatures::Scope use_sse2(SSE2); 2133 CpuFeatures::Scope use_sse2(SSE2);
2132 __ cvtsi2sd(xmm0, Operand(ebx)); 2134 __ cvtsi2sd(xmm0, Operand(ebx));
2133 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 2135 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2134 } else { 2136 } else {
2135 __ mov(Operand(esp, 1 * kPointerSize), ebx); 2137 __ mov(Operand(esp, 1 * kPointerSize), ebx);
2136 __ fild_s(Operand(esp, 1 * kPointerSize)); 2138 __ fild_s(Operand(esp, 1 * kPointerSize));
2137 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); 2139 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2138 } 2140 }
2139 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. 2141 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
2140 } 2142 }
(...skipping 80 matching lines...)
2221 2223
2222 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); 2224 GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
2223 2225
2224 // Floating point case. 2226 // Floating point case.
2225 switch (op_) { 2227 switch (op_) {
2226 case Token::ADD: 2228 case Token::ADD:
2227 case Token::SUB: 2229 case Token::SUB:
2228 case Token::MUL: 2230 case Token::MUL:
2229 case Token::DIV: { 2231 case Token::DIV: {
2230 Label not_floats; 2232 Label not_floats;
2231 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 2233 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
2232 CpuFeatures::Scope use_sse2(SSE2); 2234 CpuFeatures::Scope use_sse2(SSE2);
2233 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats); 2235 FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
2234 2236
2235 switch (op_) { 2237 switch (op_) {
2236 case Token::ADD: __ addsd(xmm0, xmm1); break; 2238 case Token::ADD: __ addsd(xmm0, xmm1); break;
2237 case Token::SUB: __ subsd(xmm0, xmm1); break; 2239 case Token::SUB: __ subsd(xmm0, xmm1); break;
2238 case Token::MUL: __ mulsd(xmm0, xmm1); break; 2240 case Token::MUL: __ mulsd(xmm0, xmm1); break;
2239 case Token::DIV: __ divsd(xmm0, xmm1); break; 2241 case Token::DIV: __ divsd(xmm0, xmm1); break;
2240 default: UNREACHABLE(); 2242 default: UNREACHABLE();
2241 } 2243 }
(...skipping 77 matching lines...)
2319 __ test(eax, Immediate(kSmiTagMask)); 2321 __ test(eax, Immediate(kSmiTagMask));
2320 __ j(not_zero, &skip_allocation, not_taken); 2322 __ j(not_zero, &skip_allocation, not_taken);
2321 // Fall through! 2323 // Fall through!
2322 case NO_OVERWRITE: 2324 case NO_OVERWRITE:
2323 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); 2325 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
2324 __ bind(&skip_allocation); 2326 __ bind(&skip_allocation);
2325 break; 2327 break;
2326 default: UNREACHABLE(); 2328 default: UNREACHABLE();
2327 } 2329 }
2328 // Store the result in the HeapNumber and return. 2330 // Store the result in the HeapNumber and return.
2329 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 2331 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
2330 CpuFeatures::Scope use_sse2(SSE2); 2332 CpuFeatures::Scope use_sse2(SSE2);
2331 __ cvtsi2sd(xmm0, Operand(ebx)); 2333 __ cvtsi2sd(xmm0, Operand(ebx));
2332 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 2334 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2333 } else { 2335 } else {
2334 __ mov(Operand(esp, 1 * kPointerSize), ebx); 2336 __ mov(Operand(esp, 1 * kPointerSize), ebx);
2335 __ fild_s(Operand(esp, 1 * kPointerSize)); 2337 __ fild_s(Operand(esp, 1 * kPointerSize));
2336 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); 2338 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2337 } 2339 }
2338 __ ret(2 * kPointerSize); 2340 __ ret(2 * kPointerSize);
2339 } 2341 }
(...skipping 177 matching lines...)
2517 __ cmp(Operand(ebx), Immediate(FACTORY->heap_number_map())); 2519 __ cmp(Operand(ebx), Immediate(FACTORY->heap_number_map()));
2518 __ j(not_equal, &runtime_call); 2520 __ j(not_equal, &runtime_call);
2519 // Input is a HeapNumber. Push it on the FPU stack and load its 2521 // Input is a HeapNumber. Push it on the FPU stack and load its
2520 // low and high words into ebx, edx. 2522 // low and high words into ebx, edx.
2521 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); 2523 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
2522 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset)); 2524 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
2523 __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset)); 2525 __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));
2524 2526
2525 __ bind(&loaded); 2527 __ bind(&loaded);
2526 } else { // UNTAGGED. 2528 } else { // UNTAGGED.
2527 if (Isolate::Current()->cpu_features()->IsSupported(SSE4_1)) { 2529 if (masm->isolate()->cpu_features()->IsSupported(SSE4_1)) {
2528 CpuFeatures::Scope sse4_scope(SSE4_1); 2530 CpuFeatures::Scope sse4_scope(SSE4_1);
2529 __ pextrd(Operand(edx), xmm1, 0x1); // copy xmm1[63..32] to edx. 2531 __ pextrd(Operand(edx), xmm1, 0x1); // copy xmm1[63..32] to edx.
2530 } else { 2532 } else {
2531 __ pshufd(xmm0, xmm1, 0x1); 2533 __ pshufd(xmm0, xmm1, 0x1);
2532 __ movd(Operand(edx), xmm0); 2534 __ movd(Operand(edx), xmm0);
2533 } 2535 }
2534 __ movd(Operand(ebx), xmm1); 2536 __ movd(Operand(ebx), xmm1);
2535 } 2537 }
2536 2538
2537 // ST[0] or xmm1 == double value 2539 // ST[0] or xmm1 == double value
(...skipping 10 matching lines...)
2548 __ sar(eax, 8); 2550 __ sar(eax, 8);
2549 __ xor_(ecx, Operand(eax)); 2551 __ xor_(ecx, Operand(eax));
2550 ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize)); 2552 ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
2551 __ and_(Operand(ecx), 2553 __ and_(Operand(ecx),
2552 Immediate(TranscendentalCache::SubCache::kCacheSize - 1)); 2554 Immediate(TranscendentalCache::SubCache::kCacheSize - 1));
2553 2555
2554 // ST[0] or xmm1 == double value. 2556 // ST[0] or xmm1 == double value.
2555 // ebx = low 32 bits of double value. 2557 // ebx = low 32 bits of double value.
2556 // edx = high 32 bits of double value. 2558 // edx = high 32 bits of double value.
2557 // ecx = TranscendentalCache::hash(double value). 2559 // ecx = TranscendentalCache::hash(double value).
2558 __ mov(eax, 2560 ExternalReference cache_array =
2559 Immediate(ExternalReference::transcendental_cache_array_address())); 2561 ExternalReference::transcendental_cache_array_address(masm->isolate());
2560 // Eax points to cache array. 2562 __ mov(eax, Immediate(cache_array));
2561 __ mov(eax, Operand(eax, type_ * sizeof( 2563 int cache_array_index =
2562 Isolate::Current()->transcendental_cache()->caches_[0]))); 2564 type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
2565 __ mov(eax, Operand(eax, cache_array_index));
2563 // Eax points to the cache for the type type_. 2566 // Eax points to the cache for the type type_.
2564 // If NULL, the cache hasn't been initialized yet, so go through runtime. 2567 // If NULL, the cache hasn't been initialized yet, so go through runtime.
2565 __ test(eax, Operand(eax)); 2568 __ test(eax, Operand(eax));
2566 __ j(zero, &runtime_call_clear_stack); 2569 __ j(zero, &runtime_call_clear_stack);
2567 #ifdef DEBUG 2570 #ifdef DEBUG
2568 // Check that the layout of cache elements match expectations. 2571 // Check that the layout of cache elements match expectations.
2569 { TranscendentalCache::SubCache::Element test_elem[2]; 2572 { TranscendentalCache::SubCache::Element test_elem[2];
2570 char* elem_start = reinterpret_cast<char*>(&test_elem[0]); 2573 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
2571 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]); 2574 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
2572 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0])); 2575 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
(...skipping 65 matching lines...)
2638 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); 2641 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
2639 __ LeaveInternalFrame(); 2642 __ LeaveInternalFrame();
2640 __ Ret(); 2643 __ Ret();
2641 } 2644 }
2642 2645
2643 // Call runtime, doing whatever allocation and cleanup is necessary. 2646 // Call runtime, doing whatever allocation and cleanup is necessary.
2644 if (tagged) { 2647 if (tagged) {
2645 __ bind(&runtime_call_clear_stack); 2648 __ bind(&runtime_call_clear_stack);
2646 __ fstp(0); 2649 __ fstp(0);
2647 __ bind(&runtime_call); 2650 __ bind(&runtime_call);
2648 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1); 2651 ExternalReference runtime =
2652 ExternalReference(RuntimeFunction(), masm->isolate());
2653 __ TailCallExternalReference(runtime, 1, 1);
2649 } else { // UNTAGGED. 2654 } else { // UNTAGGED.
2650 __ bind(&runtime_call_clear_stack); 2655 __ bind(&runtime_call_clear_stack);
2651 __ bind(&runtime_call); 2656 __ bind(&runtime_call);
2652 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache); 2657 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
2653 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1); 2658 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1);
2654 __ EnterInternalFrame(); 2659 __ EnterInternalFrame();
2655 __ push(eax); 2660 __ push(eax);
2656 __ CallRuntime(RuntimeFunction(), 1); 2661 __ CallRuntime(RuntimeFunction(), 1);
2657 __ LeaveInternalFrame(); 2662 __ LeaveInternalFrame();
2658 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); 2663 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
(...skipping 110 matching lines...)
2769 void IntegerConvert(MacroAssembler* masm, 2774 void IntegerConvert(MacroAssembler* masm,
2770 Register source, 2775 Register source,
2771 TypeInfo type_info, 2776 TypeInfo type_info,
2772 bool use_sse3, 2777 bool use_sse3,
2773 Label* conversion_failure) { 2778 Label* conversion_failure) {
2774 ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx)); 2779 ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
2775 Label done, right_exponent, normal_exponent; 2780 Label done, right_exponent, normal_exponent;
2776 Register scratch = ebx; 2781 Register scratch = ebx;
2777 Register scratch2 = edi; 2782 Register scratch2 = edi;
2778 if (type_info.IsInteger32() && 2783 if (type_info.IsInteger32() &&
2779 Isolate::Current()->cpu_features()->IsEnabled(SSE2)) { 2784 masm->isolate()->cpu_features()->IsEnabled(SSE2)) {
2780 CpuFeatures::Scope scope(SSE2); 2785 CpuFeatures::Scope scope(SSE2);
2781 __ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset)); 2786 __ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset));
2782 return; 2787 return;
2783 } 2788 }
2784 if (!type_info.IsInteger32() || !use_sse3) { 2789 if (!type_info.IsInteger32() || !use_sse3) {
2785 // Get exponent word. 2790 // Get exponent word.
2786 __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset)); 2791 __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
2787 // Get exponent alone in scratch2. 2792 // Get exponent alone in scratch2.
2788 __ mov(scratch2, scratch); 2793 __ mov(scratch2, scratch);
2789 __ and_(scratch2, HeapNumber::kExponentMask); 2794 __ and_(scratch2, HeapNumber::kExponentMask);
(...skipping 524 matching lines...)
3314 3319
3315 // Check if the operand is a heap number. 3320 // Check if the operand is a heap number.
3316 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); 3321 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
3317 __ cmp(edx, FACTORY->heap_number_map()); 3322 __ cmp(edx, FACTORY->heap_number_map());
3318 __ j(not_equal, &slow, not_taken); 3323 __ j(not_equal, &slow, not_taken);
3319 3324
3320 // Convert the heap number in eax to an untagged integer in ecx. 3325 // Convert the heap number in eax to an untagged integer in ecx.
3321 IntegerConvert(masm, 3326 IntegerConvert(masm,
3322 eax, 3327 eax,
3323 TypeInfo::Unknown(), 3328 TypeInfo::Unknown(),
3324 Isolate::Current()->cpu_features()->IsSupported(SSE3), 3329 masm->isolate()->cpu_features()->IsSupported(SSE3),
3325 &slow); 3330 &slow);
3326 3331
3327 // Do the bitwise operation and check if the result fits in a smi. 3332 // Do the bitwise operation and check if the result fits in a smi.
3328 NearLabel try_float; 3333 NearLabel try_float;
3329 __ not_(ecx); 3334 __ not_(ecx);
3330 __ cmp(ecx, 0xc0000000); 3335 __ cmp(ecx, 0xc0000000);
3331 __ j(sign, &try_float, not_taken); 3336 __ j(sign, &try_float, not_taken);
3332 3337
3333 // Tag the result as a smi and we're done. 3338 // Tag the result as a smi and we're done.
3334 STATIC_ASSERT(kSmiTagSize == 1); 3339 STATIC_ASSERT(kSmiTagSize == 1);
3335 __ lea(eax, Operand(ecx, times_2, kSmiTag)); 3340 __ lea(eax, Operand(ecx, times_2, kSmiTag));
3336 __ jmp(&done); 3341 __ jmp(&done);
3337 3342
3338 // Try to store the result in a heap number. 3343 // Try to store the result in a heap number.
3339 __ bind(&try_float); 3344 __ bind(&try_float);
3340 if (overwrite_ == UNARY_NO_OVERWRITE) { 3345 if (overwrite_ == UNARY_NO_OVERWRITE) {
3341 // Allocate a fresh heap number, but don't overwrite eax until 3346 // Allocate a fresh heap number, but don't overwrite eax until
3342 // we're sure we can do it without going through the slow case 3347 // we're sure we can do it without going through the slow case
3343 // that needs the value in eax. 3348 // that needs the value in eax.
3344 __ AllocateHeapNumber(ebx, edx, edi, &slow); 3349 __ AllocateHeapNumber(ebx, edx, edi, &slow);
3345 __ mov(eax, Operand(ebx)); 3350 __ mov(eax, Operand(ebx));
3346 } 3351 }
3347 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 3352 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
3348 CpuFeatures::Scope use_sse2(SSE2); 3353 CpuFeatures::Scope use_sse2(SSE2);
3349 __ cvtsi2sd(xmm0, Operand(ecx)); 3354 __ cvtsi2sd(xmm0, Operand(ecx));
3350 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 3355 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
3351 } else { 3356 } else {
3352 __ push(ecx); 3357 __ push(ecx);
3353 __ fild_s(Operand(esp, 0)); 3358 __ fild_s(Operand(esp, 0));
3354 __ pop(ecx); 3359 __ pop(ecx);
3355 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); 3360 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
3356 } 3361 }
3357 } else { 3362 } else {
(...skipping 373 matching lines...)
3731 3736
3732 static const int kLastMatchInfoOffset = 1 * kPointerSize; 3737 static const int kLastMatchInfoOffset = 1 * kPointerSize;
3733 static const int kPreviousIndexOffset = 2 * kPointerSize; 3738 static const int kPreviousIndexOffset = 2 * kPointerSize;
3734 static const int kSubjectOffset = 3 * kPointerSize; 3739 static const int kSubjectOffset = 3 * kPointerSize;
3735 static const int kJSRegExpOffset = 4 * kPointerSize; 3740 static const int kJSRegExpOffset = 4 * kPointerSize;
3736 3741
3737 Label runtime, invoke_regexp; 3742 Label runtime, invoke_regexp;
3738 3743
3739 // Ensure that a RegExp stack is allocated. 3744 // Ensure that a RegExp stack is allocated.
3740 ExternalReference address_of_regexp_stack_memory_address = 3745 ExternalReference address_of_regexp_stack_memory_address =
3741 ExternalReference::address_of_regexp_stack_memory_address(); 3746 ExternalReference::address_of_regexp_stack_memory_address(
3747 masm->isolate());
3742 ExternalReference address_of_regexp_stack_memory_size = 3748 ExternalReference address_of_regexp_stack_memory_size =
3743 ExternalReference::address_of_regexp_stack_memory_size(); 3749 ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
3744 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); 3750 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3745 __ test(ebx, Operand(ebx)); 3751 __ test(ebx, Operand(ebx));
3746 __ j(zero, &runtime, not_taken); 3752 __ j(zero, &runtime, not_taken);
3747 3753
3748 // Check that the first argument is a JSRegExp object. 3754 // Check that the first argument is a JSRegExp object.
3749 __ mov(eax, Operand(esp, kJSRegExpOffset)); 3755 __ mov(eax, Operand(esp, kJSRegExpOffset));
3750 STATIC_ASSERT(kSmiTag == 0); 3756 STATIC_ASSERT(kSmiTag == 0);
3751 __ test(eax, Immediate(kSmiTagMask)); 3757 __ test(eax, Immediate(kSmiTagMask));
3752 __ j(zero, &runtime); 3758 __ j(zero, &runtime);
3753 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx); 3759 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
(...skipping 159 matching lines...)
3913 // Argument 7: Indicate that this is a direct call from JavaScript. 3919 // Argument 7: Indicate that this is a direct call from JavaScript.
3914 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1)); 3920 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
3915 3921
3916 // Argument 6: Start (high end) of backtracking stack memory area. 3922 // Argument 6: Start (high end) of backtracking stack memory area.
3917 __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address)); 3923 __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address));
3918 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); 3924 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3919 __ mov(Operand(esp, 5 * kPointerSize), ecx); 3925 __ mov(Operand(esp, 5 * kPointerSize), ecx);
3920 3926
3921 // Argument 5: static offsets vector buffer. 3927 // Argument 5: static offsets vector buffer.
3922 __ mov(Operand(esp, 4 * kPointerSize), 3928 __ mov(Operand(esp, 4 * kPointerSize),
3923 Immediate(ExternalReference::address_of_static_offsets_vector())); 3929 Immediate(ExternalReference::address_of_static_offsets_vector(
3930 masm->isolate())));
3924 3931
3925 // Argument 4: End of string data 3932 // Argument 4: End of string data
3926 // Argument 3: Start of string data 3933 // Argument 3: Start of string data
3927 NearLabel setup_two_byte, setup_rest; 3934 NearLabel setup_two_byte, setup_rest;
3928 __ test(edi, Operand(edi)); 3935 __ test(edi, Operand(edi));
3929 __ mov(edi, FieldOperand(eax, String::kLengthOffset)); 3936 __ mov(edi, FieldOperand(eax, String::kLengthOffset));
3930 __ j(zero, &setup_two_byte); 3937 __ j(zero, &setup_two_byte);
3931 __ SmiUntag(edi); 3938 __ SmiUntag(edi);
3932 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize)); 3939 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
3933 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. 3940 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
(...skipping 31 matching lines...)
3965 Label failure; 3972 Label failure;
3966 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE); 3973 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
3967 __ j(equal, &failure, taken); 3974 __ j(equal, &failure, taken);
3968 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION); 3975 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
3969 // If not exception it can only be retry. Handle that in the runtime system. 3976 // If not exception it can only be retry. Handle that in the runtime system.
3970 __ j(not_equal, &runtime); 3977 __ j(not_equal, &runtime);
3971 // Result must now be exception. If there is no pending exception already a 3978 // Result must now be exception. If there is no pending exception already a
3972 // stack overflow (on the backtrack stack) was detected in RegExp code but 3979 // stack overflow (on the backtrack stack) was detected in RegExp code but
3973 // haven't created the exception yet. Handle that in the runtime system. 3980 // haven't created the exception yet. Handle that in the runtime system.
3974 // TODO(592): Rerunning the RegExp to get the stack overflow exception. 3981 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
3975 ExternalReference pending_exception(Isolate::k_pending_exception_address); 3982 ExternalReference pending_exception(Isolate::k_pending_exception_address,
3983 masm->isolate());
3976 __ mov(edx, 3984 __ mov(edx,
3977 Operand::StaticVariable(ExternalReference::the_hole_value_location())); 3985 Operand::StaticVariable(ExternalReference::the_hole_value_location(
3986 masm->isolate())));
3978 __ mov(eax, Operand::StaticVariable(pending_exception)); 3987 __ mov(eax, Operand::StaticVariable(pending_exception));
3979 __ cmp(edx, Operand(eax)); 3988 __ cmp(edx, Operand(eax));
3980 __ j(equal, &runtime); 3989 __ j(equal, &runtime);
3981 // For exception, throw the exception again. 3990 // For exception, throw the exception again.
3982 3991
3983 // Clear the pending exception variable. 3992 // Clear the pending exception variable.
3984 __ mov(Operand::StaticVariable(pending_exception), edx); 3993 __ mov(Operand::StaticVariable(pending_exception), edx);
3985 3994
3986 // Special handling of termination exceptions which are uncatchable 3995 // Special handling of termination exceptions which are uncatchable
3987 // by javascript code. 3996 // by javascript code.
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
4026 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax); 4035 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
4027 __ mov(ecx, ebx); 4036 __ mov(ecx, ebx);
4028 __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi); 4037 __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
4029 __ mov(eax, Operand(esp, kSubjectOffset)); 4038 __ mov(eax, Operand(esp, kSubjectOffset));
4030 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax); 4039 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
4031 __ mov(ecx, ebx); 4040 __ mov(ecx, ebx);
4032 __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi); 4041 __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);
4033 4042
4034 // Get the static offsets vector filled by the native regexp code. 4043 // Get the static offsets vector filled by the native regexp code.
4035 ExternalReference address_of_static_offsets_vector = 4044 ExternalReference address_of_static_offsets_vector =
4036 ExternalReference::address_of_static_offsets_vector(); 4045 ExternalReference::address_of_static_offsets_vector(masm->isolate());
4037 __ mov(ecx, Immediate(address_of_static_offsets_vector)); 4046 __ mov(ecx, Immediate(address_of_static_offsets_vector));
4038 4047
4039 // ebx: last_match_info backing store (FixedArray) 4048 // ebx: last_match_info backing store (FixedArray)
4040 // ecx: offsets vector 4049 // ecx: offsets vector
4041 // edx: number of capture registers 4050 // edx: number of capture registers
4042 NearLabel next_capture, done; 4051 NearLabel next_capture, done;
4043 // Capture register counter starts from number of capture registers and 4052 // Capture register counter starts from number of capture registers and
4044 // counts down until wraping after zero. 4053 // counts down until wraping after zero.
4045 __ bind(&next_capture); 4054 __ bind(&next_capture);
4046 __ sub(Operand(edx), Immediate(1)); 4055 __ sub(Operand(edx), Immediate(1));
(...skipping 108 matching lines...)
4155 Register scratch1, 4164 Register scratch1,
4156 Register scratch2, 4165 Register scratch2,
4157 bool object_is_smi, 4166 bool object_is_smi,
4158 Label* not_found) { 4167 Label* not_found) {
4159 // Use of registers. Register result is used as a temporary. 4168 // Use of registers. Register result is used as a temporary.
4160 Register number_string_cache = result; 4169 Register number_string_cache = result;
4161 Register mask = scratch1; 4170 Register mask = scratch1;
4162 Register scratch = scratch2; 4171 Register scratch = scratch2;
4163 4172
4164 // Load the number string cache. 4173 // Load the number string cache.
4165 ExternalReference roots_address = ExternalReference::roots_address(); 4174 ExternalReference roots_address =
4175 ExternalReference::roots_address(masm->isolate());
4166 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); 4176 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
4167 __ mov(number_string_cache, 4177 __ mov(number_string_cache,
4168 Operand::StaticArray(scratch, times_pointer_size, roots_address)); 4178 Operand::StaticArray(scratch, times_pointer_size, roots_address));
4169 // Make the hash mask from the length of the number string cache. It 4179 // Make the hash mask from the length of the number string cache. It
4170 // contains two elements (number and string) for each cache entry. 4180 // contains two elements (number and string) for each cache entry.
4171 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); 4181 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
4172 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two. 4182 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two.
4173 __ sub(Operand(mask), Immediate(1)); // Make mask. 4183 __ sub(Operand(mask), Immediate(1)); // Make mask.
4174 4184
4175 // Calculate the entry in the number string cache. The hash value in the 4185 // Calculate the entry in the number string cache. The hash value in the
(...skipping 24 matching lines...)
4200 __ and_(scratch, Operand(mask)); 4210 __ and_(scratch, Operand(mask));
4201 Register index = scratch; 4211 Register index = scratch;
4202 Register probe = mask; 4212 Register probe = mask;
4203 __ mov(probe, 4213 __ mov(probe,
4204 FieldOperand(number_string_cache, 4214 FieldOperand(number_string_cache,
4205 index, 4215 index,
4206 times_twice_pointer_size, 4216 times_twice_pointer_size,
4207 FixedArray::kHeaderSize)); 4217 FixedArray::kHeaderSize));
4208 __ test(probe, Immediate(kSmiTagMask)); 4218 __ test(probe, Immediate(kSmiTagMask));
4209 __ j(zero, not_found); 4219 __ j(zero, not_found);
4210 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 4220 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
4211 CpuFeatures::Scope fscope(SSE2); 4221 CpuFeatures::Scope fscope(SSE2);
4212 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); 4222 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
4213 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset)); 4223 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
4214 __ ucomisd(xmm0, xmm1); 4224 __ ucomisd(xmm0, xmm1);
4215 } else { 4225 } else {
4216 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset)); 4226 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
4217 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset)); 4227 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
4218 __ FCmp(); 4228 __ FCmp();
4219 } 4229 }
4220 __ j(parity_even, not_found); // Bail out if NaN is involved. 4230 __ j(parity_even, not_found); // Bail out if NaN is involved.
(...skipping 217 matching lines...)
4438 __ j(equal, &return_not_equal); 4448 __ j(equal, &return_not_equal);
4439 4449
4440 // Fall through to the general case. 4450 // Fall through to the general case.
4441 __ bind(&slow); 4451 __ bind(&slow);
4442 } 4452 }
4443 4453
4444 // Generate the number comparison code. 4454 // Generate the number comparison code.
4445 if (include_number_compare_) { 4455 if (include_number_compare_) {
4446 Label non_number_comparison; 4456 Label non_number_comparison;
4447 Label unordered; 4457 Label unordered;
4448 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { 4458 if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
4449 CpuFeatures::Scope use_sse2(SSE2); 4459 CpuFeatures::Scope use_sse2(SSE2);
4450 CpuFeatures::Scope use_cmov(CMOV); 4460 CpuFeatures::Scope use_cmov(CMOV);
4451 4461
4452 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison); 4462 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
4453 __ ucomisd(xmm0, xmm1); 4463 __ ucomisd(xmm0, xmm1);
4454 4464
4455 // Don't base result on EFLAGS when a NaN is involved. 4465 // Don't base result on EFLAGS when a NaN is involved.
4456 __ j(parity_even, &unordered, not_taken); 4466 __ j(parity_even, &unordered, not_taken);
4457 // Return a result of -1, 0, or 1, based on EFLAGS. 4467 // Return a result of -1, 0, or 1, based on EFLAGS.
4458 __ mov(eax, 0); // equal 4468 __ mov(eax, 0); // equal
(...skipping 198 matching lines...)
4657 __ InvokeFunction(edi, actual, JUMP_FUNCTION); 4667 __ InvokeFunction(edi, actual, JUMP_FUNCTION);
4658 4668
4659 // Slow-case: Non-function called. 4669 // Slow-case: Non-function called.
4660 __ bind(&slow); 4670 __ bind(&slow);
4661 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 4671 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
4662 // of the original receiver from the call site). 4672 // of the original receiver from the call site).
4663 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); 4673 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
4664 __ Set(eax, Immediate(argc_)); 4674 __ Set(eax, Immediate(argc_));
4665 __ Set(ebx, Immediate(0)); 4675 __ Set(ebx, Immediate(0));
4666 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); 4676 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
4667 Handle<Code> adaptor(Isolate::Current()->builtins()->builtin( 4677 Handle<Code> adaptor(masm->isolate()->builtins()->builtin(
4668 Builtins::ArgumentsAdaptorTrampoline)); 4678 Builtins::ArgumentsAdaptorTrampoline));
4669 __ jmp(adaptor, RelocInfo::CODE_TARGET); 4679 __ jmp(adaptor, RelocInfo::CODE_TARGET);
4670 } 4680 }
4671 4681
4672 4682
4673 bool CEntryStub::NeedsImmovableCode() { 4683 bool CEntryStub::NeedsImmovableCode() {
4674 return false; 4684 return false;
4675 } 4685 }
4676 4686
4677 4687
(...skipping 26 matching lines...)
4704 // Pass failure code returned from last attempt as first argument to 4714 // Pass failure code returned from last attempt as first argument to
4705 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the 4715 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
4706 // stack alignment is known to be correct. This function takes one argument 4716 // stack alignment is known to be correct. This function takes one argument
4707 // which is passed on the stack, and we know that the stack has been 4717 // which is passed on the stack, and we know that the stack has been
4708 // prepared to pass at least one argument. 4718 // prepared to pass at least one argument.
4709 __ mov(Operand(esp, 0 * kPointerSize), eax); // Result. 4719 __ mov(Operand(esp, 0 * kPointerSize), eax); // Result.
4710 __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY); 4720 __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
4711 } 4721 }
4712 4722
4713 ExternalReference scope_depth = 4723 ExternalReference scope_depth =
4714 ExternalReference::heap_always_allocate_scope_depth(); 4724 ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
4715 if (always_allocate_scope) { 4725 if (always_allocate_scope) {
4716 __ inc(Operand::StaticVariable(scope_depth)); 4726 __ inc(Operand::StaticVariable(scope_depth));
4717 } 4727 }
4718 4728
4719 // Call C function. 4729 // Call C function.
4720 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc. 4730 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
4721 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv. 4731 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
4722 __ mov(Operand(esp, 2 * kPointerSize), 4732 __ mov(Operand(esp, 2 * kPointerSize),
4723 Immediate(ExternalReference::isolate_address())); 4733 Immediate(ExternalReference::isolate_address()));
4724 __ call(Operand(ebx)); 4734 __ call(Operand(ebx));
(...skipping 15 matching lines...)
4740 4750
4741 // Check for failure result. 4751 // Check for failure result.
4742 Label failure_returned; 4752 Label failure_returned;
4743 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); 4753 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
4744 __ lea(ecx, Operand(eax, 1)); 4754 __ lea(ecx, Operand(eax, 1));
4745 // Lower 2 bits of ecx are 0 iff eax has failure tag. 4755 // Lower 2 bits of ecx are 0 iff eax has failure tag.
4746 __ test(ecx, Immediate(kFailureTagMask)); 4756 __ test(ecx, Immediate(kFailureTagMask));
4747 __ j(zero, &failure_returned, not_taken); 4757 __ j(zero, &failure_returned, not_taken);
4748 4758
4749 ExternalReference pending_exception_address( 4759 ExternalReference pending_exception_address(
4750 Isolate::k_pending_exception_address); 4760 Isolate::k_pending_exception_address, masm->isolate());
4751 4761
4752 // Check that there is no pending exception, otherwise we 4762 // Check that there is no pending exception, otherwise we
4753 // should have returned some failure value. 4763 // should have returned some failure value.
4754 if (FLAG_debug_code) { 4764 if (FLAG_debug_code) {
4755 __ push(edx); 4765 __ push(edx);
4756 __ mov(edx, Operand::StaticVariable( 4766 __ mov(edx, Operand::StaticVariable(
4757 ExternalReference::the_hole_value_location())); 4767 ExternalReference::the_hole_value_location(masm->isolate())));
4758 NearLabel okay; 4768 NearLabel okay;
4759 __ cmp(edx, Operand::StaticVariable(pending_exception_address)); 4769 __ cmp(edx, Operand::StaticVariable(pending_exception_address));
4760 // Cannot use check here as it attempts to generate call into runtime. 4770 // Cannot use check here as it attempts to generate call into runtime.
4761 __ j(equal, &okay); 4771 __ j(equal, &okay);
4762 __ int3(); 4772 __ int3();
4763 __ bind(&okay); 4773 __ bind(&okay);
4764 __ pop(edx); 4774 __ pop(edx);
4765 } 4775 }
4766 4776
4767 // Exit the JavaScript to C++ exit frame. 4777 // Exit the JavaScript to C++ exit frame.
4768 __ LeaveExitFrame(save_doubles_); 4778 __ LeaveExitFrame(save_doubles_);
4769 __ ret(0); 4779 __ ret(0);
4770 4780
4771 // Handling of failure. 4781 // Handling of failure.
4772 __ bind(&failure_returned); 4782 __ bind(&failure_returned);
4773 4783
4774 Label retry; 4784 Label retry;
4775 // If the returned exception is RETRY_AFTER_GC continue at retry label 4785 // If the returned exception is RETRY_AFTER_GC continue at retry label
4776 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); 4786 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
4777 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); 4787 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
4778 __ j(zero, &retry, taken); 4788 __ j(zero, &retry, taken);
4779 4789
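The test above masks out the failure's type field, which sits immediately above the tag bits; because Failure::RETRY_AFTER_GC is 0 (see the STATIC_ASSERT), a zero result means the failure merely asks for the allocation to be retried after a GC. A sketch of the field extraction, assuming the 2-bit tag and 2-bit type widths used on this branch:

    #include <cstdint>

    // Failure word, low bits: [ ... payload ... | type (2 bits) | tag (2 bits) ]
    inline int FailureType(uintptr_t word) {
      const int kFailureTagSize = 2;
      const int kFailureTypeTagSize = 2;
      return static_cast<int>((word >> kFailureTagSize) & ((1u << kFailureTypeTagSize) - 1));
    }
    // FailureType(word) == 0 corresponds to Failure::RETRY_AFTER_GC.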
4780 // Special handling of out of memory exceptions. 4790 // Special handling of out of memory exceptions.
4781 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException())); 4791 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
4782 __ j(equal, throw_out_of_memory_exception); 4792 __ j(equal, throw_out_of_memory_exception);
4783 4793
4784 // Retrieve the pending exception and clear the variable. 4794 // Retrieve the pending exception and clear the variable.
4795 ExternalReference the_hole_location =
4796 ExternalReference::the_hole_value_location(masm->isolate());
4785 __ mov(eax, Operand::StaticVariable(pending_exception_address)); 4797 __ mov(eax, Operand::StaticVariable(pending_exception_address));
4786 __ mov(edx, 4798 __ mov(edx, Operand::StaticVariable(the_hole_location));
4787 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
4788 __ mov(Operand::StaticVariable(pending_exception_address), edx); 4799 __ mov(Operand::StaticVariable(pending_exception_address), edx);
4789 4800
4790 // Special handling of termination exceptions which are uncatchable 4801 // Special handling of termination exceptions which are uncatchable
4791 // by javascript code. 4802 // by javascript code.
4792 __ cmp(eax, FACTORY->termination_exception()); 4803 __ cmp(eax, FACTORY->termination_exception());
4793 __ j(equal, throw_termination_exception); 4804 __ j(equal, throw_termination_exception);
4794 4805
4795 // Handle normal exception. 4806 // Handle normal exception.
4796 __ jmp(throw_normal_exception); 4807 __ jmp(throw_normal_exception);
4797 4808
(...skipping 84 matching lines...)
4882 // Push marker in two places. 4893 // Push marker in two places.
4883 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; 4894 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
4884 __ push(Immediate(Smi::FromInt(marker))); // context slot 4895 __ push(Immediate(Smi::FromInt(marker))); // context slot
4885 __ push(Immediate(Smi::FromInt(marker))); // function slot 4896 __ push(Immediate(Smi::FromInt(marker))); // function slot
4886 // Save callee-saved registers (C calling conventions). 4897 // Save callee-saved registers (C calling conventions).
4887 __ push(edi); 4898 __ push(edi);
4888 __ push(esi); 4899 __ push(esi);
4889 __ push(ebx); 4900 __ push(ebx);
4890 4901
4891 // Save copies of the top frame descriptor on the stack. 4902 // Save copies of the top frame descriptor on the stack.
4892 ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address); 4903 ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, masm->isolate());
4893 __ push(Operand::StaticVariable(c_entry_fp)); 4904 __ push(Operand::StaticVariable(c_entry_fp));
4894 4905
4895 #ifdef ENABLE_LOGGING_AND_PROFILING 4906 #ifdef ENABLE_LOGGING_AND_PROFILING
4896 // If this is the outermost JS call, set js_entry_sp value. 4907 // If this is the outermost JS call, set js_entry_sp value.
4897 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address); 4908 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
4909 masm->isolate());
4898 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0)); 4910 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
4899 __ j(not_equal, &not_outermost_js); 4911 __ j(not_equal, &not_outermost_js);
4900 __ mov(Operand::StaticVariable(js_entry_sp), ebp); 4912 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
4901 __ bind(&not_outermost_js); 4913 __ bind(&not_outermost_js);
4902 #endif 4914 #endif
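In profiling builds, js_entry_sp tracks the frame pointer of the outermost JS entry frame: the code above stores ebp only while the variable is still zero, and the matching code after the call (further down) clears it only when ebp equals the stored value. Purely as an illustration of that invariant, not real V8 code:

    #include <cstdint>

    // js_entry_sp is 0 while no JS is on the stack, otherwise the ebp of the
    // outermost JSEntryStub frame.
    inline void OnJsEntry(uintptr_t* js_entry_sp, uintptr_t ebp) {
      if (*js_entry_sp == 0) *js_entry_sp = ebp;   // outermost entry only
    }
    inline void OnJsExit(uintptr_t* js_entry_sp, uintptr_t ebp) {
      if (*js_entry_sp == ebp) *js_entry_sp = 0;   // leaving the outermost frame
    }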
4903 4915
4904 // Call a faked try-block that does the invoke. 4916 // Call a faked try-block that does the invoke.
4905 __ call(&invoke); 4917 __ call(&invoke);
4906 4918
4907 // Caught exception: Store result (exception) in the pending 4919 // Caught exception: Store result (exception) in the pending
4908 // exception field in the JSEnv and return a failure sentinel. 4920 // exception field in the JSEnv and return a failure sentinel.
4909 ExternalReference pending_exception(Isolate::k_pending_exception_address); 4921 ExternalReference pending_exception(Isolate::k_pending_exception_address,
4922 masm->isolate());
4910 __ mov(Operand::StaticVariable(pending_exception), eax); 4923 __ mov(Operand::StaticVariable(pending_exception), eax);
4911 __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception())); 4924 __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
4912 __ jmp(&exit); 4925 __ jmp(&exit);
4913 4926
4914 // Invoke: Link this frame into the handler chain. 4927 // Invoke: Link this frame into the handler chain.
4915 __ bind(&invoke); 4928 __ bind(&invoke);
4916 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER); 4929 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
4917 4930
4918 // Clear any pending exceptions. 4931 // Clear any pending exceptions.
4919 __ mov(edx, 4932 ExternalReference the_hole_location =
4920 Operand::StaticVariable(ExternalReference::the_hole_value_location())); 4933 ExternalReference::the_hole_value_location(masm->isolate());
4934 __ mov(edx, Operand::StaticVariable(the_hole_location));
4921 __ mov(Operand::StaticVariable(pending_exception), edx); 4935 __ mov(Operand::StaticVariable(pending_exception), edx);
4922 4936
4923 // Fake a receiver (NULL). 4937 // Fake a receiver (NULL).
4924 __ push(Immediate(0)); // receiver 4938 __ push(Immediate(0)); // receiver
4925 4939
4926 // Invoke the function by calling through JS entry trampoline 4940 // Invoke the function by calling through JS entry trampoline
4927 // builtin and pop the faked function when we return. Notice that we 4941 // builtin and pop the faked function when we return. Notice that we
4928 // cannot store a reference to the trampoline code directly in this 4942 // cannot store a reference to the trampoline code directly in this
4929 // stub, because the builtin stubs may not have been generated yet. 4943 // stub, because the builtin stubs may not have been generated yet.
4930 if (is_construct) { 4944 if (is_construct) {
4931 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline); 4945 ExternalReference construct_entry(
4946 Builtins::JSConstructEntryTrampoline,
4947 masm->isolate());
4932 __ mov(edx, Immediate(construct_entry)); 4948 __ mov(edx, Immediate(construct_entry));
4933 } else { 4949 } else {
4934 ExternalReference entry(Builtins::JSEntryTrampoline); 4950 ExternalReference entry(Builtins::JSEntryTrampoline,
4951 masm->isolate());
4935 __ mov(edx, Immediate(entry)); 4952 __ mov(edx, Immediate(entry));
4936 } 4953 }
4937 __ mov(edx, Operand(edx, 0)); // deref address 4954 __ mov(edx, Operand(edx, 0)); // deref address
4938 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); 4955 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
4939 __ call(Operand(edx)); 4956 __ call(Operand(edx));
4940 4957
4941 // Unlink this frame from the handler chain. 4958 // Unlink this frame from the handler chain.
4942 __ pop(Operand::StaticVariable(ExternalReference( 4959 __ pop(Operand::StaticVariable(ExternalReference(
4943 Isolate::k_handler_address))); 4960 Isolate::k_handler_address,
4961 masm->isolate())));
4944 // Pop next_sp. 4962 // Pop next_sp.
4945 __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize)); 4963 __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
4946 4964
4947 #ifdef ENABLE_LOGGING_AND_PROFILING 4965 #ifdef ENABLE_LOGGING_AND_PROFILING
4948 // If current EBP value is the same as js_entry_sp value, it means that 4966 // If current EBP value is the same as js_entry_sp value, it means that
4949 // the current function is the outermost. 4967 // the current function is the outermost.
4950 __ cmp(ebp, Operand::StaticVariable(js_entry_sp)); 4968 __ cmp(ebp, Operand::StaticVariable(js_entry_sp));
4951 __ j(not_equal, &not_outermost_js_2); 4969 __ j(not_equal, &not_outermost_js_2);
4952 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0)); 4970 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
4953 __ bind(&not_outermost_js_2); 4971 __ bind(&not_outermost_js_2);
4954 #endif 4972 #endif
4955 4973
4956 // Restore the top frame descriptor from the stack. 4974 // Restore the top frame descriptor from the stack.
4957 __ bind(&exit); 4975 __ bind(&exit);
4958 __ pop(Operand::StaticVariable(ExternalReference( 4976 __ pop(Operand::StaticVariable(ExternalReference(
4959 Isolate::k_c_entry_fp_address))); 4977 Isolate::k_c_entry_fp_address,
4978 masm->isolate())));
4960 4979
4961 // Restore callee-saved registers (C calling conventions). 4980 // Restore callee-saved registers (C calling conventions).
4962 __ pop(ebx); 4981 __ pop(ebx);
4963 __ pop(esi); 4982 __ pop(esi);
4964 __ pop(edi); 4983 __ pop(edi);
4965 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers 4984 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers
4966 4985
4967 // Restore frame pointer and return. 4986 // Restore frame pointer and return.
4968 __ pop(ebp); 4987 __ pop(ebp);
4969 __ ret(0); 4988 __ ret(0);
(...skipping 28 matching lines...)
4998 Register scratch = ecx; 5017 Register scratch = ecx;
4999 5018
5000 // Constants describing the call site code to patch. 5019 // Constants describing the call site code to patch.
5001 static const int kDeltaToCmpImmediate = 2; 5020 static const int kDeltaToCmpImmediate = 2;
5002 static const int kDeltaToMov = 8; 5021 static const int kDeltaToMov = 8;
5003 static const int kDeltaToMovImmediate = 9; 5022 static const int kDeltaToMovImmediate = 9;
5004 static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81); 5023 static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
5005 static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff); 5024 static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
5006 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8); 5025 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
5007 5026
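These constants describe the inlined instanceof call site that the stub patches in place: 0x81 0xFF are the opcode and ModRM bytes of "cmp edi, imm32" and 0xB8 is "mov eax, imm32", so the kDeltaTo* offsets locate the mov instruction and the two 32-bit immediates that get rewritten. A hypothetical helper showing the kind of in-place patch this enables (a raw byte buffer stands in for the generated code; the real stub derives the call-site address from data pushed by the caller):

    #include <cstdint>
    #include <cstring>

    // Overwrite the 32-bit immediate that starts `delta` bytes into the call site.
    inline void PatchImm32(uint8_t* call_site, int delta, uint32_t value) {
      std::memcpy(call_site + delta, &value, sizeof(value));  // ia32 is little-endian
    }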
5008 ExternalReference roots_address = ExternalReference::roots_address(); 5027 ExternalReference roots_address =
5028 ExternalReference::roots_address(masm->isolate());
5009 5029
5010 ASSERT_EQ(object.code(), InstanceofStub::left().code()); 5030 ASSERT_EQ(object.code(), InstanceofStub::left().code());
5011 ASSERT_EQ(function.code(), InstanceofStub::right().code()); 5031 ASSERT_EQ(function.code(), InstanceofStub::right().code());
5012 5032
5013 // Get the object and function - they are always both needed. 5033 // Get the object and function - they are always both needed.
5014 Label slow, not_js_object; 5034 Label slow, not_js_object;
5015 if (!HasArgsInRegisters()) { 5035 if (!HasArgsInRegisters()) {
5016 __ mov(object, Operand(esp, 2 * kPointerSize)); 5036 __ mov(object, Operand(esp, 2 * kPointerSize));
5017 __ mov(function, Operand(esp, 1 * kPointerSize)); 5037 __ mov(function, Operand(esp, 1 * kPointerSize));
5018 } 5038 }
(...skipping 888 matching lines...)
5907 // Collect the two characters in a register. 5927 // Collect the two characters in a register.
5908 Register chars = c1; 5928 Register chars = c1;
5909 __ shl(c2, kBitsPerByte); 5929 __ shl(c2, kBitsPerByte);
5910 __ or_(chars, Operand(c2)); 5930 __ or_(chars, Operand(c2));
5911 5931
5912 // chars: two character string, char 1 in byte 0 and char 2 in byte 1. 5932 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
5913 // hash: hash of two character string. 5933 // hash: hash of two character string.
5914 5934
5915 // Load the symbol table. 5935 // Load the symbol table.
5916 Register symbol_table = c2; 5936 Register symbol_table = c2;
5917 ExternalReference roots_address = ExternalReference::roots_address(); 5937 ExternalReference roots_address =
5938 ExternalReference::roots_address(masm->isolate());
5918 __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex)); 5939 __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
5919 __ mov(symbol_table, 5940 __ mov(symbol_table,
5920 Operand::StaticArray(scratch, times_pointer_size, roots_address)); 5941 Operand::StaticArray(scratch, times_pointer_size, roots_address));
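Operand::StaticArray(scratch, times_pointer_size, roots_address) addresses the isolate's root list as a flat array of pointers, so the two instructions above boil down to loading roots[Heap::kSymbolTableRootIndex]. A rough C++ equivalent of that addressing, where roots_base is a hypothetical stand-in for the raw address wrapped by the roots ExternalReference:

    // symbol_table = *(roots_base + kSymbolTableRootIndex * kPointerSize)
    intptr_t* roots = reinterpret_cast<intptr_t*>(roots_base);
    intptr_t symbol_table = roots[Heap::kSymbolTableRootIndex];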
5921 5942
5922 // Calculate capacity mask from the symbol table capacity. 5943 // Calculate capacity mask from the symbol table capacity.
5923 Register mask = scratch2; 5944 Register mask = scratch2;
5924 __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset)); 5945 __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
5925 __ SmiUntag(mask); 5946 __ SmiUntag(mask);
5926 __ sub(Operand(mask), Immediate(1)); 5947 __ sub(Operand(mask), Immediate(1));
5927 5948
(...skipping 436 matching lines...)
6364 __ test(ecx, Immediate(kSmiTagMask)); 6385 __ test(ecx, Immediate(kSmiTagMask));
6365 __ j(zero, &generic_stub, not_taken); 6386 __ j(zero, &generic_stub, not_taken);
6366 6387
6367 __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx); 6388 __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
6368 __ j(not_equal, &miss, not_taken); 6389 __ j(not_equal, &miss, not_taken);
6369 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx); 6390 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
6370 __ j(not_equal, &miss, not_taken); 6391 __ j(not_equal, &miss, not_taken);
6371 6392
6372 // Inlining the double comparison and falling back to the general compare 6393 // Inlining the double comparison and falling back to the general compare
6373 // stub if NaN is involved or SSE2 or CMOV is unsupported. 6394 // stub if NaN is involved or SSE2 or CMOV is unsupported.
6374 CpuFeatures* cpu_features = Isolate::Current()->cpu_features(); 6395 CpuFeatures* cpu_features = masm->isolate()->cpu_features();
6375 if (cpu_features->IsSupported(SSE2) && cpu_features->IsSupported(CMOV)) { 6396 if (cpu_features->IsSupported(SSE2) && cpu_features->IsSupported(CMOV)) {
6376 CpuFeatures::Scope scope1(SSE2); 6397 CpuFeatures::Scope scope1(SSE2);
6377 CpuFeatures::Scope scope2(CMOV); 6398 CpuFeatures::Scope scope2(CMOV);
6378 6399
6379 // Load left and right operand 6400 // Load left and right operand
6380 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); 6401 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
6381 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); 6402 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
6382 6403
6383 // Compare operands 6404 // Compare operands
6384 __ ucomisd(xmm0, xmm1); 6405 __ ucomisd(xmm0, xmm1);
(...skipping 45 matching lines...)
6430 6451
6431 6452
6432 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { 6453 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
6433 // Save the registers. 6454 // Save the registers.
6434 __ pop(ecx); 6455 __ pop(ecx);
6435 __ push(edx); 6456 __ push(edx);
6436 __ push(eax); 6457 __ push(eax);
6437 __ push(ecx); 6458 __ push(ecx);
6438 6459
6439 // Call the runtime system in a fresh internal frame. 6460 // Call the runtime system in a fresh internal frame.
6440 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss)); 6461 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
6462 masm->isolate());
6441 __ EnterInternalFrame(); 6463 __ EnterInternalFrame();
6442 __ push(edx); 6464 __ push(edx);
6443 __ push(eax); 6465 __ push(eax);
6444 __ push(Immediate(Smi::FromInt(op_))); 6466 __ push(Immediate(Smi::FromInt(op_)));
6445 __ CallExternalReference(miss, 3); 6467 __ CallExternalReference(miss, 3);
6446 __ LeaveInternalFrame(); 6468 __ LeaveInternalFrame();
6447 6469
6448 // Compute the entry point of the rewritten stub. 6470 // Compute the entry point of the rewritten stub.
6449 __ lea(edi, FieldOperand(eax, Code::kHeaderSize)); 6471 __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
6450 6472
6451 // Restore registers. 6473 // Restore registers.
6452 __ pop(ecx); 6474 __ pop(ecx);
6453 __ pop(eax); 6475 __ pop(eax);
6454 __ pop(edx); 6476 __ pop(edx);
6455 __ push(ecx); 6477 __ push(ecx);
6456 6478
6457 // Do a tail call to the rewritten stub. 6479 // Do a tail call to the rewritten stub.
6458 __ jmp(Operand(edi)); 6480 __ jmp(Operand(edi));
6459 } 6481 }
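GenerateMiss follows the usual IC miss protocol: the return address is popped so the two operands can be re-pushed beneath it, the runtime helper reached through IC_Utility(IC::kCompareIC_Miss) is called with (left, right, op) inside a fresh internal frame, and the Code object it returns is tail-called with the operands restored to edx/eax. The entry-point computation via FieldOperand is ordinary untagging plus skipping the Code header; as a sketch (assuming kHeapObjectTag == 1, as for every V8 heap object pointer):

    // code_object_ptr: the tagged Code* returned in eax, viewed as a byte pointer.
    uint8_t* entry = code_object_ptr + Code::kHeaderSize - kHeapObjectTag;
    // ...which is what "lea edi, FieldOperand(eax, Code::kHeaderSize)" computes.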
6460 6482
6461 6483
6462 #undef __ 6484 #undef __
6463 6485
6464 } } // namespace v8::internal 6486 } } // namespace v8::internal
6465 6487
6466 #endif // V8_TARGET_ARCH_IA32 6488 #endif // V8_TARGET_ARCH_IA32