OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 214 matching lines...)
225 | 225 |
226 // Return and remove the on-stack parameters. | 226 // Return and remove the on-stack parameters. |
227 __ ret(3 * kPointerSize); | 227 __ ret(3 * kPointerSize); |
228 | 228 |
229 __ bind(&slow_case); | 229 __ bind(&slow_case); |
230 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 230 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
231 } | 231 } |
232 | 232 |
233 | 233 |
234 void ToBooleanStub::Generate(MacroAssembler* masm) { | 234 void ToBooleanStub::Generate(MacroAssembler* masm) { |
235 // This stub overrides SometimesSetsUpAFrame() to return false. That means | |
236 // we cannot call anything that could cause a GC from this stub. | |
237 Label false_result, true_result, not_string; | 235 Label false_result, true_result, not_string; |
238 __ movq(rax, Operand(rsp, 1 * kPointerSize)); | 236 __ movq(rax, Operand(rsp, 1 * kPointerSize)); |
239 | 237 |
240 // undefined -> false | 238 // undefined -> false |
241 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); | 239 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); |
242 __ j(equal, &false_result); | 240 __ j(equal, &false_result); |
243 | 241 |
244 // Boolean -> its value | 242 // Boolean -> its value |
245 __ CompareRoot(rax, Heap::kFalseValueRootIndex); | 243 __ CompareRoot(rax, Heap::kFalseValueRootIndex); |
246 __ j(equal, &false_result); | 244 __ j(equal, &false_result); |
(...skipping 312 matching lines...)
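Review note on the two comment lines dropped from ToBooleanStub::Generate above: they documented a real invariant. A stub whose SometimesSetsUpAFrame() returns false promises the GC and the deoptimizer that no frame is ever built, so its body must not call anything that can allocate or otherwise trigger a GC. A hedged, standalone C++ sketch of that contract (the types below are stand-ins, not V8's actual declarations):

    // Minimal sketch of the opt-out. The real CodeStub is assumed to
    // default to the conservative answer; a stub overriding it to false
    // must keep its generated code entirely GC-free.
    struct CodeStubSketch {
      virtual ~CodeStubSketch() {}
      virtual bool SometimesSetsUpAFrame() { return true; }
    };

    struct ToBooleanStubSketch : CodeStubSketch {
      // Opting out: no internal frame, and no allocating runtime calls,
      // may appear in the code this stub generates.
      virtual bool SometimesSetsUpAFrame() { return false; }
    };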
559 __ shl(kScratchRegister, Immediate(63)); | 557 __ shl(kScratchRegister, Immediate(63)); |
560 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); | 558 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); |
561 } else { | 559 } else { |
562 // Allocate a heap number before calculating the answer, | 560 // Allocate a heap number before calculating the answer, |
563 // so we don't have an untagged double around during GC. | 561 // so we don't have an untagged double around during GC. |
564 Label slow_allocate_heapnumber, heapnumber_allocated; | 562 Label slow_allocate_heapnumber, heapnumber_allocated; |
565 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); | 563 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); |
566 __ jmp(&heapnumber_allocated); | 564 __ jmp(&heapnumber_allocated); |
567 | 565 |
568 __ bind(&slow_allocate_heapnumber); | 566 __ bind(&slow_allocate_heapnumber); |
569 { | 567 __ EnterInternalFrame(); |
570 FrameScope scope(masm, StackFrame::INTERNAL); | 568 __ push(rax); |
571 __ push(rax); | 569 __ CallRuntime(Runtime::kNumberAlloc, 0); |
572 __ CallRuntime(Runtime::kNumberAlloc, 0); | 570 __ movq(rcx, rax); |
573 __ movq(rcx, rax); | 571 __ pop(rax); |
574 __ pop(rax); | 572 __ LeaveInternalFrame(); |
575 } | |
576 __ bind(&heapnumber_allocated); | 573 __ bind(&heapnumber_allocated); |
577 // rcx: allocated 'empty' number | 574 // rcx: allocated 'empty' number |
578 | 575 |
579 // Copy the double value to the new heap number, flipping the sign. | 576 // Copy the double value to the new heap number, flipping the sign. |
580 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); | 577 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); |
581 __ Set(kScratchRegister, 0x01); | 578 __ Set(kScratchRegister, 0x01); |
582 __ shl(kScratchRegister, Immediate(63)); | 579 __ shl(kScratchRegister, Immediate(63)); |
583 __ xor_(rdx, kScratchRegister); // Flip sign. | 580 __ xor_(rdx, kScratchRegister); // Flip sign. |
584 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); | 581 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); |
585 __ movq(rax, rcx); | 582 __ movq(rax, rcx); |
(...skipping 829 matching lines...)
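The hunk above flips a heap number's sign by building 1 << 63 in kScratchRegister and XOR-ing it into the IEEE-754 bit pattern; bit 63 is the sign bit, so no floating-point arithmetic (and no special-casing of NaNs or zeros) is involved. The same trick in portable, standalone C++:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Flip the sign of a double by toggling bit 63 of its bit pattern,
    // mirroring the stub's shl(kScratchRegister, 63) + xor_ sequence.
    double FlipSign(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);   // type-pun safely
      bits ^= static_cast<uint64_t>(1) << 63;    // bit 63 = sign bit
      std::memcpy(&value, &bits, sizeof value);
      return value;
    }

    int main() {
      std::printf("%g %g\n", FlipSign(2.5), FlipSign(-0.0));  // -2.5 0
      return 0;
    }

That FlipSign(-0.0) yields positive zero is the point of doing this with bit operations: negation stays exact even for zeros, infinities, and NaNs.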
1415 __ bind(&skip_cache); | 1412 __ bind(&skip_cache); |
1416 __ subq(rsp, Immediate(kDoubleSize)); | 1413 __ subq(rsp, Immediate(kDoubleSize)); |
1417 __ movsd(Operand(rsp, 0), xmm1); | 1414 __ movsd(Operand(rsp, 0), xmm1); |
1418 __ fld_d(Operand(rsp, 0)); | 1415 __ fld_d(Operand(rsp, 0)); |
1419 GenerateOperation(masm); | 1416 GenerateOperation(masm); |
1420 __ fstp_d(Operand(rsp, 0)); | 1417 __ fstp_d(Operand(rsp, 0)); |
1421 __ movsd(xmm1, Operand(rsp, 0)); | 1418 __ movsd(xmm1, Operand(rsp, 0)); |
1422 __ addq(rsp, Immediate(kDoubleSize)); | 1419 __ addq(rsp, Immediate(kDoubleSize)); |
1423 // We return the value in xmm1 without adding it to the cache, but | 1420 // We return the value in xmm1 without adding it to the cache, but |
1424 // we cause a scavenging GC so that future allocations will succeed. | 1421 // we cause a scavenging GC so that future allocations will succeed. |
1425 { | 1422 __ EnterInternalFrame(); |
1426 FrameScope scope(masm, StackFrame::INTERNAL); | 1423 // Allocate an unused object bigger than a HeapNumber. |
1427 // Allocate an unused object bigger than a HeapNumber. | 1424 __ Push(Smi::FromInt(2 * kDoubleSize)); |
1428 __ Push(Smi::FromInt(2 * kDoubleSize)); | 1425 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); |
1429 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); | 1426 __ LeaveInternalFrame(); |
1430 } | |
1431 __ Ret(); | 1427 __ Ret(); |
1432 } | 1428 } |
1433 | 1429 |
1434 // Call runtime, doing whatever allocation and cleanup is necessary. | 1430 // Call runtime, doing whatever allocation and cleanup is necessary. |
1435 if (tagged) { | 1431 if (tagged) { |
1436 __ bind(&runtime_call_clear_stack); | 1432 __ bind(&runtime_call_clear_stack); |
1437 __ fstp(0); | 1433 __ fstp(0); |
1438 __ bind(&runtime_call); | 1434 __ bind(&runtime_call); |
1439 __ TailCallExternalReference( | 1435 __ TailCallExternalReference( |
1440 ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1); | 1436 ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1); |
1441 } else { // UNTAGGED. | 1437 } else { // UNTAGGED. |
1442 __ bind(&runtime_call_clear_stack); | 1438 __ bind(&runtime_call_clear_stack); |
1443 __ bind(&runtime_call); | 1439 __ bind(&runtime_call); |
1444 __ AllocateHeapNumber(rax, rdi, &skip_cache); | 1440 __ AllocateHeapNumber(rax, rdi, &skip_cache); |
1445 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); | 1441 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); |
1446 { | 1442 __ EnterInternalFrame(); |
1447 FrameScope scope(masm, StackFrame::INTERNAL); | 1443 __ push(rax); |
1448 __ push(rax); | 1444 __ CallRuntime(RuntimeFunction(), 1); |
1449 __ CallRuntime(RuntimeFunction(), 1); | 1445 __ LeaveInternalFrame(); |
1450 } | |
1451 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); | 1446 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
1452 __ Ret(); | 1447 __ Ret(); |
1453 } | 1448 } |
1454 } | 1449 } |
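The recurring pattern in this file: the left column wraps each runtime call in a braced FrameScope(masm, StackFrame::INTERNAL), whose destructor emits the frame teardown when the scope closes, while the right column goes back to explicitly paired EnterInternalFrame()/LeaveInternalFrame() calls. A simplified stand-in for the RAII idea (not V8's actual FrameScope, which also tracks frame state for assertions):

    // Stand-in MacroAssembler so the sketch compiles on its own.
    struct MacroAssembler {
      void EnterInternalFrame() { /* would emit frame-entry code */ }
      void LeaveInternalFrame() { /* would emit frame-exit code */ }
    };

    // The constructor emits the entry code and the destructor the exit
    // code, so leaving the C++ scope by any path keeps the generated
    // Enter/Leave pairs balanced automatically.
    class InternalFrameScopeSketch {
     public:
      explicit InternalFrameScopeSketch(MacroAssembler* masm) : masm_(masm) {
        masm_->EnterInternalFrame();
      }
      ~InternalFrameScopeSketch() { masm_->LeaveInternalFrame(); }

     private:
      MacroAssembler* masm_;
    };

With the explicit-call form on the right, the code generator itself is responsible for that pairing, which is exactly what the braces in the left column existed to enforce.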
1455 | 1450 |
1456 | 1451 |
1457 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { | 1452 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { |
1458 switch (type_) { | 1453 switch (type_) { |
1459 // Add more cases when necessary. | 1454 // Add more cases when necessary. |
1460 case TranscendentalCache::SIN: return Runtime::kMath_sin; | 1455 case TranscendentalCache::SIN: return Runtime::kMath_sin; |
(...skipping 3445 matching lines...)
4906 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { | 4901 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
4907 // Save the registers. | 4902 // Save the registers. |
4908 __ pop(rcx); | 4903 __ pop(rcx); |
4909 __ push(rdx); | 4904 __ push(rdx); |
4910 __ push(rax); | 4905 __ push(rax); |
4911 __ push(rcx); | 4906 __ push(rcx); |
4912 | 4907 |
4913 // Call the runtime system in a fresh internal frame. | 4908 // Call the runtime system in a fresh internal frame. |
4914 ExternalReference miss = | 4909 ExternalReference miss = |
4915 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); | 4910 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); |
4916 { | 4911 __ EnterInternalFrame(); |
4917 FrameScope scope(masm, StackFrame::INTERNAL); | 4912 __ push(rdx); |
4918 __ push(rdx); | 4913 __ push(rax); |
4919 __ push(rax); | 4914 __ Push(Smi::FromInt(op_)); |
4920 __ Push(Smi::FromInt(op_)); | 4915 __ CallExternalReference(miss, 3); |
4921 __ CallExternalReference(miss, 3); | 4916 __ LeaveInternalFrame(); |
4922 } | |
4923 | 4917 |
4924 // Compute the entry point of the rewritten stub. | 4918 // Compute the entry point of the rewritten stub. |
4925 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); | 4919 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); |
4926 | 4920 |
4927 // Restore registers. | 4921 // Restore registers. |
4928 __ pop(rcx); | 4922 __ pop(rcx); |
4929 __ pop(rax); | 4923 __ pop(rax); |
4930 __ pop(rdx); | 4924 __ pop(rdx); |
4931 __ push(rcx); | 4925 __ push(rcx); |
4932 | 4926 |
(...skipping 110 matching lines...)
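One detail worth calling out in GenerateMiss above: lea(rdi, FieldOperand(rax, Code::kHeaderSize)) turns the tagged Code pointer returned by the miss handler into the address of the rewritten stub's first instruction. FieldOperand folds the heap-object untagging into the field offset. A standalone illustration with assumed constants (kHeapObjectTag is 1 in V8; the header size below is a made-up stand-in for Code::kHeaderSize):

    #include <cstdint>
    #include <cstdio>

    const uintptr_t kHeapObjectTag = 1;   // low pointer bit marks heap objects
    const uintptr_t kHeaderSize = 64;     // placeholder, not V8's real value

    // Equivalent of: lea rdi, FieldOperand(rax, Code::kHeaderSize)
    uintptr_t EntryPoint(uintptr_t tagged_code_pointer) {
      return tagged_code_pointer + kHeaderSize - kHeapObjectTag;
    }

    int main() {
      uintptr_t tagged = 0x1000 + kHeapObjectTag;  // tagged Code pointer
      std::printf("%#lx\n", (unsigned long) EntryPoint(tagged));  // 0x1040
      return 0;
    }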
5043 __ push(r0); | 5037 __ push(r0); |
5044 __ CallStub(&stub); | 5038 __ CallStub(&stub); |
5045 | 5039 |
5046 __ testq(r0, r0); | 5040 __ testq(r0, r0); |
5047 __ j(zero, miss); | 5041 __ j(zero, miss); |
5048 __ jmp(done); | 5042 __ jmp(done); |
5049 } | 5043 } |
5050 | 5044 |
5051 | 5045 |
5052 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { | 5046 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { |
5053 // This stub overrides SometimesSetsUpAFrame() to return false. That means | |
5054 // we cannot call anything that could cause a GC from this stub. | |
5055 // Stack frame on entry: | 5047 // Stack frame on entry: |
5056 // rsp[0 * kPointerSize]: return address. | 5048 // rsp[0 * kPointerSize]: return address. |
5057 // rsp[1 * kPointerSize]: key's hash. | 5049 // rsp[1 * kPointerSize]: key's hash. |
5058 // rsp[2 * kPointerSize]: key. | 5050 // rsp[2 * kPointerSize]: key. |
5059 // Registers: | 5051 // Registers: |
5060 // dictionary_: StringDictionary to probe. | 5052 // dictionary_: StringDictionary to probe. |
5061 // result_: used as scratch. | 5053 // result_: used as scratch. |
5062 // index_: will hold the index of the entry if lookup is successful. | 5054 // index_: will hold the index of the entry if lookup is successful. |
5063 // might alias with result_. | 5055 // might alias with result_. |
5064 // Returns: | 5056 // Returns: |
(...skipping 70 matching lines...)
5135 __ Drop(1); | 5127 __ Drop(1); |
5136 __ ret(2 * kPointerSize); | 5128 __ ret(2 * kPointerSize); |
5137 } | 5129 } |
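For readers coming to this hunk cold: StringDictionaryLookupStub::Generate is the out-of-line probe loop for V8's string dictionaries, written directly in assembly so it needs no frame. The C++ below sketches the general open-addressing lookup shape it implements; the probing sequence and entry layout are simplified assumptions for illustration, not V8's exact scheme.

    #include <cstdint>
    #include <string>
    #include <vector>

    struct Entry { std::string key; int value; bool used; };

    // Probe a power-of-two-sized table: start at hash & mask and step by
    // an increasing offset until we hit the key or an empty slot.
    int Lookup(const std::vector<Entry>& table, const std::string& key,
               uint32_t hash) {
      uint32_t mask = static_cast<uint32_t>(table.size()) - 1;
      uint32_t index = hash & mask;
      for (uint32_t i = 0; i < table.size(); index = (index + ++i) & mask) {
        if (!table[index].used) return -1;                 // empty: not found
        if (table[index].key == key) return (int) index;   // found: entry index
      }
      return -1;  // table exhausted without a hit
    }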
5138 | 5130 |
5139 | 5131 |
5140 #undef __ | 5132 #undef __ |
5141 | 5133 |
5142 } } // namespace v8::internal | 5134 } } // namespace v8::internal |
5143 | 5135 |
5144 #endif // V8_TARGET_ARCH_X64 | 5136 #endif // V8_TARGET_ARCH_X64 |