OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 214 matching lines...)
225 | 225 |
226 // Return and remove the on-stack parameters. | 226 // Return and remove the on-stack parameters. |
227 __ ret(3 * kPointerSize); | 227 __ ret(3 * kPointerSize); |
228 | 228 |
229 __ bind(&slow_case); | 229 __ bind(&slow_case); |
230 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 230 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
231 } | 231 } |
232 | 232 |
233 | 233 |
234 void ToBooleanStub::Generate(MacroAssembler* masm) { | 234 void ToBooleanStub::Generate(MacroAssembler* masm) { |
| 235 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
| 236 // we cannot call anything that could cause a GC from this stub. |
235 Label false_result, true_result, not_string; | 237 Label false_result, true_result, not_string; |
236 __ movq(rax, Operand(rsp, 1 * kPointerSize)); | 238 __ movq(rax, Operand(rsp, 1 * kPointerSize)); |
237 | 239 |
238 // undefined -> false | 240 // undefined -> false |
239 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); | 241 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); |
240 __ j(equal, &false_result); | 242 __ j(equal, &false_result); |
241 | 243 |
242 // Boolean -> its value | 244 // Boolean -> its value |
243 __ CompareRoot(rax, Heap::kFalseValueRootIndex); | 245 __ CompareRoot(rax, Heap::kFalseValueRootIndex); |
244 __ j(equal, &false_result); | 246 __ j(equal, &false_result); |
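Note on the ToBooleanStub hunk above: the new comment refers to the SometimesSetsUpAFrame() predicate on code stubs. A minimal sketch of the convention it describes, in C++ (the base-class default shown here is an assumption inferred from the comment, not quoted from this CL):

    // code-stubs.h (sketch, not the actual patch)
    class CodeStub {
     public:
      // Conservative default: a stub may enter an internal frame.
      virtual bool SometimesSetsUpAFrame() { return true; }
    };

    class ToBooleanStub : public CodeStub {
     public:
      // This stub never builds a frame, so it must also never call
      // anything that can allocate or otherwise trigger a GC.
      virtual bool SometimesSetsUpAFrame() { return false; }
    };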
(...skipping 312 matching lines...)
557 __ shl(kScratchRegister, Immediate(63)); | 559 __ shl(kScratchRegister, Immediate(63)); |
558 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); | 560 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); |
559 } else { | 561 } else { |
560 // Allocate a heap number before calculating the answer, | 562 // Allocate a heap number before calculating the answer, |
561 // so we don't have an untagged double around during GC. | 563 // so we don't have an untagged double around during GC. |
562 Label slow_allocate_heapnumber, heapnumber_allocated; | 564 Label slow_allocate_heapnumber, heapnumber_allocated; |
563 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); | 565 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); |
564 __ jmp(&heapnumber_allocated); | 566 __ jmp(&heapnumber_allocated); |
565 | 567 |
566 __ bind(&slow_allocate_heapnumber); | 568 __ bind(&slow_allocate_heapnumber); |
567 __ EnterInternalFrame(); | 569 { |
568 __ push(rax); | 570 FrameScope scope(masm, StackFrame::INTERNAL); |
569 __ CallRuntime(Runtime::kNumberAlloc, 0); | 571 __ push(rax); |
570 __ movq(rcx, rax); | 572 __ CallRuntime(Runtime::kNumberAlloc, 0); |
571 __ pop(rax); | 573 __ movq(rcx, rax); |
572 __ LeaveInternalFrame(); | 574 __ pop(rax); |
| 575 } |
573 __ bind(&heapnumber_allocated); | 576 __ bind(&heapnumber_allocated); |
574 // rcx: allocated 'empty' number | 577 // rcx: allocated 'empty' number |
575 | 578 |
576 // Copy the double value to the new heap number, flipping the sign. | 579 // Copy the double value to the new heap number, flipping the sign. |
577 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); | 580 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); |
578 __ Set(kScratchRegister, 0x01); | 581 __ Set(kScratchRegister, 0x01); |
579 __ shl(kScratchRegister, Immediate(63)); | 582 __ shl(kScratchRegister, Immediate(63)); |
580 __ xor_(rdx, kScratchRegister); // Flip sign. | 583 __ xor_(rdx, kScratchRegister); // Flip sign. |
581 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); | 584 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); |
582 __ movq(rax, rcx); | 585 __ movq(rax, rcx); |
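The hunk above (and the later ones in this file) replaces matched EnterInternalFrame()/LeaveInternalFrame() calls with a block-scoped FrameScope, so the frame teardown is emitted automatically at the closing brace and cannot be forgotten. A minimal sketch of the RAII idiom, assuming EnterFrame/LeaveFrame helpers on MacroAssembler (the real FrameScope presumably also records frame state for the SometimesSetsUpAFrame() check):

    // Sketch of the RAII wrapper used in the new code.
    class FrameScope {
     public:
      FrameScope(MacroAssembler* masm, StackFrame::Type type)
          : masm_(masm), type_(type) {
        masm_->EnterFrame(type);  // e.g. StackFrame::INTERNAL
      }
      ~FrameScope() {
        masm_->LeaveFrame(type_);  // emitted at the block's closing '}'
      }
     private:
      MacroAssembler* masm_;
      StackFrame::Type type_;
    };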
(...skipping 829 matching lines...)
1412 __ bind(&skip_cache); | 1415 __ bind(&skip_cache); |
1413 __ subq(rsp, Immediate(kDoubleSize)); | 1416 __ subq(rsp, Immediate(kDoubleSize)); |
1414 __ movsd(Operand(rsp, 0), xmm1); | 1417 __ movsd(Operand(rsp, 0), xmm1); |
1415 __ fld_d(Operand(rsp, 0)); | 1418 __ fld_d(Operand(rsp, 0)); |
1416 GenerateOperation(masm); | 1419 GenerateOperation(masm); |
1417 __ fstp_d(Operand(rsp, 0)); | 1420 __ fstp_d(Operand(rsp, 0)); |
1418 __ movsd(xmm1, Operand(rsp, 0)); | 1421 __ movsd(xmm1, Operand(rsp, 0)); |
1419 __ addq(rsp, Immediate(kDoubleSize)); | 1422 __ addq(rsp, Immediate(kDoubleSize)); |
1420 // We return the value in xmm1 without adding it to the cache, but | 1423 // We return the value in xmm1 without adding it to the cache, but |
1421 // we cause a scavenging GC so that future allocations will succeed. | 1424 // we cause a scavenging GC so that future allocations will succeed. |
1422 __ EnterInternalFrame(); | 1425 { |
1423 // Allocate an unused object bigger than a HeapNumber. | 1426 FrameScope scope(masm, StackFrame::INTERNAL); |
1424 __ Push(Smi::FromInt(2 * kDoubleSize)); | 1427 // Allocate an unused object bigger than a HeapNumber. |
1425 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); | 1428 __ Push(Smi::FromInt(2 * kDoubleSize)); |
1426 __ LeaveInternalFrame(); | 1429 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); |
| 1430 } |
1427 __ Ret(); | 1431 __ Ret(); |
1428 } | 1432 } |
1429 | 1433 |
1430 // Call runtime, doing whatever allocation and cleanup is necessary. | 1434 // Call runtime, doing whatever allocation and cleanup is necessary. |
1431 if (tagged) { | 1435 if (tagged) { |
1432 __ bind(&runtime_call_clear_stack); | 1436 __ bind(&runtime_call_clear_stack); |
1433 __ fstp(0); | 1437 __ fstp(0); |
1434 __ bind(&runtime_call); | 1438 __ bind(&runtime_call); |
1435 __ TailCallExternalReference( | 1439 __ TailCallExternalReference( |
1436 ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1); | 1440 ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1); |
1437 } else { // UNTAGGED. | 1441 } else { // UNTAGGED. |
1438 __ bind(&runtime_call_clear_stack); | 1442 __ bind(&runtime_call_clear_stack); |
1439 __ bind(&runtime_call); | 1443 __ bind(&runtime_call); |
1440 __ AllocateHeapNumber(rax, rdi, &skip_cache); | 1444 __ AllocateHeapNumber(rax, rdi, &skip_cache); |
1441 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); | 1445 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); |
1442 __ EnterInternalFrame(); | 1446 { |
1443 __ push(rax); | 1447 FrameScope scope(masm, StackFrame::INTERNAL); |
1444 __ CallRuntime(RuntimeFunction(), 1); | 1448 __ push(rax); |
1445 __ LeaveInternalFrame(); | 1449 __ CallRuntime(RuntimeFunction(), 1); |
| 1450 } |
1446 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); | 1451 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
1447 __ Ret(); | 1452 __ Ret(); |
1448 } | 1453 } |
1449 } | 1454 } |
1450 | 1455 |
1451 | 1456 |
1452 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { | 1457 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { |
1453 switch (type_) { | 1458 switch (type_) { |
1454 // Add more cases when necessary. | 1459 // Add more cases when necessary. |
1455 case TranscendentalCache::SIN: return Runtime::kMath_sin; | 1460 case TranscendentalCache::SIN: return Runtime::kMath_sin; |
(...skipping 3445 matching lines...)
4901 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { | 4906 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
4902 // Save the registers. | 4907 // Save the registers. |
4903 __ pop(rcx); | 4908 __ pop(rcx); |
4904 __ push(rdx); | 4909 __ push(rdx); |
4905 __ push(rax); | 4910 __ push(rax); |
4906 __ push(rcx); | 4911 __ push(rcx); |
4907 | 4912 |
4908 // Call the runtime system in a fresh internal frame. | 4913 // Call the runtime system in a fresh internal frame. |
4909 ExternalReference miss = | 4914 ExternalReference miss = |
4910 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); | 4915 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); |
4911 __ EnterInternalFrame(); | 4916 { |
4912 __ push(rdx); | 4917 FrameScope scope(masm, StackFrame::INTERNAL); |
4913 __ push(rax); | 4918 __ push(rdx); |
4914 __ Push(Smi::FromInt(op_)); | 4919 __ push(rax); |
4915 __ CallExternalReference(miss, 3); | 4920 __ Push(Smi::FromInt(op_)); |
4916 __ LeaveInternalFrame(); | 4921 __ CallExternalReference(miss, 3); |
| 4922 } |
4917 | 4923 |
4918 // Compute the entry point of the rewritten stub. | 4924 // Compute the entry point of the rewritten stub. |
4919 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); | 4925 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); |
4920 | 4926 |
4921 // Restore registers. | 4927 // Restore registers. |
4922 __ pop(rcx); | 4928 __ pop(rcx); |
4923 __ pop(rax); | 4929 __ pop(rax); |
4924 __ pop(rdx); | 4930 __ pop(rdx); |
4925 __ push(rcx); | 4931 __ push(rcx); |
4926 | 4932 |
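Note the ordering in the GenerateMiss hunk above: the FrameScope block closes, emitting the frame teardown, before rcx, rax, and rdx are popped, so those pops read the values saved to the caller's stack before the internal frame was entered. Schematically:

    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // ... pushes and the miss call happen inside the frame ...
      __ CallExternalReference(miss, 3);
    }  // LeaveFrame emitted here; rsp is back at the pre-frame saves
    __ pop(rcx);  // pops operate on the pre-frame stack slots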
(...skipping 110 matching lines...)
5037 __ push(r0); | 5043 __ push(r0); |
5038 __ CallStub(&stub); | 5044 __ CallStub(&stub); |
5039 | 5045 |
5040 __ testq(r0, r0); | 5046 __ testq(r0, r0); |
5041 __ j(zero, miss); | 5047 __ j(zero, miss); |
5042 __ jmp(done); | 5048 __ jmp(done); |
5043 } | 5049 } |
5044 | 5050 |
5045 | 5051 |
5046 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { | 5052 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { |
| 5053 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
| 5054 // we cannot call anything that could cause a GC from this stub. |
5047 // Stack frame on entry: | 5055 // Stack frame on entry: |
5048 // esp[0 * kPointerSize]: return address. | 5056 // esp[0 * kPointerSize]: return address. |
5049 // esp[1 * kPointerSize]: key's hash. | 5057 // esp[1 * kPointerSize]: key's hash. |
5050 // esp[2 * kPointerSize]: key. | 5058 // esp[2 * kPointerSize]: key. |
5051 // Registers: | 5059 // Registers: |
5052 // dictionary_: StringDictionary to probe. | 5060 // dictionary_: StringDictionary to probe. |
5053 // result_: used as scratch. | 5061 // result_: used as scratch. |
5054 // index_: will hold an index of entry if lookup is successful. | 5062 // index_: will hold an index of entry if lookup is successful. |
5055 // might alias with result_. | 5063 // might alias with result_. |
5056 // Returns: | 5064 // Returns: |
(...skipping 70 matching lines...)
5127 __ Drop(1); | 5135 __ Drop(1); |
5128 __ ret(2 * kPointerSize); | 5136 __ ret(2 * kPointerSize); |
5129 } | 5137 } |
5130 | 5138 |
5131 | 5139 |
5132 #undef __ | 5140 #undef __ |
5133 | 5141 |
5134 } } // namespace v8::internal | 5142 } } // namespace v8::internal |
5135 | 5143 |
5136 #endif // V8_TARGET_ARCH_X64 | 5144 #endif // V8_TARGET_ARCH_X64 |