1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 215 matching lines...)
226 __ ret(3 * kPointerSize); | 226 __ ret(3 * kPointerSize); |
227 | 227 |
228 __ bind(&slow_case); | 228 __ bind(&slow_case); |
229 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 229 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
230 } | 230 } |
231 | 231 |
232 | 232 |
233 // The stub expects its argument on the stack and returns its result in tos_: | 233 // The stub expects its argument on the stack and returns its result in tos_: |
234 // zero for false, and a non-zero value for true. | 234 // zero for false, and a non-zero value for true. |
235 void ToBooleanStub::Generate(MacroAssembler* masm) { | 235 void ToBooleanStub::Generate(MacroAssembler* masm) { |
| 236 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
| 237 // we cannot call anything that could cause a GC from this stub. |
236 Label patch; | 238 Label patch; |
237 const Register argument = rax; | 239 const Register argument = rax; |
238 const Register map = rdx; | 240 const Register map = rdx; |
239 | 241 |
240 if (!types_.IsEmpty()) { | 242 if (!types_.IsEmpty()) { |
241 __ movq(argument, Operand(rsp, 1 * kPointerSize)); | 243 __ movq(argument, Operand(rsp, 1 * kPointerSize)); |
242 } | 244 } |
243 | 245 |
244 // undefined -> false | 246 // undefined -> false |
245 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false); | 247 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false); |
(...skipping 369 matching lines...)
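An aside on what the chain of checks in ToBooleanStub::Generate encodes (the undefined case is visible above; the remaining type checks are in the elided lines): it is ECMAScript's ToBoolean conversion, returned in tos_ as zero or non-zero. A minimal sketch of those semantics in plain C++, using a hypothetical value model rather than V8's tagged representation:

```cpp
#include <cmath>
#include <string>

// Hypothetical value model for illustration only; the stub itself works on
// V8's tagged pointers and maps.
enum class Type { kUndefined, kNull, kBoolean, kSmi, kHeapNumber, kString, kObject };

struct Value {
  Type type;
  bool boolean = false;
  double number = 0;    // Smi or HeapNumber payload.
  std::string string;   // String payload.
};

// ES5 ToBoolean: undefined, null, false, +/-0, NaN and "" are falsy;
// everything else is truthy. The stub returns this in tos_ as 0 / non-0.
bool ToBoolean(const Value& v) {
  switch (v.type) {
    case Type::kUndefined:
    case Type::kNull:       return false;
    case Type::kBoolean:    return v.boolean;
    case Type::kSmi:
    case Type::kHeapNumber: return v.number != 0 && !std::isnan(v.number);
    case Type::kString:     return !v.string.empty();
    case Type::kObject:     return true;
  }
  return true;  // Unreachable; silences -Wreturn-type.
}
```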
615 __ shl(kScratchRegister, Immediate(63)); | 617 __ shl(kScratchRegister, Immediate(63)); |
616 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); | 618 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); |
617 } else { | 619 } else { |
618 // Allocate a heap number before calculating the answer, | 620 // Allocate a heap number before calculating the answer, |
619 // so we don't have an untagged double around during GC. | 621 // so we don't have an untagged double around during GC. |
620 Label slow_allocate_heapnumber, heapnumber_allocated; | 622 Label slow_allocate_heapnumber, heapnumber_allocated; |
621 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); | 623 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); |
622 __ jmp(&heapnumber_allocated); | 624 __ jmp(&heapnumber_allocated); |
623 | 625 |
624 __ bind(&slow_allocate_heapnumber); | 626 __ bind(&slow_allocate_heapnumber); |
625 __ EnterInternalFrame(); | 627 { |
626 __ push(rax); | 628 FrameScope scope(masm, StackFrame::INTERNAL); |
627 __ CallRuntime(Runtime::kNumberAlloc, 0); | 629 __ push(rax); |
628 __ movq(rcx, rax); | 630 __ CallRuntime(Runtime::kNumberAlloc, 0); |
629 __ pop(rax); | 631 __ movq(rcx, rax); |
630 __ LeaveInternalFrame(); | 632 __ pop(rax); |
| 633 } |
631 __ bind(&heapnumber_allocated); | 634 __ bind(&heapnumber_allocated); |
632 // rcx: allocated 'empty' number | 635 // rcx: allocated 'empty' number |
633 | 636 |
634 // Copy the double value to the new heap number, flipping the sign. | 637 // Copy the double value to the new heap number, flipping the sign. |
635 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); | 638 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); |
636 __ Set(kScratchRegister, 0x01); | 639 __ Set(kScratchRegister, 0x01); |
637 __ shl(kScratchRegister, Immediate(63)); | 640 __ shl(kScratchRegister, Immediate(63)); |
638 __ xor_(rdx, kScratchRegister); // Flip sign. | 641 __ xor_(rdx, kScratchRegister); // Flip sign. |
639 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); | 642 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); |
640 __ movq(rax, rcx); | 643 __ movq(rax, rcx); |
(...skipping 805 matching lines...)
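For clarity on the hunk above: the Set(0x01) / shl(63) / xor_ sequence toggles bit 63 of the stored double, the IEEE-754 sign bit, so the sign flips without any FPU arithmetic and NaN payloads survive unchanged. A standalone check of the bit trick (ordinary C++, not V8 code):

```cpp
#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstring>

// Flip a double's sign by XOR-ing bit 63 of its bit pattern, exactly what
// the Set(0x01) / shl(63) / xor_ sequence does to the HeapNumber payload.
double FlipSign(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  bits ^= uint64_t{1} << 63;  // kScratchRegister after Set + shl.
  std::memcpy(&d, &bits, sizeof(d));
  return d;
}

int main() {
  assert(FlipSign(1.5) == -1.5);
  assert(std::signbit(FlipSign(0.0)));    // +0.0 becomes -0.0 ...
  assert(!std::signbit(FlipSign(-0.0)));  // ... and vice versa.
}
```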
1446 __ bind(&skip_cache); | 1449 __ bind(&skip_cache); |
1447 __ subq(rsp, Immediate(kDoubleSize)); | 1450 __ subq(rsp, Immediate(kDoubleSize)); |
1448 __ movsd(Operand(rsp, 0), xmm1); | 1451 __ movsd(Operand(rsp, 0), xmm1); |
1449 __ fld_d(Operand(rsp, 0)); | 1452 __ fld_d(Operand(rsp, 0)); |
1450 GenerateOperation(masm); | 1453 GenerateOperation(masm); |
1451 __ fstp_d(Operand(rsp, 0)); | 1454 __ fstp_d(Operand(rsp, 0)); |
1452 __ movsd(xmm1, Operand(rsp, 0)); | 1455 __ movsd(xmm1, Operand(rsp, 0)); |
1453 __ addq(rsp, Immediate(kDoubleSize)); | 1456 __ addq(rsp, Immediate(kDoubleSize)); |
1454 // We return the value in xmm1 without adding it to the cache, but | 1457 // We return the value in xmm1 without adding it to the cache, but |
1455 // we cause a scavenging GC so that future allocations will succeed. | 1458 // we cause a scavenging GC so that future allocations will succeed. |
1456 __ EnterInternalFrame(); | 1459 { |
1457 // Allocate an unused object bigger than a HeapNumber. | 1460 FrameScope scope(masm, StackFrame::INTERNAL); |
1458 __ Push(Smi::FromInt(2 * kDoubleSize)); | 1461 // Allocate an unused object bigger than a HeapNumber. |
1459 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); | 1462 __ Push(Smi::FromInt(2 * kDoubleSize)); |
1460 __ LeaveInternalFrame(); | 1463 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); |
| 1464 } |
1461 __ Ret(); | 1465 __ Ret(); |
1462 } | 1466 } |
1463 | 1467 |
1464 // Call runtime, doing whatever allocation and cleanup is necessary. | 1468 // Call runtime, doing whatever allocation and cleanup is necessary. |
1465 if (tagged) { | 1469 if (tagged) { |
1466 __ bind(&runtime_call_clear_stack); | 1470 __ bind(&runtime_call_clear_stack); |
1467 __ fstp(0); | 1471 __ fstp(0); |
1468 __ bind(&runtime_call); | 1472 __ bind(&runtime_call); |
1469 __ TailCallExternalReference( | 1473 __ TailCallExternalReference( |
1470 ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1); | 1474 ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1); |
1471 } else { // UNTAGGED. | 1475 } else { // UNTAGGED. |
1472 __ bind(&runtime_call_clear_stack); | 1476 __ bind(&runtime_call_clear_stack); |
1473 __ bind(&runtime_call); | 1477 __ bind(&runtime_call); |
1474 __ AllocateHeapNumber(rax, rdi, &skip_cache); | 1478 __ AllocateHeapNumber(rax, rdi, &skip_cache); |
1475 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); | 1479 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); |
1476 __ EnterInternalFrame(); | 1480 { |
1477 __ push(rax); | 1481 FrameScope scope(masm, StackFrame::INTERNAL); |
1478 __ CallRuntime(RuntimeFunction(), 1); | 1482 __ push(rax); |
1479 __ LeaveInternalFrame(); | 1483 __ CallRuntime(RuntimeFunction(), 1); |
| 1484 } |
1480 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); | 1485 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
1481 __ Ret(); | 1486 __ Ret(); |
1482 } | 1487 } |
1483 } | 1488 } |
1484 | 1489 |
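The change running through this whole patch swaps the manual EnterInternalFrame()/LeaveInternalFrame() pairs for a block-scoped FrameScope, whose destructor emits the frame teardown when the C++ block is exited, so the pair can no longer become unbalanced. A simplified, self-contained sketch of that RAII idea (the stand-in types below are illustrative, not V8's real definitions):

```cpp
#include <cstdio>

// Minimal stand-ins so the sketch compiles on its own; these are NOT V8's
// real MacroAssembler or StackFrame.
struct StackFrame { enum Type { INTERNAL }; };
struct MacroAssembler {
  void EnterFrame(StackFrame::Type) { std::puts("  (emit frame setup)"); }
  void LeaveFrame(StackFrame::Type) { std::puts("  (emit frame teardown)"); }
};

// The RAII idea behind FrameScope: the constructor emits the frame setup and
// the destructor emits the teardown, so the Enter/Leave pair is balanced by
// C++ block structure instead of by hand.
class FrameScope {
 public:
  FrameScope(MacroAssembler* masm, StackFrame::Type type)
      : masm_(masm), type_(type) { masm_->EnterFrame(type_); }
  ~FrameScope() { masm_->LeaveFrame(type_); }
 private:
  MacroAssembler* masm_;
  StackFrame::Type type_;
};

int main() {
  MacroAssembler masm;
  {
    FrameScope scope(&masm, StackFrame::INTERNAL);
    std::puts("  (emit runtime call that needs an internal frame)");
  }  // Teardown is emitted here on every path out of the block.
}
```

In the stub code above, the extra braces around each FrameScope exist purely to bound the scope, and hence the emitted frame, to the runtime call that needs it.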
1485 | 1490 |
1486 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { | 1491 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { |
1487 switch (type_) { | 1492 switch (type_) { |
1488 // Add more cases when necessary. | 1493 // Add more cases when necessary. |
1489 case TranscendentalCache::SIN: return Runtime::kMath_sin; | 1494 case TranscendentalCache::SIN: return Runtime::kMath_sin; |
(...skipping 3774 matching lines...)
5264 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { | 5269 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
5265 // Save the registers. | 5270 // Save the registers. |
5266 __ pop(rcx); | 5271 __ pop(rcx); |
5267 __ push(rdx); | 5272 __ push(rdx); |
5268 __ push(rax); | 5273 __ push(rax); |
5269 __ push(rcx); | 5274 __ push(rcx); |
5270 | 5275 |
5271 // Call the runtime system in a fresh internal frame. | 5276 // Call the runtime system in a fresh internal frame. |
5272 ExternalReference miss = | 5277 ExternalReference miss = |
5273 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); | 5278 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); |
5274 __ EnterInternalFrame(); | 5279 { |
5275 __ push(rdx); | 5280 FrameScope scope(masm, StackFrame::INTERNAL); |
5276 __ push(rax); | 5281 __ push(rdx); |
5277 __ Push(Smi::FromInt(op_)); | 5282 __ push(rax); |
5278 __ CallExternalReference(miss, 3); | 5283 __ Push(Smi::FromInt(op_)); |
5279 __ LeaveInternalFrame(); | 5284 __ CallExternalReference(miss, 3); |
| 5285 } |
5280 | 5286 |
5281 // Compute the entry point of the rewritten stub. | 5287 // Compute the entry point of the rewritten stub. |
5282 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); | 5288 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); |
5283 | 5289 |
5284 // Restore registers. | 5290 // Restore registers. |
5285 __ pop(rcx); | 5291 __ pop(rcx); |
5286 __ pop(rax); | 5292 __ pop(rax); |
5287 __ pop(rdx); | 5293 __ pop(rdx); |
5288 __ push(rcx); | 5294 __ push(rcx); |
5289 | 5295 |
(...skipping 110 matching lines...)
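A note on the pop/push dance at the top of ICCompareStub::GenerateMiss: popping the return address into rcx and pushing it back after rdx and rax slides the two compare operands underneath it, so they are still on the stack when the miss handler returns; the mirror-image sequence afterwards restores them. A toy simulation of that shuffle (hypothetical, modeling the stack with a vector):

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

int main() {
  // Model the machine stack with the top of stack at the back of the vector.
  std::vector<uint64_t> stack = {0xCAFE /* return address */};
  uint64_t rdx = 1, rax = 2, rcx = 0;

  // Entry sequence: slide the two operands under the return address.
  rcx = stack.back(); stack.pop_back();  // pop  rcx (return address)
  stack.push_back(rdx);                  // push rdx
  stack.push_back(rax);                  // push rax
  stack.push_back(rcx);                  // push rcx (return address on top)

  // ... the miss handler runs here inside its own internal frame ...

  // Exit sequence: restore the operands and put the return address back.
  rcx = stack.back(); stack.pop_back();  // pop  rcx
  rax = stack.back(); stack.pop_back();  // pop  rax
  rdx = stack.back(); stack.pop_back();  // pop  rdx
  stack.push_back(rcx);                  // push rcx

  assert(rdx == 1 && rax == 2 && stack.back() == 0xCAFE);
}
```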
5400 __ push(r0); | 5406 __ push(r0); |
5401 __ CallStub(&stub); | 5407 __ CallStub(&stub); |
5402 | 5408 |
5403 __ testq(r0, r0); | 5409 __ testq(r0, r0); |
5404 __ j(zero, miss); | 5410 __ j(zero, miss); |
5405 __ jmp(done); | 5411 __ jmp(done); |
5406 } | 5412 } |
5407 | 5413 |
5408 | 5414 |
5409 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { | 5415 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { |
| 5416 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
| 5417 // we cannot call anything that could cause a GC from this stub. |
5410 // Stack frame on entry: | 5418 // Stack frame on entry: |
5411 // esp[0 * kPointerSize]: return address. | 5419 // esp[0 * kPointerSize]: return address. |
5412 // esp[1 * kPointerSize]: key's hash. | 5420 // esp[1 * kPointerSize]: key's hash. |
5413 // esp[2 * kPointerSize]: key. | 5421 // esp[2 * kPointerSize]: key. |
5414 // Registers: | 5422 // Registers: |
5415 // dictionary_: StringDictionary to probe. | 5423 // dictionary_: StringDictionary to probe. |
5416 // result_: used as scratch. | 5424 // result_: used as scratch. |
5417 //  index_: will hold the index of the entry if the lookup | 5425 //  index_: will hold the index of the entry if the lookup |
5418 //          succeeds; may alias result_. | 5426 //          succeeds; may alias result_. |
5419 // Returns: | 5427 // Returns: |
(...skipping 70 matching lines...)
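The elided body of StringDictionaryLookupStub::Generate performs an open-addressed probe over the dictionary starting from the key's hash, entirely in registers, which is why it must not trigger a GC. A rough sketch of such a lookup loop under simplified assumptions (hypothetical types; linear probing for brevity, where V8's dictionaries actually probe quadratically):

```cpp
#include <cstdint>
#include <string>
#include <vector>

// Hypothetical open-addressed dictionary slot, for illustration only.
struct Entry {
  const std::string* key = nullptr;  // nullptr marks an empty slot.
  uint64_t value = 0;
};

// Probe from the key's hash, wrapping with a power-of-two mask; return the
// slot index on success or -1 on failure. This mirrors the stub's contract
// (index_ on success, a zero/non-zero flag in result_) and, like the stub,
// performs no allocation.
int Lookup(const std::vector<Entry>& table, const std::string& key,
           uint32_t hash) {
  const uint32_t mask = static_cast<uint32_t>(table.size()) - 1;
  for (uint32_t i = 0; i < table.size(); ++i) {
    const Entry& e = table[(hash + i) & mask];
    if (e.key == nullptr) return -1;  // Hit an empty slot: key is absent.
    if (*e.key == key) return static_cast<int>((hash + i) & mask);
  }
  return -1;  // Table full of collisions; not found.
}
```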
5490 __ Drop(1); | 5498 __ Drop(1); |
5491 __ ret(2 * kPointerSize); | 5499 __ ret(2 * kPointerSize); |
5492 } | 5500 } |
5493 | 5501 |
5494 | 5502 |
5495 #undef __ | 5503 #undef __ |
5496 | 5504 |
5497 } } // namespace v8::internal | 5505 } } // namespace v8::internal |
5498 | 5506 |
5499 #endif // V8_TARGET_ARCH_X64 | 5507 #endif // V8_TARGET_ARCH_X64 |