OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 517 matching lines...)
528 } | 528 } |
529 | 529 |
530 | 530 |
531 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, | 531 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
532 Label* slow) { | 532 Label* slow) { |
533 // Check if the operand is a heap number. | 533 // Check if the operand is a heap number. |
534 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 534 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
535 Heap::kHeapNumberMapRootIndex); | 535 Heap::kHeapNumberMapRootIndex); |
536 __ j(not_equal, slow); | 536 __ j(not_equal, slow); |
537 | 537 |
538 // Operand is a float, negate its value by flipping sign bit. | 538 // Operand is a float, negate its value by flipping the sign bit. |
539 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); | |
540 __ Set(kScratchRegister, 0x01); | |
541 __ shl(kScratchRegister, Immediate(63)); | |
542 __ xor_(rdx, kScratchRegister); // Flip sign. | |
543 // rdx is value to store. | |
544 if (mode_ == UNARY_OVERWRITE) { | 539 if (mode_ == UNARY_OVERWRITE) { |
545 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx); | 540 __ Set(kScratchRegister, 0x01); |
| 541 __ shl(kScratchRegister, Immediate(63)); |
| 542 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); |
546 } else { | 543 } else { |
| 544 // Allocate a heap number before calculating the answer, |
| 545 // so we don't have an untagged double around during GC. |
547 Label slow_allocate_heapnumber, heapnumber_allocated; | 546 Label slow_allocate_heapnumber, heapnumber_allocated; |
548 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); | 547 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); |
549 __ jmp(&heapnumber_allocated); | 548 __ jmp(&heapnumber_allocated); |
550 | 549 |
551 __ bind(&slow_allocate_heapnumber); | 550 __ bind(&slow_allocate_heapnumber); |
552 __ EnterInternalFrame(); | 551 __ EnterInternalFrame(); |
553 __ push(rdx); | 552 __ push(rax); |
554 __ CallRuntime(Runtime::kNumberAlloc, 0); | 553 __ CallRuntime(Runtime::kNumberAlloc, 0); |
555 __ movq(rcx, rax); | 554 __ movq(rcx, rax); |
556 __ pop(rdx); | 555 __ pop(rax); |
557 __ LeaveInternalFrame(); | 556 __ LeaveInternalFrame(); |
558 | |
559 __ bind(&heapnumber_allocated); | 557 __ bind(&heapnumber_allocated); |
560 // rcx: allocated 'empty' number | 558 // rcx: allocated 'empty' number |
| 559 |
| 560 // Copy the double value to the new heap number, flipping the sign. |
| 561 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 562 __ Set(kScratchRegister, 0x01); |
| 563 __ shl(kScratchRegister, Immediate(63)); |
| 564 __ xor_(rdx, kScratchRegister); // Flip sign. |
561 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); | 565 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); |
562 __ movq(rax, rcx); | 566 __ movq(rax, rcx); |
563 } | 567 } |
564 __ ret(0); | 568 __ ret(0); |
565 } | 569 } |
566 | 570 |
567 | 571 |
568 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( | 572 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( |
569 MacroAssembler* masm, | 573 MacroAssembler* masm, |
570 Label* slow) { | 574 Label* slow) { |
(...skipping 4159 matching lines...)
4730 // Do a tail call to the rewritten stub. | 4734 // Do a tail call to the rewritten stub. |
4731 __ jmp(rdi); | 4735 __ jmp(rdi); |
4732 } | 4736 } |
4733 | 4737 |
4734 | 4738 |
4735 #undef __ | 4739 #undef __ |
4736 | 4740 |
4737 } } // namespace v8::internal | 4741 } } // namespace v8::internal |
4738 | 4742 |
4739 #endif // V8_TARGET_ARCH_X64 | 4743 #endif // V8_TARGET_ARCH_X64 |
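
Note on the GenerateHeapNumberCodeSub hunk above: the stub negates a HeapNumber by XOR-ing bit 63 of its raw 64-bit payload (Set scratch to 1, shl by 63, xor_), which flips the IEEE-754 sign bit without touching the FPU. A minimal standalone C++ sketch of the same bit trick, not V8 code and with illustrative names only:

    // Sketch only: flip bit 63 of a double's 64-bit representation.
    // For IEEE-754 doubles this is exactly the sign bit, so the result is
    // -value (including for -0.0, infinities, and NaN payload-preserving).
    #include <cstdint>
    #include <cstring>
    #include <cstdio>

    double FlipSignBit(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));  // reinterpret without UB
      bits ^= uint64_t{1} << 63;                 // flip the sign bit
      std::memcpy(&value, &bits, sizeof(bits));
      return value;
    }

    int main() {
      std::printf("%g -> %g\n", 2.5, FlipSignBit(2.5));    // 2.5 -> -2.5
      std::printf("%g -> %g\n", -0.0, FlipSignBit(-0.0));  // -0 -> 0
      return 0;
    }

The reordering in the new column follows the same reasoning as its added comment: the heap number is allocated (possibly via Runtime::kNumberAlloc, which can trigger GC) before the untagged double is loaded and XOR-ed, so no raw double value is live across a call that may move objects.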