OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 517 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
528 } | 528 } |
529 | 529 |
530 | 530 |
531 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, | 531 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
532 Label* slow) { | 532 Label* slow) { |
533 // Check if the operand is a heap number. | 533 // Check if the operand is a heap number. |
534 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 534 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
535 Heap::kHeapNumberMapRootIndex); | 535 Heap::kHeapNumberMapRootIndex); |
536 __ j(not_equal, slow); | 536 __ j(not_equal, slow); |
537 | 537 |
538 // Operand is a float, negate its value by flipping sign bit. | 538 // Allocate a heap number (if necessary) before calculating the answer, |
539 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); | 539 // so we don't have an untagged double around during GC. |
540 __ Set(kScratchRegister, 0x01); | 540 if (mode_ != UNARY_OVERWRITE) { |
541 __ shl(kScratchRegister, Immediate(63)); | |
542 __ xor_(rdx, kScratchRegister); // Flip sign. | |
543 // rdx is value to store. | |
544 if (mode_ == UNARY_OVERWRITE) { | |
545 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx); | |
546 } else { | |
547 Label slow_allocate_heapnumber, heapnumber_allocated; | 541 Label slow_allocate_heapnumber, heapnumber_allocated; |
548 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); | 542 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); |
549 __ jmp(&heapnumber_allocated); | 543 __ jmp(&heapnumber_allocated); |
550 | 544 |
551 __ bind(&slow_allocate_heapnumber); | 545 __ bind(&slow_allocate_heapnumber); |
552 __ EnterInternalFrame(); | 546 __ EnterInternalFrame(); |
553 __ push(rdx); | 547 __ push(rax); |
554 __ CallRuntime(Runtime::kNumberAlloc, 0); | 548 __ CallRuntime(Runtime::kNumberAlloc, 0); |
555 __ movq(rcx, rax); | 549 __ movq(rcx, rax); |
556 __ pop(rdx); | 550 __ pop(rax); |
557 __ LeaveInternalFrame(); | 551 __ LeaveInternalFrame(); |
552 __ bind(&heapnumber_allocated); | |
553 } | |
558 | 554 |
559 __ bind(&heapnumber_allocated); | 555 // Operand is a float, negate its value by flipping sign bit. |
556 if (mode_ == UNARY_OVERWRITE) { | |
Søren Thygesen Gjesse
2011/05/02 12:46:25
How about hoisting the loading of the scratch register out of the if/else, since both branches set it up the same way?
| |
557 __ Set(kScratchRegister, 0x01); | |
558 __ shl(kScratchRegister, Immediate(63)); | |
559 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); | |
560 } else { | |
561 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); | |
562 __ Set(kScratchRegister, 0x01); | |
563 __ shl(kScratchRegister, Immediate(63)); | |
564 __ xor_(rdx, kScratchRegister); // Flip sign. | |
560 // rcx: allocated 'empty' number | 565 // rcx: allocated 'empty' number |
561 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); | 566 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); |
562 __ movq(rax, rcx); | 567 __ movq(rax, rcx); |
563 } | 568 } |
564 __ ret(0); | 569 __ ret(0); |
565 } | 570 } |
566 | 571 |
567 | 572 |
568 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( | 573 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( |
569 MacroAssembler* masm, | 574 MacroAssembler* masm, |
(...skipping 4160 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
4730 // Do a tail call to the rewritten stub. | 4735 // Do a tail call to the rewritten stub. |
4731 __ jmp(rdi); | 4736 __ jmp(rdi); |
4732 } | 4737 } |
4733 | 4738 |
4734 | 4739 |
4735 #undef __ | 4740 #undef __ |
4736 | 4741 |
4737 } } // namespace v8::internal | 4742 } } // namespace v8::internal |
4738 | 4743 |
4739 #endif // V8_TARGET_ARCH_X64 | 4744 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |