OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 692 matching lines...)
703 } | 703 } |
704 | 704 |
705 | 705 |
706 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, | 706 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
707 Label* slow) { | 707 Label* slow) { |
708 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); | 708 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); |
709 __ cmp(edx, masm->isolate()->factory()->heap_number_map()); | 709 __ cmp(edx, masm->isolate()->factory()->heap_number_map()); |
710 __ j(not_equal, slow); | 710 __ j(not_equal, slow); |
711 | 711 |
712 if (mode_ == UNARY_OVERWRITE) { | 712 if (mode_ == UNARY_OVERWRITE) { |
713 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset)); | 713 __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset), |
714 __ xor_(edx, HeapNumber::kSignMask); // Flip sign. | 714 Immediate(HeapNumber::kSignMask)); // Flip sign. |
715 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx); | |
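Note on the hunk above: the UNARY_OVERWRITE path previously took three instructions, loading the heap number's high word (at HeapNumber::kExponentOffset) into edx, XOR-ing in HeapNumber::kSignMask, and storing it back. The new code folds this into a single read-modify-write xor_ on the memory operand with an immediate, which avoids clobbering edx. Both versions negate the boxed double by toggling its IEEE 754 sign bit; the in-place overwrite is only safe in UNARY_OVERWRITE mode, where the operand may be mutated (the else branch below allocates a fresh HeapNumber instead). A minimal sketch in plain C++ of the same bit manipulation (FlipSign is a hypothetical helper for illustration, not V8 code):

#include <cassert>
#include <cstdint>
#include <cstring>

// Negate a double by toggling its sign bit, the same effect as the stub's
// xor_(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(kSignMask))
// on the high 32-bit word of the stored double.
double FlipSign(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);  // Bit-cast without aliasing UB.
  bits ^= uint64_t{1} << 63;                // IEEE 754 sign bit is the MSB.
  std::memcpy(&value, &bits, sizeof bits);
  return value;
}

int main() {
  assert(FlipSign(1.5) == -1.5);
  assert(FlipSign(-2.0) == 2.0);
  return 0;
}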
716 } else { | 715 } else { |
717 __ mov(edx, Operand(eax)); | 716 __ mov(edx, Operand(eax)); |
718 // edx: operand | 717 // edx: operand |
719 | 718 |
720 Label slow_allocate_heapnumber, heapnumber_allocated; | 719 Label slow_allocate_heapnumber, heapnumber_allocated; |
721 __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber); | 720 __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber); |
722 __ jmp(&heapnumber_allocated); | 721 __ jmp(&heapnumber_allocated); |
723 | 722 |
724 __ bind(&slow_allocate_heapnumber); | 723 __ bind(&slow_allocate_heapnumber); |
725 __ EnterInternalFrame(); | 724 __ EnterInternalFrame(); |
(...skipping 5132 matching lines...)
5858 // Do a tail call to the rewritten stub. | 5857 // Do a tail call to the rewritten stub. |
5859 __ jmp(Operand(edi)); | 5858 __ jmp(Operand(edi)); |
5860 } | 5859 } |
5861 | 5860 |
5862 | 5861 |
5863 #undef __ | 5862 #undef __ |
5864 | 5863 |
5865 } } // namespace v8::internal | 5864 } } // namespace v8::internal |
5866 | 5865 |
5867 #endif // V8_TARGET_ARCH_IA32 | 5866 #endif // V8_TARGET_ARCH_IA32 |