OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 575 matching lines...)
586 __ jmp(&loaded, Label::kNear); | 586 __ jmp(&loaded, Label::kNear); |
587 | 587 |
588 __ bind(&input_not_smi); | 588 __ bind(&input_not_smi); |
589 // Check if input is a HeapNumber. | 589 // Check if input is a HeapNumber. |
590 __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex); | 590 __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex); |
591 __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 591 __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
592 __ j(not_equal, &runtime_call); | 592 __ j(not_equal, &runtime_call); |
593 // Input is a HeapNumber. Push it on the FPU stack and load its | 593 // Input is a HeapNumber. Push it on the FPU stack and load its |
594 // bits into rbx. | 594 // bits into rbx. |
595 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset)); | 595 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset)); |
596 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset)); | 596 __ MoveDouble(rbx, FieldOperand(rax, HeapNumber::kValueOffset)); |
597 __ movq(rdx, rbx); | 597 __ movq(rdx, rbx); |
598 | 598 |
599 __ bind(&loaded); | 599 __ bind(&loaded); |
600 } else { // UNTAGGED. | 600 } else { // UNTAGGED. |
601 __ movq(rbx, xmm1); | 601 __ movq(rbx, xmm1); |
602 __ movq(rdx, xmm1); | 602 __ movq(rdx, xmm1); |
603 } | 603 } |
604 | 604 |
605 // ST[0] == double value, if TAGGED. | 605 // ST[0] == double value, if TAGGED. |
606 // rbx = bits of double value. | 606 // rbx = bits of double value. |
(...skipping 5230 matching lines...)
5837 __ bind(&fast_elements_case); | 5837 __ bind(&fast_elements_case); |
5838 GenerateCase(masm, FAST_ELEMENTS); | 5838 GenerateCase(masm, FAST_ELEMENTS); |
5839 } | 5839 } |
5840 | 5840 |
5841 | 5841 |
5842 #undef __ | 5842 #undef __ |
5843 | 5843 |
5844 } } // namespace v8::internal | 5844 } } // namespace v8::internal |
5845 | 5845 |
5846 #endif // V8_TARGET_ARCH_X64 | 5846 #endif // V8_TARGET_ARCH_X64 |