| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 966 matching lines...) |
| 977 } else if (is_uint32(x)) { | 977 } else if (is_uint32(x)) { |
| 978 movl(dst, Immediate(static_cast<uint32_t>(x))); | 978 movl(dst, Immediate(static_cast<uint32_t>(x))); |
| 979 } else if (is_int32(x)) { | 979 } else if (is_int32(x)) { |
| 980 movq(dst, Immediate(static_cast<int32_t>(x))); | 980 movq(dst, Immediate(static_cast<int32_t>(x))); |
| 981 } else { | 981 } else { |
| 982 movq(dst, x); | 982 movq(dst, x); |
| 983 } | 983 } |
| 984 } | 984 } |
| 985 | 985 |
| 986 | 986 |
| 987 void MacroAssembler::Set(const Operand& dst, int64_t x) { | 987 void MacroAssembler::Set(const Operand& dst, intptr_t x) { |
| 988 if (is_int32(x)) { | 988 if (kPointerSize == kInt64Size) { |
| 989 movq(dst, Immediate(static_cast<int32_t>(x))); | 989 if (is_int32(x)) { |
| 990 movp(dst, Immediate(static_cast<int32_t>(x))); |
| 991 } else { |
| 992 Set(kScratchRegister, x); |
| 993 movp(dst, kScratchRegister); |
| 994 } |
| 990 } else { | 995 } else { |
| 991 Set(kScratchRegister, x); | 996 ASSERT(kPointerSize == kInt32Size); |
| 992 movq(dst, kScratchRegister); | 997 movp(dst, Immediate(static_cast<int32_t>(x))); |
| 993 } | 998 } |
| 994 } | 999 } |
| 995 | 1000 |
| 996 | 1001 |
| 997 // ---------------------------------------------------------------------------- | 1002 // ---------------------------------------------------------------------------- |
| 998 // Smi tagging, untagging and tag detection. | 1003 // Smi tagging, untagging and tag detection. |
| 999 | 1004 |
| 1000 bool MacroAssembler::IsUnsafeInt(const int32_t x) { | 1005 bool MacroAssembler::IsUnsafeInt(const int32_t x) { |
| 1001 static const int kMaxBits = 17; | 1006 static const int kMaxBits = 17; |
| 1002 return !is_intn(x, kMaxBits); | 1007 return !is_intn(x, kMaxBits); |
| (...skipping 1582 matching lines...) |
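A note on the Set(const Operand& dst, intptr_t x) change above: x64 stores can encode at most a sign-extended 32-bit immediate, so when pointers are 64 bits wide a value that does not fit in 32 bits has to be materialized in kScratchRegister and then stored, while on the x32 ABI (kPointerSize == kInt32Size) a pointer-sized value always fits in the immediate form. The following is a minimal standalone sketch of that dispatch; the constants and the DescribeSetToMemory helper are illustrative stand-ins, not part of the V8 sources.

    #include <cassert>
    #include <cstdint>
    #include <cstdio>
    #include <limits>

    // Assumed stand-ins for V8's platform constants, not the real definitions.
    constexpr int kInt32Size = 4;
    constexpr int kInt64Size = 8;
    constexpr int kPointerSize = sizeof(void*);  // 8 on x64, 4 on the x32 ABI.

    static bool is_int32(int64_t x) {
      return x >= std::numeric_limits<int32_t>::min() &&
             x <= std::numeric_limits<int32_t>::max();
    }

    // Mirrors the branch structure of MacroAssembler::Set(const Operand&, intptr_t):
    // a memory destination takes at most an imm32, so wider values go through
    // the scratch register.
    void DescribeSetToMemory(intptr_t x) {
      if (kPointerSize == kInt64Size) {
        if (is_int32(static_cast<int64_t>(x))) {
          std::printf("movp [dst], imm32   ; value fits in 32 bits\n");
        } else {
          std::printf("Set scratch, imm64  ; then movp [dst], scratch\n");
        }
      } else {
        assert(kPointerSize == kInt32Size);  // x32: the immediate always fits.
        std::printf("movp [dst], imm32\n");
      }
    }

    int main() {
      DescribeSetToMemory(42);
      DescribeSetToMemory(static_cast<intptr_t>(INT64_C(0x123456789)));
      return 0;
    }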
| 2585 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte)); | 2590 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte)); |
| 2586 } | 2591 } |
| 2587 | 2592 |
| 2588 | 2593 |
| 2589 void MacroAssembler::Jump(ExternalReference ext) { | 2594 void MacroAssembler::Jump(ExternalReference ext) { |
| 2590 LoadAddress(kScratchRegister, ext); | 2595 LoadAddress(kScratchRegister, ext); |
| 2591 jmp(kScratchRegister); | 2596 jmp(kScratchRegister); |
| 2592 } | 2597 } |
| 2593 | 2598 |
| 2594 | 2599 |
| 2600 void MacroAssembler::Jump(const Operand& op) { |
| 2601 if (kPointerSize == kInt64Size) { |
| 2602 jmp(op); |
| 2603 } else { |
| 2604 ASSERT(kPointerSize == kInt32Size); |
| 2605 movp(kScratchRegister, op); |
| 2606 jmp(kScratchRegister); |
| 2607 } |
| 2608 } |
| 2609 |
| 2610 |
| 2595 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) { | 2611 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) { |
| 2596 Move(kScratchRegister, destination, rmode); | 2612 Move(kScratchRegister, destination, rmode); |
| 2597 jmp(kScratchRegister); | 2613 jmp(kScratchRegister); |
| 2598 } | 2614 } |
| 2599 | 2615 |
| 2600 | 2616 |
| 2601 void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) { | 2617 void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) { |
| 2602 // TODO(X64): Inline this | 2618 // TODO(X64): Inline this |
| 2603 jmp(code_object, rmode); | 2619 jmp(code_object, rmode); |
| 2604 } | 2620 } |
| (...skipping 11 matching lines...) |
| 2616 int end_position = pc_offset() + CallSize(ext); | 2632 int end_position = pc_offset() + CallSize(ext); |
| 2617 #endif | 2633 #endif |
| 2618 LoadAddress(kScratchRegister, ext); | 2634 LoadAddress(kScratchRegister, ext); |
| 2619 call(kScratchRegister); | 2635 call(kScratchRegister); |
| 2620 #ifdef DEBUG | 2636 #ifdef DEBUG |
| 2621 CHECK_EQ(end_position, pc_offset()); | 2637 CHECK_EQ(end_position, pc_offset()); |
| 2622 #endif | 2638 #endif |
| 2623 } | 2639 } |
| 2624 | 2640 |
| 2625 | 2641 |
| 2642 void MacroAssembler::Call(const Operand& op) { |
| 2643 if (kPointerSize == kInt64Size) { |
| 2644 call(op); |
| 2645 } else { |
| 2646 ASSERT(kPointerSize == kInt32Size); |
| 2647 movp(kScratchRegister, op); |
| 2648 call(kScratchRegister); |
| 2649 } |
| 2650 } |
| 2651 |
| 2652 |
| 2626 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) { | 2653 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) { |
| 2627 #ifdef DEBUG | 2654 #ifdef DEBUG |
| 2628 int end_position = pc_offset() + CallSize(destination); | 2655 int end_position = pc_offset() + CallSize(destination); |
| 2629 #endif | 2656 #endif |
| 2630 Move(kScratchRegister, destination, rmode); | 2657 Move(kScratchRegister, destination, rmode); |
| 2631 call(kScratchRegister); | 2658 call(kScratchRegister); |
| 2632 #ifdef DEBUG | 2659 #ifdef DEBUG |
| 2633 CHECK_EQ(pc_offset(), end_position); | 2660 CHECK_EQ(pc_offset(), end_position); |
| 2634 #endif | 2661 #endif |
| 2635 } | 2662 } |
| (...skipping 2357 matching lines...) |
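Similarly, the new Jump(const Operand&) and Call(const Operand&) overloads above branch on the pointer width: in long mode an indirect jmp/call with a memory operand reads a full 64-bit slot, so when pointers are only 32 bits wide (the x32 ABI) the target is first loaded into kScratchRegister with a pointer-sized movp and the branch goes through the register. A compact sketch of the same dispatch, again with illustrative names that are not part of the V8 API:

    #include <cassert>
    #include <cstdio>

    // Assumed stand-ins for V8's platform constants, not the real definitions.
    constexpr int kInt32Size = 4;
    constexpr int kInt64Size = 8;
    constexpr int kPointerSize = sizeof(void*);

    // Mirrors the branch structure of Jump/Call(const Operand&): with 64-bit
    // pointers the memory operand is branched through directly, otherwise the
    // 32-bit pointer slot is loaded into the scratch register first.
    void DescribeIndirectBranch(const char* mnemonic) {
      if (kPointerSize == kInt64Size) {
        std::printf("%s [op]             ; 64-bit slot read directly\n", mnemonic);
      } else {
        assert(kPointerSize == kInt32Size);
        std::printf("movp scratch, [op]  ; 32-bit pointer load\n");
        std::printf("%s scratch\n", mnemonic);
      }
    }

    int main() {
      DescribeIndirectBranch("jmp");
      DescribeIndirectBranch("call");
      return 0;
    }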
| 4993 j(equal, found); | 5020 j(equal, found); |
| 4994 movp(current, FieldOperand(current, Map::kPrototypeOffset)); | 5021 movp(current, FieldOperand(current, Map::kPrototypeOffset)); |
| 4995 CompareRoot(current, Heap::kNullValueRootIndex); | 5022 CompareRoot(current, Heap::kNullValueRootIndex); |
| 4996 j(not_equal, &loop_again); | 5023 j(not_equal, &loop_again); |
| 4997 } | 5024 } |
| 4998 | 5025 |
| 4999 | 5026 |
| 5000 } } // namespace v8::internal | 5027 } } // namespace v8::internal |
| 5001 | 5028 |
| 5002 #endif // V8_TARGET_ARCH_X64 | 5029 #endif // V8_TARGET_ARCH_X64 |