| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 5 #include <limits.h> // For LONG_MIN, LONG_MAX. |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
| 8 | 8 |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/base/division-by-constant.h" | 10 #include "src/base/division-by-constant.h" |
| (...skipping 2642 matching lines...) |
| 2653 } | 2653 } |
| 2654 } | 2654 } |
| 2655 | 2655 |
| 2656 | 2656 |
| 2657 void MacroAssembler::LoadTransitionedArrayMapConditional( | 2657 void MacroAssembler::LoadTransitionedArrayMapConditional( |
| 2658 ElementsKind expected_kind, | 2658 ElementsKind expected_kind, |
| 2659 ElementsKind transitioned_kind, | 2659 ElementsKind transitioned_kind, |
| 2660 Register map_in_out, | 2660 Register map_in_out, |
| 2661 Register scratch, | 2661 Register scratch, |
| 2662 Label* no_map_match) { | 2662 Label* no_map_match) { |
| | 2663 DCHECK(IsFastElementsKind(expected_kind)); |
| | 2664 DCHECK(IsFastElementsKind(transitioned_kind)); |
| | 2665 |
| 2663 // Check that the function's map is the same as the expected cached map. | 2666 // Check that the function's map is the same as the expected cached map. |
| 2664 LoadNativeContextSlot(Context::JS_ARRAY_MAPS_INDEX, scratch); | 2667 ldr(scratch, NativeContextMemOperand()); |
| 2665 size_t offset = expected_kind * kPointerSize + FixedArrayBase::kHeaderSize; | 2668 ldr(ip, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind))); |
| 2666 ldr(ip, FieldMemOperand(scratch, offset)); | |
| 2667 cmp(map_in_out, ip); | 2669 cmp(map_in_out, ip); |
| 2668 b(ne, no_map_match); | 2670 b(ne, no_map_match); |
| 2669 | 2671 |
| 2670 // Use the transitioned cached map. | 2672 // Use the transitioned cached map. |
| 2671 offset = transitioned_kind * kPointerSize + | 2673 ldr(map_in_out, |
| 2672 FixedArrayBase::kHeaderSize; | 2674 ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind))); |
| 2673 ldr(map_in_out, FieldMemOperand(scratch, offset)); | |
| 2674 } | 2675 } |
| 2675 | 2676 |
| 2676 | 2677 |
| 2677 void MacroAssembler::LoadNativeContextSlot(int index, Register dst) { | 2678 void MacroAssembler::LoadNativeContextSlot(int index, Register dst) { |
| 2678 ldr(dst, NativeContextMemOperand()); | 2679 ldr(dst, NativeContextMemOperand()); |
| 2679 ldr(dst, ContextMemOperand(dst, index)); | 2680 ldr(dst, ContextMemOperand(dst, index)); |
| 2680 } | 2681 } |
| 2681 | 2682 |
| 2682 | 2683 |
| 2683 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 2684 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
| (...skipping 997 matching lines...) |
| 3681 } | 3682 } |
| 3682 } | 3683 } |
| 3683 if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift)); | 3684 if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift)); |
| 3684 add(result, result, Operand(dividend, LSR, 31)); | 3685 add(result, result, Operand(dividend, LSR, 31)); |
| 3685 } | 3686 } |
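Reviewer note: this second hunk only shows the tail of the division-by-constant helper (the body is hidden by the skipped lines). After the multiply-high step, it applies the optional arithmetic post-shift and then adds the dividend's sign bit (extracted with LSR #31) so the quotient truncates toward zero instead of rounding toward negative infinity. A minimal C++ sketch of that scheme, using the well-known magic constants for a divisor of 3 as an assumed example:

```cpp
// Sketch only: "magic number" signed division, matching the tail shown above.
// The multiplier/shift pair for divisor 3 is assumed here for illustration;
// in V8 they come from base::MagicNumbersForDivision.
#include <cstdint>
#include <cstdio>

int32_t DivideBy3(int32_t dividend) {
  const int32_t multiplier = 0x55555556;  // magic multiplier for divisor 3
  const int shift = 0;                    // post-shift for divisor 3
  // smull-style step: high 32 bits of the 64-bit signed product.
  int32_t result = static_cast<int32_t>(
      (static_cast<int64_t>(dividend) * multiplier) >> 32);
  // Matches: if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift));
  if (shift > 0) result >>= shift;
  // Matches: add(result, result, Operand(dividend, LSR, 31));
  // The unsigned shift yields 1 only for negative dividends, turning the
  // floor-style result into truncation toward zero.
  result += static_cast<uint32_t>(dividend) >> 31;
  return result;
}

int main() {
  std::printf("%d %d\n", DivideBy3(10), DivideBy3(-10));  // prints 3 -3
}
```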
| 3686 | 3687 |
| 3687 } // namespace internal | 3688 } // namespace internal |
| 3688 } // namespace v8 | 3689 } // namespace v8 |
| 3689 | 3690 |
| 3690 #endif // V8_TARGET_ARCH_ARM | 3691 #endif // V8_TARGET_ARCH_ARM |