| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
| 8 | 8 |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/debug.h" | 10 #include "src/debug.h" |
| (...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 60 | 60 |
| 61 | 61 |
| 62 void BreakLocationIterator::ClearDebugBreakAtReturn() { | 62 void BreakLocationIterator::ClearDebugBreakAtReturn() { |
| 63 // Reset the code emitted by EmitReturnSequence to its original state. | 63 // Reset the code emitted by EmitReturnSequence to its original state. |
| 64 rinfo()->PatchCode(original_rinfo()->pc(), | 64 rinfo()->PatchCode(original_rinfo()->pc(), |
| 65 Assembler::kJSRetSequenceInstructions); | 65 Assembler::kJSRetSequenceInstructions); |
| 66 } | 66 } |
| 67 | 67 |
| 68 | 68 |
| 69 bool Debug::IsDebugBreakAtReturn(RelocInfo* rinfo) { | 69 bool Debug::IsDebugBreakAtReturn(RelocInfo* rinfo) { |
| 70 ASSERT(RelocInfo::IsJSReturn(rinfo->rmode())); | 70 DCHECK(RelocInfo::IsJSReturn(rinfo->rmode())); |
| 71 return rinfo->IsPatchedReturnSequence(); | 71 return rinfo->IsPatchedReturnSequence(); |
| 72 } | 72 } |
| 73 | 73 |
| 74 | 74 |
| 75 bool BreakLocationIterator::IsDebugBreakAtSlot() { | 75 bool BreakLocationIterator::IsDebugBreakAtSlot() { |
| 76 ASSERT(IsDebugBreakSlot()); | 76 DCHECK(IsDebugBreakSlot()); |
| 77 // Check whether the debug break slot instructions have been patched. | 77 // Check whether the debug break slot instructions have been patched. |
| 78 return rinfo()->IsPatchedDebugBreakSlotSequence(); | 78 return rinfo()->IsPatchedDebugBreakSlotSequence(); |
| 79 } | 79 } |
| 80 | 80 |
| 81 | 81 |
| 82 void BreakLocationIterator::SetDebugBreakAtSlot() { | 82 void BreakLocationIterator::SetDebugBreakAtSlot() { |
| 83 // Patch the code emitted by DebugCodegen::GenerateSlots, changing the debug | 83 // Patch the code emitted by DebugCodegen::GenerateSlots, changing the debug |
| 84 // break slot code from | 84 // break slot code from |
| 85 // mov x0, x0 @ nop DEBUG_BREAK_NOP | 85 // mov x0, x0 @ nop DEBUG_BREAK_NOP |
| 86 // mov x0, x0 @ nop DEBUG_BREAK_NOP | 86 // mov x0, x0 @ nop DEBUG_BREAK_NOP |
| (...skipping 24 matching lines...) Expand all Loading... |
| 111 // code. By using blr, even though control will not return after the branch, | 111 // code. By using blr, even though control will not return after the branch, |
| 112 // this call site will be registered in the frame (lr being saved as the pc | 112 // this call site will be registered in the frame (lr being saved as the pc |
| 113 // of the next instruction to execute for this frame). The debugger can now | 113 // of the next instruction to execute for this frame). The debugger can now |
| 114 // iterate on the frames to find calls to debug break slot code. | 114 // iterate on the frames to find calls to debug break slot code. |
| 115 patcher.blr(ip0); | 115 patcher.blr(ip0); |
| 116 patcher.dc64(reinterpret_cast<int64_t>(entry)); | 116 patcher.dc64(reinterpret_cast<int64_t>(entry)); |
| 117 } | 117 } |
| 118 | 118 |
| 119 | 119 |
| 120 void BreakLocationIterator::ClearDebugBreakAtSlot() { | 120 void BreakLocationIterator::ClearDebugBreakAtSlot() { |
| 121 ASSERT(IsDebugBreakSlot()); | 121 DCHECK(IsDebugBreakSlot()); |
| 122 rinfo()->PatchCode(original_rinfo()->pc(), | 122 rinfo()->PatchCode(original_rinfo()->pc(), |
| 123 Assembler::kDebugBreakSlotInstructions); | 123 Assembler::kDebugBreakSlotInstructions); |
| 124 } | 124 } |
| 125 | 125 |
| 126 | 126 |
| 127 static void Generate_DebugBreakCallHelper(MacroAssembler* masm, | 127 static void Generate_DebugBreakCallHelper(MacroAssembler* masm, |
| 128 RegList object_regs, | 128 RegList object_regs, |
| 129 RegList non_object_regs, | 129 RegList non_object_regs, |
| 130 Register scratch) { | 130 Register scratch) { |
| 131 { | 131 { |
| (...skipping 11 matching lines...) Expand all Loading... |
| 143 // | 143 // |
| 144 // Also: | 144 // Also: |
| 145 // * object_regs may be modified during the C code by the garbage | 145 // * object_regs may be modified during the C code by the garbage |
| 146 // collector. Every object register must be a valid tagged pointer or | 146 // collector. Every object register must be a valid tagged pointer or |
| 147 // SMI. | 147 // SMI. |
| 148 // | 148 // |
| 149 // * non_object_regs will be converted to SMIs so that the garbage | 149 // * non_object_regs will be converted to SMIs so that the garbage |
| 150 // collector doesn't try to interpret them as pointers. | 150 // collector doesn't try to interpret them as pointers. |
| 151 // | 151 // |
| 152 // TODO(jbramley): Why can't this handle callee-saved registers? | 152 // TODO(jbramley): Why can't this handle callee-saved registers? |
| 153 ASSERT((~kCallerSaved.list() & object_regs) == 0); | 153 DCHECK((~kCallerSaved.list() & object_regs) == 0); |
| 154 ASSERT((~kCallerSaved.list() & non_object_regs) == 0); | 154 DCHECK((~kCallerSaved.list() & non_object_regs) == 0); |
| 155 ASSERT((object_regs & non_object_regs) == 0); | 155 DCHECK((object_regs & non_object_regs) == 0); |
| 156 ASSERT((scratch.Bit() & object_regs) == 0); | 156 DCHECK((scratch.Bit() & object_regs) == 0); |
| 157 ASSERT((scratch.Bit() & non_object_regs) == 0); | 157 DCHECK((scratch.Bit() & non_object_regs) == 0); |
| 158 ASSERT((masm->TmpList()->list() & (object_regs | non_object_regs)) == 0); | 158 DCHECK((masm->TmpList()->list() & (object_regs | non_object_regs)) == 0); |
| 159 STATIC_ASSERT(kSmiValueSize == 32); | 159 STATIC_ASSERT(kSmiValueSize == 32); |
| 160 | 160 |
| 161 CPURegList non_object_list = | 161 CPURegList non_object_list = |
| 162 CPURegList(CPURegister::kRegister, kXRegSizeInBits, non_object_regs); | 162 CPURegList(CPURegister::kRegister, kXRegSizeInBits, non_object_regs); |
| 163 while (!non_object_list.IsEmpty()) { | 163 while (!non_object_list.IsEmpty()) { |
| 164 // Store each non-object register as two SMIs. | 164 // Store each non-object register as two SMIs. |
| 165 Register reg = Register(non_object_list.PopLowestIndex()); | 165 Register reg = Register(non_object_list.PopLowestIndex()); |
| 166 __ Lsr(scratch, reg, 32); | 166 __ Lsr(scratch, reg, 32); |
| 167 __ SmiTagAndPush(scratch, reg); | 167 __ SmiTagAndPush(scratch, reg); |
| 168 | 168 |
| (...skipping 200 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 369 // Re-run JSFunction, x1 is function, cp is context. | 369 // Re-run JSFunction, x1 is function, cp is context. |
| 370 __ Br(scratch); | 370 __ Br(scratch); |
| 371 } | 371 } |
| 372 | 372 |
| 373 | 373 |
| 374 const bool LiveEdit::kFrameDropperSupported = true; | 374 const bool LiveEdit::kFrameDropperSupported = true; |
| 375 | 375 |
| 376 } } // namespace v8::internal | 376 } } // namespace v8::internal |
| 377 | 377 |
| 378 #endif // V8_TARGET_ARCH_ARM64 | 378 #endif // V8_TARGET_ARCH_ARM64 |
| OLD | NEW |