OLD | NEW |
---|---|
1 // Copyright (c) 2010 Google Inc. | 1 // Copyright (c) 2010 Google Inc. |
2 // All rights reserved. | 2 // All rights reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
6 // met: | 6 // met: |
7 // | 7 // |
8 // * Redistributions of source code must retain the above copyright | 8 // * Redistributions of source code must retain the above copyright |
9 // notice, this list of conditions and the following disclaimer. | 9 // notice, this list of conditions and the following disclaimer. |
10 // * Redistributions in binary form must reproduce the above | 10 // * Redistributions in binary form must reproduce the above |
(...skipping 129 matching lines...) | |
140 // Make sure we recovered all the essentials. | 140 // Make sure we recovered all the essentials. |
141 static const int essentials = (StackFrameAMD64::CONTEXT_VALID_RIP | 141 static const int essentials = (StackFrameAMD64::CONTEXT_VALID_RIP |
142 | StackFrameAMD64::CONTEXT_VALID_RSP); | 142 | StackFrameAMD64::CONTEXT_VALID_RSP); |
143 if ((frame->context_validity & essentials) != essentials) | 143 if ((frame->context_validity & essentials) != essentials) |
144 return NULL; | 144 return NULL; |
145 | 145 |
146 frame->trust = StackFrame::FRAME_TRUST_CFI; | 146 frame->trust = StackFrame::FRAME_TRUST_CFI; |
147 return frame.release(); | 147 return frame.release(); |
148 } | 148 } |
149 | 149 |
150 bool StackwalkerAMD64::IsEndOfStack(uint64_t caller_rip, uint64_t caller_rsp, | |
151 uint64_t callee_rsp) { | |
152 // Treat an instruction address of 0 as end-of-stack. | |
153 if (caller_rip == 0) { | |
154 return true; | |
155 } | |
156 | |
157 // If the new stack pointer is at a lower address than the old, then | |
158 // that's clearly incorrect. Treat this as end-of-stack to enforce | |
159 // progress and avoid infinite loops. | |
160 if (caller_rsp < callee_rsp) { | |
161 return true; | |
162 } | |
163 | |
164 return false; | |
165 } | |
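A minimal standalone sketch (not Breakpad code; the helper is re-declared as a free function so it compiles on its own) exercising the two end-of-stack conditions above with invented register values:

#include <cassert>
#include <cstdint>

static bool IsEndOfStack(uint64_t caller_rip, uint64_t caller_rsp,
                         uint64_t callee_rsp) {
  // A zero return address means there is no caller frame to recover.
  if (caller_rip == 0)
    return true;
  // The stack grows downward, so a caller %rsp below the callee's %rsp is
  // bogus; treating it as end-of-stack guarantees forward progress.
  return caller_rsp < callee_rsp;
}

int main() {
  assert(IsEndOfStack(0, 0x7fff2000, 0x7fff1000));          // zero %rip
  assert(IsEndOfStack(0x400123, 0x7fff0000, 0x7fff1000));   // %rsp moved down
  assert(!IsEndOfStack(0x400123, 0x7fff2000, 0x7fff1000));  // plausible caller
  return 0;
}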
166 | |
150 StackFrameAMD64* StackwalkerAMD64::GetCallerByFramePointerRecovery( | 167 StackFrameAMD64* StackwalkerAMD64::GetCallerByFramePointerRecovery( |
151 const vector<StackFrame*>& frames) { | 168 const vector<StackFrame*>& frames) { |
152 StackFrameAMD64* last_frame = static_cast<StackFrameAMD64*>(frames.back()); | 169 StackFrameAMD64* last_frame = static_cast<StackFrameAMD64*>(frames.back()); |
153 uint64_t last_rsp = last_frame->context.rsp; | 170 uint64_t last_rsp = last_frame->context.rsp; |
154 uint64_t last_rbp = last_frame->context.rbp; | 171 uint64_t last_rbp = last_frame->context.rbp; |
155 | 172 |
156 // Assume the presence of a frame pointer. This is not mandated by the | 173 // Assume the presence of a frame pointer. This is not mandated by the |
157 // AMD64 ABI, c.f. section 3.2.2 footnote 7, though it is typical for | 174 // AMD64 ABI, c.f. section 3.2.2 footnote 7, though it is typical for |
158 // compilers to still preserve the frame pointer and not treat %rbp as a | 175 // compilers to still preserve the frame pointer and not treat %rbp as a |
159 // general purpose register. | 176 // general purpose register. |
160 // | 177 // |
161 // With this assumption, the CALL instruction pushes the return address | 178 // With this assumption, the CALL instruction pushes the return address |
162 // onto the stack and sets %rip to the procedure to enter. The procedure | 179 // onto the stack and sets %rip to the procedure to enter. The procedure |
163 // then establishes the stack frame with a prologue that PUSHes the current | 180 // then establishes the stack frame with a prologue that PUSHes the current |
164 // %rbp onto the stack, MOVes the current %rsp to %rbp, and then allocates | 181 // %rbp onto the stack, MOVes the current %rsp to %rbp, and then allocates |
165 // space for any local variables. Using this procedure linking information, | 182 // space for any local variables. Using this procedure linking information, |
166 // it is possible to locate frame information for the callee: | 183 // it is possible to locate frame information for the callee: |
167 // | 184 // |
168 // %caller_rsp = *(%callee_rbp + 16) | 185 // %caller_rsp = *(%callee_rbp + 16) |
169 // %caller_rip = *(%callee_rbp + 8) | 186 // %caller_rip = *(%callee_rbp + 8) |
170 // %caller_rbp = *(%callee_rbp) | 187 // %caller_rbp = *(%callee_rbp) |
171 | 188 |
172 uint64_t caller_rip, caller_rbp; | 189 uint64_t caller_rip, caller_rbp; |
173 if (memory_->GetMemoryAtAddress(last_rbp + 8, &caller_rip) && | 190 if (memory_->GetMemoryAtAddress(last_rbp + 8, &caller_rip) && |
174 memory_->GetMemoryAtAddress(last_rbp, &caller_rbp)) { | 191 memory_->GetMemoryAtAddress(last_rbp, &caller_rbp)) { |
175 uint64_t caller_rsp = last_rbp + 16; | 192 uint64_t caller_rsp = last_rbp + 16; |
176 | 193 |
177 // Simple sanity check that the stack is growing downwards as expected. | 194 // Simple sanity check that the stack is growing downwards as expected. |
178 if (caller_rbp < last_rbp || caller_rsp < last_rsp) | 195 if (caller_rbp < last_rbp || caller_rsp < last_rsp) |
Mark Mentovai
2015/10/16 02:44:18
This caller_rsp < last_rsp check is duplicated in
ivanpe
2015/10/16 03:35:42
Done.
179 return NULL; | 196 return NULL; |
180 | 197 |
198 if (IsEndOfStack(caller_rip, caller_rsp, last_rsp)) { | |
199 // Reached end-of-stack. | |
200 return NULL; | |
201 } | |
202 | |
181 StackFrameAMD64* frame = new StackFrameAMD64(); | 203 StackFrameAMD64* frame = new StackFrameAMD64(); |
182 frame->trust = StackFrame::FRAME_TRUST_FP; | 204 frame->trust = StackFrame::FRAME_TRUST_FP; |
183 frame->context = last_frame->context; | 205 frame->context = last_frame->context; |
184 frame->context.rip = caller_rip; | 206 frame->context.rip = caller_rip; |
185 frame->context.rsp = caller_rsp; | 207 frame->context.rsp = caller_rsp; |
186 frame->context.rbp = caller_rbp; | 208 frame->context.rbp = caller_rbp; |
187 frame->context_validity = StackFrameAMD64::CONTEXT_VALID_RIP | | 209 frame->context_validity = StackFrameAMD64::CONTEXT_VALID_RIP | |
188 StackFrameAMD64::CONTEXT_VALID_RSP | | 210 StackFrameAMD64::CONTEXT_VALID_RSP | |
189 StackFrameAMD64::CONTEXT_VALID_RBP; | 211 StackFrameAMD64::CONTEXT_VALID_RBP; |
190 return frame; | 212 return frame; |
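To make the pointer arithmetic above concrete, here is a hypothetical, self-contained sketch (not Breakpad code; the fake stack values are invented) that builds a callee frame in a local array and recovers the caller's registers with the same *(%rbp), *(%rbp + 8), %rbp + 16 reads:

#include <cassert>
#include <cstdint>

int main() {
  // Fake "stack" memory; higher indices are higher addresses, i.e. toward
  // the caller, since the real stack grows downward.
  uint64_t stack[3] = {
      0x7fffffffd000ULL,  // *(callee_rbp):      caller's saved %rbp
      0x400123ULL,        // *(callee_rbp + 8):  return address (caller %rip)
      0ULL,               // callee_rbp + 16:    caller's %rsp points here
  };
  uint64_t callee_rbp = reinterpret_cast<uint64_t>(&stack[0]);

  // Same reads as GetCallerByFramePointerRecovery, minus the memory and
  // sanity checks.
  uint64_t caller_rbp = *reinterpret_cast<uint64_t*>(callee_rbp);
  uint64_t caller_rip = *reinterpret_cast<uint64_t*>(callee_rbp + 8);
  uint64_t caller_rsp = callee_rbp + 16;

  assert(caller_rbp == 0x7fffffffd000ULL);
  assert(caller_rip == 0x400123ULL);
  assert(caller_rsp == reinterpret_cast<uint64_t>(&stack[2]));
  return 0;
}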
(...skipping 86 matching lines...) | |
277 if (system_info_->os_short == "nacl") { | 299 if (system_info_->os_short == "nacl") { |
278 // Apply constraints from Native Client's x86-64 sandbox. These | 300 // Apply constraints from Native Client's x86-64 sandbox. These |
279 // registers have the 4GB-aligned sandbox base address (from r15) | 301 // registers have the 4GB-aligned sandbox base address (from r15) |
280 // added to them, and only the bottom 32 bits are relevant for | 302 // added to them, and only the bottom 32 bits are relevant for |
281 // stack walking. | 303 // stack walking. |
282 new_frame->context.rip = static_cast<uint32_t>(new_frame->context.rip); | 304 new_frame->context.rip = static_cast<uint32_t>(new_frame->context.rip); |
283 new_frame->context.rsp = static_cast<uint32_t>(new_frame->context.rsp); | 305 new_frame->context.rsp = static_cast<uint32_t>(new_frame->context.rsp); |
284 new_frame->context.rbp = static_cast<uint32_t>(new_frame->context.rbp); | 306 new_frame->context.rbp = static_cast<uint32_t>(new_frame->context.rbp); |
285 } | 307 } |
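A small aside on the truncation above, as a hypothetical sketch (the sandbox base value is invented): static_cast<uint32_t> keeps only the low 32 bits, i.e. the offset inside the 4GB sandbox, and drops the r15 base:

#include <cassert>
#include <cstdint>

int main() {
  uint64_t sandbox_base = 0x0000004500000000ULL;  // some 4GB-aligned r15 base
  uint64_t sandboxed_rip = sandbox_base + 0x00401000ULL;
  uint64_t rip_for_walking = static_cast<uint32_t>(sandboxed_rip);
  assert(rip_for_walking == 0x00401000ULL);  // only the in-sandbox offset remains
  return 0;
}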
286 | 308 |
287 // Treat an instruction address of 0 as end-of-stack. | 309 if (IsEndOfStack(new_frame->context.rip, new_frame->context.rsp, |
288 if (new_frame->context.rip == 0) | 310 last_frame->context.rsp)) { |
311 // Reached end-of-stack. | |
289 return NULL; | 312 return NULL; |
290 | 313 } |
291 // If the new stack pointer is at a lower address than the old, then | |
292 // that's clearly incorrect. Treat this as end-of-stack to enforce | |
293 // progress and avoid infinite loops. | |
294 if (new_frame->context.rsp <= last_frame->context.rsp) | |
295 return NULL; | |
296 | 314 |
297 // new_frame->context.rip is the return address, which is the instruction | 315 // new_frame->context.rip is the return address, which is the instruction |
298 // after the CALL that caused us to arrive at the callee. Set | 316 // after the CALL that caused us to arrive at the callee. Set |
299 // new_frame->instruction to one less than that, so it points within the | 317 // new_frame->instruction to one less than that, so it points within the |
300 // CALL instruction. See StackFrame::instruction for details, and | 318 // CALL instruction. See StackFrame::instruction for details, and |
301 // StackFrameAMD64::ReturnAddress. | 319 // StackFrameAMD64::ReturnAddress. |
302 new_frame->instruction = new_frame->context.rip - 1; | 320 new_frame->instruction = new_frame->context.rip - 1; |
303 | 321 |
304 return new_frame.release(); | 322 return new_frame.release(); |
305 } | 323 } |
306 | 324 |
307 } // namespace google_breakpad | 325 } // namespace google_breakpad |