OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 273 matching lines...) |
284 | 284 |
285 // Compute callee's stack pointer before making changes and save it as | 285 // Compute callee's stack pointer before making changes and save it as |
286 // ip register so that it is restored as sp register on exit, thereby | 286 // ip register so that it is restored as sp register on exit, thereby |
287 // popping the args. | 287 // popping the args. |
288 | 288 |
289 // ip = sp + kPointerSize * #args; | 289 // ip = sp + kPointerSize * #args; |
290 add(ip, sp, Operand(r0, LSL, kPointerSizeLog2)); | 290 add(ip, sp, Operand(r0, LSL, kPointerSizeLog2)); |
291 | 291 |
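(Note: not part of the change; a rough host-side sketch of the address arithmetic above, assuming a 32-bit ARM target where kPointerSize == 4 and kPointerSizeLog2 == 2, with hypothetical values for sp and the argument count held in r0.)

#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kPointerSize = 4;      // assumption: 32-bit ARM
  const uintptr_t kPointerSizeLog2 = 2;  // log2(kPointerSize)
  uintptr_t sp = 0x7fff0000;             // hypothetical stack pointer
  uintptr_t num_args = 3;                // hypothetical value held in r0
  // The LSL operand encodes the same multiplication as kPointerSize * #args,
  // so ip points just past the arguments; restoring sp from ip on exit
  // pops them in one step.
  uintptr_t ip = sp + (num_args << kPointerSizeLog2);
  assert(ip == sp + kPointerSize * num_args);
  return 0;
}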
292 // Align the stack at this point. After this point we have 5 pushes, | 292 // Align the stack at this point. After this point we have 5 pushes, |
293 // so in fact we have to unalign here! See also the assert on the | 293 // so in fact we have to unalign here! See also the assert on the |
294 // alignment immediately below. | 294 // alignment in AlignStack. |
295 #if defined(V8_HOST_ARCH_ARM) | 295 AlignStack(1); |
296 // Running on the real platform. Use the alignment as mandated by the local | |
297 // environment. | |
298 // Note: This will break if we ever start generating snapshots on one ARM | |
299 // platform for another ARM platform with a different alignment. | |
300 int activation_frame_alignment = OS::ActivationFrameAlignment(); | |
301 #else // defined(V8_HOST_ARCH_ARM) | |
302 // If we are using the simulator then we should always align to the expected | |
303 // alignment. As the simulator is used to generate snapshots we do not know | |
304 // if the target platform will need alignment, so we will always align at | |
305 // this point here. | |
306 int activation_frame_alignment = 2 * kPointerSize; | |
307 #endif // defined(V8_HOST_ARCH_ARM) | |
308 if (activation_frame_alignment != kPointerSize) { | |
309 // This code needs to be made more general if this assert doesn't hold. | |
310 ASSERT(activation_frame_alignment == 2 * kPointerSize); | |
311 mov(r7, Operand(Smi::FromInt(0))); | |
312 tst(sp, Operand(activation_frame_alignment - 1)); | |
313 push(r7, eq); // Conditional push instruction. | |
314 } | |
315 | 296 |
316 // Push in reverse order: caller_fp, sp_on_exit, and caller_pc. | 297 // Push in reverse order: caller_fp, sp_on_exit, and caller_pc. |
317 stm(db_w, sp, fp.bit() | ip.bit() | lr.bit()); | 298 stm(db_w, sp, fp.bit() | ip.bit() | lr.bit()); |
318 mov(fp, Operand(sp)); // setup new frame pointer | 299 mov(fp, Operand(sp)); // setup new frame pointer |
319 | 300 |
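(Note: not part of the change; a small host-side model of the stm(db_w, ...) above, assuming the usual ARM STMDB behaviour: sp is pre-decremented by one word per register and the lowest-numbered register lands at the lowest address, which is why fp (r11), ip (r12) and lr (r14) come out as caller_fp, sp_on_exit, caller_pc reading upwards from the new sp.)

#include <cassert>
#include <cstdint>

int main() {
  uint32_t stack[8] = {0};
  uint32_t* sp = stack + 8;  // hypothetical top of a full-descending stack
  uint32_t fp_val = 0x1111, ip_val = 0x2222, lr_val = 0x3333;
  // STMDB sp!, {fp, ip, lr}: decrement sp by three words, then store in
  // ascending register-number / ascending address order.
  sp -= 3;
  sp[0] = fp_val;  // caller_fp at the new sp; fp is then pointed here
  sp[1] = ip_val;  // sp_on_exit (callee sp computed earlier into ip)
  sp[2] = lr_val;  // caller_pc
  assert(sp[0] == 0x1111 && sp[2] == 0x3333);
  return 0;
}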
320 // Push debug marker. | 301 // Push debug marker. |
321 mov(ip, Operand(type == StackFrame::EXIT_DEBUG ? 1 : 0)); | 302 mov(ip, Operand(type == StackFrame::EXIT_DEBUG ? 1 : 0)); |
322 push(ip); | 303 push(ip); |
323 | 304 |
324 // Save the frame pointer and the context in top. | 305 // Save the frame pointer and the context in top. |
(...skipping 11 matching lines...) |
336 // Save the state of all registers to the stack from the memory | 317 // Save the state of all registers to the stack from the memory |
337 // location. This is needed to allow nested break points. | 318 // location. This is needed to allow nested break points. |
338 if (type == StackFrame::EXIT_DEBUG) { | 319 if (type == StackFrame::EXIT_DEBUG) { |
339 // Use sp as base to push. | 320 // Use sp as base to push. |
340 CopyRegistersFromMemoryToStack(sp, kJSCallerSaved); | 321 CopyRegistersFromMemoryToStack(sp, kJSCallerSaved); |
341 } | 322 } |
342 #endif | 323 #endif |
343 } | 324 } |
344 | 325 |
345 | 326 |
| 327 void MacroAssembler::AlignStack(int offset) { |
| 328 #if defined(V8_HOST_ARCH_ARM) |
| 329 // Running on the real platform. Use the alignment as mandated by the local |
| 330 // environment. |
| 331 // Note: This will break if we ever start generating snapshots on one ARM |
| 332 // platform for another ARM platform with a different alignment. |
| 333 int activation_frame_alignment = OS::ActivationFrameAlignment(); |
| 334 #else // defined(V8_HOST_ARCH_ARM) |
| 335 // If we are using the simulator then we should always align to the expected |
| 336 // alignment. As the simulator is used to generate snapshots we do not know |
| 337 // if the target platform will need alignment, so we will always align at |
| 338 // this point here. |
| 339 int activation_frame_alignment = 2 * kPointerSize; |
| 340 #endif // defined(V8_HOST_ARCH_ARM) |
| 341 if (activation_frame_alignment != kPointerSize) { |
| 342 // This code needs to be made more general if this assert doesn't hold. |
| 343 ASSERT(activation_frame_alignment == 2 * kPointerSize); |
| 344 mov(r7, Operand(Smi::FromInt(0))); |
| 345 tst(sp, Operand(activation_frame_alignment - offset)); |
| 346 push(r7, eq); // Conditional push instruction. |
| 347 } |
| 348 } |
| 349 |
| 350 |
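(Note: not part of the change; a minimal sketch of the check AlignStack emits, assuming the 2 * kPointerSize simulator alignment and the offset == 1 call site above, with a hypothetical sp value. tst sets the eq condition when sp is already a multiple of the alignment, and the conditional push then inserts one dummy word so that the pushes which follow leave the frame aligned.)

#include <cstdint>
#include <cstdio>

int main() {
  const uintptr_t kPointerSize = 4;              // assumption: 32-bit ARM
  const uintptr_t alignment = 2 * kPointerSize;  // 8-byte frame alignment
  uintptr_t sp = 0x7fff0008;                     // hypothetical sp value
  // Models tst(sp, Operand(alignment - 1)) followed by push(r7, eq): when
  // sp is already aligned, one extra word (Smi 0 in r7) is pushed.
  if ((sp & (alignment - 1)) == 0) {
    sp -= kPointerSize;
  }
  printf("sp after alignment adjustment: 0x%lx\n", (unsigned long)sp);
  return 0;
}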
346 void MacroAssembler::LeaveExitFrame(StackFrame::Type type) { | 351 void MacroAssembler::LeaveExitFrame(StackFrame::Type type) { |
347 #ifdef ENABLE_DEBUGGER_SUPPORT | 352 #ifdef ENABLE_DEBUGGER_SUPPORT |
348 // Restore the memory copy of the registers by digging them out from | 353 // Restore the memory copy of the registers by digging them out from |
349 // the stack. This is needed to allow nested break points. | 354 // the stack. This is needed to allow nested break points. |
350 if (type == StackFrame::EXIT_DEBUG) { | 355 if (type == StackFrame::EXIT_DEBUG) { |
351 // This code intentionally clobbers r2 and r3. | 356 // This code intentionally clobbers r2 and r3. |
352 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize; | 357 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize; |
353 const int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize; | 358 const int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize; |
354 add(r3, fp, Operand(kOffset)); | 359 add(r3, fp, Operand(kOffset)); |
355 CopyRegistersFromStackToMemory(r3, r2, kJSCallerSaved); | 360 CopyRegistersFromStackToMemory(r3, r2, kJSCallerSaved); |
(...skipping 819 matching lines...) |
1175 } | 1180 } |
1176 | 1181 |
1177 | 1182 |
1178 void CodePatcher::Emit(Address addr) { | 1183 void CodePatcher::Emit(Address addr) { |
1179 masm()->emit(reinterpret_cast<Instr>(addr)); | 1184 masm()->emit(reinterpret_cast<Instr>(addr)); |
1180 } | 1185 } |
1181 #endif // ENABLE_DEBUGGER_SUPPORT | 1186 #endif // ENABLE_DEBUGGER_SUPPORT |
1182 | 1187 |
1183 | 1188 |
1184 } } // namespace v8::internal | 1189 } } // namespace v8::internal |