| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 87 matching lines...) |
| 98 } | 98 } |
| 99 // Reconstruct the 64-bit value from two smis. | 99 // Reconstruct the 64-bit value from two smis. |
| 100 if ((non_object_regs & (1 << r)) != 0) { | 100 if ((non_object_regs & (1 << r)) != 0) { |
| 101 __ pop(kScratchRegister); | 101 __ pop(kScratchRegister); |
| 102 __ SmiToInteger32(kScratchRegister, kScratchRegister); | 102 __ SmiToInteger32(kScratchRegister, kScratchRegister); |
| 103 __ shl(kScratchRegister, Immediate(32)); | 103 __ shl(kScratchRegister, Immediate(32)); |
| 104 __ pop(reg); | 104 __ pop(reg); |
| 105 __ SmiToInteger32(reg, reg); | 105 __ SmiToInteger32(reg, reg); |
| 106 __ or_(reg, kScratchRegister); | 106 __ or_(reg, kScratchRegister); |
| 107 } | 107 } |
| 108 } | 108 } |
| 109 | 109 |
| 110 // Get rid of the internal frame. | 110 // Get rid of the internal frame. |
| 111 __ LeaveInternalFrame(); | 111 __ LeaveInternalFrame(); |
| 112 | 112 |
| 113 // If this call did not replace a call but patched other code then there will | 113 // If this call did not replace a call but patched other code then there will |
| 114 // be an unwanted return address left on the stack. Here we get rid of that. | 114 // be an unwanted return address left on the stack. Here we get rid of that. |
| 115 if (convert_call_to_jmp) { | 115 if (convert_call_to_jmp) { |
| 116 __ addq(rsp, Immediate(kPointerSize)); | 116 __ addq(rsp, Immediate(kPointerSize)); |
| 117 } | 117 } |
| 118 | 118 |
| (...skipping 184 matching lines...) |
| 303 ASSERT(IsDebugBreakSlot()); | 303 ASSERT(IsDebugBreakSlot()); |
| 304 rinfo()->PatchCode(original_rinfo()->pc(), Assembler::kDebugBreakSlotLength); | 304 rinfo()->PatchCode(original_rinfo()->pc(), Assembler::kDebugBreakSlotLength); |
| 305 } | 305 } |
| 306 | 306 |
| 307 | 307 |
| 308 #endif // ENABLE_DEBUGGER_SUPPORT | 308 #endif // ENABLE_DEBUGGER_SUPPORT |
| 309 | 309 |
| 310 } } // namespace v8::internal | 310 } } // namespace v8::internal |
| 311 | 311 |
| 312 #endif // V8_TARGET_ARCH_X64 | 312 #endif // V8_TARGET_ARCH_X64 |
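For context on the first hunk: the stub cannot leave raw 64-bit values from non-object registers on the stack across the debugger call (presumably so the GC never mistakes them for tagged pointers), so each value is carried as two 32-bit halves tagged as smis and rebuilt afterwards with a shift and an OR, as lines 100-107 show. A minimal sketch of the same split/reconstruct arithmetic in plain C++ follows; the names are illustrative only and not taken from the V8 sources.

// Sketch (not V8 code): carry a 64-bit word as two 32-bit halves and
// reassemble it, mirroring the SmiToInteger32 / shl(32) / or_ sequence
// the stub performs on kScratchRegister and the target register.
#include <cstdint>

struct Halves {
  uint32_t high;  // upper 32 bits, popped first in the stub
  uint32_t low;   // lower 32 bits
};

// Split a 64-bit value into two 32-bit halves (each small enough to tag).
Halves Split(uint64_t value) {
  return Halves{static_cast<uint32_t>(value >> 32),
                static_cast<uint32_t>(value & 0xFFFFFFFFu)};
}

// Reconstruct: shift the high half into place, then OR in the low half.
uint64_t Reconstruct(const Halves& halves) {
  return (static_cast<uint64_t>(halves.high) << 32) | halves.low;
}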