OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_IA32_CODE_STUBS_IA32_H_ | 5 #ifndef V8_IA32_CODE_STUBS_IA32_H_ |
6 #define V8_IA32_CODE_STUBS_IA32_H_ | 6 #define V8_IA32_CODE_STUBS_IA32_H_ |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 | 10 |
(...skipping 256 matching lines...)
267 masm->pop(ecx); | 267 masm->pop(ecx); |
268 } | 268 } |
269 if (!scratch0_.is(scratch0_orig_)) masm->pop(scratch0_); | 269 if (!scratch0_.is(scratch0_orig_)) masm->pop(scratch0_); |
270 } | 270 } |
271 | 271 |
272 // If we have to call into C then we need to save and restore all caller- | 272 // If we have to call into C then we need to save and restore all caller- |
273 // saved registers that were not already preserved. The caller saved | 273 // saved registers that were not already preserved. The caller saved |
274 // registers are eax, ecx and edx. The three scratch registers (incl. ecx) | 274 // registers are eax, ecx and edx. The three scratch registers (incl. ecx) |
275 // will be restored by other means so we don't bother pushing them here. | 275 // will be restored by other means so we don't bother pushing them here. |
276 void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) { | 276 void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) { |
277 if (!scratch0_.is(eax) && !scratch1_.is(eax)) masm->push(eax); | 277 masm->PushCallerSaved(mode, ecx, scratch0_, scratch1_); |
278 if (!scratch0_.is(edx) && !scratch1_.is(edx)) masm->push(edx); | |
279 if (mode == kSaveFPRegs) { | |
280 masm->sub(esp, | |
281 Immediate(kDoubleSize * (XMMRegister::kMaxNumRegisters - 1))); | |
282 // Save all XMM registers except XMM0. | |
283 for (int i = XMMRegister::kMaxNumRegisters - 1; i > 0; i--) { | |
284 XMMRegister reg = XMMRegister::from_code(i); | |
285 masm->movsd(Operand(esp, (i - 1) * kDoubleSize), reg); | |
286 } | |
287 } | |
288 } | 278 } |
289 | 279 |
290 inline void RestoreCallerSaveRegisters(MacroAssembler*masm, | 280 inline void RestoreCallerSaveRegisters(MacroAssembler* masm, |
291 SaveFPRegsMode mode) { | 281 SaveFPRegsMode mode) { |
292 if (mode == kSaveFPRegs) { | 282 masm->PopCallerSaved(mode, ecx, scratch0_, scratch1_); |
293 // Restore all XMM registers except XMM0. | |
294 for (int i = XMMRegister::kMaxNumRegisters - 1; i > 0; i--) { | |
295 XMMRegister reg = XMMRegister::from_code(i); | |
296 masm->movsd(reg, Operand(esp, (i - 1) * kDoubleSize)); | |
297 } | |
298 masm->add(esp, | |
299 Immediate(kDoubleSize * (XMMRegister::kMaxNumRegisters - 1))); | |
300 } | |
301 if (!scratch0_.is(edx) && !scratch1_.is(edx)) masm->pop(edx); | |
302 if (!scratch0_.is(eax) && !scratch1_.is(eax)) masm->pop(eax); | |
303 } | 283 } |
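The change above replaces the stub's hand-rolled saving of eax, edx and the XMM registers with the MacroAssembler helpers PushCallerSaved and PopCallerSaved, passing ecx and the two scratch registers as exclusions. A minimal sketch of what such helpers could look like on ia32, reconstructed from the inline code being deleted here; the signatures follow the call sites in this patch and are an assumption, not the actual MacroAssembler implementation:

// Sketch only: reconstructed from the removed inline code, not V8's real helper.
void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1,
                                     Register exclusion2,
                                     Register exclusion3) {
  // Push the general-purpose caller-saved registers (eax, ecx, edx) that
  // are not listed as exclusions; excluded ones are preserved elsewhere.
  if (!eax.is(exclusion1) && !eax.is(exclusion2) && !eax.is(exclusion3)) {
    push(eax);
  }
  if (!ecx.is(exclusion1) && !ecx.is(exclusion2) && !ecx.is(exclusion3)) {
    push(ecx);
  }
  if (!edx.is(exclusion1) && !edx.is(exclusion2) && !edx.is(exclusion3)) {
    push(edx);
  }
  if (fp_mode == kSaveFPRegs) {
    // Reserve stack space and spill all XMM registers except XMM0.
    sub(esp, Immediate(kDoubleSize * (XMMRegister::kMaxNumRegisters - 1)));
    for (int i = XMMRegister::kMaxNumRegisters - 1; i > 0; i--) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(esp, (i - 1) * kDoubleSize), reg);
    }
  }
}

// Sketch only: mirrors PushCallerSaved in reverse order.
void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
                                    Register exclusion1,
                                    Register exclusion2,
                                    Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    // Reload all XMM registers except XMM0 and release the stack space.
    for (int i = XMMRegister::kMaxNumRegisters - 1; i > 0; i--) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(esp, (i - 1) * kDoubleSize));
    }
    add(esp, Immediate(kDoubleSize * (XMMRegister::kMaxNumRegisters - 1)));
  }
  // Pop the general-purpose registers in the reverse order of the pushes.
  if (!edx.is(exclusion1) && !edx.is(exclusion2) && !edx.is(exclusion3)) {
    pop(edx);
  }
  if (!ecx.is(exclusion1) && !ecx.is(exclusion2) && !ecx.is(exclusion3)) {
    pop(ecx);
  }
  if (!eax.is(exclusion1) && !eax.is(exclusion2) && !eax.is(exclusion3)) {
    pop(eax);
  }
}

With helpers shaped like this, the stub's call masm->PushCallerSaved(mode, ecx, scratch0_, scratch1_) skips ecx and the scratch registers, matching the old behavior where those are restored by other means.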
304 | 284 |
305 inline Register object() { return object_; } | 285 inline Register object() { return object_; } |
306 inline Register address() { return address_; } | 286 inline Register address() { return address_; } |
307 inline Register scratch0() { return scratch0_; } | 287 inline Register scratch0() { return scratch0_; } |
308 inline Register scratch1() { return scratch1_; } | 288 inline Register scratch1() { return scratch1_; } |
309 | 289 |
310 private: | 290 private: |
311 Register object_orig_; | 291 Register object_orig_; |
312 Register address_orig_; | 292 Register address_orig_; |
(...skipping 71 matching lines...)
384 RegisterAllocation regs_; | 364 RegisterAllocation regs_; |
385 | 365 |
386 DISALLOW_COPY_AND_ASSIGN(RecordWriteStub); | 366 DISALLOW_COPY_AND_ASSIGN(RecordWriteStub); |
387 }; | 367 }; |
388 | 368 |
389 | 369 |
390 } // namespace internal | 370 } // namespace internal |
391 } // namespace v8 | 371 } // namespace v8 |
392 | 372 |
393 #endif // V8_IA32_CODE_STUBS_IA32_H_ | 373 #endif // V8_IA32_CODE_STUBS_IA32_H_ |