OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4288 matching lines...)
4299 __ push(r14); | 4299 __ push(r14); |
4300 __ push(r15); | 4300 __ push(r15); |
4301 #ifdef _WIN64 | 4301 #ifdef _WIN64 |
4302 __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI. | 4302 __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI. |
4303 __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI. | 4303 __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI. |
4304 #endif | 4304 #endif |
4305 __ push(rbx); | 4305 __ push(rbx); |
4306 | 4306 |
4307 #ifdef _WIN64 | 4307 #ifdef _WIN64 |
4308 // On Win64 XMM6-XMM15 are callee-save | 4308 // On Win64 XMM6-XMM15 are callee-save |
4309 __ subq(rsp, Immediate(kCalleeSaveXMMRegisters * kFullXMMRegisterSize); | 4309 __ subq(rsp, Immediate(kCalleeSaveXMMRegisters * kFullXMMRegisterSize)); |
4310 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 0), xmm6); | 4310 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 0), xmm6); |
4311 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 1), xmm7); | 4311 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 1), xmm7); |
4312 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 2), xmm8); | 4312 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 2), xmm8); |
4313 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 3), xmm9); | 4313 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 3), xmm9); |
4314 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 4), xmm10); | 4314 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 4), xmm10); |
4315 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 5), xmm11); | 4315 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 5), xmm11); |
4316 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 6), xmm12); | 4316 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 6), xmm12); |
4317 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 7), xmm13); | 4317 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 7), xmm13); |
4318 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 8), xmm14); | 4318 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 8), xmm14); |
4319 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 9), xmm15); | 4319 __ movdqu(Operand(rsp, kFullXMMRegisterSize * 9), xmm15); |
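On Win64, xmm6 through xmm15 are callee-save, so the stub reserves one 16-byte slot per register and spills each with an unaligned store. The standalone sketch below only illustrates the size and offset arithmetic behind the subq/movdqu sequence above; the constant values (10 registers, 16 bytes each) are assumptions based on the Win64 ABI, not copied from the V8 sources.

#include <cstdio>

int main() {
  // Assumed values: xmm6..xmm15 are the ten callee-save XMM registers on
  // Win64, each 128 bits (16 bytes) wide.
  const int kCalleeSaveXMMRegisters = 10;
  const int kFullXMMRegisterSize = 16;

  // Stack space reserved by the stub prologue: 10 * 16 = 160 bytes.
  const int spill_area = kCalleeSaveXMMRegisters * kFullXMMRegisterSize;
  std::printf("subq rsp, %d\n", spill_area);

  // One unaligned 128-bit store per callee-save register, at consecutive
  // 16-byte offsets from rsp (mirrors the movdqu sequence above).
  for (int i = 0; i < kCalleeSaveXMMRegisters; ++i) {
    std::printf("movdqu [rsp + %d], xmm%d\n", i * kFullXMMRegisterSize, 6 + i);
  }
  return 0;
}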
(...skipping 2428 matching lines...)
6748 #endif | 6748 #endif |
6749 | 6749 |
6750 __ Ret(); | 6750 __ Ret(); |
6751 } | 6751 } |
6752 | 6752 |
6753 #undef __ | 6753 #undef __ |
6754 | 6754 |
6755 } } // namespace v8::internal | 6755 } } // namespace v8::internal |
6756 | 6756 |
6757 #endif // V8_TARGET_ARCH_X64 | 6757 #endif // V8_TARGET_ARCH_X64 |