| OLD | NEW |
| 1 ;***************************************************************************** | 1 ;***************************************************************************** |
| 2 ;* x86inc.asm: x264asm abstraction layer | 2 ;* x86inc.asm: x264asm abstraction layer |
| 3 ;***************************************************************************** | 3 ;***************************************************************************** |
| 4 ;* Copyright (C) 2005-2012 x264 project | 4 ;* Copyright (C) 2005-2012 x264 project |
| 5 ;* | 5 ;* |
| 6 ;* Authors: Loren Merritt <lorenm@u.washington.edu> | 6 ;* Authors: Loren Merritt <lorenm@u.washington.edu> |
| 7 ;* Anton Mitrofanov <BugMaster@narod.ru> | 7 ;* Anton Mitrofanov <BugMaster@narod.ru> |
| 8 ;* Jason Garrett-Glaser <darkshikari@gmail.com> | 8 ;* Jason Garrett-Glaser <darkshikari@gmail.com> |
| 9 ;* Henrik Gramner <hengar-6@student.ltu.se> | 9 ;* Henrik Gramner <hengar-6@student.ltu.se> |
| 10 ;* | 10 ;* |
| (...skipping 377 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 388 %xdefine %1m r %+ %%i %+ m | 388 %xdefine %1m r %+ %%i %+ m |
| 389 %xdefine %1mp r %+ %%i %+ mp | 389 %xdefine %1mp r %+ %%i %+ mp |
| 390 CAT_XDEFINE arg_name, %%i, %1 | 390 CAT_XDEFINE arg_name, %%i, %1 |
| 391 %assign %%i %%i+1 | 391 %assign %%i %%i+1 |
| 392 %rotate 1 | 392 %rotate 1 |
| 393 %endrep | 393 %endrep |
| 394 %xdefine stack_offset %%stack_offset | 394 %xdefine stack_offset %%stack_offset |
| 395 %assign n_arg_names %0 | 395 %assign n_arg_names %0 |
| 396 %endmacro | 396 %endmacro |
| 397 | 397 |
| 398 %if ARCH_X86_64 |
| 399 %macro ALLOC_STACK 2 ; stack_size, num_regs |
| 400 %assign %%stack_alignment ((mmsize + 15) & ~15) ; align to mmsize, but never less than 16 bytes |
| 401 %assign stack_size_padded %1 |
| 402 |
| 403 %assign %%reg_num (%2 - 1) ; last requested GPR holds the saved stack pointer |
| 404 %xdefine rsp_tmp r %+ %%reg_num |
| 405 mov rsp_tmp, rsp ; preserve the unaligned rsp for RESTORE_STACK |
| 406 sub rsp, stack_size_padded |
| 407 and rsp, ~(%%stack_alignment - 1) ; round rsp down; allocates at least stack_size_padded bytes |
| 408 %endmacro |
| 409 |
| 410 %macro RESTORE_STACK 0 ; reset rsp register |
| 411 mov rsp, rsp_tmp |
| 412 %endmacro |
| 413 %endif |
| 414 |
| 398 %if WIN64 ; Windows x64 ;================================================= | 415 %if WIN64 ; Windows x64 ;================================================= |
| 399 | 416 |
| 400 DECLARE_REG 0, rcx, ecx, cx, cl | 417 DECLARE_REG 0, rcx, ecx, cx, cl |
| 401 DECLARE_REG 1, rdx, edx, dx, dl | 418 DECLARE_REG 1, rdx, edx, dx, dl |
| 402 DECLARE_REG 2, R8, R8D, R8W, R8B | 419 DECLARE_REG 2, R8, R8D, R8W, R8B |
| 403 DECLARE_REG 3, R9, R9D, R9W, R9B | 420 DECLARE_REG 3, R9, R9D, R9W, R9B |
| 404 DECLARE_REG 4, R10, R10D, R10W, R10B, 40 | 421 DECLARE_REG 4, R10, R10D, R10W, R10B, 40 |
| 405 DECLARE_REG 5, R11, R11D, R11W, R11B, 48 | 422 DECLARE_REG 5, R11, R11D, R11W, R11B, 48 |
| 406 DECLARE_REG 6, rax, eax, ax, al, 56 | 423 DECLARE_REG 6, rax, eax, ax, al, 56 |
| 407 DECLARE_REG 7, rdi, edi, di, dil, 64 | 424 DECLARE_REG 7, rdi, edi, di, dil, 64 |
| (...skipping 786 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1194 %else | 1211 %else |
| 1195 %6 %1, %2, %3 | 1212 %6 %1, %2, %3 |
| 1196 %7 %1, %4 | 1213 %7 %1, %4 |
| 1197 %endif | 1214 %endif |
| 1198 %endmacro | 1215 %endmacro |
| 1199 %endmacro | 1216 %endmacro |
| 1200 | 1217 |
| 1201 FMA_INSTR pmacsdd, pmulld, paddd | 1218 FMA_INSTR pmacsdd, pmulld, paddd |
| 1202 FMA_INSTR pmacsww, pmullw, paddw | 1219 FMA_INSTR pmacsww, pmullw, paddw |
| 1203 FMA_INSTR pmadcswd, pmaddwd, paddd | 1220 FMA_INSTR pmadcswd, pmaddwd, paddd |
| OLD | NEW |