|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
%include "libavutil/x86/x86util.asm" |
|
|
|
cextern pw_4 |
|
cextern pw_5 |
|
|
|
SECTION .text |
|
|
|
|
|
|
|
; UNPACK_8TO16 op, dst, src, zero
; Widen the packed elements of m<src> by interleaving with m<zero>
; (caller provides a zeroed register): low half -> m<dst>,
; high half -> left in m<src>.  %1 is the punpck size suffix (e.g. bw).
%macro UNPACK_8TO16 4
mova m%2, m%3
punpckh%1 m%3, m%4 ; m%3 = widened high elements
punpckl%1 m%2, m%4 ; m%2 = widened low elements
%endmacro
|
|
|
; STORE_4_WORDS dst1, dst2, dst3, dst4, mmreg, idx/tmp
; Store four 16-bit words of %5 to the four memory operands %1..%4.
; SSE4 path:   %6 is the immediate pextrw index of the first word, so
;              words %6+0 .. %6+3 are stored; %5 is preserved.
; Fallback:    %6 is a scratch GPR (clobbered); the low four words of %5
;              are stored, and %5 itself is shifted (clobbered) to move
;              words 2-3 into the movd-able position.
%macro STORE_4_WORDS 6
%if cpuflag(sse4)
pextrw %1, %5, %6+0
pextrw %2, %5, %6+1
pextrw %3, %5, %6+2
pextrw %4, %5, %6+3
%else
movd %6d, %5 ; words 0-1 -> GPR
%if mmsize==16
psrldq %5, 4 ; bring words 2-3 down (xmm)
%else
psrlq %5, 32 ; bring words 2-3 down (mmx)
%endif
mov %1, %6w
shr %6, 16
mov %2, %6w
movd %6d, %5 ; words 2-3 -> GPR
mov %3, %6w
shr %6, 16
mov %4, %6w
%endif
%endmacro
|
|
|
|
|
|
|
; VC1_LOOP_FILTER_A0 dst, t, s3, s4
; Packed-word computation of a VC-1 edge metric:
;   %1 = (2*(%1 - %4) - 5*(%2 - %3) + 4) >> 3
; Used to produce a0, a1 and a2 from four consecutive pixels.
; Clobbers %2; %3 and %4 are read-only.
%macro VC1_LOOP_FILTER_A0 4
psubw %1, %4 ; %1 - %4
psubw %2, %3 ; %2 - %3
paddw %1, %1 ; 2*(%1 - %4)
pmullw %2, [pw_5] ; 5*(%2 - %3)
psubw %1, %2
paddw %1, [pw_4] ; +4 for rounding
psraw %1, 3
%endmacro
|
|
|
|
|
|
|
|
|
|
|
; VC1_FILTER size(4|8)
; Core VC-1 in-loop deblocking decision and pixel update.
; In:  m7 = a0, m6 = a1, m5 = a2 (word lanes), m0 = P4, m1 = P5 (words),
;      r2d = pq replicated into its low bytes (see START_*_FILTER).
; Out: m0/m1 = filtered P4/P5, packed back to unsigned bytes (low half).
; A lane is filtered only when |a0| > a3 = min(|a1|,|a2|), |a0| < pq and
; clip = |P4-P5|>>1 is non-zero; the broadcast of lane 2's decision below
; additionally gates each 4-pixel group on its 3rd pixel pair.
%macro VC1_FILTER 1
PABSW m4, m7 ; m4 = |a0|
PABSW m3, m6 ; m3 = |a1|
PABSW m2, m5 ; m2 = |a2|
mova m6, m4
pminsw m3, m2 ; m3 = a3 = min(|a1|, |a2|)
pcmpgtw m6, m3 ; m6 = mask: |a0| > a3
psubw m3, m4 ; a3 - |a0| (<= 0 where mask set)
pmullw m3, [pw_5] ; 5*(a3 - |a0|)
PABSW m2, m3
psraw m2, 3 ; m2 = d = |5*(a3 - |a0|)| >> 3 (unsigned magnitude)
pxor m7, m3 ; m7 sign bit = sign(a0) ^ sign(a3-|a0|); carries d's sign

pxor m5, m5
movd m3, r2d ; pq bytes (replicated by imul in START_*_FILTER)
%if %1 > 4
punpcklbw m3, m3 ; 8-wide: double the pq bytes first
%endif
punpcklbw m3, m5 ; splat pq into word lanes
pcmpgtw m3, m4 ; mask: pq > |a0|
pand m6, m3 ; filter mask &= (|a0| < pq)

mova m3, m0
psubw m3, m1 ; m3 = P4 - P5
PABSW m4, m3
psraw m4, 1 ; m4 = clip = |P4 - P5| >> 1
pxor m3, m7 ; combine sign(P4-P5) with m7's sign bit
psraw m3, 15 ; all-ones where those signs disagree
pminsw m2, m4 ; d = min(d, clip)
pcmpgtw m4, m5 ; mask: clip > 0
pand m6, m4 ; filter mask &= (clip != 0)

; Broadcast the decision of word lane 2 (the 3rd pixel pair) across the
; group: per the VC-1 spec the whole 4-pixel segment is gated on it.
%if mmsize==16
pshuflw m4, m6, 0xaa
%if %1 > 4
pshufhw m4, m4, 0xaa
%endif
%else
pshufw m4, m6, 0xaa
%endif
pandn m3, m4 ; 3rd-pair mask, zeroed where the signs disagreed above
pand m2, m6 ; apply the per-lane filter mask to d
pand m3, m2 ; final magnitude of the correction

psraw m7, 15 ; m7 = sign mask for the correction
pxor m3, m7
psubw m3, m7 ; apply sign: d = (d ^ s) - s
psubw m0, m3 ; P4 -= d
paddw m1, m3 ; P5 += d
packuswb m0, m0 ; back to bytes (with unsigned saturation)
packuswb m1, m1
%endmacro
|
|
|
|
|
|
|
; VC1_V_LOOP_FILTER size, mov_suffix(d|q)
; Filter the horizontal edge between two vertically adjacent blocks.
; r0 = first row below the edge (P5), r4 = r0 - 4*stride (P1),
; r1 = stride, r3 = 3*stride, r2d = pq splatted (set by START_V_FILTER).
; Loads rows P1..P8, computes a1/a0/a2, then writes back P4 and P5.
%macro VC1_V_LOOP_FILTER 2
pxor m5, m5
mov%2 m6, [r4] ; P1
mov%2 m4, [r4+r1] ; P2
mov%2 m7, [r4+2*r1] ; P3
mov%2 m0, [r4+r3] ; P4
punpcklbw m6, m5
punpcklbw m4, m5
punpcklbw m7, m5
punpcklbw m0, m5

VC1_LOOP_FILTER_A0 m6, m4, m7, m0 ; m6 = a1 (from P1..P4)
mov%2 m1, [r0] ; P5
mov%2 m2, [r0+r1] ; P6
punpcklbw m1, m5
punpcklbw m2, m5
mova m4, m0 ; copy P4 (A0 clobbers its 2nd arg)
VC1_LOOP_FILTER_A0 m7, m4, m1, m2 ; m7 = a0 (from P3..P6)
mov%2 m3, [r0+2*r1] ; P7
mov%2 m4, [r0+r3] ; P8
punpcklbw m3, m5
punpcklbw m4, m5
mova m5, m1 ; copy P5
VC1_LOOP_FILTER_A0 m5, m2, m3, m4 ; m5 = a2 (from P5..P8)

VC1_FILTER %1
mov%2 [r4+r3], m0 ; store filtered P4
mov%2 [r0], m1 ; store filtered P5
%endmacro
|
|
|
|
|
|
|
|
|
; VC1_H_LOOP_FILTER size [, GPR]
; Filter the vertical edge between two horizontally adjacent blocks.
; r0 points 4 columns right of P1 on the first row; r1 = stride,
; r3 = 3*stride; for size 8, r4 = r0 + 4*stride (second group of rows).
; The 8 columns P1..P8 are transposed into word lanes, filtered, and the
; two edge columns written back with STORE_4_WORDS.
; %2, when given, is a scratch GPR for STORE_4_WORDS (clobbered); without
; it the SSE4 pextrw path with immediate indices is used.
%macro VC1_H_LOOP_FILTER 1-2
%if %1 == 4
movq m0, [r0 -4]
movq m1, [r0+ r1-4]
movq m2, [r0+2*r1-4]
movq m3, [r0+ r3-4]
TRANSPOSE4x4B 0, 1, 2, 3, 4 ; rows -> columns (bytes)
%else
movq m0, [r0 -4]
movq m4, [r0+ r1-4]
movq m1, [r0+2*r1-4]
movq m5, [r0+ r3-4]
movq m2, [r4 -4]
movq m6, [r4+ r1-4]
movq m3, [r4+2*r1-4]
movq m7, [r4+ r3-4]
punpcklbw m0, m4 ; pair up rows before the word transpose
punpcklbw m1, m5
punpcklbw m2, m6
punpcklbw m3, m7
TRANSPOSE4x4W 0, 1, 2, 3, 4 ; rows -> columns (words of byte pairs)
%endif
pxor m5, m5 ; zero reg for the unpacks below

UNPACK_8TO16 bw, 6, 0, 5 ; m6 = P1, m0 = P2
UNPACK_8TO16 bw, 7, 1, 5 ; m7 = P3, m1 = P4
VC1_LOOP_FILTER_A0 m6, m0, m7, m1 ; m6 = a1 (from P1..P4)
UNPACK_8TO16 bw, 4, 2, 5 ; m4 = P5, m2 = P6
mova m0, m1 ; m0 = P4 (kept for VC1_FILTER)
VC1_LOOP_FILTER_A0 m7, m1, m4, m2 ; m7 = a0 (from P3..P6)
UNPACK_8TO16 bw, 1, 3, 5 ; m1 = P7, m3 = P8
mova m5, m4 ; copy P5
VC1_LOOP_FILTER_A0 m5, m2, m1, m3 ; m5 = a2 (from P5..P8)
SWAP 1, 4 ; m1 = P5 (for VC1_FILTER)

VC1_FILTER %1
punpcklbw m0, m1 ; interleave filtered P4/P5 into P4P5 word pairs
%if %0 > 1
STORE_4_WORDS [r0-1], [r0+r1-1], [r0+2*r1-1], [r0+r3-1], m0, %2
%if %1 > 4
psrldq m0, 4 ; next four P4P5 pairs
STORE_4_WORDS [r4-1], [r4+r1-1], [r4+2*r1-1], [r4+r3-1], m0, %2
%endif
%else
STORE_4_WORDS [r0-1], [r0+r1-1], [r0+2*r1-1], [r0+r3-1], m0, 0
STORE_4_WORDS [r4-1], [r4+r1-1], [r4+2*r1-1], [r4+r3-1], m0, 4
%endif
%endmacro
|
|
|
|
|
; Set up the vertical-edge filter:
; r4 = r0 - 4*stride (first row of the upper block),
; r3 = 3*stride, and pq replicated across the low bytes of r2d.
%macro START_V_FILTER 0
mov r4, r0
lea r3, [4*r1]
sub r4, r3 ; r4 = r0 - 4*stride
lea r3, [r1+2*r1] ; r3 = 3*stride
imul r2, 0x01010101 ; splat the pq byte across r2d
%endmacro
|
|
|
; Set up the horizontal-edge filter for %1 (4 or 8) pixels:
; r3 = 3*stride; for the 8-pixel case r4 = r0 + 4*stride addresses the
; second group of four rows; pq is replicated across the low bytes of r2d.
%macro START_H_FILTER 1
lea r3, [r1+2*r1] ; r3 = 3*stride
%if %1 > 4
lea r4, [r0+4*r1] ; rows 4..7
%endif
imul r2, 0x01010101 ; splat the pq byte across r2d
%endmacro
|
|
|
|
|
INIT_MMX mmxext
; 4-pixel vertical-edge (horizontal boundary) loop filter, MMXEXT.
; r0 = src (first row below the edge), r1 = stride, r2 = pq.
; NOTE(review): C prototype presumably (uint8_t *src, ptrdiff_t stride,
; int pq) per the register use — confirm against the header.
cglobal vc1_v_loop_filter4, 3,5,0
START_V_FILTER
VC1_V_LOOP_FILTER 4, d
RET
|
|
|
|
|
INIT_MMX mmxext
; 4-pixel horizontal-edge (vertical boundary) loop filter, MMXEXT.
; r0 = src (4 columns right of P1), r1 = stride, r2 = pq; r4 is the
; scratch GPR handed to STORE_4_WORDS.
cglobal vc1_h_loop_filter4, 3,5,0
START_H_FILTER 4
VC1_H_LOOP_FILTER 4, r4
RET
|
|
|
INIT_XMM sse2

; 8-pixel vertical-edge loop filter, SSE2 (one xmm row per load).
; r0 = src, r1 = stride, r2 = pq.
cglobal vc1_v_loop_filter8, 3,5,8
START_V_FILTER
VC1_V_LOOP_FILTER 8, q
RET
|
|
|
|
|
; 8-pixel horizontal-edge loop filter, SSE2.
; r0 = src, r1 = stride, r2 = pq; r5 is the scratch GPR for the
; word stores (hence the 6-GPR declaration).
cglobal vc1_h_loop_filter8, 3,6,8
START_H_FILTER 8
VC1_H_LOOP_FILTER 8, r5
RET
|
|
|
INIT_MMX ssse3

; 4-pixel vertical-edge loop filter, SSSE3 variant (PABSW maps to pabsw).
; r0 = src, r1 = stride, r2 = pq.
cglobal vc1_v_loop_filter4, 3,5,0
START_V_FILTER
VC1_V_LOOP_FILTER 4, d
RET
|
|
|
|
|
; 4-pixel horizontal-edge loop filter, SSSE3 variant.
; r0 = src, r1 = stride, r2 = pq; r4 is the store scratch GPR.
cglobal vc1_h_loop_filter4, 3,5,0
START_H_FILTER 4
VC1_H_LOOP_FILTER 4, r4
RET
|
|
|
INIT_XMM ssse3

; 8-pixel vertical-edge loop filter, SSSE3 variant.
; r0 = src, r1 = stride, r2 = pq.
cglobal vc1_v_loop_filter8, 3,5,8
START_V_FILTER
VC1_V_LOOP_FILTER 8, q
RET
|
|
|
|
|
; 8-pixel horizontal-edge loop filter, SSSE3 variant.
; r0 = src, r1 = stride, r2 = pq; r5 is the store scratch GPR.
cglobal vc1_h_loop_filter8, 3,6,8
START_H_FILTER 8
VC1_H_LOOP_FILTER 8, r5
RET
|
|
|
INIT_XMM sse4

; 8-pixel horizontal-edge loop filter, SSE4 variant: STORE_4_WORDS uses
; pextrw with immediate indices, so no store scratch GPR is needed
; (only 5 GPRs declared).  r0 = src, r1 = stride, r2 = pq.
cglobal vc1_h_loop_filter8, 3,5,8
START_H_FILTER 8
VC1_H_LOOP_FILTER 8
RET
|
|