summaryrefslogtreecommitdiffstats
path: root/mp3lib
diff options
context:
space:
mode:
Diffstat (limited to 'mp3lib')
-rw-r--r--mp3lib/dct64_k7.c64
1 file changed, 32 insertions, 32 deletions
diff --git a/mp3lib/dct64_k7.c b/mp3lib/dct64_k7.c
index ea4cc0a282..1fca3592fd 100644
--- a/mp3lib/dct64_k7.c
+++ b/mp3lib/dct64_k7.c
@@ -32,8 +32,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 8(%%eax), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 120(%%eax), %%mm1\n\t"
-" pshufw $78, 112(%%eax), %%mm5\n\t"
+" pswapd 120(%%eax), %%mm1\n\t"
+" pswapd 112(%%eax), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, (%%edx)\n\t"
@@ -51,8 +51,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 24(%%eax), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 104(%%eax), %%mm1\n\t"
-" pshufw $78, 96(%%eax), %%mm5\n\t"
+" pswapd 104(%%eax), %%mm1\n\t"
+" pswapd 96(%%eax), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 16(%%edx)\n\t"
@@ -70,8 +70,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 40(%%eax), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 88(%%eax), %%mm1\n\t"
-" pshufw $78, 80(%%eax), %%mm5\n\t"
+" pswapd 88(%%eax), %%mm1\n\t"
+" pswapd 80(%%eax), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 32(%%edx)\n\t"
@@ -89,8 +89,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 56(%%eax), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 72(%%eax), %%mm1\n\t"
-" pshufw $78, 64(%%eax), %%mm5\n\t"
+" pswapd 72(%%eax), %%mm1\n\t"
+" pswapd 64(%%eax), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 48(%%edx)\n\t"
@@ -110,8 +110,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 8(%%edx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 56(%%edx), %%mm1\n\t"
-" pshufw $78, 48(%%edx), %%mm5\n\t"
+" pswapd 56(%%edx), %%mm1\n\t"
+" pswapd 48(%%edx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, (%%ecx)\n\t"
@@ -129,8 +129,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 24(%%edx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 40(%%edx), %%mm1\n\t"
-" pshufw $78, 32(%%edx), %%mm5\n\t"
+" pswapd 40(%%edx), %%mm1\n\t"
+" pswapd 32(%%edx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 16(%%ecx)\n\t"
@@ -150,8 +150,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 72(%%edx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 120(%%edx), %%mm1\n\t"
-" pshufw $78, 112(%%edx), %%mm5\n\t"
+" pswapd 120(%%edx), %%mm1\n\t"
+" pswapd 112(%%edx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 64(%%ecx)\n\t"
@@ -169,8 +169,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 88(%%edx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 104(%%edx), %%mm1\n\t"
-" pshufw $78, 96(%%edx), %%mm5\n\t"
+" pswapd 104(%%edx), %%mm1\n\t"
+" pswapd 96(%%edx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 80(%%ecx)\n\t"
@@ -193,8 +193,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 8(%%ecx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 24(%%ecx), %%mm1\n\t"
-" pshufw $78, 16(%%ecx), %%mm5\n\t"
+" pswapd 24(%%ecx), %%mm1\n\t"
+" pswapd 16(%%ecx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, (%%edx)\n\t"
@@ -212,8 +212,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 40(%%ecx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 56(%%ecx), %%mm1\n\t"
-" pshufw $78, 48(%%ecx), %%mm5\n\t"
+" pswapd 56(%%ecx), %%mm1\n\t"
+" pswapd 48(%%ecx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 32(%%edx)\n\t"
@@ -231,8 +231,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 72(%%ecx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 88(%%ecx), %%mm1\n\t"
-" pshufw $78, 80(%%ecx), %%mm5\n\t"
+" pswapd 88(%%ecx), %%mm1\n\t"
+" pswapd 80(%%ecx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 64(%%edx)\n\t"
@@ -250,8 +250,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 104(%%ecx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 120(%%ecx), %%mm1\n\t"
-" pshufw $78, 112(%%ecx), %%mm5\n\t"
+" pswapd 120(%%ecx), %%mm1\n\t"
+" pswapd 112(%%ecx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 96(%%edx)\n\t"
@@ -273,8 +273,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 16(%%edx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 8(%%edx), %%mm1\n\t"
-" pshufw $78, 24(%%edx), %%mm5\n\t"
+" pswapd 8(%%edx), %%mm1\n\t"
+" pswapd 24(%%edx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, (%%ecx)\n\t"
@@ -292,8 +292,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 48(%%edx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 40(%%edx), %%mm1\n\t"
-" pshufw $78, 56(%%edx), %%mm5\n\t"
+" pswapd 40(%%edx), %%mm1\n\t"
+" pswapd 56(%%edx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 32(%%ecx)\n\t"
@@ -311,8 +311,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 80(%%edx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 72(%%edx), %%mm1\n\t"
-" pshufw $78, 88(%%edx), %%mm5\n\t"
+" pswapd 72(%%edx), %%mm1\n\t"
+" pswapd 88(%%edx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 64(%%ecx)\n\t"
@@ -330,8 +330,8 @@ void dct64_MMX_3dnowex(real *a,real *b,real *c)
" movq 112(%%edx), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" pshufw $78, 104(%%edx), %%mm1\n\t"
-" pshufw $78, 120(%%edx), %%mm5\n\t"
+" pswapd 104(%%edx), %%mm1\n\t"
+" pswapd 120(%%edx), %%mm5\n\t"
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
" movq %%mm0, 96(%%ecx)\n\t"