Diffstat (limited to 'mp3lib/dct64_3dnow.c')
 mp3lib/dct64_3dnow.c | 917 ++++++++++++++++++++++++++-----------------------
 1 file changed, 459 insertions(+), 458 deletions(-)
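For context: this commit replaces the hard-coded 32-bit register names (%%eax, %%ebx, %%ecx, %%edx, %%esi, %%edi) in the inline assembly with the REG_* string macros from FFmpeg's libavutil/x86_cpu.h, so the same source assembles on both x86-32 and x86-64. A minimal sketch of the idea behind those macros, assuming a guard such as ARCH_X86_64 (the real header defines more names than shown here):

/* Sketch: register-name macros in the spirit of libavutil/x86_cpu.h.
 * Each macro is a string literal, so it pastes into the surrounding
 * asm template via C string-literal concatenation. */
#if defined(ARCH_X86_64)
#    define REG_a "rax"
#    define REG_b "rbx"
#    define REG_c "rcx"
#    define REG_d "rdx"
#    define REG_S "rsi"
#    define REG_D "rdi"
#else
#    define REG_a "eax"
#    define REG_b "ebx"
#    define REG_c "ecx"
#    define REG_d "edx"
#    define REG_S "esi"
#    define REG_D "edi"
#endif

Note also that the suffixed movl/leal become suffix-less mov/lea, letting the assembler infer the operand size from the register, which is required once the registers may be 64-bit.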
diff --git a/mp3lib/dct64_3dnow.c b/mp3lib/dct64_3dnow.c
index d528654207..5d9ebf3d63 100644
--- a/mp3lib/dct64_3dnow.c
+++ b/mp3lib/dct64_3dnow.c
@@ -11,6 +11,7 @@
#include "config.h"
#include "mangle.h"
#include "mpg123.h"
+#include "libavutil/x86_cpu.h"
static unsigned long long int attribute_used __attribute__((aligned(8))) x_plus_minus_3dnow = 0x8000000000000000ULL;
static float attribute_used plus_1f = 1.0;
@@ -19,21 +20,21 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
{
char tmp[256];
__asm__ volatile(
-" movl %2,%%eax\n\t"
+" mov %2,%%"REG_a"\n\t"
-" leal 128+%3,%%edx\n\t"
-" movl %0,%%esi\n\t"
-" movl %1,%%edi\n\t"
-" movl $"MANGLE(costab_mmx)",%%ebx\n\t"
-" leal %3,%%ecx\n\t"
+" lea 128+%3,%%"REG_d"\n\t"
+" mov %0,%%"REG_S"\n\t"
+" mov %1,%%"REG_D"\n\t"
+" mov $"MANGLE(costab_mmx)",%%"REG_b"\n\t"
+" lea %3,%%"REG_c"\n\t"
/* Phase 1*/
-" movq (%%eax), %%mm0\n\t"
-" movq 8(%%eax), %%mm4\n\t"
+" movq (%%"REG_a"), %%mm0\n\t"
+" movq 8(%%"REG_a"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 120(%%eax), %%mm1\n\t"
-" movq 112(%%eax), %%mm5\n\t"
+" movq 120(%%"REG_a"), %%mm1\n\t"
+" movq 112(%%"REG_a"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -44,25 +45,25 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, (%%edx)\n\t"
-" movq %%mm4, 8(%%edx)\n\t"
+" movq %%mm0, (%%"REG_d")\n\t"
+" movq %%mm4, 8(%%"REG_d")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsub %%mm5, %%mm7\n\t"
-" pfmul (%%ebx), %%mm3\n\t"
-" pfmul 8(%%ebx), %%mm7\n\t"
-" movd %%mm3, 124(%%edx)\n\t"
-" movd %%mm7, 116(%%edx)\n\t"
+" pfmul (%%"REG_b"), %%mm3\n\t"
+" pfmul 8(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 124(%%"REG_d")\n\t"
+" movd %%mm7, 116(%%"REG_d")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 120(%%edx)\n\t"
-" movd %%mm7, 112(%%edx)\n\t"
+" movd %%mm3, 120(%%"REG_d")\n\t"
+" movd %%mm7, 112(%%"REG_d")\n\t"
-" movq 16(%%eax), %%mm0\n\t"
-" movq 24(%%eax), %%mm4\n\t"
+" movq 16(%%"REG_a"), %%mm0\n\t"
+" movq 24(%%"REG_a"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 104(%%eax), %%mm1\n\t"
-" movq 96(%%eax), %%mm5\n\t"
+" movq 104(%%"REG_a"), %%mm1\n\t"
+" movq 96(%%"REG_a"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -73,25 +74,25 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 16(%%edx)\n\t"
-" movq %%mm4, 24(%%edx)\n\t"
+" movq %%mm0, 16(%%"REG_d")\n\t"
+" movq %%mm4, 24(%%"REG_d")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsub %%mm5, %%mm7\n\t"
-" pfmul 16(%%ebx), %%mm3\n\t"
-" pfmul 24(%%ebx), %%mm7\n\t"
-" movd %%mm3, 108(%%edx)\n\t"
-" movd %%mm7, 100(%%edx)\n\t"
+" pfmul 16(%%"REG_b"), %%mm3\n\t"
+" pfmul 24(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 108(%%"REG_d")\n\t"
+" movd %%mm7, 100(%%"REG_d")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 104(%%edx)\n\t"
-" movd %%mm7, 96(%%edx)\n\t"
+" movd %%mm3, 104(%%"REG_d")\n\t"
+" movd %%mm7, 96(%%"REG_d")\n\t"
-" movq 32(%%eax), %%mm0\n\t"
-" movq 40(%%eax), %%mm4\n\t"
+" movq 32(%%"REG_a"), %%mm0\n\t"
+" movq 40(%%"REG_a"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 88(%%eax), %%mm1\n\t"
-" movq 80(%%eax), %%mm5\n\t"
+" movq 88(%%"REG_a"), %%mm1\n\t"
+" movq 80(%%"REG_a"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -102,25 +103,25 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 32(%%edx)\n\t"
-" movq %%mm4, 40(%%edx)\n\t"
+" movq %%mm0, 32(%%"REG_d")\n\t"
+" movq %%mm4, 40(%%"REG_d")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsub %%mm5, %%mm7\n\t"
-" pfmul 32(%%ebx), %%mm3\n\t"
-" pfmul 40(%%ebx), %%mm7\n\t"
-" movd %%mm3, 92(%%edx)\n\t"
-" movd %%mm7, 84(%%edx)\n\t"
+" pfmul 32(%%"REG_b"), %%mm3\n\t"
+" pfmul 40(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 92(%%"REG_d")\n\t"
+" movd %%mm7, 84(%%"REG_d")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 88(%%edx)\n\t"
-" movd %%mm7, 80(%%edx)\n\t"
+" movd %%mm3, 88(%%"REG_d")\n\t"
+" movd %%mm7, 80(%%"REG_d")\n\t"
-" movq 48(%%eax), %%mm0\n\t"
-" movq 56(%%eax), %%mm4\n\t"
+" movq 48(%%"REG_a"), %%mm0\n\t"
+" movq 56(%%"REG_a"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 72(%%eax), %%mm1\n\t"
-" movq 64(%%eax), %%mm5\n\t"
+" movq 72(%%"REG_a"), %%mm1\n\t"
+" movq 64(%%"REG_a"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -131,27 +132,27 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 48(%%edx)\n\t"
-" movq %%mm4, 56(%%edx)\n\t"
+" movq %%mm0, 48(%%"REG_d")\n\t"
+" movq %%mm4, 56(%%"REG_d")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsub %%mm5, %%mm7\n\t"
-" pfmul 48(%%ebx), %%mm3\n\t"
-" pfmul 56(%%ebx), %%mm7\n\t"
-" movd %%mm3, 76(%%edx)\n\t"
-" movd %%mm7, 68(%%edx)\n\t"
+" pfmul 48(%%"REG_b"), %%mm3\n\t"
+" pfmul 56(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 76(%%"REG_d")\n\t"
+" movd %%mm7, 68(%%"REG_d")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 72(%%edx)\n\t"
-" movd %%mm7, 64(%%edx)\n\t"
+" movd %%mm3, 72(%%"REG_d")\n\t"
+" movd %%mm7, 64(%%"REG_d")\n\t"
/* Phase 2*/
-" movq (%%edx), %%mm0\n\t"
-" movq 8(%%edx), %%mm4\n\t"
+" movq (%%"REG_d"), %%mm0\n\t"
+" movq 8(%%"REG_d"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 56(%%edx), %%mm1\n\t"
-" movq 48(%%edx), %%mm5\n\t"
+" movq 56(%%"REG_d"), %%mm1\n\t"
+" movq 48(%%"REG_d"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -162,25 +163,25 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, (%%ecx)\n\t"
-" movq %%mm4, 8(%%ecx)\n\t"
+" movq %%mm0, (%%"REG_c")\n\t"
+" movq %%mm4, 8(%%"REG_c")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsub %%mm5, %%mm7\n\t"
-" pfmul 64(%%ebx), %%mm3\n\t"
-" pfmul 72(%%ebx), %%mm7\n\t"
-" movd %%mm3, 60(%%ecx)\n\t"
-" movd %%mm7, 52(%%ecx)\n\t"
+" pfmul 64(%%"REG_b"), %%mm3\n\t"
+" pfmul 72(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 60(%%"REG_c")\n\t"
+" movd %%mm7, 52(%%"REG_c")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 56(%%ecx)\n\t"
-" movd %%mm7, 48(%%ecx)\n\t"
+" movd %%mm3, 56(%%"REG_c")\n\t"
+" movd %%mm7, 48(%%"REG_c")\n\t"
-" movq 16(%%edx), %%mm0\n\t"
-" movq 24(%%edx), %%mm4\n\t"
+" movq 16(%%"REG_d"), %%mm0\n\t"
+" movq 24(%%"REG_d"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 40(%%edx), %%mm1\n\t"
-" movq 32(%%edx), %%mm5\n\t"
+" movq 40(%%"REG_d"), %%mm1\n\t"
+" movq 32(%%"REG_d"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -191,27 +192,27 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 16(%%ecx)\n\t"
-" movq %%mm4, 24(%%ecx)\n\t"
+" movq %%mm0, 16(%%"REG_c")\n\t"
+" movq %%mm4, 24(%%"REG_c")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsub %%mm5, %%mm7\n\t"
-" pfmul 80(%%ebx), %%mm3\n\t"
-" pfmul 88(%%ebx), %%mm7\n\t"
-" movd %%mm3, 44(%%ecx)\n\t"
-" movd %%mm7, 36(%%ecx)\n\t"
+" pfmul 80(%%"REG_b"), %%mm3\n\t"
+" pfmul 88(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 44(%%"REG_c")\n\t"
+" movd %%mm7, 36(%%"REG_c")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 40(%%ecx)\n\t"
-" movd %%mm7, 32(%%ecx)\n\t"
+" movd %%mm3, 40(%%"REG_c")\n\t"
+" movd %%mm7, 32(%%"REG_c")\n\t"
/* Phase 3*/
-" movq 64(%%edx), %%mm0\n\t"
-" movq 72(%%edx), %%mm4\n\t"
+" movq 64(%%"REG_d"), %%mm0\n\t"
+" movq 72(%%"REG_d"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 120(%%edx), %%mm1\n\t"
-" movq 112(%%edx), %%mm5\n\t"
+" movq 120(%%"REG_d"), %%mm1\n\t"
+" movq 112(%%"REG_d"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -222,25 +223,25 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 64(%%ecx)\n\t"
-" movq %%mm4, 72(%%ecx)\n\t"
+" movq %%mm0, 64(%%"REG_c")\n\t"
+" movq %%mm4, 72(%%"REG_c")\n\t"
" pfsubr %%mm1, %%mm3\n\t"
" pfsubr %%mm5, %%mm7\n\t"
-" pfmul 64(%%ebx), %%mm3\n\t"
-" pfmul 72(%%ebx), %%mm7\n\t"
-" movd %%mm3, 124(%%ecx)\n\t"
-" movd %%mm7, 116(%%ecx)\n\t"
+" pfmul 64(%%"REG_b"), %%mm3\n\t"
+" pfmul 72(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 124(%%"REG_c")\n\t"
+" movd %%mm7, 116(%%"REG_c")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 120(%%ecx)\n\t"
-" movd %%mm7, 112(%%ecx)\n\t"
+" movd %%mm3, 120(%%"REG_c")\n\t"
+" movd %%mm7, 112(%%"REG_c")\n\t"
-" movq 80(%%edx), %%mm0\n\t"
-" movq 88(%%edx), %%mm4\n\t"
+" movq 80(%%"REG_d"), %%mm0\n\t"
+" movq 88(%%"REG_d"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 104(%%edx), %%mm1\n\t"
-" movq 96(%%edx), %%mm5\n\t"
+" movq 104(%%"REG_d"), %%mm1\n\t"
+" movq 96(%%"REG_d"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -251,27 +252,27 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 80(%%ecx)\n\t"
-" movq %%mm4, 88(%%ecx)\n\t"
+" movq %%mm0, 80(%%"REG_c")\n\t"
+" movq %%mm4, 88(%%"REG_c")\n\t"
" pfsubr %%mm1, %%mm3\n\t"
" pfsubr %%mm5, %%mm7\n\t"
-" pfmul 80(%%ebx), %%mm3\n\t"
-" pfmul 88(%%ebx), %%mm7\n\t"
-" movd %%mm3, 108(%%ecx)\n\t"
-" movd %%mm7, 100(%%ecx)\n\t"
+" pfmul 80(%%"REG_b"), %%mm3\n\t"
+" pfmul 88(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 108(%%"REG_c")\n\t"
+" movd %%mm7, 100(%%"REG_c")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 104(%%ecx)\n\t"
-" movd %%mm7, 96(%%ecx)\n\t"
+" movd %%mm3, 104(%%"REG_c")\n\t"
+" movd %%mm7, 96(%%"REG_c")\n\t"
/* Phase 4*/
-" movq (%%ecx), %%mm0\n\t"
-" movq 8(%%ecx), %%mm4\n\t"
+" movq (%%"REG_c"), %%mm0\n\t"
+" movq 8(%%"REG_c"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 24(%%ecx), %%mm1\n\t"
-" movq 16(%%ecx), %%mm5\n\t"
+" movq 24(%%"REG_c"), %%mm1\n\t"
+" movq 16(%%"REG_c"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -282,25 +283,25 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, (%%edx)\n\t"
-" movq %%mm4, 8(%%edx)\n\t"
+" movq %%mm0, (%%"REG_d")\n\t"
+" movq %%mm4, 8(%%"REG_d")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsub %%mm5, %%mm7\n\t"
-" pfmul 96(%%ebx), %%mm3\n\t"
-" pfmul 104(%%ebx), %%mm7\n\t"
-" movd %%mm3, 28(%%edx)\n\t"
-" movd %%mm7, 20(%%edx)\n\t"
+" pfmul 96(%%"REG_b"), %%mm3\n\t"
+" pfmul 104(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 28(%%"REG_d")\n\t"
+" movd %%mm7, 20(%%"REG_d")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 24(%%edx)\n\t"
-" movd %%mm7, 16(%%edx)\n\t"
+" movd %%mm3, 24(%%"REG_d")\n\t"
+" movd %%mm7, 16(%%"REG_d")\n\t"
-" movq 32(%%ecx), %%mm0\n\t"
-" movq 40(%%ecx), %%mm4\n\t"
+" movq 32(%%"REG_c"), %%mm0\n\t"
+" movq 40(%%"REG_c"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 56(%%ecx), %%mm1\n\t"
-" movq 48(%%ecx), %%mm5\n\t"
+" movq 56(%%"REG_c"), %%mm1\n\t"
+" movq 48(%%"REG_c"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -311,25 +312,25 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 32(%%edx)\n\t"
-" movq %%mm4, 40(%%edx)\n\t"
+" movq %%mm0, 32(%%"REG_d")\n\t"
+" movq %%mm4, 40(%%"REG_d")\n\t"
" pfsubr %%mm1, %%mm3\n\t"
" pfsubr %%mm5, %%mm7\n\t"
-" pfmul 96(%%ebx), %%mm3\n\t"
-" pfmul 104(%%ebx), %%mm7\n\t"
-" movd %%mm3, 60(%%edx)\n\t"
-" movd %%mm7, 52(%%edx)\n\t"
+" pfmul 96(%%"REG_b"), %%mm3\n\t"
+" pfmul 104(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 60(%%"REG_d")\n\t"
+" movd %%mm7, 52(%%"REG_d")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 56(%%edx)\n\t"
-" movd %%mm7, 48(%%edx)\n\t"
+" movd %%mm3, 56(%%"REG_d")\n\t"
+" movd %%mm7, 48(%%"REG_d")\n\t"
-" movq 64(%%ecx), %%mm0\n\t"
-" movq 72(%%ecx), %%mm4\n\t"
+" movq 64(%%"REG_c"), %%mm0\n\t"
+" movq 72(%%"REG_c"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 88(%%ecx), %%mm1\n\t"
-" movq 80(%%ecx), %%mm5\n\t"
+" movq 88(%%"REG_c"), %%mm1\n\t"
+" movq 80(%%"REG_c"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -340,25 +341,25 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 64(%%edx)\n\t"
-" movq %%mm4, 72(%%edx)\n\t"
+" movq %%mm0, 64(%%"REG_d")\n\t"
+" movq %%mm4, 72(%%"REG_d")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsub %%mm5, %%mm7\n\t"
-" pfmul 96(%%ebx), %%mm3\n\t"
-" pfmul 104(%%ebx), %%mm7\n\t"
-" movd %%mm3, 92(%%edx)\n\t"
-" movd %%mm7, 84(%%edx)\n\t"
+" pfmul 96(%%"REG_b"), %%mm3\n\t"
+" pfmul 104(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 92(%%"REG_d")\n\t"
+" movd %%mm7, 84(%%"REG_d")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 88(%%edx)\n\t"
-" movd %%mm7, 80(%%edx)\n\t"
+" movd %%mm3, 88(%%"REG_d")\n\t"
+" movd %%mm7, 80(%%"REG_d")\n\t"
-" movq 96(%%ecx), %%mm0\n\t"
-" movq 104(%%ecx), %%mm4\n\t"
+" movq 96(%%"REG_c"), %%mm0\n\t"
+" movq 104(%%"REG_c"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 120(%%ecx), %%mm1\n\t"
-" movq 112(%%ecx), %%mm5\n\t"
+" movq 120(%%"REG_c"), %%mm1\n\t"
+" movq 112(%%"REG_c"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -369,27 +370,27 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 96(%%edx)\n\t"
-" movq %%mm4, 104(%%edx)\n\t"
+" movq %%mm0, 96(%%"REG_d")\n\t"
+" movq %%mm4, 104(%%"REG_d")\n\t"
" pfsubr %%mm1, %%mm3\n\t"
" pfsubr %%mm5, %%mm7\n\t"
-" pfmul 96(%%ebx), %%mm3\n\t"
-" pfmul 104(%%ebx), %%mm7\n\t"
-" movd %%mm3, 124(%%edx)\n\t"
-" movd %%mm7, 116(%%edx)\n\t"
+" pfmul 96(%%"REG_b"), %%mm3\n\t"
+" pfmul 104(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 124(%%"REG_d")\n\t"
+" movd %%mm7, 116(%%"REG_d")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 120(%%edx)\n\t"
-" movd %%mm7, 112(%%edx)\n\t"
+" movd %%mm3, 120(%%"REG_d")\n\t"
+" movd %%mm7, 112(%%"REG_d")\n\t"
/* Phase 5 */
-" movq (%%edx), %%mm0\n\t"
-" movq 16(%%edx), %%mm4\n\t"
+" movq (%%"REG_d"), %%mm0\n\t"
+" movq 16(%%"REG_d"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 8(%%edx), %%mm1\n\t"
-" movq 24(%%edx), %%mm5\n\t"
+" movq 8(%%"REG_d"), %%mm1\n\t"
+" movq 24(%%"REG_d"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -400,25 +401,25 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, (%%ecx)\n\t"
-" movq %%mm4, 16(%%ecx)\n\t"
+" movq %%mm0, (%%"REG_c")\n\t"
+" movq %%mm4, 16(%%"REG_c")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsubr %%mm5, %%mm7\n\t"
-" pfmul 112(%%ebx), %%mm3\n\t"
-" pfmul 112(%%ebx), %%mm7\n\t"
-" movd %%mm3, 12(%%ecx)\n\t"
-" movd %%mm7, 28(%%ecx)\n\t"
+" pfmul 112(%%"REG_b"), %%mm3\n\t"
+" pfmul 112(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 12(%%"REG_c")\n\t"
+" movd %%mm7, 28(%%"REG_c")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 8(%%ecx)\n\t"
-" movd %%mm7, 24(%%ecx)\n\t"
+" movd %%mm3, 8(%%"REG_c")\n\t"
+" movd %%mm7, 24(%%"REG_c")\n\t"
-" movq 32(%%edx), %%mm0\n\t"
-" movq 48(%%edx), %%mm4\n\t"
+" movq 32(%%"REG_d"), %%mm0\n\t"
+" movq 48(%%"REG_d"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 40(%%edx), %%mm1\n\t"
-" movq 56(%%edx), %%mm5\n\t"
+" movq 40(%%"REG_d"), %%mm1\n\t"
+" movq 56(%%"REG_d"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -429,25 +430,25 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 32(%%ecx)\n\t"
-" movq %%mm4, 48(%%ecx)\n\t"
+" movq %%mm0, 32(%%"REG_c")\n\t"
+" movq %%mm4, 48(%%"REG_c")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsubr %%mm5, %%mm7\n\t"
-" pfmul 112(%%ebx), %%mm3\n\t"
-" pfmul 112(%%ebx), %%mm7\n\t"
-" movd %%mm3, 44(%%ecx)\n\t"
-" movd %%mm7, 60(%%ecx)\n\t"
+" pfmul 112(%%"REG_b"), %%mm3\n\t"
+" pfmul 112(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 44(%%"REG_c")\n\t"
+" movd %%mm7, 60(%%"REG_c")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 40(%%ecx)\n\t"
-" movd %%mm7, 56(%%ecx)\n\t"
+" movd %%mm3, 40(%%"REG_c")\n\t"
+" movd %%mm7, 56(%%"REG_c")\n\t"
-" movq 64(%%edx), %%mm0\n\t"
-" movq 80(%%edx), %%mm4\n\t"
+" movq 64(%%"REG_d"), %%mm0\n\t"
+" movq 80(%%"REG_d"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 72(%%edx), %%mm1\n\t"
-" movq 88(%%edx), %%mm5\n\t"
+" movq 72(%%"REG_d"), %%mm1\n\t"
+" movq 88(%%"REG_d"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -458,25 +459,25 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 64(%%ecx)\n\t"
-" movq %%mm4, 80(%%ecx)\n\t"
+" movq %%mm0, 64(%%"REG_c")\n\t"
+" movq %%mm4, 80(%%"REG_c")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsubr %%mm5, %%mm7\n\t"
-" pfmul 112(%%ebx), %%mm3\n\t"
-" pfmul 112(%%ebx), %%mm7\n\t"
-" movd %%mm3, 76(%%ecx)\n\t"
-" movd %%mm7, 92(%%ecx)\n\t"
+" pfmul 112(%%"REG_b"), %%mm3\n\t"
+" pfmul 112(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 76(%%"REG_c")\n\t"
+" movd %%mm7, 92(%%"REG_c")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 72(%%ecx)\n\t"
-" movd %%mm7, 88(%%ecx)\n\t"
+" movd %%mm3, 72(%%"REG_c")\n\t"
+" movd %%mm7, 88(%%"REG_c")\n\t"
-" movq 96(%%edx), %%mm0\n\t"
-" movq 112(%%edx), %%mm4\n\t"
+" movq 96(%%"REG_d"), %%mm0\n\t"
+" movq 112(%%"REG_d"), %%mm4\n\t"
" movq %%mm0, %%mm3\n\t"
" movq %%mm4, %%mm7\n\t"
-" movq 104(%%edx), %%mm1\n\t"
-" movq 120(%%edx), %%mm5\n\t"
+" movq 104(%%"REG_d"), %%mm1\n\t"
+" movq 120(%%"REG_d"), %%mm5\n\t"
/* n.b.: pswapd*/
" movq %%mm1, %%mm2\n\t"
" movq %%mm5, %%mm6\n\t"
@@ -487,28 +488,28 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/**/
" pfadd %%mm1, %%mm0\n\t"
" pfadd %%mm5, %%mm4\n\t"
-" movq %%mm0, 96(%%ecx)\n\t"
-" movq %%mm4, 112(%%ecx)\n\t"
+" movq %%mm0, 96(%%"REG_c")\n\t"
+" movq %%mm4, 112(%%"REG_c")\n\t"
" pfsub %%mm1, %%mm3\n\t"
" pfsubr %%mm5, %%mm7\n\t"
-" pfmul 112(%%ebx), %%mm3\n\t"
-" pfmul 112(%%ebx), %%mm7\n\t"
-" movd %%mm3, 108(%%ecx)\n\t"
-" movd %%mm7, 124(%%ecx)\n\t"
+" pfmul 112(%%"REG_b"), %%mm3\n\t"
+" pfmul 112(%%"REG_b"), %%mm7\n\t"
+" movd %%mm3, 108(%%"REG_c")\n\t"
+" movd %%mm7, 124(%%"REG_c")\n\t"
" psrlq $32, %%mm3\n\t"
" psrlq $32, %%mm7\n\t"
-" movd %%mm3, 104(%%ecx)\n\t"
-" movd %%mm7, 120(%%ecx)\n\t"
+" movd %%mm3, 104(%%"REG_c")\n\t"
+" movd %%mm7, 120(%%"REG_c")\n\t"
/* Phase 6. This is the end of the easy road. */
/* Code below is coded in scalar mode. Should be optimized */
" movd "MANGLE(plus_1f)", %%mm6\n\t"
-" punpckldq 120(%%ebx), %%mm6\n\t" /* mm6 = 1.0 | 120(%%ebx)*/
+" punpckldq 120(%%"REG_b"), %%mm6\n\t" /* mm6 = 1.0 | 120(%%"REG_b")*/
" movq "MANGLE(x_plus_minus_3dnow)", %%mm7\n\t" /* mm7 = +1 | -1 */
-" movq 32(%%ecx), %%mm0\n\t"
-" movq 64(%%ecx), %%mm2\n\t"
+" movq 32(%%"REG_c"), %%mm0\n\t"
+" movq 64(%%"REG_c"), %%mm2\n\t"
" movq %%mm0, %%mm1\n\t"
" movq %%mm2, %%mm3\n\t"
" pxor %%mm7, %%mm1\n\t"
@@ -517,14 +518,14 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
" pfacc %%mm3, %%mm2\n\t"
" pfmul %%mm6, %%mm0\n\t"
" pfmul %%mm6, %%mm2\n\t"
-" movq %%mm0, 32(%%edx)\n\t"
-" movq %%mm2, 64(%%edx)\n\t"
-
-" movd 44(%%ecx), %%mm0\n\t"
-" movd 40(%%ecx), %%mm2\n\t"
-" movd 120(%%ebx), %%mm3\n\t"
-" punpckldq 76(%%ecx), %%mm0\n\t"
-" punpckldq 72(%%ecx), %%mm2\n\t"
+" movq %%mm0, 32(%%"REG_d")\n\t"
+" movq %%mm2, 64(%%"REG_d")\n\t"
+
+" movd 44(%%"REG_c"), %%mm0\n\t"
+" movd 40(%%"REG_c"), %%mm2\n\t"
+" movd 120(%%"REG_b"), %%mm3\n\t"
+" punpckldq 76(%%"REG_c"), %%mm0\n\t"
+" punpckldq 72(%%"REG_c"), %%mm2\n\t"
" punpckldq %%mm3, %%mm3\n\t"
" movq %%mm0, %%mm4\n\t"
" movq %%mm2, %%mm5\n\t"
@@ -536,86 +537,86 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
" movq %%mm0, %%mm2\n\t"
" punpckldq %%mm1, %%mm0\n\t"
" punpckhdq %%mm1, %%mm2\n\t"
-" movq %%mm0, 40(%%edx)\n\t"
-" movq %%mm2, 72(%%edx)\n\t"
-
-" movd 48(%%ecx), %%mm3\n\t"
-" movd 60(%%ecx), %%mm2\n\t"
-" pfsub 52(%%ecx), %%mm3\n\t"
-" pfsub 56(%%ecx), %%mm2\n\t"
-" pfmul 120(%%ebx), %%mm3\n\t"
-" pfmul 120(%%ebx), %%mm2\n\t"
+" movq %%mm0, 40(%%"REG_d")\n\t"
+" movq %%mm2, 72(%%"REG_d")\n\t"
+
+" movd 48(%%"REG_c"), %%mm3\n\t"
+" movd 60(%%"REG_c"), %%mm2\n\t"
+" pfsub 52(%%"REG_c"), %%mm3\n\t"
+" pfsub 56(%%"REG_c"), %%mm2\n\t"
+" pfmul 120(%%"REG_b"), %%mm3\n\t"
+" pfmul 120(%%"REG_b"), %%mm2\n\t"
" movq %%mm2, %%mm1\n\t"
-" pfadd 56(%%ecx), %%mm1\n\t"
-" pfadd 60(%%ecx), %%mm1\n\t"
+" pfadd 56(%%"REG_c"), %%mm1\n\t"
+" pfadd 60(%%"REG_c"), %%mm1\n\t"
" movq %%mm1, %%mm0\n\t"
-" pfadd 48(%%ecx), %%mm0\n\t"
-" pfadd 52(%%ecx), %%mm0\n\t"
+" pfadd 48(%%"REG_c"), %%mm0\n\t"
+" pfadd 52(%%"REG_c"), %%mm0\n\t"
" pfadd %%mm3, %%mm1\n\t"
" punpckldq %%mm2, %%mm1\n\t"
" pfadd %%mm3, %%mm2\n\t"
" punpckldq %%mm2, %%mm0\n\t"
-" movq %%mm1, 56(%%edx)\n\t"
-" movq %%mm0, 48(%%edx)\n\t"
+" movq %%mm1, 56(%%"REG_d")\n\t"
+" movq %%mm0, 48(%%"REG_d")\n\t"
/*---*/
-" movd 92(%%ecx), %%mm1\n\t"
-" pfsub 88(%%ecx), %%mm1\n\t"
-" pfmul 120(%%ebx), %%mm1\n\t"
-" movd %%mm1, 92(%%edx)\n\t"
-" pfadd 92(%%ecx), %%mm1\n\t"
-" pfadd 88(%%ecx), %%mm1\n\t"
+" movd 92(%%"REG_c"), %%mm1\n\t"
+" pfsub 88(%%"REG_c"), %%mm1\n\t"
+" pfmul 120(%%"REG_b"), %%mm1\n\t"
+" movd %%mm1, 92(%%"REG_d")\n\t"
+" pfadd 92(%%"REG_c"), %%mm1\n\t"
+" pfadd 88(%%"REG_c"), %%mm1\n\t"
" movq %%mm1, %%mm0\n\t"
-" pfadd 80(%%ecx), %%mm0\n\t"
-" pfadd 84(%%ecx), %%mm0\n\t"
-" movd %%mm0, 80(%%edx)\n\t"
+" pfadd 80(%%"REG_c"), %%mm0\n\t"
+" pfadd 84(%%"REG_c"), %%mm0\n\t"
+" movd %%mm0, 80(%%"REG_d")\n\t"
-" movd 80(%%ecx), %%mm0\n\t"
-" pfsub 84(%%ecx), %%mm0\n\t"
-" pfmul 120(%%ebx), %%mm0\n\t"
+" movd 80(%%"REG_c"), %%mm0\n\t"
+" pfsub 84(%%"REG_c"), %%mm0\n\t"
+" pfmul 120(%%"REG_b"), %%mm0\n\t"
" pfadd %%mm0, %%mm1\n\t"
-" pfadd 92(%%edx), %%mm0\n\t"
+" pfadd 92(%%"REG_d"), %%mm0\n\t"
" punpckldq %%mm1, %%mm0\n\t"
-" movq %%mm0, 84(%%edx)\n\t"
+" movq %%mm0, 84(%%"REG_d")\n\t"
-" movq 96(%%ecx), %%mm0\n\t"
+" movq 96(%%"REG_c"), %%mm0\n\t"
" movq %%mm0, %%mm1\n\t"
" pxor %%mm7, %%mm1\n\t"
" pfacc %%mm1, %%mm0\n\t"
" pfmul %%mm6, %%mm0\n\t"
-" movq %%mm0, 96(%%edx)\n\t"
-
-" movd 108(%%ecx), %%mm0\n\t"
-" pfsub 104(%%ecx), %%mm0\n\t"
-" pfmul 120(%%ebx), %%mm0\n\t"
-" movd %%mm0, 108(%%edx)\n\t"
-" pfadd 104(%%ecx), %%mm0\n\t"
-" pfadd 108(%%ecx), %%mm0\n\t"
-" movd %%mm0, 104(%%edx)\n\t"
-
-" movd 124(%%ecx), %%mm1\n\t"
-" pfsub 120(%%ecx), %%mm1\n\t"
-" pfmul 120(%%ebx), %%mm1\n\t"
-" movd %%mm1, 124(%%edx)\n\t"
-" pfadd 120(%%ecx), %%mm1\n\t"
-" pfadd 124(%%ecx), %%mm1\n\t"
+" movq %%mm0, 96(%%"REG_d")\n\t"
+
+" movd 108(%%"REG_c"), %%mm0\n\t"
+" pfsub 104(%%"REG_c"), %%mm0\n\t"
+" pfmul 120(%%"REG_b"), %%mm0\n\t"
+" movd %%mm0, 108(%%"REG_d")\n\t"
+" pfadd 104(%%"REG_c"), %%mm0\n\t"
+" pfadd 108(%%"REG_c"), %%mm0\n\t"
+" movd %%mm0, 104(%%"REG_d")\n\t"
+
+" movd 124(%%"REG_c"), %%mm1\n\t"
+" pfsub 120(%%"REG_c"), %%mm1\n\t"
+" pfmul 120(%%"REG_b"), %%mm1\n\t"
+" movd %%mm1, 124(%%"REG_d")\n\t"
+" pfadd 120(%%"REG_c"), %%mm1\n\t"
+" pfadd 124(%%"REG_c"), %%mm1\n\t"
" movq %%mm1, %%mm0\n\t"
-" pfadd 112(%%ecx), %%mm0\n\t"
-" pfadd 116(%%ecx), %%mm0\n\t"
-" movd %%mm0, 112(%%edx)\n\t"
+" pfadd 112(%%"REG_c"), %%mm0\n\t"
+" pfadd 116(%%"REG_c"), %%mm0\n\t"
+" movd %%mm0, 112(%%"REG_d")\n\t"
-" movd 112(%%ecx), %%mm0\n\t"
-" pfsub 116(%%ecx), %%mm0\n\t"
-" pfmul 120(%%ebx), %%mm0\n\t"
+" movd 112(%%"REG_c"), %%mm0\n\t"
+" pfsub 116(%%"REG_c"), %%mm0\n\t"
+" pfmul 120(%%"REG_b"), %%mm0\n\t"
" pfadd %%mm0,%%mm1\n\t"
-" pfadd 124(%%edx), %%mm0\n\t"
+" pfadd 124(%%"REG_d"), %%mm0\n\t"
" punpckldq %%mm1, %%mm0\n\t"
-" movq %%mm0, 116(%%edx)\n\t"
+" movq %%mm0, 116(%%"REG_d")\n\t"
// this code is broken; nothing above modifies the Z flag.
#if 0
@@ -624,299 +625,299 @@ void dct64_MMX_3dnow(short *a,short *b,real *c)
/* Phase 7*/
/* Code below is coded in scalar mode. Should be optimized */
-" movd (%%ecx), %%mm0\n\t"
-" pfadd 4(%%ecx), %%mm0\n\t"
-" movd %%mm0, 1024(%%esi)\n\t"
-
-" movd (%%ecx), %%mm0\n\t"
-" pfsub 4(%%ecx), %%mm0\n\t"
-" pfmul 120(%%ebx), %%mm0\n\t"
-" movd %%mm0, (%%esi)\n\t"
-" movd %%mm0, (%%edi)\n\t"
-
-" movd 12(%%ecx), %%mm0\n\t"
-" pfsub 8(%%ecx), %%mm0\n\t"
-" pfmul 120(%%ebx), %%mm0\n\t"
-" movd %%mm0, 512(%%edi)\n\t"
-" pfadd 12(%%ecx), %%mm0\n\t"
-" pfadd 8(%%ecx), %%mm0\n\t"
-" movd %%mm0, 512(%%esi)\n\t"
-
-" movd 16(%%ecx), %%mm0\n\t"
-" pfsub 20(%%ecx), %%mm0\n\t"
-" pfmul 120(%%ebx), %%mm0\n\t"
+" movd (%%"REG_c"), %%mm0\n\t"
+" pfadd 4(%%"REG_c"), %%mm0\n\t"
+" movd %%mm0, 1024(%%"REG_S")\n\t"
+
+" movd (%%"REG_c"), %%mm0\n\t"
+" pfsub 4(%%"REG_c"), %%mm0\n\t"
+" pfmul 120(%%"REG_b"), %%mm0\n\t"
+" movd %%mm0, (%%"REG_S")\n\t"
+" movd %%mm0, (%%"REG_D")\n\t"
+
+" movd 12(%%"REG_c"), %%mm0\n\t"
+" pfsub 8(%%"REG_c"), %%mm0\n\t"
+" pfmul 120(%%"REG_b"), %%mm0\n\t"
+" movd %%mm0, 512(%%"REG_D")\n\t"
+" pfadd 12(%%"REG_c"), %%mm0\n\t"
+" pfadd 8(%%"REG_c"), %%mm0\n\t"
+" movd %%mm0, 512(%%"REG_S")\n\t"
+
+" movd 16(%%"REG_c"), %%mm0\n\t"
+" pfsub 20(%%"REG_c"), %%mm0\n\t"
+" pfmul 120(%%"REG_b"), %%mm0\n\t"
" movq %%mm0, %%mm3\n\t"
-" movd 28(%%ecx), %%mm0\n\t"
-" pfsub 24(%%ecx), %%mm0\n\t"
-" pfmul 120(%%ebx), %%mm0\n\t"
-" movd %%mm0, 768(%%edi)\n\t"
+" movd 28(%%"REG_c"), %%mm0\n\t"
+" pfsub 24(%%"REG_c"), %%mm0\n\t"
+" pfmul 120(%%"REG_b"), %%mm0\n\t"
+" movd %%mm0, 768(%%"REG_D")\n\t"
" movq %%mm0, %%mm2\n\t"
-" pfadd 24(%%ecx), %%mm0\n\t"
-" pfadd 28(%%ecx), %%mm0\n\t"
+" pfadd 24(%%"REG_c"), %%mm0\n\t"
+" pfadd 28(%%"REG_c"), %%mm0\n\t"
" movq %%mm0, %%mm1\n\t"
-" pfadd 16(%%ecx), %%mm0\n\t"
-" pfadd 20(%%ecx), %%mm0\n\t"
-" movd %%mm0, 768(%%esi)\n\t"
+" pfadd 16(%%"REG_c"), %%mm0\n\t"
+" pfadd 20(%%"REG_c"), %%mm0\n\t"
+" movd %%mm0, 768(%%"REG_S")\n\t"
" pfadd %%mm3, %%mm1\n\t"
-" movd %%mm1, 256(%%esi)\n\t"
+" movd %%mm1, 256(%%"REG_S")\n\t"
" pfadd %%mm3, %%mm2\n\t"
-" movd %%mm2, 256(%%edi)\n\t"
+" movd %%mm2, 256(%%"REG_D")\n\t"
/* Phase 8*/
-" movq 32(%%edx), %%mm0\n\t"
-" movq 48(%%edx), %%mm1\n\t"
-" pfadd 48(%%edx), %%mm0\n\t"
-" pfadd 40(%%edx), %%mm1\n\t"
-" movd %%mm0, 896(%%esi)\n\t"
-" movd %%mm1, 640(%%esi)\n\t"
+" movq 32(%%"REG_d"), %%mm0\n\t"
+" movq 48(%%"REG_d"), %%mm1\n\t"
+" pfadd 48(%%"REG_d"), %%mm0\n\t"
+" pfadd 40(%%"REG_d"), %%mm1\n\t"
+" movd %%mm0, 896(%%"REG_S")\n\t"
+" movd %%mm1, 640(%%"REG_S")\n\t"
" psrlq $32, %%mm0\n\t"
" psrlq $32, %%mm1\n\t"
-" movd %%mm0, 128(%%edi)\n\t"
-" movd %%mm1, 384(%%edi)\n\t"
-
-" movd 40(%%edx), %%mm0\n\t"
-" pfadd 56(%%edx), %%mm0\n\t"
-" movd %%mm0, 384(%%esi)\n\t"
-
-" movd 56(%%edx), %%mm0\n\t"
-" pfadd 36(%%edx), %%mm0\n\t"
-" movd %%mm0, 128(%%esi)\n\t"
-
-" movd 60(%%edx), %%mm0\n\t"
-" movd %%mm0, 896(%%edi)\n\t"
-" pfadd 44(%%edx), %%mm0\n\t"
-" movd %%mm0, 640(%%edi)\n\t"
-
-" movq 96(%%edx), %%mm0\n\t"
-" movq 112(%%edx), %%mm2\n\t"
-" movq 104(%%edx), %%mm4\n\t"
-" pfadd 112(%%edx), %%mm0\n\t"
-" pfadd 104(%%edx), %%mm2\n\t"
-" pfadd 120(%%edx), %%mm4\n\t"
+" movd %%mm0, 128(%%"REG_D")\n\t"
+" movd %%mm1, 384(%%"REG_D")\n\t"
+
+" movd 40(%%"REG_d"), %%mm0\n\t"
+" pfadd 56(%%"REG_d"), %%mm0\n\t"
+" movd %%mm0, 384(%%"REG_S")\n\t"
+
+" movd 56(%%"REG_d"), %%mm0\n\t"
+" pfadd 36(%%"REG_d"), %%mm0\n\t"
+" movd %%mm0, 128(%%"REG_S")\n\t"
+
+" movd 60(%%"REG_d"), %%mm0\n\t"
+" movd %%mm0, 896(%%"REG_D")\n\t"
+" pfadd 44(%%"REG_d"), %%mm0\n\t"
+" movd %%mm0, 640(%%"REG_D")\n\t"
+
+" movq 96(%%"REG_d"), %%mm0\n\t"
+" movq 112(%%"REG_d"), %%mm2\n\t"
+" movq 104(%%"REG_d"), %%mm4\n\t"
+" pfadd 112(%%"REG_d"), %%mm0\n\t"
+" pfadd 104(%%"REG_d"), %%mm2\n\t"
+" pfadd 120(%%"REG_d"), %%mm4\n\t"
" movq %%mm0, %%mm1\n\t"
" movq %%mm2, %%mm3\n\t"
" movq %%mm4, %%mm5\n\t"
-" pfadd 64(%%edx), %%mm0\n\t"
-" pfadd 80(%%edx), %%mm2\n\t"
-" pfadd 72(%%edx), %%mm4\n\t"
-" movd %%mm0, 960(%%esi)\n\t"
-" movd %%mm2, 704(%%esi)\n\t"
-" movd %%mm4, 448(%%esi)\n\t"
+" pfadd 64(%%"REG_d"), %%mm0\n\t"
+" pfadd 80(%%"REG_d"), %%mm2\n\t"
+" pfadd 72(%%"REG_d"), %%mm4\n\t"
+" movd %%mm0, 960(%%"REG_S")\n\t"
+" movd %%mm2, 704(%%"REG_S")\n\t"
+" movd %%mm4, 448(%%"REG_S")\n\t"
" psrlq $32, %%mm0\n\t"
" psrlq $32, %%mm2\n\t"
" psrlq $32, %%mm4\n\t"
-" movd %%mm0, 64(%%edi)\n\t"
-" movd %%mm2, 320(%%edi)\n\t"
-" movd %%mm4, 576(%%edi)\n\t"
-" pfadd 80(%%edx), %%mm1\n\t"
-" pfadd 72(%%edx), %%mm3\n\t"
-" pfadd 88(%%edx), %%mm5\n\t"
-" movd %%mm1, 832(%%esi)\n\t"
-" movd %%mm3, 576(%%esi)\n\t"
-" movd %%mm5, 320(%%esi)\n\t"
+" movd %%mm0, 64(%%"REG_D")\n\t"
+" movd %%mm2, 320(%%"REG_D")\n\t"
+" movd %%mm4, 576(%%"REG_D")\n\t"
+" pfadd 80(%%"REG_d"), %%mm1\n\t"
+" pfadd 72(%%"REG_d"), %%mm3\n\t"
+" pfadd 88(%%"REG_d"), %%mm5\n\t"
+" movd %%mm1, 832(%%"REG_S")\n\t"
+" movd %%mm3, 576(%%"REG_S")\n\t"
+" movd %%mm5, 320(%%"REG_S")\n\t"
" psrlq $32, %%mm1\n\t"<