changeset 736:59b0a9ec8604

K7 3dnow-dsp support
author nickols_k
date Wed, 09 May 2001 07:59:55 +0000
parents 7fc213046812
children e8d5e6d4d85f
files mp3lib/dct36_k7.s mp3lib/dct64_k7.s mp3lib/decod386.c mp3lib/decode_k7.s mp3lib/mpg123.h
diffstat 5 files changed, 1487 insertions(+), 0 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mp3lib/dct36_k7.s	Wed May 09 07:59:55 2001 +0000
@@ -0,0 +1,511 @@
+///
+/// Replacement of dct36() with AMD's 3DNowEx(DSP)! SIMD operations support
+///
+/// This code is based on 'dct36_3dnow.s' by Syuuhei Kashiyama
+/// <squash@mb.kcom.ne.jp>; only two types of changes have been made:
+///
+///  - added the new PSWAPD opcode
+///  - changed the function name to support automatic 3DNowEx! detection
+///
+/// note: because K7 processors are aggressive out-of-order three-way
+///       superscalar designs, instruction order is not significant for them.
+///
+/// Modified by Nick Kurshev <nickols_k@mail.ru>
+///
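+/// For reference, PSWAPD copies its source to its destination with the
+/// two 32-bit dwords swapped. A minimal sketch, with illustrative
+/// registers (not taken from the code below), of the plain 3DNow!
+/// sequence it stands in for:
+///
+///     movq      %mm5,%mm0   / copy source
+///     psrlq     $32,%mm0    / mm0.lo = mm5.hi
+///     punpckldq %mm5,%mm0   / mm0.hi = mm5.lo
+///
+/// versus the single 3DNowEx(DSP)! instruction:
+///
+///     pswapd    %mm5,%mm0   / mm0.lo = mm5.hi, mm0.hi = mm5.lo
+///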
+/
+/ dct36_3dnow.s - 3DNow! optimized dct36()
+/
+/ This code is based on 'dct36_3dnow.s' by Syuuhei Kashiyama
+/ <squash@mb.kcom.ne.jp>; only two types of changes have been made:
+/
+/ - removed the PREFETCH instruction for speedup
+/ - changed the function name to support automatic 3DNow! detection
+/
+/ You can find Kashiyama's original 3dnow! support patch
+/ (for mpg123-0.59o) at
+/ http://user.ecc.u-tokyo.ac.jp/~g810370/linux-simd/ (Japanese).
+/
+/ by KIMURA Takuhiro <kim@hannah.ipc.miyakyo-u.ac.jp> - until 31.Mar.1999
+/                    <kim@comtec.co.jp>               - after  1.Apr.1999
+/
+	
+///
+/// Replacement of dct36() with AMD's 3DNow! SIMD operations support
+/// 
+/// Syuuhei Kashiyama <squash@mb.kcom.ne.jp>
+/// 
+/// The author of this program disclaims all expressed or implied
+/// warranties with regard to this program, and in no event shall the
+/// author of this program be liable for whatever results from the use
+/// of this program. Use it at your own risk.
+/// 
+
+	.globl dct36_3dnowex
+	.type	 dct36_3dnowex,@function
+dct36_3dnowex:
+	pushl %ebp
+	movl %esp,%ebp
+	subl $120,%esp
+	pushl %esi
+	pushl %ebx
+	movl 8(%ebp),%eax
+	movl 12(%ebp),%esi
+	movl 16(%ebp),%ecx
+	movl 20(%ebp),%edx
+	movl 24(%ebp),%ebx
+	leal -128(%ebp),%esp
+
+	femms
+	movq (%eax),%mm0
+	movq 4(%eax),%mm1
+	pfadd %mm1,%mm0
+	movq %mm0,4(%eax)
+	psrlq $32,%mm1
+	movq 12(%eax),%mm2
+	punpckldq %mm2,%mm1
+	pfadd %mm2,%mm1
+	movq %mm1,12(%eax)
+	psrlq $32,%mm2
+	movq 20(%eax),%mm3
+	punpckldq %mm3,%mm2
+	pfadd %mm3,%mm2
+	movq %mm2,20(%eax)
+	psrlq $32,%mm3
+	movq 28(%eax),%mm4
+	punpckldq %mm4,%mm3
+	pfadd %mm4,%mm3
+	movq %mm3,28(%eax)
+	psrlq $32,%mm4
+	movq 36(%eax),%mm5
+	punpckldq %mm5,%mm4
+	pfadd %mm5,%mm4
+	movq %mm4,36(%eax)
+	psrlq $32,%mm5
+	movq 44(%eax),%mm6
+	punpckldq %mm6,%mm5
+	pfadd %mm6,%mm5
+	movq %mm5,44(%eax)
+	psrlq $32,%mm6
+	movq 52(%eax),%mm7
+	punpckldq %mm7,%mm6
+	pfadd %mm7,%mm6
+	movq %mm6,52(%eax)
+	psrlq $32,%mm7
+	movq 60(%eax),%mm0
+	punpckldq %mm0,%mm7
+	pfadd %mm0,%mm7
+	movq %mm7,60(%eax)
+	psrlq $32,%mm0
+	movd 68(%eax),%mm1
+	pfadd %mm1,%mm0
+	movd %mm0,68(%eax)
+	movd 4(%eax),%mm0
+	movd 12(%eax),%mm1
+	punpckldq %mm1,%mm0
+	punpckldq 20(%eax),%mm1
+	pfadd %mm1,%mm0
+	movd %mm0,12(%eax)
+	psrlq $32,%mm0
+	movd %mm0,20(%eax)
+	psrlq $32,%mm1
+	movd 28(%eax),%mm2
+	punpckldq %mm2,%mm1
+	punpckldq 36(%eax),%mm2
+	pfadd %mm2,%mm1
+	movd %mm1,28(%eax)
+	psrlq $32,%mm1
+	movd %mm1,36(%eax)
+	psrlq $32,%mm2
+	movd 44(%eax),%mm3
+	punpckldq %mm3,%mm2
+	punpckldq 52(%eax),%mm3
+	pfadd %mm3,%mm2
+	movd %mm2,44(%eax)
+	psrlq $32,%mm2
+	movd %mm2,52(%eax)
+	psrlq $32,%mm3
+	movd 60(%eax),%mm4
+	punpckldq %mm4,%mm3
+	punpckldq 68(%eax),%mm4
+	pfadd %mm4,%mm3
+	movd %mm3,60(%eax)
+	psrlq $32,%mm3
+	movd %mm3,68(%eax)
+	movq 24(%eax),%mm0
+	movq 48(%eax),%mm1
+	movd COS9+12,%mm2
+	punpckldq %mm2,%mm2
+	movd COS9+24,%mm3
+	punpckldq %mm3,%mm3
+	pfmul %mm2,%mm0
+	pfmul %mm3,%mm1
+	pushl %eax
+	movl $1,%eax
+	movd %eax,%mm7
+	pi2fd %mm7,%mm7
+	popl %eax
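+	/ mm7 now holds the float constant 1.0 in its low dword; punpckldq
+	/ with tfcos36[] entries below builds { 1.0 | cos } factor pairs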
+	movq 8(%eax),%mm2
+	movd COS9+4,%mm3
+	punpckldq %mm3,%mm3
+	pfmul %mm3,%mm2
+	pfadd %mm0,%mm2
+	movq 40(%eax),%mm3
+	movd COS9+20,%mm4
+	punpckldq %mm4,%mm4
+	pfmul %mm4,%mm3
+	pfadd %mm3,%mm2
+	movq 56(%eax),%mm3
+	movd COS9+28,%mm4
+	punpckldq %mm4,%mm4
+	pfmul %mm4,%mm3
+	pfadd %mm3,%mm2
+	movq (%eax),%mm3
+	movq 16(%eax),%mm4
+	movd COS9+8,%mm5
+	punpckldq %mm5,%mm5
+	pfmul %mm5,%mm4
+	pfadd %mm4,%mm3
+	movq 32(%eax),%mm4
+	movd COS9+16,%mm5
+	punpckldq %mm5,%mm5
+	pfmul %mm5,%mm4
+	pfadd %mm4,%mm3
+	pfadd %mm1,%mm3
+	movq 64(%eax),%mm4
+	movd COS9+32,%mm5
+	punpckldq %mm5,%mm5
+	pfmul %mm5,%mm4
+	pfadd %mm4,%mm3
+	movq %mm2,%mm4
+	pfadd %mm3,%mm4
+	movq %mm7,%mm5
+	punpckldq tfcos36+0,%mm5
+	pfmul %mm5,%mm4
+	movq %mm4,%mm5
+	pfacc %mm5,%mm5
+	movd 108(%edx),%mm6
+	punpckldq 104(%edx),%mm6
+	pfmul %mm6,%mm5
+	pswapd %mm5, %mm5
+        movq   %mm5, 32(%ecx)
+	movq %mm4,%mm6
+	punpckldq %mm6,%mm5
+	pfsub %mm6,%mm5
+	punpckhdq %mm5,%mm5
+	movd 32(%edx),%mm6
+	punpckldq 36(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd 32(%esi),%mm6
+	punpckldq 36(%esi),%mm6
+	pfadd %mm6,%mm5
+	movd %mm5,1024(%ebx)
+	psrlq $32,%mm5
+	movd %mm5,1152(%ebx)
+	movq %mm3,%mm4
+	pfsub %mm2,%mm4
+	movq %mm7,%mm5
+	punpckldq tfcos36+32,%mm5
+	pfmul %mm5,%mm4
+	movq %mm4,%mm5
+	pfacc %mm5,%mm5
+	movd 140(%edx),%mm6
+	punpckldq 72(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd %mm5,68(%ecx)
+	psrlq $32,%mm5
+	movd %mm5,0(%ecx)
+	movq %mm4,%mm6
+	punpckldq %mm6,%mm5
+	pfsub %mm6,%mm5
+	punpckhdq %mm5,%mm5
+	movd 0(%edx),%mm6
+	punpckldq 68(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd 0(%esi),%mm6
+	punpckldq 68(%esi),%mm6
+	pfadd %mm6,%mm5
+	movd %mm5,0(%ebx)
+	psrlq $32,%mm5
+	movd %mm5,2176(%ebx)
+	movq 8(%eax),%mm2
+	movq 40(%eax),%mm3
+	pfsub %mm3,%mm2
+	movq 56(%eax),%mm3
+	pfsub %mm3,%mm2
+	movd COS9+12,%mm3
+	punpckldq %mm3,%mm3
+	pfmul %mm3,%mm2
+	movq 16(%eax),%mm3
+	movq 32(%eax),%mm4
+	pfsub %mm4,%mm3
+	movq 64(%eax),%mm4
+	pfsub %mm4,%mm3
+	movd COS9+24,%mm4
+	punpckldq %mm4,%mm4
+	pfmul %mm4,%mm3
+	movq 48(%eax),%mm4
+	pfsub %mm4,%mm3
+	movq (%eax),%mm4
+	pfadd %mm4,%mm3
+	movq %mm2,%mm4
+	pfadd %mm3,%mm4
+	movq %mm7,%mm5
+	punpckldq tfcos36+4,%mm5
+	pfmul %mm5,%mm4
+	movq %mm4,%mm5
+	pfacc %mm5,%mm5
+	movd 112(%edx),%mm6
+	punpckldq 100(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd %mm5,40(%ecx)
+	psrlq $32,%mm5
+	movd %mm5,28(%ecx)
+	movq %mm4,%mm6
+	punpckldq %mm6,%mm5
+	pfsub %mm6,%mm5
+	punpckhdq %mm5,%mm5
+	movd 28(%edx),%mm6
+	punpckldq 40(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd 28(%esi),%mm6
+	punpckldq 40(%esi),%mm6
+	pfadd %mm6,%mm5
+	movd %mm5,896(%ebx)
+	psrlq $32,%mm5
+	movd %mm5,1280(%ebx)
+	movq %mm3,%mm4
+	pfsub %mm2,%mm4
+	movq %mm7,%mm5
+	punpckldq tfcos36+28,%mm5
+	pfmul %mm5,%mm4
+	movq %mm4,%mm5
+	pfacc %mm5,%mm5
+	movd 136(%edx),%mm6
+	punpckldq 76(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd %mm5,64(%ecx)
+	psrlq $32,%mm5
+	movd %mm5,4(%ecx)
+	movq %mm4,%mm6
+	punpckldq %mm6,%mm5
+	pfsub %mm6,%mm5
+	punpckhdq %mm5,%mm5
+	movd 4(%edx),%mm6
+	punpckldq 64(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd 4(%esi),%mm6
+	punpckldq 64(%esi),%mm6
+	pfadd %mm6,%mm5
+	movd %mm5,128(%ebx)
+	psrlq $32,%mm5
+	movd %mm5,2048(%ebx)
+
+	movq 8(%eax),%mm2
+	movd COS9+20,%mm3
+	punpckldq %mm3,%mm3
+	pfmul %mm3,%mm2
+	pfsub %mm0,%mm2
+	movq 40(%eax),%mm3
+	movd COS9+28,%mm4
+	punpckldq %mm4,%mm4
+	pfmul %mm4,%mm3
+	pfsub %mm3,%mm2
+	movq 56(%eax),%mm3
+	movd COS9+4,%mm4
+	punpckldq %mm4,%mm4
+	pfmul %mm4,%mm3
+	pfadd %mm3,%mm2
+	movq (%eax),%mm3
+	movq 16(%eax),%mm4
+	movd COS9+32,%mm5
+	punpckldq %mm5,%mm5
+	pfmul %mm5,%mm4
+	pfsub %mm4,%mm3
+	movq 32(%eax),%mm4
+	movd COS9+8,%mm5
+	punpckldq %mm5,%mm5
+	pfmul %mm5,%mm4
+	pfsub %mm4,%mm3
+	pfadd %mm1,%mm3
+	movq 64(%eax),%mm4
+	movd COS9+16,%mm5
+	punpckldq %mm5,%mm5
+	pfmul %mm5,%mm4
+	pfadd %mm4,%mm3
+	movq %mm2,%mm4
+	pfadd %mm3,%mm4
+	movq %mm7,%mm5
+	punpckldq tfcos36+8,%mm5
+	pfmul %mm5,%mm4
+	movq %mm4,%mm5
+	pfacc %mm5,%mm5
+	movd 116(%edx),%mm6
+	punpckldq 96(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd %mm5,44(%ecx)
+	psrlq $32,%mm5
+	movd %mm5,24(%ecx)
+	movq %mm4,%mm6
+	punpckldq %mm6,%mm5
+	pfsub %mm6,%mm5
+	punpckhdq %mm5,%mm5
+	movd 24(%edx),%mm6
+	punpckldq 44(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd 24(%esi),%mm6
+	punpckldq 44(%esi),%mm6
+	pfadd %mm6,%mm5
+	movd %mm5,768(%ebx)
+	psrlq $32,%mm5
+	movd %mm5,1408(%ebx)
+	movq %mm3,%mm4
+	pfsub %mm2,%mm4
+	movq %mm7,%mm5
+	punpckldq tfcos36+24,%mm5
+	pfmul %mm5,%mm4
+	movq %mm4,%mm5
+	pfacc %mm5,%mm5
+	movd 132(%edx),%mm6
+	punpckldq 80(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd %mm5,60(%ecx)
+	psrlq $32,%mm5
+	movd %mm5,8(%ecx)
+	movq %mm4,%mm6
+	punpckldq %mm6,%mm5
+	pfsub %mm6,%mm5
+	punpckhdq %mm5,%mm5
+	movd 8(%edx),%mm6
+	punpckldq 60(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd 8(%esi),%mm6
+	punpckldq 60(%esi),%mm6
+	pfadd %mm6,%mm5
+	movd %mm5,256(%ebx)
+	psrlq $32,%mm5
+	movd %mm5,1920(%ebx)
+	movq 8(%eax),%mm2
+	movd COS9+28,%mm3
+	punpckldq %mm3,%mm3
+	pfmul %mm3,%mm2
+	pfsub %mm0,%mm2
+	movq 40(%eax),%mm3
+	movd COS9+4,%mm4
+	punpckldq %mm4,%mm4
+	pfmul %mm4,%mm3
+	pfadd %mm3,%mm2
+	movq 56(%eax),%mm3
+	movd COS9+20,%mm4
+	punpckldq %mm4,%mm4
+	pfmul %mm4,%mm3
+	pfsub %mm3,%mm2
+	movq (%eax),%mm3
+	movq 16(%eax),%mm4
+	movd COS9+16,%mm5
+	punpckldq %mm5,%mm5
+	pfmul %mm5,%mm4
+	pfsub %mm4,%mm3
+	movq 32(%eax),%mm4
+	movd COS9+32,%mm5
+	punpckldq %mm5,%mm5
+	pfmul %mm5,%mm4
+	pfadd %mm4,%mm3
+	pfadd %mm1,%mm3
+	movq 64(%eax),%mm4
+	movd COS9+8,%mm5
+	punpckldq %mm5,%mm5
+	pfmul %mm5,%mm4
+	pfsub %mm4,%mm3
+	movq %mm2,%mm4
+	pfadd %mm3,%mm4
+	movq %mm7,%mm5
+	punpckldq tfcos36+12,%mm5
+	pfmul %mm5,%mm4
+	movq %mm4,%mm5
+	pfacc %mm5,%mm5
+	movd 120(%edx),%mm6
+	punpckldq 92(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd %mm5,48(%ecx)
+	psrlq $32,%mm5
+	movd %mm5,20(%ecx)
+	movq %mm4,%mm6
+	punpckldq %mm6,%mm5
+	pfsub %mm6,%mm5
+	punpckhdq %mm5,%mm5
+	movd 20(%edx),%mm6
+	punpckldq 48(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd 20(%esi),%mm6
+	punpckldq 48(%esi),%mm6
+	pfadd %mm6,%mm5
+	movd %mm5,640(%ebx)
+	psrlq $32,%mm5
+	movd %mm5,1536(%ebx)
+	movq %mm3,%mm4
+	pfsub %mm2,%mm4
+	movq %mm7,%mm5
+	punpckldq tfcos36+20,%mm5
+	pfmul %mm5,%mm4
+	movq %mm4,%mm5
+	pfacc %mm5,%mm5
+	movd 128(%edx),%mm6
+	punpckldq 84(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd %mm5,56(%ecx)
+	psrlq $32,%mm5
+	movd %mm5,12(%ecx)
+	movq %mm4,%mm6
+	punpckldq %mm6,%mm5
+	pfsub %mm6,%mm5
+	punpckhdq %mm5,%mm5
+	movd 12(%edx),%mm6
+	punpckldq 56(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd 12(%esi),%mm6
+	punpckldq 56(%esi),%mm6
+	pfadd %mm6,%mm5
+	movd %mm5,384(%ebx)
+	psrlq $32,%mm5
+	movd %mm5,1792(%ebx)
+
+	movq (%eax),%mm4
+	movq 16(%eax),%mm3
+	pfsub %mm3,%mm4
+	movq 32(%eax),%mm3
+	pfadd %mm3,%mm4
+	movq 48(%eax),%mm3
+	pfsub %mm3,%mm4
+	movq 64(%eax),%mm3
+	pfadd %mm3,%mm4
+	movq %mm7,%mm5
+	punpckldq tfcos36+16,%mm5
+	pfmul %mm5,%mm4
+	movq %mm4,%mm5
+	pfacc %mm5,%mm5
+	movd 124(%edx),%mm6
+	punpckldq 88(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd %mm5,52(%ecx)
+	psrlq $32,%mm5
+	movd %mm5,16(%ecx)
+	movq %mm4,%mm6
+	punpckldq %mm6,%mm5
+	pfsub %mm6,%mm5
+	punpckhdq %mm5,%mm5
+	movd 16(%edx),%mm6
+	punpckldq 52(%edx),%mm6
+	pfmul %mm6,%mm5
+	movd 16(%esi),%mm6
+	punpckldq 52(%esi),%mm6
+	pfadd %mm6,%mm5
+	movd %mm5,512(%ebx)
+	psrlq $32,%mm5
+	movd %mm5,1664(%ebx)
+
+	femms
+	popl %ebx
+	popl %esi
+	movl %ebp,%esp
+	popl %ebp
+	ret
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mp3lib/dct64_k7.s	Wed May 09 07:59:55 2001 +0000
@@ -0,0 +1,683 @@
+///
+/// Replacement of dct64() with AMD's 3DNowEx(DSP)! SIMD operations support
+///
+/// This code is based on 'dct64_3dnow.s' by Syuuhei Kashiyama
+/// <squash@mb.kcom.ne.jp>; only two types of changes have been made:
+///
+///  - added the new PSWAPD opcode
+///  - changed the function name to support automatic 3DNowEx! detection
+///
+/// note: because K7 processors are aggressive out-of-order three-way
+///       superscalar designs, instruction order is not significant for them.
+///
+/// Modified by Nick Kurshev <nickols_k@mail.ru>
+///
+/// The author of this program disclaims all expressed or implied
+/// warranties with regard to this program, and in no event shall the
+/// author of this program be liable for whatever results from the use
+/// of this program. Use it at your own risk.
+///
+
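+// The passes below mirror the reference dct64(): passes 1-4 are
+// butterfly stages (sums stored to the low half of each block, scaled
+// differences stored dword-reversed to the high half, where a single
+// PSWAPD replaces the three-instruction reversal used by the plain
+// 3DNow! version), pass 5 folds pairs together with PFACC using the
+// {1.0|-1.0} sign vector, and Phase6 scatters the results into the
+// two output buffers.
+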
+        .globl dct64_3dnowex
+        .type    dct64_3dnowex,@function
+dct64_3dnowex:
+        subl $256,%esp
+        pushl %ebp
+        pushl %edi
+        pushl %esi
+        pushl %ebx
+        leal 16(%esp),%ebx
+        movl 284(%esp),%edi
+        movl 276(%esp),%ebp
+        movl 280(%esp),%edx
+        leal 128(%ebx),%esi
+
+        / femms
+
+        // 1
+        movl pnts,%eax
+        movq 0(%edi),%mm0
+        movq %mm0,%mm1
+        movd 124(%edi),%mm2
+        punpckldq 120(%edi),%mm2
+        movq 0(%eax),%mm3
+        pfadd %mm2,%mm0
+        movq %mm0,0(%ebx)
+        pfsub %mm2,%mm1
+        pfmul %mm3,%mm1
+        pswapd %mm1, %mm1
+        movq   %mm1, 120(%ebx)
+        movq 8(%edi),%mm4
+        movq %mm4,%mm5
+        movd 116(%edi),%mm6
+        punpckldq 112(%edi),%mm6
+        movq 8(%eax),%mm7
+        pfadd %mm6,%mm4
+        movq %mm4,8(%ebx)
+        pfsub %mm6,%mm5
+        pfmul %mm7,%mm5
+        pswapd %mm5, %mm5
+        movq   %mm5, 112(%ebx)
+        movq 16(%edi),%mm0
+        movq %mm0,%mm1
+        movd 108(%edi),%mm2
+        punpckldq 104(%edi),%mm2
+        movq 16(%eax),%mm3
+        pfadd %mm2,%mm0
+        movq %mm0,16(%ebx)
+        pfsub %mm2,%mm1
+        pfmul %mm3,%mm1
+        pswapd %mm1, %mm1
+        movq   %mm1, 104(%ebx)
+        movq 24(%edi),%mm4
+        movq %mm4,%mm5
+        movd 100(%edi),%mm6
+        punpckldq 96(%edi),%mm6
+        movq 24(%eax),%mm7
+        pfadd %mm6,%mm4
+        movq %mm4,24(%ebx)
+        pfsub %mm6,%mm5
+        pfmul %mm7,%mm5
+        pswapd %mm5, %mm5
+        movq   %mm5, 96(%ebx)
+        movq 32(%edi),%mm0
+        movq %mm0,%mm1
+        movd 92(%edi),%mm2
+        punpckldq 88(%edi),%mm2
+        movq 32(%eax),%mm3
+        pfadd %mm2,%mm0
+        movq %mm0,32(%ebx)
+        pfsub %mm2,%mm1
+        pfmul %mm3,%mm1
+        pswapd %mm1, %mm1
+        movq   %mm1, 88(%ebx)
+        movq 40(%edi),%mm4
+        movq %mm4,%mm5
+        movd 84(%edi),%mm6
+        punpckldq 80(%edi),%mm6
+        movq 40(%eax),%mm7
+        pfadd %mm6,%mm4
+        movq %mm4,40(%ebx)
+        pfsub %mm6,%mm5
+        pfmul %mm7,%mm5
+        pswapd %mm5, %mm5
+        movq   %mm5, 80(%ebx)
+        movq 48(%edi),%mm0
+        movq %mm0,%mm1
+        movd 76(%edi),%mm2
+        punpckldq 72(%edi),%mm2
+        movq 48(%eax),%mm3
+        pfadd %mm2,%mm0
+        movq %mm0,48(%ebx)
+        pfsub %mm2,%mm1
+        pfmul %mm3,%mm1
+        pswapd %mm1, %mm1
+        movq   %mm1, 72(%ebx)
+        movq 56(%edi),%mm4
+        movq %mm4,%mm5
+        movd 68(%edi),%mm6
+        punpckldq 64(%edi),%mm6
+        movq 56(%eax),%mm7
+        pfadd %mm6,%mm4
+        movq %mm4,56(%ebx)
+        pfsub %mm6,%mm5
+        pfmul %mm7,%mm5
+        pswapd %mm5, %mm5
+        movq   %mm5, 64(%ebx)
+
+        // 2
+        movl pnts+4,%eax
+        / 0, 14
+        movq 0(%ebx),%mm0
+        movq %mm0,%mm1
+        movd 60(%ebx),%mm2
+        punpckldq 56(%ebx),%mm2
+        movq 0(%eax),%mm3
+        pfadd %mm2,%mm0
+        movq %mm0,0(%esi)
+        pfsub %mm2,%mm1
+        pfmul %mm3,%mm1
+        pswapd %mm1, %mm1
+        movq   %mm1, 56(%esi)
+        / 16, 30
+        movq 64(%ebx),%mm0
+        movq %mm0,%mm1
+        movd 124(%ebx),%mm2
+        punpckldq 120(%ebx),%mm2
+        pfadd %mm2,%mm0
+        movq %mm0,64(%esi)
+        pfsubr %mm2,%mm1
+        pfmul %mm3,%mm1
+        pswapd %mm1, %mm1
+        movq   %mm1, 120(%esi)
+        movq 8(%ebx),%mm4
+        / 2, 12
+        movq %mm4,%mm5
+        movd 52(%ebx),%mm6
+        punpckldq 48(%ebx),%mm6
+        movq 8(%eax),%mm7
+        pfadd %mm6,%mm4
+        movq %mm4,8(%esi)
+        pfsub %mm6,%mm5
+        pfmul %mm7,%mm5
+        pswapd %mm5, %mm5
+        movq   %mm5, 48(%esi)
+        movq 72(%ebx),%mm4
+        / 18, 28
+        movq %mm4,%mm5
+        movd 116(%ebx),%mm6
+        punpckldq 112(%ebx),%mm6
+        pfadd %mm6,%mm4
+        movq %mm4,72(%esi)
+        pfsubr %mm6,%mm5
+        pfmul %mm7,%mm5
+        pswapd %mm5, %mm5
+        movq   %mm5, 112(%esi)
+        movq 16(%ebx),%mm0
+        / 4, 10
+        movq %mm0,%mm1
+        movd 44(%ebx),%mm2
+        punpckldq 40(%ebx),%mm2
+        movq 16(%eax),%mm3
+        pfadd %mm2,%mm0
+        movq %mm0,16(%esi)
+        pfsub %mm2,%mm1
+        pfmul %mm3,%mm1
+        pswapd %mm1, %mm1
+        movq   %mm1, 40(%esi)
+        movq 80(%ebx),%mm0
+        / 20, 26
+        movq %mm0,%mm1
+        movd 108(%ebx),%mm2
+        punpckldq 104(%ebx),%mm2
+        pfadd %mm2,%mm0
+        movq %mm0,80(%esi)
+        pfsubr %mm2,%mm1
+        pfmul %mm3,%mm1
+        pswapd %mm1, %mm1
+        movq   %mm1, 104(%esi)
+        movq 24(%ebx),%mm4
+        / 6, 8
+        movq %mm4,%mm5
+        movd 36(%ebx),%mm6
+        punpckldq 32(%ebx),%mm6
+        movq 24(%eax),%mm7
+        pfadd %mm6,%mm4
+        movq %mm4,24(%esi)
+        pfsub %mm6,%mm5
+        pfmul %mm7,%mm5
+        pswapd %mm5, %mm5
+        movq   %mm5, 32(%esi)
+        movq 88(%ebx),%mm4
+        / 22, 24
+        movq %mm4,%mm5
+        movd 100(%ebx),%mm6
+        punpckldq 96(%ebx),%mm6
+        pfadd %mm6,%mm4
+        movq %mm4,88(%esi)
+        pfsubr %mm6,%mm5
+        pfmul %mm7,%mm5
+        pswapd %mm5, %mm5
+        movq   %mm5, 96(%esi)
+
+        // 3
+        movl pnts+8,%eax
+        movq 0(%eax),%mm0
+        movq 8(%eax),%mm1
+        movq 0(%esi),%mm2
+        / 0, 6
+        movq %mm2,%mm3
+        movd 28(%esi),%mm4
+        punpckldq 24(%esi),%mm4
+        pfadd %mm4,%mm2
+        pfsub %mm4,%mm3
+        pfmul %mm0,%mm3
+        movq %mm2,0(%ebx)
+        pswapd %mm3, %mm3
+        movq   %mm3, 24(%ebx)
+        movq 8(%esi),%mm5
+        / 2, 4
+        movq %mm5,%mm6
+        movd 20(%esi),%mm7
+        punpckldq 16(%esi),%mm7
+        pfadd %mm7,%mm5
+        pfsub %mm7,%mm6
+        pfmul %mm1,%mm6
+        movq %mm5,8(%ebx)
+        pswapd %mm6, %mm6
+        movq   %mm6, 16(%ebx)
+        movq 32(%esi),%mm2
+        / 8, 14
+        movq %mm2,%mm3
+        movd 60(%esi),%mm4
+        punpckldq 56(%esi),%mm4
+        pfadd %mm4,%mm2
+        pfsubr %mm4,%mm3
+        pfmul %mm0,%mm3
+        movq %mm2,32(%ebx)
+        pswapd %mm3, %mm3
+        movq   %mm3, 56(%ebx)
+        movq 40(%esi),%mm5
+        / 10, 12
+        movq %mm5,%mm6
+        movd 52(%esi),%mm7
+        punpckldq 48(%esi),%mm7
+        pfadd %mm7,%mm5
+        pfsubr %mm7,%mm6
+        pfmul %mm1,%mm6
+        movq %mm5,40(%ebx)
+        pswapd %mm6, %mm6
+        movq   %mm6, 48(%ebx)
+        movq 64(%esi),%mm2
+        / 16, 22
+        movq %mm2,%mm3
+        movd 92(%esi),%mm4
+        punpckldq 88(%esi),%mm4
+        pfadd %mm4,%mm2
+        pfsub %mm4,%mm3
+        pfmul %mm0,%mm3
+        movq %mm2,64(%ebx)
+        pswapd %mm3, %mm3
+        movq   %mm3, 88(%ebx)
+        movq 72(%esi),%mm5
+        / 18, 20
+        movq %mm5,%mm6
+        movd 84(%esi),%mm7
+        punpckldq 80(%esi),%mm7
+        pfadd %mm7,%mm5
+        pfsub %mm7,%mm6
+        pfmul %mm1,%mm6
+        movq %mm5,72(%ebx)
+        pswapd %mm6, %mm6
+        movq   %mm6, 80(%ebx)
+        movq 96(%esi),%mm2
+        / 24, 30
+        movq %mm2,%mm3
+        movd 124(%esi),%mm4
+        punpckldq 120(%esi),%mm4
+        pfadd %mm4,%mm2
+        pfsubr %mm4,%mm3
+        pfmul %mm0,%mm3
+        movq %mm2,96(%ebx)
+        pswapd %mm3, %mm3
+        movq   %mm3, 120(%ebx)
+        movq 104(%esi),%mm5
+        / 26, 28
+        movq %mm5,%mm6
+        movd 116(%esi),%mm7
+        punpckldq 112(%esi),%mm7
+        pfadd %mm7,%mm5
+        pfsubr %mm7,%mm6
+        pfmul %mm1,%mm6
+        movq %mm5,104(%ebx)
+        pswapd %mm6, %mm6
+        movq   %mm6, 112(%ebx)
+
+        // 4
+        movl pnts+12,%eax
+        movq 0(%eax),%mm0
+        movq 0(%ebx),%mm1
+        / 0
+        movq %mm1,%mm2
+        movd 12(%ebx),%mm3
+        punpckldq 8(%ebx),%mm3
+        pfadd %mm3,%mm1
+        pfsub %mm3,%mm2
+        pfmul %mm0,%mm2
+        movq %mm1,0(%esi)
+        pswapd %mm2, %mm2
+        movq   %mm2, 8(%esi)
+        movq 16(%ebx),%mm4
+        / 4
+        movq %mm4,%mm5
+        movd 28(%ebx),%mm6
+        punpckldq 24(%ebx),%mm6
+        pfadd %mm6,%mm4
+        pfsubr %mm6,%mm5
+        pfmul %mm0,%mm5
+        movq %mm4,16(%esi)
+        pswapd %mm5, %mm5
+        movq   %mm5, 24(%esi)
+        movq 32(%ebx),%mm1
+        / 8
+        movq %mm1,%mm2
+        movd 44(%ebx),%mm3
+        punpckldq 40(%ebx),%mm3
+        pfadd %mm3,%mm1
+        pfsub %mm3,%mm2
+        pfmul %mm0,%mm2
+        movq %mm1,32(%esi)
+        pswapd %mm2, %mm2
+        movq   %mm2, 40(%esi)
+        movq 48(%ebx),%mm4
+        / 12
+        movq %mm4,%mm5
+        movd 60(%ebx),%mm6
+        punpckldq 56(%ebx),%mm6
+        pfadd %mm6,%mm4
+        pfsubr %mm6,%mm5
+        pfmul %mm0,%mm5
+        movq %mm4,48(%esi)
+        pswapd %mm5, %mm5
+        movq   %mm5, 56(%esi)
+        movq 64(%ebx),%mm1
+        / 16
+        movq %mm1,%mm2
+        movd 76(%ebx),%mm3
+        punpckldq 72(%ebx),%mm3
+        pfadd %mm3,%mm1
+        pfsub %mm3,%mm2
+        pfmul %mm0,%mm2
+        movq %mm1,64(%esi)
+        pswapd %mm2, %mm2
+        movq   %mm2, 72(%esi)
+        movq 80(%ebx),%mm4
+        / 20
+        movq %mm4,%mm5
+        movd 92(%ebx),%mm6
+        punpckldq 88(%ebx),%mm6
+        pfadd %mm6,%mm4
+        pfsubr %mm6,%mm5
+        pfmul %mm0,%mm5
+        movq %mm4,80(%esi)
+        pswapd %mm5, %mm5
+        movq   %mm5, 88(%esi)
+        movq 96(%ebx),%mm1
+        / 24
+        movq %mm1,%mm2
+        movd 108(%ebx),%mm3
+        punpckldq 104(%ebx),%mm3
+        pfadd %mm3,%mm1
+        pfsub %mm3,%mm2
+        pfmul %mm0,%mm2
+        movq %mm1,96(%esi)
+        pswapd %mm2, %mm2
+        movq   %mm2, 104(%esi)
+        movq 112(%ebx),%mm4
+        / 28
+        movq %mm4,%mm5
+        movd 124(%ebx),%mm6
+        punpckldq 120(%ebx),%mm6
+        pfadd %mm6,%mm4
+        pfsubr %mm6,%mm5
+        pfmul %mm0,%mm5
+        movq %mm4,112(%esi)
+        pswapd %mm5, %mm5
+        movq   %mm5, 120(%esi)
+
+        // 5
+        movl $-1,%eax
+        movd %eax,%mm1
+        movl $1,%eax
+        movd %eax,%mm0
+        / L | H
+        punpckldq %mm1,%mm0
+        pi2fd %mm0,%mm0
+        / 1.0 | -1.0
+        movd %eax,%mm1
+        pi2fd %mm1,%mm1
+        movl pnts+16,%eax
+        movd 0(%eax),%mm2
+        punpckldq %mm2,%mm1
+        / 1.0 | cos0
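+        / at this point mm0 = { 1.0 | -1.0 } (the sign vector) and
+        / mm1 = { 1.0 | cos0 }; both stay live for the rest of this pass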
+        movq 0(%esi),%mm2
+        / 0
+        movq %mm2,%mm3
+        pfmul %mm0,%mm3
+        pfacc %mm3,%mm2
+        pfmul %mm1,%mm2
+        movq %mm2,0(%ebx)
+        movq 8(%esi),%mm4
+        movq %mm4,%mm5
+        pfmul %mm0,%mm5
+        pfacc %mm5,%mm4
+        pfmul %mm0,%mm4
+        pfmul %mm1,%mm4
+        movq %mm4,%mm5
+        psrlq $32,%mm5
+        pfacc %mm5,%mm4
+        movq %mm4,8(%ebx)
+        movq 16(%esi),%mm2
+        / 4
+        movq %mm2,%mm3
+        pfmul %mm0,%mm3
+        pfacc %mm3,%mm2
+        pfmul %mm1,%mm2
+        movq 24(%esi),%mm4
+        movq %mm4,%mm5
+        pfmul %mm0,%mm5
+        pfacc %mm5,%mm4
+        pfmul %mm0,%mm4
+        pfmul %mm1,%mm4
+        movq %mm4,%mm5
+        psrlq $32,%mm5
+        pfacc %mm5,%mm4
+        movq %mm2,%mm3
+        psrlq $32,%mm3
+        pfadd %mm4,%mm2
+        pfadd %mm3,%mm4
+        movq %mm2,16(%ebx)
+        movq %mm4,24(%ebx)
+        movq 32(%esi),%mm2
+        / 8
+        movq %mm2,%mm3
+        pfmul %mm0,%mm3
+        pfacc %mm3,%mm2
+        pfmul %mm1,%mm2
+        movq %mm2,32(%ebx)
+        movq 40(%esi),%mm4
+        movq %mm4,%mm5
+        pfmul %mm0,%mm5
+        pfacc %mm5,%mm4
+        pfmul %mm0,%mm4
+        pfmul %mm1,%mm4
+        movq %mm4,%mm5
+        psrlq $32,%mm5
+        pfacc %mm5,%mm4
+        movq %mm4,40(%ebx)
+        movq 48(%esi),%mm2
+        / 12
+        movq %mm2,%mm3
+        pfmul %mm0,%mm3
+        pfacc %mm3,%mm2
+        pfmul %mm1,%mm2
+        movq 56(%esi),%mm4
+        movq %mm4,%mm5
+        pfmul %mm0,%mm5
+        pfacc %mm5,%mm4
+        pfmul %mm0,%mm4
+        pfmul %mm1,%mm4
+        movq %mm4,%mm5
+        psrlq $32,%mm5
+        pfacc %mm5,%mm4
+        movq %mm2,%mm3
+        psrlq $32,%mm3
+        pfadd %mm4,%mm2
+        pfadd %mm3,%mm4
+        movq %mm2,48(%ebx)
+        movq %mm4,56(%ebx)
+        movq 64(%esi),%mm2
+        / 16
+        movq %mm2,%mm3
+        pfmul %mm0,%mm3
+        pfacc %mm3,%mm2
+        pfmul %mm1,%mm2
+        movq %mm2,64(%ebx)
+        movq 72(%esi),%mm4
+        movq %mm4,%mm5
+        pfmul %mm0,%mm5
+        pfacc %mm5,%mm4
+        pfmul %mm0,%mm4
+        pfmul %mm1,%mm4
+        movq %mm4,%mm5
+        psrlq $32,%mm5
+        pfacc %mm5,%mm4
+        movq %mm4,72(%ebx)
+        movq 80(%esi),%mm2
+        / 20
+        movq %mm2,%mm3
+        pfmul %mm0,%mm3
+        pfacc %mm3,%mm2
+        pfmul %mm1,%mm2
+        movq 88(%esi),%mm4
+        movq %mm4,%mm5
+        pfmul %mm0,%mm5
+        pfacc %mm5,%mm4
+        pfmul %mm0,%mm4
+        pfmul %mm1,%mm4
+        movq %mm4,%mm5
+        psrlq $32,%mm5
+        pfacc %mm5,%mm4
+        movq %mm2,%mm3
+        psrlq $32,%mm3
+        pfadd %mm4,%mm2
+        pfadd %mm3,%mm4
+        movq %mm2,80(%ebx)
+        movq %mm4,88(%ebx)
+        movq 96(%esi),%mm2
+        / 24
+        movq %mm2,%mm3
+        pfmul %mm0,%mm3
+        pfacc %mm3,%mm2
+        pfmul %mm1,%mm2
+        movq %mm2,96(%ebx)
+        movq 104(%esi),%mm4
+        movq %mm4,%mm5
+        pfmul %mm0,%mm5
+        pfacc %mm5,%mm4
+        pfmul %mm0,%mm4
+        pfmul %mm1,%mm4
+        movq %mm4,%mm5
+        psrlq $32,%mm5
+        pfacc %mm5,%mm4
+        movq %mm4,104(%ebx)
+        movq 112(%esi),%mm2
+        / 28
+        movq %mm2,%mm3
+        pfmul %mm0,%mm3
+        pfacc %mm3,%mm2
+        pfmul %mm1,%mm2
+        movq 120(%esi),%mm4
+        movq %mm4,%mm5
+        pfmul %mm0,%mm5
+        pfacc %mm5,%mm4
+        pfmul %mm0,%mm4
+        pfmul %mm1,%mm4
+        movq %mm4,%mm5
+        psrlq $32,%mm5
+        pfacc %mm5,%mm4
+        movq %mm2,%mm3
+        psrlq $32,%mm3
+        pfadd %mm4,%mm2
+        pfadd %mm3,%mm4
+        movq %mm2,112(%ebx)
+        movq %mm4,120(%ebx)
+
+        // Phase6
+        movl 0(%ebx),%eax
+        movl %eax,1024(%ebp)
+        movl 4(%ebx),%eax
+        movl %eax,0(%ebp)
+        movl %eax,0(%edx)
+        movl 8(%ebx),%eax
+        movl %eax,512(%ebp)
+        movl 12(%ebx),%eax
+        movl %eax,512(%edx)
+
+        movl 16(%ebx),%eax
+        movl %eax,768(%ebp)
+        movl 20(%ebx),%eax
+        movl %eax,256(%edx)
+
+        movl 24(%ebx),%eax
+        movl %eax,256(%ebp)
+        movl 28(%ebx),%eax
+        movl %eax,768(%edx)
+
+        movq 32(%ebx),%mm0
+        movq 48(%ebx),%mm1
+        pfadd %mm1,%mm0
+        movd %mm0,896(%ebp)
+        psrlq $32,%mm0
+        movd %mm0,128(%edx)
+        movq 40(%ebx),%mm2
+        pfadd %mm2,%mm1
+        movd %mm1,640(%ebp)
+        psrlq $32,%mm1
+        movd %mm1,384(%edx)
+
+        movq 56(%ebx),%mm3
+        pfadd %mm3,%mm2
+        movd %mm2,384(%ebp)
+        psrlq $32,%mm2
+        movd %mm2,640(%edx)
+
+        movd 36(%ebx),%mm4
+        pfadd %mm4,%mm3
+        movd %mm3,128(%ebp)
+        psrlq $32,%mm3
+        movd %mm3,896(%edx)
+        movq 96(%ebx),%mm0
+        movq 64(%ebx),%mm1
+
+        movq 112(%ebx),%mm2
+        pfadd %mm2,%mm0
+        movq %mm0,%mm3
+        pfadd %mm1,%mm3
+        movd %mm3,960(%ebp)
+        psrlq $32,%mm3
+        movd %mm3,64(%edx)
+        movq 80(%ebx),%mm1
+        pfadd %mm1,%mm0
+        movd %mm0,832(%ebp)
+        psrlq $32,%mm0
+        movd %mm0,192(%edx)
+        movq 104(%ebx),%mm3
+        pfadd %mm3,%mm2
+        movq %mm2,%mm4
+        pfadd %mm1,%mm4
+        movd %mm4,704(%ebp)
+        psrlq $32,%mm4
+        movd %mm4,320(%edx)
+        movq 72(%ebx),%mm1
+        pfadd %mm1,%mm2
+        movd %mm2,576(%ebp)
+        psrlq $32,%mm2
+        movd %mm2,448(%edx)
+
+        movq 120(%ebx),%mm4
+        pfadd %mm4,%mm3
+        movq %mm3,%mm5
+        pfadd %mm1,%mm5
+        movd %mm5,448(%ebp)
+        psrlq $32,%mm5
+        movd %mm5,576(%edx)
+        movq 88(%ebx),%mm1
+        pfadd %mm1,%mm3
+        movd %mm3,320(%ebp)
+        psrlq $32,%mm3
+        movd %mm3,704(%edx)
+
+        movd 100(%ebx),%mm5
+        pfadd %mm5,%mm4
+        movq %mm4,%mm6
+        pfadd %mm1,%mm6
+        movd %mm6,192(%ebp)
+        psrlq $32,%mm6
+        movd %mm6,832(%edx)
+        movd 68(%ebx),%mm1
+        pfadd %mm1,%mm4
+        movd %mm4,64(%ebp)
+        psrlq $32,%mm4
+        movd %mm4,960(%edx)
+
+        / femms
+
+        popl %ebx
+        popl %esi
+        popl %edi
+        popl %ebp
+        addl $256,%esp
+
+        ret
+
--- a/mp3lib/decod386.c	Wed May 09 07:55:32 2001 +0000
+++ b/mp3lib/decod386.c	Wed May 09 07:59:55 2001 +0000
@@ -117,6 +117,15 @@
   int clip = 0;
   int bo1;
 
+  #ifdef HAVE_3DNOWEX
+  if ( _3dnow > 1 )
+   {
+    int ret;
+    ret=synth_1to1_3dnowex( bandPtr,channel,out+*pnt );
+    *pnt+=128;
+    return ret;
+   }
+  #endif
   #ifdef HAVE_3DNOW
   if ( _3dnow )
    {
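
The added branch dispatches on the value of _3dnow: the plain 3DNow!
path (below) runs when it is nonzero, and the new K7 routines take over
when it is greater than 1. How _3dnow gets its value is outside this
changeset; a minimal self-contained sketch of deriving such a level
from the AMD CPUID extended feature bits (all names here are
hypothetical, not mp3lib's):

    #include <stdio.h>

    /* Execute CPUID for the given function and return EDX; the
     * returned EAX is stored through *max when requested. */
    static unsigned int cpuid_edx(unsigned int func, unsigned int *max)
    {
        unsigned int a, b, c, d;
        __asm__ __volatile__("cpuid"
                             : "=a"(a), "=b"(b), "=c"(c), "=d"(d)
                             : "a"(func));
        if (max) *max = a;
        return d;
    }

    static int detect_3dnow_level(void)
    {
        unsigned int max;
        cpuid_edx(0x80000000U, &max);    /* highest extended function */
        if (max < 0x80000001U)
            return 0;
        unsigned int d = cpuid_edx(0x80000001U, 0);
        if (!(d & (1U << 31)))           /* EDX bit 31: 3DNow!          */
            return 0;
        return (d & (1U << 30)) ? 2 : 1; /* EDX bit 30: extended 3DNow! */
    }

    int main(void)
    {
        printf("3DNow! level: %d\n", detect_3dnow_level());
        return 0;
    }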
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mp3lib/decode_k7.s	Wed May 09 07:59:55 2001 +0000
@@ -0,0 +1,279 @@
+///
+/// Replacement of synth_1to1() with AMD's 3DNowEx(DSP)! SIMD operations support
+///
+/// This code is based on 'decode_3dnow.s' by Syuuhei Kashiyama
+/// <squash@mb.kcom.ne.jp>; only one type of change has been made:
+///
+///  - changed the function name to support automatic 3DNowEx! detection
+///
+/// note: because K7 processors are aggressive out-of-order three-way
+///       superscalar designs, instruction order is not significant for them.
+///
+/// Modified by Nick Kurshev <nickols_k@mail.ru>
+///
+/ synth_1to1_3dnow works the same way as the C version of
+/ synth_1to1. This assembler code is based on 'decode-i586.s'
+/ (by Stefan Bieschewski <stb@acm.org>); two types of changes
+/ have been made:
+/ - use {MMX,3DNow!} instructions to reduce CPU load
+/ - removed unused(?) local symbols
+/
+/ useful sources of information on optimizing 3DNow! code include:
+/ AMD 3DNow! Technology Manual (Publication #21928)
+/     English:  http://www.amd.com/K6/k6docs/pdf/21928d.pdf
+/    (Japanese: http://www.amd.com/japan/K6/k6docs/j21928c.pdf)
+/ AMD-K6-2 Processor Code Optimization Application Note (Publication #21924)
+/     English:  http://www.amd.com/K6/k6docs/pdf/21924b.pdf
+/
+/ This code was tested only on AMD-K6-2 processor Linux systems;
+/ please tell me:
+/ - whether this code works on other 3DNow!-capable processors
+/   (e.g. IDT-C6-2) or not
+/ - whether this code works on other OSes or not
+/
+/ by KIMURA Takuhiro <kim@hannah.ipc.miyakyo-u.ac.jp> - until 31.Mar.1998
+/                    <kim@comtec.co.jp>               - after  1.Apr.1998
+
+/ Enhancements for q-word operation by Michael Hipp
+
+.bss
+        .comm   buffs,4352,4
+.data
+        .align 4
+bo:
+        .long 1
+.text
+.globl synth_1to1_3dnowex
+synth_1to1_3dnowex:
+        subl  $12,%esp
+        pushl %ebp
+        pushl %edi
+        pushl %esi
+        pushl %ebx
+	
+        movl  32(%esp),%eax
+        movl  40(%esp),%esi
+        movl  $0,%edi
+        movl  bo,%ebp
+        cmpl  %edi,36(%esp)
+        jne   .L48
+        decl  %ebp
+        andl  $15,%ebp
+        movl  %ebp,bo
+        movl  $buffs,%ecx
+        jmp   .L49
+.L48:
+        addl  $2,%esi
+        movl  $buffs+2176,%ecx
+.L49:
+        testl $1,%ebp
+        je    .L50
+        movl  %ecx,%ebx
+        movl  %ebp,16(%esp)
+        pushl %eax
+        movl  20(%esp),%edx
+        leal  (%ebx,%edx,4),%eax
+        pushl %eax
+        movl  24(%esp),%eax
+        incl  %eax
+        andl  $15,%eax
+        leal  1088(,%eax,4),%eax
+        addl  %ebx,%eax
+        jmp   .L74
+.L50:
+        leal  1088(%ecx),%ebx
+        leal  1(%ebp),%edx
+        movl  %edx,16(%esp)
+        pushl %eax
+        leal  1092(%ecx,%ebp,4),%eax
+        pushl %eax
+        leal  (%ecx,%ebp,4),%eax
+.L74:
+        pushl %eax
+        call  dct64_3dnowex
+        addl  $12,%esp
+        movl  16(%esp),%edx
+        leal  0(,%edx,4),%edx
+        movl  $decwin+64,%eax
+        movl  %eax,%ecx
+        subl  %edx,%ecx
+        movl  $16,%ebp
+
+.L55:
+        movq  (%ecx),%mm4
+        movq  (%ebx),%mm3
+        movq  8(%ecx),%mm0
+        movq  8(%ebx),%mm1
+        pfmul %mm3,%mm4
+
+        movq  16(%ecx),%mm2
+        pfmul %mm1,%mm0
+        movq  16(%ebx),%mm3
+        pfadd %mm0,%mm4
+
+        movq  24(%ecx),%mm0
+        pfmul %mm2,%mm3
+        movq  24(%ebx),%mm1
+        pfadd %mm3,%mm4
+
+        movq  32(%ecx),%mm2
+        pfmul %mm1,%mm0
+        movq  32(%ebx),%mm3
+        pfadd %mm0,%mm4
+
+        movq  40(%ecx),%mm0
+        pfmul %mm2,%mm3
+        movq  40(%ebx),%mm1
+        pfadd %mm3,%mm4
+
+        movq  48(%ecx),%mm2
+        pfmul %mm1,%mm0
+        movq  48(%ebx),%mm3
+        pfadd %mm0,%mm4
+
+        movq  56(%ecx),%mm0
+        pfmul %mm2,%mm3
+        movq  56(%ebx),%mm1
+        pfadd %mm3,%mm4
+
+        pfmul %mm1,%mm0
+        pfadd %mm0,%mm4
+
+        movq  %mm4,%mm0
+        psrlq $32,%mm0
+        pfsub %mm0,%mm4
+
+        pf2id %mm4,%mm4
+        movd  %mm4,%eax
+
+        sar   $16,%eax
+        movw  %ax,(%esi)
+
+        addl  $64,%ebx
+        subl  $-128,%ecx
+        addl  $4,%esi
+        decl  %ebp
+        jnz  .L55
+
+/ --- end of  loop 1 ---
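+/
+/ In C terms, the pf2id / sar $16 / movw tail of each iteration above is
+/ roughly:  *samples++ = (short)(((int)sum) >> 16);
+/ i.e. the accumulated sum is treated as a 16.16 fixed-point value and
+/ its high word becomes the output PCM sample.
+/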
+
+        movd  (%ecx),%mm2
+        movd  (%ebx),%mm1
+        pfmul %mm1,%mm2
+
+        movd  8(%ecx),%mm0
+        movd  8(%ebx),%mm1
+        pfmul %mm0,%mm1
+        pfadd %mm1,%mm2
+
+        movd  16(%ecx),%mm0
+        movd  16(%ebx),%mm1
+        pfmul %mm0,%mm1
+        pfadd %mm1,%mm2
+
+        movd  24(%ecx),%mm0
+        movd  24(%ebx),%mm1
+        pfmul %mm0,%mm1
+        pfadd %mm1,%mm2
+
+        movd  32(%ecx),%mm0
+        movd  32(%ebx),%mm1
+        pfmul %mm0,%mm1
+        pfadd %mm1,%mm2
+
+        movd  40(%ecx),%mm0
+        movd  40(%ebx),%mm1
+        pfmul %mm0,%mm1
+        pfadd %mm1,%mm2
+
+        movd  48(%ecx),%mm0
+        movd  48(%ebx),%mm1
+        pfmul %mm0,%mm1
+        pfadd %mm1,%mm2
+
+        movd  56(%ecx),%mm0
+        movd  56(%ebx),%mm1
+        pfmul %mm0,%mm1
+        pfadd %mm1,%mm2
+
+        pf2id %mm2,%mm2
+        movd  %mm2,%eax
+
+        sar   $16,%eax
+
+        movw  %ax,(%esi)
+
+        addl  $-64,%ebx
+        addl  $4,%esi
+        addl  $256,%ecx
+        movl  $15,%ebp
+
+.L68:
+        psubd %mm0,%mm0
+
+        movq  (%ebx),%mm1
+        movq  (%ecx),%mm2
+        pfmul %mm1,%mm2
+        pfsub %mm2,%mm0
+
+        movq  8(%ebx),%mm3
+        movq  8(%ecx),%mm4
+        pfmul %mm3,%mm4
+        pfsub %mm4,%mm0
+
+        movq  16(%ebx),%mm1
+        movq  16(%ecx),%mm2
+        pfmul %mm1,%mm2
+        pfsub %mm2,%mm0
+
+        movq  24(%ebx),%mm3
+        movq  24(%ecx),%mm4
+        pfmul %mm3,%mm4
+        pfsub %mm4,%mm0
+
+        movq  32(%ebx),%mm1
+        movq  32(%ecx),%mm2
+        pfmul %mm1,%mm2
+        pfsub %mm2,%mm0
+
+        movq  40(%ebx),%mm3
+        movq  40(%ecx),%mm4
+        pfmul %mm3,%mm4
+        pfsub %mm4,%mm0
+
+        movq  48(%ebx),%mm1
+        movq  48(%ecx),%mm2
+        pfmul %mm1,%mm2
+        pfsub %mm2,%mm0
+
+        movq  56(%ebx),%mm3
+        movq  56(%ecx),%mm4
+        pfmul %mm3,%mm4
+        pfsub %mm4,%mm0
+
+        pfacc %mm0,%mm0
+
+        pf2id %mm0,%mm0
+        movd  %mm0,%eax
+
+        sar   $16,%eax
+
+        movw  %ax,(%esi)
+
+        addl  $-64,%ebx
+        subl  $-128,%ecx
+        addl  $4,%esi
+        decl  %ebp
+        jnz   .L68
+
+/ --- end of loop 2
+
+        femms
+
+        movl  %edi,%eax
+        popl  %ebx
+        popl  %esi
+        popl  %edi
+        popl  %ebp
+        addl  $12,%esp
+        ret
--- a/mp3lib/mpg123.h	Wed May 09 07:55:32 2001 +0000
+++ b/mp3lib/mpg123.h	Wed May 09 07:59:55 2001 +0000
@@ -123,3 +123,8 @@
  extern void dct36_3dnow(real *,real *,real *,real *,real *);
  extern int  synth_1to1_3dnow( real *,int,unsigned char * );
 #endif
+#ifdef HAVE_3DNOWEX
+ extern void dct64_3dnowex( real *,real *, real * );
+ extern void dct36_3dnowex(real *,real *,real *,real *,real *);
+ extern int  synth_1to1_3dnowex( real *,int,unsigned char * );
+#endif
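
With these declarations in place, callers can prefer the K7 routines at
run time and fall back to the plain 3DNow! or generic versions. A sketch
of that wiring (hypothetical selector, not code from this changeset; the
actual selection for synth_1to1 is the decod386.c hunk above):

    #include "mpg123.h"

    extern int _3dnow;   /* set at init: 0 = none, 1 = 3DNow!, >1 = 3DNow!Ex */
    extern void dct36(real *, real *, real *, real *, real *);

    /* Pick the best available dct36 implementation once at startup. */
    static void (*dct36_func)(real *, real *, real *, real *, real *);

    static void select_dct36(void)
    {
    #ifdef HAVE_3DNOWEX
        if (_3dnow > 1) { dct36_func = dct36_3dnowex; return; }
    #endif
    #ifdef HAVE_3DNOW
        if (_3dnow)     { dct36_func = dct36_3dnow;   return; }
    #endif
        dct36_func = dct36;  /* generic C version */
    }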