diff common.h @ 2293:15cfba1b97b5 libavcodec

Adapt existing MMX/MMX2/SSE/3DNow! optimizations so they work on x86_64. Patch by Aurelien Jacobs <aurel at gnuage dot org>.
author michael
date Mon, 11 Oct 2004 02:19:29 +0000
parents 3d4a1f8e6a27
children 1925d732ea42
line wrap: on
line diff
--- a/common.h	Sat Oct 09 12:02:19 2004 +0000
+++ b/common.h	Mon Oct 11 02:19:29 2004 +0000
@@ -254,7 +254,7 @@
 
 extern const uint32_t inverse[256];
 
-#ifdef ARCH_X86
+#if defined(ARCH_X86) || defined(ARCH_X86_64)
 #    define FASTDIV(a,b) \
     ({\
         int ret,dmy;\
@@ -271,7 +271,7 @@
 #    define FASTDIV(a,b)   ((a)/(b))
 #endif
  
-#ifdef ARCH_X86
+#if defined(ARCH_X86) || defined(ARCH_X86_64)
 // avoid +32 for shift optimization (gcc should do that ...)
 static inline  int32_t NEG_SSR32( int32_t a, int8_t s){
     asm ("sarl %1, %0\n\t"
@@ -390,7 +390,7 @@
 #endif
 
 /* used to avoid missaligned exceptions on some archs (alpha, ...) */
-#ifdef ARCH_X86
+#if defined(ARCH_X86) || defined(ARCH_X86_64)
 #    define unaligned32(a) (*(const uint32_t*)(a))
 #else
 #    ifdef __GNUC__
@@ -460,7 +460,7 @@
 static inline void put_bits(PutBitContext *s, int n, unsigned int value)
 {
 #    ifdef ALIGNED_BITSTREAM_WRITER
-#        ifdef ARCH_X86
+#        if defined(ARCH_X86) || defined(ARCH_X86_64)
     asm volatile(
 	"movl %0, %%ecx			\n\t"
 	"xorl %%eax, %%eax		\n\t"
@@ -491,7 +491,7 @@
     s->index= index;
 #        endif
 #    else //ALIGNED_BITSTREAM_WRITER
-#        ifdef ARCH_X86
+#        if defined(ARCH_X86) || defined(ARCH_X86_64)
     asm volatile(
 	"movl $7, %%ecx			\n\t"
 	"andl %0, %%ecx			\n\t"
@@ -738,7 +738,7 @@
         name##_bit_count-= 32;\
     }\
 
-#ifdef ARCH_X86
+#if defined(ARCH_X86) || defined(ARCH_X86_64)
 #   define SKIP_CACHE(name, gb, num)\
         asm(\
             "shldl %2, %1, %0		\n\t"\
@@ -1218,7 +1218,7 @@
 #define MKBETAG(a,b,c,d) (d | (c << 8) | (b << 16) | (a << 24))
 
 
-#ifdef ARCH_X86
+#if defined(ARCH_X86) || defined(ARCH_X86_64)
 #define MASK_ABS(mask, level)\
             asm volatile(\
 		"cdq			\n\t"\
@@ -1252,7 +1252,7 @@
 }
 #endif
 
-#ifdef ARCH_X86
+#if defined(ARCH_X86) || defined(ARCH_X86_64)
 static inline long long rdtsc(void)
 {
 	long long l;