diff libmpcodecs/vf_spp.c @ 27754:08d18fe9da52

Change all occurrences of asm and __asm to __asm__, same as was done for FFmpeg. Neither variant is valid C99 syntax, but __asm__ is the most portable variant.
author diego
date Thu, 16 Oct 2008 18:59:27 +0000
parents 82601a38e2a7
children 25337a2147e7
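
For context, here is a minimal hedged sketch (not part of this changeset; the helper name is hypothetical) of the keyword issue the commit message describes: under "gcc -std=c99" the bare asm keyword is disabled because it is not ISO C99, while the reserved __asm__ spelling stays available as a GNU extension even in strict standard modes, which is why the tree converts to it.

/* Hedged sketch, not taken from vf_spp.c.  The bare `asm` keyword is
 * rejected when compiling with "gcc -std=c99", whereas the __asm__
 * spelling lives in the implementation-reserved namespace and remains
 * usable as a GNU extension in strict standard modes. */
#if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
static inline void flush_mmx_state(void)
{
    /* EMMS clears the MMX tag word so x87 FPU code can run afterwards. */
    __asm__ volatile ("emms\n\t");
}
#endif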
--- a/libmpcodecs/vf_spp.c	Thu Oct 16 18:28:38 2008 +0000
+++ b/libmpcodecs/vf_spp.c	Thu Oct 16 18:59:27 2008 +0000
@@ -153,7 +153,7 @@
 	
 	threshold1= qp*((1<<4) - bias) - 1;
 	
-        asm volatile(
+        __asm__ volatile(
 #define REQUANT_CORE(dst0, dst1, dst2, dst3, src0, src1, src2, src3) \
 		"movq " #src0 ", %%mm0	\n\t"\
 		"movq " #src1 ", %%mm1	\n\t"\
@@ -221,7 +221,7 @@
 
 	threshold1= qp*((1<<4) - bias) - 1;
 	
-        asm volatile(
+        __asm__ volatile(
 #undef REQUANT_CORE
 #define REQUANT_CORE(dst0, dst1, dst2, dst3, src0, src1, src2, src3) \
 		"movq " #src0 ", %%mm0	\n\t"\
@@ -334,7 +334,7 @@
 	for(y=0; y<height; y++){
 		uint8_t *dst1= dst;
 		int16_t *src1= src;
-		asm volatile(
+		__asm__ volatile(
 			"movq (%3), %%mm3	\n\t"
 			"movq (%3), %%mm4	\n\t"
 			"movd %4, %%mm2		\n\t"
@@ -500,10 +500,10 @@
 	}
 
 #ifdef HAVE_MMX
-	if(gCpuCaps.hasMMX) asm volatile ("emms\n\t");
+	if(gCpuCaps.hasMMX) __asm__ volatile ("emms\n\t");
 #endif
 #ifdef HAVE_MMX2
-	if(gCpuCaps.hasMMX2) asm volatile ("sfence\n\t");
+	if(gCpuCaps.hasMMX2) __asm__ volatile ("sfence\n\t");
 #endif
 
 	return vf_next_put_image(vf,dmpi, pts);