comparison libvo/osd_template.c @ 28290:25337a2147e7

Lots and lots of #ifdef ARCH_... -> #if ARCH_... and #ifdef HAVE_MMX etc. -> #if HAVE_MMX. There may still be more that need to be fixed.
author reimar
date Fri, 16 Jan 2009 09:21:21 +0000
parents 08d18fe9da52
children 31287e75b5d8
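
For context (not part of this changeset): the #ifdef -> #if conversion only works if the build system always defines every feature macro, to 1 or 0, rather than defining it only when the feature is available. Below is a minimal compilable sketch of that convention; the flag values are made up and merely stand in for what configure would normally write into config.h.

/* hypothetical config.h-style flags: always defined, either 1 or 0 */
#define HAVE_MMX   1
#define HAVE_3DNOW 0

#include <stdio.h>

int main(void)
{
#if HAVE_MMX            /* new style: valid because the macro is always 0 or 1 */
    puts("MMX path compiled in");
#else
    puts("C-only path compiled in");
#endif
#if HAVE_3DNOW          /* expands to 0, so this block is skipped */
    puts("3DNow! path compiled in");
#endif
    return 0;
}

With the old #ifdef style, a macro left undefined silently disabled the feature; with #if, an accidentally undefined macro can additionally be flagged at compile time via -Wundef. Macros that remain presence-only, such as FAST_OSD in this file, keep using defined().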
comparing 28289:4210f3621db1 (left, old) with 28290:25337a2147e7 (right, new)
5 #undef PREFETCH 5 #undef PREFETCH
6 #undef EMMS 6 #undef EMMS
7 #undef PREFETCHW 7 #undef PREFETCHW
8 #undef PAVGB 8 #undef PAVGB
9 9
10 #ifdef HAVE_3DNOW 10 #if HAVE_3DNOW
11 #define PREFETCH "prefetch" 11 #define PREFETCH "prefetch"
12 #define PREFETCHW "prefetchw" 12 #define PREFETCHW "prefetchw"
13 #define PAVGB "pavgusb" 13 #define PAVGB "pavgusb"
14 #elif defined ( HAVE_MMX2 ) 14 #elif HAVE_MMX2
15 #define PREFETCH "prefetchnta" 15 #define PREFETCH "prefetchnta"
16 #define PREFETCHW "prefetcht0" 16 #define PREFETCHW "prefetcht0"
17 #define PAVGB "pavgb" 17 #define PAVGB "pavgb"
18 #else 18 #else
19 #define PREFETCH " # nop" 19 #define PREFETCH " # nop"
20 #define PREFETCHW " # nop" 20 #define PREFETCHW " # nop"
21 #endif 21 #endif
22 22
23 #ifdef HAVE_3DNOW 23 #if HAVE_3DNOW
24 /* On K6 femms is faster than emms. On K7 femms is directly mapped to emms. */ 24 /* On K6 femms is faster than emms. On K7 femms is directly mapped to emms. */
25 #define EMMS "femms" 25 #define EMMS "femms"
26 #else 26 #else
27 #define EMMS "emms" 27 #define EMMS "emms"
28 #endif 28 #endif
29 29
30 static inline void RENAME(vo_draw_alpha_yv12)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){ 30 static inline void RENAME(vo_draw_alpha_yv12)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){
31 int y; 31 int y;
32 #if defined(FAST_OSD) && !defined(HAVE_MMX) 32 #if defined(FAST_OSD) && !HAVE_MMX
33 w=w>>1; 33 w=w>>1;
34 #endif 34 #endif
35 #ifdef HAVE_MMX 35 #if HAVE_MMX
36 __asm__ volatile( 36 __asm__ volatile(
37 "pcmpeqb %%mm5, %%mm5\n\t" // F..F 37 "pcmpeqb %%mm5, %%mm5\n\t" // F..F
38 "movq %%mm5, %%mm4\n\t" 38 "movq %%mm5, %%mm4\n\t"
39 "movq %%mm5, %%mm7\n\t" 39 "movq %%mm5, %%mm7\n\t"
40 "psllw $8, %%mm5\n\t" //FF00FF00FF00 40 "psllw $8, %%mm5\n\t" //FF00FF00FF00
41 "psrlw $8, %%mm4\n\t" //00FF00FF00FF 41 "psrlw $8, %%mm4\n\t" //00FF00FF00FF
42 ::); 42 ::);
43 #endif 43 #endif
44 for(y=0;y<h;y++){ 44 for(y=0;y<h;y++){
45 register int x; 45 register int x;
46 #ifdef HAVE_MMX 46 #if HAVE_MMX
47 __asm__ volatile( 47 __asm__ volatile(
48 PREFETCHW" %0\n\t" 48 PREFETCHW" %0\n\t"
49 PREFETCH" %1\n\t" 49 PREFETCH" %1\n\t"
50 PREFETCH" %2\n\t" 50 PREFETCH" %2\n\t"
51 ::"m"(*dstbase),"m"(*srca),"m"(*src):"memory"); 51 ::"m"(*dstbase),"m"(*srca),"m"(*src):"memory");
89 #endif 89 #endif
90 src+=srcstride; 90 src+=srcstride;
91 srca+=srcstride; 91 srca+=srcstride;
92 dstbase+=dststride; 92 dstbase+=dststride;
93 } 93 }
94 #ifdef HAVE_MMX 94 #if HAVE_MMX
95 __asm__ volatile(EMMS:::"memory"); 95 __asm__ volatile(EMMS:::"memory");
96 #endif 96 #endif
97 return; 97 return;
98 } 98 }
99 99
100 static inline void RENAME(vo_draw_alpha_yuy2)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){ 100 static inline void RENAME(vo_draw_alpha_yuy2)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){
101 int y; 101 int y;
102 #if defined(FAST_OSD) && !defined(HAVE_MMX) 102 #if defined(FAST_OSD) && !HAVE_MMX
103 w=w>>1; 103 w=w>>1;
104 #endif 104 #endif
105 #ifdef HAVE_MMX 105 #if HAVE_MMX
106 __asm__ volatile( 106 __asm__ volatile(
107 "pxor %%mm7, %%mm7\n\t" 107 "pxor %%mm7, %%mm7\n\t"
108 "pcmpeqb %%mm5, %%mm5\n\t" // F..F 108 "pcmpeqb %%mm5, %%mm5\n\t" // F..F
109 "movq %%mm5, %%mm6\n\t" 109 "movq %%mm5, %%mm6\n\t"
110 "movq %%mm5, %%mm4\n\t" 110 "movq %%mm5, %%mm4\n\t"
112 "psrlw $8, %%mm4\n\t" //00FF00FF00FF 112 "psrlw $8, %%mm4\n\t" //00FF00FF00FF
113 ::); 113 ::);
114 #endif 114 #endif
115 for(y=0;y<h;y++){ 115 for(y=0;y<h;y++){
116 register int x; 116 register int x;
117 #ifdef HAVE_MMX 117 #if HAVE_MMX
118 __asm__ volatile( 118 __asm__ volatile(
119 PREFETCHW" %0\n\t" 119 PREFETCHW" %0\n\t"
120 PREFETCH" %1\n\t" 120 PREFETCH" %1\n\t"
121 PREFETCH" %2\n\t" 121 PREFETCH" %2\n\t"
122 ::"m"(*dstbase),"m"(*srca),"m"(*src)); 122 ::"m"(*dstbase),"m"(*srca),"m"(*src));
161 #endif 161 #endif
162 src+=srcstride; 162 src+=srcstride;
163 srca+=srcstride; 163 srca+=srcstride;
164 dstbase+=dststride; 164 dstbase+=dststride;
165 } 165 }
166 #ifdef HAVE_MMX 166 #if HAVE_MMX
167 __asm__ volatile(EMMS:::"memory"); 167 __asm__ volatile(EMMS:::"memory");
168 #endif 168 #endif
169 return; 169 return;
170 } 170 }
171 171
193 } 193 }
194 } 194 }
195 195
196 static inline void RENAME(vo_draw_alpha_rgb24)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){ 196 static inline void RENAME(vo_draw_alpha_rgb24)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){
197 int y; 197 int y;
198 #ifdef HAVE_MMX 198 #if HAVE_MMX
199 __asm__ volatile( 199 __asm__ volatile(
200 "pxor %%mm7, %%mm7\n\t" 200 "pxor %%mm7, %%mm7\n\t"
201 "pcmpeqb %%mm6, %%mm6\n\t" // F..F 201 "pcmpeqb %%mm6, %%mm6\n\t" // F..F
202 ::); 202 ::);
203 #endif 203 #endif
204 for(y=0;y<h;y++){ 204 for(y=0;y<h;y++){
205 register unsigned char *dst = dstbase; 205 register unsigned char *dst = dstbase;
206 register int x; 206 register int x;
207 #if defined(ARCH_X86) && (!defined(ARCH_X86_64) || defined(HAVE_MMX)) 207 #if ARCH_X86 && (!ARCH_X86_64 || HAVE_MMX)
208 #ifdef HAVE_MMX 208 #if HAVE_MMX
209 __asm__ volatile( 209 __asm__ volatile(
210 PREFETCHW" %0\n\t" 210 PREFETCHW" %0\n\t"
211 PREFETCH" %1\n\t" 211 PREFETCH" %1\n\t"
212 PREFETCH" %2\n\t" 212 PREFETCH" %2\n\t"
213 ::"m"(*dst),"m"(*srca),"m"(*src):"memory"); 213 ::"m"(*dst),"m"(*srca),"m"(*src):"memory");
293 #endif /* arch_x86 */ 293 #endif /* arch_x86 */
294 src+=srcstride; 294 src+=srcstride;
295 srca+=srcstride; 295 srca+=srcstride;
296 dstbase+=dststride; 296 dstbase+=dststride;
297 } 297 }
298 #ifdef HAVE_MMX 298 #if HAVE_MMX
299 __asm__ volatile(EMMS:::"memory"); 299 __asm__ volatile(EMMS:::"memory");
300 #endif 300 #endif
301 return; 301 return;
302 } 302 }
303 303
304 static inline void RENAME(vo_draw_alpha_rgb32)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){ 304 static inline void RENAME(vo_draw_alpha_rgb32)(int w,int h, unsigned char* src, unsigned char *srca, int srcstride, unsigned char* dstbase,int dststride){
305 int y; 305 int y;
306 #ifdef WORDS_BIGENDIAN 306 #ifdef WORDS_BIGENDIAN
307 dstbase++; 307 dstbase++;
308 #endif 308 #endif
309 #ifdef HAVE_MMX 309 #if HAVE_MMX
310 #ifdef HAVE_3DNOW 310 #if HAVE_3DNOW
311 __asm__ volatile( 311 __asm__ volatile(
312 "pxor %%mm7, %%mm7\n\t" 312 "pxor %%mm7, %%mm7\n\t"
313 "pcmpeqb %%mm6, %%mm6\n\t" // F..F 313 "pcmpeqb %%mm6, %%mm6\n\t" // F..F
314 ::); 314 ::);
315 #else /* HAVE_3DNOW */ 315 #else /* HAVE_3DNOW */
322 ::); 322 ::);
323 #endif /* HAVE_3DNOW */ 323 #endif /* HAVE_3DNOW */
324 #endif /* HAVE_MMX */ 324 #endif /* HAVE_MMX */
325 for(y=0;y<h;y++){ 325 for(y=0;y<h;y++){
326 register int x; 326 register int x;
327 #if defined(ARCH_X86) && (!defined(ARCH_X86_64) || defined(HAVE_MMX)) 327 #if ARCH_X86 && (!ARCH_X86_64 || HAVE_MMX)
328 #ifdef HAVE_MMX 328 #if HAVE_MMX
329 #ifdef HAVE_3DNOW 329 #if HAVE_3DNOW
330 __asm__ volatile( 330 __asm__ volatile(
331 PREFETCHW" %0\n\t" 331 PREFETCHW" %0\n\t"
332 PREFETCH" %1\n\t" 332 PREFETCH" %1\n\t"
333 PREFETCH" %2\n\t" 333 PREFETCH" %2\n\t"
334 ::"m"(*dstbase),"m"(*srca),"m"(*src):"memory"); 334 ::"m"(*dstbase),"m"(*srca),"m"(*src):"memory");
458 #endif /* arch_x86 */ 458 #endif /* arch_x86 */
459 src+=srcstride; 459 src+=srcstride;
460 srca+=srcstride; 460 srca+=srcstride;
461 dstbase+=dststride; 461 dstbase+=dststride;
462 } 462 }
463 #ifdef HAVE_MMX 463 #if HAVE_MMX
464 __asm__ volatile(EMMS:::"memory"); 464 __asm__ volatile(EMMS:::"memory");
465 #endif 465 #endif
466 return; 466 return;
467 } 467 }