///
/// Replacement of dct64() with AMD's 3DNowEx(DSP)! SIMD operations support
///
/// This code is based on 'dct64_3dnow.s' by Syuuhei Kashiyama
/// <squash@mb.kcom.ne.jp>; only the following kinds of changes have been made:
///
/// - added the new opcodes PSWAPD and PFPNACC (their semantics are sketched below)
/// - decreased the number of opcodes, as suggested by the K7 manual
///   (using memory references as instruction operands)
/// - Phase 6 is rewritten with a mix of integer and MMX opcodes
/// - changed the function name to support automatic 3DNowEx! detection
///
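/// Semantics of the two new opcodes, as a rough sketch (AT&T operand order
/// "op src,dst"; each MMX register holds two packed single-precision floats,
/// written here as {lo, hi}):
///
///   pswapd  src,dst  =>  dst = { src.hi, src.lo }
///   pfpnacc src,dst  =>  dst = { dst.lo - dst.hi, src.lo + src.hi }
///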
/// Note: because K7 processors are aggressive out-of-order three-way
/// superscalar CPUs, instruction ordering is not significant for them.
///
/// Modified by Nick Kurshev <nickols_k@mail.ru>
///
/// The author of this program disclaims all expressed or implied
/// warranties with regard to this program, and in no event shall the
/// author of this program be liable for anything resulting from the use of
/// this program. Use it at your own risk.
///

.globl dct64_3dnowex
.type dct64_3dnowex,@function

/* Discrete Cosine Transform (DCT) for subband synthesis */
/* void dct64(real *a,real *b,real *c) */
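/*
   The routine mirrors the scalar dct64(): five butterfly phases over two
   stack buffers (tmp1 at %ebx, tmp2 at %esi), followed by an output scatter
   ("Phase6") into a (%ebp) and b (%edx). A rough C sketch of the first
   phase, assuming the usual scalar layout in which pnts[0] points to 16
   coefficients (names here are illustrative only):

       for (i = 0; i < 16; i++) {
           tmp1[i]      = c[i] + c[31 - i];
           tmp1[31 - i] = (c[i] - c[31 - i]) * pnts[0][i];
       }

   Each 3DNow! group below handles two values of i at once: movq loads
   c[i]|c[i+1], movd+punpckldq builds the reversed pair c[31-i]|c[30-i],
   and pswapd restores the output order before the store.
*/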
dct64_3dnowex:
subl $256,%esp
pushl %ebp
pushl %edi
pushl %esi
pushl %ebx
leal 16(%esp),%ebx /* ebx -> real tmp1[32] */
movl 284(%esp),%edi /* edi -> c */
movl 276(%esp),%ebp /* ebp -> a */
movl 280(%esp),%edx /* edx -> b */
leal 128(%ebx),%esi /* esi -> real tmp2[32] */

/ femms

// 1
movl pnts,%eax

movq 0(%edi),%mm0 /* mm0 = c[0x00] | c[0x01]*/
movq %mm0,%mm1 /* mm1 = mm0 */
movd 124(%edi),%mm2 /* mm2 = c[0x1f] */
punpckldq 120(%edi),%mm2 /* mm2 = c[0x1f] | c[0x1E] */
pfadd %mm2,%mm0 /* mm0 = c[0x00]+c[0x1F] | c[0x1E]+c[0x01] */
movq %mm0,0(%ebx) /* tmp1[0, 1] = mm0 */
pfsub %mm2,%mm1 /* c[0x00]-c[0x1f] | c[0x01]-c[0x1e] */
pfmul 0(%eax),%mm1 /* (c[0x00]-c[0x1f])*pnts[0]|(c[0x01]-c[0x1e])*pnts[1]*/
pswapd %mm1, %mm1 /* (c[0x01]-c[0x1e])*pnts[1]|(c[0x00]-c[0x1f])*pnts[0]*/
movq %mm1, 120(%ebx) /* tmp1[30, 31]=mm1 */

movq 8(%edi),%mm4
movq %mm4,%mm5
movd 116(%edi),%mm6
punpckldq 112(%edi),%mm6
pfadd %mm6,%mm4
movq %mm4,8(%ebx)
pfsub %mm6,%mm5
pfmul 8(%eax),%mm5
pswapd %mm5, %mm5
movq %mm5, 112(%ebx)

movq 16(%edi),%mm0
movq %mm0,%mm1
movd 108(%edi),%mm2
punpckldq 104(%edi),%mm2
pfadd %mm2,%mm0
movq %mm0,16(%ebx)
pfsub %mm2,%mm1
pfmul 16(%eax),%mm1
pswapd %mm1, %mm1
movq %mm1, 104(%ebx)

movq 24(%edi),%mm4
movq %mm4,%mm5
movd 100(%edi),%mm6
punpckldq 96(%edi),%mm6
pfadd %mm6,%mm4
movq %mm4,24(%ebx)
pfsub %mm6,%mm5
pfmul 24(%eax),%mm5
pswapd %mm5, %mm5
movq %mm5, 96(%ebx)

movq 32(%edi),%mm0
movq %mm0,%mm1
movd 92(%edi),%mm2
punpckldq 88(%edi),%mm2
pfadd %mm2,%mm0
movq %mm0,32(%ebx)
pfsub %mm2,%mm1
pfmul 32(%eax),%mm1
pswapd %mm1, %mm1
movq %mm1, 88(%ebx)

movq 40(%edi),%mm4
movq %mm4,%mm5
movd 84(%edi),%mm6
punpckldq 80(%edi),%mm6
pfadd %mm6,%mm4
movq %mm4,40(%ebx)
pfsub %mm6,%mm5
pfmul 40(%eax),%mm5
pswapd %mm5, %mm5
movq %mm5, 80(%ebx)

movq 48(%edi),%mm0
movq %mm0,%mm1
movd 76(%edi),%mm2
punpckldq 72(%edi),%mm2
pfadd %mm2,%mm0
movq %mm0,48(%ebx)
pfsub %mm2,%mm1
pfmul 48(%eax),%mm1
pswapd %mm1, %mm1
movq %mm1, 72(%ebx)

movq 56(%edi),%mm4
movq %mm4,%mm5
movd 68(%edi),%mm6
punpckldq 64(%edi),%mm6
pfadd %mm6,%mm4
movq %mm4,56(%ebx)
pfsub %mm6,%mm5
pfmul 56(%eax),%mm5
pswapd %mm5, %mm5
movq %mm5, 64(%ebx)

// 2
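/*
   Phase 2 butterfly, roughly (scalar form, with pnts[1] assumed to hold 8
   coefficients; the mirrored upper half takes the difference the other way
   round, which is what pfsubr implements below):

       for (i = 0; i < 8; i++) {
           tmp2[i]      = tmp1[i] + tmp1[15 - i];
           tmp2[15 - i] = (tmp1[i] - tmp1[15 - i]) * pnts[1][i];
           tmp2[16 + i] = tmp1[16 + i] + tmp1[31 - i];
           tmp2[31 - i] = (tmp1[31 - i] - tmp1[16 + i]) * pnts[1][i];
       }
*/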
movl pnts+4,%eax
/ 0, 14
movq 0(%ebx),%mm0 /* mm0 = tmp1[0] | tmp1[1] */
movq %mm0,%mm1
movd 60(%ebx),%mm2 /* mm2 = tmp1[0x0F] */
punpckldq 56(%ebx),%mm2 /* mm2 = tmp1[0x0F] | tmp1[0x0E] */
movq 0(%eax),%mm3 /* mm3 = pnts[0] | pnts[1] */
pfadd %mm2,%mm0 /* mm0 = tmp1[0]+tmp1[0x0F]|tmp1[1]+tmp1[0x0E]*/
movq %mm0,0(%esi) /* tmp2[0, 1] = mm0 */
pfsub %mm2,%mm1 /* mm1 = tmp1[0]-tmp1[0x0F]|tmp1[1]-tmp1[0x0E]*/
pfmul %mm3,%mm1 /* mm1 = (tmp1[0]-tmp1[0x0F])*pnts[0]|(tmp1[1]-tmp1[0x0E])*pnts[1]*/
pswapd %mm1, %mm1 /* mm1 = (tmp1[1]-tmp1[0x0E])*pnts[1]|(tmp1[0]-tmp1[0x0F])*pnts[0]*/
movq %mm1, 56(%esi) /* tmp2[0x0E, 0x0F] = mm1 */
/ 16, 30
movq 64(%ebx),%mm0
movq %mm0,%mm1
movd 124(%ebx),%mm2
punpckldq 120(%ebx),%mm2
pfadd %mm2,%mm0
movq %mm0,64(%esi)
pfsubr %mm2,%mm1
pfmul %mm3,%mm1
pswapd %mm1, %mm1
movq %mm1, 120(%esi)
movq 8(%ebx),%mm4
/ 2, 12
movq %mm4,%mm5
movd 52(%ebx),%mm6
punpckldq 48(%ebx),%mm6
movq 8(%eax),%mm7
pfadd %mm6,%mm4
movq %mm4,8(%esi)
pfsub %mm6,%mm5
pfmul %mm7,%mm5
pswapd %mm5, %mm5
movq %mm5, 48(%esi)
movq 72(%ebx),%mm4
/ 18, 28
movq %mm4,%mm5
movd 116(%ebx),%mm6
punpckldq 112(%ebx),%mm6
pfadd %mm6,%mm4
movq %mm4,72(%esi)
pfsubr %mm6,%mm5
pfmul %mm7,%mm5
pswapd %mm5, %mm5
movq %mm5, 112(%esi)
movq 16(%ebx),%mm0
/ 4, 10
movq %mm0,%mm1
movd 44(%ebx),%mm2
punpckldq 40(%ebx),%mm2
movq 16(%eax),%mm3
pfadd %mm2,%mm0
movq %mm0,16(%esi)
pfsub %mm2,%mm1
pfmul %mm3,%mm1
pswapd %mm1, %mm1
movq %mm1, 40(%esi)
movq 80(%ebx),%mm0
/ 20, 26
movq %mm0,%mm1
movd 108(%ebx),%mm2
punpckldq 104(%ebx),%mm2
pfadd %mm2,%mm0
movq %mm0,80(%esi)
pfsubr %mm2,%mm1
pfmul %mm3,%mm1
pswapd %mm1, %mm1
movq %mm1, 104(%esi)
movq 24(%ebx),%mm4
/ 6, 8
movq %mm4,%mm5
movd 36(%ebx),%mm6
punpckldq 32(%ebx),%mm6
movq 24(%eax),%mm7
pfadd %mm6,%mm4
movq %mm4,24(%esi)
pfsub %mm6,%mm5
pfmul %mm7,%mm5
pswapd %mm5, %mm5
movq %mm5, 32(%esi)
movq 88(%ebx),%mm4
/ 22, 24
movq %mm4,%mm5
movd 100(%ebx),%mm6
punpckldq 96(%ebx),%mm6
pfadd %mm6,%mm4
movq %mm4,88(%esi)
pfsubr %mm6,%mm5
pfmul %mm7,%mm5
pswapd %mm5, %mm5
movq %mm5, 96(%esi)

// 3
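/*
   Phase 3 repeats the butterfly inside each block of eight, with pnts[2]
   assumed to hold 4 coefficients; within each group of sixteen the second
   block of eight takes the reversed difference (pfsubr). Roughly:

       for (b = 0; b < 32; b += 16)
           for (i = 0; i < 4; i++) {
               tmp1[b + i]      = tmp2[b + i] + tmp2[b + 7 - i];
               tmp1[b + 7 - i]  = (tmp2[b + i] - tmp2[b + 7 - i]) * pnts[2][i];
               tmp1[b + 8 + i]  = tmp2[b + 8 + i] + tmp2[b + 15 - i];
               tmp1[b + 15 - i] = (tmp2[b + 15 - i] - tmp2[b + 8 + i]) * pnts[2][i];
           }
*/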
movl pnts+8,%eax
movq 0(%eax),%mm0
movq 8(%eax),%mm1
movq 0(%esi),%mm2
/ 0, 6
movq %mm2,%mm3
movd 28(%esi),%mm4
punpckldq 24(%esi),%mm4
pfadd %mm4,%mm2
pfsub %mm4,%mm3
pfmul %mm0,%mm3
movq %mm2,0(%ebx)
pswapd %mm3, %mm3
movq %mm3, 24(%ebx)
movq 8(%esi),%mm5
/ 2, 4
movq %mm5,%mm6
movd 20(%esi),%mm7
punpckldq 16(%esi),%mm7
pfadd %mm7,%mm5
pfsub %mm7,%mm6
pfmul %mm1,%mm6
movq %mm5,8(%ebx)
pswapd %mm6, %mm6
movq %mm6, 16(%ebx)
movq 32(%esi),%mm2
/ 8, 14
movq %mm2,%mm3
movd 60(%esi),%mm4
punpckldq 56(%esi),%mm4
pfadd %mm4,%mm2
pfsubr %mm4,%mm3
pfmul %mm0,%mm3
movq %mm2,32(%ebx)
pswapd %mm3, %mm3
movq %mm3, 56(%ebx)
movq 40(%esi),%mm5
/ 10, 12
movq %mm5,%mm6
movd 52(%esi),%mm7
punpckldq 48(%esi),%mm7
pfadd %mm7,%mm5
pfsubr %mm7,%mm6
pfmul %mm1,%mm6
movq %mm5,40(%ebx)
pswapd %mm6, %mm6
movq %mm6, 48(%ebx)
movq 64(%esi),%mm2
/ 16, 22
movq %mm2,%mm3
movd 92(%esi),%mm4
punpckldq 88(%esi),%mm4
pfadd %mm4,%mm2
pfsub %mm4,%mm3
pfmul %mm0,%mm3
movq %mm2,64(%ebx)
pswapd %mm3, %mm3
movq %mm3, 88(%ebx)
movq 72(%esi),%mm5
/ 18, 20
movq %mm5,%mm6
movd 84(%esi),%mm7
punpckldq 80(%esi),%mm7
pfadd %mm7,%mm5
pfsub %mm7,%mm6
pfmul %mm1,%mm6
movq %mm5,72(%ebx)
pswapd %mm6, %mm6
movq %mm6, 80(%ebx)
movq 96(%esi),%mm2
/ 24, 30
movq %mm2,%mm3
movd 124(%esi),%mm4
punpckldq 120(%esi),%mm4
pfadd %mm4,%mm2
pfsubr %mm4,%mm3
pfmul %mm0,%mm3
movq %mm2,96(%ebx)
pswapd %mm3, %mm3
movq %mm3, 120(%ebx)
movq 104(%esi),%mm5
/ 26, 28
movq %mm5,%mm6
movd 116(%esi),%mm7
punpckldq 112(%esi),%mm7
pfadd %mm7,%mm5
pfsubr %mm7,%mm6
pfmul %mm1,%mm6
movq %mm5,104(%ebx)
pswapd %mm6, %mm6
movq %mm6, 112(%ebx)

// 4
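/*
   Phase 4: butterflies inside each block of four, with pnts[3] assumed to
   hold 2 coefficients; every second block of four takes the reversed
   difference (pfsubr). Roughly:

       for (b = 0; b < 32; b += 8)
           for (i = 0; i < 2; i++) {
               tmp2[b + i]     = tmp1[b + i] + tmp1[b + 3 - i];
               tmp2[b + 3 - i] = (tmp1[b + i] - tmp1[b + 3 - i]) * pnts[3][i];
               tmp2[b + 4 + i] = tmp1[b + 4 + i] + tmp1[b + 7 - i];
               tmp2[b + 7 - i] = (tmp1[b + 7 - i] - tmp1[b + 4 + i]) * pnts[3][i];
           }
*/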
movl pnts+12,%eax
movq 0(%eax),%mm0 /* mm0 = pnts[0] | pnts[1] */
movq 0(%ebx),%mm1 /* mm1 = tmp1[0] | tmp1[1] */
/ 0
movq %mm1,%mm2
movd 12(%ebx),%mm3 /* mm3 = tmp1[3] */
punpckldq 8(%ebx),%mm3 /* mm3 = tmp1[3] | tmp1[2] */
pfadd %mm3,%mm1 /* mm1 = tmp1[0]+tmp1[3] | tmp1[1]+tmp1[2]*/
pfsub %mm3,%mm2 /* mm2 = tmp1[0]-tmp1[3] | tmp1[1]-tmp1[2]*/
pfmul %mm0,%mm2 /* mm2 = (tmp1[0]-tmp1[3])*pnts[0]|(tmp1[1]-tmp1[2])*pnts[1]*/
movq %mm1,0(%esi) /* tmp2[0, 1] = mm1 */
pswapd %mm2, %mm2 /* mm2 = (tmp1[1]-tmp1[2])*pnts[1]|(tmp1[0]-tmp1[3])*pnts[0] */
movq %mm2, 8(%esi) /* tmp2[2, 3] = mm2 */
movq 16(%ebx),%mm4
/ 4
movq %mm4,%mm5
movd 28(%ebx),%mm6
punpckldq 24(%ebx),%mm6
pfadd %mm6,%mm4
pfsubr %mm6,%mm5
pfmul %mm0,%mm5
movq %mm4,16(%esi)
pswapd %mm5, %mm5
movq %mm5, 24(%esi)
movq 32(%ebx),%mm1
/ 8
movq %mm1,%mm2
movd 44(%ebx),%mm3
punpckldq 40(%ebx),%mm3
pfadd %mm3,%mm1
pfsub %mm3,%mm2
pfmul %mm0,%mm2
movq %mm1,32(%esi)
pswapd %mm2, %mm2
movq %mm2, 40(%esi)
movq 48(%ebx),%mm4
/ 12
movq %mm4,%mm5
movd 60(%ebx),%mm6
punpckldq 56(%ebx),%mm6
pfadd %mm6,%mm4
pfsubr %mm6,%mm5
pfmul %mm0,%mm5
movq %mm4,48(%esi)
pswapd %mm5, %mm5
movq %mm5, 56(%esi)
movq 64(%ebx),%mm1
/ 16
movq %mm1,%mm2
movd 76(%ebx),%mm3
punpckldq 72(%ebx),%mm3
pfadd %mm3,%mm1
pfsub %mm3,%mm2
pfmul %mm0,%mm2
movq %mm1,64(%esi)
pswapd %mm2, %mm2
movq %mm2, 72(%esi)
movq 80(%ebx),%mm4
/ 20
movq %mm4,%mm5
movd 92(%ebx),%mm6
punpckldq 88(%ebx),%mm6
pfadd %mm6,%mm4
pfsubr %mm6,%mm5
pfmul %mm0,%mm5
movq %mm4,80(%esi)
pswapd %mm5, %mm5
movq %mm5, 88(%esi)
movq 96(%ebx),%mm1
/ 24
movq %mm1,%mm2
movd 108(%ebx),%mm3
punpckldq 104(%ebx),%mm3
pfadd %mm3,%mm1
pfsub %mm3,%mm2
pfmul %mm0,%mm2
movq %mm1,96(%esi)
pswapd %mm2, %mm2
movq %mm2, 104(%esi)
movq 112(%ebx),%mm4
/ 28
movq %mm4,%mm5
movd 124(%ebx),%mm6
punpckldq 120(%ebx),%mm6
pfadd %mm6,%mm4
pfsubr %mm6,%mm5
pfmul %mm0,%mm5
movq %mm4,112(%esi)
pswapd %mm5, %mm5
movq %mm5, 120(%esi)

// 5
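/*
   Phase 5 computes, for each pair {x, y} of tmp2, the pair
   { x + y, (x - y) * cos0 } in one register (cos0 = pnts[4][0]):

       pfpnacc mmN, mmN     mmN = { x - y, x + y }
       pswapd  mmN, mmN     mmN = { x + y, x - y }
       pfmul   %mm1, mmN    mm1 = { 1.0, cos0 }

   For the second pair of every group of four the difference is taken the
   other way round (the extra pfmul with mm0 = { 1.0, -1.0 }), and the
   psrlq/pfacc/pfadd sequences fold the partial results of neighbouring
   pairs together, merging the remaining scalar dct64 phases.
*/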
movl $-1,%eax
movd %eax,%mm1
movl $1,%eax
movd %eax,%mm0
/ L | H
punpckldq %mm1,%mm0
pi2fd %mm0,%mm0 /* mm0 = 1.0 | -1.0 */
movd %eax,%mm1
pi2fd %mm1,%mm1
movl pnts+16,%eax
movd 0(%eax),%mm2
punpckldq %mm2,%mm1 /* mm1 = 1.0 | cos0 */
movq 0(%esi),%mm2 /* mm2 = tmp2[0] | tmp2[1] */
/ 0
pfpnacc %mm2, %mm2
pswapd %mm2, %mm2 /* mm2 = tmp2[0]+tmp2[1]|tmp2[0]-tmp2[1]*/
pfmul %mm1,%mm2 /* mm2 = tmp2[0]+tmp2[1]|(tmp2[0]-tmp2[1])*cos0*/
movq %mm2,0(%ebx) /* tmp1[0, 1] = mm2 */
movq 8(%esi),%mm4 /* mm4 = tmp2[2] | tmp2[3]*/
pfpnacc %mm4, %mm4
pswapd %mm4, %mm4 /* mm4 = tmp2[2]+tmp2[3]|tmp2[2]-tmp2[3]*/
pfmul %mm0,%mm4 /* mm4 = tmp2[2]+tmp2[3]|tmp2[3]-tmp2[2]*/
pfmul %mm1,%mm4 /* mm4 = tmp2[2]+tmp2[3]|(tmp2[3]-tmp2[2])*cos0*/
movq %mm4,%mm5
psrlq $32,%mm5 /* mm5 = (tmp2[3]-tmp2[2])*cos0 */
pfacc %mm5,%mm4 /* mm4 = tmp2[2]+tmp2[3]+(tmp2[3]-tmp2[2])*cos0|(tmp2[3]-tmp2[2])*cos0*/
movq %mm4,8(%ebx) /* tmp1[2, 3] = mm4 */
movq 16(%esi),%mm2
/ 4
pfpnacc %mm2, %mm2
pswapd %mm2, %mm2

pfmul %mm1,%mm2
movq 24(%esi),%mm4
pfpnacc %mm4, %mm4
pswapd %mm4, %mm4

pfmul %mm0,%mm4
pfmul %mm1,%mm4
movq %mm4,%mm5
psrlq $32,%mm5
pfacc %mm5,%mm4
movq %mm2,%mm3
psrlq $32,%mm3
pfadd %mm4,%mm2
pfadd %mm3,%mm4
movq %mm2,16(%ebx)
movq %mm4,24(%ebx)
movq 32(%esi),%mm2
/ 8
pfpnacc %mm2, %mm2
pswapd %mm2, %mm2

pfmul %mm1,%mm2
movq %mm2,32(%ebx)
movq 40(%esi),%mm4
pfpnacc %mm4, %mm4
pswapd %mm4, %mm4
pfmul %mm0,%mm4
pfmul %mm1,%mm4
movq %mm4,%mm5
psrlq $32,%mm5
pfacc %mm5,%mm4
movq %mm4,40(%ebx)
movq 48(%esi),%mm2
/ 12
pfpnacc %mm2, %mm2
pswapd %mm2, %mm2
pfmul %mm1,%mm2
movq 56(%esi),%mm4
pfpnacc %mm4, %mm4
pswapd %mm4, %mm4
pfmul %mm0,%mm4
pfmul %mm1,%mm4
movq %mm4,%mm5
psrlq $32,%mm5
pfacc %mm5,%mm4
movq %mm2,%mm3
psrlq $32,%mm3
pfadd %mm4,%mm2
pfadd %mm3,%mm4
movq %mm2,48(%ebx)
movq %mm4,56(%ebx)
movq 64(%esi),%mm2
/ 16
pfpnacc %mm2, %mm2
pswapd %mm2, %mm2
pfmul %mm1,%mm2
movq %mm2,64(%ebx)
movq 72(%esi),%mm4
pfpnacc %mm4, %mm4
pswapd %mm4, %mm4
pfmul %mm0,%mm4
pfmul %mm1,%mm4
movq %mm4,%mm5
psrlq $32,%mm5
pfacc %mm5,%mm4
movq %mm4,72(%ebx)
movq 80(%esi),%mm2
/ 20
pfpnacc %mm2, %mm2
pswapd %mm2, %mm2
pfmul %mm1,%mm2
movq 88(%esi),%mm4
pfpnacc %mm4, %mm4
pswapd %mm4, %mm4
pfmul %mm0,%mm4
pfmul %mm1,%mm4
movq %mm4,%mm5
psrlq $32,%mm5
pfacc %mm5,%mm4
movq %mm2,%mm3
psrlq $32,%mm3
pfadd %mm4,%mm2
pfadd %mm3,%mm4
movq %mm2,80(%ebx)
movq %mm4,88(%ebx)
movq 96(%esi),%mm2
/ 24
pfpnacc %mm2, %mm2
pswapd %mm2, %mm2
pfmul %mm1,%mm2
movq %mm2,96(%ebx)
movq 104(%esi),%mm4
pfpnacc %mm4, %mm4
pswapd %mm4, %mm4
pfmul %mm0,%mm4
pfmul %mm1,%mm4
movq %mm4,%mm5
psrlq $32,%mm5
pfacc %mm5,%mm4
movq %mm4,104(%ebx)
movq 112(%esi),%mm2
/ 28
pfpnacc %mm2, %mm2
pswapd %mm2, %mm2
pfmul %mm1,%mm2
movq 120(%esi),%mm4
pfpnacc %mm4, %mm4
pswapd %mm4, %mm4
pfmul %mm0,%mm4
pfmul %mm1,%mm4
movq %mm4,%mm5
psrlq $32,%mm5
pfacc %mm5,%mm4
movq %mm2,%mm3
psrlq $32,%mm3
pfadd %mm4,%mm2
pfadd %mm3,%mm4
movq %mm2,112(%ebx)
movq %mm4,120(%ebx)

// Phase6
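/*
   Phase6 is the output scatter: every store lands at a multiple of 64 bytes
   (16 floats) inside a (%ebp) or b (%edx). Values passed through unchanged
   are copied with plain movl/movd, while combined outputs are built with
   pfadd; e.g. the first pfadd group below computes roughly

       a[0xE0] = tmp1[8] + tmp1[12];   (stored at 896(%ebp))
       b[0x20] = tmp1[9] + tmp1[13];   (stored at 128(%edx))

   with float indices. This is the "mixing of cpu and mmx opcodes" mentioned
   in the header.
*/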
movd 0(%ebx),%mm0
movd %mm0,1024(%ebp)
movl 4(%ebx),%eax
movl %eax,0(%ebp)
movl %eax,0(%edx)
movd 8(%ebx),%mm2
movd %mm2,512(%ebp)
movd 12(%ebx),%mm3
movd %mm3,512(%edx)

movl 16(%ebx),%eax
movl %eax,768(%ebp)
movd 20(%ebx),%mm5
movd %mm5,256(%edx)

movd 24(%ebx),%mm6
movd %mm6,256(%ebp)
movd 28(%ebx),%mm7
movd %mm7,768(%edx)

movq 32(%ebx),%mm0 /* mm0 = tmp1[8] | tmp1[9] */
movq 48(%ebx),%mm1 /* mm1 = tmp1[12] | tmp1[13] */
pfadd %mm1,%mm0 /* mm0 = tmp1[8]+tmp1[12]| tmp1[9]+tmp1[13]*/
movd %mm0,896(%ebp) /* a[0xE0] = tmp1[8]+tmp1[12] */
psrlq $32,%mm0
movd %mm0,128(%edx) /* b[0x20] = tmp1[9]+tmp1[13] */
movq 40(%ebx),%mm2
pfadd %mm2,%mm1
movd %mm1,640(%ebp)
psrlq $32,%mm1
movd %mm1,384(%edx)

movq 56(%ebx),%mm3
pfadd %mm3,%mm2
movd %mm2,384(%ebp)
psrlq $32,%mm2
movd %mm2,640(%edx)

movd 36(%ebx),%mm4
pfadd %mm4,%mm3
movd %mm3,128(%ebp)
psrlq $32,%mm3
movd %mm3,896(%edx)
movq 96(%ebx),%mm0
movq 64(%ebx),%mm1

movq 112(%ebx),%mm2
pfadd %mm2,%mm0
movq %mm0,%mm3
pfadd %mm1,%mm3
movd %mm3,960(%ebp)
psrlq $32,%mm3
movd %mm3,64(%edx)
movq 80(%ebx),%mm1
pfadd %mm1,%mm0
movd %mm0,832(%ebp)
psrlq $32,%mm0
movd %mm0,192(%edx)
movq 104(%ebx),%mm3
pfadd %mm3,%mm2
movq %mm2,%mm4
pfadd %mm1,%mm4
movd %mm4,704(%ebp)
psrlq $32,%mm4
movd %mm4,320(%edx)
movq 72(%ebx),%mm1
pfadd %mm1,%mm2
movd %mm2,576(%ebp)
psrlq $32,%mm2
movd %mm2,448(%edx)

movq 120(%ebx),%mm4
pfadd %mm4,%mm3
movq %mm3,%mm5
pfadd %mm1,%mm5
movd %mm5,448(%ebp)
psrlq $32,%mm5
movd %mm5,576(%edx)
movq 88(%ebx),%mm1
pfadd %mm1,%mm3
movd %mm3,320(%ebp)
psrlq $32,%mm3
movd %mm3,704(%edx)

movd 100(%ebx),%mm5
pfadd %mm5,%mm4
movq %mm4,%mm6
pfadd %mm1,%mm6
movd %mm6,192(%ebp)
psrlq $32,%mm6
movd %mm6,832(%edx)
movd 68(%ebx),%mm1
pfadd %mm1,%mm4
movd %mm4,64(%ebp)
psrlq $32,%mm4
movd %mm4,960(%edx)

/ femms

popl %ebx
popl %esi
popl %edi
popl %ebp
addl $256,%esp

ret $12 /* callee pops the three pointer arguments (12 bytes) */