///
/// Replacement of dct64() with AMD's 3DNowEx(DSP)! SIMD operations support
///
/// This code is based on 'dct64_3dnow.s' by Syuuhei Kashiyama
/// <squash@mb.kcom.ne.jp>; only some types of changes have been made:
///
///  - added new opcodes PSWAPD, PFPNACC
///  - decreased the number of opcodes (as suggested by the K7 manual)
///    (using memory references as operands of instructions)
///  - Phase 6 is rewritten with a mix of cpu and mmx opcodes
///  - changed the function name to support 3DNowEx! automatic detection
///  - negation of a 3dnow register was replaced with PXOR 0x80000000, MMi
///    instead of PFMUL, as suggested by the Athlon manual. (Two adjacent
///    PFMULs cannot be paired, but PXOR can be.)
///
/// note: because K7 processors are aggressive out-of-order three-way
///       superscalar ones, instruction order is not significant for them.
///
/// Modified by Nick Kurshev <nickols_k@mail.ru>
///
/// The author of this program disclaims all expressed or implied
/// warranties with regard to this program, and in no event shall the
/// author of this program be liable for whatever results from the use
/// of this program. Use it at your own risk.
///
.data
        .align 8
/* +0.0 | -0.0 : only the sign bit of the high dword is set, so a PXOR
 * with this constant negates the high float of an MMX pair (cheaper on
 * K7 than a PFMUL by -1.0, and pairable). */
plus_minus_3dnow: .long 0x00000000, 0x80000000
.text

        .globl dct64_3dnowex
        .type  dct64_3dnowex,@function

/*----------------------------------------------------------------------
 * Discrete Cosine Transform (DCT) for subband synthesis.
 *
 * C equivalent:  void dct64(real *a, real *b, real *c)
 *   c = input, 32 real samples (c[0x00]..c[0x1f])
 *   a, b = output windows written with strided movd/movl stores
 *
 * Scratch: 256 bytes of stack holding real tmp1[32] (-> %ebx) and
 * real tmp2[32] (-> %esi).  `pnts` is an external array of pointers to
 * the per-stage cosine coefficient tables.
 *
 * Clobbers: eax, edx, mm0-mm7, flags (ebx/esi/edi/ebp are saved).
 * NOTE(review): returns with `ret $12` (callee pops the three pointer
 * arguments) and never executes femms itself (both femms lines are
 * commented out) -- callers are expected to manage the MMX/FPU state
 * transition; verify against the call sites.
 *---------------------------------------------------------------------*/
dct64_3dnowex:
        subl    $256,%esp               /* reserve tmp1[32] + tmp2[32] */
        pushl   %ebp
        pushl   %edi
        pushl   %esi
        pushl   %ebx
        leal    16(%esp),%ebx           /* ebx -> real tmp1[32] */
        movl    284(%esp),%edi          /* edi -> c */
        movl    276(%esp),%ebp          /* ebp -> a */
        movl    280(%esp),%edx          /* edx -> b */
        leal    128(%ebx),%esi          /* esi -> real tmp2[32] */

/ femms

// Phase 1: butterflies c[i]+/-c[31-i] -> tmp1, differences scaled by pnts[0]
        movl    pnts,%eax

        movq    0(%edi),%mm0            /* mm0 = c[0x00] | c[0x01] */
        movq    %mm0,%mm1               /* mm1 = mm0 */
        movd    124(%edi),%mm2          /* mm2 = c[0x1f] */
        punpckldq 120(%edi),%mm2        /* mm2 = c[0x1f] | c[0x1e] */
        pfadd   %mm2,%mm0               /* mm0 = c[0x00]+c[0x1f] | c[0x01]+c[0x1e] */
        movq    %mm0,0(%ebx)            /* tmp1[0, 1] = mm0 */
        pfsub   %mm2,%mm1               /* c[0x00]-c[0x1f] | c[0x01]-c[0x1e] */
        pfmul   0(%eax),%mm1            /* (c[0x00]-c[0x1f])*pnts[0] | (c[0x01]-c[0x1e])*pnts[1] */
        pswapd  %mm1, %mm1              /* swap halves */
        movq    %mm1, 120(%ebx)         /* tmp1[30, 31] = mm1 */

        movq    8(%edi),%mm4
        movq    %mm4,%mm5
        movd    116(%edi),%mm6
        punpckldq 112(%edi),%mm6
        pfadd   %mm6,%mm4
        movq    %mm4,8(%ebx)
        pfsub   %mm6,%mm5
        pfmul   8(%eax),%mm5
        pswapd  %mm5, %mm5
        movq    %mm5, 112(%ebx)

        movq    16(%edi),%mm0
        movq    %mm0,%mm1
        movd    108(%edi),%mm2
        punpckldq 104(%edi),%mm2
        pfadd   %mm2,%mm0
        movq    %mm0,16(%ebx)
        pfsub   %mm2,%mm1
        pfmul   16(%eax),%mm1
        pswapd  %mm1, %mm1
        movq    %mm1, 104(%ebx)

        movq    24(%edi),%mm4
        movq    %mm4,%mm5
        movd    100(%edi),%mm6
        punpckldq 96(%edi),%mm6
        pfadd   %mm6,%mm4
        movq    %mm4,24(%ebx)
        pfsub   %mm6,%mm5
        pfmul   24(%eax),%mm5
        pswapd  %mm5, %mm5
        movq    %mm5, 96(%ebx)

        movq    32(%edi),%mm0
        movq    %mm0,%mm1
        movd    92(%edi),%mm2
        punpckldq 88(%edi),%mm2
        pfadd   %mm2,%mm0
        movq    %mm0,32(%ebx)
        pfsub   %mm2,%mm1
        pfmul   32(%eax),%mm1
        pswapd  %mm1, %mm1
        movq    %mm1, 88(%ebx)

        movq    40(%edi),%mm4
        movq    %mm4,%mm5
        movd    84(%edi),%mm6
        punpckldq 80(%edi),%mm6
        pfadd   %mm6,%mm4
        movq    %mm4,40(%ebx)
        pfsub   %mm6,%mm5
        pfmul   40(%eax),%mm5
        pswapd  %mm5, %mm5
        movq    %mm5, 80(%ebx)

        movq    48(%edi),%mm0
        movq    %mm0,%mm1
        movd    76(%edi),%mm2
        punpckldq 72(%edi),%mm2
        pfadd   %mm2,%mm0
        movq    %mm0,48(%ebx)
        pfsub   %mm2,%mm1
        pfmul   48(%eax),%mm1
        pswapd  %mm1, %mm1
        movq    %mm1, 72(%ebx)

        movq    56(%edi),%mm4
        movq    %mm4,%mm5
        movd    68(%edi),%mm6
        punpckldq 64(%edi),%mm6
        pfadd   %mm6,%mm4
        movq    %mm4,56(%ebx)
        pfsub   %mm6,%mm5
        pfmul   56(%eax),%mm5
        pswapd  %mm5, %mm5
        movq    %mm5, 64(%ebx)

// Phase 2: tmp1 -> tmp2, 16-point butterflies scaled by pnts[1]
        movl    pnts+4,%eax
/ 0, 14
        movq    0(%ebx),%mm0            /* mm0 = tmp1[0] | tmp1[1] */
        movq    %mm0,%mm1
        movd    60(%ebx),%mm2           /* mm2 = tmp1[0x0f] */
        punpckldq 56(%ebx),%mm2         /* mm2 = tmp1[0x0f] | tmp1[0x0e] */
        movq    0(%eax),%mm3            /* mm3 = pnts[0] | pnts[1] */
        pfadd   %mm2,%mm0               /* mm0 = tmp1[0]+tmp1[0x0f] | tmp1[1]+tmp1[0x0e] */
        movq    %mm0,0(%esi)            /* tmp2[0, 1] = mm0 */
        pfsub   %mm2,%mm1               /* mm1 = tmp1[0]-tmp1[0x0f] | tmp1[1]-tmp1[0x0e] */
        pfmul   %mm3,%mm1               /* scale the differences */
        pswapd  %mm1, %mm1
        movq    %mm1, 56(%esi)          /* tmp2[0x0e, 0x0f] = mm1 */
/ 16, 30
        movq    64(%ebx),%mm0
        movq    %mm0,%mm1
        movd    124(%ebx),%mm2
        punpckldq 120(%ebx),%mm2
        pfadd   %mm2,%mm0
        movq    %mm0,64(%esi)
        pfsubr  %mm2,%mm1               /* reversed subtract for the upper half */
        pfmul   %mm3,%mm1
        pswapd  %mm1, %mm1
        movq    %mm1, 120(%esi)
        movq    8(%ebx),%mm4
/ 2, 12
        movq    %mm4,%mm5
        movd    52(%ebx),%mm6
        punpckldq 48(%ebx),%mm6
        movq    8(%eax),%mm7
        pfadd   %mm6,%mm4
        movq    %mm4,8(%esi)
        pfsub   %mm6,%mm5
        pfmul   %mm7,%mm5
        pswapd  %mm5, %mm5
        movq    %mm5, 48(%esi)
        movq    72(%ebx),%mm4
/ 18, 28
        movq    %mm4,%mm5
        movd    116(%ebx),%mm6
        punpckldq 112(%ebx),%mm6
        pfadd   %mm6,%mm4
        movq    %mm4,72(%esi)
        pfsubr  %mm6,%mm5
        pfmul   %mm7,%mm5
        pswapd  %mm5, %mm5
        movq    %mm5, 112(%esi)
        movq    16(%ebx),%mm0
/ 4, 10
        movq    %mm0,%mm1
        movd    44(%ebx),%mm2
        punpckldq 40(%ebx),%mm2
        movq    16(%eax),%mm3
        pfadd   %mm2,%mm0
        movq    %mm0,16(%esi)
        pfsub   %mm2,%mm1
        pfmul   %mm3,%mm1
        pswapd  %mm1, %mm1
        movq    %mm1, 40(%esi)
        movq    80(%ebx),%mm0
/ 20, 26
        movq    %mm0,%mm1
        movd    108(%ebx),%mm2
        punpckldq 104(%ebx),%mm2
        pfadd   %mm2,%mm0
        movq    %mm0,80(%esi)
        pfsubr  %mm2,%mm1
        pfmul   %mm3,%mm1
        pswapd  %mm1, %mm1
        movq    %mm1, 104(%esi)
        movq    24(%ebx),%mm4
/ 6, 8
        movq    %mm4,%mm5
        movd    36(%ebx),%mm6
        punpckldq 32(%ebx),%mm6
        movq    24(%eax),%mm7
        pfadd   %mm6,%mm4
        movq    %mm4,24(%esi)
        pfsub   %mm6,%mm5
        pfmul   %mm7,%mm5
        pswapd  %mm5, %mm5
        movq    %mm5, 32(%esi)
        movq    88(%ebx),%mm4
/ 22, 24
        movq    %mm4,%mm5
        movd    100(%ebx),%mm6
        punpckldq 96(%ebx),%mm6
        pfadd   %mm6,%mm4
        movq    %mm4,88(%esi)
        pfsubr  %mm6,%mm5
        pfmul   %mm7,%mm5
        pswapd  %mm5, %mm5
        movq    %mm5, 96(%esi)

// Phase 3: tmp2 -> tmp1, 8-point butterflies scaled by pnts[2]
        movl    pnts+8,%eax
        movq    0(%eax),%mm0            /* mm0 = pnts[2][0] | pnts[2][1] */
        movq    8(%eax),%mm1            /* mm1 = pnts[2][2] | pnts[2][3] */
        movq    0(%esi),%mm2
/ 0, 6
        movq    %mm2,%mm3
        movd    28(%esi),%mm4
        punpckldq 24(%esi),%mm4
        pfadd   %mm4,%mm2
        pfsub   %mm4,%mm3
        pfmul   %mm0,%mm3
        movq    %mm2,0(%ebx)
        pswapd  %mm3, %mm3
        movq    %mm3, 24(%ebx)
        movq    8(%esi),%mm5
/ 2, 4
        movq    %mm5,%mm6
        movd    20(%esi),%mm7
        punpckldq 16(%esi),%mm7
        pfadd   %mm7,%mm5
        pfsub   %mm7,%mm6
        pfmul   %mm1,%mm6
        movq    %mm5,8(%ebx)
        pswapd  %mm6, %mm6
        movq    %mm6, 16(%ebx)
        movq    32(%esi),%mm2
/ 8, 14
        movq    %mm2,%mm3
        movd    60(%esi),%mm4
        punpckldq 56(%esi),%mm4
        pfadd   %mm4,%mm2
        pfsubr  %mm4,%mm3
        pfmul   %mm0,%mm3
        movq    %mm2,32(%ebx)
        pswapd  %mm3, %mm3
        movq    %mm3, 56(%ebx)
        movq    40(%esi),%mm5
/ 10, 12
        movq    %mm5,%mm6
        movd    52(%esi),%mm7
        punpckldq 48(%esi),%mm7
        pfadd   %mm7,%mm5
        pfsubr  %mm7,%mm6
        pfmul   %mm1,%mm6
        movq    %mm5,40(%ebx)
        pswapd  %mm6, %mm6
        movq    %mm6, 48(%ebx)
        movq    64(%esi),%mm2
/ 16, 22
        movq    %mm2,%mm3
        movd    92(%esi),%mm4
        punpckldq 88(%esi),%mm4
        pfadd   %mm4,%mm2
        pfsub   %mm4,%mm3
        pfmul   %mm0,%mm3
        movq    %mm2,64(%ebx)
        pswapd  %mm3, %mm3
        movq    %mm3, 88(%ebx)
        movq    72(%esi),%mm5
/ 18, 20
        movq    %mm5,%mm6
        movd    84(%esi),%mm7
        punpckldq 80(%esi),%mm7
        pfadd   %mm7,%mm5
        pfsub   %mm7,%mm6
        pfmul   %mm1,%mm6
        movq    %mm5,72(%ebx)
        pswapd  %mm6, %mm6
        movq    %mm6, 80(%ebx)
        movq    96(%esi),%mm2
/ 24, 30
        movq    %mm2,%mm3
        movd    124(%esi),%mm4
        punpckldq 120(%esi),%mm4
        pfadd   %mm4,%mm2
        pfsubr  %mm4,%mm3
        pfmul   %mm0,%mm3
        movq    %mm2,96(%ebx)
        pswapd  %mm3, %mm3
        movq    %mm3, 120(%ebx)
        movq    104(%esi),%mm5
/ 26, 28
        movq    %mm5,%mm6
        movd    116(%esi),%mm7
        punpckldq 112(%esi),%mm7
        pfadd   %mm7,%mm5
        pfsubr  %mm7,%mm6
        pfmul   %mm1,%mm6
        movq    %mm5,104(%ebx)
        pswapd  %mm6, %mm6
        movq    %mm6, 112(%ebx)

// Phase 4: tmp1 -> tmp2, 4-point butterflies scaled by pnts[3]
        movl    pnts+12,%eax
        movq    0(%eax),%mm0            /* mm0 = pnts[3][0] | pnts[3][1] */
        movq    0(%ebx),%mm1            /* mm1 = tmp1[0] | tmp1[1] */
/ 0
        movq    %mm1,%mm2
        movd    12(%ebx),%mm3           /* mm3 = tmp1[3] */
        punpckldq 8(%ebx),%mm3          /* mm3 = tmp1[3] | tmp1[2] */
        pfadd   %mm3,%mm1               /* mm1 = tmp1[0]+tmp1[3] | tmp1[1]+tmp1[2] */
        pfsub   %mm3,%mm2               /* mm2 = tmp1[0]-tmp1[3] | tmp1[1]-tmp1[2] */
        pfmul   %mm0,%mm2
        movq    %mm1,0(%esi)            /* tmp2[0, 1] = mm1 */
        pswapd  %mm2, %mm2
        movq    %mm2, 8(%esi)           /* tmp2[2, 3] = mm2 */
        movq    16(%ebx),%mm4
/ 4
        movq    %mm4,%mm5
        movd    28(%ebx),%mm6
        punpckldq 24(%ebx),%mm6
        pfadd   %mm6,%mm4
        pfsubr  %mm6,%mm5
        pfmul   %mm0,%mm5
        movq    %mm4,16(%esi)
        pswapd  %mm5, %mm5
        movq    %mm5, 24(%esi)
        movq    32(%ebx),%mm1
/ 8
        movq    %mm1,%mm2
        movd    44(%ebx),%mm3
        punpckldq 40(%ebx),%mm3
        pfadd   %mm3,%mm1
        pfsub   %mm3,%mm2
        pfmul   %mm0,%mm2
        movq    %mm1,32(%esi)
        pswapd  %mm2, %mm2
        movq    %mm2, 40(%esi)
        movq    48(%ebx),%mm4
/ 12
        movq    %mm4,%mm5
        movd    60(%ebx),%mm6
        punpckldq 56(%ebx),%mm6
        pfadd   %mm6,%mm4
        pfsubr  %mm6,%mm5
        pfmul   %mm0,%mm5
        movq    %mm4,48(%esi)
        pswapd  %mm5, %mm5
        movq    %mm5, 56(%esi)
        movq    64(%ebx),%mm1
/ 16
        movq    %mm1,%mm2
        movd    76(%ebx),%mm3
        punpckldq 72(%ebx),%mm3
        pfadd   %mm3,%mm1
        pfsub   %mm3,%mm2
        pfmul   %mm0,%mm2
        movq    %mm1,64(%esi)
        pswapd  %mm2, %mm2
        movq    %mm2, 72(%esi)
        movq    80(%ebx),%mm4
/ 20
        movq    %mm4,%mm5
        movd    92(%ebx),%mm6
        punpckldq 88(%ebx),%mm6
        pfadd   %mm6,%mm4
        pfsubr  %mm6,%mm5
        pfmul   %mm0,%mm5
        movq    %mm4,80(%esi)
        pswapd  %mm5, %mm5
        movq    %mm5, 88(%esi)
        movq    96(%ebx),%mm1
/ 24
        movq    %mm1,%mm2
        movd    108(%ebx),%mm3
        punpckldq 104(%ebx),%mm3
        pfadd   %mm3,%mm1
        pfsub   %mm3,%mm2
        pfmul   %mm0,%mm2
        movq    %mm1,96(%esi)
        pswapd  %mm2, %mm2
        movq    %mm2, 104(%esi)
        movq    112(%ebx),%mm4
/ 28
        movq    %mm4,%mm5
        movd    124(%ebx),%mm6
        punpckldq 120(%ebx),%mm6
        pfadd   %mm6,%mm4
        pfsubr  %mm6,%mm5
        pfmul   %mm0,%mm5
        movq    %mm4,112(%esi)
        pswapd  %mm5, %mm5
        movq    %mm5, 120(%esi)

// Phase 5: 2-point butterflies via PFPNACC; the second pair of each
// quad is negated with PXOR against the sign mask (see data section).
        movq    plus_minus_3dnow, %mm0  /* mm0 = +0.0 | -0.0 (sign-bit mask, NOT 1.0|-1.0) */
        movl    $1,%eax
        movd    %eax,%mm1
        pi2fd   %mm1,%mm1               /* mm1 low = 1.0 */
        movl    pnts+16,%eax
        movd    0(%eax),%mm2
        punpckldq %mm2,%mm1             /* mm1 = 1.0 | cos0 */
        movq    0(%esi),%mm2            /* mm2 = tmp2[0] | tmp2[1] */
/ 0
        pfpnacc %mm2, %mm2
        pswapd  %mm2, %mm2              /* mm2 = tmp2[0]+tmp2[1] | tmp2[0]-tmp2[1] */
        pfmul   %mm1,%mm2               /* mm2 = tmp2[0]+tmp2[1] | (tmp2[0]-tmp2[1])*cos0 */
        movq    %mm2,0(%ebx)            /* tmp1[0, 1] = mm2 */
        movq    8(%esi),%mm4            /* mm4 = tmp2[2] | tmp2[3] */
        pfpnacc %mm4, %mm4
        pswapd  %mm4, %mm4              /* mm4 = tmp2[2]+tmp2[3] | tmp2[2]-tmp2[3] */
        pxor    %mm0,%mm4               /* mm4 = tmp2[2]+tmp2[3] | tmp2[3]-tmp2[2] */
        pfmul   %mm1,%mm4               /* mm4 = tmp2[2]+tmp2[3] | (tmp2[3]-tmp2[2])*cos0 */
        movq    %mm4,%mm5
        psrlq   $32,%mm5                /* mm5 = (tmp2[3]-tmp2[2])*cos0 */
        pfacc   %mm5,%mm4
        movq    %mm4,8(%ebx)            /* tmp1[2, 3] = mm4 */
        movq    16(%esi),%mm2
/ 4
        pfpnacc %mm2, %mm2
        pswapd  %mm2, %mm2
        pfmul   %mm1,%mm2
        movq    24(%esi),%mm4
        pfpnacc %mm4, %mm4
        pswapd  %mm4, %mm4
        pxor    %mm0,%mm4
        pfmul   %mm1,%mm4
        movq    %mm4,%mm5
        psrlq   $32,%mm5
        pfacc   %mm5,%mm4
        movq    %mm2,%mm3
        psrlq   $32,%mm3
        pfadd   %mm4,%mm2
        pfadd   %mm3,%mm4
        movq    %mm2,16(%ebx)
        movq    %mm4,24(%ebx)
        movq    32(%esi),%mm2
/ 8
        pfpnacc %mm2, %mm2
        pswapd  %mm2, %mm2
        pfmul   %mm1,%mm2
        movq    %mm2,32(%ebx)
        movq    40(%esi),%mm4
        pfpnacc %mm4, %mm4
        pswapd  %mm4, %mm4
        pxor    %mm0,%mm4
        pfmul   %mm1,%mm4
        movq    %mm4,%mm5
        psrlq   $32,%mm5
        pfacc   %mm5,%mm4
        movq    %mm4,40(%ebx)
        movq    48(%esi),%mm2
/ 12
        pfpnacc %mm2, %mm2
        pswapd  %mm2, %mm2
        pfmul   %mm1,%mm2
        movq    56(%esi),%mm4
        pfpnacc %mm4, %mm4
        pswapd  %mm4, %mm4
        pxor    %mm0,%mm4
        pfmul   %mm1,%mm4
        movq    %mm4,%mm5
        psrlq   $32,%mm5
        pfacc   %mm5,%mm4
        movq    %mm2,%mm3
        psrlq   $32,%mm3
        pfadd   %mm4,%mm2
        pfadd   %mm3,%mm4
        movq    %mm2,48(%ebx)
        movq    %mm4,56(%ebx)
        movq    64(%esi),%mm2
/ 16
        pfpnacc %mm2, %mm2
        pswapd  %mm2, %mm2
        pfmul   %mm1,%mm2
        movq    %mm2,64(%ebx)
        movq    72(%esi),%mm4
        pfpnacc %mm4, %mm4
        pswapd  %mm4, %mm4
        pxor    %mm0,%mm4
        pfmul   %mm1,%mm4
        movq    %mm4,%mm5
        psrlq   $32,%mm5
        pfacc   %mm5,%mm4
        movq    %mm4,72(%ebx)
        movq    80(%esi),%mm2
/ 20
        pfpnacc %mm2, %mm2
        pswapd  %mm2, %mm2
        pfmul   %mm1,%mm2
        movq    88(%esi),%mm4
        pfpnacc %mm4, %mm4
        pswapd  %mm4, %mm4
        pxor    %mm0,%mm4
        pfmul   %mm1,%mm4
        movq    %mm4,%mm5
        psrlq   $32,%mm5
        pfacc   %mm5,%mm4
        movq    %mm2,%mm3
        psrlq   $32,%mm3
        pfadd   %mm4,%mm2
        pfadd   %mm3,%mm4
        movq    %mm2,80(%ebx)
        movq    %mm4,88(%ebx)
        movq    96(%esi),%mm2
/ 24
        pfpnacc %mm2, %mm2
        pswapd  %mm2, %mm2
        pfmul   %mm1,%mm2
        movq    %mm2,96(%ebx)
        movq    104(%esi),%mm4
        pfpnacc %mm4, %mm4
        pswapd  %mm4, %mm4
        pxor    %mm0,%mm4
        pfmul   %mm1,%mm4
        movq    %mm4,%mm5
        psrlq   $32,%mm5
        pfacc   %mm5,%mm4
        movq    %mm4,104(%ebx)
        movq    112(%esi),%mm2
/ 28
        pfpnacc %mm2, %mm2
        pswapd  %mm2, %mm2
        pfmul   %mm1,%mm2
        movq    120(%esi),%mm4
        pfpnacc %mm4, %mm4
        pswapd  %mm4, %mm4
        pxor    %mm0,%mm4
        pfmul   %mm1,%mm4
        movq    %mm4,%mm5
        psrlq   $32,%mm5
        pfacc   %mm5,%mm4
        movq    %mm2,%mm3
        psrlq   $32,%mm3
        pfadd   %mm4,%mm2
        pfadd   %mm3,%mm4
        movq    %mm2,112(%ebx)
        movq    %mm4,120(%ebx)

// Phase 6: scatter tmp1 into the two output windows a (%ebp) and b (%edx)
        movd    0(%ebx),%mm0
        movd    %mm0,1024(%ebp)
        movl    4(%ebx),%eax
        movl    %eax,0(%ebp)
        movl    %eax,0(%edx)
        movd    8(%ebx),%mm2
        movd    %mm2,512(%ebp)
        movd    12(%ebx),%mm3
        movd    %mm3,512(%edx)

        movl    16(%ebx),%eax
        movl    %eax,768(%ebp)
        movd    20(%ebx),%mm5
        movd    %mm5,256(%edx)

        movd    24(%ebx),%mm6
        movd    %mm6,256(%ebp)
        movd    28(%ebx),%mm7
        movd    %mm7,768(%edx)

        movq    32(%ebx),%mm0           /* mm0 = tmp1[8] | tmp1[9] */
        movq    48(%ebx),%mm1           /* mm1 = tmp1[12] | tmp1[13] */
        pfadd   %mm1,%mm0               /* mm0 = tmp1[8]+tmp1[12] | tmp1[9]+tmp1[13] */
        movd    %mm0,896(%ebp)          /* a[0xe0] = tmp1[8]+tmp1[12] */
        psrlq   $32,%mm0
        movd    %mm0,128(%edx)          /* b[0x20] = tmp1[9]+tmp1[13] */
        movq    40(%ebx),%mm2
        pfadd   %mm2,%mm1
        movd    %mm1,640(%ebp)
        psrlq   $32,%mm1
        movd    %mm1,384(%edx)

        movq    56(%ebx),%mm3
        pfadd   %mm3,%mm2
        movd    %mm2,384(%ebp)
        psrlq   $32,%mm2
        movd    %mm2,640(%edx)

        movd    36(%ebx),%mm4
        pfadd   %mm4,%mm3
        movd    %mm3,128(%ebp)
        psrlq   $32,%mm3
        movd    %mm3,896(%edx)
        movq    96(%ebx),%mm0
        movq    64(%ebx),%mm1

        movq    112(%ebx),%mm2
        pfadd   %mm2,%mm0
        movq    %mm0,%mm3
        pfadd   %mm1,%mm3
        movd    %mm3,960(%ebp)
        psrlq   $32,%mm3
        movd    %mm3,64(%edx)
        movq    80(%ebx),%mm1
        pfadd   %mm1,%mm0
        movd    %mm0,832(%ebp)
        psrlq   $32,%mm0
        movd    %mm0,192(%edx)
        movq    104(%ebx),%mm3
        pfadd   %mm3,%mm2
        movq    %mm2,%mm4
        pfadd   %mm1,%mm4
        movd    %mm4,704(%ebp)
        psrlq   $32,%mm4
        movd    %mm4,320(%edx)
        movq    72(%ebx),%mm1
        pfadd   %mm1,%mm2
        movd    %mm2,576(%ebp)
        psrlq   $32,%mm2
        movd    %mm2,448(%edx)

        movq    120(%ebx),%mm4
        pfadd   %mm4,%mm3
        movq    %mm3,%mm5
        pfadd   %mm1,%mm5
        movd    %mm5,448(%ebp)
        psrlq   $32,%mm5
        movd    %mm5,576(%edx)
        movq    88(%ebx),%mm1
        pfadd   %mm1,%mm3
        movd    %mm3,320(%ebp)
        psrlq   $32,%mm3
        movd    %mm3,704(%edx)

        movd    100(%ebx),%mm5
        pfadd   %mm5,%mm4
        movq    %mm4,%mm6
        pfadd   %mm1,%mm6
        movd    %mm6,192(%ebp)
        psrlq   $32,%mm6
        movd    %mm6,832(%edx)
        movd    68(%ebx),%mm1
        pfadd   %mm1,%mm4
        movd    %mm4,64(%ebp)
        psrlq   $32,%mm4
        movd    %mm4,960(%edx)

/ femms

        popl    %ebx
        popl    %esi
        popl    %edi
        popl    %ebp
        addl    $256,%esp

        ret     $12                     /* callee pops the 3 pointer args */