Lines matching refs:BB (every reference to the BB pointer macro in this kernel source):

59 #define BB	%ecx
121 leal 16 * SIZE + BUFFER, BB
145 movapd %xmm0, -16 * SIZE(BB)
146 movapd %xmm1, -14 * SIZE(BB)
147 movapd %xmm2, -12 * SIZE(BB)
148 movapd %xmm3, -10 * SIZE(BB)
150 movapd %xmm4, -8 * SIZE(BB)
151 movapd %xmm5, -6 * SIZE(BB)
152 movapd %xmm6, -4 * SIZE(BB)
153 movapd %xmm7, -2 * SIZE(BB)
156 addl $16 * SIZE, BB
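
These stores (lines 145-153) are the unrolled body of the copy phase: B is packed into an aligned BUFFER, BB is pre-biased by 16 * SIZE at line 121 so the eight aligned 16-byte stores can use short negative displacements, and line 156 advances BB one pass at a time. Below is a minimal C sketch of that addressing scheme; the two-slot duplication of each B element is an assumption inferred from the "because it's doubled" comments at lines 201/469/633, not something the listing states directly:

    #include <stddef.h>

    /* Sketch only: layout assumed from the listing above. Each pass
     * fills 16 doubles (eight movapd stores) with eight B values,
     * each written twice. */
    static void pack_b_sketch(const double *b, double *buffer, size_t n)
    {
        double *bb = buffer + 16;              /* leal 16 * SIZE + BUFFER, BB */
        for (size_t i = 0; i + 8 <= n; i += 8) {
            for (int j = 0; j < 8; j++) {      /* the eight movapd stores */
                bb[-16 + 2 * j]     = b[i + j];
                bb[-16 + 2 * j + 1] = b[i + j];
            }
            bb += 16;                          /* addl $16 * SIZE, BB */
        }
    }
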
172 movapd %xmm0, -16 * SIZE(BB)
173 movapd %xmm1, -14 * SIZE(BB)
175 addl $4 * SIZE, BB
195 leal 16 * SIZE + BUFFER, BB
197 leal 16 * SIZE + BUFFER, BB
201 leal (BB, %eax, 4), BB /* because it's doubled */
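
Lines 195-201 reset BB to 16 * SIZE past BUFFER and then skip past the entries an earlier pass already consumed (the same setup repeats at lines 463-469 and 627-633). The x4 scale in the leal is where the "doubled" comment is paid for. A hedged C equivalent, assuming %eax already holds an offset multiplied by SIZE elsewhere:

    /* Assumption: eax = k * SIZE bytes, so (BB, %eax, 4) advances by
     * k * 4 doubles: two B values per k, two copies of each. */
    static double *skip_bb_4(double *bb, long k)
    {
        return bb + 4 * k;                     /* leal (BB, %eax, 4), BB */
    }
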
206 movapd -16 * SIZE(BB), %xmm1
242 movapd -14 * SIZE(BB), %xmm1
248 movapd -12 * SIZE(BB), %xmm2
257 movapd -10 * SIZE(BB), %xmm2
263 movapd -8 * SIZE(BB), %xmm1
273 movapd -6 * SIZE(BB), %xmm1
279 movapd -4 * SIZE(BB), %xmm2
288 movapd -2 * SIZE(BB), %xmm2
295 movapd 0 * SIZE(BB), %xmm1
304 movapd 2 * SIZE(BB), %xmm1
310 movapd 4 * SIZE(BB), %xmm2
319 movapd 6 * SIZE(BB), %xmm2
325 movapd 8 * SIZE(BB), %xmm1
334 movapd 10 * SIZE(BB), %xmm1
340 movapd 12 * SIZE(BB), %xmm2
349 movapd 14 * SIZE(BB), %xmm2
351 subl $-32 * SIZE, BB
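
The bump at line 351 subtracts a negative constant, which is simply BB += 32 * SIZE. The spelling matters on x86 because 8-bit immediates are sign-extended: sub $-128 encodes in one immediate byte where add $128 needs four. That is exactly the 16 * SIZE case at lines 944, 1095 and 1230 (16 * 8 = 128); at 32 * SIZE the constant no longer fits an imm8 in either sign, so the sub form here is presumably kept for uniformity; the scalar section at line 715 spells the same bump as a plain addl. The equivalence in C:

    /* subl $-32 * SIZE, BB == addl $32 * SIZE, BB == bb += 32.
     * Only at the -128/+128 boundary do the encodings differ. */
    static double *bump_bb(double *bb) { return bb + 32; }
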
356 movapd -16 * SIZE(BB), %xmm1
385 movapd -14 * SIZE(BB), %xmm1
394 movapd -12 * SIZE(BB), %xmm1
398 addl $4 * SIZE, BB
442 leal (BB, %eax, 4), BB
463 leal 16 * SIZE + BUFFER, BB
465 leal 16 * SIZE + BUFFER, BB
469 leal (BB, %eax, 4), BB /* because it's doubled */
474 movapd -16 * SIZE(BB), %xmm1
478 movapd -8 * SIZE(BB), %xmm3
498 mulpd -14 * SIZE(BB), %xmm0
500 movapd -12 * SIZE(BB), %xmm1
504 mulpd -10 * SIZE(BB), %xmm0
506 movapd 0 * SIZE(BB), %xmm1
510 mulpd -6 * SIZE(BB), %xmm0
512 movapd -4 * SIZE(BB), %xmm3
516 mulpd -2 * SIZE(BB), %xmm0
518 movapd 8 * SIZE(BB), %xmm3
522 mulpd 2 * SIZE(BB), %xmm2
524 movapd 4 * SIZE(BB), %xmm1
528 mulpd 6 * SIZE(BB), %xmm2
530 movapd 16 * SIZE(BB), %xmm1
534 mulpd 10 * SIZE(BB), %xmm2
536 movapd 12 * SIZE(BB), %xmm3
540 mulpd 14 * SIZE(BB), %xmm2
542 movapd 24 * SIZE(BB), %xmm3
547 addl $32 * SIZE, BB
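
Lines 474-547 interleave mulpd-from-memory with movapd loads that run ahead of the arithmetic: xmm1 and xmm3 always hold data for a later step (note the loads from 0, 8, 16, 24 * SIZE before BB is bumped) while the current multiply reads the buffer directly. A scalar C sketch of that rotation, illustrative only; the real kernel does this on two-element vectors across several accumulators:

    /* Load-ahead rotation: nxt is fetched while cur is consumed,
     * hiding load latency under the multiply. */
    static double dot_pipelined(const double *a, const double *bb, int k)
    {
        double acc = 0.0, nxt = bb[0], cur;
        for (int i = 0; i < k; i++) {
            cur = nxt;                         /* value loaded last round */
            if (i + 1 < k)
                nxt = bb[i + 1];               /* movapd: issue next load */
            acc += a[i] * cur;                 /* mulpd plus accumulate   */
        }
        return acc;
    }
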
567 mulpd -14 * SIZE(BB), %xmm0
569 movapd -12 * SIZE(BB), %xmm1
574 addl $4 * SIZE, BB
608 leal (BB, %eax, 4), BB
627 leal 16 * SIZE + BUFFER, BB
629 leal 16 * SIZE + BUFFER, BB
633 leal (BB, %eax, 4), BB /* because it's doubled */
638 movsd -16 * SIZE(BB), %xmm1
642 movsd -8 * SIZE(BB), %xmm3
666 mulsd -14 * SIZE(BB), %xmm0
668 movsd -12 * SIZE(BB), %xmm1
672 mulsd -10 * SIZE(BB), %xmm0
674 movsd 0 * SIZE(BB), %xmm1
678 mulsd -6 * SIZE(BB), %xmm0
680 movsd -4 * SIZE(BB), %xmm3
684 mulsd -2 * SIZE(BB), %xmm0
686 movsd 8 * SIZE(BB), %xmm3
690 mulsd 2 * SIZE(BB), %xmm2
692 movsd 4 * SIZE(BB), %xmm1
696 mulsd 6 * SIZE(BB), %xmm2
698 movsd 16 * SIZE(BB), %xmm1
702 mulsd 10 * SIZE(BB), %xmm2
704 movsd 12 * SIZE(BB), %xmm3
708 mulsd 14 * SIZE(BB), %xmm2
710 movsd 24 * SIZE(BB), %xmm3
715 addl $32 * SIZE, BB
735 mulsd -14 * SIZE(BB), %xmm0
737 movsd -12 * SIZE(BB), %xmm1
742 addl $4 * SIZE, BB
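
The movsd/mulsd run at lines 638-742 is the remainder case of the same schedule: identical displacements and the same 32 * SIZE bump, but each instruction touches a single double. Since the displacements still step by 2 * SIZE, the scalar code appears to read one lane of each duplicated pair, which this sketch assumes:

    /* Scalar tail: same pipelined schedule, one lane per element. */
    static double dot_tail(const double *a, const double *bb, int k)
    {
        double acc = 0.0;
        for (int i = 0; i < k; i++)
            acc += a[i] * bb[2 * i];           /* movsd/mulsd, low lane */
        return acc;
    }
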
788 leal 16 * SIZE + BUFFER, BB
810 movapd %xmm0, -16 * SIZE(BB)
811 movapd %xmm1, -14 * SIZE(BB)
812 movapd %xmm2, -12 * SIZE(BB)
813 movapd %xmm3, -10 * SIZE(BB)
814 movapd %xmm4, -8 * SIZE(BB)
815 movapd %xmm5, -6 * SIZE(BB)
816 movapd %xmm6, -4 * SIZE(BB)
817 movapd %xmm7, -2 * SIZE(BB)
820 addl $16 * SIZE, BB
835 movapd %xmm0, -16 * SIZE(BB)
837 addl $2 * SIZE, BB
854 leal 16 * SIZE + BUFFER, BB
856 leal 16 * SIZE + BUFFER, BB
860 leal (BB, %eax, 2), BB
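
From line 854 on, the skip uses scale 2 instead of 4 and the per-pass bump halves to 16 * SIZE (line 944): these are the narrower sections, consuming one B value per index unit instead of two. Under the same %eax assumption as above:

    /* One duplicated B value per k: k * 2 doubles. */
    static double *skip_bb_2(double *bb, long k)
    {
        return bb + 2 * k;                     /* leal (BB, %eax, 2), BB */
    }
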
865 movapd -16 * SIZE(BB), %xmm1
869 movapd -8 * SIZE(BB), %xmm3
899 movapd -14 * SIZE(BB), %xmm1
905 movapd -12 * SIZE(BB), %xmm1
911 movapd -10 * SIZE(BB), %xmm1
917 movapd 0 * SIZE(BB), %xmm1
923 movapd -6 * SIZE(BB), %xmm3
929 movapd -4 * SIZE(BB), %xmm3
935 movapd -2 * SIZE(BB), %xmm3
941 movapd 8 * SIZE(BB), %xmm3
944 subl $-16 * SIZE, BB
968 movapd -14 * SIZE(BB), %xmm1
971 addl $2 * SIZE, BB
1004 leal (BB, %eax, 2), BB
1025 leal 16 * SIZE + BUFFER, BB
1027 leal 16 * SIZE + BUFFER, BB
1031 leal (BB, %eax, 2), BB
1036 movapd -16 * SIZE(BB), %xmm1
1039 movapd -8 * SIZE(BB), %xmm3
1064 movapd -14 * SIZE(BB), %xmm1
1068 movapd -12 * SIZE(BB), %xmm1
1072 movapd -10 * SIZE(BB), %xmm1
1076 movapd 0 * SIZE(BB), %xmm1
1080 movapd -6 * SIZE(BB), %xmm3
1084 movapd -4 * SIZE(BB), %xmm3
1088 movapd -2 * SIZE(BB), %xmm3
1092 movapd 8 * SIZE(BB), %xmm3
1095 subl $-16 * SIZE, BB
1117 movapd -14 * SIZE(BB), %xmm1
1120 addl $2 * SIZE, BB
1145 leal (BB, %eax, 2), BB
1164 leal 16 * SIZE + BUFFER, BB
1166 leal 16 * SIZE + BUFFER, BB
1170 leal (BB, %eax, 2), BB
1175 movsd -16 * SIZE(BB), %xmm1
1177 movsd -8 * SIZE(BB), %xmm3
1199 movsd -14 * SIZE(BB), %xmm1
1203 movsd -12 * SIZE(BB), %xmm1
1207 movsd -10 * SIZE(BB), %xmm1
1211 movsd -0 * SIZE(BB), %xmm1
1215 movsd -6 * SIZE(BB), %xmm3
1219 movsd -4 * SIZE(BB), %xmm3
1223 movsd -2 * SIZE(BB), %xmm3
1227 movsd 8 * SIZE(BB), %xmm3
1230 subl $-16 * SIZE, BB
1252 movsd -14 * SIZE(BB), %xmm1
1255 addl $2 * SIZE, BB