xref: /freebsd/sys/crypto/openssl/i386/aesni-x86.S (revision 06c3fb27)
1/* Do not modify. This file is auto-generated from aesni-x86.pl. */
2#ifdef PIC
3.text
/*
 * void aesni_encrypt(const unsigned char *in, unsigned char *out,
 *                    const AES_KEY *key)      -- cdecl, i386
 * Encrypts a single 16-byte block with AES-NI.
 * Stack args: 4(%esp)=in, 8(%esp)=out, 12(%esp)=key schedule.
 * The loop counter is loaded from 240(key) (OpenSSL AES_KEY `rounds`
 * field -- presumed from the standard layout; confirm against aes.h).
 * Clobbers: eax, ecx, edx, xmm0-xmm2 (all scrubbed to zero on exit).
 */
4.globl	aesni_encrypt
5.type	aesni_encrypt,@function
6.align	16
7aesni_encrypt:
8.L_aesni_encrypt_begin:
9	#ifdef __CET__
10
/* endbr32 -- CET indirect-branch landing pad */
11.byte	243,15,30,251
12	#endif
13
14	movl	4(%esp),%eax
15	movl	12(%esp),%edx
16	movups	(%eax),%xmm2
17	movl	240(%edx),%ecx
18	movl	8(%esp),%eax
19	movups	(%edx),%xmm0
20	movups	16(%edx),%xmm1
21	leal	32(%edx),%edx
/* whitening: block ^= round key 0 */
22	xorps	%xmm0,%xmm2
23.L000enc1_loop_1:
/* aesenc %xmm1,%xmm2 -- one middle round per iteration */
24.byte	102,15,56,220,209
25	decl	%ecx
26	movups	(%edx),%xmm1
27	leal	16(%edx),%edx
28	jnz	.L000enc1_loop_1
/* aesenclast %xmm1,%xmm2 -- final round */
29.byte	102,15,56,221,209
/* scrub key/schedule material from xmm registers before returning */
30	pxor	%xmm0,%xmm0
31	pxor	%xmm1,%xmm1
32	movups	%xmm2,(%eax)
33	pxor	%xmm2,%xmm2
34	ret
35.size	aesni_encrypt,.-.L_aesni_encrypt_begin
/*
 * void aesni_decrypt(const unsigned char *in, unsigned char *out,
 *                    const AES_KEY *key)      -- cdecl, i386
 * Decrypts a single 16-byte block; exact mirror of aesni_encrypt but
 * using aesdec/aesdeclast and a decryption key schedule.
 * Stack args: 4(%esp)=in, 8(%esp)=out, 12(%esp)=key schedule.
 * Clobbers: eax, ecx, edx, xmm0-xmm2 (scrubbed on exit).
 */
36.globl	aesni_decrypt
37.type	aesni_decrypt,@function
38.align	16
39aesni_decrypt:
40.L_aesni_decrypt_begin:
41	#ifdef __CET__
42
/* endbr32 -- CET indirect-branch landing pad */
43.byte	243,15,30,251
44	#endif
45
46	movl	4(%esp),%eax
47	movl	12(%esp),%edx
48	movups	(%eax),%xmm2
49	movl	240(%edx),%ecx
50	movl	8(%esp),%eax
51	movups	(%edx),%xmm0
52	movups	16(%edx),%xmm1
53	leal	32(%edx),%edx
/* whitening with round key 0 */
54	xorps	%xmm0,%xmm2
55.L001dec1_loop_2:
/* aesdec %xmm1,%xmm2 */
56.byte	102,15,56,222,209
57	decl	%ecx
58	movups	(%edx),%xmm1
59	leal	16(%edx),%edx
60	jnz	.L001dec1_loop_2
/* aesdeclast %xmm1,%xmm2 */
61.byte	102,15,56,223,209
/* scrub key material before returning */
62	pxor	%xmm0,%xmm0
63	pxor	%xmm1,%xmm1
64	movups	%xmm2,(%eax)
65	pxor	%xmm2,%xmm2
66	ret
67.size	aesni_decrypt,.-.L_aesni_decrypt_begin
/*
 * _aesni_encrypt2 -- internal helper: encrypt 2 blocks in parallel.
 * In:  edx = key schedule, ecx = round count, xmm2/xmm3 = plaintext.
 * Out: xmm2/xmm3 = ciphertext.  Clobbers ecx, edx, xmm0, xmm1.
 * The key pointer is biased to the END of the schedule and walked with
 * a negated index so the loop condition falls out of `addl` setting ZF.
 * Interleaving two aesenc streams hides the instruction latency.
 */
68.type	_aesni_encrypt2,@function
69.align	16
70_aesni_encrypt2:
71	#ifdef __CET__
72
/* endbr32 */
73.byte	243,15,30,251
74	#endif
75
76	movups	(%edx),%xmm0
/* rounds * 16 = key-schedule byte length past the first two keys */
77	shll	$4,%ecx
78	movups	16(%edx),%xmm1
79	xorps	%xmm0,%xmm2
80	pxor	%xmm0,%xmm3
81	movups	32(%edx),%xmm0
82	leal	32(%edx,%ecx,1),%edx
83	negl	%ecx
84	addl	$16,%ecx
85.L002enc2_loop:
/* aesenc %xmm1,%xmm2 ; aesenc %xmm1,%xmm3 */
86.byte	102,15,56,220,209
87.byte	102,15,56,220,217
88	movups	(%edx,%ecx,1),%xmm1
89	addl	$32,%ecx
/* aesenc %xmm0,%xmm2 ; aesenc %xmm0,%xmm3 */
90.byte	102,15,56,220,208
91.byte	102,15,56,220,216
92	movups	-16(%edx,%ecx,1),%xmm0
93	jnz	.L002enc2_loop
94.byte	102,15,56,220,209
95.byte	102,15,56,220,217
/* aesenclast on both blocks */
96.byte	102,15,56,221,208
97.byte	102,15,56,221,216
98	ret
99.size	_aesni_encrypt2,.-_aesni_encrypt2
/*
 * _aesni_decrypt2 -- internal helper: decrypt 2 blocks in parallel.
 * Mirror of _aesni_encrypt2 using aesdec/aesdeclast.
 * In:  edx = key schedule, ecx = round count, xmm2/xmm3 = ciphertext.
 * Out: xmm2/xmm3 = plaintext.  Clobbers ecx, edx, xmm0, xmm1.
 */
100.type	_aesni_decrypt2,@function
101.align	16
102_aesni_decrypt2:
103	#ifdef __CET__
104
/* endbr32 */
105.byte	243,15,30,251
106	#endif
107
108	movups	(%edx),%xmm0
109	shll	$4,%ecx
110	movups	16(%edx),%xmm1
111	xorps	%xmm0,%xmm2
112	pxor	%xmm0,%xmm3
113	movups	32(%edx),%xmm0
/* bias key pointer to schedule end; walk back with negative index */
114	leal	32(%edx,%ecx,1),%edx
115	negl	%ecx
116	addl	$16,%ecx
117.L003dec2_loop:
/* aesdec %xmm1,%xmm2 ; aesdec %xmm1,%xmm3 */
118.byte	102,15,56,222,209
119.byte	102,15,56,222,217
120	movups	(%edx,%ecx,1),%xmm1
121	addl	$32,%ecx
/* aesdec %xmm0,%xmm2 ; aesdec %xmm0,%xmm3 */
122.byte	102,15,56,222,208
123.byte	102,15,56,222,216
124	movups	-16(%edx,%ecx,1),%xmm0
125	jnz	.L003dec2_loop
126.byte	102,15,56,222,209
127.byte	102,15,56,222,217
/* aesdeclast on both blocks */
128.byte	102,15,56,223,208
129.byte	102,15,56,223,216
130	ret
131.size	_aesni_decrypt2,.-_aesni_decrypt2
/*
 * _aesni_encrypt3 -- internal helper: encrypt 3 blocks in parallel.
 * In:  edx = key schedule, ecx = round count, xmm2-xmm4 = plaintext.
 * Out: xmm2-xmm4 = ciphertext.  Clobbers ecx, edx, xmm0, xmm1.
 * Same end-biased / negative-index key walk as _aesni_encrypt2.
 */
132.type	_aesni_encrypt3,@function
133.align	16
134_aesni_encrypt3:
135	#ifdef __CET__
136
/* endbr32 */
137.byte	243,15,30,251
138	#endif
139
140	movups	(%edx),%xmm0
141	shll	$4,%ecx
142	movups	16(%edx),%xmm1
/* whiten all three blocks with round key 0 */
143	xorps	%xmm0,%xmm2
144	pxor	%xmm0,%xmm3
145	pxor	%xmm0,%xmm4
146	movups	32(%edx),%xmm0
147	leal	32(%edx,%ecx,1),%edx
148	negl	%ecx
149	addl	$16,%ecx
150.L004enc3_loop:
/* aesenc %xmm1,{%xmm2,%xmm3,%xmm4} */
151.byte	102,15,56,220,209
152.byte	102,15,56,220,217
153.byte	102,15,56,220,225
154	movups	(%edx,%ecx,1),%xmm1
155	addl	$32,%ecx
/* aesenc %xmm0,{%xmm2,%xmm3,%xmm4} */
156.byte	102,15,56,220,208
157.byte	102,15,56,220,216
158.byte	102,15,56,220,224
159	movups	-16(%edx,%ecx,1),%xmm0
160	jnz	.L004enc3_loop
161.byte	102,15,56,220,209
162.byte	102,15,56,220,217
163.byte	102,15,56,220,225
/* aesenclast on all three blocks */
164.byte	102,15,56,221,208
165.byte	102,15,56,221,216
166.byte	102,15,56,221,224
167	ret
168.size	_aesni_encrypt3,.-_aesni_encrypt3
/*
 * _aesni_decrypt3 -- internal helper: decrypt 3 blocks in parallel.
 * Mirror of _aesni_encrypt3 using aesdec/aesdeclast.
 * In:  edx = key schedule, ecx = round count, xmm2-xmm4 = ciphertext.
 * Out: xmm2-xmm4 = plaintext.  Clobbers ecx, edx, xmm0, xmm1.
 */
169.type	_aesni_decrypt3,@function
170.align	16
171_aesni_decrypt3:
172	#ifdef __CET__
173
/* endbr32 */
174.byte	243,15,30,251
175	#endif
176
177	movups	(%edx),%xmm0
178	shll	$4,%ecx
179	movups	16(%edx),%xmm1
180	xorps	%xmm0,%xmm2
181	pxor	%xmm0,%xmm3
182	pxor	%xmm0,%xmm4
183	movups	32(%edx),%xmm0
184	leal	32(%edx,%ecx,1),%edx
185	negl	%ecx
186	addl	$16,%ecx
187.L005dec3_loop:
/* aesdec %xmm1,{%xmm2,%xmm3,%xmm4} */
188.byte	102,15,56,222,209
189.byte	102,15,56,222,217
190.byte	102,15,56,222,225
191	movups	(%edx,%ecx,1),%xmm1
192	addl	$32,%ecx
/* aesdec %xmm0,{%xmm2,%xmm3,%xmm4} */
193.byte	102,15,56,222,208
194.byte	102,15,56,222,216
195.byte	102,15,56,222,224
196	movups	-16(%edx,%ecx,1),%xmm0
197	jnz	.L005dec3_loop
198.byte	102,15,56,222,209
199.byte	102,15,56,222,217
200.byte	102,15,56,222,225
/* aesdeclast on all three blocks */
201.byte	102,15,56,223,208
202.byte	102,15,56,223,216
203.byte	102,15,56,223,224
204	ret
205.size	_aesni_decrypt3,.-_aesni_decrypt3
/*
 * _aesni_encrypt4 -- internal helper: encrypt 4 blocks in parallel.
 * In:  edx = key schedule, ecx = round count, xmm2-xmm5 = plaintext.
 * Out: xmm2-xmm5 = ciphertext.  Clobbers ecx, edx, xmm0, xmm1.
 */
206.type	_aesni_encrypt4,@function
207.align	16
208_aesni_encrypt4:
209	#ifdef __CET__
210
/* endbr32 */
211.byte	243,15,30,251
212	#endif
213
214	movups	(%edx),%xmm0
215	movups	16(%edx),%xmm1
216	shll	$4,%ecx
/* whiten all four blocks with round key 0 */
217	xorps	%xmm0,%xmm2
218	pxor	%xmm0,%xmm3
219	pxor	%xmm0,%xmm4
220	pxor	%xmm0,%xmm5
221	movups	32(%edx),%xmm0
222	leal	32(%edx,%ecx,1),%edx
223	negl	%ecx
/* nopl 0(%eax) -- 4-byte nop, generator-emitted padding/scheduling */
224.byte	15,31,64,0
225	addl	$16,%ecx
226.L006enc4_loop:
/* aesenc %xmm1,{%xmm2..%xmm5} */
227.byte	102,15,56,220,209
228.byte	102,15,56,220,217
229.byte	102,15,56,220,225
230.byte	102,15,56,220,233
231	movups	(%edx,%ecx,1),%xmm1
232	addl	$32,%ecx
/* aesenc %xmm0,{%xmm2..%xmm5} */
233.byte	102,15,56,220,208
234.byte	102,15,56,220,216
235.byte	102,15,56,220,224
236.byte	102,15,56,220,232
237	movups	-16(%edx,%ecx,1),%xmm0
238	jnz	.L006enc4_loop
239.byte	102,15,56,220,209
240.byte	102,15,56,220,217
241.byte	102,15,56,220,225
242.byte	102,15,56,220,233
/* aesenclast on all four blocks */
243.byte	102,15,56,221,208
244.byte	102,15,56,221,216
245.byte	102,15,56,221,224
246.byte	102,15,56,221,232
247	ret
248.size	_aesni_encrypt4,.-_aesni_encrypt4
/*
 * _aesni_decrypt4 -- internal helper: decrypt 4 blocks in parallel.
 * Mirror of _aesni_encrypt4 using aesdec/aesdeclast.
 * In:  edx = key schedule, ecx = round count, xmm2-xmm5 = ciphertext.
 * Out: xmm2-xmm5 = plaintext.  Clobbers ecx, edx, xmm0, xmm1.
 */
249.type	_aesni_decrypt4,@function
250.align	16
251_aesni_decrypt4:
252	#ifdef __CET__
253
/* endbr32 */
254.byte	243,15,30,251
255	#endif
256
257	movups	(%edx),%xmm0
258	movups	16(%edx),%xmm1
259	shll	$4,%ecx
260	xorps	%xmm0,%xmm2
261	pxor	%xmm0,%xmm3
262	pxor	%xmm0,%xmm4
263	pxor	%xmm0,%xmm5
264	movups	32(%edx),%xmm0
265	leal	32(%edx,%ecx,1),%edx
266	negl	%ecx
/* nopl 0(%eax) -- 4-byte nop, generator-emitted padding/scheduling */
267.byte	15,31,64,0
268	addl	$16,%ecx
269.L007dec4_loop:
/* aesdec %xmm1,{%xmm2..%xmm5} */
270.byte	102,15,56,222,209
271.byte	102,15,56,222,217
272.byte	102,15,56,222,225
273.byte	102,15,56,222,233
274	movups	(%edx,%ecx,1),%xmm1
275	addl	$32,%ecx
/* aesdec %xmm0,{%xmm2..%xmm5} */
276.byte	102,15,56,222,208
277.byte	102,15,56,222,216
278.byte	102,15,56,222,224
279.byte	102,15,56,222,232
280	movups	-16(%edx,%ecx,1),%xmm0
281	jnz	.L007dec4_loop
282.byte	102,15,56,222,209
283.byte	102,15,56,222,217
284.byte	102,15,56,222,225
285.byte	102,15,56,222,233
/* aesdeclast on all four blocks */
286.byte	102,15,56,223,208
287.byte	102,15,56,223,216
288.byte	102,15,56,223,224
289.byte	102,15,56,223,232
290	ret
291.size	_aesni_decrypt4,.-_aesni_decrypt4
/*
 * _aesni_encrypt6 -- internal helper: encrypt 6 blocks in parallel.
 * In:  edx = key schedule, ecx = round count, xmm2-xmm7 = plaintext.
 * Out: xmm2-xmm7 = ciphertext.  Clobbers ecx, edx, xmm0, xmm1.
 * The first round for xmm2-xmm4 is interleaved with the whitening of
 * xmm5-xmm7 to hide latency.  .L_aesni_encrypt6_enter is a secondary
 * entry point used by the CTR/XTS code, which performs the whitening
 * and first round itself before calling in.
 */
292.type	_aesni_encrypt6,@function
293.align	16
294_aesni_encrypt6:
295	#ifdef __CET__
296
/* endbr32 */
297.byte	243,15,30,251
298	#endif
299
300	movups	(%edx),%xmm0
301	shll	$4,%ecx
302	movups	16(%edx),%xmm1
303	xorps	%xmm0,%xmm2
304	pxor	%xmm0,%xmm3
305	pxor	%xmm0,%xmm4
/* aesenc %xmm1,%xmm2 -- start round 1 early for the first blocks */
306.byte	102,15,56,220,209
307	pxor	%xmm0,%xmm5
308	pxor	%xmm0,%xmm6
309.byte	102,15,56,220,217
310	leal	32(%edx,%ecx,1),%edx
311	negl	%ecx
312.byte	102,15,56,220,225
313	pxor	%xmm0,%xmm7
314	movups	(%edx,%ecx,1),%xmm0
315	addl	$16,%ecx
316	jmp	.L008_aesni_encrypt6_inner
317.align	16
318.L009enc6_loop:
319.byte	102,15,56,220,209
320.byte	102,15,56,220,217
321.byte	102,15,56,220,225
322.L008_aesni_encrypt6_inner:
323.byte	102,15,56,220,233
324.byte	102,15,56,220,241
325.byte	102,15,56,220,249
/* external entry: callers arrive here mid-round (see CTR32 loop) */
326.L_aesni_encrypt6_enter:
327	movups	(%edx,%ecx,1),%xmm1
328	addl	$32,%ecx
/* aesenc %xmm0,{%xmm2..%xmm7} */
329.byte	102,15,56,220,208
330.byte	102,15,56,220,216
331.byte	102,15,56,220,224
332.byte	102,15,56,220,232
333.byte	102,15,56,220,240
334.byte	102,15,56,220,248
335	movups	-16(%edx,%ecx,1),%xmm0
336	jnz	.L009enc6_loop
337.byte	102,15,56,220,209
338.byte	102,15,56,220,217
339.byte	102,15,56,220,225
340.byte	102,15,56,220,233
341.byte	102,15,56,220,241
342.byte	102,15,56,220,249
/* aesenclast on all six blocks */
343.byte	102,15,56,221,208
344.byte	102,15,56,221,216
345.byte	102,15,56,221,224
346.byte	102,15,56,221,232
347.byte	102,15,56,221,240
348.byte	102,15,56,221,248
349	ret
350.size	_aesni_encrypt6,.-_aesni_encrypt6
/*
 * _aesni_decrypt6 -- internal helper: decrypt 6 blocks in parallel.
 * Mirror of _aesni_encrypt6 using aesdec/aesdeclast, with the same
 * secondary entry point .L_aesni_decrypt6_enter for callers that do
 * their own whitening/first round.
 * In:  edx = key schedule, ecx = round count, xmm2-xmm7 = ciphertext.
 * Out: xmm2-xmm7 = plaintext.  Clobbers ecx, edx, xmm0, xmm1.
 */
351.type	_aesni_decrypt6,@function
352.align	16
353_aesni_decrypt6:
354	#ifdef __CET__
355
/* endbr32 */
356.byte	243,15,30,251
357	#endif
358
359	movups	(%edx),%xmm0
360	shll	$4,%ecx
361	movups	16(%edx),%xmm1
362	xorps	%xmm0,%xmm2
363	pxor	%xmm0,%xmm3
364	pxor	%xmm0,%xmm4
/* aesdec %xmm1,%xmm2 -- start round 1 early */
365.byte	102,15,56,222,209
366	pxor	%xmm0,%xmm5
367	pxor	%xmm0,%xmm6
368.byte	102,15,56,222,217
369	leal	32(%edx,%ecx,1),%edx
370	negl	%ecx
371.byte	102,15,56,222,225
372	pxor	%xmm0,%xmm7
373	movups	(%edx,%ecx,1),%xmm0
374	addl	$16,%ecx
375	jmp	.L010_aesni_decrypt6_inner
376.align	16
377.L011dec6_loop:
378.byte	102,15,56,222,209
379.byte	102,15,56,222,217
380.byte	102,15,56,222,225
381.L010_aesni_decrypt6_inner:
382.byte	102,15,56,222,233
383.byte	102,15,56,222,241
384.byte	102,15,56,222,249
/* external entry point for pre-whitened callers */
385.L_aesni_decrypt6_enter:
386	movups	(%edx,%ecx,1),%xmm1
387	addl	$32,%ecx
/* aesdec %xmm0,{%xmm2..%xmm7} */
388.byte	102,15,56,222,208
389.byte	102,15,56,222,216
390.byte	102,15,56,222,224
391.byte	102,15,56,222,232
392.byte	102,15,56,222,240
393.byte	102,15,56,222,248
394	movups	-16(%edx,%ecx,1),%xmm0
395	jnz	.L011dec6_loop
396.byte	102,15,56,222,209
397.byte	102,15,56,222,217
398.byte	102,15,56,222,225
399.byte	102,15,56,222,233
400.byte	102,15,56,222,241
401.byte	102,15,56,222,249
/* aesdeclast on all six blocks */
402.byte	102,15,56,223,208
403.byte	102,15,56,223,216
404.byte	102,15,56,223,224
405.byte	102,15,56,223,232
406.byte	102,15,56,223,240
407.byte	102,15,56,223,248
408	ret
409.size	_aesni_decrypt6,.-_aesni_decrypt6
/*
 * void aesni_ecb_encrypt(const unsigned char *in, unsigned char *out,
 *                        size_t length, const AES_KEY *key, int enc)
 * ECB mode, both directions.  cdecl; after the 4 pushes the args sit at
 * 20(%esp)=in, 24(%esp)=out, 28(%esp)=length, 32(%esp)=key, 36(%esp)=enc.
 * length is truncated to a multiple of 16; enc!=0 selects encryption.
 * Processes 6 blocks per iteration via _aesni_{en,de}crypt6, then a
 * 1/2/3/4/5-block tail.  ebp/ebx hold the saved key pointer / round
 * count because the helpers clobber edx/ecx.
 * All xmm registers are scrubbed before returning.
 */
410.globl	aesni_ecb_encrypt
411.type	aesni_ecb_encrypt,@function
412.align	16
413aesni_ecb_encrypt:
414.L_aesni_ecb_encrypt_begin:
415	#ifdef __CET__
416
/* endbr32 */
417.byte	243,15,30,251
418	#endif
419
420	pushl	%ebp
421	pushl	%ebx
422	pushl	%esi
423	pushl	%edi
/* esi=in, edi=out, eax=length, edx=key, ebx=enc flag */
424	movl	20(%esp),%esi
425	movl	24(%esp),%edi
426	movl	28(%esp),%eax
427	movl	32(%esp),%edx
428	movl	36(%esp),%ebx
/* round length down to whole blocks; nothing to do if zero */
429	andl	$-16,%eax
430	jz	.L012ecb_ret
431	movl	240(%edx),%ecx
432	testl	%ebx,%ebx
433	jz	.L013ecb_decrypt
/* ---- encrypt path: save key/rounds across helper calls ---- */
434	movl	%edx,%ebp
435	movl	%ecx,%ebx
436	cmpl	$96,%eax
437	jb	.L014ecb_enc_tail
/* preload first 6 blocks, then enter the software-pipelined loop */
438	movdqu	(%esi),%xmm2
439	movdqu	16(%esi),%xmm3
440	movdqu	32(%esi),%xmm4
441	movdqu	48(%esi),%xmm5
442	movdqu	64(%esi),%xmm6
443	movdqu	80(%esi),%xmm7
444	leal	96(%esi),%esi
445	subl	$96,%eax
446	jmp	.L015ecb_enc_loop6_enter
447.align	16
/* steady state: store previous 6 results while loading the next 6 */
448.L016ecb_enc_loop6:
449	movups	%xmm2,(%edi)
450	movdqu	(%esi),%xmm2
451	movups	%xmm3,16(%edi)
452	movdqu	16(%esi),%xmm3
453	movups	%xmm4,32(%edi)
454	movdqu	32(%esi),%xmm4
455	movups	%xmm5,48(%edi)
456	movdqu	48(%esi),%xmm5
457	movups	%xmm6,64(%edi)
458	movdqu	64(%esi),%xmm6
459	movups	%xmm7,80(%edi)
460	leal	96(%edi),%edi
461	movdqu	80(%esi),%xmm7
462	leal	96(%esi),%esi
463.L015ecb_enc_loop6_enter:
464	call	_aesni_encrypt6
/* restore key pointer and round count clobbered by the helper */
465	movl	%ebp,%edx
466	movl	%ebx,%ecx
467	subl	$96,%eax
468	jnc	.L016ecb_enc_loop6
/* drain the pipeline: flush the final 6 encrypted blocks */
469	movups	%xmm2,(%edi)
470	movups	%xmm3,16(%edi)
471	movups	%xmm4,32(%edi)
472	movups	%xmm5,48(%edi)
473	movups	%xmm6,64(%edi)
474	movups	%xmm7,80(%edi)
475	leal	96(%edi),%edi
476	addl	$96,%eax
477	jz	.L012ecb_ret
/* ---- encrypt tail: 1..5 remaining blocks ---- */
478.L014ecb_enc_tail:
479	movups	(%esi),%xmm2
480	cmpl	$32,%eax
481	jb	.L017ecb_enc_one
482	movups	16(%esi),%xmm3
483	je	.L018ecb_enc_two
484	movups	32(%esi),%xmm4
485	cmpl	$64,%eax
486	jb	.L019ecb_enc_three
487	movups	48(%esi),%xmm5
488	je	.L020ecb_enc_four
/* 5 blocks: run the 6-wide helper with a zeroed 6th lane */
489	movups	64(%esi),%xmm6
490	xorps	%xmm7,%xmm7
491	call	_aesni_encrypt6
492	movups	%xmm2,(%edi)
493	movups	%xmm3,16(%edi)
494	movups	%xmm4,32(%edi)
495	movups	%xmm5,48(%edi)
496	movups	%xmm6,64(%edi)
497	jmp	.L012ecb_ret
498.align	16
/* single block: inline aesenc loop (same shape as aesni_encrypt) */
499.L017ecb_enc_one:
500	movups	(%edx),%xmm0
501	movups	16(%edx),%xmm1
502	leal	32(%edx),%edx
503	xorps	%xmm0,%xmm2
504.L021enc1_loop_3:
505.byte	102,15,56,220,209
506	decl	%ecx
507	movups	(%edx),%xmm1
508	leal	16(%edx),%edx
509	jnz	.L021enc1_loop_3
510.byte	102,15,56,221,209
511	movups	%xmm2,(%edi)
512	jmp	.L012ecb_ret
513.align	16
514.L018ecb_enc_two:
515	call	_aesni_encrypt2
516	movups	%xmm2,(%edi)
517	movups	%xmm3,16(%edi)
518	jmp	.L012ecb_ret
519.align	16
520.L019ecb_enc_three:
521	call	_aesni_encrypt3
522	movups	%xmm2,(%edi)
523	movups	%xmm3,16(%edi)
524	movups	%xmm4,32(%edi)
525	jmp	.L012ecb_ret
526.align	16
527.L020ecb_enc_four:
528	call	_aesni_encrypt4
529	movups	%xmm2,(%edi)
530	movups	%xmm3,16(%edi)
531	movups	%xmm4,32(%edi)
532	movups	%xmm5,48(%edi)
533	jmp	.L012ecb_ret
534.align	16
/* ---- decrypt path: identical structure with the decrypt helpers ---- */
535.L013ecb_decrypt:
536	movl	%edx,%ebp
537	movl	%ecx,%ebx
538	cmpl	$96,%eax
539	jb	.L022ecb_dec_tail
540	movdqu	(%esi),%xmm2
541	movdqu	16(%esi),%xmm3
542	movdqu	32(%esi),%xmm4
543	movdqu	48(%esi),%xmm5
544	movdqu	64(%esi),%xmm6
545	movdqu	80(%esi),%xmm7
546	leal	96(%esi),%esi
547	subl	$96,%eax
548	jmp	.L023ecb_dec_loop6_enter
549.align	16
550.L024ecb_dec_loop6:
551	movups	%xmm2,(%edi)
552	movdqu	(%esi),%xmm2
553	movups	%xmm3,16(%edi)
554	movdqu	16(%esi),%xmm3
555	movups	%xmm4,32(%edi)
556	movdqu	32(%esi),%xmm4
557	movups	%xmm5,48(%edi)
558	movdqu	48(%esi),%xmm5
559	movups	%xmm6,64(%edi)
560	movdqu	64(%esi),%xmm6
561	movups	%xmm7,80(%edi)
562	leal	96(%edi),%edi
563	movdqu	80(%esi),%xmm7
564	leal	96(%esi),%esi
565.L023ecb_dec_loop6_enter:
566	call	_aesni_decrypt6
567	movl	%ebp,%edx
568	movl	%ebx,%ecx
569	subl	$96,%eax
570	jnc	.L024ecb_dec_loop6
571	movups	%xmm2,(%edi)
572	movups	%xmm3,16(%edi)
573	movups	%xmm4,32(%edi)
574	movups	%xmm5,48(%edi)
575	movups	%xmm6,64(%edi)
576	movups	%xmm7,80(%edi)
577	leal	96(%edi),%edi
578	addl	$96,%eax
579	jz	.L012ecb_ret
/* ---- decrypt tail: 1..5 remaining blocks ---- */
580.L022ecb_dec_tail:
581	movups	(%esi),%xmm2
582	cmpl	$32,%eax
583	jb	.L025ecb_dec_one
584	movups	16(%esi),%xmm3
585	je	.L026ecb_dec_two
586	movups	32(%esi),%xmm4
587	cmpl	$64,%eax
588	jb	.L027ecb_dec_three
589	movups	48(%esi),%xmm5
590	je	.L028ecb_dec_four
591	movups	64(%esi),%xmm6
592	xorps	%xmm7,%xmm7
593	call	_aesni_decrypt6
594	movups	%xmm2,(%edi)
595	movups	%xmm3,16(%edi)
596	movups	%xmm4,32(%edi)
597	movups	%xmm5,48(%edi)
598	movups	%xmm6,64(%edi)
599	jmp	.L012ecb_ret
600.align	16
/* single block: inline aesdec loop */
601.L025ecb_dec_one:
602	movups	(%edx),%xmm0
603	movups	16(%edx),%xmm1
604	leal	32(%edx),%edx
605	xorps	%xmm0,%xmm2
606.L029dec1_loop_4:
607.byte	102,15,56,222,209
608	decl	%ecx
609	movups	(%edx),%xmm1
610	leal	16(%edx),%edx
611	jnz	.L029dec1_loop_4
612.byte	102,15,56,223,209
613	movups	%xmm2,(%edi)
614	jmp	.L012ecb_ret
615.align	16
616.L026ecb_dec_two:
617	call	_aesni_decrypt2
618	movups	%xmm2,(%edi)
619	movups	%xmm3,16(%edi)
620	jmp	.L012ecb_ret
621.align	16
622.L027ecb_dec_three:
623	call	_aesni_decrypt3
624	movups	%xmm2,(%edi)
625	movups	%xmm3,16(%edi)
626	movups	%xmm4,32(%edi)
627	jmp	.L012ecb_ret
628.align	16
629.L028ecb_dec_four:
630	call	_aesni_decrypt4
631	movups	%xmm2,(%edi)
632	movups	%xmm3,16(%edi)
633	movups	%xmm4,32(%edi)
634	movups	%xmm5,48(%edi)
/* common exit: scrub every xmm register, restore callee-saved regs */
635.L012ecb_ret:
636	pxor	%xmm0,%xmm0
637	pxor	%xmm1,%xmm1
638	pxor	%xmm2,%xmm2
639	pxor	%xmm3,%xmm3
640	pxor	%xmm4,%xmm4
641	pxor	%xmm5,%xmm5
642	pxor	%xmm6,%xmm6
643	pxor	%xmm7,%xmm7
644	popl	%edi
645	popl	%esi
646	popl	%ebx
647	popl	%ebp
648	ret
649.size	aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin
/*
 * void aesni_ccm64_encrypt_blocks(const unsigned char *in,
 *         unsigned char *out, size_t blocks, const AES_KEY *key,
 *         const unsigned char ivec[16], unsigned char cmac[16])
 * CCM with a 64-bit counter section: CTR-encrypts `blocks` full blocks
 * while folding the plaintext into the CBC-MAC (xmm3) in the same pass.
 * After the 4 pushes: 20=in, 24=out, 28=blocks, 32=key, 36=ivec,
 * 40=cmac (all %esp-relative).  Builds a 16-byte aligned scratch frame:
 * (%esp)=byte-swap shuffle mask, 16(%esp)=counter increment (1 in the
 * low qword), 48(%esp)=saved %esp.  The counter block (xmm7) is kept
 * byte-swapped via pshufb so paddq can increment it.
 * The two-wide round loop runs counter-encryption and the CMAC update
 * in parallel.  All xmm registers are scrubbed before returning.
 */
650.globl	aesni_ccm64_encrypt_blocks
651.type	aesni_ccm64_encrypt_blocks,@function
652.align	16
653aesni_ccm64_encrypt_blocks:
654.L_aesni_ccm64_encrypt_blocks_begin:
655	#ifdef __CET__
656
/* endbr32 */
657.byte	243,15,30,251
658	#endif
659
660	pushl	%ebp
661	pushl	%ebx
662	pushl	%esi
663	pushl	%edi
664	movl	20(%esp),%esi
665	movl	24(%esp),%edi
666	movl	28(%esp),%eax
667	movl	32(%esp),%edx
668	movl	36(%esp),%ebx
669	movl	40(%esp),%ecx
/* carve out an aligned scratch frame; keep original esp at 48(%esp) */
670	movl	%esp,%ebp
671	subl	$60,%esp
672	andl	$-16,%esp
673	movl	%ebp,48(%esp)
674	movdqu	(%ebx),%xmm7
675	movdqu	(%ecx),%xmm3
676	movl	240(%edx),%ecx
/* byte-swap shuffle mask 0x0c0d0e0f...00010203 at (%esp) */
677	movl	$202182159,(%esp)
678	movl	$134810123,4(%esp)
679	movl	$67438087,8(%esp)
680	movl	$66051,12(%esp)
/* counter increment: qword 1 at 16(%esp) */
681	movl	$1,%ebx
682	xorl	%ebp,%ebp
683	movl	%ebx,16(%esp)
684	movl	%ebp,20(%esp)
685	movl	%ebp,24(%esp)
686	movl	%ebp,28(%esp)
/* precompute end-biased key pointer and negative index (see helpers) */
687	shll	$4,%ecx
688	movl	$16,%ebx
689	leal	(%edx),%ebp
690	movdqa	(%esp),%xmm5
691	movdqa	%xmm7,%xmm2
692	leal	32(%edx,%ecx,1),%edx
693	subl	%ecx,%ebx
/* pshufb %xmm5,%xmm7 -- keep counter in big-endian order for paddq */
694.byte	102,15,56,0,253
695.L030ccm64_enc_outer:
696	movups	(%ebp),%xmm0
697	movl	%ebx,%ecx
698	movups	(%esi),%xmm6
/* xmm2 = counter ^ rk0; xmm3 ^= plaintext ^ rk0 (CBC-MAC input) */
699	xorps	%xmm0,%xmm2
700	movups	16(%ebp),%xmm1
701	xorps	%xmm6,%xmm0
702	xorps	%xmm0,%xmm3
703	movups	32(%ebp),%xmm0
/* encrypt counter block (xmm2) and CMAC block (xmm3) in lockstep */
704.L031ccm64_enc2_loop:
705.byte	102,15,56,220,209
706.byte	102,15,56,220,217
707	movups	(%edx,%ecx,1),%xmm1
708	addl	$32,%ecx
709.byte	102,15,56,220,208
710.byte	102,15,56,220,216
711	movups	-16(%edx,%ecx,1),%xmm0
712	jnz	.L031ccm64_enc2_loop
713.byte	102,15,56,220,209
714.byte	102,15,56,220,217
/* bump counter while the last rounds are still in flight */
715	paddq	16(%esp),%xmm7
716	decl	%eax
717.byte	102,15,56,221,208
718.byte	102,15,56,221,216
719	leal	16(%esi),%esi
/* ciphertext = plaintext ^ E(counter) */
720	xorps	%xmm2,%xmm6
721	movdqa	%xmm7,%xmm2
722	movups	%xmm6,(%edi)
/* pshufb %xmm5,%xmm2 -- re-swap incremented counter for encryption */
723.byte	102,15,56,0,213
724	leal	16(%edi),%edi
725	jnz	.L030ccm64_enc_outer
/* restore stack, write back final CMAC, scrub xmm state */
726	movl	48(%esp),%esp
727	movl	40(%esp),%edi
728	movups	%xmm3,(%edi)
729	pxor	%xmm0,%xmm0
730	pxor	%xmm1,%xmm1
731	pxor	%xmm2,%xmm2
732	pxor	%xmm3,%xmm3
733	pxor	%xmm4,%xmm4
734	pxor	%xmm5,%xmm5
735	pxor	%xmm6,%xmm6
736	pxor	%xmm7,%xmm7
737	popl	%edi
738	popl	%esi
739	popl	%ebx
740	popl	%ebp
741	ret
742.size	aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin
/*
 * void aesni_ccm64_decrypt_blocks(const unsigned char *in,
 *         unsigned char *out, size_t blocks, const AES_KEY *key,
 *         const unsigned char ivec[16], unsigned char cmac[16])
 * CCM decryption counterpart: CTR-decrypts `blocks` blocks and folds
 * the recovered PLAINTEXT into the CBC-MAC (xmm3).  Because the MAC
 * input is only known after decryption, the loop first outputs a
 * plaintext block, then encrypts the next counter and the MAC update
 * together; the final MAC block is finished separately after the loop.
 * Same frame layout as the encrypt variant: aligned scratch with the
 * byte-swap mask at (%esp), counter increment at 16(%esp), saved %esp
 * at 48(%esp).  All xmm registers are scrubbed before returning.
 */
743.globl	aesni_ccm64_decrypt_blocks
744.type	aesni_ccm64_decrypt_blocks,@function
745.align	16
746aesni_ccm64_decrypt_blocks:
747.L_aesni_ccm64_decrypt_blocks_begin:
748	#ifdef __CET__
749
/* endbr32 */
750.byte	243,15,30,251
751	#endif
752
753	pushl	%ebp
754	pushl	%ebx
755	pushl	%esi
756	pushl	%edi
757	movl	20(%esp),%esi
758	movl	24(%esp),%edi
759	movl	28(%esp),%eax
760	movl	32(%esp),%edx
761	movl	36(%esp),%ebx
762	movl	40(%esp),%ecx
763	movl	%esp,%ebp
764	subl	$60,%esp
765	andl	$-16,%esp
766	movl	%ebp,48(%esp)
767	movdqu	(%ebx),%xmm7
768	movdqu	(%ecx),%xmm3
769	movl	240(%edx),%ecx
/* byte-swap shuffle mask at (%esp), counter increment at 16(%esp) */
770	movl	$202182159,(%esp)
771	movl	$134810123,4(%esp)
772	movl	$67438087,8(%esp)
773	movl	$66051,12(%esp)
774	movl	$1,%ebx
775	xorl	%ebp,%ebp
776	movl	%ebx,16(%esp)
777	movl	%ebp,20(%esp)
778	movl	%ebp,24(%esp)
779	movl	%ebp,28(%esp)
780	movdqa	(%esp),%xmm5
781	movdqa	%xmm7,%xmm2
782	movl	%edx,%ebp
783	movl	%ecx,%ebx
/* pshufb %xmm5,%xmm7 -- byte-swap counter for paddq increments */
784.byte	102,15,56,0,253
/* encrypt the first counter block inline (single-stream loop) */
785	movups	(%edx),%xmm0
786	movups	16(%edx),%xmm1
787	leal	32(%edx),%edx
788	xorps	%xmm0,%xmm2
789.L032enc1_loop_5:
790.byte	102,15,56,220,209
791	decl	%ecx
792	movups	(%edx),%xmm1
793	leal	16(%edx),%edx
794	jnz	.L032enc1_loop_5
795.byte	102,15,56,221,209
/* set up end-biased key pointer / negative index for the 2-wide loop */
796	shll	$4,%ebx
797	movl	$16,%ecx
798	movups	(%esi),%xmm6
799	paddq	16(%esp),%xmm7
800	leal	16(%esi),%esi
801	subl	%ebx,%ecx
802	leal	32(%ebp,%ebx,1),%edx
803	movl	%ecx,%ebx
804	jmp	.L033ccm64_dec_outer
805.align	16
806.L033ccm64_dec_outer:
/* plaintext = ciphertext ^ E(counter); emit it, advance counter */
807	xorps	%xmm2,%xmm6
808	movdqa	%xmm7,%xmm2
809	movups	%xmm6,(%edi)
810	leal	16(%edi),%edi
/* pshufb %xmm5,%xmm2 -- re-swap incremented counter */
811.byte	102,15,56,0,213
812	subl	$1,%eax
813	jz	.L034ccm64_dec_break
814	movups	(%ebp),%xmm0
815	movl	%ebx,%ecx
816	movups	16(%ebp),%xmm1
/* fold recovered plaintext into the MAC, whiten both streams */
817	xorps	%xmm0,%xmm6
818	xorps	%xmm0,%xmm2
819	xorps	%xmm6,%xmm3
820	movups	32(%ebp),%xmm0
/* encrypt next counter (xmm2) and MAC block (xmm3) in lockstep */
821.L035ccm64_dec2_loop:
822.byte	102,15,56,220,209
823.byte	102,15,56,220,217
824	movups	(%edx,%ecx,1),%xmm1
825	addl	$32,%ecx
826.byte	102,15,56,220,208
827.byte	102,15,56,220,216
828	movups	-16(%edx,%ecx,1),%xmm0
829	jnz	.L035ccm64_dec2_loop
830	movups	(%esi),%xmm6
831	paddq	16(%esp),%xmm7
832.byte	102,15,56,220,209
833.byte	102,15,56,220,217
834.byte	102,15,56,221,208
835.byte	102,15,56,221,216
836	leal	16(%esi),%esi
837	jmp	.L033ccm64_dec_outer
838.align	16
/* last block: only the final MAC update remains (xmm3 via aesenc) */
839.L034ccm64_dec_break:
840	movl	240(%ebp),%ecx
841	movl	%ebp,%edx
842	movups	(%edx),%xmm0
843	movups	16(%edx),%xmm1
844	xorps	%xmm0,%xmm6
845	leal	32(%edx),%edx
846	xorps	%xmm6,%xmm3
847.L036enc1_loop_6:
848.byte	102,15,56,220,217
849	decl	%ecx
850	movups	(%edx),%xmm1
851	leal	16(%edx),%edx
852	jnz	.L036enc1_loop_6
853.byte	102,15,56,221,217
/* restore stack, write back final CMAC, scrub xmm state */
854	movl	48(%esp),%esp
855	movl	40(%esp),%edi
856	movups	%xmm3,(%edi)
857	pxor	%xmm0,%xmm0
858	pxor	%xmm1,%xmm1
859	pxor	%xmm2,%xmm2
860	pxor	%xmm3,%xmm3
861	pxor	%xmm4,%xmm4
862	pxor	%xmm5,%xmm5
863	pxor	%xmm6,%xmm6
864	pxor	%xmm7,%xmm7
865	popl	%edi
866	popl	%esi
867	popl	%ebx
868	popl	%ebp
869	ret
870.size	aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin
/*
 * void aesni_ctr32_encrypt_blocks(const unsigned char *in,
 *         unsigned char *out, size_t blocks, const AES_KEY *key,
 *         const unsigned char ivec[16])
 * CTR mode with a 32-bit big-endian counter in the last IV word.
 * After the 4 pushes: 20=in, 24=out, 28=blocks, 32=key, 36=ivec.
 * Strategy: the counter word is extracted (pextrd), incremented in
 * integer registers, and six successor counters are packed into
 * xmm0/xmm1 (three dwords each via pinsrd), byte-swapped with the
 * pshufb mask kept at (%esp).  Per iteration the six counter blocks
 * are rebuilt with pshufd, pre-whitened, and pushed through the
 * 6-wide round pipeline via the .L_aesni_encrypt6_enter side entry.
 * Frame: aligned scratch with mask at (%esp), dword {6,6,6,0} at
 * 16(%esp) (the per-iteration counter step), key^iv block at 32(%esp),
 * packed counters at 48/64(%esp), saved %esp at 80(%esp).
 * Scratch slots and all xmm registers are scrubbed before returning.
 */
871.globl	aesni_ctr32_encrypt_blocks
872.type	aesni_ctr32_encrypt_blocks,@function
873.align	16
874aesni_ctr32_encrypt_blocks:
875.L_aesni_ctr32_encrypt_blocks_begin:
876	#ifdef __CET__
877
/* endbr32 */
878.byte	243,15,30,251
879	#endif
880
881	pushl	%ebp
882	pushl	%ebx
883	pushl	%esi
884	pushl	%edi
885	movl	20(%esp),%esi
886	movl	24(%esp),%edi
887	movl	28(%esp),%eax
888	movl	32(%esp),%edx
889	movl	36(%esp),%ebx
890	movl	%esp,%ebp
891	subl	$88,%esp
892	andl	$-16,%esp
893	movl	%ebp,80(%esp)
894	cmpl	$1,%eax
895	je	.L037ctr32_one_shortcut
896	movdqu	(%ebx),%xmm7
/* byte-swap shuffle mask at (%esp) */
897	movl	$202182159,(%esp)
898	movl	$134810123,4(%esp)
899	movl	$67438087,8(%esp)
900	movl	$66051,12(%esp)
/* counter step {6,6,6,0} at 16(%esp): 6 counters consumed per pass */
901	movl	$6,%ecx
902	xorl	%ebp,%ebp
903	movl	%ecx,16(%esp)
904	movl	%ecx,20(%esp)
905	movl	%ecx,24(%esp)
906	movl	%ebp,28(%esp)
/* pextrd $3,%xmm7,%ebx ; pinsrd $3,%ebp,%xmm7 -- pull out counter word */
907.byte	102,15,58,22,251,3
908.byte	102,15,58,34,253,3
909	movl	240(%edx),%ecx
910	bswap	%ebx
911	pxor	%xmm0,%xmm0
912	pxor	%xmm1,%xmm1
913	movdqa	(%esp),%xmm2
/* pinsrd: pack counters n..n+2 into xmm0 and n+3..n+5 into xmm1 */
914.byte	102,15,58,34,195,0
915	leal	3(%ebx),%ebp
916.byte	102,15,58,34,205,0
917	incl	%ebx
918.byte	102,15,58,34,195,1
919	incl	%ebp
920.byte	102,15,58,34,205,1
921	incl	%ebx
922.byte	102,15,58,34,195,2
923	incl	%ebp
924.byte	102,15,58,34,205,2
925	movdqa	%xmm0,48(%esp)
/* pshufb %xmm2,%xmm0 -- swap packed counters to big-endian */
926.byte	102,15,56,0,194
927	movdqu	(%edx),%xmm6
928	movdqa	%xmm1,64(%esp)
/* pshufb %xmm2,%xmm1 */
929.byte	102,15,56,0,202
/* broadcast counter dwords into per-block lanes */
930	pshufd	$192,%xmm0,%xmm2
931	pshufd	$128,%xmm0,%xmm3
932	cmpl	$6,%eax
933	jb	.L038ctr32_tail
/* cache key^iv at 32(%esp); set up end-biased key walk */
934	pxor	%xmm6,%xmm7
935	shll	$4,%ecx
936	movl	$16,%ebx
937	movdqa	%xmm7,32(%esp)
938	movl	%edx,%ebp
939	subl	%ecx,%ebx
940	leal	32(%edx,%ecx,1),%edx
941	subl	$6,%eax
942	jmp	.L039ctr32_loop6
943.align	16
944.L039ctr32_loop6:
/* rebuild 6 counter blocks from the packed dwords, pre-whitened */
945	pshufd	$64,%xmm0,%xmm4
946	movdqa	32(%esp),%xmm0
947	pshufd	$192,%xmm1,%xmm5
948	pxor	%xmm0,%xmm2
949	pshufd	$128,%xmm1,%xmm6
950	pxor	%xmm0,%xmm3
951	pshufd	$64,%xmm1,%xmm7
952	movups	16(%ebp),%xmm1
953	pxor	%xmm0,%xmm4
954	pxor	%xmm0,%xmm5
/* run round 1 here, then jump into the shared 6-wide pipeline */
955.byte	102,15,56,220,209
956	pxor	%xmm0,%xmm6
957	pxor	%xmm0,%xmm7
958.byte	102,15,56,220,217
959	movups	32(%ebp),%xmm0
960	movl	%ebx,%ecx
961.byte	102,15,56,220,225
962.byte	102,15,56,220,233
963.byte	102,15,56,220,241
964.byte	102,15,56,220,249
965	call	.L_aesni_encrypt6_enter
/* XOR keystream with input, store, and advance packed counters */
966	movups	(%esi),%xmm1
967	movups	16(%esi),%xmm0
968	xorps	%xmm1,%xmm2
969	movups	32(%esi),%xmm1
970	xorps	%xmm0,%xmm3
971	movups	%xmm2,(%edi)
972	movdqa	16(%esp),%xmm0
973	xorps	%xmm1,%xmm4
974	movdqa	64(%esp),%xmm1
975	movups	%xmm3,16(%edi)
976	movups	%xmm4,32(%edi)
/* add 6 to every packed counter dword */
977	paddd	%xmm0,%xmm1
978	paddd	48(%esp),%xmm0
979	movdqa	(%esp),%xmm2
980	movups	48(%esi),%xmm3
981	movups	64(%esi),%xmm4
982	xorps	%xmm3,%xmm5
983	movups	80(%esi),%xmm3
984	leal	96(%esi),%esi
985	movdqa	%xmm0,48(%esp)
/* pshufb %xmm2,%xmm0 -- re-swap updated counters */
986.byte	102,15,56,0,194
987	xorps	%xmm4,%xmm6
988	movups	%xmm5,48(%edi)
989	xorps	%xmm3,%xmm7
990	movdqa	%xmm1,64(%esp)
/* pshufb %xmm2,%xmm1 */
991.byte	102,15,56,0,202
992	movups	%xmm6,64(%edi)
993	pshufd	$192,%xmm0,%xmm2
994	movups	%xmm7,80(%edi)
995	leal	96(%edi),%edi
996	pshufd	$128,%xmm0,%xmm3
997	subl	$6,%eax
998	jnc	.L039ctr32_loop6
999	addl	$6,%eax
1000	jz	.L040ctr32_ret
/* tail entry: recover key pointer/rounds, undo the iv pre-whitening */
1001	movdqu	(%ebp),%xmm7
1002	movl	%ebp,%edx
1003	pxor	32(%esp),%xmm7
1004	movl	240(%ebp),%ecx
1005.L038ctr32_tail:
/* 1..5 remaining blocks: merge counter lanes with the iv tail (xmm7) */
1006	por	%xmm7,%xmm2
1007	cmpl	$2,%eax
1008	jb	.L041ctr32_one
1009	pshufd	$64,%xmm0,%xmm4
1010	por	%xmm7,%xmm3
1011	je	.L042ctr32_two
1012	pshufd	$192,%xmm1,%xmm5
1013	por	%xmm7,%xmm4
1014	cmpl	$4,%eax
1015	jb	.L043ctr32_three
1016	pshufd	$128,%xmm1,%xmm6
1017	por	%xmm7,%xmm5
1018	je	.L044ctr32_four
1019	por	%xmm7,%xmm6
1020	call	_aesni_encrypt6
1021	movups	(%esi),%xmm1
1022	movups	16(%esi),%xmm0
1023	xorps	%xmm1,%xmm2
1024	movups	32(%esi),%xmm1
1025	xorps	%xmm0,%xmm3
1026	movups	48(%esi),%xmm0
1027	xorps	%xmm1,%xmm4
1028	movups	64(%esi),%xmm1
1029	xorps	%xmm0,%xmm5
1030	movups	%xmm2,(%edi)
1031	xorps	%xmm1,%xmm6
1032	movups	%xmm3,16(%edi)
1033	movups	%xmm4,32(%edi)
1034	movups	%xmm5,48(%edi)
1035	movups	%xmm6,64(%edi)
1036	jmp	.L040ctr32_ret
1037.align	16
/* single-block fast path: skip all counter packing */
1038.L037ctr32_one_shortcut:
1039	movups	(%ebx),%xmm2
1040	movl	240(%edx),%ecx
1041.L041ctr32_one:
1042	movups	(%edx),%xmm0
1043	movups	16(%edx),%xmm1
1044	leal	32(%edx),%edx
1045	xorps	%xmm0,%xmm2
1046.L045enc1_loop_7:
1047.byte	102,15,56,220,209
1048	decl	%ecx
1049	movups	(%edx),%xmm1
1050	leal	16(%edx),%edx
1051	jnz	.L045enc1_loop_7
1052.byte	102,15,56,221,209
1053	movups	(%esi),%xmm6
1054	xorps	%xmm2,%xmm6
1055	movups	%xmm6,(%edi)
1056	jmp	.L040ctr32_ret
1057.align	16
1058.L042ctr32_two:
1059	call	_aesni_encrypt2
1060	movups	(%esi),%xmm5
1061	movups	16(%esi),%xmm6
1062	xorps	%xmm5,%xmm2
1063	xorps	%xmm6,%xmm3
1064	movups	%xmm2,(%edi)
1065	movups	%xmm3,16(%edi)
1066	jmp	.L040ctr32_ret
1067.align	16
1068.L043ctr32_three:
1069	call	_aesni_encrypt3
1070	movups	(%esi),%xmm5
1071	movups	16(%esi),%xmm6
1072	xorps	%xmm5,%xmm2
1073	movups	32(%esi),%xmm7
1074	xorps	%xmm6,%xmm3
1075	movups	%xmm2,(%edi)
1076	xorps	%xmm7,%xmm4
1077	movups	%xmm3,16(%edi)
1078	movups	%xmm4,32(%edi)
1079	jmp	.L040ctr32_ret
1080.align	16
1081.L044ctr32_four:
1082	call	_aesni_encrypt4
1083	movups	(%esi),%xmm6
1084	movups	16(%esi),%xmm7
1085	movups	32(%esi),%xmm1
1086	xorps	%xmm6,%xmm2
1087	movups	48(%esi),%xmm0
1088	xorps	%xmm7,%xmm3
1089	movups	%xmm2,(%edi)
1090	xorps	%xmm1,%xmm4
1091	movups	%xmm3,16(%edi)
1092	xorps	%xmm0,%xmm5
1093	movups	%xmm4,32(%edi)
1094	movups	%xmm5,48(%edi)
/* exit: scrub xmm regs AND the key/counter scratch slots on the stack */
1095.L040ctr32_ret:
1096	pxor	%xmm0,%xmm0
1097	pxor	%xmm1,%xmm1
1098	pxor	%xmm2,%xmm2
1099	pxor	%xmm3,%xmm3
1100	pxor	%xmm4,%xmm4
1101	movdqa	%xmm0,32(%esp)
1102	pxor	%xmm5,%xmm5
1103	movdqa	%xmm0,48(%esp)
1104	pxor	%xmm6,%xmm6
1105	movdqa	%xmm0,64(%esp)
1106	pxor	%xmm7,%xmm7
1107	movl	80(%esp),%esp
1108	popl	%edi
1109	popl	%esi
1110	popl	%ebx
1111	popl	%ebp
1112	ret
1113.size	aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin
1114.globl	aesni_xts_encrypt
1115.type	aesni_xts_encrypt,@function
1116.align	16
1117aesni_xts_encrypt:
1118.L_aesni_xts_encrypt_begin:
1119	#ifdef __CET__
1120
1121.byte	243,15,30,251
1122	#endif
1123
1124	pushl	%ebp
1125	pushl	%ebx
1126	pushl	%esi
1127	pushl	%edi
1128	movl	36(%esp),%edx
1129	movl	40(%esp),%esi
1130	movl	240(%edx),%ecx
1131	movups	(%esi),%xmm2
1132	movups	(%edx),%xmm0
1133	movups	16(%edx),%xmm1
1134	leal	32(%edx),%edx
1135	xorps	%xmm0,%xmm2
1136.L046enc1_loop_8:
1137.byte	102,15,56,220,209
1138	decl	%ecx
1139	movups	(%edx),%xmm1
1140	leal	16(%edx),%edx
1141	jnz	.L046enc1_loop_8
1142.byte	102,15,56,221,209
1143	movl	20(%esp),%esi
1144	movl	24(%esp),%edi
1145	movl	28(%esp),%eax
1146	movl	32(%esp),%edx
1147	movl	%esp,%ebp
1148	subl	$120,%esp
1149	movl	240(%edx),%ecx
1150	andl	$-16,%esp
1151	movl	$135,96(%esp)
1152	movl	$0,100(%esp)
1153	movl	$1,104(%esp)
1154	movl	$0,108(%esp)
1155	movl	%eax,112(%esp)
1156	movl	%ebp,116(%esp)
1157	movdqa	%xmm2,%xmm1
1158	pxor	%xmm0,%xmm0
1159	movdqa	96(%esp),%xmm3
1160	pcmpgtd	%xmm1,%xmm0
1161	andl	$-16,%eax
1162	movl	%edx,%ebp
1163	movl	%ecx,%ebx
1164	subl	$96,%eax
1165	jc	.L047xts_enc_short
1166	shll	$4,%ecx
1167	movl	$16,%ebx
1168	subl	%ecx,%ebx
1169	leal	32(%edx,%ecx,1),%edx
1170	jmp	.L048xts_enc_loop6
1171.align	16
1172.L048xts_enc_loop6:
1173	pshufd	$19,%xmm0,%xmm2
1174	pxor	%xmm0,%xmm0
1175	movdqa	%xmm1,(%esp)
1176	paddq	%xmm1,%xmm1
1177	pand	%xmm3,%xmm2
1178	pcmpgtd	%xmm1,%xmm0
1179	pxor	%xmm2,%xmm1
1180	pshufd	$19,%xmm0,%xmm2
1181	pxor	%xmm0,%xmm0
1182	movdqa	%xmm1,16(%esp)
1183	paddq	%xmm1,%xmm1
1184	pand	%xmm3,%xmm2
1185	pcmpgtd	%xmm1,%xmm0
1186	pxor	%xmm2,%xmm1
1187	pshufd	$19,%xmm0,%xmm2
1188	pxor	%xmm0,%xmm0
1189	movdqa	%xmm1,32(%esp)
1190	paddq	%xmm1,%xmm1
1191	pand	%xmm3,%xmm2
1192	pcmpgtd	%xmm1,%xmm0
1193	pxor	%xmm2,%xmm1
1194	pshufd	$19,%xmm0,%xmm2
1195	pxor	%xmm0,%xmm0
1196	movdqa	%xmm1,48(%esp)
1197	paddq	%xmm1,%xmm1
1198	pand	%xmm3,%xmm2
1199	pcmpgtd	%xmm1,%xmm0
1200	pxor	%xmm2,%xmm1
1201	pshufd	$19,%xmm0,%xmm7
1202	movdqa	%xmm1,64(%esp)
1203	paddq	%xmm1,%xmm1
1204	movups	(%ebp),%xmm0
1205	pand	%xmm3,%xmm7
1206	movups	(%esi),%xmm2
1207	pxor	%xmm1,%xmm7
1208	movl	%ebx,%ecx
1209	movdqu	16(%esi),%xmm3
1210	xorps	%xmm0,%xmm2
1211	movdqu	32(%esi),%xmm4
1212	pxor	%xmm0,%xmm3
1213	movdqu	48(%esi),%xmm5
1214	pxor	%xmm0,%xmm4
1215	movdqu	64(%esi),%xmm6
1216	pxor	%xmm0,%xmm5
1217	movdqu	80(%esi),%xmm1
1218	pxor	%xmm0,%xmm6
1219	leal	96(%esi),%esi
1220	pxor	(%esp),%xmm2
1221	movdqa	%xmm7,80(%esp)
1222	pxor	%xmm1,%xmm7
1223	movups	16(%ebp),%xmm1
1224	pxor	16(%esp),%xmm3
1225	pxor	32(%esp),%xmm4
1226.byte	102,15,56,220,209
1227	pxor	48(%esp),%xmm5
1228	pxor	64(%esp),%xmm6
1229.byte	102,15,56,220,217
1230	pxor	%xmm0,%xmm7
1231	movups	32(%ebp),%xmm0
1232.byte	102,15,56,220,225
1233.byte	102,15,56,220,233
1234.byte	102,15,56,220,241
1235.byte	102,15,56,220,249
1236	call	.L_aesni_encrypt6_enter
1237	movdqa	80(%esp),%xmm1
1238	pxor	%xmm0,%xmm0
1239	xorps	(%esp),%xmm2
1240	pcmpgtd	%xmm1,%xmm0
1241	xorps	16(%esp),%xmm3
1242	movups	%xmm2,(%edi)
1243	xorps	32(%esp),%xmm4
1244	movups	%xmm3,16(%edi)
1245	xorps	48(%esp),%xmm5
1246	movups	%xmm4,32(%edi)
1247	xorps	64(%esp),%xmm6
1248	movups	%xmm5,48(%edi)
1249	xorps	%xmm1,%xmm7
1250	movups	%xmm6,64(%edi)
1251	pshufd	$19,%xmm0,%xmm2
1252	movups	%xmm7,80(%edi)
1253	leal	96(%edi),%edi
1254	movdqa	96(%esp),%xmm3
1255	pxor	%xmm0,%xmm0
1256	paddq	%xmm1,%xmm1
1257	pand	%xmm3,%xmm2
1258	pcmpgtd	%xmm1,%xmm0
1259	pxor	%xmm2,%xmm1
1260	subl	$96,%eax
1261	jnc	.L048xts_enc_loop6
1262	movl	240(%ebp),%ecx
1263	movl	%ebp,%edx
1264	movl	%ecx,%ebx
1265.L047xts_enc_short:
1266	addl	$96,%eax
1267	jz	.L049xts_enc_done6x
1268	movdqa	%xmm1,%xmm5
1269	cmpl	$32,%eax
1270	jb	.L050xts_enc_one
1271	pshufd	$19,%xmm0,%xmm2
1272	pxor	%xmm0,%xmm0
1273	paddq	%xmm1,%xmm1
1274	pand	%xmm3,%xmm2
1275	pcmpgtd	%xmm1,%xmm0
1276	pxor	%xmm2,%xmm1
1277	je	.L051xts_enc_two
1278	pshufd	$19,%xmm0,%xmm2
1279	pxor	%xmm0,%xmm0
1280	movdqa	%xmm1,%xmm6
1281	paddq	%xmm1,%xmm1
1282	pand	%xmm3,%xmm2
1283	pcmpgtd	%xmm1,%xmm0
1284	pxor	%xmm2,%xmm1
1285	cmpl	$64,%eax
1286	jb	.L052xts_enc_three
1287	pshufd	$19,%xmm0,%xmm2
1288	pxor	%xmm0,%xmm0
1289	movdqa	%xmm1,%xmm7
1290	paddq	%xmm1,%xmm1
1291	pand	%xmm3,%xmm2
1292	pcmpgtd	%xmm1,%xmm0
1293	pxor	%xmm2,%xmm1
1294	movdqa	%xmm5,(%esp)
1295	movdqa	%xmm6,16(%esp)
1296	je	.L053xts_enc_four
1297	movdqa	%xmm7,32(%esp)
1298	pshufd	$19,%xmm0,%xmm7
1299	movdqa	%xmm1,48(%esp)
1300	paddq	%xmm1,%xmm1
1301	pand	%xmm3,%xmm7
1302	pxor	%xmm1,%xmm7
1303	movdqu	(%esi),%xmm2
1304	movdqu	16(%esi),%xmm3
1305	movdqu	32(%esi),%xmm4
1306	pxor	(%esp),%xmm2
1307	movdqu	48(%esi),%xmm5
1308	pxor	16(%esp),%xmm3
1309	movdqu	64(%esi),%xmm6
1310	pxor	32(%esp),%xmm4
1311	leal	80(%esi),%esi
1312	pxor	48(%esp),%xmm5
1313	movdqa	%xmm7,64(%esp)
1314	pxor	%xmm7,%xmm6
1315	call	_aesni_encrypt6
1316	movaps	64(%esp),%xmm1
1317	xorps	(%esp),%xmm2
1318	xorps	16(%esp),%xmm3
1319	xorps	32(%esp),%xmm4
1320	movups	%xmm2,(%edi)
1321	xorps	48(%esp),%xmm5
1322	movups	%xmm3,16(%edi)
1323	xorps	%xmm1,%xmm6
1324	movups	%xmm4,32(%edi)
1325	movups	%xmm5,48(%edi)
1326	movups	%xmm6,64(%edi)
1327	leal	80(%edi),%edi
1328	jmp	.L054xts_enc_done
1329.align	16
1330.L050xts_enc_one:
1331	movups	(%esi),%xmm2
1332	leal	16(%esi),%esi
1333	xorps	%xmm5,%xmm2
1334	movups	(%edx),%xmm0
1335	movups	16(%edx),%xmm1
1336	leal	32(%edx),%edx
1337	xorps	%xmm0,%xmm2
1338.L055enc1_loop_9:
1339.byte	102,15,56,220,209
1340	decl	%ecx
1341	movups	(%edx),%xmm1
1342	leal	16(%edx),%edx
1343	jnz	.L055enc1_loop_9
1344.byte	102,15,56,221,209
1345	xorps	%xmm5,%xmm2
1346	movups	%xmm2,(%edi)
1347	leal	16(%edi),%edi
1348	movdqa	%xmm5,%xmm1
1349	jmp	.L054xts_enc_done
1350.align	16
1351.L051xts_enc_two:
1352	movaps	%xmm1,%xmm6
1353	movups	(%esi),%xmm2
1354	movups	16(%esi),%xmm3
1355	leal	32(%esi),%esi
1356	xorps	%xmm5,%xmm2
1357	xorps	%xmm6,%xmm3
1358	call	_aesni_encrypt2
1359	xorps	%xmm5,%xmm2
1360	xorps	%xmm6,%xmm3
1361	movups	%xmm2,(%edi)
1362	movups	%xmm3,16(%edi)
1363	leal	32(%edi),%edi
1364	movdqa	%xmm6,%xmm1
1365	jmp	.L054xts_enc_done
1366.align	16
1367.L052xts_enc_three:
1368	movaps	%xmm1,%xmm7
1369	movups	(%esi),%xmm2
1370	movups	16(%esi),%xmm3
1371	movups	32(%esi),%xmm4
1372	leal	48(%esi),%esi
1373	xorps	%xmm5,%xmm2
1374	xorps	%xmm6,%xmm3
1375	xorps	%xmm7,%xmm4
1376	call	_aesni_encrypt3
1377	xorps	%xmm5,%xmm2
1378	xorps	%xmm6,%xmm3
1379	xorps	%xmm7,%xmm4
1380	movups	%xmm2,(%edi)
1381	movups	%xmm3,16(%edi)
1382	movups	%xmm4,32(%edi)
1383	leal	48(%edi),%edi
1384	movdqa	%xmm7,%xmm1
1385	jmp	.L054xts_enc_done
1386.align	16
1387.L053xts_enc_four:
1388	movaps	%xmm1,%xmm6
1389	movups	(%esi),%xmm2
1390	movups	16(%esi),%xmm3
1391	movups	32(%esi),%xmm4
1392	xorps	(%esp),%xmm2
1393	movups	48(%esi),%xmm5
1394	leal	64(%esi),%esi
1395	xorps	16(%esp),%xmm3
1396	xorps	%xmm7,%xmm4
1397	xorps	%xmm6,%xmm5
1398	call	_aesni_encrypt4
1399	xorps	(%esp),%xmm2
1400	xorps	16(%esp),%xmm3
1401	xorps	%xmm7,%xmm4
1402	movups	%xmm2,(%edi)
1403	xorps	%xmm6,%xmm5
1404	movups	%xmm3,16(%edi)
1405	movups	%xmm4,32(%edi)
1406	movups	%xmm5,48(%edi)
1407	leal	64(%edi),%edi
1408	movdqa	%xmm6,%xmm1
1409	jmp	.L054xts_enc_done
1410.align	16
1411.L049xts_enc_done6x:
1412	movl	112(%esp),%eax
1413	andl	$15,%eax
1414	jz	.L056xts_enc_ret
1415	movdqa	%xmm1,%xmm5
1416	movl	%eax,112(%esp)
1417	jmp	.L057xts_enc_steal
1418.align	16
1419.L054xts_enc_done:
1420	movl	112(%esp),%eax
1421	pxor	%xmm0,%xmm0
1422	andl	$15,%eax
1423	jz	.L056xts_enc_ret
1424	pcmpgtd	%xmm1,%xmm0
1425	movl	%eax,112(%esp)
1426	pshufd	$19,%xmm0,%xmm5
1427	paddq	%xmm1,%xmm1
1428	pand	96(%esp),%xmm5
1429	pxor	%xmm1,%xmm5
1430.L057xts_enc_steal:
1431	movzbl	(%esi),%ecx
1432	movzbl	-16(%edi),%edx
1433	leal	1(%esi),%esi
1434	movb	%cl,-16(%edi)
1435	movb	%dl,(%edi)
1436	leal	1(%edi),%edi
1437	subl	$1,%eax
1438	jnz	.L057xts_enc_steal
1439	subl	112(%esp),%edi
1440	movl	%ebp,%edx
1441	movl	%ebx,%ecx
1442	movups	-16(%edi),%xmm2
1443	xorps	%xmm5,%xmm2
1444	movups	(%edx),%xmm0
1445	movups	16(%edx),%xmm1
1446	leal	32(%edx),%edx
1447	xorps	%xmm0,%xmm2
1448.L058enc1_loop_10:
1449.byte	102,15,56,220,209
1450	decl	%ecx
1451	movups	(%edx),%xmm1
1452	leal	16(%edx),%edx
1453	jnz	.L058enc1_loop_10
1454.byte	102,15,56,221,209
1455	xorps	%xmm5,%xmm2
1456	movups	%xmm2,-16(%edi)
1457.L056xts_enc_ret:
1458	pxor	%xmm0,%xmm0
1459	pxor	%xmm1,%xmm1
1460	pxor	%xmm2,%xmm2
1461	movdqa	%xmm0,(%esp)
1462	pxor	%xmm3,%xmm3
1463	movdqa	%xmm0,16(%esp)
1464	pxor	%xmm4,%xmm4
1465	movdqa	%xmm0,32(%esp)
1466	pxor	%xmm5,%xmm5
1467	movdqa	%xmm0,48(%esp)
1468	pxor	%xmm6,%xmm6
1469	movdqa	%xmm0,64(%esp)
1470	pxor	%xmm7,%xmm7
1471	movdqa	%xmm0,80(%esp)
1472	movl	116(%esp),%esp
1473	popl	%edi
1474	popl	%esi
1475	popl	%ebx
1476	popl	%ebp
1477	ret
1478.size	aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin
/*
 * void aesni_xts_decrypt(const char *inp, char *out, size_t len,
 *                        const AES_KEY *key1, const AES_KEY *key2,
 *                        const unsigned char iv[16])
 *
 * AES-XTS (IEEE P1619) bulk decryption using AES-NI, i386 cdecl.
 * After the four register pushes below, the stack arguments sit at
 * 20(%esp)..40(%esp).  Argument names above are inferred from how each
 * slot is used here -- confirm against the aesni-x86.pl generator.
 * Offset 240 of a key schedule is read as the AES round count.
 */
.globl	aesni_xts_decrypt
.type	aesni_xts_decrypt,@function
.align	16
aesni_xts_decrypt:
.L_aesni_xts_decrypt_begin:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
/* Encrypt the IV with the tweak key (arg5, key2) to form the initial
 * tweak.  The XTS tweak is always ENcrypted, even on the decrypt path. */
	movl	36(%esp),%edx
	movl	40(%esp),%esi
	movl	240(%edx),%ecx
	movups	(%esi),%xmm2
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L059enc1_loop_11:
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L059enc1_loop_11
.byte	102,15,56,221,209	/* aesenclast %xmm1,%xmm2 */
/* Load inp/out/len/key1 and build a 16-byte-aligned scratch frame.
 * Frame layout: 0..80(%esp) six tweak slots, 96(%esp) the XTS reduction
 * constant 0x87 (low dword of a 1,0,135-pattern mask), 112(%esp) the
 * residual byte count (len mod 16) for ciphertext stealing, 116(%esp)
 * the caller's %esp. */
	movl	20(%esp),%esi
	movl	24(%esp),%edi
	movl	28(%esp),%eax
	movl	32(%esp),%edx
	movl	%esp,%ebp
	subl	$120,%esp
	andl	$-16,%esp
	xorl	%ebx,%ebx
	testl	$15,%eax
	setnz	%bl
	shll	$4,%ebx
	subl	%ebx,%eax	/* hold back one full block when stealing is needed */
	movl	$135,96(%esp)
	movl	$0,100(%esp)
	movl	$1,104(%esp)
	movl	$0,108(%esp)
	movl	%eax,112(%esp)
	movl	%ebp,116(%esp)
	movl	240(%edx),%ecx
	movl	%edx,%ebp	/* %ebp = key1 schedule, %ebx = round count */
	movl	%ecx,%ebx
	movdqa	%xmm2,%xmm1	/* %xmm1 = current tweak from here on */
	pxor	%xmm0,%xmm0
	movdqa	96(%esp),%xmm3	/* %xmm3 = GF(2^128) reduction mask */
	pcmpgtd	%xmm1,%xmm0	/* sign spill used by the doubling below */
	andl	$-16,%eax
	subl	$96,%eax
	jc	.L060xts_dec_short
	shll	$4,%ecx
	movl	$16,%ebx
	subl	%ecx,%ebx
	leal	32(%edx,%ecx,1),%edx	/* point past schedule for negative indexing */
	jmp	.L061xts_dec_loop6
.align	16
/* Bulk loop: derive six consecutive tweaks -- each is the previous one
 * doubled in GF(2^128) (paddq plus conditional xor with 0x87 via the
 * pshufd/pand mask trick) -- park them at (%esp)..80(%esp), then run
 * six blocks through AES in parallel. */
.L061xts_dec_loop6:
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,(%esp)
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,16(%esp)
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,32(%esp)
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,48(%esp)
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	pshufd	$19,%xmm0,%xmm7
	movdqa	%xmm1,64(%esp)
	paddq	%xmm1,%xmm1
	movups	(%ebp),%xmm0
	pand	%xmm3,%xmm7
	movups	(%esi),%xmm2
	pxor	%xmm1,%xmm7
/* Load six ciphertext blocks, xor with round key 0 and the per-block
 * tweaks (interleaved to hide latency). */
	movl	%ebx,%ecx
	movdqu	16(%esi),%xmm3
	xorps	%xmm0,%xmm2
	movdqu	32(%esi),%xmm4
	pxor	%xmm0,%xmm3
	movdqu	48(%esi),%xmm5
	pxor	%xmm0,%xmm4
	movdqu	64(%esi),%xmm6
	pxor	%xmm0,%xmm5
	movdqu	80(%esi),%xmm1
	pxor	%xmm0,%xmm6
	leal	96(%esi),%esi
	pxor	(%esp),%xmm2
	movdqa	%xmm7,80(%esp)
	pxor	%xmm1,%xmm7
	movups	16(%ebp),%xmm1
	pxor	16(%esp),%xmm3
	pxor	32(%esp),%xmm4
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
	pxor	48(%esp),%xmm5
	pxor	64(%esp),%xmm6
.byte	102,15,56,222,217	/* aesdec %xmm1,%xmm3 */
	pxor	%xmm0,%xmm7
	movups	32(%ebp),%xmm0
.byte	102,15,56,222,225	/* aesdec %xmm1,%xmm4 */
.byte	102,15,56,222,233	/* aesdec %xmm1,%xmm5 */
.byte	102,15,56,222,241	/* aesdec %xmm1,%xmm6 */
.byte	102,15,56,222,249	/* aesdec %xmm1,%xmm7; remaining rounds below */
	call	.L_aesni_decrypt6_enter
/* Un-tweak the six results, store them, and double the tweak once more
 * for the next iteration. */
	movdqa	80(%esp),%xmm1
	pxor	%xmm0,%xmm0
	xorps	(%esp),%xmm2
	pcmpgtd	%xmm1,%xmm0
	xorps	16(%esp),%xmm3
	movups	%xmm2,(%edi)
	xorps	32(%esp),%xmm4
	movups	%xmm3,16(%edi)
	xorps	48(%esp),%xmm5
	movups	%xmm4,32(%edi)
	xorps	64(%esp),%xmm6
	movups	%xmm5,48(%edi)
	xorps	%xmm1,%xmm7
	movups	%xmm6,64(%edi)
	pshufd	$19,%xmm0,%xmm2
	movups	%xmm7,80(%edi)
	leal	96(%edi),%edi
	movdqa	96(%esp),%xmm3
	pxor	%xmm0,%xmm0
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	subl	$96,%eax
	jnc	.L061xts_dec_loop6
	movl	240(%ebp),%ecx
	movl	%ebp,%edx
	movl	%ecx,%ebx
/* Tail: 0..5 whole blocks remain.  Dispatch on the residual length,
 * generating just as many extra tweaks as needed along the way. */
.L060xts_dec_short:
	addl	$96,%eax
	jz	.L062xts_dec_done6x
	movdqa	%xmm1,%xmm5
	cmpl	$32,%eax
	jb	.L063xts_dec_one
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	je	.L064xts_dec_two
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,%xmm6
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	cmpl	$64,%eax
	jb	.L065xts_dec_three
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,%xmm7
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	movdqa	%xmm5,(%esp)
	movdqa	%xmm6,16(%esp)
	je	.L066xts_dec_four
/* Five-block tail: spill tweaks 3..5 and reuse the 6-wide primitive. */
	movdqa	%xmm7,32(%esp)
	pshufd	$19,%xmm0,%xmm7
	movdqa	%xmm1,48(%esp)
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm7
	pxor	%xmm1,%xmm7
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	pxor	(%esp),%xmm2
	movdqu	48(%esi),%xmm5
	pxor	16(%esp),%xmm3
	movdqu	64(%esi),%xmm6
	pxor	32(%esp),%xmm4
	leal	80(%esi),%esi
	pxor	48(%esp),%xmm5
	movdqa	%xmm7,64(%esp)
	pxor	%xmm7,%xmm6
	call	_aesni_decrypt6
	movaps	64(%esp),%xmm1
	xorps	(%esp),%xmm2
	xorps	16(%esp),%xmm3
	xorps	32(%esp),%xmm4
	movups	%xmm2,(%edi)
	xorps	48(%esp),%xmm5
	movups	%xmm3,16(%edi)
	xorps	%xmm1,%xmm6
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	movups	%xmm6,64(%edi)
	leal	80(%edi),%edi
	jmp	.L067xts_dec_done
.align	16
/* Single-block tail: inline one-block AES decrypt (tweak in %xmm5). */
.L063xts_dec_one:
	movups	(%esi),%xmm2
	leal	16(%esi),%esi
	xorps	%xmm5,%xmm2
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L068dec1_loop_12:
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L068dec1_loop_12
.byte	102,15,56,223,209	/* aesdeclast %xmm1,%xmm2 */
	xorps	%xmm5,%xmm2
	movups	%xmm2,(%edi)
	leal	16(%edi),%edi
	movdqa	%xmm5,%xmm1	/* keep last-used tweak for possible stealing */
	jmp	.L067xts_dec_done
.align	16
.L064xts_dec_two:
	movaps	%xmm1,%xmm6
	movups	(%esi),%xmm2
	movups	16(%esi),%xmm3
	leal	32(%esi),%esi
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	call	_aesni_decrypt2
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	leal	32(%edi),%edi
	movdqa	%xmm6,%xmm1
	jmp	.L067xts_dec_done
.align	16
.L065xts_dec_three:
	movaps	%xmm1,%xmm7
	movups	(%esi),%xmm2
	movups	16(%esi),%xmm3
	movups	32(%esi),%xmm4
	leal	48(%esi),%esi
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	xorps	%xmm7,%xmm4
	call	_aesni_decrypt3
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	xorps	%xmm7,%xmm4
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	leal	48(%edi),%edi
	movdqa	%xmm7,%xmm1
	jmp	.L067xts_dec_done
.align	16
.L066xts_dec_four:
	movaps	%xmm1,%xmm6
	movups	(%esi),%xmm2
	movups	16(%esi),%xmm3
	movups	32(%esi),%xmm4
	xorps	(%esp),%xmm2
	movups	48(%esi),%xmm5
	leal	64(%esi),%esi
	xorps	16(%esp),%xmm3
	xorps	%xmm7,%xmm4
	xorps	%xmm6,%xmm5
	call	_aesni_decrypt4
	xorps	(%esp),%xmm2
	xorps	16(%esp),%xmm3
	xorps	%xmm7,%xmm4
	movups	%xmm2,(%edi)
	xorps	%xmm6,%xmm5
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	leal	64(%edi),%edi
	movdqa	%xmm6,%xmm1
	jmp	.L067xts_dec_done
.align	16
/* Reached when the 6-way loop consumed all whole blocks; only the
 * stolen partial block (if any) remains. */
.L062xts_dec_done6x:
	movl	112(%esp),%eax
	andl	$15,%eax
	jz	.L069xts_dec_ret
	movl	%eax,112(%esp)
	jmp	.L070xts_dec_only_one_more
.align	16
.L067xts_dec_done:
	movl	112(%esp),%eax
	pxor	%xmm0,%xmm0
	andl	$15,%eax
	jz	.L069xts_dec_ret
	pcmpgtd	%xmm1,%xmm0
	movl	%eax,112(%esp)
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	96(%esp),%xmm3
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
/* Ciphertext stealing: the last FULL block is decrypted with the tweak
 * one step ahead (%xmm5); the trailing partial block is later decrypted
 * with the earlier tweak saved in %xmm6, per the XTS stealing order. */
.L070xts_dec_only_one_more:
	pshufd	$19,%xmm0,%xmm5
	movdqa	%xmm1,%xmm6
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm5
	pxor	%xmm1,%xmm5
	movl	%ebp,%edx
	movl	%ebx,%ecx
	movups	(%esi),%xmm2
	xorps	%xmm5,%xmm2
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L071dec1_loop_13:
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L071dec1_loop_13
.byte	102,15,56,223,209	/* aesdeclast %xmm1,%xmm2 */
	xorps	%xmm5,%xmm2
	movups	%xmm2,(%edi)
/* Byte-swap loop: move the tail bytes into place and rebuild the block
 * to be decrypted with the held-back tweak. */
.L072xts_dec_steal:
	movzbl	16(%esi),%ecx
	movzbl	(%edi),%edx
	leal	1(%esi),%esi
	movb	%cl,(%edi)
	movb	%dl,16(%edi)
	leal	1(%edi),%edi
	subl	$1,%eax
	jnz	.L072xts_dec_steal
	subl	112(%esp),%edi
	movl	%ebp,%edx
	movl	%ebx,%ecx
	movups	(%edi),%xmm2
	xorps	%xmm6,%xmm2
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L073dec1_loop_14:
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L073dec1_loop_14
.byte	102,15,56,223,209	/* aesdeclast %xmm1,%xmm2 */
	xorps	%xmm6,%xmm2
	movups	%xmm2,(%edi)
/* Scrub all XMM registers and the stack scratch area (they held key
 * material and tweaks), restore the caller's %esp, and return. */
.L069xts_dec_ret:
	pxor	%xmm0,%xmm0
	pxor	%xmm1,%xmm1
	pxor	%xmm2,%xmm2
	movdqa	%xmm0,(%esp)
	pxor	%xmm3,%xmm3
	movdqa	%xmm0,16(%esp)
	pxor	%xmm4,%xmm4
	movdqa	%xmm0,32(%esp)
	pxor	%xmm5,%xmm5
	movdqa	%xmm0,48(%esp)
	pxor	%xmm6,%xmm6
	movdqa	%xmm0,64(%esp)
	pxor	%xmm7,%xmm7
	movdqa	%xmm0,80(%esp)
	movl	116(%esp),%esp
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin
/*
 * aesni_ocb_encrypt(inp, out, blocks, key, start_block_num,
 *                   offset_i[16], L_table, checksum[16])     (i386 cdecl)
 *
 * AES-OCB bulk encryption.  Argument roles are inferred from usage:
 * 20(%esp)=input, 24(%esp)=output, 28(%esp)=block count, 32(%esp)=key
 * schedule (round count at offset 240), 36(%esp)=running block counter,
 * 40(%esp)=current offset (read into %xmm0, written back at exit),
 * 44(%esp)=table of precomputed L values indexed by ntz, 48(%esp)=
 * running checksum (read into %xmm1, written back at exit).  Confirm
 * against the aesni-x86.pl generator.
 */
.globl	aesni_ocb_encrypt
.type	aesni_ocb_encrypt,@function
.align	16
aesni_ocb_encrypt:
.L_aesni_ocb_encrypt_begin:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	40(%esp),%ecx
	movl	48(%esp),%ebx
	movl	20(%esp),%esi
	movl	24(%esp),%edi
	movl	28(%esp),%eax
	movl	32(%esp),%edx
	movdqu	(%ecx),%xmm0	/* %xmm0 = current offset */
	movl	36(%esp),%ebp	/* %ebp = block counter */
	movdqu	(%ebx),%xmm1	/* %xmm1 = running checksum */
	movl	44(%esp),%ebx	/* %ebx = L table */
/* Aligned scratch frame.  120(%esp) = out-in delta (output is addressed
 * as delta(in)), 124(%esp) = in + 16*blocks - 96 (bulk-loop boundary),
 * 128(%esp) = caller's %esp. */
	movl	%esp,%ecx
	subl	$132,%esp
	andl	$-16,%esp
	subl	%esi,%edi
	shll	$4,%eax
	leal	-96(%esi,%eax,1),%eax
	movl	%edi,120(%esp)
	movl	%eax,124(%esp)
	movl	%ecx,128(%esp)
	movl	240(%edx),%ecx
	testl	$1,%ebp
	jnz	.L074odd
/* Counter is even: process one block inline (bsf = ntz picks the L
 * entry) so the 6-way loop below starts on an odd counter value. */
	bsfl	%ebp,%eax
	addl	$1,%ebp
	shll	$4,%eax
	movdqu	(%ebx,%eax,1),%xmm7
	movl	%edx,%eax
	movdqu	(%esi),%xmm2
	leal	16(%esi),%esi
	pxor	%xmm0,%xmm7	/* offset ^= L[ntz] */
	pxor	%xmm2,%xmm1	/* checksum ^= plaintext */
	pxor	%xmm7,%xmm2
	movdqa	%xmm1,%xmm6
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L075enc1_loop_15:
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L075enc1_loop_15
.byte	102,15,56,221,209	/* aesenclast %xmm1,%xmm2 */
	xorps	%xmm7,%xmm2
	movdqa	%xmm7,%xmm0
	movdqa	%xmm6,%xmm1
	movups	%xmm2,-16(%edi,%esi,1)
	movl	240(%eax),%ecx
	movl	%eax,%edx
	movl	124(%esp),%eax
/* 112(%esp) = key pointer, 116(%esp) = 16 - 16*rounds, used to index
 * the key schedule from its far end inside the grand loop. */
.L074odd:
	shll	$4,%ecx
	movl	$16,%edi
	subl	%ecx,%edi
	movl	%edx,112(%esp)
	leal	32(%edx,%ecx,1),%edx
	movl	%edi,116(%esp)
	cmpl	%eax,%esi
	ja	.L076short
	jmp	.L077grandloop
.align	32
/* Grand loop: six blocks per iteration.  Even-indexed blocks use L[0]
 * (= (%ebx)); the three odd indices i+1, i+3, i+5 get L[ntz] via bsf.
 * Offsets chain by successive xors; checksum accumulates plaintext. */
.L077grandloop:
	leal	1(%ebp),%ecx
	leal	3(%ebp),%eax
	leal	5(%ebp),%edi
	addl	$6,%ebp
	bsfl	%ecx,%ecx
	bsfl	%eax,%eax
	bsfl	%edi,%edi
	shll	$4,%ecx
	shll	$4,%eax
	shll	$4,%edi
	movdqu	(%ebx),%xmm2
	movdqu	(%ebx,%ecx,1),%xmm3
	movl	116(%esp),%ecx
	movdqa	%xmm2,%xmm4
	movdqu	(%ebx,%eax,1),%xmm5
	movdqa	%xmm2,%xmm6
	movdqu	(%ebx,%edi,1),%xmm7
	pxor	%xmm0,%xmm2
	pxor	%xmm2,%xmm3
	movdqa	%xmm2,(%esp)
	pxor	%xmm3,%xmm4
	movdqa	%xmm3,16(%esp)
	pxor	%xmm4,%xmm5
	movdqa	%xmm4,32(%esp)
	pxor	%xmm5,%xmm6
	movdqa	%xmm5,48(%esp)
	pxor	%xmm6,%xmm7
	movdqa	%xmm6,64(%esp)
	movdqa	%xmm7,80(%esp)
	movups	-48(%edx,%ecx,1),%xmm0
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	movdqu	48(%esi),%xmm5
	movdqu	64(%esi),%xmm6
	movdqu	80(%esi),%xmm7
	leal	96(%esi),%esi
/* checksum ^= each plaintext block, then whiten with round key 0. */
	pxor	%xmm2,%xmm1
	pxor	%xmm0,%xmm2
	pxor	%xmm3,%xmm1
	pxor	%xmm0,%xmm3
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	pxor	%xmm5,%xmm1
	pxor	%xmm0,%xmm5
	pxor	%xmm6,%xmm1
	pxor	%xmm0,%xmm6
	pxor	%xmm7,%xmm1
	pxor	%xmm0,%xmm7
	movdqa	%xmm1,96(%esp)
	movups	-32(%edx,%ecx,1),%xmm1
	pxor	(%esp),%xmm2
	pxor	16(%esp),%xmm3
	pxor	32(%esp),%xmm4
	pxor	48(%esp),%xmm5
	pxor	64(%esp),%xmm6
	pxor	80(%esp),%xmm7
	movups	-16(%edx,%ecx,1),%xmm0
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
.byte	102,15,56,220,217	/* aesenc %xmm1,%xmm3 */
.byte	102,15,56,220,225	/* aesenc %xmm1,%xmm4 */
.byte	102,15,56,220,233	/* aesenc %xmm1,%xmm5 */
.byte	102,15,56,220,241	/* aesenc %xmm1,%xmm6 */
.byte	102,15,56,220,249	/* aesenc %xmm1,%xmm7; tail via helper below */
	movl	120(%esp),%edi
	movl	124(%esp),%eax
	call	.L_aesni_encrypt6_enter
/* Re-apply the per-block offsets and store ciphertext. */
	movdqa	80(%esp),%xmm0
	pxor	(%esp),%xmm2
	pxor	16(%esp),%xmm3
	pxor	32(%esp),%xmm4
	pxor	48(%esp),%xmm5
	pxor	64(%esp),%xmm6
	pxor	%xmm0,%xmm7
	movdqa	96(%esp),%xmm1
	movdqu	%xmm2,-96(%edi,%esi,1)
	movdqu	%xmm3,-80(%edi,%esi,1)
	movdqu	%xmm4,-64(%edi,%esi,1)
	movdqu	%xmm5,-48(%edi,%esi,1)
	movdqu	%xmm6,-32(%edi,%esi,1)
	movdqu	%xmm7,-16(%edi,%esi,1)
	cmpl	%eax,%esi
	jbe	.L077grandloop
/* Tail dispatch: 0..5 blocks remain. */
.L076short:
	addl	$96,%eax
	subl	%esi,%eax
	jz	.L078done
	cmpl	$32,%eax
	jb	.L079one
	je	.L080two
	cmpl	$64,%eax
	jb	.L081three
	je	.L082four
/* Five-block tail, same structure as the grand loop. */
	leal	1(%ebp),%ecx
	leal	3(%ebp),%eax
	bsfl	%ecx,%ecx
	bsfl	%eax,%eax
	shll	$4,%ecx
	shll	$4,%eax
	movdqu	(%ebx),%xmm2
	movdqu	(%ebx,%ecx,1),%xmm3
	movl	116(%esp),%ecx
	movdqa	%xmm2,%xmm4
	movdqu	(%ebx,%eax,1),%xmm5
	movdqa	%xmm2,%xmm6
	pxor	%xmm0,%xmm2
	pxor	%xmm2,%xmm3
	movdqa	%xmm2,(%esp)
	pxor	%xmm3,%xmm4
	movdqa	%xmm3,16(%esp)
	pxor	%xmm4,%xmm5
	movdqa	%xmm4,32(%esp)
	pxor	%xmm5,%xmm6
	movdqa	%xmm5,48(%esp)
	pxor	%xmm6,%xmm7
	movdqa	%xmm6,64(%esp)
	movups	-48(%edx,%ecx,1),%xmm0
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	movdqu	48(%esi),%xmm5
	movdqu	64(%esi),%xmm6
	pxor	%xmm7,%xmm7	/* sixth lane is a dummy */
	pxor	%xmm2,%xmm1
	pxor	%xmm0,%xmm2
	pxor	%xmm3,%xmm1
	pxor	%xmm0,%xmm3
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	pxor	%xmm5,%xmm1
	pxor	%xmm0,%xmm5
	pxor	%xmm6,%xmm1
	pxor	%xmm0,%xmm6
	movdqa	%xmm1,96(%esp)
	movups	-32(%edx,%ecx,1),%xmm1
	pxor	(%esp),%xmm2
	pxor	16(%esp),%xmm3
	pxor	32(%esp),%xmm4
	pxor	48(%esp),%xmm5
	pxor	64(%esp),%xmm6
	movups	-16(%edx,%ecx,1),%xmm0
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
.byte	102,15,56,220,217	/* aesenc %xmm1,%xmm3 */
.byte	102,15,56,220,225	/* aesenc %xmm1,%xmm4 */
.byte	102,15,56,220,233	/* aesenc %xmm1,%xmm5 */
.byte	102,15,56,220,241	/* aesenc %xmm1,%xmm6 */
.byte	102,15,56,220,249	/* aesenc %xmm1,%xmm7 */
	movl	120(%esp),%edi
	call	.L_aesni_encrypt6_enter
	movdqa	64(%esp),%xmm0
	pxor	(%esp),%xmm2
	pxor	16(%esp),%xmm3
	pxor	32(%esp),%xmm4
	pxor	48(%esp),%xmm5
	pxor	%xmm0,%xmm6
	movdqa	96(%esp),%xmm1
	movdqu	%xmm2,(%edi,%esi,1)
	movdqu	%xmm3,16(%edi,%esi,1)
	movdqu	%xmm4,32(%edi,%esi,1)
	movdqu	%xmm5,48(%edi,%esi,1)
	movdqu	%xmm6,64(%edi,%esi,1)
	jmp	.L078done
.align	16
.L079one:
	movdqu	(%ebx),%xmm7	/* offset ^= L[0] for the lone block */
	movl	112(%esp),%edx
	movdqu	(%esi),%xmm2
	movl	240(%edx),%ecx
	pxor	%xmm0,%xmm7
	pxor	%xmm2,%xmm1
	pxor	%xmm7,%xmm2
	movdqa	%xmm1,%xmm6
	movl	120(%esp),%edi
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L083enc1_loop_16:
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L083enc1_loop_16
.byte	102,15,56,221,209	/* aesenclast %xmm1,%xmm2 */
	xorps	%xmm7,%xmm2
	movdqa	%xmm7,%xmm0
	movdqa	%xmm6,%xmm1
	movups	%xmm2,(%edi,%esi,1)
	jmp	.L078done
.align	16
.L080two:
	leal	1(%ebp),%ecx
	movl	112(%esp),%edx
	bsfl	%ecx,%ecx
	shll	$4,%ecx
	movdqu	(%ebx),%xmm6
	movdqu	(%ebx,%ecx,1),%xmm7
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movl	240(%edx),%ecx
	pxor	%xmm0,%xmm6
	pxor	%xmm6,%xmm7
	pxor	%xmm2,%xmm1
	pxor	%xmm6,%xmm2
	pxor	%xmm3,%xmm1
	pxor	%xmm7,%xmm3
	movdqa	%xmm1,%xmm5
	movl	120(%esp),%edi
	call	_aesni_encrypt2
	xorps	%xmm6,%xmm2
	xorps	%xmm7,%xmm3
	movdqa	%xmm7,%xmm0
	movdqa	%xmm5,%xmm1
	movups	%xmm2,(%edi,%esi,1)
	movups	%xmm3,16(%edi,%esi,1)
	jmp	.L078done
.align	16
.L081three:
	leal	1(%ebp),%ecx
	movl	112(%esp),%edx
	bsfl	%ecx,%ecx
	shll	$4,%ecx
	movdqu	(%ebx),%xmm5
	movdqu	(%ebx,%ecx,1),%xmm6
	movdqa	%xmm5,%xmm7
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	movl	240(%edx),%ecx
	pxor	%xmm0,%xmm5
	pxor	%xmm5,%xmm6
	pxor	%xmm6,%xmm7
	pxor	%xmm2,%xmm1
	pxor	%xmm5,%xmm2
	pxor	%xmm3,%xmm1
	pxor	%xmm6,%xmm3
	pxor	%xmm4,%xmm1
	pxor	%xmm7,%xmm4
	movdqa	%xmm1,96(%esp)
	movl	120(%esp),%edi
	call	_aesni_encrypt3
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	xorps	%xmm7,%xmm4
	movdqa	%xmm7,%xmm0
	movdqa	96(%esp),%xmm1
	movups	%xmm2,(%edi,%esi,1)
	movups	%xmm3,16(%edi,%esi,1)
	movups	%xmm4,32(%edi,%esi,1)
	jmp	.L078done
.align	16
.L082four:
	leal	1(%ebp),%ecx
	leal	3(%ebp),%eax
	bsfl	%ecx,%ecx
	bsfl	%eax,%eax
	movl	112(%esp),%edx
	shll	$4,%ecx
	shll	$4,%eax
	movdqu	(%ebx),%xmm4
	movdqu	(%ebx,%ecx,1),%xmm5
	movdqa	%xmm4,%xmm6
	movdqu	(%ebx,%eax,1),%xmm7
	pxor	%xmm0,%xmm4
	movdqu	(%esi),%xmm2
	pxor	%xmm4,%xmm5
	movdqu	16(%esi),%xmm3
	pxor	%xmm5,%xmm6
	movdqa	%xmm4,(%esp)
	pxor	%xmm6,%xmm7
	movdqa	%xmm5,16(%esp)
	movdqu	32(%esi),%xmm4
	movdqu	48(%esi),%xmm5
	movl	240(%edx),%ecx
	pxor	%xmm2,%xmm1
	pxor	(%esp),%xmm2
	pxor	%xmm3,%xmm1
	pxor	16(%esp),%xmm3
	pxor	%xmm4,%xmm1
	pxor	%xmm6,%xmm4
	pxor	%xmm5,%xmm1
	pxor	%xmm7,%xmm5
	movdqa	%xmm1,96(%esp)
	movl	120(%esp),%edi
	call	_aesni_encrypt4
	xorps	(%esp),%xmm2
	xorps	16(%esp),%xmm3
	xorps	%xmm6,%xmm4
	movups	%xmm2,(%edi,%esi,1)
	xorps	%xmm7,%xmm5
	movups	%xmm3,16(%edi,%esi,1)
	movdqa	%xmm7,%xmm0
	movups	%xmm4,32(%edi,%esi,1)
	movdqa	96(%esp),%xmm1
	movups	%xmm5,48(%edi,%esi,1)
/* Scrub the stack scratch area, restore %esp, write the updated offset
 * (%xmm0) and checksum (%xmm1) back through their pointer arguments,
 * clear remaining registers and return. */
.L078done:
	movl	128(%esp),%edx
	pxor	%xmm2,%xmm2
	pxor	%xmm3,%xmm3
	movdqa	%xmm2,(%esp)
	pxor	%xmm4,%xmm4
	movdqa	%xmm2,16(%esp)
	pxor	%xmm5,%xmm5
	movdqa	%xmm2,32(%esp)
	pxor	%xmm6,%xmm6
	movdqa	%xmm2,48(%esp)
	pxor	%xmm7,%xmm7
	movdqa	%xmm2,64(%esp)
	movdqa	%xmm2,80(%esp)
	movdqa	%xmm2,96(%esp)
	leal	(%edx),%esp
	movl	40(%esp),%ecx
	movl	48(%esp),%ebx
	movdqu	%xmm0,(%ecx)
	pxor	%xmm0,%xmm0
	movdqu	%xmm1,(%ebx)
	pxor	%xmm1,%xmm1
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_ocb_encrypt,.-.L_aesni_ocb_encrypt_begin
/*
 * aesni_ocb_decrypt(inp, out, blocks, key, start_block_num,
 *                   offset_i[16], L_table, checksum[16])     (i386 cdecl)
 *
 * AES-OCB bulk decryption, mirror of aesni_ocb_encrypt above.  Same
 * argument layout (inferred from usage -- confirm against the
 * generator).  The one structural difference from the encrypt path:
 * the checksum in %xmm1 is accumulated from the DECRYPTED plaintext,
 * i.e. after the cipher call, not before it.
 */
.globl	aesni_ocb_decrypt
.type	aesni_ocb_decrypt,@function
.align	16
aesni_ocb_decrypt:
.L_aesni_ocb_decrypt_begin:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	40(%esp),%ecx
	movl	48(%esp),%ebx
	movl	20(%esp),%esi
	movl	24(%esp),%edi
	movl	28(%esp),%eax
	movl	32(%esp),%edx
	movdqu	(%ecx),%xmm0	/* %xmm0 = current offset */
	movl	36(%esp),%ebp	/* %ebp = block counter */
	movdqu	(%ebx),%xmm1	/* %xmm1 = running checksum */
	movl	44(%esp),%ebx	/* %ebx = L table */
/* Aligned scratch frame; 120/124/128(%esp) hold the out-in delta, the
 * bulk-loop boundary (in + 16*blocks - 96) and the caller's %esp. */
	movl	%esp,%ecx
	subl	$132,%esp
	andl	$-16,%esp
	subl	%esi,%edi
	shll	$4,%eax
	leal	-96(%esi,%eax,1),%eax
	movl	%edi,120(%esp)
	movl	%eax,124(%esp)
	movl	%ecx,128(%esp)
	movl	240(%edx),%ecx
	testl	$1,%ebp
	jnz	.L084odd
/* Even counter: handle one block inline (bsf = ntz selects L entry),
 * folding the recovered plaintext into the checksum afterwards. */
	bsfl	%ebp,%eax
	addl	$1,%ebp
	shll	$4,%eax
	movdqu	(%ebx,%eax,1),%xmm7
	movl	%edx,%eax
	movdqu	(%esi),%xmm2
	leal	16(%esi),%esi
	pxor	%xmm0,%xmm7	/* offset ^= L[ntz] */
	pxor	%xmm7,%xmm2
	movdqa	%xmm1,%xmm6
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L085dec1_loop_17:
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L085dec1_loop_17
.byte	102,15,56,223,209	/* aesdeclast %xmm1,%xmm2 */
	xorps	%xmm7,%xmm2
	movaps	%xmm6,%xmm1
	movdqa	%xmm7,%xmm0
	xorps	%xmm2,%xmm1	/* checksum ^= plaintext (post-decrypt) */
	movups	%xmm2,-16(%edi,%esi,1)
	movl	240(%eax),%ecx
	movl	%eax,%edx
	movl	124(%esp),%eax
/* 112(%esp) = key pointer, 116(%esp) = 16 - 16*rounds, used to index
 * the key schedule from its far end inside the grand loop. */
.L084odd:
	shll	$4,%ecx
	movl	$16,%edi
	subl	%ecx,%edi
	movl	%edx,112(%esp)
	leal	32(%edx,%ecx,1),%edx
	movl	%edi,116(%esp)
	cmpl	%eax,%esi
	ja	.L086short
	jmp	.L087grandloop
.align	32
/* Grand loop: six blocks per iteration; odd indices i+1, i+3, i+5 get
 * L[ntz] via bsf, even ones use L[0]; offsets chain by xor. */
.L087grandloop:
	leal	1(%ebp),%ecx
	leal	3(%ebp),%eax
	leal	5(%ebp),%edi
	addl	$6,%ebp
	bsfl	%ecx,%ecx
	bsfl	%eax,%eax
	bsfl	%edi,%edi
	shll	$4,%ecx
	shll	$4,%eax
	shll	$4,%edi
	movdqu	(%ebx),%xmm2
	movdqu	(%ebx,%ecx,1),%xmm3
	movl	116(%esp),%ecx
	movdqa	%xmm2,%xmm4
	movdqu	(%ebx,%eax,1),%xmm5
	movdqa	%xmm2,%xmm6
	movdqu	(%ebx,%edi,1),%xmm7
	pxor	%xmm0,%xmm2
	pxor	%xmm2,%xmm3
	movdqa	%xmm2,(%esp)
	pxor	%xmm3,%xmm4
	movdqa	%xmm3,16(%esp)
	pxor	%xmm4,%xmm5
	movdqa	%xmm4,32(%esp)
	pxor	%xmm5,%xmm6
	movdqa	%xmm5,48(%esp)
	pxor	%xmm6,%xmm7
	movdqa	%xmm6,64(%esp)
	movdqa	%xmm7,80(%esp)
	movups	-48(%edx,%ecx,1),%xmm0
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	movdqu	48(%esi),%xmm5
	movdqu	64(%esi),%xmm6
	movdqu	80(%esi),%xmm7
	leal	96(%esi),%esi
/* Park the checksum; whiten ciphertext with round key 0 + offsets. */
	movdqa	%xmm1,96(%esp)
	pxor	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
	pxor	%xmm0,%xmm4
	pxor	%xmm0,%xmm5
	pxor	%xmm0,%xmm6
	pxor	%xmm0,%xmm7
	movups	-32(%edx,%ecx,1),%xmm1
	pxor	(%esp),%xmm2
	pxor	16(%esp),%xmm3
	pxor	32(%esp),%xmm4
	pxor	48(%esp),%xmm5
	pxor	64(%esp),%xmm6
	pxor	80(%esp),%xmm7
	movups	-16(%edx,%ecx,1),%xmm0
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
.byte	102,15,56,222,217	/* aesdec %xmm1,%xmm3 */
.byte	102,15,56,222,225	/* aesdec %xmm1,%xmm4 */
.byte	102,15,56,222,233	/* aesdec %xmm1,%xmm5 */
.byte	102,15,56,222,241	/* aesdec %xmm1,%xmm6 */
.byte	102,15,56,222,249	/* aesdec %xmm1,%xmm7; tail via helper below */
	movl	120(%esp),%edi
	movl	124(%esp),%eax
	call	.L_aesni_decrypt6_enter
/* Un-offset the results, fold each plaintext block into the checksum,
 * and store. */
	movdqa	80(%esp),%xmm0
	pxor	(%esp),%xmm2
	movdqa	96(%esp),%xmm1
	pxor	16(%esp),%xmm3
	pxor	32(%esp),%xmm4
	pxor	48(%esp),%xmm5
	pxor	64(%esp),%xmm6
	pxor	%xmm0,%xmm7
	pxor	%xmm2,%xmm1
	movdqu	%xmm2,-96(%edi,%esi,1)
	pxor	%xmm3,%xmm1
	movdqu	%xmm3,-80(%edi,%esi,1)
	pxor	%xmm4,%xmm1
	movdqu	%xmm4,-64(%edi,%esi,1)
	pxor	%xmm5,%xmm1
	movdqu	%xmm5,-48(%edi,%esi,1)
	pxor	%xmm6,%xmm1
	movdqu	%xmm6,-32(%edi,%esi,1)
	pxor	%xmm7,%xmm1
	movdqu	%xmm7,-16(%edi,%esi,1)
	cmpl	%eax,%esi
	jbe	.L087grandloop
/* Tail dispatch: 0..5 blocks remain. */
.L086short:
	addl	$96,%eax
	subl	%esi,%eax
	jz	.L088done
	cmpl	$32,%eax
	jb	.L089one
	je	.L090two
	cmpl	$64,%eax
	jb	.L091three
	je	.L092four
/* Five-block tail, same structure as the grand loop. */
	leal	1(%ebp),%ecx
	leal	3(%ebp),%eax
	bsfl	%ecx,%ecx
	bsfl	%eax,%eax
	shll	$4,%ecx
	shll	$4,%eax
	movdqu	(%ebx),%xmm2
	movdqu	(%ebx,%ecx,1),%xmm3
	movl	116(%esp),%ecx
	movdqa	%xmm2,%xmm4
	movdqu	(%ebx,%eax,1),%xmm5
	movdqa	%xmm2,%xmm6
	pxor	%xmm0,%xmm2
	pxor	%xmm2,%xmm3
	movdqa	%xmm2,(%esp)
	pxor	%xmm3,%xmm4
	movdqa	%xmm3,16(%esp)
	pxor	%xmm4,%xmm5
	movdqa	%xmm4,32(%esp)
	pxor	%xmm5,%xmm6
	movdqa	%xmm5,48(%esp)
	pxor	%xmm6,%xmm7
	movdqa	%xmm6,64(%esp)
	movups	-48(%edx,%ecx,1),%xmm0
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	movdqu	48(%esi),%xmm5
	movdqu	64(%esi),%xmm6
	pxor	%xmm7,%xmm7	/* sixth lane is a dummy */
	movdqa	%xmm1,96(%esp)
	pxor	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
	pxor	%xmm0,%xmm4
	pxor	%xmm0,%xmm5
	pxor	%xmm0,%xmm6
	movups	-32(%edx,%ecx,1),%xmm1
	pxor	(%esp),%xmm2
	pxor	16(%esp),%xmm3
	pxor	32(%esp),%xmm4
	pxor	48(%esp),%xmm5
	pxor	64(%esp),%xmm6
	movups	-16(%edx,%ecx,1),%xmm0
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
.byte	102,15,56,222,217	/* aesdec %xmm1,%xmm3 */
.byte	102,15,56,222,225	/* aesdec %xmm1,%xmm4 */
.byte	102,15,56,222,233	/* aesdec %xmm1,%xmm5 */
.byte	102,15,56,222,241	/* aesdec %xmm1,%xmm6 */
.byte	102,15,56,222,249	/* aesdec %xmm1,%xmm7 */
	movl	120(%esp),%edi
	call	.L_aesni_decrypt6_enter
	movdqa	64(%esp),%xmm0
	pxor	(%esp),%xmm2
	movdqa	96(%esp),%xmm1
	pxor	16(%esp),%xmm3
	pxor	32(%esp),%xmm4
	pxor	48(%esp),%xmm5
	pxor	%xmm0,%xmm6
	pxor	%xmm2,%xmm1
	movdqu	%xmm2,(%edi,%esi,1)
	pxor	%xmm3,%xmm1
	movdqu	%xmm3,16(%edi,%esi,1)
	pxor	%xmm4,%xmm1
	movdqu	%xmm4,32(%edi,%esi,1)
	pxor	%xmm5,%xmm1
	movdqu	%xmm5,48(%edi,%esi,1)
	pxor	%xmm6,%xmm1
	movdqu	%xmm6,64(%edi,%esi,1)
	jmp	.L088done
.align	16
.L089one:
	movdqu	(%ebx),%xmm7	/* offset ^= L[0] for the lone block */
	movl	112(%esp),%edx
	movdqu	(%esi),%xmm2
	movl	240(%edx),%ecx
	pxor	%xmm0,%xmm7
	pxor	%xmm7,%xmm2
	movdqa	%xmm1,%xmm6
	movl	120(%esp),%edi
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L093dec1_loop_18:
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L093dec1_loop_18
.byte	102,15,56,223,209	/* aesdeclast %xmm1,%xmm2 */
	xorps	%xmm7,%xmm2
	movaps	%xmm6,%xmm1
	movdqa	%xmm7,%xmm0
	xorps	%xmm2,%xmm1	/* checksum ^= plaintext */
	movups	%xmm2,(%edi,%esi,1)
	jmp	.L088done
.align	16
.L090two:
	leal	1(%ebp),%ecx
	movl	112(%esp),%edx
	bsfl	%ecx,%ecx
	shll	$4,%ecx
	movdqu	(%ebx),%xmm6
	movdqu	(%ebx,%ecx,1),%xmm7
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movl	240(%edx),%ecx
	movdqa	%xmm1,%xmm5
	pxor	%xmm0,%xmm6
	pxor	%xmm6,%xmm7
	pxor	%xmm6,%xmm2
	pxor	%xmm7,%xmm3
	movl	120(%esp),%edi
	call	_aesni_decrypt2
	xorps	%xmm6,%xmm2
	xorps	%xmm7,%xmm3
	movdqa	%xmm7,%xmm0
	xorps	%xmm2,%xmm5	/* checksum ^= both plaintext blocks */
	movups	%xmm2,(%edi,%esi,1)
	xorps	%xmm3,%xmm5
	movups	%xmm3,16(%edi,%esi,1)
	movaps	%xmm5,%xmm1
	jmp	.L088done
.align	16
.L091three:
	leal	1(%ebp),%ecx
	movl	112(%esp),%edx
	bsfl	%ecx,%ecx
	shll	$4,%ecx
	movdqu	(%ebx),%xmm5
	movdqu	(%ebx,%ecx,1),%xmm6
	movdqa	%xmm5,%xmm7
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	movl	240(%edx),%ecx
	movdqa	%xmm1,96(%esp)
	pxor	%xmm0,%xmm5
	pxor	%xmm5,%xmm6
	pxor	%xmm6,%xmm7
	pxor	%xmm5,%xmm2
	pxor	%xmm6,%xmm3
	pxor	%xmm7,%xmm4
	movl	120(%esp),%edi
	call	_aesni_decrypt3
	movdqa	96(%esp),%xmm1
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	xorps	%xmm7,%xmm4
	movups	%xmm2,(%edi,%esi,1)
	pxor	%xmm2,%xmm1	/* fold plaintext into checksum */
	movdqa	%xmm7,%xmm0
	movups	%xmm3,16(%edi,%esi,1)
	pxor	%xmm3,%xmm1
	movups	%xmm4,32(%edi,%esi,1)
	pxor	%xmm4,%xmm1
	jmp	.L088done
.align	16
.L092four:
	leal	1(%ebp),%ecx
	leal	3(%ebp),%eax
	bsfl	%ecx,%ecx
	bsfl	%eax,%eax
	movl	112(%esp),%edx
	shll	$4,%ecx
	shll	$4,%eax
	movdqu	(%ebx),%xmm4
	movdqu	(%ebx,%ecx,1),%xmm5
	movdqa	%xmm4,%xmm6
	movdqu	(%ebx,%eax,1),%xmm7
	pxor	%xmm0,%xmm4
	movdqu	(%esi),%xmm2
	pxor	%xmm4,%xmm5
	movdqu	16(%esi),%xmm3
	pxor	%xmm5,%xmm6
	movdqa	%xmm4,(%esp)
	pxor	%xmm6,%xmm7
	movdqa	%xmm5,16(%esp)
	movdqu	32(%esi),%xmm4
	movdqu	48(%esi),%xmm5
	movl	240(%edx),%ecx
	movdqa	%xmm1,96(%esp)
	pxor	(%esp),%xmm2
	pxor	16(%esp),%xmm3
	pxor	%xmm6,%xmm4
	pxor	%xmm7,%xmm5
	movl	120(%esp),%edi
	call	_aesni_decrypt4
	movdqa	96(%esp),%xmm1
	xorps	(%esp),%xmm2
	xorps	16(%esp),%xmm3
	xorps	%xmm6,%xmm4
	movups	%xmm2,(%edi,%esi,1)
	pxor	%xmm2,%xmm1	/* fold plaintext into checksum */
	xorps	%xmm7,%xmm5
	movups	%xmm3,16(%edi,%esi,1)
	pxor	%xmm3,%xmm1
	movdqa	%xmm7,%xmm0
	movups	%xmm4,32(%edi,%esi,1)
	pxor	%xmm4,%xmm1
	movups	%xmm5,48(%edi,%esi,1)
	pxor	%xmm5,%xmm1
/* Scrub the stack scratch area, restore %esp, write the updated offset
 * (%xmm0) and checksum (%xmm1) back through their pointer arguments,
 * clear remaining registers and return. */
.L088done:
	movl	128(%esp),%edx
	pxor	%xmm2,%xmm2
	pxor	%xmm3,%xmm3
	movdqa	%xmm2,(%esp)
	pxor	%xmm4,%xmm4
	movdqa	%xmm2,16(%esp)
	pxor	%xmm5,%xmm5
	movdqa	%xmm2,32(%esp)
	pxor	%xmm6,%xmm6
	movdqa	%xmm2,48(%esp)
	pxor	%xmm7,%xmm7
	movdqa	%xmm2,64(%esp)
	movdqa	%xmm2,80(%esp)
	movdqa	%xmm2,96(%esp)
	leal	(%edx),%esp
	movl	40(%esp),%ecx
	movl	48(%esp),%ebx
	movdqu	%xmm0,(%ecx)
	pxor	%xmm0,%xmm0
	movdqu	%xmm1,(%ebx)
	pxor	%xmm1,%xmm1
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_ocb_decrypt,.-.L_aesni_ocb_decrypt_begin
/*
 * void aesni_cbc_encrypt(const unsigned char *in, unsigned char *out,
 *                        size_t length, const AES_KEY *key,
 *                        unsigned char *ivec, int enc)
 * i386 cdecl.  After the four register pushes the arguments sit at
 * 20(%esp)=in, 24(%esp)=out, 28(%esp)=length, 32(%esp)=key,
 * 36(%esp)=ivec, 40(%esp)=enc (non-zero: encrypt, zero: decrypt).
 * A 16-byte-aligned scratch frame is carved below the caller's stack;
 * the caller's %esp is saved at 16(%esp) and restored at .L099cbc_ret.
 * On exit the updated IV is written back to *ivec and all XMM state
 * is cleared.  Auto-generated from aesni-x86.pl; do not hand-edit.
 */
.globl	aesni_cbc_encrypt
.type	aesni_cbc_encrypt,@function
.align	16
aesni_cbc_encrypt:
.L_aesni_cbc_encrypt_begin:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi
	movl	%esp,%ebx
	movl	24(%esp),%edi
	subl	$24,%ebx
	movl	28(%esp),%eax
	andl	$-16,%ebx
	movl	32(%esp),%edx
	movl	36(%esp),%ebp
	testl	%eax,%eax
	jz	.L094cbc_abort
	cmpl	$0,40(%esp)
	xchgl	%esp,%ebx
	movups	(%ebp),%xmm7
	movl	240(%edx),%ecx
	movl	%edx,%ebp
	movl	%ebx,16(%esp)
	movl	%ecx,%ebx
	je	.L095cbc_decrypt
	/* CBC encryption must be serial: each block chains into the next.
	   %xmm2 carries the running IV/ciphertext, %xmm7 the next input. */
	movaps	%xmm7,%xmm2
	cmpl	$16,%eax
	jb	.L096cbc_enc_tail
	subl	$16,%eax
	jmp	.L097cbc_enc_loop
.align	16
.L097cbc_enc_loop:
	movups	(%esi),%xmm7
	leal	16(%esi),%esi
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	xorps	%xmm0,%xmm7
	leal	32(%edx),%edx
	xorps	%xmm7,%xmm2
.L098enc1_loop_19:
.byte	102,15,56,220,209
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L098enc1_loop_19
.byte	102,15,56,221,209
	movl	%ebx,%ecx
	movl	%ebp,%edx
	movups	%xmm2,(%edi)
	leal	16(%edi),%edi
	subl	$16,%eax
	jnc	.L097cbc_enc_loop
	addl	$16,%eax
	jnz	.L096cbc_enc_tail
	movaps	%xmm2,%xmm7
	pxor	%xmm2,%xmm2
	jmp	.L099cbc_ret
.L096cbc_enc_tail:
	/* Partial final block: copy the tail into the output buffer and
	   zero-pad it to 16 bytes, then loop once more over it in place.
	   The .long constants are hand-encoded "rep movsb"/"rep stosb"
	   sequences (emitted as data for old assemblers). */
	movl	%eax,%ecx
.long	2767451785
	movl	$16,%ecx
	subl	%eax,%ecx
	xorl	%eax,%eax
.long	2868115081
	leal	-16(%edi),%edi
	movl	%ebx,%ecx
	movl	%edi,%esi
	movl	%ebp,%edx
	jmp	.L097cbc_enc_loop
.align	16
.L095cbc_decrypt:
	/* CBC decryption parallelizes: bulk loop does 6 blocks per pass,
	   keeping the previous ciphertext (the chaining value) at (%esp). */
	cmpl	$80,%eax
	jbe	.L100cbc_dec_tail
	movaps	%xmm7,(%esp)
	subl	$80,%eax
	jmp	.L101cbc_dec_loop6_enter
.align	16
.L102cbc_dec_loop6:
	movaps	%xmm0,(%esp)
	movups	%xmm7,(%edi)
	leal	16(%edi),%edi
.L101cbc_dec_loop6_enter:
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	movdqu	48(%esi),%xmm5
	movdqu	64(%esi),%xmm6
	movdqu	80(%esi),%xmm7
	call	_aesni_decrypt6
	movups	(%esi),%xmm1
	movups	16(%esi),%xmm0
	xorps	(%esp),%xmm2
	xorps	%xmm1,%xmm3
	movups	32(%esi),%xmm1
	xorps	%xmm0,%xmm4
	movups	48(%esi),%xmm0
	xorps	%xmm1,%xmm5
	movups	64(%esi),%xmm1
	xorps	%xmm0,%xmm6
	movups	80(%esi),%xmm0
	xorps	%xmm1,%xmm7
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	leal	96(%esi),%esi
	movups	%xmm4,32(%edi)
	movl	%ebx,%ecx
	movups	%xmm5,48(%edi)
	movl	%ebp,%edx
	movups	%xmm6,64(%edi)
	leal	80(%edi),%edi
	subl	$96,%eax
	ja	.L102cbc_dec_loop6
	movaps	%xmm7,%xmm2
	movaps	%xmm0,%xmm7
	addl	$80,%eax
	jle	.L103cbc_dec_clear_tail_collected
	movups	%xmm2,(%edi)
	leal	16(%edi),%edi
.L100cbc_dec_tail:
	/* 1..5 remaining blocks: dispatch on the residual byte count. */
	movups	(%esi),%xmm2
	movaps	%xmm2,%xmm6
	cmpl	$16,%eax
	jbe	.L104cbc_dec_one
	movups	16(%esi),%xmm3
	movaps	%xmm3,%xmm5
	cmpl	$32,%eax
	jbe	.L105cbc_dec_two
	movups	32(%esi),%xmm4
	cmpl	$48,%eax
	jbe	.L106cbc_dec_three
	movups	48(%esi),%xmm5
	cmpl	$64,%eax
	jbe	.L107cbc_dec_four
	movups	64(%esi),%xmm6
	movaps	%xmm7,(%esp)
	movups	(%esi),%xmm2
	xorps	%xmm7,%xmm7
	call	_aesni_decrypt6
	movups	(%esi),%xmm1
	movups	16(%esi),%xmm0
	xorps	(%esp),%xmm2
	xorps	%xmm1,%xmm3
	movups	32(%esi),%xmm1
	xorps	%xmm0,%xmm4
	movups	48(%esi),%xmm0
	xorps	%xmm1,%xmm5
	movups	64(%esi),%xmm7
	xorps	%xmm0,%xmm6
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	pxor	%xmm3,%xmm3
	movups	%xmm4,32(%edi)
	pxor	%xmm4,%xmm4
	movups	%xmm5,48(%edi)
	pxor	%xmm5,%xmm5
	leal	64(%edi),%edi
	movaps	%xmm6,%xmm2
	pxor	%xmm6,%xmm6
	subl	$80,%eax
	jmp	.L108cbc_dec_tail_collected
.align	16
.L104cbc_dec_one:
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L109dec1_loop_20:
.byte	102,15,56,222,209
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L109dec1_loop_20
.byte	102,15,56,223,209
	xorps	%xmm7,%xmm2
	movaps	%xmm6,%xmm7
	subl	$16,%eax
	jmp	.L108cbc_dec_tail_collected
.align	16
.L105cbc_dec_two:
	call	_aesni_decrypt2
	xorps	%xmm7,%xmm2
	xorps	%xmm6,%xmm3
	movups	%xmm2,(%edi)
	movaps	%xmm3,%xmm2
	pxor	%xmm3,%xmm3
	leal	16(%edi),%edi
	movaps	%xmm5,%xmm7
	subl	$32,%eax
	jmp	.L108cbc_dec_tail_collected
.align	16
.L106cbc_dec_three:
	call	_aesni_decrypt3
	xorps	%xmm7,%xmm2
	xorps	%xmm6,%xmm3
	xorps	%xmm5,%xmm4
	movups	%xmm2,(%edi)
	movaps	%xmm4,%xmm2
	pxor	%xmm4,%xmm4
	movups	%xmm3,16(%edi)
	pxor	%xmm3,%xmm3
	leal	32(%edi),%edi
	movups	32(%esi),%xmm7
	subl	$48,%eax
	jmp	.L108cbc_dec_tail_collected
.align	16
.L107cbc_dec_four:
	call	_aesni_decrypt4
	movups	16(%esi),%xmm1
	movups	32(%esi),%xmm0
	xorps	%xmm7,%xmm2
	movups	48(%esi),%xmm7
	xorps	%xmm6,%xmm3
	movups	%xmm2,(%edi)
	xorps	%xmm1,%xmm4
	movups	%xmm3,16(%edi)
	pxor	%xmm3,%xmm3
	xorps	%xmm0,%xmm5
	movups	%xmm4,32(%edi)
	pxor	%xmm4,%xmm4
	leal	48(%edi),%edi
	movaps	%xmm5,%xmm2
	pxor	%xmm5,%xmm5
	subl	$64,%eax
	jmp	.L108cbc_dec_tail_collected
.align	16
.L103cbc_dec_clear_tail_collected:
	pxor	%xmm3,%xmm3
	pxor	%xmm4,%xmm4
	pxor	%xmm5,%xmm5
	pxor	%xmm6,%xmm6
.L108cbc_dec_tail_collected:
	andl	$15,%eax
	jnz	.L110cbc_dec_tail_partial
	movups	%xmm2,(%edi)
	pxor	%xmm0,%xmm0
	jmp	.L099cbc_ret
.align	16
.L110cbc_dec_tail_partial:
	/* Write only %eax (= length mod 16) bytes of the last plaintext
	   block via the stack; the .long is a hand-encoded "rep movsb". */
	movaps	%xmm2,(%esp)
	pxor	%xmm0,%xmm0
	movl	$16,%ecx
	movl	%esp,%esi
	subl	%eax,%ecx
.long	2767451785
	movdqa	%xmm2,(%esp)
.L099cbc_ret:
	/* Restore caller %esp, store the final IV, scrub XMM registers. */
	movl	16(%esp),%esp
	movl	36(%esp),%ebp
	pxor	%xmm2,%xmm2
	pxor	%xmm1,%xmm1
	movups	%xmm7,(%ebp)
	pxor	%xmm7,%xmm7
.L094cbc_abort:
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin
/*
 * _aesni_set_encrypt_key: internal AES key-schedule expansion.
 * In:   %eax = user key pointer, %ecx = key bits (128/192/256),
 *       %edx = AES_KEY output pointer.
 * Out:  %eax = 0 on success, -1 on NULL pointer, -2 on bad key size.
 * Reads OPENSSL_ia32cap_P[1] (PIC-relative via the .L112pic thunk) and
 * masks it with $268437504 (1<<28 | 1<<11 -- feature bits per the
 * OPENSSL_ia32cap convention; the "_alt" paths are taken when the mask
 * equals 1<<28, i.e. AVX present).  Round keys are produced with
 * aeskeygenassist (.byte 102,15,58,223,...) on the legacy path, or with
 * pshufb/aesenclast (.byte 102,15,56,0 / 102,15,56,221) plus constants
 * at .Lkey_const on the alt path.  The round count is stored at the end
 * of the schedule (key->rounds).  Clobbers %ebx/%ebp (saved/restored)
 * and %xmm0-%xmm5 (scrubbed before return).
 */
.type	_aesni_set_encrypt_key,@function
.align	16
_aesni_set_encrypt_key:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	pushl	%ebp
	pushl	%ebx
	testl	%eax,%eax
	jz	.L111bad_pointer
	testl	%edx,%edx
	jz	.L111bad_pointer
	call	.L112pic
.L112pic:
	popl	%ebx
	leal	.Lkey_const-.L112pic(%ebx),%ebx
	leal	OPENSSL_ia32cap_P-.Lkey_const(%ebx),%ebp
	movups	(%eax),%xmm0
	xorps	%xmm4,%xmm4
	movl	4(%ebp),%ebp
	leal	16(%edx),%edx
	andl	$268437504,%ebp
	cmpl	$256,%ecx
	je	.L11314rounds
	cmpl	$192,%ecx
	je	.L11412rounds
	cmpl	$128,%ecx
	jne	.L115bad_keybits
.align	16
.L11610rounds:
	/* AES-128: 10 rounds; one aeskeygenassist per round constant. */
	cmpl	$268435456,%ebp
	je	.L11710rounds_alt
	movl	$9,%ecx
	movups	%xmm0,-16(%edx)
.byte	102,15,58,223,200,1
	call	.L118key_128_cold
.byte	102,15,58,223,200,2
	call	.L119key_128
.byte	102,15,58,223,200,4
	call	.L119key_128
.byte	102,15,58,223,200,8
	call	.L119key_128
.byte	102,15,58,223,200,16
	call	.L119key_128
.byte	102,15,58,223,200,32
	call	.L119key_128
.byte	102,15,58,223,200,64
	call	.L119key_128
.byte	102,15,58,223,200,128
	call	.L119key_128
.byte	102,15,58,223,200,27
	call	.L119key_128
.byte	102,15,58,223,200,54
	call	.L119key_128
	movups	%xmm0,(%edx)
	movl	%ecx,80(%edx)
	jmp	.L120good_key
.align	16
.L119key_128:
	movups	%xmm0,(%edx)
	leal	16(%edx),%edx
.L118key_128_cold:
	shufps	$16,%xmm0,%xmm4
	xorps	%xmm4,%xmm0
	shufps	$140,%xmm0,%xmm4
	xorps	%xmm4,%xmm0
	shufps	$255,%xmm1,%xmm1
	xorps	%xmm1,%xmm0
	ret
.align	16
.L11710rounds_alt:
	/* AVX-era alternate AES-128 expansion: pshufb + aesenclast with
	   a doubling round constant in %xmm4 (pslld $1 each iteration). */
	movdqa	(%ebx),%xmm5
	movl	$8,%ecx
	movdqa	32(%ebx),%xmm4
	movdqa	%xmm0,%xmm2
	movdqu	%xmm0,-16(%edx)
.L121loop_key128:
.byte	102,15,56,0,197
.byte	102,15,56,221,196
	pslld	$1,%xmm4
	leal	16(%edx),%edx
	movdqa	%xmm2,%xmm3
	pslldq	$4,%xmm2
	pxor	%xmm2,%xmm3
	pslldq	$4,%xmm2
	pxor	%xmm2,%xmm3
	pslldq	$4,%xmm2
	pxor	%xmm3,%xmm2
	pxor	%xmm2,%xmm0
	movdqu	%xmm0,-16(%edx)
	movdqa	%xmm0,%xmm2
	decl	%ecx
	jnz	.L121loop_key128
	movdqa	48(%ebx),%xmm4
.byte	102,15,56,0,197
.byte	102,15,56,221,196
	pslld	$1,%xmm4
	movdqa	%xmm2,%xmm3
	pslldq	$4,%xmm2
	pxor	%xmm2,%xmm3
	pslldq	$4,%xmm2
	pxor	%xmm2,%xmm3
	pslldq	$4,%xmm2
	pxor	%xmm3,%xmm2
	pxor	%xmm2,%xmm0
	movdqu	%xmm0,(%edx)
	movdqa	%xmm0,%xmm2
.byte	102,15,56,0,197
.byte	102,15,56,221,196
	movdqa	%xmm2,%xmm3
	pslldq	$4,%xmm2
	pxor	%xmm2,%xmm3
	pslldq	$4,%xmm2
	pxor	%xmm2,%xmm3
	pslldq	$4,%xmm2
	pxor	%xmm3,%xmm2
	pxor	%xmm2,%xmm0
	movdqu	%xmm0,16(%edx)
	movl	$9,%ecx
	movl	%ecx,96(%edx)
	jmp	.L120good_key
.align	16
.L11412rounds:
	/* AES-192: 12 rounds; key is 24 bytes, low 8 loaded into %xmm2. */
	movq	16(%eax),%xmm2
	cmpl	$268435456,%ebp
	je	.L12212rounds_alt
	movl	$11,%ecx
	movups	%xmm0,-16(%edx)
.byte	102,15,58,223,202,1
	call	.L123key_192a_cold
.byte	102,15,58,223,202,2
	call	.L124key_192b
.byte	102,15,58,223,202,4
	call	.L125key_192a
.byte	102,15,58,223,202,8
	call	.L124key_192b
.byte	102,15,58,223,202,16
	call	.L125key_192a
.byte	102,15,58,223,202,32
	call	.L124key_192b
.byte	102,15,58,223,202,64
	call	.L125key_192a
.byte	102,15,58,223,202,128
	call	.L124key_192b
	movups	%xmm0,(%edx)
	movl	%ecx,48(%edx)
	jmp	.L120good_key
.align	16
.L125key_192a:
	movups	%xmm0,(%edx)
	leal	16(%edx),%edx
.align	16
.L123key_192a_cold:
	movaps	%xmm2,%xmm5
.L126key_192b_warm:
	shufps	$16,%xmm0,%xmm4
	movdqa	%xmm2,%xmm3
	xorps	%xmm4,%xmm0
	shufps	$140,%xmm0,%xmm4
	pslldq	$4,%xmm3
	xorps	%xmm4,%xmm0
	pshufd	$85,%xmm1,%xmm1
	pxor	%xmm3,%xmm2
	pxor	%xmm1,%xmm0
	pshufd	$255,%xmm0,%xmm3
	pxor	%xmm3,%xmm2
	ret
.align	16
.L124key_192b:
	movaps	%xmm0,%xmm3
	shufps	$68,%xmm0,%xmm5
	movups	%xmm5,(%edx)
	shufps	$78,%xmm2,%xmm3
	movups	%xmm3,16(%edx)
	leal	32(%edx),%edx
	jmp	.L126key_192b_warm
.align	16
.L12212rounds_alt:
	movdqa	16(%ebx),%xmm5
	movdqa	32(%ebx),%xmm4
	movl	$8,%ecx
	movdqu	%xmm0,-16(%edx)
.L127loop_key192:
	movq	%xmm2,(%edx)
	movdqa	%xmm2,%xmm1
.byte	102,15,56,0,213
.byte	102,15,56,221,212
	pslld	$1,%xmm4
	leal	24(%edx),%edx
	movdqa	%xmm0,%xmm3
	pslldq	$4,%xmm0
	pxor	%xmm0,%xmm3
	pslldq	$4,%xmm0
	pxor	%xmm0,%xmm3
	pslldq	$4,%xmm0
	pxor	%xmm3,%xmm0
	pshufd	$255,%xmm0,%xmm3
	pxor	%xmm1,%xmm3
	pslldq	$4,%xmm1
	pxor	%xmm1,%xmm3
	pxor	%xmm2,%xmm0
	pxor	%xmm3,%xmm2
	movdqu	%xmm0,-16(%edx)
	decl	%ecx
	jnz	.L127loop_key192
	movl	$11,%ecx
	movl	%ecx,32(%edx)
	jmp	.L120good_key
.align	16
.L11314rounds:
	/* AES-256: 14 rounds; the two key halves alternate (256a/256b). */
	movups	16(%eax),%xmm2
	leal	16(%edx),%edx
	cmpl	$268435456,%ebp
	je	.L12814rounds_alt
	movl	$13,%ecx
	movups	%xmm0,-32(%edx)
	movups	%xmm2,-16(%edx)
.byte	102,15,58,223,202,1
	call	.L129key_256a_cold
.byte	102,15,58,223,200,1
	call	.L130key_256b
.byte	102,15,58,223,202,2
	call	.L131key_256a
.byte	102,15,58,223,200,2
	call	.L130key_256b
.byte	102,15,58,223,202,4
	call	.L131key_256a
.byte	102,15,58,223,200,4
	call	.L130key_256b
.byte	102,15,58,223,202,8
	call	.L131key_256a
.byte	102,15,58,223,200,8
	call	.L130key_256b
.byte	102,15,58,223,202,16
	call	.L131key_256a
.byte	102,15,58,223,200,16
	call	.L130key_256b
.byte	102,15,58,223,202,32
	call	.L131key_256a
.byte	102,15,58,223,200,32
	call	.L130key_256b
.byte	102,15,58,223,202,64
	call	.L131key_256a
	movups	%xmm0,(%edx)
	movl	%ecx,16(%edx)
	xorl	%eax,%eax
	jmp	.L120good_key
.align	16
.L131key_256a:
	movups	%xmm2,(%edx)
	leal	16(%edx),%edx
.L129key_256a_cold:
	shufps	$16,%xmm0,%xmm4
	xorps	%xmm4,%xmm0
	shufps	$140,%xmm0,%xmm4
	xorps	%xmm4,%xmm0
	shufps	$255,%xmm1,%xmm1
	xorps	%xmm1,%xmm0
	ret
.align	16
.L130key_256b:
	movups	%xmm0,(%edx)
	leal	16(%edx),%edx
	shufps	$16,%xmm2,%xmm4
	xorps	%xmm4,%xmm2
	shufps	$140,%xmm2,%xmm4
	xorps	%xmm4,%xmm2
	shufps	$170,%xmm1,%xmm1
	xorps	%xmm1,%xmm2
	ret
.align	16
.L12814rounds_alt:
	movdqa	(%ebx),%xmm5
	movdqa	32(%ebx),%xmm4
	movl	$7,%ecx
	movdqu	%xmm0,-32(%edx)
	movdqa	%xmm2,%xmm1
	movdqu	%xmm2,-16(%edx)
.L132loop_key256:
.byte	102,15,56,0,213
.byte	102,15,56,221,212
	movdqa	%xmm0,%xmm3
	pslldq	$4,%xmm0
	pxor	%xmm0,%xmm3
	pslldq	$4,%xmm0
	pxor	%xmm0,%xmm3
	pslldq	$4,%xmm0
	pxor	%xmm3,%xmm0
	pslld	$1,%xmm4
	pxor	%xmm2,%xmm0
	movdqu	%xmm0,(%edx)
	decl	%ecx
	jz	.L133done_key256
	pshufd	$255,%xmm0,%xmm2
	pxor	%xmm3,%xmm3
.byte	102,15,56,221,211
	movdqa	%xmm1,%xmm3
	pslldq	$4,%xmm1
	pxor	%xmm1,%xmm3
	pslldq	$4,%xmm1
	pxor	%xmm1,%xmm3
	pslldq	$4,%xmm1
	pxor	%xmm3,%xmm1
	pxor	%xmm1,%xmm2
	movdqu	%xmm2,16(%edx)
	leal	32(%edx),%edx
	movdqa	%xmm2,%xmm1
	jmp	.L132loop_key256
.L133done_key256:
	movl	$13,%ecx
	movl	%ecx,16(%edx)
.L120good_key:
	/* Success: scrub key material from XMM registers, return 0. */
	pxor	%xmm0,%xmm0
	pxor	%xmm1,%xmm1
	pxor	%xmm2,%xmm2
	pxor	%xmm3,%xmm3
	pxor	%xmm4,%xmm4
	pxor	%xmm5,%xmm5
	xorl	%eax,%eax
	popl	%ebx
	popl	%ebp
	ret
.align	4
.L111bad_pointer:
	movl	$-1,%eax
	popl	%ebx
	popl	%ebp
	ret
.align	4
.L115bad_keybits:
	pxor	%xmm0,%xmm0
	movl	$-2,%eax
	popl	%ebx
	popl	%ebp
	ret
.size	_aesni_set_encrypt_key,.-_aesni_set_encrypt_key
/*
 * int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
 *                           AES_KEY *key)
 * Public cdecl wrapper: marshals the stack arguments into the register
 * convention of _aesni_set_encrypt_key (%eax=userKey, %ecx=bits,
 * %edx=key) and tail-returns its status (0 / -1 / -2) in %eax.
 */
.globl	aesni_set_encrypt_key
.type	aesni_set_encrypt_key,@function
.align	16
aesni_set_encrypt_key:
.L_aesni_set_encrypt_key_begin:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movl	4(%esp),%eax
	movl	8(%esp),%ecx
	movl	12(%esp),%edx
	call	_aesni_set_encrypt_key
	ret
.size	aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin
/*
 * int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
 *                           AES_KEY *key)
 * Builds the encryption schedule, then converts it in place for the
 * Equivalent Inverse Cipher: the first and last round keys are swapped
 * and every key is run through aesimc (.byte 102,15,56,219) while the
 * schedule is reversed pairwise.  Returns _aesni_set_encrypt_key's
 * status unchanged on failure; 0 on success.
 */
.globl	aesni_set_decrypt_key
.type	aesni_set_decrypt_key,@function
.align	16
aesni_set_decrypt_key:
.L_aesni_set_decrypt_key_begin:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movl	4(%esp),%eax
	movl	8(%esp),%ecx
	movl	12(%esp),%edx
	call	_aesni_set_encrypt_key
	movl	12(%esp),%edx
	shll	$4,%ecx
	testl	%eax,%eax
	jnz	.L134dec_key_ret
	leal	16(%edx,%ecx,1),%eax
	/* Swap first and last round keys, then walk inward from both ends
	   applying InvMixColumns (aesimc) to each remaining round key. */
	movups	(%edx),%xmm0
	movups	(%eax),%xmm1
	movups	%xmm0,(%eax)
	movups	%xmm1,(%edx)
	leal	16(%edx),%edx
	leal	-16(%eax),%eax
.L135dec_key_inverse:
	movups	(%edx),%xmm0
	movups	(%eax),%xmm1
.byte	102,15,56,219,192
.byte	102,15,56,219,201
	leal	16(%edx),%edx
	leal	-16(%eax),%eax
	movups	%xmm0,16(%eax)
	movups	%xmm1,-16(%edx)
	cmpl	%edx,%eax
	ja	.L135dec_key_inverse
	movups	(%edx),%xmm0
.byte	102,15,56,219,192
	movups	%xmm0,(%edx)
	pxor	%xmm0,%xmm0
	pxor	%xmm1,%xmm1
	xorl	%eax,%eax
.L134dec_key_ret:
	ret
.size	aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin
/* Constants for the "_alt" (AVX-era) key-expansion paths, addressed
   PIC-relative through %ebx.  The first two rows look like pshufb
   byte-permutation masks (0x0c0f0e0d... / 0x04070605...); the {1,...}
   and {27,...} rows seed the doubling round-constant in %xmm4. */
.align	64
.Lkey_const:
.long	202313229,202313229,202313229,202313229
.long	67569157,67569157,67569157,67569157
.long	1,1,1,1
.long	27,27,27,27
/* ASCII attribution: "AES for Intel AES-NI, CRYPTOGAMS by
   <appro@openssl.org>" (NUL-terminated). */
.byte	65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69
.byte	83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
.byte	32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
.byte	115,108,46,111,114,103,62,0
/* Shared CPU-capability vector, filled in by OPENSSL_ia32_cpuid. */
.comm	OPENSSL_ia32cap_P,16,4
3350
	/* GNU property note marking this object as Intel CET-compatible
	   (property type 0xc0000002, feature bits 3 = IBT|SHSTK), matching
	   the endbr32 (.byte 243,15,30,251) stubs at each entry point. */
	.section ".note.gnu.property", "a"
	.p2align 2
	.long 1f - 0f
	.long 4f - 1f
	.long 5
0:
	.asciz "GNU"
1:
	.p2align 2
	.long 0xc0000002
	.long 3f - 2f
2:
	.long 3
3:
	.p2align 2
4:
3367#else
3368.text
/*
 * void aesni_encrypt(const unsigned char *in, unsigned char *out,
 *                    const AES_KEY *key)   -- non-PIC build.
 * Encrypts a single 16-byte block: 4(%esp)=in, 8(%esp)=out,
 * 12(%esp)=key; round count read from 240(key).  The .byte sequences
 * are hand-encoded aesenc / aesenclast.  Scrubs %xmm0-%xmm2 on exit.
 */
.globl	aesni_encrypt
.type	aesni_encrypt,@function
.align	16
aesni_encrypt:
.L_aesni_encrypt_begin:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movl	4(%esp),%eax
	movl	12(%esp),%edx
	movups	(%eax),%xmm2
	movl	240(%edx),%ecx
	movl	8(%esp),%eax
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L000enc1_loop_1:
.byte	102,15,56,220,209
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L000enc1_loop_1
.byte	102,15,56,221,209
	pxor	%xmm0,%xmm0
	pxor	%xmm1,%xmm1
	movups	%xmm2,(%eax)
	pxor	%xmm2,%xmm2
	ret
.size	aesni_encrypt,.-.L_aesni_encrypt_begin
/*
 * void aesni_decrypt(const unsigned char *in, unsigned char *out,
 *                    const AES_KEY *key)   -- non-PIC build.
 * Decrypts a single 16-byte block; same layout as aesni_encrypt but
 * with hand-encoded aesdec / aesdeclast (.byte 102,15,56,222/223).
 */
.globl	aesni_decrypt
.type	aesni_decrypt,@function
.align	16
aesni_decrypt:
.L_aesni_decrypt_begin:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movl	4(%esp),%eax
	movl	12(%esp),%edx
	movups	(%eax),%xmm2
	movl	240(%edx),%ecx
	movl	8(%esp),%eax
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L001dec1_loop_2:
.byte	102,15,56,222,209
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L001dec1_loop_2
.byte	102,15,56,223,209
	pxor	%xmm0,%xmm0
	pxor	%xmm1,%xmm1
	movups	%xmm2,(%eax)
	pxor	%xmm2,%xmm2
	ret
.size	aesni_decrypt,.-.L_aesni_decrypt_begin
/*
 * _aesni_encrypt2: encrypt two blocks (%xmm2, %xmm3) in place.
 * In:  %edx = key schedule, %ecx = round count.
 * The loop counter is %ecx scaled by 16 and negated so round keys are
 * fetched at (%edx,%ecx); two keys are consumed per iteration, with
 * both streams interleaved to hide aesenc latency.
 * Clobbers %xmm0, %xmm1, %ecx, %edx (callers reload from %ebx/%ebp).
 */
.type	_aesni_encrypt2,@function
.align	16
_aesni_encrypt2:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movups	(%edx),%xmm0
	shll	$4,%ecx
	movups	16(%edx),%xmm1
	xorps	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
	movups	32(%edx),%xmm0
	leal	32(%edx,%ecx,1),%edx
	negl	%ecx
	addl	$16,%ecx
.L002enc2_loop:
.byte	102,15,56,220,209
.byte	102,15,56,220,217
	movups	(%edx,%ecx,1),%xmm1
	addl	$32,%ecx
.byte	102,15,56,220,208
.byte	102,15,56,220,216
	movups	-16(%edx,%ecx,1),%xmm0
	jnz	.L002enc2_loop
.byte	102,15,56,220,209
.byte	102,15,56,220,217
.byte	102,15,56,221,208
.byte	102,15,56,221,216
	ret
.size	_aesni_encrypt2,.-_aesni_encrypt2
/*
 * _aesni_decrypt2: decrypt two blocks (%xmm2, %xmm3) in place.
 * Mirror of _aesni_encrypt2 using aesdec / aesdeclast
 * (.byte 102,15,56,222 / 102,15,56,223).
 * In:  %edx = key schedule, %ecx = round count.
 * Clobbers %xmm0, %xmm1, %ecx, %edx.
 */
.type	_aesni_decrypt2,@function
.align	16
_aesni_decrypt2:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movups	(%edx),%xmm0
	shll	$4,%ecx
	movups	16(%edx),%xmm1
	xorps	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
	movups	32(%edx),%xmm0
	leal	32(%edx,%ecx,1),%edx
	negl	%ecx
	addl	$16,%ecx
.L003dec2_loop:
.byte	102,15,56,222,209
.byte	102,15,56,222,217
	movups	(%edx,%ecx,1),%xmm1
	addl	$32,%ecx
.byte	102,15,56,222,208
.byte	102,15,56,222,216
	movups	-16(%edx,%ecx,1),%xmm0
	jnz	.L003dec2_loop
.byte	102,15,56,222,209
.byte	102,15,56,222,217
.byte	102,15,56,223,208
.byte	102,15,56,223,216
	ret
.size	_aesni_decrypt2,.-_aesni_decrypt2
/*
 * _aesni_encrypt3: encrypt three blocks (%xmm2-%xmm4) in place.
 * In:  %edx = key schedule, %ecx = round count.
 * Same negative-index round-key walk as _aesni_encrypt2, three
 * interleaved streams.  Clobbers %xmm0, %xmm1, %ecx, %edx.
 */
.type	_aesni_encrypt3,@function
.align	16
_aesni_encrypt3:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movups	(%edx),%xmm0
	shll	$4,%ecx
	movups	16(%edx),%xmm1
	xorps	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
	pxor	%xmm0,%xmm4
	movups	32(%edx),%xmm0
	leal	32(%edx,%ecx,1),%edx
	negl	%ecx
	addl	$16,%ecx
.L004enc3_loop:
.byte	102,15,56,220,209
.byte	102,15,56,220,217
.byte	102,15,56,220,225
	movups	(%edx,%ecx,1),%xmm1
	addl	$32,%ecx
.byte	102,15,56,220,208
.byte	102,15,56,220,216
.byte	102,15,56,220,224
	movups	-16(%edx,%ecx,1),%xmm0
	jnz	.L004enc3_loop
.byte	102,15,56,220,209
.byte	102,15,56,220,217
.byte	102,15,56,220,225
.byte	102,15,56,221,208
.byte	102,15,56,221,216
.byte	102,15,56,221,224
	ret
.size	_aesni_encrypt3,.-_aesni_encrypt3
/*
 * _aesni_decrypt3: decrypt three blocks (%xmm2-%xmm4) in place.
 * Mirror of _aesni_encrypt3 with aesdec / aesdeclast.
 * In:  %edx = key schedule, %ecx = round count.
 * Clobbers %xmm0, %xmm1, %ecx, %edx.
 */
.type	_aesni_decrypt3,@function
.align	16
_aesni_decrypt3:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movups	(%edx),%xmm0
	shll	$4,%ecx
	movups	16(%edx),%xmm1
	xorps	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
	pxor	%xmm0,%xmm4
	movups	32(%edx),%xmm0
	leal	32(%edx,%ecx,1),%edx
	negl	%ecx
	addl	$16,%ecx
.L005dec3_loop:
.byte	102,15,56,222,209
.byte	102,15,56,222,217
.byte	102,15,56,222,225
	movups	(%edx,%ecx,1),%xmm1
	addl	$32,%ecx
.byte	102,15,56,222,208
.byte	102,15,56,222,216
.byte	102,15,56,222,224
	movups	-16(%edx,%ecx,1),%xmm0
	jnz	.L005dec3_loop
.byte	102,15,56,222,209
.byte	102,15,56,222,217
.byte	102,15,56,222,225
.byte	102,15,56,223,208
.byte	102,15,56,223,216
.byte	102,15,56,223,224
	ret
.size	_aesni_decrypt3,.-_aesni_decrypt3
/*
 * _aesni_encrypt4: encrypt four blocks (%xmm2-%xmm5) in place.
 * In:  %edx = key schedule, %ecx = round count.
 * The ".byte 15,31,64,0" is a 4-byte nop (nopl 0(%eax)) inserted for
 * code alignment/decoder reasons by the generator.
 * Clobbers %xmm0, %xmm1, %ecx, %edx.
 */
.type	_aesni_encrypt4,@function
.align	16
_aesni_encrypt4:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	shll	$4,%ecx
	xorps	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
	pxor	%xmm0,%xmm4
	pxor	%xmm0,%xmm5
	movups	32(%edx),%xmm0
	leal	32(%edx,%ecx,1),%edx
	negl	%ecx
.byte	15,31,64,0
	addl	$16,%ecx
.L006enc4_loop:
.byte	102,15,56,220,209
.byte	102,15,56,220,217
.byte	102,15,56,220,225
.byte	102,15,56,220,233
	movups	(%edx,%ecx,1),%xmm1
	addl	$32,%ecx
.byte	102,15,56,220,208
.byte	102,15,56,220,216
.byte	102,15,56,220,224
.byte	102,15,56,220,232
	movups	-16(%edx,%ecx,1),%xmm0
	jnz	.L006enc4_loop
.byte	102,15,56,220,209
.byte	102,15,56,220,217
.byte	102,15,56,220,225
.byte	102,15,56,220,233
.byte	102,15,56,221,208
.byte	102,15,56,221,216
.byte	102,15,56,221,224
.byte	102,15,56,221,232
	ret
.size	_aesni_encrypt4,.-_aesni_encrypt4
/*
 * _aesni_decrypt4: decrypt four blocks (%xmm2-%xmm5) in place.
 * Mirror of _aesni_encrypt4 with aesdec / aesdeclast.
 * In:  %edx = key schedule, %ecx = round count.
 * Clobbers %xmm0, %xmm1, %ecx, %edx.
 */
.type	_aesni_decrypt4,@function
.align	16
_aesni_decrypt4:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	shll	$4,%ecx
	xorps	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
	pxor	%xmm0,%xmm4
	pxor	%xmm0,%xmm5
	movups	32(%edx),%xmm0
	leal	32(%edx,%ecx,1),%edx
	negl	%ecx
.byte	15,31,64,0
	addl	$16,%ecx
.L007dec4_loop:
.byte	102,15,56,222,209
.byte	102,15,56,222,217
.byte	102,15,56,222,225
.byte	102,15,56,222,233
	movups	(%edx,%ecx,1),%xmm1
	addl	$32,%ecx
.byte	102,15,56,222,208
.byte	102,15,56,222,216
.byte	102,15,56,222,224
.byte	102,15,56,222,232
	movups	-16(%edx,%ecx,1),%xmm0
	jnz	.L007dec4_loop
.byte	102,15,56,222,209
.byte	102,15,56,222,217
.byte	102,15,56,222,225
.byte	102,15,56,222,233
.byte	102,15,56,223,208
.byte	102,15,56,223,216
.byte	102,15,56,223,224
.byte	102,15,56,223,232
	ret
.size	_aesni_decrypt4,.-_aesni_decrypt4
/*
 * _aesni_encrypt6: encrypt six blocks (%xmm2-%xmm7) in place.
 * In:  %edx = key schedule, %ecx = round count.
 * Round 0 whitening is staggered with the first aesenc of the earlier
 * registers to shorten the dependency chain; .L_aesni_encrypt6_enter
 * is an alternate entry point used by other callers in this file
 * (presumably the CTR path, outside this view).
 * Clobbers %xmm0, %xmm1, %ecx, %edx.
 */
.type	_aesni_encrypt6,@function
.align	16
_aesni_encrypt6:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movups	(%edx),%xmm0
	shll	$4,%ecx
	movups	16(%edx),%xmm1
	xorps	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
	pxor	%xmm0,%xmm4
.byte	102,15,56,220,209
	pxor	%xmm0,%xmm5
	pxor	%xmm0,%xmm6
.byte	102,15,56,220,217
	leal	32(%edx,%ecx,1),%edx
	negl	%ecx
.byte	102,15,56,220,225
	pxor	%xmm0,%xmm7
	movups	(%edx,%ecx,1),%xmm0
	addl	$16,%ecx
	jmp	.L008_aesni_encrypt6_inner
.align	16
.L009enc6_loop:
.byte	102,15,56,220,209
.byte	102,15,56,220,217
.byte	102,15,56,220,225
.L008_aesni_encrypt6_inner:
.byte	102,15,56,220,233
.byte	102,15,56,220,241
.byte	102,15,56,220,249
.L_aesni_encrypt6_enter:
	movups	(%edx,%ecx,1),%xmm1
	addl	$32,%ecx
.byte	102,15,56,220,208
.byte	102,15,56,220,216
.byte	102,15,56,220,224
.byte	102,15,56,220,232
.byte	102,15,56,220,240
.byte	102,15,56,220,248
	movups	-16(%edx,%ecx,1),%xmm0
	jnz	.L009enc6_loop
.byte	102,15,56,220,209
.byte	102,15,56,220,217
.byte	102,15,56,220,225
.byte	102,15,56,220,233
.byte	102,15,56,220,241
.byte	102,15,56,220,249
.byte	102,15,56,221,208
.byte	102,15,56,221,216
.byte	102,15,56,221,224
.byte	102,15,56,221,232
.byte	102,15,56,221,240
.byte	102,15,56,221,248
	ret
.size	_aesni_encrypt6,.-_aesni_encrypt6
/*
 * _aesni_decrypt6: decrypt six blocks (%xmm2-%xmm7) in place.
 * Mirror of _aesni_encrypt6 with aesdec / aesdeclast; also exposes an
 * alternate entry point .L_aesni_decrypt6_enter.
 * In:  %edx = key schedule, %ecx = round count.
 * Clobbers %xmm0, %xmm1, %ecx, %edx.
 */
.type	_aesni_decrypt6,@function
.align	16
_aesni_decrypt6:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	movups	(%edx),%xmm0
	shll	$4,%ecx
	movups	16(%edx),%xmm1
	xorps	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
	pxor	%xmm0,%xmm4
.byte	102,15,56,222,209
	pxor	%xmm0,%xmm5
	pxor	%xmm0,%xmm6
.byte	102,15,56,222,217
	leal	32(%edx,%ecx,1),%edx
	negl	%ecx
.byte	102,15,56,222,225
	pxor	%xmm0,%xmm7
	movups	(%edx,%ecx,1),%xmm0
	addl	$16,%ecx
	jmp	.L010_aesni_decrypt6_inner
.align	16
.L011dec6_loop:
.byte	102,15,56,222,209
.byte	102,15,56,222,217
.byte	102,15,56,222,225
.L010_aesni_decrypt6_inner:
.byte	102,15,56,222,233
.byte	102,15,56,222,241
.byte	102,15,56,222,249
.L_aesni_decrypt6_enter:
	movups	(%edx,%ecx,1),%xmm1
	addl	$32,%ecx
.byte	102,15,56,222,208
.byte	102,15,56,222,216
.byte	102,15,56,222,224
.byte	102,15,56,222,232
.byte	102,15,56,222,240
.byte	102,15,56,222,248
	movups	-16(%edx,%ecx,1),%xmm0
	jnz	.L011dec6_loop
.byte	102,15,56,222,209
.byte	102,15,56,222,217
.byte	102,15,56,222,225
.byte	102,15,56,222,233
.byte	102,15,56,222,241
.byte	102,15,56,222,249
.byte	102,15,56,223,208
.byte	102,15,56,223,216
.byte	102,15,56,223,224
.byte	102,15,56,223,232
.byte	102,15,56,223,240
.byte	102,15,56,223,248
	ret
.size	_aesni_decrypt6,.-_aesni_decrypt6
/*
 * void aesni_ecb_encrypt(const unsigned char *in, unsigned char *out,
 *                        size_t length, const AES_KEY *key, int enc)
 * i386 cdecl.  After the four pushes: 20(%esp)=in, 24(%esp)=out,
 * 28(%esp)=length (rounded down to a whole number of blocks),
 * 32(%esp)=key, 36(%esp)=enc (non-zero: encrypt, zero: decrypt).
 * Bulk path processes six blocks per iteration via _aesni_encrypt6 /
 * _aesni_decrypt6 (%ebp/%ebx preserve key pointer and round count
 * across the calls); tails of 1..5 blocks dispatch to the smaller
 * helpers.  All XMM registers are scrubbed before returning.
 */
.globl	aesni_ecb_encrypt
.type	aesni_ecb_encrypt,@function
.align	16
aesni_ecb_encrypt:
.L_aesni_ecb_encrypt_begin:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi
	movl	24(%esp),%edi
	movl	28(%esp),%eax
	movl	32(%esp),%edx
	movl	36(%esp),%ebx
	andl	$-16,%eax
	jz	.L012ecb_ret
	movl	240(%edx),%ecx
	testl	%ebx,%ebx
	jz	.L013ecb_decrypt
	movl	%edx,%ebp
	movl	%ecx,%ebx
	cmpl	$96,%eax
	jb	.L014ecb_enc_tail
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	movdqu	48(%esi),%xmm5
	movdqu	64(%esi),%xmm6
	movdqu	80(%esi),%xmm7
	leal	96(%esi),%esi
	subl	$96,%eax
	jmp	.L015ecb_enc_loop6_enter
.align	16
.L016ecb_enc_loop6:
	/* Store the previous six results while loading the next six,
	   overlapping memory traffic with the upcoming encrypt call. */
	movups	%xmm2,(%edi)
	movdqu	(%esi),%xmm2
	movups	%xmm3,16(%edi)
	movdqu	16(%esi),%xmm3
	movups	%xmm4,32(%edi)
	movdqu	32(%esi),%xmm4
	movups	%xmm5,48(%edi)
	movdqu	48(%esi),%xmm5
	movups	%xmm6,64(%edi)
	movdqu	64(%esi),%xmm6
	movups	%xmm7,80(%edi)
	leal	96(%edi),%edi
	movdqu	80(%esi),%xmm7
	leal	96(%esi),%esi
.L015ecb_enc_loop6_enter:
	call	_aesni_encrypt6
	movl	%ebp,%edx
	movl	%ebx,%ecx
	subl	$96,%eax
	jnc	.L016ecb_enc_loop6
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	movups	%xmm6,64(%edi)
	movups	%xmm7,80(%edi)
	leal	96(%edi),%edi
	addl	$96,%eax
	jz	.L012ecb_ret
.L014ecb_enc_tail:
	/* 1..5 remaining blocks: dispatch on the residual byte count. */
	movups	(%esi),%xmm2
	cmpl	$32,%eax
	jb	.L017ecb_enc_one
	movups	16(%esi),%xmm3
	je	.L018ecb_enc_two
	movups	32(%esi),%xmm4
	cmpl	$64,%eax
	jb	.L019ecb_enc_three
	movups	48(%esi),%xmm5
	je	.L020ecb_enc_four
	movups	64(%esi),%xmm6
	xorps	%xmm7,%xmm7
	call	_aesni_encrypt6
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	movups	%xmm6,64(%edi)
	jmp	.L012ecb_ret
.align	16
.L017ecb_enc_one:
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L021enc1_loop_3:
.byte	102,15,56,220,209
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L021enc1_loop_3
.byte	102,15,56,221,209
	movups	%xmm2,(%edi)
	jmp	.L012ecb_ret
.align	16
.L018ecb_enc_two:
	call	_aesni_encrypt2
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	jmp	.L012ecb_ret
.align	16
.L019ecb_enc_three:
	call	_aesni_encrypt3
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	jmp	.L012ecb_ret
.align	16
.L020ecb_enc_four:
	call	_aesni_encrypt4
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	jmp	.L012ecb_ret
.align	16
.L013ecb_decrypt:
	/* Decrypt side: identical structure to the encrypt side. */
	movl	%edx,%ebp
	movl	%ecx,%ebx
	cmpl	$96,%eax
	jb	.L022ecb_dec_tail
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	movdqu	48(%esi),%xmm5
	movdqu	64(%esi),%xmm6
	movdqu	80(%esi),%xmm7
	leal	96(%esi),%esi
	subl	$96,%eax
	jmp	.L023ecb_dec_loop6_enter
.align	16
.L024ecb_dec_loop6:
	movups	%xmm2,(%edi)
	movdqu	(%esi),%xmm2
	movups	%xmm3,16(%edi)
	movdqu	16(%esi),%xmm3
	movups	%xmm4,32(%edi)
	movdqu	32(%esi),%xmm4
	movups	%xmm5,48(%edi)
	movdqu	48(%esi),%xmm5
	movups	%xmm6,64(%edi)
	movdqu	64(%esi),%xmm6
	movups	%xmm7,80(%edi)
	leal	96(%edi),%edi
	movdqu	80(%esi),%xmm7
	leal	96(%esi),%esi
.L023ecb_dec_loop6_enter:
	call	_aesni_decrypt6
	movl	%ebp,%edx
	movl	%ebx,%ecx
	subl	$96,%eax
	jnc	.L024ecb_dec_loop6
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	movups	%xmm6,64(%edi)
	movups	%xmm7,80(%edi)
	leal	96(%edi),%edi
	addl	$96,%eax
	jz	.L012ecb_ret
.L022ecb_dec_tail:
	movups	(%esi),%xmm2
	cmpl	$32,%eax
	jb	.L025ecb_dec_one
	movups	16(%esi),%xmm3
	je	.L026ecb_dec_two
	movups	32(%esi),%xmm4
	cmpl	$64,%eax
	jb	.L027ecb_dec_three
	movups	48(%esi),%xmm5
	je	.L028ecb_dec_four
	movups	64(%esi),%xmm6
	xorps	%xmm7,%xmm7
	call	_aesni_decrypt6
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	movups	%xmm6,64(%edi)
	jmp	.L012ecb_ret
.align	16
.L025ecb_dec_one:
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L029dec1_loop_4:
.byte	102,15,56,222,209
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L029dec1_loop_4
.byte	102,15,56,223,209
	movups	%xmm2,(%edi)
	jmp	.L012ecb_ret
.align	16
.L026ecb_dec_two:
	call	_aesni_decrypt2
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	jmp	.L012ecb_ret
.align	16
.L027ecb_dec_three:
	call	_aesni_decrypt3
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	jmp	.L012ecb_ret
.align	16
.L028ecb_dec_four:
	call	_aesni_decrypt4
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
.L012ecb_ret:
	/* Scrub all XMM state (may hold key/plaintext material). */
	pxor	%xmm0,%xmm0
	pxor	%xmm1,%xmm1
	pxor	%xmm2,%xmm2
	pxor	%xmm3,%xmm3
	pxor	%xmm4,%xmm4
	pxor	%xmm5,%xmm5
	pxor	%xmm6,%xmm6
	pxor	%xmm7,%xmm7
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin
/*
 * void aesni_ccm64_encrypt_blocks(const unsigned char *in,
 *                                 unsigned char *out, size_t blocks,
 *                                 const AES_KEY *key,
 *                                 const unsigned char ivec[16],
 *                                 unsigned char cmac[16])
 * CCM with 64-bit counter: per iteration, encrypts the counter block
 * (%xmm2) and folds the plaintext into the CMAC (%xmm3) with the same
 * interleaved two-stream AES loop, XORs the keystream into the input,
 * then bumps the counter via paddq with the {1,0} stored at 16(%esp)
 * and re-byte-swaps it with pshufb (.byte 102,15,56,0,...).  The
 * constants written to (%esp) form the big-endian byte-swap mask.
 * On exit the CMAC is stored back to 40(%esp)'s buffer and all XMM
 * state is scrubbed.  Stack args (after pushes): 20=in, 24=out,
 * 28=blocks, 32=key, 36=ivec, 40=cmac.
 */
.globl	aesni_ccm64_encrypt_blocks
.type	aesni_ccm64_encrypt_blocks,@function
.align	16
aesni_ccm64_encrypt_blocks:
.L_aesni_ccm64_encrypt_blocks_begin:
	#ifdef __CET__

.byte	243,15,30,251
	#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi
	movl	24(%esp),%edi
	movl	28(%esp),%eax
	movl	32(%esp),%edx
	movl	36(%esp),%ebx
	movl	40(%esp),%ecx
	movl	%esp,%ebp
	subl	$60,%esp
	andl	$-16,%esp
	movl	%ebp,48(%esp)
	movdqu	(%ebx),%xmm7
	movdqu	(%ecx),%xmm3
	movl	240(%edx),%ecx
	movl	$202182159,(%esp)
	movl	$134810123,4(%esp)
	movl	$67438087,8(%esp)
	movl	$66051,12(%esp)
	movl	$1,%ebx
	xorl	%ebp,%ebp
	movl	%ebx,16(%esp)
	movl	%ebp,20(%esp)
	movl	%ebp,24(%esp)
	movl	%ebp,28(%esp)
	shll	$4,%ecx
	movl	$16,%ebx
	leal	(%edx),%ebp
	movdqa	(%esp),%xmm5
	movdqa	%xmm7,%xmm2
	leal	32(%edx,%ecx,1),%edx
	subl	%ecx,%ebx
.byte	102,15,56,0,253
.L030ccm64_enc_outer:
	movups	(%ebp),%xmm0
	movl	%ebx,%ecx
	movups	(%esi),%xmm6
	xorps	%xmm0,%xmm2
	movups	16(%ebp),%xmm1
	xorps	%xmm6,%xmm0
	xorps	%xmm0,%xmm3
	movups	32(%ebp),%xmm0
.L031ccm64_enc2_loop:
.byte	102,15,56,220,209
.byte	102,15,56,220,217
	movups	(%edx,%ecx,1),%xmm1
	addl	$32,%ecx
.byte	102,15,56,220,208
.byte	102,15,56,220,216
	movups	-16(%edx,%ecx,1),%xmm0
	jnz	.L031ccm64_enc2_loop
.byte	102,15,56,220,209
.byte	102,15,56,220,217
	paddq	16(%esp),%xmm7
	decl	%eax
.byte	102,15,56,221,208
.byte	102,15,56,221,216
	leal	16(%esi),%esi
	xorps	%xmm2,%xmm6
	movdqa	%xmm7,%xmm2
	movups	%xmm6,(%edi)
.byte	102,15,56,0,213
	leal	16(%edi),%edi
	jnz	.L030ccm64_enc_outer
	movl	48(%esp),%esp
	movl	40(%esp),%edi
	movups	%xmm3,(%edi)
	pxor	%xmm0,%xmm0
	pxor	%xmm1,%xmm1
	pxor	%xmm2,%xmm2
	pxor	%xmm3,%xmm3
	pxor	%xmm4,%xmm4
	pxor	%xmm5,%xmm5
	pxor	%xmm6,%xmm6
	pxor	%xmm7,%xmm7
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin
/*
 * void aesni_ccm64_decrypt_blocks(const unsigned char *in, unsigned char *out,
 *                                 size_t blocks, const AES_KEY *key,
 *                                 const unsigned char *ivec,
 *                                 unsigned char *cmac);   -- presumed prototype
 * CCM decryption: the first counter is encrypted up front, then each loop
 * iteration XORs a ciphertext block against the previous keystream block and
 * pipelines the NEXT counter encryption together with the CBC-MAC update of
 * the just-recovered plaintext (both use AESENC, as CCM's MAC runs the
 * forward cipher). The final CMAC pass runs after the loop breaks.
 */
.globl	aesni_ccm64_decrypt_blocks
.type	aesni_ccm64_decrypt_blocks,@function
.align	16
aesni_ccm64_decrypt_blocks:
.L_aesni_ccm64_decrypt_blocks_begin:
	#ifdef __CET__

.byte	243,15,30,251		/* endbr32 */
	#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi
	movl	24(%esp),%edi
	movl	28(%esp),%eax
	movl	32(%esp),%edx
	movl	36(%esp),%ebx
	movl	40(%esp),%ecx
	movl	%esp,%ebp
	subl	$60,%esp
	andl	$-16,%esp		/* 16-align scratch; old %esp saved below */
	movl	%ebp,48(%esp)
	movdqu	(%ebx),%xmm7		/* xmm7 = counter block */
	movdqu	(%ecx),%xmm3		/* xmm3 = running CMAC */
	movl	240(%edx),%ecx		/* ecx = key->rounds */
	movl	$202182159,(%esp)	/* byte-swap mask, as in the encrypt path */
	movl	$134810123,4(%esp)
	movl	$67438087,8(%esp)
	movl	$66051,12(%esp)
	movl	$1,%ebx			/* 16(%esp) = 64-bit increment constant */
	xorl	%ebp,%ebp
	movl	%ebx,16(%esp)
	movl	%ebp,20(%esp)
	movl	%ebp,24(%esp)
	movl	%ebp,28(%esp)
	movdqa	(%esp),%xmm5		/* xmm5 = swap mask */
	movdqa	%xmm7,%xmm2
	movl	%edx,%ebp		/* keep key base / rounds for later passes */
	movl	%ecx,%ebx
.byte	102,15,56,0,253		/* pshufb %xmm5,%xmm7 */
	movups	(%edx),%xmm0		/* encrypt first counter (single stream) */
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L032enc1_loop_5:
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L032enc1_loop_5
.byte	102,15,56,221,209	/* aesenclast %xmm1,%xmm2 */
	shll	$4,%ebx
	movl	$16,%ecx
	movups	(%esi),%xmm6		/* first ciphertext block */
	paddq	16(%esp),%xmm7		/* counter++ */
	leal	16(%esi),%esi
	subl	%ebx,%ecx		/* negative round-key offset bias */
	leal	32(%ebp,%ebx,1),%edx	/* edx = end of key schedule */
	movl	%ecx,%ebx
	jmp	.L033ccm64_dec_outer
.align	16
.L033ccm64_dec_outer:
	xorps	%xmm2,%xmm6		/* plaintext = ciphertext ^ keystream */
	movdqa	%xmm7,%xmm2
	movups	%xmm6,(%edi)
	leal	16(%edi),%edi
.byte	102,15,56,0,213		/* pshufb %xmm5,%xmm2 (next counter) */
	subl	$1,%eax
	jz	.L034ccm64_dec_break	/* last block: finish CMAC below */
	movups	(%ebp),%xmm0
	movl	%ebx,%ecx
	movups	16(%ebp),%xmm1
	xorps	%xmm0,%xmm6
	xorps	%xmm0,%xmm2
	xorps	%xmm6,%xmm3		/* CMAC ^= plaintext */
	movups	32(%ebp),%xmm0
.L035ccm64_dec2_loop:
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
.byte	102,15,56,220,217	/* aesenc %xmm1,%xmm3 */
	movups	(%edx,%ecx,1),%xmm1
	addl	$32,%ecx
.byte	102,15,56,220,208	/* aesenc %xmm0,%xmm2 */
.byte	102,15,56,220,216	/* aesenc %xmm0,%xmm3 */
	movups	-16(%edx,%ecx,1),%xmm0
	jnz	.L035ccm64_dec2_loop
	movups	(%esi),%xmm6		/* prefetch next ciphertext block */
	paddq	16(%esp),%xmm7		/* counter++ */
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
.byte	102,15,56,220,217	/* aesenc %xmm1,%xmm3 */
.byte	102,15,56,221,208	/* aesenclast %xmm0,%xmm2 */
.byte	102,15,56,221,216	/* aesenclast %xmm0,%xmm3 */
	leal	16(%esi),%esi
	jmp	.L033ccm64_dec_outer
.align	16
.L034ccm64_dec_break:
	movl	240(%ebp),%ecx		/* fold last plaintext into CMAC */
	movl	%ebp,%edx
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	xorps	%xmm0,%xmm6
	leal	32(%edx),%edx
	xorps	%xmm6,%xmm3
.L036enc1_loop_6:
.byte	102,15,56,220,217	/* aesenc %xmm1,%xmm3 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L036enc1_loop_6
.byte	102,15,56,221,217	/* aesenclast %xmm1,%xmm3 */
	movl	48(%esp),%esp		/* restore caller stack frame */
	movl	40(%esp),%edi
	movups	%xmm3,(%edi)		/* write back final CMAC */
	pxor	%xmm0,%xmm0		/* scrub XMM state (key material) */
	pxor	%xmm1,%xmm1
	pxor	%xmm2,%xmm2
	pxor	%xmm3,%xmm3
	pxor	%xmm4,%xmm4
	pxor	%xmm5,%xmm5
	pxor	%xmm6,%xmm6
	pxor	%xmm7,%xmm7
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin
/*
 * void aesni_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
 *                                 size_t blocks, const AES_KEY *key,
 *                                 const unsigned char *ivec); -- presumed
 *   stack args observed: 20(%esp)=in, 24=out, 28=blocks, 32=key, 36=ivec.
 * CTR mode with a 32-bit big-endian counter in the last word of the IV.
 * The main loop encrypts 6 counter blocks at a time via _aesni_encrypt6;
 * counters are maintained as two SSE vectors of little-endian lane values
 * (built with pinsrd) and byte-swapped with pshufb when consumed. Tails of
 * 1..5 blocks fall through to dedicated paths. Single-block calls take the
 * .L037ctr32_one_shortcut fast path.
 */
.globl	aesni_ctr32_encrypt_blocks
.type	aesni_ctr32_encrypt_blocks,@function
.align	16
aesni_ctr32_encrypt_blocks:
.L_aesni_ctr32_encrypt_blocks_begin:
	#ifdef __CET__

.byte	243,15,30,251		/* endbr32 */
	#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi
	movl	24(%esp),%edi
	movl	28(%esp),%eax
	movl	32(%esp),%edx
	movl	36(%esp),%ebx
	movl	%esp,%ebp
	subl	$88,%esp
	andl	$-16,%esp		/* 16-align scratch; old %esp kept at 80(%esp) */
	movl	%ebp,80(%esp)
	cmpl	$1,%eax
	je	.L037ctr32_one_shortcut
	movdqu	(%ebx),%xmm7		/* xmm7 = IV/counter block */
	movl	$202182159,(%esp)	/* byte-swap mask at (%esp) */
	movl	$134810123,4(%esp)
	movl	$67438087,8(%esp)
	movl	$66051,12(%esp)
	movl	$6,%ecx			/* 16(%esp) = {6,6,6,0}: per-lane ctr stride */
	xorl	%ebp,%ebp
	movl	%ecx,16(%esp)
	movl	%ecx,20(%esp)
	movl	%ecx,24(%esp)
	movl	%ebp,28(%esp)
.byte	102,15,58,22,251,3	/* pextrd $3,%xmm7,%ebx: counter word */
.byte	102,15,58,34,253,3	/* pinsrd $3,%ebp,%xmm7: zero it in xmm7 */
	movl	240(%edx),%ecx		/* ecx = key->rounds */
	bswap	%ebx			/* counter to native endian for inc */
	pxor	%xmm0,%xmm0
	pxor	%xmm1,%xmm1
	movdqa	(%esp),%xmm2		/* xmm2 = swap mask */
.byte	102,15,58,34,195,0	/* pinsrd $0,%ebx,%xmm0: lanes ctr+0..2 */
	leal	3(%ebx),%ebp
.byte	102,15,58,34,205,0	/* pinsrd $0,%ebp,%xmm1: lanes ctr+3..5 */
	incl	%ebx
.byte	102,15,58,34,195,1	/* pinsrd $1,%ebx,%xmm0 */
	incl	%ebp
.byte	102,15,58,34,205,1	/* pinsrd $1,%ebp,%xmm1 */
	incl	%ebx
.byte	102,15,58,34,195,2	/* pinsrd $2,%ebx,%xmm0 */
	incl	%ebp
.byte	102,15,58,34,205,2	/* pinsrd $2,%ebp,%xmm1 */
	movdqa	%xmm0,48(%esp)		/* save LE counter lanes 0..2 */
.byte	102,15,56,0,194		/* pshufb %xmm2,%xmm0 (to big-endian) */
	movdqu	(%edx),%xmm6		/* round key 0 */
	movdqa	%xmm1,64(%esp)		/* save LE counter lanes 3..5 */
.byte	102,15,56,0,202		/* pshufb %xmm2,%xmm1 */
	pshufd	$192,%xmm0,%xmm2	/* broadcast counter word into blocks */
	pshufd	$128,%xmm0,%xmm3
	cmpl	$6,%eax
	jb	.L038ctr32_tail		/* fewer than 6 blocks: tail handling */
	pxor	%xmm6,%xmm7		/* pre-whiten IV body with round key 0 */
	shll	$4,%ecx
	movl	$16,%ebx
	movdqa	%xmm7,32(%esp)
	movl	%edx,%ebp
	subl	%ecx,%ebx		/* negative round-key offset bias */
	leal	32(%edx,%ecx,1),%edx
	subl	$6,%eax
	jmp	.L039ctr32_loop6
.align	16
.L039ctr32_loop6:
	/* assemble 6 counter blocks in xmm2..xmm7 and start round 1 */
	pshufd	$64,%xmm0,%xmm4
	movdqa	32(%esp),%xmm0
	pshufd	$192,%xmm1,%xmm5
	pxor	%xmm0,%xmm2
	pshufd	$128,%xmm1,%xmm6
	pxor	%xmm0,%xmm3
	pshufd	$64,%xmm1,%xmm7
	movups	16(%ebp),%xmm1
	pxor	%xmm0,%xmm4
	pxor	%xmm0,%xmm5
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
	pxor	%xmm0,%xmm6
	pxor	%xmm0,%xmm7
.byte	102,15,56,220,217	/* aesenc %xmm1,%xmm3 */
	movups	32(%ebp),%xmm0
	movl	%ebx,%ecx
.byte	102,15,56,220,225	/* aesenc %xmm1,%xmm4 */
.byte	102,15,56,220,233	/* aesenc %xmm1,%xmm5 */
.byte	102,15,56,220,241	/* aesenc %xmm1,%xmm6 */
.byte	102,15,56,220,249	/* aesenc %xmm1,%xmm7 */
	call	.L_aesni_encrypt6_enter	/* finish remaining rounds, 6-wide */
	movups	(%esi),%xmm1		/* XOR keystream with 6 input blocks */
	movups	16(%esi),%xmm0
	xorps	%xmm1,%xmm2
	movups	32(%esi),%xmm1
	xorps	%xmm0,%xmm3
	movups	%xmm2,(%edi)
	movdqa	16(%esp),%xmm0		/* advance counters by 6 per lane */
	xorps	%xmm1,%xmm4
	movdqa	64(%esp),%xmm1
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	paddd	%xmm0,%xmm1
	paddd	48(%esp),%xmm0
	movdqa	(%esp),%xmm2
	movups	48(%esi),%xmm3
	movups	64(%esi),%xmm4
	xorps	%xmm3,%xmm5
	movups	80(%esi),%xmm3
	leal	96(%esi),%esi
	movdqa	%xmm0,48(%esp)
.byte	102,15,56,0,194		/* pshufb %xmm2,%xmm0 */
	xorps	%xmm4,%xmm6
	movups	%xmm5,48(%edi)
	xorps	%xmm3,%xmm7
	movdqa	%xmm1,64(%esp)
.byte	102,15,56,0,202		/* pshufb %xmm2,%xmm1 */
	movups	%xmm6,64(%edi)
	pshufd	$192,%xmm0,%xmm2
	movups	%xmm7,80(%edi)
	leal	96(%edi),%edi
	pshufd	$128,%xmm0,%xmm3
	subl	$6,%eax
	jnc	.L039ctr32_loop6
	addl	$6,%eax
	jz	.L040ctr32_ret
	movdqu	(%ebp),%xmm7		/* re-derive un-whitened IV body for tail */
	movl	%ebp,%edx
	pxor	32(%esp),%xmm7
	movl	240(%ebp),%ecx
.L038ctr32_tail:
	/* 1..5 remaining blocks: merge IV body, dispatch on count */
	por	%xmm7,%xmm2
	cmpl	$2,%eax
	jb	.L041ctr32_one
	pshufd	$64,%xmm0,%xmm4
	por	%xmm7,%xmm3
	je	.L042ctr32_two
	pshufd	$192,%xmm1,%xmm5
	por	%xmm7,%xmm4
	cmpl	$4,%eax
	jb	.L043ctr32_three
	pshufd	$128,%xmm1,%xmm6
	por	%xmm7,%xmm5
	je	.L044ctr32_four
	por	%xmm7,%xmm6		/* five blocks: encrypt 6, use 5 */
	call	_aesni_encrypt6
	movups	(%esi),%xmm1
	movups	16(%esi),%xmm0
	xorps	%xmm1,%xmm2
	movups	32(%esi),%xmm1
	xorps	%xmm0,%xmm3
	movups	48(%esi),%xmm0
	xorps	%xmm1,%xmm4
	movups	64(%esi),%xmm1
	xorps	%xmm0,%xmm5
	movups	%xmm2,(%edi)
	xorps	%xmm1,%xmm6
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	movups	%xmm6,64(%edi)
	jmp	.L040ctr32_ret
.align	16
.L037ctr32_one_shortcut:
	movups	(%ebx),%xmm2		/* single block: use IV directly */
	movl	240(%edx),%ecx
.L041ctr32_one:
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L045enc1_loop_7:
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L045enc1_loop_7
.byte	102,15,56,221,209	/* aesenclast %xmm1,%xmm2 */
	movups	(%esi),%xmm6
	xorps	%xmm2,%xmm6
	movups	%xmm6,(%edi)
	jmp	.L040ctr32_ret
.align	16
.L042ctr32_two:
	call	_aesni_encrypt2
	movups	(%esi),%xmm5
	movups	16(%esi),%xmm6
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	jmp	.L040ctr32_ret
.align	16
.L043ctr32_three:
	call	_aesni_encrypt3
	movups	(%esi),%xmm5
	movups	16(%esi),%xmm6
	xorps	%xmm5,%xmm2
	movups	32(%esi),%xmm7
	xorps	%xmm6,%xmm3
	movups	%xmm2,(%edi)
	xorps	%xmm7,%xmm4
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	jmp	.L040ctr32_ret
.align	16
.L044ctr32_four:
	call	_aesni_encrypt4
	movups	(%esi),%xmm6
	movups	16(%esi),%xmm7
	movups	32(%esi),%xmm1
	xorps	%xmm6,%xmm2
	movups	48(%esi),%xmm0
	xorps	%xmm7,%xmm3
	movups	%xmm2,(%edi)
	xorps	%xmm1,%xmm4
	movups	%xmm3,16(%edi)
	xorps	%xmm0,%xmm5
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
.L040ctr32_ret:
	pxor	%xmm0,%xmm0		/* scrub registers and key-derived scratch */
	pxor	%xmm1,%xmm1
	pxor	%xmm2,%xmm2
	pxor	%xmm3,%xmm3
	pxor	%xmm4,%xmm4
	movdqa	%xmm0,32(%esp)
	pxor	%xmm5,%xmm5
	movdqa	%xmm0,48(%esp)
	pxor	%xmm6,%xmm6
	movdqa	%xmm0,64(%esp)
	pxor	%xmm7,%xmm7
	movl	80(%esp),%esp		/* restore caller stack frame */
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin
/*
 * void aesni_xts_encrypt(const unsigned char *inp, unsigned char *out,
 *                        size_t len, const AES_KEY *key1, const AES_KEY *key2,
 *                        const unsigned char *iv);    -- presumed prototype
 *   stack args observed: 20(%esp)=inp, 24=out, 28=len, 32=key1 (data key),
 *   36=key2 (tweak key), 40=iv.
 * XTS-AES encryption. The IV is first encrypted with key2 to form the
 * initial tweak; successive tweaks are derived by doubling in GF(2^128)
 * with reduction polynomial 0x87 (the pshufd/pand/pcmpgtd/paddq sequence).
 * Main loop processes 6 blocks per iteration; 1..5-block remainders use
 * dedicated paths, and a trailing partial block is handled by ciphertext
 * stealing (.L057xts_enc_steal).
 */
.globl	aesni_xts_encrypt
.type	aesni_xts_encrypt,@function
.align	16
aesni_xts_encrypt:
.L_aesni_xts_encrypt_begin:
	#ifdef __CET__

.byte	243,15,30,251		/* endbr32 */
	#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	36(%esp),%edx		/* edx = key2 (tweak key) */
	movl	40(%esp),%esi		/* esi = iv */
	movl	240(%edx),%ecx
	movups	(%esi),%xmm2
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L046enc1_loop_8:
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2: tweak = E_key2(iv) */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L046enc1_loop_8
.byte	102,15,56,221,209	/* aesenclast %xmm1,%xmm2 */
	movl	20(%esp),%esi
	movl	24(%esp),%edi
	movl	28(%esp),%eax
	movl	32(%esp),%edx		/* edx = key1 (data key) */
	movl	%esp,%ebp
	subl	$120,%esp
	movl	240(%edx),%ecx
	andl	$-16,%esp		/* 16-align scratch; old %esp at 116(%esp) */
	movl	$135,96(%esp)		/* GF(2^128) reduction constant 0x87 */
	movl	$0,100(%esp)
	movl	$1,104(%esp)
	movl	$0,108(%esp)
	movl	%eax,112(%esp)		/* stash len (tail bytes read back later) */
	movl	%ebp,116(%esp)
	movdqa	%xmm2,%xmm1		/* xmm1 = current tweak */
	pxor	%xmm0,%xmm0
	movdqa	96(%esp),%xmm3		/* xmm3 = reduction mask source */
	pcmpgtd	%xmm1,%xmm0		/* sign spill for the tweak doubling */
	andl	$-16,%eax
	movl	%edx,%ebp
	movl	%ecx,%ebx
	subl	$96,%eax
	jc	.L047xts_enc_short	/* fewer than 6 whole blocks */
	shll	$4,%ecx
	movl	$16,%ebx
	subl	%ecx,%ebx		/* negative round-key offset bias */
	leal	32(%edx,%ecx,1),%edx
	jmp	.L048xts_enc_loop6
.align	16
.L048xts_enc_loop6:
	/* derive tweaks T0..T5 (stored at (%esp)..64(%esp), T5 in xmm7) */
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,(%esp)
	paddq	%xmm1,%xmm1		/* tweak <<= 1 */
	pand	%xmm3,%xmm2		/* carry ? 0x87 : 0 */
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1		/* conditional reduction */
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,16(%esp)
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,32(%esp)
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,48(%esp)
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	pshufd	$19,%xmm0,%xmm7
	movdqa	%xmm1,64(%esp)
	paddq	%xmm1,%xmm1
	movups	(%ebp),%xmm0		/* round key 0 */
	pand	%xmm3,%xmm7
	movups	(%esi),%xmm2		/* load 6 plaintext blocks */
	pxor	%xmm1,%xmm7
	movl	%ebx,%ecx
	movdqu	16(%esi),%xmm3
	xorps	%xmm0,%xmm2		/* whiten with round key 0 */
	movdqu	32(%esi),%xmm4
	pxor	%xmm0,%xmm3
	movdqu	48(%esi),%xmm5
	pxor	%xmm0,%xmm4
	movdqu	64(%esi),%xmm6
	pxor	%xmm0,%xmm5
	movdqu	80(%esi),%xmm1
	pxor	%xmm0,%xmm6
	leal	96(%esi),%esi
	pxor	(%esp),%xmm2		/* XOR tweaks into blocks */
	movdqa	%xmm7,80(%esp)
	pxor	%xmm1,%xmm7
	movups	16(%ebp),%xmm1
	pxor	16(%esp),%xmm3
	pxor	32(%esp),%xmm4
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
	pxor	48(%esp),%xmm5
	pxor	64(%esp),%xmm6
.byte	102,15,56,220,217	/* aesenc %xmm1,%xmm3 */
	pxor	%xmm0,%xmm7
	movups	32(%ebp),%xmm0
.byte	102,15,56,220,225	/* aesenc %xmm1,%xmm4 */
.byte	102,15,56,220,233	/* aesenc %xmm1,%xmm5 */
.byte	102,15,56,220,241	/* aesenc %xmm1,%xmm6 */
.byte	102,15,56,220,249	/* aesenc %xmm1,%xmm7 */
	call	.L_aesni_encrypt6_enter	/* finish remaining rounds, 6-wide */
	movdqa	80(%esp),%xmm1
	pxor	%xmm0,%xmm0
	xorps	(%esp),%xmm2		/* un-tweak outputs and store */
	pcmpgtd	%xmm1,%xmm0
	xorps	16(%esp),%xmm3
	movups	%xmm2,(%edi)
	xorps	32(%esp),%xmm4
	movups	%xmm3,16(%edi)
	xorps	48(%esp),%xmm5
	movups	%xmm4,32(%edi)
	xorps	64(%esp),%xmm6
	movups	%xmm5,48(%edi)
	xorps	%xmm1,%xmm7
	movups	%xmm6,64(%edi)
	pshufd	$19,%xmm0,%xmm2
	movups	%xmm7,80(%edi)
	leal	96(%edi),%edi
	movdqa	96(%esp),%xmm3		/* next tweak = double(T5) */
	pxor	%xmm0,%xmm0
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	subl	$96,%eax
	jnc	.L048xts_enc_loop6
	movl	240(%ebp),%ecx
	movl	%ebp,%edx
	movl	%ecx,%ebx
.L047xts_enc_short:
	/* 0..5 whole blocks remain; xmm1 holds the next tweak */
	addl	$96,%eax
	jz	.L049xts_enc_done6x
	movdqa	%xmm1,%xmm5		/* T0 */
	cmpl	$32,%eax
	jb	.L050xts_enc_one
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	paddq	%xmm1,%xmm1		/* T1 */
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	je	.L051xts_enc_two
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,%xmm6
	paddq	%xmm1,%xmm1		/* T2 */
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	cmpl	$64,%eax
	jb	.L052xts_enc_three
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,%xmm7
	paddq	%xmm1,%xmm1		/* T3 */
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	movdqa	%xmm5,(%esp)
	movdqa	%xmm6,16(%esp)
	je	.L053xts_enc_four
	movdqa	%xmm7,32(%esp)		/* five blocks */
	pshufd	$19,%xmm0,%xmm7
	movdqa	%xmm1,48(%esp)
	paddq	%xmm1,%xmm1		/* T4 */
	pand	%xmm3,%xmm7
	pxor	%xmm1,%xmm7
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	pxor	(%esp),%xmm2
	movdqu	48(%esi),%xmm5
	pxor	16(%esp),%xmm3
	movdqu	64(%esi),%xmm6
	pxor	32(%esp),%xmm4
	leal	80(%esi),%esi
	pxor	48(%esp),%xmm5
	movdqa	%xmm7,64(%esp)
	pxor	%xmm7,%xmm6
	call	_aesni_encrypt6
	movaps	64(%esp),%xmm1
	xorps	(%esp),%xmm2
	xorps	16(%esp),%xmm3
	xorps	32(%esp),%xmm4
	movups	%xmm2,(%edi)
	xorps	48(%esp),%xmm5
	movups	%xmm3,16(%edi)
	xorps	%xmm1,%xmm6
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	movups	%xmm6,64(%edi)
	leal	80(%edi),%edi
	jmp	.L054xts_enc_done
.align	16
.L050xts_enc_one:
	movups	(%esi),%xmm2
	leal	16(%esi),%esi
	xorps	%xmm5,%xmm2		/* plaintext ^ tweak */
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L055enc1_loop_9:
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L055enc1_loop_9
.byte	102,15,56,221,209	/* aesenclast %xmm1,%xmm2 */
	xorps	%xmm5,%xmm2		/* ^ tweak again */
	movups	%xmm2,(%edi)
	leal	16(%edi),%edi
	movdqa	%xmm5,%xmm1		/* xmm1 = last tweak used */
	jmp	.L054xts_enc_done
.align	16
.L051xts_enc_two:
	movaps	%xmm1,%xmm6
	movups	(%esi),%xmm2
	movups	16(%esi),%xmm3
	leal	32(%esi),%esi
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	call	_aesni_encrypt2
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	leal	32(%edi),%edi
	movdqa	%xmm6,%xmm1
	jmp	.L054xts_enc_done
.align	16
.L052xts_enc_three:
	movaps	%xmm1,%xmm7
	movups	(%esi),%xmm2
	movups	16(%esi),%xmm3
	movups	32(%esi),%xmm4
	leal	48(%esi),%esi
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	xorps	%xmm7,%xmm4
	call	_aesni_encrypt3
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	xorps	%xmm7,%xmm4
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	leal	48(%edi),%edi
	movdqa	%xmm7,%xmm1
	jmp	.L054xts_enc_done
.align	16
.L053xts_enc_four:
	movaps	%xmm1,%xmm6
	movups	(%esi),%xmm2
	movups	16(%esi),%xmm3
	movups	32(%esi),%xmm4
	xorps	(%esp),%xmm2		/* T0/T1 were spilled to the stack */
	movups	48(%esi),%xmm5
	leal	64(%esi),%esi
	xorps	16(%esp),%xmm3
	xorps	%xmm7,%xmm4
	xorps	%xmm6,%xmm5
	call	_aesni_encrypt4
	xorps	(%esp),%xmm2
	xorps	16(%esp),%xmm3
	xorps	%xmm7,%xmm4
	movups	%xmm2,(%edi)
	xorps	%xmm6,%xmm5
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	leal	64(%edi),%edi
	movdqa	%xmm6,%xmm1
	jmp	.L054xts_enc_done
.align	16
.L049xts_enc_done6x:
	movl	112(%esp),%eax		/* residual byte count (len mod 16) */
	andl	$15,%eax
	jz	.L056xts_enc_ret
	movdqa	%xmm1,%xmm5
	movl	%eax,112(%esp)
	jmp	.L057xts_enc_steal
.align	16
.L054xts_enc_done:
	movl	112(%esp),%eax
	pxor	%xmm0,%xmm0
	andl	$15,%eax
	jz	.L056xts_enc_ret
	pcmpgtd	%xmm1,%xmm0		/* derive one more tweak for stealing */
	movl	%eax,112(%esp)
	pshufd	$19,%xmm0,%xmm5
	paddq	%xmm1,%xmm1
	pand	96(%esp),%xmm5
	pxor	%xmm1,%xmm5
.L057xts_enc_steal:
	/* ciphertext stealing: swap the tail bytes with the last block's */
	movzbl	(%esi),%ecx
	movzbl	-16(%edi),%edx
	leal	1(%esi),%esi
	movb	%cl,-16(%edi)
	movb	%dl,(%edi)
	leal	1(%edi),%edi
	subl	$1,%eax
	jnz	.L057xts_enc_steal
	subl	112(%esp),%edi
	movl	%ebp,%edx
	movl	%ebx,%ecx
	movups	-16(%edi),%xmm2		/* re-encrypt the stolen block */
	xorps	%xmm5,%xmm2
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L058enc1_loop_10:
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L058enc1_loop_10
.byte	102,15,56,221,209	/* aesenclast %xmm1,%xmm2 */
	xorps	%xmm5,%xmm2
	movups	%xmm2,-16(%edi)
.L056xts_enc_ret:
	pxor	%xmm0,%xmm0		/* scrub registers and tweak scratch */
	pxor	%xmm1,%xmm1
	pxor	%xmm2,%xmm2
	movdqa	%xmm0,(%esp)
	pxor	%xmm3,%xmm3
	movdqa	%xmm0,16(%esp)
	pxor	%xmm4,%xmm4
	movdqa	%xmm0,32(%esp)
	pxor	%xmm5,%xmm5
	movdqa	%xmm0,48(%esp)
	pxor	%xmm6,%xmm6
	movdqa	%xmm0,64(%esp)
	pxor	%xmm7,%xmm7
	movdqa	%xmm0,80(%esp)
	movl	116(%esp),%esp		/* restore caller stack frame */
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin
/*
 * void aesni_xts_decrypt(const unsigned char *inp, unsigned char *out,
 *                        size_t len, const AES_KEY *key1, const AES_KEY *key2,
 *                        const unsigned char *iv);    -- presumed prototype
 *   stack args observed: 36(%esp)=key2 (tweak key), 40=iv, then after the
 *   tweak is computed: 20=inp, 24=out, 28=len, 32=key1 (data key).
 * XTS-AES decryption, mirror of aesni_xts_encrypt: the tweak chain is still
 * produced with AES *encryption* under key2 and GF(2^128) doubling
 * (polynomial 0x87), while the data path uses AESDEC/AESDECLAST. When the
 * length is not a multiple of 16 one whole block is held back up front
 * (the setnz/shll/subl dance) so ciphertext stealing can run at the end
 * (.L070xts_dec_only_one_more / .L072xts_dec_steal).
 */
.globl	aesni_xts_decrypt
.type	aesni_xts_decrypt,@function
.align	16
aesni_xts_decrypt:
.L_aesni_xts_decrypt_begin:
	#ifdef __CET__

.byte	243,15,30,251		/* endbr32 */
	#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	36(%esp),%edx		/* edx = key2 (tweak key) */
	movl	40(%esp),%esi		/* esi = iv */
	movl	240(%edx),%ecx
	movups	(%esi),%xmm2
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L059enc1_loop_11:
.byte	102,15,56,220,209	/* aesenc %xmm1,%xmm2: tweak = E_key2(iv) */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L059enc1_loop_11
.byte	102,15,56,221,209	/* aesenclast %xmm1,%xmm2 */
	movl	20(%esp),%esi
	movl	24(%esp),%edi
	movl	28(%esp),%eax
	movl	32(%esp),%edx		/* edx = key1 (data key) */
	movl	%esp,%ebp
	subl	$120,%esp
	andl	$-16,%esp		/* 16-align scratch; old %esp at 116(%esp) */
	xorl	%ebx,%ebx
	testl	$15,%eax		/* partial final block? */
	setnz	%bl
	shll	$4,%ebx
	subl	%ebx,%eax		/* reserve one block for stealing */
	movl	$135,96(%esp)		/* GF(2^128) reduction constant 0x87 */
	movl	$0,100(%esp)
	movl	$1,104(%esp)
	movl	$0,108(%esp)
	movl	%eax,112(%esp)		/* stash len (tail bytes read back later) */
	movl	%ebp,116(%esp)
	movl	240(%edx),%ecx
	movl	%edx,%ebp
	movl	%ecx,%ebx
	movdqa	%xmm2,%xmm1		/* xmm1 = current tweak */
	pxor	%xmm0,%xmm0
	movdqa	96(%esp),%xmm3		/* xmm3 = reduction mask source */
	pcmpgtd	%xmm1,%xmm0
	andl	$-16,%eax
	subl	$96,%eax
	jc	.L060xts_dec_short	/* fewer than 6 whole blocks */
	shll	$4,%ecx
	movl	$16,%ebx
	subl	%ecx,%ebx		/* negative round-key offset bias */
	leal	32(%edx,%ecx,1),%edx
	jmp	.L061xts_dec_loop6
.align	16
.L061xts_dec_loop6:
	/* derive tweaks T0..T5 (stored at (%esp)..64(%esp), T5 in xmm7) */
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,(%esp)
	paddq	%xmm1,%xmm1		/* tweak <<= 1 */
	pand	%xmm3,%xmm2		/* carry ? 0x87 : 0 */
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1		/* conditional reduction */
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,16(%esp)
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,32(%esp)
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,48(%esp)
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	pshufd	$19,%xmm0,%xmm7
	movdqa	%xmm1,64(%esp)
	paddq	%xmm1,%xmm1
	movups	(%ebp),%xmm0		/* round key 0 */
	pand	%xmm3,%xmm7
	movups	(%esi),%xmm2		/* load 6 ciphertext blocks */
	pxor	%xmm1,%xmm7
	movl	%ebx,%ecx
	movdqu	16(%esi),%xmm3
	xorps	%xmm0,%xmm2		/* whiten with round key 0 */
	movdqu	32(%esi),%xmm4
	pxor	%xmm0,%xmm3
	movdqu	48(%esi),%xmm5
	pxor	%xmm0,%xmm4
	movdqu	64(%esi),%xmm6
	pxor	%xmm0,%xmm5
	movdqu	80(%esi),%xmm1
	pxor	%xmm0,%xmm6
	leal	96(%esi),%esi
	pxor	(%esp),%xmm2		/* XOR tweaks into blocks */
	movdqa	%xmm7,80(%esp)
	pxor	%xmm1,%xmm7
	movups	16(%ebp),%xmm1
	pxor	16(%esp),%xmm3
	pxor	32(%esp),%xmm4
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
	pxor	48(%esp),%xmm5
	pxor	64(%esp),%xmm6
.byte	102,15,56,222,217	/* aesdec %xmm1,%xmm3 */
	pxor	%xmm0,%xmm7
	movups	32(%ebp),%xmm0
.byte	102,15,56,222,225	/* aesdec %xmm1,%xmm4 */
.byte	102,15,56,222,233	/* aesdec %xmm1,%xmm5 */
.byte	102,15,56,222,241	/* aesdec %xmm1,%xmm6 */
.byte	102,15,56,222,249	/* aesdec %xmm1,%xmm7 */
	call	.L_aesni_decrypt6_enter	/* finish remaining rounds, 6-wide */
	movdqa	80(%esp),%xmm1
	pxor	%xmm0,%xmm0
	xorps	(%esp),%xmm2		/* un-tweak outputs and store */
	pcmpgtd	%xmm1,%xmm0
	xorps	16(%esp),%xmm3
	movups	%xmm2,(%edi)
	xorps	32(%esp),%xmm4
	movups	%xmm3,16(%edi)
	xorps	48(%esp),%xmm5
	movups	%xmm4,32(%edi)
	xorps	64(%esp),%xmm6
	movups	%xmm5,48(%edi)
	xorps	%xmm1,%xmm7
	movups	%xmm6,64(%edi)
	pshufd	$19,%xmm0,%xmm2
	movups	%xmm7,80(%edi)
	leal	96(%edi),%edi
	movdqa	96(%esp),%xmm3		/* next tweak = double(T5) */
	pxor	%xmm0,%xmm0
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	subl	$96,%eax
	jnc	.L061xts_dec_loop6
	movl	240(%ebp),%ecx
	movl	%ebp,%edx
	movl	%ecx,%ebx
.L060xts_dec_short:
	/* 0..5 whole blocks remain; xmm1 holds the next tweak */
	addl	$96,%eax
	jz	.L062xts_dec_done6x
	movdqa	%xmm1,%xmm5		/* T0 */
	cmpl	$32,%eax
	jb	.L063xts_dec_one
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	paddq	%xmm1,%xmm1		/* T1 */
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	je	.L064xts_dec_two
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,%xmm6
	paddq	%xmm1,%xmm1		/* T2 */
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	cmpl	$64,%eax
	jb	.L065xts_dec_three
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	%xmm1,%xmm7
	paddq	%xmm1,%xmm1		/* T3 */
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
	movdqa	%xmm5,(%esp)
	movdqa	%xmm6,16(%esp)
	je	.L066xts_dec_four
	movdqa	%xmm7,32(%esp)		/* five blocks */
	pshufd	$19,%xmm0,%xmm7
	movdqa	%xmm1,48(%esp)
	paddq	%xmm1,%xmm1		/* T4 */
	pand	%xmm3,%xmm7
	pxor	%xmm1,%xmm7
	movdqu	(%esi),%xmm2
	movdqu	16(%esi),%xmm3
	movdqu	32(%esi),%xmm4
	pxor	(%esp),%xmm2
	movdqu	48(%esi),%xmm5
	pxor	16(%esp),%xmm3
	movdqu	64(%esi),%xmm6
	pxor	32(%esp),%xmm4
	leal	80(%esi),%esi
	pxor	48(%esp),%xmm5
	movdqa	%xmm7,64(%esp)
	pxor	%xmm7,%xmm6
	call	_aesni_decrypt6
	movaps	64(%esp),%xmm1
	xorps	(%esp),%xmm2
	xorps	16(%esp),%xmm3
	xorps	32(%esp),%xmm4
	movups	%xmm2,(%edi)
	xorps	48(%esp),%xmm5
	movups	%xmm3,16(%edi)
	xorps	%xmm1,%xmm6
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	movups	%xmm6,64(%edi)
	leal	80(%edi),%edi
	jmp	.L067xts_dec_done
.align	16
.L063xts_dec_one:
	movups	(%esi),%xmm2
	leal	16(%esi),%esi
	xorps	%xmm5,%xmm2		/* ciphertext ^ tweak */
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L068dec1_loop_12:
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L068dec1_loop_12
.byte	102,15,56,223,209	/* aesdeclast %xmm1,%xmm2 */
	xorps	%xmm5,%xmm2		/* ^ tweak again */
	movups	%xmm2,(%edi)
	leal	16(%edi),%edi
	movdqa	%xmm5,%xmm1		/* xmm1 = last tweak used */
	jmp	.L067xts_dec_done
.align	16
.L064xts_dec_two:
	movaps	%xmm1,%xmm6
	movups	(%esi),%xmm2
	movups	16(%esi),%xmm3
	leal	32(%esi),%esi
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	call	_aesni_decrypt2
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	leal	32(%edi),%edi
	movdqa	%xmm6,%xmm1
	jmp	.L067xts_dec_done
.align	16
.L065xts_dec_three:
	movaps	%xmm1,%xmm7
	movups	(%esi),%xmm2
	movups	16(%esi),%xmm3
	movups	32(%esi),%xmm4
	leal	48(%esi),%esi
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	xorps	%xmm7,%xmm4
	call	_aesni_decrypt3
	xorps	%xmm5,%xmm2
	xorps	%xmm6,%xmm3
	xorps	%xmm7,%xmm4
	movups	%xmm2,(%edi)
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	leal	48(%edi),%edi
	movdqa	%xmm7,%xmm1
	jmp	.L067xts_dec_done
.align	16
.L066xts_dec_four:
	movaps	%xmm1,%xmm6
	movups	(%esi),%xmm2
	movups	16(%esi),%xmm3
	movups	32(%esi),%xmm4
	xorps	(%esp),%xmm2		/* T0/T1 were spilled to the stack */
	movups	48(%esi),%xmm5
	leal	64(%esi),%esi
	xorps	16(%esp),%xmm3
	xorps	%xmm7,%xmm4
	xorps	%xmm6,%xmm5
	call	_aesni_decrypt4
	xorps	(%esp),%xmm2
	xorps	16(%esp),%xmm3
	xorps	%xmm7,%xmm4
	movups	%xmm2,(%edi)
	xorps	%xmm6,%xmm5
	movups	%xmm3,16(%edi)
	movups	%xmm4,32(%edi)
	movups	%xmm5,48(%edi)
	leal	64(%edi),%edi
	movdqa	%xmm6,%xmm1
	jmp	.L067xts_dec_done
.align	16
.L062xts_dec_done6x:
	movl	112(%esp),%eax		/* residual byte count (len mod 16) */
	andl	$15,%eax
	jz	.L069xts_dec_ret
	movl	%eax,112(%esp)
	jmp	.L070xts_dec_only_one_more
.align	16
.L067xts_dec_done:
	movl	112(%esp),%eax
	pxor	%xmm0,%xmm0
	andl	$15,%eax
	jz	.L069xts_dec_ret
	pcmpgtd	%xmm1,%xmm0		/* double the tweak once more */
	movl	%eax,112(%esp)
	pshufd	$19,%xmm0,%xmm2
	pxor	%xmm0,%xmm0
	movdqa	96(%esp),%xmm3
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm2
	pcmpgtd	%xmm1,%xmm0
	pxor	%xmm2,%xmm1
.L070xts_dec_only_one_more:
	/* decrypt the held-back block with tweak T(n+1); keep T(n) in xmm6 */
	pshufd	$19,%xmm0,%xmm5
	movdqa	%xmm1,%xmm6
	paddq	%xmm1,%xmm1
	pand	%xmm3,%xmm5
	pxor	%xmm1,%xmm5
	movl	%ebp,%edx
	movl	%ebx,%ecx
	movups	(%esi),%xmm2
	xorps	%xmm5,%xmm2
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L071dec1_loop_13:
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L071dec1_loop_13
.byte	102,15,56,223,209	/* aesdeclast %xmm1,%xmm2 */
	xorps	%xmm5,%xmm2
	movups	%xmm2,(%edi)
.L072xts_dec_steal:
	/* ciphertext stealing: swap tail bytes with the just-written block */
	movzbl	16(%esi),%ecx
	movzbl	(%edi),%edx
	leal	1(%esi),%esi
	movb	%cl,(%edi)
	movb	%dl,16(%edi)
	leal	1(%edi),%edi
	subl	$1,%eax
	jnz	.L072xts_dec_steal
	subl	112(%esp),%edi
	movl	%ebp,%edx
	movl	%ebx,%ecx
	movups	(%edi),%xmm2		/* re-decrypt the patched block */
	xorps	%xmm6,%xmm2
	movups	(%edx),%xmm0
	movups	16(%edx),%xmm1
	leal	32(%edx),%edx
	xorps	%xmm0,%xmm2
.L073dec1_loop_14:
.byte	102,15,56,222,209	/* aesdec %xmm1,%xmm2 */
	decl	%ecx
	movups	(%edx),%xmm1
	leal	16(%edx),%edx
	jnz	.L073dec1_loop_14
.byte	102,15,56,223,209	/* aesdeclast %xmm1,%xmm2 */
	xorps	%xmm6,%xmm2
	movups	%xmm2,(%edi)
.L069xts_dec_ret:
	pxor	%xmm0,%xmm0		/* scrub registers and tweak scratch */
	pxor	%xmm1,%xmm1
	pxor	%xmm2,%xmm2
	movdqa	%xmm0,(%esp)
	pxor	%xmm3,%xmm3
	movdqa	%xmm0,16(%esp)
	pxor	%xmm4,%xmm4
	movdqa	%xmm0,32(%esp)
	pxor	%xmm5,%xmm5
	movdqa	%xmm0,48(%esp)
	pxor	%xmm6,%xmm6
	movdqa	%xmm0,64(%esp)
	pxor	%xmm7,%xmm7
	movdqa	%xmm0,80(%esp)
	movl	116(%esp),%esp		/* restore caller stack frame */
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin
5239.globl	aesni_ocb_encrypt
5240.type	aesni_ocb_encrypt,@function
5241.align	16
5242aesni_ocb_encrypt:
5243.L_aesni_ocb_encrypt_begin:
5244	#ifdef __CET__
5245
5246.byte	243,15,30,251
5247	#endif
5248
5249	pushl	%ebp
5250	pushl	%ebx
5251	pushl	%esi
5252	pushl	%edi
5253	movl	40(%esp),%ecx
5254	movl	48(%esp),%ebx
5255	movl	20(%esp),%esi
5256	movl	24(%esp),%edi
5257	movl	28(%esp),%eax
5258	movl	32(%esp),%edx
5259	movdqu	(%ecx),%xmm0
5260	movl	36(%esp),%ebp
5261	movdqu	(%ebx),%xmm1
5262	movl	44(%esp),%ebx
5263	movl	%esp,%ecx
5264	subl	$132,%esp
5265	andl	$-16,%esp
5266	subl	%esi,%edi
5267	shll	$4,%eax
5268	leal	-96(%esi,%eax,1),%eax
5269	movl	%edi,120(%esp)
5270	movl	%eax,124(%esp)
5271	movl	%ecx,128(%esp)
5272	movl	240(%edx),%ecx
5273	testl	$1,%ebp
5274	jnz	.L074odd
5275	bsfl	%ebp,%eax
5276	addl	$1,%ebp
5277	shll	$4,%eax
5278	movdqu	(%ebx,%eax,1),%xmm7
5279	movl	%edx,%eax
5280	movdqu	(%esi),%xmm2
5281	leal	16(%esi),%esi
5282	pxor	%xmm0,%xmm7
5283	pxor	%xmm2,%xmm1
5284	pxor	%xmm7,%xmm2
5285	movdqa	%xmm1,%xmm6
5286	movups	(%edx),%xmm0
5287	movups	16(%edx),%xmm1
5288	leal	32(%edx),%edx
5289	xorps	%xmm0,%xmm2
5290.L075enc1_loop_15:
5291.byte	102,15,56,220,209
5292	decl	%ecx
5293	movups	(%edx),%xmm1
5294	leal	16(%edx),%edx
5295	jnz	.L075enc1_loop_15
5296.byte	102,15,56,221,209
5297	xorps	%xmm7,%xmm2
5298	movdqa	%xmm7,%xmm0
5299	movdqa	%xmm6,%xmm1
5300	movups	%xmm2,-16(%edi,%esi,1)
5301	movl	240(%eax),%ecx
5302	movl	%eax,%edx
5303	movl	124(%esp),%eax
5304.L074odd:
5305	shll	$4,%ecx
5306	movl	$16,%edi
5307	subl	%ecx,%edi
5308	movl	%edx,112(%esp)
5309	leal	32(%edx,%ecx,1),%edx
5310	movl	%edi,116(%esp)
5311	cmpl	%eax,%esi
5312	ja	.L076short
5313	jmp	.L077grandloop
5314.align	32
5315.L077grandloop:
5316	leal	1(%ebp),%ecx
5317	leal	3(%ebp),%eax
5318	leal	5(%ebp),%edi
5319	addl	$6,%ebp
5320	bsfl	%ecx,%ecx
5321	bsfl	%eax,%eax
5322	bsfl	%edi,%edi
5323	shll	$4,%ecx
5324	shll	$4,%eax
5325	shll	$4,%edi
5326	movdqu	(%ebx),%xmm2
5327	movdqu	(%ebx,%ecx,1),%xmm3
5328	movl	116(%esp),%ecx
5329	movdqa	%xmm2,%xmm4
5330	movdqu	(%ebx,%eax,1),%xmm5
5331	movdqa	%xmm2,%xmm6
5332	movdqu	(%ebx,%edi,1),%xmm7
5333	pxor	%xmm0,%xmm2
5334	pxor	%xmm2,%xmm3
5335	movdqa	%xmm2,(%esp)
5336	pxor	%xmm3,%xmm4
5337	movdqa	%xmm3,16(%esp)
5338	pxor	%xmm4,%xmm5
5339	movdqa	%xmm4,32(%esp)
5340	pxor	%xmm5,%xmm6
5341	movdqa	%xmm5,48(%esp)
5342	pxor	%xmm6,%xmm7
5343	movdqa	%xmm6,64(%esp)
5344	movdqa	%xmm7,80(%esp)
5345	movups	-48(%edx,%ecx,1),%xmm0
5346	movdqu	(%esi),%xmm2
5347	movdqu	16(%esi),%xmm3
5348	movdqu	32(%esi),%xmm4
5349	movdqu	48(%esi),%xmm5
5350	movdqu	64(%esi),%xmm6
5351	movdqu	80(%esi),%xmm7
5352	leal	96(%esi),%esi
5353	pxor	%xmm2,%xmm1
5354	pxor	%xmm0,%xmm2
5355	pxor	%xmm3,%xmm1
5356	pxor	%xmm0,%xmm3
5357	pxor	%xmm4,%xmm1
5358	pxor	%xmm0,%xmm4
5359	pxor	%xmm5,%xmm1
5360	pxor	%xmm0,%xmm5
5361	pxor	%xmm6,%xmm1
5362	pxor	%xmm0,%xmm6
5363	pxor	%xmm7,%xmm1
5364	pxor	%xmm0,%xmm7
5365	movdqa	%xmm1,96(%esp)
5366	movups	-32(%edx,%ecx,1),%xmm1
5367	pxor	(%esp),%xmm2
5368	pxor	16(%esp),%xmm3
5369	pxor	32(%esp),%xmm4
5370	pxor	48(%esp),%xmm5
5371	pxor	64(%esp),%xmm6
5372	pxor	80(%esp),%xmm7
5373	movups	-16(%edx,%ecx,1),%xmm0
5374.byte	102,15,56,220,209
5375.byte	102,15,56,220,217
5376.byte	102,15,56,220,225
5377.byte	102,15,56,220,233
5378.byte	102,15,56,220,241
5379.byte	102,15,56,220,249
5380	movl	120(%esp),%edi
5381	movl	124(%esp),%eax
5382	call	.L_aesni_encrypt6_enter
5383	movdqa	80(%esp),%xmm0
5384	pxor	(%esp),%xmm2
5385	pxor	16(%esp),%xmm3
5386	pxor	32(%esp),%xmm4
5387	pxor	48(%esp),%xmm5
5388	pxor	64(%esp),%xmm6
5389	pxor	%xmm0,%xmm7
5390	movdqa	96(%esp),%xmm1
5391	movdqu	%xmm2,-96(%edi,%esi,1)
5392	movdqu	%xmm3,-80(%edi,%esi,1)
5393	movdqu	%xmm4,-64(%edi,%esi,1)
5394	movdqu	%xmm5,-48(%edi,%esi,1)
5395	movdqu	%xmm6,-32(%edi,%esi,1)
5396	movdqu	%xmm7,-16(%edi,%esi,1)
5397	cmpl	%eax,%esi
5398	jbe	.L077grandloop
5399.L076short:
5400	addl	$96,%eax
5401	subl	%esi,%eax
5402	jz	.L078done
5403	cmpl	$32,%eax
5404	jb	.L079one
5405	je	.L080two
5406	cmpl	$64,%eax
5407	jb	.L081three
5408	je	.L082four
5409	leal	1(%ebp),%ecx
5410	leal	3(%ebp),%eax
5411	bsfl	%ecx,%ecx
5412	bsfl	%eax,%eax
5413	shll	$4,%ecx
5414	shll	$4,%eax
5415	movdqu	(%ebx),%xmm2
5416	movdqu	(%ebx,%ecx,1),%xmm3
5417	movl	116(%esp),%ecx
5418	movdqa	%xmm2,%xmm4
5419	movdqu	(%ebx,%eax,1),%xmm5
5420	movdqa	%xmm2,%xmm6
5421	pxor	%xmm0,%xmm2
5422	pxor	%xmm2,%xmm3
5423	movdqa	%xmm2,(%esp)
5424	pxor	%xmm3,%xmm4
5425	movdqa	%xmm3,16(%esp)
5426	pxor	%xmm4,%xmm5
5427	movdqa	%xmm4,32(%esp)
5428	pxor	%xmm5,%xmm6
5429	movdqa	%xmm5,48(%esp)
5430	pxor	%xmm6,%xmm7
5431	movdqa	%xmm6,64(%esp)
5432	movups	-48(%edx,%ecx,1),%xmm0
5433	movdqu	(%esi),%xmm2
5434	movdqu	16(%esi),%xmm3
5435	movdqu	32(%esi),%xmm4
5436	movdqu	48(%esi),%xmm5
5437	movdqu	64(%esi),%xmm6
5438	pxor	%xmm7,%xmm7
5439	pxor	%xmm2,%xmm1
5440	pxor	%xmm0,%xmm2
5441	pxor	%xmm3,%xmm1
5442	pxor	%xmm0,%xmm3
5443	pxor	%xmm4,%xmm1
5444	pxor	%xmm0,%xmm4
5445	pxor	%xmm5,%xmm1
5446	pxor	%xmm0,%xmm5
5447	pxor	%xmm6,%xmm1
5448	pxor	%xmm0,%xmm6
5449	movdqa	%xmm1,96(%esp)
5450	movups	-32(%edx,%ecx,1),%xmm1
5451	pxor	(%esp),%xmm2
5452	pxor	16(%esp),%xmm3
5453	pxor	32(%esp),%xmm4
5454	pxor	48(%esp),%xmm5
5455	pxor	64(%esp),%xmm6
5456	movups	-16(%edx,%ecx,1),%xmm0
5457.byte	102,15,56,220,209
5458.byte	102,15,56,220,217
5459.byte	102,15,56,220,225
5460.byte	102,15,56,220,233
5461.byte	102,15,56,220,241
5462.byte	102,15,56,220,249
5463	movl	120(%esp),%edi
5464	call	.L_aesni_encrypt6_enter
5465	movdqa	64(%esp),%xmm0
5466	pxor	(%esp),%xmm2
5467	pxor	16(%esp),%xmm3
5468	pxor	32(%esp),%xmm4
5469	pxor	48(%esp),%xmm5
5470	pxor	%xmm0,%xmm6
5471	movdqa	96(%esp),%xmm1
5472	movdqu	%xmm2,(%edi,%esi,1)
5473	movdqu	%xmm3,16(%edi,%esi,1)
5474	movdqu	%xmm4,32(%edi,%esi,1)
5475	movdqu	%xmm5,48(%edi,%esi,1)
5476	movdqu	%xmm6,64(%edi,%esi,1)
5477	jmp	.L078done
5478.align	16
5479.L079one:
5480	movdqu	(%ebx),%xmm7
5481	movl	112(%esp),%edx
5482	movdqu	(%esi),%xmm2
5483	movl	240(%edx),%ecx
5484	pxor	%xmm0,%xmm7
5485	pxor	%xmm2,%xmm1
5486	pxor	%xmm7,%xmm2
5487	movdqa	%xmm1,%xmm6
5488	movl	120(%esp),%edi
5489	movups	(%edx),%xmm0
5490	movups	16(%edx),%xmm1
5491	leal	32(%edx),%edx
5492	xorps	%xmm0,%xmm2
5493.L083enc1_loop_16:
5494.byte	102,15,56,220,209
5495	decl	%ecx
5496	movups	(%edx),%xmm1
5497	leal	16(%edx),%edx
5498	jnz	.L083enc1_loop_16
5499.byte	102,15,56,221,209
5500	xorps	%xmm7,%xmm2
5501	movdqa	%xmm7,%xmm0
5502	movdqa	%xmm6,%xmm1
5503	movups	%xmm2,(%edi,%esi,1)
5504	jmp	.L078done
5505.align	16
5506.L080two:
5507	leal	1(%ebp),%ecx
5508	movl	112(%esp),%edx
5509	bsfl	%ecx,%ecx
5510	shll	$4,%ecx
5511	movdqu	(%ebx),%xmm6
5512	movdqu	(%ebx,%ecx,1),%xmm7
5513	movdqu	(%esi),%xmm2
5514	movdqu	16(%esi),%xmm3
5515	movl	240(%edx),%ecx
5516	pxor	%xmm0,%xmm6
5517	pxor	%xmm6,%xmm7
5518	pxor	%xmm2,%xmm1
5519	pxor	%xmm6,%xmm2
5520	pxor	%xmm3,%xmm1
5521	pxor	%xmm7,%xmm3
5522	movdqa	%xmm1,%xmm5
5523	movl	120(%esp),%edi
5524	call	_aesni_encrypt2
5525	xorps	%xmm6,%xmm2
5526	xorps	%xmm7,%xmm3
5527	movdqa	%xmm7,%xmm0
5528	movdqa	%xmm5,%xmm1
5529	movups	%xmm2,(%edi,%esi,1)
5530	movups	%xmm3,16(%edi,%esi,1)
5531	jmp	.L078done
5532.align	16
5533.L081three:
5534	leal	1(%ebp),%ecx
5535	movl	112(%esp),%edx
5536	bsfl	%ecx,%ecx
5537	shll	$4,%ecx
5538	movdqu	(%ebx),%xmm5
5539	movdqu	(%ebx,%ecx,1),%xmm6
5540	movdqa	%xmm5,%xmm7
5541	movdqu	(%esi),%xmm2
5542	movdqu	16(%esi),%xmm3
5543	movdqu	32(%esi),%xmm4
5544	movl	240(%edx),%ecx
5545	pxor	%xmm0,%xmm5
5546	pxor	%xmm5,%xmm6
5547	pxor	%xmm6,%xmm7
5548	pxor	%xmm2,%xmm1
5549	pxor	%xmm5,%xmm2
5550	pxor	%xmm3,%xmm1
5551	pxor	%xmm6,%xmm3
5552	pxor	%xmm4,%xmm1
5553	pxor	%xmm7,%xmm4
5554	movdqa	%xmm1,96(%esp)
5555	movl	120(%esp),%edi
5556	call	_aesni_encrypt3
5557	xorps	%xmm5,%xmm2
5558	xorps	%xmm6,%xmm3
5559	xorps	%xmm7,%xmm4
5560	movdqa	%xmm7,%xmm0
5561	movdqa	96(%esp),%xmm1
5562	movups	%xmm2,(%edi,%esi,1)
5563	movups	%xmm3,16(%edi,%esi,1)
5564	movups	%xmm4,32(%edi,%esi,1)
5565	jmp	.L078done
5566.align	16
5567.L082four:
5568	leal	1(%ebp),%ecx
5569	leal	3(%ebp),%eax
5570	bsfl	%ecx,%ecx
5571	bsfl	%eax,%eax
5572	movl	112(%esp),%edx
5573	shll	$4,%ecx
5574	shll	$4,%eax
5575	movdqu	(%ebx),%xmm4
5576	movdqu	(%ebx,%ecx,1),%xmm5
5577	movdqa	%xmm4,%xmm6
5578	movdqu	(%ebx,%eax,1),%xmm7
5579	pxor	%xmm0,%xmm4
5580	movdqu	(%esi),%xmm2
5581	pxor	%xmm4,%xmm5
5582	movdqu	16(%esi),%xmm3
5583	pxor	%xmm5,%xmm6
5584	movdqa	%xmm4,(%esp)
5585	pxor	%xmm6,%xmm7
5586	movdqa	%xmm5,16(%esp)
5587	movdqu	32(%esi),%xmm4
5588	movdqu	48(%esi),%xmm5
5589	movl	240(%edx),%ecx
5590	pxor	%xmm2,%xmm1
5591	pxor	(%esp),%xmm2
5592	pxor	%xmm3,%xmm1
5593	pxor	16(%esp),%xmm3
5594	pxor	%xmm4,%xmm1
5595	pxor	%xmm6,%xmm4
5596	pxor	%xmm5,%xmm1
5597	pxor	%xmm7,%xmm5
5598	movdqa	%xmm1,96(%esp)
5599	movl	120(%esp),%edi
5600	call	_aesni_encrypt4
5601	xorps	(%esp),%xmm2
5602	xorps	16(%esp),%xmm3
5603	xorps	%xmm6,%xmm4
5604	movups	%xmm2,(%edi,%esi,1)
5605	xorps	%xmm7,%xmm5
5606	movups	%xmm3,16(%edi,%esi,1)
5607	movdqa	%xmm7,%xmm0
5608	movups	%xmm4,32(%edi,%esi,1)
5609	movdqa	96(%esp),%xmm1
5610	movups	%xmm5,48(%edi,%esi,1)
5611.L078done:
5612	movl	128(%esp),%edx
5613	pxor	%xmm2,%xmm2
5614	pxor	%xmm3,%xmm3
5615	movdqa	%xmm2,(%esp)
5616	pxor	%xmm4,%xmm4
5617	movdqa	%xmm2,16(%esp)
5618	pxor	%xmm5,%xmm5
5619	movdqa	%xmm2,32(%esp)
5620	pxor	%xmm6,%xmm6
5621	movdqa	%xmm2,48(%esp)
5622	pxor	%xmm7,%xmm7
5623	movdqa	%xmm2,64(%esp)
5624	movdqa	%xmm2,80(%esp)
5625	movdqa	%xmm2,96(%esp)
5626	leal	(%edx),%esp
5627	movl	40(%esp),%ecx
5628	movl	48(%esp),%ebx
5629	movdqu	%xmm0,(%ecx)
5630	pxor	%xmm0,%xmm0
5631	movdqu	%xmm1,(%ebx)
5632	pxor	%xmm1,%xmm1
5633	popl	%edi
5634	popl	%esi
5635	popl	%ebx
5636	popl	%ebp
5637	ret
5638.size	aesni_ocb_encrypt,.-.L_aesni_ocb_encrypt_begin
/*
 * aesni_ocb_decrypt — AES-NI OCB-mode bulk decryption, i386 cdecl.
 * Auto-generated from aesni-x86.pl; do not hand-edit instructions.
 *
 * Stack args (after the four pushes below, relative to %esp):
 *   20 = in, 24 = out, 28 = blocks, 32 = AES_KEY *key,
 *   36 = start block number, 40 = offset[16] (OCB Offset_i, -> %xmm0),
 *   44 = L_ table (precomputed L values, -> %ebx),
 *   48 = checksum[16] (running plaintext checksum, -> %xmm1).
 * On return: updated offset is written back through 40(%esp)'s pointer and
 * the checksum through 48(%esp)'s pointer; XMM state and the scratch frame
 * are scrubbed before return.
 */
5639.globl	aesni_ocb_decrypt
5640.type	aesni_ocb_decrypt,@function
5641.align	16
5642aesni_ocb_decrypt:
5643.L_aesni_ocb_decrypt_begin:
5644	#ifdef __CET__
5645
5646.byte	243,15,30,251
5647	#endif
5648
5649	pushl	%ebp
5650	pushl	%ebx
5651	pushl	%esi
5652	pushl	%edi
5653	movl	40(%esp),%ecx
5654	movl	48(%esp),%ebx
5655	movl	20(%esp),%esi
5656	movl	24(%esp),%edi
5657	movl	28(%esp),%eax
5658	movl	32(%esp),%edx
5659	movdqu	(%ecx),%xmm0
5660	movl	36(%esp),%ebp
5661	movdqu	(%ebx),%xmm1
5662	movl	44(%esp),%ebx
/*
 * Build a 16-byte-aligned 132-byte scratch frame.  Saved below:
 *   120(%esp) = out - in delta, 124(%esp) = input end bound (in + 16*blocks
 *   - 96, used to decide 6-block vs tail path), 128(%esp) = caller %esp.
 */
5663	movl	%esp,%ecx
5664	subl	$132,%esp
5665	andl	$-16,%esp
5666	subl	%esi,%edi
5667	shll	$4,%eax
5668	leal	-96(%esi,%eax,1),%eax
5669	movl	%edi,120(%esp)
5670	movl	%eax,124(%esp)
5671	movl	%ecx,128(%esp)
5672	movl	240(%edx),%ecx
/*
 * If the starting block number is even, decrypt one block stand-alone first
 * so the main loop always begins on an odd block index (its bsf-based
 * L-table lookups rely on that).
 */
5673	testl	$1,%ebp
5674	jnz	.L084odd
5675	bsfl	%ebp,%eax
5676	addl	$1,%ebp
5677	shll	$4,%eax
5678	movdqu	(%ebx,%eax,1),%xmm7
5679	movl	%edx,%eax
5680	movdqu	(%esi),%xmm2
5681	leal	16(%esi),%esi
5682	pxor	%xmm0,%xmm7
5683	pxor	%xmm7,%xmm2
5684	movdqa	%xmm1,%xmm6
5685	movups	(%edx),%xmm0
5686	movups	16(%edx),%xmm1
5687	leal	32(%edx),%edx
5688	xorps	%xmm0,%xmm2
5689.L085dec1_loop_17:
5690.byte	102,15,56,222,209
5691	decl	%ecx
5692	movups	(%edx),%xmm1
5693	leal	16(%edx),%edx
5694	jnz	.L085dec1_loop_17
5695.byte	102,15,56,223,209
5696	xorps	%xmm7,%xmm2
5697	movaps	%xmm6,%xmm1
5698	movdqa	%xmm7,%xmm0
5699	xorps	%xmm2,%xmm1
5700	movups	%xmm2,-16(%edi,%esi,1)
5701	movl	240(%eax),%ecx
5702	movl	%eax,%edx
5703	movl	124(%esp),%eax
5704.L084odd:
/* 112(%esp) = key, 116(%esp) = 16 - 16*rounds (round-key index bias). */
5705	shll	$4,%ecx
5706	movl	$16,%edi
5707	subl	%ecx,%edi
5708	movl	%edx,112(%esp)
5709	leal	32(%edx,%ecx,1),%edx
5710	movl	%edi,116(%esp)
5711	cmpl	%eax,%esi
5712	ja	.L086short
5713	jmp	.L087grandloop
5714.align	32
/*
 * Main loop: decrypt 6 blocks per iteration.  Offsets for blocks i..i+5 are
 * chained via L[ntz(i)] lookups and parked at 0..80(%esp); the checksum
 * accumulator is parked at 96(%esp) and updated with the plaintext after
 * the joint _aesni_decrypt6 body.
 */
5715.L087grandloop:
5716	leal	1(%ebp),%ecx
5717	leal	3(%ebp),%eax
5718	leal	5(%ebp),%edi
5719	addl	$6,%ebp
5720	bsfl	%ecx,%ecx
5721	bsfl	%eax,%eax
5722	bsfl	%edi,%edi
5723	shll	$4,%ecx
5724	shll	$4,%eax
5725	shll	$4,%edi
5726	movdqu	(%ebx),%xmm2
5727	movdqu	(%ebx,%ecx,1),%xmm3
5728	movl	116(%esp),%ecx
5729	movdqa	%xmm2,%xmm4
5730	movdqu	(%ebx,%eax,1),%xmm5
5731	movdqa	%xmm2,%xmm6
5732	movdqu	(%ebx,%edi,1),%xmm7
5733	pxor	%xmm0,%xmm2
5734	pxor	%xmm2,%xmm3
5735	movdqa	%xmm2,(%esp)
5736	pxor	%xmm3,%xmm4
5737	movdqa	%xmm3,16(%esp)
5738	pxor	%xmm4,%xmm5
5739	movdqa	%xmm4,32(%esp)
5740	pxor	%xmm5,%xmm6
5741	movdqa	%xmm5,48(%esp)
5742	pxor	%xmm6,%xmm7
5743	movdqa	%xmm6,64(%esp)
5744	movdqa	%xmm7,80(%esp)
5745	movups	-48(%edx,%ecx,1),%xmm0
5746	movdqu	(%esi),%xmm2
5747	movdqu	16(%esi),%xmm3
5748	movdqu	32(%esi),%xmm4
5749	movdqu	48(%esi),%xmm5
5750	movdqu	64(%esi),%xmm6
5751	movdqu	80(%esi),%xmm7
5752	leal	96(%esi),%esi
5753	movdqa	%xmm1,96(%esp)
/* Whiten each ciphertext block with round-key 0 and its per-block offset. */
5754	pxor	%xmm0,%xmm2
5755	pxor	%xmm0,%xmm3
5756	pxor	%xmm0,%xmm4
5757	pxor	%xmm0,%xmm5
5758	pxor	%xmm0,%xmm6
5759	pxor	%xmm0,%xmm7
5760	movups	-32(%edx,%ecx,1),%xmm1
5761	pxor	(%esp),%xmm2
5762	pxor	16(%esp),%xmm3
5763	pxor	32(%esp),%xmm4
5764	pxor	48(%esp),%xmm5
5765	pxor	64(%esp),%xmm6
5766	pxor	80(%esp),%xmm7
5767	movups	-16(%edx,%ecx,1),%xmm0
5768.byte	102,15,56,222,209
5769.byte	102,15,56,222,217
5770.byte	102,15,56,222,225
5771.byte	102,15,56,222,233
5772.byte	102,15,56,222,241
5773.byte	102,15,56,222,249
5774	movl	120(%esp),%edi
5775	movl	124(%esp),%eax
5776	call	.L_aesni_decrypt6_enter
/* Un-whiten with the saved offsets, fold plaintext into the checksum, store. */
5777	movdqa	80(%esp),%xmm0
5778	pxor	(%esp),%xmm2
5779	movdqa	96(%esp),%xmm1
5780	pxor	16(%esp),%xmm3
5781	pxor	32(%esp),%xmm4
5782	pxor	48(%esp),%xmm5
5783	pxor	64(%esp),%xmm6
5784	pxor	%xmm0,%xmm7
5785	pxor	%xmm2,%xmm1
5786	movdqu	%xmm2,-96(%edi,%esi,1)
5787	pxor	%xmm3,%xmm1
5788	movdqu	%xmm3,-80(%edi,%esi,1)
5789	pxor	%xmm4,%xmm1
5790	movdqu	%xmm4,-64(%edi,%esi,1)
5791	pxor	%xmm5,%xmm1
5792	movdqu	%xmm5,-48(%edi,%esi,1)
5793	pxor	%xmm6,%xmm1
5794	movdqu	%xmm6,-32(%edi,%esi,1)
5795	pxor	%xmm7,%xmm1
5796	movdqu	%xmm7,-16(%edi,%esi,1)
5797	cmpl	%eax,%esi
5798	jbe	.L087grandloop
/* Tail dispatch: %eax = remaining bytes; 5/4/3/2/1-block finishes below. */
5799.L086short:
5800	addl	$96,%eax
5801	subl	%esi,%eax
5802	jz	.L088done
5803	cmpl	$32,%eax
5804	jb	.L089one
5805	je	.L090two
5806	cmpl	$64,%eax
5807	jb	.L091three
5808	je	.L092four
5809	leal	1(%ebp),%ecx
5810	leal	3(%ebp),%eax
5811	bsfl	%ecx,%ecx
5812	bsfl	%eax,%eax
5813	shll	$4,%ecx
5814	shll	$4,%eax
5815	movdqu	(%ebx),%xmm2
5816	movdqu	(%ebx,%ecx,1),%xmm3
5817	movl	116(%esp),%ecx
5818	movdqa	%xmm2,%xmm4
5819	movdqu	(%ebx,%eax,1),%xmm5
5820	movdqa	%xmm2,%xmm6
5821	pxor	%xmm0,%xmm2
5822	pxor	%xmm2,%xmm3
5823	movdqa	%xmm2,(%esp)
5824	pxor	%xmm3,%xmm4
5825	movdqa	%xmm3,16(%esp)
5826	pxor	%xmm4,%xmm5
5827	movdqa	%xmm4,32(%esp)
5828	pxor	%xmm5,%xmm6
5829	movdqa	%xmm5,48(%esp)
5830	pxor	%xmm6,%xmm7
5831	movdqa	%xmm6,64(%esp)
5832	movups	-48(%edx,%ecx,1),%xmm0
5833	movdqu	(%esi),%xmm2
5834	movdqu	16(%esi),%xmm3
5835	movdqu	32(%esi),%xmm4
5836	movdqu	48(%esi),%xmm5
5837	movdqu	64(%esi),%xmm6
5838	pxor	%xmm7,%xmm7
5839	movdqa	%xmm1,96(%esp)
5840	pxor	%xmm0,%xmm2
5841	pxor	%xmm0,%xmm3
5842	pxor	%xmm0,%xmm4
5843	pxor	%xmm0,%xmm5
5844	pxor	%xmm0,%xmm6
5845	movups	-32(%edx,%ecx,1),%xmm1
5846	pxor	(%esp),%xmm2
5847	pxor	16(%esp),%xmm3
5848	pxor	32(%esp),%xmm4
5849	pxor	48(%esp),%xmm5
5850	pxor	64(%esp),%xmm6
5851	movups	-16(%edx,%ecx,1),%xmm0
5852.byte	102,15,56,222,209
5853.byte	102,15,56,222,217
5854.byte	102,15,56,222,225
5855.byte	102,15,56,222,233
5856.byte	102,15,56,222,241
5857.byte	102,15,56,222,249
5858	movl	120(%esp),%edi
5859	call	.L_aesni_decrypt6_enter
5860	movdqa	64(%esp),%xmm0
5861	pxor	(%esp),%xmm2
5862	movdqa	96(%esp),%xmm1
5863	pxor	16(%esp),%xmm3
5864	pxor	32(%esp),%xmm4
5865	pxor	48(%esp),%xmm5
5866	pxor	%xmm0,%xmm6
5867	pxor	%xmm2,%xmm1
5868	movdqu	%xmm2,(%edi,%esi,1)
5869	pxor	%xmm3,%xmm1
5870	movdqu	%xmm3,16(%edi,%esi,1)
5871	pxor	%xmm4,%xmm1
5872	movdqu	%xmm4,32(%edi,%esi,1)
5873	pxor	%xmm5,%xmm1
5874	movdqu	%xmm5,48(%edi,%esi,1)
5875	pxor	%xmm6,%xmm1
5876	movdqu	%xmm6,64(%edi,%esi,1)
5877	jmp	.L088done
5878.align	16
/* Single remaining block: inline one-block AES decrypt loop. */
5879.L089one:
5880	movdqu	(%ebx),%xmm7
5881	movl	112(%esp),%edx
5882	movdqu	(%esi),%xmm2
5883	movl	240(%edx),%ecx
5884	pxor	%xmm0,%xmm7
5885	pxor	%xmm7,%xmm2
5886	movdqa	%xmm1,%xmm6
5887	movl	120(%esp),%edi
5888	movups	(%edx),%xmm0
5889	movups	16(%edx),%xmm1
5890	leal	32(%edx),%edx
5891	xorps	%xmm0,%xmm2
5892.L093dec1_loop_18:
5893.byte	102,15,56,222,209
5894	decl	%ecx
5895	movups	(%edx),%xmm1
5896	leal	16(%edx),%edx
5897	jnz	.L093dec1_loop_18
5898.byte	102,15,56,223,209
5899	xorps	%xmm7,%xmm2
5900	movaps	%xmm6,%xmm1
5901	movdqa	%xmm7,%xmm0
5902	xorps	%xmm2,%xmm1
5903	movups	%xmm2,(%edi,%esi,1)
5904	jmp	.L088done
5905.align	16
5906.L090two:
5907	leal	1(%ebp),%ecx
5908	movl	112(%esp),%edx
5909	bsfl	%ecx,%ecx
5910	shll	$4,%ecx
5911	movdqu	(%ebx),%xmm6
5912	movdqu	(%ebx,%ecx,1),%xmm7
5913	movdqu	(%esi),%xmm2
5914	movdqu	16(%esi),%xmm3
5915	movl	240(%edx),%ecx
5916	movdqa	%xmm1,%xmm5
5917	pxor	%xmm0,%xmm6
5918	pxor	%xmm6,%xmm7
5919	pxor	%xmm6,%xmm2
5920	pxor	%xmm7,%xmm3
5921	movl	120(%esp),%edi
5922	call	_aesni_decrypt2
5923	xorps	%xmm6,%xmm2
5924	xorps	%xmm7,%xmm3
5925	movdqa	%xmm7,%xmm0
5926	xorps	%xmm2,%xmm5
5927	movups	%xmm2,(%edi,%esi,1)
5928	xorps	%xmm3,%xmm5
5929	movups	%xmm3,16(%edi,%esi,1)
5930	movaps	%xmm5,%xmm1
5931	jmp	.L088done
5932.align	16
5933.L091three:
5934	leal	1(%ebp),%ecx
5935	movl	112(%esp),%edx
5936	bsfl	%ecx,%ecx
5937	shll	$4,%ecx
5938	movdqu	(%ebx),%xmm5
5939	movdqu	(%ebx,%ecx,1),%xmm6
5940	movdqa	%xmm5,%xmm7
5941	movdqu	(%esi),%xmm2
5942	movdqu	16(%esi),%xmm3
5943	movdqu	32(%esi),%xmm4
5944	movl	240(%edx),%ecx
5945	movdqa	%xmm1,96(%esp)
5946	pxor	%xmm0,%xmm5
5947	pxor	%xmm5,%xmm6
5948	pxor	%xmm6,%xmm7
5949	pxor	%xmm5,%xmm2
5950	pxor	%xmm6,%xmm3
5951	pxor	%xmm7,%xmm4
5952	movl	120(%esp),%edi
5953	call	_aesni_decrypt3
5954	movdqa	96(%esp),%xmm1
5955	xorps	%xmm5,%xmm2
5956	xorps	%xmm6,%xmm3
5957	xorps	%xmm7,%xmm4
5958	movups	%xmm2,(%edi,%esi,1)
5959	pxor	%xmm2,%xmm1
5960	movdqa	%xmm7,%xmm0
5961	movups	%xmm3,16(%edi,%esi,1)
5962	pxor	%xmm3,%xmm1
5963	movups	%xmm4,32(%edi,%esi,1)
5964	pxor	%xmm4,%xmm1
5965	jmp	.L088done
5966.align	16
5967.L092four:
5968	leal	1(%ebp),%ecx
5969	leal	3(%ebp),%eax
5970	bsfl	%ecx,%ecx
5971	bsfl	%eax,%eax
5972	movl	112(%esp),%edx
5973	shll	$4,%ecx
5974	shll	$4,%eax
5975	movdqu	(%ebx),%xmm4
5976	movdqu	(%ebx,%ecx,1),%xmm5
5977	movdqa	%xmm4,%xmm6
5978	movdqu	(%ebx,%eax,1),%xmm7
5979	pxor	%xmm0,%xmm4
5980	movdqu	(%esi),%xmm2
5981	pxor	%xmm4,%xmm5
5982	movdqu	16(%esi),%xmm3
5983	pxor	%xmm5,%xmm6
5984	movdqa	%xmm4,(%esp)
5985	pxor	%xmm6,%xmm7
5986	movdqa	%xmm5,16(%esp)
5987	movdqu	32(%esi),%xmm4
5988	movdqu	48(%esi),%xmm5
5989	movl	240(%edx),%ecx
5990	movdqa	%xmm1,96(%esp)
5991	pxor	(%esp),%xmm2
5992	pxor	16(%esp),%xmm3
5993	pxor	%xmm6,%xmm4
5994	pxor	%xmm7,%xmm5
5995	movl	120(%esp),%edi
5996	call	_aesni_decrypt4
5997	movdqa	96(%esp),%xmm1
5998	xorps	(%esp),%xmm2
5999	xorps	16(%esp),%xmm3
6000	xorps	%xmm6,%xmm4
6001	movups	%xmm2,(%edi,%esi,1)
6002	pxor	%xmm2,%xmm1
6003	xorps	%xmm7,%xmm5
6004	movups	%xmm3,16(%edi,%esi,1)
6005	pxor	%xmm3,%xmm1
6006	movdqa	%xmm7,%xmm0
6007	movups	%xmm4,32(%edi,%esi,1)
6008	pxor	%xmm4,%xmm1
6009	movups	%xmm5,48(%edi,%esi,1)
6010	pxor	%xmm5,%xmm1
/*
 * Epilogue: zero the scratch frame and all data XMM registers so no key or
 * plaintext material is left behind, restore %esp, write back the updated
 * offset (%xmm0) and checksum (%xmm1).
 */
6011.L088done:
6012	movl	128(%esp),%edx
6013	pxor	%xmm2,%xmm2
6014	pxor	%xmm3,%xmm3
6015	movdqa	%xmm2,(%esp)
6016	pxor	%xmm4,%xmm4
6017	movdqa	%xmm2,16(%esp)
6018	pxor	%xmm5,%xmm5
6019	movdqa	%xmm2,32(%esp)
6020	pxor	%xmm6,%xmm6
6021	movdqa	%xmm2,48(%esp)
6022	pxor	%xmm7,%xmm7
6023	movdqa	%xmm2,64(%esp)
6024	movdqa	%xmm2,80(%esp)
6025	movdqa	%xmm2,96(%esp)
6026	leal	(%edx),%esp
6027	movl	40(%esp),%ecx
6028	movl	48(%esp),%ebx
6029	movdqu	%xmm0,(%ecx)
6030	pxor	%xmm0,%xmm0
6031	movdqu	%xmm1,(%ebx)
6032	pxor	%xmm1,%xmm1
6033	popl	%edi
6034	popl	%esi
6035	popl	%ebx
6036	popl	%ebp
6037	ret
6038.size	aesni_ocb_decrypt,.-.L_aesni_ocb_decrypt_begin
/*
 * aesni_cbc_encrypt — AES-NI CBC-mode encrypt or decrypt, i386 cdecl.
 * Auto-generated from aesni-x86.pl; do not hand-edit instructions.
 *
 * Stack args (after the four pushes below, relative to %esp):
 *   20 = in, 24 = out, 28 = length (bytes), 32 = AES_KEY *key,
 *   36 = ivec[16], 40 = enc flag (non-zero = encrypt, zero = decrypt).
 * Encryption is serial (CBC chaining); decryption is interleaved 6 blocks
 * at a time.  The updated IV is written back through 36(%esp)'s pointer.
 */
6039.globl	aesni_cbc_encrypt
6040.type	aesni_cbc_encrypt,@function
6041.align	16
6042aesni_cbc_encrypt:
6043.L_aesni_cbc_encrypt_begin:
6044	#ifdef __CET__
6045
6046.byte	243,15,30,251
6047	#endif
6048
6049	pushl	%ebp
6050	pushl	%ebx
6051	pushl	%esi
6052	pushl	%edi
6053	movl	20(%esp),%esi
/* %ebx = aligned scratch frame; saved caller %esp kept at 16(%esp). */
6054	movl	%esp,%ebx
6055	movl	24(%esp),%edi
6056	subl	$24,%ebx
6057	movl	28(%esp),%eax
6058	andl	$-16,%ebx
6059	movl	32(%esp),%edx
6060	movl	36(%esp),%ebp
6061	testl	%eax,%eax
6062	jz	.L094cbc_abort
6063	cmpl	$0,40(%esp)
6064	xchgl	%esp,%ebx
6065	movups	(%ebp),%xmm7
6066	movl	240(%edx),%ecx
6067	movl	%edx,%ebp
6068	movl	%ebx,16(%esp)
6069	movl	%ecx,%ebx
6070	je	.L095cbc_decrypt
/* ---- CBC encrypt: one block per iteration, chained through %xmm2. ---- */
6071	movaps	%xmm7,%xmm2
6072	cmpl	$16,%eax
6073	jb	.L096cbc_enc_tail
6074	subl	$16,%eax
6075	jmp	.L097cbc_enc_loop
6076.align	16
6077.L097cbc_enc_loop:
6078	movups	(%esi),%xmm7
6079	leal	16(%esi),%esi
6080	movups	(%edx),%xmm0
6081	movups	16(%edx),%xmm1
6082	xorps	%xmm0,%xmm7
6083	leal	32(%edx),%edx
6084	xorps	%xmm7,%xmm2
6085.L098enc1_loop_19:
6086.byte	102,15,56,220,209
6087	decl	%ecx
6088	movups	(%edx),%xmm1
6089	leal	16(%edx),%edx
6090	jnz	.L098enc1_loop_19
6091.byte	102,15,56,221,209
6092	movl	%ebx,%ecx
6093	movl	%ebp,%edx
6094	movups	%xmm2,(%edi)
6095	leal	16(%edi),%edi
6096	subl	$16,%eax
6097	jnc	.L097cbc_enc_loop
6098	addl	$16,%eax
6099	jnz	.L096cbc_enc_tail
6100	movaps	%xmm2,%xmm7
6101	pxor	%xmm2,%xmm2
6102	jmp	.L099cbc_ret
/*
 * Partial final block: the two .long values below are rep movsb/rep stosb
 * encodings — copy the tail in place and zero-pad to a full block, then
 * loop back to encrypt it.
 */
6103.L096cbc_enc_tail:
6104	movl	%eax,%ecx
6105.long	2767451785
6106	movl	$16,%ecx
6107	subl	%eax,%ecx
6108	xorl	%eax,%eax
6109.long	2868115081
6110	leal	-16(%edi),%edi
6111	movl	%ebx,%ecx
6112	movl	%edi,%esi
6113	movl	%ebp,%edx
6114	jmp	.L097cbc_enc_loop
6115.align	16
/* ---- CBC decrypt: 6-block interleave; previous IV kept at (%esp). ---- */
6116.L095cbc_decrypt:
6117	cmpl	$80,%eax
6118	jbe	.L100cbc_dec_tail
6119	movaps	%xmm7,(%esp)
6120	subl	$80,%eax
6121	jmp	.L101cbc_dec_loop6_enter
6122.align	16
6123.L102cbc_dec_loop6:
6124	movaps	%xmm0,(%esp)
6125	movups	%xmm7,(%edi)
6126	leal	16(%edi),%edi
6127.L101cbc_dec_loop6_enter:
6128	movdqu	(%esi),%xmm2
6129	movdqu	16(%esi),%xmm3
6130	movdqu	32(%esi),%xmm4
6131	movdqu	48(%esi),%xmm5
6132	movdqu	64(%esi),%xmm6
6133	movdqu	80(%esi),%xmm7
6134	call	_aesni_decrypt6
/* XOR each decrypted block with the preceding ciphertext block (CBC). */
6135	movups	(%esi),%xmm1
6136	movups	16(%esi),%xmm0
6137	xorps	(%esp),%xmm2
6138	xorps	%xmm1,%xmm3
6139	movups	32(%esi),%xmm1
6140	xorps	%xmm0,%xmm4
6141	movups	48(%esi),%xmm0
6142	xorps	%xmm1,%xmm5
6143	movups	64(%esi),%xmm1
6144	xorps	%xmm0,%xmm6
6145	movups	80(%esi),%xmm0
6146	xorps	%xmm1,%xmm7
6147	movups	%xmm2,(%edi)
6148	movups	%xmm3,16(%edi)
6149	leal	96(%esi),%esi
6150	movups	%xmm4,32(%edi)
6151	movl	%ebx,%ecx
6152	movups	%xmm5,48(%edi)
6153	movl	%ebp,%edx
6154	movups	%xmm6,64(%edi)
6155	leal	80(%edi),%edi
6156	subl	$96,%eax
6157	ja	.L102cbc_dec_loop6
6158	movaps	%xmm7,%xmm2
6159	movaps	%xmm0,%xmm7
6160	addl	$80,%eax
6161	jle	.L103cbc_dec_clear_tail_collected
6162	movups	%xmm2,(%edi)
6163	leal	16(%edi),%edi
/* Tail: 1..5 remaining (possibly partial) blocks, dispatched by size. */
6164.L100cbc_dec_tail:
6165	movups	(%esi),%xmm2
6166	movaps	%xmm2,%xmm6
6167	cmpl	$16,%eax
6168	jbe	.L104cbc_dec_one
6169	movups	16(%esi),%xmm3
6170	movaps	%xmm3,%xmm5
6171	cmpl	$32,%eax
6172	jbe	.L105cbc_dec_two
6173	movups	32(%esi),%xmm4
6174	cmpl	$48,%eax
6175	jbe	.L106cbc_dec_three
6176	movups	48(%esi),%xmm5
6177	cmpl	$64,%eax
6178	jbe	.L107cbc_dec_four
6179	movups	64(%esi),%xmm6
6180	movaps	%xmm7,(%esp)
6181	movups	(%esi),%xmm2
6182	xorps	%xmm7,%xmm7
6183	call	_aesni_decrypt6
6184	movups	(%esi),%xmm1
6185	movups	16(%esi),%xmm0
6186	xorps	(%esp),%xmm2
6187	xorps	%xmm1,%xmm3
6188	movups	32(%esi),%xmm1
6189	xorps	%xmm0,%xmm4
6190	movups	48(%esi),%xmm0
6191	xorps	%xmm1,%xmm5
6192	movups	64(%esi),%xmm7
6193	xorps	%xmm0,%xmm6
6194	movups	%xmm2,(%edi)
6195	movups	%xmm3,16(%edi)
6196	pxor	%xmm3,%xmm3
6197	movups	%xmm4,32(%edi)
6198	pxor	%xmm4,%xmm4
6199	movups	%xmm5,48(%edi)
6200	pxor	%xmm5,%xmm5
6201	leal	64(%edi),%edi
6202	movaps	%xmm6,%xmm2
6203	pxor	%xmm6,%xmm6
6204	subl	$80,%eax
6205	jmp	.L108cbc_dec_tail_collected
6206.align	16
6207.L104cbc_dec_one:
6208	movups	(%edx),%xmm0
6209	movups	16(%edx),%xmm1
6210	leal	32(%edx),%edx
6211	xorps	%xmm0,%xmm2
6212.L109dec1_loop_20:
6213.byte	102,15,56,222,209
6214	decl	%ecx
6215	movups	(%edx),%xmm1
6216	leal	16(%edx),%edx
6217	jnz	.L109dec1_loop_20
6218.byte	102,15,56,223,209
6219	xorps	%xmm7,%xmm2
6220	movaps	%xmm6,%xmm7
6221	subl	$16,%eax
6222	jmp	.L108cbc_dec_tail_collected
6223.align	16
6224.L105cbc_dec_two:
6225	call	_aesni_decrypt2
6226	xorps	%xmm7,%xmm2
6227	xorps	%xmm6,%xmm3
6228	movups	%xmm2,(%edi)
6229	movaps	%xmm3,%xmm2
6230	pxor	%xmm3,%xmm3
6231	leal	16(%edi),%edi
6232	movaps	%xmm5,%xmm7
6233	subl	$32,%eax
6234	jmp	.L108cbc_dec_tail_collected
6235.align	16
6236.L106cbc_dec_three:
6237	call	_aesni_decrypt3
6238	xorps	%xmm7,%xmm2
6239	xorps	%xmm6,%xmm3
6240	xorps	%xmm5,%xmm4
6241	movups	%xmm2,(%edi)
6242	movaps	%xmm4,%xmm2
6243	pxor	%xmm4,%xmm4
6244	movups	%xmm3,16(%edi)
6245	pxor	%xmm3,%xmm3
6246	leal	32(%edi),%edi
6247	movups	32(%esi),%xmm7
6248	subl	$48,%eax
6249	jmp	.L108cbc_dec_tail_collected
6250.align	16
6251.L107cbc_dec_four:
6252	call	_aesni_decrypt4
6253	movups	16(%esi),%xmm1
6254	movups	32(%esi),%xmm0
6255	xorps	%xmm7,%xmm2
6256	movups	48(%esi),%xmm7
6257	xorps	%xmm6,%xmm3
6258	movups	%xmm2,(%edi)
6259	xorps	%xmm1,%xmm4
6260	movups	%xmm3,16(%edi)
6261	pxor	%xmm3,%xmm3
6262	xorps	%xmm0,%xmm5
6263	movups	%xmm4,32(%edi)
6264	pxor	%xmm4,%xmm4
6265	leal	48(%edi),%edi
6266	movaps	%xmm5,%xmm2
6267	pxor	%xmm5,%xmm5
6268	subl	$64,%eax
6269	jmp	.L108cbc_dec_tail_collected
6270.align	16
6271.L103cbc_dec_clear_tail_collected:
6272	pxor	%xmm3,%xmm3
6273	pxor	%xmm4,%xmm4
6274	pxor	%xmm5,%xmm5
6275	pxor	%xmm6,%xmm6
6276.L108cbc_dec_tail_collected:
6277	andl	$15,%eax
6278	jnz	.L110cbc_dec_tail_partial
6279	movups	%xmm2,(%edi)
6280	pxor	%xmm0,%xmm0
6281	jmp	.L099cbc_ret
6282.align	16
/*
 * Partial last output block: spill to the stack, then copy only the valid
 * bytes (.long below is a rep movsb encoding) and scrub the spill slot.
 */
6283.L110cbc_dec_tail_partial:
6284	movaps	%xmm2,(%esp)
6285	pxor	%xmm0,%xmm0
6286	movl	$16,%ecx
6287	movl	%esp,%esi
6288	subl	%eax,%ecx
6289.long	2767451785
6290	movdqa	%xmm2,(%esp)
/* Restore %esp, write back the final IV, and wipe XMM state. */
6291.L099cbc_ret:
6292	movl	16(%esp),%esp
6293	movl	36(%esp),%ebp
6294	pxor	%xmm2,%xmm2
6295	pxor	%xmm1,%xmm1
6296	movups	%xmm7,(%ebp)
6297	pxor	%xmm7,%xmm7
6298.L094cbc_abort:
6299	popl	%edi
6300	popl	%esi
6301	popl	%ebx
6302	popl	%ebp
6303	ret
6304.size	aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin
/*
 * _aesni_set_encrypt_key — internal AES key-schedule expansion.
 * Auto-generated from aesni-x86.pl; do not hand-edit instructions.
 *
 * In:  %eax = user key, %ecx = bits (128/192/256), %edx = AES_KEY out.
 * Out: %eax = 0 on success, -1 bad pointer, -2 bad key bits.
 * Each key size has two paths: the classic AESKEYGENASSIST path, and an
 * "_alt" path (selected via the OPENSSL_ia32cap_P capability word) that
 * builds the schedule with PSHUFB/AESENCLAST using .Lkey_const — used to
 * avoid AESKEYGENASSIST on CPUs where it is not constant-time.
 */
6305.type	_aesni_set_encrypt_key,@function
6306.align	16
6307_aesni_set_encrypt_key:
6308	#ifdef __CET__
6309
6310.byte	243,15,30,251
6311	#endif
6312
6313	pushl	%ebp
6314	pushl	%ebx
6315	testl	%eax,%eax
6316	jz	.L111bad_pointer
6317	testl	%edx,%edx
6318	jz	.L111bad_pointer
/* PIC: recover our load address to reach .Lkey_const position-independently. */
6319	call	.L112pic
6320.L112pic:
6321	popl	%ebx
6322	leal	.Lkey_const-.L112pic(%ebx),%ebx
6323	leal	OPENSSL_ia32cap_P,%ebp
6324	movups	(%eax),%xmm0
6325	xorps	%xmm4,%xmm4
6326	movl	4(%ebp),%ebp
6327	leal	16(%edx),%edx
6328	andl	$268437504,%ebp
6329	cmpl	$256,%ecx
6330	je	.L11314rounds
6331	cmpl	$192,%ecx
6332	je	.L11412rounds
6333	cmpl	$128,%ecx
6334	jne	.L115bad_keybits
6335.align	16
/* ---- AES-128: 10 rounds.  .byte runs are aeskeygenassist $rcon,%xmm0. ---- */
6336.L11610rounds:
6337	cmpl	$268435456,%ebp
6338	je	.L11710rounds_alt
6339	movl	$9,%ecx
6340	movups	%xmm0,-16(%edx)
6341.byte	102,15,58,223,200,1
6342	call	.L118key_128_cold
6343.byte	102,15,58,223,200,2
6344	call	.L119key_128
6345.byte	102,15,58,223,200,4
6346	call	.L119key_128
6347.byte	102,15,58,223,200,8
6348	call	.L119key_128
6349.byte	102,15,58,223,200,16
6350	call	.L119key_128
6351.byte	102,15,58,223,200,32
6352	call	.L119key_128
6353.byte	102,15,58,223,200,64
6354	call	.L119key_128
6355.byte	102,15,58,223,200,128
6356	call	.L119key_128
6357.byte	102,15,58,223,200,27
6358	call	.L119key_128
6359.byte	102,15,58,223,200,54
6360	call	.L119key_128
6361	movups	%xmm0,(%edx)
6362	movl	%ecx,80(%edx)
6363	jmp	.L120good_key
6364.align	16
/* Shared round-key mixer for the 128-bit schedule. */
6365.L119key_128:
6366	movups	%xmm0,(%edx)
6367	leal	16(%edx),%edx
6368.L118key_128_cold:
6369	shufps	$16,%xmm0,%xmm4
6370	xorps	%xmm4,%xmm0
6371	shufps	$140,%xmm0,%xmm4
6372	xorps	%xmm4,%xmm0
6373	shufps	$255,%xmm1,%xmm1
6374	xorps	%xmm1,%xmm0
6375	ret
6376.align	16
/* AES-128 alternate (constant-time) path: PSHUFB + AESENCLAST per round. */
6377.L11710rounds_alt:
6378	movdqa	(%ebx),%xmm5
6379	movl	$8,%ecx
6380	movdqa	32(%ebx),%xmm4
6381	movdqa	%xmm0,%xmm2
6382	movdqu	%xmm0,-16(%edx)
6383.L121loop_key128:
6384.byte	102,15,56,0,197
6385.byte	102,15,56,221,196
6386	pslld	$1,%xmm4
6387	leal	16(%edx),%edx
6388	movdqa	%xmm2,%xmm3
6389	pslldq	$4,%xmm2
6390	pxor	%xmm2,%xmm3
6391	pslldq	$4,%xmm2
6392	pxor	%xmm2,%xmm3
6393	pslldq	$4,%xmm2
6394	pxor	%xmm3,%xmm2
6395	pxor	%xmm2,%xmm0
6396	movdqu	%xmm0,-16(%edx)
6397	movdqa	%xmm0,%xmm2
6398	decl	%ecx
6399	jnz	.L121loop_key128
6400	movdqa	48(%ebx),%xmm4
6401.byte	102,15,56,0,197
6402.byte	102,15,56,221,196
6403	pslld	$1,%xmm4
6404	movdqa	%xmm2,%xmm3
6405	pslldq	$4,%xmm2
6406	pxor	%xmm2,%xmm3
6407	pslldq	$4,%xmm2
6408	pxor	%xmm2,%xmm3
6409	pslldq	$4,%xmm2
6410	pxor	%xmm3,%xmm2
6411	pxor	%xmm2,%xmm0
6412	movdqu	%xmm0,(%edx)
6413	movdqa	%xmm0,%xmm2
6414.byte	102,15,56,0,197
6415.byte	102,15,56,221,196
6416	movdqa	%xmm2,%xmm3
6417	pslldq	$4,%xmm2
6418	pxor	%xmm2,%xmm3
6419	pslldq	$4,%xmm2
6420	pxor	%xmm2,%xmm3
6421	pslldq	$4,%xmm2
6422	pxor	%xmm3,%xmm2
6423	pxor	%xmm2,%xmm0
6424	movdqu	%xmm0,16(%edx)
6425	movl	$9,%ecx
6426	movl	%ecx,96(%edx)
6427	jmp	.L120good_key
6428.align	16
/* ---- AES-192: 12 rounds. ---- */
6429.L11412rounds:
6430	movq	16(%eax),%xmm2
6431	cmpl	$268435456,%ebp
6432	je	.L12212rounds_alt
6433	movl	$11,%ecx
6434	movups	%xmm0,-16(%edx)
6435.byte	102,15,58,223,202,1
6436	call	.L123key_192a_cold
6437.byte	102,15,58,223,202,2
6438	call	.L124key_192b
6439.byte	102,15,58,223,202,4
6440	call	.L125key_192a
6441.byte	102,15,58,223,202,8
6442	call	.L124key_192b
6443.byte	102,15,58,223,202,16
6444	call	.L125key_192a
6445.byte	102,15,58,223,202,32
6446	call	.L124key_192b
6447.byte	102,15,58,223,202,64
6448	call	.L125key_192a
6449.byte	102,15,58,223,202,128
6450	call	.L124key_192b
6451	movups	%xmm0,(%edx)
6452	movl	%ecx,48(%edx)
6453	jmp	.L120good_key
6454.align	16
6455.L125key_192a:
6456	movups	%xmm0,(%edx)
6457	leal	16(%edx),%edx
6458.align	16
6459.L123key_192a_cold:
6460	movaps	%xmm2,%xmm5
6461.L126key_192b_warm:
6462	shufps	$16,%xmm0,%xmm4
6463	movdqa	%xmm2,%xmm3
6464	xorps	%xmm4,%xmm0
6465	shufps	$140,%xmm0,%xmm4
6466	pslldq	$4,%xmm3
6467	xorps	%xmm4,%xmm0
6468	pshufd	$85,%xmm1,%xmm1
6469	pxor	%xmm3,%xmm2
6470	pxor	%xmm1,%xmm0
6471	pshufd	$255,%xmm0,%xmm3
6472	pxor	%xmm3,%xmm2
6473	ret
6474.align	16
6475.L124key_192b:
6476	movaps	%xmm0,%xmm3
6477	shufps	$68,%xmm0,%xmm5
6478	movups	%xmm5,(%edx)
6479	shufps	$78,%xmm2,%xmm3
6480	movups	%xmm3,16(%edx)
6481	leal	32(%edx),%edx
6482	jmp	.L126key_192b_warm
6483.align	16
/* AES-192 alternate path. */
6484.L12212rounds_alt:
6485	movdqa	16(%ebx),%xmm5
6486	movdqa	32(%ebx),%xmm4
6487	movl	$8,%ecx
6488	movdqu	%xmm0,-16(%edx)
6489.L127loop_key192:
6490	movq	%xmm2,(%edx)
6491	movdqa	%xmm2,%xmm1
6492.byte	102,15,56,0,213
6493.byte	102,15,56,221,212
6494	pslld	$1,%xmm4
6495	leal	24(%edx),%edx
6496	movdqa	%xmm0,%xmm3
6497	pslldq	$4,%xmm0
6498	pxor	%xmm0,%xmm3
6499	pslldq	$4,%xmm0
6500	pxor	%xmm0,%xmm3
6501	pslldq	$4,%xmm0
6502	pxor	%xmm3,%xmm0
6503	pshufd	$255,%xmm0,%xmm3
6504	pxor	%xmm1,%xmm3
6505	pslldq	$4,%xmm1
6506	pxor	%xmm1,%xmm3
6507	pxor	%xmm2,%xmm0
6508	pxor	%xmm3,%xmm2
6509	movdqu	%xmm0,-16(%edx)
6510	decl	%ecx
6511	jnz	.L127loop_key192
6512	movl	$11,%ecx
6513	movl	%ecx,32(%edx)
6514	jmp	.L120good_key
6515.align	16
/* ---- AES-256: 14 rounds. ---- */
6516.L11314rounds:
6517	movups	16(%eax),%xmm2
6518	leal	16(%edx),%edx
6519	cmpl	$268435456,%ebp
6520	je	.L12814rounds_alt
6521	movl	$13,%ecx
6522	movups	%xmm0,-32(%edx)
6523	movups	%xmm2,-16(%edx)
6524.byte	102,15,58,223,202,1
6525	call	.L129key_256a_cold
6526.byte	102,15,58,223,200,1
6527	call	.L130key_256b
6528.byte	102,15,58,223,202,2
6529	call	.L131key_256a
6530.byte	102,15,58,223,200,2
6531	call	.L130key_256b
6532.byte	102,15,58,223,202,4
6533	call	.L131key_256a
6534.byte	102,15,58,223,200,4
6535	call	.L130key_256b
6536.byte	102,15,58,223,202,8
6537	call	.L131key_256a
6538.byte	102,15,58,223,200,8
6539	call	.L130key_256b
6540.byte	102,15,58,223,202,16
6541	call	.L131key_256a
6542.byte	102,15,58,223,200,16
6543	call	.L130key_256b
6544.byte	102,15,58,223,202,32
6545	call	.L131key_256a
6546.byte	102,15,58,223,200,32
6547	call	.L130key_256b
6548.byte	102,15,58,223,202,64
6549	call	.L131key_256a
6550	movups	%xmm0,(%edx)
6551	movl	%ecx,16(%edx)
6552	xorl	%eax,%eax
6553	jmp	.L120good_key
6554.align	16
6555.L131key_256a:
6556	movups	%xmm2,(%edx)
6557	leal	16(%edx),%edx
6558.L129key_256a_cold:
6559	shufps	$16,%xmm0,%xmm4
6560	xorps	%xmm4,%xmm0
6561	shufps	$140,%xmm0,%xmm4
6562	xorps	%xmm4,%xmm0
6563	shufps	$255,%xmm1,%xmm1
6564	xorps	%xmm1,%xmm0
6565	ret
6566.align	16
6567.L130key_256b:
6568	movups	%xmm0,(%edx)
6569	leal	16(%edx),%edx
6570	shufps	$16,%xmm2,%xmm4
6571	xorps	%xmm4,%xmm2
6572	shufps	$140,%xmm2,%xmm4
6573	xorps	%xmm4,%xmm2
6574	shufps	$170,%xmm1,%xmm1
6575	xorps	%xmm1,%xmm2
6576	ret
6577.align	16
/* AES-256 alternate path. */
6578.L12814rounds_alt:
6579	movdqa	(%ebx),%xmm5
6580	movdqa	32(%ebx),%xmm4
6581	movl	$7,%ecx
6582	movdqu	%xmm0,-32(%edx)
6583	movdqa	%xmm2,%xmm1
6584	movdqu	%xmm2,-16(%edx)
6585.L132loop_key256:
6586.byte	102,15,56,0,213
6587.byte	102,15,56,221,212
6588	movdqa	%xmm0,%xmm3
6589	pslldq	$4,%xmm0
6590	pxor	%xmm0,%xmm3
6591	pslldq	$4,%xmm0
6592	pxor	%xmm0,%xmm3
6593	pslldq	$4,%xmm0
6594	pxor	%xmm3,%xmm0
6595	pslld	$1,%xmm4
6596	pxor	%xmm2,%xmm0
6597	movdqu	%xmm0,(%edx)
6598	decl	%ecx
6599	jz	.L133done_key256
6600	pshufd	$255,%xmm0,%xmm2
6601	pxor	%xmm3,%xmm3
6602.byte	102,15,56,221,211
6603	movdqa	%xmm1,%xmm3
6604	pslldq	$4,%xmm1
6605	pxor	%xmm1,%xmm3
6606	pslldq	$4,%xmm1
6607	pxor	%xmm1,%xmm3
6608	pslldq	$4,%xmm1
6609	pxor	%xmm3,%xmm1
6610	pxor	%xmm1,%xmm2
6611	movdqu	%xmm2,16(%edx)
6612	leal	32(%edx),%edx
6613	movdqa	%xmm2,%xmm1
6614	jmp	.L132loop_key256
6615.L133done_key256:
6616	movl	$13,%ecx
6617	movl	%ecx,16(%edx)
/* Common success exit: scrub key material from XMM regs, return 0. */
6618.L120good_key:
6619	pxor	%xmm0,%xmm0
6620	pxor	%xmm1,%xmm1
6621	pxor	%xmm2,%xmm2
6622	pxor	%xmm3,%xmm3
6623	pxor	%xmm4,%xmm4
6624	pxor	%xmm5,%xmm5
6625	xorl	%eax,%eax
6626	popl	%ebx
6627	popl	%ebp
6628	ret
6629.align	4
6630.L111bad_pointer:
6631	movl	$-1,%eax
6632	popl	%ebx
6633	popl	%ebp
6634	ret
6635.align	4
6636.L115bad_keybits:
6637	pxor	%xmm0,%xmm0
6638	movl	$-2,%eax
6639	popl	%ebx
6640	popl	%ebp
6641	ret
6642.size	_aesni_set_encrypt_key,.-_aesni_set_encrypt_key
/*
 * aesni_set_encrypt_key — public cdecl wrapper: loads (userKey, bits, key)
 * from the stack into %eax/%ecx/%edx and tail-delegates to the internal
 * register-based _aesni_set_encrypt_key.  Returns its status in %eax.
 */
6643.globl	aesni_set_encrypt_key
6644.type	aesni_set_encrypt_key,@function
6645.align	16
6646aesni_set_encrypt_key:
6647.L_aesni_set_encrypt_key_begin:
6648	#ifdef __CET__
6649
6650.byte	243,15,30,251
6651	#endif
6652
6653	movl	4(%esp),%eax
6654	movl	8(%esp),%ecx
6655	movl	12(%esp),%edx
6656	call	_aesni_set_encrypt_key
6657	ret
6658.size	aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin
/*
 * aesni_set_decrypt_key(userKey, bits, key) — build a decryption schedule:
 * expand the encryption schedule, then reverse the round-key order while
 * applying AESIMC (InvMixColumns, the .byte 102,15,56,219 sequences) to all
 * but the first and last round keys.  Returns 0 on success, else the
 * error code from _aesni_set_encrypt_key.
 */
6659.globl	aesni_set_decrypt_key
6660.type	aesni_set_decrypt_key,@function
6661.align	16
6662aesni_set_decrypt_key:
6663.L_aesni_set_decrypt_key_begin:
6664	#ifdef __CET__
6665
6666.byte	243,15,30,251
6667	#endif
6668
6669	movl	4(%esp),%eax
6670	movl	8(%esp),%ecx
6671	movl	12(%esp),%edx
6672	call	_aesni_set_encrypt_key
6673	movl	12(%esp),%edx
6674	shll	$4,%ecx
6675	testl	%eax,%eax
6676	jnz	.L134dec_key_ret
/* Swap first and last round keys (no IMC on these two). */
6677	leal	16(%edx,%ecx,1),%eax
6678	movups	(%edx),%xmm0
6679	movups	(%eax),%xmm1
6680	movups	%xmm0,(%eax)
6681	movups	%xmm1,(%edx)
6682	leal	16(%edx),%edx
6683	leal	-16(%eax),%eax
/* Walk inward from both ends, swapping and applying AESIMC to each key. */
6684.L135dec_key_inverse:
6685	movups	(%edx),%xmm0
6686	movups	(%eax),%xmm1
6687.byte	102,15,56,219,192
6688.byte	102,15,56,219,201
6689	leal	16(%edx),%edx
6690	leal	-16(%eax),%eax
6691	movups	%xmm0,16(%eax)
6692	movups	%xmm1,-16(%edx)
6693	cmpl	%edx,%eax
6694	ja	.L135dec_key_inverse
/* Odd middle key: AESIMC in place, then scrub registers and return 0. */
6695	movups	(%edx),%xmm0
6696.byte	102,15,56,219,192
6697	movups	%xmm0,(%edx)
6698	pxor	%xmm0,%xmm0
6699	pxor	%xmm1,%xmm1
6700	xorl	%eax,%eax
6701.L134dec_key_ret:
6702	ret
6703.size	aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin
/*
 * Constants for the "_alt" key-schedule paths: two PSHUFB byte-rotation
 * masks followed by the initial rcon values {1,...} and {27,...}, then the
 * build-attribution string and the OPENSSL_ia32cap_P capability vector.
 */
6704.align	64
6705.Lkey_const:
6706.long	202313229,202313229,202313229,202313229
6707.long	67569157,67569157,67569157,67569157
6708.long	1,1,1,1
6709.long	27,27,27,27
6710.byte	65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69
6711.byte	83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
6712.byte	32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
6713.byte	115,108,46,111,114,103,62,0
6714.comm	OPENSSL_ia32cap_P,16,4
6715
	/*
	 * GNU property note marking this object as CET-compatible
	 * (GNU_PROPERTY_X86_FEATURE_1_AND = IBT|SHSTK), matching the
	 * endbr32 (.byte 243,15,30,251) stubs at each entry point.
	 */
	.section ".note.gnu.property", "a"
	.p2align 2
	.long 1f - 0f
	.long 4f - 1f
	.long 5
0:
	.asciz "GNU"
1:
	.p2align 2
	.long 0xc0000002
	.long 3f - 2f
2:
	.long 3
3:
	.p2align 2
4:
6732#endif
6733