#include <machine/asm.h>
/*
 * GHASH for i386: 4-bit table-driven, MMX and PCLMULQDQ implementations.
 * CRYPTOGAMS-style generated code (see the banner string at the end of
 * the file); comments added for readability.
 */
.text
.globl	gcm_gmult_4bit_x86
.type	gcm_gmult_4bit_x86,@function
.align	16
/*
 * gcm_gmult_4bit_x86: multiply Xi in place by H using the 4-bit method.
 * Arguments (relative to %esp after the pushes and frame below):
 * 104(%esp) = Xi, 108(%esp) = Htable.
 * Frame: 0..15(%esp) = copy of Xi, 16..79(%esp) = rem_4bit table.
 */
gcm_gmult_4bit_x86:
.L_gcm_gmult_4bit_x86_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	subl	$84,%esp
	movl	104(%esp),%edi
	movl	108(%esp),%esi
	movl	(%edi),%ebp
	movl	4(%edi),%edx
	movl	8(%edi),%ecx
	movl	12(%edi),%ebx
	/* on-stack copy of the rem_4bit reduction table */
	movl	$0,16(%esp)
	movl	$471859200,20(%esp)
	movl	$943718400,24(%esp)
	movl	$610271232,28(%esp)
	movl	$1887436800,32(%esp)
	movl	$1822425088,36(%esp)
	movl	$1220542464,40(%esp)
	movl	$1423966208,44(%esp)
	movl	$3774873600,48(%esp)
	movl	$4246732800,52(%esp)
	movl	$3644850176,56(%esp)
	movl	$3311403008,60(%esp)
	movl	$2441084928,64(%esp)
	movl	$2376073216,68(%esp)
	movl	$2847932416,72(%esp)
	movl	$3051356160,76(%esp)
	/* copy Xi to the stack; seed from the low nibble of Xi[15] */
	movl	%ebp,(%esp)
	movl	%edx,4(%esp)
	movl	%ecx,8(%esp)
	movl	%ebx,12(%esp)
	shrl	$20,%ebx
	andl	$240,%ebx
	movl	4(%esi,%ebx,1),%ebp
	movl	(%esi,%ebx,1),%edx
	movl	12(%esi,%ebx,1),%ecx
	movl	8(%esi,%ebx,1),%ebx
	xorl	%eax,%eax
	movl	$15,%edi
	jmp	.L000x86_loop
.align	16
/* walk the 16 bytes of Xi from the end, two 4-bit digits per iteration */
.L000x86_loop:
	movb	%bl,%al
	shrdl	$4,%ecx,%ebx
	andb	$15,%al
	shrdl	$4,%edx,%ecx
	shrdl	$4,%ebp,%edx
	shrl	$4,%ebp
	xorl	16(%esp,%eax,4),%ebp
	movb	(%esp,%edi,1),%al
	andb	$240,%al
	xorl	8(%esi,%eax,1),%ebx
	xorl	12(%esi,%eax,1),%ecx
	xorl	(%esi,%eax,1),%edx
	xorl	4(%esi,%eax,1),%ebp
	decl	%edi
	js	.L001x86_break
	movb	%bl,%al
	shrdl	$4,%ecx,%ebx
	andb	$15,%al
	shrdl	$4,%edx,%ecx
	shrdl	$4,%ebp,%edx
	shrl	$4,%ebp
	xorl	16(%esp,%eax,4),%ebp
	movb	(%esp,%edi,1),%al
	shlb	$4,%al
	xorl	8(%esi,%eax,1),%ebx
	xorl	12(%esi,%eax,1),%ecx
	xorl	(%esi,%eax,1),%edx
	xorl	4(%esi,%eax,1),%ebp
	jmp	.L000x86_loop
.align	16
.L001x86_break:
	/* byte-swap the 128-bit result and write it back to Xi */
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	bswap	%ebp
	movl	104(%esp),%edi
	movl	%ebx,12(%edi)
	movl	%ecx,8(%edi)
	movl	%edx,4(%edi)
	movl	%ebp,(%edi)
	addl	$84,%esp
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	gcm_gmult_4bit_x86,.-.L_gcm_gmult_4bit_x86_begin
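
/*
 * Call-site sketch from C. The prototype is an assumption, based on the
 * argument offsets above and on OpenSSL's internal GHASH interface
 * (u64/u128 as used in crypto/modes); Xi is the running hash value and
 * Htable holds the 16 precomputed nibble multiples of H:
 *
 *	void gcm_gmult_4bit_x86(u64 Xi[2], const u128 Htable[16]);
 *
 *	gcm_gmult_4bit_x86(ctx_Xi, ctx_Htable);   // Xi *= H, in place
 */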
.globl	gcm_ghash_4bit_x86
.type	gcm_ghash_4bit_x86,@function
.align	16
/*
 * gcm_ghash_4bit_x86: hash a whole buffer into Xi.
 * Arguments: 104(%esp) = Xi, 108(%esp) = Htable, 112(%esp) = inp,
 * 116(%esp) = len; 116(%esp) is then reused as the end-of-input pointer.
 */
gcm_ghash_4bit_x86:
.L_gcm_ghash_4bit_x86_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	subl	$84,%esp
	movl	104(%esp),%ebx
	movl	108(%esp),%esi
	movl	112(%esp),%edi
	movl	116(%esp),%ecx
	addl	%edi,%ecx
	movl	%ecx,116(%esp)
	movl	(%ebx),%ebp
	movl	4(%ebx),%edx
	movl	8(%ebx),%ecx
	movl	12(%ebx),%ebx
	/* on-stack copy of the rem_4bit reduction table, as above */
	movl	$0,16(%esp)
	movl	$471859200,20(%esp)
	movl	$943718400,24(%esp)
	movl	$610271232,28(%esp)
	movl	$1887436800,32(%esp)
	movl	$1822425088,36(%esp)
	movl	$1220542464,40(%esp)
	movl	$1423966208,44(%esp)
	movl	$3774873600,48(%esp)
	movl	$4246732800,52(%esp)
	movl	$3644850176,56(%esp)
	movl	$3311403008,60(%esp)
	movl	$2441084928,64(%esp)
	movl	$2376073216,68(%esp)
	movl	$2847932416,72(%esp)
	movl	$3051356160,76(%esp)
.align	16
/* per-block loop: Xi ^= next 16-byte input block, then one 4-bit multiply */
.L002x86_outer_loop:
	xorl	12(%edi),%ebx
	xorl	8(%edi),%ecx
	xorl	4(%edi),%edx
	xorl	(%edi),%ebp
	movl	%ebx,12(%esp)
	movl	%ecx,8(%esp)
	movl	%edx,4(%esp)
	movl	%ebp,(%esp)
	shrl	$20,%ebx
	andl	$240,%ebx
	movl	4(%esi,%ebx,1),%ebp
	movl	(%esi,%ebx,1),%edx
	movl	12(%esi,%ebx,1),%ecx
	movl	8(%esi,%ebx,1),%ebx
	xorl	%eax,%eax
	movl	$15,%edi
	jmp	.L003x86_loop
.align	16
.L003x86_loop:
	movb	%bl,%al
	shrdl	$4,%ecx,%ebx
	andb	$15,%al
	shrdl	$4,%edx,%ecx
	shrdl	$4,%ebp,%edx
	shrl	$4,%ebp
	xorl	16(%esp,%eax,4),%ebp
	movb	(%esp,%edi,1),%al
	andb	$240,%al
	xorl	8(%esi,%eax,1),%ebx
	xorl	12(%esi,%eax,1),%ecx
	xorl	(%esi,%eax,1),%edx
	xorl	4(%esi,%eax,1),%ebp
	decl	%edi
	js	.L004x86_break
	movb	%bl,%al
	shrdl	$4,%ecx,%ebx
	andb	$15,%al
	shrdl	$4,%edx,%ecx
	shrdl	$4,%ebp,%edx
	shrl	$4,%ebp
	xorl	16(%esp,%eax,4),%ebp
	movb	(%esp,%edi,1),%al
	shlb	$4,%al
	xorl	8(%esi,%eax,1),%ebx
	xorl	12(%esi,%eax,1),%ecx
	xorl	(%esi,%eax,1),%edx
	xorl	4(%esi,%eax,1),%ebp
	jmp	.L003x86_loop
.align	16
.L004x86_break:
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	bswap	%ebp
	/* advance the input pointer; loop until it reaches inp+len */
	movl	112(%esp),%edi
	leal	16(%edi),%edi
	cmpl	116(%esp),%edi
	movl	%edi,112(%esp)
	jb	.L002x86_outer_loop
	movl	104(%esp),%edi
	movl	%ebx,12(%edi)
	movl	%ecx,8(%edi)
	movl	%edx,4(%edi)
	movl	%ebp,(%edi)
	addl	$84,%esp
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	gcm_ghash_4bit_x86,.-.L_gcm_ghash_4bit_x86_begin
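
/*
 * Presumed C view (assumption, by analogy with gcm_gmult_4bit_x86 above;
 * len is in bytes and a multiple of 16 -- the outer loop runs until the
 * input pointer reaches inp+len):
 *
 *	void gcm_ghash_4bit_x86(u64 Xi[2], const u128 Htable[16],
 *				const u8 *inp, size_t len);
 */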
.globl	gcm_gmult_4bit_mmx
.type	gcm_gmult_4bit_mmx,@function
.align	16
/*
 * gcm_gmult_4bit_mmx: MMX flavour of gcm_gmult_4bit_x86.
 * Arguments: 20(%esp) = Xi, 24(%esp) = Htable.
 */
gcm_gmult_4bit_mmx:
.L_gcm_gmult_4bit_mmx_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%edi
	movl	24(%esp),%esi
	/* PIC: pick up the address of .Lrem_4bit via call/pop */
	call	.L005pic_point
.L005pic_point:
	popl	%eax
	leal	.Lrem_4bit-.L005pic_point(%eax),%eax
	movzbl	15(%edi),%ebx
	xorl	%ecx,%ecx
	movl	%ebx,%edx
	movb	%dl,%cl
	movl	$14,%ebp
	shlb	$4,%cl
	andl	$240,%edx
	movq	8(%esi,%ecx,1),%mm0
	movq	(%esi,%ecx,1),%mm1
	movd	%mm0,%ebx
	jmp	.L006mmx_loop
.align	16
/* one nibble per half-iteration; %mm1:%mm0 accumulate the product */
.L006mmx_loop:
	psrlq	$4,%mm0
	andl	$15,%ebx
	movq	%mm1,%mm2
	psrlq	$4,%mm1
	pxor	8(%esi,%edx,1),%mm0
	movb	(%edi,%ebp,1),%cl
	psllq	$60,%mm2
	pxor	(%eax,%ebx,8),%mm1
	decl	%ebp
	movd	%mm0,%ebx
	pxor	(%esi,%edx,1),%mm1
	movl	%ecx,%edx
	pxor	%mm2,%mm0
	js	.L007mmx_break
	shlb	$4,%cl
	andl	$15,%ebx
	psrlq	$4,%mm0
	andl	$240,%edx
	movq	%mm1,%mm2
	psrlq	$4,%mm1
	pxor	8(%esi,%ecx,1),%mm0
	psllq	$60,%mm2
	pxor	(%eax,%ebx,8),%mm1
	movd	%mm0,%ebx
	pxor	(%esi,%ecx,1),%mm1
	pxor	%mm2,%mm0
	jmp	.L006mmx_loop
.align	16
/* final two nibbles, then byte-swap and store the result */
.L007mmx_break:
	shlb	$4,%cl
	andl	$15,%ebx
	psrlq	$4,%mm0
	andl	$240,%edx
	movq	%mm1,%mm2
	psrlq	$4,%mm1
	pxor	8(%esi,%ecx,1),%mm0
	psllq	$60,%mm2
	pxor	(%eax,%ebx,8),%mm1
	movd	%mm0,%ebx
	pxor	(%esi,%ecx,1),%mm1
	pxor	%mm2,%mm0
	psrlq	$4,%mm0
	andl	$15,%ebx
	movq	%mm1,%mm2
	psrlq	$4,%mm1
	pxor	8(%esi,%edx,1),%mm0
	psllq	$60,%mm2
	pxor	(%eax,%ebx,8),%mm1
	movd	%mm0,%ebx
	pxor	(%esi,%edx,1),%mm1
	pxor	%mm2,%mm0
	psrlq	$32,%mm0
	movd	%mm1,%edx
	psrlq	$32,%mm1
	movd	%mm0,%ecx
	movd	%mm1,%ebp
	bswap	%ebx
	bswap	%edx
	bswap	%ecx
	bswap	%ebp
	emms
	movl	%ebx,12(%edi)
	movl	%edx,4(%edi)
	movl	%ecx,8(%edi)
	movl	%ebp,(%edi)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	gcm_gmult_4bit_mmx,.-.L_gcm_gmult_4bit_mmx_begin
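
/*
 * Presumed C prototype (assumption): same contract as gcm_gmult_4bit_x86,
 * selected at run time on MMX-capable CPUs:
 *
 *	void gcm_gmult_4bit_mmx(u64 Xi[2], const u128 Htable[16]);
 */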
.globl	gcm_ghash_4bit_mmx
.type	gcm_ghash_4bit_mmx,@function
.align	16
/*
 * gcm_ghash_4bit_mmx: MMX/SSE GHASH over a whole buffer, 8 bits at a time
 * against the rem_8bit table.
 * Arguments: 20(%esp) = Xi, 24(%esp) = Htable, 28(%esp) = inp, 32(%esp) = len.
 */
gcm_ghash_4bit_mmx:
.L_gcm_ghash_4bit_mmx_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%eax
	movl	24(%esp),%ebx
	movl	28(%esp),%ecx
	movl	32(%esp),%edx
	movl	%esp,%ebp
	/* PIC: pick up the address of .Lrem_8bit via call/pop */
	call	.L008pic_point
.L008pic_point:
	popl	%esi
	leal	.Lrem_8bit-.L008pic_point(%esi),%esi
	subl	$544,%esp
	andl	$-64,%esp
	subl	$16,%esp
	/* frame: 544(%esp)=Xi, 552(%esp)=end of input, 556(%esp)=saved %esp */
	addl	%ecx,%edx
	movl	%eax,544(%esp)
	movl	%edx,552(%esp)
	movl	%ebp,556(%esp)
	addl	$128,%ebx
	/* unrolled precompute: spread Htable and a 4-bit-shifted Htable into
	   stack tables at 16/144(%esp) and 272/400(%esp); bytes 0..15(%esp)
	   cache each entry's reduction nibble, shifted left by 4 */
	leal	144(%esp),%edi
	leal	400(%esp),%ebp
	movl	-120(%ebx),%edx
	movq	-120(%ebx),%mm0
	movq	-128(%ebx),%mm3
	shll	$4,%edx
	movb	%dl,(%esp)
	movl	-104(%ebx),%edx
	movq	-104(%ebx),%mm2
	movq	-112(%ebx),%mm5
	movq	%mm0,-128(%edi)
	psrlq	$4,%mm0
	movq	%mm3,(%edi)
	movq	%mm3,%mm7
	psrlq	$4,%mm3
	shll	$4,%edx
	movb	%dl,1(%esp)
	movl	-88(%ebx),%edx
	movq	-88(%ebx),%mm1
	psllq	$60,%mm7
	movq	-96(%ebx),%mm4
	por	%mm7,%mm0
	movq	%mm2,-120(%edi)
	psrlq	$4,%mm2
	movq	%mm5,8(%edi)
	movq	%mm5,%mm6
	movq	%mm0,-128(%ebp)
	psrlq	$4,%mm5
	movq	%mm3,(%ebp)
	shll	$4,%edx
	movb	%dl,2(%esp)
	movl	-72(%ebx),%edx
	movq	-72(%ebx),%mm0
	psllq	$60,%mm6
	movq	-80(%ebx),%mm3
	por	%mm6,%mm2
	movq	%mm1,-112(%edi)
	psrlq	$4,%mm1
	movq	%mm4,16(%edi)
	movq	%mm4,%mm7
	movq	%mm2,-120(%ebp)
	psrlq	$4,%mm4
	movq	%mm5,8(%ebp)
	shll	$4,%edx
	movb	%dl,3(%esp)
	movl	-56(%ebx),%edx
	movq	-56(%ebx),%mm2
	psllq	$60,%mm7
	movq	-64(%ebx),%mm5
	por	%mm7,%mm1
	movq	%mm0,-104(%edi)
	psrlq	$4,%mm0
	movq	%mm3,24(%edi)
	movq	%mm3,%mm6
	movq	%mm1,-112(%ebp)
	psrlq	$4,%mm3
	movq	%mm4,16(%ebp)
	shll	$4,%edx
	movb	%dl,4(%esp)
	movl	-40(%ebx),%edx
	movq	-40(%ebx),%mm1
	psllq	$60,%mm6
	movq	-48(%ebx),%mm4
	por	%mm6,%mm0
	movq	%mm2,-96(%edi)
	psrlq	$4,%mm2
	movq	%mm5,32(%edi)
	movq	%mm5,%mm7
	movq	%mm0,-104(%ebp)
	psrlq	$4,%mm5
	movq	%mm3,24(%ebp)
	shll	$4,%edx
	movb	%dl,5(%esp)
	movl	-24(%ebx),%edx
	movq	-24(%ebx),%mm0
	psllq	$60,%mm7
	movq	-32(%ebx),%mm3
	por	%mm7,%mm2
	movq	%mm1,-88(%edi)
	psrlq	$4,%mm1
	movq	%mm4,40(%edi)
	movq	%mm4,%mm6
	movq	%mm2,-96(%ebp)
	psrlq	$4,%mm4
	movq	%mm5,32(%ebp)
	shll	$4,%edx
	movb	%dl,6(%esp)
	movl	-8(%ebx),%edx
	movq	-8(%ebx),%mm2
	psllq	$60,%mm6
	movq	-16(%ebx),%mm5
	por	%mm6,%mm1
	movq	%mm0,-80(%edi)
	psrlq	$4,%mm0
	movq	%mm3,48(%edi)
	movq	%mm3,%mm7
	movq	%mm1,-88(%ebp)
	psrlq	$4,%mm3
	movq	%mm4,40(%ebp)
	shll	$4,%edx
	movb	%dl,7(%esp)
	movl	8(%ebx),%edx
	movq	8(%ebx),%mm1
	psllq	$60,%mm7
	movq	(%ebx),%mm4
	por	%mm7,%mm0
	movq	%mm2,-72(%edi)
	psrlq	$4,%mm2
	movq	%mm5,56(%edi)
	movq	%mm5,%mm6
	movq	%mm0,-80(%ebp)
	psrlq	$4,%mm5
	movq	%mm3,48(%ebp)
	shll	$4,%edx
	movb	%dl,8(%esp)
	movl	24(%ebx),%edx
	movq	24(%ebx),%mm0
	psllq	$60,%mm6
	movq	16(%ebx),%mm3
	por	%mm6,%mm2
	movq	%mm1,-64(%edi)
	psrlq	$4,%mm1
	movq	%mm4,64(%edi)
	movq	%mm4,%mm7
	movq	%mm2,-72(%ebp)
	psrlq	$4,%mm4
	movq	%mm5,56(%ebp)
	shll	$4,%edx
	movb	%dl,9(%esp)
	movl	40(%ebx),%edx
	movq	40(%ebx),%mm2
	psllq	$60,%mm7
	movq	32(%ebx),%mm5
	por	%mm7,%mm1
	movq	%mm0,-56(%edi)
	psrlq	$4,%mm0
	movq	%mm3,72(%edi)
	movq	%mm3,%mm6
	movq	%mm1,-64(%ebp)
	psrlq	$4,%mm3
	movq	%mm4,64(%ebp)
	shll	$4,%edx
	movb	%dl,10(%esp)
	movl	56(%ebx),%edx
	movq	56(%ebx),%mm1
	psllq	$60,%mm6
	movq	48(%ebx),%mm4
	por	%mm6,%mm0
	movq	%mm2,-48(%edi)
	psrlq	$4,%mm2
	movq	%mm5,80(%edi)
	movq	%mm5,%mm7
	movq	%mm0,-56(%ebp)
	psrlq	$4,%mm5
	movq	%mm3,72(%ebp)
	shll	$4,%edx
	movb	%dl,11(%esp)
	movl	72(%ebx),%edx
	movq	72(%ebx),%mm0
	psllq	$60,%mm7
	movq	64(%ebx),%mm3
	por	%mm7,%mm2
	movq	%mm1,-40(%edi)
	psrlq	$4,%mm1
	movq	%mm4,88(%edi)
	movq	%mm4,%mm6
	movq	%mm2,-48(%ebp)
	psrlq	$4,%mm4
	movq	%mm5,80(%ebp)
	shll	$4,%edx
	movb	%dl,12(%esp)
	movl	88(%ebx),%edx
	movq	88(%ebx),%mm2
	psllq	$60,%mm6
	movq	80(%ebx),%mm5
	por	%mm6,%mm1
	movq	%mm0,-32(%edi)
	psrlq	$4,%mm0
	movq	%mm3,96(%edi)
	movq	%mm3,%mm7
	movq	%mm1,-40(%ebp)
	psrlq	$4,%mm3
	movq	%mm4,88(%ebp)
	shll	$4,%edx
	movb	%dl,13(%esp)
	movl	104(%ebx),%edx
	movq	104(%ebx),%mm1
	psllq	$60,%mm7
	movq	96(%ebx),%mm4
	por	%mm7,%mm0
	movq	%mm2,-24(%edi)
	psrlq	$4,%mm2
	movq	%mm5,104(%edi)
	movq	%mm5,%mm6
	movq	%mm0,-32(%ebp)
	psrlq	$4,%mm5
	movq	%mm3,96(%ebp)
	shll	$4,%edx
	movb	%dl,14(%esp)
	movl	120(%ebx),%edx
	movq	120(%ebx),%mm0
	psllq	$60,%mm6
	movq	112(%ebx),%mm3
	por	%mm6,%mm2
	movq	%mm1,-16(%edi)
	psrlq	$4,%mm1
	movq	%mm4,112(%edi)
	movq	%mm4,%mm7
	movq	%mm2,-24(%ebp)
	psrlq	$4,%mm4
	movq	%mm5,104(%ebp)
	shll	$4,%edx
	movb	%dl,15(%esp)
	psllq	$60,%mm7
	por	%mm7,%mm1
	movq	%mm0,-8(%edi)
	psrlq	$4,%mm0
	movq	%mm3,120(%edi)
	movq	%mm3,%mm6
	movq	%mm1,-16(%ebp)
	psrlq	$4,%mm3
	movq	%mm4,112(%ebp)
	psllq	$60,%mm6
	por	%mm6,%mm0
	movq	%mm0,-8(%ebp)
	movq	%mm3,120(%ebp)
	movq	(%eax),%mm6
	movl	8(%eax),%ebx
	movl	12(%eax),%edx
.align	16
/* per-block loop: Xi ^= input block, then multiply 8 bits at a time,
   folding the spilled byte through the rem_8bit table */
.L009outer:
	xorl	12(%ecx),%edx
	xorl	8(%ecx),%ebx
	pxor	(%ecx),%mm6
	leal	16(%ecx),%ecx
	movl	%ebx,536(%esp)
	movq	%mm6,528(%esp)
	movl	%ecx,548(%esp)
	xorl	%eax,%eax
	roll	$8,%edx
	movb	%dl,%al
	movl	%eax,%ebp
	andb	$15,%al
	shrl	$4,%ebp
	pxor	%mm0,%mm0
	roll	$8,%edx
	pxor	%mm1,%mm1
	pxor	%mm2,%mm2
	movq	16(%esp,%eax,8),%mm7
	movq	144(%esp,%eax,8),%mm6
	movb	%dl,%al
	movd	%mm7,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	shrl	$4,%edi
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm2
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movl	536(%esp),%edx
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm2,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm1
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm1,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm0
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm0,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm2
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm2,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm1
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movl	532(%esp),%edx
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm1,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm0
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm0,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm2
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm2,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm1
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm1,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm0
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movl	528(%esp),%edx
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm0,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm2
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm2,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm1
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm1,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm0
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm0,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm2
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movl	524(%esp),%edx
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm2,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm1
	pxor	16(%esp,%eax,8),%mm7
	pxor	144(%esp,%eax,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	movzbl	%bl,%ebx
	pxor	%mm2,%mm2
	psllq	$4,%mm1
	movd	%mm7,%ecx
	psrlq	$4,%mm7
	movq	%mm6,%mm3
	psrlq	$4,%mm6
	shll	$4,%ecx
	pxor	16(%esp,%edi,8),%mm7
	psllq	$60,%mm3
	movzbl	%cl,%ecx
	pxor	%mm3,%mm7
	pxor	144(%esp,%edi,8),%mm6
	pinsrw	$2,(%esi,%ebx,2),%mm0
	pxor	%mm1,%mm6
	movd	%mm7,%edx
	pinsrw	$3,(%esi,%ecx,2),%mm2
	psllq	$12,%mm0
	pxor	%mm0,%mm6
	psrlq	$32,%mm7
	pxor	%mm2,%mm6
	movl	548(%esp),%ecx
	movd	%mm7,%ebx
	/* byte-swap %mm6: psllw/psrlw/por plus pshufw emulate a 64-bit bswap */
	movq	%mm6,%mm3
	psllw	$8,%mm6
	psrlw	$8,%mm3
	por	%mm3,%mm6
	bswap	%edx
	pshufw	$27,%mm6,%mm6
	bswap	%ebx
	cmpl	552(%esp),%ecx
	jne	.L009outer
	/* done: store Xi, restore the saved %esp, clear MMX state */
	movl	544(%esp),%eax
	movl	%edx,12(%eax)
	movl	%ebx,8(%eax)
	movq	%mm6,(%eax)
	movl	556(%esp),%esp
	emms
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	gcm_ghash_4bit_mmx,.-.L_gcm_ghash_4bit_mmx_begin
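
/*
 * Presumed C prototype (assumption): same contract as gcm_ghash_4bit_x86.
 * Note the large 64-byte-aligned stack frame (560+ bytes) built above for
 * the precomputed tables:
 *
 *	void gcm_ghash_4bit_mmx(u64 Xi[2], const u128 Htable[16],
 *				const u8 *inp, size_t len);
 */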
.globl	gcm_init_clmul
.type	gcm_init_clmul,@function
.align	16
/*
 * gcm_init_clmul: precompute the PCLMULQDQ key schedule.
 * Arguments: 4(%esp) = Htable, 8(%esp) = H (the raw hash key).
 * Stores H<<1 (mod the GCM polynomial) at Htable[0] and its square at
 * Htable[1].
 */
gcm_init_clmul:
.L_gcm_init_clmul_begin:
	movl	4(%esp),%edx
	movl	8(%esp),%eax
	call	.L010pic
.L010pic:
	popl	%ecx
	leal	.Lbswap-.L010pic(%ecx),%ecx
	/* H <<= 1 (mod P), branchlessly: pcmpgtd extracts the carry mask,
	   pand 16(%ecx) applies the 0xc2... reduction constant */
	movdqu	(%eax),%xmm2
	pshufd	$78,%xmm2,%xmm2
	pshufd	$255,%xmm2,%xmm4
	movdqa	%xmm2,%xmm3
	psllq	$1,%xmm2
	pxor	%xmm5,%xmm5
	psrlq	$63,%xmm3
	pcmpgtd	%xmm4,%xmm5
	pslldq	$8,%xmm3
	por	%xmm3,%xmm2
	pand	16(%ecx),%xmm5
	pxor	%xmm5,%xmm2
	/* square H (Karatsuba: three carry-less multiplies, encoded as
	   .byte sequences for assemblers lacking the pclmulqdq mnemonic) */
	movdqa	%xmm2,%xmm0
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pshufd	$78,%xmm2,%xmm4
	pxor	%xmm0,%xmm3
	pxor	%xmm2,%xmm4
.byte	102,15,58,68,194,0	/* pclmulqdq $0x00,%xmm2,%xmm0 */
.byte	102,15,58,68,202,17	/* pclmulqdq $0x11,%xmm2,%xmm1 */
.byte	102,15,58,68,220,0	/* pclmulqdq $0x00,%xmm4,%xmm3 */
	xorps	%xmm0,%xmm3
	xorps	%xmm1,%xmm3
	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0
	/* reduce the 256-bit product modulo the GCM polynomial */
	movdqa	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$5,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm4
	pslldq	$8,%xmm0
	psrldq	$8,%xmm4
	pxor	%xmm3,%xmm0
	pxor	%xmm4,%xmm1
	movdqa	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm0
	pxor	%xmm1,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm0
	/* Htable[0] = H<<1, Htable[1] = (H<<1)^2 */
	movdqu	%xmm2,(%edx)
	movdqu	%xmm0,16(%edx)
	ret
.size	gcm_init_clmul,.-.L_gcm_init_clmul_begin
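
/*
 * Presumed C prototype (assumption, from the argument offsets): takes the
 * raw hash key and fills the two Htable slots used by the CLMUL paths
 * below. Requires PCLMULQDQ (and SSSE3 pshufb in the gmult/ghash routines):
 *
 *	void gcm_init_clmul(u128 Htable[16], const u64 Xi[2]);
 */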
.globl	gcm_gmult_clmul
.type	gcm_gmult_clmul,@function
.align	16
/*
 * gcm_gmult_clmul: single-block PCLMULQDQ multiply, Xi <- Xi * H.
 * Arguments: 4(%esp) = Xi, 8(%esp) = Htable.
 */
gcm_gmult_clmul:
.L_gcm_gmult_clmul_begin:
	movl	4(%esp),%eax
	movl	8(%esp),%edx
	call	.L011pic
.L011pic:
	popl	%ecx
	leal	.Lbswap-.L011pic(%ecx),%ecx
	movdqu	(%eax),%xmm0
	movdqa	(%ecx),%xmm5
	movups	(%edx),%xmm2
.byte	102,15,56,0,197		/* pshufb %xmm5,%xmm0 */
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pshufd	$78,%xmm2,%xmm4
	pxor	%xmm0,%xmm3
	pxor	%xmm2,%xmm4
.byte	102,15,58,68,194,0	/* pclmulqdq $0x00,%xmm2,%xmm0 */
.byte	102,15,58,68,202,17	/* pclmulqdq $0x11,%xmm2,%xmm1 */
.byte	102,15,58,68,220,0	/* pclmulqdq $0x00,%xmm4,%xmm3 */
	xorps	%xmm0,%xmm3
	xorps	%xmm1,%xmm3
	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0
	/* reduce modulo the GCM polynomial */
	movdqa	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$5,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm4
	pslldq	$8,%xmm0
	psrldq	$8,%xmm4
	pxor	%xmm3,%xmm0
	pxor	%xmm4,%xmm1
	movdqa	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm0
	pxor	%xmm1,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm0
.byte	102,15,56,0,197		/* pshufb %xmm5,%xmm0 */
	movdqu	%xmm0,(%eax)
	ret
.size	gcm_gmult_clmul,.-.L_gcm_gmult_clmul_begin
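
/*
 * Presumed C prototype (assumption): one-block CLMUL multiply, Xi *= H:
 *
 *	void gcm_gmult_clmul(u64 Xi[2], const u128 Htable[16]);
 */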
.globl	gcm_ghash_clmul
.type	gcm_ghash_clmul,@function
.align	16
/*
 * gcm_ghash_clmul: PCLMULQDQ GHASH over a buffer.
 * Arguments: 20(%esp) = Xi, 24(%esp) = Htable, 28(%esp) = inp, 32(%esp) = len.
 * Processes two blocks per iteration using H and H^2 from gcm_init_clmul.
 */
gcm_ghash_clmul:
.L_gcm_ghash_clmul_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%eax
	movl	24(%esp),%edx
	movl	28(%esp),%esi
	movl	32(%esp),%ebx
	call	.L012pic
.L012pic:
	popl	%ecx
	leal	.Lbswap-.L012pic(%ecx),%ecx
	movdqu	(%eax),%xmm0
	movdqa	(%ecx),%xmm5
	movdqu	(%edx),%xmm2
.byte	102,15,56,0,197		/* pshufb %xmm5,%xmm0 */
	subl	$16,%ebx
	jz	.L013odd_tail
	movdqu	(%esi),%xmm3
	movdqu	16(%esi),%xmm6
.byte	102,15,56,0,221		/* pshufb %xmm5,%xmm3 */
.byte	102,15,56,0,245		/* pshufb %xmm5,%xmm6 */
	pxor	%xmm3,%xmm0
	movdqa	%xmm6,%xmm7
	pshufd	$78,%xmm6,%xmm3
	pshufd	$78,%xmm2,%xmm4
	pxor	%xmm6,%xmm3
	pxor	%xmm2,%xmm4
.byte	102,15,58,68,242,0	/* pclmulqdq $0x00,%xmm2,%xmm6 */
.byte	102,15,58,68,250,17	/* pclmulqdq $0x11,%xmm2,%xmm7 */
.byte	102,15,58,68,220,0	/* pclmulqdq $0x00,%xmm4,%xmm3 */
	xorps	%xmm6,%xmm3
	xorps	%xmm7,%xmm3
	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm7
	pxor	%xmm4,%xmm6
	movups	16(%edx),%xmm2
	leal	32(%esi),%esi
	subl	$32,%ebx
	jbe	.L014even_tail
/* main loop: two blocks per iteration (by H^2 and H), the reduction of
   the previous result interleaved with the next Karatsuba multiply */
.L015mod_loop:
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pshufd	$78,%xmm2,%xmm4
	pxor	%xmm0,%xmm3
	pxor	%xmm2,%xmm4
.byte	102,15,58,68,194,0	/* pclmulqdq $0x00,%xmm2,%xmm0 */
.byte	102,15,58,68,202,17	/* pclmulqdq $0x11,%xmm2,%xmm1 */
.byte	102,15,58,68,220,0	/* pclmulqdq $0x00,%xmm4,%xmm3 */
	xorps	%xmm0,%xmm3
	xorps	%xmm1,%xmm3
	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0
	movdqu	(%esi),%xmm3
	movups	(%edx),%xmm2
	pxor	%xmm6,%xmm0
	pxor	%xmm7,%xmm1
	movdqu	16(%esi),%xmm6
.byte	102,15,56,0,221		/* pshufb %xmm5,%xmm3 */
.byte	102,15,56,0,245		/* pshufb %xmm5,%xmm6 */
	movdqa	%xmm6,%xmm5
	movdqa	%xmm6,%xmm7
	pxor	%xmm3,%xmm1
	movdqa	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$5,%xmm0
	pxor	%xmm3,%xmm0
.byte	102,15,58,68,242,0	/* pclmulqdq $0x00,%xmm2,%xmm6 */
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm4
	pslldq	$8,%xmm0
	psrldq	$8,%xmm4
	pxor	%xmm3,%xmm0
	pshufd	$78,%xmm5,%xmm3
	pxor	%xmm4,%xmm1
	pxor	%xmm5,%xmm3
	pshufd	$78,%xmm2,%xmm5
	pxor	%xmm2,%xmm5
.byte	102,15,58,68,250,17	/* pclmulqdq $0x11,%xmm2,%xmm7 */
	movdqa	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm0
	pxor	%xmm1,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm0
.byte	102,15,58,68,221,0	/* pclmulqdq $0x00,%xmm5,%xmm3 */
	movups	16(%edx),%xmm2
	xorps	%xmm6,%xmm3
	xorps	%xmm7,%xmm3
	movdqa	%xmm3,%xmm5
	psrldq	$8,%xmm3
	pslldq	$8,%xmm5
	pxor	%xmm3,%xmm7
	pxor	%xmm5,%xmm6
	/* reload the bswap mask clobbered above */
	movdqa	(%ecx),%xmm5
	leal	32(%esi),%esi
	subl	$32,%ebx
	ja	.L015mod_loop
/* fold the two accumulated halves together and reduce */
.L014even_tail:
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pshufd	$78,%xmm2,%xmm4
	pxor	%xmm0,%xmm3
	pxor	%xmm2,%xmm4
.byte	102,15,58,68,194,0	/* pclmulqdq $0x00,%xmm2,%xmm0 */
.byte	102,15,58,68,202,17	/* pclmulqdq $0x11,%xmm2,%xmm1 */
.byte	102,15,58,68,220,0	/* pclmulqdq $0x00,%xmm4,%xmm3 */
	xorps	%xmm0,%xmm3
	xorps	%xmm1,%xmm3
	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0
	pxor	%xmm6,%xmm0
	pxor	%xmm7,%xmm1
	movdqa	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$5,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm4
	pslldq	$8,%xmm0
	psrldq	$8,%xmm4
	pxor	%xmm3,%xmm0
	pxor	%xmm4,%xmm1
	movdqa	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm0
	pxor	%xmm1,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm0
	testl	%ebx,%ebx
	jnz	.L016done
	movups	(%edx),%xmm2
/* single trailing block: Xi ^= block, then one multiply by H */
.L013odd_tail:
	movdqu	(%esi),%xmm3
.byte	102,15,56,0,221		/* pshufb %xmm5,%xmm3 */
	pxor	%xmm3,%xmm0
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pshufd	$78,%xmm2,%xmm4
	pxor	%xmm0,%xmm3
	pxor	%xmm2,%xmm4
.byte	102,15,58,68,194,0	/* pclmulqdq $0x00,%xmm2,%xmm0 */
.byte	102,15,58,68,202,17	/* pclmulqdq $0x11,%xmm2,%xmm1 */
.byte	102,15,58,68,220,0	/* pclmulqdq $0x00,%xmm4,%xmm3 */
	xorps	%xmm0,%xmm3
	xorps	%xmm1,%xmm3
	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0
	movdqa	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$5,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm4
	pslldq	$8,%xmm0
	psrldq	$8,%xmm4
	pxor	%xmm3,%xmm0
	pxor	%xmm4,%xmm1
	movdqa	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm0
	pxor	%xmm1,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm0
/* byte-swap and store Xi */
.L016done:
.byte	102,15,56,0,197		/* pshufb %xmm5,%xmm0 */
	movdqu	%xmm0,(%eax)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	gcm_ghash_clmul,.-.L_gcm_ghash_clmul_begin
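
/*
 * Presumed C prototype (assumption): CLMUL GHASH over len bytes (a multiple
 * of 16), two blocks per iteration via the H and H^2 entries written by
 * gcm_init_clmul:
 *
 *	void gcm_ghash_clmul(u64 Xi[2], const u128 Htable[16],
 *			     const u8 *inp, size_t len);
 */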
.align	64
/* pshufb byte-swap mask, followed by the 0xc2... reduction constant */
.Lbswap:
.byte	15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
.byte	1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,194
.align	64
/* reduction table for the 4-bit method (one 64-bit entry per nibble) */
.Lrem_4bit:
.long	0,0,0,471859200,0,943718400,0,610271232
.long	0,1887436800,0,1822425088,0,1220542464,0,1423966208
.long	0,3774873600,0,4246732800,0,3644850176,0,3311403008
.long	0,2441084928,0,2376073216,0,2847932416,0,3051356160
.align	64
/* reduction table for the 8-bit (MMX) method */
.Lrem_8bit:
.value	0,450,900,582,1800,1738,1164,1358
.value	3600,4050,3476,3158,2328,2266,2716,2910
.value	7200,7650,8100,7782,6952,6890,6316,6510
.value	4656,5106,4532,4214,5432,5370,5820,6014
.value	14400,14722,15300,14854,16200,16010,15564,15630
.value	13904,14226,13780,13334,12632,12442,13020,13086
.value	9312,9634,10212,9766,9064,8874,8428,8494
.value	10864,11186,10740,10294,11640,11450,12028,12094
.value	28800,28994,29444,29382,30600,30282,29708,30158
.value	32400,32594,32020,31958,31128,30810,31260,31710
.value	27808,28002,28452,28390,27560,27242,26668,27118
.value	25264,25458,24884,24822,26040,25722,26172,26622
.value	18624,18690,19268,19078,20424,19978,19532,19854
.value	18128,18194,17748,17558,16856,16410,16988,17310
.value	21728,21794,22372,22182,21480,21034,20588,20910
.value	23280,23346,22900,22710,24056,23610,24188,24510
.value	57600,57538,57988,58182,58888,59338,58764,58446
.value	61200,61138,60564,60758,59416,59866,60316,59998
.value	64800,64738,65188,65382,64040,64490,63916,63598
.value	62256,62194,61620,61814,62520,62970,63420,63102
.value	55616,55426,56004,56070,56904,57226,56780,56334
.value	55120,54930,54484,54550,53336,53658,54236,53790
.value	50528,50338,50916,50982,49768,50090,49644,49198
.value	52080,51890,51444,51510,52344,52666,53244,52798
.value	37248,36930,37380,37830,38536,38730,38156,38094
.value	40848,40530,39956,40406,39064,39258,39708,39646
.value	36256,35938,36388,36838,35496,35690,35116,35054
.value	33712,33394,32820,33270,33976,34170,34620,34558
.value	43456,43010,43588,43910,44744,44810,44364,44174
.value	42960,42514,42068,42390,41176,41242,41820,41630
.value	46560,46114,46692,47014,45800,45866,45420,45230
.value	48112,47666,47220,47542,48376,48442,49020,48830
/* banner string: "GHASH for x86, CRYPTOGAMS by <appro@openssl.org>" */
.byte	71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67
.byte	82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112
.byte	112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62
.byte	0