.section	".text",#alloc,#execinstr

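! rem_4bit is the usual 16-entry reduction table for 4-bit tabulated
! GHASH: entry [i] is the pre-computed constant folded back into the
! top of Z when nibble i falls off its low end during a 4-bit shift.
! Each entry is a big-endian 64-bit word with the constant in its top
! 16 bits.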
.align	64
rem_4bit:
	.long	0,0,471859200,0,943718400,0,610271232,0
	.long	1887436800,0,1822425088,0,1220542464,0,1423966208,0
	.long	3774873600,0,4246732800,0,3644850176,0,3311403008,0
	.long	2441084928,0,2376073216,0,2847932416,0,3051356160,0
.type	rem_4bit,#object
.size	rem_4bit,(.-rem_4bit)

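! void gcm_ghash_4bit(u64 Xi[2], const u128 Htable[16],
!                     const u8 *inp, size_t len);
!
! Per the OpenSSL convention: for each 16-byte block, Xi ^= block,
! then Xi *= H via the caller's 4-bit Shoup table. Htable has 16
! entries of 16 bytes each, high half at offset 0, low half at +8.
! %i0=Xi, %i1=Htable, %i2=inp, %i3=len (assumed a multiple of 16).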
.globl	gcm_ghash_4bit
.align	32
gcm_ghash_4bit:
	save	%sp,-112,%sp
	ldub	[%i2+15],%l1
	ldub	[%i0+15],%l2
	ldub	[%i0+14],%l3
	add	%i3,%i2,%i3
	add	%i1,8,%l6

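! Position-independent way to take the address of rem_4bit: the call
! lands on the very next instruction but deposits its own %pc in %o7,
! and the delay-slot add applies the assembly-time offset rem_4bit-1b.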
1:	call	.+8
	add	%o7,rem_4bit-1b,%l4

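! Outer loop, one 16-byte block per iteration. %o0:%o1 accumulate
! Zhi:Zlo, %l0/%l1 hold the high/low nibble of the current byte scaled
! to Htable offsets, %l6 points at the low halves of the Htable
! entries, %l4 at rem_4bit, and %l7 indexes the bytes fetched ahead of
! the inner loop, 13 down to 0.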
.Louter:
	xor	%l2,%l1,%l1
	and	%l1,0xf0,%l0
	and	%l1,0x0f,%l1
	sll	%l1,4,%l1
	ldx	[%l6+%l1],%o1
	ldx	[%i1+%l1],%o0

	ldub	[%i2+14],%l1

	ldx	[%l6+%l0],%o3
	and	%o1,0xf,%l5
	ldx	[%i1+%l0],%o2
	sll	%l5,3,%l5
	ldx	[%l4+%l5],%o4
	srlx	%o1,4,%o1
	mov	13,%l7
	sllx	%o0,60,%o5
	xor	%o3,%o1,%o1
	srlx	%o0,4,%o0
	xor	%o1,%o5,%o1

	xor	%l3,%l1,%l1
	and	%o1,0xf,%l5
	and	%l1,0xf0,%l0
	and	%l1,0x0f,%l1
	ba	.Lghash_inner
	sll	%l1,4,%l1
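! Inner loop, two 4-bit steps per byte of Xi^inp, bytes processed from
! 15 down to 0. Each step is logically: rem = Zlo & 0xf; Z >>= 4;
! Zlo ^= Htable[nibble].lo; Zhi ^= Htable[nibble].hi ^ rem_4bit[rem].
! The updates are software-pipelined, so some xors of one step retire
! during the next.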
.align	32
.Lghash_inner:
	ldx	[%l6+%l1],%o3
	sll	%l5,3,%l5
	xor	%o2,%o0,%o0
	ldx	[%i1+%l1],%o2
	srlx	%o1,4,%o1
	xor	%o4,%o0,%o0
	ldx	[%l4+%l5],%o4
	sllx	%o0,60,%o5
	xor	%o3,%o1,%o1
	ldub	[%i2+%l7],%l1
	srlx	%o0,4,%o0
	xor	%o1,%o5,%o1
	ldub	[%i0+%l7],%l3
	xor	%o2,%o0,%o0
	and	%o1,0xf,%l5

	ldx	[%l6+%l0],%o3
	sll	%l5,3,%l5
	xor	%o4,%o0,%o0
	ldx	[%i1+%l0],%o2
	srlx	%o1,4,%o1
	ldx	[%l4+%l5],%o4
	sllx	%o0,60,%o5
	xor	%l3,%l1,%l1
	srlx	%o0,4,%o0
	and	%l1,0xf0,%l0
	addcc	%l7,-1,%l7
	xor	%o1,%o5,%o1
	and	%l1,0x0f,%l1
	xor	%o3,%o1,%o1
	sll	%l1,4,%l1
	blu	.Lghash_inner
	and	%o1,0xf,%l5

	ldx	[%l6+%l1],%o3
	sll	%l5,3,%l5
	xor	%o2,%o0,%o0
	ldx	[%i1+%l1],%o2
	srlx	%o1,4,%o1
	xor	%o4,%o0,%o0
	ldx	[%l4+%l5],%o4
	sllx	%o0,60,%o5
	xor	%o3,%o1,%o1
	srlx	%o0,4,%o0
	xor	%o1,%o5,%o1
	xor	%o2,%o0,%o0

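! Advance to the next block; the final block branches to .Ldone for
! the remaining high-nibble step and the store back to Xi.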
	add	%i2,16,%i2
	cmp	%i2,%i3
	be,pn	%icc,.Ldone
	and	%o1,0xf,%l5

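! Not the last block: finish the high-nibble step, store Zhi:Zlo back
! to Xi big-endian, and keep Xi[15]/Xi[14] in %l2/%l3 (plus the next
! block's byte 15 in %l1) so .Louter need not reload them.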
	ldx	[%l6+%l0],%o3
	sll	%l5,3,%l5
	xor	%o4,%o0,%o0
	ldx	[%i1+%l0],%o2
	srlx	%o1,4,%o1
	ldx	[%l4+%l5],%o4
	sllx	%o0,60,%o5
	xor	%o3,%o1,%o1
	ldub	[%i2+15],%l1
	srlx	%o0,4,%o0
	xor	%o1,%o5,%o1
	xor	%o2,%o0,%o0
	stx	%o1,[%i0+8]
	xor	%o4,%o0,%o0
	stx	%o0,[%i0]
	srl	%o1,8,%l3
	and	%o1,0xff,%l2
	ba	.Louter
	and	%l3,0xff,%l3
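! Last block: the same finishing high-nibble step, then store the
! result and return.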
.align	32
.Ldone:
	ldx	[%l6+%l0],%o3
	sll	%l5,3,%l5
	xor	%o4,%o0,%o0
	ldx	[%i1+%l0],%o2
	srlx	%o1,4,%o1
	ldx	[%l4+%l5],%o4
	sllx	%o0,60,%o5
	xor	%o3,%o1,%o1
	srlx	%o0,4,%o0
	xor	%o1,%o5,%o1
	xor	%o2,%o0,%o0
	stx	%o1,[%i0+8]
	xor	%o4,%o0,%o0
	stx	%o0,[%i0]

	ret
	restore
.type	gcm_ghash_4bit,#function
.size	gcm_ghash_4bit,(.-gcm_ghash_4bit)
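
! void gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16]);
!
! Xi *= H for a single block, same algorithm as gcm_ghash_4bit minus
! the XOR with input. %i0=Xi, %i1=Htable.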
.globl	gcm_gmult_4bit
.align	32
gcm_gmult_4bit:
	save	%sp,-112,%sp
	ldub	[%i0+15],%l1
	add	%i1,8,%l6

1:	call	.+8
	add	%o7,rem_4bit-1b,%l4

	and	%l1,0xf0,%l0
	and	%l1,0x0f,%l1
	sll	%l1,4,%l1
	ldx	[%l6+%l1],%o1
	ldx	[%i1+%l1],%o0

	ldub	[%i0+14],%l1

	ldx	[%l6+%l0],%o3
	and	%o1,0xf,%l5
	ldx	[%i1+%l0],%o2
	sll	%l5,3,%l5
	ldx	[%l4+%l5],%o4
	srlx	%o1,4,%o1
	mov	13,%l7
	sllx	%o0,60,%o5
	xor	%o3,%o1,%o1
	srlx	%o0,4,%o0
	xor	%o1,%o5,%o1

	and	%o1,0xf,%l5
	and	%l1,0xf0,%l0
	and	%l1,0x0f,%l1
	ba	.Lgmult_inner
	sll	%l1,4,%l1
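! Same software-pipelined two-steps-per-byte loop as .Lghash_inner,
! except that bytes are read from Xi only.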
.align	32
.Lgmult_inner:
	ldx	[%l6+%l1],%o3
	sll	%l5,3,%l5
	xor	%o2,%o0,%o0
	ldx	[%i1+%l1],%o2
	srlx	%o1,4,%o1
	xor	%o4,%o0,%o0
	ldx	[%l4+%l5],%o4
	sllx	%o0,60,%o5
	xor	%o3,%o1,%o1
	ldub	[%i0+%l7],%l1
	srlx	%o0,4,%o0
	xor	%o1,%o5,%o1
	xor	%o2,%o0,%o0
	and	%o1,0xf,%l5

	ldx	[%l6+%l0],%o3
	sll	%l5,3,%l5
	xor	%o4,%o0,%o0
	ldx	[%i1+%l0],%o2
	srlx	%o1,4,%o1
	ldx	[%l4+%l5],%o4
	sllx	%o0,60,%o5
	srlx	%o0,4,%o0
	and	%l1,0xf0,%l0
	addcc	%l7,-1,%l7
	xor	%o1,%o5,%o1
	and	%l1,0x0f,%l1
	xor	%o3,%o1,%o1
	sll	%l1,4,%l1
	blu	.Lgmult_inner
	and	%o1,0xf,%l5

	ldx	[%l6+%l1],%o3
	sll	%l5,3,%l5
	xor	%o2,%o0,%o0
	ldx	[%i1+%l1],%o2
	srlx	%o1,4,%o1
	xor	%o4,%o0,%o0
	ldx	[%l4+%l5],%o4
	sllx	%o0,60,%o5
	xor	%o3,%o1,%o1
	srlx	%o0,4,%o0
	xor	%o1,%o5,%o1
	xor	%o2,%o0,%o0
	and	%o1,0xf,%l5

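! High-nibble step for the last byte, then store Zhi:Zlo back to Xi.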
	ldx	[%l6+%l0],%o3
	sll	%l5,3,%l5
	xor	%o4,%o0,%o0
	ldx	[%i1+%l0],%o2
	srlx	%o1,4,%o1
	ldx	[%l4+%l5],%o4
	sllx	%o0,60,%o5
	xor	%o3,%o1,%o1
	srlx	%o0,4,%o0
	xor	%o1,%o5,%o1
	xor	%o2,%o0,%o0
	stx	%o1,[%i0+8]
	xor	%o4,%o0,%o0
	stx	%o0,[%i0]

	ret
	restore
.type	gcm_gmult_4bit,#function
.size	gcm_gmult_4bit,(.-gcm_gmult_4bit)
.asciz	"GHASH for SPARCv9, CRYPTOGAMS by <appro@openssl.org>"
.align	4