.text
.align	6,0x90
/*
 * L_vpaes_consts -- lookup tables for the SSSE3 (pshufb) vector-permute
 * AES implementation ("Vector Permutation AES", Mike Hamburg, Stanford).
 * All code below addresses these tables relative to
 * %ebp == L_vpaes_consts+0x30.  Table names below follow the reference
 * implementation; offsets were cross-checked against the code's loads.
 */
L_vpaes_consts:
/* k_inv: GF(2^8) inversion helper tables (%ebp-0x30) */
.long	218628480,235210255,168496130,67568393
.long	252381056,17041926,33884169,51187212
/* k_s0F: 0x0F0F0F0F... low-nibble mask (%ebp-0x10) */
.long	252645135,252645135,252645135,252645135
/* k_ipt: input transform (%ebp+0x00) */
.long	1512730624,3266504856,1377990664,3401244816
.long	830229760,1275146365,2969422977,3447763452
/* k_sb1: sbox output table 1, lo/hi (%ebp+0x20) */
.long	3411033600,2979783055,338359620,2782886510
.long	4209124096,907596821,221174255,1006095553
/* k_sb2: sbox output table 2, lo/hi (%ebp+0x40) */
.long	191964160,3799684038,3164090317,1589111125
.long	182528256,1777043520,2877432650,3265356744
/* k_sbo: last-round sbox output transform, lo/hi (%ebp+0x60) */
.long	1874708224,3503451415,3305285752,363511674
.long	1606117888,3487855781,1093350906,2384367825
/* k_mc_forward: MixColumns forward rotations x4 (%ebp+0x80) */
.long	197121,67569157,134941193,202313229
.long	67569157,134941193,202313229,197121
.long	134941193,202313229,197121,67569157
.long	202313229,197121,67569157,134941193
/* k_mc_backward: MixColumns backward rotations x4 (%ebp+0xc0) */
.long	33619971,100992007,168364043,235736079
.long	235736079,33619971,100992007,168364043
.long	168364043,235736079,33619971,100992007
.long	100992007,168364043,235736079,33619971
/* k_sr: ShiftRows permutations x4 (%ebp+0x100; first row = identity) */
.long	50462976,117835012,185207048,252579084
.long	252314880,51251460,117574920,184942860
.long	184682752,252054788,50987272,118359308
.long	118099200,185467140,251790600,50727180
/* k_rcon: round-constant material (%ebp+0x140) */
.long	2946363062,528716217,1300004225,1881839624
/* k_s63: 0x5B5B5B5B... constant (%ebp+0x150) */
.long	1532713819,1532713819,1532713819,1532713819
/* k_opt: output transform for the encrypt schedule, lo/hi (%ebp+0x160) */
.long	3602276352,4288629033,3737020424,4153884961
.long	1354558464,32357713,2958822624,3775749553
/* k_deskew: deskew transform for the decrypt schedule, lo/hi (%ebp+0x180) */
.long	1201988352,132424512,1572796698,503232858
.long	2213177600,1597421020,4103937655,675398315
/* decrypt key-schedule transform tables (dks9/dksd/dksb/dkse family,
 * read sequentially at 0..112(%esi) in __vpaes_schedule_mangle; %ebp+0x1a0) */
.long	2749646592,4273543773,1511898873,121693092
.long	3040248576,1103263732,2871565598,1608280554
.long	2236667136,2588920351,482954393,64377734
.long	3069987328,291237287,2117370568,3650299247
.long	533321216,3573750986,2572112006,1401264716
.long	1339849704,2721158661,548607111,3445553514
.long	2128193280,3054596040,2183486460,1257083700
.long	655635200,1165381986,3923443150,2344132524
/* k_dipt: decryption input transform, lo/hi (%ebp+0x220) */
.long	190078720,256924420,290342170,357187870
.long	1610966272,2263057382,4103205268,309794674
/* k_dsb9: decryption sbox output *9, lo/hi (%ebp+0x240) */
.long	2592527872,2233205587,1335446729,3402964816
.long	3973531904,3225098121,3002836325,1918774430
/* k_dsbd: *D, lo/hi (%ebp+0x260) */
.long	3870401024,2102906079,2284471353,4117666579
.long	617007872,1021508343,366931923,691083277
/* k_dsbb: *B, lo/hi (%ebp+0x280) */
.long	2528395776,3491914898,2968704004,1613121270
.long	3445188352,3247741094,844474987,4093578302
/* k_dsbe: *E, lo/hi (%ebp+0x2a0) */
.long	651481088,1190302358,1689581232,574775300
.long	4289380608,206939853,2555985458,2489840491
/* k_dsbo: decryption last-round sbox output transform, lo/hi (%ebp+0x2c0) */
.long	2130264064,327674451,3566485037,3349835193
.long	2470714624,316102159,3636825756,3393945945
/* "Vector Permutation AES for x86/SSSE3, Mike Hamburg (Stanford University)\0" */
.byte	86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
.byte	111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83
.byte	83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117
.byte	114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105
.byte	118,101,114,115,105,116,121,41,0
.align	6,0x90
.type	__vpaes_preheat,@function
.align	4
/*
 * __vpaes_preheat
 * Non-standard convention: on entry %ebp holds
 * (L_vpaes_consts+0x30 - return_address); adding the return address
 * (the caller's pic_point label) turns it into an absolute pointer to
 * L_vpaes_consts+0x30 (32-bit PIC trick).  Preloads the two constants
 * every core routine expects:
 *   %xmm7 = k_inv row 0
 *   %xmm6 = k_s0F low-nibble mask
 */
__vpaes_preheat:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	addl	(%esp),%ebp	/* %ebp += retaddr -> &L_vpaes_consts+0x30 */
	movdqa	-48(%ebp),%xmm7	/* k_inv[0] */
	movdqa	-16(%ebp),%xmm6	/* k_s0F mask */
	ret
.type	__vpaes_encrypt_core,@function
.align	4
/*
 * __vpaes_encrypt_core -- encrypt one block with pshufb-based AES.
 * In:  %xmm0 = 16-byte block, %edx = key schedule (240(%edx) = rounds),
 *      %ebp = &L_vpaes_consts+0x30, %xmm6/%xmm7 set by __vpaes_preheat.
 * Out: %xmm0 = encrypted block.
 * Clobbers: %eax, %ebx, %ecx, %xmm1-%xmm5, flags.
 */
__vpaes_encrypt_core:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	movl	$16,%ecx	/* rotating mc/sr table index */
	movl	240(%edx),%eax	/* %eax = round count */
	movdqa	%xmm6,%xmm1
	movdqa	(%ebp),%xmm2	/* k_ipt lo */
	pandn	%xmm0,%xmm1	/* high nibbles (pre-shift) */
	pand	%xmm6,%xmm0	/* low nibbles */
	movdqu	(%edx),%xmm5	/* round key 0 */
.byte	102,15,56,0,208	/* pshufb %xmm0,%xmm2 */
	movdqa	16(%ebp),%xmm0	/* k_ipt hi */
	pxor	%xmm5,%xmm2	/* add round key */
	psrld	$4,%xmm1
	addl	$16,%edx	/* -> round key 1; also leaves ZF=0 for first jnz */
.byte	102,15,56,0,193	/* pshufb %xmm1,%xmm0 */
	leal	192(%ebp),%ebx	/* base for indexed mc_forward/backward/sr */
	pxor	%xmm2,%xmm0
	jmp	L000enc_entry
.align	4,0x90
L001enc_loop:
	/* middle round: sb1/sb2 outputs + MixColumns via pshufb rotations */
	movdqa	32(%ebp),%xmm4	/* k_sb1 lo */
	movdqa	48(%ebp),%xmm0	/* k_sb1 hi */
.byte	102,15,56,0,226	/* pshufb %xmm2,%xmm4 */
.byte	102,15,56,0,195	/* pshufb %xmm3,%xmm0 */
	pxor	%xmm5,%xmm4	/* add round key */
	movdqa	64(%ebp),%xmm5	/* k_sb2 lo */
	pxor	%xmm4,%xmm0
	movdqa	-64(%ebx,%ecx,1),%xmm1	/* k_mc_forward[x] */
.byte	102,15,56,0,234	/* pshufb %xmm2,%xmm5 */
	movdqa	80(%ebp),%xmm2	/* k_sb2 hi */
	movdqa	(%ebx,%ecx,1),%xmm4	/* k_mc_backward[x] */
.byte	102,15,56,0,211	/* pshufb %xmm3,%xmm2 */
	movdqa	%xmm0,%xmm3
	pxor	%xmm5,%xmm2
.byte	102,15,56,0,193	/* pshufb %xmm1,%xmm0 */
	addl	$16,%edx	/* advance key pointer */
	pxor	%xmm2,%xmm0
.byte	102,15,56,0,220	/* pshufb %xmm4,%xmm3 */
	addl	$16,%ecx
	pxor	%xmm0,%xmm3
.byte	102,15,56,0,193	/* pshufb %xmm1,%xmm0 */
	andl	$48,%ecx	/* index cycles through 0,16,32,48 */
	subl	$1,%eax	/* round counter; sets ZF for jnz below */
	pxor	%xmm3,%xmm0
L000enc_entry:
	/* top of round: split nibbles, compute inverse-based sbox inputs */
	movdqa	%xmm6,%xmm1
	movdqa	-32(%ebp),%xmm5	/* k_inv[1] */
	pandn	%xmm0,%xmm1
	psrld	$4,%xmm1	/* %xmm1 = hi nibbles */
	pand	%xmm6,%xmm0	/* %xmm0 = lo nibbles */
.byte	102,15,56,0,232	/* pshufb %xmm0,%xmm5 */
	movdqa	%xmm7,%xmm3
	pxor	%xmm1,%xmm0
.byte	102,15,56,0,217	/* pshufb %xmm1,%xmm3 */
	movdqa	%xmm7,%xmm4
	pxor	%xmm5,%xmm3
.byte	102,15,56,0,224	/* pshufb %xmm0,%xmm4 */
	movdqa	%xmm7,%xmm2
	pxor	%xmm5,%xmm4
.byte	102,15,56,0,211	/* pshufb %xmm3,%xmm2 */
	movdqa	%xmm7,%xmm3
	pxor	%xmm0,%xmm2
.byte	102,15,56,0,220	/* pshufb %xmm4,%xmm3 */
	movdqu	(%edx),%xmm5	/* next round key */
	pxor	%xmm1,%xmm3
	jnz	L001enc_loop	/* ZF from subl (loop) / addl $16,%edx (entry) */
	/* last round: sbo output transform, final ShiftRows, add key */
	movdqa	96(%ebp),%xmm4	/* k_sbo lo */
	movdqa	112(%ebp),%xmm0	/* k_sbo hi */
.byte	102,15,56,0,226	/* pshufb %xmm2,%xmm4 */
	pxor	%xmm5,%xmm4	/* add last round key */
.byte	102,15,56,0,195	/* pshufb %xmm3,%xmm0 */
	movdqa	64(%ebx,%ecx,1),%xmm1	/* k_sr[x] */
	pxor	%xmm4,%xmm0
.byte	102,15,56,0,193	/* pshufb %xmm1,%xmm0 */
	ret
.type	__vpaes_decrypt_core,@function
.align	4
/*
 * __vpaes_decrypt_core -- decrypt one block with pshufb-based AES.
 * In:  %xmm0 = 16-byte ciphertext block, %edx = decryption key schedule
 *      (240(%edx) = rounds), %ebp = &L_vpaes_consts+0x30,
 *      %xmm6/%xmm7 set by __vpaes_preheat.
 * Out: %xmm0 = decrypted block.
 * Clobbers: %eax, %ebx, %ecx, %xmm1-%xmm5, flags.
 */
__vpaes_decrypt_core:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	leal	608(%ebp),%ebx	/* %ebx -> decryption tables (k_dsb*) */
	movl	240(%edx),%eax	/* %eax = round count */
	movdqa	%xmm6,%xmm1
	movdqa	-64(%ebx),%xmm2	/* k_dipt lo (input transform) */
	pandn	%xmm0,%xmm1
	movl	%eax,%ecx
	psrld	$4,%xmm1	/* hi nibbles */
	movdqu	(%edx),%xmm5	/* round key 0 */
	shll	$4,%ecx
	pand	%xmm6,%xmm0	/* lo nibbles */
.byte	102,15,56,0,208	/* pshufb %xmm0,%xmm2 */
	movdqa	-48(%ebx),%xmm0	/* k_dipt hi */
	xorl	$48,%ecx
.byte	102,15,56,0,193	/* pshufb %xmm1,%xmm0 */
	andl	$48,%ecx	/* round-dependent ShiftRows selector */
	pxor	%xmm5,%xmm2	/* add round key */
	movdqa	176(%ebp),%xmm5	/* k_mc_forward[3]: rotation constant */
	pxor	%xmm2,%xmm0
	addl	$16,%edx	/* -> next key; also leaves ZF=0 for first jnz */
	leal	-352(%ebx,%ecx,1),%ecx	/* %ecx = &k_sr[selector] */
	jmp	L002dec_entry
.align	4,0x90
L003dec_loop:
	/* middle round: accumulate dsb9/dsbd/dsbb/dsbe outputs,
	 * rotating the accumulator with the k_mc constant each step */
	movdqa	-32(%ebx),%xmm4	/* k_dsb9 lo */
	movdqa	-16(%ebx),%xmm1	/* k_dsb9 hi */
.byte	102,15,56,0,226	/* pshufb %xmm2,%xmm4 */
.byte	102,15,56,0,203	/* pshufb %xmm3,%xmm1 */
	pxor	%xmm4,%xmm0
	movdqa	(%ebx),%xmm4	/* k_dsbd lo */
	pxor	%xmm1,%xmm0
	movdqa	16(%ebx),%xmm1	/* k_dsbd hi */
.byte	102,15,56,0,226	/* pshufb %xmm2,%xmm4 */
.byte	102,15,56,0,197	/* pshufb %xmm5,%xmm0 (rotate accumulator) */
.byte	102,15,56,0,203	/* pshufb %xmm3,%xmm1 */
	pxor	%xmm4,%xmm0
	movdqa	32(%ebx),%xmm4	/* k_dsbb lo */
	pxor	%xmm1,%xmm0
	movdqa	48(%ebx),%xmm1	/* k_dsbb hi */
.byte	102,15,56,0,226	/* pshufb %xmm2,%xmm4 */
.byte	102,15,56,0,197	/* pshufb %xmm5,%xmm0 */
.byte	102,15,56,0,203	/* pshufb %xmm3,%xmm1 */
	pxor	%xmm4,%xmm0
	movdqa	64(%ebx),%xmm4	/* k_dsbe lo */
	pxor	%xmm1,%xmm0
	movdqa	80(%ebx),%xmm1	/* k_dsbe hi */
.byte	102,15,56,0,226	/* pshufb %xmm2,%xmm4 */
.byte	102,15,56,0,197	/* pshufb %xmm5,%xmm0 */
.byte	102,15,56,0,203	/* pshufb %xmm3,%xmm1 */
	pxor	%xmm4,%xmm0
	addl	$16,%edx	/* next round key */
.byte	102,15,58,15,237,12	/* palignr $12,%xmm5,%xmm5 (advance rotation) */
	pxor	%xmm1,%xmm0
	subl	$1,%eax	/* round counter; sets ZF for jnz below */
L002dec_entry:
	/* top of round: split nibbles, compute inverse-based sbox inputs */
	movdqa	%xmm6,%xmm1
	movdqa	-32(%ebp),%xmm2	/* k_inv[1] */
	pandn	%xmm0,%xmm1
	pand	%xmm6,%xmm0	/* lo nibbles */
	psrld	$4,%xmm1	/* hi nibbles */
.byte	102,15,56,0,208	/* pshufb %xmm0,%xmm2 */
	movdqa	%xmm7,%xmm3
	pxor	%xmm1,%xmm0
.byte	102,15,56,0,217	/* pshufb %xmm1,%xmm3 */
	movdqa	%xmm7,%xmm4
	pxor	%xmm2,%xmm3
.byte	102,15,56,0,224	/* pshufb %xmm0,%xmm4 */
	pxor	%xmm2,%xmm4
	movdqa	%xmm7,%xmm2
.byte	102,15,56,0,211	/* pshufb %xmm3,%xmm2 */
	movdqa	%xmm7,%xmm3
	pxor	%xmm0,%xmm2
.byte	102,15,56,0,220	/* pshufb %xmm4,%xmm3 */
	movdqu	(%edx),%xmm0	/* next round key */
	pxor	%xmm1,%xmm3
	jnz	L003dec_loop	/* ZF from subl (loop) / addl $16,%edx (entry) */
	/* last round: dsbo output transform, final ShiftRows via (%ecx) */
	movdqa	96(%ebx),%xmm4	/* k_dsbo lo */
.byte	102,15,56,0,226	/* pshufb %xmm2,%xmm4 */
	pxor	%xmm0,%xmm4	/* add last round key */
	movdqa	112(%ebx),%xmm0	/* k_dsbo hi */
	movdqa	(%ecx),%xmm2	/* selected k_sr row */
.byte	102,15,56,0,195	/* pshufb %xmm3,%xmm0 */
	pxor	%xmm4,%xmm0
.byte	102,15,56,0,194	/* pshufb %xmm2,%xmm0 (inverse ShiftRows) */
	ret
.type	__vpaes_schedule_core,@function
.align	4
/*
 * __vpaes_schedule_core -- AES key expansion for 128/192/256-bit keys.
 * In:  %esi = user key, %edx = output schedule, %eax = key size in bits,
 *      %edi = 0 for an encryption schedule / nonzero for decryption,
 *      %ecx = initial ShiftRows rotation index,
 *      %ebp = (L_vpaes_consts+0x30 - return address), made absolute below.
 * Uses an aligned scratch frame on the caller's stack (4..36(%esp)).
 * Wipes %xmm0-%xmm7 before returning (key material hygiene).
 */
__vpaes_schedule_core:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	addl	(%esp),%ebp	/* PIC fixup: %ebp = &L_vpaes_consts+0x30 */
	movdqu	(%esi),%xmm0	/* first 16 key bytes */
	movdqa	320(%ebp),%xmm2	/* k_rcon */
	movdqa	%xmm0,%xmm3	/* keep raw key for the decrypt path */
	leal	(%ebp),%ebx	/* %ebx = k_ipt for __vpaes_schedule_transform */
	movdqa	%xmm2,4(%esp)	/* stash rcon; callees see it at 8(%esp) */
	call	__vpaes_schedule_transform
	movdqa	%xmm0,%xmm7	/* %xmm7 = current key state */
	testl	%edi,%edi
	jnz	L004schedule_am_decrypting
	movdqu	%xmm0,(%edx)	/* encrypt: transformed key is round key 0 */
	jmp	L005schedule_go
L004schedule_am_decrypting:
	movdqa	256(%ebp,%ecx,1),%xmm1	/* k_sr[x] */
.byte	102,15,56,0,217	/* pshufb %xmm1,%xmm3 */
	movdqu	%xmm3,(%edx)	/* decrypt: store ShiftRows'd raw key */
	xorl	$48,%ecx
L005schedule_go:
	cmpl	$192,%eax	/* dispatch on key size in bits */
	ja	L006schedule_256
	je	L007schedule_192
L008schedule_128:
	/* 128-bit: 10 plain rounds */
	movl	$10,%eax
L009loop_schedule_128:
	call	__vpaes_schedule_round
	decl	%eax
	jz	L010schedule_mangle_last
	call	__vpaes_schedule_mangle
	jmp	L009loop_schedule_128
.align	4,0x90
L007schedule_192:
	/* 192-bit: key is 1.5 blocks; each loop pass emits 3 round keys
	 * from 1.5 key words, hence the smear/mangle choreography */
	movdqu	8(%esi),%xmm0	/* key bytes 8..23 */
	call	__vpaes_schedule_transform
	movdqa	%xmm0,%xmm6	/* save short half */
	pxor	%xmm4,%xmm4
	movhlps	%xmm4,%xmm6	/* clear top qword of %xmm6 */
	movl	$4,%eax
L011loop_schedule_192:
	call	__vpaes_schedule_round
.byte	102,15,58,15,198,8	/* palignr $8,%xmm6,%xmm0 */
	call	__vpaes_schedule_mangle
	call	__vpaes_schedule_192_smear
	call	__vpaes_schedule_mangle
	call	__vpaes_schedule_round
	decl	%eax
	jz	L010schedule_mangle_last
	call	__vpaes_schedule_mangle
	call	__vpaes_schedule_192_smear
	jmp	L011loop_schedule_192
.align	4,0x90
L006schedule_256:
	/* 256-bit: alternate full rounds with "low" rounds (no rcon/rotate) */
	movdqu	16(%esi),%xmm0	/* second 16 key bytes */
	call	__vpaes_schedule_transform
	movl	$7,%eax
L012loop_schedule_256:
	call	__vpaes_schedule_mangle
	movdqa	%xmm0,%xmm6	/* save low-round result */
	call	__vpaes_schedule_round
	decl	%eax
	jz	L010schedule_mangle_last
	call	__vpaes_schedule_mangle
	pshufd	$255,%xmm0,%xmm0	/* broadcast high dword */
	movdqa	%xmm7,20(%esp)	/* spill key state around the low round */
	movdqa	%xmm6,%xmm7
	call	L_vpaes_schedule_low_round
	movdqa	20(%esp),%xmm7
	jmp	L012loop_schedule_256
.align	4,0x90
L010schedule_mangle_last:
	/* final round key: apply output transform (k_deskew for decrypt,
	 * k_opt for encrypt) after the s63 xor */
	leal	384(%ebp),%ebx	/* k_deskew */
	testl	%edi,%edi
	jnz	L013schedule_mangle_last_dec
	movdqa	256(%ebp,%ecx,1),%xmm1	/* k_sr[x] */
.byte	102,15,56,0,193	/* pshufb %xmm1,%xmm0 */
	leal	352(%ebp),%ebx	/* k_opt (encrypting) */
	addl	$32,%edx
L013schedule_mangle_last_dec:
	addl	$-16,%edx
	pxor	336(%ebp),%xmm0	/* xor k_s63 */
	call	__vpaes_schedule_transform
	movdqu	%xmm0,(%edx)	/* store last round key */
	/* scrub all xmm registers of key material */
	pxor	%xmm0,%xmm0
	pxor	%xmm1,%xmm1
	pxor	%xmm2,%xmm2
	pxor	%xmm3,%xmm3
	pxor	%xmm4,%xmm4
	pxor	%xmm5,%xmm5
	pxor	%xmm6,%xmm6
	pxor	%xmm7,%xmm7
	ret
.type	__vpaes_schedule_192_smear,@function
.align	4
/*
 * __vpaes_schedule_192_smear -- smear the short (low-qword) key part
 * in %xmm6 with the high words of %xmm7, for the 192-bit schedule.
 * Out: %xmm0 = %xmm6 = smeared result with the top qword cleared in
 * %xmm6.  Clobbers %xmm1.
 */
__vpaes_schedule_192_smear:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	pshufd	$128,%xmm6,%xmm1	/* replicate %xmm6 dwords (0,0,0,2) */
	pshufd	$254,%xmm7,%xmm0	/* replicate %xmm7 dwords (2,3,3,3) */
	pxor	%xmm1,%xmm6
	pxor	%xmm1,%xmm1
	pxor	%xmm0,%xmm6
	movdqa	%xmm6,%xmm0	/* full result in %xmm0 */
	movhlps	%xmm1,%xmm6	/* clear top qword of %xmm6 */
	ret
.type	__vpaes_schedule_round,@function
.align	4
/*
 * __vpaes_schedule_round -- one full key-schedule round:
 * rotate the rcon slot at 8(%esp), apply RotWord to %xmm0, then fall
 * through into L_vpaes_schedule_low_round, which does the slide/xor
 * chain, the s63 xor and the pshufb SubBytes on the new key state.
 * In/out: %xmm7 = key state, %xmm0 = previous round output.
 * Clobbers %xmm1-%xmm5.
 */
__vpaes_schedule_round:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	movdqa	8(%esp),%xmm2	/* rcon slot (4(%esp) in the core's frame) */
	pxor	%xmm1,%xmm1
.byte	102,15,58,15,202,15	/* palignr $15,%xmm2,%xmm1 (extract rcon byte) */
.byte	102,15,58,15,210,15	/* palignr $15,%xmm2,%xmm2 (rotate rcon) */
	pxor	%xmm1,%xmm7	/* add round constant */
	pshufd	$255,%xmm0,%xmm0	/* broadcast high dword */
.byte	102,15,58,15,192,1	/* palignr $1,%xmm0,%xmm0 (RotWord) */
	movdqa	%xmm2,8(%esp)	/* store rotated rcon back */
L_vpaes_schedule_low_round:
	/* smear the key state: x ^= x<<32; x ^= x<<64 */
	movdqa	%xmm7,%xmm1
	pslldq	$4,%xmm7
	pxor	%xmm1,%xmm7
	movdqa	%xmm7,%xmm1
	pslldq	$8,%xmm7
	pxor	%xmm1,%xmm7
	pxor	336(%ebp),%xmm7	/* xor k_s63 */
	/* SubBytes on %xmm0 via the nibble-split pshufb pipeline */
	movdqa	-16(%ebp),%xmm4	/* k_s0F mask */
	movdqa	-48(%ebp),%xmm5	/* k_inv[0] */
	movdqa	%xmm4,%xmm1
	pandn	%xmm0,%xmm1
	psrld	$4,%xmm1	/* hi nibbles */
	pand	%xmm4,%xmm0	/* lo nibbles */
	movdqa	-32(%ebp),%xmm2	/* k_inv[1] */
.byte	102,15,56,0,208	/* pshufb %xmm0,%xmm2 */
	pxor	%xmm1,%xmm0
	movdqa	%xmm5,%xmm3
.byte	102,15,56,0,217	/* pshufb %xmm1,%xmm3 */
	pxor	%xmm2,%xmm3
	movdqa	%xmm5,%xmm4
.byte	102,15,56,0,224	/* pshufb %xmm0,%xmm4 */
	pxor	%xmm2,%xmm4
	movdqa	%xmm5,%xmm2
.byte	102,15,56,0,211	/* pshufb %xmm3,%xmm2 */
	pxor	%xmm0,%xmm2
	movdqa	%xmm5,%xmm3
.byte	102,15,56,0,220	/* pshufb %xmm4,%xmm3 */
	pxor	%xmm1,%xmm3
	movdqa	32(%ebp),%xmm4	/* k_sb1 lo */
.byte	102,15,56,0,226	/* pshufb %xmm2,%xmm4 */
	movdqa	48(%ebp),%xmm0	/* k_sb1 hi */
.byte	102,15,56,0,195	/* pshufb %xmm3,%xmm0 */
	pxor	%xmm4,%xmm0
	pxor	%xmm7,%xmm0	/* fold in smeared key state */
	movdqa	%xmm0,%xmm7	/* new key state */
	ret
.type	__vpaes_schedule_transform,@function
.align	4
/*
 * __vpaes_schedule_transform -- apply the lo/hi pshufb table pair at
 * (%ebx)/16(%ebx) (k_ipt, k_opt or k_deskew, chosen by the caller)
 * to %xmm0 nibble-wise.
 * In/out: %xmm0.  Clobbers %xmm1, %xmm2.
 */
__vpaes_schedule_transform:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	movdqa	-16(%ebp),%xmm2	/* k_s0F mask */
	movdqa	%xmm2,%xmm1
	pandn	%xmm0,%xmm1
	psrld	$4,%xmm1	/* hi nibbles */
	pand	%xmm2,%xmm0	/* lo nibbles */
	movdqa	(%ebx),%xmm2	/* table lo */
.byte	102,15,56,0,208	/* pshufb %xmm0,%xmm2 */
	movdqa	16(%ebx),%xmm0	/* table hi */
.byte	102,15,56,0,193	/* pshufb %xmm1,%xmm0 */
	pxor	%xmm2,%xmm0	/* combine halves */
	ret
.type	__vpaes_schedule_mangle,@function
.align	4
/*
 * __vpaes_schedule_mangle -- write the current round key (%xmm0) to
 * (%edx) in the form the schedule wants, advancing %edx (forward for
 * encrypt, backward for decrypt) and rotating the ShiftRows index in
 * %ecx.  The decrypt path pushes the key through the dks* transform
 * tables at 416(%ebp).
 * Preserves %xmm0; clobbers %xmm1-%xmm5, and %esi on the decrypt path.
 */
__vpaes_schedule_mangle:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	movdqa	%xmm0,%xmm4	/* work on a copy */
	movdqa	128(%ebp),%xmm5	/* k_mc_forward[0] */
	testl	%edi,%edi
	jnz	L014schedule_mangle_dec
	/* encrypting: xor s63, then rotate-and-accumulate three times */
	addl	$16,%edx
	pxor	336(%ebp),%xmm4	/* xor k_s63 */
.byte	102,15,56,0,229	/* pshufb %xmm5,%xmm4 */
	movdqa	%xmm4,%xmm3
.byte	102,15,56,0,229	/* pshufb %xmm5,%xmm4 */
	pxor	%xmm4,%xmm3
.byte	102,15,56,0,229	/* pshufb %xmm5,%xmm4 */
	pxor	%xmm4,%xmm3
	jmp	L015schedule_mangle_both
.align	4,0x90
L014schedule_mangle_dec:
	/* decrypting: run the key through the four dks lo/hi table pairs,
	 * rotating the accumulator between pairs */
	movdqa	-16(%ebp),%xmm2	/* k_s0F mask */
	leal	416(%ebp),%esi	/* dks transform tables */
	movdqa	%xmm2,%xmm1
	pandn	%xmm4,%xmm1
	psrld	$4,%xmm1	/* hi nibbles */
	pand	%xmm2,%xmm4	/* lo nibbles */
	movdqa	(%esi),%xmm2
.byte	102,15,56,0,212	/* pshufb %xmm4,%xmm2 */
	movdqa	16(%esi),%xmm3
.byte	102,15,56,0,217	/* pshufb %xmm1,%xmm3 */
	pxor	%xmm2,%xmm3
.byte	102,15,56,0,221	/* pshufb %xmm5,%xmm3 (rotate) */
	movdqa	32(%esi),%xmm2
.byte	102,15,56,0,212	/* pshufb %xmm4,%xmm2 */
	pxor	%xmm3,%xmm2
	movdqa	48(%esi),%xmm3
.byte	102,15,56,0,217	/* pshufb %xmm1,%xmm3 */
	pxor	%xmm2,%xmm3
.byte	102,15,56,0,221	/* pshufb %xmm5,%xmm3 (rotate) */
	movdqa	64(%esi),%xmm2
.byte	102,15,56,0,212	/* pshufb %xmm4,%xmm2 */
	pxor	%xmm3,%xmm2
	movdqa	80(%esi),%xmm3
.byte	102,15,56,0,217	/* pshufb %xmm1,%xmm3 */
	pxor	%xmm2,%xmm3
.byte	102,15,56,0,221	/* pshufb %xmm5,%xmm3 (rotate) */
	movdqa	96(%esi),%xmm2
.byte	102,15,56,0,212	/* pshufb %xmm4,%xmm2 */
	pxor	%xmm3,%xmm2
	movdqa	112(%esi),%xmm3
.byte	102,15,56,0,217	/* pshufb %xmm1,%xmm3 */
	pxor	%xmm2,%xmm3
	addl	$-16,%edx	/* decrypt schedule is written backwards */
L015schedule_mangle_both:
	movdqa	256(%ebp,%ecx,1),%xmm1	/* k_sr[x] */
.byte	102,15,56,0,217	/* pshufb %xmm1,%xmm3 */
	addl	$-16,%ecx
	andl	$48,%ecx	/* rotate ShiftRows index */
	movdqu	%xmm3,(%edx)	/* emit round key */
	ret
.globl	_vpaes_set_encrypt_key
.type	_vpaes_set_encrypt_key,@function
.align	4
/*
 * int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
 *                           AES_KEY *key)
 * Stack args after the 4 pushes: 20(%esp)=userKey, 24(%esp)=bits,
 * 28(%esp)=key.  Sets 240(%edx) = bits/32 + 5 rounds, builds the
 * encryption schedule via __vpaes_schedule_core on a 16-byte-aligned
 * scratch frame, and always returns 0.
 */
_vpaes_set_encrypt_key:
L_vpaes_set_encrypt_key_begin:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi	/* %esi = user key */
	leal	-56(%esp),%ebx
	movl	24(%esp),%eax	/* %eax = key bits */
	andl	$-16,%ebx
	movl	28(%esp),%edx	/* %edx = AES_KEY out */
	xchgl	%esp,%ebx	/* switch to aligned scratch frame */
	movl	%ebx,48(%esp)	/* save caller's %esp */
	movl	%eax,%ebx
	shrl	$5,%ebx
	addl	$5,%ebx	/* rounds = bits/32 + 5 */
	movl	%ebx,240(%edx)
	movl	$48,%ecx	/* initial ShiftRows index */
	movl	$0,%edi	/* encryption schedule */
	leal	L_vpaes_consts+0x30-L016pic_point,%ebp	/* PIC offset; fixed up in callee */
	call	__vpaes_schedule_core
L016pic_point:
	movl	48(%esp),%esp	/* restore caller's stack */
	xorl	%eax,%eax	/* return 0 */
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.globl	_vpaes_set_decrypt_key
.type	_vpaes_set_decrypt_key,@function
.align	4
/*
 * int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
 *                           AES_KEY *key)
 * Same layout as vpaes_set_encrypt_key, but points %edx at the END of
 * the schedule (it is written backwards), sets %edi=1 (decrypting) and
 * derives the initial ShiftRows index from the key size.  Returns 0.
 */
_vpaes_set_decrypt_key:
L_vpaes_set_decrypt_key_begin:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi	/* %esi = user key */
	leal	-56(%esp),%ebx
	movl	24(%esp),%eax	/* %eax = key bits */
	andl	$-16,%ebx
	movl	28(%esp),%edx	/* %edx = AES_KEY out */
	xchgl	%esp,%ebx	/* switch to aligned scratch frame */
	movl	%ebx,48(%esp)	/* save caller's %esp */
	movl	%eax,%ebx
	shrl	$5,%ebx
	addl	$5,%ebx	/* rounds = bits/32 + 5 */
	movl	%ebx,240(%edx)
	shll	$4,%ebx
	leal	16(%edx,%ebx,1),%edx	/* %edx -> one past last round key */
	movl	$1,%edi	/* decryption schedule */
	movl	%eax,%ecx
	shrl	$1,%ecx
	andl	$32,%ecx
	xorl	$32,%ecx	/* ShiftRows index: 32 unless bits&64 */
	leal	L_vpaes_consts+0x30-L017pic_point,%ebp	/* PIC offset; fixed up in callee */
	call	__vpaes_schedule_core
L017pic_point:
	movl	48(%esp),%esp	/* restore caller's stack */
	xorl	%eax,%eax	/* return 0 */
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.globl	_vpaes_encrypt
.type	_vpaes_encrypt,@function
.align	4
/*
 * void vpaes_encrypt(const unsigned char *in, unsigned char *out,
 *                    const AES_KEY *key)
 * ECB single-block encrypt: preheats the table registers, switches to
 * an aligned scratch frame (movdqa-safe), and runs
 * __vpaes_encrypt_core on the block.
 */
_vpaes_encrypt:
L_vpaes_encrypt_begin:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	leal	L_vpaes_consts+0x30-L018pic_point,%ebp	/* PIC offset; fixed up in preheat */
	call	__vpaes_preheat
L018pic_point:
	movl	20(%esp),%esi	/* %esi = in */
	leal	-56(%esp),%ebx
	movl	24(%esp),%edi	/* %edi = out */
	andl	$-16,%ebx
	movl	28(%esp),%edx	/* %edx = key schedule */
	xchgl	%esp,%ebx	/* aligned scratch frame */
	movl	%ebx,48(%esp)	/* save caller's %esp */
	movdqu	(%esi),%xmm0	/* load block */
	call	__vpaes_encrypt_core
	movdqu	%xmm0,(%edi)	/* store result */
	movl	48(%esp),%esp	/* restore stack */
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.globl	_vpaes_decrypt
.type	_vpaes_decrypt,@function
.align	4
/*
 * void vpaes_decrypt(const unsigned char *in, unsigned char *out,
 *                    const AES_KEY *key)
 * ECB single-block decrypt; mirror of vpaes_encrypt using
 * __vpaes_decrypt_core.
 */
_vpaes_decrypt:
L_vpaes_decrypt_begin:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	leal	L_vpaes_consts+0x30-L019pic_point,%ebp	/* PIC offset; fixed up in preheat */
	call	__vpaes_preheat
L019pic_point:
	movl	20(%esp),%esi	/* %esi = in */
	leal	-56(%esp),%ebx
	movl	24(%esp),%edi	/* %edi = out */
	andl	$-16,%ebx
	movl	28(%esp),%edx	/* %edx = key schedule */
	xchgl	%esp,%ebx	/* aligned scratch frame */
	movl	%ebx,48(%esp)	/* save caller's %esp */
	movdqu	(%esi),%xmm0	/* load block */
	call	__vpaes_decrypt_core
	movdqu	%xmm0,(%edi)	/* store result */
	movl	48(%esp),%esp	/* restore stack */
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.globl	_vpaes_cbc_encrypt
.type	_vpaes_cbc_encrypt,@function
.align	4
/*
 * void vpaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
 *                        size_t length, const AES_KEY *key,
 *                        unsigned char *ivec, int enc)
 * CBC mode over whole 16-byte blocks (length is rounded down); if
 * length < 16 it returns without touching *ivec.  On completion *ivec
 * is updated with the last ciphertext block (encrypt) or the last
 * input block (decrypt).  Scratch frame slots: 0(%esp)=out-in delta,
 * 4(%esp)=key, 8(%esp)=ivec, 16/32(%esp)=xmm spill.
 */
_vpaes_cbc_encrypt:
L_vpaes_cbc_encrypt_begin:
#ifdef __CET__

.byte	243,15,30,251	/* endbr32 */
#endif

	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi	/* %esi = in */
	movl	24(%esp),%edi	/* %edi = out */
	movl	28(%esp),%eax	/* %eax = length */
	movl	32(%esp),%edx	/* %edx = key schedule */
	subl	$16,%eax
	jc	L020cbc_abort	/* < one block: nothing to do */
	leal	-56(%esp),%ebx
	movl	36(%esp),%ebp	/* %ebp = ivec (temporarily) */
	andl	$-16,%ebx
	movl	40(%esp),%ecx	/* %ecx = enc flag */
	xchgl	%esp,%ebx	/* aligned scratch frame */
	movdqu	(%ebp),%xmm1	/* %xmm1 = IV */
	subl	%esi,%edi	/* out as delta from in */
	movl	%ebx,48(%esp)	/* save caller's %esp */
	movl	%edi,(%esp)	/* stash out-in delta */
	movl	%edx,4(%esp)	/* stash key */
	movl	%ebp,8(%esp)	/* stash ivec */
	movl	%eax,%edi	/* %edi = remaining length - 16 */
	leal	L_vpaes_consts+0x30-L021pic_point,%ebp	/* PIC offset; fixed up in preheat */
	call	__vpaes_preheat
L021pic_point:
	cmpl	$0,%ecx
	je	L022cbc_dec_loop
	jmp	L023cbc_enc_loop
.align	4,0x90
L023cbc_enc_loop:
	movdqu	(%esi),%xmm0	/* plaintext block */
	pxor	%xmm1,%xmm0	/* xor IV / previous ciphertext */
	call	__vpaes_encrypt_core
	movl	(%esp),%ebx	/* out-in delta */
	movl	4(%esp),%edx	/* reload key (core advanced its copy) */
	movdqa	%xmm0,%xmm1	/* chain ciphertext */
	movdqu	%xmm0,(%ebx,%esi,1)	/* store ciphertext */
	leal	16(%esi),%esi
	subl	$16,%edi
	jnc	L023cbc_enc_loop
	jmp	L024cbc_done
.align	4,0x90
L022cbc_dec_loop:
	movdqu	(%esi),%xmm0	/* ciphertext block */
	movdqa	%xmm1,16(%esp)	/* spill chain value */
	movdqa	%xmm0,32(%esp)	/* spill ciphertext (next chain value) */
	call	__vpaes_decrypt_core
	movl	(%esp),%ebx	/* out-in delta */
	movl	4(%esp),%edx	/* reload key */
	pxor	16(%esp),%xmm0	/* xor previous ciphertext / IV */
	movdqa	32(%esp),%xmm1	/* chain this ciphertext */
	movdqu	%xmm0,(%ebx,%esi,1)	/* store plaintext */
	leal	16(%esi),%esi
	subl	$16,%edi
	jnc	L022cbc_dec_loop
L024cbc_done:
	movl	8(%esp),%ebx	/* ivec */
	movl	48(%esp),%esp	/* restore caller's stack */
	movdqu	%xmm1,(%ebx)	/* write back updated IV */
L020cbc_abort:
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
713