.text
.globl gcm_gmult_4bit_x86
.type gcm_gmult_4bit_x86,@function
.align 16
gcm_gmult_4bit_x86:
.L_gcm_gmult_4bit_x86_begin:
#ifdef __CET__

    .byte 243,15,30,251
#endif

    pushl %ebp
    pushl %ebx
    pushl %esi
    pushl %edi
    subl $84,%esp
    movl 104(%esp),%edi
    movl 108(%esp),%esi
    movl (%edi),%ebp
    movl 4(%edi),%edx
    movl 8(%edi),%ecx
    movl 12(%edi),%ebx
    movl $0,16(%esp)
    movl $471859200,20(%esp)
    movl $943718400,24(%esp)
    movl $610271232,28(%esp)
    movl $1887436800,32(%esp)
    movl $1822425088,36(%esp)
    movl $1220542464,40(%esp)
    movl $1423966208,44(%esp)
    movl $3774873600,48(%esp)
    movl $4246732800,52(%esp)
    movl $3644850176,56(%esp)
    movl $3311403008,60(%esp)
    movl $2441084928,64(%esp)
    movl $2376073216,68(%esp)
    movl $2847932416,72(%esp)
    movl $3051356160,76(%esp)
    movl %ebp,(%esp)
    movl %edx,4(%esp)
    movl %ecx,8(%esp)
    movl %ebx,12(%esp)
    shrl $20,%ebx
    andl $240,%ebx
    movl 4(%esi,%ebx,1),%ebp
    movl (%esi,%ebx,1),%edx
    movl 12(%esi,%ebx,1),%ecx
    movl 8(%esi,%ebx,1),%ebx
    xorl %eax,%eax
    movl $15,%edi
    jmp .L000x86_loop
.align 16
.L000x86_loop:
    movb %bl,%al
    shrdl $4,%ecx,%ebx
    andb $15,%al
    shrdl $4,%edx,%ecx
    shrdl $4,%ebp,%edx
    shrl $4,%ebp
    xorl 16(%esp,%eax,4),%ebp
    movb (%esp,%edi,1),%al
    andb $240,%al
    xorl 8(%esi,%eax,1),%ebx
    xorl 12(%esi,%eax,1),%ecx
    xorl (%esi,%eax,1),%edx
    xorl 4(%esi,%eax,1),%ebp
    decl %edi
    js .L001x86_break
    movb %bl,%al
    shrdl $4,%ecx,%ebx
    andb $15,%al
    shrdl $4,%edx,%ecx
    shrdl $4,%ebp,%edx
    shrl $4,%ebp
    xorl 16(%esp,%eax,4),%ebp
    movb (%esp,%edi,1),%al
    shlb $4,%al
    xorl 8(%esi,%eax,1),%ebx
    xorl 12(%esi,%eax,1),%ecx
    xorl (%esi,%eax,1),%edx
    xorl 4(%esi,%eax,1),%ebp
    jmp .L000x86_loop
.align 16
.L001x86_break:
    bswap %ebx
    bswap %ecx
    bswap %edx
    bswap %ebp
    movl 104(%esp),%edi
    movl %ebx,12(%edi)
    movl %ecx,8(%edi)
    movl %edx,4(%edi)
    movl %ebp,(%edi)
    addl $84,%esp
    popl %edi
    popl %esi
    popl %ebx
    popl %ebp
    ret
.size gcm_gmult_4bit_x86,.-.L_gcm_gmult_4bit_x86_begin
.globl gcm_ghash_4bit_x86
.type gcm_ghash_4bit_x86,@function
.align 16
gcm_ghash_4bit_x86:
.L_gcm_ghash_4bit_x86_begin:
#ifdef __CET__

    .byte 243,15,30,251
#endif

    pushl %ebp
    pushl %ebx
    pushl %esi
    pushl %edi
    subl $84,%esp
    movl 104(%esp),%ebx
    movl 108(%esp),%esi
    movl 112(%esp),%edi
    movl 116(%esp),%ecx
    addl %edi,%ecx
    movl %ecx,116(%esp)
    movl (%ebx),%ebp
    movl 4(%ebx),%edx
    movl 8(%ebx),%ecx
    movl 12(%ebx),%ebx
    movl $0,16(%esp)
    movl $471859200,20(%esp)
    movl $943718400,24(%esp)
    movl $610271232,28(%esp)
    movl $1887436800,32(%esp)
    movl $1822425088,36(%esp)
    movl $1220542464,40(%esp)
    movl $1423966208,44(%esp)
    movl $3774873600,48(%esp)
    movl $4246732800,52(%esp)
    movl $3644850176,56(%esp)
    movl $3311403008,60(%esp)
    movl $2441084928,64(%esp)
    movl $2376073216,68(%esp)
    movl $2847932416,72(%esp)
    movl $3051356160,76(%esp)
.align 16
.L002x86_outer_loop:
    xorl 12(%edi),%ebx
    xorl 8(%edi),%ecx
    xorl 4(%edi),%edx
    xorl (%edi),%ebp
    movl %ebx,12(%esp)
    movl %ecx,8(%esp)
    movl %edx,4(%esp)
    movl %ebp,(%esp)
    shrl $20,%ebx
    andl $240,%ebx
    movl 4(%esi,%ebx,1),%ebp
    movl (%esi,%ebx,1),%edx
    movl 12(%esi,%ebx,1),%ecx
    movl 8(%esi,%ebx,1),%ebx
    xorl %eax,%eax
    movl $15,%edi
    jmp .L003x86_loop
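/* A note on the loop below (added commentary, not part of the generated
   output): this is the same 4-bit table-driven GHASH multiplication as in
   gcm_gmult_4bit_x86 above. Xi is staged at 0-15(%esp) and consumed one
   nibble at a time from the last byte upward; the shrdl chain shifts the
   128-bit accumulator (%ebp:%edx:%ecx:%ebx) right by 4 between lookups,
   and the rem_4bit constants staged at 16(%esp)..76(%esp) fold the
   shifted-out nibble back in modulo the GHASH polynomial. */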
.align 16
.L003x86_loop:
    movb %bl,%al
    shrdl $4,%ecx,%ebx
    andb $15,%al
    shrdl $4,%edx,%ecx
    shrdl $4,%ebp,%edx
    shrl $4,%ebp
    xorl 16(%esp,%eax,4),%ebp
    movb (%esp,%edi,1),%al
    andb $240,%al
    xorl 8(%esi,%eax,1),%ebx
    xorl 12(%esi,%eax,1),%ecx
    xorl (%esi,%eax,1),%edx
    xorl 4(%esi,%eax,1),%ebp
    decl %edi
    js .L004x86_break
    movb %bl,%al
    shrdl $4,%ecx,%ebx
    andb $15,%al
    shrdl $4,%edx,%ecx
    shrdl $4,%ebp,%edx
    shrl $4,%ebp
    xorl 16(%esp,%eax,4),%ebp
    movb (%esp,%edi,1),%al
    shlb $4,%al
    xorl 8(%esi,%eax,1),%ebx
    xorl 12(%esi,%eax,1),%ecx
    xorl (%esi,%eax,1),%edx
    xorl 4(%esi,%eax,1),%ebp
    jmp .L003x86_loop
.align 16
.L004x86_break:
    bswap %ebx
    bswap %ecx
    bswap %edx
    bswap %ebp
    movl 112(%esp),%edi
    leal 16(%edi),%edi
    cmpl 116(%esp),%edi
    movl %edi,112(%esp)
    jb .L002x86_outer_loop
    movl 104(%esp),%edi
    movl %ebx,12(%edi)
    movl %ecx,8(%edi)
    movl %edx,4(%edi)
    movl %ebp,(%edi)
    addl $84,%esp
    popl %edi
    popl %esi
    popl %ebx
    popl %ebp
    ret
.size gcm_ghash_4bit_x86,.-.L_gcm_ghash_4bit_x86_begin
.globl gcm_gmult_4bit_mmx
.type gcm_gmult_4bit_mmx,@function
.align 16
gcm_gmult_4bit_mmx:
.L_gcm_gmult_4bit_mmx_begin:
#ifdef __CET__

    .byte 243,15,30,251
#endif

    pushl %ebp
    pushl %ebx
    pushl %esi
    pushl %edi
    movl 20(%esp),%edi
    movl 24(%esp),%esi
    call .L005pic_point
.L005pic_point:
    popl %eax
    leal .Lrem_4bit-.L005pic_point(%eax),%eax
    movzbl 15(%edi),%ebx
    xorl %ecx,%ecx
    movl %ebx,%edx
    movb %dl,%cl
    movl $14,%ebp
    shlb $4,%cl
    andl $240,%edx
    movq 8(%esi,%ecx,1),%mm0
    movq (%esi,%ecx,1),%mm1
    movd %mm0,%ebx
    jmp .L006mmx_loop
.align 16
.L006mmx_loop:
    psrlq $4,%mm0
    andl $15,%ebx
    movq %mm1,%mm2
    psrlq $4,%mm1
    pxor 8(%esi,%edx,1),%mm0
    movb (%edi,%ebp,1),%cl
    psllq $60,%mm2
    pxor (%eax,%ebx,8),%mm1
    decl %ebp
    movd %mm0,%ebx
    pxor (%esi,%edx,1),%mm1
    movl %ecx,%edx
    pxor %mm2,%mm0
    js .L007mmx_break
    shlb $4,%cl
    andl $15,%ebx
    psrlq $4,%mm0
    andl $240,%edx
    movq %mm1,%mm2
    psrlq $4,%mm1
    pxor 8(%esi,%ecx,1),%mm0
    psllq $60,%mm2
    pxor (%eax,%ebx,8),%mm1
    movd %mm0,%ebx
    pxor (%esi,%ecx,1),%mm1
    pxor %mm2,%mm0
    jmp .L006mmx_loop
.align 16
.L007mmx_break:
    shlb $4,%cl
    andl $15,%ebx
    psrlq $4,%mm0
    andl $240,%edx
    movq %mm1,%mm2
    psrlq $4,%mm1
    pxor 8(%esi,%ecx,1),%mm0
    psllq $60,%mm2
    pxor (%eax,%ebx,8),%mm1
    movd %mm0,%ebx
    pxor (%esi,%ecx,1),%mm1
    pxor %mm2,%mm0
    psrlq $4,%mm0
    andl $15,%ebx
    movq %mm1,%mm2
    psrlq $4,%mm1
    pxor 8(%esi,%edx,1),%mm0
    psllq $60,%mm2
    pxor (%eax,%ebx,8),%mm1
    movd %mm0,%ebx
    pxor (%esi,%edx,1),%mm1
    pxor %mm2,%mm0
    psrlq $32,%mm0
    movd %mm1,%edx
    psrlq $32,%mm1
    movd %mm0,%ecx
    movd %mm1,%ebp
    bswap %ebx
    bswap %edx
    bswap %ecx
    bswap %ebp
    emms
    movl %ebx,12(%edi)
    movl %edx,4(%edi)
    movl %ecx,8(%edi)
    movl %ebp,(%edi)
    popl %edi
    popl %esi
    popl %ebx
    popl %ebp
    ret
.size gcm_gmult_4bit_mmx,.-.L_gcm_gmult_4bit_mmx_begin
.globl gcm_ghash_4bit_mmx
.type gcm_ghash_4bit_mmx,@function
.align 16
gcm_ghash_4bit_mmx:
.L_gcm_ghash_4bit_mmx_begin:
#ifdef __CET__

    .byte 243,15,30,251
#endif

    pushl %ebp
    pushl %ebx
    pushl %esi
    pushl %edi
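/* Added commentary: gcm_ghash_4bit_mmx first expands Htable onto the
   (64-byte-aligned) stack frame built below -- the two 64-bit halves of
   each of the 16 entries at 16(%esp) and 144(%esp), copies of those
   halves shifted right by 4 bits at 272(%esp) and 400(%esp), and one
   carry byte per entry at 0-15(%esp). The main loop then processes the
   input one byte (two nibbles) per step, using the 8-bit remainder
   table .Lrem_8bit for the reduction. */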
    movl 20(%esp),%eax
    movl 24(%esp),%ebx
    movl 28(%esp),%ecx
    movl 32(%esp),%edx
    movl %esp,%ebp
    call .L008pic_point
.L008pic_point:
    popl %esi
    leal .Lrem_8bit-.L008pic_point(%esi),%esi
    subl $544,%esp
    andl $-64,%esp
    subl $16,%esp
    addl %ecx,%edx
    movl %eax,544(%esp)
    movl %edx,552(%esp)
    movl %ebp,556(%esp)
    addl $128,%ebx
    leal 144(%esp),%edi
    leal 400(%esp),%ebp
    movl -120(%ebx),%edx
    movq -120(%ebx),%mm0
    movq -128(%ebx),%mm3
    shll $4,%edx
    movb %dl,(%esp)
    movl -104(%ebx),%edx
    movq -104(%ebx),%mm2
    movq -112(%ebx),%mm5
    movq %mm0,-128(%edi)
    psrlq $4,%mm0
    movq %mm3,(%edi)
    movq %mm3,%mm7
    psrlq $4,%mm3
    shll $4,%edx
    movb %dl,1(%esp)
    movl -88(%ebx),%edx
    movq -88(%ebx),%mm1
    psllq $60,%mm7
    movq -96(%ebx),%mm4
    por %mm7,%mm0
    movq %mm2,-120(%edi)
    psrlq $4,%mm2
    movq %mm5,8(%edi)
    movq %mm5,%mm6
    movq %mm0,-128(%ebp)
    psrlq $4,%mm5
    movq %mm3,(%ebp)
    shll $4,%edx
    movb %dl,2(%esp)
    movl -72(%ebx),%edx
    movq -72(%ebx),%mm0
    psllq $60,%mm6
    movq -80(%ebx),%mm3
    por %mm6,%mm2
    movq %mm1,-112(%edi)
    psrlq $4,%mm1
    movq %mm4,16(%edi)
    movq %mm4,%mm7
    movq %mm2,-120(%ebp)
    psrlq $4,%mm4
    movq %mm5,8(%ebp)
    shll $4,%edx
    movb %dl,3(%esp)
    movl -56(%ebx),%edx
    movq -56(%ebx),%mm2
    psllq $60,%mm7
    movq -64(%ebx),%mm5
    por %mm7,%mm1
    movq %mm0,-104(%edi)
    psrlq $4,%mm0
    movq %mm3,24(%edi)
    movq %mm3,%mm6
    movq %mm1,-112(%ebp)
    psrlq $4,%mm3
    movq %mm4,16(%ebp)
    shll $4,%edx
    movb %dl,4(%esp)
    movl -40(%ebx),%edx
    movq -40(%ebx),%mm1
    psllq $60,%mm6
    movq -48(%ebx),%mm4
    por %mm6,%mm0
    movq %mm2,-96(%edi)
    psrlq $4,%mm2
    movq %mm5,32(%edi)
    movq %mm5,%mm7
    movq %mm0,-104(%ebp)
    psrlq $4,%mm5
    movq %mm3,24(%ebp)
    shll $4,%edx
    movb %dl,5(%esp)
    movl -24(%ebx),%edx
    movq -24(%ebx),%mm0
    psllq $60,%mm7
    movq -32(%ebx),%mm3
    por %mm7,%mm2
    movq %mm1,-88(%edi)
    psrlq $4,%mm1
    movq %mm4,40(%edi)
    movq %mm4,%mm6
    movq %mm2,-96(%ebp)
    psrlq $4,%mm4
    movq %mm5,32(%ebp)
    shll $4,%edx
    movb %dl,6(%esp)
    movl -8(%ebx),%edx
    movq -8(%ebx),%mm2
    psllq $60,%mm6
    movq -16(%ebx),%mm5
    por %mm6,%mm1
    movq %mm0,-80(%edi)
    psrlq $4,%mm0
    movq %mm3,48(%edi)
    movq %mm3,%mm7
    movq %mm1,-88(%ebp)
    psrlq $4,%mm3
    movq %mm4,40(%ebp)
    shll $4,%edx
    movb %dl,7(%esp)
    movl 8(%ebx),%edx
    movq 8(%ebx),%mm1
    psllq $60,%mm7
    movq (%ebx),%mm4
    por %mm7,%mm0
    movq %mm2,-72(%edi)
    psrlq $4,%mm2
    movq %mm5,56(%edi)
    movq %mm5,%mm6
    movq %mm0,-80(%ebp)
    psrlq $4,%mm5
    movq %mm3,48(%ebp)
    shll $4,%edx
    movb %dl,8(%esp)
    movl 24(%ebx),%edx
    movq 24(%ebx),%mm0
    psllq $60,%mm6
    movq 16(%ebx),%mm3
    por %mm6,%mm2
    movq %mm1,-64(%edi)
    psrlq $4,%mm1
    movq %mm4,64(%edi)
    movq %mm4,%mm7
    movq %mm2,-72(%ebp)
    psrlq $4,%mm4
    movq %mm5,56(%ebp)
    shll $4,%edx
    movb %dl,9(%esp)
    movl 40(%ebx),%edx
    movq 40(%ebx),%mm2
    psllq $60,%mm7
    movq 32(%ebx),%mm5
    por %mm7,%mm1
    movq %mm0,-56(%edi)
    psrlq $4,%mm0
    movq %mm3,72(%edi)
    movq %mm3,%mm6
    movq %mm1,-64(%ebp)
    psrlq $4,%mm3
    movq %mm4,64(%ebp)
    shll $4,%edx
    movb %dl,10(%esp)
    movl 56(%ebx),%edx
    movq 56(%ebx),%mm1
    psllq $60,%mm6
    movq 48(%ebx),%mm4
    por %mm6,%mm0
    movq %mm2,-48(%edi)
    psrlq $4,%mm2
    movq %mm5,80(%edi)
    movq %mm5,%mm7
    movq %mm0,-56(%ebp)
    psrlq $4,%mm5
    movq %mm3,72(%ebp)
    shll $4,%edx
    movb %dl,11(%esp)
    movl 72(%ebx),%edx
    movq 72(%ebx),%mm0
    psllq $60,%mm7
    movq 64(%ebx),%mm3
    por %mm7,%mm2
    movq %mm1,-40(%edi)
    psrlq $4,%mm1
    movq %mm4,88(%edi)
    movq %mm4,%mm6
    movq %mm2,-48(%ebp)
    psrlq $4,%mm4
    movq %mm5,80(%ebp)
    shll $4,%edx
    movb %dl,12(%esp)
    movl 88(%ebx),%edx
    movq 88(%ebx),%mm2
    psllq $60,%mm6
    movq 80(%ebx),%mm5
    por %mm6,%mm1
    movq %mm0,-32(%edi)
    psrlq $4,%mm0
    movq %mm3,96(%edi)
    movq %mm3,%mm7
    movq %mm1,-40(%ebp)
    psrlq $4,%mm3
    movq %mm4,88(%ebp)
    shll $4,%edx
    movb %dl,13(%esp)
    movl 104(%ebx),%edx
    movq 104(%ebx),%mm1
    psllq $60,%mm7
    movq 96(%ebx),%mm4
    por %mm7,%mm0
    movq %mm2,-24(%edi)
    psrlq $4,%mm2
    movq %mm5,104(%edi)
    movq %mm5,%mm6
    movq %mm0,-32(%ebp)
    psrlq $4,%mm5
    movq %mm3,96(%ebp)
    shll $4,%edx
    movb %dl,14(%esp)
    movl 120(%ebx),%edx
    movq 120(%ebx),%mm0
    psllq $60,%mm6
    movq 112(%ebx),%mm3
    por %mm6,%mm2
    movq %mm1,-16(%edi)
    psrlq $4,%mm1
    movq %mm4,112(%edi)
    movq %mm4,%mm7
    movq %mm2,-24(%ebp)
    psrlq $4,%mm4
    movq %mm5,104(%ebp)
    shll $4,%edx
    movb %dl,15(%esp)
    psllq $60,%mm7
    por %mm7,%mm1
    movq %mm0,-8(%edi)
    psrlq $4,%mm0
    movq %mm3,120(%edi)
    movq %mm3,%mm6
    movq %mm1,-16(%ebp)
    psrlq $4,%mm3
    movq %mm4,112(%ebp)
    psllq $60,%mm6
    por %mm6,%mm0
    movq %mm0,-8(%ebp)
    movq %mm3,120(%ebp)
    movq (%eax),%mm6
    movl 8(%eax),%ebx
    movl 12(%eax),%edx
.align 16
.L009outer:
    xorl 12(%ecx),%edx
    xorl 8(%ecx),%ebx
    pxor (%ecx),%mm6
    leal 16(%ecx),%ecx
    movl %ebx,536(%esp)
    movq %mm6,528(%esp)
    movl %ecx,548(%esp)
    xorl %eax,%eax
    roll $8,%edx
    movb %dl,%al
    movl %eax,%ebp
    andb $15,%al
    shrl $4,%ebp
    pxor %mm0,%mm0
    roll $8,%edx
    pxor %mm1,%mm1
    pxor %mm2,%mm2
    movq 16(%esp,%eax,8),%mm7
    movq 144(%esp,%eax,8),%mm6
    movb %dl,%al
    movd %mm7,%ebx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%edi
    psrlq $8,%mm6
    pxor 272(%esp,%ebp,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    shrl $4,%edi
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%ebp,8),%mm6
    xorb (%esp,%ebp,1),%bl
    movb %dl,%al
    movd %mm7,%ecx
    movzbl %bl,%ebx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%ebp
    psrlq $8,%mm6
    pxor 272(%esp,%edi,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    shrl $4,%ebp
    pinsrw $2,(%esi,%ebx,2),%mm2
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%edi,8),%mm6
    xorb (%esp,%edi,1),%cl
    movb %dl,%al
    movl 536(%esp),%edx
    movd %mm7,%ebx
    movzbl %cl,%ecx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%edi
    psrlq $8,%mm6
    pxor 272(%esp,%ebp,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm2,%mm6
    shrl $4,%edi
    pinsrw $2,(%esi,%ecx,2),%mm1
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%ebp,8),%mm6
    xorb (%esp,%ebp,1),%bl
    movb %dl,%al
    movd %mm7,%ecx
    movzbl %bl,%ebx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%ebp
    psrlq $8,%mm6
    pxor 272(%esp,%edi,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm1,%mm6
    shrl $4,%ebp
    pinsrw $2,(%esi,%ebx,2),%mm0
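/* Added commentary: this lookup/shift/fold pattern repeats once per input
   byte. Each roll $8 exposes the next byte of Xi^inp in %dl; its two
   nibbles select entries from the stack tables, the accumulators mm7/mm6
   are shifted right a byte at a time, and pinsrw folds the .Lrem_8bit
   correction for the byte shifted out of the low end. */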
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%edi,8),%mm6
    xorb (%esp,%edi,1),%cl
    movb %dl,%al
    movd %mm7,%ebx
    movzbl %cl,%ecx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%edi
    psrlq $8,%mm6
    pxor 272(%esp,%ebp,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm0,%mm6
    shrl $4,%edi
    pinsrw $2,(%esi,%ecx,2),%mm2
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%ebp,8),%mm6
    xorb (%esp,%ebp,1),%bl
    movb %dl,%al
    movd %mm7,%ecx
    movzbl %bl,%ebx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%ebp
    psrlq $8,%mm6
    pxor 272(%esp,%edi,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm2,%mm6
    shrl $4,%ebp
    pinsrw $2,(%esi,%ebx,2),%mm1
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%edi,8),%mm6
    xorb (%esp,%edi,1),%cl
    movb %dl,%al
    movl 532(%esp),%edx
    movd %mm7,%ebx
    movzbl %cl,%ecx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%edi
    psrlq $8,%mm6
    pxor 272(%esp,%ebp,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm1,%mm6
    shrl $4,%edi
    pinsrw $2,(%esi,%ecx,2),%mm0
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%ebp,8),%mm6
    xorb (%esp,%ebp,1),%bl
    movb %dl,%al
    movd %mm7,%ecx
    movzbl %bl,%ebx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%ebp
    psrlq $8,%mm6
    pxor 272(%esp,%edi,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm0,%mm6
    shrl $4,%ebp
    pinsrw $2,(%esi,%ebx,2),%mm2
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%edi,8),%mm6
    xorb (%esp,%edi,1),%cl
    movb %dl,%al
    movd %mm7,%ebx
    movzbl %cl,%ecx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%edi
    psrlq $8,%mm6
    pxor 272(%esp,%ebp,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm2,%mm6
    shrl $4,%edi
    pinsrw $2,(%esi,%ecx,2),%mm1
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%ebp,8),%mm6
    xorb (%esp,%ebp,1),%bl
    movb %dl,%al
    movd %mm7,%ecx
    movzbl %bl,%ebx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%ebp
    psrlq $8,%mm6
    pxor 272(%esp,%edi,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm1,%mm6
    shrl $4,%ebp
    pinsrw $2,(%esi,%ebx,2),%mm0
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%edi,8),%mm6
    xorb (%esp,%edi,1),%cl
    movb %dl,%al
    movl 528(%esp),%edx
    movd %mm7,%ebx
    movzbl %cl,%ecx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%edi
    psrlq $8,%mm6
    pxor 272(%esp,%ebp,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm0,%mm6
    shrl $4,%edi
    pinsrw $2,(%esi,%ecx,2),%mm2
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%ebp,8),%mm6
    xorb (%esp,%ebp,1),%bl
    movb %dl,%al
    movd %mm7,%ecx
    movzbl %bl,%ebx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%ebp
    psrlq $8,%mm6
    pxor 272(%esp,%edi,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm2,%mm6
    shrl $4,%ebp
    pinsrw $2,(%esi,%ebx,2),%mm1
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%edi,8),%mm6
    xorb (%esp,%edi,1),%cl
    movb %dl,%al
    movd %mm7,%ebx
    movzbl %cl,%ecx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%edi
    psrlq $8,%mm6
    pxor 272(%esp,%ebp,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm1,%mm6
    shrl $4,%edi
    pinsrw $2,(%esi,%ecx,2),%mm0
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%ebp,8),%mm6
    xorb (%esp,%ebp,1),%bl
    movb %dl,%al
    movd %mm7,%ecx
    movzbl %bl,%ebx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%ebp
    psrlq $8,%mm6
    pxor 272(%esp,%edi,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm0,%mm6
    shrl $4,%ebp
    pinsrw $2,(%esi,%ebx,2),%mm2
    pxor 16(%esp,%eax,8),%mm7
    roll $8,%edx
    pxor 144(%esp,%eax,8),%mm6
    pxor %mm3,%mm7
    pxor 400(%esp,%edi,8),%mm6
    xorb (%esp,%edi,1),%cl
    movb %dl,%al
    movl 524(%esp),%edx
    movd %mm7,%ebx
    movzbl %cl,%ecx
    psrlq $8,%mm7
    movq %mm6,%mm3
    movl %eax,%edi
    psrlq $8,%mm6
    pxor 272(%esp,%ebp,8),%mm7
    andb $15,%al
    psllq $56,%mm3
    pxor %mm2,%mm6
    shrl $4,%edi
    pinsrw $2,(%esi,%ecx,2),%mm1
    pxor 16(%esp,%eax,8),%mm7
    pxor 144(%esp,%eax,8),%mm6
    xorb (%esp,%ebp,1),%bl
    pxor %mm3,%mm7
    pxor 400(%esp,%ebp,8),%mm6
    movzbl %bl,%ebx
    pxor %mm2,%mm2
    psllq $4,%mm1
    movd %mm7,%ecx
    psrlq $4,%mm7
    movq %mm6,%mm3
    psrlq $4,%mm6
    shll $4,%ecx
    pxor 16(%esp,%edi,8),%mm7
    psllq $60,%mm3
    movzbl %cl,%ecx
    pxor %mm3,%mm7
    pxor 144(%esp,%edi,8),%mm6
    pinsrw $2,(%esi,%ebx,2),%mm0
    pxor %mm1,%mm6
    movd %mm7,%edx
    pinsrw $3,(%esi,%ecx,2),%mm2
    psllq $12,%mm0
    pxor %mm0,%mm6
    psrlq $32,%mm7
    pxor %mm2,%mm6
    movl 548(%esp),%ecx
    movd %mm7,%ebx
    movq %mm6,%mm3
    psllw $8,%mm6
    psrlw $8,%mm3
    por %mm3,%mm6
    bswap %edx
    pshufw $27,%mm6,%mm6
    bswap %ebx
    cmpl 552(%esp),%ecx
    jne .L009outer
    movl 544(%esp),%eax
    movl %edx,12(%eax)
    movl %ebx,8(%eax)
    movq %mm6,(%eax)
    movl 556(%esp),%esp
    emms
    popl %edi
    popl %esi
    popl %ebx
    popl %ebp
    ret
.size gcm_ghash_4bit_mmx,.-.L_gcm_ghash_4bit_mmx_begin
.globl gcm_init_clmul
.type gcm_init_clmul,@function
.align 16
gcm_init_clmul:
.L_gcm_init_clmul_begin:
#ifdef __CET__

    .byte 243,15,30,251
#endif

    movl 4(%esp),%edx
    movl 8(%esp),%eax
    call .L010pic
.L010pic:
    popl %ecx
    leal .Lbswap-.L010pic(%ecx),%ecx
    movdqu (%eax),%xmm2
    pshufd $78,%xmm2,%xmm2
    pshufd $255,%xmm2,%xmm4
    movdqa %xmm2,%xmm3
    psllq $1,%xmm2
    pxor %xmm5,%xmm5
    psrlq $63,%xmm3
    pcmpgtd %xmm4,%xmm5
    pslldq $8,%xmm3
    por %xmm3,%xmm2
    pand 16(%ecx),%xmm5
    pxor %xmm5,%xmm2
    movdqa %xmm2,%xmm0
    movdqa %xmm0,%xmm1
    pshufd $78,%xmm0,%xmm3
    pshufd $78,%xmm2,%xmm4
    pxor %xmm0,%xmm3
    pxor %xmm2,%xmm4
    .byte 102,15,58,68,194,0
    .byte 102,15,58,68,202,17
    .byte 102,15,58,68,220,0
    xorps %xmm0,%xmm3
    xorps %xmm1,%xmm3
    movdqa %xmm3,%xmm4
    psrldq $8,%xmm3
    pslldq $8,%xmm4
    pxor %xmm3,%xmm1
    pxor %xmm4,%xmm0
    movdqa %xmm0,%xmm4
    movdqa %xmm0,%xmm3
    psllq $5,%xmm0
    pxor %xmm0,%xmm3
    psllq $1,%xmm0
    pxor %xmm3,%xmm0
    psllq $57,%xmm0
    movdqa %xmm0,%xmm3
    pslldq $8,%xmm0
    psrldq $8,%xmm3
    pxor %xmm4,%xmm0
    pxor %xmm3,%xmm1
    movdqa %xmm0,%xmm4
    psrlq $1,%xmm0
    pxor %xmm4,%xmm1
    pxor %xmm0,%xmm4
    psrlq $5,%xmm0
    pxor %xmm4,%xmm0
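/* Added commentary: the remaining psrlq/pxor pair completes the
   bit-reflected reduction; %xmm0 then holds H^2, which is stored below
   at 16(%edx) alongside H at (%edx) and, at 32(%edx), the xor'd
   high/low halves of both that the Karatsuba multiplications consume. */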
    psrlq $1,%xmm0
    pxor %xmm1,%xmm0
    pshufd $78,%xmm2,%xmm3
    pshufd $78,%xmm0,%xmm4
    pxor %xmm2,%xmm3
    movdqu %xmm2,(%edx)
    pxor %xmm0,%xmm4
    movdqu %xmm0,16(%edx)
    .byte 102,15,58,15,227,8
    movdqu %xmm4,32(%edx)
    ret
.size gcm_init_clmul,.-.L_gcm_init_clmul_begin
.globl gcm_gmult_clmul
.type gcm_gmult_clmul,@function
.align 16
gcm_gmult_clmul:
.L_gcm_gmult_clmul_begin:
#ifdef __CET__

    .byte 243,15,30,251
#endif

    movl 4(%esp),%eax
    movl 8(%esp),%edx
    call .L011pic
.L011pic:
    popl %ecx
    leal .Lbswap-.L011pic(%ecx),%ecx
    movdqu (%eax),%xmm0
    movdqa (%ecx),%xmm5
    movups (%edx),%xmm2
    .byte 102,15,56,0,197
    movups 32(%edx),%xmm4
    movdqa %xmm0,%xmm1
    pshufd $78,%xmm0,%xmm3
    pxor %xmm0,%xmm3
    .byte 102,15,58,68,194,0
    .byte 102,15,58,68,202,17
    .byte 102,15,58,68,220,0
    xorps %xmm0,%xmm3
    xorps %xmm1,%xmm3
    movdqa %xmm3,%xmm4
    psrldq $8,%xmm3
    pslldq $8,%xmm4
    pxor %xmm3,%xmm1
    pxor %xmm4,%xmm0
    movdqa %xmm0,%xmm4
    movdqa %xmm0,%xmm3
    psllq $5,%xmm0
    pxor %xmm0,%xmm3
    psllq $1,%xmm0
    pxor %xmm3,%xmm0
    psllq $57,%xmm0
    movdqa %xmm0,%xmm3
    pslldq $8,%xmm0
    psrldq $8,%xmm3
    pxor %xmm4,%xmm0
    pxor %xmm3,%xmm1
    movdqa %xmm0,%xmm4
    psrlq $1,%xmm0
    pxor %xmm4,%xmm1
    pxor %xmm0,%xmm4
    psrlq $5,%xmm0
    pxor %xmm4,%xmm0
    psrlq $1,%xmm0
    pxor %xmm1,%xmm0
    .byte 102,15,56,0,197
    movdqu %xmm0,(%eax)
    ret
.size gcm_gmult_clmul,.-.L_gcm_gmult_clmul_begin
.globl gcm_ghash_clmul
.type gcm_ghash_clmul,@function
.align 16
gcm_ghash_clmul:
.L_gcm_ghash_clmul_begin:
#ifdef __CET__

    .byte 243,15,30,251
#endif

    pushl %ebp
    pushl %ebx
    pushl %esi
    pushl %edi
    movl 20(%esp),%eax
    movl 24(%esp),%edx
    movl 28(%esp),%esi
    movl 32(%esp),%ebx
    call .L012pic
.L012pic:
    popl %ecx
    leal .Lbswap-.L012pic(%ecx),%ecx
    movdqu (%eax),%xmm0
    movdqa (%ecx),%xmm5
    movdqu (%edx),%xmm2
    .byte 102,15,56,0,197
    subl $16,%ebx
    jz .L013odd_tail
    movdqu (%esi),%xmm3
    movdqu 16(%esi),%xmm6
    .byte 102,15,56,0,221
    .byte 102,15,56,0,245
    movdqu 32(%edx),%xmm5
    pxor %xmm3,%xmm0
    pshufd $78,%xmm6,%xmm3
    movdqa %xmm6,%xmm7
    pxor %xmm6,%xmm3
    leal 32(%esi),%esi
    .byte 102,15,58,68,242,0
    .byte 102,15,58,68,250,17
    .byte 102,15,58,68,221,0
    movups 16(%edx),%xmm2
    nop
    subl $32,%ebx
    jbe .L014even_tail
    jmp .L015mod_loop
.align 32
.L015mod_loop:
    pshufd $78,%xmm0,%xmm4
    movdqa %xmm0,%xmm1
    pxor %xmm0,%xmm4
    nop
    .byte 102,15,58,68,194,0
    .byte 102,15,58,68,202,17
    .byte 102,15,58,68,229,16
    movups (%edx),%xmm2
    xorps %xmm6,%xmm0
    movdqa (%ecx),%xmm5
    xorps %xmm7,%xmm1
    movdqu (%esi),%xmm7
    pxor %xmm0,%xmm3
    movdqu 16(%esi),%xmm6
    pxor %xmm1,%xmm3
    .byte 102,15,56,0,253
    pxor %xmm3,%xmm4
    movdqa %xmm4,%xmm3
    psrldq $8,%xmm4
    pslldq $8,%xmm3
    pxor %xmm4,%xmm1
    pxor %xmm3,%xmm0
    .byte 102,15,56,0,245
    pxor %xmm7,%xmm1
    movdqa %xmm6,%xmm7
    movdqa %xmm0,%xmm4
    movdqa %xmm0,%xmm3
    psllq $5,%xmm0
    pxor %xmm0,%xmm3
    psllq $1,%xmm0
    pxor %xmm3,%xmm0
    .byte 102,15,58,68,242,0
    movups 32(%edx),%xmm5
    psllq $57,%xmm0
    movdqa %xmm0,%xmm3
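/* Added commentary: first phase of the reduction. The shifted copies
   (x<<57 ^ x<<62 ^ x<<63, built via the psllq $5/$1/$57 chain above) are
   split across the two qword halves by the pslldq/psrldq pair below and
   folded in, interleaved with the .byte-encoded pclmulqdq ops
   (102,15,58,68,...) that already start multiplying the next two blocks. */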
    pslldq $8,%xmm0
    psrldq $8,%xmm3
    pxor %xmm4,%xmm0
    pxor %xmm3,%xmm1
    pshufd $78,%xmm7,%xmm3
    movdqa %xmm0,%xmm4
    psrlq $1,%xmm0
    pxor %xmm7,%xmm3
    pxor %xmm4,%xmm1
    .byte 102,15,58,68,250,17
    movups 16(%edx),%xmm2
    pxor %xmm0,%xmm4
    psrlq $5,%xmm0
    pxor %xmm4,%xmm0
    psrlq $1,%xmm0
    pxor %xmm1,%xmm0
    .byte 102,15,58,68,221,0
    leal 32(%esi),%esi
    subl $32,%ebx
    ja .L015mod_loop
.L014even_tail:
    pshufd $78,%xmm0,%xmm4
    movdqa %xmm0,%xmm1
    pxor %xmm0,%xmm4
    .byte 102,15,58,68,194,0
    .byte 102,15,58,68,202,17
    .byte 102,15,58,68,229,16
    movdqa (%ecx),%xmm5
    xorps %xmm6,%xmm0
    xorps %xmm7,%xmm1
    pxor %xmm0,%xmm3
    pxor %xmm1,%xmm3
    pxor %xmm3,%xmm4
    movdqa %xmm4,%xmm3
    psrldq $8,%xmm4
    pslldq $8,%xmm3
    pxor %xmm4,%xmm1
    pxor %xmm3,%xmm0
    movdqa %xmm0,%xmm4
    movdqa %xmm0,%xmm3
    psllq $5,%xmm0
    pxor %xmm0,%xmm3
    psllq $1,%xmm0
    pxor %xmm3,%xmm0
    psllq $57,%xmm0
    movdqa %xmm0,%xmm3
    pslldq $8,%xmm0
    psrldq $8,%xmm3
    pxor %xmm4,%xmm0
    pxor %xmm3,%xmm1
    movdqa %xmm0,%xmm4
    psrlq $1,%xmm0
    pxor %xmm4,%xmm1
    pxor %xmm0,%xmm4
    psrlq $5,%xmm0
    pxor %xmm4,%xmm0
    psrlq $1,%xmm0
    pxor %xmm1,%xmm0
    testl %ebx,%ebx
    jnz .L016done
    movups (%edx),%xmm2
.L013odd_tail:
    movdqu (%esi),%xmm3
    .byte 102,15,56,0,221
    pxor %xmm3,%xmm0
    movdqa %xmm0,%xmm1
    pshufd $78,%xmm0,%xmm3
    pshufd $78,%xmm2,%xmm4
    pxor %xmm0,%xmm3
    pxor %xmm2,%xmm4
    .byte 102,15,58,68,194,0
    .byte 102,15,58,68,202,17
    .byte 102,15,58,68,220,0
    xorps %xmm0,%xmm3
    xorps %xmm1,%xmm3
    movdqa %xmm3,%xmm4
    psrldq $8,%xmm3
    pslldq $8,%xmm4
    pxor %xmm3,%xmm1
    pxor %xmm4,%xmm0
    movdqa %xmm0,%xmm4
    movdqa %xmm0,%xmm3
    psllq $5,%xmm0
    pxor %xmm0,%xmm3
    psllq $1,%xmm0
    pxor %xmm3,%xmm0
    psllq $57,%xmm0
    movdqa %xmm0,%xmm3
    pslldq $8,%xmm0
    psrldq $8,%xmm3
    pxor %xmm4,%xmm0
    pxor %xmm3,%xmm1
    movdqa %xmm0,%xmm4
    psrlq $1,%xmm0
    pxor %xmm4,%xmm1
    pxor %xmm0,%xmm4
    psrlq $5,%xmm0
    pxor %xmm4,%xmm0
    psrlq $1,%xmm0
    pxor %xmm1,%xmm0
.L016done:
    .byte 102,15,56,0,197
    movdqu %xmm0,(%eax)
    popl %edi
    popl %esi
    popl %ebx
    popl %ebp
    ret
.size gcm_ghash_clmul,.-.L_gcm_ghash_clmul_begin
.align 64
.Lbswap:
.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
.byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,194
.align 64
.Lrem_8bit:
.value 0,450,900,582,1800,1738,1164,1358
.value 3600,4050,3476,3158,2328,2266,2716,2910
.value 7200,7650,8100,7782,6952,6890,6316,6510
.value 4656,5106,4532,4214,5432,5370,5820,6014
.value 14400,14722,15300,14854,16200,16010,15564,15630
.value 13904,14226,13780,13334,12632,12442,13020,13086
.value 9312,9634,10212,9766,9064,8874,8428,8494
.value 10864,11186,10740,10294,11640,11450,12028,12094
.value 28800,28994,29444,29382,30600,30282,29708,30158
.value 32400,32594,32020,31958,31128,30810,31260,31710
.value 27808,28002,28452,28390,27560,27242,26668,27118
.value 25264,25458,24884,24822,26040,25722,26172,26622
.value 18624,18690,19268,19078,20424,19978,19532,19854
.value 18128,18194,17748,17558,16856,16410,16988,17310
.value 21728,21794,22372,22182,21480,21034,20588,20910
.value 23280,23346,22900,22710,24056,23610,24188,24510
.value 57600,57538,57988,58182,58888,59338,58764,58446
.value 61200,61138,60564,60758,59416,59866,60316,59998
.value 64800,64738,65188,65382,64040,64490,63916,63598
.value 62256,62194,61620,61814,62520,62970,63420,63102
.value 55616,55426,56004,56070,56904,57226,56780,56334
.value 55120,54930,54484,54550,53336,53658,54236,53790
.value 50528,50338,50916,50982,49768,50090,49644,49198
.value 52080,51890,51444,51510,52344,52666,53244,52798
.value 37248,36930,37380,37830,38536,38730,38156,38094
.value 40848,40530,39956,40406,39064,39258,39708,39646
.value 36256,35938,36388,36838,35496,35690,35116,35054
.value 33712,33394,32820,33270,33976,34170,34620,34558
.value 43456,43010,43588,43910,44744,44810,44364,44174
.value 42960,42514,42068,42390,41176,41242,41820,41630
.value 46560,46114,46692,47014,45800,45866,45420,45230
.value 48112,47666,47220,47542,48376,48442,49020,48830
.align 64
.Lrem_4bit:
.long 0,0,0,471859200,0,943718400,0,610271232
.long 0,1887436800,0,1822425088,0,1220542464,0,1423966208
.long 0,3774873600,0,4246732800,0,3644850176,0,3311403008
.long 0,2441084928,0,2376073216,0,2847932416,0,3051356160
.byte 71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67
.byte 82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112
.byte 112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62
.byte 0

.section ".note.gnu.property", "a"
.p2align 2
.long 1f - 0f
.long 4f - 1f
.long 5
0:
.asciz "GNU"
1:
.p2align 2
.long 0xc0000002
.long 3f - 2f
2:
.long 3
3:
.p2align 2
4: