| // This file is generated from a similarly-named Perl script in the BoringSSL | 
 | // source tree. Do not edit by hand. | 
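//
// GHASH for x86 with PCLMULQDQ carry-less multiplication, originally from
// the CRYPTOGAMS project (see the credit string at the end of this file).
// GHASH is the GF(2^128) universal hash of AES-GCM; the multiplies below
// are polynomial multiplications reduced modulo x^128 + x^7 + x^2 + x + 1.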
 |  | 
 | #include <openssl/asm_base.h> | 
 |  | 
 | #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86) && defined(__ELF__) | 
 | .text | 
 | .globl	gcm_init_clmul | 
 | .hidden	gcm_init_clmul | 
 | .type	gcm_init_clmul,@function | 
 | .align	16 | 
 | gcm_init_clmul: | 
 | .L_gcm_init_clmul_begin: | 
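	// Approximately: void gcm_init_clmul(u128 Htable[16], const uint64_t H[2]).
	// Precomputes the twisted hash key H, H^2, and Karatsuba helper values
	// consumed by gcm_gmult_clmul and gcm_ghash_clmul.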
	movl	4(%esp),%edx		// Htable (output)
	movl	8(%esp),%eax		// H, the hash key
	call	.L000pic
.L000pic:
	popl	%ecx				// %ecx = runtime address of .L000pic
	leal	.Lbswap-.L000pic(%ecx),%ecx	// %ecx = &.Lbswap, PIC-safe
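	// Load H and "twist" it: shift left by one bit modulo the GHASH
	// polynomial, propagating the carry via a sign-broadcast compare mask
	// and the constant stored at .Lbswap+16.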
 | 	movdqu	(%eax),%xmm2 | 
 | 	pshufd	$78,%xmm2,%xmm2 | 
 | 	pshufd	$255,%xmm2,%xmm4 | 
 | 	movdqa	%xmm2,%xmm3 | 
 | 	psllq	$1,%xmm2 | 
 | 	pxor	%xmm5,%xmm5 | 
 | 	psrlq	$63,%xmm3 | 
 | 	pcmpgtd	%xmm4,%xmm5 | 
 | 	pslldq	$8,%xmm3 | 
 | 	por	%xmm3,%xmm2 | 
 | 	pand	16(%ecx),%xmm5 | 
 | 	pxor	%xmm5,%xmm2 | 
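	// Square the twisted H (xmm2) with one Karatsuba multiplication:
	// three pclmulqdq ops on the low halves, the high halves, and the
	// xor-of-halves.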
 | 	movdqa	%xmm2,%xmm0 | 
 | 	movdqa	%xmm0,%xmm1 | 
 | 	pshufd	$78,%xmm0,%xmm3 | 
 | 	pshufd	$78,%xmm2,%xmm4 | 
 | 	pxor	%xmm0,%xmm3 | 
 | 	pxor	%xmm2,%xmm4 | 
.byte	102,15,58,68,194,0	// pclmulqdq	$0x00,%xmm2,%xmm0
.byte	102,15,58,68,202,17	// pclmulqdq	$0x11,%xmm2,%xmm1
.byte	102,15,58,68,220,0	// pclmulqdq	$0x00,%xmm4,%xmm3
 | 	xorps	%xmm0,%xmm3 | 
 | 	xorps	%xmm1,%xmm3 | 
 | 	movdqa	%xmm3,%xmm4 | 
 | 	psrldq	$8,%xmm3 | 
 | 	pslldq	$8,%xmm4 | 
 | 	pxor	%xmm3,%xmm1 | 
 | 	pxor	%xmm4,%xmm0 | 
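	// First reduction phase: left-shift folding (psllq by 5, 1, 57) of
	// the 256-bit product held in xmm1:xmm0.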
 | 	movdqa	%xmm0,%xmm4 | 
 | 	movdqa	%xmm0,%xmm3 | 
 | 	psllq	$5,%xmm0 | 
 | 	pxor	%xmm0,%xmm3 | 
 | 	psllq	$1,%xmm0 | 
 | 	pxor	%xmm3,%xmm0 | 
 | 	psllq	$57,%xmm0 | 
 | 	movdqa	%xmm0,%xmm3 | 
 | 	pslldq	$8,%xmm0 | 
 | 	psrldq	$8,%xmm3 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	pxor	%xmm3,%xmm1 | 
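	// Second reduction phase: right-shift folding (psrlq by 1, 5, 1).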
 | 	movdqa	%xmm0,%xmm4 | 
 | 	psrlq	$1,%xmm0 | 
 | 	pxor	%xmm4,%xmm1 | 
 | 	pxor	%xmm0,%xmm4 | 
 | 	psrlq	$5,%xmm0 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	psrlq	$1,%xmm0 | 
 | 	pxor	%xmm1,%xmm0 | 
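	// Store Htable[0] = H and Htable[1] = H^2; at Htable[2], store the
	// xor-of-halves of both, packed by palignr for later Karatsuba use.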
 | 	pshufd	$78,%xmm2,%xmm3 | 
 | 	pshufd	$78,%xmm0,%xmm4 | 
 | 	pxor	%xmm2,%xmm3 | 
 | 	movdqu	%xmm2,(%edx) | 
 | 	pxor	%xmm0,%xmm4 | 
 | 	movdqu	%xmm0,16(%edx) | 
.byte	102,15,58,15,227,8	// palignr	$8,%xmm3,%xmm4
 | 	movdqu	%xmm4,32(%edx) | 
 | 	ret | 
 | .size	gcm_init_clmul,.-.L_gcm_init_clmul_begin | 
 | .globl	gcm_gmult_clmul | 
 | .hidden	gcm_gmult_clmul | 
 | .type	gcm_gmult_clmul,@function | 
 | .align	16 | 
 | gcm_gmult_clmul: | 
 | .L_gcm_gmult_clmul_begin: | 
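	// Approximately: void gcm_gmult_clmul(uint8_t Xi[16], const u128 Htable[16]).
	// One GHASH step: Xi = Xi * H, updated in place.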
	movl	4(%esp),%eax		// Xi
	movl	8(%esp),%edx		// Htable
	call	.L001pic
.L001pic:
	popl	%ecx				// %ecx = runtime address of .L001pic
	leal	.Lbswap-.L001pic(%ecx),%ecx	// %ecx = &.Lbswap, PIC-safe
 | 	movdqu	(%eax),%xmm0 | 
 | 	movdqa	(%ecx),%xmm5 | 
 | 	movups	(%edx),%xmm2 | 
.byte	102,15,56,0,197		// pshufb	%xmm5,%xmm0 (byte-swap Xi)
 | 	movups	32(%edx),%xmm4 | 
 | 	movdqa	%xmm0,%xmm1 | 
 | 	pshufd	$78,%xmm0,%xmm3 | 
 | 	pxor	%xmm0,%xmm3 | 
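	// Karatsuba multiply Xi * H, then the same two-phase reduction as in
	// gcm_init_clmul.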
.byte	102,15,58,68,194,0	// pclmulqdq	$0x00,%xmm2,%xmm0
.byte	102,15,58,68,202,17	// pclmulqdq	$0x11,%xmm2,%xmm1
.byte	102,15,58,68,220,0	// pclmulqdq	$0x00,%xmm4,%xmm3
 | 	xorps	%xmm0,%xmm3 | 
 | 	xorps	%xmm1,%xmm3 | 
 | 	movdqa	%xmm3,%xmm4 | 
 | 	psrldq	$8,%xmm3 | 
 | 	pslldq	$8,%xmm4 | 
 | 	pxor	%xmm3,%xmm1 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	movdqa	%xmm0,%xmm4 | 
 | 	movdqa	%xmm0,%xmm3 | 
 | 	psllq	$5,%xmm0 | 
 | 	pxor	%xmm0,%xmm3 | 
 | 	psllq	$1,%xmm0 | 
 | 	pxor	%xmm3,%xmm0 | 
 | 	psllq	$57,%xmm0 | 
 | 	movdqa	%xmm0,%xmm3 | 
 | 	pslldq	$8,%xmm0 | 
 | 	psrldq	$8,%xmm3 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	pxor	%xmm3,%xmm1 | 
 | 	movdqa	%xmm0,%xmm4 | 
 | 	psrlq	$1,%xmm0 | 
 | 	pxor	%xmm4,%xmm1 | 
 | 	pxor	%xmm0,%xmm4 | 
 | 	psrlq	$5,%xmm0 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	psrlq	$1,%xmm0 | 
 | 	pxor	%xmm1,%xmm0 | 
.byte	102,15,56,0,197		// pshufb	%xmm5,%xmm0 (swap Xi back to memory order)
 | 	movdqu	%xmm0,(%eax) | 
 | 	ret | 
 | .size	gcm_gmult_clmul,.-.L_gcm_gmult_clmul_begin | 
 | .globl	gcm_ghash_clmul | 
 | .hidden	gcm_ghash_clmul | 
 | .type	gcm_ghash_clmul,@function | 
 | .align	16 | 
 | gcm_ghash_clmul: | 
 | .L_gcm_ghash_clmul_begin: | 
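	// Approximately: void gcm_ghash_clmul(uint8_t Xi[16], const u128 Htable[16],
	//                                     const uint8_t *inp, size_t len).
	// Folds len bytes of input (a multiple of 16) into the running hash
	// Xi. The argument offsets below account for the four saved registers.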
 | 	pushl	%ebp | 
 | 	pushl	%ebx | 
 | 	pushl	%esi | 
 | 	pushl	%edi | 
	movl	20(%esp),%eax		// Xi
	movl	24(%esp),%edx		// Htable
	movl	28(%esp),%esi		// inp
	movl	32(%esp),%ebx		// len
	call	.L002pic
.L002pic:
	popl	%ecx				// %ecx = runtime address of .L002pic
	leal	.Lbswap-.L002pic(%ecx),%ecx	// %ecx = &.Lbswap, PIC-safe
 | 	movdqu	(%eax),%xmm0 | 
 | 	movdqa	(%ecx),%xmm5 | 
 | 	movdqu	(%edx),%xmm2 | 
.byte	102,15,56,0,197		// pshufb	%xmm5,%xmm0 (byte-swap Xi)
 | 	subl	$16,%ebx | 
 | 	jz	.L003odd_tail | 
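	// At least two blocks: process the input two blocks per iteration,
	// multiplying the running hash by H^2 and the newer block by H, so
	// only one reduction is needed per pair.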
 | 	movdqu	(%esi),%xmm3 | 
 | 	movdqu	16(%esi),%xmm6 | 
.byte	102,15,56,0,221		// pshufb	%xmm5,%xmm3 (byte-swap block 0)
.byte	102,15,56,0,245		// pshufb	%xmm5,%xmm6 (byte-swap block 1)
 | 	movdqu	32(%edx),%xmm5 | 
 | 	pxor	%xmm3,%xmm0 | 
 | 	pshufd	$78,%xmm6,%xmm3 | 
 | 	movdqa	%xmm6,%xmm7 | 
 | 	pxor	%xmm6,%xmm3 | 
 | 	leal	32(%esi),%esi | 
.byte	102,15,58,68,242,0	// pclmulqdq	$0x00,%xmm2,%xmm6
.byte	102,15,58,68,250,17	// pclmulqdq	$0x11,%xmm2,%xmm7
.byte	102,15,58,68,221,0	// pclmulqdq	$0x00,%xmm5,%xmm3
 | 	movups	16(%edx),%xmm2 | 
 | 	nop | 
 | 	subl	$32,%ebx | 
 | 	jbe	.L004even_tail | 
 | 	jmp	.L005mod_loop | 
 | .align	32 | 
 | .L005mod_loop: | 
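	// Each iteration interleaves the reduction of the previous product
	// with the pclmulqdq ops for the next pair of blocks.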
 | 	pshufd	$78,%xmm0,%xmm4 | 
 | 	movdqa	%xmm0,%xmm1 | 
 | 	pxor	%xmm0,%xmm4 | 
 | 	nop | 
.byte	102,15,58,68,194,0	// pclmulqdq	$0x00,%xmm2,%xmm0
.byte	102,15,58,68,202,17	// pclmulqdq	$0x11,%xmm2,%xmm1
.byte	102,15,58,68,229,16	// pclmulqdq	$0x10,%xmm5,%xmm4
 | 	movups	(%edx),%xmm2 | 
 | 	xorps	%xmm6,%xmm0 | 
 | 	movdqa	(%ecx),%xmm5 | 
 | 	xorps	%xmm7,%xmm1 | 
 | 	movdqu	(%esi),%xmm7 | 
 | 	pxor	%xmm0,%xmm3 | 
 | 	movdqu	16(%esi),%xmm6 | 
 | 	pxor	%xmm1,%xmm3 | 
.byte	102,15,56,0,253		// pshufb	%xmm5,%xmm7 (byte-swap input block)
 | 	pxor	%xmm3,%xmm4 | 
 | 	movdqa	%xmm4,%xmm3 | 
 | 	psrldq	$8,%xmm4 | 
 | 	pslldq	$8,%xmm3 | 
 | 	pxor	%xmm4,%xmm1 | 
 | 	pxor	%xmm3,%xmm0 | 
.byte	102,15,56,0,245		// pshufb	%xmm5,%xmm6 (byte-swap input block)
 | 	pxor	%xmm7,%xmm1 | 
 | 	movdqa	%xmm6,%xmm7 | 
 | 	movdqa	%xmm0,%xmm4 | 
 | 	movdqa	%xmm0,%xmm3 | 
 | 	psllq	$5,%xmm0 | 
 | 	pxor	%xmm0,%xmm3 | 
 | 	psllq	$1,%xmm0 | 
 | 	pxor	%xmm3,%xmm0 | 
.byte	102,15,58,68,242,0	// pclmulqdq	$0x00,%xmm2,%xmm6
 | 	movups	32(%edx),%xmm5 | 
 | 	psllq	$57,%xmm0 | 
 | 	movdqa	%xmm0,%xmm3 | 
 | 	pslldq	$8,%xmm0 | 
 | 	psrldq	$8,%xmm3 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	pxor	%xmm3,%xmm1 | 
 | 	pshufd	$78,%xmm7,%xmm3 | 
 | 	movdqa	%xmm0,%xmm4 | 
 | 	psrlq	$1,%xmm0 | 
 | 	pxor	%xmm7,%xmm3 | 
 | 	pxor	%xmm4,%xmm1 | 
.byte	102,15,58,68,250,17	// pclmulqdq	$0x11,%xmm2,%xmm7
 | 	movups	16(%edx),%xmm2 | 
 | 	pxor	%xmm0,%xmm4 | 
 | 	psrlq	$5,%xmm0 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	psrlq	$1,%xmm0 | 
 | 	pxor	%xmm1,%xmm0 | 
.byte	102,15,58,68,221,0	// pclmulqdq	$0x00,%xmm5,%xmm3
 | 	leal	32(%esi),%esi | 
 | 	subl	$32,%ebx | 
 | 	ja	.L005mod_loop | 
 | .L004even_tail: | 
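	// The final pair of blocks is in flight: finish its multiplies and
	// reduce. A trailing odd block may still follow below.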
 | 	pshufd	$78,%xmm0,%xmm4 | 
 | 	movdqa	%xmm0,%xmm1 | 
 | 	pxor	%xmm0,%xmm4 | 
.byte	102,15,58,68,194,0	// pclmulqdq	$0x00,%xmm2,%xmm0
.byte	102,15,58,68,202,17	// pclmulqdq	$0x11,%xmm2,%xmm1
.byte	102,15,58,68,229,16	// pclmulqdq	$0x10,%xmm5,%xmm4
 | 	movdqa	(%ecx),%xmm5 | 
 | 	xorps	%xmm6,%xmm0 | 
 | 	xorps	%xmm7,%xmm1 | 
 | 	pxor	%xmm0,%xmm3 | 
 | 	pxor	%xmm1,%xmm3 | 
 | 	pxor	%xmm3,%xmm4 | 
 | 	movdqa	%xmm4,%xmm3 | 
 | 	psrldq	$8,%xmm4 | 
 | 	pslldq	$8,%xmm3 | 
 | 	pxor	%xmm4,%xmm1 | 
 | 	pxor	%xmm3,%xmm0 | 
 | 	movdqa	%xmm0,%xmm4 | 
 | 	movdqa	%xmm0,%xmm3 | 
 | 	psllq	$5,%xmm0 | 
 | 	pxor	%xmm0,%xmm3 | 
 | 	psllq	$1,%xmm0 | 
 | 	pxor	%xmm3,%xmm0 | 
 | 	psllq	$57,%xmm0 | 
 | 	movdqa	%xmm0,%xmm3 | 
 | 	pslldq	$8,%xmm0 | 
 | 	psrldq	$8,%xmm3 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	pxor	%xmm3,%xmm1 | 
 | 	movdqa	%xmm0,%xmm4 | 
 | 	psrlq	$1,%xmm0 | 
 | 	pxor	%xmm4,%xmm1 | 
 | 	pxor	%xmm0,%xmm4 | 
 | 	psrlq	$5,%xmm0 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	psrlq	$1,%xmm0 | 
 | 	pxor	%xmm1,%xmm0 | 
 | 	testl	%ebx,%ebx | 
 | 	jnz	.L006done | 
 | 	movups	(%edx),%xmm2 | 
 | .L003odd_tail: | 
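	// One last block (or the len == 16 fast path): multiply Xi by H and
	// reduce.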
 | 	movdqu	(%esi),%xmm3 | 
.byte	102,15,56,0,221		// pshufb	%xmm5,%xmm3 (byte-swap last block)
 | 	pxor	%xmm3,%xmm0 | 
 | 	movdqa	%xmm0,%xmm1 | 
 | 	pshufd	$78,%xmm0,%xmm3 | 
 | 	pshufd	$78,%xmm2,%xmm4 | 
 | 	pxor	%xmm0,%xmm3 | 
 | 	pxor	%xmm2,%xmm4 | 
.byte	102,15,58,68,194,0	// pclmulqdq	$0x00,%xmm2,%xmm0
.byte	102,15,58,68,202,17	// pclmulqdq	$0x11,%xmm2,%xmm1
.byte	102,15,58,68,220,0	// pclmulqdq	$0x00,%xmm4,%xmm3
 | 	xorps	%xmm0,%xmm3 | 
 | 	xorps	%xmm1,%xmm3 | 
 | 	movdqa	%xmm3,%xmm4 | 
 | 	psrldq	$8,%xmm3 | 
 | 	pslldq	$8,%xmm4 | 
 | 	pxor	%xmm3,%xmm1 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	movdqa	%xmm0,%xmm4 | 
 | 	movdqa	%xmm0,%xmm3 | 
 | 	psllq	$5,%xmm0 | 
 | 	pxor	%xmm0,%xmm3 | 
 | 	psllq	$1,%xmm0 | 
 | 	pxor	%xmm3,%xmm0 | 
 | 	psllq	$57,%xmm0 | 
 | 	movdqa	%xmm0,%xmm3 | 
 | 	pslldq	$8,%xmm0 | 
 | 	psrldq	$8,%xmm3 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	pxor	%xmm3,%xmm1 | 
 | 	movdqa	%xmm0,%xmm4 | 
 | 	psrlq	$1,%xmm0 | 
 | 	pxor	%xmm4,%xmm1 | 
 | 	pxor	%xmm0,%xmm4 | 
 | 	psrlq	$5,%xmm0 | 
 | 	pxor	%xmm4,%xmm0 | 
 | 	psrlq	$1,%xmm0 | 
 | 	pxor	%xmm1,%xmm0 | 
 | .L006done: | 
.byte	102,15,56,0,197		// pshufb	%xmm5,%xmm0 (swap Xi back to memory order)
 | 	movdqu	%xmm0,(%eax) | 
 | 	popl	%edi | 
 | 	popl	%esi | 
 | 	popl	%ebx | 
 | 	popl	%ebp | 
 | 	ret | 
 | .size	gcm_ghash_clmul,.-.L_gcm_ghash_clmul_begin | 
 | .align	64 | 
 | .Lbswap: | 
.byte	15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0	// byte-swap mask for pshufb
.byte	1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,194	// 0xc2...01 reduction constant (.Lbswap+16)
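// The remaining bytes are the NUL-terminated credit string
// "GHASH for x86, CRYPTOGAMS by <appro@openssl.org>".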
 | .byte	71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67 | 
 | .byte	82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112 | 
 | .byte	112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62 | 
 | .byte	0 | 
 | #endif  // !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86) && defined(__ELF__) |