| // This file is generated from a similarly-named Perl script in the BoringSSL | 
 | // source tree. Do not edit by hand. | 
 |  | 
 | #include <openssl/asm_base.h> | 
 |  | 
 | #if !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86) && defined(__APPLE__) | 
 | .text | 
# void sha1_block_data_order_nohw(SHA_CTX *ctx, const uint8_t *data, size_t num)
#
# Scalar SHA-1 compression for 32-bit x86, cdecl, Mach-O (Apple) symbol
# mangling. Processes `num` 64-byte blocks from `data`, updating the five
# 32-bit chaining values at ctx+0 .. ctx+16.
#
# Stack layout after the four pushes and `subl $76,%esp`:
#     0..60(%esp)  16-word message-schedule window W[0..15], updated in place
#    96(%esp)      caller's ctx argument slot (20(%esp) before the subl)
#   100(%esp)      current data pointer, saved each iteration
#   104(%esp)      end-of-data pointer = data + num*64
.globl	_sha1_block_data_order_nohw
.private_extern	_sha1_block_data_order_nohw
.align	4
_sha1_block_data_order_nohw:
L_sha1_block_data_order_nohw_begin:
	# cdecl prologue: preserve the callee-saved registers we use.
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	# After 4 pushes: 20(%esp)=ctx, 24(%esp)=data, 28(%esp)=num blocks.
	movl	20(%esp),%ebp
	movl	24(%esp),%esi
	movl	28(%esp),%eax
	subl	$76,%esp
	# eax = num * 64; end-of-data pointer = data + num*64, kept at 104(%esp).
	shll	$6,%eax
	addl	%esi,%eax
	movl	%eax,104(%esp)
	# edi = e (h4); a..d are reloaded from ctx at the top of each block.
	movl	16(%ebp),%edi
	jmp	L000loop
.align	4,0x90
L000loop:
	# Load the 64-byte block and byte-swap each word (SHA-1 is big-endian),
	# filling W[0..15] at 0..60(%esp).
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	8(%esi),%ecx
	movl	12(%esi),%edx
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	movl	%eax,(%esp)
	movl	%ebx,4(%esp)
	movl	%ecx,8(%esp)
	movl	%edx,12(%esp)
	movl	16(%esi),%eax
	movl	20(%esi),%ebx
	movl	24(%esi),%ecx
	movl	28(%esi),%edx
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	movl	%eax,16(%esp)
	movl	%ebx,20(%esp)
	movl	%ecx,24(%esp)
	movl	%edx,28(%esp)
	movl	32(%esi),%eax
	movl	36(%esi),%ebx
	movl	40(%esi),%ecx
	movl	44(%esi),%edx
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	movl	%eax,32(%esp)
	movl	%ebx,36(%esp)
	movl	%ecx,40(%esp)
	movl	%edx,44(%esp)
	movl	48(%esi),%eax
	movl	52(%esi),%ebx
	movl	56(%esi),%ecx
	movl	60(%esi),%edx
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	movl	%eax,48(%esp)
	movl	%ebx,52(%esp)
	movl	%ecx,56(%esp)
	movl	%edx,60(%esp)
	# Save the data pointer, then load working variables a..d from ctx
	# (e is already in %edi).
	movl	%esi,100(%esp)
	movl	(%ebp),%eax
	movl	4(%ebp),%ebx
	movl	8(%ebp),%ecx
	movl	12(%ebp),%edx
	# Rounds 0-15: F = Ch(b,c,d) = (b & (c ^ d)) ^ d, K = 0x5A827999
	# (1518500249). Each round computes T = rol(a,5)+F+e+W[i]+K, then the
	# working variables rotate (e=d, d=c, c=rol(b,30), b=a, a=T); instead of
	# moving values, the register assignment rotates from round to round,
	# with %ebp serving as the T accumulator.
	# 00_15 0 
	movl	%ecx,%esi
	movl	%eax,%ebp
	roll	$5,%ebp
	xorl	%edx,%esi
	addl	%edi,%ebp
	movl	(%esp),%edi
	andl	%ebx,%esi
	rorl	$2,%ebx
	xorl	%edx,%esi
	leal	1518500249(%ebp,%edi,1),%ebp
	addl	%esi,%ebp
	# 00_15 1 
	movl	%ebx,%edi
	movl	%ebp,%esi
	roll	$5,%ebp
	xorl	%ecx,%edi
	addl	%edx,%ebp
	movl	4(%esp),%edx
	andl	%eax,%edi
	rorl	$2,%eax
	xorl	%ecx,%edi
	leal	1518500249(%ebp,%edx,1),%ebp
	addl	%edi,%ebp
	# 00_15 2 
	movl	%eax,%edx
	movl	%ebp,%edi
	roll	$5,%ebp
	xorl	%ebx,%edx
	addl	%ecx,%ebp
	movl	8(%esp),%ecx
	andl	%esi,%edx
	rorl	$2,%esi
	xorl	%ebx,%edx
	leal	1518500249(%ebp,%ecx,1),%ebp
	addl	%edx,%ebp
	# 00_15 3 
	movl	%esi,%ecx
	movl	%ebp,%edx
	roll	$5,%ebp
	xorl	%eax,%ecx
	addl	%ebx,%ebp
	movl	12(%esp),%ebx
	andl	%edi,%ecx
	rorl	$2,%edi
	xorl	%eax,%ecx
	leal	1518500249(%ebp,%ebx,1),%ebp
	addl	%ecx,%ebp
	# 00_15 4 
	movl	%edi,%ebx
	movl	%ebp,%ecx
	roll	$5,%ebp
	xorl	%esi,%ebx
	addl	%eax,%ebp
	movl	16(%esp),%eax
	andl	%edx,%ebx
	rorl	$2,%edx
	xorl	%esi,%ebx
	leal	1518500249(%ebp,%eax,1),%ebp
	addl	%ebx,%ebp
	# 00_15 5 
	movl	%edx,%eax
	movl	%ebp,%ebx
	roll	$5,%ebp
	xorl	%edi,%eax
	addl	%esi,%ebp
	movl	20(%esp),%esi
	andl	%ecx,%eax
	rorl	$2,%ecx
	xorl	%edi,%eax
	leal	1518500249(%ebp,%esi,1),%ebp
	addl	%eax,%ebp
	# 00_15 6 
	movl	%ecx,%esi
	movl	%ebp,%eax
	roll	$5,%ebp
	xorl	%edx,%esi
	addl	%edi,%ebp
	movl	24(%esp),%edi
	andl	%ebx,%esi
	rorl	$2,%ebx
	xorl	%edx,%esi
	leal	1518500249(%ebp,%edi,1),%ebp
	addl	%esi,%ebp
	# 00_15 7 
	movl	%ebx,%edi
	movl	%ebp,%esi
	roll	$5,%ebp
	xorl	%ecx,%edi
	addl	%edx,%ebp
	movl	28(%esp),%edx
	andl	%eax,%edi
	rorl	$2,%eax
	xorl	%ecx,%edi
	leal	1518500249(%ebp,%edx,1),%ebp
	addl	%edi,%ebp
	# 00_15 8 
	movl	%eax,%edx
	movl	%ebp,%edi
	roll	$5,%ebp
	xorl	%ebx,%edx
	addl	%ecx,%ebp
	movl	32(%esp),%ecx
	andl	%esi,%edx
	rorl	$2,%esi
	xorl	%ebx,%edx
	leal	1518500249(%ebp,%ecx,1),%ebp
	addl	%edx,%ebp
	# 00_15 9 
	movl	%esi,%ecx
	movl	%ebp,%edx
	roll	$5,%ebp
	xorl	%eax,%ecx
	addl	%ebx,%ebp
	movl	36(%esp),%ebx
	andl	%edi,%ecx
	rorl	$2,%edi
	xorl	%eax,%ecx
	leal	1518500249(%ebp,%ebx,1),%ebp
	addl	%ecx,%ebp
	# 00_15 10 
	movl	%edi,%ebx
	movl	%ebp,%ecx
	roll	$5,%ebp
	xorl	%esi,%ebx
	addl	%eax,%ebp
	movl	40(%esp),%eax
	andl	%edx,%ebx
	rorl	$2,%edx
	xorl	%esi,%ebx
	leal	1518500249(%ebp,%eax,1),%ebp
	addl	%ebx,%ebp
	# 00_15 11 
	movl	%edx,%eax
	movl	%ebp,%ebx
	roll	$5,%ebp
	xorl	%edi,%eax
	addl	%esi,%ebp
	movl	44(%esp),%esi
	andl	%ecx,%eax
	rorl	$2,%ecx
	xorl	%edi,%eax
	leal	1518500249(%ebp,%esi,1),%ebp
	addl	%eax,%ebp
	# 00_15 12 
	movl	%ecx,%esi
	movl	%ebp,%eax
	roll	$5,%ebp
	xorl	%edx,%esi
	addl	%edi,%ebp
	movl	48(%esp),%edi
	andl	%ebx,%esi
	rorl	$2,%ebx
	xorl	%edx,%esi
	leal	1518500249(%ebp,%edi,1),%ebp
	addl	%esi,%ebp
	# 00_15 13 
	movl	%ebx,%edi
	movl	%ebp,%esi
	roll	$5,%ebp
	xorl	%ecx,%edi
	addl	%edx,%ebp
	movl	52(%esp),%edx
	andl	%eax,%edi
	rorl	$2,%eax
	xorl	%ecx,%edi
	leal	1518500249(%ebp,%edx,1),%ebp
	addl	%edi,%ebp
	# 00_15 14 
	movl	%eax,%edx
	movl	%ebp,%edi
	roll	$5,%ebp
	xorl	%ebx,%edx
	addl	%ecx,%ebp
	movl	56(%esp),%ecx
	andl	%esi,%edx
	rorl	$2,%esi
	xorl	%ebx,%edx
	leal	1518500249(%ebp,%ecx,1),%ebp
	addl	%edx,%ebp
	# 00_15 15 
	movl	%esi,%ecx
	movl	%ebp,%edx
	roll	$5,%ebp
	xorl	%eax,%ecx
	addl	%ebx,%ebp
	movl	60(%esp),%ebx
	andl	%edi,%ecx
	rorl	$2,%edi
	xorl	%eax,%ecx
	leal	1518500249(%ebp,%ebx,1),%ebp
	movl	(%esp),%ebx
	addl	%ebp,%ecx
	# Rounds 16-19: same Ch/K as above, but W[i] is now expanded on the fly:
	# W[i] = rol(W[i-16] ^ W[i-14] ^ W[i-8] ^ W[i-3], 1), stored back into
	# the 16-word window (indices mod 16).
	# 16_19 16 
	movl	%edi,%ebp
	xorl	8(%esp),%ebx
	xorl	%esi,%ebp
	xorl	32(%esp),%ebx
	andl	%edx,%ebp
	xorl	52(%esp),%ebx
	roll	$1,%ebx
	xorl	%esi,%ebp
	addl	%ebp,%eax
	movl	%ecx,%ebp
	rorl	$2,%edx
	movl	%ebx,(%esp)
	roll	$5,%ebp
	leal	1518500249(%ebx,%eax,1),%ebx
	movl	4(%esp),%eax
	addl	%ebp,%ebx
	# 16_19 17 
	movl	%edx,%ebp
	xorl	12(%esp),%eax
	xorl	%edi,%ebp
	xorl	36(%esp),%eax
	andl	%ecx,%ebp
	xorl	56(%esp),%eax
	roll	$1,%eax
	xorl	%edi,%ebp
	addl	%ebp,%esi
	movl	%ebx,%ebp
	rorl	$2,%ecx
	movl	%eax,4(%esp)
	roll	$5,%ebp
	leal	1518500249(%eax,%esi,1),%eax
	movl	8(%esp),%esi
	addl	%ebp,%eax
	# 16_19 18 
	movl	%ecx,%ebp
	xorl	16(%esp),%esi
	xorl	%edx,%ebp
	xorl	40(%esp),%esi
	andl	%ebx,%ebp
	xorl	60(%esp),%esi
	roll	$1,%esi
	xorl	%edx,%ebp
	addl	%ebp,%edi
	movl	%eax,%ebp
	rorl	$2,%ebx
	movl	%esi,8(%esp)
	roll	$5,%ebp
	leal	1518500249(%esi,%edi,1),%esi
	movl	12(%esp),%edi
	addl	%ebp,%esi
	# 16_19 19 
	movl	%ebx,%ebp
	xorl	20(%esp),%edi
	xorl	%ecx,%ebp
	xorl	44(%esp),%edi
	andl	%eax,%ebp
	xorl	(%esp),%edi
	roll	$1,%edi
	xorl	%ecx,%ebp
	addl	%ebp,%edx
	movl	%esi,%ebp
	rorl	$2,%eax
	movl	%edi,12(%esp)
	roll	$5,%ebp
	leal	1518500249(%edi,%edx,1),%edi
	movl	16(%esp),%edx
	addl	%ebp,%edi
	# Rounds 20-39: F = Parity(b,c,d) = b ^ c ^ d, K = 0x6ED9EBA1
	# (1859775393); schedule expansion continues as above.
	# 20_39 20 
	movl	%esi,%ebp
	xorl	24(%esp),%edx
	xorl	%eax,%ebp
	xorl	48(%esp),%edx
	xorl	%ebx,%ebp
	xorl	4(%esp),%edx
	roll	$1,%edx
	addl	%ebp,%ecx
	rorl	$2,%esi
	movl	%edi,%ebp
	roll	$5,%ebp
	movl	%edx,16(%esp)
	leal	1859775393(%edx,%ecx,1),%edx
	movl	20(%esp),%ecx
	addl	%ebp,%edx
	# 20_39 21 
	movl	%edi,%ebp
	xorl	28(%esp),%ecx
	xorl	%esi,%ebp
	xorl	52(%esp),%ecx
	xorl	%eax,%ebp
	xorl	8(%esp),%ecx
	roll	$1,%ecx
	addl	%ebp,%ebx
	rorl	$2,%edi
	movl	%edx,%ebp
	roll	$5,%ebp
	movl	%ecx,20(%esp)
	leal	1859775393(%ecx,%ebx,1),%ecx
	movl	24(%esp),%ebx
	addl	%ebp,%ecx
	# 20_39 22 
	movl	%edx,%ebp
	xorl	32(%esp),%ebx
	xorl	%edi,%ebp
	xorl	56(%esp),%ebx
	xorl	%esi,%ebp
	xorl	12(%esp),%ebx
	roll	$1,%ebx
	addl	%ebp,%eax
	rorl	$2,%edx
	movl	%ecx,%ebp
	roll	$5,%ebp
	movl	%ebx,24(%esp)
	leal	1859775393(%ebx,%eax,1),%ebx
	movl	28(%esp),%eax
	addl	%ebp,%ebx
	# 20_39 23 
	movl	%ecx,%ebp
	xorl	36(%esp),%eax
	xorl	%edx,%ebp
	xorl	60(%esp),%eax
	xorl	%edi,%ebp
	xorl	16(%esp),%eax
	roll	$1,%eax
	addl	%ebp,%esi
	rorl	$2,%ecx
	movl	%ebx,%ebp
	roll	$5,%ebp
	movl	%eax,28(%esp)
	leal	1859775393(%eax,%esi,1),%eax
	movl	32(%esp),%esi
	addl	%ebp,%eax
	# 20_39 24 
	movl	%ebx,%ebp
	xorl	40(%esp),%esi
	xorl	%ecx,%ebp
	xorl	(%esp),%esi
	xorl	%edx,%ebp
	xorl	20(%esp),%esi
	roll	$1,%esi
	addl	%ebp,%edi
	rorl	$2,%ebx
	movl	%eax,%ebp
	roll	$5,%ebp
	movl	%esi,32(%esp)
	leal	1859775393(%esi,%edi,1),%esi
	movl	36(%esp),%edi
	addl	%ebp,%esi
	# 20_39 25 
	movl	%eax,%ebp
	xorl	44(%esp),%edi
	xorl	%ebx,%ebp
	xorl	4(%esp),%edi
	xorl	%ecx,%ebp
	xorl	24(%esp),%edi
	roll	$1,%edi
	addl	%ebp,%edx
	rorl	$2,%eax
	movl	%esi,%ebp
	roll	$5,%ebp
	movl	%edi,36(%esp)
	leal	1859775393(%edi,%edx,1),%edi
	movl	40(%esp),%edx
	addl	%ebp,%edi
	# 20_39 26 
	movl	%esi,%ebp
	xorl	48(%esp),%edx
	xorl	%eax,%ebp
	xorl	8(%esp),%edx
	xorl	%ebx,%ebp
	xorl	28(%esp),%edx
	roll	$1,%edx
	addl	%ebp,%ecx
	rorl	$2,%esi
	movl	%edi,%ebp
	roll	$5,%ebp
	movl	%edx,40(%esp)
	leal	1859775393(%edx,%ecx,1),%edx
	movl	44(%esp),%ecx
	addl	%ebp,%edx
	# 20_39 27 
	movl	%edi,%ebp
	xorl	52(%esp),%ecx
	xorl	%esi,%ebp
	xorl	12(%esp),%ecx
	xorl	%eax,%ebp
	xorl	32(%esp),%ecx
	roll	$1,%ecx
	addl	%ebp,%ebx
	rorl	$2,%edi
	movl	%edx,%ebp
	roll	$5,%ebp
	movl	%ecx,44(%esp)
	leal	1859775393(%ecx,%ebx,1),%ecx
	movl	48(%esp),%ebx
	addl	%ebp,%ecx
	# 20_39 28 
	movl	%edx,%ebp
	xorl	56(%esp),%ebx
	xorl	%edi,%ebp
	xorl	16(%esp),%ebx
	xorl	%esi,%ebp
	xorl	36(%esp),%ebx
	roll	$1,%ebx
	addl	%ebp,%eax
	rorl	$2,%edx
	movl	%ecx,%ebp
	roll	$5,%ebp
	movl	%ebx,48(%esp)
	leal	1859775393(%ebx,%eax,1),%ebx
	movl	52(%esp),%eax
	addl	%ebp,%ebx
	# 20_39 29 
	movl	%ecx,%ebp
	xorl	60(%esp),%eax
	xorl	%edx,%ebp
	xorl	20(%esp),%eax
	xorl	%edi,%ebp
	xorl	40(%esp),%eax
	roll	$1,%eax
	addl	%ebp,%esi
	rorl	$2,%ecx
	movl	%ebx,%ebp
	roll	$5,%ebp
	movl	%eax,52(%esp)
	leal	1859775393(%eax,%esi,1),%eax
	movl	56(%esp),%esi
	addl	%ebp,%eax
	# 20_39 30 
	movl	%ebx,%ebp
	xorl	(%esp),%esi
	xorl	%ecx,%ebp
	xorl	24(%esp),%esi
	xorl	%edx,%ebp
	xorl	44(%esp),%esi
	roll	$1,%esi
	addl	%ebp,%edi
	rorl	$2,%ebx
	movl	%eax,%ebp
	roll	$5,%ebp
	movl	%esi,56(%esp)
	leal	1859775393(%esi,%edi,1),%esi
	movl	60(%esp),%edi
	addl	%ebp,%esi
	# 20_39 31 
	movl	%eax,%ebp
	xorl	4(%esp),%edi
	xorl	%ebx,%ebp
	xorl	28(%esp),%edi
	xorl	%ecx,%ebp
	xorl	48(%esp),%edi
	roll	$1,%edi
	addl	%ebp,%edx
	rorl	$2,%eax
	movl	%esi,%ebp
	roll	$5,%ebp
	movl	%edi,60(%esp)
	leal	1859775393(%edi,%edx,1),%edi
	movl	(%esp),%edx
	addl	%ebp,%edi
	# 20_39 32 
	movl	%esi,%ebp
	xorl	8(%esp),%edx
	xorl	%eax,%ebp
	xorl	32(%esp),%edx
	xorl	%ebx,%ebp
	xorl	52(%esp),%edx
	roll	$1,%edx
	addl	%ebp,%ecx
	rorl	$2,%esi
	movl	%edi,%ebp
	roll	$5,%ebp
	movl	%edx,(%esp)
	leal	1859775393(%edx,%ecx,1),%edx
	movl	4(%esp),%ecx
	addl	%ebp,%edx
	# 20_39 33 
	movl	%edi,%ebp
	xorl	12(%esp),%ecx
	xorl	%esi,%ebp
	xorl	36(%esp),%ecx
	xorl	%eax,%ebp
	xorl	56(%esp),%ecx
	roll	$1,%ecx
	addl	%ebp,%ebx
	rorl	$2,%edi
	movl	%edx,%ebp
	roll	$5,%ebp
	movl	%ecx,4(%esp)
	leal	1859775393(%ecx,%ebx,1),%ecx
	movl	8(%esp),%ebx
	addl	%ebp,%ecx
	# 20_39 34 
	movl	%edx,%ebp
	xorl	16(%esp),%ebx
	xorl	%edi,%ebp
	xorl	40(%esp),%ebx
	xorl	%esi,%ebp
	xorl	60(%esp),%ebx
	roll	$1,%ebx
	addl	%ebp,%eax
	rorl	$2,%edx
	movl	%ecx,%ebp
	roll	$5,%ebp
	movl	%ebx,8(%esp)
	leal	1859775393(%ebx,%eax,1),%ebx
	movl	12(%esp),%eax
	addl	%ebp,%ebx
	# 20_39 35 
	movl	%ecx,%ebp
	xorl	20(%esp),%eax
	xorl	%edx,%ebp
	xorl	44(%esp),%eax
	xorl	%edi,%ebp
	xorl	(%esp),%eax
	roll	$1,%eax
	addl	%ebp,%esi
	rorl	$2,%ecx
	movl	%ebx,%ebp
	roll	$5,%ebp
	movl	%eax,12(%esp)
	leal	1859775393(%eax,%esi,1),%eax
	movl	16(%esp),%esi
	addl	%ebp,%eax
	# 20_39 36 
	movl	%ebx,%ebp
	xorl	24(%esp),%esi
	xorl	%ecx,%ebp
	xorl	48(%esp),%esi
	xorl	%edx,%ebp
	xorl	4(%esp),%esi
	roll	$1,%esi
	addl	%ebp,%edi
	rorl	$2,%ebx
	movl	%eax,%ebp
	roll	$5,%ebp
	movl	%esi,16(%esp)
	leal	1859775393(%esi,%edi,1),%esi
	movl	20(%esp),%edi
	addl	%ebp,%esi
	# 20_39 37 
	movl	%eax,%ebp
	xorl	28(%esp),%edi
	xorl	%ebx,%ebp
	xorl	52(%esp),%edi
	xorl	%ecx,%ebp
	xorl	8(%esp),%edi
	roll	$1,%edi
	addl	%ebp,%edx
	rorl	$2,%eax
	movl	%esi,%ebp
	roll	$5,%ebp
	movl	%edi,20(%esp)
	leal	1859775393(%edi,%edx,1),%edi
	movl	24(%esp),%edx
	addl	%ebp,%edi
	# 20_39 38 
	movl	%esi,%ebp
	xorl	32(%esp),%edx
	xorl	%eax,%ebp
	xorl	56(%esp),%edx
	xorl	%ebx,%ebp
	xorl	12(%esp),%edx
	roll	$1,%edx
	addl	%ebp,%ecx
	rorl	$2,%esi
	movl	%edi,%ebp
	roll	$5,%ebp
	movl	%edx,24(%esp)
	leal	1859775393(%edx,%ecx,1),%edx
	movl	28(%esp),%ecx
	addl	%ebp,%edx
	# 20_39 39 
	movl	%edi,%ebp
	xorl	36(%esp),%ecx
	xorl	%esi,%ebp
	xorl	60(%esp),%ecx
	xorl	%eax,%ebp
	xorl	16(%esp),%ecx
	roll	$1,%ecx
	addl	%ebp,%ebx
	rorl	$2,%edi
	movl	%edx,%ebp
	roll	$5,%ebp
	movl	%ecx,28(%esp)
	leal	1859775393(%ecx,%ebx,1),%ecx
	movl	32(%esp),%ebx
	addl	%ebp,%ecx
	# Rounds 40-59: F = Maj(b,c,d), K = 0x8F1BBCDC (2400959708). Maj is
	# computed as (b & (c ^ d)) + (c & d): the two terms are bit-disjoint,
	# so the add equals the xor, and the second term is folded in with an
	# extra add at the end of each round.
	# 40_59 40 
	movl	%edi,%ebp
	xorl	40(%esp),%ebx
	xorl	%esi,%ebp
	xorl	(%esp),%ebx
	andl	%edx,%ebp
	xorl	20(%esp),%ebx
	roll	$1,%ebx
	addl	%eax,%ebp
	rorl	$2,%edx
	movl	%ecx,%eax
	roll	$5,%eax
	movl	%ebx,32(%esp)
	leal	2400959708(%ebx,%ebp,1),%ebx
	movl	%edi,%ebp
	addl	%eax,%ebx
	andl	%esi,%ebp
	movl	36(%esp),%eax
	addl	%ebp,%ebx
	# 40_59 41 
	movl	%edx,%ebp
	xorl	44(%esp),%eax
	xorl	%edi,%ebp
	xorl	4(%esp),%eax
	andl	%ecx,%ebp
	xorl	24(%esp),%eax
	roll	$1,%eax
	addl	%esi,%ebp
	rorl	$2,%ecx
	movl	%ebx,%esi
	roll	$5,%esi
	movl	%eax,36(%esp)
	leal	2400959708(%eax,%ebp,1),%eax
	movl	%edx,%ebp
	addl	%esi,%eax
	andl	%edi,%ebp
	movl	40(%esp),%esi
	addl	%ebp,%eax
	# 40_59 42 
	movl	%ecx,%ebp
	xorl	48(%esp),%esi
	xorl	%edx,%ebp
	xorl	8(%esp),%esi
	andl	%ebx,%ebp
	xorl	28(%esp),%esi
	roll	$1,%esi
	addl	%edi,%ebp
	rorl	$2,%ebx
	movl	%eax,%edi
	roll	$5,%edi
	movl	%esi,40(%esp)
	leal	2400959708(%esi,%ebp,1),%esi
	movl	%ecx,%ebp
	addl	%edi,%esi
	andl	%edx,%ebp
	movl	44(%esp),%edi
	addl	%ebp,%esi
	# 40_59 43 
	movl	%ebx,%ebp
	xorl	52(%esp),%edi
	xorl	%ecx,%ebp
	xorl	12(%esp),%edi
	andl	%eax,%ebp
	xorl	32(%esp),%edi
	roll	$1,%edi
	addl	%edx,%ebp
	rorl	$2,%eax
	movl	%esi,%edx
	roll	$5,%edx
	movl	%edi,44(%esp)
	leal	2400959708(%edi,%ebp,1),%edi
	movl	%ebx,%ebp
	addl	%edx,%edi
	andl	%ecx,%ebp
	movl	48(%esp),%edx
	addl	%ebp,%edi
	# 40_59 44 
	movl	%eax,%ebp
	xorl	56(%esp),%edx
	xorl	%ebx,%ebp
	xorl	16(%esp),%edx
	andl	%esi,%ebp
	xorl	36(%esp),%edx
	roll	$1,%edx
	addl	%ecx,%ebp
	rorl	$2,%esi
	movl	%edi,%ecx
	roll	$5,%ecx
	movl	%edx,48(%esp)
	leal	2400959708(%edx,%ebp,1),%edx
	movl	%eax,%ebp
	addl	%ecx,%edx
	andl	%ebx,%ebp
	movl	52(%esp),%ecx
	addl	%ebp,%edx
	# 40_59 45 
	movl	%esi,%ebp
	xorl	60(%esp),%ecx
	xorl	%eax,%ebp
	xorl	20(%esp),%ecx
	andl	%edi,%ebp
	xorl	40(%esp),%ecx
	roll	$1,%ecx
	addl	%ebx,%ebp
	rorl	$2,%edi
	movl	%edx,%ebx
	roll	$5,%ebx
	movl	%ecx,52(%esp)
	leal	2400959708(%ecx,%ebp,1),%ecx
	movl	%esi,%ebp
	addl	%ebx,%ecx
	andl	%eax,%ebp
	movl	56(%esp),%ebx
	addl	%ebp,%ecx
	# 40_59 46 
	movl	%edi,%ebp
	xorl	(%esp),%ebx
	xorl	%esi,%ebp
	xorl	24(%esp),%ebx
	andl	%edx,%ebp
	xorl	44(%esp),%ebx
	roll	$1,%ebx
	addl	%eax,%ebp
	rorl	$2,%edx
	movl	%ecx,%eax
	roll	$5,%eax
	movl	%ebx,56(%esp)
	leal	2400959708(%ebx,%ebp,1),%ebx
	movl	%edi,%ebp
	addl	%eax,%ebx
	andl	%esi,%ebp
	movl	60(%esp),%eax
	addl	%ebp,%ebx
	# 40_59 47 
	movl	%edx,%ebp
	xorl	4(%esp),%eax
	xorl	%edi,%ebp
	xorl	28(%esp),%eax
	andl	%ecx,%ebp
	xorl	48(%esp),%eax
	roll	$1,%eax
	addl	%esi,%ebp
	rorl	$2,%ecx
	movl	%ebx,%esi
	roll	$5,%esi
	movl	%eax,60(%esp)
	leal	2400959708(%eax,%ebp,1),%eax
	movl	%edx,%ebp
	addl	%esi,%eax
	andl	%edi,%ebp
	movl	(%esp),%esi
	addl	%ebp,%eax
	# 40_59 48 
	movl	%ecx,%ebp
	xorl	8(%esp),%esi
	xorl	%edx,%ebp
	xorl	32(%esp),%esi
	andl	%ebx,%ebp
	xorl	52(%esp),%esi
	roll	$1,%esi
	addl	%edi,%ebp
	rorl	$2,%ebx
	movl	%eax,%edi
	roll	$5,%edi
	movl	%esi,(%esp)
	leal	2400959708(%esi,%ebp,1),%esi
	movl	%ecx,%ebp
	addl	%edi,%esi
	andl	%edx,%ebp
	movl	4(%esp),%edi
	addl	%ebp,%esi
	# 40_59 49 
	movl	%ebx,%ebp
	xorl	12(%esp),%edi
	xorl	%ecx,%ebp
	xorl	36(%esp),%edi
	andl	%eax,%ebp
	xorl	56(%esp),%edi
	roll	$1,%edi
	addl	%edx,%ebp
	rorl	$2,%eax
	movl	%esi,%edx
	roll	$5,%edx
	movl	%edi,4(%esp)
	leal	2400959708(%edi,%ebp,1),%edi
	movl	%ebx,%ebp
	addl	%edx,%edi
	andl	%ecx,%ebp
	movl	8(%esp),%edx
	addl	%ebp,%edi
	# 40_59 50 
	movl	%eax,%ebp
	xorl	16(%esp),%edx
	xorl	%ebx,%ebp
	xorl	40(%esp),%edx
	andl	%esi,%ebp
	xorl	60(%esp),%edx
	roll	$1,%edx
	addl	%ecx,%ebp
	rorl	$2,%esi
	movl	%edi,%ecx
	roll	$5,%ecx
	movl	%edx,8(%esp)
	leal	2400959708(%edx,%ebp,1),%edx
	movl	%eax,%ebp
	addl	%ecx,%edx
	andl	%ebx,%ebp
	movl	12(%esp),%ecx
	addl	%ebp,%edx
	# 40_59 51 
	movl	%esi,%ebp
	xorl	20(%esp),%ecx
	xorl	%eax,%ebp
	xorl	44(%esp),%ecx
	andl	%edi,%ebp
	xorl	(%esp),%ecx
	roll	$1,%ecx
	addl	%ebx,%ebp
	rorl	$2,%edi
	movl	%edx,%ebx
	roll	$5,%ebx
	movl	%ecx,12(%esp)
	leal	2400959708(%ecx,%ebp,1),%ecx
	movl	%esi,%ebp
	addl	%ebx,%ecx
	andl	%eax,%ebp
	movl	16(%esp),%ebx
	addl	%ebp,%ecx
	# 40_59 52 
	movl	%edi,%ebp
	xorl	24(%esp),%ebx
	xorl	%esi,%ebp
	xorl	48(%esp),%ebx
	andl	%edx,%ebp
	xorl	4(%esp),%ebx
	roll	$1,%ebx
	addl	%eax,%ebp
	rorl	$2,%edx
	movl	%ecx,%eax
	roll	$5,%eax
	movl	%ebx,16(%esp)
	leal	2400959708(%ebx,%ebp,1),%ebx
	movl	%edi,%ebp
	addl	%eax,%ebx
	andl	%esi,%ebp
	movl	20(%esp),%eax
	addl	%ebp,%ebx
	# 40_59 53 
	movl	%edx,%ebp
	xorl	28(%esp),%eax
	xorl	%edi,%ebp
	xorl	52(%esp),%eax
	andl	%ecx,%ebp
	xorl	8(%esp),%eax
	roll	$1,%eax
	addl	%esi,%ebp
	rorl	$2,%ecx
	movl	%ebx,%esi
	roll	$5,%esi
	movl	%eax,20(%esp)
	leal	2400959708(%eax,%ebp,1),%eax
	movl	%edx,%ebp
	addl	%esi,%eax
	andl	%edi,%ebp
	movl	24(%esp),%esi
	addl	%ebp,%eax
	# 40_59 54 
	movl	%ecx,%ebp
	xorl	32(%esp),%esi
	xorl	%edx,%ebp
	xorl	56(%esp),%esi
	andl	%ebx,%ebp
	xorl	12(%esp),%esi
	roll	$1,%esi
	addl	%edi,%ebp
	rorl	$2,%ebx
	movl	%eax,%edi
	roll	$5,%edi
	movl	%esi,24(%esp)
	leal	2400959708(%esi,%ebp,1),%esi
	movl	%ecx,%ebp
	addl	%edi,%esi
	andl	%edx,%ebp
	movl	28(%esp),%edi
	addl	%ebp,%esi
	# 40_59 55 
	movl	%ebx,%ebp
	xorl	36(%esp),%edi
	xorl	%ecx,%ebp
	xorl	60(%esp),%edi
	andl	%eax,%ebp
	xorl	16(%esp),%edi
	roll	$1,%edi
	addl	%edx,%ebp
	rorl	$2,%eax
	movl	%esi,%edx
	roll	$5,%edx
	movl	%edi,28(%esp)
	leal	2400959708(%edi,%ebp,1),%edi
	movl	%ebx,%ebp
	addl	%edx,%edi
	andl	%ecx,%ebp
	movl	32(%esp),%edx
	addl	%ebp,%edi
	# 40_59 56 
	movl	%eax,%ebp
	xorl	40(%esp),%edx
	xorl	%ebx,%ebp
	xorl	(%esp),%edx
	andl	%esi,%ebp
	xorl	20(%esp),%edx
	roll	$1,%edx
	addl	%ecx,%ebp
	rorl	$2,%esi
	movl	%edi,%ecx
	roll	$5,%ecx
	movl	%edx,32(%esp)
	leal	2400959708(%edx,%ebp,1),%edx
	movl	%eax,%ebp
	addl	%ecx,%edx
	andl	%ebx,%ebp
	movl	36(%esp),%ecx
	addl	%ebp,%edx
	# 40_59 57 
	movl	%esi,%ebp
	xorl	44(%esp),%ecx
	xorl	%eax,%ebp
	xorl	4(%esp),%ecx
	andl	%edi,%ebp
	xorl	24(%esp),%ecx
	roll	$1,%ecx
	addl	%ebx,%ebp
	rorl	$2,%edi
	movl	%edx,%ebx
	roll	$5,%ebx
	movl	%ecx,36(%esp)
	leal	2400959708(%ecx,%ebp,1),%ecx
	movl	%esi,%ebp
	addl	%ebx,%ecx
	andl	%eax,%ebp
	movl	40(%esp),%ebx
	addl	%ebp,%ecx
	# 40_59 58 
	movl	%edi,%ebp
	xorl	48(%esp),%ebx
	xorl	%esi,%ebp
	xorl	8(%esp),%ebx
	andl	%edx,%ebp
	xorl	28(%esp),%ebx
	roll	$1,%ebx
	addl	%eax,%ebp
	rorl	$2,%edx
	movl	%ecx,%eax
	roll	$5,%eax
	movl	%ebx,40(%esp)
	leal	2400959708(%ebx,%ebp,1),%ebx
	movl	%edi,%ebp
	addl	%eax,%ebx
	andl	%esi,%ebp
	movl	44(%esp),%eax
	addl	%ebp,%ebx
	# 40_59 59 
	movl	%edx,%ebp
	xorl	52(%esp),%eax
	xorl	%edi,%ebp
	xorl	12(%esp),%eax
	andl	%ecx,%ebp
	xorl	32(%esp),%eax
	roll	$1,%eax
	addl	%esi,%ebp
	rorl	$2,%ecx
	movl	%ebx,%esi
	roll	$5,%esi
	movl	%eax,44(%esp)
	leal	2400959708(%eax,%ebp,1),%eax
	movl	%edx,%ebp
	addl	%esi,%eax
	andl	%edi,%ebp
	movl	48(%esp),%esi
	addl	%ebp,%eax
	# Rounds 60-79: F = Parity(b,c,d) again, K = 0xCA62C1D6 (3395469782).
	# The last three rounds (77-79) skip the store of the expanded word
	# back to the stack, since the schedule window is no longer needed.
	# 20_39 60 
	movl	%ebx,%ebp
	xorl	56(%esp),%esi
	xorl	%ecx,%ebp
	xorl	16(%esp),%esi
	xorl	%edx,%ebp
	xorl	36(%esp),%esi
	roll	$1,%esi
	addl	%ebp,%edi
	rorl	$2,%ebx
	movl	%eax,%ebp
	roll	$5,%ebp
	movl	%esi,48(%esp)
	leal	3395469782(%esi,%edi,1),%esi
	movl	52(%esp),%edi
	addl	%ebp,%esi
	# 20_39 61 
	movl	%eax,%ebp
	xorl	60(%esp),%edi
	xorl	%ebx,%ebp
	xorl	20(%esp),%edi
	xorl	%ecx,%ebp
	xorl	40(%esp),%edi
	roll	$1,%edi
	addl	%ebp,%edx
	rorl	$2,%eax
	movl	%esi,%ebp
	roll	$5,%ebp
	movl	%edi,52(%esp)
	leal	3395469782(%edi,%edx,1),%edi
	movl	56(%esp),%edx
	addl	%ebp,%edi
	# 20_39 62 
	movl	%esi,%ebp
	xorl	(%esp),%edx
	xorl	%eax,%ebp
	xorl	24(%esp),%edx
	xorl	%ebx,%ebp
	xorl	44(%esp),%edx
	roll	$1,%edx
	addl	%ebp,%ecx
	rorl	$2,%esi
	movl	%edi,%ebp
	roll	$5,%ebp
	movl	%edx,56(%esp)
	leal	3395469782(%edx,%ecx,1),%edx
	movl	60(%esp),%ecx
	addl	%ebp,%edx
	# 20_39 63 
	movl	%edi,%ebp
	xorl	4(%esp),%ecx
	xorl	%esi,%ebp
	xorl	28(%esp),%ecx
	xorl	%eax,%ebp
	xorl	48(%esp),%ecx
	roll	$1,%ecx
	addl	%ebp,%ebx
	rorl	$2,%edi
	movl	%edx,%ebp
	roll	$5,%ebp
	movl	%ecx,60(%esp)
	leal	3395469782(%ecx,%ebx,1),%ecx
	movl	(%esp),%ebx
	addl	%ebp,%ecx
	# 20_39 64 
	movl	%edx,%ebp
	xorl	8(%esp),%ebx
	xorl	%edi,%ebp
	xorl	32(%esp),%ebx
	xorl	%esi,%ebp
	xorl	52(%esp),%ebx
	roll	$1,%ebx
	addl	%ebp,%eax
	rorl	$2,%edx
	movl	%ecx,%ebp
	roll	$5,%ebp
	movl	%ebx,(%esp)
	leal	3395469782(%ebx,%eax,1),%ebx
	movl	4(%esp),%eax
	addl	%ebp,%ebx
	# 20_39 65 
	movl	%ecx,%ebp
	xorl	12(%esp),%eax
	xorl	%edx,%ebp
	xorl	36(%esp),%eax
	xorl	%edi,%ebp
	xorl	56(%esp),%eax
	roll	$1,%eax
	addl	%ebp,%esi
	rorl	$2,%ecx
	movl	%ebx,%ebp
	roll	$5,%ebp
	movl	%eax,4(%esp)
	leal	3395469782(%eax,%esi,1),%eax
	movl	8(%esp),%esi
	addl	%ebp,%eax
	# 20_39 66 
	movl	%ebx,%ebp
	xorl	16(%esp),%esi
	xorl	%ecx,%ebp
	xorl	40(%esp),%esi
	xorl	%edx,%ebp
	xorl	60(%esp),%esi
	roll	$1,%esi
	addl	%ebp,%edi
	rorl	$2,%ebx
	movl	%eax,%ebp
	roll	$5,%ebp
	movl	%esi,8(%esp)
	leal	3395469782(%esi,%edi,1),%esi
	movl	12(%esp),%edi
	addl	%ebp,%esi
	# 20_39 67 
	movl	%eax,%ebp
	xorl	20(%esp),%edi
	xorl	%ebx,%ebp
	xorl	44(%esp),%edi
	xorl	%ecx,%ebp
	xorl	(%esp),%edi
	roll	$1,%edi
	addl	%ebp,%edx
	rorl	$2,%eax
	movl	%esi,%ebp
	roll	$5,%ebp
	movl	%edi,12(%esp)
	leal	3395469782(%edi,%edx,1),%edi
	movl	16(%esp),%edx
	addl	%ebp,%edi
	# 20_39 68 
	movl	%esi,%ebp
	xorl	24(%esp),%edx
	xorl	%eax,%ebp
	xorl	48(%esp),%edx
	xorl	%ebx,%ebp
	xorl	4(%esp),%edx
	roll	$1,%edx
	addl	%ebp,%ecx
	rorl	$2,%esi
	movl	%edi,%ebp
	roll	$5,%ebp
	movl	%edx,16(%esp)
	leal	3395469782(%edx,%ecx,1),%edx
	movl	20(%esp),%ecx
	addl	%ebp,%edx
	# 20_39 69 
	movl	%edi,%ebp
	xorl	28(%esp),%ecx
	xorl	%esi,%ebp
	xorl	52(%esp),%ecx
	xorl	%eax,%ebp
	xorl	8(%esp),%ecx
	roll	$1,%ecx
	addl	%ebp,%ebx
	rorl	$2,%edi
	movl	%edx,%ebp
	roll	$5,%ebp
	movl	%ecx,20(%esp)
	leal	3395469782(%ecx,%ebx,1),%ecx
	movl	24(%esp),%ebx
	addl	%ebp,%ecx
	# 20_39 70 
	movl	%edx,%ebp
	xorl	32(%esp),%ebx
	xorl	%edi,%ebp
	xorl	56(%esp),%ebx
	xorl	%esi,%ebp
	xorl	12(%esp),%ebx
	roll	$1,%ebx
	addl	%ebp,%eax
	rorl	$2,%edx
	movl	%ecx,%ebp
	roll	$5,%ebp
	movl	%ebx,24(%esp)
	leal	3395469782(%ebx,%eax,1),%ebx
	movl	28(%esp),%eax
	addl	%ebp,%ebx
	# 20_39 71 
	movl	%ecx,%ebp
	xorl	36(%esp),%eax
	xorl	%edx,%ebp
	xorl	60(%esp),%eax
	xorl	%edi,%ebp
	xorl	16(%esp),%eax
	roll	$1,%eax
	addl	%ebp,%esi
	rorl	$2,%ecx
	movl	%ebx,%ebp
	roll	$5,%ebp
	movl	%eax,28(%esp)
	leal	3395469782(%eax,%esi,1),%eax
	movl	32(%esp),%esi
	addl	%ebp,%eax
	# 20_39 72 
	movl	%ebx,%ebp
	xorl	40(%esp),%esi
	xorl	%ecx,%ebp
	xorl	(%esp),%esi
	xorl	%edx,%ebp
	xorl	20(%esp),%esi
	roll	$1,%esi
	addl	%ebp,%edi
	rorl	$2,%ebx
	movl	%eax,%ebp
	roll	$5,%ebp
	movl	%esi,32(%esp)
	leal	3395469782(%esi,%edi,1),%esi
	movl	36(%esp),%edi
	addl	%ebp,%esi
	# 20_39 73 
	movl	%eax,%ebp
	xorl	44(%esp),%edi
	xorl	%ebx,%ebp
	xorl	4(%esp),%edi
	xorl	%ecx,%ebp
	xorl	24(%esp),%edi
	roll	$1,%edi
	addl	%ebp,%edx
	rorl	$2,%eax
	movl	%esi,%ebp
	roll	$5,%ebp
	movl	%edi,36(%esp)
	leal	3395469782(%edi,%edx,1),%edi
	movl	40(%esp),%edx
	addl	%ebp,%edi
	# 20_39 74 
	movl	%esi,%ebp
	xorl	48(%esp),%edx
	xorl	%eax,%ebp
	xorl	8(%esp),%edx
	xorl	%ebx,%ebp
	xorl	28(%esp),%edx
	roll	$1,%edx
	addl	%ebp,%ecx
	rorl	$2,%esi
	movl	%edi,%ebp
	roll	$5,%ebp
	movl	%edx,40(%esp)
	leal	3395469782(%edx,%ecx,1),%edx
	movl	44(%esp),%ecx
	addl	%ebp,%edx
	# 20_39 75 
	movl	%edi,%ebp
	xorl	52(%esp),%ecx
	xorl	%esi,%ebp
	xorl	12(%esp),%ecx
	xorl	%eax,%ebp
	xorl	32(%esp),%ecx
	roll	$1,%ecx
	addl	%ebp,%ebx
	rorl	$2,%edi
	movl	%edx,%ebp
	roll	$5,%ebp
	movl	%ecx,44(%esp)
	leal	3395469782(%ecx,%ebx,1),%ecx
	movl	48(%esp),%ebx
	addl	%ebp,%ecx
	# 20_39 76 
	movl	%edx,%ebp
	xorl	56(%esp),%ebx
	xorl	%edi,%ebp
	xorl	16(%esp),%ebx
	xorl	%esi,%ebp
	xorl	36(%esp),%ebx
	roll	$1,%ebx
	addl	%ebp,%eax
	rorl	$2,%edx
	movl	%ecx,%ebp
	roll	$5,%ebp
	movl	%ebx,48(%esp)
	leal	3395469782(%ebx,%eax,1),%ebx
	movl	52(%esp),%eax
	addl	%ebp,%ebx
	# 20_39 77 
	movl	%ecx,%ebp
	xorl	60(%esp),%eax
	xorl	%edx,%ebp
	xorl	20(%esp),%eax
	xorl	%edi,%ebp
	xorl	40(%esp),%eax
	roll	$1,%eax
	addl	%ebp,%esi
	rorl	$2,%ecx
	movl	%ebx,%ebp
	roll	$5,%ebp
	leal	3395469782(%eax,%esi,1),%eax
	movl	56(%esp),%esi
	addl	%ebp,%eax
	# 20_39 78 
	movl	%ebx,%ebp
	xorl	(%esp),%esi
	xorl	%ecx,%ebp
	xorl	24(%esp),%esi
	xorl	%edx,%ebp
	xorl	44(%esp),%esi
	roll	$1,%esi
	addl	%ebp,%edi
	rorl	$2,%ebx
	movl	%eax,%ebp
	roll	$5,%ebp
	leal	3395469782(%esi,%edi,1),%esi
	movl	60(%esp),%edi
	addl	%ebp,%esi
	# 20_39 79 
	movl	%eax,%ebp
	xorl	4(%esp),%edi
	xorl	%ebx,%ebp
	xorl	28(%esp),%edi
	xorl	%ecx,%ebp
	xorl	48(%esp),%edi
	roll	$1,%edi
	addl	%ebp,%edx
	rorl	$2,%eax
	movl	%esi,%ebp
	roll	$5,%ebp
	leal	3395469782(%edi,%edx,1),%edi
	addl	%ebp,%edi
	# End of the 80 rounds. Reload ctx (96(%esp) is the caller's ctx slot)
	# and the saved data pointer, add the working variables into h0..h4,
	# advance the data pointer by one 64-byte block, and loop while it is
	# still below the end pointer at 104(%esp). The stores are interleaved
	# with moving e (%ecx) and the data pointer into the registers the loop
	# top expects (%edi and %esi respectively).
	movl	96(%esp),%ebp
	movl	100(%esp),%edx
	addl	(%ebp),%edi
	addl	4(%ebp),%esi
	addl	8(%ebp),%eax
	addl	12(%ebp),%ebx
	addl	16(%ebp),%ecx
	movl	%edi,(%ebp)
	addl	$64,%edx
	movl	%esi,4(%ebp)
	cmpl	104(%esp),%edx
	movl	%eax,8(%ebp)
	movl	%ecx,%edi
	movl	%ebx,12(%ebp)
	movl	%edx,%esi
	movl	%ecx,16(%ebp)
	jb	L000loop
	# Release the local frame and restore callee-saved registers.
	addl	$76,%esp
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
# void sha1_block_data_order_ssse3(uint32_t state[5], const uint8_t *data,
#                                  size_t num_blocks)
# SHA-1 compression, SSSE3 path (i386 cdecl; after the four register pushes
# the arguments sit at 20/24/28(%esp)).  The 16-word message schedule for
# each group of four rounds is computed with 128-bit SIMD, interleaved with
# the scalar round function so both pipes stay busy.  Generated code: the
# exact instruction interleaving is intentional — do not reorder by hand.
.globl	_sha1_block_data_order_ssse3
.private_extern	_sha1_block_data_order_ssse3
.align	4
_sha1_block_data_order_ssse3:
L_sha1_block_data_order_ssse3_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	# PIC: call/pop yields our own address; %ebp = base of the LK_XX_XX
	# constant table (defined elsewhere in this file).
	call	L001pic_point
L001pic_point:
	popl	%ebp
	leal	LK_XX_XX-L001pic_point(%ebp),%ebp
	# Load per-round-group constant vectors and (presumably) the big-endian
	# byte-swap mask from LK_XX_XX — table contents are defined outside this
	# chunk; confirm there.  xmm7 is the first K vector, xmm6 the shuffle mask.
	movdqa	(%ebp),%xmm7
	movdqa	16(%ebp),%xmm0
	movdqa	32(%ebp),%xmm1
	movdqa	48(%ebp),%xmm2
	movdqa	64(%ebp),%xmm6
	movl	20(%esp),%edi
	movl	24(%esp),%ebp
	movl	28(%esp),%edx
	movl	%esp,%esi
	# Build a 64-byte-aligned 208-byte scratch frame.  Layout (offsets from
	# the new %esp):
	#   0..47    W[i]+K values consumed by the scalar rounds
	#   64..111  spill slots for schedule vectors
	#   112/128/144  copies of the K constant vectors
	#   160      first K vector, 176 byte-swap mask (reloaded per block)
	#   192 state ptr, 196 data ptr, 200 end-of-data, 204 saved %esp
	subl	$208,%esp
	andl	$-64,%esp
	movdqa	%xmm0,112(%esp)
	movdqa	%xmm1,128(%esp)
	movdqa	%xmm2,144(%esp)
	shll	$6,%edx
	movdqa	%xmm7,160(%esp)
	addl	%ebp,%edx
	movdqa	%xmm6,176(%esp)
	addl	$64,%ebp
	movl	%edi,192(%esp)
	movl	%ebp,196(%esp)
	movl	%edx,200(%esp)
	movl	%esi,204(%esp)
	# Load the five state words a..e into eax,ebx,ecx,edx,edi.
	movl	(%edi),%eax
	movl	4(%edi),%ebx
	movl	8(%edi),%ecx
	movl	12(%edi),%edx
	movl	16(%edi),%edi
	movl	%ebx,%esi
	# First 64-byte block: load and byte-swap the 16 message words.
	movdqu	-64(%ebp),%xmm0
	movdqu	-48(%ebp),%xmm1
	movdqu	-32(%ebp),%xmm2
	movdqu	-16(%ebp),%xmm3
	# Hand-encoded SSSE3: pshufb %xmm6,%xmm0 / %xmm1 / %xmm2 / %xmm3
	# (matches the vpshufb lines in the AVX path).
.byte	102,15,56,0,198
.byte	102,15,56,0,206
.byte	102,15,56,0,214
	movdqa	%xmm7,96(%esp)
.byte	102,15,56,0,222
	# Pre-add K to W[0..11] and park them at 0/16/32(%esp) for the first
	# twelve scalar rounds; psubd restores the raw W for later scheduling.
	paddd	%xmm7,%xmm0
	paddd	%xmm7,%xmm1
	paddd	%xmm7,%xmm2
	movdqa	%xmm0,(%esp)
	psubd	%xmm7,%xmm0
	movdqa	%xmm1,16(%esp)
	psubd	%xmm7,%xmm1
	movdqa	%xmm2,32(%esp)
	movl	%ecx,%ebp
	psubd	%xmm7,%xmm2
	xorl	%edx,%ebp
	pshufd	$238,%xmm0,%xmm4
	andl	%ebp,%esi
	jmp	L002loop
.align	4,0x90
# Main per-block loop: 80 SHA-1 rounds.  The scalar round function rotates
# through eax/ebx/ecx/edx/edi (a..e renamed each round, esi/ebp as the
# F-function scratch) while SIMD computes W[t] four lanes at a time.
L002loop:
	rorl	$2,%ebx
	xorl	%edx,%esi
	movl	%eax,%ebp
	punpcklqdq	%xmm1,%xmm4
	movdqa	%xmm3,%xmm6
	addl	(%esp),%edi
	xorl	%ecx,%ebx
	paddd	%xmm3,%xmm7
	movdqa	%xmm0,64(%esp)
	roll	$5,%eax
	addl	%esi,%edi
	psrldq	$4,%xmm6
	andl	%ebx,%ebp
	xorl	%ecx,%ebx
	pxor	%xmm0,%xmm4
	addl	%eax,%edi
	rorl	$7,%eax
	pxor	%xmm2,%xmm6
	xorl	%ecx,%ebp
	movl	%edi,%esi
	addl	4(%esp),%edx
	pxor	%xmm6,%xmm4
	xorl	%ebx,%eax
	roll	$5,%edi
	movdqa	%xmm7,48(%esp)
	addl	%ebp,%edx
	andl	%eax,%esi
	movdqa	%xmm4,%xmm0
	xorl	%ebx,%eax
	addl	%edi,%edx
	rorl	$7,%edi
	movdqa	%xmm4,%xmm6
	xorl	%ebx,%esi
	pslldq	$12,%xmm0
	paddd	%xmm4,%xmm4
	movl	%edx,%ebp
	addl	8(%esp),%ecx
	psrld	$31,%xmm6
	xorl	%eax,%edi
	roll	$5,%edx
	movdqa	%xmm0,%xmm7
	addl	%esi,%ecx
	andl	%edi,%ebp
	xorl	%eax,%edi
	psrld	$30,%xmm0
	addl	%edx,%ecx
	rorl	$7,%edx
	por	%xmm6,%xmm4
	xorl	%eax,%ebp
	movl	%ecx,%esi
	addl	12(%esp),%ebx
	pslld	$2,%xmm7
	xorl	%edi,%edx
	roll	$5,%ecx
	pxor	%xmm0,%xmm4
	movdqa	96(%esp),%xmm0
	addl	%ebp,%ebx
	andl	%edx,%esi
	pxor	%xmm7,%xmm4
	pshufd	$238,%xmm1,%xmm5
	xorl	%edi,%edx
	addl	%ecx,%ebx
	rorl	$7,%ecx
	xorl	%edi,%esi
	movl	%ebx,%ebp
	punpcklqdq	%xmm2,%xmm5
	movdqa	%xmm4,%xmm7
	addl	16(%esp),%eax
	xorl	%edx,%ecx
	paddd	%xmm4,%xmm0
	movdqa	%xmm1,80(%esp)
	roll	$5,%ebx
	addl	%esi,%eax
	psrldq	$4,%xmm7
	andl	%ecx,%ebp
	xorl	%edx,%ecx
	pxor	%xmm1,%xmm5
	addl	%ebx,%eax
	rorl	$7,%ebx
	pxor	%xmm3,%xmm7
	xorl	%edx,%ebp
	movl	%eax,%esi
	addl	20(%esp),%edi
	pxor	%xmm7,%xmm5
	xorl	%ecx,%ebx
	roll	$5,%eax
	movdqa	%xmm0,(%esp)
	addl	%ebp,%edi
	andl	%ebx,%esi
	movdqa	%xmm5,%xmm1
	xorl	%ecx,%ebx
	addl	%eax,%edi
	rorl	$7,%eax
	movdqa	%xmm5,%xmm7
	xorl	%ecx,%esi
	pslldq	$12,%xmm1
	paddd	%xmm5,%xmm5
	movl	%edi,%ebp
	addl	24(%esp),%edx
	psrld	$31,%xmm7
	xorl	%ebx,%eax
	roll	$5,%edi
	movdqa	%xmm1,%xmm0
	addl	%esi,%edx
	andl	%eax,%ebp
	xorl	%ebx,%eax
	psrld	$30,%xmm1
	addl	%edi,%edx
	rorl	$7,%edi
	por	%xmm7,%xmm5
	xorl	%ebx,%ebp
	movl	%edx,%esi
	addl	28(%esp),%ecx
	pslld	$2,%xmm0
	xorl	%eax,%edi
	roll	$5,%edx
	pxor	%xmm1,%xmm5
	movdqa	112(%esp),%xmm1
	addl	%ebp,%ecx
	andl	%edi,%esi
	pxor	%xmm0,%xmm5
	pshufd	$238,%xmm2,%xmm6
	xorl	%eax,%edi
	addl	%edx,%ecx
	rorl	$7,%edx
	xorl	%eax,%esi
	movl	%ecx,%ebp
	punpcklqdq	%xmm3,%xmm6
	movdqa	%xmm5,%xmm0
	addl	32(%esp),%ebx
	xorl	%edi,%edx
	paddd	%xmm5,%xmm1
	movdqa	%xmm2,96(%esp)
	roll	$5,%ecx
	addl	%esi,%ebx
	psrldq	$4,%xmm0
	andl	%edx,%ebp
	xorl	%edi,%edx
	pxor	%xmm2,%xmm6
	addl	%ecx,%ebx
	rorl	$7,%ecx
	pxor	%xmm4,%xmm0
	xorl	%edi,%ebp
	movl	%ebx,%esi
	addl	36(%esp),%eax
	pxor	%xmm0,%xmm6
	xorl	%edx,%ecx
	roll	$5,%ebx
	movdqa	%xmm1,16(%esp)
	addl	%ebp,%eax
	andl	%ecx,%esi
	movdqa	%xmm6,%xmm2
	xorl	%edx,%ecx
	addl	%ebx,%eax
	rorl	$7,%ebx
	movdqa	%xmm6,%xmm0
	xorl	%edx,%esi
	pslldq	$12,%xmm2
	paddd	%xmm6,%xmm6
	movl	%eax,%ebp
	addl	40(%esp),%edi
	psrld	$31,%xmm0
	xorl	%ecx,%ebx
	roll	$5,%eax
	movdqa	%xmm2,%xmm1
	addl	%esi,%edi
	andl	%ebx,%ebp
	xorl	%ecx,%ebx
	psrld	$30,%xmm2
	addl	%eax,%edi
	rorl	$7,%eax
	por	%xmm0,%xmm6
	xorl	%ecx,%ebp
	movdqa	64(%esp),%xmm0
	movl	%edi,%esi
	addl	44(%esp),%edx
	pslld	$2,%xmm1
	xorl	%ebx,%eax
	roll	$5,%edi
	pxor	%xmm2,%xmm6
	movdqa	112(%esp),%xmm2
	addl	%ebp,%edx
	andl	%eax,%esi
	pxor	%xmm1,%xmm6
	pshufd	$238,%xmm3,%xmm7
	xorl	%ebx,%eax
	addl	%edi,%edx
	rorl	$7,%edi
	xorl	%ebx,%esi
	movl	%edx,%ebp
	punpcklqdq	%xmm4,%xmm7
	movdqa	%xmm6,%xmm1
	addl	48(%esp),%ecx
	xorl	%eax,%edi
	paddd	%xmm6,%xmm2
	movdqa	%xmm3,64(%esp)
	roll	$5,%edx
	addl	%esi,%ecx
	psrldq	$4,%xmm1
	andl	%edi,%ebp
	xorl	%eax,%edi
	pxor	%xmm3,%xmm7
	addl	%edx,%ecx
	rorl	$7,%edx
	pxor	%xmm5,%xmm1
	xorl	%eax,%ebp
	movl	%ecx,%esi
	addl	52(%esp),%ebx
	pxor	%xmm1,%xmm7
	xorl	%edi,%edx
	roll	$5,%ecx
	movdqa	%xmm2,32(%esp)
	addl	%ebp,%ebx
	andl	%edx,%esi
	movdqa	%xmm7,%xmm3
	xorl	%edi,%edx
	addl	%ecx,%ebx
	rorl	$7,%ecx
	movdqa	%xmm7,%xmm1
	xorl	%edi,%esi
	pslldq	$12,%xmm3
	paddd	%xmm7,%xmm7
	movl	%ebx,%ebp
	addl	56(%esp),%eax
	psrld	$31,%xmm1
	xorl	%edx,%ecx
	roll	$5,%ebx
	movdqa	%xmm3,%xmm2
	addl	%esi,%eax
	andl	%ecx,%ebp
	xorl	%edx,%ecx
	psrld	$30,%xmm3
	addl	%ebx,%eax
	rorl	$7,%ebx
	por	%xmm1,%xmm7
	xorl	%edx,%ebp
	movdqa	80(%esp),%xmm1
	movl	%eax,%esi
	addl	60(%esp),%edi
	pslld	$2,%xmm2
	xorl	%ecx,%ebx
	roll	$5,%eax
	pxor	%xmm3,%xmm7
	movdqa	112(%esp),%xmm3
	addl	%ebp,%edi
	andl	%ebx,%esi
	pxor	%xmm2,%xmm7
	pshufd	$238,%xmm6,%xmm2
	xorl	%ecx,%ebx
	addl	%eax,%edi
	rorl	$7,%eax
	pxor	%xmm4,%xmm0
	punpcklqdq	%xmm7,%xmm2
	xorl	%ecx,%esi
	movl	%edi,%ebp
	addl	(%esp),%edx
	pxor	%xmm1,%xmm0
	movdqa	%xmm4,80(%esp)
	xorl	%ebx,%eax
	roll	$5,%edi
	movdqa	%xmm3,%xmm4
	addl	%esi,%edx
	paddd	%xmm7,%xmm3
	andl	%eax,%ebp
	pxor	%xmm2,%xmm0
	xorl	%ebx,%eax
	addl	%edi,%edx
	rorl	$7,%edi
	xorl	%ebx,%ebp
	movdqa	%xmm0,%xmm2
	movdqa	%xmm3,48(%esp)
	movl	%edx,%esi
	addl	4(%esp),%ecx
	xorl	%eax,%edi
	roll	$5,%edx
	pslld	$2,%xmm0
	addl	%ebp,%ecx
	andl	%edi,%esi
	psrld	$30,%xmm2
	xorl	%eax,%edi
	addl	%edx,%ecx
	rorl	$7,%edx
	xorl	%eax,%esi
	movl	%ecx,%ebp
	addl	8(%esp),%ebx
	xorl	%edi,%edx
	roll	$5,%ecx
	por	%xmm2,%xmm0
	addl	%esi,%ebx
	andl	%edx,%ebp
	movdqa	96(%esp),%xmm2
	xorl	%edi,%edx
	addl	%ecx,%ebx
	addl	12(%esp),%eax
	xorl	%edi,%ebp
	movl	%ebx,%esi
	pshufd	$238,%xmm7,%xmm3
	roll	$5,%ebx
	addl	%ebp,%eax
	xorl	%edx,%esi
	rorl	$7,%ecx
	addl	%ebx,%eax
	addl	16(%esp),%edi
	pxor	%xmm5,%xmm1
	punpcklqdq	%xmm0,%xmm3
	xorl	%ecx,%esi
	movl	%eax,%ebp
	roll	$5,%eax
	pxor	%xmm2,%xmm1
	movdqa	%xmm5,96(%esp)
	addl	%esi,%edi
	xorl	%ecx,%ebp
	movdqa	%xmm4,%xmm5
	rorl	$7,%ebx
	paddd	%xmm0,%xmm4
	addl	%eax,%edi
	pxor	%xmm3,%xmm1
	addl	20(%esp),%edx
	xorl	%ebx,%ebp
	movl	%edi,%esi
	roll	$5,%edi
	movdqa	%xmm1,%xmm3
	movdqa	%xmm4,(%esp)
	addl	%ebp,%edx
	xorl	%ebx,%esi
	rorl	$7,%eax
	addl	%edi,%edx
	pslld	$2,%xmm1
	addl	24(%esp),%ecx
	xorl	%eax,%esi
	psrld	$30,%xmm3
	movl	%edx,%ebp
	roll	$5,%edx
	addl	%esi,%ecx
	xorl	%eax,%ebp
	rorl	$7,%edi
	addl	%edx,%ecx
	por	%xmm3,%xmm1
	addl	28(%esp),%ebx
	xorl	%edi,%ebp
	movdqa	64(%esp),%xmm3
	movl	%ecx,%esi
	roll	$5,%ecx
	addl	%ebp,%ebx
	xorl	%edi,%esi
	rorl	$7,%edx
	pshufd	$238,%xmm0,%xmm4
	addl	%ecx,%ebx
	addl	32(%esp),%eax
	pxor	%xmm6,%xmm2
	punpcklqdq	%xmm1,%xmm4
	xorl	%edx,%esi
	movl	%ebx,%ebp
	roll	$5,%ebx
	pxor	%xmm3,%xmm2
	movdqa	%xmm6,64(%esp)
	addl	%esi,%eax
	xorl	%edx,%ebp
	movdqa	128(%esp),%xmm6
	rorl	$7,%ecx
	paddd	%xmm1,%xmm5
	addl	%ebx,%eax
	pxor	%xmm4,%xmm2
	addl	36(%esp),%edi
	xorl	%ecx,%ebp
	movl	%eax,%esi
	roll	$5,%eax
	movdqa	%xmm2,%xmm4
	movdqa	%xmm5,16(%esp)
	addl	%ebp,%edi
	xorl	%ecx,%esi
	rorl	$7,%ebx
	addl	%eax,%edi
	pslld	$2,%xmm2
	addl	40(%esp),%edx
	xorl	%ebx,%esi
	psrld	$30,%xmm4
	movl	%edi,%ebp
	roll	$5,%edi
	addl	%esi,%edx
	xorl	%ebx,%ebp
	rorl	$7,%eax
	addl	%edi,%edx
	por	%xmm4,%xmm2
	addl	44(%esp),%ecx
	xorl	%eax,%ebp
	movdqa	80(%esp),%xmm4
	movl	%edx,%esi
	roll	$5,%edx
	addl	%ebp,%ecx
	xorl	%eax,%esi
	rorl	$7,%edi
	pshufd	$238,%xmm1,%xmm5
	addl	%edx,%ecx
	addl	48(%esp),%ebx
	pxor	%xmm7,%xmm3
	punpcklqdq	%xmm2,%xmm5
	xorl	%edi,%esi
	movl	%ecx,%ebp
	roll	$5,%ecx
	pxor	%xmm4,%xmm3
	movdqa	%xmm7,80(%esp)
	addl	%esi,%ebx
	xorl	%edi,%ebp
	movdqa	%xmm6,%xmm7
	rorl	$7,%edx
	paddd	%xmm2,%xmm6
	addl	%ecx,%ebx
	pxor	%xmm5,%xmm3
	addl	52(%esp),%eax
	xorl	%edx,%ebp
	movl	%ebx,%esi
	roll	$5,%ebx
	movdqa	%xmm3,%xmm5
	movdqa	%xmm6,32(%esp)
	addl	%ebp,%eax
	xorl	%edx,%esi
	rorl	$7,%ecx
	addl	%ebx,%eax
	pslld	$2,%xmm3
	addl	56(%esp),%edi
	xorl	%ecx,%esi
	psrld	$30,%xmm5
	movl	%eax,%ebp
	roll	$5,%eax
	addl	%esi,%edi
	xorl	%ecx,%ebp
	rorl	$7,%ebx
	addl	%eax,%edi
	por	%xmm5,%xmm3
	addl	60(%esp),%edx
	xorl	%ebx,%ebp
	movdqa	96(%esp),%xmm5
	movl	%edi,%esi
	roll	$5,%edi
	addl	%ebp,%edx
	xorl	%ebx,%esi
	rorl	$7,%eax
	pshufd	$238,%xmm2,%xmm6
	addl	%edi,%edx
	addl	(%esp),%ecx
	pxor	%xmm0,%xmm4
	punpcklqdq	%xmm3,%xmm6
	xorl	%eax,%esi
	movl	%edx,%ebp
	roll	$5,%edx
	pxor	%xmm5,%xmm4
	movdqa	%xmm0,96(%esp)
	addl	%esi,%ecx
	xorl	%eax,%ebp
	movdqa	%xmm7,%xmm0
	rorl	$7,%edi
	paddd	%xmm3,%xmm7
	addl	%edx,%ecx
	pxor	%xmm6,%xmm4
	addl	4(%esp),%ebx
	xorl	%edi,%ebp
	movl	%ecx,%esi
	roll	$5,%ecx
	movdqa	%xmm4,%xmm6
	movdqa	%xmm7,48(%esp)
	addl	%ebp,%ebx
	xorl	%edi,%esi
	rorl	$7,%edx
	addl	%ecx,%ebx
	pslld	$2,%xmm4
	addl	8(%esp),%eax
	xorl	%edx,%esi
	psrld	$30,%xmm6
	movl	%ebx,%ebp
	roll	$5,%ebx
	addl	%esi,%eax
	xorl	%edx,%ebp
	rorl	$7,%ecx
	addl	%ebx,%eax
	por	%xmm6,%xmm4
	addl	12(%esp),%edi
	xorl	%ecx,%ebp
	movdqa	64(%esp),%xmm6
	movl	%eax,%esi
	roll	$5,%eax
	addl	%ebp,%edi
	xorl	%ecx,%esi
	rorl	$7,%ebx
	pshufd	$238,%xmm3,%xmm7
	addl	%eax,%edi
	addl	16(%esp),%edx
	pxor	%xmm1,%xmm5
	punpcklqdq	%xmm4,%xmm7
	xorl	%ebx,%esi
	movl	%edi,%ebp
	roll	$5,%edi
	pxor	%xmm6,%xmm5
	movdqa	%xmm1,64(%esp)
	addl	%esi,%edx
	xorl	%ebx,%ebp
	movdqa	%xmm0,%xmm1
	rorl	$7,%eax
	paddd	%xmm4,%xmm0
	addl	%edi,%edx
	pxor	%xmm7,%xmm5
	addl	20(%esp),%ecx
	xorl	%eax,%ebp
	movl	%edx,%esi
	roll	$5,%edx
	movdqa	%xmm5,%xmm7
	movdqa	%xmm0,(%esp)
	addl	%ebp,%ecx
	xorl	%eax,%esi
	rorl	$7,%edi
	addl	%edx,%ecx
	pslld	$2,%xmm5
	addl	24(%esp),%ebx
	xorl	%edi,%esi
	psrld	$30,%xmm7
	movl	%ecx,%ebp
	roll	$5,%ecx
	addl	%esi,%ebx
	xorl	%edi,%ebp
	rorl	$7,%edx
	addl	%ecx,%ebx
	por	%xmm7,%xmm5
	addl	28(%esp),%eax
	movdqa	80(%esp),%xmm7
	rorl	$7,%ecx
	movl	%ebx,%esi
	xorl	%edx,%ebp
	roll	$5,%ebx
	pshufd	$238,%xmm4,%xmm0
	addl	%ebp,%eax
	xorl	%ecx,%esi
	xorl	%edx,%ecx
	addl	%ebx,%eax
	addl	32(%esp),%edi
	pxor	%xmm2,%xmm6
	punpcklqdq	%xmm5,%xmm0
	andl	%ecx,%esi
	xorl	%edx,%ecx
	rorl	$7,%ebx
	pxor	%xmm7,%xmm6
	movdqa	%xmm2,80(%esp)
	movl	%eax,%ebp
	xorl	%ecx,%esi
	roll	$5,%eax
	movdqa	%xmm1,%xmm2
	addl	%esi,%edi
	paddd	%xmm5,%xmm1
	xorl	%ebx,%ebp
	pxor	%xmm0,%xmm6
	xorl	%ecx,%ebx
	addl	%eax,%edi
	addl	36(%esp),%edx
	andl	%ebx,%ebp
	movdqa	%xmm6,%xmm0
	movdqa	%xmm1,16(%esp)
	xorl	%ecx,%ebx
	rorl	$7,%eax
	movl	%edi,%esi
	xorl	%ebx,%ebp
	roll	$5,%edi
	pslld	$2,%xmm6
	addl	%ebp,%edx
	xorl	%eax,%esi
	psrld	$30,%xmm0
	xorl	%ebx,%eax
	addl	%edi,%edx
	addl	40(%esp),%ecx
	andl	%eax,%esi
	xorl	%ebx,%eax
	rorl	$7,%edi
	por	%xmm0,%xmm6
	movl	%edx,%ebp
	xorl	%eax,%esi
	movdqa	96(%esp),%xmm0
	roll	$5,%edx
	addl	%esi,%ecx
	xorl	%edi,%ebp
	xorl	%eax,%edi
	addl	%edx,%ecx
	pshufd	$238,%xmm5,%xmm1
	addl	44(%esp),%ebx
	andl	%edi,%ebp
	xorl	%eax,%edi
	rorl	$7,%edx
	movl	%ecx,%esi
	xorl	%edi,%ebp
	roll	$5,%ecx
	addl	%ebp,%ebx
	xorl	%edx,%esi
	xorl	%edi,%edx
	addl	%ecx,%ebx
	addl	48(%esp),%eax
	pxor	%xmm3,%xmm7
	punpcklqdq	%xmm6,%xmm1
	andl	%edx,%esi
	xorl	%edi,%edx
	rorl	$7,%ecx
	pxor	%xmm0,%xmm7
	movdqa	%xmm3,96(%esp)
	movl	%ebx,%ebp
	xorl	%edx,%esi
	roll	$5,%ebx
	movdqa	144(%esp),%xmm3
	addl	%esi,%eax
	paddd	%xmm6,%xmm2
	xorl	%ecx,%ebp
	pxor	%xmm1,%xmm7
	xorl	%edx,%ecx
	addl	%ebx,%eax
	addl	52(%esp),%edi
	andl	%ecx,%ebp
	movdqa	%xmm7,%xmm1
	movdqa	%xmm2,32(%esp)
	xorl	%edx,%ecx
	rorl	$7,%ebx
	movl	%eax,%esi
	xorl	%ecx,%ebp
	roll	$5,%eax
	pslld	$2,%xmm7
	addl	%ebp,%edi
	xorl	%ebx,%esi
	psrld	$30,%xmm1
	xorl	%ecx,%ebx
	addl	%eax,%edi
	addl	56(%esp),%edx
	andl	%ebx,%esi
	xorl	%ecx,%ebx
	rorl	$7,%eax
	por	%xmm1,%xmm7
	movl	%edi,%ebp
	xorl	%ebx,%esi
	movdqa	64(%esp),%xmm1
	roll	$5,%edi
	addl	%esi,%edx
	xorl	%eax,%ebp
	xorl	%ebx,%eax
	addl	%edi,%edx
	pshufd	$238,%xmm6,%xmm2
	addl	60(%esp),%ecx
	andl	%eax,%ebp
	xorl	%ebx,%eax
	rorl	$7,%edi
	movl	%edx,%esi
	xorl	%eax,%ebp
	roll	$5,%edx
	addl	%ebp,%ecx
	xorl	%edi,%esi
	xorl	%eax,%edi
	addl	%edx,%ecx
	addl	(%esp),%ebx
	pxor	%xmm4,%xmm0
	punpcklqdq	%xmm7,%xmm2
	andl	%edi,%esi
	xorl	%eax,%edi
	rorl	$7,%edx
	pxor	%xmm1,%xmm0
	movdqa	%xmm4,64(%esp)
	movl	%ecx,%ebp
	xorl	%edi,%esi
	roll	$5,%ecx
	movdqa	%xmm3,%xmm4
	addl	%esi,%ebx
	paddd	%xmm7,%xmm3
	xorl	%edx,%ebp
	pxor	%xmm2,%xmm0
	xorl	%edi,%edx
	addl	%ecx,%ebx
	addl	4(%esp),%eax
	andl	%edx,%ebp
	movdqa	%xmm0,%xmm2
	movdqa	%xmm3,48(%esp)
	xorl	%edi,%edx
	rorl	$7,%ecx
	movl	%ebx,%esi
	xorl	%edx,%ebp
	roll	$5,%ebx
	pslld	$2,%xmm0
	addl	%ebp,%eax
	xorl	%ecx,%esi
	psrld	$30,%xmm2
	xorl	%edx,%ecx
	addl	%ebx,%eax
	addl	8(%esp),%edi
	andl	%ecx,%esi
	xorl	%edx,%ecx
	rorl	$7,%ebx
	por	%xmm2,%xmm0
	movl	%eax,%ebp
	xorl	%ecx,%esi
	movdqa	80(%esp),%xmm2
	roll	$5,%eax
	addl	%esi,%edi
	xorl	%ebx,%ebp
	xorl	%ecx,%ebx
	addl	%eax,%edi
	pshufd	$238,%xmm7,%xmm3
	addl	12(%esp),%edx
	andl	%ebx,%ebp
	xorl	%ecx,%ebx
	rorl	$7,%eax
	movl	%edi,%esi
	xorl	%ebx,%ebp
	roll	$5,%edi
	addl	%ebp,%edx
	xorl	%eax,%esi
	xorl	%ebx,%eax
	addl	%edi,%edx
	addl	16(%esp),%ecx
	pxor	%xmm5,%xmm1
	punpcklqdq	%xmm0,%xmm3
	andl	%eax,%esi
	xorl	%ebx,%eax
	rorl	$7,%edi
	pxor	%xmm2,%xmm1
	movdqa	%xmm5,80(%esp)
	movl	%edx,%ebp
	xorl	%eax,%esi
	roll	$5,%edx
	movdqa	%xmm4,%xmm5
	addl	%esi,%ecx
	paddd	%xmm0,%xmm4
	xorl	%edi,%ebp
	pxor	%xmm3,%xmm1
	xorl	%eax,%edi
	addl	%edx,%ecx
	addl	20(%esp),%ebx
	andl	%edi,%ebp
	movdqa	%xmm1,%xmm3
	movdqa	%xmm4,(%esp)
	xorl	%eax,%edi
	rorl	$7,%edx
	movl	%ecx,%esi
	xorl	%edi,%ebp
	roll	$5,%ecx
	pslld	$2,%xmm1
	addl	%ebp,%ebx
	xorl	%edx,%esi
	psrld	$30,%xmm3
	xorl	%edi,%edx
	addl	%ecx,%ebx
	addl	24(%esp),%eax
	andl	%edx,%esi
	xorl	%edi,%edx
	rorl	$7,%ecx
	por	%xmm3,%xmm1
	movl	%ebx,%ebp
	xorl	%edx,%esi
	movdqa	96(%esp),%xmm3
	roll	$5,%ebx
	addl	%esi,%eax
	xorl	%ecx,%ebp
	xorl	%edx,%ecx
	addl	%ebx,%eax
	pshufd	$238,%xmm0,%xmm4
	addl	28(%esp),%edi
	andl	%ecx,%ebp
	xorl	%edx,%ecx
	rorl	$7,%ebx
	movl	%eax,%esi
	xorl	%ecx,%ebp
	roll	$5,%eax
	addl	%ebp,%edi
	xorl	%ebx,%esi
	xorl	%ecx,%ebx
	addl	%eax,%edi
	addl	32(%esp),%edx
	pxor	%xmm6,%xmm2
	punpcklqdq	%xmm1,%xmm4
	andl	%ebx,%esi
	xorl	%ecx,%ebx
	rorl	$7,%eax
	pxor	%xmm3,%xmm2
	movdqa	%xmm6,96(%esp)
	movl	%edi,%ebp
	xorl	%ebx,%esi
	roll	$5,%edi
	movdqa	%xmm5,%xmm6
	addl	%esi,%edx
	paddd	%xmm1,%xmm5
	xorl	%eax,%ebp
	pxor	%xmm4,%xmm2
	xorl	%ebx,%eax
	addl	%edi,%edx
	addl	36(%esp),%ecx
	andl	%eax,%ebp
	movdqa	%xmm2,%xmm4
	movdqa	%xmm5,16(%esp)
	xorl	%ebx,%eax
	rorl	$7,%edi
	movl	%edx,%esi
	xorl	%eax,%ebp
	roll	$5,%edx
	pslld	$2,%xmm2
	addl	%ebp,%ecx
	xorl	%edi,%esi
	psrld	$30,%xmm4
	xorl	%eax,%edi
	addl	%edx,%ecx
	addl	40(%esp),%ebx
	andl	%edi,%esi
	xorl	%eax,%edi
	rorl	$7,%edx
	por	%xmm4,%xmm2
	movl	%ecx,%ebp
	xorl	%edi,%esi
	movdqa	64(%esp),%xmm4
	roll	$5,%ecx
	addl	%esi,%ebx
	xorl	%edx,%ebp
	xorl	%edi,%edx
	addl	%ecx,%ebx
	pshufd	$238,%xmm1,%xmm5
	addl	44(%esp),%eax
	andl	%edx,%ebp
	xorl	%edi,%edx
	rorl	$7,%ecx
	movl	%ebx,%esi
	xorl	%edx,%ebp
	roll	$5,%ebx
	addl	%ebp,%eax
	xorl	%edx,%esi
	addl	%ebx,%eax
	addl	48(%esp),%edi
	pxor	%xmm7,%xmm3
	punpcklqdq	%xmm2,%xmm5
	xorl	%ecx,%esi
	movl	%eax,%ebp
	roll	$5,%eax
	pxor	%xmm4,%xmm3
	movdqa	%xmm7,64(%esp)
	addl	%esi,%edi
	xorl	%ecx,%ebp
	movdqa	%xmm6,%xmm7
	rorl	$7,%ebx
	paddd	%xmm2,%xmm6
	addl	%eax,%edi
	pxor	%xmm5,%xmm3
	addl	52(%esp),%edx
	xorl	%ebx,%ebp
	movl	%edi,%esi
	roll	$5,%edi
	movdqa	%xmm3,%xmm5
	movdqa	%xmm6,32(%esp)
	addl	%ebp,%edx
	xorl	%ebx,%esi
	rorl	$7,%eax
	addl	%edi,%edx
	pslld	$2,%xmm3
	addl	56(%esp),%ecx
	xorl	%eax,%esi
	psrld	$30,%xmm5
	movl	%edx,%ebp
	roll	$5,%edx
	addl	%esi,%ecx
	xorl	%eax,%ebp
	rorl	$7,%edi
	addl	%edx,%ecx
	por	%xmm5,%xmm3
	addl	60(%esp),%ebx
	xorl	%edi,%ebp
	movl	%ecx,%esi
	roll	$5,%ecx
	addl	%ebp,%ebx
	xorl	%edi,%esi
	rorl	$7,%edx
	addl	%ecx,%ebx
	addl	(%esp),%eax
	xorl	%edx,%esi
	movl	%ebx,%ebp
	roll	$5,%ebx
	addl	%esi,%eax
	xorl	%edx,%ebp
	rorl	$7,%ecx
	paddd	%xmm3,%xmm7
	addl	%ebx,%eax
	addl	4(%esp),%edi
	xorl	%ecx,%ebp
	movl	%eax,%esi
	movdqa	%xmm7,48(%esp)
	roll	$5,%eax
	addl	%ebp,%edi
	xorl	%ecx,%esi
	rorl	$7,%ebx
	addl	%eax,%edi
	addl	8(%esp),%edx
	xorl	%ebx,%esi
	movl	%edi,%ebp
	roll	$5,%edi
	addl	%esi,%edx
	xorl	%ebx,%ebp
	rorl	$7,%eax
	addl	%edi,%edx
	addl	12(%esp),%ecx
	xorl	%eax,%ebp
	movl	%edx,%esi
	roll	$5,%edx
	addl	%ebp,%ecx
	xorl	%eax,%esi
	rorl	$7,%edi
	addl	%edx,%ecx
	# More input?  If so, preload and byte-swap the next 64-byte block while
	# the final rounds of this one drain through the scalar pipe.
	movl	196(%esp),%ebp
	cmpl	200(%esp),%ebp
	je	L003done
	movdqa	160(%esp),%xmm7
	movdqa	176(%esp),%xmm6
	movdqu	(%ebp),%xmm0
	movdqu	16(%ebp),%xmm1
	movdqu	32(%ebp),%xmm2
	movdqu	48(%ebp),%xmm3
	addl	$64,%ebp
	# pshufb %xmm6,%xmm0 (hand-encoded)
.byte	102,15,56,0,198
	movl	%ebp,196(%esp)
	movdqa	%xmm7,96(%esp)
	addl	16(%esp),%ebx
	xorl	%edi,%esi
	movl	%ecx,%ebp
	roll	$5,%ecx
	addl	%esi,%ebx
	xorl	%edi,%ebp
	rorl	$7,%edx
	# pshufb %xmm6,%xmm1 (hand-encoded)
.byte	102,15,56,0,206
	addl	%ecx,%ebx
	addl	20(%esp),%eax
	xorl	%edx,%ebp
	movl	%ebx,%esi
	paddd	%xmm7,%xmm0
	roll	$5,%ebx
	addl	%ebp,%eax
	xorl	%edx,%esi
	rorl	$7,%ecx
	movdqa	%xmm0,(%esp)
	addl	%ebx,%eax
	addl	24(%esp),%edi
	xorl	%ecx,%esi
	movl	%eax,%ebp
	psubd	%xmm7,%xmm0
	roll	$5,%eax
	addl	%esi,%edi
	xorl	%ecx,%ebp
	rorl	$7,%ebx
	addl	%eax,%edi
	addl	28(%esp),%edx
	xorl	%ebx,%ebp
	movl	%edi,%esi
	roll	$5,%edi
	addl	%ebp,%edx
	xorl	%ebx,%esi
	rorl	$7,%eax
	addl	%edi,%edx
	addl	32(%esp),%ecx
	xorl	%eax,%esi
	movl	%edx,%ebp
	roll	$5,%edx
	addl	%esi,%ecx
	xorl	%eax,%ebp
	rorl	$7,%edi
	# pshufb %xmm6,%xmm2 (hand-encoded)
.byte	102,15,56,0,214
	addl	%edx,%ecx
	addl	36(%esp),%ebx
	xorl	%edi,%ebp
	movl	%ecx,%esi
	paddd	%xmm7,%xmm1
	roll	$5,%ecx
	addl	%ebp,%ebx
	xorl	%edi,%esi
	rorl	$7,%edx
	movdqa	%xmm1,16(%esp)
	addl	%ecx,%ebx
	addl	40(%esp),%eax
	xorl	%edx,%esi
	movl	%ebx,%ebp
	psubd	%xmm7,%xmm1
	roll	$5,%ebx
	addl	%esi,%eax
	xorl	%edx,%ebp
	rorl	$7,%ecx
	addl	%ebx,%eax
	addl	44(%esp),%edi
	xorl	%ecx,%ebp
	movl	%eax,%esi
	roll	$5,%eax
	addl	%ebp,%edi
	xorl	%ecx,%esi
	rorl	$7,%ebx
	addl	%eax,%edi
	addl	48(%esp),%edx
	xorl	%ebx,%esi
	movl	%edi,%ebp
	roll	$5,%edi
	addl	%esi,%edx
	xorl	%ebx,%ebp
	rorl	$7,%eax
	# pshufb %xmm6,%xmm3 (hand-encoded)
.byte	102,15,56,0,222
	addl	%edi,%edx
	addl	52(%esp),%ecx
	xorl	%eax,%ebp
	movl	%edx,%esi
	paddd	%xmm7,%xmm2
	roll	$5,%edx
	addl	%ebp,%ecx
	xorl	%eax,%esi
	rorl	$7,%edi
	movdqa	%xmm2,32(%esp)
	addl	%edx,%ecx
	addl	56(%esp),%ebx
	xorl	%edi,%esi
	movl	%ecx,%ebp
	psubd	%xmm7,%xmm2
	roll	$5,%ecx
	addl	%esi,%ebx
	xorl	%edi,%ebp
	rorl	$7,%edx
	addl	%ecx,%ebx
	addl	60(%esp),%eax
	xorl	%edx,%ebp
	movl	%ebx,%esi
	roll	$5,%ebx
	addl	%ebp,%eax
	rorl	$7,%ecx
	addl	%ebx,%eax
	# End of block: fold the working variables back into state[0..4],
	# then re-enter the main loop for the already-preloaded next block.
	movl	192(%esp),%ebp
	addl	(%ebp),%eax
	addl	4(%ebp),%esi
	addl	8(%ebp),%ecx
	movl	%eax,(%ebp)
	addl	12(%ebp),%edx
	movl	%esi,4(%ebp)
	addl	16(%ebp),%edi
	movl	%ecx,8(%ebp)
	movl	%ecx,%ebx
	movl	%edx,12(%ebp)
	xorl	%edx,%ebx
	movl	%edi,16(%ebp)
	movl	%esi,%ebp
	pshufd	$238,%xmm0,%xmm4
	andl	%ebx,%esi
	movl	%ebp,%ebx
	jmp	L002loop
.align	4,0x90
# Last block: run the remaining rounds with no next-block preload, update
# state, restore the caller's stack pointer and return.
L003done:
	addl	16(%esp),%ebx
	xorl	%edi,%esi
	movl	%ecx,%ebp
	roll	$5,%ecx
	addl	%esi,%ebx
	xorl	%edi,%ebp
	rorl	$7,%edx
	addl	%ecx,%ebx
	addl	20(%esp),%eax
	xorl	%edx,%ebp
	movl	%ebx,%esi
	roll	$5,%ebx
	addl	%ebp,%eax
	xorl	%edx,%esi
	rorl	$7,%ecx
	addl	%ebx,%eax
	addl	24(%esp),%edi
	xorl	%ecx,%esi
	movl	%eax,%ebp
	roll	$5,%eax
	addl	%esi,%edi
	xorl	%ecx,%ebp
	rorl	$7,%ebx
	addl	%eax,%edi
	addl	28(%esp),%edx
	xorl	%ebx,%ebp
	movl	%edi,%esi
	roll	$5,%edi
	addl	%ebp,%edx
	xorl	%ebx,%esi
	rorl	$7,%eax
	addl	%edi,%edx
	addl	32(%esp),%ecx
	xorl	%eax,%esi
	movl	%edx,%ebp
	roll	$5,%edx
	addl	%esi,%ecx
	xorl	%eax,%ebp
	rorl	$7,%edi
	addl	%edx,%ecx
	addl	36(%esp),%ebx
	xorl	%edi,%ebp
	movl	%ecx,%esi
	roll	$5,%ecx
	addl	%ebp,%ebx
	xorl	%edi,%esi
	rorl	$7,%edx
	addl	%ecx,%ebx
	addl	40(%esp),%eax
	xorl	%edx,%esi
	movl	%ebx,%ebp
	roll	$5,%ebx
	addl	%esi,%eax
	xorl	%edx,%ebp
	rorl	$7,%ecx
	addl	%ebx,%eax
	addl	44(%esp),%edi
	xorl	%ecx,%ebp
	movl	%eax,%esi
	roll	$5,%eax
	addl	%ebp,%edi
	xorl	%ecx,%esi
	rorl	$7,%ebx
	addl	%eax,%edi
	addl	48(%esp),%edx
	xorl	%ebx,%esi
	movl	%edi,%ebp
	roll	$5,%edi
	addl	%esi,%edx
	xorl	%ebx,%ebp
	rorl	$7,%eax
	addl	%edi,%edx
	addl	52(%esp),%ecx
	xorl	%eax,%ebp
	movl	%edx,%esi
	roll	$5,%edx
	addl	%ebp,%ecx
	xorl	%eax,%esi
	rorl	$7,%edi
	addl	%edx,%ecx
	addl	56(%esp),%ebx
	xorl	%edi,%esi
	movl	%ecx,%ebp
	roll	$5,%ecx
	addl	%esi,%ebx
	xorl	%edi,%ebp
	rorl	$7,%edx
	addl	%ecx,%ebx
	addl	60(%esp),%eax
	xorl	%edx,%ebp
	movl	%ebx,%esi
	roll	$5,%ebx
	addl	%ebp,%eax
	rorl	$7,%ecx
	addl	%ebx,%eax
	movl	192(%esp),%ebp
	addl	(%ebp),%eax
	# Restore the original %esp saved before the 64-byte alignment.
	movl	204(%esp),%esp
	addl	4(%ebp),%esi
	addl	8(%ebp),%ecx
	movl	%eax,(%ebp)
	addl	12(%ebp),%edx
	movl	%esi,4(%ebp)
	addl	16(%ebp),%edi
	movl	%ecx,8(%ebp)
	movl	%edx,12(%ebp)
	movl	%edi,16(%ebp)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
 | .globl	_sha1_block_data_order_avx | 
 | .private_extern	_sha1_block_data_order_avx | 
 | .align	4 | 
 | _sha1_block_data_order_avx: | 
 | L_sha1_block_data_order_avx_begin: | 
 | 	pushl	%ebp | 
 | 	pushl	%ebx | 
 | 	pushl	%esi | 
 | 	pushl	%edi | 
 | 	call	L004pic_point | 
 | L004pic_point: | 
 | 	popl	%ebp | 
 | 	leal	LK_XX_XX-L004pic_point(%ebp),%ebp | 
 | 	vzeroall | 
 | 	vmovdqa	(%ebp),%xmm7 | 
 | 	vmovdqa	16(%ebp),%xmm0 | 
 | 	vmovdqa	32(%ebp),%xmm1 | 
 | 	vmovdqa	48(%ebp),%xmm2 | 
 | 	vmovdqa	64(%ebp),%xmm6 | 
 | 	movl	20(%esp),%edi | 
 | 	movl	24(%esp),%ebp | 
 | 	movl	28(%esp),%edx | 
 | 	movl	%esp,%esi | 
 | 	subl	$208,%esp | 
 | 	andl	$-64,%esp | 
 | 	vmovdqa	%xmm0,112(%esp) | 
 | 	vmovdqa	%xmm1,128(%esp) | 
 | 	vmovdqa	%xmm2,144(%esp) | 
 | 	shll	$6,%edx | 
 | 	vmovdqa	%xmm7,160(%esp) | 
 | 	addl	%ebp,%edx | 
 | 	vmovdqa	%xmm6,176(%esp) | 
 | 	addl	$64,%ebp | 
 | 	movl	%edi,192(%esp) | 
 | 	movl	%ebp,196(%esp) | 
 | 	movl	%edx,200(%esp) | 
 | 	movl	%esi,204(%esp) | 
 | 	movl	(%edi),%eax | 
 | 	movl	4(%edi),%ebx | 
 | 	movl	8(%edi),%ecx | 
 | 	movl	12(%edi),%edx | 
 | 	movl	16(%edi),%edi | 
 | 	movl	%ebx,%esi | 
 | 	vmovdqu	-64(%ebp),%xmm0 | 
 | 	vmovdqu	-48(%ebp),%xmm1 | 
 | 	vmovdqu	-32(%ebp),%xmm2 | 
 | 	vmovdqu	-16(%ebp),%xmm3 | 
 | 	vpshufb	%xmm6,%xmm0,%xmm0 | 
 | 	vpshufb	%xmm6,%xmm1,%xmm1 | 
 | 	vpshufb	%xmm6,%xmm2,%xmm2 | 
 | 	vmovdqa	%xmm7,96(%esp) | 
 | 	vpshufb	%xmm6,%xmm3,%xmm3 | 
 | 	vpaddd	%xmm7,%xmm0,%xmm4 | 
 | 	vpaddd	%xmm7,%xmm1,%xmm5 | 
 | 	vpaddd	%xmm7,%xmm2,%xmm6 | 
 | 	vmovdqa	%xmm4,(%esp) | 
 | 	movl	%ecx,%ebp | 
 | 	vmovdqa	%xmm5,16(%esp) | 
 | 	xorl	%edx,%ebp | 
 | 	vmovdqa	%xmm6,32(%esp) | 
 | 	andl	%ebp,%esi | 
 | 	jmp	L005loop | 
 | .align	4,0x90 | 
 | L005loop: | 
 | 	shrdl	$2,%ebx,%ebx | 
 | 	xorl	%edx,%esi | 
 | 	vpalignr	$8,%xmm0,%xmm1,%xmm4 | 
 | 	movl	%eax,%ebp | 
 | 	addl	(%esp),%edi | 
 | 	vpaddd	%xmm3,%xmm7,%xmm7 | 
 | 	vmovdqa	%xmm0,64(%esp) | 
 | 	xorl	%ecx,%ebx | 
 | 	shldl	$5,%eax,%eax | 
 | 	vpsrldq	$4,%xmm3,%xmm6 | 
 | 	addl	%esi,%edi | 
 | 	andl	%ebx,%ebp | 
 | 	vpxor	%xmm0,%xmm4,%xmm4 | 
 | 	xorl	%ecx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	vpxor	%xmm2,%xmm6,%xmm6 | 
 | 	shrdl	$7,%eax,%eax | 
 | 	xorl	%ecx,%ebp | 
 | 	vmovdqa	%xmm7,48(%esp) | 
 | 	movl	%edi,%esi | 
 | 	addl	4(%esp),%edx | 
 | 	vpxor	%xmm6,%xmm4,%xmm4 | 
 | 	xorl	%ebx,%eax | 
 | 	shldl	$5,%edi,%edi | 
 | 	addl	%ebp,%edx | 
 | 	andl	%eax,%esi | 
 | 	vpsrld	$31,%xmm4,%xmm6 | 
 | 	xorl	%ebx,%eax | 
 | 	addl	%edi,%edx | 
 | 	shrdl	$7,%edi,%edi | 
 | 	xorl	%ebx,%esi | 
 | 	vpslldq	$12,%xmm4,%xmm0 | 
 | 	vpaddd	%xmm4,%xmm4,%xmm4 | 
 | 	movl	%edx,%ebp | 
 | 	addl	8(%esp),%ecx | 
 | 	xorl	%eax,%edi | 
 | 	shldl	$5,%edx,%edx | 
 | 	vpsrld	$30,%xmm0,%xmm7 | 
 | 	vpor	%xmm6,%xmm4,%xmm4 | 
 | 	addl	%esi,%ecx | 
 | 	andl	%edi,%ebp | 
 | 	xorl	%eax,%edi | 
 | 	addl	%edx,%ecx | 
 | 	vpslld	$2,%xmm0,%xmm0 | 
 | 	shrdl	$7,%edx,%edx | 
 | 	xorl	%eax,%ebp | 
 | 	vpxor	%xmm7,%xmm4,%xmm4 | 
 | 	movl	%ecx,%esi | 
 | 	addl	12(%esp),%ebx | 
 | 	xorl	%edi,%edx | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	vpxor	%xmm0,%xmm4,%xmm4 | 
 | 	addl	%ebp,%ebx | 
 | 	andl	%edx,%esi | 
 | 	vmovdqa	96(%esp),%xmm0 | 
 | 	xorl	%edi,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	xorl	%edi,%esi | 
 | 	vpalignr	$8,%xmm1,%xmm2,%xmm5 | 
 | 	movl	%ebx,%ebp | 
 | 	addl	16(%esp),%eax | 
 | 	vpaddd	%xmm4,%xmm0,%xmm0 | 
 | 	vmovdqa	%xmm1,80(%esp) | 
 | 	xorl	%edx,%ecx | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	vpsrldq	$4,%xmm4,%xmm7 | 
 | 	addl	%esi,%eax | 
 | 	andl	%ecx,%ebp | 
 | 	vpxor	%xmm1,%xmm5,%xmm5 | 
 | 	xorl	%edx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	vpxor	%xmm3,%xmm7,%xmm7 | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	xorl	%edx,%ebp | 
 | 	vmovdqa	%xmm0,(%esp) | 
 | 	movl	%eax,%esi | 
 | 	addl	20(%esp),%edi | 
 | 	vpxor	%xmm7,%xmm5,%xmm5 | 
 | 	xorl	%ecx,%ebx | 
 | 	shldl	$5,%eax,%eax | 
 | 	addl	%ebp,%edi | 
 | 	andl	%ebx,%esi | 
 | 	vpsrld	$31,%xmm5,%xmm7 | 
 | 	xorl	%ecx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	shrdl	$7,%eax,%eax | 
 | 	xorl	%ecx,%esi | 
 | 	vpslldq	$12,%xmm5,%xmm1 | 
 | 	vpaddd	%xmm5,%xmm5,%xmm5 | 
 | 	movl	%edi,%ebp | 
 | 	addl	24(%esp),%edx | 
 | 	xorl	%ebx,%eax | 
 | 	shldl	$5,%edi,%edi | 
 | 	vpsrld	$30,%xmm1,%xmm0 | 
 | 	vpor	%xmm7,%xmm5,%xmm5 | 
 | 	addl	%esi,%edx | 
 | 	andl	%eax,%ebp | 
 | 	xorl	%ebx,%eax | 
 | 	addl	%edi,%edx | 
 | 	vpslld	$2,%xmm1,%xmm1 | 
 | 	shrdl	$7,%edi,%edi | 
 | 	xorl	%ebx,%ebp | 
 | 	vpxor	%xmm0,%xmm5,%xmm5 | 
 | 	movl	%edx,%esi | 
 | 	addl	28(%esp),%ecx | 
 | 	xorl	%eax,%edi | 
 | 	shldl	$5,%edx,%edx | 
 | 	vpxor	%xmm1,%xmm5,%xmm5 | 
 | 	addl	%ebp,%ecx | 
 | 	andl	%edi,%esi | 
 | 	vmovdqa	112(%esp),%xmm1 | 
 | 	xorl	%eax,%edi | 
 | 	addl	%edx,%ecx | 
 | 	shrdl	$7,%edx,%edx | 
 | 	xorl	%eax,%esi | 
 | 	vpalignr	$8,%xmm2,%xmm3,%xmm6 | 
 | 	movl	%ecx,%ebp | 
 | 	addl	32(%esp),%ebx | 
 | 	vpaddd	%xmm5,%xmm1,%xmm1 | 
 | 	vmovdqa	%xmm2,96(%esp) | 
 | 	xorl	%edi,%edx | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	vpsrldq	$4,%xmm5,%xmm0 | 
 | 	addl	%esi,%ebx | 
 | 	andl	%edx,%ebp | 
 | 	vpxor	%xmm2,%xmm6,%xmm6 | 
 | 	xorl	%edi,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	vpxor	%xmm4,%xmm0,%xmm0 | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	xorl	%edi,%ebp | 
 | 	vmovdqa	%xmm1,16(%esp) | 
 | 	movl	%ebx,%esi | 
 | 	addl	36(%esp),%eax | 
 | 	vpxor	%xmm0,%xmm6,%xmm6 | 
 | 	xorl	%edx,%ecx | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%ebp,%eax | 
 | 	andl	%ecx,%esi | 
 | 	vpsrld	$31,%xmm6,%xmm0 | 
 | 	xorl	%edx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	xorl	%edx,%esi | 
 | 	vpslldq	$12,%xmm6,%xmm2 | 
 | 	vpaddd	%xmm6,%xmm6,%xmm6 | 
 | 	movl	%eax,%ebp | 
 | 	addl	40(%esp),%edi | 
 | 	xorl	%ecx,%ebx | 
 | 	shldl	$5,%eax,%eax | 
 | 	vpsrld	$30,%xmm2,%xmm1 | 
 | 	vpor	%xmm0,%xmm6,%xmm6 | 
 | 	addl	%esi,%edi | 
 | 	andl	%ebx,%ebp | 
 | 	xorl	%ecx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	vpslld	$2,%xmm2,%xmm2 | 
 | 	vmovdqa	64(%esp),%xmm0 | 
 | 	shrdl	$7,%eax,%eax | 
 | 	xorl	%ecx,%ebp | 
 | 	vpxor	%xmm1,%xmm6,%xmm6 | 
 | 	movl	%edi,%esi | 
 | 	addl	44(%esp),%edx | 
 | 	xorl	%ebx,%eax | 
 | 	shldl	$5,%edi,%edi | 
 | 	vpxor	%xmm2,%xmm6,%xmm6 | 
 | 	addl	%ebp,%edx | 
 | 	andl	%eax,%esi | 
 | 	vmovdqa	112(%esp),%xmm2 | 
 | 	xorl	%ebx,%eax | 
 | 	addl	%edi,%edx | 
 | 	shrdl	$7,%edi,%edi | 
 | 	xorl	%ebx,%esi | 
 | 	vpalignr	$8,%xmm3,%xmm4,%xmm7 | 
 | 	movl	%edx,%ebp | 
 | 	addl	48(%esp),%ecx | 
 | 	vpaddd	%xmm6,%xmm2,%xmm2 | 
 | 	vmovdqa	%xmm3,64(%esp) | 
 | 	xorl	%eax,%edi | 
 | 	shldl	$5,%edx,%edx | 
 | 	vpsrldq	$4,%xmm6,%xmm1 | 
 | 	addl	%esi,%ecx | 
 | 	andl	%edi,%ebp | 
 | 	vpxor	%xmm3,%xmm7,%xmm7 | 
 | 	xorl	%eax,%edi | 
 | 	addl	%edx,%ecx | 
 | 	vpxor	%xmm5,%xmm1,%xmm1 | 
 | 	shrdl	$7,%edx,%edx | 
 | 	xorl	%eax,%ebp | 
 | 	vmovdqa	%xmm2,32(%esp) | 
 | 	movl	%ecx,%esi | 
 | 	addl	52(%esp),%ebx | 
 | 	vpxor	%xmm1,%xmm7,%xmm7 | 
 | 	xorl	%edi,%edx | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	addl	%ebp,%ebx | 
 | 	andl	%edx,%esi | 
 | 	vpsrld	$31,%xmm7,%xmm1 | 
 | 	xorl	%edi,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	xorl	%edi,%esi | 
 | 	vpslldq	$12,%xmm7,%xmm3 | 
 | 	vpaddd	%xmm7,%xmm7,%xmm7 | 
 | 	movl	%ebx,%ebp | 
 | 	addl	56(%esp),%eax | 
 | 	xorl	%edx,%ecx | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	vpsrld	$30,%xmm3,%xmm2 | 
 | 	vpor	%xmm1,%xmm7,%xmm7 | 
 | 	addl	%esi,%eax | 
 | 	andl	%ecx,%ebp | 
 | 	xorl	%edx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	vpslld	$2,%xmm3,%xmm3 | 
 | 	vmovdqa	80(%esp),%xmm1 | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	xorl	%edx,%ebp | 
 | 	vpxor	%xmm2,%xmm7,%xmm7 | 
 | 	movl	%eax,%esi | 
 | 	addl	60(%esp),%edi | 
 | 	xorl	%ecx,%ebx | 
 | 	shldl	$5,%eax,%eax | 
 | 	vpxor	%xmm3,%xmm7,%xmm7 | 
 | 	addl	%ebp,%edi | 
 | 	andl	%ebx,%esi | 
 | 	vmovdqa	112(%esp),%xmm3 | 
 | 	xorl	%ecx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	vpalignr	$8,%xmm6,%xmm7,%xmm2 | 
 | 	vpxor	%xmm4,%xmm0,%xmm0 | 
 | 	shrdl	$7,%eax,%eax | 
 | 	xorl	%ecx,%esi | 
 | 	movl	%edi,%ebp | 
 | 	addl	(%esp),%edx | 
 | 	vpxor	%xmm1,%xmm0,%xmm0 | 
 | 	vmovdqa	%xmm4,80(%esp) | 
 | 	xorl	%ebx,%eax | 
 | 	shldl	$5,%edi,%edi | 
 | 	vmovdqa	%xmm3,%xmm4 | 
 | 	vpaddd	%xmm7,%xmm3,%xmm3 | 
 | 	addl	%esi,%edx | 
 | 	andl	%eax,%ebp | 
 | 	vpxor	%xmm2,%xmm0,%xmm0 | 
 | 	xorl	%ebx,%eax | 
 | 	addl	%edi,%edx | 
 | 	shrdl	$7,%edi,%edi | 
 | 	xorl	%ebx,%ebp | 
 | 	vpsrld	$30,%xmm0,%xmm2 | 
 | 	vmovdqa	%xmm3,48(%esp) | 
 | 	movl	%edx,%esi | 
 | 	addl	4(%esp),%ecx | 
 | 	xorl	%eax,%edi | 
 | 	shldl	$5,%edx,%edx | 
 | 	vpslld	$2,%xmm0,%xmm0 | 
 | 	addl	%ebp,%ecx | 
 | 	andl	%edi,%esi | 
 | 	xorl	%eax,%edi | 
 | 	addl	%edx,%ecx | 
 | 	shrdl	$7,%edx,%edx | 
 | 	xorl	%eax,%esi | 
 | 	movl	%ecx,%ebp | 
 | 	addl	8(%esp),%ebx | 
 | 	vpor	%xmm2,%xmm0,%xmm0 | 
 | 	xorl	%edi,%edx | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	vmovdqa	96(%esp),%xmm2 | 
 | 	addl	%esi,%ebx | 
 | 	andl	%edx,%ebp | 
 | 	xorl	%edi,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	addl	12(%esp),%eax | 
 | 	xorl	%edi,%ebp | 
 | 	movl	%ebx,%esi | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%ebp,%eax | 
 | 	xorl	%edx,%esi | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	vpalignr	$8,%xmm7,%xmm0,%xmm3 | 
 | 	vpxor	%xmm5,%xmm1,%xmm1 | 
 | 	addl	16(%esp),%edi | 
 | 	xorl	%ecx,%esi | 
 | 	movl	%eax,%ebp | 
 | 	shldl	$5,%eax,%eax | 
 | 	vpxor	%xmm2,%xmm1,%xmm1 | 
 | 	vmovdqa	%xmm5,96(%esp) | 
 | 	addl	%esi,%edi | 
 | 	xorl	%ecx,%ebp | 
 | 	vmovdqa	%xmm4,%xmm5 | 
 | 	vpaddd	%xmm0,%xmm4,%xmm4 | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	vpxor	%xmm3,%xmm1,%xmm1 | 
 | 	addl	20(%esp),%edx | 
 | 	xorl	%ebx,%ebp | 
 | 	movl	%edi,%esi | 
 | 	shldl	$5,%edi,%edi | 
 | 	vpsrld	$30,%xmm1,%xmm3 | 
 | 	vmovdqa	%xmm4,(%esp) | 
 | 	addl	%ebp,%edx | 
 | 	xorl	%ebx,%esi | 
 | 	shrdl	$7,%eax,%eax | 
 | 	addl	%edi,%edx | 
 | 	vpslld	$2,%xmm1,%xmm1 | 
 | 	addl	24(%esp),%ecx | 
 | 	xorl	%eax,%esi | 
 | 	movl	%edx,%ebp | 
 | 	shldl	$5,%edx,%edx | 
 | 	addl	%esi,%ecx | 
 | 	xorl	%eax,%ebp | 
 | 	shrdl	$7,%edi,%edi | 
 | 	addl	%edx,%ecx | 
 | 	vpor	%xmm3,%xmm1,%xmm1 | 
 | 	addl	28(%esp),%ebx | 
 | 	xorl	%edi,%ebp | 
 | 	vmovdqa	64(%esp),%xmm3 | 
 | 	movl	%ecx,%esi | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	addl	%ebp,%ebx | 
 | 	xorl	%edi,%esi | 
 | 	shrdl	$7,%edx,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	vpalignr	$8,%xmm0,%xmm1,%xmm4 | 
 | 	vpxor	%xmm6,%xmm2,%xmm2 | 
 | 	addl	32(%esp),%eax | 
 | 	xorl	%edx,%esi | 
 | 	movl	%ebx,%ebp | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	vpxor	%xmm3,%xmm2,%xmm2 | 
 | 	vmovdqa	%xmm6,64(%esp) | 
 | 	addl	%esi,%eax | 
 | 	xorl	%edx,%ebp | 
 | 	vmovdqa	128(%esp),%xmm6 | 
 | 	vpaddd	%xmm1,%xmm5,%xmm5 | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	vpxor	%xmm4,%xmm2,%xmm2 | 
 | 	addl	36(%esp),%edi | 
 | 	xorl	%ecx,%ebp | 
 | 	movl	%eax,%esi | 
 | 	shldl	$5,%eax,%eax | 
 | 	vpsrld	$30,%xmm2,%xmm4 | 
 | 	vmovdqa	%xmm5,16(%esp) | 
 | 	addl	%ebp,%edi | 
 | 	xorl	%ecx,%esi | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	vpslld	$2,%xmm2,%xmm2 | 
 | 	addl	40(%esp),%edx | 
 | 	xorl	%ebx,%esi | 
 | 	movl	%edi,%ebp | 
 | 	shldl	$5,%edi,%edi | 
 | 	addl	%esi,%edx | 
 | 	xorl	%ebx,%ebp | 
 | 	shrdl	$7,%eax,%eax | 
 | 	addl	%edi,%edx | 
 | 	vpor	%xmm4,%xmm2,%xmm2 | 
 | 	addl	44(%esp),%ecx | 
 | 	xorl	%eax,%ebp | 
 | 	vmovdqa	80(%esp),%xmm4 | 
 | 	movl	%edx,%esi | 
 | 	shldl	$5,%edx,%edx | 
 | 	addl	%ebp,%ecx | 
 | 	xorl	%eax,%esi | 
 | 	shrdl	$7,%edi,%edi | 
 | 	addl	%edx,%ecx | 
 | 	vpalignr	$8,%xmm1,%xmm2,%xmm5 | 
 | 	vpxor	%xmm7,%xmm3,%xmm3 | 
 | 	addl	48(%esp),%ebx | 
 | 	xorl	%edi,%esi | 
 | 	movl	%ecx,%ebp | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	vpxor	%xmm4,%xmm3,%xmm3 | 
 | 	vmovdqa	%xmm7,80(%esp) | 
 | 	addl	%esi,%ebx | 
 | 	xorl	%edi,%ebp | 
 | 	vmovdqa	%xmm6,%xmm7 | 
 | 	vpaddd	%xmm2,%xmm6,%xmm6 | 
 | 	shrdl	$7,%edx,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	vpxor	%xmm5,%xmm3,%xmm3 | 
 | 	addl	52(%esp),%eax | 
 | 	xorl	%edx,%ebp | 
 | 	movl	%ebx,%esi | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	vpsrld	$30,%xmm3,%xmm5 | 
 | 	vmovdqa	%xmm6,32(%esp) | 
 | 	addl	%ebp,%eax | 
 | 	xorl	%edx,%esi | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	vpslld	$2,%xmm3,%xmm3 | 
 | 	addl	56(%esp),%edi | 
 | 	xorl	%ecx,%esi | 
 | 	movl	%eax,%ebp | 
 | 	shldl	$5,%eax,%eax | 
 | 	addl	%esi,%edi | 
 | 	xorl	%ecx,%ebp | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	vpor	%xmm5,%xmm3,%xmm3 | 
 | 	addl	60(%esp),%edx | 
 | 	xorl	%ebx,%ebp | 
 | 	vmovdqa	96(%esp),%xmm5 | 
 | 	movl	%edi,%esi | 
 | 	shldl	$5,%edi,%edi | 
 | 	addl	%ebp,%edx | 
 | 	xorl	%ebx,%esi | 
 | 	shrdl	$7,%eax,%eax | 
 | 	addl	%edi,%edx | 
 | 	vpalignr	$8,%xmm2,%xmm3,%xmm6 | 
 | 	vpxor	%xmm0,%xmm4,%xmm4 | 
 | 	addl	(%esp),%ecx | 
 | 	xorl	%eax,%esi | 
 | 	movl	%edx,%ebp | 
 | 	shldl	$5,%edx,%edx | 
 | 	vpxor	%xmm5,%xmm4,%xmm4 | 
 | 	vmovdqa	%xmm0,96(%esp) | 
 | 	addl	%esi,%ecx | 
 | 	xorl	%eax,%ebp | 
 | 	vmovdqa	%xmm7,%xmm0 | 
 | 	vpaddd	%xmm3,%xmm7,%xmm7 | 
 | 	shrdl	$7,%edi,%edi | 
 | 	addl	%edx,%ecx | 
 | 	vpxor	%xmm6,%xmm4,%xmm4 | 
 | 	addl	4(%esp),%ebx | 
 | 	xorl	%edi,%ebp | 
 | 	movl	%ecx,%esi | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	vpsrld	$30,%xmm4,%xmm6 | 
 | 	vmovdqa	%xmm7,48(%esp) | 
 | 	addl	%ebp,%ebx | 
 | 	xorl	%edi,%esi | 
 | 	shrdl	$7,%edx,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	vpslld	$2,%xmm4,%xmm4 | 
 | 	addl	8(%esp),%eax | 
 | 	xorl	%edx,%esi | 
 | 	movl	%ebx,%ebp | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%esi,%eax | 
 | 	xorl	%edx,%ebp | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	vpor	%xmm6,%xmm4,%xmm4 | 
 | 	addl	12(%esp),%edi | 
 | 	xorl	%ecx,%ebp | 
 | 	vmovdqa	64(%esp),%xmm6 | 
 | 	movl	%eax,%esi | 
 | 	shldl	$5,%eax,%eax | 
 | 	addl	%ebp,%edi | 
 | 	xorl	%ecx,%esi | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	vpalignr	$8,%xmm3,%xmm4,%xmm7 | 
 | 	vpxor	%xmm1,%xmm5,%xmm5 | 
 | 	addl	16(%esp),%edx | 
 | 	xorl	%ebx,%esi | 
 | 	movl	%edi,%ebp | 
 | 	shldl	$5,%edi,%edi | 
 | 	vpxor	%xmm6,%xmm5,%xmm5 | 
 | 	vmovdqa	%xmm1,64(%esp) | 
 | 	addl	%esi,%edx | 
 | 	xorl	%ebx,%ebp | 
 | 	vmovdqa	%xmm0,%xmm1 | 
 | 	vpaddd	%xmm4,%xmm0,%xmm0 | 
 | 	shrdl	$7,%eax,%eax | 
 | 	addl	%edi,%edx | 
 | 	vpxor	%xmm7,%xmm5,%xmm5 | 
 | 	addl	20(%esp),%ecx | 
 | 	xorl	%eax,%ebp | 
 | 	movl	%edx,%esi | 
 | 	shldl	$5,%edx,%edx | 
 | 	vpsrld	$30,%xmm5,%xmm7 | 
 | 	vmovdqa	%xmm0,(%esp) | 
 | 	addl	%ebp,%ecx | 
 | 	xorl	%eax,%esi | 
 | 	shrdl	$7,%edi,%edi | 
 | 	addl	%edx,%ecx | 
 | 	vpslld	$2,%xmm5,%xmm5 | 
 | 	addl	24(%esp),%ebx | 
 | 	xorl	%edi,%esi | 
 | 	movl	%ecx,%ebp | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	addl	%esi,%ebx | 
 | 	xorl	%edi,%ebp | 
 | 	shrdl	$7,%edx,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	vpor	%xmm7,%xmm5,%xmm5 | 
 | 	addl	28(%esp),%eax | 
 | 	vmovdqa	80(%esp),%xmm7 | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	movl	%ebx,%esi | 
 | 	xorl	%edx,%ebp | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%ebp,%eax | 
 | 	xorl	%ecx,%esi | 
 | 	xorl	%edx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	vpalignr	$8,%xmm4,%xmm5,%xmm0 | 
 | 	vpxor	%xmm2,%xmm6,%xmm6 | 
 | 	addl	32(%esp),%edi | 
 | 	andl	%ecx,%esi | 
 | 	xorl	%edx,%ecx | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	vpxor	%xmm7,%xmm6,%xmm6 | 
 | 	vmovdqa	%xmm2,80(%esp) | 
 | 	movl	%eax,%ebp | 
 | 	xorl	%ecx,%esi | 
 | 	vmovdqa	%xmm1,%xmm2 | 
 | 	vpaddd	%xmm5,%xmm1,%xmm1 | 
 | 	shldl	$5,%eax,%eax | 
 | 	addl	%esi,%edi | 
 | 	vpxor	%xmm0,%xmm6,%xmm6 | 
 | 	xorl	%ebx,%ebp | 
 | 	xorl	%ecx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	addl	36(%esp),%edx | 
 | 	vpsrld	$30,%xmm6,%xmm0 | 
 | 	vmovdqa	%xmm1,16(%esp) | 
 | 	andl	%ebx,%ebp | 
 | 	xorl	%ecx,%ebx | 
 | 	shrdl	$7,%eax,%eax | 
 | 	movl	%edi,%esi | 
 | 	vpslld	$2,%xmm6,%xmm6 | 
 | 	xorl	%ebx,%ebp | 
 | 	shldl	$5,%edi,%edi | 
 | 	addl	%ebp,%edx | 
 | 	xorl	%eax,%esi | 
 | 	xorl	%ebx,%eax | 
 | 	addl	%edi,%edx | 
 | 	addl	40(%esp),%ecx | 
 | 	andl	%eax,%esi | 
 | 	vpor	%xmm0,%xmm6,%xmm6 | 
 | 	xorl	%ebx,%eax | 
 | 	shrdl	$7,%edi,%edi | 
 | 	vmovdqa	96(%esp),%xmm0 | 
 | 	movl	%edx,%ebp | 
 | 	xorl	%eax,%esi | 
 | 	shldl	$5,%edx,%edx | 
 | 	addl	%esi,%ecx | 
 | 	xorl	%edi,%ebp | 
 | 	xorl	%eax,%edi | 
 | 	addl	%edx,%ecx | 
 | 	addl	44(%esp),%ebx | 
 | 	andl	%edi,%ebp | 
 | 	xorl	%eax,%edi | 
 | 	shrdl	$7,%edx,%edx | 
 | 	movl	%ecx,%esi | 
 | 	xorl	%edi,%ebp | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	addl	%ebp,%ebx | 
 | 	xorl	%edx,%esi | 
 | 	xorl	%edi,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	vpalignr	$8,%xmm5,%xmm6,%xmm1 | 
 | 	vpxor	%xmm3,%xmm7,%xmm7 | 
 | 	addl	48(%esp),%eax | 
 | 	andl	%edx,%esi | 
 | 	xorl	%edi,%edx | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	vpxor	%xmm0,%xmm7,%xmm7 | 
 | 	vmovdqa	%xmm3,96(%esp) | 
 | 	movl	%ebx,%ebp | 
 | 	xorl	%edx,%esi | 
 | 	vmovdqa	144(%esp),%xmm3 | 
 | 	vpaddd	%xmm6,%xmm2,%xmm2 | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%esi,%eax | 
 | 	vpxor	%xmm1,%xmm7,%xmm7 | 
 | 	xorl	%ecx,%ebp | 
 | 	xorl	%edx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	addl	52(%esp),%edi | 
 | 	vpsrld	$30,%xmm7,%xmm1 | 
 | 	vmovdqa	%xmm2,32(%esp) | 
 | 	andl	%ecx,%ebp | 
 | 	xorl	%edx,%ecx | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	movl	%eax,%esi | 
 | 	vpslld	$2,%xmm7,%xmm7 | 
 | 	xorl	%ecx,%ebp | 
 | 	shldl	$5,%eax,%eax | 
 | 	addl	%ebp,%edi | 
 | 	xorl	%ebx,%esi | 
 | 	xorl	%ecx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	addl	56(%esp),%edx | 
 | 	andl	%ebx,%esi | 
 | 	vpor	%xmm1,%xmm7,%xmm7 | 
 | 	xorl	%ecx,%ebx | 
 | 	shrdl	$7,%eax,%eax | 
 | 	vmovdqa	64(%esp),%xmm1 | 
 | 	movl	%edi,%ebp | 
 | 	xorl	%ebx,%esi | 
 | 	shldl	$5,%edi,%edi | 
 | 	addl	%esi,%edx | 
 | 	xorl	%eax,%ebp | 
 | 	xorl	%ebx,%eax | 
 | 	addl	%edi,%edx | 
 | 	addl	60(%esp),%ecx | 
 | 	andl	%eax,%ebp | 
 | 	xorl	%ebx,%eax | 
 | 	shrdl	$7,%edi,%edi | 
 | 	movl	%edx,%esi | 
 | 	xorl	%eax,%ebp | 
 | 	shldl	$5,%edx,%edx | 
 | 	addl	%ebp,%ecx | 
 | 	xorl	%edi,%esi | 
 | 	xorl	%eax,%edi | 
 | 	addl	%edx,%ecx | 
 | 	vpalignr	$8,%xmm6,%xmm7,%xmm2 | 
 | 	vpxor	%xmm4,%xmm0,%xmm0 | 
 | 	addl	(%esp),%ebx | 
 | 	andl	%edi,%esi | 
 | 	xorl	%eax,%edi | 
 | 	shrdl	$7,%edx,%edx | 
 | 	vpxor	%xmm1,%xmm0,%xmm0 | 
 | 	vmovdqa	%xmm4,64(%esp) | 
 | 	movl	%ecx,%ebp | 
 | 	xorl	%edi,%esi | 
 | 	vmovdqa	%xmm3,%xmm4 | 
 | 	vpaddd	%xmm7,%xmm3,%xmm3 | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	addl	%esi,%ebx | 
 | 	vpxor	%xmm2,%xmm0,%xmm0 | 
 | 	xorl	%edx,%ebp | 
 | 	xorl	%edi,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	addl	4(%esp),%eax | 
 | 	vpsrld	$30,%xmm0,%xmm2 | 
 | 	vmovdqa	%xmm3,48(%esp) | 
 | 	andl	%edx,%ebp | 
 | 	xorl	%edi,%edx | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	movl	%ebx,%esi | 
 | 	vpslld	$2,%xmm0,%xmm0 | 
 | 	xorl	%edx,%ebp | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%ebp,%eax | 
 | 	xorl	%ecx,%esi | 
 | 	xorl	%edx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	addl	8(%esp),%edi | 
 | 	andl	%ecx,%esi | 
 | 	vpor	%xmm2,%xmm0,%xmm0 | 
 | 	xorl	%edx,%ecx | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	vmovdqa	80(%esp),%xmm2 | 
 | 	movl	%eax,%ebp | 
 | 	xorl	%ecx,%esi | 
 | 	shldl	$5,%eax,%eax | 
 | 	addl	%esi,%edi | 
 | 	xorl	%ebx,%ebp | 
 | 	xorl	%ecx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	addl	12(%esp),%edx | 
 | 	andl	%ebx,%ebp | 
 | 	xorl	%ecx,%ebx | 
 | 	shrdl	$7,%eax,%eax | 
 | 	movl	%edi,%esi | 
 | 	xorl	%ebx,%ebp | 
 | 	shldl	$5,%edi,%edi | 
 | 	addl	%ebp,%edx | 
 | 	xorl	%eax,%esi | 
 | 	xorl	%ebx,%eax | 
 | 	addl	%edi,%edx | 
 | 	vpalignr	$8,%xmm7,%xmm0,%xmm3 | 
 | 	vpxor	%xmm5,%xmm1,%xmm1 | 
 | 	addl	16(%esp),%ecx | 
 | 	andl	%eax,%esi | 
 | 	xorl	%ebx,%eax | 
 | 	shrdl	$7,%edi,%edi | 
 | 	vpxor	%xmm2,%xmm1,%xmm1 | 
 | 	vmovdqa	%xmm5,80(%esp) | 
 | 	movl	%edx,%ebp | 
 | 	xorl	%eax,%esi | 
 | 	vmovdqa	%xmm4,%xmm5 | 
 | 	vpaddd	%xmm0,%xmm4,%xmm4 | 
 | 	shldl	$5,%edx,%edx | 
 | 	addl	%esi,%ecx | 
 | 	vpxor	%xmm3,%xmm1,%xmm1 | 
 | 	xorl	%edi,%ebp | 
 | 	xorl	%eax,%edi | 
 | 	addl	%edx,%ecx | 
 | 	addl	20(%esp),%ebx | 
 | 	vpsrld	$30,%xmm1,%xmm3 | 
 | 	vmovdqa	%xmm4,(%esp) | 
 | 	andl	%edi,%ebp | 
 | 	xorl	%eax,%edi | 
 | 	shrdl	$7,%edx,%edx | 
 | 	movl	%ecx,%esi | 
 | 	vpslld	$2,%xmm1,%xmm1 | 
 | 	xorl	%edi,%ebp | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	addl	%ebp,%ebx | 
 | 	xorl	%edx,%esi | 
 | 	xorl	%edi,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	addl	24(%esp),%eax | 
 | 	andl	%edx,%esi | 
 | 	vpor	%xmm3,%xmm1,%xmm1 | 
 | 	xorl	%edi,%edx | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	vmovdqa	96(%esp),%xmm3 | 
 | 	movl	%ebx,%ebp | 
 | 	xorl	%edx,%esi | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%esi,%eax | 
 | 	xorl	%ecx,%ebp | 
 | 	xorl	%edx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	addl	28(%esp),%edi | 
 | 	andl	%ecx,%ebp | 
 | 	xorl	%edx,%ecx | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	movl	%eax,%esi | 
 | 	xorl	%ecx,%ebp | 
 | 	shldl	$5,%eax,%eax | 
 | 	addl	%ebp,%edi | 
 | 	xorl	%ebx,%esi | 
 | 	xorl	%ecx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	vpalignr	$8,%xmm0,%xmm1,%xmm4 | 
 | 	vpxor	%xmm6,%xmm2,%xmm2 | 
 | 	addl	32(%esp),%edx | 
 | 	andl	%ebx,%esi | 
 | 	xorl	%ecx,%ebx | 
 | 	shrdl	$7,%eax,%eax | 
 | 	vpxor	%xmm3,%xmm2,%xmm2 | 
 | 	vmovdqa	%xmm6,96(%esp) | 
 | 	movl	%edi,%ebp | 
 | 	xorl	%ebx,%esi | 
 | 	vmovdqa	%xmm5,%xmm6 | 
 | 	vpaddd	%xmm1,%xmm5,%xmm5 | 
 | 	shldl	$5,%edi,%edi | 
 | 	addl	%esi,%edx | 
 | 	vpxor	%xmm4,%xmm2,%xmm2 | 
 | 	xorl	%eax,%ebp | 
 | 	xorl	%ebx,%eax | 
 | 	addl	%edi,%edx | 
 | 	addl	36(%esp),%ecx | 
 | 	vpsrld	$30,%xmm2,%xmm4 | 
 | 	vmovdqa	%xmm5,16(%esp) | 
 | 	andl	%eax,%ebp | 
 | 	xorl	%ebx,%eax | 
 | 	shrdl	$7,%edi,%edi | 
 | 	movl	%edx,%esi | 
 | 	vpslld	$2,%xmm2,%xmm2 | 
 | 	xorl	%eax,%ebp | 
 | 	shldl	$5,%edx,%edx | 
 | 	addl	%ebp,%ecx | 
 | 	xorl	%edi,%esi | 
 | 	xorl	%eax,%edi | 
 | 	addl	%edx,%ecx | 
 | 	addl	40(%esp),%ebx | 
 | 	andl	%edi,%esi | 
 | 	vpor	%xmm4,%xmm2,%xmm2 | 
 | 	xorl	%eax,%edi | 
 | 	shrdl	$7,%edx,%edx | 
 | 	vmovdqa	64(%esp),%xmm4 | 
 | 	movl	%ecx,%ebp | 
 | 	xorl	%edi,%esi | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	addl	%esi,%ebx | 
 | 	xorl	%edx,%ebp | 
 | 	xorl	%edi,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	addl	44(%esp),%eax | 
 | 	andl	%edx,%ebp | 
 | 	xorl	%edi,%edx | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	movl	%ebx,%esi | 
 | 	xorl	%edx,%ebp | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%ebp,%eax | 
 | 	xorl	%edx,%esi | 
 | 	addl	%ebx,%eax | 
 | 	vpalignr	$8,%xmm1,%xmm2,%xmm5 | 
 | 	vpxor	%xmm7,%xmm3,%xmm3 | 
 | 	addl	48(%esp),%edi | 
 | 	xorl	%ecx,%esi | 
 | 	movl	%eax,%ebp | 
 | 	shldl	$5,%eax,%eax | 
 | 	vpxor	%xmm4,%xmm3,%xmm3 | 
 | 	vmovdqa	%xmm7,64(%esp) | 
 | 	addl	%esi,%edi | 
 | 	xorl	%ecx,%ebp | 
 | 	vmovdqa	%xmm6,%xmm7 | 
 | 	vpaddd	%xmm2,%xmm6,%xmm6 | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	vpxor	%xmm5,%xmm3,%xmm3 | 
 | 	addl	52(%esp),%edx | 
 | 	xorl	%ebx,%ebp | 
 | 	movl	%edi,%esi | 
 | 	shldl	$5,%edi,%edi | 
 | 	vpsrld	$30,%xmm3,%xmm5 | 
 | 	vmovdqa	%xmm6,32(%esp) | 
 | 	addl	%ebp,%edx | 
 | 	xorl	%ebx,%esi | 
 | 	shrdl	$7,%eax,%eax | 
 | 	addl	%edi,%edx | 
 | 	vpslld	$2,%xmm3,%xmm3 | 
 | 	addl	56(%esp),%ecx | 
 | 	xorl	%eax,%esi | 
 | 	movl	%edx,%ebp | 
 | 	shldl	$5,%edx,%edx | 
 | 	addl	%esi,%ecx | 
 | 	xorl	%eax,%ebp | 
 | 	shrdl	$7,%edi,%edi | 
 | 	addl	%edx,%ecx | 
 | 	vpor	%xmm5,%xmm3,%xmm3 | 
 | 	addl	60(%esp),%ebx | 
 | 	xorl	%edi,%ebp | 
 | 	movl	%ecx,%esi | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	addl	%ebp,%ebx | 
 | 	xorl	%edi,%esi | 
 | 	shrdl	$7,%edx,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	addl	(%esp),%eax | 
 | 	vpaddd	%xmm3,%xmm7,%xmm7 | 
 | 	xorl	%edx,%esi | 
 | 	movl	%ebx,%ebp | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%esi,%eax | 
 | 	vmovdqa	%xmm7,48(%esp) | 
 | 	xorl	%edx,%ebp | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	addl	4(%esp),%edi | 
 | 	xorl	%ecx,%ebp | 
 | 	movl	%eax,%esi | 
 | 	shldl	$5,%eax,%eax | 
 | 	addl	%ebp,%edi | 
 | 	xorl	%ecx,%esi | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	addl	8(%esp),%edx | 
 | 	xorl	%ebx,%esi | 
 | 	movl	%edi,%ebp | 
 | 	shldl	$5,%edi,%edi | 
 | 	addl	%esi,%edx | 
 | 	xorl	%ebx,%ebp | 
 | 	shrdl	$7,%eax,%eax | 
 | 	addl	%edi,%edx | 
 | 	addl	12(%esp),%ecx | 
 | 	xorl	%eax,%ebp | 
 | 	movl	%edx,%esi | 
 | 	shldl	$5,%edx,%edx | 
 | 	addl	%ebp,%ecx | 
 | 	xorl	%eax,%esi | 
 | 	shrdl	$7,%edi,%edi | 
 | 	addl	%edx,%ecx | 
 | 	movl	196(%esp),%ebp | 
 | 	cmpl	200(%esp),%ebp | 
 | 	je	L006done | 
 | 	vmovdqa	160(%esp),%xmm7 | 
 | 	vmovdqa	176(%esp),%xmm6 | 
 | 	vmovdqu	(%ebp),%xmm0 | 
 | 	vmovdqu	16(%ebp),%xmm1 | 
 | 	vmovdqu	32(%ebp),%xmm2 | 
 | 	vmovdqu	48(%ebp),%xmm3 | 
 | 	addl	$64,%ebp | 
 | 	vpshufb	%xmm6,%xmm0,%xmm0 | 
 | 	movl	%ebp,196(%esp) | 
 | 	vmovdqa	%xmm7,96(%esp) | 
 | 	addl	16(%esp),%ebx | 
 | 	xorl	%edi,%esi | 
 | 	vpshufb	%xmm6,%xmm1,%xmm1 | 
 | 	movl	%ecx,%ebp | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	vpaddd	%xmm7,%xmm0,%xmm4 | 
 | 	addl	%esi,%ebx | 
 | 	xorl	%edi,%ebp | 
 | 	shrdl	$7,%edx,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	vmovdqa	%xmm4,(%esp) | 
 | 	addl	20(%esp),%eax | 
 | 	xorl	%edx,%ebp | 
 | 	movl	%ebx,%esi | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%ebp,%eax | 
 | 	xorl	%edx,%esi | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	addl	24(%esp),%edi | 
 | 	xorl	%ecx,%esi | 
 | 	movl	%eax,%ebp | 
 | 	shldl	$5,%eax,%eax | 
 | 	addl	%esi,%edi | 
 | 	xorl	%ecx,%ebp | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	addl	28(%esp),%edx | 
 | 	xorl	%ebx,%ebp | 
 | 	movl	%edi,%esi | 
 | 	shldl	$5,%edi,%edi | 
 | 	addl	%ebp,%edx | 
 | 	xorl	%ebx,%esi | 
 | 	shrdl	$7,%eax,%eax | 
 | 	addl	%edi,%edx | 
 | 	addl	32(%esp),%ecx | 
 | 	xorl	%eax,%esi | 
 | 	vpshufb	%xmm6,%xmm2,%xmm2 | 
 | 	movl	%edx,%ebp | 
 | 	shldl	$5,%edx,%edx | 
 | 	vpaddd	%xmm7,%xmm1,%xmm5 | 
 | 	addl	%esi,%ecx | 
 | 	xorl	%eax,%ebp | 
 | 	shrdl	$7,%edi,%edi | 
 | 	addl	%edx,%ecx | 
 | 	vmovdqa	%xmm5,16(%esp) | 
 | 	addl	36(%esp),%ebx | 
 | 	xorl	%edi,%ebp | 
 | 	movl	%ecx,%esi | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	addl	%ebp,%ebx | 
 | 	xorl	%edi,%esi | 
 | 	shrdl	$7,%edx,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	addl	40(%esp),%eax | 
 | 	xorl	%edx,%esi | 
 | 	movl	%ebx,%ebp | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%esi,%eax | 
 | 	xorl	%edx,%ebp | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	addl	44(%esp),%edi | 
 | 	xorl	%ecx,%ebp | 
 | 	movl	%eax,%esi | 
 | 	shldl	$5,%eax,%eax | 
 | 	addl	%ebp,%edi | 
 | 	xorl	%ecx,%esi | 
 | 	shrdl	$7,%ebx,%ebx | 
 | 	addl	%eax,%edi | 
 | 	addl	48(%esp),%edx | 
 | 	xorl	%ebx,%esi | 
 | 	vpshufb	%xmm6,%xmm3,%xmm3 | 
 | 	movl	%edi,%ebp | 
 | 	shldl	$5,%edi,%edi | 
 | 	vpaddd	%xmm7,%xmm2,%xmm6 | 
 | 	addl	%esi,%edx | 
 | 	xorl	%ebx,%ebp | 
 | 	shrdl	$7,%eax,%eax | 
 | 	addl	%edi,%edx | 
 | 	vmovdqa	%xmm6,32(%esp) | 
 | 	addl	52(%esp),%ecx | 
 | 	xorl	%eax,%ebp | 
 | 	movl	%edx,%esi | 
 | 	shldl	$5,%edx,%edx | 
 | 	addl	%ebp,%ecx | 
 | 	xorl	%eax,%esi | 
 | 	shrdl	$7,%edi,%edi | 
 | 	addl	%edx,%ecx | 
 | 	addl	56(%esp),%ebx | 
 | 	xorl	%edi,%esi | 
 | 	movl	%ecx,%ebp | 
 | 	shldl	$5,%ecx,%ecx | 
 | 	addl	%esi,%ebx | 
 | 	xorl	%edi,%ebp | 
 | 	shrdl	$7,%edx,%edx | 
 | 	addl	%ecx,%ebx | 
 | 	addl	60(%esp),%eax | 
 | 	xorl	%edx,%ebp | 
 | 	movl	%ebx,%esi | 
 | 	shldl	$5,%ebx,%ebx | 
 | 	addl	%ebp,%eax | 
 | 	shrdl	$7,%ecx,%ecx | 
 | 	addl	%ebx,%eax | 
 | 	movl	192(%esp),%ebp | 
 | 	addl	(%ebp),%eax | 
 | 	addl	4(%ebp),%esi | 
 | 	addl	8(%ebp),%ecx | 
 | 	movl	%eax,(%ebp) | 
 | 	addl	12(%ebp),%edx | 
 | 	movl	%esi,4(%ebp) | 
 | 	addl	16(%ebp),%edi | 
 | 	movl	%ecx,%ebx | 
 | 	movl	%ecx,8(%ebp) | 
 | 	xorl	%edx,%ebx | 
 | 	movl	%edx,12(%ebp) | 
 | 	movl	%edi,16(%ebp) | 
 | 	movl	%esi,%ebp | 
 | 	andl	%ebx,%esi | 
 | 	movl	%ebp,%ebx | 
 | 	jmp	L005loop | 
 | .align	4,0x90 | 
L006done:
	// Reached when no further input block remains (je from the cmpl
	// 200(%esp) check above): run the remaining rounds of the final
	// block using the W+K values precomputed into 16..60(%esp), then
	// fold the working variables into the hash state and return.
	// These rounds use only xor mixing (parity function x^y^z), i.e.
	// the late SHA-1 rounds.
	addl	16(%esp),%ebx
	xorl	%edi,%esi
	movl	%ecx,%ebp
	shldl	$5,%ecx,%ecx
	addl	%esi,%ebx
	xorl	%edi,%ebp
	shrdl	$7,%edx,%edx
	addl	%ecx,%ebx
	addl	20(%esp),%eax
	xorl	%edx,%ebp
	movl	%ebx,%esi
	shldl	$5,%ebx,%ebx
	addl	%ebp,%eax
	xorl	%edx,%esi
	shrdl	$7,%ecx,%ecx
	addl	%ebx,%eax
	addl	24(%esp),%edi
	xorl	%ecx,%esi
	movl	%eax,%ebp
	shldl	$5,%eax,%eax
	addl	%esi,%edi
	xorl	%ecx,%ebp
	shrdl	$7,%ebx,%ebx
	addl	%eax,%edi
	addl	28(%esp),%edx
	xorl	%ebx,%ebp
	movl	%edi,%esi
	shldl	$5,%edi,%edi
	addl	%ebp,%edx
	xorl	%ebx,%esi
	shrdl	$7,%eax,%eax
	addl	%edi,%edx
	addl	32(%esp),%ecx
	xorl	%eax,%esi
	movl	%edx,%ebp
	shldl	$5,%edx,%edx
	addl	%esi,%ecx
	xorl	%eax,%ebp
	shrdl	$7,%edi,%edi
	addl	%edx,%ecx
	addl	36(%esp),%ebx
	xorl	%edi,%ebp
	movl	%ecx,%esi
	shldl	$5,%ecx,%ecx
	addl	%ebp,%ebx
	xorl	%edi,%esi
	shrdl	$7,%edx,%edx
	addl	%ecx,%ebx
	addl	40(%esp),%eax
	xorl	%edx,%esi
	movl	%ebx,%ebp
	shldl	$5,%ebx,%ebx
	addl	%esi,%eax
	xorl	%edx,%ebp
	shrdl	$7,%ecx,%ecx
	addl	%ebx,%eax
	addl	44(%esp),%edi
	xorl	%ecx,%ebp
	movl	%eax,%esi
	shldl	$5,%eax,%eax
	addl	%ebp,%edi
	xorl	%ecx,%esi
	shrdl	$7,%ebx,%ebx
	addl	%eax,%edi
	addl	48(%esp),%edx
	xorl	%ebx,%esi
	movl	%edi,%ebp
	shldl	$5,%edi,%edi
	addl	%esi,%edx
	xorl	%ebx,%ebp
	shrdl	$7,%eax,%eax
	addl	%edi,%edx
	addl	52(%esp),%ecx
	xorl	%eax,%ebp
	movl	%edx,%esi
	shldl	$5,%edx,%edx
	addl	%ebp,%ecx
	xorl	%eax,%esi
	shrdl	$7,%edi,%edi
	addl	%edx,%ecx
	addl	56(%esp),%ebx
	xorl	%edi,%esi
	movl	%ecx,%ebp
	shldl	$5,%ecx,%ecx
	addl	%esi,%ebx
	xorl	%edi,%ebp
	shrdl	$7,%edx,%edx
	addl	%ecx,%ebx
	addl	60(%esp),%eax
	xorl	%edx,%ebp
	movl	%ebx,%esi
	shldl	$5,%ebx,%ebx
	addl	%ebp,%eax
	shrdl	$7,%ecx,%ecx
	addl	%ebx,%eax
	vzeroall		// clear all [xy]mm registers (also scrubs message/schedule data)
	movl	192(%esp),%ebp	// 192(%esp) holds the hash-state pointer (see stores below)
	addl	(%ebp),%eax	// state[0] += a
	movl	204(%esp),%esp	// restore %esp from 204(%esp) — presumably saved at entry (not in this chunk)
	addl	4(%ebp),%esi	// state[1] += b (b lives in %esi here)
	addl	8(%ebp),%ecx	// state[2] += c
	movl	%eax,(%ebp)
	addl	12(%ebp),%edx	// state[3] += d
	movl	%esi,4(%ebp)
	addl	16(%ebp),%edi	// state[4] += e
	movl	%ecx,8(%ebp)
	movl	%edx,12(%ebp)
	movl	%edi,16(%ebp)
	// Restore callee-saved registers and return (matches the pushes in
	// the prologue above this chunk).
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.align	6,0x90
LK_XX_XX:
// SHA-1 round constants K, each broadcast across four 32-bit lanes so a
// single vpaddd folds K into four message-schedule words at once:
.long	1518500249,1518500249,1518500249,1518500249	// 0x5a827999 (rounds 0-19)
.long	1859775393,1859775393,1859775393,1859775393	// 0x6ed9eba1 (rounds 20-39)
.long	2400959708,2400959708,2400959708,2400959708	// 0x8f1bbcdc (rounds 40-59)
.long	3395469782,3395469782,3395469782,3395469782	// 0xca62c1d6 (rounds 60-79)
// 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f: as a (v)pshufb control
// this reverses the byte order within each 32-bit lane, converting the
// big-endian message words to host order.
.long	66051,67438087,134810123,202182159
// Full 16-byte reversal mask (bytes 15..0) — presumably another pshufb
// control; its use is not visible in this chunk.
.byte	15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
// NUL-terminated ASCII credit string:
// "SHA1 block transform for x86, CRYPTOGAMS by <appro@openssl.org>"
.byte	83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
.byte	102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82
.byte	89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112
.byte	114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
 | #endif  // !defined(OPENSSL_NO_ASM) && defined(OPENSSL_X86) && defined(__APPLE__) |