Make libcrypto.so position independent on i386.

This commit is contained in:
jkim 2016-05-10 20:31:09 +00:00
parent 61bb2811b2
commit 37810c099e
46 changed files with 66749 additions and 33345 deletions

View File

@ -26,7 +26,7 @@ SRCS= cpt_err.c cryptlib.c cversion.c ex_data.c mem.c mem_dbg.c o_dir.c \
.if ${MACHINE_CPUARCH} == "amd64"
SRCS+= x86_64cpuid.S
.elif ${MACHINE_CPUARCH} == "i386"
SRCS+= x86cpuid.s
SRCS+= x86cpuid.S
.else
SRCS+= mem_clr.c
.endif
@ -38,7 +38,7 @@ SRCS+= aes_cfb.c aes_ctr.c aes_ecb.c aes_ige.c aes_misc.c aes_ofb.c aes_wrap.c
SRCS+= aes-x86_64.S aesni-mb-x86_64.S aesni-sha1-x86_64.S \
aesni-sha256-x86_64.S aesni-x86_64.S bsaes-x86_64.S vpaes-x86_64.S
.elif ${MACHINE_CPUARCH} == "i386"
SRCS+= aes-586.s aesni-x86.s vpaes-x86.s
SRCS+= aes-586.S aesni-x86.S vpaes-x86.S
.else
SRCS+= aes_cbc.c aes_core.c
.endif
@ -63,9 +63,9 @@ INCS+= asn1.h asn1_mac.h asn1t.h
SRCS+= bf_cfb64.c bf_ecb.c bf_ofb64.c bf_skey.c
.if ${MACHINE_CPUARCH} == "i386"
.if ${MACHINE_CPU:Mi686}
SRCS+= bf-686.s
SRCS+= bf-686.S
.else
SRCS+= bf-586.s
SRCS+= bf-586.S
.endif
.else
SRCS+= bf_enc.c
@ -87,7 +87,7 @@ SRCS+= bn_add.c bn_blind.c bn_const.c bn_ctx.c bn_depr.c bn_div.c bn_err.c \
SRCS+= rsaz-avx2.S rsaz-x86_64.S rsaz_exp.c x86_64-gcc.c x86_64-gf2m.S \
x86_64-mont.S x86_64-mont5.S
.elif ${MACHINE_CPUARCH} == "i386"
SRCS+= bn-586.s co-586.s x86-gf2m.s x86-mont.s
SRCS+= bn-586.S co-586.S x86-gf2m.S x86-mont.S
.else
SRCS+= bn_asm.c
.endif
@ -102,7 +102,7 @@ SRCS+= cmll_cfb.c cmll_ctr.c cmll_ecb.c cmll_ofb.c cmll_utl.c
.if ${MACHINE_CPUARCH} == "amd64"
SRCS+= cmll_misc.c cmll-x86_64.S
.elif ${MACHINE_CPUARCH} == "i386"
SRCS+= cmll-x86.s
SRCS+= cmll-x86.S
.else
SRCS+= camellia.c cmll_cbc.c cmll_misc.c
.endif
@ -137,7 +137,7 @@ SRCS+= cbc_cksm.c cbc_enc.c cfb64ede.c cfb64enc.c cfb_enc.c des_old.c \
fcrypt.c ofb64ede.c ofb64enc.c ofb_enc.c pcbc_enc.c qud_cksm.c \
rand_key.c read2pwd.c rpc_enc.c set_key.c str2key.c xcbc_enc.c
.if ${MACHINE_CPUARCH} == "i386"
SRCS+= crypt586.s des-586.s
SRCS+= crypt586.S des-586.S
.else
SRCS+= des_enc.c fcrypt_b.c
.endif
@ -222,7 +222,7 @@ SRCS+= md5_dgst.c md5_one.c
.if ${MACHINE_CPUARCH} == "amd64"
SRCS+= md5-x86_64.S
.elif ${MACHINE_CPUARCH} == "i386"
SRCS+= md5-586.s
SRCS+= md5-586.S
.endif
INCS+= md5.h
@ -236,7 +236,7 @@ SRCS+= cbc128.c ccm128.c cfb128.c ctr128.c cts128.c gcm128.c ofb128.c \
.if ${MACHINE_CPUARCH} == "amd64"
SRCS+= aesni-gcm-x86_64.S ghash-x86_64.S
.elif ${MACHINE_CPUARCH} == "i386"
SRCS+= ghash-x86.s
SRCS+= ghash-x86.S
.endif
INCS+= modes.h
@ -282,7 +282,7 @@ SRCS+= rc4_utl.c
.if ${MACHINE_CPUARCH} == "amd64"
SRCS+= rc4-md5-x86_64.S rc4-x86_64.S
.elif ${MACHINE_CPUARCH} == "i386"
SRCS+= rc4-586.s
SRCS+= rc4-586.S
.else
SRCS+= rc4_enc.c rc4_skey.c
.endif
@ -291,7 +291,7 @@ INCS+= rc4.h
# rc5
SRCS+= rc5_ecb.c rc5_skey.c rc5cfb64.c rc5ofb64.c
.if ${MACHINE_CPUARCH} == "i386"
SRCS+= rc5-586.s
SRCS+= rc5-586.S
.else
SRCS+= rc5_enc.c
.endif
@ -300,7 +300,7 @@ INCS+= rc5.h
# ripemd
SRCS+= rmd_dgst.c rmd_one.c
.if ${MACHINE_CPUARCH} == "i386"
SRCS+= rmd-586.s
SRCS+= rmd-586.S
.endif
INCS+= ripemd.h
@ -321,7 +321,7 @@ SRCS+= sha1_one.c sha1dgst.c sha256.c sha512.c sha_dgst.c sha_one.c
SRCS+= sha1-mb-x86_64.S sha1-x86_64.S sha256-mb-x86_64.S sha256-x86_64.S \
sha512-x86_64.S
.elif ${MACHINE_CPUARCH} == "i386"
SRCS+= sha1-586.s sha256-586.s sha512-586.s
SRCS+= sha1-586.S sha256-586.S sha512-586.S
.endif
INCS+= sha.h
@ -352,7 +352,7 @@ SRCS+= wp_dgst.c
.if ${MACHINE_CPUARCH} == "amd64"
SRCS+= wp-x86_64.S
.elif ${MACHINE_CPUARCH} == "i386"
SRCS+= wp-mmx.s wp_block.c
SRCS+= wp-mmx.S wp_block.c
.else
SRCS+= wp_block.c
.endif
@ -386,9 +386,6 @@ CFLAGS+= -I${LCRYPTO_SRC}/crypto/asn1
CFLAGS+= -I${LCRYPTO_SRC}/crypto/evp
CFLAGS+= -I${LCRYPTO_SRC}/crypto/modes
.if !empty(SRCS:M*.s)
AFLAGS+= --noexecstack
.endif
.if !empty(SRCS:M*.S)
ACFLAGS+= -Wa,--noexecstack
.endif

View File

@ -1,8 +1,8 @@
# $FreeBSD$
# Use this to help generate the asm *.[Ss] files after an import. It is not
# Use this to help generate the asm *.S files after an import. It is not
# perfect by any means, but does what is needed.
# Do a 'make -f Makefile.asm all' and it will generate *.s. Move them
# to the i386 subdir, and correct any exposed paths and $ FreeBSD $ tags.
# Do a 'make -f Makefile.asm all' and it will generate *.S. Move them
# to the arch subdir, and correct any exposed paths and $ FreeBSD $ tags.
.include "Makefile.inc"
@ -127,16 +127,21 @@ SRCS+= wp-mmx.pl
# cpuid
SRCS+= x86cpuid.pl
ASM= ${SRCS:S/.pl/.s/}
ASM= ${SRCS:S/.pl/.S/}
all: ${ASM}
CLEANFILES+= ${SRCS:M*.pl:S/.pl$/.s/}
CLEANFILES+= ${SRCS:M*.pl:S/.pl$/.S/}
.SUFFIXES: .pl
.pl.s:
.pl.S:
( echo ' # $$'FreeBSD'$$' ;\
perl ${PERLPATH} ${.IMPSRC} elf ${CFLAGS} ) > ${.TARGET}
echo '#ifdef PIC' ;\
perl ${PERLPATH} ${.IMPSRC} elf ${CFLAGS} -fpic -DPIC ;\
echo '#else' ;\
perl ${PERLPATH} ${.IMPSRC} elf ${CFLAGS} ;\
echo '#endif') |\
sed -E 's|(\.file[[:blank:]]+)".*"|\1"${.TARGET}"|' > ${.TARGET}
.endif
.include <bsd.prog.mk>

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,897 +0,0 @@
# $FreeBSD$
.file "bf-586.s"
.text
# Blowfish block primitives for i386 (AT&T/GAS syntax).
# Machine-generated from OpenSSL's crypto/bf/asm/bf-586.pl perlasm script;
# do not hand-edit — regenerate from the .pl source instead.
#
# void BF_encrypt(BF_LONG data[2], const BF_KEY *key)
#   Encrypts one 64-bit block in place.  After the two initial pushes,
#   12(%esp) = data, 16(%esp) = key schedule (%ebp).
# Key-schedule layout assumed by the offsets below (BF_KEY):
#   P-array P[0..17] at 0..68(%ebp); the four S-boxes start at
#   72(%ebp), 1096(%ebp), 2120(%ebp) and 3144(%ebp) (1024 bytes apart).
# Register roles: %edi/%esi = left/right halves, %eax/%ecx/%ebx/%edx =
# byte extraction scratch for the F function.  Clobbers: eax,ebx,ecx,edx,flags.
.globl BF_encrypt
.type BF_encrypt,@function
.align 16
BF_encrypt:
.L_BF_encrypt_begin:
pushl %ebp
pushl %ebx
movl 12(%esp),%ebx
movl 16(%esp),%ebp
pushl %esi
pushl %edi
# load the two 32-bit halves; zero %eax/%ecx so byte inserts below are clean
movl (%ebx),%edi
movl 4(%ebx),%esi
xorl %eax,%eax
movl (%ebp),%ebx
xorl %ecx,%ecx
# whiten left half with P[0]
xorl %ebx,%edi
# 16 fully-unrolled Feistel rounds.  Each round xors the next subkey and
# F(half) into the other half, where
#   F(x) = ((S0[a] + S1[b]) ^ S2[c]) + S3[d]  for bytes a..d of x.
# The active half alternates between %edi and %esi each round.
# round 1 - subkey 4(%ebp)
movl 4(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 2 - subkey 8(%ebp)
movl 8(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 3 - subkey 12(%ebp)
movl 12(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 4 - subkey 16(%ebp)
movl 16(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 5 - subkey 20(%ebp)
movl 20(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 6 - subkey 24(%ebp)
movl 24(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 7 - subkey 28(%ebp)
movl 28(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 8 - subkey 32(%ebp)
movl 32(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 9 - subkey 36(%ebp)
movl 36(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 10 - subkey 40(%ebp)
movl 40(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 11 - subkey 44(%ebp)
movl 44(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 12 - subkey 48(%ebp)
movl 48(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 13 - subkey 52(%ebp)
movl 52(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 14 - subkey 56(%ebp)
movl 56(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 15 - subkey 60(%ebp)
movl 60(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 16 - subkey 64(%ebp)
movl 64(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
# finale: fold in F, whiten right half with P[17], store halves swapped
movl 20(%esp),%eax
xorl %ebx,%edi
movl 68(%ebp),%edx
xorl %edx,%esi
movl %edi,4(%eax)
movl %esi,(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size BF_encrypt,.-.L_BF_encrypt_begin
# void BF_decrypt(BF_LONG data[2], const BF_KEY *key)
#   Inverse of BF_encrypt: identical 16-round Feistel structure but the
#   subkeys are applied in reverse order — P[17] first (68(%ebp)), then
#   P[16]..P[1] (64(%ebp) down to 4(%ebp)), and P[0] last.
# Same key-schedule layout and register roles as BF_encrypt above.
.globl BF_decrypt
.type BF_decrypt,@function
.align 16
BF_decrypt:
.L_BF_decrypt_begin:
pushl %ebp
pushl %ebx
movl 12(%esp),%ebx
movl 16(%esp),%ebp
pushl %esi
pushl %edi
# load halves; clear %eax/%ecx for the byte-insertion scratch below
movl (%ebx),%edi
movl 4(%ebx),%esi
xorl %eax,%eax
movl 68(%ebp),%ebx
xorl %ecx,%ecx
# whiten left half with P[17]
xorl %ebx,%edi
# 16 unrolled rounds, subkeys descending from 64(%ebp) to 4(%ebp)
# round 1 - subkey 64(%ebp)
movl 64(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 2 - subkey 60(%ebp)
movl 60(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 3 - subkey 56(%ebp)
movl 56(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 4 - subkey 52(%ebp)
movl 52(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 5 - subkey 48(%ebp)
movl 48(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 6 - subkey 44(%ebp)
movl 44(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 7 - subkey 40(%ebp)
movl 40(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 8 - subkey 36(%ebp)
movl 36(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 9 - subkey 32(%ebp)
movl 32(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 10 - subkey 28(%ebp)
movl 28(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 11 - subkey 24(%ebp)
movl 24(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 12 - subkey 20(%ebp)
movl 20(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 13 - subkey 16(%ebp)
movl 16(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 14 - subkey 12(%ebp)
movl 12(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%edi
# round 15 - subkey 8(%ebp)
movl 8(%ebp),%edx
movl %edi,%ebx
xorl %edx,%esi
shrl $16,%ebx
movl %edi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
xorl %eax,%eax
xorl %ebx,%esi
# round 16 - subkey 4(%ebp)
movl 4(%ebp),%edx
movl %esi,%ebx
xorl %edx,%edi
shrl $16,%ebx
movl %esi,%edx
movb %bh,%al
andl $255,%ebx
movb %dh,%cl
andl $255,%edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax,%ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax,%ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx,%ebx
# finale: fold in F, whiten right half with P[0], store halves swapped
movl 20(%esp),%eax
xorl %ebx,%edi
movl (%ebp),%edx
xorl %edx,%esi
movl %edi,4(%eax)
movl %esi,(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size BF_decrypt,.-.L_BF_decrypt_begin
# void BF_cbc_encrypt(const unsigned char *in, unsigned char *out, long length,
#                     const BF_KEY *schedule, unsigned char *ivec, int enc)
#   CBC-mode driver generated by OpenSSL's perlasm cbc.pl: processes whole
#   8-byte blocks through BF_encrypt/BF_decrypt and then a 1..7-byte tail.
#   bswap is used because Blowfish operates on big-endian 32-bit words.
.globl BF_cbc_encrypt
.type BF_cbc_encrypt,@function
.align 16
BF_cbc_encrypt:
.L_BF_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# %ebp = length, load the IV into %esi:%edi
movl 28(%esp),%ebp
movl 36(%esp),%ebx
movl (%ebx),%esi
movl 4(%ebx),%edi
# build a small work frame: two copies of the IV plus the data[2]/key args
# that the BF_encrypt/BF_decrypt entry points expect on the stack
pushl %edi
pushl %esi
pushl %edi
pushl %esi
movl %esp,%ebx
movl 36(%esp),%esi
movl 40(%esp),%edi
movl 56(%esp),%ecx
movl 48(%esp),%eax
pushl %eax
pushl %ebx
# enc flag selects the encrypt or decrypt path
cmpl $0,%ecx
jz .L000decrypt
# round length down to a multiple of 8 (4294967288 == ~7); the low 3 bits
# are handled by the *_finish tail code
andl $4294967288,%ebp
movl 8(%esp),%eax
movl 12(%esp),%ebx
jz .L001encrypt_finish
.L002encrypt_loop:
# ciphertext[i] = E(plaintext[i] ^ chain), chain kept in %eax:%ebx
movl (%esi),%ecx
movl 4(%esi),%edx
xorl %ecx,%eax
xorl %edx,%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz .L002encrypt_loop
.L001encrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp
jz .L003finish
# PIC-safe computed jump: the table stores offsets relative to
# .L004PIC_point, so no absolute relocations are needed (this is why the
# file can live in a shared object)
call .L004PIC_point
.L004PIC_point:
popl %edx
leal .L005cbc_enc_jmp_table-.L004PIC_point(%edx),%ecx
movl (%ecx,%ebp,4),%ebp
addl %edx,%ebp
xorl %ecx,%ecx
xorl %edx,%edx
jmp *%ebp
# ej<n>: gather the final n plaintext bytes into %ecx:%edx (fall-through
# chains build larger tails from smaller ones)
.L006ej7:
movb 6(%esi),%dh
shll $8,%edx
.L007ej6:
movb 5(%esi),%dh
.L008ej5:
movb 4(%esi),%dl
.L009ej4:
movl (%esi),%ecx
jmp .L010ejend
.L011ej3:
movb 2(%esi),%ch
shll $8,%ecx
.L012ej2:
movb 1(%esi),%ch
.L013ej1:
movb (%esi),%cl
.L010ejend:
# encrypt the zero-padded final block
xorl %ecx,%eax
xorl %edx,%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
jmp .L003finish
.L000decrypt:
andl $4294967288,%ebp
movl 16(%esp),%eax
movl 20(%esp),%ebx
jz .L014decrypt_finish
.L015decrypt_loop:
# plaintext[i] = D(ciphertext[i]) ^ chain; ciphertext[i] becomes new chain
movl (%esi),%eax
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
movl %ecx,(%edi)
movl %edx,4(%edi)
movl %eax,16(%esp)
movl %ebx,20(%esp)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz .L015decrypt_loop
.L014decrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp
jz .L003finish
movl (%esi),%eax
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
# NOTE(review): unlike the encrypt tail, there is no jump-table dispatch
# before these dj labels in this listing, so execution falls straight into
# .L016dj7 regardless of the tail length; the upstream cbc.pl generator
# dispatches into dj<n> via a second PIC table — this rendered diff may
# have elided lines (hunk header claims 897 lines deleted, fewer shown).
# Verify against the generated file before drawing conclusions.
.L016dj7:
rorl $16,%edx
movb %dl,6(%edi)
shrl $16,%edx
.L017dj6:
movb %dh,5(%edi)
.L018dj5:
movb %dl,4(%edi)
.L019dj4:
movl %ecx,(%edi)
jmp .L020djend
.L021dj3:
rorl $16,%ecx
movb %cl,2(%edi)
shll $16,%ecx
# NOTE(review): dj2/dj1 store through %esi (the INPUT pointer) while
# dj7..dj3 store through %edi (the output pointer).  This looks like the
# known cbc.pl tail-store bug (partial-block output written to the input
# buffer) — confirm against upstream OpenSSL history before relying on it.
.L022dj2:
movb %ch,1(%esi)
.L023dj1:
movb %cl,(%esi)
.L020djend:
jmp .L003finish
.L003finish:
# tear down the work frame and write the updated chaining value (%eax:%ebx)
# back into ivec
movl 60(%esp),%ecx
addl $24,%esp
movl %eax,(%ecx)
movl %ebx,4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 64
# offsets of the ej<n> handlers relative to .L004PIC_point (index = tail len)
.L005cbc_enc_jmp_table:
.long 0
.long .L013ej1-.L004PIC_point
.long .L012ej2-.L004PIC_point
.long .L011ej3-.L004PIC_point
.long .L009ej4-.L004PIC_point
.long .L008ej5-.L004PIC_point
.long .L007ej6-.L004PIC_point
.long .L006ej7-.L004PIC_point
.align 64
.size BF_cbc_encrypt,.-.L_BF_cbc_encrypt_begin

File diff suppressed because it is too large Load Diff

View File

@ -1,865 +0,0 @@
# $FreeBSD$
.file "bf-686.s"
.text
# Blowfish primitives tuned for i686 (AT&T/GAS syntax), machine-generated
# from OpenSSL's crypto/bf/asm/bf-686.pl; do not hand-edit — regenerate.
# Same contract and key-schedule layout as bf-586.s, but the F-function
# byte extraction uses rorl + movb instead of shrl/andl, and the key
# pointer lives in %edi (halves in %ecx/%edx, S-box scratch in %esi/%ebp).
#
# void BF_encrypt(BF_LONG data[2], const BF_KEY *key)
#   20(%esp) = data, 24(%esp) = key (after the four pushes).
#   P[0..17] at 0..68(%edi); S-boxes at 72/1096/2120/3144(%edi).
.globl BF_encrypt
.type BF_encrypt,@function
.align 16
BF_encrypt:
.L_BF_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%eax
movl (%eax),%ecx
movl 4(%eax),%edx
movl 24(%esp),%edi
xorl %eax,%eax
xorl %ebx,%ebx
# whiten left half with P[0]
xorl (%edi),%ecx
# 16 unrolled Feistel rounds; rorl exposes each byte pair of the active
# half in %ch/%cl (or %dh/%dl) for the S-box lookups, and the halves
# alternate between %ecx and %edx each round.
# round 1 - subkey 4(%edi)
rorl $16,%ecx
movl 4(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 2 - subkey 8(%edi)
rorl $16,%edx
movl 8(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 3 - subkey 12(%edi)
rorl $16,%ecx
movl 12(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 4 - subkey 16(%edi)
rorl $16,%edx
movl 16(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 5 - subkey 20(%edi)
rorl $16,%ecx
movl 20(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 6 - subkey 24(%edi)
rorl $16,%edx
movl 24(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 7 - subkey 28(%edi)
rorl $16,%ecx
movl 28(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 8 - subkey 32(%edi)
rorl $16,%edx
movl 32(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 9 - subkey 36(%edi)
rorl $16,%ecx
movl 36(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 10 - subkey 40(%edi)
rorl $16,%edx
movl 40(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 11 - subkey 44(%edi)
rorl $16,%ecx
movl 44(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 12 - subkey 48(%edi)
rorl $16,%edx
movl 48(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 13 - subkey 52(%edi)
rorl $16,%ecx
movl 52(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 14 - subkey 56(%edi)
rorl $16,%edx
movl 56(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 15 - subkey 60(%edi)
rorl $16,%ecx
movl 60(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 16 - subkey 64(%edi)
rorl $16,%edx
movl 64(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# finale: whiten right half with P[17], store halves swapped back to data[]
xorl 68(%edi),%edx
movl 20(%esp),%eax
movl %edx,(%eax)
movl %ecx,4(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size BF_encrypt,.-.L_BF_encrypt_begin
# void BF_decrypt(BF_LONG data[2], const BF_KEY *key)
#   i686-tuned inverse of BF_encrypt above: identical round structure with
#   the subkeys applied in reverse — P[17] (68(%edi)) first, then
#   P[16]..P[1] (64(%edi) down to 4(%edi)), and P[0] last.
.globl BF_decrypt
.type BF_decrypt,@function
.align 16
BF_decrypt:
.L_BF_decrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%eax
movl (%eax),%ecx
movl 4(%eax),%edx
movl 24(%esp),%edi
xorl %eax,%eax
xorl %ebx,%ebx
# whiten left half with P[17]
xorl 68(%edi),%ecx
# 16 unrolled rounds, subkeys descending from 64(%edi) to 4(%edi)
# round 1 - subkey 64(%edi)
rorl $16,%ecx
movl 64(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 2 - subkey 60(%edi)
rorl $16,%edx
movl 60(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 3 - subkey 56(%edi)
rorl $16,%ecx
movl 56(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 4 - subkey 52(%edi)
rorl $16,%edx
movl 52(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 5 - subkey 48(%edi)
rorl $16,%ecx
movl 48(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 6 - subkey 44(%edi)
rorl $16,%edx
movl 44(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 7 - subkey 40(%edi)
rorl $16,%ecx
movl 40(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 8 - subkey 36(%edi)
rorl $16,%edx
movl 36(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 9 - subkey 32(%edi)
rorl $16,%ecx
movl 32(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 10 - subkey 28(%edi)
rorl $16,%edx
movl 28(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 11 - subkey 24(%edi)
rorl $16,%ecx
movl 24(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 12 - subkey 20(%edi)
rorl $16,%edx
movl 20(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 13 - subkey 16(%edi)
rorl $16,%ecx
movl 16(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 14 - subkey 12(%edi)
rorl $16,%edx
movl 12(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# round 15 - subkey 8(%edi)
rorl $16,%ecx
movl 8(%edi),%esi
movb %ch,%al
movb %cl,%bl
rorl $16,%ecx
xorl %esi,%edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch,%al
movb %cl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%edx
# round 16 - subkey 4(%edi)
rorl $16,%edx
movl 4(%edi),%esi
movb %dh,%al
movb %dl,%bl
rorl $16,%edx
xorl %esi,%ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh,%al
movb %dl,%bl
addl %ebp,%esi
movl 2120(%edi,%eax,4),%eax
xorl %eax,%esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp,%esi
xorl %eax,%eax
xorl %esi,%ecx
# finale: whiten right half with P[0], store halves swapped back to data[]
xorl (%edi),%edx
movl 20(%esp),%eax
movl %edx,(%eax)
movl %ecx,4(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size BF_decrypt,.-.L_BF_decrypt_begin
# void BF_cbc_encrypt(const unsigned char *in, unsigned char *out, long length,
#                     const BF_KEY *schedule, unsigned char *ivec, int enc)
#   CBC driver generated by perlasm cbc.pl — byte-for-byte the same logic
#   as the bf-586.s version: whole 8-byte blocks through
#   BF_encrypt/BF_decrypt, then a 1..7-byte tail.  bswap converts between
#   memory byte order and the big-endian words Blowfish operates on.
.globl BF_cbc_encrypt
.type BF_cbc_encrypt,@function
.align 16
BF_cbc_encrypt:
.L_BF_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# %ebp = length; load IV into %esi:%edi
movl 28(%esp),%ebp
movl 36(%esp),%ebx
movl (%ebx),%esi
movl 4(%ebx),%edi
# build the work frame: two IV copies plus the data/key argument slots
# that the BF_encrypt/BF_decrypt entry points read from the stack
pushl %edi
pushl %esi
pushl %edi
pushl %esi
movl %esp,%ebx
movl 36(%esp),%esi
movl 40(%esp),%edi
movl 56(%esp),%ecx
movl 48(%esp),%eax
pushl %eax
pushl %ebx
# enc flag selects encrypt vs decrypt path
cmpl $0,%ecx
jz .L000decrypt
# round length down to a multiple of 8 (4294967288 == ~7)
andl $4294967288,%ebp
movl 8(%esp),%eax
movl 12(%esp),%ebx
jz .L001encrypt_finish
.L002encrypt_loop:
# ciphertext[i] = E(plaintext[i] ^ chain), chain in %eax:%ebx
movl (%esi),%ecx
movl 4(%esi),%edx
xorl %ecx,%eax
xorl %edx,%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz .L002encrypt_loop
.L001encrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp
jz .L003finish
# PIC-safe computed jump via offsets relative to .L004PIC_point
call .L004PIC_point
.L004PIC_point:
popl %edx
leal .L005cbc_enc_jmp_table-.L004PIC_point(%edx),%ecx
movl (%ecx,%ebp,4),%ebp
addl %edx,%ebp
xorl %ecx,%ecx
xorl %edx,%edx
jmp *%ebp
# ej<n>: gather the final n plaintext bytes into %ecx:%edx
.L006ej7:
movb 6(%esi),%dh
shll $8,%edx
.L007ej6:
movb 5(%esi),%dh
.L008ej5:
movb 4(%esi),%dl
.L009ej4:
movl (%esi),%ecx
jmp .L010ejend
.L011ej3:
movb 2(%esi),%ch
shll $8,%ecx
.L012ej2:
movb 1(%esi),%ch
.L013ej1:
movb (%esi),%cl
.L010ejend:
# encrypt the zero-padded final block
xorl %ecx,%eax
xorl %edx,%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
jmp .L003finish
.L000decrypt:
andl $4294967288,%ebp
movl 16(%esp),%eax
movl 20(%esp),%ebx
jz .L014decrypt_finish
.L015decrypt_loop:
# plaintext[i] = D(ciphertext[i]) ^ chain; ciphertext becomes new chain
movl (%esi),%eax
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
movl %ecx,(%edi)
movl %edx,4(%edi)
movl %eax,16(%esp)
movl %ebx,20(%esp)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz .L015decrypt_loop
.L014decrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp
jz .L003finish
movl (%esi),%eax
movl 4(%esi),%ebx
bswap %eax
bswap %ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_BF_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
bswap %eax
bswap %ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
# NOTE(review): as in the bf-586.s copy, there is no dispatch before the
# dj labels in this listing (the rendered diff may have elided lines —
# the hunk header claims more lines than are shown); verify against the
# actual generated file.
.L016dj7:
rorl $16,%edx
movb %dl,6(%edi)
shrl $16,%edx
.L017dj6:
movb %dh,5(%edi)
.L018dj5:
movb %dl,4(%edi)
.L019dj4:
movl %ecx,(%edi)
jmp .L020djend
.L021dj3:
rorl $16,%ecx
movb %cl,2(%edi)
shll $16,%ecx
# NOTE(review): dj2/dj1 store through %esi (input pointer) while dj7..dj3
# store through %edi (output pointer) — looks like the known cbc.pl
# tail-store bug; confirm against upstream OpenSSL before relying on it.
.L022dj2:
movb %ch,1(%esi)
.L023dj1:
movb %cl,(%esi)
.L020djend:
jmp .L003finish
.L003finish:
# tear down work frame; write updated chaining value back into ivec
movl 60(%esp),%ecx
addl $24,%esp
movl %eax,(%ecx)
movl %ebx,4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 64
# offsets of ej<n> handlers relative to .L004PIC_point (index = tail len)
.L005cbc_enc_jmp_table:
.long 0
.long .L013ej1-.L004PIC_point
.long .L012ej2-.L004PIC_point
.long .L011ej3-.L004PIC_point
.long .L009ej4-.L004PIC_point
.long .L008ej5-.L004PIC_point
.long .L007ej6-.L004PIC_point
.long .L006ej7-.L004PIC_point
.align 64
.size BF_cbc_encrypt,.-.L_BF_cbc_encrypt_begin

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,876 +0,0 @@
# $FreeBSD$
.file "crypt586.s"
.text
.globl fcrypt_body
.type fcrypt_body,@function
.align 16
#-----------------------------------------------------------------------
# fcrypt_body -- DES core of the traditional crypt(3) password hash.
# Presumed C prototype (generated by OpenSSL's crypt586.pl; TODO
# confirm against fcrypt.c):
#   void fcrypt_body(DES_LONG *out, DES_key_schedule *ks,
#                    DES_LONG Eswap0, DES_LONG Eswap1);
# Runs 25 iterations of the 16-round DES loop on an all-zero block,
# with the salt-derived Eswap masks folded into every round, then
# applies the final permutation and stores the 64-bit result to out[0..1].
# NOTE(review): the "leal DES_SPtrans" below is an absolute relocation,
# i.e. this is the non-position-independent flavour of this file.
#-----------------------------------------------------------------------
fcrypt_body:
.L_fcrypt_body_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# %esi/%edi are the DES L/R halves; crypt(3) encrypts the zero block.
xorl %edi,%edi
xorl %esi,%esi
# Absolute address of the DES S-box tables (not PIC).
leal DES_SPtrans,%edx
pushl %edx
# After 5 pushes + return address, 28(%esp) = arg2 = key schedule.
movl 28(%esp),%ebp
# crypt(3) iterates the 16-round loop 25 times.
pushl $25
# Stack layout inside the loop:
#   (%esp)   iteration counter        4(%esp)  &DES_SPtrans
#   28(%esp) arg1 (out)               32(%esp) arg2 (key schedule)
#   36(%esp) arg3 (Eswap0)            40(%esp) arg4 (Eswap1)
.L000start:
# round 1: %edi ^= f(%esi), subkey words 0/4; the masks at 36/40(%esp)
# implement the salt-dependent E-bit swap specific to crypt(3).
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl (%ebp),%ebx
xorl %ebx,%eax
movl 4(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# round 2: %esi ^= f(%edi), subkey words 8/12
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 8(%ebp),%ebx
xorl %ebx,%eax
movl 12(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# round 3: %edi ^= f(%esi), subkey words 16/20
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 16(%ebp),%ebx
xorl %ebx,%eax
movl 20(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# round 4: %esi ^= f(%edi), subkey words 24/28
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 24(%ebp),%ebx
xorl %ebx,%eax
movl 28(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# round 5: %edi ^= f(%esi), subkey words 32/36
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 32(%ebp),%ebx
xorl %ebx,%eax
movl 36(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# round 6: %esi ^= f(%edi), subkey words 40/44
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 40(%ebp),%ebx
xorl %ebx,%eax
movl 44(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# round 7: %edi ^= f(%esi), subkey words 48/52
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 48(%ebp),%ebx
xorl %ebx,%eax
movl 52(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# round 8: %esi ^= f(%edi), subkey words 56/60
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 56(%ebp),%ebx
xorl %ebx,%eax
movl 60(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# round 9: %edi ^= f(%esi), subkey words 64/68
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 64(%ebp),%ebx
xorl %ebx,%eax
movl 68(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# round 10: %esi ^= f(%edi), subkey words 72/76
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 72(%ebp),%ebx
xorl %ebx,%eax
movl 76(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# round 11: %edi ^= f(%esi), subkey words 80/84
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 80(%ebp),%ebx
xorl %ebx,%eax
movl 84(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# round 12: %esi ^= f(%edi), subkey words 88/92
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 88(%ebp),%ebx
xorl %ebx,%eax
movl 92(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# round 13: %edi ^= f(%esi), subkey words 96/100
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 96(%ebp),%ebx
xorl %ebx,%eax
movl 100(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# round 14: %esi ^= f(%edi), subkey words 104/108
movl 36(%esp),%eax
movl %edi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %edi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 104(%ebp),%ebx
xorl %ebx,%eax
movl 108(%ebp),%ecx
xorl %edi,%eax
xorl %edi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# round 15: %edi ^= f(%esi), subkey words 112/116
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 112(%ebp),%ebx
xorl %ebx,%eax
movl 116(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%edi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%edi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%edi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%edi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%edi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%edi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%edi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%edi
movl 32(%esp),%ebp
# round 16: %esi ^= f(%edi), subkey words 120/124
movl 36(%esp),%eax
movl %esi,%edx
shrl $16,%edx
movl 40(%esp),%ecx
xorl %esi,%edx
andl %edx,%eax
andl %ecx,%edx
movl %eax,%ebx
shll $16,%ebx
movl %edx,%ecx
shll $16,%ecx
xorl %ebx,%eax
xorl %ecx,%edx
movl 120(%ebp),%ebx
xorl %ebx,%eax
movl 124(%ebp),%ecx
xorl %esi,%eax
xorl %esi,%edx
xorl %ecx,%edx
andl $0xfcfcfcfc,%eax
xorl %ebx,%ebx
andl $0xcfcfcfcf,%edx
xorl %ecx,%ecx
movb %al,%bl
movb %ah,%cl
rorl $4,%edx
movl 4(%esp),%ebp
xorl (%ebp,%ebx,1),%esi
movb %dl,%bl
xorl 0x200(%ebp,%ecx,1),%esi
movb %dh,%cl
shrl $16,%eax
xorl 0x100(%ebp,%ebx,1),%esi
movb %ah,%bl
shrl $16,%edx
xorl 0x300(%ebp,%ecx,1),%esi
movb %dh,%cl
andl $0xff,%eax
andl $0xff,%edx
movl 0x600(%ebp,%ebx,1),%ebx
xorl %ebx,%esi
movl 0x700(%ebp,%ecx,1),%ebx
xorl %ebx,%esi
movl 0x400(%ebp,%eax,1),%ebx
xorl %ebx,%esi
movl 0x500(%ebp,%edx,1),%ebx
xorl %ebx,%esi
movl 32(%esp),%ebp
# 16 rounds done: swap the halves and repeat 25 times in total.
movl (%esp),%ebx
movl %edi,%eax
decl %ebx
movl %esi,%edi
movl %eax,%esi
movl %ebx,(%esp)
jnz .L000start
# Final permutation (FP) of the 64-bit result held in %esi/%edi,
# done as a sequence of swap-with-mask steps; 28(%esp) = out.
movl 28(%esp),%edx
rorl $1,%edi
movl %esi,%eax
xorl %edi,%esi
andl $0xaaaaaaaa,%esi
xorl %esi,%eax
xorl %esi,%edi
roll $23,%eax
movl %eax,%esi
xorl %edi,%eax
andl $0x03fc03fc,%eax
xorl %eax,%esi
xorl %eax,%edi
roll $10,%esi
movl %esi,%eax
xorl %edi,%esi
andl $0x33333333,%esi
xorl %esi,%eax
xorl %esi,%edi
roll $18,%edi
movl %edi,%esi
xorl %eax,%edi
andl $0xfff0000f,%edi
xorl %edi,%esi
xorl %edi,%eax
roll $12,%esi
movl %esi,%edi
xorl %eax,%esi
andl $0xf0f0f0f0,%esi
xorl %esi,%edi
xorl %esi,%eax
rorl $4,%eax
# Store the result and unwind: drop counter + table ptr, restore regs.
movl %eax,(%edx)
movl %edi,4(%edx)
addl $8,%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size fcrypt_body,.-.L_fcrypt_body_begin

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,680 +0,0 @@
# $FreeBSD$
.file "md5-586.s"
.text
.globl md5_block_asm_data_order
.type md5_block_asm_data_order,@function
.align 16
#-----------------------------------------------------------------------
# md5_block_asm_data_order -- MD5 compression function (RFC 1321).
# Presumed C prototype (TODO confirm against md5_locl.h):
#   void md5_block_asm_data_order(MD5_CTX *ctx, const void *data,
#                                 size_t num);
# Processes `num` 64-byte blocks from `data` into the four 32-bit
# chaining values A,B,C,D stored at *ctx.  Register roles in the loop:
#   %eax=A  %ebx=B  %ecx=C  %edx=D
#   %esi = current input block, %ebp = X[k] (message word) / ctx,
#   %edi = round-function scratch; (%esp) = end-of-data pointer.
#-----------------------------------------------------------------------
md5_block_asm_data_order:
.L_md5_block_asm_data_order_begin:
pushl %esi
pushl %edi
movl 12(%esp),%edi
movl 16(%esp),%esi
movl 20(%esp),%ecx
pushl %ebp
shll $6,%ecx
pushl %ebx
addl %esi,%ecx
subl $64,%ecx
movl (%edi),%eax
# (%esp) = pointer to the last 64-byte block (loop bound).
pushl %ecx
movl 4(%edi),%ebx
movl 8(%edi),%ecx
movl 12(%edi),%edx
.L000start:
# Round 1 (steps 0-15): F(b,c,d) = (b&c)|(~b&d); shifts 7,12,17,22.
movl %ecx,%edi
movl (%esi),%ebp
xorl %edx,%edi
andl %ebx,%edi
leal 3614090360(%eax,%ebp,1),%eax
xorl %edx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $7,%eax
movl 4(%esi),%ebp
addl %ebx,%eax
xorl %ecx,%edi
andl %eax,%edi
leal 3905402710(%edx,%ebp,1),%edx
xorl %ecx,%edi
addl %edi,%edx
movl %eax,%edi
roll $12,%edx
movl 8(%esi),%ebp
addl %eax,%edx
xorl %ebx,%edi
andl %edx,%edi
leal 606105819(%ecx,%ebp,1),%ecx
xorl %ebx,%edi
addl %edi,%ecx
movl %edx,%edi
roll $17,%ecx
movl 12(%esi),%ebp
addl %edx,%ecx
xorl %eax,%edi
andl %ecx,%edi
leal 3250441966(%ebx,%ebp,1),%ebx
xorl %eax,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $22,%ebx
movl 16(%esi),%ebp
addl %ecx,%ebx
xorl %edx,%edi
andl %ebx,%edi
leal 4118548399(%eax,%ebp,1),%eax
xorl %edx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $7,%eax
movl 20(%esi),%ebp
addl %ebx,%eax
xorl %ecx,%edi
andl %eax,%edi
leal 1200080426(%edx,%ebp,1),%edx
xorl %ecx,%edi
addl %edi,%edx
movl %eax,%edi
roll $12,%edx
movl 24(%esi),%ebp
addl %eax,%edx
xorl %ebx,%edi
andl %edx,%edi
leal 2821735955(%ecx,%ebp,1),%ecx
xorl %ebx,%edi
addl %edi,%ecx
movl %edx,%edi
roll $17,%ecx
movl 28(%esi),%ebp
addl %edx,%ecx
xorl %eax,%edi
andl %ecx,%edi
leal 4249261313(%ebx,%ebp,1),%ebx
xorl %eax,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $22,%ebx
movl 32(%esi),%ebp
addl %ecx,%ebx
xorl %edx,%edi
andl %ebx,%edi
leal 1770035416(%eax,%ebp,1),%eax
xorl %edx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $7,%eax
movl 36(%esi),%ebp
addl %ebx,%eax
xorl %ecx,%edi
andl %eax,%edi
leal 2336552879(%edx,%ebp,1),%edx
xorl %ecx,%edi
addl %edi,%edx
movl %eax,%edi
roll $12,%edx
movl 40(%esi),%ebp
addl %eax,%edx
xorl %ebx,%edi
andl %edx,%edi
leal 4294925233(%ecx,%ebp,1),%ecx
xorl %ebx,%edi
addl %edi,%ecx
movl %edx,%edi
roll $17,%ecx
movl 44(%esi),%ebp
addl %edx,%ecx
xorl %eax,%edi
andl %ecx,%edi
leal 2304563134(%ebx,%ebp,1),%ebx
xorl %eax,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $22,%ebx
movl 48(%esi),%ebp
addl %ecx,%ebx
xorl %edx,%edi
andl %ebx,%edi
leal 1804603682(%eax,%ebp,1),%eax
xorl %edx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $7,%eax
movl 52(%esi),%ebp
addl %ebx,%eax
xorl %ecx,%edi
andl %eax,%edi
leal 4254626195(%edx,%ebp,1),%edx
xorl %ecx,%edi
addl %edi,%edx
movl %eax,%edi
roll $12,%edx
movl 56(%esi),%ebp
addl %eax,%edx
xorl %ebx,%edi
andl %edx,%edi
leal 2792965006(%ecx,%ebp,1),%ecx
xorl %ebx,%edi
addl %edi,%ecx
movl %edx,%edi
roll $17,%ecx
movl 60(%esi),%ebp
addl %edx,%ecx
xorl %eax,%edi
andl %ecx,%edi
leal 1236535329(%ebx,%ebp,1),%ebx
xorl %eax,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $22,%ebx
# Round 2 (steps 16-31): G(b,c,d) = (b&d)|(c&~d); shifts 5,9,14,20.
movl 4(%esi),%ebp
addl %ecx,%ebx
leal 4129170786(%eax,%ebp,1),%eax
xorl %ebx,%edi
andl %edx,%edi
movl 24(%esi),%ebp
xorl %ecx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $5,%eax
addl %ebx,%eax
leal 3225465664(%edx,%ebp,1),%edx
xorl %eax,%edi
andl %ecx,%edi
movl 44(%esi),%ebp
xorl %ebx,%edi
addl %edi,%edx
movl %eax,%edi
roll $9,%edx
addl %eax,%edx
leal 643717713(%ecx,%ebp,1),%ecx
xorl %edx,%edi
andl %ebx,%edi
movl (%esi),%ebp
xorl %eax,%edi
addl %edi,%ecx
movl %edx,%edi
roll $14,%ecx
addl %edx,%ecx
leal 3921069994(%ebx,%ebp,1),%ebx
xorl %ecx,%edi
andl %eax,%edi
movl 20(%esi),%ebp
xorl %edx,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $20,%ebx
addl %ecx,%ebx
leal 3593408605(%eax,%ebp,1),%eax
xorl %ebx,%edi
andl %edx,%edi
movl 40(%esi),%ebp
xorl %ecx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $5,%eax
addl %ebx,%eax
leal 38016083(%edx,%ebp,1),%edx
xorl %eax,%edi
andl %ecx,%edi
movl 60(%esi),%ebp
xorl %ebx,%edi
addl %edi,%edx
movl %eax,%edi
roll $9,%edx
addl %eax,%edx
leal 3634488961(%ecx,%ebp,1),%ecx
xorl %edx,%edi
andl %ebx,%edi
movl 16(%esi),%ebp
xorl %eax,%edi
addl %edi,%ecx
movl %edx,%edi
roll $14,%ecx
addl %edx,%ecx
leal 3889429448(%ebx,%ebp,1),%ebx
xorl %ecx,%edi
andl %eax,%edi
movl 36(%esi),%ebp
xorl %edx,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $20,%ebx
addl %ecx,%ebx
leal 568446438(%eax,%ebp,1),%eax
xorl %ebx,%edi
andl %edx,%edi
movl 56(%esi),%ebp
xorl %ecx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $5,%eax
addl %ebx,%eax
leal 3275163606(%edx,%ebp,1),%edx
xorl %eax,%edi
andl %ecx,%edi
movl 12(%esi),%ebp
xorl %ebx,%edi
addl %edi,%edx
movl %eax,%edi
roll $9,%edx
addl %eax,%edx
leal 4107603335(%ecx,%ebp,1),%ecx
xorl %edx,%edi
andl %ebx,%edi
movl 32(%esi),%ebp
xorl %eax,%edi
addl %edi,%ecx
movl %edx,%edi
roll $14,%ecx
addl %edx,%ecx
leal 1163531501(%ebx,%ebp,1),%ebx
xorl %ecx,%edi
andl %eax,%edi
movl 52(%esi),%ebp
xorl %edx,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $20,%ebx
addl %ecx,%ebx
leal 2850285829(%eax,%ebp,1),%eax
xorl %ebx,%edi
andl %edx,%edi
movl 8(%esi),%ebp
xorl %ecx,%edi
addl %edi,%eax
movl %ebx,%edi
roll $5,%eax
addl %ebx,%eax
leal 4243563512(%edx,%ebp,1),%edx
xorl %eax,%edi
andl %ecx,%edi
movl 28(%esi),%ebp
xorl %ebx,%edi
addl %edi,%edx
movl %eax,%edi
roll $9,%edx
addl %eax,%edx
leal 1735328473(%ecx,%ebp,1),%ecx
xorl %edx,%edi
andl %ebx,%edi
movl 48(%esi),%ebp
xorl %eax,%edi
addl %edi,%ecx
movl %edx,%edi
roll $14,%ecx
addl %edx,%ecx
leal 2368359562(%ebx,%ebp,1),%ebx
xorl %ecx,%edi
andl %eax,%edi
movl 20(%esi),%ebp
xorl %edx,%edi
addl %edi,%ebx
movl %ecx,%edi
roll $20,%ebx
addl %ecx,%ebx
# Round 3 (steps 32-47): H(b,c,d) = b^c^d; shifts 4,11,16,23.
xorl %edx,%edi
xorl %ebx,%edi
leal 4294588738(%eax,%ebp,1),%eax
addl %edi,%eax
roll $4,%eax
movl 32(%esi),%ebp
movl %ebx,%edi
leal 2272392833(%edx,%ebp,1),%edx
addl %ebx,%eax
xorl %ecx,%edi
xorl %eax,%edi
movl 44(%esi),%ebp
addl %edi,%edx
movl %eax,%edi
roll $11,%edx
addl %eax,%edx
xorl %ebx,%edi
xorl %edx,%edi
leal 1839030562(%ecx,%ebp,1),%ecx
addl %edi,%ecx
roll $16,%ecx
movl 56(%esi),%ebp
movl %edx,%edi
leal 4259657740(%ebx,%ebp,1),%ebx
addl %edx,%ecx
xorl %eax,%edi
xorl %ecx,%edi
movl 4(%esi),%ebp
addl %edi,%ebx
movl %ecx,%edi
roll $23,%ebx
addl %ecx,%ebx
xorl %edx,%edi
xorl %ebx,%edi
leal 2763975236(%eax,%ebp,1),%eax
addl %edi,%eax
roll $4,%eax
movl 16(%esi),%ebp
movl %ebx,%edi
leal 1272893353(%edx,%ebp,1),%edx
addl %ebx,%eax
xorl %ecx,%edi
xorl %eax,%edi
movl 28(%esi),%ebp
addl %edi,%edx
movl %eax,%edi
roll $11,%edx
addl %eax,%edx
xorl %ebx,%edi
xorl %edx,%edi
leal 4139469664(%ecx,%ebp,1),%ecx
addl %edi,%ecx
roll $16,%ecx
movl 40(%esi),%ebp
movl %edx,%edi
leal 3200236656(%ebx,%ebp,1),%ebx
addl %edx,%ecx
xorl %eax,%edi
xorl %ecx,%edi
movl 52(%esi),%ebp
addl %edi,%ebx
movl %ecx,%edi
roll $23,%ebx
addl %ecx,%ebx
xorl %edx,%edi
xorl %ebx,%edi
leal 681279174(%eax,%ebp,1),%eax
addl %edi,%eax
roll $4,%eax
movl (%esi),%ebp
movl %ebx,%edi
leal 3936430074(%edx,%ebp,1),%edx
addl %ebx,%eax
xorl %ecx,%edi
xorl %eax,%edi
movl 12(%esi),%ebp
addl %edi,%edx
movl %eax,%edi
roll $11,%edx
addl %eax,%edx
xorl %ebx,%edi
xorl %edx,%edi
leal 3572445317(%ecx,%ebp,1),%ecx
addl %edi,%ecx
roll $16,%ecx
movl 24(%esi),%ebp
movl %edx,%edi
leal 76029189(%ebx,%ebp,1),%ebx
addl %edx,%ecx
xorl %eax,%edi
xorl %ecx,%edi
movl 36(%esi),%ebp
addl %edi,%ebx
movl %ecx,%edi
roll $23,%ebx
addl %ecx,%ebx
xorl %edx,%edi
xorl %ebx,%edi
leal 3654602809(%eax,%ebp,1),%eax
addl %edi,%eax
roll $4,%eax
movl 48(%esi),%ebp
movl %ebx,%edi
leal 3873151461(%edx,%ebp,1),%edx
addl %ebx,%eax
xorl %ecx,%edi
xorl %eax,%edi
movl 60(%esi),%ebp
addl %edi,%edx
movl %eax,%edi
roll $11,%edx
addl %eax,%edx
xorl %ebx,%edi
xorl %edx,%edi
leal 530742520(%ecx,%ebp,1),%ecx
addl %edi,%ecx
roll $16,%ecx
movl 8(%esi),%ebp
movl %edx,%edi
leal 3299628645(%ebx,%ebp,1),%ebx
addl %edx,%ecx
xorl %eax,%edi
xorl %ecx,%edi
movl (%esi),%ebp
addl %edi,%ebx
# Round 4 (steps 48-63): I(b,c,d) = c^(b|~d); shifts 6,10,15,21.
# %edi = -1 then xor gives ~d for the "b | ~d" term.
movl $-1,%edi
roll $23,%ebx
addl %ecx,%ebx
xorl %edx,%edi
orl %ebx,%edi
leal 4096336452(%eax,%ebp,1),%eax
xorl %ecx,%edi
movl 28(%esi),%ebp
addl %edi,%eax
movl $-1,%edi
roll $6,%eax
xorl %ecx,%edi
addl %ebx,%eax
orl %eax,%edi
leal 1126891415(%edx,%ebp,1),%edx
xorl %ebx,%edi
movl 56(%esi),%ebp
addl %edi,%edx
movl $-1,%edi
roll $10,%edx
xorl %ebx,%edi
addl %eax,%edx
orl %edx,%edi
leal 2878612391(%ecx,%ebp,1),%ecx
xorl %eax,%edi
movl 20(%esi),%ebp
addl %edi,%ecx
movl $-1,%edi
roll $15,%ecx
xorl %eax,%edi
addl %edx,%ecx
orl %ecx,%edi
leal 4237533241(%ebx,%ebp,1),%ebx
xorl %edx,%edi
movl 48(%esi),%ebp
addl %edi,%ebx
movl $-1,%edi
roll $21,%ebx
xorl %edx,%edi
addl %ecx,%ebx
orl %ebx,%edi
leal 1700485571(%eax,%ebp,1),%eax
xorl %ecx,%edi
movl 12(%esi),%ebp
addl %edi,%eax
movl $-1,%edi
roll $6,%eax
xorl %ecx,%edi
addl %ebx,%eax
orl %eax,%edi
leal 2399980690(%edx,%ebp,1),%edx
xorl %ebx,%edi
movl 40(%esi),%ebp
addl %edi,%edx
movl $-1,%edi
roll $10,%edx
xorl %ebx,%edi
addl %eax,%edx
orl %edx,%edi
leal 4293915773(%ecx,%ebp,1),%ecx
xorl %eax,%edi
movl 4(%esi),%ebp
addl %edi,%ecx
movl $-1,%edi
roll $15,%ecx
xorl %eax,%edi
addl %edx,%ecx
orl %ecx,%edi
leal 2240044497(%ebx,%ebp,1),%ebx
xorl %edx,%edi
movl 32(%esi),%ebp
addl %edi,%ebx
movl $-1,%edi
roll $21,%ebx
xorl %edx,%edi
addl %ecx,%ebx
orl %ebx,%edi
leal 1873313359(%eax,%ebp,1),%eax
xorl %ecx,%edi
movl 60(%esi),%ebp
addl %edi,%eax
movl $-1,%edi
roll $6,%eax
xorl %ecx,%edi
addl %ebx,%eax
orl %eax,%edi
leal 4264355552(%edx,%ebp,1),%edx
xorl %ebx,%edi
movl 24(%esi),%ebp
addl %edi,%edx
movl $-1,%edi
roll $10,%edx
xorl %ebx,%edi
addl %eax,%edx
orl %edx,%edi
leal 2734768916(%ecx,%ebp,1),%ecx
xorl %eax,%edi
movl 52(%esi),%ebp
addl %edi,%ecx
movl $-1,%edi
roll $15,%ecx
xorl %eax,%edi
addl %edx,%ecx
orl %ecx,%edi
leal 1309151649(%ebx,%ebp,1),%ebx
xorl %edx,%edi
movl 16(%esi),%ebp
addl %edi,%ebx
movl $-1,%edi
roll $21,%ebx
xorl %edx,%edi
addl %ecx,%ebx
orl %ebx,%edi
leal 4149444226(%eax,%ebp,1),%eax
xorl %ecx,%edi
movl 44(%esi),%ebp
addl %edi,%eax
movl $-1,%edi
roll $6,%eax
xorl %ecx,%edi
addl %ebx,%eax
orl %eax,%edi
leal 3174756917(%edx,%ebp,1),%edx
xorl %ebx,%edi
movl 8(%esi),%ebp
addl %edi,%edx
movl $-1,%edi
roll $10,%edx
xorl %ebx,%edi
addl %eax,%edx
orl %edx,%edi
leal 718787259(%ecx,%ebp,1),%ecx
xorl %eax,%edi
movl 36(%esi),%ebp
addl %edi,%ecx
movl $-1,%edi
roll $15,%ecx
xorl %eax,%edi
addl %edx,%ecx
orl %ecx,%edi
leal 3951481745(%ebx,%ebp,1),%ebx
xorl %edx,%edi
# Block done: fold A,B,C,D back into the chaining values at *ctx
# (24(%esp) = ctx), advance %esi, and loop while blocks remain.
movl 24(%esp),%ebp
addl %edi,%ebx
addl $64,%esi
roll $21,%ebx
movl (%ebp),%edi
addl %ecx,%ebx
addl %edi,%eax
movl 4(%ebp),%edi
addl %edi,%ebx
movl 8(%ebp),%edi
addl %edi,%ecx
movl 12(%ebp),%edi
addl %edi,%edx
movl %eax,(%ebp)
movl %ebx,4(%ebp)
movl (%esp),%edi
movl %ecx,8(%ebp)
movl %edx,12(%ebp)
cmpl %esi,%edi
jae .L000start
# Drop the loop-bound slot and restore callee-saved registers.
popl %eax
popl %ebx
popl %ebp
popl %edi
popl %esi
ret
.size md5_block_asm_data_order,.-.L_md5_block_asm_data_order_begin

View File

@ -0,0 +1,757 @@
# $FreeBSD$
#ifdef PIC
.file "rc4-586.S"
.text
.globl RC4
.type RC4,@function
.align 16
#-----------------------------------------------------------------------
# RC4 -- stream-cipher encrypt/decrypt, PIC build.
# Presumed C prototype (TODO confirm against rc4.h):
#   void RC4(RC4_KEY *key, size_t len, const unsigned char *inp,
#            unsigned char *out);
# Layout of RC4_KEY assumed here: dword x, dword y, then the S array
# (256 dwords, or 256 bytes in the "char" variant, flagged by
# S[256] == -1 as set by private_RC4_set_key).
# Register roles: %edi = &key->data, %al = x, %bl = y, %esi = input
# cursor, %ebp = out-inp delta (so output byte is -1(%ebp,%esi,1)),
# 24(%esp) = end-of-input pointer.
# Three code paths: 8-bytes/iter MMX loop, 4-bytes/iter integer loop,
# and byte-at-a-time loops.
#-----------------------------------------------------------------------
RC4:
.L_RC4_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%edi
movl 24(%esp),%edx
movl 28(%esp),%esi
movl 32(%esp),%ebp
xorl %eax,%eax
xorl %ebx,%ebx
# len == 0: nothing to do.
cmpl $0,%edx
je .L000abort
# Load x and y, then point %edi at the S array.
movb (%edi),%al
movb 4(%edi),%bl
addl $8,%edi
# 24(%esp) now caches inp+len; %ebp becomes out-inp.
leal (%esi,%edx,1),%ecx
subl %esi,%ebp
movl %ecx,24(%esp)
incb %al
# S stored as bytes?  (marker written by private_RC4_set_key)
cmpl $-1,256(%edi)
je .L001RC4_CHAR
movl (%edi,%eax,4),%ecx
andl $-4,%edx
jz .L002loop1
movl %ebp,32(%esp)
testl $-8,%edx
jz .L003go4loop4
# PIC idiom: call/pop yields the current EIP so OPENSSL_ia32cap_P can
# be addressed PC-relative (this is the point of the PIC build).
call .L004PIC_me_up
.L004PIC_me_up:
popl %ebp
leal OPENSSL_ia32cap_P-.L004PIC_me_up(%ebp),%ebp
# Bit 26 of OPENSSL_ia32cap_P (SSE2 per CPUID.EDX -- used as the gate
# for the 8-byte MMX loop; TODO confirm flag semantics in x86cpuid).
btl $26,(%ebp)
jnc .L003go4loop4
movl 32(%esp),%ebp
# MMX path: process 8 bytes per iteration, keystream built in %mm2.
andl $-8,%edx
leal -8(%esi,%edx,1),%edx
movl %edx,-4(%edi)
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
movq (%esi),%mm0
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm2
jmp .L005loop_mmx_enter
.align 16
.L006loop_mmx:
addb %cl,%bl
psllq $56,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movq (%esi),%mm0
movq %mm2,-8(%ebp,%esi,1)
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm2
.L005loop_mmx_enter:
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm0,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $8,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $16,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $24,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $32,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $40,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $48,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
movl %ebx,%edx
xorl %ebx,%ebx
movb %dl,%bl
# -4(%edi) holds the MMX-loop bound stashed above.
cmpl -4(%edi),%esi
leal 8(%esi),%esi
jb .L006loop_mmx
# Flush the final 8-byte keystream word and leave MMX state clean.
psllq $56,%mm1
pxor %mm1,%mm2
movq %mm2,-8(%ebp,%esi,1)
emms
cmpl 24(%esp),%esi
je .L007done
jmp .L002loop1
.align 16
# Integer path: 4 bytes per iteration, keystream assembled in %ebp.
.L003go4loop4:
leal -4(%esi,%edx,1),%edx
movl %edx,28(%esp)
.L008loop4:
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
movl (%edi,%eax,4),%ecx
movl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl (%edi,%eax,4),%ecx
orl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl (%edi,%eax,4),%ecx
orl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl 32(%esp),%ecx
orl (%edi,%edx,4),%ebp
rorl $8,%ebp
xorl (%esi),%ebp
cmpl 28(%esp),%esi
movl %ebp,(%ecx,%esi,1)
leal 4(%esi),%esi
movl (%edi,%eax,4),%ecx
jb .L008loop4
cmpl 24(%esp),%esi
je .L007done
movl 32(%esp),%ebp
.align 16
# Byte path (int S array): handles the tail / short inputs.
.L002loop1:
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
movl (%edi,%edx,4),%edx
xorb (%esi),%dl
leal 1(%esi),%esi
movl (%edi,%eax,4),%ecx
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
jb .L002loop1
jmp .L007done
.align 16
# Byte path when S is stored as a 256-byte array.
.L001RC4_CHAR:
movzbl (%edi,%eax,1),%ecx
.L009cloop1:
addb %cl,%bl
movzbl (%edi,%ebx,1),%edx
movb %cl,(%edi,%ebx,1)
movb %dl,(%edi,%eax,1)
addb %cl,%dl
movzbl (%edi,%edx,1),%edx
addb $1,%al
xorb (%esi),%dl
leal 1(%esi),%esi
movzbl (%edi,%eax,1),%ecx
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
jb .L009cloop1
.L007done:
# Write back the updated x (undo the pre-increment) and y.
decb %al
movl %ebx,-4(%edi)
movb %al,-8(%edi)
.L000abort:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size RC4,.-.L_RC4_begin
.globl private_RC4_set_key
.type private_RC4_set_key,@function
.align 16
#-----------------------------------------------------------------------
# private_RC4_set_key -- RC4 key schedule (KSA), PIC build.
# Presumed C prototype (TODO confirm against rc4.h):
#   void private_RC4_set_key(RC4_KEY *key, int len,
#                            const unsigned char *data);
# Initializes S to the identity permutation and swaps per the key
# bytes.  Depending on OPENSSL_ia32cap_P bit 20 (CPU-class flag set by
# the cpuid probe; TODO confirm exact meaning) S is laid out either as
# 256 dwords or as 256 bytes; the byte layout is marked for RC4() by
# storing -1 at S[256].
# Registers: %edi = &key->data, %esi = end of key bytes,
# %ebp = -len (counts up to 0 for wraparound), %cl = i, %dl = j.
#-----------------------------------------------------------------------
private_RC4_set_key:
.L_private_RC4_set_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%edi
movl 24(%esp),%ebp
movl 28(%esp),%esi
# PIC idiom: get EIP, then address OPENSSL_ia32cap_P PC-relative.
call .L010PIC_me_up
.L010PIC_me_up:
popl %edx
leal OPENSSL_ia32cap_P-.L010PIC_me_up(%edx),%edx
leal 8(%edi),%edi
leal (%esi,%ebp,1),%esi
negl %ebp
xorl %eax,%eax
# Stash -len at -4(%edi) so the swap loops can rewind the key index.
movl %ebp,-4(%edi)
btl $20,(%edx)
jc .L011c1stloop
.align 16
# Dword-S variant: S[i] = i for i = 0..255.
.L012w1stloop:
movl %eax,(%edi,%eax,4)
addb $1,%al
jnc .L012w1stloop
xorl %ecx,%ecx
xorl %edx,%edx
.align 16
# KSA swap loop: j += S[i] + key[i mod len]; swap S[i], S[j].
.L013w2ndloop:
movl (%edi,%ecx,4),%eax
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movl (%edi,%edx,4),%ebx
jnz .L014wnowrap
movl -4(%edi),%ebp
.L014wnowrap:
movl %eax,(%edi,%edx,4)
movl %ebx,(%edi,%ecx,4)
addb $1,%cl
jnc .L013w2ndloop
jmp .L015exit
.align 16
# Byte-S variant: same KSA over a 256-byte array.
.L011c1stloop:
movb %al,(%edi,%eax,1)
addb $1,%al
jnc .L011c1stloop
xorl %ecx,%ecx
xorl %edx,%edx
xorl %ebx,%ebx
.align 16
.L016c2ndloop:
movb (%edi,%ecx,1),%al
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movb (%edi,%edx,1),%bl
jnz .L017cnowrap
movl -4(%edi),%ebp
.L017cnowrap:
movb %al,(%edi,%edx,1)
movb %bl,(%edi,%ecx,1)
addb $1,%cl
jnc .L016c2ndloop
# Mark the byte layout so RC4() takes the RC4_CHAR path.
movl $-1,256(%edi)
.L015exit:
# Reset the stream indices key->x and key->y to zero.
xorl %eax,%eax
movl %eax,-8(%edi)
movl %eax,-4(%edi)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size private_RC4_set_key,.-.L_private_RC4_set_key_begin
.globl RC4_options
.type RC4_options,@function
.align 16
#-----------------------------------------------------------------------
# RC4_options -- const char *RC4_options(void);
# Returns a static string naming the code path RC4() will take on this
# CPU, chosen from the table at .L019opts using the same
# OPENSSL_ia32cap_P bits as RC4 itself (bit 20 -> char path,
# bit 26 -> MMX path, else the 4x int path).
# Both the string table and the capability word are located
# PC-relative via call/pop so the code is position-independent.
#-----------------------------------------------------------------------
RC4_options:
.L_RC4_options_begin:
call .L018pic_point
.L018pic_point:
popl %eax
leal .L019opts-.L018pic_point(%eax),%eax
call .L020PIC_me_up
.L020PIC_me_up:
popl %edx
leal OPENSSL_ia32cap_P-.L020PIC_me_up(%edx),%edx
movl (%edx),%edx
btl $20,%edx
jc .L0211xchar
btl $26,%edx
jnc .L022ret
# +25 skips the first two strings -> "rc4(8x,mmx)"
addl $25,%eax
ret
.L0211xchar:
# +12 skips the first string -> "rc4(1x,char)"
addl $12,%eax
.L022ret:
ret
.align 64
.L019opts:
.byte 114,99,52,40,52,120,44,105,110,116,41,0	# "rc4(4x,int)\0"
.byte 114,99,52,40,49,120,44,99,104,97,114,41,0	# "rc4(1x,char)\0"
.byte 114,99,52,40,56,120,44,109,109,120,41,0	# "rc4(8x,mmx)\0"
.byte 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89	# "RC4 for x86, CRY"
.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114	# "PTOGAMS by <appr"
.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0	# "o@openssl.org>\0"
.align 64
.size RC4_options,.-.L_RC4_options_begin
# Capability word shared with x86cpuid; common symbol, 16 bytes.
.comm OPENSSL_ia32cap_P,16,4
#else
.file "rc4-586.S"
.text
/*
 * void RC4(RC4_KEY *key, size_t len, const unsigned char *inp,
 *          unsigned char *out) -- non-PIC branch (#else of the PIC guard).
 * Stack args: 20(%esp)=key, 24(%esp)=len, 28(%esp)=inp, 32(%esp)=out
 * (after the four pushes).  Key layout: (key)=x index, 4(key)=y index,
 * 8(key)=state table; after "addl $8,%edi", %edi points at the table.
 * A -1 dword at 256(%edi) (set by private_RC4_set_key) marks a
 * byte-sized state table and selects the .L001RC4_CHAR path.
 * Register roles in the dword paths: al=x, bl=y, ecx=S[x], edx=S[y],
 * esi=input cursor, 24(%esp)=input end, ebp=out-inp displacement.
 * Three generators: MMX 8-bytes/iteration (capability bit 26),
 * 4-bytes/iteration .L007loop4, and byte-at-a-time .L002loop1.
 */
.globl RC4
.type RC4,@function
.align 16
RC4:
.L_RC4_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%edi
movl 24(%esp),%edx
movl 28(%esp),%esi
movl 32(%esp),%ebp
xorl %eax,%eax
xorl %ebx,%ebx
cmpl $0,%edx
je .L000abort
movb (%edi),%al
movb 4(%edi),%bl
addl $8,%edi
leal (%esi,%edx,1),%ecx
subl %esi,%ebp
movl %ecx,24(%esp)
incb %al
/* -1 marker at S[256] => byte-table key schedule */
cmpl $-1,256(%edi)
je .L001RC4_CHAR
movl (%edi,%eax,4),%ecx
andl $-4,%edx
jz .L002loop1
movl %ebp,32(%esp)
testl $-8,%edx
jz .L003go4loop4
/* non-PIC: absolute reference to the capability vector */
leal OPENSSL_ia32cap_P,%ebp
btl $26,(%ebp)
jnc .L003go4loop4
/* MMX path: gather 8 key-stream bytes in mm2, xor with mm0=input qword */
movl 32(%esp),%ebp
andl $-8,%edx
leal -8(%esi,%edx,1),%edx
movl %edx,-4(%edi)
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
movq (%esi),%mm0
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm2
jmp .L004loop_mmx_enter
.align 16
.L005loop_mmx:
addb %cl,%bl
psllq $56,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movq (%esi),%mm0
movq %mm2,-8(%ebp,%esi,1)
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm2
.L004loop_mmx_enter:
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm0,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $8,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $16,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $24,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $32,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $40,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $48,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
movl %ebx,%edx
xorl %ebx,%ebx
movb %dl,%bl
cmpl -4(%edi),%esi
leal 8(%esi),%esi
jb .L005loop_mmx
/* flush the last partially-built qword, then leave MMX state */
psllq $56,%mm1
pxor %mm1,%mm2
movq %mm2,-8(%ebp,%esi,1)
emms
cmpl 24(%esp),%esi
je .L006done
jmp .L002loop1
.align 16
/* 4-bytes-per-iteration path: accumulate 4 key-stream bytes in ebp */
.L003go4loop4:
leal -4(%esi,%edx,1),%edx
movl %edx,28(%esp)
.L007loop4:
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
movl (%edi,%eax,4),%ecx
movl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl (%edi,%eax,4),%ecx
orl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl (%edi,%eax,4),%ecx
orl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl 32(%esp),%ecx
orl (%edi,%edx,4),%ebp
rorl $8,%ebp
xorl (%esi),%ebp
cmpl 28(%esp),%esi
movl %ebp,(%ecx,%esi,1)
leal 4(%esi),%esi
movl (%edi,%eax,4),%ecx
jb .L007loop4
cmpl 24(%esp),%esi
je .L006done
movl 32(%esp),%ebp
.align 16
/* byte-at-a-time tail (also used when len < 4) */
.L002loop1:
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
movl (%edi,%edx,4),%edx
xorb (%esi),%dl
leal 1(%esi),%esi
movl (%edi,%eax,4),%ecx
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
jb .L002loop1
jmp .L006done
.align 16
/* byte-sized state table variant */
.L001RC4_CHAR:
movzbl (%edi,%eax,1),%ecx
.L008cloop1:
addb %cl,%bl
movzbl (%edi,%ebx,1),%edx
movb %cl,(%edi,%ebx,1)
movb %dl,(%edi,%eax,1)
addb %cl,%dl
movzbl (%edi,%edx,1),%edx
addb $1,%al
xorb (%esi),%dl
leal 1(%esi),%esi
movzbl (%edi,%eax,1),%ecx
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
jb .L008cloop1
.L006done:
/* write back updated x (pre-decremented) and y indices */
decb %al
movl %ebx,-4(%edi)
movb %al,-8(%edi)
.L000abort:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size RC4,.-.L_RC4_begin
/*
 * void private_RC4_set_key(RC4_KEY *key, int len, const unsigned char *data)
 * -- non-PIC branch.  20(%esp)=key, 24(%esp)=len, 28(%esp)=data.
 * Builds the RC4 state table at 8(key): a dword table by default
 * (.L010w1stloop/.L011w2ndloop), or a byte table when capability bit 20
 * of OPENSSL_ia32cap_P is set (.L009c1stloop/.L014c2ndloop); the byte
 * variant plants a -1 dword at S[256] so RC4() picks the CHAR path.
 * ebp counts key bytes as a negative offset from the key's end (esi);
 * when it wraps to zero it is reloaded from the saved -len at -4(%edi).
 * On exit x and y (at -8/-4 of the table pointer) are zeroed.
 */
.globl private_RC4_set_key
.type private_RC4_set_key,@function
.align 16
private_RC4_set_key:
.L_private_RC4_set_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%edi
movl 24(%esp),%ebp
movl 28(%esp),%esi
/* non-PIC: absolute reference to the capability vector */
leal OPENSSL_ia32cap_P,%edx
leal 8(%edi),%edi
leal (%esi,%ebp,1),%esi
negl %ebp
xorl %eax,%eax
movl %ebp,-4(%edi)
btl $20,(%edx)
jc .L009c1stloop
.align 16
/* dword table: S[i] = i for i = 0..255 */
.L010w1stloop:
movl %eax,(%edi,%eax,4)
addb $1,%al
jnc .L010w1stloop
xorl %ecx,%ecx
xorl %edx,%edx
.align 16
.L011w2ndloop:
movl (%edi,%ecx,4),%eax
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movl (%edi,%edx,4),%ebx
jnz .L012wnowrap
/* key exhausted: rewind ebp to -len */
movl -4(%edi),%ebp
.L012wnowrap:
movl %eax,(%edi,%edx,4)
movl %ebx,(%edi,%ecx,4)
addb $1,%cl
jnc .L011w2ndloop
jmp .L013exit
.align 16
/* byte table: same schedule with 8-bit stores */
.L009c1stloop:
movb %al,(%edi,%eax,1)
addb $1,%al
jnc .L009c1stloop
xorl %ecx,%ecx
xorl %edx,%edx
xorl %ebx,%ebx
.align 16
.L014c2ndloop:
movb (%edi,%ecx,1),%al
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movb (%edi,%edx,1),%bl
jnz .L015cnowrap
movl -4(%edi),%ebp
.L015cnowrap:
movb %al,(%edi,%edx,1)
movb %bl,(%edi,%ecx,1)
addb $1,%cl
jnc .L014c2ndloop
/* marker consumed by RC4() to select the CHAR path */
movl $-1,256(%edi)
.L013exit:
xorl %eax,%eax
movl %eax,-8(%edi)
movl %eax,-4(%edi)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size private_RC4_set_key,.-.L_private_RC4_set_key_begin
/*
 * const char *RC4_options(void) -- non-PIC branch.
 * Same selection logic as the PIC build: returns .L017opts (+0, +12 or
 * +25) naming the RC4 path chosen from OPENSSL_ia32cap_P bits 20/26.
 * The string table is still located via call/pop (its address is
 * link-time local), but the capability vector is taken absolutely.
 * Clobbers: eax (return value), edx, flags.
 */
.globl RC4_options
.type RC4_options,@function
.align 16
RC4_options:
.L_RC4_options_begin:
call .L016pic_point
.L016pic_point:
popl %eax
leal .L017opts-.L016pic_point(%eax),%eax
leal OPENSSL_ia32cap_P,%edx
movl (%edx),%edx
btl $20,%edx
jc .L0181xchar
btl $26,%edx
jnc .L019ret
addl $25,%eax
ret
.L0181xchar:
addl $12,%eax
.L019ret:
ret
.align 64
.L017opts:
/* "rc4(4x,int)\0"  "rc4(1x,char)\0"  "rc4(8x,mmx)\0" */
.byte 114,99,52,40,52,120,44,105,110,116,41,0
.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
.byte 114,99,52,40,56,120,44,109,109,120,41,0
/* "RC4 for x86, CRYPTOGAMS by <appro@openssl.org>" credit string */
.byte 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 64
.size RC4_options,.-.L_RC4_options_begin
.comm OPENSSL_ia32cap_P,16,4
#endif

View File

@ -1,373 +0,0 @@
# $FreeBSD$
.file "rc4-586.s"
.text
/*
 * void RC4(RC4_KEY *key, size_t len, const unsigned char *inp,
 *          unsigned char *out)
 * NOTE(review): this is the pre-PIC rc4-586.s being removed by the
 * "make libcrypto position independent on i386" commit; its body is
 * identical to the #else (non-PIC) branch of the new rc4-586.S.
 * See that copy for the path/register commentary: al=x, bl=y,
 * MMX 8-byte loop (capability bit 26), 4x dword loop, byte loop,
 * and the byte-table (.L001RC4_CHAR) variant selected by the -1
 * marker at S[256].
 */
.globl RC4
.type RC4,@function
.align 16
RC4:
.L_RC4_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%edi
movl 24(%esp),%edx
movl 28(%esp),%esi
movl 32(%esp),%ebp
xorl %eax,%eax
xorl %ebx,%ebx
cmpl $0,%edx
je .L000abort
movb (%edi),%al
movb 4(%edi),%bl
addl $8,%edi
leal (%esi,%edx,1),%ecx
subl %esi,%ebp
movl %ecx,24(%esp)
incb %al
cmpl $-1,256(%edi)
je .L001RC4_CHAR
movl (%edi,%eax,4),%ecx
andl $-4,%edx
jz .L002loop1
movl %ebp,32(%esp)
testl $-8,%edx
jz .L003go4loop4
/* absolute reference -- the reason this file is not PIC-safe */
leal OPENSSL_ia32cap_P,%ebp
btl $26,(%ebp)
jnc .L003go4loop4
movl 32(%esp),%ebp
andl $-8,%edx
leal -8(%esi,%edx,1),%edx
movl %edx,-4(%edi)
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
movq (%esi),%mm0
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm2
jmp .L004loop_mmx_enter
.align 16
.L005loop_mmx:
addb %cl,%bl
psllq $56,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movq (%esi),%mm0
movq %mm2,-8(%ebp,%esi,1)
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm2
.L004loop_mmx_enter:
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm0,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $8,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $16,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $24,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $32,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $40,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
addb %cl,%bl
psllq $48,%mm1
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
incl %eax
addl %ecx,%edx
movzbl %al,%eax
movzbl %dl,%edx
pxor %mm1,%mm2
movl (%edi,%eax,4),%ecx
movd (%edi,%edx,4),%mm1
movl %ebx,%edx
xorl %ebx,%ebx
movb %dl,%bl
cmpl -4(%edi),%esi
leal 8(%esi),%esi
jb .L005loop_mmx
psllq $56,%mm1
pxor %mm1,%mm2
movq %mm2,-8(%ebp,%esi,1)
emms
cmpl 24(%esp),%esi
je .L006done
jmp .L002loop1
.align 16
.L003go4loop4:
leal -4(%esi,%edx,1),%edx
movl %edx,28(%esp)
.L007loop4:
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
movl (%edi,%eax,4),%ecx
movl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl (%edi,%eax,4),%ecx
orl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl (%edi,%eax,4),%ecx
orl (%edi,%edx,4),%ebp
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
rorl $8,%ebp
movl 32(%esp),%ecx
orl (%edi,%edx,4),%ebp
rorl $8,%ebp
xorl (%esi),%ebp
cmpl 28(%esp),%esi
movl %ebp,(%ecx,%esi,1)
leal 4(%esi),%esi
movl (%edi,%eax,4),%ecx
jb .L007loop4
cmpl 24(%esp),%esi
je .L006done
movl 32(%esp),%ebp
.align 16
.L002loop1:
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
movl %edx,(%edi,%eax,4)
addl %ecx,%edx
incb %al
andl $255,%edx
movl (%edi,%edx,4),%edx
xorb (%esi),%dl
leal 1(%esi),%esi
movl (%edi,%eax,4),%ecx
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
jb .L002loop1
jmp .L006done
.align 16
.L001RC4_CHAR:
movzbl (%edi,%eax,1),%ecx
.L008cloop1:
addb %cl,%bl
movzbl (%edi,%ebx,1),%edx
movb %cl,(%edi,%ebx,1)
movb %dl,(%edi,%eax,1)
addb %cl,%dl
movzbl (%edi,%edx,1),%edx
addb $1,%al
xorb (%esi),%dl
leal 1(%esi),%esi
movzbl (%edi,%eax,1),%ecx
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
jb .L008cloop1
.L006done:
decb %al
movl %ebx,-4(%edi)
movb %al,-8(%edi)
.L000abort:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size RC4,.-.L_RC4_begin
/*
 * void private_RC4_set_key(RC4_KEY *key, int len, const unsigned char *data)
 * NOTE(review): pre-PIC rc4-586.s copy being removed by this commit;
 * identical to the non-PIC branch of the new rc4-586.S.  Builds a dword
 * state table, or a byte table (with a -1 marker at S[256]) when
 * capability bit 20 of OPENSSL_ia32cap_P is set.
 */
.globl private_RC4_set_key
.type private_RC4_set_key,@function
.align 16
private_RC4_set_key:
.L_private_RC4_set_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%edi
movl 24(%esp),%ebp
movl 28(%esp),%esi
/* absolute reference -- not PIC-safe */
leal OPENSSL_ia32cap_P,%edx
leal 8(%edi),%edi
leal (%esi,%ebp,1),%esi
negl %ebp
xorl %eax,%eax
movl %ebp,-4(%edi)
btl $20,(%edx)
jc .L009c1stloop
.align 16
.L010w1stloop:
movl %eax,(%edi,%eax,4)
addb $1,%al
jnc .L010w1stloop
xorl %ecx,%ecx
xorl %edx,%edx
.align 16
.L011w2ndloop:
movl (%edi,%ecx,4),%eax
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movl (%edi,%edx,4),%ebx
jnz .L012wnowrap
movl -4(%edi),%ebp
.L012wnowrap:
movl %eax,(%edi,%edx,4)
movl %ebx,(%edi,%ecx,4)
addb $1,%cl
jnc .L011w2ndloop
jmp .L013exit
.align 16
.L009c1stloop:
movb %al,(%edi,%eax,1)
addb $1,%al
jnc .L009c1stloop
xorl %ecx,%ecx
xorl %edx,%edx
xorl %ebx,%ebx
.align 16
.L014c2ndloop:
movb (%edi,%ecx,1),%al
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movb (%edi,%edx,1),%bl
jnz .L015cnowrap
movl -4(%edi),%ebp
.L015cnowrap:
movb %al,(%edi,%edx,1)
movb %bl,(%edi,%ecx,1)
addb $1,%cl
jnc .L014c2ndloop
movl $-1,256(%edi)
.L013exit:
xorl %eax,%eax
movl %eax,-8(%edi)
movl %eax,-4(%edi)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size private_RC4_set_key,.-.L_private_RC4_set_key_begin
/*
 * const char *RC4_options(void)
 * NOTE(review): pre-PIC rc4-586.s copy being removed by this commit;
 * identical to the non-PIC branch of the new rc4-586.S.  Returns
 * .L017opts +0 / +12 / +25 per OPENSSL_ia32cap_P bits 20/26.
 */
.globl RC4_options
.type RC4_options,@function
.align 16
RC4_options:
.L_RC4_options_begin:
call .L016pic_point
.L016pic_point:
popl %eax
leal .L017opts-.L016pic_point(%eax),%eax
leal OPENSSL_ia32cap_P,%edx
movl (%edx),%edx
btl $20,%edx
jc .L0181xchar
btl $26,%edx
jnc .L019ret
addl $25,%eax
ret
.L0181xchar:
addl $12,%eax
.L019ret:
ret
.align 64
.L017opts:
/* "rc4(4x,int)\0"  "rc4(1x,char)\0"  "rc4(8x,mmx)\0" + credit string */
.byte 114,99,52,40,52,120,44,105,110,116,41,0
.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
.byte 114,99,52,40,56,120,44,109,109,120,41,0
.byte 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 64
.size RC4_options,.-.L_RC4_options_begin

File diff suppressed because it is too large Load Diff

View File

@ -1,565 +0,0 @@
# $FreeBSD$
.file "rc5-586.s"
.text
/*
 * void RC5_32_encrypt(unsigned long *d, RC5_32_KEY *key)
 * 16(%esp)=d (two 32-bit words), 20(%esp)=key (after the three pushes).
 * (key) holds the round count; the expanded key words follow at 4(key).
 * edi/esi carry d[0]/d[1]; each half-round is
 *   edi ^= esi; edi = ROL(edi, esi) + S[k]   (and symmetrically).
 * The schedule is fully unrolled for 16 rounds with early exits after
 * round 8 (cmpl $8) and round 12 (cmpl $12), so 8/12/16-round keys all
 * share this body.  Clobbers: eax, ecx, flags.
 */
.globl RC5_32_encrypt
.type RC5_32_encrypt,@function
.align 16
RC5_32_encrypt:
.L_RC5_32_encrypt_begin:
pushl %ebp
pushl %esi
pushl %edi
movl 16(%esp),%edx
movl 20(%esp),%ebp
movl (%edx),%edi
movl 4(%edx),%esi
pushl %ebx
movl (%ebp),%ebx
/* whitening: d[0]+=S[0]? no -- S[1]/S[2] per 4(%ebp)/8(%ebp) */
addl 4(%ebp),%edi
addl 8(%ebp),%esi
/* rounds 1..8 */
xorl %esi,%edi
movl 12(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 16(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 20(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 24(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 28(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 32(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 36(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 40(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 44(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 48(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 52(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 56(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 60(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 64(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 68(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 72(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
/* done if this is an 8-round key */
cmpl $8,%ebx
je .L000rc5_exit
/* rounds 9..12 */
xorl %esi,%edi
movl 76(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 80(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 84(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 88(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 92(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 96(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 100(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 104(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
/* done if this is a 12-round key */
cmpl $12,%ebx
je .L000rc5_exit
/* rounds 13..16 */
xorl %esi,%edi
movl 108(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 112(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 116(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 120(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 124(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 128(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
xorl %esi,%edi
movl 132(%ebp),%eax
movl %esi,%ecx
roll %cl,%edi
addl %eax,%edi
xorl %edi,%esi
movl 136(%ebp),%eax
movl %edi,%ecx
roll %cl,%esi
addl %eax,%esi
.L000rc5_exit:
movl %edi,(%edx)
movl %esi,4(%edx)
popl %ebx
popl %edi
popl %esi
popl %ebp
ret
.size RC5_32_encrypt,.-.L_RC5_32_encrypt_begin
/*
 * void RC5_32_decrypt(unsigned long *d, RC5_32_KEY *key)
 * Inverse of RC5_32_encrypt: runs the unrolled schedule backwards
 * (S[..] from offset 136 down), each half-round being
 *   esi -= S[k]; esi = ROR(esi, edi); esi ^= edi   (and symmetrically),
 * finishing by subtracting the whitening words 8(%ebp)/4(%ebp).
 * Entry points .L001rc5_dec_12/.L002rc5_dec_8 skip the top rounds for
 * 12- and 8-round keys (round count read from (%ebp)).
 * Clobbers: eax, ecx, flags.
 */
.globl RC5_32_decrypt
.type RC5_32_decrypt,@function
.align 16
RC5_32_decrypt:
.L_RC5_32_decrypt_begin:
pushl %ebp
pushl %esi
pushl %edi
movl 16(%esp),%edx
movl 20(%esp),%ebp
movl (%edx),%edi
movl 4(%edx),%esi
pushl %ebx
movl (%ebp),%ebx
cmpl $12,%ebx
je .L001rc5_dec_12
cmpl $8,%ebx
je .L002rc5_dec_8
/* rounds 16..13 */
movl 136(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 132(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 128(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 124(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 120(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 116(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 112(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 108(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
/* rounds 12..9 */
.L001rc5_dec_12:
movl 104(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 100(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 96(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 92(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 88(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 84(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 80(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 76(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
/* rounds 8..1 */
.L002rc5_dec_8:
movl 72(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 68(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 64(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 60(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 56(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 52(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 48(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 44(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 40(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 36(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 32(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 28(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 24(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 20(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
movl 16(%ebp),%eax
subl %eax,%esi
movl %edi,%ecx
rorl %cl,%esi
xorl %edi,%esi
movl 12(%ebp),%eax
subl %eax,%edi
movl %esi,%ecx
rorl %cl,%edi
xorl %esi,%edi
/* undo the input whitening */
subl 8(%ebp),%esi
subl 4(%ebp),%edi
/* label kept for symmetry with the encrypt side; nothing jumps here */
.L003rc5_exit:
movl %edi,(%edx)
movl %esi,4(%edx)
popl %ebx
popl %edi
popl %esi
popl %ebp
ret
.size RC5_32_decrypt,.-.L_RC5_32_decrypt_begin
/*
 * void RC5_32_cbc_encrypt(const unsigned char *in, unsigned char *out,
 *                         long length, RC5_32_KEY *ks, unsigned char *iv,
 *                         int enc)
 * Standard perlasm cbc glue: IV is copied to two stack slots, whole
 * 8-byte blocks are en/decrypted through .L_RC5_32_encrypt_begin /
 * .L_RC5_32_decrypt_begin, and a PIC jump table (.L009cbc_enc_jmp_table)
 * dispatches on the 1..7 residual input bytes when encrypting.
 * On the decrypt side the residual path falls straight into .L020dj7
 * (nothing jumps to .L025dj3/.L026dj2/.L027dj1, and .L023dj4 jumps over
 * them), so 7 plaintext bytes plus a dword are always stored for a
 * partial final block.
 *
 * FIX(review): the dead dj3/dj2/dj1 residue stores were wrong relative
 * to the evidently intended pattern (cf. .L020dj7..".L022dj5" and the
 * ej* encrypt stores, which all address the *output* %edi):
 *   - .L025dj3 used "shll $16,%ecx", destroying plaintext bytes 0-1;
 *     the mirror of dj7's "shrl $16,%edx" is "shrl $16,%ecx";
 *   - .L026dj2/.L027dj1 stored through the *input* pointer %esi.
 * Corrected below; no reachable code path changes.
 */
.globl RC5_32_cbc_encrypt
.type RC5_32_cbc_encrypt,@function
.align 16
RC5_32_cbc_encrypt:
.L_RC5_32_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 28(%esp),%ebp
movl 36(%esp),%ebx
movl (%ebx),%esi
movl 4(%ebx),%edi
/* two copies of the IV on the stack: work pair + carry pair */
pushl %edi
pushl %esi
pushl %edi
pushl %esi
movl %esp,%ebx
movl 36(%esp),%esi
movl 40(%esp),%edi
movl 56(%esp),%ecx
movl 48(%esp),%eax
pushl %eax
pushl %ebx
cmpl $0,%ecx
jz .L004decrypt
andl $4294967288,%ebp
movl 8(%esp),%eax
movl 12(%esp),%ebx
jz .L005encrypt_finish
.L006encrypt_loop:
/* c = E(p ^ chain); chain = c */
movl (%esi),%ecx
movl 4(%esi),%edx
xorl %ecx,%eax
xorl %edx,%ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_RC5_32_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz .L006encrypt_loop
.L005encrypt_finish:
/* gather the 1..7 residual input bytes via the PIC jump table */
movl 52(%esp),%ebp
andl $7,%ebp
jz .L007finish
call .L008PIC_point
.L008PIC_point:
popl %edx
leal .L009cbc_enc_jmp_table-.L008PIC_point(%edx),%ecx
movl (%ecx,%ebp,4),%ebp
addl %edx,%ebp
xorl %ecx,%ecx
xorl %edx,%edx
jmp *%ebp
.L010ej7:
movb 6(%esi),%dh
shll $8,%edx
.L011ej6:
movb 5(%esi),%dh
.L012ej5:
movb 4(%esi),%dl
.L013ej4:
movl (%esi),%ecx
jmp .L014ejend
.L015ej3:
movb 2(%esi),%ch
shll $8,%ecx
.L016ej2:
movb 1(%esi),%ch
.L017ej1:
movb (%esi),%cl
.L014ejend:
xorl %ecx,%eax
xorl %edx,%ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_RC5_32_encrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
movl %eax,(%edi)
movl %ebx,4(%edi)
jmp .L007finish
.L004decrypt:
andl $4294967288,%ebp
movl 16(%esp),%eax
movl 20(%esp),%ebx
jz .L018decrypt_finish
.L019decrypt_loop:
/* p = D(c) ^ chain; chain = c */
movl (%esi),%eax
movl 4(%esi),%ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_RC5_32_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
movl %ecx,(%edi)
movl %edx,4(%edi)
movl %eax,16(%esp)
movl %ebx,20(%esp)
addl $8,%esi
addl $8,%edi
subl $8,%ebp
jnz .L019decrypt_loop
.L018decrypt_finish:
movl 52(%esp),%ebp
andl $7,%ebp
jz .L007finish
movl (%esi),%eax
movl 4(%esi),%ebx
movl %eax,8(%esp)
movl %ebx,12(%esp)
call .L_RC5_32_decrypt_begin
movl 8(%esp),%eax
movl 12(%esp),%ebx
movl 16(%esp),%ecx
movl 20(%esp),%edx
xorl %eax,%ecx
xorl %ebx,%edx
movl (%esi),%eax
movl 4(%esi),%ebx
/* falls through from above; always stores bytes 6..4 and dword 0..3 */
.L020dj7:
rorl $16,%edx
movb %dl,6(%edi)
shrl $16,%edx
.L021dj6:
movb %dh,5(%edi)
.L022dj5:
movb %dl,4(%edi)
.L023dj4:
movl %ecx,(%edi)
jmp .L024djend
/* dj3..dj1 are currently unreachable; fixed to mirror the dj7 path */
.L025dj3:
rorl $16,%ecx
movb %cl,2(%edi)
shrl $16,%ecx
.L026dj2:
movb %ch,1(%edi)
.L027dj1:
movb %cl,(%edi)
.L024djend:
jmp .L007finish
.L007finish:
/* write the chain value back to the caller's IV and unwind */
movl 60(%esp),%ecx
addl $24,%esp
movl %eax,(%ecx)
movl %ebx,4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 64
.L009cbc_enc_jmp_table:
.long 0
.long .L017ej1-.L008PIC_point
.long .L016ej2-.L008PIC_point
.long .L015ej3-.L008PIC_point
.long .L013ej4-.L008PIC_point
.long .L012ej5-.L008PIC_point
.long .L011ej6-.L008PIC_point
.long .L010ej7-.L008PIC_point
.align 64
.size RC5_32_cbc_encrypt,.-.L_RC5_32_cbc_encrypt_begin

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,662 +0,0 @@
# $FreeBSD$
.file "vpaes-x86.s"
.text
.align 64
/*
 * Constant tables for the SSSE3 vector-permutation AES implementation
 * (Mike Hamburg's "vpaes"; see the credit string at the end).  The
 * helper routines below address this block relative to %ebp: e.g.
 * _vpaes_preheat loads -48(%ebp) and -16(%ebp), _vpaes_decrypt_core
 * bases its tables at 608(%ebp).  Exact per-table meaning is defined
 * by the vpaes-x86.pl generator -- TODO confirm against upstream
 * before relying on individual offsets.
 */
.L_vpaes_consts:
.long 218628480,235210255,168496130,67568393
.long 252381056,17041926,33884169,51187212
.long 252645135,252645135,252645135,252645135
.long 1512730624,3266504856,1377990664,3401244816
.long 830229760,1275146365,2969422977,3447763452
.long 3411033600,2979783055,338359620,2782886510
.long 4209124096,907596821,221174255,1006095553
.long 191964160,3799684038,3164090317,1589111125
.long 182528256,1777043520,2877432650,3265356744
.long 1874708224,3503451415,3305285752,363511674
.long 1606117888,3487855781,1093350906,2384367825
.long 197121,67569157,134941193,202313229
.long 67569157,134941193,202313229,197121
.long 134941193,202313229,197121,67569157
.long 202313229,197121,67569157,134941193
.long 33619971,100992007,168364043,235736079
.long 235736079,33619971,100992007,168364043
.long 168364043,235736079,33619971,100992007
.long 100992007,168364043,235736079,33619971
.long 50462976,117835012,185207048,252579084
.long 252314880,51251460,117574920,184942860
.long 184682752,252054788,50987272,118359308
.long 118099200,185467140,251790600,50727180
.long 2946363062,528716217,1300004225,1881839624
.long 1532713819,1532713819,1532713819,1532713819
.long 3602276352,4288629033,3737020424,4153884961
.long 1354558464,32357713,2958822624,3775749553
.long 1201988352,132424512,1572796698,503232858
.long 2213177600,1597421020,4103937655,675398315
.long 2749646592,4273543773,1511898873,121693092
.long 3040248576,1103263732,2871565598,1608280554
.long 2236667136,2588920351,482954393,64377734
.long 3069987328,291237287,2117370568,3650299247
.long 533321216,3573750986,2572112006,1401264716
.long 1339849704,2721158661,548607111,3445553514
.long 2128193280,3054596040,2183486460,1257083700
.long 655635200,1165381986,3923443150,2344132524
.long 190078720,256924420,290342170,357187870
.long 1610966272,2263057382,4103205268,309794674
.long 2592527872,2233205587,1335446729,3402964816
.long 3973531904,3225098121,3002836325,1918774430
.long 3870401024,2102906079,2284471353,4117666579
.long 617007872,1021508343,366931923,691083277
.long 2528395776,3491914898,2968704004,1613121270
.long 3445188352,3247741094,844474987,4093578302
.long 651481088,1190302358,1689581232,574775300
.long 4289380608,206939853,2555985458,2489840491
.long 2130264064,327674451,3566485037,3349835193
.long 2470714624,316102159,3636825756,3393945945
/* "Vector Permutation AES for x86/SSSE3, Mike Hamburg (Stanford University)" */
.byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
.byte 111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83
.byte 83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117
.byte 114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105
.byte 118,101,114,115,105,116,121,41,0
.align 64
/*
 * _vpaes_preheat: turn the caller-supplied displacement in %ebp into the
 * absolute address of .L_vpaes_consts (the caller preloads %ebp with the
 * constants' offset relative to this call's return address, which sits
 * at (%esp)), then cache two hot constants: xmm7 = -48(%ebp) and
 * xmm6 = -16(%ebp) (used as the low-nibble mask by the cores below).
 * Clobbers: ebp (now absolute), xmm6, xmm7.
 */
.type _vpaes_preheat,@function
.align 16
_vpaes_preheat:
addl (%esp),%ebp
movdqa -48(%ebp),%xmm7
movdqa -16(%ebp),%xmm6
ret
.size _vpaes_preheat,.-_vpaes_preheat
/*
 * _vpaes_encrypt_core: encrypt the single AES block in %xmm0.
 * In:  %xmm0 = plaintext block, %edx = expanded key (round count at
 *      240(%edx)), %ebp = &.L_vpaes_consts, %xmm6/%xmm7 preloaded by
 *      _vpaes_preheat.  Out: %xmm0 = ciphertext block.
 * Splits each byte into low/high nibbles with the %xmm6 mask + psrld,
 * then substitutes/mixes via pshufb table lookups against the constant
 * block; %ecx cycles 0,16,32,48 to rotate through the ".Lk_mc" tables
 * at -64(%ebx..)/(%ebx..).  The ".byte 102,15,56,0,NN" sequences are
 * hand-assembled SSSE3 pshufb (66 0F 38 00 /r) instructions.
 * Clobbers: eax, ecx, edx, ebx, xmm0-xmm5.
 */
.type _vpaes_encrypt_core,@function
.align 16
_vpaes_encrypt_core:
movl $16,%ecx
movl 240(%edx),%eax
movdqa %xmm6,%xmm1
movdqa (%ebp),%xmm2
pandn %xmm0,%xmm1
pand %xmm6,%xmm0
movdqu (%edx),%xmm5
.byte 102,15,56,0,208
movdqa 16(%ebp),%xmm0
pxor %xmm5,%xmm2
psrld $4,%xmm1
addl $16,%edx
.byte 102,15,56,0,193
leal 192(%ebp),%ebx
pxor %xmm2,%xmm0
jmp .L000enc_entry
.align 16
.L001enc_loop:
/* middle round: sbox output combined through mix-columns tables */
movdqa 32(%ebp),%xmm4
movdqa 48(%ebp),%xmm0
.byte 102,15,56,0,226
.byte 102,15,56,0,195
pxor %xmm5,%xmm4
movdqa 64(%ebp),%xmm5
pxor %xmm4,%xmm0
movdqa -64(%ebx,%ecx,1),%xmm1
.byte 102,15,56,0,234
movdqa 80(%ebp),%xmm2
movdqa (%ebx,%ecx,1),%xmm4
.byte 102,15,56,0,211
movdqa %xmm0,%xmm3
pxor %xmm5,%xmm2
.byte 102,15,56,0,193
addl $16,%edx
pxor %xmm2,%xmm0
.byte 102,15,56,0,220
addl $16,%ecx
pxor %xmm0,%xmm3
.byte 102,15,56,0,193
andl $48,%ecx
subl $1,%eax
pxor %xmm3,%xmm0
.L000enc_entry:
/* top of round: split nibbles and run the inversion tables */
movdqa %xmm6,%xmm1
movdqa -32(%ebp),%xmm5
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm6,%xmm0
.byte 102,15,56,0,232
movdqa %xmm7,%xmm3
pxor %xmm1,%xmm0
.byte 102,15,56,0,217
movdqa %xmm7,%xmm4
pxor %xmm5,%xmm3
.byte 102,15,56,0,224
movdqa %xmm7,%xmm2
pxor %xmm5,%xmm4
.byte 102,15,56,0,211
movdqa %xmm7,%xmm3
pxor %xmm0,%xmm2
.byte 102,15,56,0,220
movdqu (%edx),%xmm5
pxor %xmm1,%xmm3
jnz .L001enc_loop
/* last round uses the dedicated sbox-out tables at 96/112(%ebp) */
movdqa 96(%ebp),%xmm4
movdqa 112(%ebp),%xmm0
.byte 102,15,56,0,226
pxor %xmm5,%xmm4
.byte 102,15,56,0,195
movdqa 64(%ebx,%ecx,1),%xmm1
pxor %xmm4,%xmm0
.byte 102,15,56,0,193
ret
.size _vpaes_encrypt_core,.-_vpaes_encrypt_core
/*
 * _vpaes_decrypt_core: decrypt the single AES block in %xmm0.
 * In:  %xmm0 = ciphertext block, %edx = expanded (decryption) key with
 *      round count at 240(%edx), %ebp = &.L_vpaes_consts, %xmm6/%xmm7
 *      from _vpaes_preheat.  Out: %xmm0 = plaintext block.
 * Decrypt tables are based at 608(%ebp) (%ebx); %ecx is derived from
 * the round count (shll $4 / xorl $48 / andl $48) to pick the final
 * permutation at -352(%ebx,%ecx).  ".byte 102,15,56,0,NN" = pshufb,
 * ".byte 102,15,58,15,237,12" = palignr $12,%xmm5,%xmm5 rotating the
 * mix-columns constant between rounds.
 * Clobbers: eax, ecx, edx, ebx, xmm0-xmm5.
 */
.type _vpaes_decrypt_core,@function
.align 16
_vpaes_decrypt_core:
leal 608(%ebp),%ebx
movl 240(%edx),%eax
movdqa %xmm6,%xmm1
movdqa -64(%ebx),%xmm2
pandn %xmm0,%xmm1
movl %eax,%ecx
psrld $4,%xmm1
movdqu (%edx),%xmm5
shll $4,%ecx
pand %xmm6,%xmm0
.byte 102,15,56,0,208
movdqa -48(%ebx),%xmm0
xorl $48,%ecx
.byte 102,15,56,0,193
andl $48,%ecx
pxor %xmm5,%xmm2
movdqa 176(%ebp),%xmm5
pxor %xmm2,%xmm0
addl $16,%edx
leal -352(%ebx,%ecx,1),%ecx
jmp .L002dec_entry
.align 16
.L003dec_loop:
/* inverse mix-columns: four table pairs applied and accumulated */
movdqa -32(%ebx),%xmm4
movdqa -16(%ebx),%xmm1
.byte 102,15,56,0,226
.byte 102,15,56,0,203
pxor %xmm4,%xmm0
movdqa (%ebx),%xmm4
pxor %xmm1,%xmm0
movdqa 16(%ebx),%xmm1
.byte 102,15,56,0,226
.byte 102,15,56,0,197
.byte 102,15,56,0,203
pxor %xmm4,%xmm0
movdqa 32(%ebx),%xmm4
pxor %xmm1,%xmm0
movdqa 48(%ebx),%xmm1
.byte 102,15,56,0,226
.byte 102,15,56,0,197
.byte 102,15,56,0,203
pxor %xmm4,%xmm0
movdqa 64(%ebx),%xmm4
pxor %xmm1,%xmm0
movdqa 80(%ebx),%xmm1
.byte 102,15,56,0,226
.byte 102,15,56,0,197
.byte 102,15,56,0,203
pxor %xmm4,%xmm0
addl $16,%edx
.byte 102,15,58,15,237,12
pxor %xmm1,%xmm0
subl $1,%eax
.L002dec_entry:
/* split nibbles and run the inversion tables (as in encrypt) */
movdqa %xmm6,%xmm1
movdqa -32(%ebp),%xmm2
pandn %xmm0,%xmm1
pand %xmm6,%xmm0
psrld $4,%xmm1
.byte 102,15,56,0,208
movdqa %xmm7,%xmm3
pxor %xmm1,%xmm0
.byte 102,15,56,0,217
movdqa %xmm7,%xmm4
pxor %xmm2,%xmm3
.byte 102,15,56,0,224
pxor %xmm2,%xmm4
movdqa %xmm7,%xmm2
.byte 102,15,56,0,211
movdqa %xmm7,%xmm3
pxor %xmm0,%xmm2
.byte 102,15,56,0,220
movdqu (%edx),%xmm0
pxor %xmm1,%xmm3
jnz .L003dec_loop
/* last round + final output permutation from (%ecx) */
movdqa 96(%ebx),%xmm4
.byte 102,15,56,0,226
pxor %xmm0,%xmm4
movdqa 112(%ebx),%xmm0
movdqa (%ecx),%xmm2
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
.byte 102,15,56,0,194
ret
.size _vpaes_decrypt_core,.-_vpaes_decrypt_core
/*
 * _vpaes_schedule_core: key-schedule driver shared by the encrypt and
 * decrypt set-key entry points (which live outside this view).
 * In (per the code below): %esi = user key, %edx = output schedule
 * cursor, %eax = key bits (128/192/256 compared against $192), %edi
 * nonzero for the decrypting schedule, %ecx = permutation index,
 * %ebp = displacement of .L_vpaes_consts (made absolute via the
 * return address, as in _vpaes_preheat).
 * Dispatches to the 128/192/256-bit loops, each built from
 * _vpaes_schedule_round/_vpaes_schedule_mangle (+192_smear for 192-bit),
 * finishes via .L010schedule_mangle_last, and clears xmm0-xmm7 so no
 * key material is left in registers.
 */
.type _vpaes_schedule_core,@function
.align 16
_vpaes_schedule_core:
addl (%esp),%ebp
movdqu (%esi),%xmm0
movdqa 320(%ebp),%xmm2
movdqa %xmm0,%xmm3
leal (%ebp),%ebx
movdqa %xmm2,4(%esp)
call _vpaes_schedule_transform
movdqa %xmm0,%xmm7
testl %edi,%edi
jnz .L004schedule_am_decrypting
/* encrypting: store the transformed key as round 0 */
movdqu %xmm0,(%edx)
jmp .L005schedule_go
.L004schedule_am_decrypting:
/* decrypting: store the byte-swapped raw key and flip the perm index */
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,217
movdqu %xmm3,(%edx)
xorl $48,%ecx
.L005schedule_go:
cmpl $192,%eax
ja .L006schedule_256
je .L007schedule_192
.L008schedule_128:
/* 128-bit: 10 plain round/mangle iterations */
movl $10,%eax
.L009loop_schedule_128:
call _vpaes_schedule_round
decl %eax
jz .L010schedule_mangle_last
call _vpaes_schedule_mangle
jmp .L009loop_schedule_128
.align 16
.L007schedule_192:
/* 192-bit: low 8 bytes of the second key half live in xmm6 */
movdqu 8(%esi),%xmm0
call _vpaes_schedule_transform
movdqa %xmm0,%xmm6
pxor %xmm4,%xmm4
movhlps %xmm4,%xmm6
movl $4,%eax
.L011loop_schedule_192:
call _vpaes_schedule_round
.byte 102,15,58,15,198,8
call _vpaes_schedule_mangle
call _vpaes_schedule_192_smear
call _vpaes_schedule_mangle
call _vpaes_schedule_round
decl %eax
jz .L010schedule_mangle_last
call _vpaes_schedule_mangle
call _vpaes_schedule_192_smear
jmp .L011loop_schedule_192
.align 16
.L006schedule_256:
/* 256-bit: alternate full rounds with low rounds on the other half */
movdqu 16(%esi),%xmm0
call _vpaes_schedule_transform
movl $7,%eax
.L012loop_schedule_256:
call _vpaes_schedule_mangle
movdqa %xmm0,%xmm6
call _vpaes_schedule_round
decl %eax
jz .L010schedule_mangle_last
call _vpaes_schedule_mangle
pshufd $255,%xmm0,%xmm0
movdqa %xmm7,20(%esp)
movdqa %xmm6,%xmm7
call .L_vpaes_schedule_low_round
movdqa 20(%esp),%xmm7
jmp .L012loop_schedule_256
.align 16
.L010schedule_mangle_last:
/* final transform of the last round key (direction-dependent tables) */
leal 384(%ebp),%ebx
testl %edi,%edi
jnz .L013schedule_mangle_last_dec
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,193
leal 352(%ebp),%ebx
addl $32,%edx
.L013schedule_mangle_last_dec:
addl $-16,%edx
pxor 336(%ebp),%xmm0
call _vpaes_schedule_transform
movdqu %xmm0,(%edx)
/* wipe key material from the xmm registers */
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
ret
.size _vpaes_schedule_core,.-_vpaes_schedule_core
/*
 * _vpaes_schedule_192_smear: 192-bit schedule helper.  Smears the
 * partial key halves together: xmm6 ^= broadcast of xmm6's low-half
 * word (pshufd $128) ^ broadcast of xmm7's high dword (pshufd $254),
 * copies the result to xmm0, then zeroes xmm6's high half via
 * movhlps from the cleared xmm1.  Clobbers: xmm0, xmm1, xmm6.
 */
.type _vpaes_schedule_192_smear,@function
.align 16
_vpaes_schedule_192_smear:
pshufd $128,%xmm6,%xmm1
pshufd $254,%xmm7,%xmm0
pxor %xmm1,%xmm6
pxor %xmm1,%xmm1
pxor %xmm0,%xmm6
movdqa %xmm6,%xmm0
movhlps %xmm1,%xmm6
ret
.size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear
/*
 * _vpaes_schedule_round: one full key-schedule round.
 * The full entry rotates the round constant kept at 8(%esp)
 * (palignr $15 pair: ".byte 102,15,58,15,202,15" / "...,210,15"),
 * mixes it into xmm7, and broadcasts/rotates the previous output
 * (pshufd $255 + palignr $1) before falling into
 * .L_vpaes_schedule_low_round, which is also called directly by the
 * 256-bit schedule.  The low round expands xmm7 (two pslldq/pxor
 * steps + constant at 336(%ebp)) and pushes xmm0 through the nibble
 * inversion and sbox tables, leaving the new round key in xmm0/xmm7.
 * Clobbers: xmm0-xmm5, xmm7, and the spill slot at 8(%esp).
 */
.type _vpaes_schedule_round,@function
.align 16
_vpaes_schedule_round:
movdqa 8(%esp),%xmm2
pxor %xmm1,%xmm1
.byte 102,15,58,15,202,15
.byte 102,15,58,15,210,15
pxor %xmm1,%xmm7
pshufd $255,%xmm0,%xmm0
.byte 102,15,58,15,192,1
movdqa %xmm2,8(%esp)
.L_vpaes_schedule_low_round:
/* xmm7 = expanded previous key: x ^= x<<4 ; x ^= x<<8 (in bytes) */
movdqa %xmm7,%xmm1
pslldq $4,%xmm7
pxor %xmm1,%xmm7
movdqa %xmm7,%xmm1
pslldq $8,%xmm7
pxor %xmm1,%xmm7
pxor 336(%ebp),%xmm7
/* substitute xmm0 through the inversion + sbox tables */
movdqa -16(%ebp),%xmm4
movdqa -48(%ebp),%xmm5
movdqa %xmm4,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm4,%xmm0
movdqa -32(%ebp),%xmm2
.byte 102,15,56,0,208
pxor %xmm1,%xmm0
movdqa %xmm5,%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
movdqa %xmm5,%xmm4
.byte 102,15,56,0,224
pxor %xmm2,%xmm4
movdqa %xmm5,%xmm2
.byte 102,15,56,0,211
pxor %xmm0,%xmm2
movdqa %xmm5,%xmm3
.byte 102,15,56,0,220
pxor %xmm1,%xmm3
movdqa 32(%ebp),%xmm4
.byte 102,15,56,0,226
movdqa 48(%ebp),%xmm0
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
/* fold in the expanded key; new state in xmm0 and xmm7 */
pxor %xmm7,%xmm0
movdqa %xmm0,%xmm7
ret
.size _vpaes_schedule_round,.-_vpaes_schedule_round
/*
 * _vpaes_schedule_transform
 *
 * Applies a linear (nibble-table) transform to %xmm0: splits it into
 * low/high nibbles using the 0x0F mask at -16(%ebp), looks each half
 * up in the two 16-byte tables at (%ebx) and 16(%ebx) via pshufb
 * (.byte 102,15,56,0,...), and XORs the results.  Clobbers %xmm1/%xmm2.
 */
.type _vpaes_schedule_transform,@function
.align 16
_vpaes_schedule_transform:
movdqa -16(%ebp),%xmm2 /* 0x0F nibble mask */
movdqa %xmm2,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1 /* xmm1 = high nibbles */
pand %xmm2,%xmm0 /* xmm0 = low nibbles */
movdqa (%ebx),%xmm2 /* low-nibble table */
.byte 102,15,56,0,208 /* pshufb */
movdqa 16(%ebx),%xmm0 /* high-nibble table */
.byte 102,15,56,0,193 /* pshufb */
pxor %xmm2,%xmm0
ret
.size _vpaes_schedule_transform,.-_vpaes_schedule_transform
/*
 * _vpaes_schedule_mangle
 *
 * Output transform for one schedule round key (in %xmm0, preserved):
 * writes the mangled key to (%edx) and advances %edx (+16 for the
 * encryption schedule, -16 for decryption, selected by %edi != 0).
 * The encryption path xors in the s63 constant at 336(%ebp) and
 * applies pshufb-based byte smearing; the decryption path runs the
 * key through the inverse-mixcolumns nibble tables at 416(%ebp)..
 * Finally both paths rotate the output bytes with the table selected
 * by %ecx at 256(%ebp,%ecx), and %ecx cycles through {0,16,32,48}
 * (addl $-16 / andl $48).  .byte 102,15,56,0,... sequences are pshufb.
 */
.type _vpaes_schedule_mangle,@function
.align 16
_vpaes_schedule_mangle:
movdqa %xmm0,%xmm4 /* work on a copy; xmm0 survives */
movdqa 128(%ebp),%xmm5
testl %edi,%edi
jnz .L014schedule_mangle_dec /* edi != 0: decryption schedule */
/* encryption: smear with s63 and three pshufb rounds */
addl $16,%edx
pxor 336(%ebp),%xmm4 /* xor s63 */
.byte 102,15,56,0,229 /* pshufb */
movdqa %xmm4,%xmm3
.byte 102,15,56,0,229 /* pshufb */
pxor %xmm4,%xmm3
.byte 102,15,56,0,229 /* pshufb */
pxor %xmm4,%xmm3
jmp .L015schedule_mangle_both
.align 16
.L014schedule_mangle_dec:
/* decryption: inverse mixcolumns via the nibble tables at 416(%ebp) */
movdqa -16(%ebp),%xmm2 /* 0x0F nibble mask */
leal 416(%ebp),%esi
movdqa %xmm2,%xmm1
pandn %xmm4,%xmm1
psrld $4,%xmm1 /* high nibbles */
pand %xmm2,%xmm4 /* low nibbles */
movdqa (%esi),%xmm2
.byte 102,15,56,0,212 /* pshufb */
movdqa 16(%esi),%xmm3
.byte 102,15,56,0,217 /* pshufb */
pxor %xmm2,%xmm3
.byte 102,15,56,0,221 /* pshufb */
movdqa 32(%esi),%xmm2
.byte 102,15,56,0,212 /* pshufb */
pxor %xmm3,%xmm2
movdqa 48(%esi),%xmm3
.byte 102,15,56,0,217 /* pshufb */
pxor %xmm2,%xmm3
.byte 102,15,56,0,221 /* pshufb */
movdqa 64(%esi),%xmm2
.byte 102,15,56,0,212 /* pshufb */
pxor %xmm3,%xmm2
movdqa 80(%esi),%xmm3
.byte 102,15,56,0,217 /* pshufb */
pxor %xmm2,%xmm3
.byte 102,15,56,0,221 /* pshufb */
movdqa 96(%esi),%xmm2
.byte 102,15,56,0,212 /* pshufb */
pxor %xmm3,%xmm2
movdqa 112(%esi),%xmm3
.byte 102,15,56,0,217 /* pshufb */
pxor %xmm2,%xmm3
addl $-16,%edx /* decryption schedule is written backwards */
.L015schedule_mangle_both:
movdqa 256(%ebp,%ecx,1),%xmm1 /* output byte-rotation table */
.byte 102,15,56,0,217 /* pshufb */
addl $-16,%ecx
andl $48,%ecx /* ecx cycles 0,48,32,16,... */
movdqu %xmm3,(%edx) /* store the mangled round key */
ret
.size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle
/*
 * int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
 *                           AES_KEY *key)
 *
 * Expands an AES key for encryption with the SSSE3 vector-permute
 * schedule.  Builds a 16-byte-aligned 56-byte scratch frame below the
 * current stack, stores bits/32+5 in the rounds field at 240(key),
 * and hands off to _vpaes_schedule_core with %edi = 0 (encrypt
 * direction) and %ecx = 48 (initial mangle rotation).  Returns 0.
 */
.globl vpaes_set_encrypt_key
.type vpaes_set_encrypt_key,@function
.align 16
vpaes_set_encrypt_key:
.L_vpaes_set_encrypt_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi /* esi = userKey */
leal -56(%esp),%ebx
movl 24(%esp),%eax /* eax = bits */
andl $-16,%ebx /* 16-byte-align the scratch frame */
movl 28(%esp),%edx /* edx = key schedule */
xchgl %esp,%ebx /* switch to aligned frame */
movl %ebx,48(%esp) /* remember caller's %esp */
movl %eax,%ebx
shrl $5,%ebx
addl $5,%ebx /* schedule round count = bits/32 + 5 */
movl %ebx,240(%edx) /* key->rounds */
movl $48,%ecx /* initial output rotation for schedule_mangle */
movl $0,%edi /* edi = 0: encryption schedule */
/* PIC: ebp = offset of const table relative to .L016pic_point; the call
 * below pushes the address of .L016pic_point, which presumably lets
 * _vpaes_schedule_core (not visible here) form the absolute address —
 * TODO confirm against the core's prologue. */
leal .L_vpaes_consts+0x30-.L016pic_point,%ebp
call _vpaes_schedule_core
.L016pic_point:
movl 48(%esp),%esp /* restore caller stack */
xorl %eax,%eax /* return 0 */
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size vpaes_set_encrypt_key,.-.L_vpaes_set_encrypt_key_begin
/*
 * int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
 *                           AES_KEY *key)
 *
 * Expands an AES key for decryption.  Same framing as
 * vpaes_set_encrypt_key, but points %edx at the *end* of the schedule
 * (the decryption schedule is generated backwards), sets %edi = 1 to
 * select the decryption direction in the schedule core, and derives
 * the initial mangle rotation %ecx from the key size.  Returns 0.
 */
.globl vpaes_set_decrypt_key
.type vpaes_set_decrypt_key,@function
.align 16
vpaes_set_decrypt_key:
.L_vpaes_set_decrypt_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi /* esi = userKey */
leal -56(%esp),%ebx
movl 24(%esp),%eax /* eax = bits */
andl $-16,%ebx /* 16-byte-align the scratch frame */
movl 28(%esp),%edx /* edx = key schedule */
xchgl %esp,%ebx
movl %ebx,48(%esp) /* remember caller's %esp */
movl %eax,%ebx
shrl $5,%ebx
addl $5,%ebx /* schedule round count = bits/32 + 5 */
movl %ebx,240(%edx) /* key->rounds */
shll $4,%ebx
leal 16(%edx,%ebx,1),%edx /* edx = one past the last round key */
movl $1,%edi /* edi = 1: decryption schedule */
movl %eax,%ecx
shrl $1,%ecx
andl $32,%ecx
xorl $32,%ecx /* ecx = ((bits >> 1) & 32) ^ 32 */
/* PIC const-table address setup; see note in vpaes_set_encrypt_key */
leal .L_vpaes_consts+0x30-.L017pic_point,%ebp
call _vpaes_schedule_core
.L017pic_point:
movl 48(%esp),%esp /* restore caller stack */
xorl %eax,%eax /* return 0 */
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size vpaes_set_decrypt_key,.-.L_vpaes_set_decrypt_key_begin
/*
 * void vpaes_encrypt(const unsigned char *in, unsigned char *out,
 *                    const AES_KEY *key)
 *
 * Encrypts one 16-byte block.  Preheats the constant tables
 * (_vpaes_preheat, with PIC %ebp setup as in the key-setup routines),
 * switches to an aligned scratch frame, and runs _vpaes_encrypt_core
 * on the block loaded into %xmm0.
 */
.globl vpaes_encrypt
.type vpaes_encrypt,@function
.align 16
vpaes_encrypt:
.L_vpaes_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
/* PIC const-table address setup; see note in vpaes_set_encrypt_key */
leal .L_vpaes_consts+0x30-.L018pic_point,%ebp
call _vpaes_preheat
.L018pic_point:
movl 20(%esp),%esi /* esi = in */
leal -56(%esp),%ebx
movl 24(%esp),%edi /* edi = out */
andl $-16,%ebx /* 16-byte-align the scratch frame */
movl 28(%esp),%edx /* edx = key schedule */
xchgl %esp,%ebx
movl %ebx,48(%esp) /* remember caller's %esp */
movdqu (%esi),%xmm0 /* load plaintext block */
call _vpaes_encrypt_core
movdqu %xmm0,(%edi) /* store ciphertext block */
movl 48(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size vpaes_encrypt,.-.L_vpaes_encrypt_begin
/*
 * void vpaes_decrypt(const unsigned char *in, unsigned char *out,
 *                    const AES_KEY *key)
 *
 * Decrypts one 16-byte block; mirror image of vpaes_encrypt, calling
 * _vpaes_decrypt_core.
 */
.globl vpaes_decrypt
.type vpaes_decrypt,@function
.align 16
vpaes_decrypt:
.L_vpaes_decrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
/* PIC const-table address setup; see note in vpaes_set_encrypt_key */
leal .L_vpaes_consts+0x30-.L019pic_point,%ebp
call _vpaes_preheat
.L019pic_point:
movl 20(%esp),%esi /* esi = in */
leal -56(%esp),%ebx
movl 24(%esp),%edi /* edi = out */
andl $-16,%ebx /* 16-byte-align the scratch frame */
movl 28(%esp),%edx /* edx = key schedule */
xchgl %esp,%ebx
movl %ebx,48(%esp) /* remember caller's %esp */
movdqu (%esi),%xmm0 /* load ciphertext block */
call _vpaes_decrypt_core
movdqu %xmm0,(%edi) /* store plaintext block */
movl 48(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size vpaes_decrypt,.-.L_vpaes_decrypt_begin
/*
 * void vpaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
 *                        size_t length, const AES_KEY *key,
 *                        unsigned char *ivp, int enc)
 *
 * CBC encrypt (enc != 0) or decrypt (enc == 0) of full 16-byte blocks.
 * Lengths below 16 return immediately without touching the IV.
 * Keeps (out - in) in a frame slot so a single advancing pointer
 * (%esi) addresses both buffers; %edi counts remaining bytes - 16.
 * The running IV/chain value lives in %xmm1 and is written back to
 * *ivp on exit.
 */
.globl vpaes_cbc_encrypt
.type vpaes_cbc_encrypt,@function
.align 16
vpaes_cbc_encrypt:
.L_vpaes_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi /* esi = in */
movl 24(%esp),%edi /* edi = out */
movl 28(%esp),%eax /* eax = length */
movl 32(%esp),%edx /* edx = key */
subl $16,%eax
jc .L020cbc_abort /* length < 16: nothing to do */
leal -56(%esp),%ebx
movl 36(%esp),%ebp /* ebp = ivp */
andl $-16,%ebx /* 16-byte-align the scratch frame */
movl 40(%esp),%ecx /* ecx = enc flag */
xchgl %esp,%ebx
movdqu (%ebp),%xmm1 /* xmm1 = IV */
subl %esi,%edi /* edi = out - in (single-pointer addressing) */
movl %ebx,48(%esp) /* remember caller's %esp */
movl %edi,(%esp) /* save out-in delta */
movl %edx,4(%esp) /* save key */
movl %ebp,8(%esp) /* save ivp */
movl %eax,%edi /* edi = length - 16 (loop counter) */
/* PIC const-table address setup; see note in vpaes_set_encrypt_key */
leal .L_vpaes_consts+0x30-.L021pic_point,%ebp
call _vpaes_preheat
.L021pic_point:
cmpl $0,%ecx
je .L022cbc_dec_loop /* enc == 0: decrypt */
jmp .L023cbc_enc_loop
.align 16
.L023cbc_enc_loop:
movdqu (%esi),%xmm0 /* load plaintext */
pxor %xmm1,%xmm0 /* xor with chain value */
call _vpaes_encrypt_core
movl (%esp),%ebx /* ebx = out - in */
movl 4(%esp),%edx /* edx = key (core input) */
movdqa %xmm0,%xmm1 /* ciphertext becomes next chain value */
movdqu %xmm0,(%ebx,%esi,1) /* store at out */
leal 16(%esi),%esi
subl $16,%edi
jnc .L023cbc_enc_loop
jmp .L024cbc_done
.align 16
.L022cbc_dec_loop:
movdqu (%esi),%xmm0 /* load ciphertext */
movdqa %xmm1,16(%esp) /* stash chain value */
movdqa %xmm0,32(%esp) /* stash ciphertext (next chain value) */
call _vpaes_decrypt_core
movl (%esp),%ebx /* ebx = out - in */
movl 4(%esp),%edx /* edx = key */
pxor 16(%esp),%xmm0 /* xor with previous chain value */
movdqa 32(%esp),%xmm1 /* chain value = this block's ciphertext */
movdqu %xmm0,(%ebx,%esi,1) /* store plaintext at out */
leal 16(%esi),%esi
subl $16,%edi
jnc .L022cbc_dec_loop
.L024cbc_done:
movl 8(%esp),%ebx /* ebx = ivp */
movl 48(%esp),%esp /* restore caller stack */
movdqu %xmm1,(%ebx) /* write back final chain value */
.L020cbc_abort:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size vpaes_cbc_encrypt,.-.L_vpaes_cbc_encrypt_begin

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,693 @@
# $FreeBSD$
#ifdef PIC
.file "x86-gf2m.S"
.text
/*
 * _mul_1x1_mmx  (PIC build)
 *
 * Carry-less (GF(2)[x]) 32x32 -> 64-bit multiply: %mm0 = %eax * %ebx.
 * Builds an 8-entry table of a*{0..7} (3-bit window) in 36 bytes of
 * stack, after clearing the top two bits of a; those two bits are
 * compensated separately via the sign-compare masks built in %mm4/%mm5
 * (pcmpgtd against a shifted copy of a, ANDed with b, shifted into
 * place with psllq $30/$31).  b is then consumed 3 bits at a time
 * (shrl $3) and the table entries are accumulated with psllq/pxor.
 * Internal helper: register arguments, clobbers %ecx,%edx,%ebp,%esi,
 * %edi and mm regs; caller must emms.
 */
.type _mul_1x1_mmx,@function
.align 16
_mul_1x1_mmx:
subl $36,%esp /* 9-dword table/scratch area */
movl %eax,%ecx
leal (%eax,%eax,1),%edx /* edx = a << 1 */
andl $1073741823,%ecx /* ecx = a with top 2 bits cleared */
leal (%edx,%edx,1),%ebp /* ebp = a << 2 */
movl $0,(%esp) /* table[0] = 0 */
andl $2147483647,%edx /* edx = (a<<1) with top bit cleared */
movd %eax,%mm2
movd %ebx,%mm3
movl %ecx,4(%esp) /* table[1] = a */
xorl %edx,%ecx
pxor %mm5,%mm5
pxor %mm4,%mm4
movl %edx,8(%esp) /* table[2] = a<<1 */
xorl %ebp,%edx
movl %ecx,12(%esp) /* table[3] = a ^ a<<1 */
pcmpgtd %mm2,%mm5 /* mask from sign bit of a */
paddd %mm2,%mm2
xorl %edx,%ecx
movl %ebp,16(%esp) /* table[4] = a<<2 */
xorl %edx,%ebp
pand %mm3,%mm5
pcmpgtd %mm2,%mm4 /* mask from bit 30 of a */
movl %ecx,20(%esp) /* table[5] */
xorl %ecx,%ebp
psllq $31,%mm5 /* top-bit correction term */
pand %mm3,%mm4
movl %edx,24(%esp) /* table[6] */
movl $7,%esi
movl %ebp,28(%esp) /* table[7] */
movl %esi,%ebp /* ebp = constant 7 (window mask) */
/* main loop, fully unrolled: 11 windows of 3 bits of b */
andl %ebx,%esi
shrl $3,%ebx
movl %ebp,%edi
psllq $30,%mm4 /* bit-30 correction term */
andl %ebx,%edi
shrl $3,%ebx
movd (%esp,%esi,4),%mm0
movl %ebp,%esi
andl %ebx,%esi
shrl $3,%ebx
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $3,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $6,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $9,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $12,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $15,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $18,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $21,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $24,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
pxor %mm4,%mm0 /* fold in bit-30 correction */
psllq $27,%mm2
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
pxor %mm5,%mm0 /* fold in bit-31 correction */
psllq $30,%mm1
addl $36,%esp
pxor %mm1,%mm0 /* result in mm0 */
ret
.size _mul_1x1_mmx,.-_mul_1x1_mmx
/*
 * _mul_1x1_ialu  (PIC build)
 *
 * Integer-ALU fallback for the carry-less 32x32 -> 64-bit multiply:
 * %edx:%eax = a * b in GF(2)[x], with a in %eax and b in %ebx.
 * Same 3-bit-window/8-entry stack table as _mul_1x1_mmx; the top two
 * bits of a are handled by arithmetic-shift masks (sarl $31) ANDed
 * with b and shifted into the high/low result words.  Internal
 * helper: clobbers %ecx,%ebp,%esi,%edi.
 */
.type _mul_1x1_ialu,@function
.align 16
_mul_1x1_ialu:
subl $36,%esp /* 9-dword table/scratch area */
movl %eax,%ecx
leal (%eax,%eax,1),%edx /* edx = a << 1 */
leal (,%eax,4),%ebp /* ebp = a << 2 */
andl $1073741823,%ecx /* ecx = a with top 2 bits cleared */
leal (%eax,%eax,1),%edi /* edi = a << 1 (for bit-30 mask) */
sarl $31,%eax /* eax = mask from bit 31 of a */
movl $0,(%esp) /* table[0] = 0 */
andl $2147483647,%edx /* clear top bit of a<<1 */
movl %ecx,4(%esp) /* table[1] = a */
xorl %edx,%ecx
movl %edx,8(%esp) /* table[2] */
xorl %ebp,%edx
movl %ecx,12(%esp) /* table[3] */
xorl %edx,%ecx
movl %ebp,16(%esp) /* table[4] */
xorl %edx,%ebp
movl %ecx,20(%esp) /* table[5] */
xorl %ecx,%ebp
sarl $31,%edi /* edi = mask from bit 30 of a */
andl %ebx,%eax /* eax = b if a's bit 31 set, else 0 */
movl %edx,24(%esp) /* table[6] */
andl %ebx,%edi /* edi = b if a's bit 30 set, else 0 */
movl %ebp,28(%esp) /* table[7] */
/* place the two correction terms across edx:eax */
movl %eax,%edx
shll $31,%eax
movl %edi,%ecx
shrl $1,%edx
movl $7,%esi
shll $30,%edi
andl %ebx,%esi
shrl $2,%ecx
xorl %edi,%eax
shrl $3,%ebx
/* unrolled windows: accumulate table[b & 7] << (3*i) into edx:eax */
movl $7,%edi
andl %ebx,%edi
shrl $3,%ebx
xorl %ecx,%edx
xorl (%esp,%esi,4),%eax
movl $7,%esi
andl %ebx,%esi
shrl $3,%ebx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $3,%ebp
andl %ebx,%edi
shrl $29,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $6,%ecx
andl %ebx,%esi
shrl $26,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $9,%ebp
andl %ebx,%edi
shrl $23,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $12,%ecx
andl %ebx,%esi
shrl $20,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $15,%ebp
andl %ebx,%edi
shrl $17,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $18,%ecx
andl %ebx,%esi
shrl $14,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $21,%ebp
andl %ebx,%edi
shrl $11,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $24,%ecx
andl %ebx,%esi
shrl $8,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl %ebp,%ecx
shll $27,%ebp
movl (%esp,%esi,4),%edi
shrl $5,%ecx
movl %edi,%esi
xorl %ebp,%eax
shll $30,%edi
xorl %ecx,%edx
shrl $2,%esi
xorl %edi,%eax
xorl %esi,%edx /* 64-bit product now complete in edx:eax */
addl $36,%esp
ret
.size _mul_1x1_ialu,.-_mul_1x1_ialu
/*
 * void bn_GF2m_mul_2x2(BN_ULONG *r, BN_ULONG a1, BN_ULONG a0,
 *                      BN_ULONG b1, BN_ULONG b0)   (PIC build)
 *
 * 64x64 -> 128-bit carry-less multiply via Karatsuba over three 32x32
 * products: r = a1*b1 << 64  ^  a0*b0
 *             ^ ((a0^a1)*(b0^b1) ^ a1*b1 ^ a0*b0) << 32.
 * Dispatches on OPENSSL_ia32cap_P (located PIC-style through the
 * call/pop get-PC idiom at .L000PIC_me_up):
 *   - no MMX (bit 23 clear)            -> integer-ALU path (.L001ialu)
 *   - MMX but no FXSR (bit 24) or no
 *     PCLMULQDQ (dword 1, bit 1)       -> MMX path (.L002mmx)
 *   - otherwise                        -> single PCLMULQDQ
 *     (.byte 102,15,58,68,192,1 below) on the shuffled operands.
 * NOTE(review): bit meanings follow OpenSSL's OPENSSL_ia32cap
 * documentation — confirm against the installed headers.
 */
.globl bn_GF2m_mul_2x2
.type bn_GF2m_mul_2x2,@function
.align 16
bn_GF2m_mul_2x2:
.L_bn_GF2m_mul_2x2_begin:
call .L000PIC_me_up /* get-PC idiom for PIC access to ia32cap */
.L000PIC_me_up:
popl %edx /* edx = address of .L000PIC_me_up */
leal OPENSSL_ia32cap_P-.L000PIC_me_up(%edx),%edx
movl (%edx),%eax
movl 4(%edx),%edx
testl $8388608,%eax /* MMX available? */
jz .L001ialu
testl $16777216,%eax /* FXSR (SSE usable)? */
jz .L002mmx
testl $2,%edx /* PCLMULQDQ? */
jz .L002mmx
/* PCLMULQDQ path: one carry-less multiply does the whole 64x64 */
movups 8(%esp),%xmm0 /* xmm0 = {a1,a0,b1,b0} */
shufps $177,%xmm0,%xmm0 /* pair the operands for pclmulqdq */
.byte 102,15,58,68,192,1 /* pclmulqdq $1,%xmm0,%xmm0 */
movl 4(%esp),%eax /* eax = r */
movups %xmm0,(%eax) /* store 128-bit product */
ret
.align 16
.L002mmx:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
/* Karatsuba with _mul_1x1_mmx: a1*b1, a0*b0, (a0^a1)*(b0^b1) */
movl 24(%esp),%eax
movl 32(%esp),%ebx
call _mul_1x1_mmx
movq %mm0,%mm7 /* mm7 = a1*b1 */
movl 28(%esp),%eax
movl 36(%esp),%ebx
call _mul_1x1_mmx
movq %mm0,%mm6 /* mm6 = a0*b0 */
movl 24(%esp),%eax
movl 32(%esp),%ebx
xorl 28(%esp),%eax
xorl 36(%esp),%ebx
call _mul_1x1_mmx /* mm0 = (a0^a1)*(b0^b1) */
pxor %mm7,%mm0
movl 20(%esp),%eax /* eax = r */
pxor %mm6,%mm0 /* mm0 = middle Karatsuba term */
movq %mm0,%mm2
psllq $32,%mm0
popl %edi
psrlq $32,%mm2
popl %esi
pxor %mm6,%mm0 /* low 64 bits */
popl %ebx
pxor %mm7,%mm2 /* high 64 bits */
movq %mm0,(%eax)
popl %ebp
movq %mm2,8(%eax)
emms /* restore x87 state for callers */
ret
.align 16
.L001ialu:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
subl $20,%esp /* room for the two partial products */
/* same Karatsuba recombination using _mul_1x1_ialu (edx:eax results) */
movl 44(%esp),%eax
movl 52(%esp),%ebx
call _mul_1x1_ialu /* a1*b1 */
movl %eax,8(%esp)
movl %edx,12(%esp)
movl 48(%esp),%eax
movl 56(%esp),%ebx
call _mul_1x1_ialu /* a0*b0 */
movl %eax,(%esp)
movl %edx,4(%esp)
movl 44(%esp),%eax
movl 52(%esp),%ebx
xorl 48(%esp),%eax
xorl 56(%esp),%ebx
call _mul_1x1_ialu /* (a0^a1)*(b0^b1) */
movl 40(%esp),%ebp /* ebp = r */
movl (%esp),%ebx
movl 4(%esp),%ecx
movl 8(%esp),%edi
movl 12(%esp),%esi
/* fold the middle term into the four 32-bit result words */
xorl %edx,%eax
xorl %ecx,%edx
xorl %ebx,%eax
movl %ebx,(%ebp)
xorl %edi,%edx
movl %esi,12(%ebp)
xorl %esi,%eax
addl $20,%esp
xorl %esi,%edx
popl %edi
xorl %edx,%eax
popl %esi
movl %edx,8(%ebp)
popl %ebx
movl %eax,4(%ebp)
popl %ebp
ret
.size bn_GF2m_mul_2x2,.-.L_bn_GF2m_mul_2x2_begin
.byte 71,70,40,50,94,109,41,32,77,117,108,116,105,112,108,105
.byte 99,97,116,105,111,110,32,102,111,114,32,120,56,54,44,32
.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
.byte 62,0
.comm OPENSSL_ia32cap_P,16,4
#else
.file "x86-gf2m.S"
.text
/*
 * _mul_1x1_mmx  (non-PIC build; identical logic to the PIC copy above)
 *
 * Carry-less (GF(2)[x]) 32x32 -> 64-bit multiply: %mm0 = %eax * %ebx.
 * 8-entry a*{0..7} table on the stack, 3-bit windows of b, with the
 * top two bits of a compensated via the %mm4/%mm5 sign-compare masks.
 * Internal helper; caller must emms.
 */
.type _mul_1x1_mmx,@function
.align 16
_mul_1x1_mmx:
subl $36,%esp /* 9-dword table/scratch area */
movl %eax,%ecx
leal (%eax,%eax,1),%edx /* a << 1 */
andl $1073741823,%ecx /* a with top 2 bits cleared */
leal (%edx,%edx,1),%ebp /* a << 2 */
movl $0,(%esp) /* table[0] */
andl $2147483647,%edx
movd %eax,%mm2
movd %ebx,%mm3
movl %ecx,4(%esp) /* table[1] */
xorl %edx,%ecx
pxor %mm5,%mm5
pxor %mm4,%mm4
movl %edx,8(%esp) /* table[2] */
xorl %ebp,%edx
movl %ecx,12(%esp) /* table[3] */
pcmpgtd %mm2,%mm5 /* mask from sign bit of a */
paddd %mm2,%mm2
xorl %edx,%ecx
movl %ebp,16(%esp) /* table[4] */
xorl %edx,%ebp
pand %mm3,%mm5
pcmpgtd %mm2,%mm4 /* mask from bit 30 of a */
movl %ecx,20(%esp) /* table[5] */
xorl %ecx,%ebp
psllq $31,%mm5 /* top-bit correction */
pand %mm3,%mm4
movl %edx,24(%esp) /* table[6] */
movl $7,%esi
movl %ebp,28(%esp) /* table[7] */
movl %esi,%ebp /* window mask 7 */
/* unrolled: 11 windows of 3 bits of b */
andl %ebx,%esi
shrl $3,%ebx
movl %ebp,%edi
psllq $30,%mm4 /* bit-30 correction */
andl %ebx,%edi
shrl $3,%ebx
movd (%esp,%esi,4),%mm0
movl %ebp,%esi
andl %ebx,%esi
shrl $3,%ebx
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $3,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $6,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $9,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $12,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $15,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $18,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $21,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $24,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
pxor %mm4,%mm0 /* fold bit-30 correction */
psllq $27,%mm2
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
pxor %mm5,%mm0 /* fold bit-31 correction */
psllq $30,%mm1
addl $36,%esp
pxor %mm1,%mm0 /* result in mm0 */
ret
.size _mul_1x1_mmx,.-_mul_1x1_mmx
/*
 * _mul_1x1_ialu  (non-PIC build; identical logic to the PIC copy above)
 *
 * Integer-ALU carry-less 32x32 -> 64-bit multiply:
 * %edx:%eax = a * b in GF(2)[x], a in %eax, b in %ebx.
 * 3-bit-window table on the stack; top two bits of a handled with
 * sarl $31 masks.  Clobbers %ecx,%ebp,%esi,%edi.
 */
.type _mul_1x1_ialu,@function
.align 16
_mul_1x1_ialu:
subl $36,%esp /* 9-dword table/scratch area */
movl %eax,%ecx
leal (%eax,%eax,1),%edx /* a << 1 */
leal (,%eax,4),%ebp /* a << 2 */
andl $1073741823,%ecx /* a with top 2 bits cleared */
leal (%eax,%eax,1),%edi /* a << 1 (for bit-30 mask) */
sarl $31,%eax /* mask from bit 31 of a */
movl $0,(%esp) /* table[0] */
andl $2147483647,%edx
movl %ecx,4(%esp) /* table[1] */
xorl %edx,%ecx
movl %edx,8(%esp) /* table[2] */
xorl %ebp,%edx
movl %ecx,12(%esp) /* table[3] */
xorl %edx,%ecx
movl %ebp,16(%esp) /* table[4] */
xorl %edx,%ebp
movl %ecx,20(%esp) /* table[5] */
xorl %ecx,%ebp
sarl $31,%edi /* mask from bit 30 of a */
andl %ebx,%eax /* b if bit 31 of a set */
movl %edx,24(%esp) /* table[6] */
andl %ebx,%edi /* b if bit 30 of a set */
movl %ebp,28(%esp) /* table[7] */
/* place correction terms across edx:eax */
movl %eax,%edx
shll $31,%eax
movl %edi,%ecx
shrl $1,%edx
movl $7,%esi
shll $30,%edi
andl %ebx,%esi
shrl $2,%ecx
xorl %edi,%eax
shrl $3,%ebx
/* unrolled windows: accumulate table[b & 7] << (3*i) into edx:eax */
movl $7,%edi
andl %ebx,%edi
shrl $3,%ebx
xorl %ecx,%edx
xorl (%esp,%esi,4),%eax
movl $7,%esi
andl %ebx,%esi
shrl $3,%ebx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $3,%ebp
andl %ebx,%edi
shrl $29,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $6,%ecx
andl %ebx,%esi
shrl $26,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $9,%ebp
andl %ebx,%edi
shrl $23,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $12,%ecx
andl %ebx,%esi
shrl $20,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $15,%ebp
andl %ebx,%edi
shrl $17,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $18,%ecx
andl %ebx,%esi
shrl $14,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $21,%ebp
andl %ebx,%edi
shrl $11,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $24,%ecx
andl %ebx,%esi
shrl $8,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl %ebp,%ecx
shll $27,%ebp
movl (%esp,%esi,4),%edi
shrl $5,%ecx
movl %edi,%esi
xorl %ebp,%eax
shll $30,%edi
xorl %ecx,%edx
shrl $2,%esi
xorl %edi,%eax
xorl %esi,%edx /* 64-bit product complete in edx:eax */
addl $36,%esp
ret
.size _mul_1x1_ialu,.-_mul_1x1_ialu
/*
 * void bn_GF2m_mul_2x2(BN_ULONG *r, BN_ULONG a1, BN_ULONG a0,
 *                      BN_ULONG b1, BN_ULONG b0)   (non-PIC build)
 *
 * 64x64 -> 128-bit carry-less multiply via Karatsuba over three 32x32
 * products; identical to the PIC copy except OPENSSL_ia32cap_P is
 * addressed absolutely.  Dispatch: no MMX -> ALU path; no FXSR or no
 * PCLMULQDQ -> MMX path; else a single PCLMULQDQ
 * (.byte 102,15,58,68,192,1).  NOTE(review): capability-bit meanings
 * per OpenSSL's OPENSSL_ia32cap documentation — confirm.
 */
.globl bn_GF2m_mul_2x2
.type bn_GF2m_mul_2x2,@function
.align 16
bn_GF2m_mul_2x2:
.L_bn_GF2m_mul_2x2_begin:
leal OPENSSL_ia32cap_P,%edx /* absolute reference (non-PIC) */
movl (%edx),%eax
movl 4(%edx),%edx
testl $8388608,%eax /* MMX available? */
jz .L000ialu
testl $16777216,%eax /* FXSR (SSE usable)? */
jz .L001mmx
testl $2,%edx /* PCLMULQDQ? */
jz .L001mmx
/* PCLMULQDQ path */
movups 8(%esp),%xmm0 /* xmm0 = {a1,a0,b1,b0} */
shufps $177,%xmm0,%xmm0 /* pair operands for pclmulqdq */
.byte 102,15,58,68,192,1 /* pclmulqdq $1,%xmm0,%xmm0 */
movl 4(%esp),%eax /* eax = r */
movups %xmm0,(%eax)
ret
.align 16
.L001mmx:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
/* Karatsuba with _mul_1x1_mmx */
movl 24(%esp),%eax
movl 32(%esp),%ebx
call _mul_1x1_mmx
movq %mm0,%mm7 /* a1*b1 */
movl 28(%esp),%eax
movl 36(%esp),%ebx
call _mul_1x1_mmx
movq %mm0,%mm6 /* a0*b0 */
movl 24(%esp),%eax
movl 32(%esp),%ebx
xorl 28(%esp),%eax
xorl 36(%esp),%ebx
call _mul_1x1_mmx /* (a0^a1)*(b0^b1) */
pxor %mm7,%mm0
movl 20(%esp),%eax /* eax = r */
pxor %mm6,%mm0 /* middle Karatsuba term */
movq %mm0,%mm2
psllq $32,%mm0
popl %edi
psrlq $32,%mm2
popl %esi
pxor %mm6,%mm0 /* low 64 bits */
popl %ebx
pxor %mm7,%mm2 /* high 64 bits */
movq %mm0,(%eax)
popl %ebp
movq %mm2,8(%eax)
emms /* restore x87 state */
ret
.align 16
.L000ialu:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
subl $20,%esp /* room for partial products */
movl 44(%esp),%eax
movl 52(%esp),%ebx
call _mul_1x1_ialu /* a1*b1 */
movl %eax,8(%esp)
movl %edx,12(%esp)
movl 48(%esp),%eax
movl 56(%esp),%ebx
call _mul_1x1_ialu /* a0*b0 */
movl %eax,(%esp)
movl %edx,4(%esp)
movl 44(%esp),%eax
movl 52(%esp),%ebx
xorl 48(%esp),%eax
xorl 56(%esp),%ebx
call _mul_1x1_ialu /* (a0^a1)*(b0^b1) */
movl 40(%esp),%ebp /* ebp = r */
movl (%esp),%ebx
movl 4(%esp),%ecx
movl 8(%esp),%edi
movl 12(%esp),%esi
/* fold the middle term into the four result words */
xorl %edx,%eax
xorl %ecx,%edx
xorl %ebx,%eax
movl %ebx,(%ebp)
xorl %edi,%edx
movl %esi,12(%ebp)
xorl %esi,%eax
addl $20,%esp
xorl %esi,%edx
popl %edi
xorl %edx,%eax
popl %esi
movl %edx,8(%ebp)
popl %ebx
movl %eax,4(%ebp)
popl %ebp
ret
.size bn_GF2m_mul_2x2,.-.L_bn_GF2m_mul_2x2_begin
.byte 71,70,40,50,94,109,41,32,77,117,108,116,105,112,108,105
.byte 99,97,116,105,111,110,32,102,111,114,32,120,56,54,44,32
.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
.byte 62,0
.comm OPENSSL_ia32cap_P,16,4
#endif

View File

@ -1,344 +0,0 @@
# $FreeBSD$
.file "x86-gf2m.s"
.text
/*
 * _mul_1x1_mmx  (legacy x86-gf2m.s being removed by this commit;
 * logic identical to the non-PIC copy in the new x86-gf2m.S)
 *
 * Carry-less (GF(2)[x]) 32x32 -> 64-bit multiply: %mm0 = %eax * %ebx,
 * using an 8-entry a*{0..7} stack table, 3-bit windows of b, and
 * %mm4/%mm5 masks to compensate the top two bits of a.
 */
.type _mul_1x1_mmx,@function
.align 16
_mul_1x1_mmx:
subl $36,%esp /* 9-dword table/scratch area */
movl %eax,%ecx
leal (%eax,%eax,1),%edx /* a << 1 */
andl $1073741823,%ecx /* a with top 2 bits cleared */
leal (%edx,%edx,1),%ebp /* a << 2 */
movl $0,(%esp) /* table[0] */
andl $2147483647,%edx
movd %eax,%mm2
movd %ebx,%mm3
movl %ecx,4(%esp) /* table[1] */
xorl %edx,%ecx
pxor %mm5,%mm5
pxor %mm4,%mm4
movl %edx,8(%esp) /* table[2] */
xorl %ebp,%edx
movl %ecx,12(%esp) /* table[3] */
pcmpgtd %mm2,%mm5 /* mask from sign bit of a */
paddd %mm2,%mm2
xorl %edx,%ecx
movl %ebp,16(%esp) /* table[4] */
xorl %edx,%ebp
pand %mm3,%mm5
pcmpgtd %mm2,%mm4 /* mask from bit 30 of a */
movl %ecx,20(%esp) /* table[5] */
xorl %ecx,%ebp
psllq $31,%mm5 /* top-bit correction */
pand %mm3,%mm4
movl %edx,24(%esp) /* table[6] */
movl $7,%esi
movl %ebp,28(%esp) /* table[7] */
movl %esi,%ebp /* window mask 7 */
/* unrolled: 11 windows of 3 bits of b */
andl %ebx,%esi
shrl $3,%ebx
movl %ebp,%edi
psllq $30,%mm4 /* bit-30 correction */
andl %ebx,%edi
shrl $3,%ebx
movd (%esp,%esi,4),%mm0
movl %ebp,%esi
andl %ebx,%esi
shrl $3,%ebx
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $3,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $6,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $9,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $12,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $15,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $18,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
movl %ebp,%edi
psllq $21,%mm2
andl %ebx,%edi
shrl $3,%ebx
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
movl %ebp,%esi
psllq $24,%mm1
andl %ebx,%esi
shrl $3,%ebx
pxor %mm1,%mm0
movd (%esp,%edi,4),%mm2
pxor %mm4,%mm0 /* fold bit-30 correction */
psllq $27,%mm2
pxor %mm2,%mm0
movd (%esp,%esi,4),%mm1
pxor %mm5,%mm0 /* fold bit-31 correction */
psllq $30,%mm1
addl $36,%esp
pxor %mm1,%mm0 /* result in mm0 */
ret
.size _mul_1x1_mmx,.-_mul_1x1_mmx
/*
 * _mul_1x1_ialu  (legacy x86-gf2m.s being removed by this commit;
 * logic identical to the non-PIC copy in the new x86-gf2m.S)
 *
 * Integer-ALU carry-less 32x32 -> 64-bit multiply:
 * %edx:%eax = a * b in GF(2)[x], a in %eax, b in %ebx.
 */
.type _mul_1x1_ialu,@function
.align 16
_mul_1x1_ialu:
subl $36,%esp /* 9-dword table/scratch area */
movl %eax,%ecx
leal (%eax,%eax,1),%edx /* a << 1 */
leal (,%eax,4),%ebp /* a << 2 */
andl $1073741823,%ecx /* a with top 2 bits cleared */
leal (%eax,%eax,1),%edi /* a << 1 (for bit-30 mask) */
sarl $31,%eax /* mask from bit 31 of a */
movl $0,(%esp) /* table[0] */
andl $2147483647,%edx
movl %ecx,4(%esp) /* table[1] */
xorl %edx,%ecx
movl %edx,8(%esp) /* table[2] */
xorl %ebp,%edx
movl %ecx,12(%esp) /* table[3] */
xorl %edx,%ecx
movl %ebp,16(%esp) /* table[4] */
xorl %edx,%ebp
movl %ecx,20(%esp) /* table[5] */
xorl %ecx,%ebp
sarl $31,%edi /* mask from bit 30 of a */
andl %ebx,%eax /* b if bit 31 of a set */
movl %edx,24(%esp) /* table[6] */
andl %ebx,%edi /* b if bit 30 of a set */
movl %ebp,28(%esp) /* table[7] */
/* place correction terms across edx:eax */
movl %eax,%edx
shll $31,%eax
movl %edi,%ecx
shrl $1,%edx
movl $7,%esi
shll $30,%edi
andl %ebx,%esi
shrl $2,%ecx
xorl %edi,%eax
shrl $3,%ebx
/* unrolled windows: accumulate table[b & 7] << (3*i) into edx:eax */
movl $7,%edi
andl %ebx,%edi
shrl $3,%ebx
xorl %ecx,%edx
xorl (%esp,%esi,4),%eax
movl $7,%esi
andl %ebx,%esi
shrl $3,%ebx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $3,%ebp
andl %ebx,%edi
shrl $29,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $6,%ecx
andl %ebx,%esi
shrl $26,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $9,%ebp
andl %ebx,%edi
shrl $23,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $12,%ecx
andl %ebx,%esi
shrl $20,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $15,%ebp
andl %ebx,%edi
shrl $17,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $18,%ecx
andl %ebx,%esi
shrl $14,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl $7,%edi
movl %ebp,%ecx
shll $21,%ebp
andl %ebx,%edi
shrl $11,%ecx
xorl %ebp,%eax
shrl $3,%ebx
xorl %ecx,%edx
movl (%esp,%esi,4),%ecx
movl $7,%esi
movl %ecx,%ebp
shll $24,%ecx
andl %ebx,%esi
shrl $8,%ebp
xorl %ecx,%eax
shrl $3,%ebx
xorl %ebp,%edx
movl (%esp,%edi,4),%ebp
movl %ebp,%ecx
shll $27,%ebp
movl (%esp,%esi,4),%edi
shrl $5,%ecx
movl %edi,%esi
xorl %ebp,%eax
shll $30,%edi
xorl %ecx,%edx
shrl $2,%esi
xorl %edi,%eax
xorl %esi,%edx /* 64-bit product complete in edx:eax */
addl $36,%esp
ret
.size _mul_1x1_ialu,.-_mul_1x1_ialu
/*
 * bn_GF2m_mul_2x2  (legacy x86-gf2m.s being removed by this commit;
 * logic identical to the non-PIC copy in the new x86-gf2m.S)
 *
 * 64x64 -> 128-bit carry-less multiply via Karatsuba over three 32x32
 * products, dispatching on OPENSSL_ia32cap_P (absolute reference).
 */
.globl bn_GF2m_mul_2x2
.type bn_GF2m_mul_2x2,@function
.align 16
bn_GF2m_mul_2x2:
.L_bn_GF2m_mul_2x2_begin:
leal OPENSSL_ia32cap_P,%edx /* absolute reference (non-PIC) */
movl (%edx),%eax
movl 4(%edx),%edx
testl $8388608,%eax /* MMX available? */
jz .L000ialu
testl $16777216,%eax /* FXSR (SSE usable)? */
jz .L001mmx
testl $2,%edx /* PCLMULQDQ? */
jz .L001mmx
/* PCLMULQDQ path */
movups 8(%esp),%xmm0
shufps $177,%xmm0,%xmm0 /* pair operands for pclmulqdq */
.byte 102,15,58,68,192,1 /* pclmulqdq $1,%xmm0,%xmm0 */
movl 4(%esp),%eax /* eax = r */
movups %xmm0,(%eax)
ret
.align 16
.L001mmx:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
/* Karatsuba with _mul_1x1_mmx */
movl 24(%esp),%eax
movl 32(%esp),%ebx
call _mul_1x1_mmx
movq %mm0,%mm7 /* a1*b1 */
movl 28(%esp),%eax
movl 36(%esp),%ebx
call _mul_1x1_mmx
movq %mm0,%mm6 /* a0*b0 */
movl 24(%esp),%eax
movl 32(%esp),%ebx
xorl 28(%esp),%eax
xorl 36(%esp),%ebx
call _mul_1x1_mmx /* (a0^a1)*(b0^b1) */
pxor %mm7,%mm0
movl 20(%esp),%eax /* eax = r */
pxor %mm6,%mm0 /* middle Karatsuba term */
movq %mm0,%mm2
psllq $32,%mm0
popl %edi
psrlq $32,%mm2
popl %esi
pxor %mm6,%mm0 /* low 64 bits */
popl %ebx
pxor %mm7,%mm2 /* high 64 bits */
movq %mm0,(%eax)
popl %ebp
movq %mm2,8(%eax)
emms /* restore x87 state */
ret
.align 16
.L000ialu:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
subl $20,%esp /* room for partial products */
movl 44(%esp),%eax
movl 52(%esp),%ebx
call _mul_1x1_ialu /* a1*b1 */
movl %eax,8(%esp)
movl %edx,12(%esp)
movl 48(%esp),%eax
movl 56(%esp),%ebx
call _mul_1x1_ialu /* a0*b0 */
movl %eax,(%esp)
movl %edx,4(%esp)
movl 44(%esp),%eax
movl 52(%esp),%ebx
xorl 48(%esp),%eax
xorl 56(%esp),%ebx
call _mul_1x1_ialu /* (a0^a1)*(b0^b1) */
movl 40(%esp),%ebp /* ebp = r */
movl (%esp),%ebx
movl 4(%esp),%ecx
movl 8(%esp),%edi
movl 12(%esp),%esi
/* fold the middle term into the four result words */
xorl %edx,%eax
xorl %ecx,%edx
xorl %ebx,%eax
movl %ebx,(%ebp)
xorl %edi,%edx
movl %esi,12(%ebp)
xorl %esi,%eax
addl $20,%esp
xorl %esi,%edx
popl %edi
xorl %edx,%eax
popl %esi
movl %edx,8(%ebp)
popl %ebx
movl %eax,4(%ebp)
popl %ebp
ret
.size bn_GF2m_mul_2x2,.-.L_bn_GF2m_mul_2x2_begin
.byte 71,70,40,50,94,109,41,32,77,117,108,116,105,112,108,105
.byte 99,97,116,105,111,110,32,102,111,114,32,120,56,54,44,32
.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
.byte 62,0
.comm OPENSSL_ia32cap_P,16,4

View File

@ -0,0 +1,935 @@
# $FreeBSD$
#ifdef PIC
.file "x86-mont.S"
.text
/*
 * int bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
 *                 const BN_ULONG *np, const BN_ULONG *n0, int num)
 * (PIC build)
 *
 * Montgomery multiplication rp = ap * bp * R^-1 mod np, R = 2^(32*num).
 * Returns 1 on success, 0 (".L000just_leave") when num < 4 so the
 * caller falls back to the generic C path.
 *
 * Layout: allocates a (num+2)-word scratch vector on a cache-friendly
 * 64-byte-aligned stack, randomizing placement against the bp pointer
 * (the xor/and $2048 dance) and touching each new page first
 * (.L001page_walk) so the OS grows the stack safely.  Frame slots:
 *   (%esp)    = num-related counter / saved word
 *   4(%esp)   = rp, 8 = ap, 12 = bp, 16 = np, 20 = n0 word,
 *   24(%esp)  = caller's %esp, 28 = end-of-bp sentinel,
 *   32(%esp)... = the t[] accumulator vector.
 *
 * Three code paths:
 *   - SSE2 (OPENSSL_ia32cap_P bit 26, found via the PIC call/pop at
 *     .L002PIC_me_up): pmuludq-based mul+reduce loops .L0041st /
 *     .L005outer / .L006inner.
 *   - generic IALU multiply (.L009mull / .L0111stmadd / .L0102ndmadd)
 *     when ap != bp.
 *   - squaring shortcut (.L008bn_sqr_mont) when ap == bp, using the
 *     doubled-cross-product trick (.L012sqr / .L015sqradd /
 *     .L0133rdmadd).
 * All paths join at .L007common_tail: conditional subtraction of np
 * and a branch-free masked select (sbb/and/not/or) of t[] vs t[]-np
 * into rp.
 */
.globl bn_mul_mont
.type bn_mul_mont,@function
.align 16
bn_mul_mont:
.L_bn_mul_mont_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %eax,%eax /* prepare return 0 */
movl 40(%esp),%edi /* edi = num */
cmpl $4,%edi
jl .L000just_leave /* num < 4: let C code handle it */
leal 20(%esp),%esi /* esi = &args */
leal 24(%esp),%edx /* edx = &bp arg (alignment seed) */
movl %esp,%ebp /* remember caller's esp */
addl $2,%edi
negl %edi
leal -32(%esp,%edi,4),%esp /* room for t[num+2] + frame */
negl %edi
/* pseudo-randomize the frame position vs bp, then 64-byte align */
movl %esp,%eax
subl %edx,%eax
andl $2047,%eax
subl %eax,%esp
xorl %esp,%edx
andl $2048,%edx
xorl $2048,%edx
subl %edx,%esp
andl $-64,%esp
/* touch every page of the new frame, top down */
movl %ebp,%eax
subl %esp,%eax
andl $-4096,%eax
.L001page_walk:
movl (%esp,%eax,1),%edx
subl $4096,%eax
.byte 46 /* 0x2e (%cs) prefix; NOTE(review): perlasm-emitted branch hint/padding */
jnc .L001page_walk
/* copy the arguments into the new frame */
movl (%esi),%eax /* rp */
movl 4(%esi),%ebx /* ap */
movl 8(%esi),%ecx /* bp */
movl 12(%esi),%edx /* np */
movl 16(%esi),%esi /* n0 ptr */
movl (%esi),%esi /* n0 word */
movl %eax,4(%esp)
movl %ebx,8(%esp)
movl %ecx,12(%esp)
movl %edx,16(%esp)
movl %esi,20(%esp)
leal -3(%edi),%ebx /* ebx = num - 1 */
movl %ebp,24(%esp) /* saved caller esp */
/* PIC access to OPENSSL_ia32cap_P via call/pop get-PC idiom */
call .L002PIC_me_up
.L002PIC_me_up:
popl %eax
leal OPENSSL_ia32cap_P-.L002PIC_me_up(%eax),%eax
btl $26,(%eax) /* SSE2? */
jnc .L003non_sse2
/* ---- SSE2 path: 32x32->64 pmuludq Montgomery loops ---- */
movl $-1,%eax
movd %eax,%mm7 /* mm7 = 0xffffffff low-word mask */
movl 8(%esp),%esi /* esi = ap */
movl 12(%esp),%edi /* edi = bp */
movl 16(%esp),%ebp /* ebp = np */
xorl %edx,%edx /* edx = outer index i */
xorl %ecx,%ecx /* ecx = inner index j */
movd (%edi),%mm4 /* mm4 = bp[0] */
movd (%esi),%mm5 /* mm5 = ap[0] */
movd (%ebp),%mm3 /* mm3 = np[0] */
pmuludq %mm4,%mm5 /* ap[0]*bp[0] */
movq %mm5,%mm2 /* mm2 = carry (high) chain */
movq %mm5,%mm0
pand %mm7,%mm0
pmuludq 20(%esp),%mm5 /* mm5 = m = lo * n0 */
pmuludq %mm5,%mm3 /* m * np[0] */
paddq %mm0,%mm3 /* + low word (drops to zero mod 2^32) */
movd 4(%ebp),%mm1
movd 4(%esi),%mm0
psrlq $32,%mm2
psrlq $32,%mm3
incl %ecx
.align 16
.L0041st: /* first pass: t[] = ap * bp[0], reduced by m*np */
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
pand %mm7,%mm0
movd 4(%ebp,%ecx,4),%mm1
paddq %mm0,%mm3
movd 4(%esi,%ecx,4),%mm0
psrlq $32,%mm2
movd %mm3,28(%esp,%ecx,4) /* t[j-1] */
psrlq $32,%mm3
leal 1(%ecx),%ecx
cmpl %ebx,%ecx
jl .L0041st
/* last iteration peeled, then store the top carry words */
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
pand %mm7,%mm0
paddq %mm0,%mm3
movd %mm3,28(%esp,%ecx,4)
psrlq $32,%mm2
psrlq $32,%mm3
paddq %mm2,%mm3
movq %mm3,32(%esp,%ebx,4) /* t[num-1..num] */
incl %edx
.L005outer: /* remaining num-1 outer iterations over bp[i] */
xorl %ecx,%ecx
movd (%edi,%edx,4),%mm4 /* bp[i] */
movd (%esi),%mm5 /* ap[0] */
movd 32(%esp),%mm6 /* t[0] */
movd (%ebp),%mm3 /* np[0] */
pmuludq %mm4,%mm5
paddq %mm6,%mm5 /* ap[0]*bp[i] + t[0] */
movq %mm5,%mm0
movq %mm5,%mm2
pand %mm7,%mm0
pmuludq 20(%esp),%mm5 /* m = lo * n0 */
pmuludq %mm5,%mm3
paddq %mm0,%mm3
movd 36(%esp),%mm6 /* t[1] */
movd 4(%ebp),%mm1
movd 4(%esi),%mm0
psrlq $32,%mm2
psrlq $32,%mm3
paddq %mm6,%mm2
incl %ecx
decl %ebx
.L006inner: /* inner: t[] += ap*bp[i] + m*np, shifted down a word */
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
movd 36(%esp,%ecx,4),%mm6
pand %mm7,%mm0
movd 4(%ebp,%ecx,4),%mm1
paddq %mm0,%mm3
movd 4(%esi,%ecx,4),%mm0
psrlq $32,%mm2
movd %mm3,28(%esp,%ecx,4)
psrlq $32,%mm3
paddq %mm6,%mm2
decl %ebx
leal 1(%ecx),%ecx
jnz .L006inner
movl %ecx,%ebx
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
pand %mm7,%mm0
paddq %mm0,%mm3
movd %mm3,28(%esp,%ecx,4)
psrlq $32,%mm2
psrlq $32,%mm3
movd 36(%esp,%ebx,4),%mm6 /* previous top carry */
paddq %mm2,%mm3
paddq %mm6,%mm3
movq %mm3,32(%esp,%ebx,4)
leal 1(%edx),%edx
cmpl %ebx,%edx
jle .L005outer
emms /* leave MMX state clean for the FPU */
jmp .L007common_tail
.align 16
/* ---- generic IALU path ---- */
.L003non_sse2:
movl 8(%esp),%esi /* esi = ap */
leal 1(%ebx),%ebp
movl 12(%esp),%edi /* edi = bp */
xorl %ecx,%ecx
movl %esi,%edx
andl $1,%ebp
subl %edi,%edx
leal 4(%edi,%ebx,4),%eax /* eax = &bp[num] sentinel */
orl %edx,%ebp /* ap==bp and even num? -> squaring */
movl (%edi),%edi /* edi = bp[0] */
jz .L008bn_sqr_mont
movl %eax,28(%esp) /* save sentinel */
movl (%esi),%eax
xorl %edx,%edx
.align 16
.L009mull: /* t[] = ap * bp[0] */
movl %edx,%ebp
mull %edi /* edx:eax = ap[j] * bp[0] */
addl %eax,%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
movl (%esi,%ecx,4),%eax
cmpl %ebx,%ecx
movl %ebp,28(%esp,%ecx,4) /* t[j] */
jl .L009mull
/* finish row, compute m = t[0]*n0 and start reduction */
movl %edx,%ebp
mull %edi
movl 20(%esp),%edi /* edi = n0 */
addl %ebp,%eax
movl 16(%esp),%esi /* esi = np */
adcl $0,%edx
imull 32(%esp),%edi /* m = t[0] * n0 (mod 2^32) */
movl %eax,32(%esp,%ebx,4) /* t[num-1] */
xorl %ecx,%ecx
movl %edx,36(%esp,%ebx,4) /* t[num] */
movl %ecx,40(%esp,%ebx,4) /* t[num+1] = 0 */
movl (%esi),%eax
mull %edi /* m * np[0] */
addl 32(%esp),%eax /* + t[0]: low word dies */
movl 4(%esi),%eax
adcl $0,%edx
incl %ecx
jmp .L0102ndmadd
.align 16
.L0111stmadd: /* t[] += ap * bp[i] */
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,28(%esp,%ecx,4)
jl .L0111stmadd
/* finish row; m = t[0]*n0; seed the reduction */
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%eax
movl 20(%esp),%edi /* n0 */
adcl $0,%edx
movl 16(%esp),%esi /* np */
addl %eax,%ebp
adcl $0,%edx
imull 32(%esp),%edi /* m */
xorl %ecx,%ecx
addl 36(%esp,%ebx,4),%edx
movl %ebp,32(%esp,%ebx,4)
adcl $0,%ecx
movl (%esi),%eax
movl %edx,36(%esp,%ebx,4)
movl %ecx,40(%esp,%ebx,4)
mull %edi /* m * np[0] */
addl 32(%esp),%eax
movl 4(%esi),%eax
adcl $0,%edx
movl $1,%ecx
.align 16
.L0102ndmadd: /* t[] = (t[] + m*np) >> 32 */
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,24(%esp,%ecx,4) /* note: shifted down one slot */
jl .L0102ndmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
adcl $0,%edx
movl %ebp,28(%esp,%ebx,4)
xorl %eax,%eax
movl 12(%esp),%ecx /* ecx = current bp pointer */
addl 36(%esp,%ebx,4),%edx
adcl 40(%esp,%ebx,4),%eax
leal 4(%ecx),%ecx /* advance bp */
movl %edx,32(%esp,%ebx,4)
cmpl 28(%esp),%ecx /* reached &bp[num]? */
movl %eax,36(%esp,%ebx,4)
je .L007common_tail
movl (%ecx),%edi /* edi = next bp[i] */
movl 8(%esp),%esi /* esi = ap */
movl %ecx,12(%esp)
xorl %ecx,%ecx
xorl %edx,%edx
movl (%esi),%eax
jmp .L0111stmadd
.align 16
/* ---- squaring path (ap == bp) ---- */
.L008bn_sqr_mont:
movl %ebx,(%esp)
movl %ecx,12(%esp)
movl %edi,%eax /* eax = ap[0] */
mull %edi /* ap[0]^2 */
movl %eax,32(%esp)
movl %edx,%ebx
shrl $1,%edx
andl $1,%ebx /* carry bit for doubling */
incl %ecx
.align 16
.L012sqr: /* t[] = 2 * ap[j] * ap[0] (doubled cross terms) */
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
leal 1(%ecx),%ecx
adcl $0,%edx
leal (%ebx,%eax,2),%ebp /* double + saved bit */
shrl $31,%eax
cmpl (%esp),%ecx
movl %eax,%ebx
movl %ebp,28(%esp,%ecx,4)
jl .L012sqr
/* finish row; m = t[0]*n0; seed reduction */
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
movl 20(%esp),%edi /* n0 */
adcl $0,%edx
movl 16(%esp),%esi /* np */
leal (%ebx,%eax,2),%ebp
imull 32(%esp),%edi /* m */
shrl $31,%eax
movl %ebp,32(%esp,%ecx,4)
leal (%eax,%edx,2),%ebp
movl (%esi),%eax
shrl $31,%edx
movl %ebp,36(%esp,%ecx,4)
movl %edx,40(%esp,%ecx,4)
mull %edi /* m * np[0] */
addl 32(%esp),%eax
movl %ecx,%ebx
adcl $0,%edx
movl 4(%esi),%eax
movl $1,%ecx
.align 16
.L0133rdmadd: /* reduction: t[] = (t[] + m*np) >> 32, 2 words/iter */
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
movl 4(%esi,%ecx,4),%eax
adcl $0,%edx
movl %ebp,28(%esp,%ecx,4)
movl %edx,%ebp
mull %edi
addl 36(%esp,%ecx,4),%ebp
leal 2(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,24(%esp,%ecx,4)
jl .L0133rdmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
adcl $0,%edx
movl %ebp,28(%esp,%ebx,4)
movl 12(%esp),%ecx /* ecx = outer index i */
xorl %eax,%eax
movl 8(%esp),%esi /* esi = ap */
addl 36(%esp,%ebx,4),%edx
adcl 40(%esp,%ebx,4),%eax
movl %edx,32(%esp,%ebx,4)
cmpl %ebx,%ecx
movl %eax,36(%esp,%ebx,4)
je .L007common_tail
/* next squaring outer iteration: ap[i]^2 plus doubled cross terms */
movl 4(%esi,%ecx,4),%edi /* edi = ap[i] */
leal 1(%ecx),%ecx
movl %edi,%eax
movl %ecx,12(%esp)
mull %edi /* ap[i]^2 */
addl 32(%esp,%ecx,4),%eax
adcl $0,%edx
movl %eax,32(%esp,%ecx,4)
xorl %ebp,%ebp
cmpl %ebx,%ecx
leal 1(%ecx),%ecx
je .L014sqrlast
movl %edx,%ebx
shrl $1,%edx
andl $1,%ebx
.align 16
.L015sqradd: /* doubled cross products for the remaining words */
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
leal (%eax,%eax,1),%ebp
adcl $0,%edx
shrl $31,%eax
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%eax
addl %ebx,%ebp
adcl $0,%eax
cmpl (%esp),%ecx
movl %ebp,28(%esp,%ecx,4)
movl %eax,%ebx
jle .L015sqradd
movl %edx,%ebp
addl %edx,%edx
shrl $31,%ebp
addl %ebx,%edx
adcl $0,%ebp
.L014sqrlast: /* fold the last carry, then reduce via .L0133rdmadd */
movl 20(%esp),%edi /* n0 */
movl 16(%esp),%esi /* np */
imull 32(%esp),%edi /* m */
addl 32(%esp,%ecx,4),%edx
movl (%esi),%eax
adcl $0,%ebp
movl %edx,32(%esp,%ecx,4)
movl %ebp,36(%esp,%ecx,4)
mull %edi /* m * np[0] */
addl 32(%esp),%eax
leal -1(%ecx),%ebx
adcl $0,%edx
movl $1,%ecx
movl 4(%esi),%eax
jmp .L0133rdmadd
.align 16
.L007common_tail:
/* conditionally subtract np, then select t or t-np without a branch */
movl 16(%esp),%ebp /* ebp = np */
movl 4(%esp),%edi /* edi = rp */
leal 32(%esp),%esi /* esi = t[] */
movl (%esi),%eax
movl %ebx,%ecx /* ecx = num - 1 */
xorl %edx,%edx
.align 16
.L016sub: /* rp = t - np (with borrow chain) */
sbbl (%ebp,%edx,4),%eax
movl %eax,(%edi,%edx,4)
decl %ecx
movl 4(%esi,%edx,4),%eax
leal 1(%edx),%edx
jge .L016sub
sbbl $0,%eax /* eax = 0 if t >= np, else -1 */
andl %eax,%esi /* masked select: esi = borrow ? t : rp */
notl %eax
movl %edi,%ebp
andl %eax,%ebp
orl %ebp,%esi
.align 16
.L017copy: /* copy the selected value into rp, wipe t[] */
movl (%esi,%ebx,4),%eax
movl %eax,(%edi,%ebx,4)
movl %ecx,32(%esp,%ebx,4) /* clear scratch */
decl %ebx
jge .L017copy
movl 24(%esp),%esp /* restore caller stack */
movl $1,%eax /* return 1: handled here */
.L000just_leave:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size bn_mul_mont,.-.L_bn_mul_mont_begin
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
.byte 112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
.byte 54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
.byte 32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
.byte 111,114,103,62,0
.comm OPENSSL_ia32cap_P,16,4
#else
.file "x86-mont.S"
.text
/*
 * bn_mul_mont -- Montgomery multiplication/reduction, i386, non-PIC
 * variant (this is the #else branch of the PIC conditional).
 *
 * Machine-generated perlasm output -- presumably from OpenSSL's
 * crypto/bn/asm/x86-mont.pl; do not hand-edit the instruction stream.
 *
 * Takes six stack arguments (rp, ap, bp, np, n0, num per OpenSSL's
 * bn_mul_mont contract -- TODO confirm against bn.h).  The sixth,
 * num, is read from 40(%esp) and checked first: if num < 4 the routine
 * returns 0 in %eax so the C caller can fall back to the generic code,
 * otherwise it computes the product and returns 1.
 */
.globl bn_mul_mont
.type bn_mul_mont,@function
.align 16
bn_mul_mont:
.L_bn_mul_mont_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %eax,%eax
movl 40(%esp),%edi
cmpl $4,%edi
jl .L000just_leave
/*
 * Carve an aligned scratch frame below %esp: room for num+2 words plus
 * 32 bytes of header, aligned down to 64 bytes; the five remaining
 * arguments and the caller's %esp are copied into the header so the
 * rest of the code addresses everything relative to the new %esp.
 */
leal 20(%esp),%esi
leal 24(%esp),%edx
movl %esp,%ebp
addl $2,%edi
negl %edi
leal -32(%esp,%edi,4),%esp
negl %edi
movl %esp,%eax
subl %edx,%eax
andl $2047,%eax
subl %eax,%esp
xorl %esp,%edx
andl $2048,%edx
xorl $2048,%edx
subl %edx,%esp
andl $-64,%esp
movl %ebp,%eax
subl %esp,%eax
andl $-4096,%eax
/* Touch each 4K page of the new frame top-down so the stack grows safely. */
.L001page_walk:
movl (%esp,%eax,1),%edx
subl $4096,%eax
.byte 46
jnc .L001page_walk
movl (%esi),%eax
movl 4(%esi),%ebx
movl 8(%esi),%ecx
movl 12(%esi),%edx
movl 16(%esi),%esi
movl (%esi),%esi
movl %eax,4(%esp)
movl %ebx,8(%esp)
movl %ecx,12(%esp)
movl %edx,16(%esp)
movl %esi,20(%esp)
leal -3(%edi),%ebx
movl %ebp,24(%esp)
/*
 * Dispatch on OPENSSL_ia32cap_P bit 26 (SSE2, non-PIC absolute access):
 * MMX/SSE2 pmuludq path below, plain integer path at .L002non_sse2.
 */
leal OPENSSL_ia32cap_P,%eax
btl $26,(%eax)
jnc .L002non_sse2
movl $-1,%eax
movd %eax,%mm7
movl 8(%esp),%esi
movl 12(%esp),%edi
movl 16(%esp),%ebp
xorl %edx,%edx
xorl %ecx,%ecx
movd (%edi),%mm4
movd (%esi),%mm5
movd (%ebp),%mm3
pmuludq %mm4,%mm5
movq %mm5,%mm2
movq %mm5,%mm0
pand %mm7,%mm0
pmuludq 20(%esp),%mm5
pmuludq %mm5,%mm3
paddq %mm0,%mm3
movd 4(%ebp),%mm1
movd 4(%esi),%mm0
psrlq $32,%mm2
psrlq $32,%mm3
incl %ecx
.align 16
/* SSE2: first outer iteration -- multiply-and-reduce, one word per pass. */
.L0031st:
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
pand %mm7,%mm0
movd 4(%ebp,%ecx,4),%mm1
paddq %mm0,%mm3
movd 4(%esi,%ecx,4),%mm0
psrlq $32,%mm2
movd %mm3,28(%esp,%ecx,4)
psrlq $32,%mm3
leal 1(%ecx),%ecx
cmpl %ebx,%ecx
jl .L0031st
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
pand %mm7,%mm0
paddq %mm0,%mm3
movd %mm3,28(%esp,%ecx,4)
psrlq $32,%mm2
psrlq $32,%mm3
paddq %mm2,%mm3
movq %mm3,32(%esp,%ebx,4)
incl %edx
/* SSE2: remaining outer iterations; %edx indexes the bp[] word. */
.L004outer:
xorl %ecx,%ecx
movd (%edi,%edx,4),%mm4
movd (%esi),%mm5
movd 32(%esp),%mm6
movd (%ebp),%mm3
pmuludq %mm4,%mm5
paddq %mm6,%mm5
movq %mm5,%mm0
movq %mm5,%mm2
pand %mm7,%mm0
pmuludq 20(%esp),%mm5
pmuludq %mm5,%mm3
paddq %mm0,%mm3
movd 36(%esp),%mm6
movd 4(%ebp),%mm1
movd 4(%esi),%mm0
psrlq $32,%mm2
psrlq $32,%mm3
paddq %mm6,%mm2
incl %ecx
decl %ebx
/* SSE2: inner multiply-accumulate over the remaining words. */
.L005inner:
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
movd 36(%esp,%ecx,4),%mm6
pand %mm7,%mm0
movd 4(%ebp,%ecx,4),%mm1
paddq %mm0,%mm3
movd 4(%esi,%ecx,4),%mm0
psrlq $32,%mm2
movd %mm3,28(%esp,%ecx,4)
psrlq $32,%mm3
paddq %mm6,%mm2
decl %ebx
leal 1(%ecx),%ecx
jnz .L005inner
movl %ecx,%ebx
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
pand %mm7,%mm0
paddq %mm0,%mm3
movd %mm3,28(%esp,%ecx,4)
psrlq $32,%mm2
psrlq $32,%mm3
movd 36(%esp,%ebx,4),%mm6
paddq %mm2,%mm3
paddq %mm6,%mm3
movq %mm3,32(%esp,%ebx,4)
leal 1(%edx),%edx
cmpl %ebx,%edx
jle .L004outer
emms
jmp .L006common_tail
.align 16
/*
 * Integer fallback.  If ap == bp (detected below) use the dedicated
 * squaring code at .L007bn_sqr_mont, else the mul/madd loops.
 */
.L002non_sse2:
movl 8(%esp),%esi
leal 1(%ebx),%ebp
movl 12(%esp),%edi
xorl %ecx,%ecx
movl %esi,%edx
andl $1,%ebp
subl %edi,%edx
leal 4(%edi,%ebx,4),%eax
orl %edx,%ebp
movl (%edi),%edi
jz .L007bn_sqr_mont
movl %eax,28(%esp)
movl (%esi),%eax
xorl %edx,%edx
.align 16
/* ap[] * bp[0]: seed the accumulator row. */
.L008mull:
movl %edx,%ebp
mull %edi
addl %eax,%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
movl (%esi,%ecx,4),%eax
cmpl %ebx,%ecx
movl %ebp,28(%esp,%ecx,4)
jl .L008mull
movl %edx,%ebp
mull %edi
movl 20(%esp),%edi
addl %ebp,%eax
movl 16(%esp),%esi
adcl $0,%edx
imull 32(%esp),%edi
movl %eax,32(%esp,%ebx,4)
xorl %ecx,%ecx
movl %edx,36(%esp,%ebx,4)
movl %ecx,40(%esp,%ebx,4)
movl (%esi),%eax
mull %edi
addl 32(%esp),%eax
movl 4(%esi),%eax
adcl $0,%edx
incl %ecx
jmp .L0092ndmadd
.align 16
/* Accumulate ap[] * bp[i] into the result row. */
.L0101stmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,28(%esp,%ecx,4)
jl .L0101stmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%eax
movl 20(%esp),%edi
adcl $0,%edx
movl 16(%esp),%esi
addl %eax,%ebp
adcl $0,%edx
imull 32(%esp),%edi
xorl %ecx,%ecx
addl 36(%esp,%ebx,4),%edx
movl %ebp,32(%esp,%ebx,4)
adcl $0,%ecx
movl (%esi),%eax
movl %edx,36(%esp,%ebx,4)
movl %ecx,40(%esp,%ebx,4)
mull %edi
addl 32(%esp),%eax
movl 4(%esi),%eax
adcl $0,%edx
movl $1,%ecx
.align 16
/* Montgomery reduction pass: add np[] * m and shift the row down a word. */
.L0092ndmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,24(%esp,%ecx,4)
jl .L0092ndmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
adcl $0,%edx
movl %ebp,28(%esp,%ebx,4)
xorl %eax,%eax
movl 12(%esp),%ecx
addl 36(%esp,%ebx,4),%edx
adcl 40(%esp,%ebx,4),%eax
leal 4(%ecx),%ecx
movl %edx,32(%esp,%ebx,4)
cmpl 28(%esp),%ecx
movl %eax,36(%esp,%ebx,4)
je .L006common_tail
movl (%ecx),%edi
movl 8(%esp),%esi
movl %ecx,12(%esp)
xorl %ecx,%ecx
xorl %edx,%edx
movl (%esi),%eax
jmp .L0101stmadd
.align 16
/* Dedicated squaring path (ap == bp): exploits symmetry of the products. */
.L007bn_sqr_mont:
movl %ebx,(%esp)
movl %ecx,12(%esp)
movl %edi,%eax
mull %edi
movl %eax,32(%esp)
movl %edx,%ebx
shrl $1,%edx
andl $1,%ebx
incl %ecx
.align 16
.L011sqr:
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
leal 1(%ecx),%ecx
adcl $0,%edx
leal (%ebx,%eax,2),%ebp
shrl $31,%eax
cmpl (%esp),%ecx
movl %eax,%ebx
movl %ebp,28(%esp,%ecx,4)
jl .L011sqr
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
movl 20(%esp),%edi
adcl $0,%edx
movl 16(%esp),%esi
leal (%ebx,%eax,2),%ebp
imull 32(%esp),%edi
shrl $31,%eax
movl %ebp,32(%esp,%ecx,4)
leal (%eax,%edx,2),%ebp
movl (%esi),%eax
shrl $31,%edx
movl %ebp,36(%esp,%ecx,4)
movl %edx,40(%esp,%ecx,4)
mull %edi
addl 32(%esp),%eax
movl %ecx,%ebx
adcl $0,%edx
movl 4(%esi),%eax
movl $1,%ecx
.align 16
/* Reduction for the squaring path, two words per iteration. */
.L0123rdmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
movl 4(%esi,%ecx,4),%eax
adcl $0,%edx
movl %ebp,28(%esp,%ecx,4)
movl %edx,%ebp
mull %edi
addl 36(%esp,%ecx,4),%ebp
leal 2(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,24(%esp,%ecx,4)
jl .L0123rdmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
adcl $0,%edx
movl %ebp,28(%esp,%ebx,4)
movl 12(%esp),%ecx
xorl %eax,%eax
movl 8(%esp),%esi
addl 36(%esp,%ebx,4),%edx
adcl 40(%esp,%ebx,4),%eax
movl %edx,32(%esp,%ebx,4)
cmpl %ebx,%ecx
movl %eax,36(%esp,%ebx,4)
je .L006common_tail
movl 4(%esi,%ecx,4),%edi
leal 1(%ecx),%ecx
movl %edi,%eax
movl %ecx,12(%esp)
mull %edi
addl 32(%esp,%ecx,4),%eax
adcl $0,%edx
movl %eax,32(%esp,%ecx,4)
xorl %ebp,%ebp
cmpl %ebx,%ecx
leal 1(%ecx),%ecx
je .L013sqrlast
movl %edx,%ebx
shrl $1,%edx
andl $1,%ebx
.align 16
/* Cross products for the squaring path: 2*ap[i]*ap[j] with carry split. */
.L014sqradd:
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
leal (%eax,%eax,1),%ebp
adcl $0,%edx
shrl $31,%eax
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%eax
addl %ebx,%ebp
adcl $0,%eax
cmpl (%esp),%ecx
movl %ebp,28(%esp,%ecx,4)
movl %eax,%ebx
jle .L014sqradd
movl %edx,%ebp
addl %edx,%edx
shrl $31,%ebp
addl %ebx,%edx
adcl $0,%ebp
.L013sqrlast:
movl 20(%esp),%edi
movl 16(%esp),%esi
imull 32(%esp),%edi
addl 32(%esp,%ecx,4),%edx
movl (%esi),%eax
adcl $0,%ebp
movl %edx,32(%esp,%ecx,4)
movl %ebp,36(%esp,%ecx,4)
mull %edi
addl 32(%esp),%eax
leal -1(%ecx),%ebx
adcl $0,%edx
movl $1,%ecx
movl 4(%esi),%eax
jmp .L0123rdmadd
.align 16
/*
 * Common tail: conditionally subtract the modulus, then copy either the
 * subtracted or the unsubtracted value to rp[] without branching on the
 * comparison result (constant-time-style select via mask in %eax).
 */
.L006common_tail:
movl 16(%esp),%ebp
movl 4(%esp),%edi
leal 32(%esp),%esi
movl (%esi),%eax
movl %ebx,%ecx
xorl %edx,%edx
.align 16
.L015sub:
sbbl (%ebp,%edx,4),%eax
movl %eax,(%edi,%edx,4)
decl %ecx
movl 4(%esi,%edx,4),%eax
leal 1(%edx),%edx
jge .L015sub
sbbl $0,%eax
andl %eax,%esi
notl %eax
movl %edi,%ebp
andl %eax,%ebp
orl %ebp,%esi
.align 16
/* Copy the selected source to rp[] and wipe the scratch row. */
.L016copy:
movl (%esi,%ebx,4),%eax
movl %eax,(%edi,%ebx,4)
movl %ecx,32(%esp,%ebx,4)
decl %ebx
jge .L016copy
movl 24(%esp),%esp
movl $1,%eax
.L000just_leave:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size bn_mul_mont,.-.L_bn_mul_mont_begin
/* ASCII: "Montgomery Multiplication for x86, CRYPTOGAMS by <appro@openssl.org>" */
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
.byte 112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
.byte 54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
.byte 32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
.byte 111,114,103,62,0
/* Common symbol: 16-byte OPENSSL_ia32cap_P capability vector, 4-byte aligned. */
.comm OPENSSL_ia32cap_P,16,4
#endif

View File

@ -1,465 +0,0 @@
# $FreeBSD$
.file "x86-mont.s"
.text
/*
 * bn_mul_mont -- Montgomery multiplication/reduction, i386 (this is the
 * old standalone x86-mont.s being removed by the commit; instructions
 * are identical to the non-PIC branch of the new x86-mont.S).
 *
 * Machine-generated perlasm output -- presumably from OpenSSL's
 * crypto/bn/asm/x86-mont.pl; do not hand-edit the instruction stream.
 *
 * Takes six stack arguments (rp, ap, bp, np, n0, num per OpenSSL's
 * bn_mul_mont contract -- TODO confirm against bn.h).  The sixth,
 * num, is read from 40(%esp) and checked first: if num < 4 the routine
 * returns 0 in %eax so the C caller can fall back to the generic code,
 * otherwise it computes the product and returns 1.
 */
.globl bn_mul_mont
.type bn_mul_mont,@function
.align 16
bn_mul_mont:
.L_bn_mul_mont_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %eax,%eax
movl 40(%esp),%edi
cmpl $4,%edi
jl .L000just_leave
/*
 * Carve an aligned scratch frame below %esp: room for num+2 words plus
 * 32 bytes of header, aligned down to 64 bytes; the five remaining
 * arguments and the caller's %esp are copied into the header so the
 * rest of the code addresses everything relative to the new %esp.
 */
leal 20(%esp),%esi
leal 24(%esp),%edx
movl %esp,%ebp
addl $2,%edi
negl %edi
leal -32(%esp,%edi,4),%esp
negl %edi
movl %esp,%eax
subl %edx,%eax
andl $2047,%eax
subl %eax,%esp
xorl %esp,%edx
andl $2048,%edx
xorl $2048,%edx
subl %edx,%esp
andl $-64,%esp
movl %ebp,%eax
subl %esp,%eax
andl $-4096,%eax
/* Touch each 4K page of the new frame top-down so the stack grows safely. */
.L001page_walk:
movl (%esp,%eax,1),%edx
subl $4096,%eax
.byte 46
jnc .L001page_walk
movl (%esi),%eax
movl 4(%esi),%ebx
movl 8(%esi),%ecx
movl 12(%esi),%edx
movl 16(%esi),%esi
movl (%esi),%esi
movl %eax,4(%esp)
movl %ebx,8(%esp)
movl %ecx,12(%esp)
movl %edx,16(%esp)
movl %esi,20(%esp)
leal -3(%edi),%ebx
movl %ebp,24(%esp)
/*
 * Dispatch on OPENSSL_ia32cap_P bit 26 (SSE2, absolute address -- this
 * is what makes the old .s file non-PIC): MMX/SSE2 pmuludq path below,
 * plain integer path at .L002non_sse2.
 */
leal OPENSSL_ia32cap_P,%eax
btl $26,(%eax)
jnc .L002non_sse2
movl $-1,%eax
movd %eax,%mm7
movl 8(%esp),%esi
movl 12(%esp),%edi
movl 16(%esp),%ebp
xorl %edx,%edx
xorl %ecx,%ecx
movd (%edi),%mm4
movd (%esi),%mm5
movd (%ebp),%mm3
pmuludq %mm4,%mm5
movq %mm5,%mm2
movq %mm5,%mm0
pand %mm7,%mm0
pmuludq 20(%esp),%mm5
pmuludq %mm5,%mm3
paddq %mm0,%mm3
movd 4(%ebp),%mm1
movd 4(%esi),%mm0
psrlq $32,%mm2
psrlq $32,%mm3
incl %ecx
.align 16
/* SSE2: first outer iteration -- multiply-and-reduce, one word per pass. */
.L0031st:
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
pand %mm7,%mm0
movd 4(%ebp,%ecx,4),%mm1
paddq %mm0,%mm3
movd 4(%esi,%ecx,4),%mm0
psrlq $32,%mm2
movd %mm3,28(%esp,%ecx,4)
psrlq $32,%mm3
leal 1(%ecx),%ecx
cmpl %ebx,%ecx
jl .L0031st
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
pand %mm7,%mm0
paddq %mm0,%mm3
movd %mm3,28(%esp,%ecx,4)
psrlq $32,%mm2
psrlq $32,%mm3
paddq %mm2,%mm3
movq %mm3,32(%esp,%ebx,4)
incl %edx
/* SSE2: remaining outer iterations; %edx indexes the bp[] word. */
.L004outer:
xorl %ecx,%ecx
movd (%edi,%edx,4),%mm4
movd (%esi),%mm5
movd 32(%esp),%mm6
movd (%ebp),%mm3
pmuludq %mm4,%mm5
paddq %mm6,%mm5
movq %mm5,%mm0
movq %mm5,%mm2
pand %mm7,%mm0
pmuludq 20(%esp),%mm5
pmuludq %mm5,%mm3
paddq %mm0,%mm3
movd 36(%esp),%mm6
movd 4(%ebp),%mm1
movd 4(%esi),%mm0
psrlq $32,%mm2
psrlq $32,%mm3
paddq %mm6,%mm2
incl %ecx
decl %ebx
/* SSE2: inner multiply-accumulate over the remaining words. */
.L005inner:
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
movd 36(%esp,%ecx,4),%mm6
pand %mm7,%mm0
movd 4(%ebp,%ecx,4),%mm1
paddq %mm0,%mm3
movd 4(%esi,%ecx,4),%mm0
psrlq $32,%mm2
movd %mm3,28(%esp,%ecx,4)
psrlq $32,%mm3
paddq %mm6,%mm2
decl %ebx
leal 1(%ecx),%ecx
jnz .L005inner
movl %ecx,%ebx
pmuludq %mm4,%mm0
pmuludq %mm5,%mm1
paddq %mm0,%mm2
paddq %mm1,%mm3
movq %mm2,%mm0
pand %mm7,%mm0
paddq %mm0,%mm3
movd %mm3,28(%esp,%ecx,4)
psrlq $32,%mm2
psrlq $32,%mm3
movd 36(%esp,%ebx,4),%mm6
paddq %mm2,%mm3
paddq %mm6,%mm3
movq %mm3,32(%esp,%ebx,4)
leal 1(%edx),%edx
cmpl %ebx,%edx
jle .L004outer
emms
jmp .L006common_tail
.align 16
/*
 * Integer fallback.  If ap == bp (detected below) use the dedicated
 * squaring code at .L007bn_sqr_mont, else the mul/madd loops.
 */
.L002non_sse2:
movl 8(%esp),%esi
leal 1(%ebx),%ebp
movl 12(%esp),%edi
xorl %ecx,%ecx
movl %esi,%edx
andl $1,%ebp
subl %edi,%edx
leal 4(%edi,%ebx,4),%eax
orl %edx,%ebp
movl (%edi),%edi
jz .L007bn_sqr_mont
movl %eax,28(%esp)
movl (%esi),%eax
xorl %edx,%edx
.align 16
/* ap[] * bp[0]: seed the accumulator row. */
.L008mull:
movl %edx,%ebp
mull %edi
addl %eax,%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
movl (%esi,%ecx,4),%eax
cmpl %ebx,%ecx
movl %ebp,28(%esp,%ecx,4)
jl .L008mull
movl %edx,%ebp
mull %edi
movl 20(%esp),%edi
addl %ebp,%eax
movl 16(%esp),%esi
adcl $0,%edx
imull 32(%esp),%edi
movl %eax,32(%esp,%ebx,4)
xorl %ecx,%ecx
movl %edx,36(%esp,%ebx,4)
movl %ecx,40(%esp,%ebx,4)
movl (%esi),%eax
mull %edi
addl 32(%esp),%eax
movl 4(%esi),%eax
adcl $0,%edx
incl %ecx
jmp .L0092ndmadd
.align 16
/* Accumulate ap[] * bp[i] into the result row. */
.L0101stmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,28(%esp,%ecx,4)
jl .L0101stmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%eax
movl 20(%esp),%edi
adcl $0,%edx
movl 16(%esp),%esi
addl %eax,%ebp
adcl $0,%edx
imull 32(%esp),%edi
xorl %ecx,%ecx
addl 36(%esp,%ebx,4),%edx
movl %ebp,32(%esp,%ebx,4)
adcl $0,%ecx
movl (%esi),%eax
movl %edx,36(%esp,%ebx,4)
movl %ecx,40(%esp,%ebx,4)
mull %edi
addl 32(%esp),%eax
movl 4(%esi),%eax
adcl $0,%edx
movl $1,%ecx
.align 16
/* Montgomery reduction pass: add np[] * m and shift the row down a word. */
.L0092ndmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,24(%esp,%ecx,4)
jl .L0092ndmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
adcl $0,%edx
movl %ebp,28(%esp,%ebx,4)
xorl %eax,%eax
movl 12(%esp),%ecx
addl 36(%esp,%ebx,4),%edx
adcl 40(%esp,%ebx,4),%eax
leal 4(%ecx),%ecx
movl %edx,32(%esp,%ebx,4)
cmpl 28(%esp),%ecx
movl %eax,36(%esp,%ebx,4)
je .L006common_tail
movl (%ecx),%edi
movl 8(%esp),%esi
movl %ecx,12(%esp)
xorl %ecx,%ecx
xorl %edx,%edx
movl (%esi),%eax
jmp .L0101stmadd
.align 16
/* Dedicated squaring path (ap == bp): exploits symmetry of the products. */
.L007bn_sqr_mont:
movl %ebx,(%esp)
movl %ecx,12(%esp)
movl %edi,%eax
mull %edi
movl %eax,32(%esp)
movl %edx,%ebx
shrl $1,%edx
andl $1,%ebx
incl %ecx
.align 16
.L011sqr:
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
leal 1(%ecx),%ecx
adcl $0,%edx
leal (%ebx,%eax,2),%ebp
shrl $31,%eax
cmpl (%esp),%ecx
movl %eax,%ebx
movl %ebp,28(%esp,%ecx,4)
jl .L011sqr
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
movl 20(%esp),%edi
adcl $0,%edx
movl 16(%esp),%esi
leal (%ebx,%eax,2),%ebp
imull 32(%esp),%edi
shrl $31,%eax
movl %ebp,32(%esp,%ecx,4)
leal (%eax,%edx,2),%ebp
movl (%esi),%eax
shrl $31,%edx
movl %ebp,36(%esp,%ecx,4)
movl %edx,40(%esp,%ecx,4)
mull %edi
addl 32(%esp),%eax
movl %ecx,%ebx
adcl $0,%edx
movl 4(%esi),%eax
movl $1,%ecx
.align 16
/* Reduction for the squaring path, two words per iteration. */
.L0123rdmadd:
movl %edx,%ebp
mull %edi
addl 32(%esp,%ecx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
movl 4(%esi,%ecx,4),%eax
adcl $0,%edx
movl %ebp,28(%esp,%ecx,4)
movl %edx,%ebp
mull %edi
addl 36(%esp,%ecx,4),%ebp
leal 2(%ecx),%ecx
adcl $0,%edx
addl %eax,%ebp
movl (%esi,%ecx,4),%eax
adcl $0,%edx
cmpl %ebx,%ecx
movl %ebp,24(%esp,%ecx,4)
jl .L0123rdmadd
movl %edx,%ebp
mull %edi
addl 32(%esp,%ebx,4),%ebp
adcl $0,%edx
addl %eax,%ebp
adcl $0,%edx
movl %ebp,28(%esp,%ebx,4)
movl 12(%esp),%ecx
xorl %eax,%eax
movl 8(%esp),%esi
addl 36(%esp,%ebx,4),%edx
adcl 40(%esp,%ebx,4),%eax
movl %edx,32(%esp,%ebx,4)
cmpl %ebx,%ecx
movl %eax,36(%esp,%ebx,4)
je .L006common_tail
movl 4(%esi,%ecx,4),%edi
leal 1(%ecx),%ecx
movl %edi,%eax
movl %ecx,12(%esp)
mull %edi
addl 32(%esp,%ecx,4),%eax
adcl $0,%edx
movl %eax,32(%esp,%ecx,4)
xorl %ebp,%ebp
cmpl %ebx,%ecx
leal 1(%ecx),%ecx
je .L013sqrlast
movl %edx,%ebx
shrl $1,%edx
andl $1,%ebx
.align 16
/* Cross products for the squaring path: 2*ap[i]*ap[j] with carry split. */
.L014sqradd:
movl (%esi,%ecx,4),%eax
movl %edx,%ebp
mull %edi
addl %ebp,%eax
leal (%eax,%eax,1),%ebp
adcl $0,%edx
shrl $31,%eax
addl 32(%esp,%ecx,4),%ebp
leal 1(%ecx),%ecx
adcl $0,%eax
addl %ebx,%ebp
adcl $0,%eax
cmpl (%esp),%ecx
movl %ebp,28(%esp,%ecx,4)
movl %eax,%ebx
jle .L014sqradd
movl %edx,%ebp
addl %edx,%edx
shrl $31,%ebp
addl %ebx,%edx
adcl $0,%ebp
.L013sqrlast:
movl 20(%esp),%edi
movl 16(%esp),%esi
imull 32(%esp),%edi
addl 32(%esp,%ecx,4),%edx
movl (%esi),%eax
adcl $0,%ebp
movl %edx,32(%esp,%ecx,4)
movl %ebp,36(%esp,%ecx,4)
mull %edi
addl 32(%esp),%eax
leal -1(%ecx),%ebx
adcl $0,%edx
movl $1,%ecx
movl 4(%esi),%eax
jmp .L0123rdmadd
.align 16
/*
 * Common tail: conditionally subtract the modulus, then copy either the
 * subtracted or the unsubtracted value to rp[] without branching on the
 * comparison result (constant-time-style select via mask in %eax).
 */
.L006common_tail:
movl 16(%esp),%ebp
movl 4(%esp),%edi
leal 32(%esp),%esi
movl (%esi),%eax
movl %ebx,%ecx
xorl %edx,%edx
.align 16
.L015sub:
sbbl (%ebp,%edx,4),%eax
movl %eax,(%edi,%edx,4)
decl %ecx
movl 4(%esi,%edx,4),%eax
leal 1(%edx),%edx
jge .L015sub
sbbl $0,%eax
andl %eax,%esi
notl %eax
movl %edi,%ebp
andl %eax,%ebp
orl %ebp,%esi
.align 16
/* Copy the selected source to rp[] and wipe the scratch row. */
.L016copy:
movl (%esi,%ebx,4),%eax
movl %eax,(%edi,%ebx,4)
movl %ecx,32(%esp,%ebx,4)
decl %ebx
jge .L016copy
movl 24(%esp),%esp
movl $1,%eax
.L000just_leave:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size bn_mul_mont,.-.L_bn_mul_mont_begin
/* ASCII: "Montgomery Multiplication for x86, CRYPTOGAMS by <appro@openssl.org>" */
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
.byte 112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
.byte 54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
.byte 32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
.byte 111,114,103,62,0
/* Common symbol: 16-byte OPENSSL_ia32cap_P capability vector, 4-byte aligned. */
.comm OPENSSL_ia32cap_P,16,4

View File

@ -0,0 +1,739 @@
# $FreeBSD$
#ifdef PIC
.file "x86cpuid.S"
.text
/*
 * OPENSSL_ia32_cpuid (PIC variant; body contains no relocations, so it
 * is identical to the non-PIC one).  Machine-generated perlasm output,
 * presumably from OpenSSL's crypto/x86cpuid.pl.
 *
 * Probes CPUID and assembles OpenSSL's capability words.  Returns the
 * EDX-derived feature word in %eax and the ECX-derived word in %edx;
 * also stores leaf-7 extended feature bits (or 0) at offset 8 of the
 * buffer whose pointer is the first stack argument.
 */
.globl OPENSSL_ia32_cpuid
.type OPENSSL_ia32_cpuid,@function
.align 16
OPENSSL_ia32_cpuid:
.L_OPENSSL_ia32_cpuid_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %edx,%edx
/* Toggle EFLAGS.ID (bit 21) to check that CPUID is supported at all. */
pushfl
popl %eax
movl %eax,%ecx
xorl $2097152,%eax
pushl %eax
popfl
pushfl
popl %eax
xorl %eax,%ecx
xorl %eax,%eax
btl $21,%ecx
jnc .L000nocpuid
movl 20(%esp),%esi
movl %eax,8(%esi)
/* cpuid leaf 0: %eax = max leaf, %ebx:%edx:%ecx = vendor string. */
.byte 0x0f,0xa2
movl %eax,%edi
xorl %eax,%eax
/* Vendor check: "Genu" / "ineI" / "ntel". */
cmpl $1970169159,%ebx
setne %al
movl %eax,%ebp
cmpl $1231384169,%edx
setne %al
orl %eax,%ebp
cmpl $1818588270,%ecx
setne %al
orl %eax,%ebp
jz .L001intel
/* Vendor check: "Auth" / "enti" / "cAMD". */
cmpl $1752462657,%ebx
setne %al
movl %eax,%esi
cmpl $1769238117,%edx
setne %al
orl %eax,%esi
cmpl $1145913699,%ecx
setne %al
orl %eax,%esi
jnz .L001intel
/* AMD: walk the extended leaves 0x80000000/01/08. */
movl $2147483648,%eax
.byte 0x0f,0xa2
cmpl $2147483649,%eax
jb .L001intel
movl %eax,%esi
movl $2147483649,%eax
.byte 0x0f,0xa2
orl %ecx,%ebp
andl $2049,%ebp
cmpl $2147483656,%esi
jb .L001intel
movl $2147483656,%eax
.byte 0x0f,0xa2
movzbl %cl,%esi
incl %esi
movl $1,%eax
xorl %ecx,%ecx
.byte 0x0f,0xa2
btl $28,%edx
jnc .L002generic
/* Compare logical-processor count against core count to decide on HTT. */
shrl $16,%ebx
andl $255,%ebx
cmpl %esi,%ebx
ja .L002generic
andl $4026531839,%edx
jmp .L002generic
.L001intel:
cmpl $7,%edi
jb .L003cacheinfo
/* cpuid leaf 7: structured extended features, EBX stored at buf[2]. */
movl 20(%esp),%esi
movl $7,%eax
xorl %ecx,%ecx
.byte 0x0f,0xa2
movl %ebx,8(%esi)
.L003cacheinfo:
cmpl $4,%edi
movl $-1,%edi
jb .L004nocacheinfo
/* cpuid leaf 4: deterministic cache parameters (core count field). */
movl $4,%eax
movl $0,%ecx
.byte 0x0f,0xa2
movl %eax,%edi
shrl $14,%edi
andl $4095,%edi
.L004nocacheinfo:
/* cpuid leaf 1: the main feature words in %edx/%ecx. */
movl $1,%eax
xorl %ecx,%ecx
.byte 0x0f,0xa2
andl $3220176895,%edx
cmpl $0,%ebp
jne .L005notintel
/* Genuine Intel: set the synthetic "Intel CPU" bit (bit 30, per
   OpenSSL's ia32cap convention -- confirm against OPENSSL_ia32cap(3)). */
orl $1073741824,%edx
andb $15,%ah
cmpb $15,%ah
jne .L005notintel
orl $1048576,%edx
.L005notintel:
btl $28,%edx
jnc .L002generic
andl $4026531839,%edx
cmpl $0,%edi
je .L002generic
orl $268435456,%edx
shrl $16,%ebx
cmpb $1,%bl
ja .L002generic
andl $4026531839,%edx
.L002generic:
/* Merge the AMD extended-ECX bit (bit 11) into the ECX feature word. */
andl $2048,%ebp
andl $4294965247,%ecx
movl %edx,%esi
orl %ecx,%ebp
/* ECX bit 27 = OSXSAVE: if set, read XCR0 via xgetbv and require
   XMM+YMM state (mask 6) before advertising AVX. */
btl $27,%ecx
jnc .L006clear_avx
xorl %ecx,%ecx
.byte 15,1,208
andl $6,%eax
cmpl $6,%eax
je .L007done
cmpl $2,%eax
je .L006clear_avx
/* No XMM state: drop AES/PCLMULQDQ (ECX bits 25/1) and FXSR (EDX bit 24). */
.L008clear_xmm:
andl $4261412861,%ebp
andl $4278190079,%esi
/* No YMM state: drop FMA/AVX (ECX bits 12/28) and leaf-7 AVX2 (EBX bit 5). */
.L006clear_avx:
andl $4026525695,%ebp
movl 20(%esp),%edi
andl $4294967263,8(%edi)
.L007done:
movl %esi,%eax
movl %ebp,%edx
.L000nocpuid:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size OPENSSL_ia32_cpuid,.-.L_OPENSSL_ia32_cpuid_begin
/*
 * OPENSSL_rdtsc (PIC variant): return the time-stamp counter in
 * %edx:%eax if OPENSSL_ia32cap_P bit 4 (TSC) is set, else 0.
 * The call/pop pair materialises the PC for PIC data addressing.
 */
.globl OPENSSL_rdtsc
.type OPENSSL_rdtsc,@function
.align 16
OPENSSL_rdtsc:
.L_OPENSSL_rdtsc_begin:
xorl %eax,%eax
xorl %edx,%edx
call .L009PIC_me_up
.L009PIC_me_up:
popl %ecx
leal OPENSSL_ia32cap_P-.L009PIC_me_up(%ecx),%ecx
btl $4,(%ecx)
jnc .L010notsc
/* rdtsc */
.byte 0x0f,0x31
.L010notsc:
ret
.size OPENSSL_rdtsc,.-.L_OPENSSL_rdtsc_begin
/*
 * OPENSSL_instrument_halt (PIC variant): measure TSC ticks spent in a
 * hlt instruction; returns the tick delta in %edx:%eax, or 0 if the TSC
 * is unavailable, the CPL is not 0, or interrupts are disabled
 * (EFLAGS.IF, bit 9).  The .long below is an instruction sequence
 * emitted as raw data by the generator -- presumably the CPL probe;
 * confirm against upstream x86cpuid.pl before touching it.
 */
.globl OPENSSL_instrument_halt
.type OPENSSL_instrument_halt,@function
.align 16
OPENSSL_instrument_halt:
.L_OPENSSL_instrument_halt_begin:
call .L011PIC_me_up
.L011PIC_me_up:
popl %ecx
leal OPENSSL_ia32cap_P-.L011PIC_me_up(%ecx),%ecx
btl $4,(%ecx)
jnc .L012nohalt
.long 2421723150
andl $3,%eax
jnz .L012nohalt
pushfl
popl %eax
btl $9,%eax
jnc .L012nohalt
/* rdtsc before and after hlt; subtract to get the halted interval. */
.byte 0x0f,0x31
pushl %edx
pushl %eax
hlt
.byte 0x0f,0x31
subl (%esp),%eax
sbbl 4(%esp),%edx
addl $8,%esp
ret
.L012nohalt:
xorl %eax,%eax
xorl %edx,%edx
ret
.size OPENSSL_instrument_halt,.-.L_OPENSSL_instrument_halt_begin
/*
 * OPENSSL_far_spin (PIC variant): spin while *(arg2) stays equal to its
 * initial value, counting iterations in %eax; returns 0 immediately if
 * interrupts are disabled (EFLAGS.IF clear).  The .long values are
 * generator-emitted instruction bytes (presumably far-segment
 * load/return sequences -- confirm against upstream x86cpuid.pl).
 */
.globl OPENSSL_far_spin
.type OPENSSL_far_spin,@function
.align 16
OPENSSL_far_spin:
.L_OPENSSL_far_spin_begin:
pushfl
popl %eax
btl $9,%eax
jnc .L013nospin
movl 4(%esp),%eax
movl 8(%esp),%ecx
.long 2430111262
xorl %eax,%eax
movl (%ecx),%edx
jmp .L014spin
.align 16
.L014spin:
incl %eax
cmpl (%ecx),%edx
je .L014spin
.long 529567888
ret
.L013nospin:
xorl %eax,%eax
xorl %edx,%edx
ret
.size OPENSSL_far_spin,.-.L_OPENSSL_far_spin_begin
/*
 * OPENSSL_wipe_cpu (PIC variant): scrub volatile CPU state -- zero the
 * XMM registers when both bits 24 and 26 of the capability word are set
 * (mask 83886080 = bits 24|26), then clear x87 state via the .long-encoded
 * sequence; returns a stack address in %eax.
 *
 * NOTE(review): after "movl (%ecx),%ecx" %ecx holds the capability WORD,
 * yet "btl $1,(%ecx)" then uses it as a pointer.  This matches the
 * non-PIC copy below, so it is presumably faithful generator output,
 * but it looks suspicious -- verify against upstream x86cpuid.pl.
 */
.globl OPENSSL_wipe_cpu
.type OPENSSL_wipe_cpu,@function
.align 16
OPENSSL_wipe_cpu:
.L_OPENSSL_wipe_cpu_begin:
xorl %eax,%eax
xorl %edx,%edx
call .L015PIC_me_up
.L015PIC_me_up:
popl %ecx
leal OPENSSL_ia32cap_P-.L015PIC_me_up(%ecx),%ecx
movl (%ecx),%ecx
btl $1,(%ecx)
jnc .L016no_x87
andl $83886080,%ecx
cmpl $83886080,%ecx
jne .L017no_sse2
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
.L017no_sse2:
/* Generator-encoded x87 state-clearing instructions emitted as data. */
.long 4007259865,4007259865,4007259865,4007259865,2430851995
.L016no_x87:
leal 4(%esp),%eax
ret
.size OPENSSL_wipe_cpu,.-.L_OPENSSL_wipe_cpu_begin
/*
 * OPENSSL_atomic_add (PIC variant): atomically add arg2 to *(arg1) and
 * return the new value.  Compare-exchange retry loop; the .long is a
 * lock-prefixed cmpxchg emitted as raw data by the generator (low byte
 * 0xf0 is the lock prefix -- confirm against upstream x86cpuid.pl).
 */
.globl OPENSSL_atomic_add
.type OPENSSL_atomic_add,@function
.align 16
OPENSSL_atomic_add:
.L_OPENSSL_atomic_add_begin:
movl 4(%esp),%edx
movl 8(%esp),%ecx
pushl %ebx
nop
movl (%edx),%eax
.L018spin:
leal (%eax,%ecx,1),%ebx
nop
.long 447811568
jne .L018spin
movl %ebx,%eax
popl %ebx
ret
.size OPENSSL_atomic_add,.-.L_OPENSSL_atomic_add_begin
/*
 * OPENSSL_indirect_call (PIC variant): call the function pointer passed
 * as the first argument, forwarding up to seven further stack arguments
 * copied into a fresh 28-byte frame; returns whatever the callee returns.
 */
.globl OPENSSL_indirect_call
.type OPENSSL_indirect_call,@function
.align 16
OPENSSL_indirect_call:
.L_OPENSSL_indirect_call_begin:
pushl %ebp
movl %esp,%ebp
subl $28,%esp
movl 12(%ebp),%ecx
movl %ecx,(%esp)
movl 16(%ebp),%edx
movl %edx,4(%esp)
movl 20(%ebp),%eax
movl %eax,8(%esp)
movl 24(%ebp),%eax
movl %eax,12(%esp)
movl 28(%ebp),%eax
movl %eax,16(%esp)
movl 32(%ebp),%eax
movl %eax,20(%esp)
movl 36(%ebp),%eax
movl %eax,24(%esp)
call *8(%ebp)
movl %ebp,%esp
popl %ebp
ret
.size OPENSSL_indirect_call,.-.L_OPENSSL_indirect_call_begin
/*
 * OPENSSL_cleanse (PIC variant): zero arg2 bytes at arg1 in a way the
 * compiler cannot elide.  Byte loop for short or unaligned prefixes,
 * dword loop for the aligned bulk, byte loop again for the tail.
 */
.globl OPENSSL_cleanse
.type OPENSSL_cleanse,@function
.align 16
OPENSSL_cleanse:
.L_OPENSSL_cleanse_begin:
movl 4(%esp),%edx
movl 8(%esp),%ecx
xorl %eax,%eax
cmpl $7,%ecx
jae .L019lot
cmpl $0,%ecx
je .L020ret
.L021little:
movb %al,(%edx)
subl $1,%ecx
leal 1(%edx),%edx
jnz .L021little
.L020ret:
ret
.align 16
.L019lot:
testl $3,%edx
jz .L022aligned
movb %al,(%edx)
leal -1(%ecx),%ecx
leal 1(%edx),%edx
jmp .L019lot
.L022aligned:
movl %eax,(%edx)
leal -4(%ecx),%ecx
testl $-4,%ecx
leal 4(%edx),%edx
jnz .L022aligned
cmpl $0,%ecx
jne .L021little
ret
.size OPENSSL_cleanse,.-.L_OPENSSL_cleanse_begin
/*
 * OPENSSL_ia32_rdrand (PIC variant): try rdrand %eax (0f c7 f0, emitted
 * as .byte) up to 8 times; CF signals success.  If the delivered word
 * is 0, the remaining retry count is returned instead (0 after total
 * failure), so a zero return means "no entropy".
 */
.globl OPENSSL_ia32_rdrand
.type OPENSSL_ia32_rdrand,@function
.align 16
OPENSSL_ia32_rdrand:
.L_OPENSSL_ia32_rdrand_begin:
movl $8,%ecx
.L023loop:
.byte 15,199,240
jc .L024break
loop .L023loop
.L024break:
cmpl $0,%eax
cmovel %ecx,%eax
ret
.size OPENSSL_ia32_rdrand,.-.L_OPENSSL_ia32_rdrand_begin
/*
 * OPENSSL_ia32_rdseed (PIC variant): same retry scheme as rdrand above,
 * but using rdseed %eax (0f c7 f8, emitted as .byte).
 */
.globl OPENSSL_ia32_rdseed
.type OPENSSL_ia32_rdseed,@function
.align 16
OPENSSL_ia32_rdseed:
.L_OPENSSL_ia32_rdseed_begin:
movl $8,%ecx
.L025loop:
.byte 15,199,248
jc .L026break
loop .L025loop
.L026break:
cmpl $0,%eax
cmovel %ecx,%eax
ret
.size OPENSSL_ia32_rdseed,.-.L_OPENSSL_ia32_rdseed_begin
/* Keep the setup hook and capability vector out of the dynamic symbol
   table, reserve the 16-byte capability vector, and arrange for
   OPENSSL_cpuid_setup to run from the .init section at load time. */
.hidden OPENSSL_cpuid_setup
.hidden OPENSSL_ia32cap_P
.comm OPENSSL_ia32cap_P,16,4
.section .init
call OPENSSL_cpuid_setup
#else
.file "x86cpuid.S"
.text
/*
 * OPENSSL_ia32_cpuid (non-PIC variant; instructions are identical to the
 * PIC one since the body contains no relocations).  Machine-generated
 * perlasm output, presumably from OpenSSL's crypto/x86cpuid.pl.
 *
 * Probes CPUID and assembles OpenSSL's capability words.  Returns the
 * EDX-derived feature word in %eax and the ECX-derived word in %edx;
 * also stores leaf-7 extended feature bits (or 0) at offset 8 of the
 * buffer whose pointer is the first stack argument.
 */
.globl OPENSSL_ia32_cpuid
.type OPENSSL_ia32_cpuid,@function
.align 16
OPENSSL_ia32_cpuid:
.L_OPENSSL_ia32_cpuid_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %edx,%edx
/* Toggle EFLAGS.ID (bit 21) to check that CPUID is supported at all. */
pushfl
popl %eax
movl %eax,%ecx
xorl $2097152,%eax
pushl %eax
popfl
pushfl
popl %eax
xorl %eax,%ecx
xorl %eax,%eax
btl $21,%ecx
jnc .L000nocpuid
movl 20(%esp),%esi
movl %eax,8(%esi)
/* cpuid leaf 0: %eax = max leaf, %ebx:%edx:%ecx = vendor string. */
.byte 0x0f,0xa2
movl %eax,%edi
xorl %eax,%eax
/* Vendor check: "Genu" / "ineI" / "ntel". */
cmpl $1970169159,%ebx
setne %al
movl %eax,%ebp
cmpl $1231384169,%edx
setne %al
orl %eax,%ebp
cmpl $1818588270,%ecx
setne %al
orl %eax,%ebp
jz .L001intel
/* Vendor check: "Auth" / "enti" / "cAMD". */
cmpl $1752462657,%ebx
setne %al
movl %eax,%esi
cmpl $1769238117,%edx
setne %al
orl %eax,%esi
cmpl $1145913699,%ecx
setne %al
orl %eax,%esi
jnz .L001intel
/* AMD: walk the extended leaves 0x80000000/01/08. */
movl $2147483648,%eax
.byte 0x0f,0xa2
cmpl $2147483649,%eax
jb .L001intel
movl %eax,%esi
movl $2147483649,%eax
.byte 0x0f,0xa2
orl %ecx,%ebp
andl $2049,%ebp
cmpl $2147483656,%esi
jb .L001intel
movl $2147483656,%eax
.byte 0x0f,0xa2
movzbl %cl,%esi
incl %esi
movl $1,%eax
xorl %ecx,%ecx
.byte 0x0f,0xa2
btl $28,%edx
jnc .L002generic
/* Compare logical-processor count against core count to decide on HTT. */
shrl $16,%ebx
andl $255,%ebx
cmpl %esi,%ebx
ja .L002generic
andl $4026531839,%edx
jmp .L002generic
.L001intel:
cmpl $7,%edi
jb .L003cacheinfo
/* cpuid leaf 7: structured extended features, EBX stored at buf[2]. */
movl 20(%esp),%esi
movl $7,%eax
xorl %ecx,%ecx
.byte 0x0f,0xa2
movl %ebx,8(%esi)
.L003cacheinfo:
cmpl $4,%edi
movl $-1,%edi
jb .L004nocacheinfo
/* cpuid leaf 4: deterministic cache parameters (core count field). */
movl $4,%eax
movl $0,%ecx
.byte 0x0f,0xa2
movl %eax,%edi
shrl $14,%edi
andl $4095,%edi
.L004nocacheinfo:
/* cpuid leaf 1: the main feature words in %edx/%ecx. */
movl $1,%eax
xorl %ecx,%ecx
.byte 0x0f,0xa2
andl $3220176895,%edx
cmpl $0,%ebp
jne .L005notintel
/* Genuine Intel: set the synthetic "Intel CPU" bit (bit 30, per
   OpenSSL's ia32cap convention -- confirm against OPENSSL_ia32cap(3)). */
orl $1073741824,%edx
andb $15,%ah
cmpb $15,%ah
jne .L005notintel
orl $1048576,%edx
.L005notintel:
btl $28,%edx
jnc .L002generic
andl $4026531839,%edx
cmpl $0,%edi
je .L002generic
orl $268435456,%edx
shrl $16,%ebx
cmpb $1,%bl
ja .L002generic
andl $4026531839,%edx
.L002generic:
/* Merge the AMD extended-ECX bit (bit 11) into the ECX feature word. */
andl $2048,%ebp
andl $4294965247,%ecx
movl %edx,%esi
orl %ecx,%ebp
/* ECX bit 27 = OSXSAVE: if set, read XCR0 via xgetbv and require
   XMM+YMM state (mask 6) before advertising AVX. */
btl $27,%ecx
jnc .L006clear_avx
xorl %ecx,%ecx
.byte 15,1,208
andl $6,%eax
cmpl $6,%eax
je .L007done
cmpl $2,%eax
je .L006clear_avx
/* No XMM state: drop AES/PCLMULQDQ (ECX bits 25/1) and FXSR (EDX bit 24). */
.L008clear_xmm:
andl $4261412861,%ebp
andl $4278190079,%esi
/* No YMM state: drop FMA/AVX (ECX bits 12/28) and leaf-7 AVX2 (EBX bit 5). */
.L006clear_avx:
andl $4026525695,%ebp
movl 20(%esp),%edi
andl $4294967263,8(%edi)
.L007done:
movl %esi,%eax
movl %ebp,%edx
.L000nocpuid:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size OPENSSL_ia32_cpuid,.-.L_OPENSSL_ia32_cpuid_begin
/*
 * OPENSSL_rdtsc (non-PIC variant): return the time-stamp counter in
 * %edx:%eax if OPENSSL_ia32cap_P bit 4 (TSC) is set, else 0.  Uses an
 * absolute lea, which is why this branch is not position independent.
 */
.globl OPENSSL_rdtsc
.type OPENSSL_rdtsc,@function
.align 16
OPENSSL_rdtsc:
.L_OPENSSL_rdtsc_begin:
xorl %eax,%eax
xorl %edx,%edx
leal OPENSSL_ia32cap_P,%ecx
btl $4,(%ecx)
jnc .L009notsc
/* rdtsc */
.byte 0x0f,0x31
.L009notsc:
ret
.size OPENSSL_rdtsc,.-.L_OPENSSL_rdtsc_begin
/*
 * OPENSSL_instrument_halt (non-PIC variant): measure TSC ticks spent in
 * a hlt instruction; returns the tick delta in %edx:%eax, or 0 if the
 * TSC is unavailable, the CPL is not 0, or interrupts are disabled
 * (EFLAGS.IF, bit 9).  The .long below is an instruction sequence
 * emitted as raw data by the generator -- presumably the CPL probe;
 * confirm against upstream x86cpuid.pl before touching it.
 */
.globl OPENSSL_instrument_halt
.type OPENSSL_instrument_halt,@function
.align 16
OPENSSL_instrument_halt:
.L_OPENSSL_instrument_halt_begin:
leal OPENSSL_ia32cap_P,%ecx
btl $4,(%ecx)
jnc .L010nohalt
.long 2421723150
andl $3,%eax
jnz .L010nohalt
pushfl
popl %eax
btl $9,%eax
jnc .L010nohalt
/* rdtsc before and after hlt; subtract to get the halted interval. */
.byte 0x0f,0x31
pushl %edx
pushl %eax
hlt
.byte 0x0f,0x31
subl (%esp),%eax
sbbl 4(%esp),%edx
addl $8,%esp
ret
.L010nohalt:
xorl %eax,%eax
xorl %edx,%edx
ret
.size OPENSSL_instrument_halt,.-.L_OPENSSL_instrument_halt_begin
/*
 * OPENSSL_far_spin (non-PIC variant): spin while *(arg2) stays equal to
 * its initial value, counting iterations in %eax; returns 0 immediately
 * if interrupts are disabled (EFLAGS.IF clear).  The .long values are
 * generator-emitted instruction bytes (presumably far-segment
 * load/return sequences -- confirm against upstream x86cpuid.pl).
 */
.globl OPENSSL_far_spin
.type OPENSSL_far_spin,@function
.align 16
OPENSSL_far_spin:
.L_OPENSSL_far_spin_begin:
pushfl
popl %eax
btl $9,%eax
jnc .L011nospin
movl 4(%esp),%eax
movl 8(%esp),%ecx
.long 2430111262
xorl %eax,%eax
movl (%ecx),%edx
jmp .L012spin
.align 16
.L012spin:
incl %eax
cmpl (%ecx),%edx
je .L012spin
.long 529567888
ret
.L011nospin:
xorl %eax,%eax
xorl %edx,%edx
ret
.size OPENSSL_far_spin,.-.L_OPENSSL_far_spin_begin
/*
 * OPENSSL_wipe_cpu (non-PIC variant): scrub volatile CPU state -- zero
 * the XMM registers when both bits 24 and 26 of the capability word are
 * set (mask 83886080 = bits 24|26), then clear x87 state via the
 * .long-encoded sequence; returns a stack address in %eax.
 *
 * NOTE(review): after "movl (%ecx),%ecx" %ecx holds the capability WORD,
 * yet "btl $1,(%ecx)" then uses it as a pointer.  This matches the PIC
 * copy above, so it is presumably faithful generator output, but it
 * looks suspicious -- verify against upstream x86cpuid.pl.
 */
.globl OPENSSL_wipe_cpu
.type OPENSSL_wipe_cpu,@function
.align 16
OPENSSL_wipe_cpu:
.L_OPENSSL_wipe_cpu_begin:
xorl %eax,%eax
xorl %edx,%edx
leal OPENSSL_ia32cap_P,%ecx
movl (%ecx),%ecx
btl $1,(%ecx)
jnc .L013no_x87
andl $83886080,%ecx
cmpl $83886080,%ecx
jne .L014no_sse2
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
.L014no_sse2:
/* Generator-encoded x87 state-clearing instructions emitted as data. */
.long 4007259865,4007259865,4007259865,4007259865,2430851995
.L013no_x87:
leal 4(%esp),%eax
ret
.size OPENSSL_wipe_cpu,.-.L_OPENSSL_wipe_cpu_begin
/*
 * OPENSSL_atomic_add (non-PIC variant): atomically add arg2 to *(arg1)
 * and return the new value.  Compare-exchange retry loop; the .long is
 * a lock-prefixed cmpxchg emitted as raw data by the generator (low
 * byte 0xf0 is the lock prefix -- confirm against upstream x86cpuid.pl).
 */
.globl OPENSSL_atomic_add
.type OPENSSL_atomic_add,@function
.align 16
OPENSSL_atomic_add:
.L_OPENSSL_atomic_add_begin:
movl 4(%esp),%edx
movl 8(%esp),%ecx
pushl %ebx
nop
movl (%edx),%eax
.L015spin:
leal (%eax,%ecx,1),%ebx
nop
.long 447811568
jne .L015spin
movl %ebx,%eax
popl %ebx
ret
.size OPENSSL_atomic_add,.-.L_OPENSSL_atomic_add_begin
/*
 * OPENSSL_indirect_call (non-PIC variant): call the function pointer
 * passed as the first argument, forwarding up to seven further stack
 * arguments copied into a fresh 28-byte frame; returns whatever the
 * callee returns.
 */
.globl OPENSSL_indirect_call
.type OPENSSL_indirect_call,@function
.align 16
OPENSSL_indirect_call:
.L_OPENSSL_indirect_call_begin:
pushl %ebp
movl %esp,%ebp
subl $28,%esp
movl 12(%ebp),%ecx
movl %ecx,(%esp)
movl 16(%ebp),%edx
movl %edx,4(%esp)
movl 20(%ebp),%eax
movl %eax,8(%esp)
movl 24(%ebp),%eax
movl %eax,12(%esp)
movl 28(%ebp),%eax
movl %eax,16(%esp)
movl 32(%ebp),%eax
movl %eax,20(%esp)
movl 36(%ebp),%eax
movl %eax,24(%esp)
call *8(%ebp)
movl %ebp,%esp
popl %ebp
ret
.size OPENSSL_indirect_call,.-.L_OPENSSL_indirect_call_begin
/*
 * OPENSSL_cleanse (non-PIC variant): zero arg2 bytes at arg1 in a way
 * the compiler cannot elide.  Byte loop for short or unaligned
 * prefixes, dword loop for the aligned bulk, byte loop again for the tail.
 */
.globl OPENSSL_cleanse
.type OPENSSL_cleanse,@function
.align 16
OPENSSL_cleanse:
.L_OPENSSL_cleanse_begin:
movl 4(%esp),%edx
movl 8(%esp),%ecx
xorl %eax,%eax
cmpl $7,%ecx
jae .L016lot
cmpl $0,%ecx
je .L017ret
.L018little:
movb %al,(%edx)
subl $1,%ecx
leal 1(%edx),%edx
jnz .L018little
.L017ret:
ret
.align 16
.L016lot:
testl $3,%edx
jz .L019aligned
movb %al,(%edx)
leal -1(%ecx),%ecx
leal 1(%edx),%edx
jmp .L016lot
.L019aligned:
movl %eax,(%edx)
leal -4(%ecx),%ecx
testl $-4,%ecx
leal 4(%edx),%edx
jnz .L019aligned
cmpl $0,%ecx
jne .L018little
ret
.size OPENSSL_cleanse,.-.L_OPENSSL_cleanse_begin
/*
 * OPENSSL_ia32_rdrand (non-PIC variant): try rdrand %eax (0f c7 f0,
 * emitted as .byte) up to 8 times; CF signals success.  If the
 * delivered word is 0, the remaining retry count is returned instead
 * (0 after total failure), so a zero return means "no entropy".
 */
.globl OPENSSL_ia32_rdrand
.type OPENSSL_ia32_rdrand,@function
.align 16
OPENSSL_ia32_rdrand:
.L_OPENSSL_ia32_rdrand_begin:
movl $8,%ecx
.L020loop:
.byte 15,199,240
jc .L021break
loop .L020loop
.L021break:
cmpl $0,%eax
cmovel %ecx,%eax
ret
.size OPENSSL_ia32_rdrand,.-.L_OPENSSL_ia32_rdrand_begin
/*
 * OPENSSL_ia32_rdseed (non-PIC variant): same retry scheme as rdrand
 * above, but using rdseed %eax (0f c7 f8, emitted as .byte).
 */
.globl OPENSSL_ia32_rdseed
.type OPENSSL_ia32_rdseed,@function
.align 16
OPENSSL_ia32_rdseed:
.L_OPENSSL_ia32_rdseed_begin:
movl $8,%ecx
.L022loop:
.byte 15,199,248
jc .L023break
loop .L022loop
.L023break:
cmpl $0,%eax
cmovel %ecx,%eax
ret
.size OPENSSL_ia32_rdseed,.-.L_OPENSSL_ia32_rdseed_begin
/* Keep the setup hook and capability vector out of the dynamic symbol
   table, reserve the 16-byte capability vector, and arrange for
   OPENSSL_cpuid_setup to run from the .init section at load time. */
.hidden OPENSSL_cpuid_setup
.hidden OPENSSL_ia32cap_P
.comm OPENSSL_ia32cap_P,16,4
.section .init
call OPENSSL_cpuid_setup
#endif

View File

@ -1,364 +0,0 @@
# $FreeBSD$
.file "x86cpuid.s"
.text
.globl OPENSSL_ia32_cpuid
.type OPENSSL_ia32_cpuid,@function
.align 16
# OPENSSL_ia32_cpuid(unsigned int *result)
# Probes CPUID and returns an adjusted capability vector:
#   eax = bits derived from CPUID.1:EDX, edx = bits from CPUID.1:ECX,
#   result[2] (8(%esi)) = CPUID.7:EBX bits (left 0 if leaf 7 absent).
# Returns 0 if the CPU has no CPUID instruction.
# All .byte 0x0f,0xa2 sequences are the cpuid instruction; perlasm
# emits them as raw bytes for old assemblers.
OPENSSL_ia32_cpuid:
.L_OPENSSL_ia32_cpuid_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %edx,%edx
# CPUID exists iff the ID flag (EFLAGS bit 21, 0x200000) can be toggled.
pushfl
popl %eax
movl %eax,%ecx
xorl $2097152,%eax
pushl %eax
popfl
pushfl
popl %eax
xorl %eax,%ecx
xorl %eax,%eax
btl $21,%ecx
jnc .L000nocpuid
movl 20(%esp),%esi
movl %eax,8(%esi)
.byte 0x0f,0xa2
movl %eax,%edi
xorl %eax,%eax
cmpl $1970169159,%ebx
setne %al
movl %eax,%ebp
cmpl $1231384169,%edx
setne %al
orl %eax,%ebp
cmpl $1818588270,%ecx
setne %al
orl %eax,%ebp
jz .L001intel
cmpl $1752462657,%ebx
setne %al
movl %eax,%esi
cmpl $1769238117,%edx
setne %al
orl %eax,%esi
cmpl $1145913699,%ecx
setne %al
orl %eax,%esi
jnz .L001intel
# AMD path: consult extended leaves for core counts.
movl $2147483648,%eax
.byte 0x0f,0xa2
cmpl $2147483649,%eax
jb .L001intel
movl %eax,%esi
movl $2147483649,%eax
.byte 0x0f,0xa2
orl %ecx,%ebp
andl $2049,%ebp
cmpl $2147483656,%esi
jb .L001intel
movl $2147483656,%eax
.byte 0x0f,0xa2
movzbl %cl,%esi
incl %esi
movl $1,%eax
xorl %ecx,%ecx
.byte 0x0f,0xa2
btl $28,%edx
jnc .L002generic
shrl $16,%ebx
andl $255,%ebx
cmpl %esi,%ebx
ja .L002generic
andl $4026531839,%edx
jmp .L002generic
.L001intel:
# Intel / default path.
cmpl $7,%edi
jb .L003cacheinfo
movl 20(%esp),%esi
movl $7,%eax
xorl %ecx,%ecx
.byte 0x0f,0xa2
movl %ebx,8(%esi)
.L003cacheinfo:
cmpl $4,%edi
movl $-1,%edi
jb .L004nocacheinfo
movl $4,%eax
movl $0,%ecx
.byte 0x0f,0xa2
movl %eax,%edi
shrl $14,%edi
andl $4095,%edi
.L004nocacheinfo:
movl $1,%eax
xorl %ecx,%ecx
.byte 0x0f,0xa2
andl $3220176895,%edx
cmpl $0,%ebp
jne .L005notintel
orl $1073741824,%edx
andb $15,%ah
cmpb $15,%ah
jne .L005notintel
orl $1048576,%edx
.L005notintel:
btl $28,%edx
jnc .L002generic
andl $4026531839,%edx
cmpl $0,%edi
je .L002generic
orl $268435456,%edx
shrl $16,%ebx
cmpb $1,%bl
ja .L002generic
andl $4026531839,%edx
.L002generic:
andl $2048,%ebp
andl $4294965247,%ecx
movl %edx,%esi
orl %ecx,%ebp
btl $27,%ecx
jnc .L006clear_avx
xorl %ecx,%ecx
.byte 15,1,208
andl $6,%eax
cmpl $6,%eax
je .L007done
cmpl $2,%eax
je .L006clear_avx
.L008clear_xmm:
andl $4261412861,%ebp
andl $4278190079,%esi
.L006clear_avx:
andl $4026525695,%ebp
movl 20(%esp),%edi
andl $4294967263,8(%edi)
.L007done:
movl %esi,%eax
movl %ebp,%edx
.L000nocpuid:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size OPENSSL_ia32_cpuid,.-.L_OPENSSL_ia32_cpuid_begin
.globl OPENSSL_rdtsc
.type OPENSSL_rdtsc,@function
.align 16
# OPENSSL_rdtsc(void)
# Returns the time-stamp counter in edx:eax, or 0 if the TSC bit
# (CPUID.1:EDX bit 4) is not set in OPENSSL_ia32cap_P.
# Clobbers: ecx, flags.
OPENSSL_rdtsc:
.L_OPENSSL_rdtsc_begin:
xorl %eax,%eax
xorl %edx,%edx
# PIC fix: the original absolute "leal OPENSSL_ia32cap_P,%ecx" needs
# a text relocation inside a shared object.  OPENSSL_ia32cap_P is
# .hidden, so a call/pop PC-relative sequence resolves it locally
# without the GOT.
call .Lrdtsc_pic
.Lrdtsc_pic:
popl %ecx
leal OPENSSL_ia32cap_P-.Lrdtsc_pic(%ecx),%ecx
btl $4,(%ecx)
jnc .L009notsc
.byte 0x0f,0x31
.L009notsc:
ret
.size OPENSSL_rdtsc,.-.L_OPENSSL_rdtsc_begin
.globl OPENSSL_instrument_halt
.type OPENSSL_instrument_halt,@function
.align 16
# OPENSSL_instrument_halt(void)
# Measures in edx:eax how many TSC ticks a 'hlt' instruction takes;
# returns 0 when halting is impossible (no TSC, not running in
# ring 0, or interrupts disabled).  Clobbers: ecx, flags.
OPENSSL_instrument_halt:
.L_OPENSSL_instrument_halt_begin:
# PIC fix: OPENSSL_ia32cap_P is .hidden, so form its address
# PC-relative via call/pop instead of the original absolute lea,
# which required a text relocation in a shared object.
call .Lhalt_pic
.Lhalt_pic:
popl %ecx
leal OPENSSL_ia32cap_P-.Lhalt_pic(%ecx),%ecx
btl $4,(%ecx)
jnc .L010nohalt
.long 2421723150
andl $3,%eax
jnz .L010nohalt
pushfl
popl %eax
btl $9,%eax
jnc .L010nohalt
.byte 0x0f,0x31
pushl %edx
pushl %eax
hlt
.byte 0x0f,0x31
subl (%esp),%eax
sbbl 4(%esp),%edx
addl $8,%esp
ret
.L010nohalt:
xorl %eax,%eax
xorl %edx,%edx
ret
.size OPENSSL_instrument_halt,.-.L_OPENSSL_instrument_halt_begin
.globl OPENSSL_far_spin
.type OPENSSL_far_spin,@function
.align 16
# OPENSSL_far_spin(sel, addr)
# Spins reading *addr through a caller-supplied %ds selector until
# the observed value changes; returns the iteration count in eax.
# Returns 0 immediately if interrupts are disabled (EFLAGS.IF clear).
# The .long directives are pre-encoded instruction bytes emitted by
# perlasm; decodings noted per line.
OPENSSL_far_spin:
.L_OPENSSL_far_spin_begin:
pushfl
popl %eax
btl $9,%eax
jnc .L011nospin
movl 4(%esp),%eax
movl 8(%esp),%ecx
.long 2430111262
xorl %eax,%eax
movl (%ecx),%edx
jmp .L012spin
.align 16
.L012spin:
incl %eax
cmpl (%ecx),%edx
je .L012spin
.long 529567888
ret
.L011nospin:
xorl %eax,%eax
xorl %edx,%edx
ret
.size OPENSSL_far_spin,.-.L_OPENSSL_far_spin_begin
.globl OPENSSL_wipe_cpu
.type OPENSSL_wipe_cpu,@function
.align 16
# OPENSSL_wipe_cpu(void)
# Scrubs eax/edx, the XMM registers (when the FXSR and SSE2 bits,
# 0x05000000, are both set) and the x87 stack (the trailing .long
# block encodes eight fldz then fwait/fninit/nop), then returns a
# pointer just above the return address.
OPENSSL_wipe_cpu:
.L_OPENSSL_wipe_cpu_begin:
xorl %eax,%eax
xorl %edx,%edx
# PIC fix: OPENSSL_ia32cap_P is .hidden, so take its address
# PC-relative via call/pop rather than the original absolute lea
# (which needed a text relocation in a shared object).
call .Lwipe_pic
.Lwipe_pic:
popl %ecx
leal OPENSSL_ia32cap_P-.Lwipe_pic(%ecx),%ecx
movl (%ecx),%ecx
btl $1,(%ecx)
jnc .L013no_x87
andl $83886080,%ecx
cmpl $83886080,%ecx
jne .L014no_sse2
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
.L014no_sse2:
.long 4007259865,4007259865,4007259865,4007259865,2430851995
.L013no_x87:
leal 4(%esp),%eax
ret
.size OPENSSL_wipe_cpu,.-.L_OPENSSL_wipe_cpu_begin
.globl OPENSSL_atomic_add
.type OPENSSL_atomic_add,@function
.align 16
# OPENSSL_atomic_add(int *ptr, int amount)
# Atomically performs *ptr += amount via a lock-cmpxchg retry loop
# (.long 447811568 = f0 0f b1 1a = lock cmpxchgl %ebx,(%edx));
# returns the updated value in eax.  Clobbers: ecx, edx, flags.
OPENSSL_atomic_add:
.L_OPENSSL_atomic_add_begin:
movl 4(%esp),%edx
movl 8(%esp),%ecx
pushl %ebx
nop
movl (%edx),%eax
.L015spin:
leal (%eax,%ecx,1),%ebx
nop
.long 447811568
jne .L015spin
movl %ebx,%eax
popl %ebx
ret
.size OPENSSL_atomic_add,.-.L_OPENSSL_atomic_add_begin
.globl OPENSSL_indirect_call
.type OPENSSL_indirect_call,@function
.align 16
# OPENSSL_indirect_call(f, arg1..arg7)
# Copies up to seven dword arguments into a fresh 28-byte stack area
# and calls *f with them, so the callee always sees a full, clean
# argument frame regardless of how many arguments the caller pushed.
# In:  8(%ebp) = function pointer, 12..36(%ebp) = arguments
# Out: whatever *f leaves in eax/edx
OPENSSL_indirect_call:
.L_OPENSSL_indirect_call_begin:
pushl %ebp
movl %esp,%ebp
subl $28,%esp
movl 12(%ebp),%ecx
movl %ecx,(%esp)
movl 16(%ebp),%edx
movl %edx,4(%esp)
movl 20(%ebp),%eax
movl %eax,8(%esp)
movl 24(%ebp),%eax
movl %eax,12(%esp)
movl 28(%ebp),%eax
movl %eax,16(%esp)
movl 32(%ebp),%eax
movl %eax,20(%esp)
movl 36(%ebp),%eax
movl %eax,24(%esp)
call *8(%ebp)
movl %ebp,%esp
popl %ebp
ret
.size OPENSSL_indirect_call,.-.L_OPENSSL_indirect_call_begin
.globl OPENSSL_cleanse
.type OPENSSL_cleanse,@function
.align 16
# void OPENSSL_cleanse(void *ptr, size_t len)
# Securely zeroes len bytes at ptr (hand-written asm so the stores
# cannot be optimized away).  Short buffers use a byte loop; longer
# ones align the pointer and store a dword per iteration.
# In:  4(%esp) = ptr, 8(%esp) = len.  Clobbers: eax, ecx, edx, flags.
OPENSSL_cleanse:
.L_OPENSSL_cleanse_begin:
movl 4(%esp),%edx
movl 8(%esp),%ecx
xorl %eax,%eax
cmpl $7,%ecx
jae .L016lot
cmpl $0,%ecx
je .L017ret
.L018little:
movb %al,(%edx)
subl $1,%ecx
leal 1(%edx),%edx
jnz .L018little
.L017ret:
ret
.align 16
.L016lot:
testl $3,%edx
jz .L019aligned
movb %al,(%edx)
leal -1(%ecx),%ecx
leal 1(%edx),%edx
jmp .L016lot
.L019aligned:
movl %eax,(%edx)
leal -4(%ecx),%ecx
testl $-4,%ecx
leal 4(%edx),%edx
jnz .L019aligned
cmpl $0,%ecx
jne .L018little
ret
.size OPENSSL_cleanse,.-.L_OPENSSL_cleanse_begin
.globl OPENSSL_ia32_rdrand
.type OPENSSL_ia32_rdrand,@function
.align 16
# unsigned int OPENSSL_ia32_rdrand(void)
# Attempts RDRAND up to 8 times; returns the random word in eax,
# with eax == 0 signalling failure.  A genuine zero sample is mapped
# to the (nonzero) remaining retry count so it is not mistaken for
# failure.  Clobbers: ecx, flags.
OPENSSL_ia32_rdrand:
.L_OPENSSL_ia32_rdrand_begin:
movl $8,%ecx
.L020loop:
.byte 15,199,240
jc .L021break
loop .L020loop
.L021break:
cmpl $0,%eax
cmovel %ecx,%eax
ret
.size OPENSSL_ia32_rdrand,.-.L_OPENSSL_ia32_rdrand_begin
.globl OPENSSL_ia32_rdseed
.type OPENSSL_ia32_rdseed,@function
.align 16
# unsigned int OPENSSL_ia32_rdseed(void)
# Same protocol as OPENSSL_ia32_rdrand, but using RDSEED
# (.byte 15,199,248 = 0f c7 f8 = rdseed %eax; CF=1 on success).
# Returns eax == 0 on failure.  Clobbers: ecx, flags.
OPENSSL_ia32_rdseed:
.L_OPENSSL_ia32_rdseed_begin:
movl $8,%ecx
.L022loop:
.byte 15,199,248
jc .L023break
loop .L022loop
.L023break:
cmpl $0,%eax
cmovel %ecx,%eax
ret
.size OPENSSL_ia32_rdseed,.-.L_OPENSSL_ia32_rdseed_begin
# Internal symbols: keep them out of the shared object's dynamic
# symbol table so they bind locally.
.hidden OPENSSL_cpuid_setup
.hidden OPENSSL_ia32cap_P
# 16-byte capability vector, 4-byte aligned, zero-initialized (.bss).
.comm OPENSSL_ia32cap_P,16,4
# Register OPENSSL_cpuid_setup in the .init section so the capability
# vector is populated before any other libcrypto code runs.
.section .init
call OPENSSL_cpuid_setup