
Crypto: Add optimized SHA-512 and SHA-256 assembly implementations for x86_64 and x86. This improves speed by 30%.

Author: Mounir IDRASSI
Date:   2017-06-23 02:07:32 +02:00
parent ab7b5dc685
commit 546d6cff44
28 changed files with 5408 additions and 884 deletions
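
The quoted 30% gain comes from hand-tuned SSE4, AVX and AVX2 compression routines. The Makefile changes below only decide which objects get built per platform and architecture; the routine that actually runs is chosen at run time from CPU features. A self-contained sketch of that selection logic (illustrative only; the variant names in the strings follow Intel's published reference code and are assumptions, not identifiers confirmed by this commit):

/* Illustrative sketch only (not VeraCrypt source): report the fastest
 * SHA-256 compression variant the current CPU could use. The names
 * sha256_sse4 / sha256_avx / sha256_rorx follow Intel's reference code. */
#include <stdio.h>

static const char *pick_sha256_impl(void)
{
    /* GCC/Clang x86 builtins; test from fastest to slowest,
     * since AVX2 implies AVX implies SSE4.1 on real CPUs. */
    if (__builtin_cpu_supports("avx2"))
        return "sha256_rorx (AVX2)";
    if (__builtin_cpu_supports("avx"))
        return "sha256_avx (AVX)";
    if (__builtin_cpu_supports("sse4.1"))
        return "sha256_sse4 (SSE4)";
    return "portable fallback";
}

int main(void)
{
    __builtin_cpu_init();  /* initialize the CPU feature cache */
    printf("SHA-256 compression variant: %s\n", pick_sha256_impl());
    return 0;
}

The same ladder applies to the SHA-512 variants, with the Nayuki assembly or portable C code presumably serving as the fallback on CPUs without SSE4.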

@@ -36,15 +36,32 @@ ifeq "$(PLATFORM)" "MacOSX"
 OBJSEX += ../Crypto/Twofish_asm.oo
 OBJSEX += ../Crypto/Camellia_asm.oo
 OBJSEX += ../Crypto/Camellia_aesni_asm.oo
+OBJS += ../Crypto/sha256-nayuki.oo
+OBJS += ../Crypto/sha512-nayuki.oo
+OBJS += ../Crypto/sha256_avx1.oo
+OBJS += ../Crypto/sha256_avx2.oo
+OBJS += ../Crypto/sha256_sse4.oo
+OBJS += ../Crypto/sha512_avx1.oo
+OBJS += ../Crypto/sha512_avx2.oo
+OBJS += ../Crypto/sha512_sse4.oo
 else ifeq "$(CPU_ARCH)" "x86"
 OBJS += ../Crypto/Aes_x86.o
 OBJS += ../Crypto/Aes_hw_cpu.o
+OBJS += ../Crypto/sha256-x86-nayuki.o
+OBJS += ../Crypto/sha512-x86-nayuki.o
 else ifeq "$(CPU_ARCH)" "x64"
 OBJS += ../Crypto/Aes_x64.o
 OBJS += ../Crypto/Aes_hw_cpu.o
 OBJS += ../Crypto/Twofish_x64.o
 OBJS += ../Crypto/Camellia_x64.o
 OBJS += ../Crypto/Camellia_aesni_x64.o
+OBJS += ../Crypto/sha512-x64-nayuki.o
+OBJS += ../Crypto/sha256_avx1_x64.o
+OBJS += ../Crypto/sha256_avx2_x64.o
+OBJS += ../Crypto/sha256_sse4_x64.o
+OBJS += ../Crypto/sha512_avx1_x64.o
+OBJS += ../Crypto/sha512_avx2_x64.o
+OBJS += ../Crypto/sha512_sse4_x64.o
 else
 OBJS += ../Crypto/Aescrypt.o
 endif
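
This hunk selects objects per CPU_ARCH: 32-bit builds only get the Nayuki x86 assembly, while 64-bit builds additionally get the Intel SSE4/AVX/AVX2 files and the Nayuki x64 SHA-512. On the C side such a split is typically mirrored by preprocessor-guarded declarations; a rough sketch, with every name and prototype assumed from the Nayuki and Intel reference code rather than taken from this commit:

/* Sketch only: arch-guarded declarations matching the per-CPU_ARCH object
 * lists above. All identifiers below are assumptions, not VeraCrypt's. */
#include <stdint.h>

#if defined(__x86_64__) || defined(_M_X64)
/* 64-bit: entry points provided by the Intel *_x64.asm files. */
extern void sha256_sse4(void *data, uint32_t digest[8], uint64_t num_blks);
extern void sha256_avx(void *data, uint32_t digest[8], uint64_t num_blks);
extern void sha256_rorx(void *data, uint32_t digest[8], uint64_t num_blks); /* AVX2 */
#elif defined(__i386__) || defined(_M_IX86)
/* 32-bit: only the Nayuki x86 implementation is assembled. */
extern void sha256_compress(uint32_t state[8], const uint8_t block[64]);
#endif

Callers can then dispatch through whichever entry points the target architecture actually provides and fall back to portable C elsewhere.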
@@ -87,6 +104,33 @@ ifeq "$(PLATFORM)" "MacOSX"
 ../Crypto/Camellia_aesni_asm.oo: ../Crypto/Camellia_aesni_x64.S
 	@echo Assembling $(<F)
 	$(YASM) -p gas -f macho64 -o ../Crypto/Camellia_aesni_asm.oo ../Crypto/Camellia_aesni_x64.S
+../Crypto/sha256-nayuki.oo: ../Crypto/sha256-x86-nayuki.S
+	@echo Assembling $(<F)
+	$(YASM) -p gas -f macho32 -o ../Crypto/sha256-nayuki.oo ../Crypto/sha256-x86-nayuki.S
+../Crypto/sha256_avx1.oo: ../Crypto/sha256_avx1_x64.asm
+	@echo Assembling $(<F)
+	$(YASM) -f macho64 -o ../Crypto/sha256_avx1.oo ../Crypto/sha256_avx1_x64.asm
+../Crypto/sha256_avx2.oo: ../Crypto/sha256_avx2_x64.asm
+	@echo Assembling $(<F)
+	$(YASM) -f macho64 -o ../Crypto/sha256_avx2.oo ../Crypto/sha256_avx2_x64.asm
+../Crypto/sha256_sse4.oo: ../Crypto/sha256_sse4_x64.asm
+	@echo Assembling $(<F)
+	$(YASM) -f macho64 -o ../Crypto/sha256_sse4.oo ../Crypto/sha256_sse4_x64.asm
+../Crypto/sha512-nayuki.oo: ../Crypto/sha512-x86-nayuki.S ../Crypto/sha512-x64-nayuki.S
+	@echo Assembling $(<F)
+	$(YASM) -p gas -f macho32 -o ../Crypto/sha512-x86-nayuki.o ../Crypto/sha512-x86-nayuki.S
+	$(YASM) -p gas -f macho64 -o ../Crypto/sha512-x64-nayuki.o ../Crypto/sha512-x64-nayuki.S
+	lipo -create ../Crypto/sha512-x86-nayuki.o ../Crypto/sha512-x64-nayuki.o -output ../Crypto/sha512-nayuki.oo
+	rm -fr ../Crypto/sha512-x86-nayuki.o ../Crypto/sha512-x64-nayuki.o
+../Crypto/sha512_avx1.oo: ../Crypto/sha512_avx1_x64.asm
+	@echo Assembling $(<F)
+	$(YASM) -f macho64 -o ../Crypto/sha512_avx1.oo ../Crypto/sha512_avx1_x64.asm
+../Crypto/sha512_avx2.oo: ../Crypto/sha512_avx2_x64.asm
+	@echo Assembling $(<F)
+	$(YASM) -f macho64 -o ../Crypto/sha512_avx2.oo ../Crypto/sha512_avx2_x64.asm
+../Crypto/sha512_sse4.oo: ../Crypto/sha512_sse4_x64.asm
+	@echo Assembling $(<F)
+	$(YASM) -f macho64 -o ../Crypto/sha512_sse4.oo ../Crypto/sha512_sse4_x64.asm
 endif
include $(BUILD_INC)/Makefile.inc
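
When importing this much new assembly, a known-answer test is the usual way to confirm that each freshly assembled object (and, on macOS, each slice of the lipo-combined fat objects) still produces correct digests. A minimal sketch of such a check, not part of this commit: the sha256_sse4 name and its data/digest/block-count prototype are assumed from Intel's reference code, and the program must be linked against the assembled object.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Assumed entry point, following Intel's reference prototype; adjust it to
 * whatever the imported .asm file actually exports before linking. */
extern void sha256_sse4(void *input_data, uint32_t digest[8], uint64_t num_blks);

int main(void)
{
    /* One padded 64-byte block holding the message "abc":
     * "abc" + 0x80 + zero padding + 64-bit big-endian bit length (24). */
    uint8_t block[64] = { 'a', 'b', 'c', 0x80 };
    block[63] = 24;

    /* SHA-256 initial hash value H0..H7. */
    uint32_t digest[8] = {
        0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
        0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
    };

    /* Expected SHA-256("abc"), as eight 32-bit words. */
    const uint32_t expected[8] = {
        0xba7816bf, 0x8f01cfea, 0x414140de, 0x5dae2223,
        0xb00361a3, 0x96177a9c, 0xb410ff61, 0xf20015ad
    };

    sha256_sse4(block, digest, 1);

    if (memcmp(digest, expected, sizeof expected) == 0)
        puts("sha256_sse4: known-answer test passed");
    else
        puts("sha256_sse4: MISMATCH");
    return 0;
}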