VeraCrypt
path: root/src/Crypto/Sha2.c
Diffstat (limited to 'src/Crypto/Sha2.c')
-rw-r--r--  src/Crypto/Sha2.c  38
1 file changed, 35 insertions(+), 3 deletions(-)
diff --git a/src/Crypto/Sha2.c b/src/Crypto/Sha2.c
index 31cba7f5..27e61c3d 100644
--- a/src/Crypto/Sha2.c
+++ b/src/Crypto/Sha2.c
@@ -27,7 +27,7 @@ extern "C"
#endif
#if CRYPTOPP_BOOL_X64 || ((CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32) && !defined (TC_MACOSX))
- void sha512_compress_nayuki(uint_64t state[8], const uint_8t block[128]);
+ void VC_CDECL sha512_compress_nayuki(uint_64t state[8], const uint_8t block[128]);
#endif
#if defined(__cplusplus)
}
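
The VC_CDECL annotation above (and on sha256_compress_nayuki below) pins the C prototype to the calling convention the hand-written assembly expects; on 32-bit MSVC builds compiled with /Gz the default convention becomes __stdcall, and a mismatch would corrupt the stack at the call boundary. A minimal sketch of such a macro, assuming nothing about VeraCrypt's actual definition (which lives in a common header elsewhere in the tree):

/* Illustrative only -- hypothetical VC_CDECL definition. */
#if defined(_MSC_VER) && !defined(_M_X64)
 #define VC_CDECL __cdecl   /* force cdecl even if /Gz made __stdcall the default */
#else
 #define VC_CDECL           /* x64 and non-MSVC ABIs need no annotation */
#endif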
@@ -306,10 +306,17 @@ extern "C"
void sha256_sse4(void *input_data, uint_32t digest[8], uint_64t num_blks);
void sha256_rorx(void *input_data, uint_32t digest[8], uint_64t num_blks);
void sha256_avx(void *input_data, uint_32t digest[8], uint_64t num_blks);
+#if CRYPTOPP_SHANI_AVAILABLE
+ void sha256_intel(void *input_data, uint_32t digest[8], uint_64t num_blks);
+#endif
#endif
#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
- void sha256_compress_nayuki(uint_32t state[8], const uint_8t block[64]);
+ void VC_CDECL sha256_compress_nayuki(uint_32t state[8], const uint_8t block[64]);
+#endif
+
+#if CRYPTOPP_ARM_SHA2_AVAILABLE
+ void sha256_compress_digest_armv8(const void* input_data, uint_32t digest[8], uint_64t num_blks);
#endif
#if defined(__cplusplus)
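
CRYPTOPP_SHANI_AVAILABLE and CRYPTOPP_ARM_SHA2_AVAILABLE are compile-time gates: they only say the toolchain can build the accelerated kernels, while the runtime HasSHA256() checks in sha256_begin below decide whether the CPU may execute them. A hedged sketch of what such gates can look like; the real definitions come from the Crypto++-derived configuration headers, not from this sketch:

/* Illustrative only -- the actual macros live in the crypto config headers. */
#if !defined(CRYPTOPP_SHANI_AVAILABLE) && (defined(__x86_64__) || defined(_M_X64))
 #define CRYPTOPP_SHANI_AVAILABLE 1       /* assume the assembler can emit SHA-NI */
#endif
#if !defined(CRYPTOPP_ARM_SHA2_AVAILABLE) && (defined(__aarch64__) || defined(_M_ARM64))
 #define CRYPTOPP_ARM_SHA2_AVAILABLE 1    /* assume ARMv8 SHA2 instructions */
#endif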
@@ -447,7 +454,7 @@ static void CRYPTOPP_FASTCALL X86_SHA256_HashBlocks(uint_32t *state, const uint_
#if defined(__GNUC__)
#if CRYPTOPP_BOOL_X64
- CRYPTOPP_ALIGN_DATA(16) byte workspace[LOCALS_SIZE] ;
+ CRYPTOPP_ALIGN_DATA(16) uint8 workspace[LOCALS_SIZE] ;
#endif
__asm__ __volatile__
(
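
This hunk swaps Crypto++'s byte typedef for VeraCrypt's own uint8 in the aligned workspace that the inline-assembly block scribbles into. CRYPTOPP_ALIGN_DATA supplies the compiler-specific alignment attribute; a sketch of the usual shape of such a macro (an assumption, not copied from this tree):

/* Illustrative only -- typical definition of an alignment macro. */
#if defined(_MSC_VER)
 #define CRYPTOPP_ALIGN_DATA(x) __declspec(align(x))
#elif defined(__GNUC__)
 #define CRYPTOPP_ALIGN_DATA(x) __attribute__((aligned(x)))
#else
 #define CRYPTOPP_ALIGN_DATA(x)
#endif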
@@ -717,6 +724,13 @@ void StdSha256Transform(sha256_ctx* ctx, void* mp, uint_64t num_blks)
#ifndef NO_OPTIMIZED_VERSIONS
#if CRYPTOPP_BOOL_X64
+#if CRYPTOPP_SHANI_AVAILABLE
+void IntelSha256Transform(sha256_ctx* ctx, void* mp, uint_64t num_blks)
+{
+ sha256_intel(mp, ctx->hash, num_blks);
+}
+#endif
+
void Avx2Sha256Transform(sha256_ctx* ctx, void* mp, uint_64t num_blks)
{
if (num_blks > 1)
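
IntelSha256Transform passes num_blks straight through because the SHA-NI kernel iterates over blocks itself, while the AVX2 path above special-cases num_blks > 1 before picking a kernel. For a compressor that only handles one block at a time, a thin wrapper in the style below (names hypothetical, not VeraCrypt's) adapts it to the same num_blks interface:

#include <stdint.h>

/* one_block_kernel stands in for any single-block SHA-256 compressor. */
extern void one_block_kernel(uint32_t state[8], const uint8_t block[64]);

static void MultiBlockTransform(uint32_t state[8], const void* mp, uint64_t num_blks)
{
	const uint8_t* p = (const uint8_t*) mp;
	while (num_blks--)
	{
		one_block_kernel(state, p);  /* each SHA-256 block is 64 bytes */
		p += 64;
	}
}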
@@ -747,6 +761,13 @@ void SSE2Sha256Transform(sha256_ctx* ctx, void* mp, uint_64t num_blks)
}
#endif
+#if CRYPTOPP_ARM_SHA2_AVAILABLE
+void ArmSha256Transform(sha256_ctx* ctx, void* mp, uint_64t num_blks)
+{
+ sha256_compress_digest_armv8(mp, ctx->hash, num_blks);
+}
+#endif
+
#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
void Sha256AsmTransform(sha256_ctx* ctx, void* mp, uint_64t num_blks)
{
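
ArmSha256Transform forwards to sha256_compress_digest_armv8, a kernel built on the ARMv8 Crypto Extensions. As a rough illustration of what such a kernel does, here is a simplified single-block sketch with assumed names (not the routine this diff links against); it needs a compiler flag along the lines of -march=armv8-a+crypto:

#include <arm_neon.h>
#include <stdint.h>

/* SHA-256 round constants (FIPS 180-4). */
static const uint32_t K[64] = {
	0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
	0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
	0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
	0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
	0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
	0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
	0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
	0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
};

/* Hypothetical single-block compressor using the ARMv8 SHA-256 instructions. */
void sha256_block_armv8(uint32_t state[8], const uint8_t block[64])
{
	uint32x4_t STATE0 = vld1q_u32(&state[0]);	/* a b c d */
	uint32x4_t STATE1 = vld1q_u32(&state[4]);	/* e f g h */
	const uint32x4_t ABCD_SAVE = STATE0, EFGH_SAVE = STATE1;
	uint32x4_t MSG[4];
	int i;

	/* Load the 16 message words and byte-swap them to big-endian. */
	for (i = 0; i < 4; ++i)
		MSG[i] = vreinterpretq_u32_u8(vrev32q_u8(vld1q_u8(block + 16 * i)));

	for (i = 0; i < 16; ++i)
	{
		const uint32x4_t WK = vaddq_u32(MSG[i & 3], vld1q_u32(&K[4 * i]));
		const uint32x4_t ABCD = STATE0;
		STATE0 = vsha256hq_u32(STATE0, STATE1, WK);	/* 4 rounds, a..d half */
		STATE1 = vsha256h2q_u32(STATE1, ABCD, WK);	/* 4 rounds, e..h half */
		if (i < 12)	/* extend the message schedule for rounds 16..63 */
			MSG[i & 3] = vsha256su1q_u32(vsha256su0q_u32(MSG[i & 3], MSG[(i + 1) & 3]),
			                             MSG[(i + 2) & 3], MSG[(i + 3) & 3]);
	}

	vst1q_u32(&state[0], vaddq_u32(STATE0, ABCD_SAVE));
	vst1q_u32(&state[4], vaddq_u32(STATE1, EFGH_SAVE));
}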
@@ -775,6 +796,11 @@ void sha256_begin(sha256_ctx* ctx)
{
#ifndef NO_OPTIMIZED_VERSIONS
#if CRYPTOPP_BOOL_X64
+#if CRYPTOPP_SHANI_AVAILABLE
+ if (HasSHA256())
+ sha256transfunc = IntelSha256Transform;
+ else
+#endif
if (g_isIntel && HasSAVX2() && HasSBMI2())
sha256transfunc = Avx2Sha256Transform;
else if (g_isIntel && HasSAVX())
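
With this change the x64 dispatcher tries the SHA-NI transform first: HasSHA256() is a runtime probe, so a binary built with SHA-NI support still falls back to the AVX2/AVX/SSSE3 kernels on older CPUs. The probe itself boils down to one CPUID query: SHA extension support is reported in leaf 7, subleaf 0, EBX bit 29. A hedged GCC/Clang sketch, not VeraCrypt's actual HasSHA256() implementation:

#include <cpuid.h>

/* Hypothetical stand-in for a HasSHA256()-style feature probe. */
static int cpu_has_sha_extensions(void)
{
	unsigned int eax, ebx, ecx, edx;
	/* CPUID.(EAX=07H, ECX=0):EBX.SHA [bit 29] */
	if (!__get_cpuid_count(7, 0, &eax, &ebx, &ecx, &edx))
		return 0;	/* CPUID leaf 7 not supported */
	return (ebx >> 29) & 1;
}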
@@ -790,6 +816,12 @@ void sha256_begin(sha256_ctx* ctx)
else
#endif
+#if CRYPTOPP_ARM_SHA2_AVAILABLE
+ if (HasSHA256())
+ sha256transfunc = ArmSha256Transform;
+ else
+#endif
+
#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
sha256transfunc = Sha256AsmTransform;
#else
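
The ARM branch mirrors the x64 one: HasSHA256() gates ArmSha256Transform at runtime, and the dispatcher otherwise falls through to the remaining assembly or portable transforms. On AArch64 Linux such a probe typically reads the ELF auxiliary vector rather than CPUID; a hedged sketch, again not the project's actual detection code:

#include <sys/auxv.h>
#include <asm/hwcap.h>

/* Hypothetical AArch64/Linux probe for the ARMv8 SHA2 extension. */
static int cpu_has_arm_sha2(void)
{
#if defined(__aarch64__) && defined(HWCAP_SHA2)
	return (getauxval(AT_HWCAP) & HWCAP_SHA2) != 0;
#else
	return 0;	/* other platforms use sysctl, IsProcessorFeaturePresent, etc. */
#endif
}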