Diffstat (limited to 'src/Crypto')
93 files changed, 24125 insertions, 3312 deletions
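The Aes.h hunk that opens the series below declares wolfCrypt-backed XTS entry points next to the existing AES key-schedule API; they are compiled only when WOLFCRYPT_BACKEND is defined. A minimal usage sketch under stated assumptions: the 64-byte buffer holds the cipher key followed by the tweak key (the layout wolfCrypt's XtsAes expects), one 512-byte data unit is processed per call, and the names encrypt_sector, key, plain, cipher and sector_index are illustrative only, not part of the patch:

#include "Aes.h"  /* xts_* declarations are visible only with WOLFCRYPT_BACKEND defined */

/* Sketch: encrypt a single 512-byte data unit through the wolfCrypt XTS path. */
static int encrypt_sector(const unsigned char key[64],   /* AES-256 key || tweak key (assumed layout) */
                          const unsigned char plain[512],
                          unsigned char cipher[512],
                          word64 sector_index)            /* data-unit number used as the XTS tweak (assumed) */
{
    aes_encrypt_ctx ctx[1];

    /* Expand the 512-bit XTS key into the wolfCrypt context embedded in aes_encrypt_ctx. */
    if (xts_encrypt_key256(key, ctx) != EXIT_SUCCESS)
        return EXIT_FAILURE;

    /* length is in bytes; sector_index selects the tweak for this data unit. */
    return xts_encrypt(plain, cipher, 512, sector_index, ctx);
}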
diff --git a/src/Crypto/Aes.h b/src/Crypto/Aes.h index e12c6fc8..dcadfc2b 100644 --- a/src/Crypto/Aes.h +++ b/src/Crypto/Aes.h @@ -34,8 +34,13 @@ #define _AES_H #include "Common/Tcdefs.h" +#ifdef WOLFCRYPT_BACKEND + #include <wolfssl/options.h> + #include <wolfssl/wolfcrypt/aes.h> +#endif + #ifndef EXIT_SUCCESS #define EXIT_SUCCESS 0 #define EXIT_FAILURE 1 #endif @@ -92,13 +97,21 @@ typedef union typedef struct { uint_32t ks[KS_LENGTH]; aes_inf inf; +#ifdef WOLFCRYPT_BACKEND + XtsAes wc_enc_xts; + Aes wc_enc_aes; +#endif } aes_encrypt_ctx; typedef struct { uint_32t ks[KS_LENGTH]; aes_inf inf; +#ifdef WOLFCRYPT_BACKEND + XtsAes wc_dec_xts; + Aes wc_dec_aes; +#endif } aes_decrypt_ctx; /* This routine must be called before first use if non-static */ /* tables are being used */ @@ -125,9 +138,9 @@ AES_RETURN aes_encrypt_key256(const unsigned char *key, aes_encrypt_ctx cx[1]); #if defined(AES_VAR) AES_RETURN aes_encrypt_key(const unsigned char *key, int key_len, aes_encrypt_ctx cx[1]); #endif -AES_RETURN aes_encrypt(const unsigned char *in, unsigned char *out, const aes_encrypt_ctx cx[1]); +AES_RETURN VC_CDECL aes_encrypt(const unsigned char *in, unsigned char *out, const aes_encrypt_ctx cx[1]); #endif #if defined( AES_DECRYPT ) @@ -147,10 +160,17 @@ AES_RETURN aes_decrypt_key256(const unsigned char *key, aes_decrypt_ctx cx[1]); #if defined(AES_VAR) AES_RETURN aes_decrypt_key(const unsigned char *key, int key_len, aes_decrypt_ctx cx[1]); #endif -AES_RETURN aes_decrypt(const unsigned char *in, unsigned char *out, const aes_decrypt_ctx cx[1]); +AES_RETURN VC_CDECL aes_decrypt(const unsigned char *in, unsigned char *out, const aes_decrypt_ctx cx[1]); + +#endif +#ifdef WOLFCRYPT_BACKEND +AES_RETURN xts_encrypt_key256(const unsigned char *key, aes_encrypt_ctx cx[1]); +AES_RETURN xts_decrypt_key256(const unsigned char *key, aes_decrypt_ctx cx[1]); +AES_RETURN xts_encrypt(const unsigned char *in, unsigned char *out, word64 length, word64 sector, const aes_encrypt_ctx cx[1]); +AES_RETURN xts_decrypt(const unsigned char *in, unsigned char *out, word64 length, word64 sector, const aes_decrypt_ctx cx[1]); #endif #if defined(AES_MODES) diff --git a/src/Crypto/Aes_hw_cpu.asm b/src/Crypto/Aes_hw_cpu.asm index edc20b29..c8df89f3 100644 --- a/src/Crypto/Aes_hw_cpu.asm +++ b/src/Crypto/Aes_hw_cpu.asm @@ -7,13 +7,13 @@ ; %ifidn __BITS__, 16 - %define R e + %define R(x) e %+ x %elifidn __BITS__, 32 - %define R e + %define R(x) e %+ x %elifidn __BITS__, 64 - %define R r + %define R(x) r %+ x %endif %macro export_function 1-2 0 @@ -104,27 +104,27 @@ ; Load data blocks %assign block 1 %rep BLOCK_COUNT - movdqu xmm%[block], [%[R]dx + 16 * (block - 1)] + movdqu xmm %+ block, [R(dx) + 16 * (block - 1)] %assign block block+1 %endrep ; Encrypt/decrypt data blocks %assign round 0 %rep 15 - movdqu xmm0, [%[R]cx + 16 * round] + movdqu xmm0, [R(cx) + 16 * round] %assign block 1 %rep BLOCK_COUNT %if round = 0 - pxor xmm%[block], xmm0 + pxor xmm %+ block, xmm0 %else %if round < 14 - aes%[OPERATION] xmm%[block], xmm0 + aes %+ OPERATION xmm %+ block, xmm0 %else - aes%[OPERATION]last xmm%[block], xmm0 + aes %+ OPERATION %+ last xmm %+ block, xmm0 %endif %endif %assign block block+1 @@ -135,9 +135,9 @@ ; Store data blocks %assign block 1 %rep BLOCK_COUNT - movdqu [%[R]dx + 16 * (block - 1)], xmm%[block] + movdqu [R(dx) + 16 * (block - 1)], xmm %+ block %assign block block+1 %endrep %undef OPERATION @@ -161,16 +161,16 @@ %endif mov eax, 32 / MAX_REG_BLOCK_COUNT .1: - aes_hw_cpu %[OPERATION_32_BLOCKS], MAX_REG_BLOCK_COUNT + 
aes_hw_cpu OPERATION_32_BLOCKS, MAX_REG_BLOCK_COUNT - add %[R]dx, 16 * MAX_REG_BLOCK_COUNT + add R(dx), 16 * MAX_REG_BLOCK_COUNT dec eax jnz .1 %if (32 % MAX_REG_BLOCK_COUNT) != 0 - aes_hw_cpu %[OPERATION_32_BLOCKS], (32 % MAX_REG_BLOCK_COUNT) + aes_hw_cpu OPERATION_32_BLOCKS, (32 % MAX_REG_BLOCK_COUNT) %endif %ifidn __OUTPUT_FORMAT__, win64 %if MAX_REG_BLOCK_COUNT > 5 @@ -200,11 +200,11 @@ ; void aes_hw_cpu_enable_sse (); export_function aes_hw_cpu_enable_sse - mov %[R]ax, cr4 + mov R(ax), cr4 or ax, 1 << 9 - mov cr4, %[R]ax + mov cr4, R(ax) ret %ifidn __BITS__, 16 @@ -289,17 +289,17 @@ ; We comment this since we have an alternative C implementation ; that supports Hyper-V detection workaround ; ; export_function is_aes_hw_cpu_supported -; push %[R]bx +; push R(bx) ; ; mov eax, 1 ; cpuid ; mov eax, ecx ; shr eax, 25 ; and eax, 1 ; -; pop %[R]bx +; pop R(bx) ; ret ; void aes_hw_cpu_decrypt (const byte *ks, byte *data); @@ -330,4 +330,15 @@ aes_function_exit %endif ; __BITS__ != 16 + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif + diff --git a/src/Crypto/Aes_hw_cpu.h b/src/Crypto/Aes_hw_cpu.h index 1265ac47..face0a0c 100644 --- a/src/Crypto/Aes_hw_cpu.h +++ b/src/Crypto/Aes_hw_cpu.h @@ -3,9 +3,9 @@ Copyright (c) 2008-2012 TrueCrypt Developers Association and which is governed by the TrueCrypt License 3.0. Modifications and additions to the original source code (contained in this file) - and all other portions of this file are Copyright (c) 2013-2016 IDRIX + and all other portions of this file are Copyright (c) 2013-2017 IDRIX and are governed by the Apache License 2.0 the full text of which is contained in the file License.txt included in VeraCrypt binary and source code distribution packages. 
*/ @@ -19,14 +19,16 @@ extern "C" { #endif -byte is_aes_hw_cpu_supported (); +#if defined (TC_WINDOWS_BOOT) +uint8 is_aes_hw_cpu_supported (); +#endif void aes_hw_cpu_enable_sse (); -void aes_hw_cpu_decrypt (const byte *ks, byte *data); -void aes_hw_cpu_decrypt_32_blocks (const byte *ks, byte *data); -void aes_hw_cpu_encrypt (const byte *ks, byte *data); -void aes_hw_cpu_encrypt_32_blocks (const byte *ks, byte *data); +void aes_hw_cpu_decrypt (const uint8 *ks, uint8 *data); +void VC_CDECL aes_hw_cpu_decrypt_32_blocks (const uint8 *ks, uint8 *data); +void aes_hw_cpu_encrypt (const uint8 *ks, uint8 *data); +void VC_CDECL aes_hw_cpu_encrypt_32_blocks (const uint8 *ks, uint8 *data); #if defined(__cplusplus) } #endif diff --git a/src/Crypto/Aes_x64.asm b/src/Crypto/Aes_x64.asm index f74d0328..65965af1 100644 --- a/src/Crypto/Aes_x64.asm +++ b/src/Crypto/Aes_x64.asm @@ -904,4 +904,15 @@ end_prologue %endif %endif %endif + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif + diff --git a/src/Crypto/Aes_x86.asm b/src/Crypto/Aes_x86.asm index 484e31a7..c45f790d 100644 --- a/src/Crypto/Aes_x86.asm +++ b/src/Crypto/Aes_x86.asm @@ -643,4 +643,15 @@ stk_spc equ 20 ; stack space add esp,stk_spc do_exit %endif + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif + diff --git a/src/Crypto/Aescrypt.c b/src/Crypto/Aescrypt.c index 46175981..7348e2cf 100644 --- a/src/Crypto/Aescrypt.c +++ b/src/Crypto/Aescrypt.c @@ -93,9 +93,9 @@ extern "C" #else #define fwd_lrnd(y,x,k,c) (s(y,c) = (k)[c] ^ no_table(x,t_use(s,box),fwd_var,rf1,c)) #endif -AES_RETURN aes_encrypt(const unsigned char *in, unsigned char *out, const aes_encrypt_ctx cx[1]) +AES_RETURN VC_CDECL aes_encrypt(const unsigned char *in, unsigned char *out, const aes_encrypt_ctx cx[1]) { uint_32t locals(b0, b1); const uint_32t *kp; #if defined( dec_fmvars ) dec_fmvars; /* declare variables for fwd_mcol() if needed */ @@ -230,9 +230,9 @@ AES_RETURN aes_encrypt(const unsigned char *in, unsigned char *out, const aes_en #define key_ofs 1 #define rnd_key(n) (kp - n * N_COLS) #endif -AES_RETURN aes_decrypt(const unsigned char *in, unsigned char *out, const aes_decrypt_ctx cx[1]) +AES_RETURN VC_CDECL aes_decrypt(const unsigned char *in, unsigned char *out, const aes_decrypt_ctx cx[1]) { uint_32t locals(b0, b1); #if defined( dec_imvars ) dec_imvars; /* declare variables for inv_mcol() if needed */ #endif diff --git a/src/Crypto/Aeskey.c b/src/Crypto/Aeskey.c index c9ab0269..9b7bfd18 100644 --- a/src/Crypto/Aeskey.c +++ b/src/Crypto/Aeskey.c @@ -26,8 +26,9 @@ */ #include "Aesopt.h" #include "Aestab.h" +#include "Common/Tcdefs.h" #ifdef USE_VIA_ACE_IF_PRESENT # include "aes_via_ace.h" #endif @@ -94,8 +95,10 @@ AES_RETURN aes_encrypt_key128(const unsigned char *key, aes_encrypt_ctx cx[1]) if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff; #endif + burn(ss, sizeof(ss)); + #if defined( AES_ERR_CHK ) return EXIT_SUCCESS; #endif } @@ -146,8 +149,10 @@ AES_RETURN aes_encrypt_key192(const unsigned char *key, aes_encrypt_ctx cx[1]) if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff; #endif + burn(ss, sizeof(ss)); + #if 
defined( AES_ERR_CHK ) return EXIT_SUCCESS; #endif } @@ -201,8 +206,10 @@ AES_RETURN aes_encrypt_key256(const unsigned char *key, aes_encrypt_ctx cx[1]) if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff; #endif + burn(ss, sizeof(ss)); + #if defined( AES_ERR_CHK ) return EXIT_SUCCESS; #endif } @@ -351,8 +358,10 @@ AES_RETURN aes_decrypt_key128(const unsigned char *key, aes_decrypt_ctx cx[1]) if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff; #endif + burn(ss, sizeof(ss)); + #if defined( AES_ERR_CHK ) return EXIT_SUCCESS; #endif } @@ -438,8 +447,10 @@ AES_RETURN aes_decrypt_key192(const unsigned char *key, aes_decrypt_ctx cx[1]) if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff; #endif + burn(ss, sizeof(ss)); + #if defined( AES_ERR_CHK ) return EXIT_SUCCESS; #endif } @@ -537,8 +548,10 @@ AES_RETURN aes_decrypt_key256(const unsigned char *key, aes_decrypt_ctx cx[1]) if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff; #endif + burn(ss, sizeof(ss)); + #if defined( AES_ERR_CHK ) return EXIT_SUCCESS; #endif } diff --git a/src/Crypto/Camellia.c b/src/Crypto/Camellia.c index c9f1e563..3fd49023 100644 --- a/src/Crypto/Camellia.c +++ b/src/Crypto/Camellia.c @@ -1,17 +1,1179 @@ +#include "Camellia.h" +#include "Common/Endian.h" +#include "Crypto/cpu.h" +#include "Crypto/misc.h" + +#if CRYPTOPP_BOOL_X64 && !defined(CRYPTOPP_DISABLE_ASM) + +/* camellia.c ver 1.2.0-x86_64_asm1.1 + * + * Copyright (c) 2006,2007 + * NTT (Nippon Telegraph and Telephone Corporation) . All rights reserved. + * + * SuperCop integration: + * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi> + * + * VeraCrypt integration: + * Copyright © 2017 Mounir IDRASSI <mounir.idrassi@idrix.fr> + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer as + * the first lines of this file unmodified. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY NTT ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + * IN NO EVENT SHALL NTT BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + /* -This code is written by kerukuro for cppcrypto library (http://cppcrypto.sourceforge.net/) -and released into public domain. 
-*/ + * Algorithm Specification + * http://info.isl.ntt.co.jp/crypt/eng/camellia/specifications.html + */ /* Adapted for VeraCrypt */ +#include "Common/Crypto.h" -#include "Camellia.h" -#include "Common/Endian.h" -#include "Crypto/misc.h" +#ifndef _WIN32 +extern int IsAesHwCpuSupported (); +#endif +void camellia_encrypt_asm(const uint8 *ctx, void *dst, const void *src); +void camellia_decrypt_asm(const uint8 *ctx, void *dst, const void *src); +void camellia_enc_blk2(const uint8 *ctx, uint8 *dst, const uint8 *src); +void camellia_dec_blk2(const uint8 *ctx, uint8 *dst, const uint8 *src); +void camellia_ecb_enc_16way(const uint8 *ctx, uint8 *dst, const uint8 *src); +void camellia_ecb_dec_16way(const uint8 *ctx, uint8 *dst, const uint8 *src); + +/* key constants */ + +#define CAMELLIA_SIGMA1L (0xA09E667FL) +#define CAMELLIA_SIGMA1R (0x3BCC908BL) +#define CAMELLIA_SIGMA2L (0xB67AE858L) +#define CAMELLIA_SIGMA2R (0x4CAA73B2L) +#define CAMELLIA_SIGMA3L (0xC6EF372FL) +#define CAMELLIA_SIGMA3R (0xE94F82BEL) +#define CAMELLIA_SIGMA4L (0x54FF53A5L) +#define CAMELLIA_SIGMA4R (0xF1D36F1CL) +#define CAMELLIA_SIGMA5L (0x10E527FAL) +#define CAMELLIA_SIGMA5R (0xDE682D1DL) +#define CAMELLIA_SIGMA6L (0xB05688C2L) +#define CAMELLIA_SIGMA6R (0xB3E6C1FDL) +/* + * macros + */ + + +# define GETU32(p) bswap_32(*((uint32 *)(p))) +# define PUTU32(ct, st) {*((uint32 *)(ct)) = bswap_32((st));} + +#define CamelliaSubkeyL(INDEX) (subkey[(INDEX)*2]) +#define CamelliaSubkeyR(INDEX) (subkey[(INDEX)*2 + 1]) + +/* rotation right shift 1byte */ +#define CAMELLIA_RR8(x) (((x) >> 8) + ((x) << 24)) +/* rotation left shift 1bit */ +#define CAMELLIA_RL1(x) (((x) << 1) + ((x) >> 31)) +/* rotation left shift 1byte */ +#define CAMELLIA_RL8(x) (((x) << 8) + ((x) >> 24)) + +#define CAMELLIA_ROLDQ(ll, lr, rl, rr, w0, w1, bits) \ + do { \ + w0 = ll; \ + ll = (ll << bits) + (lr >> (32 - bits)); \ + lr = (lr << bits) + (rl >> (32 - bits)); \ + rl = (rl << bits) + (rr >> (32 - bits)); \ + rr = (rr << bits) + (w0 >> (32 - bits)); \ + } while(0) + +#define CAMELLIA_ROLDQo32(ll, lr, rl, rr, w0, w1, bits) \ + do { \ + w0 = ll; \ + w1 = lr; \ + ll = (lr << (bits - 32)) + (rl >> (64 - bits)); \ + lr = (rl << (bits - 32)) + (rr >> (64 - bits)); \ + rl = (rr << (bits - 32)) + (w0 >> (64 - bits)); \ + rr = (w0 << (bits - 32)) + (w1 >> (64 - bits)); \ + } while(0) + +#define CAMELLIA_SP11101110(INDEX) (camellia_sp11101110[(INDEX)]) +#define CAMELLIA_SP02220222(INDEX) (camellia_sp02220222[(INDEX)]) +#define CAMELLIA_SP30333033(INDEX) (camellia_sp30333033[(INDEX)]) +#define CAMELLIA_SP44044404(INDEX) (camellia_sp44044404[(INDEX)]) + +#define CAMELLIA_SP10011110(INDEX) (camellia_sp10011110[(INDEX)]) +#define CAMELLIA_SP22000222(INDEX) (camellia_sp22000222[(INDEX)]) +#define CAMELLIA_SP03303033(INDEX) (camellia_sp03303033[(INDEX)]) +#define CAMELLIA_SP00444404(INDEX) (camellia_sp00444404[(INDEX)]) + +#define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) \ + do { \ + uint64 __tmp; \ + il = xl ^ kl; \ + ir = xr ^ kr; \ + t0 = il >> 16; \ + t1 = ir >> 16; \ + __tmp = CAMELLIA_SP11101110(ir & 0xff) \ + ^ CAMELLIA_SP02220222((t1 >> 8) & 0xff) \ + ^ CAMELLIA_SP30333033(t1 & 0xff) \ + ^ CAMELLIA_SP44044404((ir >> 8) & 0xff) \ + ^ CAMELLIA_SP10011110((t0 >> 8) & 0xff) \ + ^ CAMELLIA_SP22000222(t0 & 0xff) \ + ^ CAMELLIA_SP03303033((il >> 8) & 0xff) \ + ^ CAMELLIA_SP00444404(il & 0xff); \ + yl = (uint32) __tmp; \ + yr = (uint32) (__tmp >> 32); \ + } while(0) + +const uint64 camellia_sp10011110[256] = { + 0x7000007070707000ULL, 0x8200008282828200ULL, 
0x2c00002c2c2c2c00ULL, + 0xec0000ecececec00ULL, 0xb30000b3b3b3b300ULL, 0x2700002727272700ULL, + 0xc00000c0c0c0c000ULL, 0xe50000e5e5e5e500ULL, 0xe40000e4e4e4e400ULL, + 0x8500008585858500ULL, 0x5700005757575700ULL, 0x3500003535353500ULL, + 0xea0000eaeaeaea00ULL, 0x0c00000c0c0c0c00ULL, 0xae0000aeaeaeae00ULL, + 0x4100004141414100ULL, 0x2300002323232300ULL, 0xef0000efefefef00ULL, + 0x6b00006b6b6b6b00ULL, 0x9300009393939300ULL, 0x4500004545454500ULL, + 0x1900001919191900ULL, 0xa50000a5a5a5a500ULL, 0x2100002121212100ULL, + 0xed0000edededed00ULL, 0x0e00000e0e0e0e00ULL, 0x4f00004f4f4f4f00ULL, + 0x4e00004e4e4e4e00ULL, 0x1d00001d1d1d1d00ULL, 0x6500006565656500ULL, + 0x9200009292929200ULL, 0xbd0000bdbdbdbd00ULL, 0x8600008686868600ULL, + 0xb80000b8b8b8b800ULL, 0xaf0000afafafaf00ULL, 0x8f00008f8f8f8f00ULL, + 0x7c00007c7c7c7c00ULL, 0xeb0000ebebebeb00ULL, 0x1f00001f1f1f1f00ULL, + 0xce0000cececece00ULL, 0x3e00003e3e3e3e00ULL, 0x3000003030303000ULL, + 0xdc0000dcdcdcdc00ULL, 0x5f00005f5f5f5f00ULL, 0x5e00005e5e5e5e00ULL, + 0xc50000c5c5c5c500ULL, 0x0b00000b0b0b0b00ULL, 0x1a00001a1a1a1a00ULL, + 0xa60000a6a6a6a600ULL, 0xe10000e1e1e1e100ULL, 0x3900003939393900ULL, + 0xca0000cacacaca00ULL, 0xd50000d5d5d5d500ULL, 0x4700004747474700ULL, + 0x5d00005d5d5d5d00ULL, 0x3d00003d3d3d3d00ULL, 0xd90000d9d9d9d900ULL, + 0x0100000101010100ULL, 0x5a00005a5a5a5a00ULL, 0xd60000d6d6d6d600ULL, + 0x5100005151515100ULL, 0x5600005656565600ULL, 0x6c00006c6c6c6c00ULL, + 0x4d00004d4d4d4d00ULL, 0x8b00008b8b8b8b00ULL, 0x0d00000d0d0d0d00ULL, + 0x9a00009a9a9a9a00ULL, 0x6600006666666600ULL, 0xfb0000fbfbfbfb00ULL, + 0xcc0000cccccccc00ULL, 0xb00000b0b0b0b000ULL, 0x2d00002d2d2d2d00ULL, + 0x7400007474747400ULL, 0x1200001212121200ULL, 0x2b00002b2b2b2b00ULL, + 0x2000002020202000ULL, 0xf00000f0f0f0f000ULL, 0xb10000b1b1b1b100ULL, + 0x8400008484848400ULL, 0x9900009999999900ULL, 0xdf0000dfdfdfdf00ULL, + 0x4c00004c4c4c4c00ULL, 0xcb0000cbcbcbcb00ULL, 0xc20000c2c2c2c200ULL, + 0x3400003434343400ULL, 0x7e00007e7e7e7e00ULL, 0x7600007676767600ULL, + 0x0500000505050500ULL, 0x6d00006d6d6d6d00ULL, 0xb70000b7b7b7b700ULL, + 0xa90000a9a9a9a900ULL, 0x3100003131313100ULL, 0xd10000d1d1d1d100ULL, + 0x1700001717171700ULL, 0x0400000404040400ULL, 0xd70000d7d7d7d700ULL, + 0x1400001414141400ULL, 0x5800005858585800ULL, 0x3a00003a3a3a3a00ULL, + 0x6100006161616100ULL, 0xde0000dededede00ULL, 0x1b00001b1b1b1b00ULL, + 0x1100001111111100ULL, 0x1c00001c1c1c1c00ULL, 0x3200003232323200ULL, + 0x0f00000f0f0f0f00ULL, 0x9c00009c9c9c9c00ULL, 0x1600001616161600ULL, + 0x5300005353535300ULL, 0x1800001818181800ULL, 0xf20000f2f2f2f200ULL, + 0x2200002222222200ULL, 0xfe0000fefefefe00ULL, 0x4400004444444400ULL, + 0xcf0000cfcfcfcf00ULL, 0xb20000b2b2b2b200ULL, 0xc30000c3c3c3c300ULL, + 0xb50000b5b5b5b500ULL, 0x7a00007a7a7a7a00ULL, 0x9100009191919100ULL, + 0x2400002424242400ULL, 0x0800000808080800ULL, 0xe80000e8e8e8e800ULL, + 0xa80000a8a8a8a800ULL, 0x6000006060606000ULL, 0xfc0000fcfcfcfc00ULL, + 0x6900006969696900ULL, 0x5000005050505000ULL, 0xaa0000aaaaaaaa00ULL, + 0xd00000d0d0d0d000ULL, 0xa00000a0a0a0a000ULL, 0x7d00007d7d7d7d00ULL, + 0xa10000a1a1a1a100ULL, 0x8900008989898900ULL, 0x6200006262626200ULL, + 0x9700009797979700ULL, 0x5400005454545400ULL, 0x5b00005b5b5b5b00ULL, + 0x1e00001e1e1e1e00ULL, 0x9500009595959500ULL, 0xe00000e0e0e0e000ULL, + 0xff0000ffffffff00ULL, 0x6400006464646400ULL, 0xd20000d2d2d2d200ULL, + 0x1000001010101000ULL, 0xc40000c4c4c4c400ULL, 0x0000000000000000ULL, + 0x4800004848484800ULL, 0xa30000a3a3a3a300ULL, 0xf70000f7f7f7f700ULL, + 0x7500007575757500ULL, 0xdb0000dbdbdbdb00ULL, 
0x8a00008a8a8a8a00ULL, + 0x0300000303030300ULL, 0xe60000e6e6e6e600ULL, 0xda0000dadadada00ULL, + 0x0900000909090900ULL, 0x3f00003f3f3f3f00ULL, 0xdd0000dddddddd00ULL, + 0x9400009494949400ULL, 0x8700008787878700ULL, 0x5c00005c5c5c5c00ULL, + 0x8300008383838300ULL, 0x0200000202020200ULL, 0xcd0000cdcdcdcd00ULL, + 0x4a00004a4a4a4a00ULL, 0x9000009090909000ULL, 0x3300003333333300ULL, + 0x7300007373737300ULL, 0x6700006767676700ULL, 0xf60000f6f6f6f600ULL, + 0xf30000f3f3f3f300ULL, 0x9d00009d9d9d9d00ULL, 0x7f00007f7f7f7f00ULL, + 0xbf0000bfbfbfbf00ULL, 0xe20000e2e2e2e200ULL, 0x5200005252525200ULL, + 0x9b00009b9b9b9b00ULL, 0xd80000d8d8d8d800ULL, 0x2600002626262600ULL, + 0xc80000c8c8c8c800ULL, 0x3700003737373700ULL, 0xc60000c6c6c6c600ULL, + 0x3b00003b3b3b3b00ULL, 0x8100008181818100ULL, 0x9600009696969600ULL, + 0x6f00006f6f6f6f00ULL, 0x4b00004b4b4b4b00ULL, 0x1300001313131300ULL, + 0xbe0000bebebebe00ULL, 0x6300006363636300ULL, 0x2e00002e2e2e2e00ULL, + 0xe90000e9e9e9e900ULL, 0x7900007979797900ULL, 0xa70000a7a7a7a700ULL, + 0x8c00008c8c8c8c00ULL, 0x9f00009f9f9f9f00ULL, 0x6e00006e6e6e6e00ULL, + 0xbc0000bcbcbcbc00ULL, 0x8e00008e8e8e8e00ULL, 0x2900002929292900ULL, + 0xf50000f5f5f5f500ULL, 0xf90000f9f9f9f900ULL, 0xb60000b6b6b6b600ULL, + 0x2f00002f2f2f2f00ULL, 0xfd0000fdfdfdfd00ULL, 0xb40000b4b4b4b400ULL, + 0x5900005959595900ULL, 0x7800007878787800ULL, 0x9800009898989800ULL, + 0x0600000606060600ULL, 0x6a00006a6a6a6a00ULL, 0xe70000e7e7e7e700ULL, + 0x4600004646464600ULL, 0x7100007171717100ULL, 0xba0000babababa00ULL, + 0xd40000d4d4d4d400ULL, 0x2500002525252500ULL, 0xab0000abababab00ULL, + 0x4200004242424200ULL, 0x8800008888888800ULL, 0xa20000a2a2a2a200ULL, + 0x8d00008d8d8d8d00ULL, 0xfa0000fafafafa00ULL, 0x7200007272727200ULL, + 0x0700000707070700ULL, 0xb90000b9b9b9b900ULL, 0x5500005555555500ULL, + 0xf80000f8f8f8f800ULL, 0xee0000eeeeeeee00ULL, 0xac0000acacacac00ULL, + 0x0a00000a0a0a0a00ULL, 0x3600003636363600ULL, 0x4900004949494900ULL, + 0x2a00002a2a2a2a00ULL, 0x6800006868686800ULL, 0x3c00003c3c3c3c00ULL, + 0x3800003838383800ULL, 0xf10000f1f1f1f100ULL, 0xa40000a4a4a4a400ULL, + 0x4000004040404000ULL, 0x2800002828282800ULL, 0xd30000d3d3d3d300ULL, + 0x7b00007b7b7b7b00ULL, 0xbb0000bbbbbbbb00ULL, 0xc90000c9c9c9c900ULL, + 0x4300004343434300ULL, 0xc10000c1c1c1c100ULL, 0x1500001515151500ULL, + 0xe30000e3e3e3e300ULL, 0xad0000adadadad00ULL, 0xf40000f4f4f4f400ULL, + 0x7700007777777700ULL, 0xc70000c7c7c7c700ULL, 0x8000008080808000ULL, + 0x9e00009e9e9e9e00ULL, +}; + +const uint64 camellia_sp22000222[256] = { + 0xe0e0000000e0e0e0ULL, 0x0505000000050505ULL, 0x5858000000585858ULL, + 0xd9d9000000d9d9d9ULL, 0x6767000000676767ULL, 0x4e4e0000004e4e4eULL, + 0x8181000000818181ULL, 0xcbcb000000cbcbcbULL, 0xc9c9000000c9c9c9ULL, + 0x0b0b0000000b0b0bULL, 0xaeae000000aeaeaeULL, 0x6a6a0000006a6a6aULL, + 0xd5d5000000d5d5d5ULL, 0x1818000000181818ULL, 0x5d5d0000005d5d5dULL, + 0x8282000000828282ULL, 0x4646000000464646ULL, 0xdfdf000000dfdfdfULL, + 0xd6d6000000d6d6d6ULL, 0x2727000000272727ULL, 0x8a8a0000008a8a8aULL, + 0x3232000000323232ULL, 0x4b4b0000004b4b4bULL, 0x4242000000424242ULL, + 0xdbdb000000dbdbdbULL, 0x1c1c0000001c1c1cULL, 0x9e9e0000009e9e9eULL, + 0x9c9c0000009c9c9cULL, 0x3a3a0000003a3a3aULL, 0xcaca000000cacacaULL, + 0x2525000000252525ULL, 0x7b7b0000007b7b7bULL, 0x0d0d0000000d0d0dULL, + 0x7171000000717171ULL, 0x5f5f0000005f5f5fULL, 0x1f1f0000001f1f1fULL, + 0xf8f8000000f8f8f8ULL, 0xd7d7000000d7d7d7ULL, 0x3e3e0000003e3e3eULL, + 0x9d9d0000009d9d9dULL, 0x7c7c0000007c7c7cULL, 0x6060000000606060ULL, + 0xb9b9000000b9b9b9ULL, 0xbebe000000bebebeULL, 
0xbcbc000000bcbcbcULL, + 0x8b8b0000008b8b8bULL, 0x1616000000161616ULL, 0x3434000000343434ULL, + 0x4d4d0000004d4d4dULL, 0xc3c3000000c3c3c3ULL, 0x7272000000727272ULL, + 0x9595000000959595ULL, 0xabab000000abababULL, 0x8e8e0000008e8e8eULL, + 0xbaba000000bababaULL, 0x7a7a0000007a7a7aULL, 0xb3b3000000b3b3b3ULL, + 0x0202000000020202ULL, 0xb4b4000000b4b4b4ULL, 0xadad000000adadadULL, + 0xa2a2000000a2a2a2ULL, 0xacac000000acacacULL, 0xd8d8000000d8d8d8ULL, + 0x9a9a0000009a9a9aULL, 0x1717000000171717ULL, 0x1a1a0000001a1a1aULL, + 0x3535000000353535ULL, 0xcccc000000ccccccULL, 0xf7f7000000f7f7f7ULL, + 0x9999000000999999ULL, 0x6161000000616161ULL, 0x5a5a0000005a5a5aULL, + 0xe8e8000000e8e8e8ULL, 0x2424000000242424ULL, 0x5656000000565656ULL, + 0x4040000000404040ULL, 0xe1e1000000e1e1e1ULL, 0x6363000000636363ULL, + 0x0909000000090909ULL, 0x3333000000333333ULL, 0xbfbf000000bfbfbfULL, + 0x9898000000989898ULL, 0x9797000000979797ULL, 0x8585000000858585ULL, + 0x6868000000686868ULL, 0xfcfc000000fcfcfcULL, 0xecec000000ecececULL, + 0x0a0a0000000a0a0aULL, 0xdada000000dadadaULL, 0x6f6f0000006f6f6fULL, + 0x5353000000535353ULL, 0x6262000000626262ULL, 0xa3a3000000a3a3a3ULL, + 0x2e2e0000002e2e2eULL, 0x0808000000080808ULL, 0xafaf000000afafafULL, + 0x2828000000282828ULL, 0xb0b0000000b0b0b0ULL, 0x7474000000747474ULL, + 0xc2c2000000c2c2c2ULL, 0xbdbd000000bdbdbdULL, 0x3636000000363636ULL, + 0x2222000000222222ULL, 0x3838000000383838ULL, 0x6464000000646464ULL, + 0x1e1e0000001e1e1eULL, 0x3939000000393939ULL, 0x2c2c0000002c2c2cULL, + 0xa6a6000000a6a6a6ULL, 0x3030000000303030ULL, 0xe5e5000000e5e5e5ULL, + 0x4444000000444444ULL, 0xfdfd000000fdfdfdULL, 0x8888000000888888ULL, + 0x9f9f0000009f9f9fULL, 0x6565000000656565ULL, 0x8787000000878787ULL, + 0x6b6b0000006b6b6bULL, 0xf4f4000000f4f4f4ULL, 0x2323000000232323ULL, + 0x4848000000484848ULL, 0x1010000000101010ULL, 0xd1d1000000d1d1d1ULL, + 0x5151000000515151ULL, 0xc0c0000000c0c0c0ULL, 0xf9f9000000f9f9f9ULL, + 0xd2d2000000d2d2d2ULL, 0xa0a0000000a0a0a0ULL, 0x5555000000555555ULL, + 0xa1a1000000a1a1a1ULL, 0x4141000000414141ULL, 0xfafa000000fafafaULL, + 0x4343000000434343ULL, 0x1313000000131313ULL, 0xc4c4000000c4c4c4ULL, + 0x2f2f0000002f2f2fULL, 0xa8a8000000a8a8a8ULL, 0xb6b6000000b6b6b6ULL, + 0x3c3c0000003c3c3cULL, 0x2b2b0000002b2b2bULL, 0xc1c1000000c1c1c1ULL, + 0xffff000000ffffffULL, 0xc8c8000000c8c8c8ULL, 0xa5a5000000a5a5a5ULL, + 0x2020000000202020ULL, 0x8989000000898989ULL, 0x0000000000000000ULL, + 0x9090000000909090ULL, 0x4747000000474747ULL, 0xefef000000efefefULL, + 0xeaea000000eaeaeaULL, 0xb7b7000000b7b7b7ULL, 0x1515000000151515ULL, + 0x0606000000060606ULL, 0xcdcd000000cdcdcdULL, 0xb5b5000000b5b5b5ULL, + 0x1212000000121212ULL, 0x7e7e0000007e7e7eULL, 0xbbbb000000bbbbbbULL, + 0x2929000000292929ULL, 0x0f0f0000000f0f0fULL, 0xb8b8000000b8b8b8ULL, + 0x0707000000070707ULL, 0x0404000000040404ULL, 0x9b9b0000009b9b9bULL, + 0x9494000000949494ULL, 0x2121000000212121ULL, 0x6666000000666666ULL, + 0xe6e6000000e6e6e6ULL, 0xcece000000cececeULL, 0xeded000000edededULL, + 0xe7e7000000e7e7e7ULL, 0x3b3b0000003b3b3bULL, 0xfefe000000fefefeULL, + 0x7f7f0000007f7f7fULL, 0xc5c5000000c5c5c5ULL, 0xa4a4000000a4a4a4ULL, + 0x3737000000373737ULL, 0xb1b1000000b1b1b1ULL, 0x4c4c0000004c4c4cULL, + 0x9191000000919191ULL, 0x6e6e0000006e6e6eULL, 0x8d8d0000008d8d8dULL, + 0x7676000000767676ULL, 0x0303000000030303ULL, 0x2d2d0000002d2d2dULL, + 0xdede000000dededeULL, 0x9696000000969696ULL, 0x2626000000262626ULL, + 0x7d7d0000007d7d7dULL, 0xc6c6000000c6c6c6ULL, 0x5c5c0000005c5c5cULL, + 0xd3d3000000d3d3d3ULL, 0xf2f2000000f2f2f2ULL, 
0x4f4f0000004f4f4fULL, + 0x1919000000191919ULL, 0x3f3f0000003f3f3fULL, 0xdcdc000000dcdcdcULL, + 0x7979000000797979ULL, 0x1d1d0000001d1d1dULL, 0x5252000000525252ULL, + 0xebeb000000ebebebULL, 0xf3f3000000f3f3f3ULL, 0x6d6d0000006d6d6dULL, + 0x5e5e0000005e5e5eULL, 0xfbfb000000fbfbfbULL, 0x6969000000696969ULL, + 0xb2b2000000b2b2b2ULL, 0xf0f0000000f0f0f0ULL, 0x3131000000313131ULL, + 0x0c0c0000000c0c0cULL, 0xd4d4000000d4d4d4ULL, 0xcfcf000000cfcfcfULL, + 0x8c8c0000008c8c8cULL, 0xe2e2000000e2e2e2ULL, 0x7575000000757575ULL, + 0xa9a9000000a9a9a9ULL, 0x4a4a0000004a4a4aULL, 0x5757000000575757ULL, + 0x8484000000848484ULL, 0x1111000000111111ULL, 0x4545000000454545ULL, + 0x1b1b0000001b1b1bULL, 0xf5f5000000f5f5f5ULL, 0xe4e4000000e4e4e4ULL, + 0x0e0e0000000e0e0eULL, 0x7373000000737373ULL, 0xaaaa000000aaaaaaULL, + 0xf1f1000000f1f1f1ULL, 0xdddd000000ddddddULL, 0x5959000000595959ULL, + 0x1414000000141414ULL, 0x6c6c0000006c6c6cULL, 0x9292000000929292ULL, + 0x5454000000545454ULL, 0xd0d0000000d0d0d0ULL, 0x7878000000787878ULL, + 0x7070000000707070ULL, 0xe3e3000000e3e3e3ULL, 0x4949000000494949ULL, + 0x8080000000808080ULL, 0x5050000000505050ULL, 0xa7a7000000a7a7a7ULL, + 0xf6f6000000f6f6f6ULL, 0x7777000000777777ULL, 0x9393000000939393ULL, + 0x8686000000868686ULL, 0x8383000000838383ULL, 0x2a2a0000002a2a2aULL, + 0xc7c7000000c7c7c7ULL, 0x5b5b0000005b5b5bULL, 0xe9e9000000e9e9e9ULL, + 0xeeee000000eeeeeeULL, 0x8f8f0000008f8f8fULL, 0x0101000000010101ULL, + 0x3d3d0000003d3d3dULL, +}; + +const uint64 camellia_sp03303033[256] = { + 0x0038380038003838ULL, 0x0041410041004141ULL, 0x0016160016001616ULL, + 0x0076760076007676ULL, 0x00d9d900d900d9d9ULL, 0x0093930093009393ULL, + 0x0060600060006060ULL, 0x00f2f200f200f2f2ULL, 0x0072720072007272ULL, + 0x00c2c200c200c2c2ULL, 0x00abab00ab00ababULL, 0x009a9a009a009a9aULL, + 0x0075750075007575ULL, 0x0006060006000606ULL, 0x0057570057005757ULL, + 0x00a0a000a000a0a0ULL, 0x0091910091009191ULL, 0x00f7f700f700f7f7ULL, + 0x00b5b500b500b5b5ULL, 0x00c9c900c900c9c9ULL, 0x00a2a200a200a2a2ULL, + 0x008c8c008c008c8cULL, 0x00d2d200d200d2d2ULL, 0x0090900090009090ULL, + 0x00f6f600f600f6f6ULL, 0x0007070007000707ULL, 0x00a7a700a700a7a7ULL, + 0x0027270027002727ULL, 0x008e8e008e008e8eULL, 0x00b2b200b200b2b2ULL, + 0x0049490049004949ULL, 0x00dede00de00dedeULL, 0x0043430043004343ULL, + 0x005c5c005c005c5cULL, 0x00d7d700d700d7d7ULL, 0x00c7c700c700c7c7ULL, + 0x003e3e003e003e3eULL, 0x00f5f500f500f5f5ULL, 0x008f8f008f008f8fULL, + 0x0067670067006767ULL, 0x001f1f001f001f1fULL, 0x0018180018001818ULL, + 0x006e6e006e006e6eULL, 0x00afaf00af00afafULL, 0x002f2f002f002f2fULL, + 0x00e2e200e200e2e2ULL, 0x0085850085008585ULL, 0x000d0d000d000d0dULL, + 0x0053530053005353ULL, 0x00f0f000f000f0f0ULL, 0x009c9c009c009c9cULL, + 0x0065650065006565ULL, 0x00eaea00ea00eaeaULL, 0x00a3a300a300a3a3ULL, + 0x00aeae00ae00aeaeULL, 0x009e9e009e009e9eULL, 0x00ecec00ec00ececULL, + 0x0080800080008080ULL, 0x002d2d002d002d2dULL, 0x006b6b006b006b6bULL, + 0x00a8a800a800a8a8ULL, 0x002b2b002b002b2bULL, 0x0036360036003636ULL, + 0x00a6a600a600a6a6ULL, 0x00c5c500c500c5c5ULL, 0x0086860086008686ULL, + 0x004d4d004d004d4dULL, 0x0033330033003333ULL, 0x00fdfd00fd00fdfdULL, + 0x0066660066006666ULL, 0x0058580058005858ULL, 0x0096960096009696ULL, + 0x003a3a003a003a3aULL, 0x0009090009000909ULL, 0x0095950095009595ULL, + 0x0010100010001010ULL, 0x0078780078007878ULL, 0x00d8d800d800d8d8ULL, + 0x0042420042004242ULL, 0x00cccc00cc00ccccULL, 0x00efef00ef00efefULL, + 0x0026260026002626ULL, 0x00e5e500e500e5e5ULL, 0x0061610061006161ULL, + 0x001a1a001a001a1aULL, 0x003f3f003f003f3fULL, 
0x003b3b003b003b3bULL, + 0x0082820082008282ULL, 0x00b6b600b600b6b6ULL, 0x00dbdb00db00dbdbULL, + 0x00d4d400d400d4d4ULL, 0x0098980098009898ULL, 0x00e8e800e800e8e8ULL, + 0x008b8b008b008b8bULL, 0x0002020002000202ULL, 0x00ebeb00eb00ebebULL, + 0x000a0a000a000a0aULL, 0x002c2c002c002c2cULL, 0x001d1d001d001d1dULL, + 0x00b0b000b000b0b0ULL, 0x006f6f006f006f6fULL, 0x008d8d008d008d8dULL, + 0x0088880088008888ULL, 0x000e0e000e000e0eULL, 0x0019190019001919ULL, + 0x0087870087008787ULL, 0x004e4e004e004e4eULL, 0x000b0b000b000b0bULL, + 0x00a9a900a900a9a9ULL, 0x000c0c000c000c0cULL, 0x0079790079007979ULL, + 0x0011110011001111ULL, 0x007f7f007f007f7fULL, 0x0022220022002222ULL, + 0x00e7e700e700e7e7ULL, 0x0059590059005959ULL, 0x00e1e100e100e1e1ULL, + 0x00dada00da00dadaULL, 0x003d3d003d003d3dULL, 0x00c8c800c800c8c8ULL, + 0x0012120012001212ULL, 0x0004040004000404ULL, 0x0074740074007474ULL, + 0x0054540054005454ULL, 0x0030300030003030ULL, 0x007e7e007e007e7eULL, + 0x00b4b400b400b4b4ULL, 0x0028280028002828ULL, 0x0055550055005555ULL, + 0x0068680068006868ULL, 0x0050500050005050ULL, 0x00bebe00be00bebeULL, + 0x00d0d000d000d0d0ULL, 0x00c4c400c400c4c4ULL, 0x0031310031003131ULL, + 0x00cbcb00cb00cbcbULL, 0x002a2a002a002a2aULL, 0x00adad00ad00adadULL, + 0x000f0f000f000f0fULL, 0x00caca00ca00cacaULL, 0x0070700070007070ULL, + 0x00ffff00ff00ffffULL, 0x0032320032003232ULL, 0x0069690069006969ULL, + 0x0008080008000808ULL, 0x0062620062006262ULL, 0x0000000000000000ULL, + 0x0024240024002424ULL, 0x00d1d100d100d1d1ULL, 0x00fbfb00fb00fbfbULL, + 0x00baba00ba00babaULL, 0x00eded00ed00ededULL, 0x0045450045004545ULL, + 0x0081810081008181ULL, 0x0073730073007373ULL, 0x006d6d006d006d6dULL, + 0x0084840084008484ULL, 0x009f9f009f009f9fULL, 0x00eeee00ee00eeeeULL, + 0x004a4a004a004a4aULL, 0x00c3c300c300c3c3ULL, 0x002e2e002e002e2eULL, + 0x00c1c100c100c1c1ULL, 0x0001010001000101ULL, 0x00e6e600e600e6e6ULL, + 0x0025250025002525ULL, 0x0048480048004848ULL, 0x0099990099009999ULL, + 0x00b9b900b900b9b9ULL, 0x00b3b300b300b3b3ULL, 0x007b7b007b007b7bULL, + 0x00f9f900f900f9f9ULL, 0x00cece00ce00ceceULL, 0x00bfbf00bf00bfbfULL, + 0x00dfdf00df00dfdfULL, 0x0071710071007171ULL, 0x0029290029002929ULL, + 0x00cdcd00cd00cdcdULL, 0x006c6c006c006c6cULL, 0x0013130013001313ULL, + 0x0064640064006464ULL, 0x009b9b009b009b9bULL, 0x0063630063006363ULL, + 0x009d9d009d009d9dULL, 0x00c0c000c000c0c0ULL, 0x004b4b004b004b4bULL, + 0x00b7b700b700b7b7ULL, 0x00a5a500a500a5a5ULL, 0x0089890089008989ULL, + 0x005f5f005f005f5fULL, 0x00b1b100b100b1b1ULL, 0x0017170017001717ULL, + 0x00f4f400f400f4f4ULL, 0x00bcbc00bc00bcbcULL, 0x00d3d300d300d3d3ULL, + 0x0046460046004646ULL, 0x00cfcf00cf00cfcfULL, 0x0037370037003737ULL, + 0x005e5e005e005e5eULL, 0x0047470047004747ULL, 0x0094940094009494ULL, + 0x00fafa00fa00fafaULL, 0x00fcfc00fc00fcfcULL, 0x005b5b005b005b5bULL, + 0x0097970097009797ULL, 0x00fefe00fe00fefeULL, 0x005a5a005a005a5aULL, + 0x00acac00ac00acacULL, 0x003c3c003c003c3cULL, 0x004c4c004c004c4cULL, + 0x0003030003000303ULL, 0x0035350035003535ULL, 0x00f3f300f300f3f3ULL, + 0x0023230023002323ULL, 0x00b8b800b800b8b8ULL, 0x005d5d005d005d5dULL, + 0x006a6a006a006a6aULL, 0x0092920092009292ULL, 0x00d5d500d500d5d5ULL, + 0x0021210021002121ULL, 0x0044440044004444ULL, 0x0051510051005151ULL, + 0x00c6c600c600c6c6ULL, 0x007d7d007d007d7dULL, 0x0039390039003939ULL, + 0x0083830083008383ULL, 0x00dcdc00dc00dcdcULL, 0x00aaaa00aa00aaaaULL, + 0x007c7c007c007c7cULL, 0x0077770077007777ULL, 0x0056560056005656ULL, + 0x0005050005000505ULL, 0x001b1b001b001b1bULL, 0x00a4a400a400a4a4ULL, + 0x0015150015001515ULL, 0x0034340034003434ULL, 
0x001e1e001e001e1eULL, + 0x001c1c001c001c1cULL, 0x00f8f800f800f8f8ULL, 0x0052520052005252ULL, + 0x0020200020002020ULL, 0x0014140014001414ULL, 0x00e9e900e900e9e9ULL, + 0x00bdbd00bd00bdbdULL, 0x00dddd00dd00ddddULL, 0x00e4e400e400e4e4ULL, + 0x00a1a100a100a1a1ULL, 0x00e0e000e000e0e0ULL, 0x008a8a008a008a8aULL, + 0x00f1f100f100f1f1ULL, 0x00d6d600d600d6d6ULL, 0x007a7a007a007a7aULL, + 0x00bbbb00bb00bbbbULL, 0x00e3e300e300e3e3ULL, 0x0040400040004040ULL, + 0x004f4f004f004f4fULL, +}; + +const uint64 camellia_sp00444404[256] = { + 0x0000707070700070ULL, 0x00002c2c2c2c002cULL, 0x0000b3b3b3b300b3ULL, + 0x0000c0c0c0c000c0ULL, 0x0000e4e4e4e400e4ULL, 0x0000575757570057ULL, + 0x0000eaeaeaea00eaULL, 0x0000aeaeaeae00aeULL, 0x0000232323230023ULL, + 0x00006b6b6b6b006bULL, 0x0000454545450045ULL, 0x0000a5a5a5a500a5ULL, + 0x0000edededed00edULL, 0x00004f4f4f4f004fULL, 0x00001d1d1d1d001dULL, + 0x0000929292920092ULL, 0x0000868686860086ULL, 0x0000afafafaf00afULL, + 0x00007c7c7c7c007cULL, 0x00001f1f1f1f001fULL, 0x00003e3e3e3e003eULL, + 0x0000dcdcdcdc00dcULL, 0x00005e5e5e5e005eULL, 0x00000b0b0b0b000bULL, + 0x0000a6a6a6a600a6ULL, 0x0000393939390039ULL, 0x0000d5d5d5d500d5ULL, + 0x00005d5d5d5d005dULL, 0x0000d9d9d9d900d9ULL, 0x00005a5a5a5a005aULL, + 0x0000515151510051ULL, 0x00006c6c6c6c006cULL, 0x00008b8b8b8b008bULL, + 0x00009a9a9a9a009aULL, 0x0000fbfbfbfb00fbULL, 0x0000b0b0b0b000b0ULL, + 0x0000747474740074ULL, 0x00002b2b2b2b002bULL, 0x0000f0f0f0f000f0ULL, + 0x0000848484840084ULL, 0x0000dfdfdfdf00dfULL, 0x0000cbcbcbcb00cbULL, + 0x0000343434340034ULL, 0x0000767676760076ULL, 0x00006d6d6d6d006dULL, + 0x0000a9a9a9a900a9ULL, 0x0000d1d1d1d100d1ULL, 0x0000040404040004ULL, + 0x0000141414140014ULL, 0x00003a3a3a3a003aULL, 0x0000dededede00deULL, + 0x0000111111110011ULL, 0x0000323232320032ULL, 0x00009c9c9c9c009cULL, + 0x0000535353530053ULL, 0x0000f2f2f2f200f2ULL, 0x0000fefefefe00feULL, + 0x0000cfcfcfcf00cfULL, 0x0000c3c3c3c300c3ULL, 0x00007a7a7a7a007aULL, + 0x0000242424240024ULL, 0x0000e8e8e8e800e8ULL, 0x0000606060600060ULL, + 0x0000696969690069ULL, 0x0000aaaaaaaa00aaULL, 0x0000a0a0a0a000a0ULL, + 0x0000a1a1a1a100a1ULL, 0x0000626262620062ULL, 0x0000545454540054ULL, + 0x00001e1e1e1e001eULL, 0x0000e0e0e0e000e0ULL, 0x0000646464640064ULL, + 0x0000101010100010ULL, 0x0000000000000000ULL, 0x0000a3a3a3a300a3ULL, + 0x0000757575750075ULL, 0x00008a8a8a8a008aULL, 0x0000e6e6e6e600e6ULL, + 0x0000090909090009ULL, 0x0000dddddddd00ddULL, 0x0000878787870087ULL, + 0x0000838383830083ULL, 0x0000cdcdcdcd00cdULL, 0x0000909090900090ULL, + 0x0000737373730073ULL, 0x0000f6f6f6f600f6ULL, 0x00009d9d9d9d009dULL, + 0x0000bfbfbfbf00bfULL, 0x0000525252520052ULL, 0x0000d8d8d8d800d8ULL, + 0x0000c8c8c8c800c8ULL, 0x0000c6c6c6c600c6ULL, 0x0000818181810081ULL, + 0x00006f6f6f6f006fULL, 0x0000131313130013ULL, 0x0000636363630063ULL, + 0x0000e9e9e9e900e9ULL, 0x0000a7a7a7a700a7ULL, 0x00009f9f9f9f009fULL, + 0x0000bcbcbcbc00bcULL, 0x0000292929290029ULL, 0x0000f9f9f9f900f9ULL, + 0x00002f2f2f2f002fULL, 0x0000b4b4b4b400b4ULL, 0x0000787878780078ULL, + 0x0000060606060006ULL, 0x0000e7e7e7e700e7ULL, 0x0000717171710071ULL, + 0x0000d4d4d4d400d4ULL, 0x0000abababab00abULL, 0x0000888888880088ULL, + 0x00008d8d8d8d008dULL, 0x0000727272720072ULL, 0x0000b9b9b9b900b9ULL, + 0x0000f8f8f8f800f8ULL, 0x0000acacacac00acULL, 0x0000363636360036ULL, + 0x00002a2a2a2a002aULL, 0x00003c3c3c3c003cULL, 0x0000f1f1f1f100f1ULL, + 0x0000404040400040ULL, 0x0000d3d3d3d300d3ULL, 0x0000bbbbbbbb00bbULL, + 0x0000434343430043ULL, 0x0000151515150015ULL, 0x0000adadadad00adULL, + 0x0000777777770077ULL, 0x0000808080800080ULL, 
0x0000828282820082ULL, + 0x0000ecececec00ecULL, 0x0000272727270027ULL, 0x0000e5e5e5e500e5ULL, + 0x0000858585850085ULL, 0x0000353535350035ULL, 0x00000c0c0c0c000cULL, + 0x0000414141410041ULL, 0x0000efefefef00efULL, 0x0000939393930093ULL, + 0x0000191919190019ULL, 0x0000212121210021ULL, 0x00000e0e0e0e000eULL, + 0x00004e4e4e4e004eULL, 0x0000656565650065ULL, 0x0000bdbdbdbd00bdULL, + 0x0000b8b8b8b800b8ULL, 0x00008f8f8f8f008fULL, 0x0000ebebebeb00ebULL, + 0x0000cececece00ceULL, 0x0000303030300030ULL, 0x00005f5f5f5f005fULL, + 0x0000c5c5c5c500c5ULL, 0x00001a1a1a1a001aULL, 0x0000e1e1e1e100e1ULL, + 0x0000cacacaca00caULL, 0x0000474747470047ULL, 0x00003d3d3d3d003dULL, + 0x0000010101010001ULL, 0x0000d6d6d6d600d6ULL, 0x0000565656560056ULL, + 0x00004d4d4d4d004dULL, 0x00000d0d0d0d000dULL, 0x0000666666660066ULL, + 0x0000cccccccc00ccULL, 0x00002d2d2d2d002dULL, 0x0000121212120012ULL, + 0x0000202020200020ULL, 0x0000b1b1b1b100b1ULL, 0x0000999999990099ULL, + 0x00004c4c4c4c004cULL, 0x0000c2c2c2c200c2ULL, 0x00007e7e7e7e007eULL, + 0x0000050505050005ULL, 0x0000b7b7b7b700b7ULL, 0x0000313131310031ULL, + 0x0000171717170017ULL, 0x0000d7d7d7d700d7ULL, 0x0000585858580058ULL, + 0x0000616161610061ULL, 0x00001b1b1b1b001bULL, 0x00001c1c1c1c001cULL, + 0x00000f0f0f0f000fULL, 0x0000161616160016ULL, 0x0000181818180018ULL, + 0x0000222222220022ULL, 0x0000444444440044ULL, 0x0000b2b2b2b200b2ULL, + 0x0000b5b5b5b500b5ULL, 0x0000919191910091ULL, 0x0000080808080008ULL, + 0x0000a8a8a8a800a8ULL, 0x0000fcfcfcfc00fcULL, 0x0000505050500050ULL, + 0x0000d0d0d0d000d0ULL, 0x00007d7d7d7d007dULL, 0x0000898989890089ULL, + 0x0000979797970097ULL, 0x00005b5b5b5b005bULL, 0x0000959595950095ULL, + 0x0000ffffffff00ffULL, 0x0000d2d2d2d200d2ULL, 0x0000c4c4c4c400c4ULL, + 0x0000484848480048ULL, 0x0000f7f7f7f700f7ULL, 0x0000dbdbdbdb00dbULL, + 0x0000030303030003ULL, 0x0000dadadada00daULL, 0x00003f3f3f3f003fULL, + 0x0000949494940094ULL, 0x00005c5c5c5c005cULL, 0x0000020202020002ULL, + 0x00004a4a4a4a004aULL, 0x0000333333330033ULL, 0x0000676767670067ULL, + 0x0000f3f3f3f300f3ULL, 0x00007f7f7f7f007fULL, 0x0000e2e2e2e200e2ULL, + 0x00009b9b9b9b009bULL, 0x0000262626260026ULL, 0x0000373737370037ULL, + 0x00003b3b3b3b003bULL, 0x0000969696960096ULL, 0x00004b4b4b4b004bULL, + 0x0000bebebebe00beULL, 0x00002e2e2e2e002eULL, 0x0000797979790079ULL, + 0x00008c8c8c8c008cULL, 0x00006e6e6e6e006eULL, 0x00008e8e8e8e008eULL, + 0x0000f5f5f5f500f5ULL, 0x0000b6b6b6b600b6ULL, 0x0000fdfdfdfd00fdULL, + 0x0000595959590059ULL, 0x0000989898980098ULL, 0x00006a6a6a6a006aULL, + 0x0000464646460046ULL, 0x0000babababa00baULL, 0x0000252525250025ULL, + 0x0000424242420042ULL, 0x0000a2a2a2a200a2ULL, 0x0000fafafafa00faULL, + 0x0000070707070007ULL, 0x0000555555550055ULL, 0x0000eeeeeeee00eeULL, + 0x00000a0a0a0a000aULL, 0x0000494949490049ULL, 0x0000686868680068ULL, + 0x0000383838380038ULL, 0x0000a4a4a4a400a4ULL, 0x0000282828280028ULL, + 0x00007b7b7b7b007bULL, 0x0000c9c9c9c900c9ULL, 0x0000c1c1c1c100c1ULL, + 0x0000e3e3e3e300e3ULL, 0x0000f4f4f4f400f4ULL, 0x0000c7c7c7c700c7ULL, + 0x00009e9e9e9e009eULL, +}; + +const uint64 camellia_sp02220222[256] = { + 0x00e0e0e000e0e0e0ULL, 0x0005050500050505ULL, 0x0058585800585858ULL, + 0x00d9d9d900d9d9d9ULL, 0x0067676700676767ULL, 0x004e4e4e004e4e4eULL, + 0x0081818100818181ULL, 0x00cbcbcb00cbcbcbULL, 0x00c9c9c900c9c9c9ULL, + 0x000b0b0b000b0b0bULL, 0x00aeaeae00aeaeaeULL, 0x006a6a6a006a6a6aULL, + 0x00d5d5d500d5d5d5ULL, 0x0018181800181818ULL, 0x005d5d5d005d5d5dULL, + 0x0082828200828282ULL, 0x0046464600464646ULL, 0x00dfdfdf00dfdfdfULL, + 0x00d6d6d600d6d6d6ULL, 0x0027272700272727ULL, 
0x008a8a8a008a8a8aULL, + 0x0032323200323232ULL, 0x004b4b4b004b4b4bULL, 0x0042424200424242ULL, + 0x00dbdbdb00dbdbdbULL, 0x001c1c1c001c1c1cULL, 0x009e9e9e009e9e9eULL, + 0x009c9c9c009c9c9cULL, 0x003a3a3a003a3a3aULL, 0x00cacaca00cacacaULL, + 0x0025252500252525ULL, 0x007b7b7b007b7b7bULL, 0x000d0d0d000d0d0dULL, + 0x0071717100717171ULL, 0x005f5f5f005f5f5fULL, 0x001f1f1f001f1f1fULL, + 0x00f8f8f800f8f8f8ULL, 0x00d7d7d700d7d7d7ULL, 0x003e3e3e003e3e3eULL, + 0x009d9d9d009d9d9dULL, 0x007c7c7c007c7c7cULL, 0x0060606000606060ULL, + 0x00b9b9b900b9b9b9ULL, 0x00bebebe00bebebeULL, 0x00bcbcbc00bcbcbcULL, + 0x008b8b8b008b8b8bULL, 0x0016161600161616ULL, 0x0034343400343434ULL, + 0x004d4d4d004d4d4dULL, 0x00c3c3c300c3c3c3ULL, 0x0072727200727272ULL, + 0x0095959500959595ULL, 0x00ababab00abababULL, 0x008e8e8e008e8e8eULL, + 0x00bababa00bababaULL, 0x007a7a7a007a7a7aULL, 0x00b3b3b300b3b3b3ULL, + 0x0002020200020202ULL, 0x00b4b4b400b4b4b4ULL, 0x00adadad00adadadULL, + 0x00a2a2a200a2a2a2ULL, 0x00acacac00acacacULL, 0x00d8d8d800d8d8d8ULL, + 0x009a9a9a009a9a9aULL, 0x0017171700171717ULL, 0x001a1a1a001a1a1aULL, + 0x0035353500353535ULL, 0x00cccccc00ccccccULL, 0x00f7f7f700f7f7f7ULL, + 0x0099999900999999ULL, 0x0061616100616161ULL, 0x005a5a5a005a5a5aULL, + 0x00e8e8e800e8e8e8ULL, 0x0024242400242424ULL, 0x0056565600565656ULL, + 0x0040404000404040ULL, 0x00e1e1e100e1e1e1ULL, 0x0063636300636363ULL, + 0x0009090900090909ULL, 0x0033333300333333ULL, 0x00bfbfbf00bfbfbfULL, + 0x0098989800989898ULL, 0x0097979700979797ULL, 0x0085858500858585ULL, + 0x0068686800686868ULL, 0x00fcfcfc00fcfcfcULL, 0x00ececec00ecececULL, + 0x000a0a0a000a0a0aULL, 0x00dadada00dadadaULL, 0x006f6f6f006f6f6fULL, + 0x0053535300535353ULL, 0x0062626200626262ULL, 0x00a3a3a300a3a3a3ULL, + 0x002e2e2e002e2e2eULL, 0x0008080800080808ULL, 0x00afafaf00afafafULL, + 0x0028282800282828ULL, 0x00b0b0b000b0b0b0ULL, 0x0074747400747474ULL, + 0x00c2c2c200c2c2c2ULL, 0x00bdbdbd00bdbdbdULL, 0x0036363600363636ULL, + 0x0022222200222222ULL, 0x0038383800383838ULL, 0x0064646400646464ULL, + 0x001e1e1e001e1e1eULL, 0x0039393900393939ULL, 0x002c2c2c002c2c2cULL, + 0x00a6a6a600a6a6a6ULL, 0x0030303000303030ULL, 0x00e5e5e500e5e5e5ULL, + 0x0044444400444444ULL, 0x00fdfdfd00fdfdfdULL, 0x0088888800888888ULL, + 0x009f9f9f009f9f9fULL, 0x0065656500656565ULL, 0x0087878700878787ULL, + 0x006b6b6b006b6b6bULL, 0x00f4f4f400f4f4f4ULL, 0x0023232300232323ULL, + 0x0048484800484848ULL, 0x0010101000101010ULL, 0x00d1d1d100d1d1d1ULL, + 0x0051515100515151ULL, 0x00c0c0c000c0c0c0ULL, 0x00f9f9f900f9f9f9ULL, + 0x00d2d2d200d2d2d2ULL, 0x00a0a0a000a0a0a0ULL, 0x0055555500555555ULL, + 0x00a1a1a100a1a1a1ULL, 0x0041414100414141ULL, 0x00fafafa00fafafaULL, + 0x0043434300434343ULL, 0x0013131300131313ULL, 0x00c4c4c400c4c4c4ULL, + 0x002f2f2f002f2f2fULL, 0x00a8a8a800a8a8a8ULL, 0x00b6b6b600b6b6b6ULL, + 0x003c3c3c003c3c3cULL, 0x002b2b2b002b2b2bULL, 0x00c1c1c100c1c1c1ULL, + 0x00ffffff00ffffffULL, 0x00c8c8c800c8c8c8ULL, 0x00a5a5a500a5a5a5ULL, + 0x0020202000202020ULL, 0x0089898900898989ULL, 0x0000000000000000ULL, + 0x0090909000909090ULL, 0x0047474700474747ULL, 0x00efefef00efefefULL, + 0x00eaeaea00eaeaeaULL, 0x00b7b7b700b7b7b7ULL, 0x0015151500151515ULL, + 0x0006060600060606ULL, 0x00cdcdcd00cdcdcdULL, 0x00b5b5b500b5b5b5ULL, + 0x0012121200121212ULL, 0x007e7e7e007e7e7eULL, 0x00bbbbbb00bbbbbbULL, + 0x0029292900292929ULL, 0x000f0f0f000f0f0fULL, 0x00b8b8b800b8b8b8ULL, + 0x0007070700070707ULL, 0x0004040400040404ULL, 0x009b9b9b009b9b9bULL, + 0x0094949400949494ULL, 0x0021212100212121ULL, 0x0066666600666666ULL, + 0x00e6e6e600e6e6e6ULL, 0x00cecece00cececeULL, 
0x00ededed00edededULL, + 0x00e7e7e700e7e7e7ULL, 0x003b3b3b003b3b3bULL, 0x00fefefe00fefefeULL, + 0x007f7f7f007f7f7fULL, 0x00c5c5c500c5c5c5ULL, 0x00a4a4a400a4a4a4ULL, + 0x0037373700373737ULL, 0x00b1b1b100b1b1b1ULL, 0x004c4c4c004c4c4cULL, + 0x0091919100919191ULL, 0x006e6e6e006e6e6eULL, 0x008d8d8d008d8d8dULL, + 0x0076767600767676ULL, 0x0003030300030303ULL, 0x002d2d2d002d2d2dULL, + 0x00dedede00dededeULL, 0x0096969600969696ULL, 0x0026262600262626ULL, + 0x007d7d7d007d7d7dULL, 0x00c6c6c600c6c6c6ULL, 0x005c5c5c005c5c5cULL, + 0x00d3d3d300d3d3d3ULL, 0x00f2f2f200f2f2f2ULL, 0x004f4f4f004f4f4fULL, + 0x0019191900191919ULL, 0x003f3f3f003f3f3fULL, 0x00dcdcdc00dcdcdcULL, + 0x0079797900797979ULL, 0x001d1d1d001d1d1dULL, 0x0052525200525252ULL, + 0x00ebebeb00ebebebULL, 0x00f3f3f300f3f3f3ULL, 0x006d6d6d006d6d6dULL, + 0x005e5e5e005e5e5eULL, 0x00fbfbfb00fbfbfbULL, 0x0069696900696969ULL, + 0x00b2b2b200b2b2b2ULL, 0x00f0f0f000f0f0f0ULL, 0x0031313100313131ULL, + 0x000c0c0c000c0c0cULL, 0x00d4d4d400d4d4d4ULL, 0x00cfcfcf00cfcfcfULL, + 0x008c8c8c008c8c8cULL, 0x00e2e2e200e2e2e2ULL, 0x0075757500757575ULL, + 0x00a9a9a900a9a9a9ULL, 0x004a4a4a004a4a4aULL, 0x0057575700575757ULL, + 0x0084848400848484ULL, 0x0011111100111111ULL, 0x0045454500454545ULL, + 0x001b1b1b001b1b1bULL, 0x00f5f5f500f5f5f5ULL, 0x00e4e4e400e4e4e4ULL, + 0x000e0e0e000e0e0eULL, 0x0073737300737373ULL, 0x00aaaaaa00aaaaaaULL, + 0x00f1f1f100f1f1f1ULL, 0x00dddddd00ddddddULL, 0x0059595900595959ULL, + 0x0014141400141414ULL, 0x006c6c6c006c6c6cULL, 0x0092929200929292ULL, + 0x0054545400545454ULL, 0x00d0d0d000d0d0d0ULL, 0x0078787800787878ULL, + 0x0070707000707070ULL, 0x00e3e3e300e3e3e3ULL, 0x0049494900494949ULL, + 0x0080808000808080ULL, 0x0050505000505050ULL, 0x00a7a7a700a7a7a7ULL, + 0x00f6f6f600f6f6f6ULL, 0x0077777700777777ULL, 0x0093939300939393ULL, + 0x0086868600868686ULL, 0x0083838300838383ULL, 0x002a2a2a002a2a2aULL, + 0x00c7c7c700c7c7c7ULL, 0x005b5b5b005b5b5bULL, 0x00e9e9e900e9e9e9ULL, + 0x00eeeeee00eeeeeeULL, 0x008f8f8f008f8f8fULL, 0x0001010100010101ULL, + 0x003d3d3d003d3d3dULL, +}; + +const uint64 camellia_sp30333033[256] = { + 0x3800383838003838ULL, 0x4100414141004141ULL, 0x1600161616001616ULL, + 0x7600767676007676ULL, 0xd900d9d9d900d9d9ULL, 0x9300939393009393ULL, + 0x6000606060006060ULL, 0xf200f2f2f200f2f2ULL, 0x7200727272007272ULL, + 0xc200c2c2c200c2c2ULL, 0xab00ababab00ababULL, 0x9a009a9a9a009a9aULL, + 0x7500757575007575ULL, 0x0600060606000606ULL, 0x5700575757005757ULL, + 0xa000a0a0a000a0a0ULL, 0x9100919191009191ULL, 0xf700f7f7f700f7f7ULL, + 0xb500b5b5b500b5b5ULL, 0xc900c9c9c900c9c9ULL, 0xa200a2a2a200a2a2ULL, + 0x8c008c8c8c008c8cULL, 0xd200d2d2d200d2d2ULL, 0x9000909090009090ULL, + 0xf600f6f6f600f6f6ULL, 0x0700070707000707ULL, 0xa700a7a7a700a7a7ULL, + 0x2700272727002727ULL, 0x8e008e8e8e008e8eULL, 0xb200b2b2b200b2b2ULL, + 0x4900494949004949ULL, 0xde00dedede00dedeULL, 0x4300434343004343ULL, + 0x5c005c5c5c005c5cULL, 0xd700d7d7d700d7d7ULL, 0xc700c7c7c700c7c7ULL, + 0x3e003e3e3e003e3eULL, 0xf500f5f5f500f5f5ULL, 0x8f008f8f8f008f8fULL, + 0x6700676767006767ULL, 0x1f001f1f1f001f1fULL, 0x1800181818001818ULL, + 0x6e006e6e6e006e6eULL, 0xaf00afafaf00afafULL, 0x2f002f2f2f002f2fULL, + 0xe200e2e2e200e2e2ULL, 0x8500858585008585ULL, 0x0d000d0d0d000d0dULL, + 0x5300535353005353ULL, 0xf000f0f0f000f0f0ULL, 0x9c009c9c9c009c9cULL, + 0x6500656565006565ULL, 0xea00eaeaea00eaeaULL, 0xa300a3a3a300a3a3ULL, + 0xae00aeaeae00aeaeULL, 0x9e009e9e9e009e9eULL, 0xec00ececec00ececULL, + 0x8000808080008080ULL, 0x2d002d2d2d002d2dULL, 0x6b006b6b6b006b6bULL, + 0xa800a8a8a800a8a8ULL, 0x2b002b2b2b002b2bULL, 
0x3600363636003636ULL, + 0xa600a6a6a600a6a6ULL, 0xc500c5c5c500c5c5ULL, 0x8600868686008686ULL, + 0x4d004d4d4d004d4dULL, 0x3300333333003333ULL, 0xfd00fdfdfd00fdfdULL, + 0x6600666666006666ULL, 0x5800585858005858ULL, 0x9600969696009696ULL, + 0x3a003a3a3a003a3aULL, 0x0900090909000909ULL, 0x9500959595009595ULL, + 0x1000101010001010ULL, 0x7800787878007878ULL, 0xd800d8d8d800d8d8ULL, + 0x4200424242004242ULL, 0xcc00cccccc00ccccULL, 0xef00efefef00efefULL, + 0x2600262626002626ULL, 0xe500e5e5e500e5e5ULL, 0x6100616161006161ULL, + 0x1a001a1a1a001a1aULL, 0x3f003f3f3f003f3fULL, 0x3b003b3b3b003b3bULL, + 0x8200828282008282ULL, 0xb600b6b6b600b6b6ULL, 0xdb00dbdbdb00dbdbULL, + 0xd400d4d4d400d4d4ULL, 0x9800989898009898ULL, 0xe800e8e8e800e8e8ULL, + 0x8b008b8b8b008b8bULL, 0x0200020202000202ULL, 0xeb00ebebeb00ebebULL, + 0x0a000a0a0a000a0aULL, 0x2c002c2c2c002c2cULL, 0x1d001d1d1d001d1dULL, + 0xb000b0b0b000b0b0ULL, 0x6f006f6f6f006f6fULL, 0x8d008d8d8d008d8dULL, + 0x8800888888008888ULL, 0x0e000e0e0e000e0eULL, 0x1900191919001919ULL, + 0x8700878787008787ULL, 0x4e004e4e4e004e4eULL, 0x0b000b0b0b000b0bULL, + 0xa900a9a9a900a9a9ULL, 0x0c000c0c0c000c0cULL, 0x7900797979007979ULL, + 0x1100111111001111ULL, 0x7f007f7f7f007f7fULL, 0x2200222222002222ULL, + 0xe700e7e7e700e7e7ULL, 0x5900595959005959ULL, 0xe100e1e1e100e1e1ULL, + 0xda00dadada00dadaULL, 0x3d003d3d3d003d3dULL, 0xc800c8c8c800c8c8ULL, + 0x1200121212001212ULL, 0x0400040404000404ULL, 0x7400747474007474ULL, + 0x5400545454005454ULL, 0x3000303030003030ULL, 0x7e007e7e7e007e7eULL, + 0xb400b4b4b400b4b4ULL, 0x2800282828002828ULL, 0x5500555555005555ULL, + 0x6800686868006868ULL, 0x5000505050005050ULL, 0xbe00bebebe00bebeULL, + 0xd000d0d0d000d0d0ULL, 0xc400c4c4c400c4c4ULL, 0x3100313131003131ULL, + 0xcb00cbcbcb00cbcbULL, 0x2a002a2a2a002a2aULL, 0xad00adadad00adadULL, + 0x0f000f0f0f000f0fULL, 0xca00cacaca00cacaULL, 0x7000707070007070ULL, + 0xff00ffffff00ffffULL, 0x3200323232003232ULL, 0x6900696969006969ULL, + 0x0800080808000808ULL, 0x6200626262006262ULL, 0x0000000000000000ULL, + 0x2400242424002424ULL, 0xd100d1d1d100d1d1ULL, 0xfb00fbfbfb00fbfbULL, + 0xba00bababa00babaULL, 0xed00ededed00ededULL, 0x4500454545004545ULL, + 0x8100818181008181ULL, 0x7300737373007373ULL, 0x6d006d6d6d006d6dULL, + 0x8400848484008484ULL, 0x9f009f9f9f009f9fULL, 0xee00eeeeee00eeeeULL, + 0x4a004a4a4a004a4aULL, 0xc300c3c3c300c3c3ULL, 0x2e002e2e2e002e2eULL, + 0xc100c1c1c100c1c1ULL, 0x0100010101000101ULL, 0xe600e6e6e600e6e6ULL, + 0x2500252525002525ULL, 0x4800484848004848ULL, 0x9900999999009999ULL, + 0xb900b9b9b900b9b9ULL, 0xb300b3b3b300b3b3ULL, 0x7b007b7b7b007b7bULL, + 0xf900f9f9f900f9f9ULL, 0xce00cecece00ceceULL, 0xbf00bfbfbf00bfbfULL, + 0xdf00dfdfdf00dfdfULL, 0x7100717171007171ULL, 0x2900292929002929ULL, + 0xcd00cdcdcd00cdcdULL, 0x6c006c6c6c006c6cULL, 0x1300131313001313ULL, + 0x6400646464006464ULL, 0x9b009b9b9b009b9bULL, 0x6300636363006363ULL, + 0x9d009d9d9d009d9dULL, 0xc000c0c0c000c0c0ULL, 0x4b004b4b4b004b4bULL, + 0xb700b7b7b700b7b7ULL, 0xa500a5a5a500a5a5ULL, 0x8900898989008989ULL, + 0x5f005f5f5f005f5fULL, 0xb100b1b1b100b1b1ULL, 0x1700171717001717ULL, + 0xf400f4f4f400f4f4ULL, 0xbc00bcbcbc00bcbcULL, 0xd300d3d3d300d3d3ULL, + 0x4600464646004646ULL, 0xcf00cfcfcf00cfcfULL, 0x3700373737003737ULL, + 0x5e005e5e5e005e5eULL, 0x4700474747004747ULL, 0x9400949494009494ULL, + 0xfa00fafafa00fafaULL, 0xfc00fcfcfc00fcfcULL, 0x5b005b5b5b005b5bULL, + 0x9700979797009797ULL, 0xfe00fefefe00fefeULL, 0x5a005a5a5a005a5aULL, + 0xac00acacac00acacULL, 0x3c003c3c3c003c3cULL, 0x4c004c4c4c004c4cULL, + 0x0300030303000303ULL, 0x3500353535003535ULL, 
0xf300f3f3f300f3f3ULL, + 0x2300232323002323ULL, 0xb800b8b8b800b8b8ULL, 0x5d005d5d5d005d5dULL, + 0x6a006a6a6a006a6aULL, 0x9200929292009292ULL, 0xd500d5d5d500d5d5ULL, + 0x2100212121002121ULL, 0x4400444444004444ULL, 0x5100515151005151ULL, + 0xc600c6c6c600c6c6ULL, 0x7d007d7d7d007d7dULL, 0x3900393939003939ULL, + 0x8300838383008383ULL, 0xdc00dcdcdc00dcdcULL, 0xaa00aaaaaa00aaaaULL, + 0x7c007c7c7c007c7cULL, 0x7700777777007777ULL, 0x5600565656005656ULL, + 0x0500050505000505ULL, 0x1b001b1b1b001b1bULL, 0xa400a4a4a400a4a4ULL, + 0x1500151515001515ULL, 0x3400343434003434ULL, 0x1e001e1e1e001e1eULL, + 0x1c001c1c1c001c1cULL, 0xf800f8f8f800f8f8ULL, 0x5200525252005252ULL, + 0x2000202020002020ULL, 0x1400141414001414ULL, 0xe900e9e9e900e9e9ULL, + 0xbd00bdbdbd00bdbdULL, 0xdd00dddddd00ddddULL, 0xe400e4e4e400e4e4ULL, + 0xa100a1a1a100a1a1ULL, 0xe000e0e0e000e0e0ULL, 0x8a008a8a8a008a8aULL, + 0xf100f1f1f100f1f1ULL, 0xd600d6d6d600d6d6ULL, 0x7a007a7a7a007a7aULL, + 0xbb00bbbbbb00bbbbULL, 0xe300e3e3e300e3e3ULL, 0x4000404040004040ULL, + 0x4f004f4f4f004f4fULL, +}; + +const uint64 camellia_sp44044404[256] = { + 0x7070007070700070ULL, 0x2c2c002c2c2c002cULL, 0xb3b300b3b3b300b3ULL, + 0xc0c000c0c0c000c0ULL, 0xe4e400e4e4e400e4ULL, 0x5757005757570057ULL, + 0xeaea00eaeaea00eaULL, 0xaeae00aeaeae00aeULL, 0x2323002323230023ULL, + 0x6b6b006b6b6b006bULL, 0x4545004545450045ULL, 0xa5a500a5a5a500a5ULL, + 0xeded00ededed00edULL, 0x4f4f004f4f4f004fULL, 0x1d1d001d1d1d001dULL, + 0x9292009292920092ULL, 0x8686008686860086ULL, 0xafaf00afafaf00afULL, + 0x7c7c007c7c7c007cULL, 0x1f1f001f1f1f001fULL, 0x3e3e003e3e3e003eULL, + 0xdcdc00dcdcdc00dcULL, 0x5e5e005e5e5e005eULL, 0x0b0b000b0b0b000bULL, + 0xa6a600a6a6a600a6ULL, 0x3939003939390039ULL, 0xd5d500d5d5d500d5ULL, + 0x5d5d005d5d5d005dULL, 0xd9d900d9d9d900d9ULL, 0x5a5a005a5a5a005aULL, + 0x5151005151510051ULL, 0x6c6c006c6c6c006cULL, 0x8b8b008b8b8b008bULL, + 0x9a9a009a9a9a009aULL, 0xfbfb00fbfbfb00fbULL, 0xb0b000b0b0b000b0ULL, + 0x7474007474740074ULL, 0x2b2b002b2b2b002bULL, 0xf0f000f0f0f000f0ULL, + 0x8484008484840084ULL, 0xdfdf00dfdfdf00dfULL, 0xcbcb00cbcbcb00cbULL, + 0x3434003434340034ULL, 0x7676007676760076ULL, 0x6d6d006d6d6d006dULL, + 0xa9a900a9a9a900a9ULL, 0xd1d100d1d1d100d1ULL, 0x0404000404040004ULL, + 0x1414001414140014ULL, 0x3a3a003a3a3a003aULL, 0xdede00dedede00deULL, + 0x1111001111110011ULL, 0x3232003232320032ULL, 0x9c9c009c9c9c009cULL, + 0x5353005353530053ULL, 0xf2f200f2f2f200f2ULL, 0xfefe00fefefe00feULL, + 0xcfcf00cfcfcf00cfULL, 0xc3c300c3c3c300c3ULL, 0x7a7a007a7a7a007aULL, + 0x2424002424240024ULL, 0xe8e800e8e8e800e8ULL, 0x6060006060600060ULL, + 0x6969006969690069ULL, 0xaaaa00aaaaaa00aaULL, 0xa0a000a0a0a000a0ULL, + 0xa1a100a1a1a100a1ULL, 0x6262006262620062ULL, 0x5454005454540054ULL, + 0x1e1e001e1e1e001eULL, 0xe0e000e0e0e000e0ULL, 0x6464006464640064ULL, + 0x1010001010100010ULL, 0x0000000000000000ULL, 0xa3a300a3a3a300a3ULL, + 0x7575007575750075ULL, 0x8a8a008a8a8a008aULL, 0xe6e600e6e6e600e6ULL, + 0x0909000909090009ULL, 0xdddd00dddddd00ddULL, 0x8787008787870087ULL, + 0x8383008383830083ULL, 0xcdcd00cdcdcd00cdULL, 0x9090009090900090ULL, + 0x7373007373730073ULL, 0xf6f600f6f6f600f6ULL, 0x9d9d009d9d9d009dULL, + 0xbfbf00bfbfbf00bfULL, 0x5252005252520052ULL, 0xd8d800d8d8d800d8ULL, + 0xc8c800c8c8c800c8ULL, 0xc6c600c6c6c600c6ULL, 0x8181008181810081ULL, + 0x6f6f006f6f6f006fULL, 0x1313001313130013ULL, 0x6363006363630063ULL, + 0xe9e900e9e9e900e9ULL, 0xa7a700a7a7a700a7ULL, 0x9f9f009f9f9f009fULL, + 0xbcbc00bcbcbc00bcULL, 0x2929002929290029ULL, 0xf9f900f9f9f900f9ULL, + 0x2f2f002f2f2f002fULL, 0xb4b400b4b4b400b4ULL, 
0x7878007878780078ULL, + 0x0606000606060006ULL, 0xe7e700e7e7e700e7ULL, 0x7171007171710071ULL, + 0xd4d400d4d4d400d4ULL, 0xabab00ababab00abULL, 0x8888008888880088ULL, + 0x8d8d008d8d8d008dULL, 0x7272007272720072ULL, 0xb9b900b9b9b900b9ULL, + 0xf8f800f8f8f800f8ULL, 0xacac00acacac00acULL, 0x3636003636360036ULL, + 0x2a2a002a2a2a002aULL, 0x3c3c003c3c3c003cULL, 0xf1f100f1f1f100f1ULL, + 0x4040004040400040ULL, 0xd3d300d3d3d300d3ULL, 0xbbbb00bbbbbb00bbULL, + 0x4343004343430043ULL, 0x1515001515150015ULL, 0xadad00adadad00adULL, + 0x7777007777770077ULL, 0x8080008080800080ULL, 0x8282008282820082ULL, + 0xecec00ececec00ecULL, 0x2727002727270027ULL, 0xe5e500e5e5e500e5ULL, + 0x8585008585850085ULL, 0x3535003535350035ULL, 0x0c0c000c0c0c000cULL, + 0x4141004141410041ULL, 0xefef00efefef00efULL, 0x9393009393930093ULL, + 0x1919001919190019ULL, 0x2121002121210021ULL, 0x0e0e000e0e0e000eULL, + 0x4e4e004e4e4e004eULL, 0x6565006565650065ULL, 0xbdbd00bdbdbd00bdULL, + 0xb8b800b8b8b800b8ULL, 0x8f8f008f8f8f008fULL, 0xebeb00ebebeb00ebULL, + 0xcece00cecece00ceULL, 0x3030003030300030ULL, 0x5f5f005f5f5f005fULL, + 0xc5c500c5c5c500c5ULL, 0x1a1a001a1a1a001aULL, 0xe1e100e1e1e100e1ULL, + 0xcaca00cacaca00caULL, 0x4747004747470047ULL, 0x3d3d003d3d3d003dULL, + 0x0101000101010001ULL, 0xd6d600d6d6d600d6ULL, 0x5656005656560056ULL, + 0x4d4d004d4d4d004dULL, 0x0d0d000d0d0d000dULL, 0x6666006666660066ULL, + 0xcccc00cccccc00ccULL, 0x2d2d002d2d2d002dULL, 0x1212001212120012ULL, + 0x2020002020200020ULL, 0xb1b100b1b1b100b1ULL, 0x9999009999990099ULL, + 0x4c4c004c4c4c004cULL, 0xc2c200c2c2c200c2ULL, 0x7e7e007e7e7e007eULL, + 0x0505000505050005ULL, 0xb7b700b7b7b700b7ULL, 0x3131003131310031ULL, + 0x1717001717170017ULL, 0xd7d700d7d7d700d7ULL, 0x5858005858580058ULL, + 0x6161006161610061ULL, 0x1b1b001b1b1b001bULL, 0x1c1c001c1c1c001cULL, + 0x0f0f000f0f0f000fULL, 0x1616001616160016ULL, 0x1818001818180018ULL, + 0x2222002222220022ULL, 0x4444004444440044ULL, 0xb2b200b2b2b200b2ULL, + 0xb5b500b5b5b500b5ULL, 0x9191009191910091ULL, 0x0808000808080008ULL, + 0xa8a800a8a8a800a8ULL, 0xfcfc00fcfcfc00fcULL, 0x5050005050500050ULL, + 0xd0d000d0d0d000d0ULL, 0x7d7d007d7d7d007dULL, 0x8989008989890089ULL, + 0x9797009797970097ULL, 0x5b5b005b5b5b005bULL, 0x9595009595950095ULL, + 0xffff00ffffff00ffULL, 0xd2d200d2d2d200d2ULL, 0xc4c400c4c4c400c4ULL, + 0x4848004848480048ULL, 0xf7f700f7f7f700f7ULL, 0xdbdb00dbdbdb00dbULL, + 0x0303000303030003ULL, 0xdada00dadada00daULL, 0x3f3f003f3f3f003fULL, + 0x9494009494940094ULL, 0x5c5c005c5c5c005cULL, 0x0202000202020002ULL, + 0x4a4a004a4a4a004aULL, 0x3333003333330033ULL, 0x6767006767670067ULL, + 0xf3f300f3f3f300f3ULL, 0x7f7f007f7f7f007fULL, 0xe2e200e2e2e200e2ULL, + 0x9b9b009b9b9b009bULL, 0x2626002626260026ULL, 0x3737003737370037ULL, + 0x3b3b003b3b3b003bULL, 0x9696009696960096ULL, 0x4b4b004b4b4b004bULL, + 0xbebe00bebebe00beULL, 0x2e2e002e2e2e002eULL, 0x7979007979790079ULL, + 0x8c8c008c8c8c008cULL, 0x6e6e006e6e6e006eULL, 0x8e8e008e8e8e008eULL, + 0xf5f500f5f5f500f5ULL, 0xb6b600b6b6b600b6ULL, 0xfdfd00fdfdfd00fdULL, + 0x5959005959590059ULL, 0x9898009898980098ULL, 0x6a6a006a6a6a006aULL, + 0x4646004646460046ULL, 0xbaba00bababa00baULL, 0x2525002525250025ULL, + 0x4242004242420042ULL, 0xa2a200a2a2a200a2ULL, 0xfafa00fafafa00faULL, + 0x0707000707070007ULL, 0x5555005555550055ULL, 0xeeee00eeeeee00eeULL, + 0x0a0a000a0a0a000aULL, 0x4949004949490049ULL, 0x6868006868680068ULL, + 0x3838003838380038ULL, 0xa4a400a4a4a400a4ULL, 0x2828002828280028ULL, + 0x7b7b007b7b7b007bULL, 0xc9c900c9c9c900c9ULL, 0xc1c100c1c1c100c1ULL, + 0xe3e300e3e3e300e3ULL, 0xf4f400f4f4f400f4ULL, 
0xc7c700c7c7c700c7ULL, + 0x9e9e009e9e9e009eULL, +}; + +const uint64 camellia_sp11101110[256] = { + 0x7070700070707000ULL, 0x8282820082828200ULL, 0x2c2c2c002c2c2c00ULL, + 0xececec00ececec00ULL, 0xb3b3b300b3b3b300ULL, 0x2727270027272700ULL, + 0xc0c0c000c0c0c000ULL, 0xe5e5e500e5e5e500ULL, 0xe4e4e400e4e4e400ULL, + 0x8585850085858500ULL, 0x5757570057575700ULL, 0x3535350035353500ULL, + 0xeaeaea00eaeaea00ULL, 0x0c0c0c000c0c0c00ULL, 0xaeaeae00aeaeae00ULL, + 0x4141410041414100ULL, 0x2323230023232300ULL, 0xefefef00efefef00ULL, + 0x6b6b6b006b6b6b00ULL, 0x9393930093939300ULL, 0x4545450045454500ULL, + 0x1919190019191900ULL, 0xa5a5a500a5a5a500ULL, 0x2121210021212100ULL, + 0xededed00ededed00ULL, 0x0e0e0e000e0e0e00ULL, 0x4f4f4f004f4f4f00ULL, + 0x4e4e4e004e4e4e00ULL, 0x1d1d1d001d1d1d00ULL, 0x6565650065656500ULL, + 0x9292920092929200ULL, 0xbdbdbd00bdbdbd00ULL, 0x8686860086868600ULL, + 0xb8b8b800b8b8b800ULL, 0xafafaf00afafaf00ULL, 0x8f8f8f008f8f8f00ULL, + 0x7c7c7c007c7c7c00ULL, 0xebebeb00ebebeb00ULL, 0x1f1f1f001f1f1f00ULL, + 0xcecece00cecece00ULL, 0x3e3e3e003e3e3e00ULL, 0x3030300030303000ULL, + 0xdcdcdc00dcdcdc00ULL, 0x5f5f5f005f5f5f00ULL, 0x5e5e5e005e5e5e00ULL, + 0xc5c5c500c5c5c500ULL, 0x0b0b0b000b0b0b00ULL, 0x1a1a1a001a1a1a00ULL, + 0xa6a6a600a6a6a600ULL, 0xe1e1e100e1e1e100ULL, 0x3939390039393900ULL, + 0xcacaca00cacaca00ULL, 0xd5d5d500d5d5d500ULL, 0x4747470047474700ULL, + 0x5d5d5d005d5d5d00ULL, 0x3d3d3d003d3d3d00ULL, 0xd9d9d900d9d9d900ULL, + 0x0101010001010100ULL, 0x5a5a5a005a5a5a00ULL, 0xd6d6d600d6d6d600ULL, + 0x5151510051515100ULL, 0x5656560056565600ULL, 0x6c6c6c006c6c6c00ULL, + 0x4d4d4d004d4d4d00ULL, 0x8b8b8b008b8b8b00ULL, 0x0d0d0d000d0d0d00ULL, + 0x9a9a9a009a9a9a00ULL, 0x6666660066666600ULL, 0xfbfbfb00fbfbfb00ULL, + 0xcccccc00cccccc00ULL, 0xb0b0b000b0b0b000ULL, 0x2d2d2d002d2d2d00ULL, + 0x7474740074747400ULL, 0x1212120012121200ULL, 0x2b2b2b002b2b2b00ULL, + 0x2020200020202000ULL, 0xf0f0f000f0f0f000ULL, 0xb1b1b100b1b1b100ULL, + 0x8484840084848400ULL, 0x9999990099999900ULL, 0xdfdfdf00dfdfdf00ULL, + 0x4c4c4c004c4c4c00ULL, 0xcbcbcb00cbcbcb00ULL, 0xc2c2c200c2c2c200ULL, + 0x3434340034343400ULL, 0x7e7e7e007e7e7e00ULL, 0x7676760076767600ULL, + 0x0505050005050500ULL, 0x6d6d6d006d6d6d00ULL, 0xb7b7b700b7b7b700ULL, + 0xa9a9a900a9a9a900ULL, 0x3131310031313100ULL, 0xd1d1d100d1d1d100ULL, + 0x1717170017171700ULL, 0x0404040004040400ULL, 0xd7d7d700d7d7d700ULL, + 0x1414140014141400ULL, 0x5858580058585800ULL, 0x3a3a3a003a3a3a00ULL, + 0x6161610061616100ULL, 0xdedede00dedede00ULL, 0x1b1b1b001b1b1b00ULL, + 0x1111110011111100ULL, 0x1c1c1c001c1c1c00ULL, 0x3232320032323200ULL, + 0x0f0f0f000f0f0f00ULL, 0x9c9c9c009c9c9c00ULL, 0x1616160016161600ULL, + 0x5353530053535300ULL, 0x1818180018181800ULL, 0xf2f2f200f2f2f200ULL, + 0x2222220022222200ULL, 0xfefefe00fefefe00ULL, 0x4444440044444400ULL, + 0xcfcfcf00cfcfcf00ULL, 0xb2b2b200b2b2b200ULL, 0xc3c3c300c3c3c300ULL, + 0xb5b5b500b5b5b500ULL, 0x7a7a7a007a7a7a00ULL, 0x9191910091919100ULL, + 0x2424240024242400ULL, 0x0808080008080800ULL, 0xe8e8e800e8e8e800ULL, + 0xa8a8a800a8a8a800ULL, 0x6060600060606000ULL, 0xfcfcfc00fcfcfc00ULL, + 0x6969690069696900ULL, 0x5050500050505000ULL, 0xaaaaaa00aaaaaa00ULL, + 0xd0d0d000d0d0d000ULL, 0xa0a0a000a0a0a000ULL, 0x7d7d7d007d7d7d00ULL, + 0xa1a1a100a1a1a100ULL, 0x8989890089898900ULL, 0x6262620062626200ULL, + 0x9797970097979700ULL, 0x5454540054545400ULL, 0x5b5b5b005b5b5b00ULL, + 0x1e1e1e001e1e1e00ULL, 0x9595950095959500ULL, 0xe0e0e000e0e0e000ULL, + 0xffffff00ffffff00ULL, 0x6464640064646400ULL, 0xd2d2d200d2d2d200ULL, + 0x1010100010101000ULL, 0xc4c4c400c4c4c400ULL, 
0x0000000000000000ULL, + 0x4848480048484800ULL, 0xa3a3a300a3a3a300ULL, 0xf7f7f700f7f7f700ULL, + 0x7575750075757500ULL, 0xdbdbdb00dbdbdb00ULL, 0x8a8a8a008a8a8a00ULL, + 0x0303030003030300ULL, 0xe6e6e600e6e6e600ULL, 0xdadada00dadada00ULL, + 0x0909090009090900ULL, 0x3f3f3f003f3f3f00ULL, 0xdddddd00dddddd00ULL, + 0x9494940094949400ULL, 0x8787870087878700ULL, 0x5c5c5c005c5c5c00ULL, + 0x8383830083838300ULL, 0x0202020002020200ULL, 0xcdcdcd00cdcdcd00ULL, + 0x4a4a4a004a4a4a00ULL, 0x9090900090909000ULL, 0x3333330033333300ULL, + 0x7373730073737300ULL, 0x6767670067676700ULL, 0xf6f6f600f6f6f600ULL, + 0xf3f3f300f3f3f300ULL, 0x9d9d9d009d9d9d00ULL, 0x7f7f7f007f7f7f00ULL, + 0xbfbfbf00bfbfbf00ULL, 0xe2e2e200e2e2e200ULL, 0x5252520052525200ULL, + 0x9b9b9b009b9b9b00ULL, 0xd8d8d800d8d8d800ULL, 0x2626260026262600ULL, + 0xc8c8c800c8c8c800ULL, 0x3737370037373700ULL, 0xc6c6c600c6c6c600ULL, + 0x3b3b3b003b3b3b00ULL, 0x8181810081818100ULL, 0x9696960096969600ULL, + 0x6f6f6f006f6f6f00ULL, 0x4b4b4b004b4b4b00ULL, 0x1313130013131300ULL, + 0xbebebe00bebebe00ULL, 0x6363630063636300ULL, 0x2e2e2e002e2e2e00ULL, + 0xe9e9e900e9e9e900ULL, 0x7979790079797900ULL, 0xa7a7a700a7a7a700ULL, + 0x8c8c8c008c8c8c00ULL, 0x9f9f9f009f9f9f00ULL, 0x6e6e6e006e6e6e00ULL, + 0xbcbcbc00bcbcbc00ULL, 0x8e8e8e008e8e8e00ULL, 0x2929290029292900ULL, + 0xf5f5f500f5f5f500ULL, 0xf9f9f900f9f9f900ULL, 0xb6b6b600b6b6b600ULL, + 0x2f2f2f002f2f2f00ULL, 0xfdfdfd00fdfdfd00ULL, 0xb4b4b400b4b4b400ULL, + 0x5959590059595900ULL, 0x7878780078787800ULL, 0x9898980098989800ULL, + 0x0606060006060600ULL, 0x6a6a6a006a6a6a00ULL, 0xe7e7e700e7e7e700ULL, + 0x4646460046464600ULL, 0x7171710071717100ULL, 0xbababa00bababa00ULL, + 0xd4d4d400d4d4d400ULL, 0x2525250025252500ULL, 0xababab00ababab00ULL, + 0x4242420042424200ULL, 0x8888880088888800ULL, 0xa2a2a200a2a2a200ULL, + 0x8d8d8d008d8d8d00ULL, 0xfafafa00fafafa00ULL, 0x7272720072727200ULL, + 0x0707070007070700ULL, 0xb9b9b900b9b9b900ULL, 0x5555550055555500ULL, + 0xf8f8f800f8f8f800ULL, 0xeeeeee00eeeeee00ULL, 0xacacac00acacac00ULL, + 0x0a0a0a000a0a0a00ULL, 0x3636360036363600ULL, 0x4949490049494900ULL, + 0x2a2a2a002a2a2a00ULL, 0x6868680068686800ULL, 0x3c3c3c003c3c3c00ULL, + 0x3838380038383800ULL, 0xf1f1f100f1f1f100ULL, 0xa4a4a400a4a4a400ULL, + 0x4040400040404000ULL, 0x2828280028282800ULL, 0xd3d3d300d3d3d300ULL, + 0x7b7b7b007b7b7b00ULL, 0xbbbbbb00bbbbbb00ULL, 0xc9c9c900c9c9c900ULL, + 0x4343430043434300ULL, 0xc1c1c100c1c1c100ULL, 0x1515150015151500ULL, + 0xe3e3e300e3e3e300ULL, 0xadadad00adadad00ULL, 0xf4f4f400f4f4f400ULL, + 0x7777770077777700ULL, 0xc7c7c700c7c7c700ULL, 0x8080800080808000ULL, + 0x9e9e9e009e9e9e00ULL, +}; + + +/** + * Stuff related to the Camellia key schedule + */ +#define subl(x) subL[(x)] +#define subr(x) subR[(x)] + +void camellia_set_key(const unsigned __int8 key[], unsigned __int8 *ks) +{ + uint32 kll,klr,krl,krr; /* left half of key */ + uint32 krll,krlr,krrl,krrr; /* right half of key */ + uint32 il, ir, t0, t1, w0, w1; /* temporary variables */ + uint32 kw4l, kw4r, dw, tl, tr; + uint32 subL[34]; + uint32 subR[34]; + uint32 *subkey = (uint32*) ks; + /** + * key = (kll || klr || krl || krr || krll || krlr || krrl || krrr) + * (|| is concatination) + */ + kll = GETU32(key ); + klr = GETU32(key + 4); + krl = GETU32(key + 8); + krr = GETU32(key + 12); + krll = GETU32(key + 16); + krlr = GETU32(key + 20); + krrl = GETU32(key + 24); + krrr = GETU32(key + 28); + /* generate KL dependent subkeys */ + subl(0) = kll; subr(0) = klr; + subl(1) = krl; subr(1) = krr; + CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 45); + subl(12) = kll; 
subr(12) = klr; + subl(13) = krl; subr(13) = krr; + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); + subl(16) = kll; subr(16) = klr; + subl(17) = krl; subr(17) = krr; + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17); + subl(22) = kll; subr(22) = klr; + subl(23) = krl; subr(23) = krr; + CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 34); + subl(30) = kll; subr(30) = klr; + subl(31) = krl; subr(31) = krr; + /* generate KR dependent subkeys */ + CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15); + subl(4) = krll; subr(4) = krlr; + subl(5) = krrl; subr(5) = krrr; + CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15); + subl(8) = krll; subr(8) = krlr; + subl(9) = krrl; subr(9) = krrr; + CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); + subl(18) = krll; subr(18) = krlr; + subl(19) = krrl; subr(19) = krrr; + CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34); + subl(26) = krll; subr(26) = krlr; + subl(27) = krrl; subr(27) = krrr; + CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34); + /* generate KA */ + kll = subl(0) ^ krll; klr = subr(0) ^ krlr; + krl = subl(1) ^ krrl; krr = subr(1) ^ krrr; + CAMELLIA_F(kll, klr, + CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R, + w0, w1, il, ir, t0, t1); + krl ^= w0; krr ^= w1; + CAMELLIA_F(krl, krr, + CAMELLIA_SIGMA2L, CAMELLIA_SIGMA2R, + kll, klr, il, ir, t0, t1); + kll ^= krll; klr ^= krlr; + CAMELLIA_F(kll, klr, + CAMELLIA_SIGMA3L, CAMELLIA_SIGMA3R, + krl, krr, il, ir, t0, t1); + krl ^= w0 ^ krrl; krr ^= w1 ^ krrr; + CAMELLIA_F(krl, krr, + CAMELLIA_SIGMA4L, CAMELLIA_SIGMA4R, + w0, w1, il, ir, t0, t1); + kll ^= w0; klr ^= w1; + /* generate KB */ + krll ^= kll; krlr ^= klr; + krrl ^= krl; krrr ^= krr; + CAMELLIA_F(krll, krlr, + CAMELLIA_SIGMA5L, CAMELLIA_SIGMA5R, + w0, w1, il, ir, t0, t1); + krrl ^= w0; krrr ^= w1; + CAMELLIA_F(krrl, krrr, + CAMELLIA_SIGMA6L, CAMELLIA_SIGMA6R, + w0, w1, il, ir, t0, t1); + krll ^= w0; krlr ^= w1; + /* generate KA dependent subkeys */ + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); + subl(6) = kll; subr(6) = klr; + subl(7) = krl; subr(7) = krr; + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 30); + subl(14) = kll; subr(14) = klr; + subl(15) = krl; subr(15) = krr; + subl(24) = klr; subr(24) = krl; + subl(25) = krr; subr(25) = kll; + CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 49); + subl(28) = kll; subr(28) = klr; + subl(29) = krl; subr(29) = krr; + /* generate KB dependent subkeys */ + subl(2) = krll; subr(2) = krlr; + subl(3) = krrl; subr(3) = krrr; + CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); + subl(10) = krll; subr(10) = krlr; + subl(11) = krrl; subr(11) = krrr; + CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); + subl(20) = krll; subr(20) = krlr; + subl(21) = krrl; subr(21) = krrr; + CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 51); + subl(32) = krll; subr(32) = krlr; + subl(33) = krrl; subr(33) = krrr; + /* absorb kw2 to other subkeys */ + subl(3) ^= subl(1); subr(3) ^= subr(1); + subl(5) ^= subl(1); subr(5) ^= subr(1); + subl(7) ^= subl(1); subr(7) ^= subr(1); + subl(1) ^= subr(1) & ~subr(9); + dw = subl(1) & subl(9), subr(1) ^= CAMELLIA_RL1(dw); + subl(11) ^= subl(1); subr(11) ^= subr(1); + subl(13) ^= subl(1); subr(13) ^= subr(1); + subl(15) ^= subl(1); subr(15) ^= subr(1); + subl(1) ^= subr(1) & ~subr(17); + dw = subl(1) & subl(17), subr(1) ^= CAMELLIA_RL1(dw); + subl(19) ^= subl(1); subr(19) ^= subr(1); + subl(21) ^= subl(1); subr(21) ^= subr(1); + subl(23) ^= subl(1); subr(23) ^= subr(1); + subl(1) ^= subr(1) & ~subr(25); + dw = subl(1) & subl(25), subr(1) ^= CAMELLIA_RL1(dw); + subl(27) ^= subl(1); 
subr(27) ^= subr(1); + subl(29) ^= subl(1); subr(29) ^= subr(1); + subl(31) ^= subl(1); subr(31) ^= subr(1); + subl(32) ^= subl(1); subr(32) ^= subr(1); + /* absorb kw4 to other subkeys */ + kw4l = subl(33); kw4r = subr(33); + subl(30) ^= kw4l; subr(30) ^= kw4r; + subl(28) ^= kw4l; subr(28) ^= kw4r; + subl(26) ^= kw4l; subr(26) ^= kw4r; + kw4l ^= kw4r & ~subr(24); + dw = kw4l & subl(24), kw4r ^= CAMELLIA_RL1(dw); + subl(22) ^= kw4l; subr(22) ^= kw4r; + subl(20) ^= kw4l; subr(20) ^= kw4r; + subl(18) ^= kw4l; subr(18) ^= kw4r; + kw4l ^= kw4r & ~subr(16); + dw = kw4l & subl(16), kw4r ^= CAMELLIA_RL1(dw); + subl(14) ^= kw4l; subr(14) ^= kw4r; + subl(12) ^= kw4l; subr(12) ^= kw4r; + subl(10) ^= kw4l; subr(10) ^= kw4r; + kw4l ^= kw4r & ~subr(8); + dw = kw4l & subl(8), kw4r ^= CAMELLIA_RL1(dw); + subl(6) ^= kw4l; subr(6) ^= kw4r; + subl(4) ^= kw4l; subr(4) ^= kw4r; + subl(2) ^= kw4l; subr(2) ^= kw4r; + subl(0) ^= kw4l; subr(0) ^= kw4r; + /* key XOR is end of F-function */ + CamelliaSubkeyL(0) = subl(0) ^ subl(2); + CamelliaSubkeyR(0) = subr(0) ^ subr(2); + CamelliaSubkeyL(2) = subl(3); + CamelliaSubkeyR(2) = subr(3); + CamelliaSubkeyL(3) = subl(2) ^ subl(4); + CamelliaSubkeyR(3) = subr(2) ^ subr(4); + CamelliaSubkeyL(4) = subl(3) ^ subl(5); + CamelliaSubkeyR(4) = subr(3) ^ subr(5); + CamelliaSubkeyL(5) = subl(4) ^ subl(6); + CamelliaSubkeyR(5) = subr(4) ^ subr(6); + CamelliaSubkeyL(6) = subl(5) ^ subl(7); + CamelliaSubkeyR(6) = subr(5) ^ subr(7); + tl = subl(10) ^ (subr(10) & ~subr(8)); + dw = tl & subl(8), tr = subr(10) ^ CAMELLIA_RL1(dw); + CamelliaSubkeyL(7) = subl(6) ^ tl; + CamelliaSubkeyR(7) = subr(6) ^ tr; + CamelliaSubkeyL(8) = subl(8); + CamelliaSubkeyR(8) = subr(8); + CamelliaSubkeyL(9) = subl(9); + CamelliaSubkeyR(9) = subr(9); + tl = subl(7) ^ (subr(7) & ~subr(9)); + dw = tl & subl(9), tr = subr(7) ^ CAMELLIA_RL1(dw); + CamelliaSubkeyL(10) = tl ^ subl(11); + CamelliaSubkeyR(10) = tr ^ subr(11); + CamelliaSubkeyL(11) = subl(10) ^ subl(12); + CamelliaSubkeyR(11) = subr(10) ^ subr(12); + CamelliaSubkeyL(12) = subl(11) ^ subl(13); + CamelliaSubkeyR(12) = subr(11) ^ subr(13); + CamelliaSubkeyL(13) = subl(12) ^ subl(14); + CamelliaSubkeyR(13) = subr(12) ^ subr(14); + CamelliaSubkeyL(14) = subl(13) ^ subl(15); + CamelliaSubkeyR(14) = subr(13) ^ subr(15); + tl = subl(18) ^ (subr(18) & ~subr(16)); + dw = tl & subl(16), tr = subr(18) ^ CAMELLIA_RL1(dw); + CamelliaSubkeyL(15) = subl(14) ^ tl; + CamelliaSubkeyR(15) = subr(14) ^ tr; + CamelliaSubkeyL(16) = subl(16); + CamelliaSubkeyR(16) = subr(16); + CamelliaSubkeyL(17) = subl(17); + CamelliaSubkeyR(17) = subr(17); + tl = subl(15) ^ (subr(15) & ~subr(17)); + dw = tl & subl(17), tr = subr(15) ^ CAMELLIA_RL1(dw); + CamelliaSubkeyL(18) = tl ^ subl(19); + CamelliaSubkeyR(18) = tr ^ subr(19); + CamelliaSubkeyL(19) = subl(18) ^ subl(20); + CamelliaSubkeyR(19) = subr(18) ^ subr(20); + CamelliaSubkeyL(20) = subl(19) ^ subl(21); + CamelliaSubkeyR(20) = subr(19) ^ subr(21); + CamelliaSubkeyL(21) = subl(20) ^ subl(22); + CamelliaSubkeyR(21) = subr(20) ^ subr(22); + CamelliaSubkeyL(22) = subl(21) ^ subl(23); + CamelliaSubkeyR(22) = subr(21) ^ subr(23); + tl = subl(26) ^ (subr(26) & ~subr(24)); + dw = tl & subl(24), tr = subr(26) ^ CAMELLIA_RL1(dw); + CamelliaSubkeyL(23) = subl(22) ^ tl; + CamelliaSubkeyR(23) = subr(22) ^ tr; + CamelliaSubkeyL(24) = subl(24); + CamelliaSubkeyR(24) = subr(24); + CamelliaSubkeyL(25) = subl(25); + CamelliaSubkeyR(25) = subr(25); + tl = subl(23) ^ (subr(23) & ~subr(25)); + dw = tl & subl(25), tr = subr(23) ^ CAMELLIA_RL1(dw); + 
CamelliaSubkeyL(26) = tl ^ subl(27); + CamelliaSubkeyR(26) = tr ^ subr(27); + CamelliaSubkeyL(27) = subl(26) ^ subl(28); + CamelliaSubkeyR(27) = subr(26) ^ subr(28); + CamelliaSubkeyL(28) = subl(27) ^ subl(29); + CamelliaSubkeyR(28) = subr(27) ^ subr(29); + CamelliaSubkeyL(29) = subl(28) ^ subl(30); + CamelliaSubkeyR(29) = subr(28) ^ subr(30); + CamelliaSubkeyL(30) = subl(29) ^ subl(31); + CamelliaSubkeyR(30) = subr(29) ^ subr(31); + CamelliaSubkeyL(31) = subl(30); + CamelliaSubkeyR(31) = subr(30); + CamelliaSubkeyL(32) = subl(32) ^ subl(31); + CamelliaSubkeyR(32) = subr(32) ^ subr(31); + return; +} + +void camellia_encrypt(const unsigned __int8 *inBlock, unsigned __int8 *outBlock, unsigned __int8 *ks) +{ + camellia_encrypt_asm (ks, outBlock, inBlock); +} + +void camellia_decrypt(const unsigned __int8 *inBlock, unsigned __int8 *outBlock, unsigned __int8 *ks) +{ + camellia_decrypt_asm (ks, outBlock, inBlock); +} + +void camellia_encrypt_blocks(unsigned __int8 *instance, const uint8* in_blk, uint8* out_blk, uint32 blockCount) +{ +#if !defined (_UEFI) + if ((blockCount >= 16) && IsCpuIntel() && IsAesHwCpuSupported () && HasSAVX()) /* on AMD cpu, AVX is too slow */ + { +#if defined (TC_WINDOWS_DRIVER) + XSTATE_SAVE SaveState; + if (NT_SUCCESS (KeSaveExtendedProcessorState(XSTATE_MASK_GSSE, &SaveState))) + { +#endif + while (blockCount >= 16) + { + camellia_ecb_enc_16way (instance, out_blk, in_blk); + out_blk += 16 * 16; + in_blk += 16 * 16; + blockCount -= 16; + } +#if defined (TC_WINDOWS_DRIVER) + KeRestoreExtendedProcessorState(&SaveState); + } +#endif + } +#endif + + while (blockCount >= 2) + { + camellia_enc_blk2 (instance, out_blk, in_blk); + out_blk += 2 * 16; + in_blk += 2 * 16; + blockCount -= 2; + } + + if (blockCount) + camellia_encrypt (in_blk, out_blk, instance); +} + +void camellia_decrypt_blocks(unsigned __int8 *instance, const uint8* in_blk, uint8* out_blk, uint32 blockCount) +{ +#if !defined (_UEFI) + if ((blockCount >= 16) && IsCpuIntel() && IsAesHwCpuSupported () && HasSAVX()) /* on AMD cpu, AVX is too slow */ + { +#if defined (TC_WINDOWS_DRIVER) + XSTATE_SAVE SaveState; + if (NT_SUCCESS (KeSaveExtendedProcessorState(XSTATE_MASK_GSSE, &SaveState))) + { +#endif + while (blockCount >= 16) + { + camellia_ecb_dec_16way (instance, out_blk, in_blk); + out_blk += 16 * 16; + in_blk += 16 * 16; + blockCount -= 16; + } +#if defined (TC_WINDOWS_DRIVER) + KeRestoreExtendedProcessorState(&SaveState); + } +#endif + } +#endif + + while (blockCount >= 2) + { + camellia_dec_blk2 (instance, out_blk, in_blk); + out_blk += 2 * 16; + in_blk += 2 * 16; + blockCount -= 2; + } + + if (blockCount) + camellia_decrypt (in_blk, out_blk, instance); +} + +#else + +/* +This code is written by kerukuro for cppcrypto library (http://cppcrypto.sourceforge.net/) +and released into public domain. 
+*/ + +/* Adapted for VeraCrypt */ //#define CPPCRYPTO_DEBUG static const uint64 S[8][256] = { @@ -720,4 +1882,6 @@ void camellia_decrypt(const unsigned __int8 *in, unsigned __int8 *out, unsigned *(uint64*)out = bswap_64(l); *(uint64*)(out + 8) = bswap_64(r); } + +#endif diff --git a/src/Crypto/Camellia.h b/src/Crypto/Camellia.h index 75e89b5f..63c97476 100644 --- a/src/Crypto/Camellia.h +++ b/src/Crypto/Camellia.h @@ -1,8 +1,9 @@ #ifndef HEADER_Crypto_Camellia #define HEADER_Crypto_Camellia #include "Common/Tcdefs.h" +#include "config.h" #ifdef __cplusplus extern "C" { @@ -15,8 +16,13 @@ extern "C" void camellia_set_key(const unsigned __int8 userKey[], unsigned __int8 *ks); void camellia_encrypt(const unsigned __int8 *inBlock, unsigned __int8 *outBlock, unsigned __int8 *ks); void camellia_decrypt(const unsigned __int8 *inBlock, unsigned __int8 *outBlock, unsigned __int8 *ks); +#if CRYPTOPP_BOOL_X64 && !defined(CRYPTOPP_DISABLE_ASM) +void camellia_encrypt_blocks(unsigned __int8 *ks, const uint8* in_blk, uint8* out_blk, uint32 blockCount); +void camellia_decrypt_blocks(unsigned __int8 *ks, const uint8* in_blk, uint8* out_blk, uint32 blockCount); +#endif + #ifdef __cplusplus } #endif diff --git a/src/Crypto/CamelliaSmall.c b/src/Crypto/CamelliaSmall.c index 8ba7ed74..d6911d61 100644 --- a/src/Crypto/CamelliaSmall.c +++ b/src/Crypto/CamelliaSmall.c @@ -42,12 +42,12 @@ */ #ifndef GET_UINT32_BE #define GET_UINT32_BE(n,b,i) \ { \ - (n) = ( (unsigned __int32) (b)[(i) ] << 24 ) \ - | ( (unsigned __int32) (b)[(i) + 1] << 16 ) \ - | ( (unsigned __int32) (b)[(i) + 2] << 8 ) \ - | ( (unsigned __int32) (b)[(i) + 3] ); \ + (n) = ( (unsigned __int32) (b)[(i) ] << 24 ); \ + (n)|= ( (unsigned __int32) (b)[(i) + 1] << 16 ); \ + (n)|= ( (unsigned __int32) (b)[(i) + 2] << 8 ); \ + (n)|= ( (unsigned __int32) (b)[(i) + 3] ); \ } #endif #ifndef PUT_UINT32_BE diff --git a/src/Crypto/Camellia_aesni_x64.S b/src/Crypto/Camellia_aesni_x64.S new file mode 100644 index 00000000..f3008ee3 --- /dev/null +++ b/src/Crypto/Camellia_aesni_x64.S @@ -0,0 +1,340 @@ +/* camellia_aesni.S ver 1.2 + * + * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi> + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + * AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE + * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THIS SOFTWARE. + */ + + /* Adapted to VeraCrypt + - original file pre-processed using "gcc -E" + - added instructions at the beginning of each function to adapt to the Windows parameter-passing convention + - Fixed "'ADDR32' relocation" error when building the Windows driver by explicitly using %rip addressing when + accessing various locally defined data variables. This has no performance impact.
+ */ + +.data +.align 16 + + +.Lshufb_16x16b: + .byte 0 + (0), 4 + (0), 8 + (0), 12 + (0), 0 + (1), 4 + (1), 8 + (1), 12 + (1), 0 + (2), 4 + (2), 8 + (2), 12 + (2), 0 + (3), 4 + (3), 8 + (3), 12 + (3); + +.Lpack_bswap: + .long 0x00010203 + .long 0x04050607 + .long 0x80808080 + .long 0x80808080 + + +.Lbswap128_mask: + .byte 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 + +.Lpre_tf_lo_s1: + .byte 0x45, 0xe8, 0x40, 0xed, 0x2e, 0x83, 0x2b, 0x86 + .byte 0x4b, 0xe6, 0x4e, 0xe3, 0x20, 0x8d, 0x25, 0x88 +.Lpre_tf_hi_s1: + .byte 0x00, 0x51, 0xf1, 0xa0, 0x8a, 0xdb, 0x7b, 0x2a + .byte 0x09, 0x58, 0xf8, 0xa9, 0x83, 0xd2, 0x72, 0x23 + +.Lpre_tf_lo_s4: + .byte 0x45, 0x40, 0x2e, 0x2b, 0x4b, 0x4e, 0x20, 0x25 + .byte 0x14, 0x11, 0x7f, 0x7a, 0x1a, 0x1f, 0x71, 0x74 +.Lpre_tf_hi_s4: + .byte 0x00, 0xf1, 0x8a, 0x7b, 0x09, 0xf8, 0x83, 0x72 + .byte 0xad, 0x5c, 0x27, 0xd6, 0xa4, 0x55, 0x2e, 0xdf + +.Lpost_tf_lo_s1: + .byte 0x3c, 0xcc, 0xcf, 0x3f, 0x32, 0xc2, 0xc1, 0x31 + .byte 0xdc, 0x2c, 0x2f, 0xdf, 0xd2, 0x22, 0x21, 0xd1 +.Lpost_tf_hi_s1: + .byte 0x00, 0xf9, 0x86, 0x7f, 0xd7, 0x2e, 0x51, 0xa8 + .byte 0xa4, 0x5d, 0x22, 0xdb, 0x73, 0x8a, 0xf5, 0x0c + +.Lpost_tf_lo_s2: + .byte 0x78, 0x99, 0x9f, 0x7e, 0x64, 0x85, 0x83, 0x62 + .byte 0xb9, 0x58, 0x5e, 0xbf, 0xa5, 0x44, 0x42, 0xa3 +.Lpost_tf_hi_s2: + .byte 0x00, 0xf3, 0x0d, 0xfe, 0xaf, 0x5c, 0xa2, 0x51 + .byte 0x49, 0xba, 0x44, 0xb7, 0xe6, 0x15, 0xeb, 0x18 + +.Lpost_tf_lo_s3: + .byte 0x1e, 0x66, 0xe7, 0x9f, 0x19, 0x61, 0xe0, 0x98 + .byte 0x6e, 0x16, 0x97, 0xef, 0x69, 0x11, 0x90, 0xe8 +.Lpost_tf_hi_s3: + .byte 0x00, 0xfc, 0x43, 0xbf, 0xeb, 0x17, 0xa8, 0x54 + .byte 0x52, 0xae, 0x11, 0xed, 0xb9, 0x45, 0xfa, 0x06 + + +.Linv_shift_row: + .byte 0x00, 0x0d, 0x0a, 0x07, 0x04, 0x01, 0x0e, 0x0b + .byte 0x08, 0x05, 0x02, 0x0f, 0x0c, 0x09, 0x06, 0x03 + + +.align 4 + +.L0f0f0f0f: + .long 0x0f0f0f0f + +.text + +.align 8 + +__camellia_enc_blk16: + + leaq 8 * 16(%rax), %rcx; + + vmovdqu %xmm11, (%rax); vmovdqu %xmm15, (%rcx); vpunpckhdq %xmm4, %xmm0, %xmm15; vpunpckldq %xmm4, %xmm0, %xmm0; vpunpckldq %xmm12, %xmm8, %xmm11; vpunpckhdq %xmm12, %xmm8, %xmm8; vpunpckhqdq %xmm11, %xmm0, %xmm4; vpunpcklqdq %xmm11, %xmm0, %xmm0; vpunpckhqdq %xmm8, %xmm15, %xmm12; vpunpcklqdq %xmm8, %xmm15, %xmm8;; vpunpckhdq %xmm5, %xmm1, %xmm15; vpunpckldq %xmm5, %xmm1, %xmm1; vpunpckldq %xmm13, %xmm9, %xmm11; vpunpckhdq %xmm13, %xmm9, %xmm9; vpunpckhqdq %xmm11, %xmm1, %xmm5; vpunpcklqdq %xmm11, %xmm1, %xmm1; vpunpckhqdq %xmm9, %xmm15, %xmm13; vpunpcklqdq %xmm9, %xmm15, %xmm9;; vmovdqu (%rax), %xmm11; vmovdqu (%rcx), %xmm15; vmovdqu %xmm0, (%rax); vmovdqu %xmm4, (%rcx); vpunpckhdq %xmm6, %xmm2, %xmm4; vpunpckldq %xmm6, %xmm2, %xmm2; vpunpckldq %xmm14, %xmm10, %xmm0; vpunpckhdq %xmm14, %xmm10, %xmm10; vpunpckhqdq %xmm0, %xmm2, %xmm6; vpunpcklqdq %xmm0, %xmm2, %xmm2; vpunpckhqdq %xmm10, %xmm4, %xmm14; vpunpcklqdq %xmm10, %xmm4, %xmm10;; vpunpckhdq %xmm7, %xmm3, %xmm4; vpunpckldq %xmm7, %xmm3, %xmm3; vpunpckldq %xmm15, %xmm11, %xmm0; vpunpckhdq %xmm15, %xmm11, %xmm11; vpunpckhqdq %xmm0, %xmm3, %xmm7; vpunpcklqdq %xmm0, %xmm3, %xmm3; vpunpckhqdq %xmm11, %xmm4, %xmm15; vpunpcklqdq %xmm11, %xmm4, %xmm11;; vmovdqu .Lshufb_16x16b(%rip), %xmm0; vmovdqu (%rcx), %xmm4; vpshufb %xmm0, %xmm8, %xmm8; vpshufb %xmm0, %xmm12, %xmm12; vpshufb %xmm0, %xmm1, %xmm1; vpshufb %xmm0, %xmm5, %xmm5; vpshufb %xmm0, %xmm9, %xmm9; vpshufb %xmm0, %xmm13, %xmm13; vpshufb %xmm0, %xmm4, %xmm4; vpshufb %xmm0, %xmm2, %xmm2; vpshufb %xmm0, %xmm6, %xmm6; vpshufb %xmm0, %xmm10, %xmm10; vpshufb %xmm0, %xmm14, %xmm14; vpshufb %xmm0, %xmm3, 
%xmm3; vpshufb %xmm0, %xmm7, %xmm7; vpshufb %xmm0, %xmm11, %xmm11; vpshufb %xmm0, %xmm15, %xmm15; vmovdqu %xmm15, (%rcx); vmovdqu (%rax), %xmm15; vpshufb %xmm0, %xmm15, %xmm0; vmovdqu %xmm11, (%rax); vpunpckhdq %xmm1, %xmm0, %xmm15; vpunpckldq %xmm1, %xmm0, %xmm0; vpunpckldq %xmm3, %xmm2, %xmm11; vpunpckhdq %xmm3, %xmm2, %xmm2; vpunpckhqdq %xmm11, %xmm0, %xmm1; vpunpcklqdq %xmm11, %xmm0, %xmm0; vpunpckhqdq %xmm2, %xmm15, %xmm3; vpunpcklqdq %xmm2, %xmm15, %xmm2;; vpunpckhdq %xmm5, %xmm4, %xmm15; vpunpckldq %xmm5, %xmm4, %xmm4; vpunpckldq %xmm7, %xmm6, %xmm11; vpunpckhdq %xmm7, %xmm6, %xmm6; vpunpckhqdq %xmm11, %xmm4, %xmm5; vpunpcklqdq %xmm11, %xmm4, %xmm4; vpunpckhqdq %xmm6, %xmm15, %xmm7; vpunpcklqdq %xmm6, %xmm15, %xmm6;; vmovdqu (%rax), %xmm11; vmovdqu (%rcx), %xmm15; vmovdqu %xmm1, (%rax); vmovdqu %xmm5, (%rcx); vpunpckhdq %xmm9, %xmm8, %xmm5; vpunpckldq %xmm9, %xmm8, %xmm8; vpunpckldq %xmm11, %xmm10, %xmm1; vpunpckhdq %xmm11, %xmm10, %xmm10; vpunpckhqdq %xmm1, %xmm8, %xmm9; vpunpcklqdq %xmm1, %xmm8, %xmm8; vpunpckhqdq %xmm10, %xmm5, %xmm11; vpunpcklqdq %xmm10, %xmm5, %xmm10;; vpunpckhdq %xmm13, %xmm12, %xmm5; vpunpckldq %xmm13, %xmm12, %xmm12; vpunpckldq %xmm15, %xmm14, %xmm1; vpunpckhdq %xmm15, %xmm14, %xmm14; vpunpckhqdq %xmm1, %xmm12, %xmm13; vpunpcklqdq %xmm1, %xmm12, %xmm12; vpunpckhqdq %xmm14, %xmm5, %xmm15; vpunpcklqdq %xmm14, %xmm5, %xmm14;; vmovdqu (%rax), %xmm1; vmovdqu (%rcx), %xmm5;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax); vmovdqu %xmm8, 0 * 16(%rcx); vmovdqu %xmm9, 1 * 16(%rcx); vmovdqu %xmm10, 2 * 16(%rcx); vmovdqu %xmm11, 3 * 16(%rcx); vmovdqu %xmm12, 4 * 16(%rcx); vmovdqu %xmm13, 5 * 16(%rcx); vmovdqu %xmm14, 6 * 16(%rcx); vmovdqu %xmm15, 7 * 16(%rcx);; + + + + vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; 
vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((0) + 2) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, 
%xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, 
%xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((0) + 2) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb 
%xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((0) + 4) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, 
%xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, 
%xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((0) + 4) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; 
vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((0) + 6) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, 
%xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; 
vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((0) + 6) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; ;;; + + + + vpxor %xmm12, %xmm12, %xmm12; vmovd ((0 + (8) * 8) + 0)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand %xmm0, %xmm8, %xmm8; vpand %xmm1, %xmm9, %xmm9; vpand %xmm2, %xmm10, %xmm10; vpand %xmm3, %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb %xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, %xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor %xmm4, %xmm8, %xmm4; vmovdqu %xmm4, 4 * 16(%rax); vpxor %xmm5, %xmm9, %xmm5; vmovdqu %xmm5, 5 * 16(%rax); vpxor %xmm6, %xmm10, %xmm6; vmovdqu %xmm6, 6 * 16(%rax); vpxor %xmm7, %xmm11, %xmm7; vmovdqu %xmm7, 7 * 16(%rax); vmovd ((0 + (8) * 8) + 12)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, 
%xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor 4 * 16(%rcx), %xmm8, %xmm8; vpor 5 * 16(%rcx), %xmm9, %xmm9; vpor 6 * 16(%rcx), %xmm10, %xmm10; vpor 7 * 16(%rcx), %xmm11, %xmm11; vpxor 0 * 16(%rcx), %xmm8, %xmm8; vpxor 1 * 16(%rcx), %xmm9, %xmm9; vpxor 2 * 16(%rcx), %xmm10, %xmm10; vpxor 3 * 16(%rcx), %xmm11, %xmm11; vmovdqu %xmm8, 0 * 16(%rcx); vmovdqu %xmm9, 1 * 16(%rcx); vmovdqu %xmm10, 2 * 16(%rcx); vmovdqu %xmm11, 3 * 16(%rcx); vmovd ((0 + (8) * 8) + 8)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand 0 * 16(%rcx), %xmm8, %xmm8; vpand 1 * 16(%rcx), %xmm9, %xmm9; vpand 2 * 16(%rcx), %xmm10, %xmm10; vpand 3 * 16(%rcx), %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb %xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, %xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor 4 * 16(%rcx), %xmm8, %xmm8; vpxor 5 * 16(%rcx), %xmm9, %xmm9; vpxor 6 * 16(%rcx), %xmm10, %xmm10; vpxor 7 * 16(%rcx), %xmm11, %xmm11; vmovdqu %xmm8, 4 * 16(%rcx); vmovdqu %xmm9, 5 * 16(%rcx); vmovdqu %xmm10, 6 * 16(%rcx); vmovdqu %xmm11, 7 * 16(%rcx); vmovd ((0 + (8) * 8) + 4)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor %xmm4, %xmm8, %xmm8; vpor %xmm5, %xmm9, %xmm9; vpor %xmm6, %xmm10, %xmm10; vpor %xmm7, %xmm11, %xmm11; vpxor %xmm0, %xmm8, %xmm0; vmovdqu %xmm0, 0 * 16(%rax); vpxor %xmm1, %xmm9, %xmm1; vmovdqu %xmm1, 1 * 16(%rax); vpxor %xmm2, %xmm10, %xmm2; vmovdqu %xmm2, 2 * 16(%rax); vpxor %xmm3, %xmm11, %xmm3; vmovdqu %xmm3, 3 * 16(%rax);; + + + + + + + + vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, 
%xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((8) + 2) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor 
%xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu 
.Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((8) + 2) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb 
%xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((8) + 4) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 
16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb 
%xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((8) + 4) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld 
$4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((8) + 6) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; 
vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, 
%xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((8) + 6) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; ;;; + + + + vpxor %xmm12, %xmm12, %xmm12; vmovd ((0 + (16) * 8) + 0)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand %xmm0, %xmm8, %xmm8; vpand %xmm1, %xmm9, %xmm9; vpand %xmm2, %xmm10, %xmm10; vpand %xmm3, %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb %xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, %xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor %xmm4, %xmm8, %xmm4; vmovdqu 
%xmm4, 4 * 16(%rax); vpxor %xmm5, %xmm9, %xmm5; vmovdqu %xmm5, 5 * 16(%rax); vpxor %xmm6, %xmm10, %xmm6; vmovdqu %xmm6, 6 * 16(%rax); vpxor %xmm7, %xmm11, %xmm7; vmovdqu %xmm7, 7 * 16(%rax); vmovd ((0 + (16) * 8) + 12)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor 4 * 16(%rcx), %xmm8, %xmm8; vpor 5 * 16(%rcx), %xmm9, %xmm9; vpor 6 * 16(%rcx), %xmm10, %xmm10; vpor 7 * 16(%rcx), %xmm11, %xmm11; vpxor 0 * 16(%rcx), %xmm8, %xmm8; vpxor 1 * 16(%rcx), %xmm9, %xmm9; vpxor 2 * 16(%rcx), %xmm10, %xmm10; vpxor 3 * 16(%rcx), %xmm11, %xmm11; vmovdqu %xmm8, 0 * 16(%rcx); vmovdqu %xmm9, 1 * 16(%rcx); vmovdqu %xmm10, 2 * 16(%rcx); vmovdqu %xmm11, 3 * 16(%rcx); vmovd ((0 + (16) * 8) + 8)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand 0 * 16(%rcx), %xmm8, %xmm8; vpand 1 * 16(%rcx), %xmm9, %xmm9; vpand 2 * 16(%rcx), %xmm10, %xmm10; vpand 3 * 16(%rcx), %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb %xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, %xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor 4 * 16(%rcx), %xmm8, %xmm8; vpxor 5 * 16(%rcx), %xmm9, %xmm9; vpxor 6 * 16(%rcx), %xmm10, %xmm10; vpxor 7 * 16(%rcx), %xmm11, %xmm11; vmovdqu %xmm8, 4 * 16(%rcx); vmovdqu %xmm9, 5 * 16(%rcx); vmovdqu %xmm10, 6 * 16(%rcx); vmovdqu %xmm11, 7 * 16(%rcx); vmovd ((0 + (16) * 8) + 4)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor %xmm4, %xmm8, %xmm8; vpor %xmm5, %xmm9, %xmm9; vpor %xmm6, %xmm10, %xmm10; vpor %xmm7, %xmm11, %xmm11; vpxor %xmm0, %xmm8, %xmm0; vmovdqu %xmm0, 0 * 16(%rax); vpxor %xmm1, %xmm9, %xmm1; vmovdqu %xmm1, 1 * 16(%rax); vpxor %xmm2, %xmm10, %xmm2; vmovdqu %xmm2, 2 * 16(%rax); vpxor %xmm3, %xmm11, %xmm3; vmovdqu %xmm3, 3 * 16(%rax);; + + + + + + + + vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; 
vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((16) + 2) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; 
vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; 
vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((16) + 2) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; 
vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((16) + 4) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, 
%xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; 
vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((16) + 4) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; 
vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((16) + 6) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq 
$2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu 
.Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((16) + 6) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; ;;; + + vpxor %xmm12, %xmm12, %xmm12; vmovd ((0 + (24) * 8) + 0)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand %xmm0, %xmm8, %xmm8; vpand %xmm1, %xmm9, %xmm9; vpand %xmm2, %xmm10, %xmm10; vpand %xmm3, %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb 
%xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, %xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor %xmm4, %xmm8, %xmm4; vmovdqu %xmm4, 4 * 16(%rax); vpxor %xmm5, %xmm9, %xmm5; vmovdqu %xmm5, 5 * 16(%rax); vpxor %xmm6, %xmm10, %xmm6; vmovdqu %xmm6, 6 * 16(%rax); vpxor %xmm7, %xmm11, %xmm7; vmovdqu %xmm7, 7 * 16(%rax); vmovd ((0 + (24) * 8) + 12)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor 4 * 16(%rcx), %xmm8, %xmm8; vpor 5 * 16(%rcx), %xmm9, %xmm9; vpor 6 * 16(%rcx), %xmm10, %xmm10; vpor 7 * 16(%rcx), %xmm11, %xmm11; vpxor 0 * 16(%rcx), %xmm8, %xmm8; vpxor 1 * 16(%rcx), %xmm9, %xmm9; vpxor 2 * 16(%rcx), %xmm10, %xmm10; vpxor 3 * 16(%rcx), %xmm11, %xmm11; vmovdqu %xmm8, 0 * 16(%rcx); vmovdqu %xmm9, 1 * 16(%rcx); vmovdqu %xmm10, 2 * 16(%rcx); vmovdqu %xmm11, 3 * 16(%rcx); vmovd ((0 + (24) * 8) + 8)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand 0 * 16(%rcx), %xmm8, %xmm8; vpand 1 * 16(%rcx), %xmm9, %xmm9; vpand 2 * 16(%rcx), %xmm10, %xmm10; vpand 3 * 16(%rcx), %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb %xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, %xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor 4 * 16(%rcx), %xmm8, %xmm8; vpxor 5 * 16(%rcx), %xmm9, %xmm9; vpxor 6 * 16(%rcx), %xmm10, %xmm10; vpxor 7 * 16(%rcx), %xmm11, %xmm11; vmovdqu %xmm8, 4 * 16(%rcx); vmovdqu %xmm9, 5 * 16(%rcx); vmovdqu %xmm10, 6 * 16(%rcx); vmovdqu %xmm11, 7 * 16(%rcx); vmovd ((0 + (24) * 8) + 4)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor %xmm4, %xmm8, %xmm8; vpor %xmm5, %xmm9, %xmm9; vpor %xmm6, %xmm10, %xmm10; vpor %xmm7, %xmm11, %xmm11; vpxor %xmm0, %xmm8, %xmm0; vmovdqu %xmm0, 0 * 16(%rax); vpxor %xmm1, %xmm9, %xmm1; vmovdqu %xmm1, 1 * 16(%rax); vpxor %xmm2, %xmm10, %xmm2; vmovdqu %xmm2, 2 * 16(%rax); vpxor %xmm3, %xmm11, %xmm3; vmovdqu %xmm3, 3 * 16(%rax);; + + vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, 
%xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((24) + 2) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor 
%xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; 
vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((24) + 2) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, 
%xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((24) + 4) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb 
%xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, 
%xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((24) + 4) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, 
%xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((24) + 6) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; 
vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; 
vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((24) + 6) + (1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; ;;; + + vmovdqu 0 * 16(%rcx), %xmm8; + vmovdqu 1 * 16(%rcx), %xmm9; + vmovdqu 2 * 16(%rcx), %xmm10; + vmovdqu 3 * 16(%rcx), %xmm11; + vmovdqu 4 * 16(%rcx), %xmm12; + vmovdqu 5 * 16(%rcx), %xmm13; + vmovdqu 6 * 16(%rcx), %xmm14; + vmovdqu 7 * 16(%rcx), %xmm15; + + vmovdqu %xmm6, (%rax); vmovdqu 
%xmm7, 1 * 16(%rax); vpunpckhdq %xmm9, %xmm8, %xmm7; vpunpckldq %xmm9, %xmm8, %xmm8; vpunpckldq %xmm11, %xmm10, %xmm6; vpunpckhdq %xmm11, %xmm10, %xmm10; vpunpckhqdq %xmm6, %xmm8, %xmm9; vpunpcklqdq %xmm6, %xmm8, %xmm8; vpunpckhqdq %xmm10, %xmm7, %xmm11; vpunpcklqdq %xmm10, %xmm7, %xmm10;; vpunpckhdq %xmm13, %xmm12, %xmm7; vpunpckldq %xmm13, %xmm12, %xmm12; vpunpckldq %xmm15, %xmm14, %xmm6; vpunpckhdq %xmm15, %xmm14, %xmm14; vpunpckhqdq %xmm6, %xmm12, %xmm13; vpunpcklqdq %xmm6, %xmm12, %xmm12; vpunpckhqdq %xmm14, %xmm7, %xmm15; vpunpcklqdq %xmm14, %xmm7, %xmm14;; vmovdqu (%rax), %xmm6; vmovdqu 1 * 16(%rax), %xmm7; vmovdqu %xmm8, (%rax); vmovdqu %xmm9, 1 * 16(%rax); vpunpckhdq %xmm1, %xmm0, %xmm9; vpunpckldq %xmm1, %xmm0, %xmm0; vpunpckldq %xmm3, %xmm2, %xmm8; vpunpckhdq %xmm3, %xmm2, %xmm2; vpunpckhqdq %xmm8, %xmm0, %xmm1; vpunpcklqdq %xmm8, %xmm0, %xmm0; vpunpckhqdq %xmm2, %xmm9, %xmm3; vpunpcklqdq %xmm2, %xmm9, %xmm2;; vpunpckhdq %xmm5, %xmm4, %xmm9; vpunpckldq %xmm5, %xmm4, %xmm4; vpunpckldq %xmm7, %xmm6, %xmm8; vpunpckhdq %xmm7, %xmm6, %xmm6; vpunpckhqdq %xmm8, %xmm4, %xmm5; vpunpcklqdq %xmm8, %xmm4, %xmm4; vpunpckhqdq %xmm6, %xmm9, %xmm7; vpunpcklqdq %xmm6, %xmm9, %xmm6;; vmovdqu .Lshufb_16x16b(%rip), %xmm8; vmovdqu 1 * 16(%rax), %xmm9; vpshufb %xmm8, %xmm10, %xmm10; vpshufb %xmm8, %xmm11, %xmm11; vpshufb %xmm8, %xmm12, %xmm12; vpshufb %xmm8, %xmm13, %xmm13; vpshufb %xmm8, %xmm14, %xmm14; vpshufb %xmm8, %xmm15, %xmm15; vpshufb %xmm8, %xmm9, %xmm9; vpshufb %xmm8, %xmm0, %xmm0; vpshufb %xmm8, %xmm1, %xmm1; vpshufb %xmm8, %xmm2, %xmm2; vpshufb %xmm8, %xmm3, %xmm3; vpshufb %xmm8, %xmm4, %xmm4; vpshufb %xmm8, %xmm5, %xmm5; vpshufb %xmm8, %xmm6, %xmm6; vpshufb %xmm8, %xmm7, %xmm7; vmovdqu %xmm7, 1 * 16(%rax); vmovdqu (%rax), %xmm7; vpshufb %xmm8, %xmm7, %xmm8; vmovdqu %xmm6, (%rax); vpunpckhdq %xmm12, %xmm8, %xmm7; vpunpckldq %xmm12, %xmm8, %xmm8; vpunpckldq %xmm4, %xmm0, %xmm6; vpunpckhdq %xmm4, %xmm0, %xmm0; vpunpckhqdq %xmm6, %xmm8, %xmm12; vpunpcklqdq %xmm6, %xmm8, %xmm8; vpunpckhqdq %xmm0, %xmm7, %xmm4; vpunpcklqdq %xmm0, %xmm7, %xmm0;; vpunpckhdq %xmm13, %xmm9, %xmm7; vpunpckldq %xmm13, %xmm9, %xmm9; vpunpckldq %xmm5, %xmm1, %xmm6; vpunpckhdq %xmm5, %xmm1, %xmm1; vpunpckhqdq %xmm6, %xmm9, %xmm13; vpunpcklqdq %xmm6, %xmm9, %xmm9; vpunpckhqdq %xmm1, %xmm7, %xmm5; vpunpcklqdq %xmm1, %xmm7, %xmm1;; vmovdqu (%rax), %xmm6; vmovdqu 1 * 16(%rax), %xmm7; vmovdqu %xmm12, (%rax); vmovdqu %xmm13, 1 * 16(%rax); vpunpckhdq %xmm14, %xmm10, %xmm13; vpunpckldq %xmm14, %xmm10, %xmm10; vpunpckldq %xmm6, %xmm2, %xmm12; vpunpckhdq %xmm6, %xmm2, %xmm2; vpunpckhqdq %xmm12, %xmm10, %xmm14; vpunpcklqdq %xmm12, %xmm10, %xmm10; vpunpckhqdq %xmm2, %xmm13, %xmm6; vpunpcklqdq %xmm2, %xmm13, %xmm2;; vpunpckhdq %xmm15, %xmm11, %xmm13; vpunpckldq %xmm15, %xmm11, %xmm11; vpunpckldq %xmm7, %xmm3, %xmm12; vpunpckhdq %xmm7, %xmm3, %xmm3; vpunpckhqdq %xmm12, %xmm11, %xmm15; vpunpcklqdq %xmm12, %xmm11, %xmm11; vpunpckhqdq %xmm3, %xmm13, %xmm7; vpunpcklqdq %xmm3, %xmm13, %xmm3;; vmovdqu (%rax), %xmm12; vmovdqu 1 * 16(%rax), %xmm13;; vmovdqu %xmm0, (%rax); vmovq (256)(%rdi), %xmm0; vpshufb .Lpack_bswap(%rip), %xmm0, %xmm0; vpxor %xmm0, %xmm15, %xmm15; vpxor %xmm0, %xmm14, %xmm14; vpxor %xmm0, %xmm13, %xmm13; vpxor %xmm0, %xmm12, %xmm12; vpxor %xmm0, %xmm11, %xmm11; vpxor %xmm0, %xmm10, %xmm10; vpxor %xmm0, %xmm9, %xmm9; vpxor %xmm0, %xmm8, %xmm8; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm0, %xmm4, %xmm4; vpxor %xmm0, %xmm3, %xmm3; vpxor %xmm0, %xmm2, %xmm2; vpxor %xmm0, %xmm1, 
%xmm1; vpxor (%rax), %xmm0, %xmm0;; + + + + ret; + + +.align 8 + +__camellia_dec_blk16: + + leaq 8 * 16(%rax), %rcx; + + vmovdqu %xmm11, (%rax); vmovdqu %xmm15, (%rcx); vpunpckhdq %xmm4, %xmm0, %xmm15; vpunpckldq %xmm4, %xmm0, %xmm0; vpunpckldq %xmm12, %xmm8, %xmm11; vpunpckhdq %xmm12, %xmm8, %xmm8; vpunpckhqdq %xmm11, %xmm0, %xmm4; vpunpcklqdq %xmm11, %xmm0, %xmm0; vpunpckhqdq %xmm8, %xmm15, %xmm12; vpunpcklqdq %xmm8, %xmm15, %xmm8;; vpunpckhdq %xmm5, %xmm1, %xmm15; vpunpckldq %xmm5, %xmm1, %xmm1; vpunpckldq %xmm13, %xmm9, %xmm11; vpunpckhdq %xmm13, %xmm9, %xmm9; vpunpckhqdq %xmm11, %xmm1, %xmm5; vpunpcklqdq %xmm11, %xmm1, %xmm1; vpunpckhqdq %xmm9, %xmm15, %xmm13; vpunpcklqdq %xmm9, %xmm15, %xmm9;; vmovdqu (%rax), %xmm11; vmovdqu (%rcx), %xmm15; vmovdqu %xmm0, (%rax); vmovdqu %xmm4, (%rcx); vpunpckhdq %xmm6, %xmm2, %xmm4; vpunpckldq %xmm6, %xmm2, %xmm2; vpunpckldq %xmm14, %xmm10, %xmm0; vpunpckhdq %xmm14, %xmm10, %xmm10; vpunpckhqdq %xmm0, %xmm2, %xmm6; vpunpcklqdq %xmm0, %xmm2, %xmm2; vpunpckhqdq %xmm10, %xmm4, %xmm14; vpunpcklqdq %xmm10, %xmm4, %xmm10;; vpunpckhdq %xmm7, %xmm3, %xmm4; vpunpckldq %xmm7, %xmm3, %xmm3; vpunpckldq %xmm15, %xmm11, %xmm0; vpunpckhdq %xmm15, %xmm11, %xmm11; vpunpckhqdq %xmm0, %xmm3, %xmm7; vpunpcklqdq %xmm0, %xmm3, %xmm3; vpunpckhqdq %xmm11, %xmm4, %xmm15; vpunpcklqdq %xmm11, %xmm4, %xmm11;; vmovdqu .Lshufb_16x16b(%rip), %xmm0; vmovdqu (%rcx), %xmm4; vpshufb %xmm0, %xmm8, %xmm8; vpshufb %xmm0, %xmm12, %xmm12; vpshufb %xmm0, %xmm1, %xmm1; vpshufb %xmm0, %xmm5, %xmm5; vpshufb %xmm0, %xmm9, %xmm9; vpshufb %xmm0, %xmm13, %xmm13; vpshufb %xmm0, %xmm4, %xmm4; vpshufb %xmm0, %xmm2, %xmm2; vpshufb %xmm0, %xmm6, %xmm6; vpshufb %xmm0, %xmm10, %xmm10; vpshufb %xmm0, %xmm14, %xmm14; vpshufb %xmm0, %xmm3, %xmm3; vpshufb %xmm0, %xmm7, %xmm7; vpshufb %xmm0, %xmm11, %xmm11; vpshufb %xmm0, %xmm15, %xmm15; vmovdqu %xmm15, (%rcx); vmovdqu (%rax), %xmm15; vpshufb %xmm0, %xmm15, %xmm0; vmovdqu %xmm11, (%rax); vpunpckhdq %xmm1, %xmm0, %xmm15; vpunpckldq %xmm1, %xmm0, %xmm0; vpunpckldq %xmm3, %xmm2, %xmm11; vpunpckhdq %xmm3, %xmm2, %xmm2; vpunpckhqdq %xmm11, %xmm0, %xmm1; vpunpcklqdq %xmm11, %xmm0, %xmm0; vpunpckhqdq %xmm2, %xmm15, %xmm3; vpunpcklqdq %xmm2, %xmm15, %xmm2;; vpunpckhdq %xmm5, %xmm4, %xmm15; vpunpckldq %xmm5, %xmm4, %xmm4; vpunpckldq %xmm7, %xmm6, %xmm11; vpunpckhdq %xmm7, %xmm6, %xmm6; vpunpckhqdq %xmm11, %xmm4, %xmm5; vpunpcklqdq %xmm11, %xmm4, %xmm4; vpunpckhqdq %xmm6, %xmm15, %xmm7; vpunpcklqdq %xmm6, %xmm15, %xmm6;; vmovdqu (%rax), %xmm11; vmovdqu (%rcx), %xmm15; vmovdqu %xmm1, (%rax); vmovdqu %xmm5, (%rcx); vpunpckhdq %xmm9, %xmm8, %xmm5; vpunpckldq %xmm9, %xmm8, %xmm8; vpunpckldq %xmm11, %xmm10, %xmm1; vpunpckhdq %xmm11, %xmm10, %xmm10; vpunpckhqdq %xmm1, %xmm8, %xmm9; vpunpcklqdq %xmm1, %xmm8, %xmm8; vpunpckhqdq %xmm10, %xmm5, %xmm11; vpunpcklqdq %xmm10, %xmm5, %xmm10;; vpunpckhdq %xmm13, %xmm12, %xmm5; vpunpckldq %xmm13, %xmm12, %xmm12; vpunpckldq %xmm15, %xmm14, %xmm1; vpunpckhdq %xmm15, %xmm14, %xmm14; vpunpckhqdq %xmm1, %xmm12, %xmm13; vpunpcklqdq %xmm1, %xmm12, %xmm12; vpunpckhqdq %xmm14, %xmm5, %xmm15; vpunpcklqdq %xmm14, %xmm5, %xmm14;; vmovdqu (%rax), %xmm1; vmovdqu (%rcx), %xmm5;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax); vmovdqu %xmm8, 0 * 16(%rcx); vmovdqu %xmm9, 1 * 16(%rcx); vmovdqu %xmm10, 2 * 16(%rcx); vmovdqu %xmm11, 3 * 16(%rcx); vmovdqu %xmm12, 4 * 
16(%rcx); vmovdqu %xmm13, 5 * 16(%rcx); vmovdqu %xmm14, 6 * 16(%rcx); vmovdqu %xmm15, 7 * 16(%rcx);; + + + + vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, 
%xmm14; vmovq (0 + ((24) + 7) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, 
%xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((24) + 7) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, 
%xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, 
%xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((24) + 5) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, 
%xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((24) + 5) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; 
vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, 
%xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((24) + 3) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, 
%xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((24) + 3) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, 
%xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; ;;; + + vpxor %xmm12, %xmm12, %xmm12; vmovd ((0 + (24) * 8) + 8)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand %xmm0, %xmm8, %xmm8; vpand %xmm1, %xmm9, %xmm9; vpand %xmm2, %xmm10, %xmm10; vpand %xmm3, %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb %xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, %xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor %xmm4, %xmm8, %xmm4; vmovdqu %xmm4, 4 * 16(%rax); vpxor %xmm5, %xmm9, %xmm5; vmovdqu %xmm5, 5 * 16(%rax); vpxor %xmm6, %xmm10, %xmm6; vmovdqu %xmm6, 6 * 16(%rax); vpxor %xmm7, %xmm11, %xmm7; vmovdqu %xmm7, 7 * 16(%rax); vmovd ((0 + (24) * 8) + 4)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor 4 * 16(%rcx), %xmm8, %xmm8; vpor 5 * 16(%rcx), %xmm9, %xmm9; vpor 6 * 16(%rcx), %xmm10, %xmm10; vpor 7 * 16(%rcx), %xmm11, %xmm11; vpxor 0 * 16(%rcx), %xmm8, %xmm8; vpxor 1 * 16(%rcx), %xmm9, %xmm9; vpxor 2 * 16(%rcx), %xmm10, %xmm10; vpxor 3 * 16(%rcx), %xmm11, %xmm11; vmovdqu %xmm8, 0 * 16(%rcx); vmovdqu %xmm9, 1 * 16(%rcx); vmovdqu %xmm10, 2 * 16(%rcx); vmovdqu %xmm11, 3 * 16(%rcx); vmovd ((0 + (24) * 8) + 0)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand 0 * 16(%rcx), %xmm8, %xmm8; vpand 1 * 16(%rcx), %xmm9, %xmm9; vpand 2 * 16(%rcx), %xmm10, %xmm10; vpand 3 * 16(%rcx), %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb %xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, %xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor 4 * 16(%rcx), %xmm8, %xmm8; vpxor 5 * 16(%rcx), %xmm9, %xmm9; vpxor 6 * 16(%rcx), %xmm10, %xmm10; vpxor 7 * 16(%rcx), %xmm11, %xmm11; vmovdqu %xmm8, 4 * 16(%rcx); vmovdqu %xmm9, 5 * 16(%rcx); vmovdqu %xmm10, 6 * 16(%rcx); vmovdqu %xmm11, 7 * 16(%rcx); vmovd ((0 + (24) * 8) + 12)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, 
%xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor %xmm4, %xmm8, %xmm8; vpor %xmm5, %xmm9, %xmm9; vpor %xmm6, %xmm10, %xmm10; vpor %xmm7, %xmm11, %xmm11; vpxor %xmm0, %xmm8, %xmm0; vmovdqu %xmm0, 0 * 16(%rax); vpxor %xmm1, %xmm9, %xmm1; vmovdqu %xmm1, 1 * 16(%rax); vpxor %xmm2, %xmm10, %xmm2; vmovdqu %xmm2, 2 * 16(%rax); vpxor %xmm3, %xmm11, %xmm3; vmovdqu %xmm3, 3 * 16(%rax);; + + + vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, 
%xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((16) + 7) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, 
%xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((16) + 7) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; 
vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb 
%xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((16) + 5) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, 
%xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((16) + 5) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor 
%xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn 
%xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((16) + 3) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, 
%xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((16) + 3) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, 
%xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; ;;; + + + + vpxor %xmm12, %xmm12, %xmm12; vmovd ((0 + (16) * 8) + 8)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand %xmm0, %xmm8, %xmm8; vpand %xmm1, %xmm9, %xmm9; vpand %xmm2, %xmm10, %xmm10; vpand %xmm3, %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb %xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, %xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor %xmm4, %xmm8, %xmm4; vmovdqu %xmm4, 4 * 16(%rax); vpxor %xmm5, %xmm9, %xmm5; vmovdqu %xmm5, 5 * 16(%rax); vpxor %xmm6, %xmm10, %xmm6; vmovdqu %xmm6, 6 * 16(%rax); vpxor %xmm7, %xmm11, %xmm7; vmovdqu %xmm7, 7 * 16(%rax); vmovd ((0 + (16) * 8) + 4)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor 4 * 16(%rcx), %xmm8, %xmm8; vpor 5 * 16(%rcx), %xmm9, %xmm9; vpor 6 * 16(%rcx), %xmm10, %xmm10; vpor 7 * 16(%rcx), %xmm11, %xmm11; vpxor 0 * 16(%rcx), %xmm8, %xmm8; vpxor 1 * 16(%rcx), %xmm9, %xmm9; vpxor 2 * 16(%rcx), %xmm10, %xmm10; vpxor 3 * 16(%rcx), %xmm11, %xmm11; vmovdqu %xmm8, 0 * 16(%rcx); vmovdqu %xmm9, 1 * 16(%rcx); vmovdqu %xmm10, 2 * 16(%rcx); vmovdqu %xmm11, 3 * 16(%rcx); vmovd ((0 + (16) * 8) + 0)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand 0 * 16(%rcx), %xmm8, %xmm8; vpand 1 * 16(%rcx), %xmm9, %xmm9; vpand 2 * 16(%rcx), %xmm10, %xmm10; vpand 3 * 16(%rcx), %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb %xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, %xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor 4 * 16(%rcx), %xmm8, %xmm8; vpxor 5 * 16(%rcx), %xmm9, %xmm9; vpxor 6 * 16(%rcx), %xmm10, %xmm10; vpxor 7 * 16(%rcx), %xmm11, %xmm11; vmovdqu 
%xmm8, 4 * 16(%rcx); vmovdqu %xmm9, 5 * 16(%rcx); vmovdqu %xmm10, 6 * 16(%rcx); vmovdqu %xmm11, 7 * 16(%rcx); vmovd ((0 + (16) * 8) + 12)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor %xmm4, %xmm8, %xmm8; vpor %xmm5, %xmm9, %xmm9; vpor %xmm6, %xmm10, %xmm10; vpor %xmm7, %xmm11, %xmm11; vpxor %xmm0, %xmm8, %xmm0; vmovdqu %xmm0, 0 * 16(%rax); vpxor %xmm1, %xmm9, %xmm1; vmovdqu %xmm1, 1 * 16(%rax); vpxor %xmm2, %xmm10, %xmm2; vmovdqu %xmm2, 2 * 16(%rax); vpxor %xmm3, %xmm11, %xmm3; vmovdqu %xmm3, 3 * 16(%rax);; + + + + + + + + vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; 
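# Descriptive note on the expanded sequence above (a best-effort reading, not from the original
# source): the vmovd subkey loads, the vpshufb/vpsrldq byte broadcasts, and the vpand/vpor merges
# with the 16-byte slots at (%rax)/(%rcx) appear to be Camellia's FL/FL^-1 layer, and the
# vpcmpgtb/vpaddb/vpabsb/vpor chain looks like a bytewise construction of the 32-bit
# rotate-left-by-one that FL requires.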
vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((8) + 7) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor 
%xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((8) + 7) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, 
%xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; 
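# Descriptive note (assumed reading): inside each round block, vmovq loads such as
# (0 + ((8) + 7) * 8)(%rdi) appear to fetch a 64-bit round subkey from the key schedule addressed
# by %rdi; vpsrldq plus vpshufb with a zeroed mask then broadcast each key byte across a register,
# and the long vpxor chain that follows looks like Camellia's P permutation combined with the
# subkey and with the other half of the state kept in the scratch slots at (%rax)/(%rcx).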
vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((8) + 5) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb 
%xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((8) + 5) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor 
%xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; 
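# Descriptive note (assumed layout): the 16-way byte-sliced state seems to live half in
# xmm0-xmm7 and half in eight 16-byte scratch slots addressed by %rax and %rcx, with %rdi holding
# the context/key-schedule pointer; the descending key-table offsets (e.g. (8)+7, (8)+5, (8)+3)
# suggest this is the decryption direction of the unrolled round loop.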
vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((8) + 3) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn 
%xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((8) + 3) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, 
%xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; ;;; + + + + vpxor %xmm12, %xmm12, %xmm12; vmovd ((0 + (8) * 8) + 8)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand %xmm0, %xmm8, %xmm8; vpand %xmm1, %xmm9, %xmm9; vpand %xmm2, %xmm10, %xmm10; vpand %xmm3, %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb %xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, %xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor %xmm4, %xmm8, %xmm4; vmovdqu %xmm4, 4 * 16(%rax); vpxor %xmm5, %xmm9, %xmm5; vmovdqu %xmm5, 5 * 16(%rax); vpxor %xmm6, %xmm10, %xmm6; vmovdqu %xmm6, 6 * 16(%rax); vpxor %xmm7, %xmm11, %xmm7; vmovdqu %xmm7, 7 * 16(%rax); vmovd ((0 + (8) * 8) + 4)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor 4 * 16(%rcx), %xmm8, %xmm8; vpor 5 * 16(%rcx), %xmm9, %xmm9; vpor 6 * 16(%rcx), %xmm10, %xmm10; vpor 7 * 16(%rcx), %xmm11, %xmm11; vpxor 0 * 16(%rcx), %xmm8, %xmm8; vpxor 1 * 16(%rcx), %xmm9, %xmm9; vpxor 2 * 16(%rcx), %xmm10, %xmm10; vpxor 3 * 16(%rcx), %xmm11, %xmm11; vmovdqu %xmm8, 0 * 16(%rcx); vmovdqu %xmm9, 1 * 16(%rcx); vmovdqu %xmm10, 2 * 16(%rcx); vmovdqu %xmm11, 3 * 16(%rcx); vmovd ((0 + (8) * 8) + 0)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpand 0 * 16(%rcx), %xmm8, %xmm8; vpand 1 * 16(%rcx), %xmm9, %xmm9; vpand 2 * 16(%rcx), %xmm10, %xmm10; vpand 3 * 16(%rcx), %xmm11, %xmm11; vpcmpgtb %xmm11, %xmm12, %xmm13; vpaddb %xmm11, %xmm11, %xmm11; vpabsb %xmm13, %xmm13; vpcmpgtb %xmm10, %xmm12, %xmm14; vpaddb %xmm10, %xmm10, %xmm10; vpabsb %xmm14, %xmm14; vpcmpgtb %xmm9, %xmm12, %xmm15; vpaddb %xmm9, %xmm9, %xmm9; vpabsb %xmm15, %xmm15; vpor %xmm13, %xmm10, %xmm10; vpcmpgtb %xmm8, 
%xmm12, %xmm13; vpaddb %xmm8, %xmm8, %xmm8; vpabsb %xmm13, %xmm13; vpor %xmm14, %xmm9, %xmm9; vpor %xmm15, %xmm8, %xmm8; vpor %xmm13, %xmm11, %xmm11;; vpxor 4 * 16(%rcx), %xmm8, %xmm8; vpxor 5 * 16(%rcx), %xmm9, %xmm9; vpxor 6 * 16(%rcx), %xmm10, %xmm10; vpxor 7 * 16(%rcx), %xmm11, %xmm11; vmovdqu %xmm8, 4 * 16(%rcx); vmovdqu %xmm9, 5 * 16(%rcx); vmovdqu %xmm10, 6 * 16(%rcx); vmovdqu %xmm11, 7 * 16(%rcx); vmovd ((0 + (8) * 8) + 12)(%rdi), %xmm8; vpshufb %xmm12, %xmm8, %xmm11; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm10; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm9; vpsrldq $1, %xmm8, %xmm8; vpshufb %xmm12, %xmm8, %xmm8; vpor %xmm4, %xmm8, %xmm8; vpor %xmm5, %xmm9, %xmm9; vpor %xmm6, %xmm10, %xmm10; vpor %xmm7, %xmm11, %xmm11; vpxor %xmm0, %xmm8, %xmm0; vmovdqu %xmm0, 0 * 16(%rax); vpxor %xmm1, %xmm9, %xmm1; vmovdqu %xmm1, 1 * 16(%rax); vpxor %xmm2, %xmm10, %xmm2; vmovdqu %xmm2, 2 * 16(%rax); vpxor %xmm3, %xmm11, %xmm3; vmovdqu %xmm3, 3 * 16(%rax);; + + + + + + + + vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, 
%xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((0) + 7) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; 
vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((0) + 7) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; 
vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, 
%xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((0) + 5) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu 
.Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((0) + 5) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb 
%xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; vmovdqu %xmm0, 0 * 16(%rax); vmovdqu %xmm1, 1 * 16(%rax); vmovdqu %xmm2, 2 * 16(%rax); vmovdqu %xmm3, 3 * 16(%rax); vmovdqu %xmm4, 4 * 16(%rax); vmovdqu %xmm5, 5 * 16(%rax); vmovdqu %xmm6, 6 * 16(%rax); vmovdqu %xmm7, 7 * 16(%rax);;; vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm2, %xmm2; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm3, %xmm3; vpshufb %xmm12, %xmm6, %xmm6; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm3, %xmm11, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm4, %xmm4; 
vaesenclast %xmm12, %xmm2, %xmm2; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm6, %xmm6; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm5, %xmm11, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + ((0) + 3) * 8)(%rdi), %xmm8; vpand %xmm1, %xmm15, %xmm10; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm1, %xmm13, %xmm1; vpxor %xmm10, %xmm1, %xmm1;; vpand %xmm4, %xmm15, %xmm10; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm4, %xmm13, %xmm4; vpxor %xmm10, %xmm4, %xmm4;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm5, %xmm0, %xmm0; vpxor %xmm6, %xmm1, %xmm1; vpxor %xmm7, %xmm2, %xmm2; vpxor %xmm4, %xmm3, %xmm3; vpxor %xmm2, %xmm4, %xmm4; vpxor %xmm3, %xmm5, %xmm5; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm1, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm11, %xmm4, %xmm4; vpxor 0 * 16(%rcx), %xmm4, %xmm4; vpxor %xmm10, %xmm5, %xmm5; vpxor 1 * 16(%rcx), %xmm5, %xmm5; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm6, %xmm6; vpxor 2 * 16(%rcx), %xmm6, %xmm6; vpxor %xmm8, %xmm7, %xmm7; vpxor 3 * 16(%rcx), %xmm7, %xmm7; vpxor %xmm15, %xmm0, %xmm0; vpxor 4 * 16(%rcx), %xmm0, %xmm0; vpxor %xmm14, %xmm1, %xmm1; vpxor 5 * 16(%rcx), %xmm1, %xmm1; vpxor %xmm13, %xmm2, %xmm2; vpxor 6 * 16(%rcx), %xmm2, %xmm2; vpxor %xmm12, %xmm3, %xmm3; vpxor 7 * 16(%rcx), %xmm3, %xmm3;; vmovdqu %xmm4, 0 * 16(%rcx); vmovdqu %xmm5, 1 * 16(%rcx); vmovdqu %xmm6, 2 * 16(%rcx); vmovdqu %xmm7, 3 * 16(%rcx); vmovdqu %xmm0, 4 * 16(%rcx); vmovdqu %xmm1, 5 * 16(%rcx); vmovdqu %xmm2, 6 * 16(%rcx); vmovdqu %xmm3, 7 * 16(%rcx); vmovdqu .Linv_shift_row(%rip), %xmm12; vbroadcastss .L0f0f0f0f(%rip), %xmm15; vmovdqu .Lpre_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpre_tf_hi_s1(%rip), %xmm9; vpshufb %xmm12, %xmm4, %xmm4; vpshufb %xmm12, %xmm3, 
%xmm3; vpshufb %xmm12, %xmm5, %xmm5; vpshufb %xmm12, %xmm0, %xmm0; vpshufb %xmm12, %xmm6, %xmm6; vpshufb %xmm12, %xmm1, %xmm1; vpshufb %xmm12, %xmm7, %xmm7; vpshufb %xmm12, %xmm2, %xmm2; vmovdqu .Lpre_tf_lo_s4(%rip), %xmm10; vmovdqu .Lpre_tf_hi_s4(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm5, %xmm15, %xmm14; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm5, %xmm9, %xmm5; vpxor %xmm14, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm14; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm0, %xmm9, %xmm0; vpxor %xmm14, %xmm0, %xmm0;; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm6, %xmm9, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm1, %xmm9, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm12, %xmm12, %xmm12; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm7, %xmm11, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm2, %xmm11, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s1(%rip), %xmm8; vmovdqu .Lpost_tf_hi_s1(%rip), %xmm9; vaesenclast %xmm12, %xmm4, %xmm4; vaesenclast %xmm12, %xmm3, %xmm3; vaesenclast %xmm12, %xmm5, %xmm5; vaesenclast %xmm12, %xmm0, %xmm0; vaesenclast %xmm12, %xmm6, %xmm6; vaesenclast %xmm12, %xmm1, %xmm1; vaesenclast %xmm12, %xmm7, %xmm7; vaesenclast %xmm12, %xmm2, %xmm2; vmovdqu .Lpost_tf_lo_s3(%rip), %xmm10; vmovdqu .Lpost_tf_hi_s3(%rip), %xmm11; vpand %xmm4, %xmm15, %xmm14; vpandn %xmm4, %xmm15, %xmm4; vpsrld $4, %xmm4, %xmm4; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm4, %xmm9, %xmm4; vpxor %xmm14, %xmm4, %xmm4;; vpand %xmm3, %xmm15, %xmm14; vpandn %xmm3, %xmm15, %xmm3; vpsrld $4, %xmm3, %xmm3; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm3, %xmm9, %xmm3; vpxor %xmm14, %xmm3, %xmm3;; vpand %xmm7, %xmm15, %xmm14; vpandn %xmm7, %xmm15, %xmm7; vpsrld $4, %xmm7, %xmm7; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm7, %xmm9, %xmm7; vpxor %xmm14, %xmm7, %xmm7;; vpand %xmm2, %xmm15, %xmm14; vpandn %xmm2, %xmm15, %xmm2; vpsrld $4, %xmm2, %xmm2; vpshufb %xmm14, %xmm8, %xmm14; vpshufb %xmm2, %xmm9, %xmm2; vpxor %xmm14, %xmm2, %xmm2;; vmovdqu .Lpost_tf_lo_s2(%rip), %xmm12; vmovdqu .Lpost_tf_hi_s2(%rip), %xmm13; vpand %xmm6, %xmm15, %xmm14; vpandn %xmm6, %xmm15, %xmm6; vpsrld $4, %xmm6, %xmm6; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm6, %xmm11, %xmm6; vpxor %xmm14, %xmm6, %xmm6;; vpand %xmm1, %xmm15, %xmm14; vpandn %xmm1, %xmm15, %xmm1; vpsrld $4, %xmm1, %xmm1; vpshufb %xmm14, %xmm10, %xmm14; vpshufb %xmm1, %xmm11, %xmm1; vpxor %xmm14, %xmm1, %xmm1;; vpxor %xmm14, %xmm14, %xmm14; vmovq (0 + (((0) + 3) + (-1)) * 8)(%rdi), %xmm8; vpand %xmm5, %xmm15, %xmm10; vpandn %xmm5, %xmm15, %xmm5; vpsrld $4, %xmm5, %xmm5; vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm5, %xmm13, %xmm5; vpxor %xmm10, %xmm5, %xmm5;; vpand %xmm0, %xmm15, %xmm10; vpandn %xmm0, %xmm15, %xmm0; vpsrld $4, %xmm0, %xmm0; 
vpshufb %xmm10, %xmm12, %xmm10; vpshufb %xmm0, %xmm13, %xmm0; vpxor %xmm10, %xmm0, %xmm0;; vpsrldq $5, %xmm8, %xmm13; vpsrldq $1, %xmm8, %xmm9; vpsrldq $2, %xmm8, %xmm10; vpsrldq $3, %xmm8, %xmm11; vpsrldq $4, %xmm8, %xmm12; vpshufb %xmm14, %xmm8, %xmm8; vpshufb %xmm14, %xmm9, %xmm9; vpshufb %xmm14, %xmm10, %xmm10; vpshufb %xmm14, %xmm11, %xmm11; vpshufb %xmm14, %xmm12, %xmm12; vpsrldq $2, %xmm13, %xmm15; vpshufb %xmm14, %xmm15, %xmm15; vpxor %xmm1, %xmm4, %xmm4; vpxor %xmm2, %xmm5, %xmm5; vpxor %xmm3, %xmm6, %xmm6; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm6, %xmm0, %xmm0; vpxor %xmm7, %xmm1, %xmm1; vpxor %xmm4, %xmm2, %xmm2; vpxor %xmm5, %xmm3, %xmm3; vpxor %xmm3, %xmm4, %xmm4; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm1, %xmm6, %xmm6; vpxor %xmm2, %xmm7, %xmm7; vpxor %xmm7, %xmm0, %xmm0; vpxor %xmm4, %xmm1, %xmm1; vpxor %xmm5, %xmm2, %xmm2; vpxor %xmm6, %xmm3, %xmm3; vpxor %xmm11, %xmm0, %xmm0; vpxor 0 * 16(%rax), %xmm0, %xmm0; vpxor %xmm10, %xmm1, %xmm1; vpxor 1 * 16(%rax), %xmm1, %xmm1; vpsrldq $1, %xmm13, %xmm11; vpshufb %xmm14, %xmm13, %xmm13; vpshufb %xmm14, %xmm11, %xmm14; vpxor %xmm9, %xmm2, %xmm2; vpxor 2 * 16(%rax), %xmm2, %xmm2; vpxor %xmm8, %xmm3, %xmm3; vpxor 3 * 16(%rax), %xmm3, %xmm3; vpxor %xmm15, %xmm4, %xmm4; vpxor 4 * 16(%rax), %xmm4, %xmm4; vpxor %xmm14, %xmm5, %xmm5; vpxor 5 * 16(%rax), %xmm5, %xmm5; vpxor %xmm13, %xmm6, %xmm6; vpxor 6 * 16(%rax), %xmm6, %xmm6; vpxor %xmm12, %xmm7, %xmm7; vpxor 7 * 16(%rax), %xmm7, %xmm7;; ;;; + + + + + vmovdqu 0 * 16(%rcx), %xmm8; + vmovdqu 1 * 16(%rcx), %xmm9; + vmovdqu 2 * 16(%rcx), %xmm10; + vmovdqu 3 * 16(%rcx), %xmm11; + vmovdqu 4 * 16(%rcx), %xmm12; + vmovdqu 5 * 16(%rcx), %xmm13; + vmovdqu 6 * 16(%rcx), %xmm14; + vmovdqu 7 * 16(%rcx), %xmm15; + + vmovdqu %xmm6, (%rax); vmovdqu %xmm7, 1 * 16(%rax); vpunpckhdq %xmm9, %xmm8, %xmm7; vpunpckldq %xmm9, %xmm8, %xmm8; vpunpckldq %xmm11, %xmm10, %xmm6; vpunpckhdq %xmm11, %xmm10, %xmm10; vpunpckhqdq %xmm6, %xmm8, %xmm9; vpunpcklqdq %xmm6, %xmm8, %xmm8; vpunpckhqdq %xmm10, %xmm7, %xmm11; vpunpcklqdq %xmm10, %xmm7, %xmm10;; vpunpckhdq %xmm13, %xmm12, %xmm7; vpunpckldq %xmm13, %xmm12, %xmm12; vpunpckldq %xmm15, %xmm14, %xmm6; vpunpckhdq %xmm15, %xmm14, %xmm14; vpunpckhqdq %xmm6, %xmm12, %xmm13; vpunpcklqdq %xmm6, %xmm12, %xmm12; vpunpckhqdq %xmm14, %xmm7, %xmm15; vpunpcklqdq %xmm14, %xmm7, %xmm14;; vmovdqu (%rax), %xmm6; vmovdqu 1 * 16(%rax), %xmm7; vmovdqu %xmm8, (%rax); vmovdqu %xmm9, 1 * 16(%rax); vpunpckhdq %xmm1, %xmm0, %xmm9; vpunpckldq %xmm1, %xmm0, %xmm0; vpunpckldq %xmm3, %xmm2, %xmm8; vpunpckhdq %xmm3, %xmm2, %xmm2; vpunpckhqdq %xmm8, %xmm0, %xmm1; vpunpcklqdq %xmm8, %xmm0, %xmm0; vpunpckhqdq %xmm2, %xmm9, %xmm3; vpunpcklqdq %xmm2, %xmm9, %xmm2;; vpunpckhdq %xmm5, %xmm4, %xmm9; vpunpckldq %xmm5, %xmm4, %xmm4; vpunpckldq %xmm7, %xmm6, %xmm8; vpunpckhdq %xmm7, %xmm6, %xmm6; vpunpckhqdq %xmm8, %xmm4, %xmm5; vpunpcklqdq %xmm8, %xmm4, %xmm4; vpunpckhqdq %xmm6, %xmm9, %xmm7; vpunpcklqdq %xmm6, %xmm9, %xmm6;; vmovdqu .Lshufb_16x16b(%rip), %xmm8; vmovdqu 1 * 16(%rax), %xmm9; vpshufb %xmm8, %xmm10, %xmm10; vpshufb %xmm8, %xmm11, %xmm11; vpshufb %xmm8, %xmm12, %xmm12; vpshufb %xmm8, %xmm13, %xmm13; vpshufb %xmm8, %xmm14, %xmm14; vpshufb %xmm8, %xmm15, %xmm15; vpshufb %xmm8, %xmm9, %xmm9; vpshufb %xmm8, %xmm0, %xmm0; vpshufb %xmm8, %xmm1, %xmm1; vpshufb %xmm8, %xmm2, %xmm2; vpshufb %xmm8, %xmm3, %xmm3; vpshufb %xmm8, %xmm4, %xmm4; vpshufb %xmm8, %xmm5, %xmm5; vpshufb %xmm8, %xmm6, %xmm6; vpshufb %xmm8, %xmm7, %xmm7; vmovdqu %xmm7, 1 * 16(%rax); vmovdqu (%rax), %xmm7; vpshufb %xmm8, %xmm7, %xmm8; 
vmovdqu %xmm6, (%rax); vpunpckhdq %xmm12, %xmm8, %xmm7; vpunpckldq %xmm12, %xmm8, %xmm8; vpunpckldq %xmm4, %xmm0, %xmm6; vpunpckhdq %xmm4, %xmm0, %xmm0; vpunpckhqdq %xmm6, %xmm8, %xmm12; vpunpcklqdq %xmm6, %xmm8, %xmm8; vpunpckhqdq %xmm0, %xmm7, %xmm4; vpunpcklqdq %xmm0, %xmm7, %xmm0;; vpunpckhdq %xmm13, %xmm9, %xmm7; vpunpckldq %xmm13, %xmm9, %xmm9; vpunpckldq %xmm5, %xmm1, %xmm6; vpunpckhdq %xmm5, %xmm1, %xmm1; vpunpckhqdq %xmm6, %xmm9, %xmm13; vpunpcklqdq %xmm6, %xmm9, %xmm9; vpunpckhqdq %xmm1, %xmm7, %xmm5; vpunpcklqdq %xmm1, %xmm7, %xmm1;; vmovdqu (%rax), %xmm6; vmovdqu 1 * 16(%rax), %xmm7; vmovdqu %xmm12, (%rax); vmovdqu %xmm13, 1 * 16(%rax); vpunpckhdq %xmm14, %xmm10, %xmm13; vpunpckldq %xmm14, %xmm10, %xmm10; vpunpckldq %xmm6, %xmm2, %xmm12; vpunpckhdq %xmm6, %xmm2, %xmm2; vpunpckhqdq %xmm12, %xmm10, %xmm14; vpunpcklqdq %xmm12, %xmm10, %xmm10; vpunpckhqdq %xmm2, %xmm13, %xmm6; vpunpcklqdq %xmm2, %xmm13, %xmm2;; vpunpckhdq %xmm15, %xmm11, %xmm13; vpunpckldq %xmm15, %xmm11, %xmm11; vpunpckldq %xmm7, %xmm3, %xmm12; vpunpckhdq %xmm7, %xmm3, %xmm3; vpunpckhqdq %xmm12, %xmm11, %xmm15; vpunpcklqdq %xmm12, %xmm11, %xmm11; vpunpckhqdq %xmm3, %xmm13, %xmm7; vpunpcklqdq %xmm3, %xmm13, %xmm3;; vmovdqu (%rax), %xmm12; vmovdqu 1 * 16(%rax), %xmm13;; vmovdqu %xmm0, (%rax); vmovq (0)(%rdi), %xmm0; vpshufb .Lpack_bswap(%rip), %xmm0, %xmm0; vpxor %xmm0, %xmm15, %xmm15; vpxor %xmm0, %xmm14, %xmm14; vpxor %xmm0, %xmm13, %xmm13; vpxor %xmm0, %xmm12, %xmm12; vpxor %xmm0, %xmm11, %xmm11; vpxor %xmm0, %xmm10, %xmm10; vpxor %xmm0, %xmm9, %xmm9; vpxor %xmm0, %xmm8, %xmm8; vpxor %xmm0, %xmm7, %xmm7; vpxor %xmm0, %xmm6, %xmm6; vpxor %xmm0, %xmm5, %xmm5; vpxor %xmm0, %xmm4, %xmm4; vpxor %xmm0, %xmm3, %xmm3; vpxor %xmm0, %xmm2, %xmm2; vpxor %xmm0, %xmm1, %xmm1; vpxor (%rax), %xmm0, %xmm0;; + + + + ret; + +.align 8 +.global camellia_ecb_enc_16way +.global _camellia_ecb_enc_16way + +camellia_ecb_enc_16way: +_camellia_ecb_enc_16way: + +.ifdef WINABI +pushq %rsi +pushq %rdi +subq $168, %rsp # 8 bytes to align stack and 16*10 bytes to store xmm register +vmovdqa %xmm6, 0*16 (%rsp) +vmovdqa %xmm7, 1*16 (%rsp) +vmovdqa %xmm8, 2*16 (%rsp) +vmovdqa %xmm9, 3*16 (%rsp) +vmovdqa %xmm10, 4*16 (%rsp) +vmovdqa %xmm11, 5*16 (%rsp) +vmovdqa %xmm12, 6*16 (%rsp) +vmovdqa %xmm13, 7*16 (%rsp) +vmovdqa %xmm14, 8*16 (%rsp) +vmovdqa %xmm15, 9*16 (%rsp) + +movq %rcx, %rdi; +movq %rdx, %rsi; +movq %r8, %rdx; +.endif + + + vzeroupper; + vmovq (0)(%rdi), %xmm0; vpshufb .Lpack_bswap(%rip), %xmm0, %xmm0; vpxor 0 * 16(%rdx), %xmm0, %xmm15; vpxor 1 * 16(%rdx), %xmm0, %xmm14; vpxor 2 * 16(%rdx), %xmm0, %xmm13; vpxor 3 * 16(%rdx), %xmm0, %xmm12; vpxor 4 * 16(%rdx), %xmm0, %xmm11; vpxor 5 * 16(%rdx), %xmm0, %xmm10; vpxor 6 * 16(%rdx), %xmm0, %xmm9; vpxor 7 * 16(%rdx), %xmm0, %xmm8; vpxor 8 * 16(%rdx), %xmm0, %xmm7; vpxor 9 * 16(%rdx), %xmm0, %xmm6; vpxor 10 * 16(%rdx), %xmm0, %xmm5; vpxor 11 * 16(%rdx), %xmm0, %xmm4; vpxor 12 * 16(%rdx), %xmm0, %xmm3; vpxor 13 * 16(%rdx), %xmm0, %xmm2; vpxor 14 * 16(%rdx), %xmm0, %xmm1; vpxor 15 * 16(%rdx), %xmm0, %xmm0;; + + + + + movq %rsi, %rax; + + call __camellia_enc_blk16; + + vmovdqu %xmm7, 0 * 16(%rsi); vmovdqu %xmm6, 1 * 16(%rsi); vmovdqu %xmm5, 2 * 16(%rsi); vmovdqu %xmm4, 3 * 16(%rsi); vmovdqu %xmm3, 4 * 16(%rsi); vmovdqu %xmm2, 5 * 16(%rsi); vmovdqu %xmm1, 6 * 16(%rsi); vmovdqu %xmm0, 7 * 16(%rsi); vmovdqu %xmm15, 8 * 16(%rsi); vmovdqu %xmm14, 9 * 16(%rsi); vmovdqu %xmm13, 10 * 16(%rsi); vmovdqu %xmm12, 11 * 16(%rsi); vmovdqu %xmm11, 12 * 16(%rsi); vmovdqu %xmm10, 13 * 16(%rsi); vmovdqu %xmm9, 14 * 
16(%rsi); vmovdqu %xmm8, 15 * 16(%rsi);; + + + + vzeroupper; + +.ifdef WINABI + +vmovdqa 0*16 (%rsp), %xmm6 +vmovdqa 1*16 (%rsp), %xmm7 +vmovdqa 2*16 (%rsp), %xmm8 +vmovdqa 3*16 (%rsp), %xmm9 +vmovdqa 4*16 (%rsp), %xmm10 +vmovdqa 5*16 (%rsp), %xmm11 +vmovdqa 6*16 (%rsp), %xmm12 +vmovdqa 7*16 (%rsp), %xmm13 +vmovdqa 8*16 (%rsp), %xmm14 +vmovdqa 9*16 (%rsp), %xmm15 + +addq $168, %rsp + +popq %rdi +popq %rsi +.endif + + ret; + +.align 8 +.global camellia_ecb_dec_16way +.global _camellia_ecb_dec_16way + +camellia_ecb_dec_16way: +_camellia_ecb_dec_16way: + +.ifdef WINABI +pushq %rsi +pushq %rdi +subq $168, %rsp # 8 bytes to align stack and 16*10 bytes to store xmm register +vmovdqa %xmm6, 0*16 (%rsp) +vmovdqa %xmm7, 1*16 (%rsp) +vmovdqa %xmm8, 2*16 (%rsp) +vmovdqa %xmm9, 3*16 (%rsp) +vmovdqa %xmm10, 4*16 (%rsp) +vmovdqa %xmm11, 5*16 (%rsp) +vmovdqa %xmm12, 6*16 (%rsp) +vmovdqa %xmm13, 7*16 (%rsp) +vmovdqa %xmm14, 8*16 (%rsp) +vmovdqa %xmm15, 9*16 (%rsp) + +movq %rcx, %rdi; +movq %rdx, %rsi; +movq %r8, %rdx; +.endif + + + vzeroupper; + + + vmovq (256)(%rdi), %xmm0; vpshufb .Lpack_bswap(%rip), %xmm0, %xmm0; vpxor 0 * 16(%rdx), %xmm0, %xmm15; vpxor 1 * 16(%rdx), %xmm0, %xmm14; vpxor 2 * 16(%rdx), %xmm0, %xmm13; vpxor 3 * 16(%rdx), %xmm0, %xmm12; vpxor 4 * 16(%rdx), %xmm0, %xmm11; vpxor 5 * 16(%rdx), %xmm0, %xmm10; vpxor 6 * 16(%rdx), %xmm0, %xmm9; vpxor 7 * 16(%rdx), %xmm0, %xmm8; vpxor 8 * 16(%rdx), %xmm0, %xmm7; vpxor 9 * 16(%rdx), %xmm0, %xmm6; vpxor 10 * 16(%rdx), %xmm0, %xmm5; vpxor 11 * 16(%rdx), %xmm0, %xmm4; vpxor 12 * 16(%rdx), %xmm0, %xmm3; vpxor 13 * 16(%rdx), %xmm0, %xmm2; vpxor 14 * 16(%rdx), %xmm0, %xmm1; vpxor 15 * 16(%rdx), %xmm0, %xmm0;; + + + + movq %rsi, %rax; + + call __camellia_dec_blk16; + + vmovdqu %xmm7, 0 * 16(%rsi); vmovdqu %xmm6, 1 * 16(%rsi); vmovdqu %xmm5, 2 * 16(%rsi); vmovdqu %xmm4, 3 * 16(%rsi); vmovdqu %xmm3, 4 * 16(%rsi); vmovdqu %xmm2, 5 * 16(%rsi); vmovdqu %xmm1, 6 * 16(%rsi); vmovdqu %xmm0, 7 * 16(%rsi); vmovdqu %xmm15, 8 * 16(%rsi); vmovdqu %xmm14, 9 * 16(%rsi); vmovdqu %xmm13, 10 * 16(%rsi); vmovdqu %xmm12, 11 * 16(%rsi); vmovdqu %xmm11, 12 * 16(%rsi); vmovdqu %xmm10, 13 * 16(%rsi); vmovdqu %xmm9, 14 * 16(%rsi); vmovdqu %xmm8, 15 * 16(%rsi);; + + + + vzeroupper; + +.ifdef WINABI +vmovdqa 0*16 (%rsp), %xmm6 +vmovdqa 1*16 (%rsp), %xmm7 +vmovdqa 2*16 (%rsp), %xmm8 +vmovdqa 3*16 (%rsp), %xmm9 +vmovdqa 4*16 (%rsp), %xmm10 +vmovdqa 5*16 (%rsp), %xmm11 +vmovdqa 6*16 (%rsp), %xmm12 +vmovdqa 7*16 (%rsp), %xmm13 +vmovdqa 8*16 (%rsp), %xmm14 +vmovdqa 9*16 (%rsp), %xmm15 + +addq $168, %rsp + +popq %rdi +popq %rsi +.endif + + ret; + + .ifndef __YASM__ +#if defined(__linux__) && defined(__ELF__) +.section .note.GNU-stack,"",%progbits +#endif + .endif + diff --git a/src/Crypto/Camellia_aesni_x86.S b/src/Crypto/Camellia_aesni_x86.S new file mode 100644 index 00000000..0b1721ea --- /dev/null +++ b/src/Crypto/Camellia_aesni_x86.S @@ -0,0 +1,5 @@ + .ifndef __YASM__ +#if defined(__linux__) && defined(__ELF__) +.section .note.GNU-stack,"",%progbits +#endif + .endif diff --git a/src/Crypto/Camellia_x64.S b/src/Crypto/Camellia_x64.S new file mode 100644 index 00000000..0358529b --- /dev/null +++ b/src/Crypto/Camellia_x64.S @@ -0,0 +1,318 @@ +/* camellia_asm.S ver 1.1 + * + * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi> + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + * AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE + * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THIS SOFTWARE. + */ + + /* Adapted to VeraCrypt + - original file pre-processed using "gcc -E" + - added instructions at the beginning of each function to adapt to the Windows parameter-passing convention + - Fix "'ADDR32' relocation" error when building the Windows driver by using lea to load the addresses of the tables into + registers instead of using the immediate form in xorq. This required saving many registers on the stack in order + to free 7 general purpose registers %r9-%r15; %xmm0/%xmm1 are used for the 8th table: %xmm0 stores the table + address and %xmm1 is used to save/restore %r8 each time %r8 is used for memory addressed by %xmm0. + These changes reduce performance by around 8% compared to the original code. + The original code did not have the 'ADDR32' relocation issue because it was built as an executable, not as a shared + library as is the case for the Windows driver. + + */ + +.text + +.extern camellia_sp10011110; +.extern camellia_sp22000222; +.extern camellia_sp03303033; +.extern camellia_sp00444404; +.extern camellia_sp02220222; +.extern camellia_sp30333033; +.extern camellia_sp44044404; +.extern camellia_sp11101110; + +.align 8 +.global camellia_encrypt_asm; +.global _camellia_encrypt_asm; + +camellia_encrypt_asm: +_camellia_encrypt_asm: + +.ifdef WINABI +pushq %rsi +pushq %rdi +movq %rcx, %rdi; +movq %rdx, %rsi; +movq %r8, %rdx; +.endif + +pushq %r12 +pushq %r13 +pushq %r14 +pushq %r15 + +leaq camellia_sp10011110(%rip), %r12 +leaq camellia_sp22000222(%rip), %r13 +leaq camellia_sp03303033(%rip), %r14 +leaq camellia_sp00444404(%rip), %r15 +leaq camellia_sp02220222(%rip), %r9 +leaq camellia_sp30333033(%rip), %r10 +leaq camellia_sp44044404(%rip), %r11 +leaq camellia_sp11101110(%rip), %rax +movq %rax, %xmm0 + + pushq %rbp + + pushq %rsi + movq %rdx, %rsi; + + movq (%rsi), %rax; bswapq %rax; rolq $32, %rax; movq 4*2(%rsi), %rcx; bswapq %rcx; rorq $32, %rcx; xorq 0(%rdi), %rax;; + + movq (0 + ((0 + 2) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((0 + 3) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((0 + 4) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi;
rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((0 + 5) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((0 + 6) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((0 + 7) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;;; + movl (0 + ((8 + 0) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((8 + 1) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, %rcx; movq (0 + ((8 + 0) * 2) * 4)(%rdi), %r8; orq %rax, %r8; shrq $32, %r8; xorq %r8, %rax; movl (0 + ((8 + 1) * 2) * 4)(%rdi), %esi; andl %ecx, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rcx;;; + movq (0 + ((8 + 2) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((8 + 3) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, 
%rax;;; movq (0 + ((8 + 4) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((8 + 5) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((8 + 6) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((8 + 7) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;;; + movl (0 + ((16 + 0) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((16 + 1) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, %rcx; movq (0 + ((16 + 0) * 2) * 4)(%rdi), %r8; orq %rax, %r8; shrq $32, %r8; xorq %r8, %rax; movl (0 + ((16 + 1) * 2) * 4)(%rdi), %esi; andl %ecx, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rcx;;; + movq (0 + ((16 + 2) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((16 + 3) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl 
%ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((16 + 4) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((16 + 5) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((16 + 6) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((16 + 7) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;;; + movl $24, %ebp; + + movl (0 + ((24 + 0) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((24 + 1) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, %rcx; movq (0 + ((24 + 0) * 2) * 4)(%rdi), %r8; orq %rax, %r8; shrq $32, %r8; xorq %r8, %rax; movl (0 + ((24 + 1) * 2) * 4)(%rdi), %esi; andl %ecx, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rcx;;; + movq (0 + ((24 + 2) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((24 + 3) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; 
movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((24 + 4) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((24 + 5) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((24 + 6) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((24 + 7) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;;; + movl $32, %ebp; + +__enc_done: + popq %rsi + + xorq 0(%rdi, %rbp, 8), %rcx; rorq $32, %rcx; bswapq %rcx; movq %rcx, (%rsi); rolq $32, %rax; bswapq %rax; movq %rax, 4*2(%rsi);; + + popq %rbp + +popq %r15 +popq %r14 +popq %r13 +popq %r12 + +.ifdef WINABI +popq %rdi +popq %rsi +.endif + + ret; + +.align 8 +.global camellia_decrypt_asm; +.global _camellia_decrypt_asm; + +camellia_decrypt_asm: +_camellia_decrypt_asm: + +.ifdef WINABI +pushq %rsi +pushq %rdi +movq %rcx, %rdi; +movq %rdx, %rsi; +movq %r8, %rdx; +.endif + +pushq %r12 +pushq %r13 +pushq %r14 +pushq %r15 + + + +leaq camellia_sp10011110(%rip), %r12 +leaq camellia_sp22000222(%rip), %r13 +leaq camellia_sp03303033(%rip), %r14 +leaq camellia_sp00444404(%rip), %r15 +leaq camellia_sp02220222(%rip), %r9 +leaq camellia_sp30333033(%rip), %r10 +leaq camellia_sp44044404(%rip), %r11 +leaq camellia_sp11101110(%rip), %rax +movq %rax, %xmm0 + + pushq %rbp + pushq %rsi + movq %rdx, %rsi; + + movq (%rsi), %rax; bswapq %rax; rolq $32, %rax; movq 4*2(%rsi), %rcx; bswapq %rcx; rorq $32, %rcx; xorq 256(%rdi), %rax;; + + movq (0 + ((24 + 7) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq 
(%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((24 + 6) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((24 + 5) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((24 + 4) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((24 + 3) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((24 + 2) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;;; + movl (0 + ((24 + 1) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((24 + 0) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, %rcx; movq (0 + ((24 + 1) * 2) * 4)(%rdi), %r8; orq %rax, %r8; shrq $32, %r8; xorq %r8, %rax; movl (0 + ((24 + 0) * 2) * 4)(%rdi), %esi; andl %ecx, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rcx;;; + 
+__dec_rounds16: + movq (0 + ((16 + 7) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((16 + 6) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((16 + 5) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((16 + 4) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((16 + 3) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((16 + 2) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;;; + movl (0 + ((16 + 1) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((16 + 0) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, %rcx; movq (0 + ((16 + 1) * 2) * 4)(%rdi), %r8; orq %rax, %r8; shrq $32, %r8; xorq %r8, 
%rax; movl (0 + ((16 + 0) * 2) * 4)(%rdi), %esi; andl %ecx, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rcx;;; + movq (0 + ((8 + 7) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((8 + 6) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((8 + 5) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((8 + 4) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((8 + 3) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((8 + 2) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;;; + movl (0 + ((8 + 1) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((8 + 0) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, 
%rcx; movq (0 + ((8 + 1) * 2) * 4)(%rdi), %r8; orq %rax, %r8; shrq $32, %r8; xorq %r8, %rax; movl (0 + ((8 + 0) * 2) * 4)(%rdi), %esi; andl %ecx, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rcx;;; + movq (0 + ((0 + 7) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((0 + 6) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((0 + 5) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((0 + 4) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;; movq (0 + ((0 + 3) * 2) * 4)(%rdi), %r8; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rcx;;; movq (0 + ((0 + 2) * 2) * 4)(%rdi), %r8; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; xorq %r8, %rax;;;; + + popq %rsi + + xorq 0(%rdi), %rcx; rorq $32, %rcx; bswapq %rcx; movq %rcx, (%rsi); rolq $32, %rax; bswapq 
%rax; movq %rax, 4*2(%rsi);; + + popq %rbp + + popq %r15 +popq %r14 +popq %r13 +popq %r12 + + .ifdef WINABI +popq %rdi +popq %rsi +.endif + + ret; + +.align 8 +.global camellia_enc_blk2 +.global _camellia_enc_blk2 + +camellia_enc_blk2: +_camellia_enc_blk2: + +.ifdef WINABI +pushq %rsi +pushq %rdi +movq %rcx, %rdi; +movq %rdx, %rsi; +movq %r8, %rdx; +.endif + +pushq %r12 +pushq %r13 +pushq %r14 +pushq %r15 + +leaq camellia_sp10011110(%rip), %r12 +leaq camellia_sp22000222(%rip), %r13 +leaq camellia_sp03303033(%rip), %r14 +leaq camellia_sp00444404(%rip), %r15 +leaq camellia_sp02220222(%rip), %r9 +leaq camellia_sp30333033(%rip), %r10 +leaq camellia_sp44044404(%rip), %r11 +leaq camellia_sp11101110(%rip), %rax +movq %rax, %xmm0 + + + pushq %rbx; + + pushq %rbp + pushq %rsi + movq %rdx, %rsi; + + movq (%rsi), %rax; bswapq %rax; rorq $32, %rax; movq 4*2(%rsi), %rcx; bswapq %rcx; rolq $32, %rcx; xorq 0(%rdi), %rax; movq 8*2(%rsi), %rbx; bswapq %rbx; rorq $32, %rbx; movq 12*2(%rsi), %rdx; bswapq %rdx; rolq $32, %rdx; xorq 0(%rdi), %rbx;; + + movq (0 + ((0 + 2) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((0 + 3) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((0 + 4) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), 
%rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((0 + 5) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((0 + 6) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((0 + 7) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, 
%r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;;; + movl (0 + ((8 + 0) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((8 + 1) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, %rcx; movl (0 + ((8 + 0) * 2) * 4)(%rdi), %r8d; andl %ebx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rbx; movq (0 + ((8 + 1) * 2) * 4)(%rdi), %rsi; orq %rdx, %rsi; shrq $32, %rsi; xorq %rsi, %rdx; movq (0 + ((8 + 0) * 2) * 4)(%rdi), %rbp; orq %rax, %rbp; shrq $32, %rbp; xorq %rbp, %rax; movl (0 + ((8 + 1) * 2) * 4)(%rdi), %r8d; andl %ecx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rcx; movq (0 + ((8 + 0) * 2) * 4)(%rdi), %rsi; orq %rbx, %rsi; shrq $32, %rsi; xorq %rsi, %rbx; movl (0 + ((8 + 1) * 2) * 4)(%rdi), %ebp; andl %edx, %ebp; roll $1, %ebp; shlq $32, %rbp; xorq %rbp, %rdx;;; + movq (0 + ((8 + 2) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((8 + 3) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((8 + 4) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq 
(%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((8 + 5) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((8 + 6) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((8 + 7) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 
8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;;; + movl (0 + ((16 + 0) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((16 + 1) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, %rcx; movl (0 + ((16 + 0) * 2) * 4)(%rdi), %r8d; andl %ebx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rbx; movq (0 + ((16 + 1) * 2) * 4)(%rdi), %rsi; orq %rdx, %rsi; shrq $32, %rsi; xorq %rsi, %rdx; movq (0 + ((16 + 0) * 2) * 4)(%rdi), %rbp; orq %rax, %rbp; shrq $32, %rbp; xorq %rbp, %rax; movl (0 + ((16 + 1) * 2) * 4)(%rdi), %r8d; andl %ecx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rcx; movq (0 + ((16 + 0) * 2) * 4)(%rdi), %rsi; orq %rbx, %rsi; shrq $32, %rsi; xorq %rsi, %rbx; movl (0 + ((16 + 1) * 2) * 4)(%rdi), %ebp; andl %edx, %ebp; roll $1, %ebp; shlq $32, %rbp; xorq %rbp, %rdx;;; + movq (0 + ((16 + 2) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((16 + 3) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((16 + 4) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl 
%bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((16 + 5) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((16 + 6) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((16 + 7) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq 
$16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;;; + movl $24, %r8d; + + movl (0 + ((24 + 0) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((24 + 1) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, %rcx; movl (0 + ((24 + 0) * 2) * 4)(%rdi), %r8d; andl %ebx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rbx; movq (0 + ((24 + 1) * 2) * 4)(%rdi), %rsi; orq %rdx, %rsi; shrq $32, %rsi; xorq %rsi, %rdx; movq (0 + ((24 + 0) * 2) * 4)(%rdi), %rbp; orq %rax, %rbp; shrq $32, %rbp; xorq %rbp, %rax; movl (0 + ((24 + 1) * 2) * 4)(%rdi), %r8d; andl %ecx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rcx; movq (0 + ((24 + 0) * 2) * 4)(%rdi), %rsi; orq %rbx, %rsi; shrq $32, %rsi; xorq %rsi, %rbx; movl (0 + ((24 + 1) * 2) * 4)(%rdi), %ebp; andl %edx, %ebp; roll $1, %ebp; shlq $32, %rbp; xorq %rbp, %rdx;;; + movq (0 + ((24 + 2) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((24 + 3) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((24 + 4) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; 
rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((24 + 5) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((24 + 6) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((24 + 7) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 
8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;;; + movl $32, %r8d; + +__enc2_done: + popq %rsi + + xorq 0(%rdi, %r8, 8), %rcx; rolq $32, %rcx; bswapq %rcx; movq %rcx, (%rsi); rorq $32, %rax; bswapq %rax; movq %rax, 4*2(%rsi); xorq 0(%rdi, %r8, 8), %rdx; rolq $32, %rdx; bswapq %rdx; movq %rdx, 8*2(%rsi); rorq $32, %rbx; bswapq %rbx; movq %rbx, 12*2(%rsi);; + + popq %rbp + popq %rbx; + + popq %r15 +popq %r14 +popq %r13 +popq %r12 + + .ifdef WINABI +popq %rdi +popq %rsi +.endif + + ret; + +.align 8 +.global camellia_dec_blk2 +.global _camellia_dec_blk2 + +camellia_dec_blk2: +_camellia_dec_blk2: + +.ifdef WINABI +pushq %rsi +pushq %rdi +movq %rcx, %rdi; +movq %rdx, %rsi; +movq %r8, %rdx; +.endif + +pushq %r12 +pushq %r13 +pushq %r14 +pushq %r15 + +leaq camellia_sp10011110(%rip), %r12 +leaq camellia_sp22000222(%rip), %r13 +leaq camellia_sp03303033(%rip), %r14 +leaq camellia_sp00444404(%rip), %r15 +leaq camellia_sp02220222(%rip), %r9 +leaq camellia_sp30333033(%rip), %r10 +leaq camellia_sp44044404(%rip), %r11 +leaq camellia_sp11101110(%rip), %rax +movq %rax, %xmm0 + + pushq %rbx + + pushq %rbp + pushq %rsi + movq %rdx, %rsi; + + movq (%rsi), %rax; bswapq %rax; rorq $32, %rax; movq 4*2(%rsi), %rcx; bswapq %rcx; rolq $32, %rcx; xorq 256(%rdi), %rax; movq 8*2(%rsi), %rbx; bswapq %rbx; rorq $32, %rbx; movq 12*2(%rsi), %rdx; bswapq %rdx; rolq $32, %rdx; xorq 256(%rdi), %rbx;; + + movq (0 + ((24 + 7) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((24 + 6) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((24 + 5) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, 
%esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((24 + 4) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((24 + 3) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((24 + 2) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq 
(%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;;; + movl (0 + ((24 + 1) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((24 + 0) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, %rcx; movl (0 + ((24 + 1) * 2) * 4)(%rdi), %r8d; andl %ebx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rbx; movq (0 + ((24 + 0) * 2) * 4)(%rdi), %rsi; orq %rdx, %rsi; shrq $32, %rsi; xorq %rsi, %rdx; movq (0 + ((24 + 1) * 2) * 4)(%rdi), %rbp; orq %rax, %rbp; shrq $32, %rbp; xorq %rbp, %rax; movl (0 + ((24 + 0) * 2) * 4)(%rdi), %r8d; andl %ecx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rcx; movq (0 + ((24 + 1) * 2) * 4)(%rdi), %rsi; orq %rbx, %rsi; shrq $32, %rsi; xorq %rsi, %rbx; movl (0 + ((24 + 0) * 2) * 4)(%rdi), %ebp; andl %edx, %ebp; roll $1, %ebp; shlq $32, %rbp; xorq %rbp, %rdx;;; + +__dec2_rounds16: + movq (0 + ((16 + 7) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((16 + 6) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((16 + 5) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, 
%rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((16 + 4) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((16 + 3) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((16 + 2) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, 
%ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;;; + movl (0 + ((16 + 1) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((16 + 0) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, %rcx; movl (0 + ((16 + 1) * 2) * 4)(%rdi), %r8d; andl %ebx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rbx; movq (0 + ((16 + 0) * 2) * 4)(%rdi), %rsi; orq %rdx, %rsi; shrq $32, %rsi; xorq %rsi, %rdx; movq (0 + ((16 + 1) * 2) * 4)(%rdi), %rbp; orq %rax, %rbp; shrq $32, %rbp; xorq %rbp, %rax; movl (0 + ((16 + 0) * 2) * 4)(%rdi), %r8d; andl %ecx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rcx; movq (0 + ((16 + 1) * 2) * 4)(%rdi), %rsi; orq %rbx, %rsi; shrq $32, %rsi; xorq %rsi, %rbx; movl (0 + ((16 + 0) * 2) * 4)(%rdi), %ebp; andl %edx, %ebp; roll $1, %ebp; shlq $32, %rbp; xorq %rbp, %rdx;;; + movq (0 + ((8 + 7) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((8 + 6) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((8 + 5) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, 
%ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((8 + 4) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((8 + 3) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((8 + 2) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 
8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;;; + movl (0 + ((8 + 1) * 2) * 4)(%rdi), %esi; andl %eax, %esi; roll $1, %esi; shlq $32, %rsi; xorq %rsi, %rax; movq (0 + ((8 + 0) * 2) * 4)(%rdi), %rbp; orq %rcx, %rbp; shrq $32, %rbp; xorq %rbp, %rcx; movl (0 + ((8 + 1) * 2) * 4)(%rdi), %r8d; andl %ebx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rbx; movq (0 + ((8 + 0) * 2) * 4)(%rdi), %rsi; orq %rdx, %rsi; shrq $32, %rsi; xorq %rsi, %rdx; movq (0 + ((8 + 1) * 2) * 4)(%rdi), %rbp; orq %rax, %rbp; shrq $32, %rbp; xorq %rbp, %rax; movl (0 + ((8 + 0) * 2) * 4)(%rdi), %r8d; andl %ecx, %r8d; roll $1, %r8d; shlq $32, %r8; xorq %r8, %rcx; movq (0 + ((8 + 1) * 2) * 4)(%rdi), %rsi; orq %rbx, %rsi; shrq $32, %rsi; xorq %rsi, %rbx; movl (0 + ((8 + 0) * 2) * 4)(%rdi), %ebp; andl %edx, %ebp; roll $1, %ebp; shlq $32, %rbp; xorq %rbp, %rdx;;; + movq (0 + ((0 + 7) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((0 + 6) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((0 + 5) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; 
xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((0 + 4) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;; movq (0 + ((0 + 3) * 2) * 4)(%rdi), %r8; xorq %r8, %rdx; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r15, %rbp, 8), %rcx; xorq (%r14, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rcx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rcx;; movzbl %al, %ebp; movzbl %ah, %esi; rorq $16, %rax; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r15, %rbp, 8), %rdx; xorq (%r14, %rsi, 8), %rdx;; xorq %r8, %rcx; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r13, %rbp, 8), %rdx; xorq (%r12, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rdx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rdx;; movzbl %bl, %ebp; movzbl %bh, %esi; rorq $16, %rbx; xorq (%r10, %rbp, 8), %rdx; xorq (%r9, %rsi, 8), %rdx;;;; movq (0 + ((0 + 2) * 2) * 4)(%rdi), %r8; xorq %r8, %rbx; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r15, %rbp, 8), %rax; xorq (%r14, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r13, %rbp, 8), %r8; xorq (%r12, %rsi, 8), %r8;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rax; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rax;; movzbl %cl, %ebp; movzbl %ch, %esi; rorq $16, %rcx; xorq (%r10, %rbp, 8), %r8; xorq (%r9, %rsi, 8), %r8;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r15, %rbp, 8), %rbx; xorq (%r14, %rsi, 8), %rbx;; xorq %r8, %rax; movzbl %dl, 
%ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r13, %rbp, 8), %rbx; xorq (%r12, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; movq %r8, %xmm1; movq %xmm0, %r8; xorq (%r8, %rbp, 8), %rbx; movq %xmm1, %r8; xorq (%r11, %rsi, 8), %rbx;; movzbl %dl, %ebp; movzbl %dh, %esi; rorq $16, %rdx; xorq (%r10, %rbp, 8), %rbx; xorq (%r9, %rsi, 8), %rbx;;;;; + + popq %rsi + + xorq 0(%rdi), %rcx; rolq $32, %rcx; bswapq %rcx; movq %rcx, (%rsi); rorq $32, %rax; bswapq %rax; movq %rax, 4*2(%rsi); xorq 0(%rdi), %rdx; rolq $32, %rdx; bswapq %rdx; movq %rdx, 8*2(%rsi); rorq $32, %rbx; bswapq %rbx; movq %rbx, 12*2(%rsi);; + + popq %rbp + + popq %rbx; + + popq %r15 +popq %r14 +popq %r13 +popq %r12 + + .ifdef WINABI +popq %rdi +popq %rsi +.endif + + ret; + + .ifndef __YASM__ +#if defined(__linux__) && defined(__ELF__) +.section .note.GNU-stack,"",%progbits +#endif + .endif + diff --git a/src/Crypto/Camellia_x86.S b/src/Crypto/Camellia_x86.S new file mode 100644 index 00000000..0b1721ea --- /dev/null +++ b/src/Crypto/Camellia_x86.S @@ -0,0 +1,5 @@ + .ifndef __YASM__ +#if defined(__linux__) && defined(__ELF__) +.section .note.GNU-stack,"",%progbits +#endif + .endif diff --git a/src/Crypto/Crypto.vcproj b/src/Crypto/Crypto.vcproj deleted file mode 100644 index 15c5c4fb..00000000 --- a/src/Crypto/Crypto.vcproj +++ /dev/null @@ -1,587 +0,0 @@ -<?xml version="1.0" encoding="Windows-1252"?> -<VisualStudioProject - ProjectType="Visual C++" - Version="9.00" - Name="Crypto" - ProjectGUID="{993245CF-6B70-47EE-91BB-39F8FC6DC0E7}" - RootNamespace="Crypto" - Keyword="Win32Proj" - TargetFrameworkVersion="131072" - > - <Platforms> - <Platform - Name="Win32" - /> - <Platform - Name="x64" - /> - </Platforms> - <ToolFiles> - </ToolFiles> - <Configurations> - <Configuration - Name="Debug|Win32" - OutputDirectory="Debug" - IntermediateDirectory="Debug" - ConfigurationType="4" - InheritedPropertySheets="$(VCInstallDir)VCProjectDefaults\UpgradeFromVC71.vsprops" - CharacterSet="1" - > - <Tool - Name="VCPreBuildEventTool" - /> - <Tool - Name="VCCustomBuildTool" - /> - <Tool - Name="VCXMLDataGeneratorTool" - /> - <Tool - Name="VCWebServiceProxyGeneratorTool" - /> - <Tool - Name="VCMIDLTool" - /> - <Tool - Name="VCCLCompilerTool" - Optimization="0" - AdditionalIncludeDirectories=""$(ProjectDir)\..";"$(ProjectDir)\..\Common"" - PreprocessorDefinitions="WIN32;DEBUG;_DEBUG;_LIB;_CRT_SECURE_NO_DEPRECATE;_CRT_NON_CONFORMING_SWPRINTFS" - MinimalRebuild="true" - BasicRuntimeChecks="0" - RuntimeLibrary="1" - BufferSecurityCheck="false" - UsePrecompiledHeader="0" - WarningLevel="4" - DebugInformationFormat="3" - DisableSpecificWarnings="4100;4127;4201" - /> - <Tool - Name="VCManagedResourceCompilerTool" - /> - <Tool - Name="VCResourceCompilerTool" - /> - <Tool - Name="VCPreLinkEventTool" - /> - <Tool - Name="VCLibrarianTool" - OutputFile="$(OutDir)/Crypto.lib" - /> - <Tool - Name="VCALinkTool" - /> - <Tool - Name="VCXDCMakeTool" - /> - <Tool - Name="VCBscMakeTool" - /> - <Tool - Name="VCFxCopTool" - /> - <Tool - Name="VCPostBuildEventTool" - /> - </Configuration> - <Configuration - Name="Debug|x64" - OutputDirectory="$(PlatformName)\$(ConfigurationName)" - IntermediateDirectory="$(PlatformName)\$(ConfigurationName)" - ConfigurationType="4" - InheritedPropertySheets="$(VCInstallDir)VCProjectDefaults\UpgradeFromVC71.vsprops" - CharacterSet="1" - > - <Tool - Name="VCPreBuildEventTool" - /> - <Tool - Name="VCCustomBuildTool" - /> - <Tool - Name="VCXMLDataGeneratorTool" - /> - <Tool - Name="VCWebServiceProxyGeneratorTool" - /> - 
<Tool - Name="VCMIDLTool" - TargetEnvironment="3" - /> - <Tool - Name="VCCLCompilerTool" - Optimization="0" - AdditionalIncludeDirectories=""$(ProjectDir)\..";"$(ProjectDir)\..\Common"" - PreprocessorDefinitions="WIN32;DEBUG;_DEBUG;_LIB;_CRT_SECURE_NO_DEPRECATE;_CRT_NON_CONFORMING_SWPRINTFS" - MinimalRebuild="true" - BasicRuntimeChecks="0" - RuntimeLibrary="1" - BufferSecurityCheck="false" - UsePrecompiledHeader="0" - WarningLevel="4" - DebugInformationFormat="3" - DisableSpecificWarnings="4100;4127;4201" - /> - <Tool - Name="VCManagedResourceCompilerTool" - /> - <Tool - Name="VCResourceCompilerTool" - /> - <Tool - Name="VCPreLinkEventTool" - /> - <Tool - Name="VCLibrarianTool" - OutputFile="$(OutDir)/Crypto.lib" - /> - <Tool - Name="VCALinkTool" - /> - <Tool - Name="VCXDCMakeTool" - /> - <Tool - Name="VCBscMakeTool" - /> - <Tool - Name="VCFxCopTool" - /> - <Tool - Name="VCPostBuildEventTool" - /> - </Configuration> - <Configuration - Name="Release|Win32" - OutputDirectory="Release" - IntermediateDirectory="Release" - ConfigurationType="4" - InheritedPropertySheets="$(VCInstallDir)VCProjectDefaults\UpgradeFromVC71.vsprops" - CharacterSet="1" - > - <Tool - Name="VCPreBuildEventTool" - /> - <Tool - Name="VCCustomBuildTool" - /> - <Tool - Name="VCXMLDataGeneratorTool" - /> - <Tool - Name="VCWebServiceProxyGeneratorTool" - /> - <Tool - Name="VCMIDLTool" - /> - <Tool - Name="VCCLCompilerTool" - Optimization="2" - AdditionalIncludeDirectories=""$(ProjectDir)\..";"$(ProjectDir)\..\Common"" - PreprocessorDefinitions="WIN32;NDEBUG;_LIB;_CRT_SECURE_NO_DEPRECATE;_CRT_NON_CONFORMING_SWPRINTFS" - RuntimeLibrary="0" - BufferSecurityCheck="true" - UsePrecompiledHeader="0" - AssemblerOutput="2" - AssemblerListingLocation="$(IntDir)/" - WarningLevel="4" - Detect64BitPortabilityProblems="false" - DebugInformationFormat="0" - DisableSpecificWarnings="4100;4127;4201" - /> - <Tool - Name="VCManagedResourceCompilerTool" - /> - <Tool - Name="VCResourceCompilerTool" - /> - <Tool - Name="VCPreLinkEventTool" - /> - <Tool - Name="VCLibrarianTool" - OutputFile="$(OutDir)/Crypto.lib" - AdditionalLibraryDirectories="$(TargetDir)" - /> - <Tool - Name="VCALinkTool" - /> - <Tool - Name="VCXDCMakeTool" - /> - <Tool - Name="VCBscMakeTool" - /> - <Tool - Name="VCFxCopTool" - /> - <Tool - Name="VCPostBuildEventTool" - /> - </Configuration> - <Configuration - Name="Release|x64" - OutputDirectory="$(PlatformName)\$(ConfigurationName)" - IntermediateDirectory="$(PlatformName)\$(ConfigurationName)" - ConfigurationType="4" - InheritedPropertySheets="$(VCInstallDir)VCProjectDefaults\UpgradeFromVC71.vsprops" - CharacterSet="1" - > - <Tool - Name="VCPreBuildEventTool" - /> - <Tool - Name="VCCustomBuildTool" - /> - <Tool - Name="VCXMLDataGeneratorTool" - /> - <Tool - Name="VCWebServiceProxyGeneratorTool" - /> - <Tool - Name="VCMIDLTool" - TargetEnvironment="3" - /> - <Tool - Name="VCCLCompilerTool" - Optimization="2" - AdditionalIncludeDirectories=""$(ProjectDir)\..";"$(ProjectDir)\..\Common"" - PreprocessorDefinitions="WIN32;NDEBUG;_LIB;_CRT_SECURE_NO_DEPRECATE;_CRT_NON_CONFORMING_SWPRINTFS" - RuntimeLibrary="0" - BufferSecurityCheck="true" - UsePrecompiledHeader="0" - AssemblerOutput="2" - AssemblerListingLocation="$(IntDir)/" - WarningLevel="4" - Detect64BitPortabilityProblems="false" - DebugInformationFormat="0" - DisableSpecificWarnings="4100;4127;4201" - /> - <Tool - Name="VCManagedResourceCompilerTool" - /> - <Tool - Name="VCResourceCompilerTool" - /> - <Tool - Name="VCPreLinkEventTool" - /> - <Tool - Name="VCLibrarianTool" - 
OutputFile="$(OutDir)/Crypto.lib" - AdditionalLibraryDirectories="$(TargetDir)" - /> - <Tool - Name="VCALinkTool" - /> - <Tool - Name="VCXDCMakeTool" - /> - <Tool - Name="VCBscMakeTool" - /> - <Tool - Name="VCFxCopTool" - /> - <Tool - Name="VCPostBuildEventTool" - /> - </Configuration> - </Configurations> - <References> - </References> - <Files> - <Filter - Name="Source Files" - Filter="cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx" - UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}" - > - <File - RelativePath=".\Aes_hw_cpu.asm" - > - <FileConfiguration - Name="Debug|Win32" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win32 -Ox -g --prefix _ -o "$(TargetDir)\$(InputName).obj" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - <FileConfiguration - Name="Debug|x64" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win64 -Ox -g -o "$(TargetDir)\$(InputName).obj" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - <FileConfiguration - Name="Release|Win32" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win32 -Ox --prefix _ -o "$(TargetDir)\$(InputName).obj" -l "$(TargetDir)\$(InputName).lst" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - <FileConfiguration - Name="Release|x64" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\$(InputName).obj" -l "$(TargetDir)\$(InputName).lst" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - </File> - <File - RelativePath=".\Aes_x64.asm" - > - <FileConfiguration - Name="Debug|Win32" - ExcludedFromBuild="true" - > - <Tool - Name="VCCustomBuildTool" - /> - </FileConfiguration> - <FileConfiguration - Name="Debug|x64" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\$(InputName).obj" -l "$(TargetDir)\$(InputName).lst" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - <FileConfiguration - Name="Release|Win32" - ExcludedFromBuild="true" - > - <Tool - Name="VCCustomBuildTool" - /> - </FileConfiguration> - <FileConfiguration - Name="Release|x64" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\$(InputName).obj" -l "$(TargetDir)\$(InputName).lst" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - </File> - <File - RelativePath=".\Aes_x86.asm" - > - <FileConfiguration - Name="Debug|Win32" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win32 -Ox -g --prefix _ -o "$(TargetDir)\$(InputName).obj" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - <FileConfiguration - Name="Debug|x64" - ExcludedFromBuild="true" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win32 -Ox -g --prefix _ -o "$(TargetDir)\$(InputName).obj" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - <FileConfiguration - Name="Release|Win32" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win32 -Ox --prefix _ -o "$(TargetDir)\$(InputName).obj" -l "$(TargetDir)\$(InputName).lst" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - <FileConfiguration - Name="Release|x64" - ExcludedFromBuild="true" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win32 -Ox --prefix _ -o "$(TargetDir)\$(InputName).obj" -l "$(TargetDir)\$(InputName).lst" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - </File> - <File - RelativePath=".\Aeskey.c" - > - </File> - <File - RelativePath=".\Aestab.c" - > - </File> - <File - RelativePath=".\Camellia.c" - > - </File> - <File - RelativePath=".\cpu.c" - > - </File> - <File - RelativePath=".\Gost89_x64.asm" - > - <FileConfiguration - Name="Debug|Win32" - ExcludedFromBuild="true" - > - <Tool - Name="VCCustomBuildTool" - /> - </FileConfiguration> - <FileConfiguration - Name="Debug|x64" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\$(InputName).obj" -l "$(TargetDir)\$(InputName).lst" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - <FileConfiguration - Name="Release|Win32" - ExcludedFromBuild="true" - > - <Tool - Name="VCCustomBuildTool" - /> - </FileConfiguration> - <FileConfiguration - Name="Release|x64" - > - <Tool - Name="VCCustomBuildTool" - CommandLine="echo $(InputFileName) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\$(InputName).obj" -l "$(TargetDir)\$(InputName).lst" "$(InputPath)"
" - Outputs="$(TargetDir)\$(InputName).obj" - /> - </FileConfiguration> - </File> - <File - RelativePath=".\GostCipher.c" - > - </File> - <File - RelativePath=".\kuznyechik.c" - > - </File> - <File - RelativePath=".\Rmd160.c" - > - </File> - <File - RelativePath=".\Serpent.c" - > - </File> - <File - RelativePath=".\Sha2.c" - > - </File> - <File - RelativePath=".\Streebog.c" - > - </File> - <File - RelativePath=".\Twofish.c" - > - </File> - <File - RelativePath=".\Whirlpool.c" - > - </File> - </Filter> - <Filter - Name="Header Files" - Filter="h;hpp;hxx;hm;inl;inc;xsd" - UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}" - > - <File - RelativePath=".\Aes.h" - > - </File> - <File - RelativePath=".\Aes_hw_cpu.h" - > - </File> - <File - RelativePath=".\Aesopt.h" - > - </File> - <File - RelativePath=".\Aestab.h" - > - </File> - <File - RelativePath=".\Camellia.h" - > - </File> - <File - RelativePath=".\config.h" - > - </File> - <File - RelativePath=".\cpu.h" - > - </File> - <File - RelativePath=".\GostCipher.h" - > - </File> - <File - RelativePath=".\kuznyechik.h" - > - </File> - <File - RelativePath=".\misc.h" - > - </File> - <File - RelativePath=".\Rmd160.h" - > - </File> - <File - RelativePath=".\Serpent.h" - > - </File> - <File - RelativePath=".\Sha2.h" - > - </File> - <File - RelativePath=".\Streebog.h" - > - </File> - <File - RelativePath=".\Twofish.h" - > - </File> - <File - RelativePath=".\Whirlpool.h" - > - </File> - </Filter> - <Filter - Name="Resource Files" - Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx" - UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}" - > - </Filter> - </Files> - <Globals> - </Globals> -</VisualStudioProject> diff --git a/src/Crypto/Crypto.vcxproj b/src/Crypto/Crypto.vcxproj index c95874c6..4aebc084 100644 --- a/src/Crypto/Crypto.vcxproj +++ b/src/Crypto/Crypto.vcxproj @@ -1,18 +1,18 @@ <?xml version="1.0" encoding="utf-8"?> <Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <ItemGroup Label="ProjectConfigurations"> - <ProjectConfiguration Include="Debug|Win32"> + <ProjectConfiguration Include="Debug|ARM64"> <Configuration>Debug</Configuration> - <Platform>Win32</Platform> + <Platform>ARM64</Platform> </ProjectConfiguration> <ProjectConfiguration Include="Debug|x64"> <Configuration>Debug</Configuration> <Platform>x64</Platform> </ProjectConfiguration> - <ProjectConfiguration Include="Release|Win32"> + <ProjectConfiguration Include="Release|ARM64"> <Configuration>Release</Configuration> - <Platform>Win32</Platform> + <Platform>ARM64</Platform> </ProjectConfiguration> <ProjectConfiguration Include="Release|x64"> <Configuration>Release</Configuration> <Platform>x64</Platform> @@ -21,62 +21,69 @@ <PropertyGroup Label="Globals"> <ProjectGuid>{993245CF-6B70-47EE-91BB-39F8FC6DC0E7}</ProjectGuid> <RootNamespace>Crypto</RootNamespace> <Keyword>Win32Proj</Keyword> + <WindowsTargetPlatformVersion>10.0</WindowsTargetPlatformVersion> + <ProjectName>Crypto</ProjectName> </PropertyGroup> <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" /> - <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration"> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration"> <ConfigurationType>StaticLibrary</ConfigurationType> <CharacterSet>Unicode</CharacterSet> - <PlatformToolset>Windows7.1SDK</PlatformToolset> + <PlatformToolset>v143</PlatformToolset> + <SpectreMitigation>Spectre</SpectreMitigation> 
</PropertyGroup> - <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration"> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'" Label="Configuration"> <ConfigurationType>StaticLibrary</ConfigurationType> <CharacterSet>Unicode</CharacterSet> - <PlatformToolset>Windows7.1SDK</PlatformToolset> + <PlatformToolset>v143</PlatformToolset> + <SpectreMitigation>Spectre</SpectreMitigation> </PropertyGroup> - <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration"> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration"> <ConfigurationType>StaticLibrary</ConfigurationType> <CharacterSet>Unicode</CharacterSet> - <PlatformToolset>Windows7.1SDK</PlatformToolset> + <PlatformToolset>v143</PlatformToolset> </PropertyGroup> - <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration"> + <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'" Label="Configuration"> <ConfigurationType>StaticLibrary</ConfigurationType> <CharacterSet>Unicode</CharacterSet> - <PlatformToolset>Windows7.1SDK</PlatformToolset> + <PlatformToolset>v143</PlatformToolset> </PropertyGroup> <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" /> <ImportGroup Label="ExtensionSettings"> </ImportGroup> - <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets"> + <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> <Import Project="$(VCTargetsPath)Microsoft.CPP.UpgradeFromVC71.props" /> </ImportGroup> - <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets"> + <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'" Label="PropertySheets"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> <Import Project="$(VCTargetsPath)Microsoft.CPP.UpgradeFromVC71.props" /> </ImportGroup> - <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets"> + <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> <Import Project="$(VCTargetsPath)Microsoft.CPP.UpgradeFromVC71.props" /> </ImportGroup> - <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets"> + <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'" Label="PropertySheets"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> <Import Project="$(VCTargetsPath)Microsoft.CPP.UpgradeFromVC71.props" /> </ImportGroup> <PropertyGroup Label="UserMacros" /> <PropertyGroup> <_ProjectFileVersion>10.0.40219.1</_ProjectFileVersion> - <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">Debug\</OutDir> - <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">Debug\</IntDir> - <OutDir 
Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(Platform)\$(Configuration)\</OutDir> - <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(Platform)\$(Configuration)\</IntDir> - <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">Release\</OutDir> - <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">Release\</IntDir> - <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(Platform)\$(Configuration)\</OutDir> - <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(Platform)\$(Configuration)\</IntDir> + <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(ProjectDir)$(Platform)\$(Configuration)\</OutDir> + <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(ProjectDir)$(Platform)\$(Configuration)\</OutDir> + <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(ProjectDir)$(Platform)\$(Configuration)\</IntDir> + <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(ProjectDir)$(Platform)\$(Configuration)\</IntDir> + <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(ProjectDir)$(Platform)\$(Configuration)\</OutDir> + <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(ProjectDir)$(Platform)\$(Configuration)\</OutDir> + <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(ProjectDir)$(Platform)\$(Configuration)\</IntDir> + <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(ProjectDir)$(Platform)\$(Configuration)\</IntDir> </PropertyGroup> - <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'"> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'"> + <Midl> + <TargetEnvironment>X64</TargetEnvironment> + </Midl> <ClCompile> <Optimization>Disabled</Optimization> <AdditionalIncludeDirectories>$(ProjectDir)\..;$(ProjectDir)\..\Common;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <PreprocessorDefinitions>WIN32;DEBUG;_DEBUG;_LIB;_CRT_SECURE_NO_DEPRECATE;_CRT_NON_CONFORMING_SWPRINTFS;%(PreprocessorDefinitions)</PreprocessorDefinitions> @@ -93,12 +100,10 @@ <Lib> <OutputFile>$(OutDir)Crypto.lib</OutputFile> </Lib> </ItemDefinitionGroup> - <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'"> - <Midl> - <TargetEnvironment>X64</TargetEnvironment> - </Midl> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'"> + <Midl /> <ClCompile> <Optimization>Disabled</Optimization> <AdditionalIncludeDirectories>$(ProjectDir)\..;$(ProjectDir)\..\Common;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <PreprocessorDefinitions>WIN32;DEBUG;_DEBUG;_LIB;_CRT_SECURE_NO_DEPRECATE;_CRT_NON_CONFORMING_SWPRINTFS;%(PreprocessorDefinitions)</PreprocessorDefinitions> @@ -115,9 +120,12 @@ <Lib> <OutputFile>$(OutDir)Crypto.lib</OutputFile> </Lib> </ItemDefinitionGroup> - <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'"> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'"> + <Midl> + <TargetEnvironment>X64</TargetEnvironment> + </Midl> <ClCompile> <Optimization>MaxSpeed</Optimization> <AdditionalIncludeDirectories>$(ProjectDir)\..;$(ProjectDir)\..\Common;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <PreprocessorDefinitions>WIN32;NDEBUG;_LIB;_CRT_SECURE_NO_DEPRECATE;_CRT_NON_CONFORMING_SWPRINTFS;%(PreprocessorDefinitions)</PreprocessorDefinitions> @@ -127,21 +135,19 @@ </PrecompiledHeader> 
<AssemblerOutput>All</AssemblerOutput> <AssemblerListingLocation>$(IntDir)</AssemblerListingLocation> <WarningLevel>Level4</WarningLevel> - <DebugInformationFormat> - </DebugInformationFormat> + <DebugInformationFormat>ProgramDatabase</DebugInformationFormat> <DisableSpecificWarnings>4100;4127;4201;%(DisableSpecificWarnings)</DisableSpecificWarnings> + <ControlFlowGuard>Guard</ControlFlowGuard> </ClCompile> <Lib> <OutputFile>$(OutDir)Crypto.lib</OutputFile> <AdditionalLibraryDirectories>$(TargetDir);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories> </Lib> </ItemDefinitionGroup> - <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'"> - <Midl> - <TargetEnvironment>X64</TargetEnvironment> - </Midl> + <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'"> + <Midl /> <ClCompile> <Optimization>MaxSpeed</Optimization> <AdditionalIncludeDirectories>$(ProjectDir)\..;$(ProjectDir)\..\Common;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories> <PreprocessorDefinitions>WIN32;NDEBUG;_LIB;_CRT_SECURE_NO_DEPRECATE;_CRT_NON_CONFORMING_SWPRINTFS;%(PreprocessorDefinitions)</PreprocessorDefinitions> @@ -151,81 +157,106 @@ </PrecompiledHeader> <AssemblerOutput>All</AssemblerOutput> <AssemblerListingLocation>$(IntDir)</AssemblerListingLocation> <WarningLevel>Level4</WarningLevel> - <DebugInformationFormat> - </DebugInformationFormat> + <DebugInformationFormat>ProgramDatabase</DebugInformationFormat> <DisableSpecificWarnings>4100;4127;4201;%(DisableSpecificWarnings)</DisableSpecificWarnings> + <ControlFlowGuard>Guard</ControlFlowGuard> </ClCompile> <Lib> <OutputFile>$(OutDir)Crypto.lib</OutputFile> <AdditionalLibraryDirectories>$(TargetDir);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories> </Lib> </ItemDefinitionGroup> <ItemGroup> <CustomBuild Include="Aes_hw_cpu.asm"> - <Command Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win32 -Ox -g --prefix _ -o "$(TargetDir)\%(Filename).obj" "%(FullPath)" -</Command> - <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win64 -Ox -g -o "$(TargetDir)\%(Filename).obj" "%(FullPath)" </Command> - <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> - <Command Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win32 -Ox --prefix _ -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win64 -Ox -g -o "$(TargetDir)\%(Filename).obj" "%(FullPath)" </Command> - <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" </Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo 
%(Filename)%(Extension) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" +</Command> <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> </CustomBuild> <CustomBuild Include="Aes_x64.asm"> - <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</ExcludedFromBuild> <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" </Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" +</Command> <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> - <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</ExcludedFromBuild> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" </Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" +</Command> <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> </CustomBuild> <CustomBuild Include="Aes_x86.asm"> - <Command Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win32 -Ox -g --prefix _ -o "$(TargetDir)\%(Filename).obj" "%(FullPath)" -</Command> - <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win32 -Ox -g --prefix _ -o "$(TargetDir)\%(Filename).obj" "%(FullPath)" </Command> - <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> - <Command Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win32 -Ox --prefix _ -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" + <Command 
Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win32 -Ox -g --prefix _ -o "$(TargetDir)\%(Filename).obj" "%(FullPath)" </Command> - <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win32 -Ox --prefix _ -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" </Command> - <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> - </CustomBuild> - <CustomBuild Include="Gost89_x64.asm"> - <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</ExcludedFromBuild> - <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" -</Command> - <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> - <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</ExcludedFromBuild> - <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win64 -Ox -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & nasm.exe -Xvc -f win32 -Ox --prefix _ -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)" </Command> <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> </CustomBuild> </ItemGroup> <ItemGroup> + <ClCompile Include="Aescrypt.c"> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</ExcludedFromBuild> + </ClCompile> <ClCompile Include="Aeskey.c" /> <ClCompile Include="Aestab.c" /> + <ClCompile Include="blake2s.c" /> + <ClCompile Include="blake2s_SSE2.c" /> + <ClCompile Include="blake2s_SSE41.c" /> + <ClCompile Include="blake2s_SSSE3.c" /> <ClCompile Include="Camellia.c" /> + <ClCompile Include="chacha-xmm.c" /> + <ClCompile Include="chacha256.c" /> + <ClCompile Include="chachaRng.c" /> <ClCompile Include="cpu.c" /> - <ClCompile Include="GostCipher.c" /> + <ClCompile Include="jitterentropy-base.c"> + <Optimization Condition="'$(Configuration)|$(Platform)'=='Release|x64'">Disabled</Optimization> + <Optimization Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">Disabled</Optimization> + </ClCompile> <ClCompile Include="kuznyechik.c" /> - <ClCompile Include="Rmd160.c" /> + <ClCompile Include="kuznyechik_simd.c"> + <ExcludedFromBuild 
Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </ClCompile> + <ClCompile Include="rdrand.c"> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </ClCompile> <ClCompile Include="SerpentFast.c" /> <ClCompile Include="SerpentFast_simd.cpp" /> <ClCompile Include="Sha2.c" /> + <ClCompile Include="Sha2Intel.c" /> <ClCompile Include="Streebog.c" /> + <ClCompile Include="t1ha2.c" /> + <ClCompile Include="t1ha2_selfcheck.c" /> + <ClCompile Include="t1ha_selfcheck.c" /> <ClCompile Include="Twofish.c" /> <ClCompile Include="Whirlpool.c" /> </ItemGroup> <ItemGroup> @@ -233,36 +264,213 @@ <ClInclude Include="Aes_hw_cpu.h" /> <ClInclude Include="Aesopt.h" /> <ClInclude Include="Aestab.h" /> <ClInclude Include="Camellia.h" /> + <ClInclude Include="chacha256.h" /> + <ClInclude Include="chachaRng.h" /> + <ClInclude Include="chacha_u1.h" /> + <ClInclude Include="chacha_u4.h" /> <ClInclude Include="config.h" /> <ClInclude Include="cpu.h" /> - <ClInclude Include="GostCipher.h" /> + <ClInclude Include="jitterentropy-base-user.h" /> + <ClInclude Include="jitterentropy.h" /> <ClInclude Include="kuznyechik.h" /> <ClInclude Include="misc.h" /> - <ClInclude Include="Rmd160.h" /> + <ClInclude Include="rdrand.h" /> <ClInclude Include="SerpentFast.h" /> <ClInclude Include="SerpentFast_sbox.h" /> <ClInclude Include="Sha2.h" /> <ClInclude Include="Streebog.h" /> + <ClInclude Include="t1ha.h" /> + <ClInclude Include="t1ha_bits.h" /> + <ClInclude Include="t1ha_selfcheck.h" /> <ClInclude Include="Twofish.h" /> <ClInclude Include="Whirlpool.h" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\Boot\Windows\Boot.vcxproj"> - <Project>{8b7f059f-e4c7-4e11-88f5-ee8b8433072e}</Project> - <ReferenceOutputAssembly>false</ReferenceOutputAssembly> - </ProjectReference> + <CustomBuild Include="Twofish_x64.S"> + <FileType>Document</FileType> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs 
Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </CustomBuild> </ItemGroup> <ItemGroup> - <CustomBuild Include="Twofish_x64.S"> - <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</ExcludedFromBuild> - <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</ExcludedFromBuild> + <CustomBuild Include="Camellia_aesni_x64.S"> + <FileType>Document</FileType> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </CustomBuild> + <CustomBuild Include="Camellia_x64.S"> <FileType>Document</FileType> - <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l 
"$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -D __YASM__ -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </CustomBuild> + </ItemGroup> + <ItemGroup> + <CustomBuild Include="sha256-x86-nayuki.S"> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + <FileType>Document</FileType> + </CustomBuild> + <CustomBuild Include="sha256_avx1_x64.asm"> + <FileType>Document</FileType> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </CustomBuild> + <CustomBuild Include="sha256_avx2_x64.asm"> + <FileType>Document</FileType> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command 
Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </CustomBuild> + <CustomBuild Include="sha256_sse4_x64.asm"> + <FileType>Document</FileType> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </CustomBuild> + <CustomBuild Include="sha512-x86-nayuki.S"> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + <FileType>Document</FileType> + </CustomBuild> + <CustomBuild Include="sha512-x64-nayuki.S"> + <FileType>Document</FileType> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & yasm.exe 
-Xvc -p gas -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & yasm.exe -Xvc -p gas -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & yasm.exe -Xvc -p gas -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & yasm.exe -Xvc -p gas -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </CustomBuild> + <CustomBuild Include="sha512_avx1_x64.asm"> + <FileType>Document</FileType> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </CustomBuild> + <CustomBuild Include="sha512_avx2_x64.asm"> + <FileType>Document</FileType> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" 
"%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </CustomBuild> + <CustomBuild Include="sha512_sse4_x64.asm"> + <FileType>Document</FileType> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & yasm.exe -D WINABI -D __YASM__ -f x64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </CustomBuild> + </ItemGroup> + <ItemGroup> + <CustomBuild Include="rdrand_ml.asm"> + <FileType>Document</FileType> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & ml64.exe /nologo /D_M_X64 /W3 /Cx /Zi /Fo "$(TargetDir)\%(Filename).obj" /c "%(FullPath)" +</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & ml64.exe /nologo /D_M_X64 /W3 /Cx 
/Zi /Fo "$(TargetDir)\%(Filename).obj" /c "%(FullPath)" +</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & ml64.exe /nologo /D_M_X64 /W3 /Cx /Zi /Fo "$(TargetDir)\%(Filename).obj" /c "%(FullPath)" +</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & ml64.exe /nologo /D_M_X64 /W3 /Cx /Zi /Fo "$(TargetDir)\%(Filename).obj" /c "%(FullPath)" +</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> + </CustomBuild> + <CustomBuild Include="rdseed_ml.asm"> + <FileType>Document</FileType> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">echo %(Filename)%(Extension) & ml64.exe /nologo /D_M_X64 /W3 /Cx /Zi /Fo "$(TargetDir)\%(Filename).obj" /c "%(FullPath)" +</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">echo %(Filename)%(Extension) & ml64.exe /nologo /D_M_X64 /W3 /Cx /Zi /Fo "$(TargetDir)\%(Filename).obj" /c "%(FullPath)" +</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & ml64.exe /nologo /D_M_X64 /W3 /Cx /Zi /Fo "$(TargetDir)\%(Filename).obj" /c "%(FullPath)" +</Command> + <Command Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">echo %(Filename)%(Extension) & ml64.exe /nologo /D_M_X64 /W3 /Cx /Zi /Fo "$(TargetDir)\%(Filename).obj" /c "%(FullPath)" +</Command> <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> - <Command Condition="'$(Configuration)|$(Platform)'=='Release|x64'">echo %(Filename)%(Extension) & yasm.exe -p gas -D WINABI -f win64 -o "$(TargetDir)\%(Filename).obj" -l "$(TargetDir)\%(Filename).lst" "%(FullPath)"</Command> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <Outputs Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">$(TargetDir)\%(Filename).obj;%(Outputs)</Outputs> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|ARM64'">true</ExcludedFromBuild> + <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|ARM64'">true</ExcludedFromBuild> </CustomBuild> </ItemGroup> <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" /> <ImportGroup Label="ExtensionTargets"> diff --git a/src/Crypto/Crypto.vcxproj.filters b/src/Crypto/Crypto.vcxproj.filters index 702dedbf..3d384f97 100644 --- a/src/Crypto/Crypto.vcxproj.filters +++ b/src/Crypto/Crypto.vcxproj.filters @@ -20,17 +20,11 @@ </ClCompile> <ClCompile Include="Aestab.c"> <Filter>Source Files</Filter> </ClCompile> - <ClCompile Include="Camellia.c"> - <Filter>Source Files</Filter> - </ClCompile> <ClCompile Include="cpu.c"> <Filter>Source Files</Filter> </ClCompile> - 
<ClCompile Include="Rmd160.c"> - <Filter>Source Files</Filter> - </ClCompile> <ClCompile Include="Sha2.c"> <Filter>Source Files</Filter> </ClCompile> <ClCompile Include="Twofish.c"> @@ -38,11 +32,8 @@ </ClCompile> <ClCompile Include="Whirlpool.c"> <Filter>Source Files</Filter> </ClCompile> - <ClCompile Include="GostCipher.c"> - <Filter>Source Files</Filter> - </ClCompile> <ClCompile Include="kuznyechik.c"> <Filter>Source Files</Filter> </ClCompile> <ClCompile Include="Streebog.c"> @@ -53,8 +44,53 @@ </ClCompile> <ClCompile Include="SerpentFast_simd.cpp"> <Filter>Source Files</Filter> </ClCompile> + <ClCompile Include="Camellia.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="kuznyechik_simd.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="rdrand.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="chacha256.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="chacha-xmm.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="chachaRng.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="jitterentropy-base.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="t1ha_selfcheck.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="t1ha2.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="t1ha2_selfcheck.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="blake2s.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="blake2s_SSE2.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="blake2s_SSE41.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="blake2s_SSSE3.c"> + <Filter>Source Files</Filter> + </ClCompile> + <ClCompile Include="Sha2Intel.c"> + <Filter>Source Files</Filter> + </ClCompile> </ItemGroup> <ItemGroup> <ClInclude Include="Aes.h"> <Filter>Header Files</Filter> @@ -79,11 +115,8 @@ </ClInclude> <ClInclude Include="misc.h"> <Filter>Header Files</Filter> </ClInclude> - <ClInclude Include="Rmd160.h"> - <Filter>Header Files</Filter> - </ClInclude> <ClInclude Include="Sha2.h"> <Filter>Header Files</Filter> </ClInclude> <ClInclude Include="Twofish.h"> @@ -91,11 +124,8 @@ </ClInclude> <ClInclude Include="Whirlpool.h"> <Filter>Header Files</Filter> </ClInclude> - <ClInclude Include="GostCipher.h"> - <Filter>Header Files</Filter> - </ClInclude> <ClInclude Include="kuznyechik.h"> <Filter>Header Files</Filter> </ClInclude> <ClInclude Include="Streebog.h"> @@ -106,8 +136,47 @@ </ClInclude> <ClInclude Include="SerpentFast_sbox.h"> <Filter>Header Files</Filter> </ClInclude> + <ClInclude Include="rdrand.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="chacha_u1.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="chacha_u4.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="chacha256.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="chachaRng.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="jitterentropy.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="jitterentropy-base-user.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="t1ha.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="t1ha_bits.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="t1ha_selfcheck.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude 
Include="blake2s-load-sse2.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="blake2s-load-sse41.h"> + <Filter>Header Files</Filter> + </ClInclude> + <ClInclude Include="blake2s-round.h"> + <Filter>Header Files</Filter> + </ClInclude> </ItemGroup> <ItemGroup> <CustomBuild Include="Aes_hw_cpu.asm"> <Filter>Source Files</Filter> @@ -117,12 +186,48 @@ </CustomBuild> <CustomBuild Include="Aes_x86.asm"> <Filter>Source Files</Filter> </CustomBuild> - <CustomBuild Include="Gost89_x64.asm"> + <CustomBuild Include="Twofish_x64.S"> <Filter>Source Files</Filter> </CustomBuild> - <CustomBuild Include="Twofish_x64.S"> + <CustomBuild Include="Camellia_x64.S"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="Camellia_aesni_x64.S"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="sha512-x64-nayuki.S"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="sha256-x86-nayuki.S"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="sha256_avx1_x64.asm"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="sha256_avx2_x64.asm"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="sha256_sse4_x64.asm"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="sha512-x86-nayuki.S"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="sha512_avx1_x64.asm"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="sha512_avx2_x64.asm"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="sha512_sse4_x64.asm"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="rdrand_ml.asm"> + <Filter>Source Files</Filter> + </CustomBuild> + <CustomBuild Include="rdseed_ml.asm"> <Filter>Source Files</Filter> </CustomBuild> </ItemGroup> </Project>
\ No newline at end of file
diff --git a/src/Crypto/Crypto.vcxproj.user b/src/Crypto/Crypto.vcxproj.user
index ace9a86a..88a55094 100644
--- a/src/Crypto/Crypto.vcxproj.user
+++ b/src/Crypto/Crypto.vcxproj.user
@@ -1,3 +1,4 @@
 <?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+<Project ToolsVersion="Current" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup />
 </Project>
\ No newline at end of file
diff --git a/src/Crypto/GostCipher.c b/src/Crypto/GostCipher.c
deleted file mode 100644
index 0fd3941a..00000000
--- a/src/Crypto/GostCipher.c
+++ /dev/null
@@ -1,265 +0,0 @@
-/** @file
-GOST89 implementation
-
-Copyright (c) 2016. Disk Cryptography Services for EFI (DCS), Alex Kolotnikov
-
-This program and the accompanying materials
-are licensed and made available under the terms and conditions
-of the Apache License, Version 2.0.
-The full text of the license may be found at
-https://opensource.org/licenses/Apache-2.0
-
-Dynamic SBOX idea is from GostCrypt project. Copyright (c) 2008-2011 TrueCrypt Developers Association
-**/
-
-
-
-#include "GostCipher.h"
-#include "Streebog.h"
-#include "cpu.h"
-
-#if defined(CIPHER_GOST89)
-
-// Crypto Pro
-byte S_CryptoPro[8][16] = {
-	{0x1,0x3,0xA,0x9,0x5,0xB,0x4,0xF,0x8,0x6,0x7,0xE,0xD,0x0,0x2,0xC},
-	{0xD,0xE,0x4,0x1,0x7,0x0,0x5,0xA,0x3,0xC,0x8,0xF,0x6,0x2,0x9,0xB},
-	{0x7,0x6,0x2,0x4,0xD,0x9,0xF,0x0,0xA,0x1,0x5,0xB,0x8,0xE,0xC,0x3},
-	{0x7,0x6,0x4,0xB,0x9,0xC,0x2,0xA,0x1,0x8,0x0,0xE,0xF,0xD,0x3,0x5},
-	{0x4,0xA,0x7,0xC,0x0,0xF,0x2,0x8,0xE,0x1,0x6,0x5,0xD,0xB,0x9,0x3},
-	{0x7,0xF,0xC,0xE,0x9,0x4,0x1,0x0,0x3,0xB,0x5,0x2,0x6,0xA,0x8,0xD},
-	{0x5,0xF,0x4,0x0,0x2,0xD,0xB,0x9,0x1,0x7,0x6,0x3,0xC,0xE,0xA,0x8},
-	{0xA,0x4,0x5,0x6,0x8,0x1,0x3,0x7,0xD,0xC,0xE,0x0,0x9,0x2,0xB,0xF}
-	};
-
-// TC26
-byte S_TC26[8][16] =
-{
-	{ 0xc, 0x4, 0x6, 0x2, 0xa, 0x5, 0xb, 0x9, 0xe, 0x8, 0xd, 0x7, 0x0, 0x3, 0xf, 0x1 },
-	{ 0x6, 0x8, 0x2, 0x3, 0x9, 0xa, 0x5, 0xc, 0x1, 0xe, 0x4, 0x7, 0xb, 0xd, 0x0, 0xf },
-	{ 0xb, 0x3, 0x5, 0x8, 0x2, 0xf, 0xa, 0xd, 0xe, 0x1, 0x7, 0x4, 0xc, 0x9, 0x6, 0x0 },
-	{ 0xc, 0x8, 0x2, 0x1, 0xd, 0x4, 0xf, 0x6, 0x7, 0x0, 0xa, 0x5, 0x3, 0xe, 0x9, 0xb },
-	{ 0x7, 0xf, 0x5, 0xa, 0x8, 0x1, 0x6, 0xd, 0x0, 0x9, 0x3, 0xe, 0xb, 0x4, 0x2, 0xc },
-	{ 0x5, 0xd, 0xf, 0x6, 0x9, 0x2, 0xc, 0xa, 0xb, 0x7, 0x8, 0x1, 0x4, 0x3, 0xe, 0x0 },
-	{ 0x8, 0xe, 0x2, 0x5, 0x6, 0x9, 0x1, 0xc, 0xf, 0x4, 0xb, 0x0, 0xd, 0xa, 0x3, 0x7 },
-	{ 0x1, 0x7, 0xe, 0xd, 0x0, 0x5, 0x8, 0x3, 0x4, 0xf, 0xa, 0x6, 0x9, 0xc, 0xb, 0x2 },
-};
-
-void gost_prepare_kds(gost_kds* kds) {
-	uint32 i;
-	// Build substitution tables.
-	for (i = 0; i < 256; ++i) {
-		uint32 p;
-		p = kds->sbox[7][i >> 4] << 4 | kds->sbox[6][i & 15];
-		p = p << 24; p = p << 11 | p >> 21;
-		kds->sbox_cvt[i] = p; // S87
-
-		p = kds->sbox[5][i >> 4] << 4 | kds->sbox[4][i & 15];
-		p = p << 16; p = p << 11 | p >> 21;
-		kds->sbox_cvt[256 + i] = p; // S65
-
-		p = kds->sbox[3][i >> 4] << 4 | kds->sbox[2][i & 15];
-		p = p << 8; p = p << 11 | p >> 21;
-		kds->sbox_cvt[256 * 2 + i] = p; // S43
-
-		p = kds->sbox[1][i >> 4] << 4 | kds->sbox[0][i & 15];
-		p = p << 11 | p >> 21;
-		kds->sbox_cvt[256 * 3 + i] = p; // S21
-	}
-}
-
-
-static void xor_s_box(byte s_box[8][16], byte *seed)
-{
-	int i;
-	for (i = 0; i < 16; i++)
-	{
-		s_box[0][i] ^= (seed[ (i * 4) + 0 ] ) & 0xF;
-		s_box[1][i] ^= (seed[ (i * 4) + 0 ]>>4) & 0xF;
-		s_box[2][i] ^= (seed[ (i * 4) + 1 ] ) & 0xF;
-		s_box[3][i] ^= (seed[ (i * 4) + 1 ]>>4) & 0xF;
-		s_box[4][i] ^= (seed[ (i * 4) + 2 ] ) & 0xF;
-		s_box[5][i] ^= (seed[ (i * 4) + 2 ]>>4) & 0xF;
-		s_box[6][i] ^= (seed[ (i * 4) + 3 ] ) & 0xF;
-		s_box[7][i] ^= (seed[ (i * 4) + 3 ]>>4) & 0xF;
-	}
-}
-
-void gost_set_key(const byte *key, gost_kds *ks, int useDynamicSbox)
-{
-	memcpy(ks->key, key, GOST_KEYSIZE);
-	memcpy(ks->sbox, S_TC26, sizeof(ks->sbox));
-
-	if (useDynamicSbox)
-	{
-		STREEBOG_CTX sctx;
-		byte sbox_seed[64];
-#if defined (DEVICE_DRIVER) && !defined (_WIN64)
-		KFLOATING_SAVE floatingPointState;
-		NTSTATUS saveStatus = STATUS_SUCCESS;
-		if (HasSSE2() || HasSSE41())
-			saveStatus = KeSaveFloatingPointState (&floatingPointState);
-#endif
-		//Generate pseudorandom data based on the key
-		STREEBOG_init(&sctx);
-		STREEBOG_add(&sctx, ks->key, 32);
-		STREEBOG_finalize(&sctx, sbox_seed);
-
-#if defined (DEVICE_DRIVER) && !defined (_WIN64)
-		if (NT_SUCCESS (saveStatus) && (HasSSE2() || HasSSE41()))
-			KeRestoreFloatingPointState (&floatingPointState);
-#endif
-
-		xor_s_box(ks->sbox, sbox_seed);
-	}
-
-	gost_prepare_kds(ks);
-}
-
-static uint32 f(uint32 v, uint32* sbox){
-	byte* x =(byte*) &v;
-	/* Do substitutions */
-	return sbox[x[3]] | sbox[256 + x[2]] | sbox[256*2 + x[1]] | sbox[256*3 + x[0]];
-}
-
-void gost_encrypt_block(uint64 in_, uint64* out_, gost_kds* kds) {
-	uint32* in = (uint32*)&in_;
-	uint32* out = (uint32*)out_;
-	uint32* key = (uint32*)kds->key;
-	uint32* sbox = kds->sbox_cvt;
-
-	// As named in the GOST
-	uint32 n1 = in[0];
-	uint32 n2 = in[1];
-
-	n2 ^= f(n1+key[0], sbox);
-	n1 ^= f(n2+key[1], sbox);
-	n2 ^= f(n1+key[2], sbox);
-	n1 ^= f(n2+key[3], sbox);
-	n2 ^= f(n1+key[4], sbox);
-	n1 ^= f(n2+key[5], sbox);
-	n2 ^= f(n1+key[6], sbox);
-	n1 ^= f(n2+key[7], sbox);
-
-	n2 ^= f(n1+key[0], sbox);
-	n1 ^= f(n2+key[1], sbox);
-	n2 ^= f(n1+key[2], sbox);
-	n1 ^= f(n2+key[3], sbox);
-	n2 ^= f(n1+key[4], sbox);
-	n1 ^= f(n2+key[5], sbox);
-	n2 ^= f(n1+key[6], sbox);
-	n1 ^= f(n2+key[7], sbox);
-
-	n2 ^= f(n1+key[0], sbox);
-	n1 ^= f(n2+key[1], sbox);
-	n2 ^= f(n1+key[2], sbox);
-	n1 ^= f(n2+key[3], sbox);
-	n2 ^= f(n1+key[4], sbox);
-	n1 ^= f(n2+key[5], sbox);
-	n2 ^= f(n1+key[6], sbox);
-	n1 ^= f(n2+key[7], sbox);
-
-	n2 ^= f(n1+key[7], sbox);
-	n1 ^= f(n2+key[6], sbox);
-	n2 ^= f(n1+key[5], sbox);
-	n1 ^= f(n2+key[4], sbox);
-	n2 ^= f(n1+key[3], sbox);
-	n1 ^= f(n2+key[2], sbox);
-	n2 ^= f(n1+key[1], sbox);
-	n1 ^= f(n2+key[0], sbox);
-
-	// There is no swap after the last round
-	out[0] = n2;
-	out[1] = n1;
-}
-
-void gost_decrypt_block(uint64 in_, uint64* out_, gost_kds* kds) {
-	uint32* in = (uint32*)&in_;
-	uint32* out = (uint32*)out_;
-	uint32* key = (uint32*)kds->key;
-	uint32* sbox = kds->sbox_cvt;
-
-	// As named in the
GOST - uint32 n1 = in[0]; - uint32 n2 = in[1]; - - n2 ^= f(n1+key[0], sbox); - n1 ^= f(n2+key[1], sbox); - n2 ^= f(n1+key[2], sbox); - n1 ^= f(n2+key[3], sbox); - n2 ^= f(n1+key[4], sbox); - n1 ^= f(n2+key[5], sbox); - n2 ^= f(n1+key[6], sbox); - n1 ^= f(n2+key[7], sbox); - - n2 ^= f(n1+key[7], sbox); - n1 ^= f(n2+key[6], sbox); - n2 ^= f(n1+key[5], sbox); - n1 ^= f(n2+key[4], sbox); - n2 ^= f(n1+key[3], sbox); - n1 ^= f(n2+key[2], sbox); - n2 ^= f(n1+key[1], sbox); - n1 ^= f(n2+key[0], sbox); - - n2 ^= f(n1+key[7], sbox); - n1 ^= f(n2+key[6], sbox); - n2 ^= f(n1+key[5], sbox); - n1 ^= f(n2+key[4], sbox); - n2 ^= f(n1+key[3], sbox); - n1 ^= f(n2+key[2], sbox); - n2 ^= f(n1+key[1], sbox); - n1 ^= f(n2+key[0], sbox); - - n2 ^= f(n1+key[7], sbox); - n1 ^= f(n2+key[6], sbox); - n2 ^= f(n1+key[5], sbox); - n1 ^= f(n2+key[4], sbox); - n2 ^= f(n1+key[3], sbox); - n1 ^= f(n2+key[2], sbox); - n2 ^= f(n1+key[1], sbox); - n1 ^= f(n2+key[0], sbox); - - out[0] = n2; - out[1] = n1; -} - -#if defined(_M_AMD64) -void gost_encrypt_128_CBC_asm(const byte *in, byte *out, gost_kds *ks, uint64 count); -void gost_decrypt_128_CBC_asm(const byte *in, byte *out, gost_kds *ks, uint64 count); -#endif - -void gost_encrypt(const byte *in, byte *out, gost_kds *ks, int count) { -#if defined(_M_AMD64) - gost_encrypt_128_CBC_asm(in, out, ks, (uint64)count); -#else - while (count > 0) { - // encrypt two blocks in CBC mode - gost_encrypt_block(*((uint64*)in), (uint64*)out, ks); - *((gst_udword*)(out + 8)) = *((gst_udword*)(in + 8)) ^ *((gst_udword*)(out)); - *((gst_udword*)(out + 12)) = *((gst_udword*)(in + 12)) ^ *((gst_udword*)(out + 4)); - gost_encrypt_block(*((uint64*)(out + 8)), (uint64*)(out + 8), ks); - count--; - in += 16; - out += 16; - } -#endif -} - -void gost_decrypt(const byte *in, byte *out, gost_kds *ks, int count) { -#if defined(_M_AMD64) - gost_decrypt_128_CBC_asm(in, out, ks, (uint64)count); -#else - while (count > 0) { - // decrypt two blocks in CBC mode - gost_decrypt_block(*((uint64*)(in + 8)), (uint64*)(out + 8), ks); - *((gst_udword*)(out + 8)) ^= *((gst_udword*)(in));; - *((gst_udword*)(out + 12)) ^= *((gst_udword*)(in + 4));; - gost_decrypt_block(*((uint64*)(in)), (uint64*)(out), ks); - count--; - in += 16; - out += 16; - } -#endif -} - -#endif diff --git a/src/Crypto/GostCipher.h b/src/Crypto/GostCipher.h deleted file mode 100644 index 6031c438..00000000 --- a/src/Crypto/GostCipher.h +++ /dev/null @@ -1,71 +0,0 @@ - -/* - Copyright (c) 2008-2011 TrueCrypt Developers Association. All rights reserved. - - Governed by the TrueCrypt License 3.0 the full text of which is contained in - the file License.txt included in TrueCrypt binary and source code distribution - packages. 
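For reference, the 32 unrolled rounds deleted above follow a fixed schedule: the eight 32-bit subkeys are used in order 0..7 three times and then 7..0 for encryption (decryption mirrors this), and the pre-rotated sbox_cvt tables reduce each round to four byte lookups, an OR and an addition. A loop-based sketch of the same structure, assuming <stdint.h> types in place of the project's uint32/byte typedefs and the little-endian byte indexing used by the deleted f(); the helper names are hypothetical:

#include <stdint.h>

/* Round function: four lookups into the 4x256-entry converted S-box table;
   the 11-bit left rotate is already baked into sbox_cvt by gost_prepare_kds. */
static uint32_t gost_f_sketch(uint32_t v, const uint32_t *sbox_cvt)
{
    return sbox_cvt[        (v >> 24) & 0xff ]
         | sbox_cvt[256  + ((v >> 16) & 0xff)]
         | sbox_cvt[512  + ((v >>  8) & 0xff)]
         | sbox_cvt[768  + ( v        & 0xff)];
}

/* Loop-based equivalent of the unrolled gost_encrypt_block above. */
static void gost_encrypt_block_sketch(uint32_t n[2], const uint32_t key[8],
                                      const uint32_t *sbox_cvt)
{
    /* Encryption subkey order: 0..7 three times, then 7..0.
       Decryption reverses this: 0..7 once, then 7..0 three times. */
    static const uint8_t order[32] = {
        0,1,2,3,4,5,6,7,  0,1,2,3,4,5,6,7,
        0,1,2,3,4,5,6,7,  7,6,5,4,3,2,1,0
    };
    uint32_t n1 = n[0], n2 = n[1];
    int r;
    for (r = 0; r < 32; r += 2) {
        n2 ^= gost_f_sketch(n1 + key[order[r]],     sbox_cvt);
        n1 ^= gost_f_sketch(n2 + key[order[r + 1]], sbox_cvt);
    }
    n[0] = n2;   /* no swap after the last round */
    n[1] = n1;
}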
-*/ - - - -#ifndef GOST_CIPHER_H -#define GOST_CIPHER_H - -#include "Common/Tcdefs.h" -#include "config.h" - -#ifdef __cplusplus -extern "C" { -#endif - -//In unsigned chars -#define GOST_KEYSIZE 32 -#define GOST_BLOCKSIZE 8 -#define GOST_SBOX_SIZE 16 - -//Production setting, but can be turned off to compare the algorithm with other implementations -#define CIPHER_GOST89 -#define GOST_DYNAMIC_SBOXES - -#if defined(CIPHER_GOST89) - -#ifndef rotl32 -#define rotl32(b, shift) ((b << shift) | (b >> (32 - shift))) -#endif - -#ifdef GST_WINDOWS_BOOT -typedef int gst_word; -typedef long gst_dword; -typedef unsigned int gst_uword; -typedef unsigned long gst_udword; -#else -typedef short gst_word; -typedef int gst_dword; -typedef unsigned short gst_uword; -typedef unsigned int gst_udword; -#endif - -typedef struct gost_kds -{ - CRYPTOPP_ALIGN_DATA(16) byte key[32]; - gst_udword sbox_cvt[256 * 4]; - byte sbox[8][16]; -} gost_kds; - -#define GOST_KS (sizeof(gost_kds)) - -void gost_encrypt(const byte *in, byte *out, gost_kds *ks, int count); -void gost_decrypt(const byte *in, byte *out, gost_kds *ks, int count); -void gost_set_key(const byte *key, gost_kds *ks, int useDynamicSbox); - -#else -#define GOST_KS (0) -#endif - -#ifdef __cplusplus -} -#endif - - -#endif diff --git a/src/Crypto/Makefile.inc b/src/Crypto/Makefile.inc index e014976a..e05d02ca 100644 --- a/src/Crypto/Makefile.inc +++ b/src/Crypto/Makefile.inc @@ -1,24 +1,64 @@ TC_ASFLAGS = -Xvc -Ox -VC_YASMFLAGS = -Xvc -p gas -D WINABI +VC_YASMFLAGS = -Xvc -D WINABI -D __YASM__ +VC_MLFLAGS = /nologo /W3 /Cx /Zi +VC_MLEXE = ml.exe !if "$(TC_ARCH)" == "x86" TC_ASFLAGS = $(TC_ASFLAGS) -f win32 --prefix _ -D MS_STDCALL -D DLL_EXPORT -VC_YASMFLAGS = $(VC_YASMFLAGS) -f win32 +VC_YASMFLAGS = $(VC_YASMFLAGS) -f win32 -D MS_STDCALL +VC_MLFLAGS = $(VC_MLFLAGS) /D_M_X86 /safeseh !else TC_ASFLAGS = $(TC_ASFLAGS) -f win64 VC_YASMFLAGS = $(VC_YASMFLAGS) -f win64 +VC_MLFLAGS = $(VC_MLFLAGS) /D_M_X64 +VC_MLEXE = ml64.exe !endif TC_ASM_ERR_LOG = ..\Driver\build_errors_asm.log -"$(OBJ_PATH)\$(O)\gost89_$(TC_ARCH).obj": gost89_$(TC_ARCH).asm - nasm.exe $(TC_ASFLAGS) -o "$@" -l "$(OBJ_PATH)\$(O)\gost89_$(TC_ARCH).lst" gost89_$(TC_ARCH).asm 2>$(TC_ASM_ERR_LOG) - "$(OBJ_PATH)\$(O)\Aes_$(TC_ARCH).obj": Aes_$(TC_ARCH).asm nasm.exe $(TC_ASFLAGS) -o "$@" -l "$(OBJ_PATH)\$(O)\Aes_$(TC_ARCH).lst" Aes_$(TC_ARCH).asm 2>$(TC_ASM_ERR_LOG) "$(OBJ_PATH)\$(O)\Aes_hw_cpu.obj": Aes_hw_cpu.asm nasm.exe $(TC_ASFLAGS) -o "$@" -l "$(OBJ_PATH)\$(O)\Aes_hw_cpu.lst" Aes_hw_cpu.asm 2>$(TC_ASM_ERR_LOG) "$(OBJ_PATH)\$(O)\Twofish_$(TC_ARCH).obj": Twofish_$(TC_ARCH).S - yasm.exe $(VC_YASMFLAGS) -o "$@" -l "$(OBJ_PATH)\$(O)\Twofish_$(TC_ARCH).lst" Twofish_$(TC_ARCH).S 2>$(TC_ASM_ERR_LOG) + yasm.exe $(VC_YASMFLAGS) -p gas -o "$@" -l "$(OBJ_PATH)\$(O)\Twofish_$(TC_ARCH).lst" Twofish_$(TC_ARCH).S 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\Camellia_$(TC_ARCH).obj": Camellia_$(TC_ARCH).S + yasm.exe $(VC_YASMFLAGS) -p gas -o "$@" -l "$(OBJ_PATH)\$(O)\Camellia_$(TC_ARCH).lst" Camellia_$(TC_ARCH).S 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\Camellia_aesni_$(TC_ARCH).obj": Camellia_aesni_$(TC_ARCH).S + yasm.exe $(VC_YASMFLAGS) -p gas -o "$@" -l "$(OBJ_PATH)\$(O)\Camellia_aesni_$(TC_ARCH).lst" Camellia_aesni_$(TC_ARCH).S 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\sha256-$(TC_ARCH)-nayuki.obj": sha256-$(TC_ARCH)-nayuki.S + yasm.exe $(VC_YASMFLAGS) -p gas -o "$@" -l "$(OBJ_PATH)\$(O)\sha256-$(TC_ARCH)-nayuki.lst" sha256-$(TC_ARCH)-nayuki.S 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\sha512-$(TC_ARCH)-nayuki.obj": 
sha512-$(TC_ARCH)-nayuki.S + yasm.exe $(VC_YASMFLAGS) -p gas -o "$@" -l "$(OBJ_PATH)\$(O)\sha512-$(TC_ARCH)-nayuki.lst" sha512-$(TC_ARCH)-nayuki.S 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\sha512_avx1_$(TC_ARCH).obj": sha512_avx1_$(TC_ARCH).asm + yasm.exe $(VC_YASMFLAGS) -o "$@" -l "$(OBJ_PATH)\$(O)\sha512_avx1_$(TC_ARCH).lst" sha512_avx1_$(TC_ARCH).asm 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\sha512_avx2_$(TC_ARCH).obj": sha512_avx2_$(TC_ARCH).asm + yasm.exe $(VC_YASMFLAGS) -o "$@" -l "$(OBJ_PATH)\$(O)\sha512_avx2_$(TC_ARCH).lst" sha512_avx2_$(TC_ARCH).asm 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\sha512_sse4_$(TC_ARCH).obj": sha512_sse4_$(TC_ARCH).asm + yasm.exe $(VC_YASMFLAGS) -o "$@" -l "$(OBJ_PATH)\$(O)\sha512_sse4_$(TC_ARCH).lst" sha512_sse4_$(TC_ARCH).asm 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\sha256_avx1_$(TC_ARCH).obj": sha256_avx1_$(TC_ARCH).asm + yasm.exe $(VC_YASMFLAGS) -o "$@" -l "$(OBJ_PATH)\$(O)\sha256_avx1_$(TC_ARCH).lst" sha256_avx1_$(TC_ARCH).asm 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\sha256_avx2_$(TC_ARCH).obj": sha256_avx2_$(TC_ARCH).asm + yasm.exe $(VC_YASMFLAGS) -o "$@" -l "$(OBJ_PATH)\$(O)\sha256_avx2_$(TC_ARCH).lst" sha256_avx2_$(TC_ARCH).asm 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\sha256_sse4_$(TC_ARCH).obj": sha256_sse4_$(TC_ARCH).asm + yasm.exe $(VC_YASMFLAGS) -o "$@" -l "$(OBJ_PATH)\$(O)\sha256_sse4_$(TC_ARCH).lst" sha256_sse4_$(TC_ARCH).asm 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\rdrand_ml.obj": rdrand_ml.asm + $(VC_MLEXE) $(VC_MLFLAGS) /Fo "$@" /c rdrand_ml.asm 2>$(TC_ASM_ERR_LOG) + +"$(OBJ_PATH)\$(O)\rdseed_ml.obj": rdseed_ml.asm + $(VC_MLEXE) $(VC_MLFLAGS) /Fo "$@" /c rdseed_ml.asm 2>$(TC_ASM_ERR_LOG) + + diff --git a/src/Crypto/Rmd160.c b/src/Crypto/Rmd160.c deleted file mode 100644 index 9153af49..00000000 --- a/src/Crypto/Rmd160.c +++ /dev/null @@ -1,500 +0,0 @@ -// RIPEMD-160 written and placed in the public domain by Wei Dai - -/* - * This code implements the MD4 message-digest algorithm. - * The algorithm is due to Ron Rivest. This code was - * written by Colin Plumb in 1993, no copyright is claimed. - * This code is in the public domain; do with it what you wish. 
- */ - -/* Adapted for TrueCrypt */ -/* Adapted for VeraCrypt */ -#if !defined(_UEFI) -#include <memory.h> -#endif // !defined(_UEFI) - -#include "Common/Tcdefs.h" -#include "Common/Endian.h" -#include "Rmd160.h" - -#define F(x, y, z) (x ^ y ^ z) -#define G(x, y, z) (z ^ (x & (y^z))) -#define H(x, y, z) (z ^ (x | ~y)) -#define I(x, y, z) (y ^ (z & (x^y))) -#define J(x, y, z) (x ^ (y | ~z)) - -#define PUT_64BIT_LE(cp, value) do { \ - (cp)[7] = (byte) ((value) >> 56); \ - (cp)[6] = (byte) ((value) >> 48); \ - (cp)[5] = (byte) ((value) >> 40); \ - (cp)[4] = (byte) ((value) >> 32); \ - (cp)[3] = (byte) ((value) >> 24); \ - (cp)[2] = (byte) ((value) >> 16); \ - (cp)[1] = (byte) ((value) >> 8); \ - (cp)[0] = (byte) (value); } while (0) - -#define PUT_32BIT_LE(cp, value) do { \ - (cp)[3] = (byte) ((value) >> 24); \ - (cp)[2] = (byte) ((value) >> 16); \ - (cp)[1] = (byte) ((value) >> 8); \ - (cp)[0] = (byte) (value); } while (0) - -#ifndef TC_MINIMIZE_CODE_SIZE - -static byte PADDING[64] = { - 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 -}; - -#else - -static byte PADDING[64]; - -#endif - -void RMD160Init (RMD160_CTX *ctx) -{ - ctx->count = 0; - ctx->state[0] = 0x67452301; - ctx->state[1] = 0xefcdab89; - ctx->state[2] = 0x98badcfe; - ctx->state[3] = 0x10325476; - ctx->state[4] = 0xc3d2e1f0; - PADDING[0] = 0x80; -} - -/* -* Update context to reflect the concatenation of another buffer full -* of bytes. -*/ -void RMD160Update (RMD160_CTX *ctx, const unsigned char *input, unsigned __int32 lenArg) -{ -#ifndef TC_WINDOWS_BOOT - uint64 len = lenArg; -#else - uint32 len = lenArg; -#endif - unsigned int have, need; - - /* Check how many bytes we already have and how many more we need. */ - have = (unsigned int) ((ctx->count) & (RIPEMD160_BLOCK_LENGTH - 1)); - need = RIPEMD160_BLOCK_LENGTH - have; - - /* Update bitcount */ - ctx->count += len; - - if (len >= need) { - if (have != 0) { - memcpy (ctx->buffer + have, input, (size_t) need); - RMD160Transform ((uint32 *) ctx->state, (const uint32 *) ctx->buffer); - input += need; - len -= need; - have = 0; - } - - /* Process data in RIPEMD160_BLOCK_LENGTH-byte chunks. */ - while (len >= RIPEMD160_BLOCK_LENGTH) { - RMD160Transform ((uint32 *) ctx->state, (const uint32 *) input); - input += RIPEMD160_BLOCK_LENGTH; - len -= RIPEMD160_BLOCK_LENGTH; - } - } - - /* Handle any remaining bytes of data. */ - if (len != 0) - memcpy (ctx->buffer + have, input, (size_t) len); -} - -/* -* Pad pad to 64-byte boundary with the bit pattern -* 1 0* (64-bit count of bits processed, MSB-first) -*/ -static void RMD160Pad(RMD160_CTX *ctx) -{ - byte count[8]; - uint32 padlen; - - /* Convert count to 8 bytes in little endian order. */ - -#ifndef TC_WINDOWS_BOOT - uint64 bitcount = ctx->count << 3; - PUT_64BIT_LE(count, bitcount); -#else - *(uint32 *) (count + 4) = 0; - *(uint32 *) (count + 0) = ctx->count << 3; -#endif - - /* Pad out to 56 mod 64. */ - padlen = RIPEMD160_BLOCK_LENGTH - - (uint32)((ctx->count) & (RIPEMD160_BLOCK_LENGTH - 1)); - if (padlen < 1 + 8) - padlen += RIPEMD160_BLOCK_LENGTH; - RMD160Update(ctx, PADDING, padlen - 8); /* padlen - 8 <= 64 */ - RMD160Update(ctx, count, 8); -} - -/* -* Final wrapup--call RMD160Pad, fill in digest and zero out ctx. 
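The padlen arithmetic in RMD160Pad above is the usual MD-strengthening rule: one 0x80 byte plus the 8-byte length field must still fit in the final 64-byte block, otherwise the padding spills into one more block. A small standalone sketch of that computation (hypothetical helper, assuming <stdint.h> types; it mirrors the arithmetic only):

#include <stdint.h>

/* Number of PADDING bytes RMD160Pad feeds before the 8-byte length field,
   for a message of byteCount bytes. */
static uint32_t rmd160_pad_bytes_sketch(uint64_t byteCount)
{
    uint32_t padlen = 64 - (uint32_t)(byteCount & 63);
    if (padlen < 1 + 8)   /* no room left for 0x80 plus the 64-bit bit count */
        padlen += 64;     /* padding runs into an extra block */
    return padlen - 8;
}

/* Examples: 55 buffered bytes -> 1 pad byte (the block closes exactly);
   56 buffered bytes -> 64 pad bytes, so the length lands in a second block. */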
-*/ -void RMD160Final(unsigned char *digest, RMD160_CTX *ctx) -{ - int i; - - RMD160Pad(ctx); - if (digest) { - for (i = 0; i < 5; i++) - PUT_32BIT_LE(digest + i * 4, ctx->state[i]); -#ifndef TC_WINDOWS_BOOT - burn (ctx, sizeof(*ctx)); -#endif - } -} - - -#ifndef TC_MINIMIZE_CODE_SIZE - -#define word32 unsigned __int32 - -#define k0 0 -#define k1 0x5a827999UL -#define k2 0x6ed9eba1UL -#define k3 0x8f1bbcdcUL -#define k4 0xa953fd4eUL -#define k5 0x50a28be6UL -#define k6 0x5c4dd124UL -#define k7 0x6d703ef3UL -#define k8 0x7a6d76e9UL -#define k9 0 - -static word32 rotlFixed (word32 x, unsigned int y) -{ - return (word32)((x<<y) | (x>>(sizeof(word32)*8-y))); -} - -#define Subround(f, a, b, c, d, e, x, s, k) \ - a += f(b, c, d) + x + k;\ - a = rotlFixed((word32)a, s) + e;\ - c = rotlFixed((word32)c, 10U) - -void RMD160Transform (unsigned __int32 *digest, const unsigned __int32 *data) -{ -#if BYTE_ORDER == LITTLE_ENDIAN - const word32 *X = data; -#else - word32 X[16]; - int i; -#endif - - word32 a1, b1, c1, d1, e1, a2, b2, c2, d2, e2; - a1 = a2 = digest[0]; - b1 = b2 = digest[1]; - c1 = c2 = digest[2]; - d1 = d2 = digest[3]; - e1 = e2 = digest[4]; - -#if BYTE_ORDER == BIG_ENDIAN - for (i = 0; i < 16; i++) - { - X[i] = LE32 (data[i]); - } -#endif - - Subround(F, a1, b1, c1, d1, e1, X[ 0], 11, k0); - Subround(F, e1, a1, b1, c1, d1, X[ 1], 14, k0); - Subround(F, d1, e1, a1, b1, c1, X[ 2], 15, k0); - Subround(F, c1, d1, e1, a1, b1, X[ 3], 12, k0); - Subround(F, b1, c1, d1, e1, a1, X[ 4], 5, k0); - Subround(F, a1, b1, c1, d1, e1, X[ 5], 8, k0); - Subround(F, e1, a1, b1, c1, d1, X[ 6], 7, k0); - Subround(F, d1, e1, a1, b1, c1, X[ 7], 9, k0); - Subround(F, c1, d1, e1, a1, b1, X[ 8], 11, k0); - Subround(F, b1, c1, d1, e1, a1, X[ 9], 13, k0); - Subround(F, a1, b1, c1, d1, e1, X[10], 14, k0); - Subround(F, e1, a1, b1, c1, d1, X[11], 15, k0); - Subround(F, d1, e1, a1, b1, c1, X[12], 6, k0); - Subround(F, c1, d1, e1, a1, b1, X[13], 7, k0); - Subround(F, b1, c1, d1, e1, a1, X[14], 9, k0); - Subround(F, a1, b1, c1, d1, e1, X[15], 8, k0); - - Subround(G, e1, a1, b1, c1, d1, X[ 7], 7, k1); - Subround(G, d1, e1, a1, b1, c1, X[ 4], 6, k1); - Subround(G, c1, d1, e1, a1, b1, X[13], 8, k1); - Subround(G, b1, c1, d1, e1, a1, X[ 1], 13, k1); - Subround(G, a1, b1, c1, d1, e1, X[10], 11, k1); - Subround(G, e1, a1, b1, c1, d1, X[ 6], 9, k1); - Subround(G, d1, e1, a1, b1, c1, X[15], 7, k1); - Subround(G, c1, d1, e1, a1, b1, X[ 3], 15, k1); - Subround(G, b1, c1, d1, e1, a1, X[12], 7, k1); - Subround(G, a1, b1, c1, d1, e1, X[ 0], 12, k1); - Subround(G, e1, a1, b1, c1, d1, X[ 9], 15, k1); - Subround(G, d1, e1, a1, b1, c1, X[ 5], 9, k1); - Subround(G, c1, d1, e1, a1, b1, X[ 2], 11, k1); - Subround(G, b1, c1, d1, e1, a1, X[14], 7, k1); - Subround(G, a1, b1, c1, d1, e1, X[11], 13, k1); - Subround(G, e1, a1, b1, c1, d1, X[ 8], 12, k1); - - Subround(H, d1, e1, a1, b1, c1, X[ 3], 11, k2); - Subround(H, c1, d1, e1, a1, b1, X[10], 13, k2); - Subround(H, b1, c1, d1, e1, a1, X[14], 6, k2); - Subround(H, a1, b1, c1, d1, e1, X[ 4], 7, k2); - Subround(H, e1, a1, b1, c1, d1, X[ 9], 14, k2); - Subround(H, d1, e1, a1, b1, c1, X[15], 9, k2); - Subround(H, c1, d1, e1, a1, b1, X[ 8], 13, k2); - Subround(H, b1, c1, d1, e1, a1, X[ 1], 15, k2); - Subround(H, a1, b1, c1, d1, e1, X[ 2], 14, k2); - Subround(H, e1, a1, b1, c1, d1, X[ 7], 8, k2); - Subround(H, d1, e1, a1, b1, c1, X[ 0], 13, k2); - Subround(H, c1, d1, e1, a1, b1, X[ 6], 6, k2); - Subround(H, b1, c1, d1, e1, a1, X[13], 5, k2); - Subround(H, a1, b1, c1, d1, e1, X[11], 12, k2); - 
Subround(H, e1, a1, b1, c1, d1, X[ 5], 7, k2); - Subround(H, d1, e1, a1, b1, c1, X[12], 5, k2); - - Subround(I, c1, d1, e1, a1, b1, X[ 1], 11, k3); - Subround(I, b1, c1, d1, e1, a1, X[ 9], 12, k3); - Subround(I, a1, b1, c1, d1, e1, X[11], 14, k3); - Subround(I, e1, a1, b1, c1, d1, X[10], 15, k3); - Subround(I, d1, e1, a1, b1, c1, X[ 0], 14, k3); - Subround(I, c1, d1, e1, a1, b1, X[ 8], 15, k3); - Subround(I, b1, c1, d1, e1, a1, X[12], 9, k3); - Subround(I, a1, b1, c1, d1, e1, X[ 4], 8, k3); - Subround(I, e1, a1, b1, c1, d1, X[13], 9, k3); - Subround(I, d1, e1, a1, b1, c1, X[ 3], 14, k3); - Subround(I, c1, d1, e1, a1, b1, X[ 7], 5, k3); - Subround(I, b1, c1, d1, e1, a1, X[15], 6, k3); - Subround(I, a1, b1, c1, d1, e1, X[14], 8, k3); - Subround(I, e1, a1, b1, c1, d1, X[ 5], 6, k3); - Subround(I, d1, e1, a1, b1, c1, X[ 6], 5, k3); - Subround(I, c1, d1, e1, a1, b1, X[ 2], 12, k3); - - Subround(J, b1, c1, d1, e1, a1, X[ 4], 9, k4); - Subround(J, a1, b1, c1, d1, e1, X[ 0], 15, k4); - Subround(J, e1, a1, b1, c1, d1, X[ 5], 5, k4); - Subround(J, d1, e1, a1, b1, c1, X[ 9], 11, k4); - Subround(J, c1, d1, e1, a1, b1, X[ 7], 6, k4); - Subround(J, b1, c1, d1, e1, a1, X[12], 8, k4); - Subround(J, a1, b1, c1, d1, e1, X[ 2], 13, k4); - Subround(J, e1, a1, b1, c1, d1, X[10], 12, k4); - Subround(J, d1, e1, a1, b1, c1, X[14], 5, k4); - Subround(J, c1, d1, e1, a1, b1, X[ 1], 12, k4); - Subround(J, b1, c1, d1, e1, a1, X[ 3], 13, k4); - Subround(J, a1, b1, c1, d1, e1, X[ 8], 14, k4); - Subround(J, e1, a1, b1, c1, d1, X[11], 11, k4); - Subround(J, d1, e1, a1, b1, c1, X[ 6], 8, k4); - Subround(J, c1, d1, e1, a1, b1, X[15], 5, k4); - Subround(J, b1, c1, d1, e1, a1, X[13], 6, k4); - - Subround(J, a2, b2, c2, d2, e2, X[ 5], 8, k5); - Subround(J, e2, a2, b2, c2, d2, X[14], 9, k5); - Subround(J, d2, e2, a2, b2, c2, X[ 7], 9, k5); - Subround(J, c2, d2, e2, a2, b2, X[ 0], 11, k5); - Subround(J, b2, c2, d2, e2, a2, X[ 9], 13, k5); - Subround(J, a2, b2, c2, d2, e2, X[ 2], 15, k5); - Subround(J, e2, a2, b2, c2, d2, X[11], 15, k5); - Subround(J, d2, e2, a2, b2, c2, X[ 4], 5, k5); - Subround(J, c2, d2, e2, a2, b2, X[13], 7, k5); - Subround(J, b2, c2, d2, e2, a2, X[ 6], 7, k5); - Subround(J, a2, b2, c2, d2, e2, X[15], 8, k5); - Subround(J, e2, a2, b2, c2, d2, X[ 8], 11, k5); - Subround(J, d2, e2, a2, b2, c2, X[ 1], 14, k5); - Subround(J, c2, d2, e2, a2, b2, X[10], 14, k5); - Subround(J, b2, c2, d2, e2, a2, X[ 3], 12, k5); - Subround(J, a2, b2, c2, d2, e2, X[12], 6, k5); - - Subround(I, e2, a2, b2, c2, d2, X[ 6], 9, k6); - Subround(I, d2, e2, a2, b2, c2, X[11], 13, k6); - Subround(I, c2, d2, e2, a2, b2, X[ 3], 15, k6); - Subround(I, b2, c2, d2, e2, a2, X[ 7], 7, k6); - Subround(I, a2, b2, c2, d2, e2, X[ 0], 12, k6); - Subround(I, e2, a2, b2, c2, d2, X[13], 8, k6); - Subround(I, d2, e2, a2, b2, c2, X[ 5], 9, k6); - Subround(I, c2, d2, e2, a2, b2, X[10], 11, k6); - Subround(I, b2, c2, d2, e2, a2, X[14], 7, k6); - Subround(I, a2, b2, c2, d2, e2, X[15], 7, k6); - Subround(I, e2, a2, b2, c2, d2, X[ 8], 12, k6); - Subround(I, d2, e2, a2, b2, c2, X[12], 7, k6); - Subround(I, c2, d2, e2, a2, b2, X[ 4], 6, k6); - Subround(I, b2, c2, d2, e2, a2, X[ 9], 15, k6); - Subround(I, a2, b2, c2, d2, e2, X[ 1], 13, k6); - Subround(I, e2, a2, b2, c2, d2, X[ 2], 11, k6); - - Subround(H, d2, e2, a2, b2, c2, X[15], 9, k7); - Subround(H, c2, d2, e2, a2, b2, X[ 5], 7, k7); - Subround(H, b2, c2, d2, e2, a2, X[ 1], 15, k7); - Subround(H, a2, b2, c2, d2, e2, X[ 3], 11, k7); - Subround(H, e2, a2, b2, c2, d2, X[ 7], 8, k7); - Subround(H, d2, e2, a2, b2, c2, 
X[14], 6, k7); - Subround(H, c2, d2, e2, a2, b2, X[ 6], 6, k7); - Subround(H, b2, c2, d2, e2, a2, X[ 9], 14, k7); - Subround(H, a2, b2, c2, d2, e2, X[11], 12, k7); - Subround(H, e2, a2, b2, c2, d2, X[ 8], 13, k7); - Subround(H, d2, e2, a2, b2, c2, X[12], 5, k7); - Subround(H, c2, d2, e2, a2, b2, X[ 2], 14, k7); - Subround(H, b2, c2, d2, e2, a2, X[10], 13, k7); - Subround(H, a2, b2, c2, d2, e2, X[ 0], 13, k7); - Subround(H, e2, a2, b2, c2, d2, X[ 4], 7, k7); - Subround(H, d2, e2, a2, b2, c2, X[13], 5, k7); - - Subround(G, c2, d2, e2, a2, b2, X[ 8], 15, k8); - Subround(G, b2, c2, d2, e2, a2, X[ 6], 5, k8); - Subround(G, a2, b2, c2, d2, e2, X[ 4], 8, k8); - Subround(G, e2, a2, b2, c2, d2, X[ 1], 11, k8); - Subround(G, d2, e2, a2, b2, c2, X[ 3], 14, k8); - Subround(G, c2, d2, e2, a2, b2, X[11], 14, k8); - Subround(G, b2, c2, d2, e2, a2, X[15], 6, k8); - Subround(G, a2, b2, c2, d2, e2, X[ 0], 14, k8); - Subround(G, e2, a2, b2, c2, d2, X[ 5], 6, k8); - Subround(G, d2, e2, a2, b2, c2, X[12], 9, k8); - Subround(G, c2, d2, e2, a2, b2, X[ 2], 12, k8); - Subround(G, b2, c2, d2, e2, a2, X[13], 9, k8); - Subround(G, a2, b2, c2, d2, e2, X[ 9], 12, k8); - Subround(G, e2, a2, b2, c2, d2, X[ 7], 5, k8); - Subround(G, d2, e2, a2, b2, c2, X[10], 15, k8); - Subround(G, c2, d2, e2, a2, b2, X[14], 8, k8); - - Subround(F, b2, c2, d2, e2, a2, X[12], 8, k9); - Subround(F, a2, b2, c2, d2, e2, X[15], 5, k9); - Subround(F, e2, a2, b2, c2, d2, X[10], 12, k9); - Subround(F, d2, e2, a2, b2, c2, X[ 4], 9, k9); - Subround(F, c2, d2, e2, a2, b2, X[ 1], 12, k9); - Subround(F, b2, c2, d2, e2, a2, X[ 5], 5, k9); - Subround(F, a2, b2, c2, d2, e2, X[ 8], 14, k9); - Subround(F, e2, a2, b2, c2, d2, X[ 7], 6, k9); - Subround(F, d2, e2, a2, b2, c2, X[ 6], 8, k9); - Subround(F, c2, d2, e2, a2, b2, X[ 2], 13, k9); - Subround(F, b2, c2, d2, e2, a2, X[13], 6, k9); - Subround(F, a2, b2, c2, d2, e2, X[14], 5, k9); - Subround(F, e2, a2, b2, c2, d2, X[ 0], 15, k9); - Subround(F, d2, e2, a2, b2, c2, X[ 3], 13, k9); - Subround(F, c2, d2, e2, a2, b2, X[ 9], 11, k9); - Subround(F, b2, c2, d2, e2, a2, X[11], 11, k9); - - c1 = digest[1] + c1 + d2; - digest[1] = digest[2] + d1 + e2; - digest[2] = digest[3] + e1 + a2; - digest[3] = digest[4] + a1 + b2; - digest[4] = digest[0] + b1 + c2; - digest[0] = c1; -} - -#else // TC_MINIMIZE_CODE_SIZE - -/* - Derived from source code of TrueCrypt 7.1a, which is - Copyright (c) 2008-2012 TrueCrypt Developers Association and which is governed - by the TrueCrypt License 3.0. - - Modifications and additions to the original source code (contained in this file) - and all other portions of this file are Copyright (c) 2013-2016 IDRIX - and are governed by the Apache License 2.0 the full text of which is - contained in the file License.txt included in VeraCrypt binary and source - code distribution packages. 
-*/ - -#pragma optimize ("tl", on) - -typedef unsigned __int32 uint32; -typedef unsigned __int8 byte; - -#include <stdlib.h> -#pragma intrinsic (_lrotl) - -static const byte OrderTab[] = { - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, - 7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8, - 3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12, - 1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2, - 4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13, - 5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12, - 6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2, - 15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13, - 8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14, - 12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11 -}; - -static const byte RolTab[] = { - 11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8, - 7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12, - 11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5, - 11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12, - 9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6, - 8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6, - 9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11, - 9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5, - 15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8, - 8, 5, 12, 9, 12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11 -}; - -static const uint32 KTab[] = { - 0x00000000UL, - 0x5A827999UL, - 0x6ED9EBA1UL, - 0x8F1BBCDCUL, - 0xA953FD4EUL, - 0x50A28BE6UL, - 0x5C4DD124UL, - 0x6D703EF3UL, - 0x7A6D76E9UL, - 0x00000000UL -}; - - -void RMD160Transform (unsigned __int32 *state, const unsigned __int32 *data) -{ - uint32 a, b, c, d, e; - uint32 a2, b2, c2, d2, e2; - byte pos; - uint32 tmp; - - a = state[0]; - b = state[1]; - c = state[2]; - d = state[3]; - e = state[4]; - - for (pos = 0; pos < 160; ++pos) - { - tmp = a + data[OrderTab[pos]] + KTab[pos >> 4]; - - switch (pos >> 4) - { - case 0: case 9: tmp += F (b, c, d); break; - case 1: case 8: tmp += G (b, c, d); break; - case 2: case 7: tmp += H (b, c, d); break; - case 3: case 6: tmp += I (b, c, d); break; - case 4: case 5: tmp += J (b, c, d); break; - } - - tmp = _lrotl (tmp, RolTab[pos]) + e; - a = e; - e = d; - d = _lrotl (c, 10); - c = b; - b = tmp; - - if (pos == 79) - { - a2 = a; - b2 = b; - c2 = c; - d2 = d; - e2 = e; - - a = state[0]; - b = state[1]; - c = state[2]; - d = state[3]; - e = state[4]; - } - } - - tmp = state[1] + c2 + d; - state[1] = state[2] + d2 + e; - state[2] = state[3] + e2 + a; - state[3] = state[4] + a2 + b; - state[4] = state[0] + b2 + c; - state[0] = tmp; -} - -#endif // TC_MINIMIZE_CODE_SIZE diff --git a/src/Crypto/Rmd160.h b/src/Crypto/Rmd160.h deleted file mode 100644 index 81b5d6f0..00000000 --- a/src/Crypto/Rmd160.h +++ /dev/null @@ -1,33 +0,0 @@ -#ifndef TC_HEADER_Crypto_Ripemd160 -#define TC_HEADER_Crypto_Ripemd160 - -#include "Common/Tcdefs.h" - -#if defined(__cplusplus) -extern "C" -{ -#endif - -#define RIPEMD160_BLOCK_LENGTH 64 - -typedef struct RMD160Context -{ - unsigned __int32 state[5]; -#ifndef TC_WINDOWS_BOOT - uint64 count; -#else - uint32 count; -#endif - unsigned char buffer[RIPEMD160_BLOCK_LENGTH]; -} RMD160_CTX; - -void RMD160Init (RMD160_CTX *ctx); -void RMD160Transform (unsigned __int32 *state, const unsigned __int32 *data); -void RMD160Update (RMD160_CTX *ctx, const unsigned char *input, unsigned __int32 len); -void RMD160Final (unsigned char *digest, RMD160_CTX *ctx); - -#if defined(__cplusplus) -} -#endif - -#endif // TC_HEADER_Crypto_Ripemd160 diff --git 
a/src/Crypto/SerpentFast.c b/src/Crypto/SerpentFast.c index cb143262..3358db13 100644 --- a/src/Crypto/SerpentFast.c +++ b/src/Crypto/SerpentFast.c @@ -79,9 +79,9 @@ void serpent_encrypt_blocks(const unsigned __int8* in, unsigned __int8* out, siz { unsigned __int32 B0, B1, B2, B3; unsigned __int32* round_key = ((unsigned __int32*) ks) + 8; size_t i; -#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE && (!defined (DEBUG) || !defined (TC_WINDOWS_DRIVER)) if(HasSSE2() && (blocks >= 4)) { while(blocks >= 4) { @@ -158,9 +158,9 @@ void serpent_decrypt_blocks(const unsigned __int8* in, unsigned __int8* out, siz { unsigned __int32 B0, B1, B2, B3; unsigned __int32* round_key = ((unsigned __int32*) ks) + 8; size_t i; -#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE && (!defined (DEBUG) || !defined (TC_WINDOWS_DRIVER)) if(HasSSE2() && (blocks >= 4)) { while(blocks >= 4) { diff --git a/src/Crypto/SerpentFast_simd.cpp b/src/Crypto/SerpentFast_simd.cpp index d5d5d65b..6e5fd49b 100644 --- a/src/Crypto/SerpentFast_simd.cpp +++ b/src/Crypto/SerpentFast_simd.cpp @@ -217,9 +217,9 @@ typedef SIMD_4x32 SIMD_32; B0.rotate_right(13); \ } while(0); - +#if (!defined (DEBUG) || !defined (TC_WINDOWS_DRIVER)) /* * SIMD Serpent Encryption of 4 blocks in parallel */ extern "C" void serpent_simd_encrypt_blocks_4(const unsigned __int8 in[], unsigned __int8 out[], unsigned __int32* round_key) @@ -329,9 +329,9 @@ extern "C" void serpent_simd_decrypt_blocks_4(const unsigned __int8 in[], unsign B1.store_le(out + 16); B2.store_le(out + 32); B3.store_le(out + 48); } - +#endif #undef key_xor #undef transform #undef i_transform diff --git a/src/Crypto/Sha2.c b/src/Crypto/Sha2.c index 80758285..5ae9cae2 100644 --- a/src/Crypto/Sha2.c +++ b/src/Crypto/Sha2.c @@ -1,757 +1,879 @@ /* - --------------------------------------------------------------------------- - Copyright (c) 2002, Dr Brian Gladman, Worcester, UK. All rights reserved. - - LICENSE TERMS - - The free distribution and use of this software is allowed (with or without - changes) provided that: - - 1. source code distributions include the above copyright notice, this - list of conditions and the following disclaimer; - - 2. binary distributions include the above copyright notice, this list - of conditions and the following disclaimer in their documentation; - - 3. the name of the copyright holder is not used to endorse products - built using this software without specific written permission. - - DISCLAIMER - - This software is provided 'as is' with no explicit or implied warranties - in respect of its properties, including, but not limited to, correctness - and/or fitness for purpose. - --------------------------------------------------------------------------- - Issue Date: 01/08/2005 - - This is a byte oriented version of SHA2 that operates on arrays of bytes - stored in memory. This code implements sha256, sha384 and sha512 but the - latter two functions rely on efficient 64-bit integer operations that - may not be very efficient on 32-bit machines - - The sha256 functions use a type 'sha256_ctx' to hold details of the - current hash state and uses the following three calls: - - void sha256_begin(sha256_ctx ctx[1]) - void sha256_hash(const unsigned char data[], - unsigned long len, sha256_ctx ctx[1]) - void sha_end1(unsigned char hval[], sha256_ctx ctx[1]) - - The first subroutine initialises a hash computation by setting up the - context in the sha256_ctx context. 
The second subroutine hashes 8-bit - bytes from array data[] into the hash state withinh sha256_ctx context, - the number of bytes to be hashed being given by the the unsigned long - integer len. The third subroutine completes the hash calculation and - places the resulting digest value in the array of 8-bit bytes hval[]. - - The sha384 and sha512 functions are similar and use the interfaces: - - void sha384_begin(sha384_ctx ctx[1]); - void sha384_hash(const unsigned char data[], - unsigned long len, sha384_ctx ctx[1]); - void sha384_end(unsigned char hval[], sha384_ctx ctx[1]); - - void sha512_begin(sha512_ctx ctx[1]); - void sha512_hash(const unsigned char data[], - unsigned long len, sha512_ctx ctx[1]); - void sha512_end(unsigned char hval[], sha512_ctx ctx[1]); - - In addition there is a function sha2 that can be used to call all these - functions using a call with a hash length parameter as follows: - - int sha2_begin(unsigned long len, sha2_ctx ctx[1]); - void sha2_hash(const unsigned char data[], - unsigned long len, sha2_ctx ctx[1]); - void sha2_end(unsigned char hval[], sha2_ctx ctx[1]); - - My thanks to Erik Andersen <andersen@codepoet.org> for testing this code - on big-endian systems and for his assistance with corrections +This code is written by kerukuro for cppcrypto library (http://cppcrypto.sourceforge.net/) +and released into public domain. */ +/* Modified for VeraCrypt with speed optimization for C implementation */ + +#include "Sha2.h" #include "Common/Endian.h" -#include "Common/Tcdefs.h" +#include "Crypto/cpu.h" #include "Crypto/misc.h" -#define PLATFORM_BYTE_ORDER BYTE_ORDER -#define IS_LITTLE_ENDIAN LITTLE_ENDIAN - -#if 0 -#define UNROLL_SHA2 /* for SHA2 loop unroll */ +#if defined(_UEFI) || defined(CRYPTOPP_DISABLE_ASM) +#define NO_OPTIMIZED_VERSIONS #endif -#if !defined(_UEFI) -#include <string.h> /* for memcpy() etc. */ -#endif // !defined(_UEFI) - -#include "Sha2.h" +#ifndef NO_OPTIMIZED_VERSIONS #if defined(__cplusplus) extern "C" { #endif - -#if defined( _MSC_VER ) && ( _MSC_VER > 800 ) && !defined(_UEFI) -#pragma intrinsic(memcpy) +#if CRYPTOPP_BOOL_X64 + void sha512_rorx(const void* M, void* D, uint_64t l); + void sha512_sse4(const void* M, uint_64t D[8], uint_64t l); + void sha512_avx(const void* M, void* D, uint_64t l); #endif - -#if (PLATFORM_BYTE_ORDER == IS_LITTLE_ENDIAN) -#define SWAP_BYTES -#else -#undef SWAP_BYTES + +#if CRYPTOPP_BOOL_X64 || ((CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32) && !defined (TC_MACOSX)) + void VC_CDECL sha512_compress_nayuki(uint_64t state[8], const uint_8t block[128]); #endif - -#if 0 - -#define ch(x,y,z) (((x) & (y)) ^ (~(x) & (z))) -#define maj(x,y,z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z))) - -#else /* Thanks to Rich Schroeppel and Colin Plumb for the following */ - -#define ch(x,y,z) ((z) ^ ((x) & ((y) ^ (z)))) -#define maj(x,y,z) (((x) & (y)) | ((z) & ((x) ^ (y)))) - +#if defined(__cplusplus) +} #endif -/* round transforms for SHA256 and SHA512 compression functions */ - -#define vf(n,i) v[(n - i) & 7] - -#define hf(i) (p[i & 15] += \ - g_1(p[(i + 14) & 15]) + p[(i + 9) & 15] + g_0(p[(i + 1) & 15])) - -#define v_cycle(i,j) \ - vf(7,i) += (j ? 
hf(i) : p[i]) + k_0[i+j] \ - + s_1(vf(4,i)) + ch(vf(4,i),vf(5,i),vf(6,i)); \ - vf(3,i) += vf(7,i); \ - vf(7,i) += s_0(vf(0,i))+ maj(vf(0,i),vf(1,i),vf(2,i)) - -#if defined(SHA_224) || defined(SHA_256) - -#define SHA256_MASK (SHA256_BLOCK_SIZE - 1) +#endif -#if defined(SWAP_BYTES) -#define bsw_32(p,n) \ - { int _i = (n); while(_i--) ((uint_32t*)p)[_i] = bswap_32(((uint_32t*)p)[_i]); } -#else -#define bsw_32(p,n) -#endif - -#define s_0(x) (rotr32((x), 2) ^ rotr32((x), 13) ^ rotr32((x), 22)) -#define s_1(x) (rotr32((x), 6) ^ rotr32((x), 11) ^ rotr32((x), 25)) -#define g_0(x) (rotr32((x), 7) ^ rotr32((x), 18) ^ ((x) >> 3)) -#define g_1(x) (rotr32((x), 17) ^ rotr32((x), 19) ^ ((x) >> 10)) -#define k_0 k256 - -/* rotated SHA256 round definition. Rather than swapping variables as in */ -/* FIPS-180, different variables are 'rotated' on each round, returning */ -/* to their starting positions every eight rounds */ - -#define q(n) v##n - -#define one_cycle(a,b,c,d,e,f,g,h,k,w) \ - q(h) += s_1(q(e)) + ch(q(e), q(f), q(g)) + k + w; \ - q(d) += q(h); q(h) += s_0(q(a)) + maj(q(a), q(b), q(c)) - -/* SHA256 mixing data */ - -const uint_32t k256[64] = -{ 0x428a2f98ul, 0x71374491ul, 0xb5c0fbcful, 0xe9b5dba5ul, - 0x3956c25bul, 0x59f111f1ul, 0x923f82a4ul, 0xab1c5ed5ul, - 0xd807aa98ul, 0x12835b01ul, 0x243185beul, 0x550c7dc3ul, - 0x72be5d74ul, 0x80deb1feul, 0x9bdc06a7ul, 0xc19bf174ul, - 0xe49b69c1ul, 0xefbe4786ul, 0x0fc19dc6ul, 0x240ca1ccul, - 0x2de92c6ful, 0x4a7484aaul, 0x5cb0a9dcul, 0x76f988daul, - 0x983e5152ul, 0xa831c66dul, 0xb00327c8ul, 0xbf597fc7ul, - 0xc6e00bf3ul, 0xd5a79147ul, 0x06ca6351ul, 0x14292967ul, - 0x27b70a85ul, 0x2e1b2138ul, 0x4d2c6dfcul, 0x53380d13ul, - 0x650a7354ul, 0x766a0abbul, 0x81c2c92eul, 0x92722c85ul, - 0xa2bfe8a1ul, 0xa81a664bul, 0xc24b8b70ul, 0xc76c51a3ul, - 0xd192e819ul, 0xd6990624ul, 0xf40e3585ul, 0x106aa070ul, - 0x19a4c116ul, 0x1e376c08ul, 0x2748774cul, 0x34b0bcb5ul, - 0x391c0cb3ul, 0x4ed8aa4aul, 0x5b9cca4ful, 0x682e6ff3ul, - 0x748f82eeul, 0x78a5636ful, 0x84c87814ul, 0x8cc70208ul, - 0x90befffaul, 0xa4506cebul, 0xbef9a3f7ul, 0xc67178f2ul, +typedef void (*transformFn)(sha512_ctx* ctx, void* m, uint_64t num_blks); + +transformFn transfunc = NULL; + +static const uint_64t K[80] = { + LL(0x428a2f98d728ae22), LL(0x7137449123ef65cd), LL(0xb5c0fbcfec4d3b2f), LL(0xe9b5dba58189dbbc), + LL(0x3956c25bf348b538), LL(0x59f111f1b605d019), LL(0x923f82a4af194f9b), LL(0xab1c5ed5da6d8118), + LL(0xd807aa98a3030242), LL(0x12835b0145706fbe), LL(0x243185be4ee4b28c), LL(0x550c7dc3d5ffb4e2), + LL(0x72be5d74f27b896f), LL(0x80deb1fe3b1696b1), LL(0x9bdc06a725c71235), LL(0xc19bf174cf692694), + LL(0xe49b69c19ef14ad2), LL(0xefbe4786384f25e3), LL(0x0fc19dc68b8cd5b5), LL(0x240ca1cc77ac9c65), + LL(0x2de92c6f592b0275), LL(0x4a7484aa6ea6e483), LL(0x5cb0a9dcbd41fbd4), LL(0x76f988da831153b5), + LL(0x983e5152ee66dfab), LL(0xa831c66d2db43210), LL(0xb00327c898fb213f), LL(0xbf597fc7beef0ee4), + LL(0xc6e00bf33da88fc2), LL(0xd5a79147930aa725), LL(0x06ca6351e003826f), LL(0x142929670a0e6e70), + LL(0x27b70a8546d22ffc), LL(0x2e1b21385c26c926), LL(0x4d2c6dfc5ac42aed), LL(0x53380d139d95b3df), + LL(0x650a73548baf63de), LL(0x766a0abb3c77b2a8), LL(0x81c2c92e47edaee6), LL(0x92722c851482353b), + LL(0xa2bfe8a14cf10364), LL(0xa81a664bbc423001), LL(0xc24b8b70d0f89791), LL(0xc76c51a30654be30), + LL(0xd192e819d6ef5218), LL(0xd69906245565a910), LL(0xf40e35855771202a), LL(0x106aa07032bbd1b8), + LL(0x19a4c116b8d2d0c8), LL(0x1e376c085141ab53), LL(0x2748774cdf8eeb99), LL(0x34b0bcb5e19b48a8), + LL(0x391c0cb3c5c95a63), LL(0x4ed8aa4ae3418acb), 
LL(0x5b9cca4f7763e373), LL(0x682e6ff3d6b2b8a3), + LL(0x748f82ee5defb2fc), LL(0x78a5636f43172f60), LL(0x84c87814a1f0ab72), LL(0x8cc702081a6439ec), + LL(0x90befffa23631e28), LL(0xa4506cebde82bde9), LL(0xbef9a3f7b2c67915), LL(0xc67178f2e372532b), + LL(0xca273eceea26619c), LL(0xd186b8c721c0c207), LL(0xeada7dd6cde0eb1e), LL(0xf57d4f7fee6ed178), + LL(0x06f067aa72176fba), LL(0x0a637dc5a2c898a6), LL(0x113f9804bef90dae), LL(0x1b710b35131c471b), + LL(0x28db77f523047d84), LL(0x32caab7b40c72493), LL(0x3c9ebe0a15c9bebc), LL(0x431d67c49c100d4c), + LL(0x4cc5d4becb3e42b6), LL(0x597f299cfc657e2a), LL(0x5fcb6fab3ad6faec), LL(0x6c44198c4a475817) }; -/* Compile 64 bytes of hash data into SHA256 digest value */ -/* NOTE: this routine assumes that the byte order in the */ -/* ctx->wbuf[] at this point is such that low address bytes */ -/* in the ORIGINAL byte stream will go into the high end of */ -/* words on BOTH big and little endian systems */ -VOID_RETURN sha256_compile(sha256_ctx ctx[1]) +#define Ch(x,y,z) ((z) ^ ((x) & ((y) ^ (z)))) +#define Maj(x,y,z) (((x) & (y)) | ((z) & ((x) ^ (y)))) +#define sum0(x) (rotr64((x), 28) ^ rotr64((x), 34) ^ rotr64((x), 39)) +#define sum1(x) (rotr64((x), 14) ^ rotr64((x), 18) ^ rotr64((x), 41)) +#define sigma0(x) (rotr64((x), 1) ^ rotr64((x), 8) ^ ((x) >> 7)) +#define sigma1(x) (rotr64((x), 19) ^ rotr64((x), 61) ^ ((x) >> 6)) + +#define WU(j) (W[j & 15] += sigma1(W[(j + 14) & 15]) + W[(j + 9) & 15] + sigma0(W[(j + 1) & 15])) + +#define COMPRESS_ROUND(i, j, K) \ + T1 = h + sum1(e) + Ch(e, f, g) + K[i + j] + (i? WU(j): W[j]); \ + T2 = sum0(a) + Maj(a, b, c); \ + h = g; \ + g = f; \ + f = e; \ + e = d + T1; \ + d = c; \ + c = b; \ + b = a; \ + a = T1 + T2; + +void StdTransform(sha512_ctx* ctx, void* mp, uint_64t num_blks) { -#if !defined(UNROLL_SHA2) - - uint_32t j, *p = ctx->wbuf, v[8]; - - memcpy(v, ctx->hash, 8 * sizeof(uint_32t)); - - for(j = 0; j < 64; j += 16) - { - v_cycle( 0, j); v_cycle( 1, j); - v_cycle( 2, j); v_cycle( 3, j); - v_cycle( 4, j); v_cycle( 5, j); - v_cycle( 6, j); v_cycle( 7, j); - v_cycle( 8, j); v_cycle( 9, j); - v_cycle(10, j); v_cycle(11, j); - v_cycle(12, j); v_cycle(13, j); - v_cycle(14, j); v_cycle(15, j); - } - - ctx->hash[0] += v[0]; ctx->hash[1] += v[1]; - ctx->hash[2] += v[2]; ctx->hash[3] += v[3]; - ctx->hash[4] += v[4]; ctx->hash[5] += v[5]; - ctx->hash[6] += v[6]; ctx->hash[7] += v[7]; + uint_64t blk; + for (blk = 0; blk < num_blks; blk++) + { + uint_64t W[16]; + uint_64t a,b,c,d,e,f,g,h; + uint_64t T1, T2; + int i; +#if defined (TC_WINDOWS_DRIVER) && defined (DEBUG) + int j; +#endif + for (i = 0; i < 128 / 8; i++) + { + W[i] = bswap_64((((const uint_64t*)(mp))[blk * 16 + i])); + } + + a = ctx->hash[0]; + b = ctx->hash[1]; + c = ctx->hash[2]; + d = ctx->hash[3]; + e = ctx->hash[4]; + f = ctx->hash[5]; + g = ctx->hash[6]; + h = ctx->hash[7]; + + for (i = 0; i <= 79; i+=16) + { +#if defined (TC_WINDOWS_DRIVER) && defined (DEBUG) + for (j = 0; j < 16; j++) + { + COMPRESS_ROUND(i, j, K); + } #else - - uint_32t *p = ctx->wbuf,v0,v1,v2,v3,v4,v5,v6,v7; - - v0 = ctx->hash[0]; v1 = ctx->hash[1]; - v2 = ctx->hash[2]; v3 = ctx->hash[3]; - v4 = ctx->hash[4]; v5 = ctx->hash[5]; - v6 = ctx->hash[6]; v7 = ctx->hash[7]; - - one_cycle(0,1,2,3,4,5,6,7,k256[ 0],p[ 0]); - one_cycle(7,0,1,2,3,4,5,6,k256[ 1],p[ 1]); - one_cycle(6,7,0,1,2,3,4,5,k256[ 2],p[ 2]); - one_cycle(5,6,7,0,1,2,3,4,k256[ 3],p[ 3]); - one_cycle(4,5,6,7,0,1,2,3,k256[ 4],p[ 4]); - one_cycle(3,4,5,6,7,0,1,2,k256[ 5],p[ 5]); - one_cycle(2,3,4,5,6,7,0,1,k256[ 6],p[ 6]); - 
one_cycle(1,2,3,4,5,6,7,0,k256[ 7],p[ 7]); - one_cycle(0,1,2,3,4,5,6,7,k256[ 8],p[ 8]); - one_cycle(7,0,1,2,3,4,5,6,k256[ 9],p[ 9]); - one_cycle(6,7,0,1,2,3,4,5,k256[10],p[10]); - one_cycle(5,6,7,0,1,2,3,4,k256[11],p[11]); - one_cycle(4,5,6,7,0,1,2,3,k256[12],p[12]); - one_cycle(3,4,5,6,7,0,1,2,k256[13],p[13]); - one_cycle(2,3,4,5,6,7,0,1,k256[14],p[14]); - one_cycle(1,2,3,4,5,6,7,0,k256[15],p[15]); - - one_cycle(0,1,2,3,4,5,6,7,k256[16],hf( 0)); - one_cycle(7,0,1,2,3,4,5,6,k256[17],hf( 1)); - one_cycle(6,7,0,1,2,3,4,5,k256[18],hf( 2)); - one_cycle(5,6,7,0,1,2,3,4,k256[19],hf( 3)); - one_cycle(4,5,6,7,0,1,2,3,k256[20],hf( 4)); - one_cycle(3,4,5,6,7,0,1,2,k256[21],hf( 5)); - one_cycle(2,3,4,5,6,7,0,1,k256[22],hf( 6)); - one_cycle(1,2,3,4,5,6,7,0,k256[23],hf( 7)); - one_cycle(0,1,2,3,4,5,6,7,k256[24],hf( 8)); - one_cycle(7,0,1,2,3,4,5,6,k256[25],hf( 9)); - one_cycle(6,7,0,1,2,3,4,5,k256[26],hf(10)); - one_cycle(5,6,7,0,1,2,3,4,k256[27],hf(11)); - one_cycle(4,5,6,7,0,1,2,3,k256[28],hf(12)); - one_cycle(3,4,5,6,7,0,1,2,k256[29],hf(13)); - one_cycle(2,3,4,5,6,7,0,1,k256[30],hf(14)); - one_cycle(1,2,3,4,5,6,7,0,k256[31],hf(15)); - - one_cycle(0,1,2,3,4,5,6,7,k256[32],hf( 0)); - one_cycle(7,0,1,2,3,4,5,6,k256[33],hf( 1)); - one_cycle(6,7,0,1,2,3,4,5,k256[34],hf( 2)); - one_cycle(5,6,7,0,1,2,3,4,k256[35],hf( 3)); - one_cycle(4,5,6,7,0,1,2,3,k256[36],hf( 4)); - one_cycle(3,4,5,6,7,0,1,2,k256[37],hf( 5)); - one_cycle(2,3,4,5,6,7,0,1,k256[38],hf( 6)); - one_cycle(1,2,3,4,5,6,7,0,k256[39],hf( 7)); - one_cycle(0,1,2,3,4,5,6,7,k256[40],hf( 8)); - one_cycle(7,0,1,2,3,4,5,6,k256[41],hf( 9)); - one_cycle(6,7,0,1,2,3,4,5,k256[42],hf(10)); - one_cycle(5,6,7,0,1,2,3,4,k256[43],hf(11)); - one_cycle(4,5,6,7,0,1,2,3,k256[44],hf(12)); - one_cycle(3,4,5,6,7,0,1,2,k256[45],hf(13)); - one_cycle(2,3,4,5,6,7,0,1,k256[46],hf(14)); - one_cycle(1,2,3,4,5,6,7,0,k256[47],hf(15)); - - one_cycle(0,1,2,3,4,5,6,7,k256[48],hf( 0)); - one_cycle(7,0,1,2,3,4,5,6,k256[49],hf( 1)); - one_cycle(6,7,0,1,2,3,4,5,k256[50],hf( 2)); - one_cycle(5,6,7,0,1,2,3,4,k256[51],hf( 3)); - one_cycle(4,5,6,7,0,1,2,3,k256[52],hf( 4)); - one_cycle(3,4,5,6,7,0,1,2,k256[53],hf( 5)); - one_cycle(2,3,4,5,6,7,0,1,k256[54],hf( 6)); - one_cycle(1,2,3,4,5,6,7,0,k256[55],hf( 7)); - one_cycle(0,1,2,3,4,5,6,7,k256[56],hf( 8)); - one_cycle(7,0,1,2,3,4,5,6,k256[57],hf( 9)); - one_cycle(6,7,0,1,2,3,4,5,k256[58],hf(10)); - one_cycle(5,6,7,0,1,2,3,4,k256[59],hf(11)); - one_cycle(4,5,6,7,0,1,2,3,k256[60],hf(12)); - one_cycle(3,4,5,6,7,0,1,2,k256[61],hf(13)); - one_cycle(2,3,4,5,6,7,0,1,k256[62],hf(14)); - one_cycle(1,2,3,4,5,6,7,0,k256[63],hf(15)); - - ctx->hash[0] += v0; ctx->hash[1] += v1; - ctx->hash[2] += v2; ctx->hash[3] += v3; - ctx->hash[4] += v4; ctx->hash[5] += v5; - ctx->hash[6] += v6; ctx->hash[7] += v7; + COMPRESS_ROUND(i, 0, K); + COMPRESS_ROUND(i, 1, K); + COMPRESS_ROUND(i , 2, K); + COMPRESS_ROUND(i, 3, K); + COMPRESS_ROUND(i, 4, K); + COMPRESS_ROUND(i, 5, K); + COMPRESS_ROUND(i, 6, K); + COMPRESS_ROUND(i, 7, K); + COMPRESS_ROUND(i, 8, K); + COMPRESS_ROUND(i, 9, K); + COMPRESS_ROUND(i, 10, K); + COMPRESS_ROUND(i, 11, K); + COMPRESS_ROUND(i, 12, K); + COMPRESS_ROUND(i, 13, K); + COMPRESS_ROUND(i, 14, K); + COMPRESS_ROUND(i, 15, K); #endif + } + ctx->hash[0] += a; + ctx->hash[1] += b; + ctx->hash[2] += c; + ctx->hash[3] += d; + ctx->hash[4] += e; + ctx->hash[5] += f; + ctx->hash[6] += g; + ctx->hash[7] += h; + } } -/* SHA256 hash data in an array of bytes into hash buffer */ -/* and call the hash_compile function as required. 
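The manually unrolled COMPRESS_ROUND calls above are the standard SHA-512 round applied over a rolling 16-word schedule. As a readability aid, a plain-loop sketch of the same per-block computation (hypothetical helper; it reuses the 80-entry K[] table added above and open-codes the Ch/Maj/sum/sigma macros with <stdint.h> types):

#include <stdint.h>

static uint64_t rr64(uint64_t x, int n) { return (x >> n) | (x << (64 - n)); }

/* st[8]: chaining state; W[16]: the block's 16 message words, already
   byte-swapped to host order as in StdTransform.  W is updated in place. */
static void sha512_block_sketch(uint64_t st[8], uint64_t W[16])
{
    uint64_t a = st[0], b = st[1], c = st[2], d = st[3];
    uint64_t e = st[4], f = st[5], g = st[6], h = st[7];
    int t;
    for (t = 0; t < 80; t++) {
        uint64_t w, T1, T2;
        if (t < 16) {
            w = W[t];
        } else {
            uint64_t x2  = W[(t + 14) & 15];   /* W[t-2]  */
            uint64_t x15 = W[(t + 1)  & 15];   /* W[t-15] */
            w = W[t & 15] += (rr64(x2, 19) ^ rr64(x2, 61) ^ (x2 >> 6))    /* sigma1 */
                           + W[(t + 9) & 15]
                           + (rr64(x15, 1) ^ rr64(x15, 8) ^ (x15 >> 7));  /* sigma0 */
        }
        T1 = h + (rr64(e, 14) ^ rr64(e, 18) ^ rr64(e, 41))   /* sum1(e)   */
               + ((e & f) ^ (~e & g))                        /* Ch(e,f,g) */
               + K[t] + w;
        T2 = (rr64(a, 28) ^ rr64(a, 34) ^ rr64(a, 39))       /* sum0(a)    */
           + ((a & b) ^ (a & c) ^ (b & c));                  /* Maj(a,b,c) */
        h = g; g = f; f = e; e = d + T1;
        d = c; c = b; b = a; a = T1 + T2;
    }
    st[0] += a; st[1] += b; st[2] += c; st[3] += d;
    st[4] += e; st[5] += f; st[6] += g; st[7] += h;
}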
*/ - -VOID_RETURN sha256_hash(const unsigned char data[], unsigned long len, sha256_ctx ctx[1]) -{ uint_32t pos = (uint_32t)(ctx->count[0] & SHA256_MASK), - space = SHA256_BLOCK_SIZE - pos; - const unsigned char *sp = data; - - if((ctx->count[0] += len) < len) - ++(ctx->count[1]); +#ifndef NO_OPTIMIZED_VERSIONS - while(len >= space) /* tranfer whole blocks while possible */ - { - memcpy(((unsigned char*)ctx->wbuf) + pos, sp, space); - sp += space; len -= space; space = SHA256_BLOCK_SIZE; pos = 0; - bsw_32(ctx->wbuf, SHA256_BLOCK_SIZE >> 2) - sha256_compile(ctx); - } - - memcpy(((unsigned char*)ctx->wbuf) + pos, sp, len); +#if CRYPTOPP_BOOL_X64 +void Avx2Transform(sha512_ctx* ctx, void* mp, uint_64t num_blks) +{ + if (num_blks > 1) + sha512_rorx(mp, ctx->hash, num_blks); + else + sha512_sse4(mp, ctx->hash, num_blks); } -/* SHA256 Final padding and digest calculation */ - -static void sha_end1(unsigned char hval[], sha256_ctx ctx[1], const unsigned int hlen) -{ uint_32t i = (uint_32t)(ctx->count[0] & SHA256_MASK); - - /* put bytes in the buffer in an order in which references to */ - /* 32-bit words will put bytes with lower addresses into the */ - /* top of 32 bit words on BOTH big and little endian machines */ - bsw_32(ctx->wbuf, (i + 3) >> 2) - - /* we now need to mask valid bytes and add the padding which is */ - /* a single 1 bit and as many zero bits as necessary. Note that */ - /* we can always add the first padding byte here because the */ - /* buffer always has at least one empty slot */ - ctx->wbuf[i >> 2] &= 0xffffff80 << 8 * (~i & 3); - ctx->wbuf[i >> 2] |= 0x00000080 << 8 * (~i & 3); - - /* we need 9 or more empty positions, one for the padding byte */ - /* (above) and eight for the length count. If there is not */ - /* enough space pad and empty the buffer */ - if(i > SHA256_BLOCK_SIZE - 9) - { - if(i < 60) ctx->wbuf[15] = 0; - sha256_compile(ctx); - i = 0; - } - else /* compute a word index for the empty buffer positions */ - i = (i >> 2) + 1; - - while(i < 14) /* and zero pad all but last two positions */ - ctx->wbuf[i++] = 0; - - /* the following 32-bit length fields are assembled in the */ - /* wrong byte order on little endian machines but this is */ - /* corrected later since they are only ever used as 32-bit */ - /* word values. 
*/ - ctx->wbuf[14] = (ctx->count[1] << 3) | (ctx->count[0] >> 29); - ctx->wbuf[15] = ctx->count[0] << 3; - sha256_compile(ctx); - - /* extract the hash value as bytes in case the hash buffer is */ - /* mislaigned for 32-bit words */ - for(i = 0; i < hlen; ++i) - hval[i] = (unsigned char)(ctx->hash[i >> 2] >> (8 * (~i & 3))); +void AvxTransform(sha512_ctx* ctx, void* mp, uint_64t num_blks) +{ + if (num_blks > 1) + sha512_avx(mp, ctx->hash, num_blks); + else + sha512_sse4(mp, ctx->hash, num_blks); } +void SSE4Transform(sha512_ctx* ctx, void* mp, uint_64t num_blks) +{ + sha512_sse4(mp, ctx->hash, num_blks); +} #endif -#if defined(SHA_224) +#if CRYPTOPP_BOOL_X64 || ((CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32) && !defined (TC_MACOSX)) -const uint_32t i224[8] = +void SSE2Transform(sha512_ctx* ctx, void* mp, uint_64t num_blks) { - 0xc1059ed8ul, 0x367cd507ul, 0x3070dd17ul, 0xf70e5939ul, - 0xffc00b31ul, 0x68581511ul, 0x64f98fa7ul, 0xbefa4fa4ul -}; - -VOID_RETURN sha224_begin(sha224_ctx ctx[1]) -{ - ctx->count[0] = ctx->count[1] = 0; - memcpy(ctx->hash, i224, 8 * sizeof(uint_32t)); + uint_64t i; + for (i = 0; i < num_blks; i++) + sha512_compress_nayuki(ctx->hash, (uint_8t*)mp + i * 128); } -VOID_RETURN sha224_end(unsigned char hval[], sha224_ctx ctx[1]) -{ - sha_end1(hval, ctx, SHA224_DIGEST_SIZE); -} +#endif -VOID_RETURN sha224(unsigned char hval[], const unsigned char data[], unsigned long len) -{ sha224_ctx cx[1]; +#endif // NO_OPTIMIZED_VERSIONS - sha224_begin(cx); - sha224_hash(data, len, cx); - sha_end1(hval, cx, SHA224_DIGEST_SIZE); -} +void sha512_begin(sha512_ctx* ctx) +{ + ctx->hash[0] = LL(0x6a09e667f3bcc908); + ctx->hash[1] = LL(0xbb67ae8584caa73b); + ctx->hash[2] = LL(0x3c6ef372fe94f82b); + ctx->hash[3] = LL(0xa54ff53a5f1d36f1); + ctx->hash[4] = LL(0x510e527fade682d1); + ctx->hash[5] = LL(0x9b05688c2b3e6c1f); + ctx->hash[6] = LL(0x1f83d9abfb41bd6b); + ctx->hash[7] = LL(0x5be0cd19137e2179); + ctx->count[0] = 0; + ctx->count[1] = 0; + + if (!transfunc) + { +#ifndef NO_OPTIMIZED_VERSIONS +#if CRYPTOPP_BOOL_X64 + if (g_isIntel&& HasSAVX2() && HasSBMI2()) + transfunc = Avx2Transform; + else if (g_isIntel && HasSAVX()) + { + transfunc = AvxTransform; + } + else if (HasSSE41()) + { + transfunc = SSE4Transform; + } + else +#endif +#if CRYPTOPP_BOOL_X64 || ((CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32) && !defined (TC_MACOSX)) +#if CRYPTOPP_BOOL_X64 + if (HasSSE2()) +#else + if (HasSSSE3() && HasMMX()) +#endif + transfunc = SSE2Transform; + else #endif -#if defined(SHA_256) +#endif + transfunc = StdTransform; + } +} -const uint_32t i256[8] = +void sha512_end(unsigned char * result, sha512_ctx* ctx) { - 0x6a09e667ul, 0xbb67ae85ul, 0x3c6ef372ul, 0xa54ff53aul, - 0x510e527ful, 0x9b05688cul, 0x1f83d9abul, 0x5be0cd19ul -}; + int i; + uint_64t mlen, pos = ctx->count[0]; + uint_8t* m = (uint_8t*) ctx->wbuf; + m[pos++] = 0x80; + if (pos > 112) + { + memset(m + pos, 0, (size_t) (128 - pos)); + transfunc(ctx, m, 1); + pos = 0; + } + memset(m + pos, 0, (size_t) (128 - pos)); + mlen = bswap_64(ctx->count[1]); + memcpy(m + (128 - 8), &mlen, 64 / 8); + transfunc(ctx, m, 1); + for (i = 0; i < 8; i++) + { + ctx->hash[i] = bswap_64(ctx->hash[i]); + } + memcpy(result, ctx->hash, 64); +} -VOID_RETURN sha256_begin(sha256_ctx ctx[1]) +void sha512_hash(const unsigned char * data, uint_64t len, sha512_ctx *ctx) { - ctx->count[0] = ctx->count[1] = 0; - memcpy(ctx->hash, i256, 8 * sizeof(uint_32t)); + uint_64t pos = ctx->count[0]; + uint_64t total = ctx->count[1]; + uint_8t* m = (uint_8t*) ctx->wbuf; + if (pos && pos + len >= 
128) + { + memcpy(m + pos, data, (size_t) (128 - pos)); + transfunc(ctx, m, 1); + len -= 128 - pos; + total += (128 - pos) * 8; + data += 128 - pos; + pos = 0; + } + if (len >= 128) + { + uint_64t blocks = len / 128; + uint_64t bytes = blocks * 128; + transfunc(ctx, (void*)data, blocks); + len -= bytes; + total += (bytes)* 8; + data += bytes; + } + memcpy(m+pos, data, (size_t) (len)); + pos += len; + total += len * 8; + ctx->count[0] = pos; + ctx->count[1] = total; } -VOID_RETURN sha256_end(unsigned char hval[], sha256_ctx ctx[1]) +void sha512(unsigned char * result, const unsigned char* source, uint_64t sourceLen) { - sha_end1(hval, ctx, SHA256_DIGEST_SIZE); + sha512_ctx ctx; + + sha512_begin(&ctx); + sha512_hash(source, sourceLen, &ctx); + sha512_end(result, &ctx); } -VOID_RETURN sha256(unsigned char hval[], const unsigned char data[], unsigned long len) -{ sha256_ctx cx[1]; +///////////////////////////// - sha256_begin(cx); - sha256_hash(data, len, cx); - sha_end1(hval, cx, SHA256_DIGEST_SIZE); -} +#ifndef NO_OPTIMIZED_VERSIONS +#if defined(__cplusplus) +extern "C" +{ #endif -#if defined(SHA_384) || defined(SHA_512) +#if CRYPTOPP_BOOL_X64 + void sha256_sse4(void *input_data, uint_32t digest[8], uint_64t num_blks); + void sha256_rorx(void *input_data, uint_32t digest[8], uint_64t num_blks); + void sha256_avx(void *input_data, uint_32t digest[8], uint_64t num_blks); +#if CRYPTOPP_SHANI_AVAILABLE + void sha256_intel(void *input_data, uint_32t digest[8], uint_64t num_blks); +#endif +#endif -#define SHA512_MASK (SHA512_BLOCK_SIZE - 1) +#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 + void VC_CDECL sha256_compress_nayuki(uint_32t state[8], const uint_8t block[64]); +#endif -#if defined(SWAP_BYTES) -#define bsw_64(p,n) \ - { int _i = (n); while(_i--) ((uint_64t*)p)[_i] = bswap_64(((uint_64t*)p)[_i]); } -#else -#define bsw_64(p,n) +#if defined(__cplusplus) +} #endif -/* SHA512 mixing function definitions */ +#endif -#ifdef s_0 -# undef s_0 -# undef s_1 -# undef g_0 -# undef g_1 -# undef k_0 +CRYPTOPP_ALIGN_DATA(16) static const uint_32t SHA256_K[64] CRYPTOPP_SECTION_ALIGN16 = { + 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, + 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, + 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, + 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, + 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, + 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, + 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 + }; + +#if (defined(CRYPTOPP_X86_ASM_AVAILABLE) || defined(CRYPTOPP_X32_ASM_AVAILABLE)) + +#ifdef _MSC_VER +# pragma warning(disable: 4100 4731) #endif -#define s_0(x) (rotr64((x), 28) ^ rotr64((x), 34) ^ rotr64((x), 39)) -#define s_1(x) (rotr64((x), 14) ^ rotr64((x), 18) ^ rotr64((x), 41)) -#define g_0(x) (rotr64((x), 1) ^ rotr64((x), 8) ^ ((x) >> 7)) -#define g_1(x) (rotr64((x), 19) ^ rotr64((x), 61) ^ ((x) >> 6)) -#define k_0 k512 +static void CRYPTOPP_FASTCALL X86_SHA256_HashBlocks(uint_32t *state, const uint_32t *data, size_t len) +{ + #define LOCALS_SIZE 8*4 + 16*4 + 4*WORD_SZ + #define H(i) [BASE+ASM_MOD(1024+7-(i),8)*4] + #define G(i) H(i+1) + 
#define F(i) H(i+2) + #define E(i) H(i+3) + #define D(i) H(i+4) + #define C(i) H(i+5) + #define B(i) H(i+6) + #define A(i) H(i+7) + #define Wt(i) BASE+8*4+ASM_MOD(1024+15-(i),16)*4 + #define Wt_2(i) Wt((i)-2) + #define Wt_15(i) Wt((i)-15) + #define Wt_7(i) Wt((i)-7) + #define K_END [BASE+8*4+16*4+0*WORD_SZ] + #define STATE_SAVE [BASE+8*4+16*4+1*WORD_SZ] + #define DATA_SAVE [BASE+8*4+16*4+2*WORD_SZ] + #define DATA_END [BASE+8*4+16*4+3*WORD_SZ] + #define Kt(i) WORD_REG(si)+(i)*4 +#if CRYPTOPP_BOOL_X32 + #define BASE esp+8 +#elif CRYPTOPP_BOOL_X86 + #define BASE esp+4 +#elif defined(__GNUC__) + #define BASE r8 +#else + #define BASE rsp +#endif -/* SHA384/SHA512 mixing data */ +#define RA0(i, edx, edi) \ + AS2( add edx, [Kt(i)] )\ + AS2( add edx, [Wt(i)] )\ + AS2( add edx, H(i) )\ + +#define RA1(i, edx, edi) + +#define RB0(i, edx, edi) + +#define RB1(i, edx, edi) \ + AS2( mov AS_REG_7d, [Wt_2(i)] )\ + AS2( mov edi, [Wt_15(i)])\ + AS2( mov ebx, AS_REG_7d )\ + AS2( shr AS_REG_7d, 10 )\ + AS2( ror ebx, 17 )\ + AS2( xor AS_REG_7d, ebx )\ + AS2( ror ebx, 2 )\ + AS2( xor ebx, AS_REG_7d )/* s1(W_t-2) */\ + AS2( add ebx, [Wt_7(i)])\ + AS2( mov AS_REG_7d, edi )\ + AS2( shr AS_REG_7d, 3 )\ + AS2( ror edi, 7 )\ + AS2( add ebx, [Wt(i)])/* s1(W_t-2) + W_t-7 + W_t-16 */\ + AS2( xor AS_REG_7d, edi )\ + AS2( add edx, [Kt(i)])\ + AS2( ror edi, 11 )\ + AS2( add edx, H(i) )\ + AS2( xor AS_REG_7d, edi )/* s0(W_t-15) */\ + AS2( add AS_REG_7d, ebx )/* W_t = s1(W_t-2) + W_t-7 + s0(W_t-15) W_t-16*/\ + AS2( mov [Wt(i)], AS_REG_7d)\ + AS2( add edx, AS_REG_7d )\ + +#define ROUND(i, r, eax, ecx, edi, edx)\ + /* in: edi = E */\ + /* unused: eax, ecx, temp: ebx, AS_REG_7d, out: edx = T1 */\ + AS2( mov edx, F(i) )\ + AS2( xor edx, G(i) )\ + AS2( and edx, edi )\ + AS2( xor edx, G(i) )/* Ch(E,F,G) = (G^(E&(F^G))) */\ + AS2( mov AS_REG_7d, edi )\ + AS2( ror edi, 6 )\ + AS2( ror AS_REG_7d, 25 )\ + RA##r(i, edx, edi )/* H + Wt + Kt + Ch(E,F,G) */\ + AS2( xor AS_REG_7d, edi )\ + AS2( ror edi, 5 )\ + AS2( xor AS_REG_7d, edi )/* S1(E) */\ + AS2( add edx, AS_REG_7d )/* T1 = S1(E) + Ch(E,F,G) + H + Wt + Kt */\ + RB##r(i, edx, edi )/* H + Wt + Kt + Ch(E,F,G) */\ + /* in: ecx = A, eax = B^C, edx = T1 */\ + /* unused: edx, temp: ebx, AS_REG_7d, out: eax = A, ecx = B^C, edx = E */\ + AS2( mov ebx, ecx )\ + AS2( xor ecx, B(i) )/* A^B */\ + AS2( and eax, ecx )\ + AS2( xor eax, B(i) )/* Maj(A,B,C) = B^((A^B)&(B^C) */\ + AS2( mov AS_REG_7d, ebx )\ + AS2( ror ebx, 2 )\ + AS2( add eax, edx )/* T1 + Maj(A,B,C) */\ + AS2( add edx, D(i) )\ + AS2( mov D(i), edx )\ + AS2( ror AS_REG_7d, 22 )\ + AS2( xor AS_REG_7d, ebx )\ + AS2( ror ebx, 11 )\ + AS2( xor AS_REG_7d, ebx )\ + AS2( add eax, AS_REG_7d )/* T1 + S0(A) + Maj(A,B,C) */\ + AS2( mov H(i), eax )\ + +// Unroll the use of CRYPTOPP_BOOL_X64 in assembler math. 
The GAS assembler on X32 (version 2.25) +// complains "Error: invalid operands (*ABS* and *UND* sections) for `*` and `-`" +#if CRYPTOPP_BOOL_X64 +#define SWAP_COPY(i) \ + AS2( mov WORD_REG(bx), [WORD_REG(dx)+i*WORD_SZ])\ + AS1( bswap WORD_REG(bx))\ + AS2( mov [Wt(i*2+1)], WORD_REG(bx)) +#else // X86 and X32 +#define SWAP_COPY(i) \ + AS2( mov WORD_REG(bx), [WORD_REG(dx)+i*WORD_SZ])\ + AS1( bswap WORD_REG(bx))\ + AS2( mov [Wt(i)], WORD_REG(bx)) +#endif -const uint_64t k512[80] = -{ - li_64(428a2f98d728ae22), li_64(7137449123ef65cd), - li_64(b5c0fbcfec4d3b2f), li_64(e9b5dba58189dbbc), - li_64(3956c25bf348b538), li_64(59f111f1b605d019), - li_64(923f82a4af194f9b), li_64(ab1c5ed5da6d8118), - li_64(d807aa98a3030242), li_64(12835b0145706fbe), - li_64(243185be4ee4b28c), li_64(550c7dc3d5ffb4e2), - li_64(72be5d74f27b896f), li_64(80deb1fe3b1696b1), - li_64(9bdc06a725c71235), li_64(c19bf174cf692694), - li_64(e49b69c19ef14ad2), li_64(efbe4786384f25e3), - li_64(0fc19dc68b8cd5b5), li_64(240ca1cc77ac9c65), - li_64(2de92c6f592b0275), li_64(4a7484aa6ea6e483), - li_64(5cb0a9dcbd41fbd4), li_64(76f988da831153b5), - li_64(983e5152ee66dfab), li_64(a831c66d2db43210), - li_64(b00327c898fb213f), li_64(bf597fc7beef0ee4), - li_64(c6e00bf33da88fc2), li_64(d5a79147930aa725), - li_64(06ca6351e003826f), li_64(142929670a0e6e70), - li_64(27b70a8546d22ffc), li_64(2e1b21385c26c926), - li_64(4d2c6dfc5ac42aed), li_64(53380d139d95b3df), - li_64(650a73548baf63de), li_64(766a0abb3c77b2a8), - li_64(81c2c92e47edaee6), li_64(92722c851482353b), - li_64(a2bfe8a14cf10364), li_64(a81a664bbc423001), - li_64(c24b8b70d0f89791), li_64(c76c51a30654be30), - li_64(d192e819d6ef5218), li_64(d69906245565a910), - li_64(f40e35855771202a), li_64(106aa07032bbd1b8), - li_64(19a4c116b8d2d0c8), li_64(1e376c085141ab53), - li_64(2748774cdf8eeb99), li_64(34b0bcb5e19b48a8), - li_64(391c0cb3c5c95a63), li_64(4ed8aa4ae3418acb), - li_64(5b9cca4f7763e373), li_64(682e6ff3d6b2b8a3), - li_64(748f82ee5defb2fc), li_64(78a5636f43172f60), - li_64(84c87814a1f0ab72), li_64(8cc702081a6439ec), - li_64(90befffa23631e28), li_64(a4506cebde82bde9), - li_64(bef9a3f7b2c67915), li_64(c67178f2e372532b), - li_64(ca273eceea26619c), li_64(d186b8c721c0c207), - li_64(eada7dd6cde0eb1e), li_64(f57d4f7fee6ed178), - li_64(06f067aa72176fba), li_64(0a637dc5a2c898a6), - li_64(113f9804bef90dae), li_64(1b710b35131c471b), - li_64(28db77f523047d84), li_64(32caab7b40c72493), - li_64(3c9ebe0a15c9bebc), li_64(431d67c49c100d4c), - li_64(4cc5d4becb3e42b6), li_64(597f299cfc657e2a), - li_64(5fcb6fab3ad6faec), li_64(6c44198c4a475817) -}; +#if defined(__GNUC__) + #if CRYPTOPP_BOOL_X64 + CRYPTOPP_ALIGN_DATA(16) uint8 workspace[LOCALS_SIZE] ; + #endif + __asm__ __volatile__ + ( + #if CRYPTOPP_BOOL_X64 + "lea %4, %%r8;" + #endif + INTEL_NOPREFIX +#endif -/* Compile 128 bytes of hash data into SHA384/512 digest */ -/* NOTE: this routine assumes that the byte order in the */ -/* ctx->wbuf[] at this point is such that low address bytes */ -/* in the ORIGINAL byte stream will go into the high end of */ -/* words on BOTH big and little endian systems */ - -VOID_RETURN sha512_compile(sha512_ctx ctx[1]) -{ uint_64t v[8], *p = ctx->wbuf; - uint_32t j; - - memcpy(v, ctx->hash, 8 * sizeof(uint_64t)); - - for(j = 0; j < 80; j += 16) - { - v_cycle( 0, j); v_cycle( 1, j); - v_cycle( 2, j); v_cycle( 3, j); - v_cycle( 4, j); v_cycle( 5, j); - v_cycle( 6, j); v_cycle( 7, j); - v_cycle( 8, j); v_cycle( 9, j); - v_cycle(10, j); v_cycle(11, j); - v_cycle(12, j); v_cycle(13, j); - v_cycle(14, j); v_cycle(15, j); - } - - 
ctx->hash[0] += v[0]; ctx->hash[1] += v[1]; - ctx->hash[2] += v[2]; ctx->hash[3] += v[3]; - ctx->hash[4] += v[4]; ctx->hash[5] += v[5]; - ctx->hash[6] += v[6]; ctx->hash[7] += v[7]; -} +#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 + #ifndef __GNUC__ + AS2( mov edi, [len]) + AS2( lea WORD_REG(si), [SHA256_K+48*4]) + #endif + #if !defined(_MSC_VER) || (_MSC_VER < 1400) + AS_PUSH_IF86(bx) + #endif + + AS_PUSH_IF86(bp) + AS2( mov ebx, esp) + AS2( and esp, -16) + AS2( sub WORD_REG(sp), LOCALS_SIZE) + AS_PUSH_IF86(bx) +#endif + AS2( mov STATE_SAVE, WORD_REG(cx)) + AS2( mov DATA_SAVE, WORD_REG(dx)) + AS2( lea WORD_REG(ax), [WORD_REG(di) + WORD_REG(dx)]) + AS2( mov DATA_END, WORD_REG(ax)) + AS2( mov K_END, WORD_REG(si)) + +#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE +#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 + AS2( test edi, 1) + ASJ( jnz, 2, f) + AS1( dec DWORD PTR K_END) +#endif + AS2( movdqu xmm0, XMMWORD_PTR [WORD_REG(cx)+0*16]) + AS2( movdqu xmm1, XMMWORD_PTR [WORD_REG(cx)+1*16]) +#endif -/* Compile 128 bytes of hash data into SHA256 digest value */ -/* NOTE: this routine assumes that the byte order in the */ -/* ctx->wbuf[] at this point is in such an order that low */ -/* address bytes in the ORIGINAL byte stream placed in this */ -/* buffer will now go to the high end of words on BOTH big */ -/* and little endian systems */ - -VOID_RETURN sha512_hash(const unsigned char data[], unsigned long len, sha512_ctx ctx[1]) -{ uint_32t pos = (uint_32t)(ctx->count[0] & SHA512_MASK), - space = SHA512_BLOCK_SIZE - pos; - const unsigned char *sp = data; - - if((ctx->count[0] += len) < len) - ++(ctx->count[1]); - - while(len >= space) /* tranfer whole blocks while possible */ - { - memcpy(((unsigned char*)ctx->wbuf) + pos, sp, space); - sp += space; len -= space; space = SHA512_BLOCK_SIZE; pos = 0; - bsw_64(ctx->wbuf, SHA512_BLOCK_SIZE >> 3); - sha512_compile(ctx); - } - - memcpy(((unsigned char*)ctx->wbuf) + pos, sp, len); -} +#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 +#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE + ASJ( jmp, 0, f) +#endif + ASL(2) // non-SSE2 + AS2( mov esi, ecx) + AS2( lea edi, A(0)) + AS2( mov ecx, 8) +ATT_NOPREFIX + AS1( rep movsd) +INTEL_NOPREFIX + AS2( mov esi, K_END) + ASJ( jmp, 3, f) +#endif -/* SHA384/512 Final padding and digest calculation */ - -static void sha_end2(unsigned char hval[], sha512_ctx ctx[1], const unsigned int hlen) -{ uint_32t i = (uint_32t)(ctx->count[0] & SHA512_MASK); - - /* put bytes in the buffer in an order in which references to */ - /* 32-bit words will put bytes with lower addresses into the */ - /* top of 32 bit words on BOTH big and little endian machines */ - bsw_64(ctx->wbuf, (i + 7) >> 3); - - /* we now need to mask valid bytes and add the padding which is */ - /* a single 1 bit and as many zero bits as necessary. Note that */ - /* we can always add the first padding byte here because the */ - /* buffer always has at least one empty slot */ - ctx->wbuf[i >> 3] &= li_64(ffffffffffffff00) << 8 * (~i & 7); - ctx->wbuf[i >> 3] |= li_64(0000000000000080) << 8 * (~i & 7); - - /* we need 17 or more empty byte positions, one for the padding */ - /* byte (above) and sixteen for the length count. 
If there is */ - /* not enough space pad and empty the buffer */ - if(i > SHA512_BLOCK_SIZE - 17) - { - if(i < 120) ctx->wbuf[15] = 0; - sha512_compile(ctx); - i = 0; - } - else - i = (i >> 3) + 1; - - while(i < 14) - ctx->wbuf[i++] = 0; - - /* the following 64-bit length fields are assembled in the */ - /* wrong byte order on little endian machines but this is */ - /* corrected later since they are only ever used as 64-bit */ - /* word values. */ - ctx->wbuf[14] = (ctx->count[1] << 3) | (ctx->count[0] >> 61); - ctx->wbuf[15] = ctx->count[0] << 3; - sha512_compile(ctx); - - /* extract the hash value as bytes in case the hash buffer is */ - /* misaligned for 32-bit words */ - for(i = 0; i < hlen; ++i) - hval[i] = (unsigned char)(ctx->hash[i >> 3] >> (8 * (~i & 7))); -} +#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE + ASL(0) + AS2( movdqu E(0), xmm1) + AS2( movdqu A(0), xmm0) +#endif +#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 + ASL(3) +#endif + AS2( sub WORD_REG(si), 48*4) + SWAP_COPY(0) SWAP_COPY(1) SWAP_COPY(2) SWAP_COPY(3) + SWAP_COPY(4) SWAP_COPY(5) SWAP_COPY(6) SWAP_COPY(7) +#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 + SWAP_COPY(8) SWAP_COPY(9) SWAP_COPY(10) SWAP_COPY(11) + SWAP_COPY(12) SWAP_COPY(13) SWAP_COPY(14) SWAP_COPY(15) +#endif + AS2( mov edi, E(0)) // E + AS2( mov eax, B(0)) // B + AS2( xor eax, C(0)) // B^C + AS2( mov ecx, A(0)) // A + + ROUND(0, 0, eax, ecx, edi, edx) + ROUND(1, 0, ecx, eax, edx, edi) + ROUND(2, 0, eax, ecx, edi, edx) + ROUND(3, 0, ecx, eax, edx, edi) + ROUND(4, 0, eax, ecx, edi, edx) + ROUND(5, 0, ecx, eax, edx, edi) + ROUND(6, 0, eax, ecx, edi, edx) + ROUND(7, 0, ecx, eax, edx, edi) + ROUND(8, 0, eax, ecx, edi, edx) + ROUND(9, 0, ecx, eax, edx, edi) + ROUND(10, 0, eax, ecx, edi, edx) + ROUND(11, 0, ecx, eax, edx, edi) + ROUND(12, 0, eax, ecx, edi, edx) + ROUND(13, 0, ecx, eax, edx, edi) + ROUND(14, 0, eax, ecx, edi, edx) + ROUND(15, 0, ecx, eax, edx, edi) + + ASL(1) + AS2(add WORD_REG(si), 4*16) + ROUND(0, 1, eax, ecx, edi, edx) + ROUND(1, 1, ecx, eax, edx, edi) + ROUND(2, 1, eax, ecx, edi, edx) + ROUND(3, 1, ecx, eax, edx, edi) + ROUND(4, 1, eax, ecx, edi, edx) + ROUND(5, 1, ecx, eax, edx, edi) + ROUND(6, 1, eax, ecx, edi, edx) + ROUND(7, 1, ecx, eax, edx, edi) + ROUND(8, 1, eax, ecx, edi, edx) + ROUND(9, 1, ecx, eax, edx, edi) + ROUND(10, 1, eax, ecx, edi, edx) + ROUND(11, 1, ecx, eax, edx, edi) + ROUND(12, 1, eax, ecx, edi, edx) + ROUND(13, 1, ecx, eax, edx, edi) + ROUND(14, 1, eax, ecx, edi, edx) + ROUND(15, 1, ecx, eax, edx, edi) + AS2( cmp WORD_REG(si), K_END) + ATT_NOPREFIX + ASJ( jb, 1, b) + INTEL_NOPREFIX + + AS2( mov WORD_REG(dx), DATA_SAVE) + AS2( add WORD_REG(dx), 64) + AS2( mov AS_REG_7, STATE_SAVE) + AS2( mov DATA_SAVE, WORD_REG(dx)) + +#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE +#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 + AS2( test DWORD PTR K_END, 1) + ASJ( jz, 4, f) +#endif + AS2( movdqu xmm1, XMMWORD_PTR [AS_REG_7+1*16]) + AS2( movdqu xmm0, XMMWORD_PTR [AS_REG_7+0*16]) + AS2( paddd xmm1, E(0)) + AS2( paddd xmm0, A(0)) + AS2( movdqu [AS_REG_7+1*16], xmm1) + AS2( movdqu [AS_REG_7+0*16], xmm0) + AS2( cmp WORD_REG(dx), DATA_END) + ATT_NOPREFIX + ASJ( jb, 0, b) + INTEL_NOPREFIX +#endif +#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 +#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE + ASJ( jmp, 5, f) + ASL(4) // non-SSE2 +#endif + AS2( add [AS_REG_7+0*4], ecx) // A + AS2( add [AS_REG_7+4*4], edi) // E + AS2( mov eax, B(0)) + AS2( mov ebx, C(0)) + AS2( mov ecx, D(0)) + AS2( add [AS_REG_7+1*4], eax) + AS2( add [AS_REG_7+2*4], ebx) + AS2( add [AS_REG_7+3*4], ecx) + AS2( mov 
eax, F(0)) + AS2( mov ebx, G(0)) + AS2( mov ecx, H(0)) + AS2( add [AS_REG_7+5*4], eax) + AS2( add [AS_REG_7+6*4], ebx) + AS2( add [AS_REG_7+7*4], ecx) + AS2( mov ecx, AS_REG_7d) + AS2( cmp WORD_REG(dx), DATA_END) + ASJ( jb, 2, b) +#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE + ASL(5) +#endif #endif -#if defined(SHA_384) + AS_POP_IF86(sp) + AS_POP_IF86(bp) + #if !defined(_MSC_VER) || (_MSC_VER < 1400) + AS_POP_IF86(bx) + #endif + +#ifdef __GNUC__ + ATT_PREFIX + : + : "c" (state), "d" (data), "S" (SHA256_K+48), "D" (len) + #if CRYPTOPP_BOOL_X64 + , "m" (workspace[0]) + #endif + : "memory", "cc", "%eax" + #if CRYPTOPP_BOOL_X64 + , "%rbx", "%r8", "%r10" + #endif + ); +#endif +} -/* SHA384 initialisation data */ +#endif // (defined(CRYPTOPP_X86_ASM_AVAILABLE)) -const uint_64t i384[80] = -{ - li_64(cbbb9d5dc1059ed8), li_64(629a292a367cd507), - li_64(9159015a3070dd17), li_64(152fecd8f70e5939), - li_64(67332667ffc00b31), li_64(8eb44a8768581511), - li_64(db0c2e0d64f98fa7), li_64(47b5481dbefa4fa4) -}; +#undef sum0 +#undef sum1 +#undef sigma0 +#undef sigma1 -VOID_RETURN sha384_begin(sha384_ctx ctx[1]) -{ - ctx->count[0] = ctx->count[1] = 0; - memcpy(ctx->hash, i384, 8 * sizeof(uint_64t)); -} +#define sum0(x) (rotr32((x), 2) ^ rotr32((x), 13) ^ rotr32((x), 22)) +#define sum1(x) (rotr32((x), 6) ^ rotr32((x), 11) ^ rotr32((x), 25)) +#define sigma0(x) (rotr32((x), 7) ^ rotr32((x), 18) ^ ((x) >> 3)) +#define sigma1(x) (rotr32((x), 17) ^ rotr32((x), 19) ^ ((x) >> 10)) -VOID_RETURN sha384_end(unsigned char hval[], sha384_ctx ctx[1]) -{ - sha_end2(hval, ctx, SHA384_DIGEST_SIZE); -} -VOID_RETURN sha384(unsigned char hval[], const unsigned char data[], unsigned long len) -{ sha384_ctx cx[1]; +typedef void (*sha256transformFn)(sha256_ctx* ctx, void* m, uint_64t num_blks); - sha384_begin(cx); - sha384_hash(data, len, cx); - sha_end2(hval, cx, SHA384_DIGEST_SIZE); -} +sha256transformFn sha256transfunc = NULL; +void StdSha256Transform(sha256_ctx* ctx, void* mp, uint_64t num_blks) +{ + uint_64t blk; + for (blk = 0; blk < num_blks; blk++) + { + uint_32t W[16]; + uint_32t a,b,c,d,e,f,g,h; + uint_32t T1, T2; + int i; +#if defined (TC_WINDOWS_DRIVER) && defined (DEBUG) + int j; #endif -#if defined(SHA_512) + for (i = 0; i < 64 / 4; i++) + { + W[i] = bswap_32((((const uint_32t*)(mp))[blk * 16 + i])); + } + + a = ctx->hash[0]; + b = ctx->hash[1]; + c = ctx->hash[2]; + d = ctx->hash[3]; + e = ctx->hash[4]; + f = ctx->hash[5]; + g = ctx->hash[6]; + h = ctx->hash[7]; + + for (i = 0; i <= 63; i+=16) + { +#if defined (TC_WINDOWS_DRIVER) && defined (DEBUG) + for (j = 0; j < 16; j++) + { + COMPRESS_ROUND(i, j, SHA256_K); + } +#else + COMPRESS_ROUND(i, 0, SHA256_K); + COMPRESS_ROUND(i, 1, SHA256_K); + COMPRESS_ROUND(i , 2, SHA256_K); + COMPRESS_ROUND(i, 3, SHA256_K); + COMPRESS_ROUND(i, 4, SHA256_K); + COMPRESS_ROUND(i, 5, SHA256_K); + COMPRESS_ROUND(i, 6, SHA256_K); + COMPRESS_ROUND(i, 7, SHA256_K); + COMPRESS_ROUND(i, 8, SHA256_K); + COMPRESS_ROUND(i, 9, SHA256_K); + COMPRESS_ROUND(i, 10, SHA256_K); + COMPRESS_ROUND(i, 11, SHA256_K); + COMPRESS_ROUND(i, 12, SHA256_K); + COMPRESS_ROUND(i, 13, SHA256_K); + COMPRESS_ROUND(i, 14, SHA256_K); + COMPRESS_ROUND(i, 15, SHA256_K); +#endif + } + ctx->hash[0] += a; + ctx->hash[1] += b; + ctx->hash[2] += c; + ctx->hash[3] += d; + ctx->hash[4] += e; + ctx->hash[5] += f; + ctx->hash[6] += g; + ctx->hash[7] += h; + } +} -/* SHA512 initialisation data */ +#ifndef NO_OPTIMIZED_VERSIONS -const uint_64t i512[80] = +#if CRYPTOPP_BOOL_X64 +#if CRYPTOPP_SHANI_AVAILABLE +void IntelSha256Transform(sha256_ctx* 
ctx, void* mp, uint_64t num_blks) { - li_64(6a09e667f3bcc908), li_64(bb67ae8584caa73b), - li_64(3c6ef372fe94f82b), li_64(a54ff53a5f1d36f1), - li_64(510e527fade682d1), li_64(9b05688c2b3e6c1f), - li_64(1f83d9abfb41bd6b), li_64(5be0cd19137e2179) -}; + sha256_intel(mp, ctx->hash, num_blks); +} +#endif -VOID_RETURN sha512_begin(sha512_ctx ctx[1]) +void Avx2Sha256Transform(sha256_ctx* ctx, void* mp, uint_64t num_blks) { - ctx->count[0] = ctx->count[1] = 0; - memcpy(ctx->hash, i512, 8 * sizeof(uint_64t)); + if (num_blks > 1) + sha256_rorx(mp, ctx->hash, num_blks); + else + sha256_sse4(mp, ctx->hash, num_blks); } -VOID_RETURN sha512_end(unsigned char hval[], sha512_ctx ctx[1]) +void AvxSha256Transform(sha256_ctx* ctx, void* mp, uint_64t num_blks) { - sha_end2(hval, ctx, SHA512_DIGEST_SIZE); + if (num_blks > 1) + sha256_avx(mp, ctx->hash, num_blks); + else + sha256_sse4(mp, ctx->hash, num_blks); } -VOID_RETURN sha512(unsigned char hval[], const unsigned char data[], unsigned long len) -{ sha512_ctx cx[1]; - - sha512_begin(cx); - sha512_hash(data, len, cx); - sha_end2(hval, cx, SHA512_DIGEST_SIZE); +void SSE4Sha256Transform(sha256_ctx* ctx, void* mp, uint_64t num_blks) +{ + sha256_sse4(mp, ctx->hash, num_blks); } #endif -#if defined(SHA_2) - -#define CTX_224(x) ((x)->uu->ctx256) -#define CTX_256(x) ((x)->uu->ctx256) -#define CTX_384(x) ((x)->uu->ctx512) -#define CTX_512(x) ((x)->uu->ctx512) - -/* SHA2 initialisation */ - -INT_RETURN sha2_begin(unsigned long len, sha2_ctx ctx[1]) +#if (defined(CRYPTOPP_X86_ASM_AVAILABLE) || defined(CRYPTOPP_X32_ASM_AVAILABLE)) +void SSE2Sha256Transform(sha256_ctx* ctx, void* mp, uint_64t num_blks) { - switch(len) - { -#if defined(SHA_224) - case 224: - case 28: CTX_256(ctx)->count[0] = CTX_256(ctx)->count[1] = 0; - memcpy(CTX_256(ctx)->hash, i224, 32); - ctx->sha2_len = 28; return EXIT_SUCCESS; -#endif -#if defined(SHA_256) - case 256: - case 32: CTX_256(ctx)->count[0] = CTX_256(ctx)->count[1] = 0; - memcpy(CTX_256(ctx)->hash, i256, 32); - ctx->sha2_len = 32; return EXIT_SUCCESS; -#endif -#if defined(SHA_384) - case 384: - case 48: CTX_384(ctx)->count[0] = CTX_384(ctx)->count[1] = 0; - memcpy(CTX_384(ctx)->hash, i384, 64); - ctx->sha2_len = 48; return EXIT_SUCCESS; -#endif -#if defined(SHA_512) - case 512: - case 64: CTX_512(ctx)->count[0] = CTX_512(ctx)->count[1] = 0; - memcpy(CTX_512(ctx)->hash, i512, 64); - ctx->sha2_len = 64; return EXIT_SUCCESS; -#endif - default: return EXIT_FAILURE; - } + X86_SHA256_HashBlocks(ctx->hash, (const uint_32t*)mp, (size_t)(num_blks * 64)); } +#endif -VOID_RETURN sha2_hash(const unsigned char data[], unsigned long len, sha2_ctx ctx[1]) +#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 +void Sha256AsmTransform(sha256_ctx* ctx, void* mp, uint_64t num_blks) { - switch(ctx->sha2_len) - { -#if defined(SHA_224) - case 28: sha224_hash(data, len, CTX_224(ctx)); return; -#endif -#if defined(SHA_256) - case 32: sha256_hash(data, len, CTX_256(ctx)); return; -#endif -#if defined(SHA_384) - case 48: sha384_hash(data, len, CTX_384(ctx)); return; + uint_64t i; + for (i = 0; i < num_blks; i++) + sha256_compress_nayuki(ctx->hash, (uint_8t*)mp + i * 64); +} #endif -#if defined(SHA_512) - case 64: sha512_hash(data, len, CTX_512(ctx)); return; + #endif - } -} -VOID_RETURN sha2_end(unsigned char hval[], sha2_ctx ctx[1]) +void sha256_begin(sha256_ctx* ctx) { - switch(ctx->sha2_len) - { -#if defined(SHA_224) - case 28: sha_end1(hval, CTX_224(ctx), SHA224_DIGEST_SIZE); return; + ctx->hash[0] = 0x6a09e667; + ctx->hash[1] = 0xbb67ae85; + ctx->hash[2] = 0x3c6ef372; 
+ ctx->hash[3] = 0xa54ff53a; + ctx->hash[4] = 0x510e527f; + ctx->hash[5] = 0x9b05688c; + ctx->hash[6] = 0x1f83d9ab; + ctx->hash[7] = 0x5be0cd19; + ctx->count[0] = 0; + ctx->count[1] = 0; + + if (!sha256transfunc) + { +#ifndef NO_OPTIMIZED_VERSIONS +#if CRYPTOPP_BOOL_X64 +#if CRYPTOPP_SHANI_AVAILABLE + if (HasSHA256()) + sha256transfunc = IntelSha256Transform; + else #endif -#if defined(SHA_256) - case 32: sha_end1(hval, CTX_256(ctx), SHA256_DIGEST_SIZE); return; + if (g_isIntel && HasSAVX2() && HasSBMI2()) + sha256transfunc = Avx2Sha256Transform; + else if (g_isIntel && HasSAVX()) + sha256transfunc = AvxSha256Transform; + else if (HasSSE41()) + sha256transfunc = SSE4Sha256Transform; + else #endif -#if defined(SHA_384) - case 48: sha_end2(hval, CTX_384(ctx), SHA384_DIGEST_SIZE); return; + +#if (defined(CRYPTOPP_X86_ASM_AVAILABLE) || defined(CRYPTOPP_X32_ASM_AVAILABLE)) + if (HasSSE2 ()) + sha256transfunc = SSE2Sha256Transform; + else #endif -#if defined(SHA_512) - case 64: sha_end2(hval, CTX_512(ctx), SHA512_DIGEST_SIZE); return; + +#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 + sha256transfunc = Sha256AsmTransform; +#else + sha256transfunc = StdSha256Transform; #endif - } +#else + sha256transfunc = StdSha256Transform; +#endif + } } -INT_RETURN sha2(unsigned char hval[], unsigned long size, - const unsigned char data[], unsigned long len) -{ sha2_ctx cx[1]; +void sha256_end(unsigned char * result, sha256_ctx* ctx) +{ + int i; + uint_64t mlen, pos = ctx->count[0]; + uint_8t* m = (uint_8t*) ctx->wbuf; + m[pos++] = 0x80; + if (pos > 56) + { + memset(m + pos, 0, (size_t) (64 - pos)); + sha256transfunc(ctx, m, 1); + pos = 0; + } + memset(m + pos, 0, (size_t) (56 - pos)); + mlen = bswap_64((uint_64t) ctx->count[1]); + memcpy(m + (64 - 8), &mlen, 64 / 8); + sha256transfunc(ctx, m, 1); + for (i = 0; i < 8; i++) + { + ctx->hash[i] = bswap_32(ctx->hash[i]); + } + memcpy(result, ctx->hash, 32); +} - if(sha2_begin(size, cx) == EXIT_SUCCESS) - { - sha2_hash(data, len, cx); sha2_end(hval, cx); return EXIT_SUCCESS; - } - else - return EXIT_FAILURE; +void sha256_hash(const unsigned char * data, uint_32t len, sha256_ctx *ctx) +{ + uint_32t pos = ctx->count[0]; + uint_32t total = ctx->count[1]; + uint_8t* m = (uint_8t*) ctx->wbuf; + if (pos && pos + len >= 64) + { + memcpy(m + pos, data, 64 - pos); + sha256transfunc(ctx, m, 1); + len -= 64 - pos; + total += (64 - pos) * 8; + data += 64 - pos; + pos = 0; + } + if (len >= 64) + { + uint_32t blocks = len / 64; + uint_32t bytes = blocks * 64; + sha256transfunc(ctx, (void*)data, blocks); + len -= bytes; + total += (bytes)* 8; + data += bytes; + } + memcpy(m+pos, data, len); + pos += len; + total += len * 8; + ctx->count[0] = pos; + ctx->count[1] = total; } -#endif +void sha256(unsigned char * result, const unsigned char* source, uint_32t sourceLen) +{ + sha256_ctx ctx; -#if defined(__cplusplus) + sha256_begin(&ctx); + sha256_hash(source, sourceLen, &ctx); + sha256_end(result, &ctx); } -#endif diff --git a/src/Crypto/Sha2.h b/src/Crypto/Sha2.h index 6d0aeb0f..1fbcb8d1 100644 --- a/src/Crypto/Sha2.h +++ b/src/Crypto/Sha2.h @@ -1,155 +1,72 @@ /* - --------------------------------------------------------------------------- - Copyright (c) 2002, Dr Brian Gladman, Worcester, UK. All rights reserved. - - LICENSE TERMS - - The free distribution and use of this software is allowed (with or without - changes) provided that: - - 1. source code distributions include the above copyright notice, this - list of conditions and the following disclaimer; - - 2. 
binary distributions include the above copyright notice, this list - of conditions and the following disclaimer in their documentation; - - 3. the name of the copyright holder is not used to endorse products - built using this software without specific written permission. - - DISCLAIMER - - This software is provided 'as is' with no explicit or implied warranties - in respect of its properties, including, but not limited to, correctness - and/or fitness for purpose. - --------------------------------------------------------------------------- - Issue Date: 01/08/2005 -*/ + * Copyright (c) 2013-2017 IDRIX + * Governed by the Apache License 2.0 the full text of which is contained + * in the file License.txt included in VeraCrypt binary and source + * code distribution packages. + */ #ifndef _SHA2_H #define _SHA2_H #include "Common/Tcdefs.h" #include "Common/Endian.h" +#include "Crypto/config.h" -#define SHA_64BIT - -/* define the hash functions that you need */ -#define SHA_2 /* for dynamic hash length */ -#define SHA_224 -#define SHA_256 -#ifdef SHA_64BIT -# define SHA_384 -# define SHA_512 -# define NEED_UINT_64T -#endif - -#ifndef EXIT_SUCCESS -#define EXIT_SUCCESS 0 -#define EXIT_FAILURE 1 +#ifdef WOLFCRYPT_BACKEND + #include <wolfssl/options.h> + #include <wolfssl/wolfcrypt/sha256.h> + #include <wolfssl/wolfcrypt/sha512.h> + #include <wolfssl/wolfcrypt/hash.h> #endif -#define li_64(h) 0x##h##ull - -#define VOID_RETURN void -#define INT_RETURN int - #if defined(__cplusplus) -extern "C" -{ +extern "C" { #endif -/* Note that the following function prototypes are the same */ -/* for both the bit and byte oriented implementations. But */ -/* the length fields are in bytes or bits as is appropriate */ -/* for the version used. Bit sequences are arrays of bytes */ -/* in which bit sequence indexes increase from the most to */ -/* the least significant end of each byte */ - -#define SHA224_DIGEST_SIZE 28 -#define SHA224_BLOCK_SIZE 64 #define SHA256_DIGEST_SIZE 32 #define SHA256_BLOCK_SIZE 64 -/* type to hold the SHA256 (and SHA224) context */ - -typedef struct -{ uint_32t count[2]; - uint_32t hash[8]; - uint_32t wbuf[16]; -} sha256_ctx; - -typedef sha256_ctx sha224_ctx; - -VOID_RETURN sha256_compile(sha256_ctx ctx[1]); - -VOID_RETURN sha224_begin(sha224_ctx ctx[1]); -#define sha224_hash sha256_hash -VOID_RETURN sha224_end(unsigned char hval[], sha224_ctx ctx[1]); -VOID_RETURN sha224(unsigned char hval[], const unsigned char data[], unsigned long len); - -VOID_RETURN sha256_begin(sha256_ctx ctx[1]); -VOID_RETURN sha256_hash(const unsigned char data[], unsigned long len, sha256_ctx ctx[1]); -VOID_RETURN sha256_end(unsigned char hval[], sha256_ctx ctx[1]); -VOID_RETURN sha256(unsigned char hval[], const unsigned char data[], unsigned long len); - -#ifndef SHA_64BIT - -typedef struct -{ union - { sha256_ctx ctx256[1]; - } uu[1]; - uint_32t sha2_len; -} sha2_ctx; - -#define SHA2_MAX_DIGEST_SIZE SHA256_DIGEST_SIZE - -#else - -#define SHA384_DIGEST_SIZE 48 -#define SHA384_BLOCK_SIZE 128 #define SHA512_DIGEST_SIZE 64 #define SHA512_BLOCK_SIZE 128 -#define SHA2_MAX_DIGEST_SIZE SHA512_DIGEST_SIZE -/* type to hold the SHA384 (and SHA512) context */ +#if CRYPTOPP_BOOL_X64 && !defined(CRYPTOPP_DISABLE_ASM) +#define SHA2_ALIGN CRYPTOPP_ALIGN_DATA(32) +#else +#define SHA2_ALIGN CRYPTOPP_ALIGN_DATA(16) +#endif +#ifdef WOLFCRYPT_BACKEND +typedef struct wc_Sha512 sha512_ctx; +typedef struct wc_Sha256 sha256_ctx; +#else typedef struct { uint_64t count[2]; - uint_64t hash[8]; - uint_64t wbuf[16]; + SHA2_ALIGN 
uint_64t hash[8]; + SHA2_ALIGN uint_64t wbuf[16]; } sha512_ctx; -typedef sha512_ctx sha384_ctx; - typedef struct -{ union - { sha256_ctx ctx256[1]; - sha512_ctx ctx512[1]; - } uu[1]; - uint_32t sha2_len; -} sha2_ctx; - -VOID_RETURN sha512_compile(sha512_ctx ctx[1]); - -VOID_RETURN sha384_begin(sha384_ctx ctx[1]); -#define sha384_hash sha512_hash -VOID_RETURN sha384_end(unsigned char hval[], sha384_ctx ctx[1]); -VOID_RETURN sha384(unsigned char hval[], const unsigned char data[], unsigned long len); - -VOID_RETURN sha512_begin(sha512_ctx ctx[1]); -VOID_RETURN sha512_hash(const unsigned char data[], unsigned long len, sha512_ctx ctx[1]); -VOID_RETURN sha512_end(unsigned char hval[], sha512_ctx ctx[1]); -VOID_RETURN sha512(unsigned char hval[], const unsigned char data[], unsigned long len); - -INT_RETURN sha2_begin(unsigned long size, sha2_ctx ctx[1]); -VOID_RETURN sha2_hash(const unsigned char data[], unsigned long len, sha2_ctx ctx[1]); -VOID_RETURN sha2_end(unsigned char hval[], sha2_ctx ctx[1]); -INT_RETURN sha2(unsigned char hval[], unsigned long size, const unsigned char data[], unsigned long len); - +{ uint_32t count[2]; + SHA2_ALIGN uint_32t hash[8]; + SHA2_ALIGN uint_32t wbuf[16]; +} sha256_ctx; #endif + +void sha512_begin(sha512_ctx* ctx); +void sha512_hash(const unsigned char * source, uint_64t sourceLen, sha512_ctx *ctx); +void sha512_end(unsigned char * result, sha512_ctx* ctx); +void sha512(unsigned char * result, const unsigned char* source, uint_64t sourceLen); + +void sha256_begin(sha256_ctx* ctx); +void sha256_hash(const unsigned char * source, uint_32t sourceLen, sha256_ctx *ctx); +void sha256_end(unsigned char * result, sha256_ctx* ctx); +void sha256(unsigned char * result, const unsigned char* source, uint_32t sourceLen); + #if defined(__cplusplus) } #endif + + #endif diff --git a/src/Crypto/Sha2Intel.c b/src/Crypto/Sha2Intel.c new file mode 100644 index 00000000..943115bf --- /dev/null +++ b/src/Crypto/Sha2Intel.c @@ -0,0 +1,278 @@ +/* +* Support for SHA-256 x86 instrinsic +* Based on public domain code by Sean Gulley +* (https://github.com/mitls/hacl-star/tree/master/experimental/hash) +* +* Botan is released under the Simplified BSD License (see license.txt) +*/ + +/* November 10th 2024: Modified for VeraCrypt */ + +#include "Sha2.h" +#include "Common/Endian.h" +#include "cpu.h" +#include "misc.h" + +#if defined(_UEFI) || defined(CRYPTOPP_DISABLE_ASM) +#define NO_OPTIMIZED_VERSIONS +#endif + +#ifndef NO_OPTIMIZED_VERSIONS + +#if CRYPTOPP_SHANI_AVAILABLE + +#ifndef _MSC_VER +#include <signal.h> +#include <setjmp.h> + +typedef void (*SigHandler)(int); + +static jmp_buf s_jmpNoSHA; +static void SigIllHandlerSHA(int p) +{ + longjmp(s_jmpNoSHA, 1); +} +#endif + +int TrySHA256() +{ + volatile int result = 0; +#ifdef _MSC_VER + __try +#else + SigHandler oldHandler = signal(SIGILL, SigIllHandlerSHA); + if (oldHandler == SIG_ERR) + return 0; + if (setjmp(s_jmpNoSHA)) + result = 0; + else +#endif + { + // Known input message block + __m128i msg0 = _mm_setr_epi32(0x12345678, 0x9ABCDEF0, 0x87654321, 0x0FEDCBA9); + __m128i msg1 = _mm_setr_epi32(0x11111111, 0x22222222, 0x33333333, 0x44444444); + + // SHA256 message schedule update + __m128i tmp = _mm_sha256msg1_epu32(msg0, msg1); + + // Verify result - these values were pre-computed for the given input +#ifdef _MSC_VER + if (tmp.m128i_u32[0] == 0xD8131B44 && + tmp.m128i_u32[1] == 0x9DE6E22B && + tmp.m128i_u32[2] == 0xA86D643A && + tmp.m128i_u32[3] == 0x74320FED) +#else + if (((uint32_t*)(&tmp))[0] == 0xD8131B44 && + 
((uint32_t*)(&tmp))[1] == 0x9DE6E22B && + ((uint32_t*)(&tmp))[2] == 0xA86D643A && + ((uint32_t*)(&tmp))[3] == 0x74320FED) +#endif + result = 1; + } +#ifdef _MSC_VER + __except (EXCEPTION_EXECUTE_HANDLER) + { + // ignore error if SHA instructions not supported + } +#else + signal(SIGILL, oldHandler); +#endif + + return result; +} + +// +void sha256_intel(void *mp, uint_32t state[8], uint_64t num_blks) +{ + // Constants table - align for better performance + CRYPTOPP_ALIGN_DATA(64) + static const uint_32t K[64] = { + 0x428A2F98, 0x71374491, 0xB5C0FBCF, 0xE9B5DBA5, 0x3956C25B, 0x59F111F1, 0x923F82A4, 0xAB1C5ED5, + 0xD807AA98, 0x12835B01, 0x243185BE, 0x550C7DC3, 0x72BE5D74, 0x80DEB1FE, 0x9BDC06A7, 0xC19BF174, + 0xE49B69C1, 0xEFBE4786, 0x0FC19DC6, 0x240CA1CC, 0x2DE92C6F, 0x4A7484AA, 0x5CB0A9DC, 0x76F988DA, + 0x983E5152, 0xA831C66D, 0xB00327C8, 0xBF597FC7, 0xC6E00BF3, 0xD5A79147, 0x06CA6351, 0x14292967, + 0x27B70A85, 0x2E1B2138, 0x4D2C6DFC, 0x53380D13, 0x650A7354, 0x766A0ABB, 0x81C2C92E, 0x92722C85, + 0xA2BFE8A1, 0xA81A664B, 0xC24B8B70, 0xC76C51A3, 0xD192E819, 0xD6990624, 0xF40E3585, 0x106AA070, + 0x19A4C116, 0x1E376C08, 0x2748774C, 0x34B0BCB5, 0x391C0CB3, 0x4ED8AA4A, 0x5B9CCA4F, 0x682E6FF3, + 0x748F82EE, 0x78A5636F, 0x84C87814, 0x8CC70208, 0x90BEFFFA, 0xA4506CEB, 0xBEF9A3F7, 0xC67178F2, + }; + + const __m128i* K_mm = (const __m128i*)K; + const __m128i* input_mm = (const __m128i*)mp; + + // Create byte shuffle mask for big-endian to little-endian conversion + const __m128i MASK = _mm_set_epi64x(0x0c0d0e0f08090a0b, 0x0405060700010203); + + // Load initial values + __m128i STATE0 = _mm_loadu_si128((__m128i*)&state[0]); + __m128i STATE1 = _mm_loadu_si128((__m128i*)&state[4]); + + // Adjust byte ordering + STATE0 = _mm_shuffle_epi32(STATE0, 0xB1); // CDAB + STATE1 = _mm_shuffle_epi32(STATE1, 0x1B); // EFGH + + __m128i TMP = _mm_alignr_epi8(STATE0, STATE1, 8); // ABEF + STATE1 = _mm_blend_epi16(STATE1, STATE0, 0xF0); // CDGH + STATE0 = TMP; + + while(num_blks > 0) { + // Save current state + const __m128i ABEF_SAVE = STATE0; + const __m128i CDGH_SAVE = STATE1; + + __m128i MSG; + + __m128i TMSG0 = _mm_shuffle_epi8(_mm_loadu_si128(input_mm), MASK); + __m128i TMSG1 = _mm_shuffle_epi8(_mm_loadu_si128(input_mm + 1), MASK); + __m128i TMSG2 = _mm_shuffle_epi8(_mm_loadu_si128(input_mm + 2), MASK); + __m128i TMSG3 = _mm_shuffle_epi8(_mm_loadu_si128(input_mm + 3), MASK); + + // Rounds 0-3 + MSG = _mm_add_epi32(TMSG0, _mm_load_si128(K_mm)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + // Rounds 4-7 + MSG = _mm_add_epi32(TMSG1, _mm_load_si128(K_mm + 1)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG0 = _mm_sha256msg1_epu32(TMSG0, TMSG1); + + // Rounds 8-11 + MSG = _mm_add_epi32(TMSG2, _mm_load_si128(K_mm + 2)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG1 = _mm_sha256msg1_epu32(TMSG1, TMSG2); + + // Rounds 12-15 + MSG = _mm_add_epi32(TMSG3, _mm_load_si128(K_mm + 3)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG0 = _mm_add_epi32(TMSG0, _mm_alignr_epi8(TMSG3, TMSG2, 4)); + TMSG0 = _mm_sha256msg2_epu32(TMSG0, TMSG3); + TMSG2 = _mm_sha256msg1_epu32(TMSG2, TMSG3); + + // Rounds 16-19 + MSG = _mm_add_epi32(TMSG0, _mm_load_si128(K_mm + 
4)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG1 = _mm_add_epi32(TMSG1, _mm_alignr_epi8(TMSG0, TMSG3, 4)); + TMSG1 = _mm_sha256msg2_epu32(TMSG1, TMSG0); + TMSG3 = _mm_sha256msg1_epu32(TMSG3, TMSG0); + + // Rounds 20-23 + MSG = _mm_add_epi32(TMSG1, _mm_load_si128(K_mm + 5)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG2 = _mm_add_epi32(TMSG2, _mm_alignr_epi8(TMSG1, TMSG0, 4)); + TMSG2 = _mm_sha256msg2_epu32(TMSG2, TMSG1); + TMSG0 = _mm_sha256msg1_epu32(TMSG0, TMSG1); + + // Rounds 24-27 + MSG = _mm_add_epi32(TMSG2, _mm_load_si128(K_mm + 6)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG3 = _mm_add_epi32(TMSG3, _mm_alignr_epi8(TMSG2, TMSG1, 4)); + TMSG3 = _mm_sha256msg2_epu32(TMSG3, TMSG2); + TMSG1 = _mm_sha256msg1_epu32(TMSG1, TMSG2); + + // Rounds 28-31 + MSG = _mm_add_epi32(TMSG3, _mm_load_si128(K_mm + 7)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG0 = _mm_add_epi32(TMSG0, _mm_alignr_epi8(TMSG3, TMSG2, 4)); + TMSG0 = _mm_sha256msg2_epu32(TMSG0, TMSG3); + TMSG2 = _mm_sha256msg1_epu32(TMSG2, TMSG3); + + // Rounds 32-35 + MSG = _mm_add_epi32(TMSG0, _mm_load_si128(K_mm + 8)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG1 = _mm_add_epi32(TMSG1, _mm_alignr_epi8(TMSG0, TMSG3, 4)); + TMSG1 = _mm_sha256msg2_epu32(TMSG1, TMSG0); + TMSG3 = _mm_sha256msg1_epu32(TMSG3, TMSG0); + + // Rounds 36-39 + MSG = _mm_add_epi32(TMSG1, _mm_load_si128(K_mm + 9)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG2 = _mm_add_epi32(TMSG2, _mm_alignr_epi8(TMSG1, TMSG0, 4)); + TMSG2 = _mm_sha256msg2_epu32(TMSG2, TMSG1); + TMSG0 = _mm_sha256msg1_epu32(TMSG0, TMSG1); + + // Rounds 40-43 + MSG = _mm_add_epi32(TMSG2, _mm_load_si128(K_mm + 10)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG3 = _mm_add_epi32(TMSG3, _mm_alignr_epi8(TMSG2, TMSG1, 4)); + TMSG3 = _mm_sha256msg2_epu32(TMSG3, TMSG2); + TMSG1 = _mm_sha256msg1_epu32(TMSG1, TMSG2); + + // Rounds 44-47 + MSG = _mm_add_epi32(TMSG3, _mm_load_si128(K_mm + 11)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG0 = _mm_add_epi32(TMSG0, _mm_alignr_epi8(TMSG3, TMSG2, 4)); + TMSG0 = _mm_sha256msg2_epu32(TMSG0, TMSG3); + TMSG2 = _mm_sha256msg1_epu32(TMSG2, TMSG3); + + // Rounds 48-51 + MSG = _mm_add_epi32(TMSG0, _mm_load_si128(K_mm + 12)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG1 = _mm_add_epi32(TMSG1, _mm_alignr_epi8(TMSG0, TMSG3, 4)); + TMSG1 = _mm_sha256msg2_epu32(TMSG1, TMSG0); + TMSG3 = _mm_sha256msg1_epu32(TMSG3, TMSG0); + + // Rounds 52-55 + MSG = _mm_add_epi32(TMSG1, _mm_load_si128(K_mm + 13)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG2 = _mm_add_epi32(TMSG2, _mm_alignr_epi8(TMSG1, TMSG0, 4)); + 
TMSG2 = _mm_sha256msg2_epu32(TMSG2, TMSG1); + + // Rounds 56-59 + MSG = _mm_add_epi32(TMSG2, _mm_load_si128(K_mm + 14)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + TMSG3 = _mm_add_epi32(TMSG3, _mm_alignr_epi8(TMSG2, TMSG1, 4)); + TMSG3 = _mm_sha256msg2_epu32(TMSG3, TMSG2); + + // Rounds 60-63 + MSG = _mm_add_epi32(TMSG3, _mm_load_si128(K_mm + 15)); + STATE1 = _mm_sha256rnds2_epu32(STATE1, STATE0, MSG); + STATE0 = _mm_sha256rnds2_epu32(STATE0, STATE1, _mm_shuffle_epi32(MSG, 0x0E)); + + // Add values back to state + STATE0 = _mm_add_epi32(STATE0, ABEF_SAVE); + STATE1 = _mm_add_epi32(STATE1, CDGH_SAVE); + + input_mm += 4; + num_blks--; + } + + // Shuffle state back to correct order + STATE0 = _mm_shuffle_epi32(STATE0, 0x1B); // FEBA + STATE1 = _mm_shuffle_epi32(STATE1, 0xB1); // DCHG + + // Save state + _mm_storeu_si128((__m128i*)&state[0], _mm_blend_epi16(STATE0, STATE1, 0xF0)); // DCBA + _mm_storeu_si128((__m128i*)&state[4], _mm_alignr_epi8(STATE1, STATE0, 8)); // HGFE +} + +#endif +#endif diff --git a/src/Crypto/Sha2Small.c b/src/Crypto/Sha2Small.c index 08318833..572dd612 100644 --- a/src/Crypto/Sha2Small.c +++ b/src/Crypto/Sha2Small.c @@ -19,9 +19,9 @@ #pragma optimize ("tl", on) typedef unsigned __int32 uint32; -typedef unsigned __int8 byte; +typedef unsigned __int8 uint8; #include <stdlib.h> #pragma intrinsic(_lrotr) #define RORc(x,n) _lrotr(x,n) diff --git a/src/Crypto/Sources b/src/Crypto/Sources index edddd4c6..bd990382 100644 --- a/src/Crypto/Sources +++ b/src/Crypto/Sources @@ -5,25 +5,58 @@ INCLUDES = .. NTTARGETFILES = \ "$(OBJ_PATH)\$(O)\Aes_$(TC_ARCH).obj" \ "$(OBJ_PATH)\$(O)\Aes_hw_cpu.obj" \ - "$(OBJ_PATH)\$(O)\gost89_$(TC_ARCH).obj" \ - "$(OBJ_PATH)\$(O)\Twofish_$(TC_ARCH).obj" + "$(OBJ_PATH)\$(O)\rdrand_ml.obj" \ + "$(OBJ_PATH)\$(O)\Twofish_$(TC_ARCH).obj" \ + "$(OBJ_PATH)\$(O)\Camellia_$(TC_ARCH).obj" \ + "$(OBJ_PATH)\$(O)\Camellia_aesni_$(TC_ARCH).obj" \ + "$(OBJ_PATH)\$(O)\sha256-$(TC_ARCH)-nayuki.obj" \ + "$(OBJ_PATH)\$(O)\sha512-$(TC_ARCH)-nayuki.obj" \ + "$(OBJ_PATH)\$(O)\sha512_avx1_$(TC_ARCH).obj" \ + "$(OBJ_PATH)\$(O)\sha512_avx2_$(TC_ARCH).obj" \ + "$(OBJ_PATH)\$(O)\sha512_sse4_$(TC_ARCH).obj" \ + "$(OBJ_PATH)\$(O)\sha256_avx1_$(TC_ARCH).obj" \ + "$(OBJ_PATH)\$(O)\sha256_avx2_$(TC_ARCH).obj" \ + "$(OBJ_PATH)\$(O)\sha256_sse4_$(TC_ARCH).obj" SOURCES = \ Aes_$(TC_ARCH).asm \ - gost89_$(TC_ARCH).asm \ Aes_hw_cpu.asm \ + rdrand_ml.asm \ + rdseed_ml.asm \ Aeskey.c \ Aestab.c \ + blake2s.c \ + blake2s_SSE2.c \ + blake2s_SSE41.c \ + blake2s_SSSE3.c \ + chacha-xmm.c \ + chacha256.c \ + chachaRng.c \ cpu.c \ - Rmd160.c \ + jitterentropy-base.c \ + rdrand.c \ SerpentFast.c \ SerpentFast_simd.cpp \ Sha2.c \ + Sha2Intel.c \ + t1ha_selfcheck.c \ + t1ha2.c \ + t1ha2_selfcheck.c \ Twofish.c \ Twofish_$(TC_ARCH).S \ - GostCipher.c \ Streebog.c \ kuznyechik.c \ + kuznyechik_simd.c \ Whirlpool.c \ - Camellia.c + Camellia.c \ + Camellia_$(TC_ARCH).S \ + Camellia_aesni_$(TC_ARCH).S \ + sha256-$(TC_ARCH)-nayuki.S \ + sha512-$(TC_ARCH)-nayuki.S \ + sha512_avx1_$(TC_ARCH).asm \ + sha512_avx2_$(TC_ARCH).asm \ + sha512_sse4_$(TC_ARCH).asm \ + sha256_avx1_$(TC_ARCH).asm \ + sha256_avx2_$(TC_ARCH).asm \ + sha256_sse4_$(TC_ARCH).asm diff --git a/src/Crypto/Streebog.c b/src/Crypto/Streebog.c index 6c52ce75..d223eef8 100644 --- a/src/Crypto/Streebog.c +++ b/src/Crypto/Streebog.c @@ -275,105 +275,8 @@ STREEBOG_ALIGN(16) static const unsigned long long C[12][8] = { } }; #endif -#ifndef 
__GOST3411_BIG_ENDIAN__ -static const unsigned long long A[64] = { - 0x8e20faa72ba0b470ULL, 0x47107ddd9b505a38ULL, 0xad08b0e0c3282d1cULL, - 0xd8045870ef14980eULL, 0x6c022c38f90a4c07ULL, 0x3601161cf205268dULL, - 0x1b8e0b0e798c13c8ULL, 0x83478b07b2468764ULL, 0xa011d380818e8f40ULL, - 0x5086e740ce47c920ULL, 0x2843fd2067adea10ULL, 0x14aff010bdd87508ULL, - 0x0ad97808d06cb404ULL, 0x05e23c0468365a02ULL, 0x8c711e02341b2d01ULL, - 0x46b60f011a83988eULL, 0x90dab52a387ae76fULL, 0x486dd4151c3dfdb9ULL, - 0x24b86a840e90f0d2ULL, 0x125c354207487869ULL, 0x092e94218d243cbaULL, - 0x8a174a9ec8121e5dULL, 0x4585254f64090fa0ULL, 0xaccc9ca9328a8950ULL, - 0x9d4df05d5f661451ULL, 0xc0a878a0a1330aa6ULL, 0x60543c50de970553ULL, - 0x302a1e286fc58ca7ULL, 0x18150f14b9ec46ddULL, 0x0c84890ad27623e0ULL, - 0x0642ca05693b9f70ULL, 0x0321658cba93c138ULL, 0x86275df09ce8aaa8ULL, - 0x439da0784e745554ULL, 0xafc0503c273aa42aULL, 0xd960281e9d1d5215ULL, - 0xe230140fc0802984ULL, 0x71180a8960409a42ULL, 0xb60c05ca30204d21ULL, - 0x5b068c651810a89eULL, 0x456c34887a3805b9ULL, 0xac361a443d1c8cd2ULL, - 0x561b0d22900e4669ULL, 0x2b838811480723baULL, 0x9bcf4486248d9f5dULL, - 0xc3e9224312c8c1a0ULL, 0xeffa11af0964ee50ULL, 0xf97d86d98a327728ULL, - 0xe4fa2054a80b329cULL, 0x727d102a548b194eULL, 0x39b008152acb8227ULL, - 0x9258048415eb419dULL, 0x492c024284fbaec0ULL, 0xaa16012142f35760ULL, - 0x550b8e9e21f7a530ULL, 0xa48b474f9ef5dc18ULL, 0x70a6a56e2440598eULL, - 0x3853dc371220a247ULL, 0x1ca76e95091051adULL, 0x0edd37c48a08a6d8ULL, - 0x07e095624504536cULL, 0x8d70c431ac02a736ULL, 0xc83862965601dd1bULL, - 0x641c314b2b8ee083ULL -}; -#else -static const unsigned long long A[64] = { - 0x70b4a02ba7fa208eULL, 0x385a509bdd7d1047ULL, 0x1c2d28c3e0b008adULL, - 0x0e9814ef705804d8ULL, 0x074c0af9382c026cULL, 0x8d2605f21c160136ULL, - 0xc8138c790e0b8e1bULL, 0x648746b2078b4783ULL, 0x408f8e8180d311a0ULL, - 0x20c947ce40e78650ULL, 0x10eaad6720fd4328ULL, 0x0875d8bd10f0af14ULL, - 0x04b46cd00878d90aULL, 0x025a3668043ce205ULL, 0x012d1b34021e718cULL, - 0x8e98831a010fb646ULL, 0x6fe77a382ab5da90ULL, 0xb9fd3d1c15d46d48ULL, - 0xd2f0900e846ab824ULL, 0x6978480742355c12ULL, 0xba3c248d21942e09ULL, - 0x5d1e12c89e4a178aULL, 0xa00f09644f258545ULL, 0x50898a32a99cccacULL, - 0x5114665f5df04d9dULL, 0xa60a33a1a078a8c0ULL, 0x530597de503c5460ULL, - 0xa78cc56f281e2a30ULL, 0xdd46ecb9140f1518ULL, 0xe02376d20a89840cULL, - 0x709f3b6905ca4206ULL, 0x38c193ba8c652103ULL, 0xa8aae89cf05d2786ULL, - 0x5455744e78a09d43ULL, 0x2aa43a273c50c0afULL, 0x15521d9d1e2860d9ULL, - 0x842980c00f1430e2ULL, 0x429a4060890a1871ULL, 0x214d2030ca050cb6ULL, - 0x9ea81018658c065bULL, 0xb905387a88346c45ULL, 0xd28c1c3d441a36acULL, - 0x69460e90220d1b56ULL, 0xba2307481188832bULL, 0x5d9f8d248644cf9bULL, - 0xa0c1c8124322e9c3ULL, 0x50ee6409af11faefULL, 0x2877328ad9867df9ULL, - 0x9c320ba85420fae4ULL, 0x4e198b542a107d72ULL, 0x2782cb2a1508b039ULL, - 0x9d41eb1584045892ULL, 0xc0aefb8442022c49ULL, 0x6057f342210116aaULL, - 0x30a5f7219e8e0b55ULL, 0x18dcf59e4f478ba4ULL, 0x8e5940246ea5a670ULL, - 0x47a2201237dc5338ULL, 0xad511009956ea71cULL, 0xd8a6088ac437dd0eULL, - 0x6c5304456295e007ULL, 0x36a702ac31c4708dULL, 0x1bdd0156966238c8ULL, - 0x83e08e2b4b311c64ULL -}; -#endif - -static const unsigned char Tau[64] = { - 0, 8, 16, 24, 32, 40, 48, 56, - 1, 9, 17, 25, 33, 41, 49, 57, - 2, 10, 18, 26, 34, 42, 50, 58, - 3, 11, 19, 27, 35, 43, 51, 59, - 4, 12, 20, 28, 36, 44, 52, 60, - 5, 13, 21, 29, 37, 45, 53, 61, - 6, 14, 22, 30, 38, 46, 54, 62, - 7, 15, 23, 31, 39, 47, 55, 63 -}; - -static const unsigned char Pi[256] = { - 252, 238, 221, 17, 207, 110, 49, 22, - 
251, 196, 250, 218, 35, 197, 4, 77, - 233, 119, 240, 219, 147, 46, 153, 186, - 23, 54, 241, 187, 20, 205, 95, 193, - 249, 24, 101, 90, 226, 92, 239, 33, - 129, 28, 60, 66, 139, 1, 142, 79, - 5, 132, 2, 174, 227, 106, 143, 160, - 6, 11, 237, 152, 127, 212, 211, 31, - 235, 52, 44, 81, 234, 200, 72, 171, - 242, 42, 104, 162, 253, 58, 206, 204, - 181, 112, 14, 86, 8, 12, 118, 18, - 191, 114, 19, 71, 156, 183, 93, 135, - 21, 161, 150, 41, 16, 123, 154, 199, - 243, 145, 120, 111, 157, 158, 178, 177, - 50, 117, 25, 61, 255, 53, 138, 126, - 109, 84, 198, 128, 195, 189, 13, 87, - 223, 245, 36, 169, 62, 168, 67, 201, - 215, 121, 214, 246, 124, 34, 185, 3, - 224, 15, 236, 222, 122, 148, 176, 188, - 220, 232, 40, 80, 78, 51, 10, 74, - 167, 151, 96, 115, 30, 0, 98, 68, - 26, 184, 56, 130, 100, 159, 38, 65, - 173, 69, 70, 146, 39, 94, 85, 47, - 140, 163, 165, 125, 105, 213, 149, 59, - 7, 88, 179, 64, 134, 172, 29, 247, - 48, 55, 107, 228, 136, 217, 231, 137, - 225, 27, 131, 73, 76, 63, 248, 254, - 141, 83, 170, 144, 202, 216, 133, 97, - 32, 113, 103, 164, 45, 43, 9, 91, - 203, 155, 37, 208, 190, 229, 108, 82, - 89, 166, 116, 210, 230, 244, 180, 192, - 209, 102, 175, 194, 57, 75, 99, 182 -}; #endif // CONSTS #if 1 #ifndef __GOST3411_BIG_ENDIAN__ @@ -1844,23 +1747,42 @@ static void add512(const unsigned long long *x, const unsigned long long *y, unsigned long long *r) { #ifndef __GOST3411_BIG_ENDIAN__ unsigned int CF, OF; + unsigned long long tmp; unsigned int i; CF = 0; for (i = 0; i < 8; i++) { - r[i] = x[i] + y[i]; - if ( (r[i] < y[i]) || - (r[i] < x[i]) ) + /* Detecting integer overflow condition for three numbers + * in a portable way is tricky a little. */ + + /* Step 1: numbers cause overflow */ + tmp = x[i] + y[i]; + + /* Compare with any of two summands, no need to check both */ + if (tmp < x[i]) OF = 1; else OF = 0; - r[i] += CF; + /* Step 2: carry bit causes overflow */ + tmp += CF; + + /* + * We don't include the carry bit overflow since it can break + * mounting for some containers eventhough the probability of + * such case is very low + */ + /* + if (CF > 0 && tmp == 0) + OF = 1; + */ CF = OF; - } + + r[i] = tmp; + } #else const unsigned char *xp, *yp; unsigned char *rp; unsigned int i; @@ -1889,13 +1811,12 @@ add512(const unsigned long long *x, const unsigned long long *y, unsigned long l z[6] = x[6] ^ y[6]; \ z[7] = x[7] ^ y[7]; \ } -#ifndef __GOST3411_BIG_ENDIAN__ #define __XLPS_FOR for (_i = 0; _i <= 7; _i++) +#ifndef __GOST3411_BIG_ENDIAN__ #define _datai _i #else -#define __XLPS_FOR for (_i = 7; _i >= 0; _i--) #define _datai 7 - _i #endif #define XLPS(x, y, data) { \ @@ -1913,16 +1834,24 @@ add512(const unsigned long long *x, const unsigned long long *y, unsigned long l \ \ __XLPS_FOR \ {\ - data[_datai] = Ax[0][(r0 >> (_i << 3)) & 0xFF]; \ - data[_datai] ^= Ax[1][(r1 >> (_i << 3)) & 0xFF]; \ - data[_datai] ^= Ax[2][(r2 >> (_i << 3)) & 0xFF]; \ - data[_datai] ^= Ax[3][(r3 >> (_i << 3)) & 0xFF]; \ - data[_datai] ^= Ax[4][(r4 >> (_i << 3)) & 0xFF]; \ - data[_datai] ^= Ax[5][(r5 >> (_i << 3)) & 0xFF]; \ - data[_datai] ^= Ax[6][(r6 >> (_i << 3)) & 0xFF]; \ - data[_datai] ^= Ax[7][(r7 >> (_i << 3)) & 0xFF]; \ + data[_datai] = Ax[0][r0 & 0xFF]; \ + data[_datai] ^= Ax[1][r1 & 0xFF]; \ + data[_datai] ^= Ax[2][r2 & 0xFF]; \ + data[_datai] ^= Ax[3][r3 & 0xFF]; \ + data[_datai] ^= Ax[4][r4 & 0xFF]; \ + data[_datai] ^= Ax[5][r5 & 0xFF]; \ + data[_datai] ^= Ax[6][r6 & 0xFF]; \ + data[_datai] ^= Ax[7][r7 & 0xFF]; \ + r0 >>= 8; \ + r1 >>= 8; \ + r2 >>= 8; \ + r3 >>= 8; \ + r4 >>= 8; \ + r5 >>= 8; \ 
+ r6 >>= 8; \ + r7 >>= 8; \ }\ } #define ROUND(i, Ki, data) { \ @@ -1975,12 +1904,12 @@ VC_INLINE __m128i _mm_set_epi64x_a(uint64 i0, uint64 i1) { #endif #define LOAD(P, xmm0, xmm1, xmm2, xmm3) { \ const __m128i *__m128p = (const __m128i *) &P[0]; \ - xmm0 = _mm_load_si128(&__m128p[0]); \ - xmm1 = _mm_load_si128(&__m128p[1]); \ - xmm2 = _mm_load_si128(&__m128p[2]); \ - xmm3 = _mm_load_si128(&__m128p[3]); \ + xmm0 = _mm_loadu_si128(&__m128p[0]); \ + xmm1 = _mm_loadu_si128(&__m128p[1]); \ + xmm2 = _mm_loadu_si128(&__m128p[2]); \ + xmm3 = _mm_loadu_si128(&__m128p[3]); \ } #define UNLOAD(P, xmm0, xmm1, xmm2, xmm3) { \ __m128i *__m128p = (__m128i *) &P[0]; \ @@ -1998,12 +1927,12 @@ VC_INLINE __m128i _mm_set_epi64x_a(uint64 i0, uint64 i1) { } #define X128M(P, xmm0, xmm1, xmm2, xmm3) { \ const __m128i *__m128p = (const __m128i *) &P[0]; \ - xmm0 = _mm_xor_si128(xmm0, _mm_load_si128(&__m128p[0])); \ - xmm1 = _mm_xor_si128(xmm1, _mm_load_si128(&__m128p[1])); \ - xmm2 = _mm_xor_si128(xmm2, _mm_load_si128(&__m128p[2])); \ - xmm3 = _mm_xor_si128(xmm3, _mm_load_si128(&__m128p[3])); \ + xmm0 = _mm_xor_si128(xmm0, _mm_loadu_si128(&__m128p[0])); \ + xmm1 = _mm_xor_si128(xmm1, _mm_loadu_si128(&__m128p[1])); \ + xmm2 = _mm_xor_si128(xmm2, _mm_loadu_si128(&__m128p[2])); \ + xmm3 = _mm_xor_si128(xmm3, _mm_loadu_si128(&__m128p[3])); \ } #define _mm_xor_64(mm0, mm1) _mm_xor_si64(mm0, _mm_cvtsi64_m64(mm1)) @@ -2045,33 +1974,8 @@ VC_INLINE __m128i _mm_set_epi64x_a(uint64 i0, uint64 i1) { \ xmm4 = _mm_set_epi64(mm1, mm0); \ } -#define __EXTRACT64(row, xmm0, xmm1, xmm2, xmm3, xmm4) { \ - __m128i tmm4; \ - register unsigned long long r0, r1; \ - r0 = Ax[0][_mm_extract_epi8(xmm0, row + 0)]; \ - r0 ^= Ax[1][_mm_extract_epi8(xmm0, row + 8)]; \ - r0 ^= Ax[2][_mm_extract_epi8(xmm1, row + 0)]; \ - r0 ^= Ax[3][_mm_extract_epi8(xmm1, row + 8)]; \ - r0 ^= Ax[4][_mm_extract_epi8(xmm2, row + 0)]; \ - r0 ^= Ax[5][_mm_extract_epi8(xmm2, row + 8)]; \ - r0 ^= Ax[6][_mm_extract_epi8(xmm3, row + 0)]; \ - r0 ^= Ax[7][_mm_extract_epi8(xmm3, row + 8)]; \ - \ - r1 = Ax[0][_mm_extract_epi8(xmm0, row + 1)]; \ - r1 ^= Ax[1][_mm_extract_epi8(xmm0, row + 9)]; \ - r1 ^= Ax[2][_mm_extract_epi8(xmm1, row + 1)]; \ - r1 ^= Ax[3][_mm_extract_epi8(xmm1, row + 9)]; \ - r1 ^= Ax[4][_mm_extract_epi8(xmm2, row + 1)]; \ - r1 ^= Ax[5][_mm_extract_epi8(xmm2, row + 9)]; \ - r1 ^= Ax[6][_mm_extract_epi8(xmm3, row + 1)]; \ - r1 ^= Ax[7][_mm_extract_epi8(xmm3, row + 9)]; \ - xmm4 = _mm_cvtsi64_si128((long long) r0); \ - tmm4 = _mm_cvtsi64_si128((long long) r1); \ - xmm4 = _mm_unpacklo_epi64(xmm4, tmm4); \ -} - #define EXTRACT64(row, xmm0, xmm1, xmm2, xmm3, xmm4) { \ __m128i tmm4; \ register unsigned short ax; \ register unsigned long long r0, r1; \ @@ -2348,9 +2252,9 @@ stage3(STREEBOG_CTX *CTX) g((CTX->h), buffer0, (const unsigned char *) (CTX->Sigma)); memcpy((CTX->hash), (CTX->h), 8 * sizeof(unsigned long long)); } -void STREEBOG_add(STREEBOG_CTX *CTX, const byte *data, size_t len) +void STREEBOG_add(STREEBOG_CTX *CTX, const uint8 *data, size_t len) { size_t chunksize; while (len > 63 && CTX->bufsize == 0) @@ -2381,9 +2285,9 @@ void STREEBOG_add(STREEBOG_CTX *CTX, const byte *data, size_t len) } } } -void STREEBOG_finalize(STREEBOG_CTX *CTX, byte *digest) +void STREEBOG_finalize(STREEBOG_CTX *CTX, uint8 *digest) { stage3(CTX); CTX->bufsize = 0; diff --git a/src/Crypto/Streebog.h b/src/Crypto/Streebog.h index d5691e70..29571d73 100644 --- a/src/Crypto/Streebog.h +++ b/src/Crypto/Streebog.h @@ -30,10 +30,10 @@ typedef STREEBOG_ALIGN(16) struct _STREEBOG_CTX 
} STREEBOG_CTX; void STREEBOG_init(STREEBOG_CTX *ctx); void STREEBOG_init256(STREEBOG_CTX *ctx); -void STREEBOG_add(STREEBOG_CTX *ctx, const byte *msg, size_t len); -void STREEBOG_finalize(STREEBOG_CTX *ctx, byte *out); +void STREEBOG_add(STREEBOG_CTX *ctx, const uint8 *msg, size_t len); +void STREEBOG_finalize(STREEBOG_CTX *ctx, uint8 *out); #ifdef __cplusplus } #endif diff --git a/src/Crypto/Twofish.c b/src/Crypto/Twofish.c index 8ab59081..ff46bc99 100644 --- a/src/Crypto/Twofish.c +++ b/src/Crypto/Twofish.c @@ -53,30 +53,30 @@ #if !defined (_MSC_VER) || defined(_M_X64) #define UNROLL_TWOFISH #endif -#if CRYPTOPP_BOOL_X64 +#if CRYPTOPP_BOOL_X64 && !defined(CRYPTOPP_DISABLE_ASM) /* these are 64-bit assembly implementation taken from https://github.com/jkivilin/supercop-blockciphers - Copyright 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi> + Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi> */ #if defined(__cplusplus) extern "C" { #endif -void twofish_enc_blk(TwofishInstance *ks, byte *dst, const byte *src); -void twofish_dec_blk(TwofishInstance *ks, byte *dst, const byte *src); -void twofish_enc_blk2(TwofishInstance *ks, byte *dst, const byte *src); -void twofish_dec_blk2(TwofishInstance *ks, byte *dst, const byte *src); -void twofish_enc_blk3(TwofishInstance *ks, byte *dst, const byte *src); -void twofish_dec_blk3(TwofishInstance *ks, byte *dst, const byte *src); +void twofish_enc_blk(TwofishInstance *ks, uint8 *dst, const uint8 *src); +void twofish_dec_blk(TwofishInstance *ks, uint8 *dst, const uint8 *src); +void twofish_enc_blk2(TwofishInstance *ks, uint8 *dst, const uint8 *src); +void twofish_dec_blk2(TwofishInstance *ks, uint8 *dst, const uint8 *src); +void twofish_enc_blk3(TwofishInstance *ks, uint8 *dst, const uint8 *src); +void twofish_dec_blk3(TwofishInstance *ks, uint8 *dst, const uint8 *src); #if defined(__cplusplus) } #endif -void twofish_encrypt_blocks(TwofishInstance *instance, const byte* in_blk, byte* out_blk, uint32 blockCount) +void twofish_encrypt_blocks(TwofishInstance *instance, const uint8* in_blk, uint8* out_blk, uint32 blockCount) { while (blockCount >= 3) { twofish_enc_blk3 (instance, out_blk, in_blk); @@ -95,9 +95,9 @@ void twofish_encrypt_blocks(TwofishInstance *instance, const byte* in_blk, byte* } } -void twofish_decrypt_blocks(TwofishInstance *instance, const byte* in_blk, byte* out_blk, uint32 blockCount) +void twofish_decrypt_blocks(TwofishInstance *instance, const uint8* in_blk, uint8* out_blk, uint32 blockCount) { while (blockCount >= 3) { twofish_dec_blk3 (instance, out_blk, in_blk); @@ -119,9 +119,9 @@ void twofish_decrypt_blocks(TwofishInstance *instance, const byte* in_blk, byte* #endif -static const byte Q[2][256] = { +static const uint8 Q[2][256] = { { 0xa9, 0x67, 0xb3, 0xe8, 0x04, 0xfd, 0xa3, 0x76, 0x9a, 0x92, 0x80, 0x78, 0xe4, 0xdd, 0xd1, 0x38, 0x0d, 0xc6, 0x35, 0x98, 0x18, 0xf7, 0xec, 0x6c, 0x43, 0x75, 0x37, 0x26, 0xfa, 0x13, 0x94, 0x48, 0xf2, 0xd0, 0x8b, 0x30, 0x84, 0x54, 0xdf, 0x23, 0x19, 0x5b, 0x3d, 0x59, 0xf3, 0xae, 0xa2, 0x82, @@ -603,13 +603,13 @@ static const uint32 RS[8][256] = { void twofish_set_key(TwofishInstance *instance, const u4byte in_key[]) { union { - byte S8[16]; + uint8 S8[16]; uint32 S32[4]; } us; - int i; - const byte* key = (const byte*) in_key; + unsigned int i; + const uint8* key = (const uint8*) in_key; us.S32[0] = RS[0][key[0]] ^ RS[1][key[1]] ^ RS[2][key[2]] ^ RS[3][key[3]] ^ RS[4][key[4]] ^ RS[5][key[5]] ^ RS[6][key[6]] ^ RS[7][key[7]]; us.S32[1] = RS[0][key[8]] ^ RS[1][key[9]] ^ RS[2][key[10]] ^ 
RS[3][key[11]] ^ RS[4][key[12]] ^ RS[5][key[13]] ^ RS[6][key[14]] ^ RS[7][key[15]]; us.S32[2] = RS[0][key[16]] ^ RS[1][key[17]] ^ RS[2][key[18]] ^ RS[3][key[19]] ^ RS[4][key[20]] ^ RS[5][key[21]] ^ RS[6][key[22]] ^ RS[7][key[23]]; @@ -629,9 +629,9 @@ void twofish_set_key(TwofishInstance *instance, const u4byte in_key[]) ^ MDSQ[2][Q[1][Q[0][Q[0][Q[0][i] ^ key[26]] ^ key[18]] ^ key[10]] ^ key[2]] ^ MDSQ[3][Q[1][Q[1][Q[0][Q[1][i] ^ key[27]] ^ key[19]] ^ key[11]] ^ key[3]]; uint32 b = rotl32(MDSQ[0][Q[0][Q[0][Q[1][Q[1][i + 1] ^ key[28]] ^ key[20]] ^ key[12]] ^ key[4]] ^ MDSQ[1][Q[0][Q[1][Q[1][Q[0][i + 1] ^ key[29]] ^ key[21]] ^ key[13]] ^ key[5]] ^ MDSQ[2][Q[1][Q[0][Q[0][Q[0][i + 1] ^ key[30]] ^ key[22]] ^ key[14]] ^ key[6]] ^ MDSQ[3][Q[1][Q[1][Q[0][Q[1][i + 1] ^ key[31]] ^ key[23]] ^ key[15]] ^ key[7]], 8); a += b; -#if CRYPTOPP_BOOL_X64 +#if CRYPTOPP_BOOL_X64 && !defined(CRYPTOPP_DISABLE_ASM) if (i < 8) { instance->w[i] = a; instance->w[i + 1] = rotl32(a + b, 9); @@ -997,9 +997,9 @@ void twofish_set_key(TwofishInstance *instance, const u4byte in_key[]) /* encrypt a block of text */ #ifndef TC_MINIMIZE_CODE_SIZE -#if (CRYPTOPP_BOOL_X64 == 0) +#if (CRYPTOPP_BOOL_X64 == 0) || defined(CRYPTOPP_DISABLE_ASM) void twofish_encrypt(TwofishInstance *ks, const u4byte in_blk[4], u4byte out_blk[4]) { uint32* rk = ks->l_key; @@ -1070,9 +1070,9 @@ void twofish_encrypt(TwofishInstance *instance, const u4byte in_blk[4], u4byte o /* decrypt a block of text */ #ifndef TC_MINIMIZE_CODE_SIZE -#if (CRYPTOPP_BOOL_X64 == 0) +#if (CRYPTOPP_BOOL_X64 == 0) || defined(CRYPTOPP_DISABLE_ASM) void twofish_decrypt(TwofishInstance *ks, const u4byte in_blk[4], u4byte out_blk[4]) { uint32* rk = ks->l_key; uint32 x0 = in_blk[0] ^ rk[4]; diff --git a/src/Crypto/Twofish.h b/src/Crypto/Twofish.h index b2d44ddb..3b530cbd 100644 --- a/src/Crypto/Twofish.h +++ b/src/Crypto/Twofish.h @@ -34,15 +34,17 @@ extern "C" #endif typedef struct { -#if CRYPTOPP_BOOL_X64 +#if CRYPTOPP_BOOL_X64 && !defined(CRYPTOPP_DISABLE_ASM) u4byte mk_tab[4][256], w[8], k[32]; #else u4byte l_key[40]; #ifdef TC_MINIMIZE_CODE_SIZE u4byte s_key[4]; +#ifdef TC_WINDOWS_BOOT_TWOFISH u4byte mk_tab[4 * 256]; +#endif #else u4byte mk_tab[4][256]; #endif #endif @@ -51,13 +53,13 @@ typedef struct #define TWOFISH_KS sizeof(TwofishInstance) /* in_key must be 32-bytes long */ void twofish_set_key(TwofishInstance *instance, const u4byte in_key[]); -#if CRYPTOPP_BOOL_X64 -void twofish_encrypt_blocks(TwofishInstance *instance, const byte* in_blk, byte* out_blk, uint32 blockCount); -void twofish_decrypt_blocks(TwofishInstance *instance, const byte* in_blk, byte* out_blk, uint32 blockCount); -#define twofish_encrypt(instance,in_blk,out_blk) twofish_encrypt_blocks(instance, (const byte*) in_blk, (byte*) out_blk, 1) -#define twofish_decrypt(instance,in_blk,out_blk) twofish_decrypt_blocks(instance, (const byte*) in_blk, (byte*) out_blk, 1) +#if CRYPTOPP_BOOL_X64 && !defined(CRYPTOPP_DISABLE_ASM) +void twofish_encrypt_blocks(TwofishInstance *instance, const uint8* in_blk, uint8* out_blk, uint32 blockCount); +void twofish_decrypt_blocks(TwofishInstance *instance, const uint8* in_blk, uint8* out_blk, uint32 blockCount); +#define twofish_encrypt(instance,in_blk,out_blk) twofish_encrypt_blocks(instance, (const uint8*) in_blk, (uint8*) out_blk, 1) +#define twofish_decrypt(instance,in_blk,out_blk) twofish_decrypt_blocks(instance, (const uint8*) in_blk, (uint8*) out_blk, 1) #else void twofish_encrypt(TwofishInstance *instance, const u4byte in_blk[4], u4byte out_blk[4]); void 
twofish_decrypt(TwofishInstance *instance, const u4byte in_blk[4], u4byte out_blk[4]); #endif diff --git a/src/Crypto/Twofish_x64.S b/src/Crypto/Twofish_x64.S index 1e271691..93f28206 100644 --- a/src/Crypto/Twofish_x64.S +++ b/src/Crypto/Twofish_x64.S @@ -311,4 +311,10 @@ movq %r8, %rdx; popq %rsi .endif ret; + .ifndef __YASM__ +#if defined(__linux__) && defined(__ELF__) +.section .note.GNU-stack,"",%progbits +#endif + .endif + diff --git a/src/Crypto/Twofish_x86.S b/src/Crypto/Twofish_x86.S index e69de29b..46f0e986 100644 --- a/src/Crypto/Twofish_x86.S +++ b/src/Crypto/Twofish_x86.S @@ -0,0 +1,5 @@ + .ifndef __YASM__ +#if defined(__linux__) && defined(__ELF__) +.section .note.GNU-stack,"",%progbits +#endif + .endif
\ No newline at end of file diff --git a/src/Crypto/Whirlpool.c b/src/Crypto/Whirlpool.c index 308f4812..140c7c6f 100644 --- a/src/Crypto/Whirlpool.c +++ b/src/Crypto/Whirlpool.c @@ -642,13 +642,27 @@ static const uint64 Whirlpool_C[8*256+R] = { // Whirlpool basic transformation. Transforms state based on block. void WhirlpoolTransform(uint64 *digest, const uint64 *block) { #if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE +#if defined(__GNUC__) && (CRYPTOPP_GCC_VERSION <= 40407) + /* workaround for gcc 4.4.7 bug under CentOS which causes crash + * in inline assembly. + * This dummy check that is always false since "block" is aligned. + */ + uint64 lb = (uint64) block; + if (lb % 16) + { + TC_THROW_FATAL_EXCEPTION; + } +#endif +#endif + +#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE if (HasISSE()) { #ifdef __GNUC__ #if CRYPTOPP_BOOL_X64 - uint64 workspace[16]; + CRYPTOPP_ALIGN_DATA(16) uint64 workspace[16]; #endif __asm__ __volatile__ ( INTEL_NOPREFIX @@ -879,9 +893,9 @@ static uint64 HashMultipleBlocks(WHIRLPOOL_CTX * const ctx, const uint64 *input, { #if BYTE_ORDER == BIG_ENDIAN WhirlpoolTransform(ctx->state, input); #else - CorrectEndianess(dataBuf, input, 64); + CorrectEndianness(dataBuf, input, 64); WhirlpoolTransform(ctx->state, dataBuf); #endif input += 8; length -= 64; @@ -921,9 +935,9 @@ void WHIRLPOOL_add(const unsigned char * input, return; else { uint64* dataBuf = ctx->data; - byte* data = (byte *)dataBuf; + uint8* data = (uint8 *)dataBuf; num = oldCountLo & 63; if (num != 0) // process left over data { @@ -932,9 +946,8 @@ void WHIRLPOOL_add(const unsigned char * input, memcpy(data+num, input, (size_t) (64-num)); HashMultipleBlocks(ctx, dataBuf, 64); input += (64-num); len -= (64-num); - num = 0; // drop through and do the rest } else { @@ -943,30 +956,37 @@ void WHIRLPOOL_add(const unsigned char * input, } } // now process the input data in blocks of 64 bytes and save the leftovers to ctx->data - if (len >= 64) - { - if (input == data) - { - HashMultipleBlocks(ctx, dataBuf, 64); - return; - } - else if (IsAligned16(input)) - { - uint64 leftOver = HashMultipleBlocks(ctx, (uint64 *)input, len); - input += (len - leftOver); - len = leftOver; - } - else - do - { // copy input first if it's not aligned correctly - memcpy(data, input, 64); - HashMultipleBlocks(ctx, dataBuf, 64); - input+=64; - len-=64; - } while (len >= 64); - } + if (len >= 64) + { + if (input == data) + { + HashMultipleBlocks(ctx, dataBuf, 64); + return; + } + else + { +#ifndef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS + if (IsAligned16(input)) +#endif + { + uint64 leftOver = HashMultipleBlocks(ctx, (uint64*)input, len); + input += (len - leftOver); + len = leftOver; + } +#ifndef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS + else + do + { // copy input first if it's not aligned correctly + memcpy(data, input, 64); + HashMultipleBlocks(ctx, dataBuf, 64); + input += 64; + len -= 64; + } while (len >= 64); +#endif + } + } if (len && data != input) memcpy(data, input, (size_t) len); } @@ -982,9 +1002,9 @@ void WHIRLPOOL_finalize(WHIRLPOOL_CTX * const ctx, { unsigned int num = ctx->countLo & 63; uint64* dataBuf = ctx->data; uint64* stateBuf = ctx->state; - byte* data = (byte *)dataBuf; + uint8* data = (uint8 *)dataBuf; data[num++] = 0x80; if (num <= 32) memset(data+num, 0, 32-num); @@ -994,9 +1014,9 @@ void WHIRLPOOL_finalize(WHIRLPOOL_CTX * const ctx, HashMultipleBlocks(ctx, dataBuf, 64); memset(data, 0, 32); } #if BYTE_ORDER == LITTLE_ENDIAN - CorrectEndianess(dataBuf, dataBuf, 32); + CorrectEndianness(dataBuf, dataBuf, 32); #endif dataBuf[4] = 0; 
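/* Note on the finalization above: Whirlpool stores the message length, in bits,
   in the last 256 bits of the final block (dataBuf[4..7]); the upper words are
   cleared here and the byte counters are shifted left by 3 below to turn them
   into a bit count. */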
dataBuf[5] = 0; @@ -1004,8 +1024,8 @@ void WHIRLPOOL_finalize(WHIRLPOOL_CTX * const ctx, dataBuf[7] = ctx->countLo << 3; WhirlpoolTransform(stateBuf, dataBuf); #if BYTE_ORDER == LITTLE_ENDIAN - CorrectEndianess(stateBuf, stateBuf, 64); + CorrectEndianness(stateBuf, stateBuf, 64); #endif memcpy(result, stateBuf, 64); } diff --git a/src/Crypto/blake2-impl.h b/src/Crypto/blake2-impl.h new file mode 100644 index 00000000..ad9d88fe --- /dev/null +++ b/src/Crypto/blake2-impl.h @@ -0,0 +1,86 @@ +/* + BLAKE2 reference source code package - reference C implementations + + Copyright 2012, Samuel Neves <sneves@dei.uc.pt>. You may use this under the + terms of the CC0, the OpenSSL Licence, or the Apache Public License 2.0, at + your option. The terms of these licenses can be found at: + + - CC0 1.0 Universal : http://creativecommons.org/publicdomain/zero/1.0 + - OpenSSL license : https://www.openssl.org/source/license.html + - Apache 2.0 : http://www.apache.org/licenses/LICENSE-2.0 + + More information about the BLAKE2 hash function can be found at + https://blake2.net. +*/ +#ifndef BLAKE2_IMPL_H +#define BLAKE2_IMPL_H + +#define NATIVE_LITTLE_ENDIAN + +#include <stdint.h> +#include <string.h> + +#if !defined(__cplusplus) && (!defined(__STDC_VERSION__) || __STDC_VERSION__ < 199901L) + #if defined(_MSC_VER) + #define BLAKE2_INLINE __inline + #elif defined(__GNUC__) + #define BLAKE2_INLINE __inline__ + #else + #define BLAKE2_INLINE + #endif +#else + #define BLAKE2_INLINE inline +#endif + +static BLAKE2_INLINE uint32_t load32( const void *src ) +{ +#if defined(NATIVE_LITTLE_ENDIAN) + uint32_t w; + memcpy(&w, src, sizeof w); + return w; +#else + const uint8_t *p = ( const uint8_t * )src; + return (( uint32_t )( p[0] ) << 0) | + (( uint32_t )( p[1] ) << 8) | + (( uint32_t )( p[2] ) << 16) | + (( uint32_t )( p[3] ) << 24) ; +#endif +} + +static BLAKE2_INLINE void store16( void *dst, uint16_t w ) +{ +#if defined(NATIVE_LITTLE_ENDIAN) + memcpy(dst, &w, sizeof w); +#else + uint8_t *p = ( uint8_t * )dst; + *p++ = ( uint8_t )w; w >>= 8; + *p++ = ( uint8_t )w; +#endif +} + +static BLAKE2_INLINE void store32( void *dst, uint32_t w ) +{ +#if defined(NATIVE_LITTLE_ENDIAN) + memcpy(dst, &w, sizeof w); +#else + uint8_t *p = ( uint8_t * )dst; + p[0] = (uint8_t)(w >> 0); + p[1] = (uint8_t)(w >> 8); + p[2] = (uint8_t)(w >> 16); + p[3] = (uint8_t)(w >> 24); +#endif +} + +static BLAKE2_INLINE uint32_t rotr32( const uint32_t w, const unsigned c ) +{ + return ( w >> c ) | ( w << ( 32 - c ) ); +} + +/* prevents compiler optimizing out memset() */ +static BLAKE2_INLINE void secure_zero_memory(void *v, size_t n) +{ + static void *(*const volatile memset_v)(void *, int, size_t) = &memset; + memset_v(v, 0, n); +} + +#endif diff --git a/src/Crypto/blake2.h b/src/Crypto/blake2.h new file mode 100644 index 00000000..490ed43b --- /dev/null +++ b/src/Crypto/blake2.h @@ -0,0 +1,102 @@ +/* + BLAKE2 reference source code package - reference C implementations + + Copyright 2012, Samuel Neves <sneves@dei.uc.pt>. You may use this under the + terms of the CC0, the OpenSSL Licence, or the Apache Public License 2.0, at + your option. The terms of these licenses can be found at: + + - CC0 1.0 Universal : http://creativecommons.org/publicdomain/zero/1.0 + - OpenSSL license : https://www.openssl.org/source/license.html + - Apache 2.0 : http://www.apache.org/licenses/LICENSE-2.0 + + More information about the BLAKE2 hash function can be found at + https://blake2.net. 
+*/ + +/* Adapted for VeraCrypt */ + +#ifndef BLAKE2_H +#define BLAKE2_H +#include "Common/Tcdefs.h" + +#if defined(_MSC_VER) +#ifdef TC_WINDOWS_BOOT +#define BLAKE2_PACKED(x) x +#else +#define BLAKE2_PACKED(x) __pragma(pack(push, 1)) x __pragma(pack(pop)) +#endif + +#else +#define BLAKE2_PACKED(x) x __attribute__((packed)) +#endif + +#if defined(__cplusplus) +extern "C" { +#endif + + enum blake2s_constant + { + BLAKE2S_BLOCKBYTES = 64, + BLAKE2S_OUTBYTES = 32, + BLAKE2S_KEYBYTES = 32, + BLAKE2S_SALTBYTES = 8, + BLAKE2S_PERSONALBYTES = 8 + }; + + typedef struct blake2s_state__ + { + uint32 h[8]; + uint32 t[2]; + uint32 f[2]; + uint8 buf[BLAKE2S_BLOCKBYTES]; + size_t buflen; + size_t outlen; + uint8 last_node; + } blake2s_state; + +#ifdef TC_WINDOWS_BOOT + #pragma pack(1) +#endif + + BLAKE2_PACKED(struct blake2s_param__ + { + uint8 digest_length; /* 1 */ + uint8 key_length; /* 2 */ + uint8 fanout; /* 3 */ + uint8 depth; /* 4 */ + uint32 leaf_length; /* 8 */ + uint32 node_offset; /* 12 */ + uint16 xof_length; /* 14 */ + uint8 node_depth; /* 15 */ + uint8 inner_length; /* 16 */ + /* uint8 reserved[0]; */ + uint8 salt[BLAKE2S_SALTBYTES]; /* 24 */ + uint8 personal[BLAKE2S_PERSONALBYTES]; /* 32 */ + }); + +#ifdef TC_WINDOWS_BOOT + #pragma pack() +#endif + + typedef struct blake2s_param__ blake2s_param; + + + /* Padded structs result in a compile-time error */ + enum { + BLAKE2_DUMMY_1 = 1/(int)(sizeof(blake2s_param) == BLAKE2S_OUTBYTES) + }; + + /* Streaming API */ + void blake2s_init( blake2s_state *S ); + void blake2s_init_param( blake2s_state *S, const blake2s_param *P ); + void blake2s_update( blake2s_state *S, const void *in, size_t inlen ); + int blake2s_final( blake2s_state *S, unsigned char *out ); + + /* Simple API */ + int blake2s( void *out, const void *in, size_t inlen ); + +#if defined(__cplusplus) +} +#endif + +#endif diff --git a/src/Crypto/blake2s-load-sse2.h b/src/Crypto/blake2s-load-sse2.h new file mode 100644 index 00000000..926eedda --- /dev/null +++ b/src/Crypto/blake2s-load-sse2.h @@ -0,0 +1,64 @@ +/* + BLAKE2 reference source code package - optimized C implementations + + Copyright 2012, Samuel Neves <sneves@dei.uc.pt>. You may use this under the + terms of the CC0, the OpenSSL Licence, or the Apache Public License 2.0, at + your option. The terms of these licenses can be found at: + + - CC0 1.0 Universal : http://creativecommons.org/publicdomain/zero/1.0 + - OpenSSL license : https://www.openssl.org/source/license.html + - Apache 2.0 : http://www.apache.org/licenses/LICENSE-2.0 + + More information about the BLAKE2 hash function can be found at + https://blake2.net. 
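A minimal sketch of the BLAKE2s API declared in blake2.h above (hypothetical caller, not part of this patch); it hashes a buffer with the streaming calls and with the equivalent one-shot helper, producing an unkeyed 32-byte digest:

#include <stddef.h>
#include "blake2.h"

static void blake2s_usage_sketch(const unsigned char *msg, size_t len,
                                 unsigned char digest[BLAKE2S_OUTBYTES])
{
	blake2s_state S;

	blake2s_init(&S);              /* sequential, unkeyed, 32-byte output */
	blake2s_update(&S, msg, len);  /* may be called any number of times */
	blake2s_final(&S, digest);     /* returns -1 if already finalized */

	/* one-shot equivalent for a single contiguous buffer */
	blake2s(digest, msg, len);
}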
+*/ + +/* Adapted for VeraCrypt */ + + +#ifndef BLAKE2S_LOAD_SSE2_H +#define BLAKE2S_LOAD_SSE2_H + +#define LOAD_MSG_0_1(buf) buf = _mm_set_epi32(m6,m4,m2,m0) +#define LOAD_MSG_0_2(buf) buf = _mm_set_epi32(m7,m5,m3,m1) +#define LOAD_MSG_0_3(buf) buf = _mm_set_epi32(m12,m10,m8,m14) +#define LOAD_MSG_0_4(buf) buf = _mm_set_epi32(m13,m11,m9,m15) +#define LOAD_MSG_1_1(buf) buf = _mm_set_epi32(m13,m9,m4,m14) +#define LOAD_MSG_1_2(buf) buf = _mm_set_epi32(m6,m15,m8,m10) +#define LOAD_MSG_1_3(buf) buf = _mm_set_epi32(m11,m0,m1,m5) +#define LOAD_MSG_1_4(buf) buf = _mm_set_epi32(m7,m2,m12,m3) +#define LOAD_MSG_2_1(buf) buf = _mm_set_epi32(m15,m5,m12,m11) +#define LOAD_MSG_2_2(buf) buf = _mm_set_epi32(m13,m2,m0,m8) +#define LOAD_MSG_2_3(buf) buf = _mm_set_epi32(m7,m3,m10,m9) +#define LOAD_MSG_2_4(buf) buf = _mm_set_epi32(m1,m6,m14,m4) +#define LOAD_MSG_3_1(buf) buf = _mm_set_epi32(m11,m13,m3,m7) +#define LOAD_MSG_3_2(buf) buf = _mm_set_epi32(m14,m12,m1,m9) +#define LOAD_MSG_3_3(buf) buf = _mm_set_epi32(m4,m5,m2,m15) +#define LOAD_MSG_3_4(buf) buf = _mm_set_epi32(m0,m10,m6,m8) +#define LOAD_MSG_4_1(buf) buf = _mm_set_epi32(m10,m2,m5,m9) +#define LOAD_MSG_4_2(buf) buf = _mm_set_epi32(m15,m4,m7,m0) +#define LOAD_MSG_4_3(buf) buf = _mm_set_epi32(m6,m11,m14,m3) +#define LOAD_MSG_4_4(buf) buf = _mm_set_epi32(m8,m12,m1,m13) +#define LOAD_MSG_5_1(buf) buf = _mm_set_epi32(m8,m0,m6,m2) +#define LOAD_MSG_5_2(buf) buf = _mm_set_epi32(m3,m11,m10,m12) +#define LOAD_MSG_5_3(buf) buf = _mm_set_epi32(m15,m7,m4,m1) +#define LOAD_MSG_5_4(buf) buf = _mm_set_epi32(m14,m5,m13,m9) +#define LOAD_MSG_6_1(buf) buf = _mm_set_epi32(m4,m14,m1,m12) +#define LOAD_MSG_6_2(buf) buf = _mm_set_epi32(m10,m13,m15,m5) +#define LOAD_MSG_6_3(buf) buf = _mm_set_epi32(m9,m6,m0,m8) +#define LOAD_MSG_6_4(buf) buf = _mm_set_epi32(m2,m3,m7,m11) +#define LOAD_MSG_7_1(buf) buf = _mm_set_epi32(m3,m12,m7,m13) +#define LOAD_MSG_7_2(buf) buf = _mm_set_epi32(m9,m1,m14,m11) +#define LOAD_MSG_7_3(buf) buf = _mm_set_epi32(m8,m15,m5,m2) +#define LOAD_MSG_7_4(buf) buf = _mm_set_epi32(m6,m4,m0,m10) +#define LOAD_MSG_8_1(buf) buf = _mm_set_epi32(m0,m11,m14,m6) +#define LOAD_MSG_8_2(buf) buf = _mm_set_epi32(m8,m3,m9,m15) +#define LOAD_MSG_8_3(buf) buf = _mm_set_epi32(m1,m13,m12,m10) +#define LOAD_MSG_8_4(buf) buf = _mm_set_epi32(m4,m7,m2,m5) +#define LOAD_MSG_9_1(buf) buf = _mm_set_epi32(m1,m7,m8,m10) +#define LOAD_MSG_9_2(buf) buf = _mm_set_epi32(m5,m6,m4,m2) +#define LOAD_MSG_9_3(buf) buf = _mm_set_epi32(m3,m9,m15,m13) +#define LOAD_MSG_9_4(buf) buf = _mm_set_epi32(m12,m14,m11,m0) + + +#endif diff --git a/src/Crypto/blake2s-load-sse41.h b/src/Crypto/blake2s-load-sse41.h new file mode 100644 index 00000000..9ac3f52a --- /dev/null +++ b/src/Crypto/blake2s-load-sse41.h @@ -0,0 +1,240 @@ +/* + BLAKE2 reference source code package - optimized C implementations + + Copyright 2012, Samuel Neves <sneves@dei.uc.pt>. You may use this under the + terms of the CC0, the OpenSSL Licence, or the Apache Public License 2.0, at + your option. The terms of these licenses can be found at: + + - CC0 1.0 Universal : http://creativecommons.org/publicdomain/zero/1.0 + - OpenSSL license : https://www.openssl.org/source/license.html + - Apache 2.0 : http://www.apache.org/licenses/LICENSE-2.0 + + More information about the BLAKE2 hash function can be found at + https://blake2.net. 
+*/ + +/* Adapted for VeraCrypt */ + + +#ifndef BLAKE2S_LOAD_SSE41_H +#define BLAKE2S_LOAD_SSE41_H + +#define LOAD_MSG_0_1(buf) \ +buf = TOI(_mm_shuffle_ps(TOF(m0), TOF(m1), _MM_SHUFFLE(2,0,2,0))); + +#define LOAD_MSG_0_2(buf) \ +buf = TOI(_mm_shuffle_ps(TOF(m0), TOF(m1), _MM_SHUFFLE(3,1,3,1))); + +#define LOAD_MSG_0_3(buf) \ +t0 = _mm_shuffle_epi32(m2, _MM_SHUFFLE(3,2,0,1)); \ +t1 = _mm_shuffle_epi32(m3, _MM_SHUFFLE(0,1,3,2)); \ +buf = _mm_blend_epi16(t0, t1, 0xC3); + +#define LOAD_MSG_0_4(buf) \ +t0 = _mm_blend_epi16(t0, t1, 0x3C); \ +buf = _mm_shuffle_epi32(t0, _MM_SHUFFLE(2,3,0,1)); + +#define LOAD_MSG_1_1(buf) \ +t0 = _mm_blend_epi16(m1, m2, 0x0C); \ +t1 = _mm_slli_si128(m3, 4); \ +t2 = _mm_blend_epi16(t0, t1, 0xF0); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(2,1,0,3)); + +#define LOAD_MSG_1_2(buf) \ +t0 = _mm_shuffle_epi32(m2,_MM_SHUFFLE(0,0,2,0)); \ +t1 = _mm_blend_epi16(m1,m3,0xC0); \ +t2 = _mm_blend_epi16(t0, t1, 0xF0); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(2,3,0,1)); + +#define LOAD_MSG_1_3(buf) \ +t0 = _mm_slli_si128(m1, 4); \ +t1 = _mm_blend_epi16(m2, t0, 0x30); \ +t2 = _mm_blend_epi16(m0, t1, 0xF0); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(3,0,1,2)); + +#define LOAD_MSG_1_4(buf) \ +t0 = _mm_unpackhi_epi32(m0,m1); \ +t1 = _mm_slli_si128(m3, 4); \ +t2 = _mm_blend_epi16(t0, t1, 0x0C); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(3,0,1,2)); + +#define LOAD_MSG_2_1(buf) \ +t0 = _mm_unpackhi_epi32(m2,m3); \ +t1 = _mm_blend_epi16(m3,m1,0x0C); \ +t2 = _mm_blend_epi16(t0, t1, 0x0F); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(3,1,0,2)); + +#define LOAD_MSG_2_2(buf) \ +t0 = _mm_unpacklo_epi32(m2,m0); \ +t1 = _mm_blend_epi16(t0, m0, 0xF0); \ +t2 = _mm_slli_si128(m3, 8); \ +buf = _mm_blend_epi16(t1, t2, 0xC0); + +#define LOAD_MSG_2_3(buf) \ +t0 = _mm_blend_epi16(m0, m2, 0x3C); \ +t1 = _mm_srli_si128(m1, 12); \ +t2 = _mm_blend_epi16(t0,t1,0x03); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(0,3,2,1)); + +#define LOAD_MSG_2_4(buf) \ +t0 = _mm_slli_si128(m3, 4); \ +t1 = _mm_blend_epi16(m0, m1, 0x33); \ +t2 = _mm_blend_epi16(t1, t0, 0xC0); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(1,2,3,0)); + +#define LOAD_MSG_3_1(buf) \ +t0 = _mm_unpackhi_epi32(m0,m1); \ +t1 = _mm_unpackhi_epi32(t0, m2); \ +t2 = _mm_blend_epi16(t1, m3, 0x0C); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(3,1,0,2)); + +#define LOAD_MSG_3_2(buf) \ +t0 = _mm_slli_si128(m2, 8); \ +t1 = _mm_blend_epi16(m3,m0,0x0C); \ +t2 = _mm_blend_epi16(t1, t0, 0xC0); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(2,0,1,3)); + +#define LOAD_MSG_3_3(buf) \ +t0 = _mm_blend_epi16(m0,m1,0x0F); \ +t1 = _mm_blend_epi16(t0, m3, 0xC0); \ +buf = _mm_shuffle_epi32(t1, _MM_SHUFFLE(0,1,2,3)); + +#define LOAD_MSG_3_4(buf) \ +t0 = _mm_alignr_epi8(m0, m1, 4); \ +buf = _mm_blend_epi16(t0, m2, 0x33); + +#define LOAD_MSG_4_1(buf) \ +t0 = _mm_unpacklo_epi64(m1,m2); \ +t1 = _mm_unpackhi_epi64(m0,m2); \ +t2 = _mm_blend_epi16(t0,t1,0x33); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(2,0,1,3)); + +#define LOAD_MSG_4_2(buf) \ +t0 = _mm_unpackhi_epi64(m1,m3); \ +t1 = _mm_unpacklo_epi64(m0,m1); \ +buf = _mm_blend_epi16(t0,t1,0x33); + +#define LOAD_MSG_4_3(buf) \ +t0 = _mm_unpackhi_epi64(m3,m1); \ +t1 = _mm_unpackhi_epi64(m2,m0); \ +t2 = _mm_blend_epi16(t1,t0,0x33); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(2,1,0,3)); + +#define LOAD_MSG_4_4(buf) \ +t0 = _mm_blend_epi16(m0,m2,0x03); \ +t1 = _mm_slli_si128(t0, 8); \ +t2 = _mm_blend_epi16(t1,m3,0x0F); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(2,0,3,1)); + +#define LOAD_MSG_5_1(buf) \ +t0 = _mm_unpackhi_epi32(m0,m1); \ +t1 = 
_mm_unpacklo_epi32(m0,m2); \ +buf = _mm_unpacklo_epi64(t0,t1); + +#define LOAD_MSG_5_2(buf) \ +t0 = _mm_srli_si128(m2, 4); \ +t1 = _mm_blend_epi16(m0,m3,0x03); \ +buf = _mm_blend_epi16(t1,t0,0x3C); + +#define LOAD_MSG_5_3(buf) \ +t0 = _mm_blend_epi16(m1,m0,0x0C); \ +t1 = _mm_srli_si128(m3, 4); \ +t2 = _mm_blend_epi16(t0,t1,0x30); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(2,3,0,1)); + +#define LOAD_MSG_5_4(buf) \ +t0 = _mm_unpacklo_epi64(m2,m1); \ +t1 = _mm_shuffle_epi32(m3, _MM_SHUFFLE(2,0,1,0)); \ +t2 = _mm_srli_si128(t0, 4); \ +buf = _mm_blend_epi16(t1,t2,0x33); + +#define LOAD_MSG_6_1(buf) \ +t0 = _mm_slli_si128(m1, 12); \ +t1 = _mm_blend_epi16(m0,m3,0x33); \ +buf = _mm_blend_epi16(t1,t0,0xC0); + +#define LOAD_MSG_6_2(buf) \ +t0 = _mm_blend_epi16(m3,m2,0x30); \ +t1 = _mm_srli_si128(m1, 4); \ +t2 = _mm_blend_epi16(t0,t1,0x03); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(2,1,3,0)); + +#define LOAD_MSG_6_3(buf) \ +t0 = _mm_unpacklo_epi64(m0,m2); \ +t1 = _mm_srli_si128(m1, 4); \ +t2 = _mm_blend_epi16(t0,t1,0x0C); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(3,1,0,2)); + +#define LOAD_MSG_6_4(buf) \ +t0 = _mm_unpackhi_epi32(m1,m2); \ +t1 = _mm_unpackhi_epi64(m0,t0); \ +buf = _mm_shuffle_epi32(t1, _MM_SHUFFLE(0,1,2,3)); + +#define LOAD_MSG_7_1(buf) \ +t0 = _mm_unpackhi_epi32(m0,m1); \ +t1 = _mm_blend_epi16(t0,m3,0x0F); \ +buf = _mm_shuffle_epi32(t1,_MM_SHUFFLE(2,0,3,1)); + +#define LOAD_MSG_7_2(buf) \ +t0 = _mm_blend_epi16(m2,m3,0x30); \ +t1 = _mm_srli_si128(m0,4); \ +t2 = _mm_blend_epi16(t0,t1,0x03); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(1,0,2,3)); + +#define LOAD_MSG_7_3(buf) \ +t0 = _mm_unpackhi_epi64(m0,m3); \ +t1 = _mm_unpacklo_epi64(m1,m2); \ +t2 = _mm_blend_epi16(t0,t1,0x3C); \ +buf = _mm_shuffle_epi32(t2,_MM_SHUFFLE(2,3,1,0)); + +#define LOAD_MSG_7_4(buf) \ +t0 = _mm_unpacklo_epi32(m0,m1); \ +t1 = _mm_unpackhi_epi32(m1,m2); \ +t2 = _mm_unpacklo_epi64(t0,t1); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(2,1,0,3)); + +#define LOAD_MSG_8_1(buf) \ +t0 = _mm_unpackhi_epi32(m1,m3); \ +t1 = _mm_unpacklo_epi64(t0,m0); \ +t2 = _mm_blend_epi16(t1,m2,0xC0); \ +buf = _mm_shufflehi_epi16(t2,_MM_SHUFFLE(1,0,3,2)); + +#define LOAD_MSG_8_2(buf) \ +t0 = _mm_unpackhi_epi32(m0,m3); \ +t1 = _mm_blend_epi16(m2,t0,0xF0); \ +buf = _mm_shuffle_epi32(t1,_MM_SHUFFLE(0,2,1,3)); + +#define LOAD_MSG_8_3(buf) \ +t0 = _mm_unpacklo_epi64(m0,m3); \ +t1 = _mm_srli_si128(m2,8); \ +t2 = _mm_blend_epi16(t0,t1,0x03); \ +buf = _mm_shuffle_epi32(t2, _MM_SHUFFLE(1,3,2,0)); + +#define LOAD_MSG_8_4(buf) \ +t0 = _mm_blend_epi16(m1,m0,0x30); \ +buf = _mm_shuffle_epi32(t0,_MM_SHUFFLE(0,3,2,1)); + +#define LOAD_MSG_9_1(buf) \ +t0 = _mm_blend_epi16(m0,m2,0x03); \ +t1 = _mm_blend_epi16(m1,m2,0x30); \ +t2 = _mm_blend_epi16(t1,t0,0x0F); \ +buf = _mm_shuffle_epi32(t2,_MM_SHUFFLE(1,3,0,2)); + +#define LOAD_MSG_9_2(buf) \ +t0 = _mm_slli_si128(m0,4); \ +t1 = _mm_blend_epi16(m1,t0,0xC0); \ +buf = _mm_shuffle_epi32(t1,_MM_SHUFFLE(1,2,0,3)); + +#define LOAD_MSG_9_3(buf) \ +t0 = _mm_unpackhi_epi32(m0,m3); \ +t1 = _mm_unpacklo_epi32(m2,m3); \ +t2 = _mm_unpackhi_epi64(t0,t1); \ +buf = _mm_shuffle_epi32(t2,_MM_SHUFFLE(0,2,1,3)); + +#define LOAD_MSG_9_4(buf) \ +t0 = _mm_blend_epi16(m3,m2,0xC0); \ +t1 = _mm_unpacklo_epi32(m0,m3); \ +t2 = _mm_blend_epi16(t0,t1,0x0F); \ +buf = _mm_shuffle_epi32(t2,_MM_SHUFFLE(1,2,3,0)); + +#endif diff --git a/src/Crypto/blake2s-ref.c b/src/Crypto/blake2s-ref.c new file mode 100644 index 00000000..435630b9 --- /dev/null +++ b/src/Crypto/blake2s-ref.c @@ -0,0 +1,336 @@ +/* + BLAKE2 reference source code package - 
reference C implementations + + Copyright 2012, Samuel Neves <sneves@dei.uc.pt>. You may use this under the + terms of the CC0, the OpenSSL Licence, or the Apache Public License 2.0, at + your option. The terms of these licenses can be found at: + + - CC0 1.0 Universal : http://creativecommons.org/publicdomain/zero/1.0 + - OpenSSL license : https://www.openssl.org/source/license.html + - Apache 2.0 : http://www.apache.org/licenses/LICENSE-2.0 + + More information about the BLAKE2 hash function can be found at + https://blake2.net. +*/ + +/* Adapted for VeraCrypt */ + +#include <stdlib.h> +#include <STRING.H> + +#include "blake2.h" + +#pragma optimize ("tl", on) + +#pragma intrinsic(_lrotr) +#pragma intrinsic( memcpy ) +#pragma intrinsic( memset ) + +static const uint32 blake2s_IV[8] = +{ + 0x6A09E667UL, 0xBB67AE85UL, 0x3C6EF372UL, 0xA54FF53AUL, + 0x510E527FUL, 0x9B05688CUL, 0x1F83D9ABUL, 0x5BE0CD19UL +}; + +static const uint8 blake2s_sigma[10][16] = +{ + { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 } , + { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 } , + { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 } , + { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 } , + { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 } , + { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 } , + { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 } , + { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 } , + { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 } , + { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13 , 0 } , +}; + +static void blake2s_set_lastnode( blake2s_state *S ) +{ + S->f[1] = (uint32)-1; +} + +/* Some helper functions, not necessarily useful */ +static int blake2s_is_lastblock( const blake2s_state *S ) +{ + return S->f[0] != 0; +} + +static void blake2s_set_lastblock( blake2s_state *S ) +{ + if( S->last_node ) blake2s_set_lastnode( S ); + + S->f[0] = (uint32)-1; +} + +static void blake2s_increment_counter( blake2s_state *S, const uint32 inc ) +{ + S->t[0] += inc; + S->t[1] += ( S->t[0] < inc ); +} + +static void blake2s_init0( blake2s_state *S ) +{ + size_t i; + memset( S, 0, sizeof( blake2s_state ) ); + + for( i = 0; i < 8; ++i ) S->h[i] = blake2s_IV[i]; +} + +/* init2 xors IV with input parameter block */ +void blake2s_init_param( blake2s_state *S, const blake2s_param *P ) +{ + const unsigned char *p = ( const unsigned char * )( P ); + size_t i; + uint32 w; + + blake2s_init0( S ); + + /* IV XOR ParamBlock */ + for( i = 0; i < 8; ++i ) + { + memcpy (&w, &p[i * 4], sizeof (w)); + S->h[i] ^= w; + } + + S->outlen = P->digest_length; +} + + +/* Sequential blake2s initialization */ +void blake2s_init( blake2s_state *S ) +{ + blake2s_param P[1]; + + P->digest_length = 32; + P->key_length = 0; + P->fanout = 1; + P->depth = 1; + P->leaf_length = 0; + P->node_offset = 0; + P->xof_length = 0; + P->node_depth = 0; + P->inner_length = 0; + /* memset(P->reserved, 0, sizeof(P->reserved) ); */ + memset( P->salt, 0, sizeof( P->salt ) ); + memset( P->personal, 0, sizeof( P->personal ) ); + blake2s_init_param( S, P ); +} + +#ifndef TC_MINIMIZE_CODE_SIZE +#define G(r,i,a,b,c,d) \ + do { \ + a = a + b + m[blake2s_sigma[r][2*i+0]]; \ + d = _lrotr(d ^ a, 16); \ + c = c + d; \ + b = _lrotr(b ^ c, 12); \ + a = a + b + m[blake2s_sigma[r][2*i+1]]; \ + d = _lrotr(d ^ a, 8); \ + c = c + d; \ + b = _lrotr(b ^ c, 7); \ + } while(0) + +#define ROUND(r) \ + do { \ + G(r,0,v[ 0],v[ 4],v[ 8],v[12]); \ + G(r,1,v[ 1],v[ 5],v[ 9],v[13]); \ + G(r,2,v[ 2],v[ 6],v[10],v[14]); \ 
+ G(r,3,v[ 3],v[ 7],v[11],v[15]); \ + G(r,4,v[ 0],v[ 5],v[10],v[15]); \ + G(r,5,v[ 1],v[ 6],v[11],v[12]); \ + G(r,6,v[ 2],v[ 7],v[ 8],v[13]); \ + G(r,7,v[ 3],v[ 4],v[ 9],v[14]); \ + } while(0) +#else +#define G_BASE(r,i,a,b,c,d) \ + do { \ + v[a] = v[a] + v[b] + m[blake2s_sigma[r][2*i+0]]; \ + v[d] = _lrotr(v[d] ^ v[a], 16); \ + v[c] = v[c] + v[d]; \ + v[b] = _lrotr(v[b] ^ v[c], 12); \ + v[a] = v[a] + v[b] + m[blake2s_sigma[r][2*i+1]]; \ + v[d] = _lrotr(v[d] ^ v[a], 8); \ + v[c] = v[c] + v[d]; \ + v[b] = _lrotr(v[b] ^ v[c], 7); \ + } while(0) + +static void G(unsigned char r, unsigned char i, uint32* m, uint32* v, unsigned char a, unsigned char b, unsigned char c, unsigned char d) +{ + G_BASE(r,i,a,b,c,d); +} + +static void round_base (unsigned char r, uint32* m, uint32* v) +{ + G(r,0,m,v, 0, 4, 8, 12); + G(r,1,m,v, 1, 5, 9,13); + G(r,2,m,v, 2, 6,10,14); + G(r,3,m,v, 3, 7,11,15); + G(r,4,m,v, 0, 5,10,15); + G(r,5,m,v, 1, 6,11,12); + G(r,6,m,v, 2, 7, 8,13); + G(r,7,m,v, 3, 4, 9,14); +} + +#define ROUND(r) round_base(r,m,v) +#endif +static void blake2s_compress( blake2s_state *S, const uint8 in[BLAKE2S_BLOCKBYTES] ) +{ + uint32 m[16]; + uint32 v[16]; + int i; + + for( i = 0; i < 16; ++i ) { + memcpy (&m[i], in + i * sizeof( m[i] ), sizeof(uint32)); + } + + for( i = 0; i < 8; ++i ) { + v[i] = S->h[i]; + } + + v[ 8] = blake2s_IV[0]; + v[ 9] = blake2s_IV[1]; + v[10] = blake2s_IV[2]; + v[11] = blake2s_IV[3]; + v[12] = S->t[0] ^ blake2s_IV[4]; + v[13] = S->t[1] ^ blake2s_IV[5]; + v[14] = S->f[0] ^ blake2s_IV[6]; + v[15] = S->f[1] ^ blake2s_IV[7]; + + ROUND( 0 ); + ROUND( 1 ); + ROUND( 2 ); + ROUND( 3 ); + ROUND( 4 ); + ROUND( 5 ); + ROUND( 6 ); + ROUND( 7 ); + ROUND( 8 ); + ROUND( 9 ); + + for( i = 0; i < 8; ++i ) { + S->h[i] = S->h[i] ^ v[i] ^ v[i + 8]; + } +} + +#undef G +#undef ROUND + +void blake2s_update( blake2s_state *S, const void *pin, size_t inlen ) +{ + const unsigned char * in = (const unsigned char *)pin; + if( inlen > 0 ) + { + size_t left = S->buflen; + size_t fill = BLAKE2S_BLOCKBYTES - left; + if( inlen > fill ) + { + S->buflen = 0; + memcpy( S->buf + left, in, fill ); /* Fill buffer */ + blake2s_increment_counter( S, BLAKE2S_BLOCKBYTES ); + blake2s_compress( S, S->buf ); /* Compress */ + in += fill; inlen -= fill; + while(inlen > BLAKE2S_BLOCKBYTES) { + blake2s_increment_counter(S, BLAKE2S_BLOCKBYTES); + blake2s_compress( S, in ); + in += BLAKE2S_BLOCKBYTES; + inlen -= BLAKE2S_BLOCKBYTES; + } + } + memcpy( S->buf + S->buflen, in, inlen ); + S->buflen += inlen; + } +} + +int blake2s_final( blake2s_state *S, unsigned char *out ) +{ + int i; + + if( blake2s_is_lastblock( S ) ) + return -1; + + blake2s_increment_counter( S, ( uint32 )S->buflen ); + blake2s_set_lastblock( S ); + memset( S->buf + S->buflen, 0, BLAKE2S_BLOCKBYTES - S->buflen ); /* Padding */ + blake2s_compress( S, S->buf ); + + for( i = 0; i < 8; ++i ) /* Output full hash to temp buffer */ + memcpy( out + sizeof( S->h[i] ) * i, &S->h[i], sizeof(uint32) ); + + return 0; +} + +#if defined(SUPERCOP) +int crypto_hash( unsigned char *out, unsigned char *in, unsigned long long inlen ) +{ + return blake2s( out, BLAKE2S_OUTBYTES, in, inlen, NULL, 0 ); +} +#endif + +#if defined(BLAKE2S_SELFTEST) +#include <string.h> +#include "blake2-kat.h" +int main( void ) +{ + uint8 key[BLAKE2S_KEYBYTES]; + uint8 buf[BLAKE2_KAT_LENGTH]; + size_t i, step; + + for( i = 0; i < BLAKE2S_KEYBYTES; ++i ) + key[i] = ( uint8 )i; + + for( i = 0; i < BLAKE2_KAT_LENGTH; ++i ) + buf[i] = ( uint8 )i; + + /* Test simple API */ + for( i = 0; i < 
BLAKE2_KAT_LENGTH; ++i ) + { + uint8 hash[BLAKE2S_OUTBYTES]; + blake2s( hash, BLAKE2S_OUTBYTES, buf, i, key, BLAKE2S_KEYBYTES ); + + if( 0 != memcmp( hash, blake2s_keyed_kat[i], BLAKE2S_OUTBYTES ) ) + { + goto fail; + } + } + + /* Test streaming API */ + for(step = 1; step < BLAKE2S_BLOCKBYTES; ++step) { + for (i = 0; i < BLAKE2_KAT_LENGTH; ++i) { + uint8 hash[BLAKE2S_OUTBYTES]; + blake2s_state S; + uint8 * p = buf; + size_t mlen = i; + int err = 0; + + if( (err = blake2s_init_key(&S, BLAKE2S_OUTBYTES, key, BLAKE2S_KEYBYTES)) < 0 ) { + goto fail; + } + + while (mlen >= step) { + if ( (err = blake2s_update(&S, p, step)) < 0 ) { + goto fail; + } + mlen -= step; + p += step; + } + if ( (err = blake2s_update(&S, p, mlen)) < 0) { + goto fail; + } + if ( (err = blake2s_final(&S, hash, BLAKE2S_OUTBYTES)) < 0) { + goto fail; + } + + if (0 != memcmp(hash, blake2s_keyed_kat[i], BLAKE2S_OUTBYTES)) { + goto fail; + } + } + } + + puts( "ok" ); + return 0; +fail: + puts("error"); + return -1; +} +#endif diff --git a/src/Crypto/blake2s-round.h b/src/Crypto/blake2s-round.h new file mode 100644 index 00000000..590540bb --- /dev/null +++ b/src/Crypto/blake2s-round.h @@ -0,0 +1,159 @@ +/* + BLAKE2 reference source code package - optimized C implementations + + Copyright 2012, Samuel Neves <sneves@dei.uc.pt>. You may use this under the + terms of the CC0, the OpenSSL Licence, or the Apache Public License 2.0, at + your option. The terms of these licenses can be found at: + + - CC0 1.0 Universal : http://creativecommons.org/publicdomain/zero/1.0 + - OpenSSL license : https://www.openssl.org/source/license.html + - Apache 2.0 : http://www.apache.org/licenses/LICENSE-2.0 + + More information about the BLAKE2 hash function can be found at + https://blake2.net. +*/ + +/* Adapted for VeraCrypt */ + + +#ifndef BLAKE2S_ROUND_H +#define BLAKE2S_ROUND_H + +#define LOADU(p) _mm_loadu_si128( (const __m128i *)(p) ) +#define STOREU(p,r) _mm_storeu_si128((__m128i *)(p), r) + +#define TOF(reg) _mm_castsi128_ps((reg)) +#define TOI(reg) _mm_castps_si128((reg)) + +#define LIKELY(x) __builtin_expect((x),1) + + +/* Microarchitecture-specific macros */ +#ifndef HAVE_XOP +#ifdef HAVE_SSSE3 +#define _mm_roti_epi32(r, c) ( \ + (8==-(c)) ? _mm_shuffle_epi8(r,r8) \ + : (16==-(c)) ? _mm_shuffle_epi8(r,r16) \ + : _mm_xor_si128(_mm_srli_epi32( (r), -(c) ),_mm_slli_epi32( (r), 32-(-(c)) )) ) +#else +#define _mm_roti_epi32(r, c) _mm_xor_si128(_mm_srli_epi32( (r), -(c) ),_mm_slli_epi32( (r), 32-(-(c)) )) +#endif +#else +/* ... 
*/ +#endif + + +#define G1(row1,row2,row3,row4,buf) \ + row1 = _mm_add_epi32( _mm_add_epi32( row1, buf), row2 ); \ + row4 = _mm_xor_si128( row4, row1 ); \ + row4 = _mm_roti_epi32(row4, -16); \ + row3 = _mm_add_epi32( row3, row4 ); \ + row2 = _mm_xor_si128( row2, row3 ); \ + row2 = _mm_roti_epi32(row2, -12); + +#define G2(row1,row2,row3,row4,buf) \ + row1 = _mm_add_epi32( _mm_add_epi32( row1, buf), row2 ); \ + row4 = _mm_xor_si128( row4, row1 ); \ + row4 = _mm_roti_epi32(row4, -8); \ + row3 = _mm_add_epi32( row3, row4 ); \ + row2 = _mm_xor_si128( row2, row3 ); \ + row2 = _mm_roti_epi32(row2, -7); + +#define DIAGONALIZE(row1,row2,row3,row4) \ + row1 = _mm_shuffle_epi32( row1, _MM_SHUFFLE(2,1,0,3) ); \ + row4 = _mm_shuffle_epi32( row4, _MM_SHUFFLE(1,0,3,2) ); \ + row3 = _mm_shuffle_epi32( row3, _MM_SHUFFLE(0,3,2,1) ); + +#define UNDIAGONALIZE(row1,row2,row3,row4) \ + row1 = _mm_shuffle_epi32( row1, _MM_SHUFFLE(0,3,2,1) ); \ + row4 = _mm_shuffle_epi32( row4, _MM_SHUFFLE(1,0,3,2) ); \ + row3 = _mm_shuffle_epi32( row3, _MM_SHUFFLE(2,1,0,3) ); + +#if defined(HAVE_XOP) +#include "blake2s-load-xop.h" +#elif defined(HAVE_SSE41) +#include "blake2s-load-sse41.h" +#else +#include "blake2s-load-sse2.h" +#endif + +#define ROUND(r) \ + LOAD_MSG_ ##r ##_1(buf1); \ + G1(row1,row2,row3,row4,buf1); \ + LOAD_MSG_ ##r ##_2(buf2); \ + G2(row1,row2,row3,row4,buf2); \ + DIAGONALIZE(row1,row2,row3,row4); \ + LOAD_MSG_ ##r ##_3(buf3); \ + G1(row1,row2,row3,row4,buf3); \ + LOAD_MSG_ ##r ##_4(buf4); \ + G2(row1,row2,row3,row4,buf4); \ + UNDIAGONALIZE(row1,row2,row3,row4); \ + +// load32 is always called in SSE case which implies little endian +#define load32(x) *((uint32*) (x)) + +extern const uint32 blake2s_IV[8]; + +#if defined(HAVE_SSE41) +void blake2s_compress_sse41( blake2s_state *S, const uint8 block[BLAKE2S_BLOCKBYTES] ) +#elif defined (HAVE_SSSE3) +void blake2s_compress_ssse3( blake2s_state *S, const uint8 block[BLAKE2S_BLOCKBYTES] ) +#else +void blake2s_compress_sse2( blake2s_state *S, const uint8 block[BLAKE2S_BLOCKBYTES] ) +#endif +{ + __m128i row1, row2, row3, row4; + __m128i buf1, buf2, buf3, buf4; +#if defined(HAVE_SSE41) + __m128i t0, t1; +#if !defined(HAVE_XOP) + __m128i t2; +#endif +#endif + __m128i ff0, ff1; +#if defined(HAVE_SSSE3) && !defined(HAVE_XOP) + const __m128i r8 = _mm_set_epi8( 12, 15, 14, 13, 8, 11, 10, 9, 4, 7, 6, 5, 0, 3, 2, 1 ); + const __m128i r16 = _mm_set_epi8( 13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2 ); +#endif +#if defined(HAVE_SSE41) + const __m128i m0 = LOADU( block + 00 ); + const __m128i m1 = LOADU( block + 16 ); + const __m128i m2 = LOADU( block + 32 ); + const __m128i m3 = LOADU( block + 48 ); +#else + const uint32 m0 = load32(block + 0 * sizeof(uint32)); + const uint32 m1 = load32(block + 1 * sizeof(uint32)); + const uint32 m2 = load32(block + 2 * sizeof(uint32)); + const uint32 m3 = load32(block + 3 * sizeof(uint32)); + const uint32 m4 = load32(block + 4 * sizeof(uint32)); + const uint32 m5 = load32(block + 5 * sizeof(uint32)); + const uint32 m6 = load32(block + 6 * sizeof(uint32)); + const uint32 m7 = load32(block + 7 * sizeof(uint32)); + const uint32 m8 = load32(block + 8 * sizeof(uint32)); + const uint32 m9 = load32(block + 9 * sizeof(uint32)); + const uint32 m10 = load32(block + 10 * sizeof(uint32)); + const uint32 m11 = load32(block + 11 * sizeof(uint32)); + const uint32 m12 = load32(block + 12 * sizeof(uint32)); + const uint32 m13 = load32(block + 13 * sizeof(uint32)); + const uint32 m14 = load32(block + 14 * sizeof(uint32)); + const uint32 m15 = 
load32(block + 15 * sizeof(uint32)); +#endif + row1 = ff0 = LOADU( &S->h[0] ); + row2 = ff1 = LOADU( &S->h[4] ); + row3 = _mm_loadu_si128( (__m128i const *)&blake2s_IV[0] ); + row4 = _mm_xor_si128( _mm_loadu_si128( (__m128i const *)&blake2s_IV[4] ), LOADU( &S->t[0] ) ); + ROUND( 0 ); + ROUND( 1 ); + ROUND( 2 ); + ROUND( 3 ); + ROUND( 4 ); + ROUND( 5 ); + ROUND( 6 ); + ROUND( 7 ); + ROUND( 8 ); + ROUND( 9 ); + STOREU( &S->h[0], _mm_xor_si128( ff0, _mm_xor_si128( row1, row3 ) ) ); + STOREU( &S->h[4], _mm_xor_si128( ff1, _mm_xor_si128( row2, row4 ) ) ); +} + +#endif diff --git a/src/Crypto/blake2s.c b/src/Crypto/blake2s.c new file mode 100644 index 00000000..9850cae1 --- /dev/null +++ b/src/Crypto/blake2s.c @@ -0,0 +1,349 @@ +/* + BLAKE2 reference source code package - optimized C implementations + + Copyright 2012, Samuel Neves <sneves@dei.uc.pt>. You may use this under the + terms of the CC0, the OpenSSL Licence, or the Apache Public License 2.0, at + your option. The terms of these licenses can be found at: + + - CC0 1.0 Universal : http://creativecommons.org/publicdomain/zero/1.0 + - OpenSSL license : https://www.openssl.org/source/license.html + - Apache 2.0 : http://www.apache.org/licenses/LICENSE-2.0 + + More information about the BLAKE2 hash function can be found at + https://blake2.net. +*/ + +/* Adapted for VeraCrypt */ + +#include "blake2.h" +#include "Common/Endian.h" +#include "Crypto/config.h" +#include "Crypto/cpu.h" +#include "Crypto/misc.h" + +// load32 is always called in SSE case which implies little endian +#define load32(x) *((uint32*) (x)) + +const uint32 blake2s_IV[8] = +{ + 0x6A09E667UL, 0xBB67AE85UL, 0x3C6EF372UL, 0xA54FF53AUL, + 0x510E527FUL, 0x9B05688CUL, 0x1F83D9ABUL, 0x5BE0CD19UL +}; + +static const uint8 blake2s_sigma[10][16] = +{ + { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 } , + { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 } , + { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 } , + { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 } , + { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 } , + { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 } , + { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 } , + { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 } , + { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 } , + { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13 , 0 } , +}; + +/* Some helper functions */ +#define blake2s_set_lastnode(S) S->f[1] = (uint32)-1; + +#define blake2s_is_lastblock(S) (S->f[0] != 0) + +#define blake2s_set_lastblock(S) { \ + if( S->last_node ) blake2s_set_lastnode( S ); \ + S->f[0] = (uint32)-1; \ + } + +#define blake2s_increment_counter(S,inc) { \ + uint64 t = ( (( uint64 )S->t[1]) << 32 ) | S->t[0]; \ + t += (inc); \ + S->t[0] = ( uint32 )( t ); \ + S->t[1] = ( uint32 )( t >> 32 ); \ + } + +/* init2 xors IV with input parameter block */ +void blake2s_init_param( blake2s_state *S, const blake2s_param *P ) +{ + size_t i; + /*blake2s_init0( S ); */ + const uint8 * v = ( const uint8 * )( blake2s_IV ); + const uint8 * p = ( const uint8 * )( P ); + uint8 * h = ( uint8 * )( S->h ); + /* IV XOR ParamBlock */ + memset( S, 0, sizeof( blake2s_state ) ); + + for( i = 0; i < BLAKE2S_OUTBYTES; ++i ) h[i] = v[i] ^ p[i]; + + S->outlen = P->digest_length; +} + + +#define G(r,i,a,b,c,d) \ + do { \ + a = a + b + m[blake2s_sigma[r][2*i+0]]; \ + d = rotr32(d ^ a, 16); \ + c = c + d; \ + b = rotr32(b ^ c, 12); \ + a = a + b + m[blake2s_sigma[r][2*i+1]]; \ + d = rotr32(d ^ a, 8); \ + c = c + d; \ + b 
= rotr32(b ^ c, 7); \ + } while(0) + +#define ROUND(r) \ + do { \ + G(r,0,v[ 0],v[ 4],v[ 8],v[12]); \ + G(r,1,v[ 1],v[ 5],v[ 9],v[13]); \ + G(r,2,v[ 2],v[ 6],v[10],v[14]); \ + G(r,3,v[ 3],v[ 7],v[11],v[15]); \ + G(r,4,v[ 0],v[ 5],v[10],v[15]); \ + G(r,5,v[ 1],v[ 6],v[11],v[12]); \ + G(r,6,v[ 2],v[ 7],v[ 8],v[13]); \ + G(r,7,v[ 3],v[ 4],v[ 9],v[14]); \ + } while(0) + +typedef void (*blake2s_compressFn)( blake2s_state *S, const uint8 block[BLAKE2S_BLOCKBYTES] ); + +blake2s_compressFn blake2s_compress_func = NULL; +#if CRYPTOPP_BOOL_X64 || CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 +extern int blake2s_has_sse2(); +extern int blake2s_has_ssse3(); +extern int blake2s_has_sse41(); +extern void blake2s_compress_sse2( blake2s_state *S, const uint8 block[BLAKE2S_BLOCKBYTES] ); +extern void blake2s_compress_ssse3( blake2s_state *S, const uint8 block[BLAKE2S_BLOCKBYTES] ); +extern void blake2s_compress_sse41( blake2s_state *S, const uint8 block[BLAKE2S_BLOCKBYTES] ); +#endif + + +static void blake2s_compress_std( blake2s_state *S, const uint8 in[BLAKE2S_BLOCKBYTES] ) +{ + uint32 m[16]; + uint32 v[16]; + size_t i; + + for( i = 0; i < 16; ++i ) { + m[i] = *((uint32*) (in + i * sizeof( m[i] ))); + } + + for( i = 0; i < 8; ++i ) { + v[i] = S->h[i]; + } + + v[ 8] = blake2s_IV[0]; + v[ 9] = blake2s_IV[1]; + v[10] = blake2s_IV[2]; + v[11] = blake2s_IV[3]; + v[12] = S->t[0] ^ blake2s_IV[4]; + v[13] = S->t[1] ^ blake2s_IV[5]; + v[14] = S->f[0] ^ blake2s_IV[6]; + v[15] = S->f[1] ^ blake2s_IV[7]; + + ROUND( 0 ); + ROUND( 1 ); + ROUND( 2 ); + ROUND( 3 ); + ROUND( 4 ); + ROUND( 5 ); + ROUND( 6 ); + ROUND( 7 ); + ROUND( 8 ); + ROUND( 9 ); + + for( i = 0; i < 8; ++i ) { + S->h[i] = S->h[i] ^ v[i] ^ v[i + 8]; + } +} + +#undef G +#undef ROUND + + +/* Some sort of default parameter block initialization, for sequential blake2s */ +void blake2s_init( blake2s_state *S ) +{ + blake2s_param P[1]; + + P->digest_length = BLAKE2S_OUTBYTES; + P->key_length = 0; + P->fanout = 1; + P->depth = 1; + P->leaf_length = 0; + P->node_offset = 0; + P->xof_length = 0; + P->node_depth = 0; + P->inner_length = 0; + /* memset(P->reserved, 0, sizeof(P->reserved) ); */ + memset( P->salt, 0, sizeof( P->salt ) ); + memset( P->personal, 0, sizeof( P->personal ) ); + + blake2s_init_param( S, P ); + + if (!blake2s_compress_func) + { +#if CRYPTOPP_BOOL_X64 || CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 + if (HasSSE2() && blake2s_has_sse2()) + { + if (HasSSE41() && blake2s_has_sse41()) + { + blake2s_compress_func = blake2s_compress_sse41; + } + else + if (HasSSSE3() && blake2s_has_ssse3()) + { + blake2s_compress_func = blake2s_compress_ssse3; + } + else + blake2s_compress_func = blake2s_compress_sse2; + } + else +#endif + blake2s_compress_func = blake2s_compress_std; + } +} + +void blake2s_update( blake2s_state *S, const void *pin, size_t inlen ) +{ + const unsigned char * in = (const unsigned char *)pin; + if( inlen > 0 ) + { + size_t left = S->buflen; + size_t fill = BLAKE2S_BLOCKBYTES - left; + if( inlen > fill ) + { + S->buflen = 0; + memcpy( S->buf + left, in, fill ); /* Fill buffer */ + blake2s_increment_counter( S, BLAKE2S_BLOCKBYTES ); + blake2s_compress_func( S, S->buf ); /* Compress */ + in += fill; inlen -= fill; + while(inlen > BLAKE2S_BLOCKBYTES) { + blake2s_increment_counter(S, BLAKE2S_BLOCKBYTES); + blake2s_compress_func( S, in ); + in += BLAKE2S_BLOCKBYTES; + inlen -= BLAKE2S_BLOCKBYTES; + } + } + memcpy( S->buf + S->buflen, in, inlen ); + S->buflen += inlen; + } +} + +int blake2s_final( blake2s_state *S, unsigned char *out ) +{ + size_t i; + 
+ if( blake2s_is_lastblock( S ) ) + return -1; + + blake2s_increment_counter( S, (uint32)S->buflen ); + blake2s_set_lastblock( S ); + memset( S->buf + S->buflen, 0, BLAKE2S_BLOCKBYTES - S->buflen ); /* Padding */ + blake2s_compress_func( S, S->buf ); + + for( i = 0; i < 8; ++i ) /* Output full hash to temp buffer */ + { +#if BYTE_ORDER == LITTLE_ENDIAN + *((uint32*) out) = S->h[i]; +#else + uint32 w = S->h[i] ; + out[0] = (uint8)(w >> 0); + out[1] = (uint8)(w >> 8); + out[2] = (uint8)(w >> 16); + out[3] = (uint8)(w >> 24); +#endif + out += sizeof (uint32); + } + + return 0; +} + +/* inlen, at least, should be uint64. Others can be size_t. */ +int blake2s( void *out, const void *in, size_t inlen) +{ + blake2s_state S[1]; + + /* Verify parameters */ + if ( NULL == in && inlen > 0 ) return -1; + + if ( NULL == out ) return -1; + + blake2s_init( S ); + + blake2s_update( S, ( const uint8 * )in, inlen ); + blake2s_final( S, (unsigned char*) out ); + return 0; +} + +#if defined(SUPERCOP) +int crypto_hash( unsigned char *out, unsigned char *in, unsigned long long inlen ) +{ + return blake2s( out, BLAKE2S_OUTBYTES, in, inlen, NULL, 0 ); +} +#endif + +#if defined(BLAKE2S_SELFTEST) +#include <string.h> +#include "blake2-kat.h" +int main( void ) +{ + uint8 key[BLAKE2S_KEYBYTES]; + uint8 buf[BLAKE2_KAT_LENGTH]; + size_t i, step; + + for( i = 0; i < BLAKE2S_KEYBYTES; ++i ) + key[i] = ( uint8 )i; + + for( i = 0; i < BLAKE2_KAT_LENGTH; ++i ) + buf[i] = ( uint8 )i; + + /* Test simple API */ + for( i = 0; i < BLAKE2_KAT_LENGTH; ++i ) + { + uint8 hash[BLAKE2S_OUTBYTES]; + blake2s( hash, BLAKE2S_OUTBYTES, buf, i, key, BLAKE2S_KEYBYTES ); + + if( 0 != memcmp( hash, blake2s_keyed_kat[i], BLAKE2S_OUTBYTES ) ) + { + goto fail; + } + } + + /* Test streaming API */ + for(step = 1; step < BLAKE2S_BLOCKBYTES; ++step) { + for (i = 0; i < BLAKE2_KAT_LENGTH; ++i) { + uint8 hash[BLAKE2S_OUTBYTES]; + blake2s_state S; + uint8 * p = buf; + size_t mlen = i; + int err = 0; + + if( (err = blake2s_init_key(&S, BLAKE2S_OUTBYTES, key, BLAKE2S_KEYBYTES)) < 0 ) { + goto fail; + } + + while (mlen >= step) { + if ( (err = blake2s_update(&S, p, step)) < 0 ) { + goto fail; + } + mlen -= step; + p += step; + } + if ( (err = blake2s_update(&S, p, mlen)) < 0) { + goto fail; + } + if ( (err = blake2s_final(&S, hash, BLAKE2S_OUTBYTES)) < 0) { + goto fail; + } + + if (0 != memcmp(hash, blake2s_keyed_kat[i], BLAKE2S_OUTBYTES)) { + goto fail; + } + } + } + + puts( "ok" ); + return 0; +fail: + puts("error"); + return -1; +} +#endif diff --git a/src/Crypto/blake2s_SSE2.c b/src/Crypto/blake2s_SSE2.c new file mode 100644 index 00000000..41ea0a6c --- /dev/null +++ b/src/Crypto/blake2s_SSE2.c @@ -0,0 +1,39 @@ +/* + BLAKE2 reference source code package - optimized C implementations + + Copyright 2012, Samuel Neves <sneves@dei.uc.pt>. You may use this under the + terms of the CC0, the OpenSSL Licence, or the Apache Public License 2.0, at + your option. The terms of these licenses can be found at: + + - CC0 1.0 Universal : http://creativecommons.org/publicdomain/zero/1.0 + - OpenSSL license : https://www.openssl.org/source/license.html + - Apache 2.0 : http://www.apache.org/licenses/LICENSE-2.0 + + More information about the BLAKE2 hash function can be found at + https://blake2.net. 
+*/ + +/* Adapted for VeraCrypt */ + +#include "blake2.h" +#include "Common/Endian.h" +#include "Crypto/config.h" +#include "Crypto/cpu.h" +#include "Crypto/misc.h" + +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE + +#include "blake2s-round.h" + +int blake2s_has_sse2() +{ + return 1; +} + +#else +int blake2s_has_sse2() +{ + return 0; +} + +#endif diff --git a/src/Crypto/blake2s_SSE41.c b/src/Crypto/blake2s_SSE41.c new file mode 100644 index 00000000..99e394c1 --- /dev/null +++ b/src/Crypto/blake2s_SSE41.c @@ -0,0 +1,52 @@ +/* + BLAKE2 reference source code package - optimized C implementations + + Copyright 2012, Samuel Neves <sneves@dei.uc.pt>. You may use this under the + terms of the CC0, the OpenSSL Licence, or the Apache Public License 2.0, at + your option. The terms of these licenses can be found at: + + - CC0 1.0 Universal : http://creativecommons.org/publicdomain/zero/1.0 + - OpenSSL license : https://www.openssl.org/source/license.html + - Apache 2.0 : http://www.apache.org/licenses/LICENSE-2.0 + + More information about the BLAKE2 hash function can be found at + https://blake2.net. +*/ + +/* Adapted for VeraCrypt */ + +#include "blake2.h" +#include "Common/Endian.h" +#include "Crypto/config.h" +#include "Crypto/cpu.h" +#include "Crypto/misc.h" + +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE +#if CRYPTOPP_BOOL_SSE41_INTRINSICS_AVAILABLE + +#define HAVE_SSE41 + +#if CRYPTOPP_SSSE3_AVAILABLE +#define HAVE_SSSE3 +#endif + +#include "blake2s-round.h" + +int blake2s_has_sse41() +{ + return 1; +} + +#else +int blake2s_has_sse41() +{ + return 0; +} + +#endif +#else +int blake2s_has_sse41() +{ + return 0; +} +#endif diff --git a/src/Crypto/blake2s_SSSE3.c b/src/Crypto/blake2s_SSSE3.c new file mode 100644 index 00000000..4f3252c3 --- /dev/null +++ b/src/Crypto/blake2s_SSSE3.c @@ -0,0 +1,47 @@ +/* + BLAKE2 reference source code package - optimized C implementations + + Copyright 2012, Samuel Neves <sneves@dei.uc.pt>. You may use this under the + terms of the CC0, the OpenSSL Licence, or the Apache Public License 2.0, at + your option. The terms of these licenses can be found at: + + - CC0 1.0 Universal : http://creativecommons.org/publicdomain/zero/1.0 + - OpenSSL license : https://www.openssl.org/source/license.html + - Apache 2.0 : http://www.apache.org/licenses/LICENSE-2.0 + + More information about the BLAKE2 hash function can be found at + https://blake2.net. +*/ + +/* Adapted for VeraCrypt */ + +#include "blake2.h" +#include "Common/Endian.h" +#include "Crypto/config.h" +#include "Crypto/cpu.h" +#include "Crypto/misc.h" + +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE +#if CRYPTOPP_BOOL_SSSE3_INTRINSICS_AVAILABLE + +#define HAVE_SSSE3 + +#include "blake2s-round.h" + +int blake2s_has_ssse3() +{ + return 1; +} + +#else +int blake2s_has_ssse3() +{ + return 0; +} +#endif +#else +int blake2s_has_ssse3() +{ + return 0; +} +#endif diff --git a/src/Crypto/chacha-xmm.c b/src/Crypto/chacha-xmm.c new file mode 100644 index 00000000..980c2c81 --- /dev/null +++ b/src/Crypto/chacha-xmm.c @@ -0,0 +1,113 @@ +/* +chacha.c version $Date: 2014/09/08 17:38:05 $ +D. J. Bernstein +Romain Dolbeau +Public domain. +*/ + +// Modified by kerukuro for use in cppcrypto. 
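/* Note on the blake2s_*.c stubs above: blake2s_has_sse2(), blake2s_has_ssse3() and
   blake2s_has_sse41() return 1 only when the corresponding blake2s_compress_*
   implementation was actually compiled in, so the runtime dispatch in blake2s_init()
   pairs a CPUID check (HasSSE2/HasSSSE3/HasSSE41) with this build-time check and
   never selects a compress routine that is absent from the binary. */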
+ +/* Adapted to VeraCrypt */ + +#include "Common/Tcdefs.h" +#include "config.h" +#include "cpu.h" +#include "misc.h" + +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE + +#ifndef _M_X64 +#ifdef _MSC_VER +#if _MSC_VER < 1900 +__inline __m128i _mm_set_epi64x(int64 i0, int64 i1) { + union { + int64 q[2]; + int32 r[4]; + } u; + u.q[0] = i1; u.q[1] = i0; + // this is inefficient, but other solutions are worse + return _mm_setr_epi32(u.r[0], u.r[1], u.r[2], u.r[3]); +} +#pragma warning(disable:4799) +__inline __m128i _mm_set1_epi64x(int64 a) +{ + union { + __m64 m; + long long ii; + } u; + u.ii = a; + return _mm_set1_epi64(u.m); +} +#pragma warning(default:4799) +#endif +#endif +#endif + + +#define U32V(v) (v) +#define ROTL32(x,n) rotl32(x, n) +#define U32TO8_LITTLE(p, v) (((uint32*)(p))[0] = (v)) +#define U8TO32_LITTLE(v) *((uint32*)(v)) + + +#define ROTATE(v,c) (ROTL32(v,c)) +#define XOR(v,w) ((v) ^ (w)) +#define PLUS(v,w) (U32V((v) + (w))) +#define PLUSONE(v) (PLUS((v),1)) + +#define QUARTERROUND(a,b,c,d) \ + x[a] = PLUS(x[a],x[b]); x[d] = ROTATE(XOR(x[d],x[a]),16); \ + x[c] = PLUS(x[c],x[d]); x[b] = ROTATE(XOR(x[b],x[c]),12); \ + x[a] = PLUS(x[a],x[b]); x[d] = ROTATE(XOR(x[d],x[a]), 8); \ + x[c] = PLUS(x[c],x[d]); x[b] = ROTATE(XOR(x[b],x[c]), 7); + +static void salsa20_wordtobyte(uint8 output[64],const uint32 input[16], unsigned int r) +{ + uint32 x[16]; + int i; + + for (i = 0;i < 16;++i) x[i] = input[i]; + for (i = r;i > 0;--i) { + QUARTERROUND( 0, 4, 8,12) + QUARTERROUND( 1, 5, 9,13) + QUARTERROUND( 2, 6,10,14) + QUARTERROUND( 3, 7,11,15) + QUARTERROUND( 0, 5,10,15) + QUARTERROUND( 1, 6,11,12) + QUARTERROUND( 2, 7, 8,13) + QUARTERROUND( 3, 4, 9,14) + } + for (i = 0;i < 16;++i) x[i] = PLUS(x[i],input[i]); + for (i = 0;i < 16;++i) U32TO8_LITTLE(output + 4 * i,x[i]); +} + + +void chacha_ECRYPT_encrypt_bytes(size_t bytes, uint32* x, const uint8* m, uint8* out, uint8* output, unsigned int r) +{ + unsigned int i; + +#include "chacha_u4.h" + +#include "chacha_u1.h" + +#ifndef _M_X64 +#ifdef _MSC_VER +#if _MSC_VER < 1900 + _mm_empty(); +#endif +#endif +#endif + + if (!bytes) return; + // bytes is now guaranteed to be between 1 and 63 + salsa20_wordtobyte(output,x, r); + x[12] = PLUSONE(x[12]); + if (!x[12]) { + x[13] = PLUSONE(x[13]); + /* stopping at 2^70 bytes per nonce is user's responsibility */ + } + + for (i = 0;i < bytes;++i) out[i] = m[i] ^ output[i]; +} + +#endif diff --git a/src/Crypto/chacha256.c b/src/Crypto/chacha256.c new file mode 100644 index 00000000..685f7886 --- /dev/null +++ b/src/Crypto/chacha256.c @@ -0,0 +1,219 @@ +/* +This code is written by kerukuro for cppcrypto library (http://cppcrypto.sourceforge.net/) +and released into public domain. 
+*/ + +/* adapted for VeraCrypt */ + +#include "chacha256.h" +#include "cpu.h" +#include "misc.h" + + + +#define rotater32(x,n) rotr32(x, n) +#define rotatel32(x,n) rotl32(x, n) + +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE +void chacha_ECRYPT_encrypt_bytes(size_t bytes, uint32* x, const unsigned char* m, unsigned char* out, unsigned char* output, unsigned int r); +#endif + +static VC_INLINE void xor_block_512(const unsigned char* in, const unsigned char* prev, unsigned char* out) +{ +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE && !defined(_UEFI) && (!defined (TC_WINDOWS_DRIVER) || (!defined (DEBUG))) + if (HasSSE2()) + { + __m128i b1 = _mm_loadu_si128((const __m128i*) in); + __m128i p1 = _mm_loadu_si128((const __m128i*) prev); + __m128i b2 = _mm_loadu_si128((const __m128i*) (in + 16)); + __m128i p2 = _mm_loadu_si128((const __m128i*) (prev + 16)); + + _mm_storeu_si128((__m128i*) out, _mm_xor_si128(b1, p1)); + _mm_storeu_si128((__m128i*) (out + 16), _mm_xor_si128(b2, p2)); + + b1 = _mm_loadu_si128((const __m128i*) (in + 32)); + p1 = _mm_loadu_si128((const __m128i*) (prev + 32)); + b2 = _mm_loadu_si128((const __m128i*) (in + 48)); + p2 = _mm_loadu_si128((const __m128i*) (prev + 48)); + + _mm_storeu_si128((__m128i*) (out + 32), _mm_xor_si128(b1, p1)); + _mm_storeu_si128((__m128i*) (out + 48), _mm_xor_si128(b2, p2)); + + } + else +#endif + { + int i; + for (i = 0; i < 64; i++) + out[i] = in[i] ^ prev[i]; + } + +} + +static VC_INLINE void chacha_core(uint32* x, int r) +{ + int i; + for (i = 0; i < r; i++) + { + x[0] += x[4]; + x[12] = rotatel32(x[12] ^ x[0], 16); + x[8] += x[12]; + x[4] = rotatel32(x[4] ^ x[8], 12); + x[0] += x[4]; + x[12] = rotatel32(x[12] ^ x[0], 8); + x[8] += x[12]; + x[4] = rotatel32(x[4] ^ x[8], 7); + + x[1] += x[5]; + x[13] = rotatel32(x[13] ^ x[1], 16); + x[9] += x[13]; + x[5] = rotatel32(x[5] ^ x[9], 12); + x[1] += x[5]; + x[13] = rotatel32(x[13] ^ x[1], 8); + x[9] += x[13]; + x[5] = rotatel32(x[5] ^ x[9], 7); + + x[2] += x[6]; + x[14] = rotatel32(x[14] ^ x[2], 16); + x[10] += x[14]; + x[6] = rotatel32(x[6] ^ x[10], 12); + x[2] += x[6]; + x[14] = rotatel32(x[14] ^ x[2], 8); + x[10] += x[14]; + x[6] = rotatel32(x[6] ^ x[10], 7); + + x[3] += x[7]; + x[15] = rotatel32(x[15] ^ x[3], 16); + x[11] += x[15]; + x[7] = rotatel32(x[7] ^ x[11], 12); + x[3] += x[7]; + x[15] = rotatel32(x[15] ^ x[3], 8); + x[11] += x[15]; + x[7] = rotatel32(x[7] ^ x[11], 7); + + x[0] += x[5]; + x[15] = rotatel32(x[15] ^ x[0], 16); + x[10] += x[15]; + x[5] = rotatel32(x[5] ^ x[10], 12); + x[0] += x[5]; + x[15] = rotatel32(x[15] ^ x[0], 8); + x[10] += x[15]; + x[5] = rotatel32(x[5] ^ x[10], 7); + + x[1] += x[6]; + x[12] = rotatel32(x[12] ^ x[1], 16); + x[11] += x[12]; + x[6] = rotatel32(x[6] ^ x[11], 12); + x[1] += x[6]; + x[12] = rotatel32(x[12] ^ x[1], 8); + x[11] += x[12]; + x[6] = rotatel32(x[6] ^ x[11], 7); + + x[2] += x[7]; + x[13] = rotatel32(x[13] ^ x[2], 16); + x[8] += x[13]; + x[7] = rotatel32(x[7] ^ x[8], 12); + x[2] += x[7]; + x[13] = rotatel32(x[13] ^ x[2], 8); + x[8] += x[13]; + x[7] = rotatel32(x[7] ^ x[8], 7); + + x[3] += x[4]; + x[14] = rotatel32(x[14] ^ x[3], 16); + x[9] += x[14]; + x[4] = rotatel32(x[4] ^ x[9], 12); + x[3] += x[4]; + x[14] = rotatel32(x[14] ^ x[3], 8); + x[9] += x[14]; + x[4] = rotatel32(x[4] ^ x[9], 7); + } +} + +static VC_INLINE void chacha_hash(const uint32* in, uint32* out, int r) +{ + uint32 x[16]; + int i; + memcpy(x, in, 64); + chacha_core(x, r); + for (i = 0; i < 16; ++i) + out[i] = x[i] + in[i]; +} + +static VC_INLINE void incrementSalsaCounter(uint32* 
input, uint32* block, int r) +{ + chacha_hash(input, block, r); + if (!++input[12]) + ++input[13]; +} + +static VC_INLINE void do_encrypt(const unsigned char* in, size_t len, unsigned char* out, int r, size_t* posPtr, uint32* input, uint32* block) +{ + size_t i = 0, pos = *posPtr; + if (pos) + { + while (pos < len && pos < 64) + { + out[i] = in[i] ^ ((unsigned char*)block)[pos++]; + ++i; + } + len -= i; + } + if (len) + pos = 0; + +#if CRYPTOPP_SSSE3_AVAILABLE && !defined(_UEFI) && (!defined (TC_WINDOWS_DRIVER) || (!defined (DEBUG))) + if (HasSSSE3()) + { + size_t fullblocks = len - len % 64; + if (fullblocks) + { + chacha_ECRYPT_encrypt_bytes(fullblocks, input, in + i, out + i, (unsigned char*)block, r); + i += fullblocks; + len -= fullblocks; + } + if (len) + { + chacha_ECRYPT_encrypt_bytes(len, input, in + i, out + i, (unsigned char*)block, r); + pos = len; + } + *posPtr = pos; + return; + } +#endif + + for (; len; len -= VC_MIN(64, len)) + { + incrementSalsaCounter(input, block, r); + if (len >= 64) + { + xor_block_512(in + i, (unsigned char*)block, out + i); + i += 64; + } + else + { + for (; pos < len; pos++, i++) + out[i] = in[i] ^ ((unsigned char*)block)[pos]; + } + } + *posPtr = pos; +} + +void ChaCha256Init(ChaCha256Ctx* ctx, const unsigned char* key, const unsigned char* iv, int rounds) +{ + ctx->internalRounds = rounds / 2; + ctx->pos = 0; + + ctx->input_[12] = 0; + ctx->input_[13] = 0; + memcpy(ctx->input_ + 4, key, 32); + memcpy(ctx->input_ + 14, iv, 8); + ctx->input_[0] = 0x61707865; + ctx->input_[1] = 0x3320646E; + ctx->input_[2] = 0x79622D32; + ctx->input_[3] = 0x6B206574; +} + +void ChaCha256Encrypt(ChaCha256Ctx* ctx, const unsigned char* in, size_t len, unsigned char* out) +{ + do_encrypt(in, len, out, ctx->internalRounds, &ctx->pos, ctx->input_, ctx->block_); +} diff --git a/src/Crypto/chacha256.h b/src/Crypto/chacha256.h new file mode 100644 index 00000000..e9533a39 --- /dev/null +++ b/src/Crypto/chacha256.h @@ -0,0 +1,31 @@ +#ifndef HEADER_Crypto_ChaCha256 +#define HEADER_Crypto_ChaCha256 + +#include "Common/Tcdefs.h" +#include "config.h" + +typedef struct +{ + CRYPTOPP_ALIGN_DATA(16) uint32 block_[16]; + CRYPTOPP_ALIGN_DATA(16) uint32 input_[16]; + size_t pos; + int internalRounds; +} ChaCha256Ctx; + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * key must be 32 bytes long and iv must be 8 bytes long + */ +void ChaCha256Init(ChaCha256Ctx* ctx, const unsigned char* key, const unsigned char* iv, int rounds); +void ChaCha256Encrypt(ChaCha256Ctx* ctx, const unsigned char* in, size_t len, unsigned char* out); +#define ChaCha256Decrypt ChaCha256Encrypt + +#ifdef __cplusplus +} +#endif + +#endif // HEADER_Crypto_ChaCha + diff --git a/src/Crypto/chachaRng.c b/src/Crypto/chachaRng.c new file mode 100644 index 00000000..b3d92039 --- /dev/null +++ b/src/Crypto/chachaRng.c @@ -0,0 +1,120 @@ +/* $OpenBSD: arc4random.c,v 1.54 2015/09/13 08:31:47 guenther Exp $ */ + +/* + * Copyright (c) 1996, David Mazieres <dm@uun.org> + * Copyright (c) 2008, Damien Miller <djm@openbsd.org> + * Copyright (c) 2013, Markus Friedl <markus@openbsd.org> + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
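A minimal sketch of the ChaCha256 API declared in chacha256.h above (hypothetical caller, not part of this patch); per the header, the key must be 32 bytes, the IV 8 bytes, and 20 rounds gives standard ChaCha20:

#include <stddef.h>
#include "chacha256.h"

static void chacha256_usage_sketch(const unsigned char key[32],
                                   const unsigned char iv[8],
                                   unsigned char *buf, size_t len)
{
	ChaCha256Ctx ctx;

	ChaCha256Init(&ctx, key, iv, 20);       /* 20 rounds = ChaCha20 */
	ChaCha256Encrypt(&ctx, buf, len, buf);  /* in place; ChaCha256Decrypt is the same call */
}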
+ * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * ChaCha based random number generator for OpenBSD. + */ + +/* + * Adapted for VeraCrypt + */ + +#include "chachaRng.h" +#include "cpu.h" +#include "misc.h" +#include <string.h> + +static VC_INLINE void ChaCha20RngReKey (ChaCha20RngCtx* pCtx, int useCallBack) +{ + /* fill rs_buf with the keystream */ + if (pCtx->m_rs_have) + memset(pCtx->m_rs_buf + sizeof(pCtx->m_rs_buf) - pCtx->m_rs_have, 0, pCtx->m_rs_have); + ChaCha256Encrypt(&pCtx->m_chachaCtx, pCtx->m_rs_buf, sizeof (pCtx->m_rs_buf), + pCtx->m_rs_buf); + /* mix in optional user provided data */ + if (pCtx->m_getRandSeedCallback && useCallBack) { + unsigned char dat[CHACHA20RNG_KEYSZ + CHACHA20RNG_IVSZ]; + size_t i; + + pCtx->m_getRandSeedCallback (dat, sizeof (dat)); + + for (i = 0; i < (CHACHA20RNG_KEYSZ + CHACHA20RNG_IVSZ); i++) + pCtx->m_rs_buf[i] ^= dat[i]; + + burn (dat, sizeof(dat)); + } + + /* immediately reinit for backtracking resistance */ + ChaCha256Init (&pCtx->m_chachaCtx, pCtx->m_rs_buf, pCtx->m_rs_buf + CHACHA20RNG_KEYSZ, 20); + memset(pCtx->m_rs_buf, 0, CHACHA20RNG_KEYSZ + CHACHA20RNG_IVSZ); + pCtx->m_rs_have = sizeof (pCtx->m_rs_buf) - CHACHA20RNG_KEYSZ - CHACHA20RNG_IVSZ; +} + +static VC_INLINE void ChaCha20RngStir(ChaCha20RngCtx* pCtx) +{ + ChaCha20RngReKey (pCtx, 1); + + /* invalidate rs_buf */ + pCtx->m_rs_have = 0; + memset(pCtx->m_rs_buf, 0, CHACHA20RNG_RSBUFSZ); + + pCtx->m_rs_count = 1600000; +} + +static VC_INLINE void ChaCha20RngStirIfNeeded(ChaCha20RngCtx* pCtx, size_t len) +{ + if (pCtx->m_rs_count <= len) { + ChaCha20RngStir(pCtx); + } else + pCtx->m_rs_count -= len; +} + +void ChaCha20RngInit (ChaCha20RngCtx* pCtx, const unsigned char* key, GetRandSeedFn rngSeedCallback, size_t InitialBytesToSkip) +{ + ChaCha256Init (&pCtx->m_chachaCtx, key, key + 32, 20); + pCtx->m_getRandSeedCallback = rngSeedCallback; + + /* fill rs_buf with the keystream */ + pCtx->m_rs_have = 0; + memset (pCtx->m_rs_buf, 0, sizeof (pCtx->m_rs_buf)); + pCtx->m_rs_count = 1600000; + + ChaCha20RngReKey(pCtx, 0); + + if (InitialBytesToSkip) + ChaCha20RngGetBytes (pCtx, NULL, InitialBytesToSkip); +} + +void ChaCha20RngGetBytes (ChaCha20RngCtx* pCtx, unsigned char* buffer, size_t bufferLen) +{ + unsigned char *buf = (unsigned char*) buffer; + unsigned char* keystream; + size_t m; + + ChaCha20RngStirIfNeeded(pCtx, bufferLen); + + while (bufferLen > 0) { + if (pCtx->m_rs_have > 0) { + m = VC_MIN(bufferLen, pCtx->m_rs_have); + keystream = pCtx->m_rs_buf + sizeof(pCtx->m_rs_buf) - pCtx->m_rs_have; + if (buf) + { + memcpy(buf, keystream, m); + buf += m; + } + memset(keystream, 0, m); + bufferLen -= m; + pCtx->m_rs_have -= m; + } + if (pCtx->m_rs_have == 0) + ChaCha20RngReKey (pCtx, 0); + } +} diff --git a/src/Crypto/chachaRng.h b/src/Crypto/chachaRng.h new file mode 100644 index 00000000..a2cb4ce8 --- /dev/null +++ b/src/Crypto/chachaRng.h @@ -0,0 +1,63 @@ +/* $OpenBSD: arc4random.c,v 1.54 2015/09/13 08:31:47 guenther Exp $ */ + +/* + * Copyright (c) 1996, David Mazieres <dm@uun.org> + * Copyright (c) 2008, Damien Miller 
<djm@openbsd.org> + * Copyright (c) 2013, Markus Friedl <markus@openbsd.org> + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * ChaCha based random number generator for OpenBSD. + */ + +/* + * Adapted for VeraCrypt + */ + +#ifndef HEADER_Crypto_ChaChaRng +#define HEADER_Crypto_ChaChaRng + +#include "chacha256.h" + +#define CHACHA20RNG_KEYSZ 32 +#define CHACHA20RNG_IVSZ 8 +#define CHACHA20RNG_BLOCKSZ 64 +#define CHACHA20RNG_RSBUFSZ (16*CHACHA20RNG_BLOCKSZ) + +#ifdef __cplusplus +extern "C" { +#endif + +typedef void (*GetRandSeedFn)(unsigned char* pbRandSeed, size_t cbRandSeed); + +typedef struct +{ + ChaCha256Ctx m_chachaCtx; /* ChaCha20 context */ + unsigned char m_rs_buf[CHACHA20RNG_RSBUFSZ]; /* keystream blocks */ + size_t m_rs_have; /* valid bytes at end of rs_buf */ + size_t m_rs_count; /* bytes till reseed */ + GetRandSeedFn m_getRandSeedCallback; +} ChaCha20RngCtx; + +/* key length must be equal to 40 bytes (CHACHA20RNG_KEYSZ + CHACHA20RNG_IVSZ) */ +void ChaCha20RngInit (ChaCha20RngCtx* pCtx, const unsigned char* key, GetRandSeedFn rngCallback, size_t InitialBytesToSkip); +void ChaCha20RngGetBytes (ChaCha20RngCtx* pCtx, unsigned char* buffer, size_t bufferLen); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/Crypto/chacha_u1.h b/src/Crypto/chacha_u1.h new file mode 100644 index 00000000..e77bc1ea --- /dev/null +++ b/src/Crypto/chacha_u1.h @@ -0,0 +1,102 @@ +/* +u1.h version $Date: 2014/09/08 17:44:28 $ +D. J. Bernstein +Romain Dolbeau +Public domain. +*/ + +// Modified by kerukuro for use in cppcrypto. 
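For reference (illustrative only, not part of the committed changes): the SSSE3 block that follows vectorizes the standard ChaCha quarter-round, replacing the 16-bit and 8-bit rotations with byte shuffles (rot16/rot8) while keeping the 12-bit and 7-bit rotations as shift/shift/xor pairs. A plain-C sketch of the quarter-round being vectorized:

#include <stdint.h>

#define ROTL32(v, n) (((v) << (n)) | ((v) >> (32 - (n))))

/* One ChaCha quarter-round over four 32-bit state words. */
static void chacha_quarter_round(uint32_t *a, uint32_t *b, uint32_t *c, uint32_t *d)
{
    *a += *b; *d ^= *a; *d = ROTL32(*d, 16);  /* rot16: _mm_shuffle_epi8 in the SSE code */
    *c += *d; *b ^= *c; *b = ROTL32(*b, 12);  /* slli/srli/xor pair in the SSE code */
    *a += *b; *d ^= *a; *d = ROTL32(*d, 8);   /* rot8: _mm_shuffle_epi8 in the SSE code */
    *c += *d; *b ^= *c; *b = ROTL32(*b, 7);   /* slli/srli/xor pair in the SSE code */
}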
+ +// if (!bytes) return; + while (bytes >=64) { + __m128i x_0, x_1, x_2, x_3; + __m128i t_1; + const __m128i rot16 = _mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2); + const __m128i rot8 = _mm_set_epi8(14,13,12,15,10,9,8,11,6,5,4,7,2,1,0,3); + uint32 in12, in13; + + x_0 = _mm_load_si128((__m128i*)(x + 0)); + x_1 = _mm_load_si128((__m128i*)(x + 4)); + x_2 = _mm_load_si128((__m128i*)(x + 8)); + x_3 = _mm_load_si128((__m128i*)(x + 12)); + + for (i = 0 ; i < r ; ++i) { + x_0 = _mm_add_epi32(x_0, x_1); + x_3 = _mm_xor_si128(x_3, x_0); + x_3 = _mm_shuffle_epi8(x_3, rot16); + + x_2 = _mm_add_epi32(x_2, x_3); + x_1 = _mm_xor_si128(x_1, x_2); + + t_1 = x_1; + x_1 = _mm_slli_epi32(x_1, 12); + t_1 = _mm_srli_epi32(t_1, 20); + x_1 = _mm_xor_si128(x_1, t_1); + + x_0 = _mm_add_epi32(x_0, x_1); + x_3 = _mm_xor_si128(x_3, x_0); + x_0 = _mm_shuffle_epi32(x_0, 0x93); + x_3 = _mm_shuffle_epi8(x_3, rot8); + + x_2 = _mm_add_epi32(x_2, x_3); + x_3 = _mm_shuffle_epi32(x_3, 0x4e); + x_1 = _mm_xor_si128(x_1, x_2); + x_2 = _mm_shuffle_epi32(x_2, 0x39); + + t_1 = x_1; + x_1 = _mm_slli_epi32(x_1, 7); + t_1 = _mm_srli_epi32(t_1, 25); + x_1 = _mm_xor_si128(x_1, t_1); + + x_0 = _mm_add_epi32(x_0, x_1); + x_3 = _mm_xor_si128(x_3, x_0); + x_3 = _mm_shuffle_epi8(x_3, rot16); + + x_2 = _mm_add_epi32(x_2, x_3); + x_1 = _mm_xor_si128(x_1, x_2); + + t_1 = x_1; + x_1 = _mm_slli_epi32(x_1, 12); + t_1 = _mm_srli_epi32(t_1, 20); + x_1 = _mm_xor_si128(x_1, t_1); + + x_0 = _mm_add_epi32(x_0, x_1); + x_3 = _mm_xor_si128(x_3, x_0); + x_0 = _mm_shuffle_epi32(x_0, 0x39); + x_3 = _mm_shuffle_epi8(x_3, rot8); + + x_2 = _mm_add_epi32(x_2, x_3); + x_3 = _mm_shuffle_epi32(x_3, 0x4e); + x_1 = _mm_xor_si128(x_1, x_2); + x_2 = _mm_shuffle_epi32(x_2, 0x93); + + t_1 = x_1; + x_1 = _mm_slli_epi32(x_1, 7); + t_1 = _mm_srli_epi32(t_1, 25); + x_1 = _mm_xor_si128(x_1, t_1); + } + x_0 = _mm_add_epi32(x_0, _mm_loadu_si128((__m128i*)(x + 0))); + x_1 = _mm_add_epi32(x_1, _mm_loadu_si128((__m128i*)(x + 4))); + x_2 = _mm_add_epi32(x_2, _mm_loadu_si128((__m128i*)(x + 8))); + x_3 = _mm_add_epi32(x_3, _mm_loadu_si128((__m128i*)(x + 12))); + x_0 = _mm_xor_si128(x_0, _mm_loadu_si128((__m128i*)(m + 0))); + x_1 = _mm_xor_si128(x_1, _mm_loadu_si128((__m128i*)(m + 16))); + x_2 = _mm_xor_si128(x_2, _mm_loadu_si128((__m128i*)(m + 32))); + x_3 = _mm_xor_si128(x_3, _mm_loadu_si128((__m128i*)(m + 48))); + _mm_storeu_si128((__m128i*)(out + 0), x_0); + _mm_storeu_si128((__m128i*)(out + 16), x_1); + _mm_storeu_si128((__m128i*)(out + 32), x_2); + _mm_storeu_si128((__m128i*)(out + 48), x_3); + + in12 = x[12]; + in13 = x[13]; + in12 ++; + if (in12 == 0) + in13 ++; + x[12] = in12; + x[13] = in13; + + bytes -= 64; + out += 64; + m += 64; + } diff --git a/src/Crypto/chacha_u4.h b/src/Crypto/chacha_u4.h new file mode 100644 index 00000000..8eef5dc5 --- /dev/null +++ b/src/Crypto/chacha_u4.h @@ -0,0 +1,187 @@ +/* +u4.h version $Date: 2014/11/11 10:46:58 $ +D. J. Bernstein +Romain Dolbeau +Public domain. +*/ + +// Modified by kerukuro for use in cppcrypto. 
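For reference (illustrative only, not part of the committed changes): chacha_u4.h processes four 64-byte blocks per iteration, so state words 12/13 (the 64-bit block counter) must hold four consecutive counter values, one per SSE lane. A scalar sketch of what the addv12/addv13 additions and the epi32 unpack sequence below compute:

#include <stdint.h>

/* Derive the per-lane counter words for blocks n, n+1, n+2 and n+3. */
static void chacha4_lane_counters(uint32_t x12, uint32_t x13,
                                  uint32_t lane12[4], uint32_t lane13[4])
{
    uint64_t base = (uint64_t)x12 | ((uint64_t)x13 << 32);
    int i;
    for (i = 0; i < 4; i++) {
        uint64_t ctr = base + (uint64_t)i;
        lane12[i] = (uint32_t)ctr;          /* low halves  -> vector x_12 */
        lane13[i] = (uint32_t)(ctr >> 32);  /* high halves -> vector x_13 */
    }
    /* the SSE code then stores base + 4 back into x[12]/x[13] */
}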
+ +#define VEC4_ROT(a,imm) _mm_or_si128(_mm_slli_epi32(a,imm),_mm_srli_epi32(a,(32-imm))) + +/* same, but replace 2 of the shift/shift/or "rotation" by byte shuffles (8 & 16) (better) */ +#define VEC4_QUARTERROUND_SHUFFLE(a,b,c,d) \ + x_##a = _mm_add_epi32(x_##a, x_##b); t_##a = _mm_xor_si128(x_##d, x_##a); x_##d = _mm_shuffle_epi8(t_##a, rot16); \ + x_##c = _mm_add_epi32(x_##c, x_##d); t_##c = _mm_xor_si128(x_##b, x_##c); x_##b = VEC4_ROT(t_##c, 12); \ + x_##a = _mm_add_epi32(x_##a, x_##b); t_##a = _mm_xor_si128(x_##d, x_##a); x_##d = _mm_shuffle_epi8(t_##a, rot8); \ + x_##c = _mm_add_epi32(x_##c, x_##d); t_##c = _mm_xor_si128(x_##b, x_##c); x_##b = VEC4_ROT(t_##c, 7) + +#define VEC4_QUARTERROUND(a,b,c,d) VEC4_QUARTERROUND_SHUFFLE(a,b,c,d) + + +// if (!bytes) return; +if (bytes>=256) { + /* constant for shuffling bytes (replacing multiple-of-8 rotates) */ + __m128i rot16 = _mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2); + __m128i rot8 = _mm_set_epi8(14,13,12,15,10,9,8,11,6,5,4,7,2,1,0,3); + uint32 in12, in13; + __m128i x_0 = _mm_set1_epi32(x[0]); + __m128i x_1 = _mm_set1_epi32(x[1]); + __m128i x_2 = _mm_set1_epi32(x[2]); + __m128i x_3 = _mm_set1_epi32(x[3]); + __m128i x_4 = _mm_set1_epi32(x[4]); + __m128i x_5 = _mm_set1_epi32(x[5]); + __m128i x_6 = _mm_set1_epi32(x[6]); + __m128i x_7 = _mm_set1_epi32(x[7]); + __m128i x_8 = _mm_set1_epi32(x[8]); + __m128i x_9 = _mm_set1_epi32(x[9]); + __m128i x_10 = _mm_set1_epi32(x[10]); + __m128i x_11 = _mm_set1_epi32(x[11]); + __m128i x_12;// = _mm_set1_epi32(x[12]); /* useless */ + __m128i x_13;// = _mm_set1_epi32(x[13]); /* useless */ + __m128i x_14 = _mm_set1_epi32(x[14]); + __m128i x_15 = _mm_set1_epi32(x[15]); + __m128i orig0 = x_0; + __m128i orig1 = x_1; + __m128i orig2 = x_2; + __m128i orig3 = x_3; + __m128i orig4 = x_4; + __m128i orig5 = x_5; + __m128i orig6 = x_6; + __m128i orig7 = x_7; + __m128i orig8 = x_8; + __m128i orig9 = x_9; + __m128i orig10 = x_10; + __m128i orig11 = x_11; + __m128i orig12;// = x_12; /* useless */ + __m128i orig13;// = x_13; /* useless */ + __m128i orig14 = x_14; + __m128i orig15 = x_15; + __m128i t_0; + __m128i t_1; + __m128i t_2; + __m128i t_3; + __m128i t_4; + __m128i t_5; + __m128i t_6; + __m128i t_7; + __m128i t_8; + __m128i t_9; + __m128i t_10; + __m128i t_11; + __m128i t_12; + __m128i t_13; + __m128i t_14; + __m128i t_15; + + while (bytes >= 256) { + const __m128i addv12 = _mm_set_epi64x(1,0); + const __m128i addv13 = _mm_set_epi64x(3,2); + __m128i t12, t13; + uint64 in1213; + + x_0 = orig0; + x_1 = orig1; + x_2 = orig2; + x_3 = orig3; + x_4 = orig4; + x_5 = orig5; + x_6 = orig6; + x_7 = orig7; + x_8 = orig8; + x_9 = orig9; + x_10 = orig10; + x_11 = orig11; + //x_12 = orig12; /* useless */ + //x_13 = orig13; /* useless */ + x_14 = orig14; + x_15 = orig15; + + + + + in12 = x[12]; + in13 = x[13]; + in1213 = ((uint64)in12) | (((uint64)in13) << 32); + t12 = _mm_set1_epi64x(in1213); + t13 = _mm_set1_epi64x(in1213); + + x_12 = _mm_add_epi64(addv12, t12); + x_13 = _mm_add_epi64(addv13, t13); + + t12 = _mm_unpacklo_epi32(x_12, x_13); + t13 = _mm_unpackhi_epi32(x_12, x_13); + + x_12 = _mm_unpacklo_epi32(t12, t13); + x_13 = _mm_unpackhi_epi32(t12, t13); + + orig12 = x_12; + orig13 = x_13; + + in1213 += 4; + + x[12] = in1213 & 0xFFFFFFFF; + x[13] = (in1213>>32)&0xFFFFFFFF; + + for (i = 0 ; i < r ; ++i) { + VEC4_QUARTERROUND( 0, 4, 8,12); + VEC4_QUARTERROUND( 1, 5, 9,13); + VEC4_QUARTERROUND( 2, 6,10,14); + VEC4_QUARTERROUND( 3, 7,11,15); + VEC4_QUARTERROUND( 0, 5,10,15); + VEC4_QUARTERROUND( 1, 6,11,12); + 
VEC4_QUARTERROUND( 2, 7, 8,13); + VEC4_QUARTERROUND( 3, 4, 9,14); + } + +#define ONEQUAD_TRANSPOSE(a,b,c,d) \ + { \ + __m128i t0, t1, t2, t3; \ + x_##a = _mm_add_epi32(x_##a, orig##a); \ + x_##b = _mm_add_epi32(x_##b, orig##b); \ + x_##c = _mm_add_epi32(x_##c, orig##c); \ + x_##d = _mm_add_epi32(x_##d, orig##d); \ + t_##a = _mm_unpacklo_epi32(x_##a, x_##b); \ + t_##b = _mm_unpacklo_epi32(x_##c, x_##d); \ + t_##c = _mm_unpackhi_epi32(x_##a, x_##b); \ + t_##d = _mm_unpackhi_epi32(x_##c, x_##d); \ + x_##a = _mm_unpacklo_epi64(t_##a, t_##b); \ + x_##b = _mm_unpackhi_epi64(t_##a, t_##b); \ + x_##c = _mm_unpacklo_epi64(t_##c, t_##d); \ + x_##d = _mm_unpackhi_epi64(t_##c, t_##d); \ + t0 = _mm_xor_si128(x_##a, _mm_loadu_si128((__m128i*)(m+0))); \ + _mm_storeu_si128((__m128i*)(out+0),t0); \ + t1 = _mm_xor_si128(x_##b, _mm_loadu_si128((__m128i*)(m+64))); \ + _mm_storeu_si128((__m128i*)(out+64),t1); \ + t2 = _mm_xor_si128(x_##c, _mm_loadu_si128((__m128i*)(m+128))); \ + _mm_storeu_si128((__m128i*)(out+128),t2); \ + t3 = _mm_xor_si128(x_##d, _mm_loadu_si128((__m128i*)(m+192))); \ + _mm_storeu_si128((__m128i*)(out+192),t3); \ + } + +#define ONEQUAD(a,b,c,d) ONEQUAD_TRANSPOSE(a,b,c,d) + + ONEQUAD(0,1,2,3); + m+=16; + out+=16; + ONEQUAD(4,5,6,7); + m+=16; + out+=16; + ONEQUAD(8,9,10,11); + m+=16; + out+=16; + ONEQUAD(12,13,14,15); + m-=48; + out-=48; + +#undef ONEQUAD +#undef ONEQUAD_TRANSPOSE + + bytes -= 256; + out += 256; + m += 256; + } + } +#undef VEC4_ROT +#undef VEC4_QUARTERROUND +#undef VEC4_QUARTERROUND_SHUFFLE diff --git a/src/Crypto/config.h b/src/Crypto/config.h index 03b1ecb4..1c2aff72 100644 --- a/src/Crypto/config.h +++ b/src/Crypto/config.h @@ -10,9 +10,9 @@ #endif // Clang pretends to be VC++, too. // See http://github.com/weidai11/cryptopp/issues/147 -#if defined(_MSC_VER) && defined(__clang__) +#if defined(_MSC_VER) && defined(__clang__) && !defined(_DCSPKG_ANALYZE) # error: "Unsupported configuration" #endif #ifdef __GNUC__ @@ -20,12 +20,12 @@ #endif // Apple and LLVM's Clang. 
Apple Clang version 7.0 roughly equals LLVM Clang version 3.7 -#if defined(__clang__ ) && !defined(__apple_build_version__) +#if defined(__clang__ ) && !defined(__apple_build_version__) && !defined(_DCSPKG_ANALYZE) #define CRYPTOPP_LLVM_CLANG_VERSION (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) #define CRYPTOPP_CLANG_INTEGRATED_ASSEMBLER 1 -#elif defined(__clang__ ) && defined(__apple_build_version__) +#elif defined(__clang__ ) && defined(__apple_build_version__) && !defined(_DCSPKG_ANALYZE) #define CRYPTOPP_APPLE_CLANG_VERSION (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) #define CRYPTOPP_CLANG_INTEGRATED_ASSEMBLER 1 #endif @@ -50,9 +50,9 @@ #define CRYPTOPP_MSVC6PP_OR_LATER #endif #ifndef CRYPTOPP_ALIGN_DATA - #if defined(_MSC_VER) + #if defined(_MSC_VER) && !defined(TC_WINDOWS_BOOT) #define CRYPTOPP_ALIGN_DATA(x) __declspec(align(x)) #elif defined(__GNUC__) #define CRYPTOPP_ALIGN_DATA(x) __attribute__((aligned(x))) #else @@ -112,14 +112,29 @@ #if !defined(CRYPTOPP_DISABLE_ASM) && defined(__GNUC__) && defined(__x86_64__) #define CRYPTOPP_X64_ASM_AVAILABLE #endif -#if !defined(CRYPTOPP_DISABLE_SSE2) && (defined(CRYPTOPP_MSVC6PP_OR_LATER) || defined(__SSE2__)) && !defined(_M_ARM) +#if !defined(CRYPTOPP_DISABLE_SSE2) && (defined(CRYPTOPP_MSVC6PP_OR_LATER) || defined(__SSE2__)) && !defined(_M_ARM) && !defined(_M_ARM64) && !defined(__arm__) && !defined(__aarch64__) && !defined(__arm64__) #define CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE 1 #else #define CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE 0 #endif +#if !defined(CRYPTOPP_DISABLE_ASM) && !defined(CRYPTOPP_DISABLE_SSSE3) && !defined(_M_ARM) && !defined(_M_ARM64) && !defined(__arm__) && !defined(__aarch64__) && !defined(__arm64__) && ( \ + defined(__SSSE3__) || (_MSC_VER >= 1500) || \ + (CRYPTOPP_GCC_VERSION >= 40300) || (__INTEL_COMPILER >= 1000) || (__SUNPRO_CC >= 0x5110) || \ + (CRYPTOPP_LLVM_CLANG_VERSION >= 20300) || (CRYPTOPP_APPLE_CLANG_VERSION >= 40000)) + #define CRYPTOPP_SSSE3_AVAILABLE 1 +#else + #define CRYPTOPP_SSSE3_AVAILABLE 0 +# endif + +#if !defined(CRYPTOPP_DISABLE_SSSE3) && (defined(__SSSE3__) || (_MSC_VER >= 1500)) && !defined(_M_ARM) && !defined(_M_ARM64) && !defined(__arm__) && !defined(__aarch64__) && !defined(__arm64__) + #define CRYPTOPP_BOOL_SSSE3_INTRINSICS_AVAILABLE 1 +#else + #define CRYPTOPP_BOOL_SSSE3_INTRINSICS_AVAILABLE 0 +#endif + #if !defined(CRYPTOPP_DISABLE_SSSE3) && !defined(CRYPTOPP_DISABLE_AESNI) && CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE && (CRYPTOPP_GCC_VERSION >= 40400 || _MSC_FULL_VER >= 150030729 || __INTEL_COMPILER >= 1110 || defined(__AES__)) #define CRYPTOPP_BOOL_AESNI_INTRINSICS_AVAILABLE 1 #else #define CRYPTOPP_BOOL_AESNI_INTRINSICS_AVAILABLE 0 @@ -130,14 +145,23 @@ #else #define CRYPTOPP_BOOL_ALIGN16 0 #endif -#if CRYPTOPP_BOOL_AESNI_INTRINSICS_AVAILABLE && (defined(__SSE4_1__) || defined(__INTEL_COMPILER) || defined(_MSC_VER)) +#if CRYPTOPP_BOOL_SSSE3_INTRINSICS_AVAILABLE && (defined(__SSE4_1__) || defined(__INTEL_COMPILER) || defined(_MSC_VER)) #define CRYPTOPP_BOOL_SSE41_INTRINSICS_AVAILABLE 1 #else #define CRYPTOPP_BOOL_SSE41_INTRINSICS_AVAILABLE 0 #endif +#if !defined(CRYPTOPP_DISABLE_SHANI) && !defined(_M_ARM) && !defined(_M_ARM64) && !defined(__arm__) && !defined(__aarch64__) && !defined(__arm64__) && defined(CRYPTOPP_BOOL_SSE41_INTRINSICS_AVAILABLE) && \ + (defined(__SHA__) || (_MSC_VER >= 1900) || (__SUNPRO_CC >= 0x5160) || \ + (CRYPTOPP_GCC_VERSION >= 40900) || (__INTEL_COMPILER >= 1600) || \ + (CRYPTOPP_LLVM_CLANG_VERSION >= 
30400) || (CRYPTOPP_APPLE_CLANG_VERSION >= 50100)) + #define CRYPTOPP_SHANI_AVAILABLE 1 +#else + #define CRYPTOPP_SHANI_AVAILABLE 0 +#endif + // how to allocate 16-byte aligned memory (for SSE2) #if defined(_MSC_VER) #define CRYPTOPP_MM_MALLOC_AVAILABLE #elif defined(__FreeBSD__) || defined(__NetBSD__) || defined(__OpenBSD__) @@ -193,9 +217,9 @@ // this version of the macro is fastest on Pentium 3 and Pentium 4 with MSVC 6 SP5 w/ Processor Pack #define GETBYTE(x, y) (unsigned int)((unsigned char)((x)>>(8*(y)))) // these may be faster on other CPUs/compilers // #define GETBYTE(x, y) (unsigned int)(((x)>>(8*(y)))&255) -// #define GETBYTE(x, y) (((byte *)&(x))[y]) +// #define GETBYTE(x, y) (((uint8 *)&(x))[y]) -#define CRYPTOPP_GET_BYTE_AS_BYTE(x, y) ((byte)((x)>>(8*(y)))) +#define CRYPTOPP_GET_BYTE_AS_BYTE(x, y) ((uint8)((x)>>(8*(y)))) #endif diff --git a/src/Crypto/cpu.c b/src/Crypto/cpu.c index 7a4656d4..e611e9bb 100644 --- a/src/Crypto/cpu.c +++ b/src/Crypto/cpu.c @@ -1,8 +1,11 @@ /* cpu.c - written and placed in the public domain by Wei Dai */ #include "cpu.h" #include "misc.h" +#if defined(_MSC_VER) && !defined(_UEFI) +#include "rdrand.h" +#endif #ifndef EXCEPTION_EXECUTE_HANDLER #define EXCEPTION_EXECUTE_HANDLER 1 #endif @@ -13,8 +16,12 @@ #endif #ifdef CRYPTOPP_CPUID_AVAILABLE +#if defined(__GNUC__) || defined(__clang__) + #include <cpuid.h> // for __get_cpuid and __get_cpuid_count +#endif + #if _MSC_VER >= 1400 && CRYPTOPP_BOOL_X64 int CpuId(uint32 input, uint32 output[4]) { @@ -132,13 +139,8 @@ static int TrySSE2() #if CRYPTOPP_BOOL_X64 return 1; #elif defined(CRYPTOPP_MS_STYLE_INLINE_ASSEMBLY) && !defined(_UEFI) volatile int result = 1; -#if defined (TC_WINDOWS_DRIVER) && !defined (_WIN64) - KFLOATING_SAVE floatingPointState; - if (NT_SUCCESS (KeSaveFloatingPointState (&floatingPointState))) - { -#endif __try { #if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE AS2(por xmm0, xmm0) // executing SSE2 instruction @@ -150,14 +152,8 @@ static int TrySSE2() __except (EXCEPTION_EXECUTE_HANDLER) { result = 0; } -#if defined (TC_WINDOWS_DRIVER) && !defined (_WIN64) - KeRestoreFloatingPointState (&floatingPointState); - } - else - return 0; -#endif return result; #elif !defined(_UEFI) // longjmp and clobber warnings. Volatile is required. 
// http://github.com/weidai11/cryptopp/issues/24 @@ -186,12 +182,27 @@ static int TrySSE2() return 1; #endif } -int g_x86DetectionDone = 0; -int g_hasISSE = 0, g_hasSSE2 = 0, g_hasSSSE3 = 0, g_hasMMX = 0, g_hasAESNI = 0, g_hasCLMUL = 0, g_isP4 = 0; -int g_hasAVX = 0, g_hasAVX2 = 0, g_hasBMI2 = 0, g_hasSSE42 = 0, g_hasSSE41 = 0; -uint32 g_cacheLineSize = CRYPTOPP_L1_CACHE_LINE_SIZE; +static uint64 xgetbv() +{ +#if defined(_MSC_VER) && defined(_XCR_XFEATURE_ENABLED_MASK) && !defined(_UEFI) + return _xgetbv(_XCR_XFEATURE_ENABLED_MASK); +#elif defined(__GNUC__) || defined(__clang__) + uint32 eax, edx; + __asm__ __volatile__(".byte 0x0F, 0x01, 0xd0" : "=a"(eax), "=d"(edx) : "c"(0)); + return ((uint64_t)edx << 32) | eax; +#else + return 0; +#endif +} + +volatile int g_x86DetectionDone = 0; +volatile int g_hasISSE = 0, g_hasSSE2 = 0, g_hasSSSE3 = 0, g_hasMMX = 0, g_hasAESNI = 0, g_hasCLMUL = 0, g_isP4 = 0; +volatile int g_hasAVX = 0, g_hasAVX2 = 0, g_hasBMI2 = 0, g_hasSSE42 = 0, g_hasSSE41 = 0, g_isIntel = 0, g_isAMD = 0; +volatile int g_hasRDRAND = 0, g_hasRDSEED = 0; +volatile int g_hasSHA256 = 0; +volatile uint32 g_cacheLineSize = CRYPTOPP_L1_CACHE_LINE_SIZE; VC_INLINE int IsIntel(const uint32 output[4]) { // This is the "GenuineIntel" string @@ -203,10 +214,18 @@ VC_INLINE int IsIntel(const uint32 output[4]) VC_INLINE int IsAMD(const uint32 output[4]) { // This is the "AuthenticAMD" string return (output[1] /*EBX*/ == 0x68747541) && - (output[2] /*ECX*/ == 0x69746E65) && - (output[3] /*EDX*/ == 0x444D4163); + (output[2] /*ECX*/ == 0x444D4163) && + (output[3] /*EDX*/ == 0x69746E65); +} + +VC_INLINE int IsHygon(const uint32 output[4]) +{ + // This is the "HygonGenuine" string. + return (output[1] /*EBX*/ == 0x6f677948) && + (output[2] /*ECX*/ == 0x656e6975) && + (output[3] /*EDX*/ == 0x6e65476e); } #if !defined (_UEFI) && ((defined(__AES__) && defined(__PCLMUL__)) || defined(__INTEL_COMPILER) || CRYPTOPP_BOOL_AESNI_INTRINSICS_AVAILABLE) @@ -254,53 +273,86 @@ static int Detect_MS_HyperV_AES () int hasAesNI = 0; // when Hyper-V is enabled on older versions of Windows Server (i.e. 2008 R2), the AES-NI capability // gets masked out for all applications, even running on the host. // We try to detect Hyper-V virtual CPU and perform a dummy AES-NI operation to check its real presence - uint32 cpuid[4]; + uint32 cpuid[4] = {0}; char HvProductName[13]; CpuId(0x40000000, cpuid); memcpy (HvProductName, &cpuid[1], 12); HvProductName[12] = 0; if (_stricmp(HvProductName, "Microsoft Hv") == 0) { -#if defined (TC_WINDOWS_DRIVER) && !defined (_WIN64) - KFLOATING_SAVE floatingPointState; - if (NT_SUCCESS (KeSaveFloatingPointState (&floatingPointState))) - { -#endif hasAesNI = TryAESNI (); - -#if defined (TC_WINDOWS_DRIVER) && !defined (_WIN64) - KeRestoreFloatingPointState (&floatingPointState); - } -#endif } return hasAesNI; } #endif +#if defined(__SHA__) || defined(__INTEL_COMPILER) || CRYPTOPP_SHANI_AVAILABLE +extern int TrySHA256(); +#endif + +static BOOL CheckSHA256Support() { +#if CRYPTOPP_BOOL_X64 && CRYPTOPP_SHANI_AVAILABLE +#if defined(_MSC_VER) // Windows with MSVC + int cpuInfo[4] = { 0 }; + __cpuidex(cpuInfo, 7, 0); + return (cpuInfo[1] & (1 << 29)) != 0? 
TRUE : FALSE; + +#elif defined(__GNUC__) || defined(__clang__) // Linux, FreeBSD, macOS with GCC/Clang + unsigned int eax = 0, ebx = 0, ecx = 0, edx = 0; + // First check if CPUID leaf 7 is supported + if (__get_cpuid(0, &eax, &ebx, &ecx, &edx)) { + if (eax >= 7) { + // Now check SHA-256 support in leaf 7, sub-leaf 0 + if (__get_cpuid_count(7, 0, &eax, &ebx, &ecx, &edx)) { + return (ebx & (1 << 29)) != 0? TRUE : FALSE; + } + } + } + return FALSE; + +#else + #error "Unsupported compiler" +#endif +#else + return FALSE; +#endif +} + + void DetectX86Features() { - uint32 cpuid[4] = {0}, cpuid1[4] = {0}; + uint32 cpuid[4] = {0}, cpuid1[4] = {0}, cpuid2[4] = {0}; if (!CpuId(0, cpuid)) return; if (!CpuId(1, cpuid1)) return; g_hasMMX = (cpuid1[3] & (1 << 23)) != 0; + + // cpuid1[2] & (1 << 27) is XSAVE/XRESTORE and signals OS support for SSE; use it to avoid probes. + // See http://github.com/weidai11/cryptopp/issues/511 and http://stackoverflow.com/a/22521619/608639 if ((cpuid1[3] & (1 << 26)) != 0) - g_hasSSE2 = TrySSE2(); - g_hasAVX2 = g_hasSSE2 && (cpuid1[1] & (1 << 5)); + g_hasSSE2 = (cpuid1[2] & (1 << 27)) || TrySSE2(); + if (g_hasSSE2 && (cpuid1[2] & (1 << 28)) && (cpuid1[2] & (1 << 27)) && (cpuid1[2] & (1 << 26))) /* CPU has AVX and OS supports XSAVE/XRSTORE */ + { + uint64 xcrFeatureMask = xgetbv(); + g_hasAVX = (xcrFeatureMask & 0x6) == 0x6; + } + g_hasAVX2 = g_hasAVX && (cpuid1[1] & (1 << 5)); g_hasBMI2 = g_hasSSE2 && (cpuid1[1] & (1 << 8)); - g_hasAVX = g_hasSSE2 && (cpuid1[2] & (1 << 28)); g_hasSSE42 = g_hasSSE2 && (cpuid1[2] & (1 << 20)); g_hasSSE41 = g_hasSSE2 && (cpuid1[2] & (1 << 19)); g_hasSSSE3 = g_hasSSE2 && (cpuid1[2] & (1<<9)); +#ifndef CRYPTOPP_DISABLE_AESNI g_hasAESNI = g_hasSSE2 && (cpuid1[2] & (1<<25)); +#endif g_hasCLMUL = g_hasSSE2 && (cpuid1[2] & (1<<1)); + g_hasSHA256 = CheckSHA256Support(); #if !defined (_UEFI) && ((defined(__AES__) && defined(__PCLMUL__)) || defined(__INTEL_COMPILER) || CRYPTOPP_BOOL_AESNI_INTRINSICS_AVAILABLE) // Hypervisor = bit 31 of ECX of CPUID leaf 0x1 // reference: http://artemonsecurity.com/vmde.pdf @@ -309,13 +361,19 @@ void DetectX86Features() g_hasAESNI = Detect_MS_HyperV_AES (); } #endif +#if defined(__SHA__) || defined(__INTEL_COMPILER) || CRYPTOPP_SHANI_AVAILABLE + if (!g_hasSHA256) + { + g_hasSHA256 = TrySHA256(); + } +#endif + if ((cpuid1[3] & (1 << 25)) != 0) g_hasISSE = 1; else { - uint32 cpuid2[4]; CpuId(0x080000000, cpuid2); if (cpuid2[0] >= 0x080000001) { CpuId(0x080000001, cpuid2); @@ -324,43 +382,90 @@ void DetectX86Features() } if (IsIntel(cpuid)) { + g_isIntel = 1; g_isP4 = ((cpuid1[0] >> 8) & 0xf) == 0xf; g_cacheLineSize = 8 * GETBYTE(cpuid1[1], 1); + g_hasRDRAND = (cpuid1[2] & (1 << 30)) != 0; + + if (cpuid[0] >= 7) + { + if (CpuId(7, cpuid2)) + { + g_hasRDSEED = (cpuid2[1] & (1 << 18)) != 0; + g_hasAVX2 = (cpuid2[1] & (1 << 5)) != 0; + g_hasBMI2 = (cpuid2[1] & (1 << 8)) != 0; + } + } } - else if (IsAMD(cpuid)) + else if (IsAMD(cpuid) || IsHygon(cpuid)) { + g_isAMD = 1; CpuId(0x80000005, cpuid); g_cacheLineSize = GETBYTE(cpuid[2], 0); + g_hasRDRAND = (cpuid1[2] & (1 << 30)) != 0; + + if (cpuid[0] >= 7) + { + if (CpuId(7, cpuid2)) + { + g_hasRDSEED = (cpuid2[1] & (1 << 18)) != 0; + g_hasAVX2 = (cpuid2[1] & (1 << 5)) != 0; + g_hasBMI2 = (cpuid2[1] & (1 << 8)) != 0; + } + } + } +#if defined(_MSC_VER) && !defined(_UEFI) + /* Add check fur buggy RDRAND (AMD Ryzen case) even if we always use RDSEED instead of RDRAND when RDSEED available */ + if (g_hasRDRAND) + { + if ( RDRAND_getBytes ((unsigned char*) cpuid, sizeof (cpuid)) + 
&& (cpuid[0] == 0xFFFFFFFF) && (cpuid[1] == 0xFFFFFFFF) + && (cpuid[2] == 0xFFFFFFFF) && (cpuid[3] == 0xFFFFFFFF) + ) + { + g_hasRDRAND = 0; + g_hasRDSEED = 0; + } + } + + if (g_hasRDSEED) + { + if ( RDSEED_getBytes ((unsigned char*) cpuid, sizeof (cpuid)) + && (cpuid[0] == 0xFFFFFFFF) && (cpuid[1] == 0xFFFFFFFF) + && (cpuid[2] == 0xFFFFFFFF) && (cpuid[3] == 0xFFFFFFFF) + ) + { + g_hasRDRAND = 0; + g_hasRDSEED = 0; + } } +#endif if (!g_cacheLineSize) g_cacheLineSize = CRYPTOPP_L1_CACHE_LINE_SIZE; *((volatile int*)&g_x86DetectionDone) = 1; } -int is_aes_hw_cpu_supported () +void DisableCPUExtendedFeatures () { - int bHasAESNI = 0; - uint32 cpuid[4]; - - if (CpuId(1, cpuid)) - { - if (cpuid[2] & (1<<25)) - bHasAESNI = 1; -#if !defined (_UEFI) && ((defined(__AES__) && defined(__PCLMUL__)) || defined(__INTEL_COMPILER) || CRYPTOPP_BOOL_AESNI_INTRINSICS_AVAILABLE) - // Hypervisor = bit 31 of ECX of CPUID leaf 0x1 - // reference: http://artemonsecurity.com/vmde.pdf - if (!bHasAESNI && (cpuid[2] & (1<<31))) - { - bHasAESNI = Detect_MS_HyperV_AES (); - } -#endif - } - - return bHasAESNI; + g_hasSSE2 = 0; + g_hasISSE = 0; + g_hasMMX = 0; + g_hasSSE2 = 0; + g_hasISSE = 0; + g_hasMMX = 0; + g_hasAVX = 0; + g_hasAVX2 = 0; + g_hasBMI2 = 0; + g_hasSSE42 = 0; + g_hasSSE41 = 0; + g_hasSSSE3 = 0; + g_hasAESNI = 0; + g_hasCLMUL = 0; + g_hasSHA256 = 0; } #endif diff --git a/src/Crypto/cpu.h b/src/Crypto/cpu.h index 05ce9d8f..b0df6462 100644 --- a/src/Crypto/cpu.h +++ b/src/Crypto/cpu.h @@ -24,8 +24,27 @@ #define ATT_PREFIX #define ATT_NOPREFIX #endif +#if defined (_MSC_VER) && !defined (TC_WINDOWS_BOOT) +#if defined(TC_WINDOWS_DRIVER) || defined (_UEFI) +#if defined(__cplusplus) +extern "C" { +#endif +#if defined(_M_X64) || defined (_M_IX86) || defined (_M_IX86_FP) +extern unsigned __int64 __rdtsc(); +#endif +#if defined(__cplusplus) +} +#endif +#else +#include <intrin.h> +#if defined(_M_X64) || defined (_M_IX86) || defined (_M_IX86_FP) +#pragma intrinsic(__rdtsc) +#endif +#endif +#endif + #ifdef CRYPTOPP_GENERATE_X64_MASM #define CRYPTOPP_X86_ASM_AVAILABLE #define CRYPTOPP_BOOL_X64 1 @@ -88,18 +107,21 @@ extern __m128i _mm_cvtsi64_si128(__int64); extern __m128i _mm_unpacklo_epi64(__m128i _A, __m128i _B); extern void _mm_store_si128(__m128i *_P, __m128i _B); extern __m64 _m_pxor(__m64 _MM1, __m64 _MM2); extern __m128i _mm_set_epi64(__m64 _Q1, __m64 _Q0); +extern __m128i _mm_set1_epi64(__m64 q); extern __m128i _mm_setr_epi32(int _I0, int _I1, int _I2, int _I3); extern __m128i _mm_loadu_si128(__m128i const*_P); +extern __m128i _mm_set_epi8(char b15, char b14, char b13, char b12, char b11, char b10, char b9, char b8, char b7, char b6, char b5, char b4, char b3, char b2, char b1, char b0); extern __m128i _mm_set_epi32(int _I3, int _I2, int _I1, int _I0); extern __m128i _mm_set1_epi32(int _I); extern void _mm_storeu_si128(__m128i *_P, __m128i _B); extern __m128i _mm_or_si128(__m128i _A, __m128i _B); extern __m128i _mm_slli_epi32(__m128i _A, int _Count); extern __m128i _mm_srli_epi32(__m128i _A, int _Count); extern __m128i _mm_add_epi32(__m128i _A, __m128i _B); extern __m128i _mm_sub_epi32(__m128i _A, __m128i _B); +extern __m128i _mm_add_epi64 (__m128i a, __m128i b); extern __m128i _mm_or_si128(__m128i _A, __m128i _B); extern __m128i _mm_and_si128(__m128i _A, __m128i _B); extern __m128i _mm_andnot_si128(__m128i _A, __m128i _B); extern __m128i _mm_shufflehi_epi16(__m128i _A, int _Imm); @@ -108,8 +130,16 @@ extern __m128i _mm_unpacklo_epi32(__m128i _A, __m128i _B); extern __m128i _mm_unpackhi_epi32(__m128i _A, 
__m128i _B); extern __m128i _mm_unpackhi_epi64(__m128i _A, __m128i _B); extern __m128i _mm_srli_epi16(__m128i _A, int _Count); extern __m128i _mm_slli_epi16(__m128i _A, int _Count); +extern __m128i _mm_shuffle_epi32 (__m128i a, int imm8); +extern __m128i _mm_set_epi64x (__int64 e1, __int64 e0); +extern __m128i _mm_set1_epi64x (__int64 a); +extern __m128i _mm_castps_si128(__m128); +extern __m128 _mm_castsi128_ps(__m128i); +extern __m128 _mm_shuffle_ps(__m128 _A, __m128 _B, unsigned int _Imm8); +extern __m128i _mm_srli_si128(__m128i _A, int _Imm); +extern __m128i _mm_slli_si128(__m128i _A, int _Imm); #define _mm_xor_si64 _m_pxor #define _mm_empty _m_empty #define _MM_SHUFFLE(fp3,fp2,fp1,fp0) (((fp3) << 6) | ((fp2) << 4) | \ ((fp1) << 2) | ((fp0))) @@ -121,30 +151,31 @@ extern __m128i _mm_slli_epi16(__m128i _A, int _Count); #include <emmintrin.h> #endif #endif -#if CRYPTOPP_BOOL_AESNI_INTRINSICS_AVAILABLE -#if defined(__SSSE3__) || defined(__INTEL_COMPILER) +#if CRYPTOPP_BOOL_SSSE3_INTRINSICS_AVAILABLE || defined(__INTEL_COMPILER) #if defined(TC_WINDOWS_DRIVER) || defined (_UEFI) #if defined(__cplusplus) extern "C" { #endif extern __m128i _mm_shuffle_epi8 (__m128i a, __m128i b); +extern __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int n); #if defined(__cplusplus) } #endif #else #include <tmmintrin.h> #endif #endif -#if defined(__SSE4_1__) || defined(__INTEL_COMPILER) || defined(_MSC_VER) +#if CRYPTOPP_BOOL_SSE41_INTRINSICS_AVAILABLE || defined(__INTEL_COMPILER) #if defined(TC_WINDOWS_DRIVER) || defined (_UEFI) #if defined(__cplusplus) extern "C" { #endif extern int _mm_extract_epi32(__m128i src, const int ndx); extern __m128i _mm_insert_epi32(__m128i dst, int s, const int ndx); +extern __m128i _mm_blend_epi16 (__m128i v1, __m128i v2, const int mask); #if defined(_M_X64) extern __m128i _mm_insert_epi64(__m128i dst, __int64 s, const int ndx); #endif #if defined(__cplusplus) @@ -174,8 +205,23 @@ extern __m128i _mm_aesdeclast_si128(__m128i v, __m128i rkey); #else #include <wmmintrin.h> #endif #endif + +#if CRYPTOPP_SHANI_AVAILABLE +#if defined(TC_WINDOWS_DRIVER) || defined (_UEFI) +#if defined(__cplusplus) +extern "C" { +#endif +extern __m128i __cdecl _mm_sha256msg1_epu32(__m128i, __m128i); +extern __m128i __cdecl _mm_sha256msg2_epu32(__m128i, __m128i); +extern __m128i __cdecl _mm_sha256rnds2_epu32(__m128i, __m128i, __m128i); +#if defined(__cplusplus) +} +#endif +#else +#include <immintrin.h> +#endif #endif #if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 || CRYPTOPP_BOOL_X64 @@ -183,40 +229,46 @@ extern __m128i _mm_aesdeclast_si128(__m128i v, __m128i rkey); extern "C" { #endif #define CRYPTOPP_CPUID_AVAILABLE +#if !defined(CRYPTOPP_DISABLE_AESNI) && !defined(WOLFCRYPT_BACKEND) +#define TC_AES_HW_CPU +#endif // these should not be used directly -extern int g_x86DetectionDone; -extern int g_hasAVX; -extern int g_hasAVX2; -extern int g_hasBMI2; -extern int g_hasSSE42; -extern int g_hasSSE41; -extern int g_hasSSSE3; -extern int g_hasAESNI; -extern int g_hasCLMUL; -extern int g_isP4; -extern uint32 g_cacheLineSize; +extern volatile int g_x86DetectionDone; +extern volatile int g_hasSSE2; +extern volatile int g_hasISSE; +extern volatile int g_hasMMX; +extern volatile int g_hasAVX; +extern volatile int g_hasAVX2; +extern volatile int g_hasBMI2; +extern volatile int g_hasSSE42; +extern volatile int g_hasSSE41; +extern volatile int g_hasSSSE3; +extern volatile int g_hasAESNI; +extern volatile int g_hasCLMUL; +extern volatile int g_isP4; +extern volatile int g_hasRDRAND; +extern volatile int g_hasRDSEED; 
+extern volatile int g_hasSHA256; +extern volatile int g_isIntel; +extern volatile int g_isAMD; +extern volatile uint32 g_cacheLineSize; void DetectX86Features(); // must be called at the start of the program/driver -int CpuId(uint32 input, uint32 *output); +int CpuId(uint32 input, uint32 output[4]); +// disable all CPU extended features (e.g. SSE, AVX, AES) that may have +// been enabled by DetectX86Features. +void DisableCPUExtendedFeatures (); -#if CRYPTOPP_BOOL_X64 +#ifdef CRYPTOPP_BOOL_X64 #define HasSSE2() 1 #define HasISSE() 1 -#define HasMMX() 1 #else - -extern int g_hasSSE2; -extern int g_hasISSE; -extern int g_hasMMX; - #define HasSSE2() g_hasSSE2 #define HasISSE() g_hasISSE -#define HasMMX() g_hasMMX - #endif - +#define HasMMX() g_hasMMX #define HasSSE42() g_hasSSE42 #define HasSSE41() g_hasSSE41 #define HasSAVX() g_hasAVX #define HasSAVX2() g_hasAVX2 @@ -224,18 +276,43 @@ extern int g_hasMMX; #define HasSSSE3() g_hasSSSE3 #define HasAESNI() g_hasAESNI #define HasCLMUL() g_hasCLMUL #define IsP4() g_isP4 +#define HasRDRAND() g_hasRDRAND +#define HasRDSEED() g_hasRDSEED +#define HasSHA256() g_hasSHA256 +#define IsCpuIntel() g_isIntel +#define IsCpuAMD() g_isAMD #define GetCacheLineSize() g_cacheLineSize #if defined(__cplusplus) } #endif #else +#define HasSSE2() 0 +#define HasISSE() 0 + +#define HasMMX() 0 +#define HasSSE42() 0 +#define HasSSE41() 0 +#define HasSAVX() 0 +#define HasSAVX2() 0 +#define HasSBMI2() 0 +#define HasSSSE3() 0 +#define HasAESNI() 0 +#define HasCLMUL() 0 +#define IsP4() 0 +#define HasRDRAND() 0 +#define HasRDSEED() 0 +#define IsCpuIntel() 0 +#define IsCpuAMD() 0 #define GetCacheLineSize() CRYPTOPP_L1_CACHE_LINE_SIZE +#define DetectX86Features() +#define DisableCPUExtendedFeatures() + #endif #endif diff --git a/src/Crypto/gost89_x64.asm b/src/Crypto/gost89_x64.asm deleted file mode 100644 index 5f5892fe..00000000 --- a/src/Crypto/gost89_x64.asm +++ /dev/null @@ -1,483 +0,0 @@ -; -; GOST89 implementation x64 -; -; Copyright (c) 2016. Disk Cryptography Services for EFI (DCS), Alex Kolotnikov -; -; This program and the accompanying materials -; are licensed and made available under the terms and conditions -; of the Apache License, Version 2.0. 
-; -; The full text of the license may be found at -; https://opensource.org/licenses/Apache-2.0 -; -; Some ideas from article https://xakep.ru/2013/10/19/shifrovanie-gost-28147-89/ -; - -[section .bss align=16] - -;/////////////////////////////////////////////////////////////////// -;// Win64 registers to save -;/////////////////////////////////////////////////////////////////// -%macro SaveRegs 0 - sub rsp,8*8+10*16 - mov [rsp], rbx - mov [rsp+8], rbp - mov [rsp+8*2], rdi - mov [rsp+8*3], rsi - mov [rsp+8*4], r12 - mov [rsp+8*5], r13 - mov [rsp+8*6], r14 - mov [rsp+8*7], r15 -%endmacro - -%macro RestoreRegs 0 - mov rbx, [rsp] - mov rbp, [rsp+8] - mov rdi, [rsp+8*2] - mov rsi, [rsp+8*3] - mov r12, [rsp+8*4] - mov r13, [rsp+8*5] - mov r14, [rsp+8*6] - mov r15, [rsp+8*7] - add rsp,8*8+10*16 -%endmacro - -[section .text align=16] -;/////////////////////////////////////////////////////////////////// -;// Crypting 2 blocks -;/////////////////////////////////////////////////////////////////// -%macro gost_round2 2 ; 1 - pos1, 2 - pos2 - ; 1st - ; 1-2 byte - add ecx, r13d ; add key - movzx edi, cl - movzx esi, ch - xor r14d, dword [r8 + 32 + 256*3*4 + rdi*4] - xor r14d, dword [r8 + 32 + 256*2*4 + rsi*4] - shr ecx, 16 - ; 3-4 - movzx edi, cl - xor r14d, dword [r8 + 32 + 256*4 + rdi*4] - movzx esi, ch - xor r14d, dword [r8 + 32 + rsi*4] - mov edx, [r8 + %1*4] ; read key for second step - - ; 2nd - ; 1-2 byte - add eax, r10d ; read key - movzx r15d, al - movzx ebp, ah - xor r11d, dword [r8 + 32 + 256*3*4 + r15*4] - xor r11d, dword [r8 + 32 + 256*2*4 + rbp*4] - shr eax, 16 - ; 3-4 - movzx r15d, al - xor r11d, dword [r8 + 32 + 256*4 + r15*4] - movzx ebp, ah - xor r11d, dword [r8 + 32 + rbp*4] - mov ebx, [r8 + %1*4] ; read key for second step - - ; second step - ; 1st - ; 1-2 byte - add edx, r14d ; add key - movzx edi, dl - movzx esi, dh - xor r13d, dword [r8 + 32 + 256*3*4 + rdi*4] - xor r13d, dword [r8 + 32 + 256*2*4 + rsi*4] - shr edx, 16 - ; 3-4 - movzx edi, dl - xor r13d, dword [r8 + 32 + 256*4 + rdi*4] - movzx esi, dh - xor r13d, dword [r8 + 32 + rsi*4] - mov ecx, [r8 + %2*4] ; read key - - ; 2nd - ; 1-2 byte - add ebx, r11d; ; add key - movzx r15d, bl; - movzx ebp, bh; - xor r10d, dword [r8 + 32 + 256*3*4 + r15*4] - xor r10d, dword [r8 + 32 + 256*2*4 + rbp*4] - shr ebx, 16 - ; 3-4 - movzx r15d, bl - xor r10d, dword [r8 + 32 + 256*4 + r15*4] - movzx ebp, bh - xor r10d, dword [r8 + 32 + rbp*4] - mov eax, [r8 + %2*4] ; read key -%endmacro - -; input: r8 - &key, rcx - &IN -; returns: (r13) & (r10) -GostEncrypt2x64: - ; 1st - mov r13d, [rcx] - mov r14, [rcx] - shr r14, 32 - - ; 2nd - mov r10d, [rcx + 16] - mov r11, [rcx + 16] - shr r11, 32 - - mov ecx, [r8] - mov eax, ecx - - gost_round2 1, 2 - gost_round2 3, 4 - gost_round2 5, 6 - gost_round2 7, 0 - - gost_round2 1, 2 - gost_round2 3, 4 - gost_round2 5, 6 - gost_round2 7, 0 - - gost_round2 1, 2 - gost_round2 3, 4 - gost_round2 5, 6 - gost_round2 7, 7 - - gost_round2 6, 5 - gost_round2 4, 3 - gost_round2 2, 1 - gost_round2 0, 0 - - shl r13, 32 ; combine - or r13, r14 - - shl r10, 32 ; combine - or r10, r11 - ret - -; input: r8 - &key, rcx - &IN -; returns: (r13) & (r10) -GostDecrypt2x64: - ; 1st - mov r13d, [rcx] - mov r14, [rcx] - shr r14, 32 - - ; 2nd - mov r10d, [rcx + 16] - mov r11, [rcx + 16] - shr r11, 32 - - mov ecx, [r8] - mov eax, ecx - - gost_round2 1, 2 - gost_round2 3, 4 - gost_round2 5, 6 - gost_round2 7, 7 - - gost_round2 6, 5 - gost_round2 4, 3 - gost_round2 2, 1 - gost_round2 0, 7 - - gost_round2 6, 5 - gost_round2 4, 3 - gost_round2 
2, 1 - gost_round2 0, 7 - - gost_round2 6, 5 - gost_round2 4, 3 - gost_round2 2, 1 - gost_round2 0, 0 - - shl r13, 32 ; combine - or r13, r14 - - shl r10, 32 ; combine - or r10, r11 -ret - -;/////////////////////////////////////////////////////////////////// -;// Crypting 1 block -;/////////////////////////////////////////////////////////////////// -%macro gost_round1 2 ; 1 - pos1, 2 - pos2 - ; 1-2 byte - add ecx, r13d ; add key - movzx edi, cl - movzx esi, ch - xor r14d, dword [r8 + 32 + 256*3*4 + rdi*4] - xor r14d, dword [r8 + 32 + 256*2*4 + rsi*4] - shr ecx, 16 - ; 3-4 - movzx edi, cl - xor r14d, dword [r8 + 32 + 256*4 + rdi*4] - movzx esi, ch - xor r14d, dword [r8 + 32 + rsi*4] - mov edx, [r8 + %1*4] ; read key for second step - - ; second step - ; 1-2 byte - add edx, r14d ; add key - movzx edi, dl - movzx esi, dh - xor r13d, dword [r8 + 32 + 256*3*4 + rdi*4] - xor r13d, dword [r8 + 32 + 256*2*4 + rsi*4] - shr edx, 16 - ; 3-4 - movzx edi, dl - xor r13d, dword [r8 + 32 + 256*4 + rdi*4] - movzx esi, dh - xor r13d, dword [r8 + 32 + rsi*4] - mov ecx, [r8 + %2*4] ; read key -%endmacro - -; input: r8 - &gost_kds rcx - &IN -; returns: r13 -GostEncrypt1x64: - mov r13d, [rcx] - mov r14, [rcx] - shr r14, 32 - mov ecx, [r8] - - gost_round1 1, 2 - gost_round1 3, 4 - gost_round1 5, 6 - gost_round1 7, 0 - - gost_round1 1, 2 - gost_round1 3, 4 - gost_round1 5, 6 - gost_round1 7, 0 - - gost_round1 1, 2 - gost_round1 3, 4 - gost_round1 5, 6 - gost_round1 7, 7 - - gost_round1 6, 5 - gost_round1 4, 3 - gost_round1 2, 1 - gost_round1 0, 0 - - shl r13, 32 ; combine - or r13, r14 -ret - -; input: r8 - &gost_kds rcx - IN -; returns: r13 -GostDecrypt1x64: - mov r13d, [rcx] - mov r14, [rcx] - shr r14, 32 - mov ecx, [r8] - - gost_round1 1, 2 - gost_round1 3, 4 - gost_round1 5, 6 - gost_round1 7, 7 - - gost_round1 6, 5 - gost_round1 4, 3 - gost_round1 2, 1 - gost_round1 0, 7 - - gost_round1 6, 5 - gost_round1 4, 3 - gost_round1 2, 1 - gost_round1 0, 7 - - gost_round1 6, 5 - gost_round1 4, 3 - gost_round1 2, 1 - gost_round1 0, 0 - - shl r13, 32 ; combine - or r13, r14 -ret - -global gost_encrypt_128_CBC_asm ; gost_encrypt_128_CBC_asm(uint64* in, uint64* out, gost_kds* kds, uint64 count); -; rcx - &in -; rdx - &out -; r8 - &gost_kds -; r9 - count -gost_encrypt_128_CBC_asm: - SaveRegs ; Saving - - sub rsp, 32 - mov [rsp], rdx ; Save out addr - mov [rsp + 8], rcx ; Save in addr - mov [rsp + 16], r8 ; key addr - -.do: - mov [rsp + 24], r9 ; Save count - cmp r9, 2 - jge .blk2 - cmp r9, 1 - jge .blk1 - jmp .end - -; One 128 block encryption -.blk1: - mov rcx, [rsp + 8] ; set in addr - call GostEncrypt1x64 - - mov rdx, [rsp] ; Restore out - mov rcx, [rsp + 8] ; restore in - - mov [rdx], r13 - mov rax, [rcx + 8] - xor rax, r13 ; CBC - - add rdx, 8 ;next 8 bytes - mov [rdx], rax - - mov rcx, rdx - call GostEncrypt1x64 - - mov rdx, [rsp] ; Restore out addr - mov rcx, [rsp+8] ; Restore in addr - - mov [rdx + 8], r13 - - add rdx,16 - mov [rsp], rdx - - add rcx, 16 - mov [rsp+8], rcx - - mov r9, [rsp + 24] - dec r9 - - jmp .do - -.blk2: - mov rcx, [rsp + 8] ; set in addr - call GostEncrypt2x64 - - mov rdx, [rsp] ; Restore out - mov rcx, [rsp + 8] ; restore in - - mov [rdx], r13 - - mov rax, [rcx + 8] - xor rax, r13 ; CBC - - mov [rdx + 16], r10 - - mov rbx, [rcx + 24] - xor rbx, r10 ; CBC - - mov [rdx + 8], rax - mov [rdx + 24], rbx - - add rdx, 8 ;next 8 bytes - - mov rcx, rdx - call GostEncrypt2x64 - - mov rdx, [rsp] ; Restore out addr - mov rcx, [rsp+8] ; Restore in addr - - mov [rdx + 8], r13 - mov [rdx + 24], r10 - - add 
rdx,32 - mov [rsp], rdx - - add rcx, 32 - mov [rsp+8], rcx - - mov r9, [rsp + 24] - sub r9, 2 - - jmp .do - -.end: - add rsp, 32 ; Load out addr - RestoreRegs ; Load -ret - -global gost_decrypt_128_CBC_asm ; gost_decrypt_128_CBC_asm(uint64* in, uint64* out, const gost_kds* kds, uint64 count); -; rcx - &in -; rdx - &out -; r8 - &gost_kds -; r9 - count -gost_decrypt_128_CBC_asm: - SaveRegs ; Saving - - sub rsp, 32 - mov [rsp], rdx ; Save out addr - mov [rsp+8], rcx ; Save in addr - mov [rsp+16], r8 ; key addr - -.do: - mov [rsp + 24], r9 ; Save count - cmp r9, 2 - jge .blk2 - cmp r9, 1 - jge .blk1 - jmp .end - -; One 128 block decryption -.blk1: - add rcx, 8 - call GostDecrypt1x64 - mov rdx, [rsp] ; Restore out - mov rcx, [rsp + 8] ; Restore in - mov rax, [rcx] - xor rax, r13 ; CBC - mov [rdx + 8], rax - - call GostDecrypt1x64 - - mov rdx, [rsp] ; Restore out addr - mov rcx, [rsp+8] ; Restore in addr - - mov [rdx], r13 - - add rdx,16 - mov [rsp], rdx - - add rcx, 16 - mov [rsp+8], rcx - - mov r9, [rsp + 24] - dec r9 - - jmp .do - -.blk2: - add rcx, 8 - call GostDecrypt2x64 - mov rdx, [rsp] ; Restore out - mov rcx, [rsp + 8] ; Restore in - - mov rax, [rcx] - xor rax, r13 ; CBC - mov [rdx + 8], rax - - mov rbx, [rcx+16] - xor rbx, r10 ; CBC - mov [rdx + 24], rbx - - call GostDecrypt2x64 - - mov rdx, [rsp] ; Restore out addr - mov rcx, [rsp+8] ; Restore in addr - - mov [rdx], r13 - mov [rdx+16], r10 - - add rdx,32 - mov [rsp], rdx - - add rcx,32 - mov [rsp+8], rcx - - mov r9, [rsp + 24] - sub r9, 2 - - jmp .do - -.end: - add rsp, 32 ; Load out addr - RestoreRegs ; Load -ret diff --git a/src/Crypto/jitterentropy-base-user.h b/src/Crypto/jitterentropy-base-user.h new file mode 100644 index 00000000..aaefb41a --- /dev/null +++ b/src/Crypto/jitterentropy-base-user.h @@ -0,0 +1,180 @@ +/* + * Non-physical true random number generator based on timing jitter. + * + * Copyright Stephan Mueller <smueller@chronox.de>, 2013 - 2019 + * + * License + * ======= + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, and the entire permission notice in its entirety, + * including the disclaimer of warranties. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote + * products derived from this software without specific prior + * written permission. + * + * ALTERNATIVELY, this product may be distributed under the terms of + * the GNU General Public License, in which case the provisions of the GPL are + * required INSTEAD OF the above restrictions. (This clause is + * necessary due to a potential bad interaction between the GPL and + * the restrictions contained in a BSD-style copyright.) + * + * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF + * WHICH ARE HEREBY DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT + * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + * USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + */ + +/* Adapted for VeraCrypt */ + +#pragma once + +#include "Common/Tcdefs.h" +#include "misc.h" +#include "cpu.h" +#include <stdlib.h> +#include <string.h> + +#ifdef _MSC_VER + +typedef uint64 uint64_t; +typedef int64 int64_t; + +#ifndef _UEFI +#define CONFIG_CRYPTO_CPU_JITTERENTROPY_SECURE_MEMORY +#endif + +#ifndef _UEFI +typedef SSIZE_T ssize_t; +#else +#if CRYPTOPP_BOOL_X64 +typedef int64 ssize_t; +#else +typedef int32 ssize_t; +#endif +#endif + +static VC_INLINE void jent_get_nstime(uint64 *out) +{ +#ifdef _M_ARM64 + LARGE_INTEGER v = { 0 }; +#ifdef TC_WINDOWS_DRIVER + v = KeQueryPerformanceCounter(NULL); +#else + QueryPerformanceCounter(&v); +#endif + * out = v.QuadPart; +#else + *out = __rdtsc(); +#endif +} + +#else + +#include <sys/types.h> + +#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32 || CRYPTOPP_BOOL_X64 + +/* taken from Linux kernel */ +#if CRYPTOPP_BOOL_X64 +#define DECLARE_ARGS(val, low, high) unsigned low, high +#define EAX_EDX_VAL(val, low, high) ((low) | ((uint64)(high) << 32)) +#define EAX_EDX_RET(val, low, high) "=a" (low), "=d" (high) +#else +#define DECLARE_ARGS(val, low, high) unsigned long long val +#define EAX_EDX_VAL(val, low, high) (val) +#define EAX_EDX_RET(val, low, high) "=A" (val) +#endif + +VC_INLINE void jent_get_nstime(uint64 *out) +{ + DECLARE_ARGS(val, low, high); + asm volatile("rdtsc" : EAX_EDX_RET(val, low, high)); + *out = EAX_EDX_VAL(val, low, high); +} + +#else + +#include <time.h> + +VC_INLINE void jent_get_nstime(uint64 *out) +{ + /* we could use CLOCK_MONOTONIC(_RAW), but with CLOCK_REALTIME + * we get some nice extra entropy once in a while from the NTP actions + * that we want to use as well... though, we do not rely on that + * extra little entropy */ + uint64_t tmp = 0; + struct timespec time; + if (clock_gettime(CLOCK_REALTIME, &time) == 0) + { + tmp = time.tv_sec; + tmp = tmp << 32; + tmp = tmp | time.tv_nsec; + } + *out = tmp; +} + +#endif + +#endif + +#ifdef _MSC_VER +static +#endif +VC_INLINE void *jent_zalloc(size_t len) +{ + void *tmp = NULL; + tmp = TCalloc(len); + if(NULL != tmp) + { + memset(tmp, 0, len); +#if defined(_WIN32) && !defined(TC_WINDOWS_DRIVER) && !defined(_UEFI) + VirtualLock (tmp, len); +#endif + } + return tmp; +} + +#ifdef _MSC_VER +static +#endif +VC_INLINE void jent_zfree(void *ptr, unsigned int len) +{ + if (len % 8) + burn(ptr, len); + else + FAST_ERASE64(ptr, len); +#if defined(_WIN32) && !defined(TC_WINDOWS_DRIVER) && !defined(_UEFI) + VirtualUnlock (ptr, len); +#endif + TCfree(ptr); +} + +#ifdef _MSC_VER +static +#endif +VC_INLINE int jent_fips_enabled(void) +{ + return 1; +} + +/* --- helpers needed in user space -- */ + +#define rol64(x,n) rotl64(x,n) + + + diff --git a/src/Crypto/jitterentropy-base.c b/src/Crypto/jitterentropy-base.c new file mode 100644 index 00000000..b7512532 --- /dev/null +++ b/src/Crypto/jitterentropy-base.c @@ -0,0 +1,855 @@ +/* + * Non-physical true random number generator based on timing jitter. 
+ * + * Copyright Stephan Mueller <smueller@chronox.de>, 2014 - 2019 + * + * Design + * ====== + * + * See documentation in doc/ folder. + * + * Interface + * ========= + * + * See documentation in jitterentropy(3) man page. + * + * License + * ======= + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, and the entire permission notice in its entirety, + * including the disclaimer of warranties. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote + * products derived from this software without specific prior + * written permission. + * + * ALTERNATIVELY, this product may be distributed under the terms of + * the GNU General Public License, in which case the provisions of the GPL2 are + * required INSTEAD OF the above restrictions. (This clause is + * necessary due to a potential bad interaction between the GPL and + * the restrictions contained in a BSD-style copyright.) + * + * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF + * WHICH ARE HEREBY DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT + * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + * USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + */ + +/* Adapted for VeraCrypt */ + + +#ifdef TC_WINDOWS_DRIVER +#define UINT64_MAX 0xffffffffffffffffui64 +#else +#include <stdint.h> +#endif + +#undef _FORTIFY_SOURCE + +#ifdef _MSC_VER +#pragma optimize( "", off ) +#pragma warning(disable:4242 4244 4334) /* disable warnings on the original code */ +#else +#if defined(__clang__) + #pragma clang optimize off +#elif defined (__GNUC__) + #pragma GCC optimize ("O0") +#endif +#endif + +#include "jitterentropy.h" + +#ifdef __OPTIMIZE__ + #error "The CPU Jitter random number generator must not be compiled with optimizations. See documentation. Use the compiler switch -O0 for compiling jitterentropy-base.c." +#endif + +#define MAJVERSION 2 /* API / ABI incompatible changes, functional changes that + * require consumer to be updated (as long as this number + * is zero, the API is not considered stable and can + * change without a bump of the major version) */ +#define MINVERSION 2 /* API compatible, ABI may change, functional + * enhancements only, consumer can be left unchanged if + * enhancements are not considered */ +#define PATCHLEVEL 0 /* API / ABI compatible, no functional changes, no + * enhancements, bug fixes only */ + +#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0])) + +/** + * jent_version() - Return machine-usable version number of jent library + * + * The function returns a version number that is monotonic increasing + * for newer versions. The version numbers are multiples of 100. 
For example, + * version 1.2.3 is converted to 1020300 -- the last two digits are reserved + * for future use. + * + * The result of this function can be used in comparing the version number + * in a calling program if version-specific calls need to be make. + * + * @return Version number of jitterentropy library + */ +JENT_PRIVATE_STATIC +unsigned int jent_version(void) +{ + unsigned int version = 0; + + version = MAJVERSION * 1000000; + version += MINVERSION * 10000; + version += PATCHLEVEL * 100; + + return version; +} + +/*************************************************************************** + * Adaptive Proportion Test + * + * This test complies with SP800-90B section 4.4.2. + ***************************************************************************/ + +/** + * Reset the APT counter + * + * @ec [in] Reference to entropy collector + */ +static void jent_apt_reset(struct rand_data *ec, unsigned int delta_masked) +{ + /* Reset APT counter */ + ec->apt_count = 0; + ec->apt_base = delta_masked; + ec->apt_observations = 0; +} + +/** + * Insert a new entropy event into APT + * + * @ec [in] Reference to entropy collector + * @delta_masked [in] Masked time delta to process + */ +static void jent_apt_insert(struct rand_data *ec, unsigned int delta_masked) +{ + /* Initialize the base reference */ + if (!ec->apt_base_set) { + ec->apt_base = delta_masked; + ec->apt_base_set = 1; + return; + } + + if (delta_masked == ec->apt_base) { + ec->apt_count++; + + if (ec->apt_count >= JENT_APT_CUTOFF) + ec->health_failure = 1; + } + + ec->apt_observations++; + + if (ec->apt_observations >= JENT_APT_WINDOW_SIZE) + jent_apt_reset(ec, delta_masked); +} + +/*************************************************************************** + * Stuck Test and its use as Repetition Count Test + * + * The Jitter RNG uses an enhanced version of the Repetition Count Test + * (RCT) specified in SP800-90B section 4.4.1. Instead of counting identical + * back-to-back values, the input to the RCT is the counting of the stuck + * values during the generation of one Jitter RNG output block. + * + * The RCT is applied with an alpha of 2^{-30} compliant to FIPS 140-2 IG 9.8. + * + * During the counting operation, the Jitter RNG always calculates the RCT + * cut-off value of C. If that value exceeds the allowed cut-off value, + * the Jitter RNG output block will be calculated completely but discarded at + * the end. The caller of the Jitter RNG is informed with an error code. + ***************************************************************************/ + +/** + * Repetition Count Test as defined in SP800-90B section 4.4.1 + * + * @ec [in] Reference to entropy collector + * @stuck [in] Indicator whether the value is stuck + */ +static void jent_rct_insert(struct rand_data *ec, int stuck) +{ + /* + * If we have a count less than zero, a previous RCT round identified + * a failure. We will not overwrite it. + */ + if (ec->rct_count < 0) + return; + + if (stuck) { + ec->rct_count++; + + /* + * The cutoff value is based on the following consideration: + * alpha = 2^-30 as recommended in FIPS 140-2 IG 9.8. + * In addition, we require an entropy value H of 1/OSR as this + * is the minimum entropy required to provide full entropy. + * Note, we collect 64 * OSR deltas for inserting them into + * the entropy pool which should then have (close to) 64 bits + * of entropy. + * + * Note, ec->rct_count (which equals to value B in the pseudo + * code of SP800-90B section 4.4.1) starts with zero. 
Hence + * we need to subtract one from the cutoff value as calculated + * following SP800-90B. + */ + if ((unsigned int)ec->rct_count >= (30 * ec->osr)) { + ec->rct_count = -1; + ec->health_failure = 1; + } + } else { + ec->rct_count = 0; + } +} + +/** + * Is there an RCT health test failure? + * + * @ec [in] Reference to entropy collector + * + * @return + * 0 No health test failure + * 1 Permanent health test failure + */ +static int jent_rct_failure(struct rand_data *ec) +{ + if (ec->rct_count < 0) + return 1; + return 0; +} + +#ifdef _MSC_VER +static +#endif +VC_INLINE uint64_t jent_delta(uint64_t prev, uint64_t next) +{ + return (prev < next) ? (next - prev) : (UINT64_MAX - prev + 1 + next); +} + +/** + * Stuck test by checking the: + * 1st derivative of the jitter measurement (time delta) + * 2nd derivative of the jitter measurement (delta of time deltas) + * 3rd derivative of the jitter measurement (delta of delta of time deltas) + * + * All values must always be non-zero. + * + * @ec [in] Reference to entropy collector + * @current_delta [in] Jitter time delta + * + * @return + * 0 jitter measurement not stuck (good bit) + * 1 jitter measurement stuck (reject bit) + */ +static int jent_stuck(struct rand_data *ec, uint64_t current_delta) +{ + uint64_t delta2 = jent_delta(ec->last_delta, current_delta); + uint64_t delta3 = jent_delta(ec->last_delta2, delta2); + unsigned int delta_masked = current_delta & JENT_APT_WORD_MASK; + + ec->last_delta = current_delta; + ec->last_delta2 = delta2; + + /* + * Insert the result of the comparison of two back-to-back time + * deltas. + */ + jent_apt_insert(ec, delta_masked); + + if (!current_delta || !delta2 || !delta3) { + /* RCT with a stuck bit */ + jent_rct_insert(ec, 1); + return 1; + } + + /* RCT with a non-stuck bit */ + jent_rct_insert(ec, 0); + + return 0; +} + +/** + * Report any health test failures + * + * @ec [in] Reference to entropy collector + * + * @return + * 0 No health test failure + * 1 Permanent health test failure + */ +static int jent_health_failure(struct rand_data *ec) +{ + /* Test is only enabled in FIPS mode */ + if (!ec->fips_enabled) + return 0; + + return ec->health_failure; +} + +/*************************************************************************** + * Noise sources + ***************************************************************************/ + +/** + * Update of the loop count used for the next round of + * an entropy collection. + * + * @ec [in] entropy collector struct -- may be NULL + * @bits [in] is the number of low bits of the timer to consider + * @min [in] is the number of bits we shift the timer value to the right at + * the end to make sure we have a guaranteed minimum value + * + * @return Newly calculated loop counter + */ +static uint64_t jent_loop_shuffle(struct rand_data *ec, + unsigned int bits, unsigned int min) +{ + uint64_t time = 0; + uint64_t shuffle = 0; + unsigned int i = 0; + unsigned int mask = (1<<bits) - 1; + + jent_get_nstime(&time); + /* + * Mix the current state of the random number into the shuffle + * calculation to balance that shuffle a bit more. + */ + if (ec) + time ^= ec->data; + /* + * We fold the time value as much as possible to ensure that as many + * bits of the time stamp are included as possible. + */ + for (i = 0; ((DATA_SIZE_BITS + bits - 1) / bits) > i; i++) { + shuffle ^= time & mask; + time = time >> bits; + } + + /* + * We add a lower boundary value to ensure we have a minimum + * RNG loop count. 
+ */ + return (shuffle + (1ULL<<min)); +} + +/** + * CPU Jitter noise source -- this is the noise source based on the CPU + * execution time jitter + * + * This function injects the individual bits of the time value into the + * entropy pool using an LFSR. + * + * The code is deliberately inefficient with respect to the bit shifting + * and shall stay that way. This function is the root cause why the code + * shall be compiled without optimization. This function not only acts as + * LFSR operation, but this function's execution is used to measure + * the CPU execution time jitter. Any change to the loop in this function + * implies that careful retesting must be done. + * + * @ec [in] entropy collector struct -- may be NULL + * @time [in] time stamp to be injected + * @loop_cnt [in] if a value not equal to 0 is set, use the given value as + * number of loops to perform the LFSR + * + * Output: + * updated ec->data + */ +static void jent_lfsr_time(struct rand_data *ec, uint64_t time, + uint64_t loop_cnt, int stuck) +{ + unsigned int i; + uint64_t j = 0; + uint64_t new = 0; +#define MAX_FOLD_LOOP_BIT 4 +#define MIN_FOLD_LOOP_BIT 0 + uint64_t lfsr_loop_cnt = + jent_loop_shuffle(ec, MAX_FOLD_LOOP_BIT, MIN_FOLD_LOOP_BIT); + + /* + * testing purposes -- allow test app to set the counter, not + * needed during runtime + */ + if (loop_cnt) + lfsr_loop_cnt = loop_cnt; + for (j = 0; j < lfsr_loop_cnt; j++) { + new = ec->data; + for (i = 1; (DATA_SIZE_BITS) >= i; i++) { + uint64_t tmp = time << (DATA_SIZE_BITS - i); + + tmp = tmp >> (DATA_SIZE_BITS - 1); + + /* + * Fibonacci LSFR with polynomial of + * x^64 + x^61 + x^56 + x^31 + x^28 + x^23 + 1 which is + * primitive according to + * http://poincare.matf.bg.ac.rs/~ezivkovm/publications/primpol1.pdf + * (the shift values are the polynomial values minus one + * due to counting bits from 0 to 63). As the current + * position is always the LSB, the polynomial only needs + * to shift data in from the left without wrap. + */ + tmp ^= ((new >> 63) & 1); + tmp ^= ((new >> 60) & 1); + tmp ^= ((new >> 55) & 1); + tmp ^= ((new >> 30) & 1); + tmp ^= ((new >> 27) & 1); + tmp ^= ((new >> 22) & 1); + new <<= 1; + new ^= tmp; + } + } + + /* + * If the time stamp is stuck, do not finally insert the value into + * the entropy pool. Although this operation should not do any harm + * even when the time stamp has no entropy, SP800-90B requires that + * any conditioning operation (SP800-90B considers the LFSR to be a + * conditioning operation) to have an identical amount of input + * data according to section 3.1.5. + */ + if (!stuck) + ec->data = new; +} + +/** + * Memory Access noise source -- this is a noise source based on variations in + * memory access times + * + * This function performs memory accesses which will add to the timing + * variations due to an unknown amount of CPU wait states that need to be + * added when accessing memory. The memory size should be larger than the L1 + * caches as outlined in the documentation and the associated testing. + * + * The L1 cache has a very high bandwidth, albeit its access rate is usually + * slower than accessing CPU registers. Therefore, L1 accesses only add minimal + * variations as the CPU has hardly to wait. Starting with L2, significant + * variations are added because L2 typically does not belong to the CPU any more + * and therefore a wider range of CPU wait states is necessary for accesses. + * L3 and real memory accesses have even a wider range of wait states. 
However, + * to reliably access either L3 or memory, the ec->mem memory must be quite + * large which is usually not desirable. + * + * @ec [in] Reference to the entropy collector with the memory access data -- if + * the reference to the memory block to be accessed is NULL, this noise + * source is disabled + * @loop_cnt [in] if a value not equal to 0 is set, use the given value as + * number of loops to perform the folding + */ +static void jent_memaccess(struct rand_data *ec, uint64_t loop_cnt) +{ + unsigned int wrap = 0; + uint64_t i = 0; +#define MAX_ACC_LOOP_BIT 7 +#define MIN_ACC_LOOP_BIT 0 + uint64_t acc_loop_cnt = + jent_loop_shuffle(ec, MAX_ACC_LOOP_BIT, MIN_ACC_LOOP_BIT); + + if (NULL == ec || NULL == ec->mem) + return; + wrap = ec->memblocksize * ec->memblocks; + + /* + * testing purposes -- allow test app to set the counter, not + * needed during runtime + */ + if (loop_cnt) + acc_loop_cnt = loop_cnt; + + for (i = 0; i < (ec->memaccessloops + acc_loop_cnt); i++) { + unsigned char *tmpval = ec->mem + ec->memlocation; + /* + * memory access: just add 1 to one byte, + * wrap at 255 -- memory access implies read + * from and write to memory location + */ + *tmpval = (*tmpval + 1) & 0xff; + /* + * Addition of memblocksize - 1 to pointer + * with wrap around logic to ensure that every + * memory location is hit evenly + */ + ec->memlocation = ec->memlocation + ec->memblocksize - 1; + ec->memlocation = ec->memlocation % wrap; + } +} + +/*************************************************************************** + * Start of entropy processing logic + ***************************************************************************/ +/** + * This is the heart of the entropy generation: calculate time deltas and + * use the CPU jitter in the time deltas. The jitter is injected into the + * entropy pool. + * + * WARNING: ensure that ->prev_time is primed before using the output + * of this function! This can be done by calling this function + * and not using its result. + * + * @ec [in] Reference to entropy collector + * + * @return: result of stuck test + */ +static int jent_measure_jitter(struct rand_data *ec) +{ + uint64_t time = 0; + uint64_t current_delta = 0; + int stuck; + + /* Invoke one noise source before time measurement to add variations */ + jent_memaccess(ec, 0); + + /* + * Get time stamp and calculate time delta to previous + * invocation to measure the timing variations + */ + jent_get_nstime(&time); + current_delta = jent_delta(ec->prev_time, time); + ec->prev_time = time; + + /* Check whether we have a stuck measurement. */ + stuck = jent_stuck(ec, current_delta); + + /* Now call the next noise sources which also injects the data */ + jent_lfsr_time(ec, current_delta, 0, stuck); + + return stuck; +} + +/** + * Generator of one 64 bit random number + * Function fills rand_data->data + * + * @ec [in] Reference to entropy collector + */ +static void jent_gen_entropy(struct rand_data *ec) +{ + unsigned int k = 0; + + /* priming of the ->prev_time value */ + jent_measure_jitter(ec); + + while (1) { + /* If a stuck measurement is received, repeat measurement */ + if (jent_measure_jitter(ec)) + continue; + + /* + * We multiply the loop value with ->osr to obtain the + * oversampling rate requested by the caller + */ + if (++k >= (DATA_SIZE_BITS * ec->osr)) + break; + } +} + +/** + * Entry function: Obtain entropy for the caller. + * + * This function invokes the entropy gathering logic as often to generate + * as many bytes as requested by the caller. 
The entropy gathering logic + creates 64 bits per invocation. + * + * This function truncates the last 64 bit entropy value output to the exact + * size specified by the caller. + * + * @ec [in] Reference to entropy collector + * @data [out] pointer to buffer for storing random data -- buffer must + * already exist + * @len [in] size of the buffer, specifying also the requested number of random + * bytes + * + * @return number of bytes returned when request is fulfilled or an error + * + * The following error codes can occur: + * -1 entropy_collector is NULL + * -2 RCT failed + * -3 APT test failed + */ +JENT_PRIVATE_STATIC +ssize_t jent_read_entropy(struct rand_data *ec, char *data, size_t len) +{ + char *p = data; + size_t orig_len = len; + + if (NULL == ec) + return -1; + + while (0 < len) { + size_t tocopy; + + jent_gen_entropy(ec); + + if (jent_health_failure(ec)) { + if (jent_rct_failure(ec)) + return -2; + else + return -3; + } + + if ((DATA_SIZE_BITS / 8) < len) + tocopy = (DATA_SIZE_BITS / 8); + else + tocopy = len; + memcpy(p, &ec->data, tocopy); + + len -= tocopy; + p += tocopy; + } + + /* + * To be on the safe side, we generate one more round of entropy + * which we do not give out to the caller. That round shall ensure + * that in case the calling application crashes, memory dumps, pages + * out, or due to the CPU Jitter RNG lingering in memory for a long + * time without being moved and an attacker cracks the application, + * all he reads in the entropy pool is a value that is NEVER EVER + * being used for anything. Thus, he does NOT see the previous value + * that was returned to the caller for cryptographic purposes. + */ + /* + * If we use secured memory, do not use that precaution as the secure + * memory protects the entropy pool.
Moreover, note that using this + * call reduces the speed of the RNG by up to half + */ +#ifndef CONFIG_CRYPTO_CPU_JITTERENTROPY_SECURE_MEMORY + jent_gen_entropy(ec); +#endif + return orig_len; +} + +/*************************************************************************** + * Initialization logic + ***************************************************************************/ + +JENT_PRIVATE_STATIC +struct rand_data *jent_entropy_collector_alloc(unsigned int osr, + unsigned int flags) +{ + struct rand_data *entropy_collector; + + entropy_collector = jent_zalloc(sizeof(struct rand_data)); + if (NULL == entropy_collector) + return NULL; + + if (!(flags & JENT_DISABLE_MEMORY_ACCESS)) { + /* Allocate memory for adding variations based on memory + * access + */ + entropy_collector->mem = + (unsigned char *)jent_zalloc(JENT_MEMORY_SIZE); + if (NULL == entropy_collector->mem) { + jent_zfree(entropy_collector, sizeof(struct rand_data)); + return NULL; + } + entropy_collector->memblocksize = JENT_MEMORY_BLOCKSIZE; + entropy_collector->memblocks = JENT_MEMORY_BLOCKS; + entropy_collector->memaccessloops = JENT_MEMORY_ACCESSLOOPS; + } + + /* verify and set the oversampling rate */ + if (0 == osr) + osr = 1; /* minimum sampling rate is 1 */ + entropy_collector->osr = osr; + + if (jent_fips_enabled()) + entropy_collector->fips_enabled = 1; + + /* fill the data pad with non-zero values */ + jent_gen_entropy(entropy_collector); + + return entropy_collector; +} + +JENT_PRIVATE_STATIC +void jent_entropy_collector_free(struct rand_data *entropy_collector) +{ + if (NULL != entropy_collector) { + if (NULL != entropy_collector->mem) { + jent_zfree(entropy_collector->mem, JENT_MEMORY_SIZE); + entropy_collector->mem = NULL; + } + jent_zfree(entropy_collector, sizeof(struct rand_data)); + } +} + +JENT_PRIVATE_STATIC +int jent_entropy_init(void) +{ + int i; + uint64_t delta_sum = 0; + uint64_t old_delta = 0; + unsigned int nonstuck = 0; + int time_backwards = 0; + int count_mod = 0; + int count_stuck = 0; + struct rand_data ec; + + memset(&ec, 0, sizeof(ec)); + + /* Required for RCT */ + ec.osr = 1; + if (jent_fips_enabled()) + ec.fips_enabled = 1; + + /* We could perform statistical tests here, but the problem is + * that we only have a few loop counts to do testing. These + * loop counts may show some slight skew and we produce + * false positives. + * + * Moreover, only old systems show potentially problematic + * jitter entropy that could potentially be caught here. But + * the RNG is intended for hardware that is available or widely + * used, but not old systems that are long out of favor. Thus, + * no statistical tests. + */ + + /* + * We could add a check for system capabilities such as clock_getres or + * check for CONFIG_X86_TSC, but it does not make much sense as the + * following sanity checks verify that we have a high-resolution + * timer. + */ + /* + * TESTLOOPCOUNT needs some loops to identify edge systems. 100 is + * definitely too little. + * + * SP800-90B requires at least 1024 initial test cycles. 
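+	 *
+	 * Concrete illustration (editorial note, not upstream text): with
+	 * TESTLOOPCOUNT = 1024 and CLEARCACHE = 100 the loop below runs
+	 * 1124 times; the first 100 iterations are measured but not
+	 * evaluated, serving only to warm up the caches and branch
+	 * predictors.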
+ */ +#define TESTLOOPCOUNT 1024 +#define CLEARCACHE 100 + for (i = 0; (TESTLOOPCOUNT + CLEARCACHE) > i; i++) { + uint64_t time = 0; + uint64_t time2 = 0; + uint64_t delta = 0; + unsigned int lowdelta = 0; + int stuck; + + /* Invoke core entropy collection logic */ + jent_get_nstime(&time); + ec.prev_time = time; + jent_memaccess(&ec, 0); + jent_lfsr_time(&ec, time, 0, 0); + jent_get_nstime(&time2); + + /* test whether timer works */ + if (!time || !time2) + return ENOTIME; + delta = jent_delta(time, time2); + /* + * test whether timer is fine grained enough to provide + * delta even when called shortly after each other -- this + * implies that we also have a high resolution timer + */ + if (!delta) + return ECOARSETIME; + + stuck = jent_stuck(&ec, delta); + + /* + * up to here we did not modify any variable that will be + * evaluated later, but we already performed some work. Thus we + * already have had an impact on the caches, branch prediction, + * etc. with the goal to clear it to get the worst case + * measurements. + */ + if (CLEARCACHE > i) + continue; + + if (stuck) + count_stuck++; + else { + nonstuck++; + + /* + * Ensure that the APT succeeded. + * + * With the check below that count_stuck must be less + * than 10% of the overall generated raw entropy values + * it is guaranteed that the APT is invoked at + * floor((TESTLOOPCOUNT * 0.9) / 64) == 14 times. + */ + if ((nonstuck % JENT_APT_WINDOW_SIZE) == 0) { + jent_apt_reset(&ec, + delta & JENT_APT_WORD_MASK); + if (jent_health_failure(&ec)) + return EHEALTH; + } + } + + /* Validate RCT */ + if (jent_rct_failure(&ec)) + return ERCT; + + /* test whether we have an increasing timer */ + if (!(time2 > time)) + time_backwards++; + + /* use 32 bit value to ensure compilation on 32 bit arches */ + lowdelta = (uint64_t)time2 - (uint64_t)time; + if (!(lowdelta % 100)) + count_mod++; + + /* + * ensure that we have a varying delta timer which is necessary + * for the calculation of entropy -- perform this check + * only after the first loop is executed as we need to prime + * the old_data value + */ + if (delta > old_delta) + delta_sum += (delta - old_delta); + else + delta_sum += (old_delta - delta); + old_delta = delta; + } + + /* + * we allow up to three times the time running backwards. + * CLOCK_REALTIME is affected by adjtime and NTP operations. Thus, + * if such an operation just happens to interfere with our test, it + * should not fail. The value of 3 should cover the NTP case being + * performed during our test run. + */ + if (3 < time_backwards) + return ENOMONOTONIC; + + /* + * Variations of deltas of time must on average be larger + * than 1 to ensure the entropy estimation + * implied with 1 is preserved + */ + if ((delta_sum) <= 1) + return EMINVARVAR; + + /* + * Ensure that we have variations in the time stamp below 10 for at least + * 10% of all checks -- on some platforms, the counter increments in + * multiples of 100, but not always + */ + if ((TESTLOOPCOUNT/10 * 9) < count_mod) + return ECOARSETIME; + + /* + * If we have more than 90% stuck results, then this Jitter RNG is + * likely to not work well. + */ + if ((TESTLOOPCOUNT/10 * 9) < count_stuck) + return ESTUCK; + + return 0; +} diff --git a/src/Crypto/jitterentropy.h b/src/Crypto/jitterentropy.h new file mode 100644 index 00000000..3dd69017 --- /dev/null +++ b/src/Crypto/jitterentropy.h @@ -0,0 +1,170 @@ +/* + * Non-physical true random number generator based on timing jitter. 
+ * + * Copyright Stephan Mueller <smueller@chronox.de>, 2014 - 2019 + * + * License + * ======= + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, and the entire permission notice in its entirety, + * including the disclaimer of warranties. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote + * products derived from this software without specific prior + * written permission. + * + * ALTERNATIVELY, this product may be distributed under the terms of + * the GNU General Public License, in which case the provisions of the GPL are + * required INSTEAD OF the above restrictions. (This clause is + * necessary due to a potential bad interaction between the GPL and + * the restrictions contained in a BSD-style copyright.) + * + * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF + * WHICH ARE HEREBY DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT + * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE + * USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH + * DAMAGE. + */ + +/* Adapted for VeraCrypt */ + +#ifndef _JITTERENTROPY_H +#define _JITTERENTROPY_H + +#include "jitterentropy-base-user.h" + +/* The entropy pool */ +struct rand_data +{ + /* all data values that are vital to maintain the security + * of the RNG are marked as SENSITIVE. A user must not + * access that information while the RNG executes its loops to + * calculate the next random value. 
*/ + uint64_t data; /* SENSITIVE Actual random number */ + uint64_t prev_time; /* SENSITIVE Previous time stamp */ +#define DATA_SIZE_BITS ((sizeof(uint64_t)) * 8) + uint64_t last_delta; /* SENSITIVE stuck test */ + uint64_t last_delta2; /* SENSITIVE stuck test */ + unsigned int osr; /* Oversampling rate */ +#define JENT_MEMORY_BLOCKS 64 +#define JENT_MEMORY_BLOCKSIZE 32 +#define JENT_MEMORY_ACCESSLOOPS 128 +#define JENT_MEMORY_SIZE (JENT_MEMORY_BLOCKS*JENT_MEMORY_BLOCKSIZE) + unsigned char *mem; /* Memory access location with size of + * memblocks * memblocksize */ + unsigned int memlocation; /* Pointer to byte in *mem */ + unsigned int memblocks; /* Number of memory blocks in *mem */ + unsigned int memblocksize; /* Size of one memory block in bytes */ + unsigned int memaccessloops; /* Number of memory accesses per random + * bit generation */ + + /* Repetition Count Test */ + int rct_count; /* Number of stuck values */ + + /* Adaptive Proportion Test for a significance level of 2^-30 */ +#define JENT_APT_CUTOFF 325 /* Taken from SP800-90B sec 4.4.2 */ +#define JENT_APT_WINDOW_SIZE 512 /* Data window size */ + /* LSB of time stamp to process */ +#define JENT_APT_LSB 16 +#define JENT_APT_WORD_MASK (JENT_APT_LSB - 1) + unsigned int apt_observations; /* Number of collected observations */ + unsigned int apt_count; /* APT counter */ + unsigned int apt_base; /* APT base reference */ + unsigned int apt_base_set:1; /* APT base reference set? */ + + unsigned int fips_enabled:1; + unsigned int health_failure:1; /* Permanent health failure */ +}; + +/* Flags that can be used to initialize the RNG */ +#define JENT_DISABLE_STIR (1<<0) /* UNUSED */ +#define JENT_DISABLE_UNBIAS (1<<1) /* UNUSED */ +#define JENT_DISABLE_MEMORY_ACCESS (1<<2) /* Disable memory access for more + entropy, saves MEMORY_SIZE RAM for + entropy collector */ + +/* -- BEGIN Main interface functions -- */ + +#ifndef JENT_STUCK_INIT_THRES +/* + * Per default, not more than 90% of all measurements during initialization + * are allowed to be stuck. + * + * It is allowed to change this value as required for the intended environment. 
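+ *
+ * Worked example (editorial illustration): with the default definition
+ * below, JENT_STUCK_INIT_THRES(1024) evaluates to (1024/10) * 9 = 918
+ * under integer division, i.e. at most 918 of 1024 initialization
+ * measurements may be flagged as stuck.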
+ */ +#define JENT_STUCK_INIT_THRES(x) (x/10 * 9) +#endif + +#ifdef JENT_PRIVATE_COMPILE +# define JENT_PRIVATE_STATIC static +#else /* JENT_PRIVATE_COMPILE */ +# define JENT_PRIVATE_STATIC +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +/* Number of low bits of the time value that we want to consider */ +/* get raw entropy */ +JENT_PRIVATE_STATIC +ssize_t jent_read_entropy(struct rand_data *ec, char *data, size_t len); +/* initialize an instance of the entropy collector */ +JENT_PRIVATE_STATIC +struct rand_data *jent_entropy_collector_alloc(unsigned int osr, + unsigned int flags); +/* clearing of entropy collector */ +JENT_PRIVATE_STATIC +void jent_entropy_collector_free(struct rand_data *entropy_collector); + +/* initialization of entropy collector */ +JENT_PRIVATE_STATIC +int jent_entropy_init(void); + +/* return version number of core library */ +JENT_PRIVATE_STATIC +unsigned int jent_version(void); + +/* -- END of Main interface functions -- */ + +/* -- BEGIN error codes for init function -- */ +#define ENOTIME 1 /* Timer service not available */ +#define ECOARSETIME 2 /* Timer too coarse for RNG */ +#define ENOMONOTONIC 3 /* Timer is not monotonic increasing */ +#define EMINVARIATION 4 /* Timer variations too small for RNG */ +#define EVARVAR 5 /* Timer does not produce variations of variations + (2nd derivation of time is zero) */ +#define EMINVARVAR 6 /* Timer variations of variations is too small */ +#define EPROGERR 7 /* Programming error */ +#define ESTUCK 8 /* Too many stuck results during init. */ +#define EHEALTH 9 /* Health test failed during initialization */ +#define ERCT 10 /* RCT failed during initialization */ + +/* -- BEGIN statistical test functions only complied with CONFIG_CRYPTO_CPU_JITTERENTROPY_STAT -- */ + +#ifdef CONFIG_CRYPTO_CPU_JITTERENTROPY_STAT +JENT_PRIVATE_STATIC +uint64_t jent_lfsr_var_stat(struct rand_data *ec, unsigned int min); +#endif /* CONFIG_CRYPTO_CPU_JITTERENTROPY_STAT */ + +/* -- END of statistical test function -- */ + +#ifdef __cplusplus +} +#endif + + +#endif /* _JITTERENTROPY_H */ diff --git a/src/Crypto/kuznyechik.c b/src/Crypto/kuznyechik.c index 403d1084..1ba38abe 100644 --- a/src/Crypto/kuznyechik.c +++ b/src/Crypto/kuznyechik.c @@ -3,19 +3,26 @@ This code is written by kerukuro for cppcrypto library (http://cppcrypto.sourcef and released into public domain. 
*/ #include "kuznyechik.h" -// #include <memory.h> -// #include <algorithm> -// #include "portability.h" +#include "cpu.h" +#include "misc.h" #ifdef _MSC_VER #define inline __forceinline #endif +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE +void kuznyechik_set_key_simd(const uint8* key, kuznyechik_kds *kds); +void kuznyechik_encrypt_block_simd(uint8* out, const uint8* in, kuznyechik_kds* kds); +void kuznyechik_encrypt_blocks_simd(uint8* out, const uint8* in, size_t blocks, kuznyechik_kds* kds); +void kuznyechik_decrypt_block_simd(uint8* out, const uint8* in, kuznyechik_kds* kds); +void kuznyechik_decrypt_blocks_simd(uint8* out, const uint8* in, size_t blocks, kuznyechik_kds* kds); +#endif + //#define CPPCRYPTO_DEBUG - static const byte S[256] = { + static const uint8 S[256] = { 252, 238, 221, 17, 207, 110, 49, 22, 251, 196, 250, 218, 35, 197, 4, 77, 233, 119, 240, 219, 147, 46, 153, 186, 23, 54, 241, 187, 20, 205, 95, 193, 249, 24, 101, 90, 226, 92, 239, 33, 129, 28, 60, 66, 139, 1, 142, 79, 5, 132, 2, 174, 227, 106, 143, 160, 6, 11, 237, 152, 127, 212, 211, 31, 235, 52, 44, 81, 234, 200, 72, 171, 242, 42, 104, 162, 253, 58, 206, 204, 181, 112, 14, 86, 8, 12, 118, 18, 191, 114, 19, 71, 156, 183, 93, 135, 21, 161, 150, 41, 16, 123, 154, 199, 243, 145, 120, 111, 157, 158, 178, 177, 50, 117, 25, 61, 255, 53, 138, 126, 109, 84, 198, 128, 195, 189, 13, 87, @@ -24,9 +31,9 @@ and released into public domain. 7, 88, 179, 64, 134, 172, 29, 247, 48, 55, 107, 228, 136, 217, 231, 137, 225, 27, 131, 73, 76, 63, 248, 254, 141, 83, 170, 144, 202, 216, 133, 97, 32, 113, 103, 164, 45, 43, 9, 91, 203, 155, 37, 208, 190, 229, 108, 82, 89, 166, 116, 210, 230, 244, 180, 192, 209, 102, 175, 194, 57, 75, 99, 182 }; - static const byte IS[256] = { + static const uint8 IS[256] = { 165, 45, 50, 143, 14, 48, 56, 192, 84, 230, 158, 57, 85, 126, 82, 145, 100, 3, 87, 90, 28, 96, 7, 24, 33, 114, 168, 209, 41, 198, 164, 63, 224, 39, 141, 12, 130, 234, 174, 180, 154, 99, 73, 229, 66, 228, 21, 183, 200, 6, 112, 157, 65, 117, 25, 201, 170, 252, 77, 191, 42, 115, 132, 213, 195, 175, 43, 134, 167, 177, 178, 91, 70, 211, 159, 253, 212, 15, 156, 47, 155, 67, 239, 217, 121, 182, 83, 127, 193, 240, 35, 231, 37, 94, 181, 30, 162, 223, 166, 254, 172, 34, 249, 226, 74, 188, 53, 202, 238, 120, 5, 107, 81, 225, 89, 163, 242, 113, 86, 17, 106, 137, 148, 101, 140, 187, 119, 60, @@ -2135,201 +2142,259 @@ and released into public domain. 
{LL(0xe0804785482c49b7), LL(0x19eab9b4539de969)}, {LL(0xeb0e9f31deb66c05), LL(0x1a95f6106399808e)}, {LL(0x5374d75dacc0ce6b), LL(0x1b0172cd7324a7d3)}, {LL(0xfdd1ec9a314126a2), LL(0x1c6b689b03915283)}, {LL(0x45aba4f6433784cc), LL(0x1dffec46132c75de)}, {LL(0x4e257c42d5ada17e), LL(0x1e80a3e223281c39)}, {LL(0xf65f342ea7db0310), LL(0x1f14273f33953b64)}, {LL(0x619b141e58d8a75e), LL(0x20a8ed9c45c16af1)} }; - static inline void LS(uint64 x1, uint64 x2, uint64* t1, uint64* t2) - { - *t1 = T[0][(byte)(x1)][0] ^ T[1][(byte)(x1 >> 8)][0] ^ T[2][(byte)(x1 >> 16)][0] ^ T[3][(byte)(x1 >> 24)][0] ^ T[4][(byte)(x1 >> 32)][0] ^ T[5][(byte)(x1 >> 40)][0] ^ - T[6][(byte)(x1 >> 48)][0] ^ T[7][(byte)(x1 >> 56)][0] ^ T[8][(byte)(x2)][0] ^ T[9][(byte)(x2 >> 8)][0] ^ T[10][(byte)(x2 >> 16)][0] ^ T[11][(byte)(x2 >> 24)][0] ^ - T[12][(byte)(x2 >> 32)][0] ^ T[13][(byte)(x2 >> 40)][0] ^ T[14][(byte)(x2 >> 48)][0] ^ T[15][(byte)(x2 >> 56)][0]; - *t2 = T[0][(byte)(x1)][1] ^ T[1][(byte)(x1 >> 8)][1] ^ T[2][(byte)(x1 >> 16)][1] ^ T[3][(byte)(x1 >> 24)][1] ^ T[4][(byte)(x1 >> 32)][1] ^ T[5][(byte)(x1 >> 40)][1] ^ - T[6][(byte)(x1 >> 48)][1] ^ T[7][(byte)(x1 >> 56)][1] ^ T[8][(byte)(x2)][1] ^ T[9][(byte)(x2 >> 8)][1] ^ T[10][(byte)(x2 >> 16)][1] ^ T[11][(byte)(x2 >> 24)][1] ^ - T[12][(byte)(x2 >> 32)][1] ^ T[13][(byte)(x2 >> 40)][1] ^ T[14][(byte)(x2 >> 48)][1] ^ T[15][(byte)(x2 >> 56)][1]; +#define LS(x1,x2,t1,t2) { \ + t1 = T[0][(uint8)(x1)][0] ^ T[1][(uint8)(x1 >> 8)][0] ^ T[2][(uint8)(x1 >> 16)][0] ^ T[3][(uint8)(x1 >> 24)][0] ^ T[4][(uint8)(x1 >> 32)][0] ^ T[5][(uint8)(x1 >> 40)][0] ^ \ + T[6][(uint8)(x1 >> 48)][0] ^ T[7][(uint8)(x1 >> 56)][0] ^ T[8][(uint8)(x2)][0] ^ T[9][(uint8)(x2 >> 8)][0] ^ T[10][(uint8)(x2 >> 16)][0] ^ T[11][(uint8)(x2 >> 24)][0] ^ \ + T[12][(uint8)(x2 >> 32)][0] ^ T[13][(uint8)(x2 >> 40)][0] ^ T[14][(uint8)(x2 >> 48)][0] ^ T[15][(uint8)(x2 >> 56)][0]; \ + t2 = T[0][(uint8)(x1)][1] ^ T[1][(uint8)(x1 >> 8)][1] ^ T[2][(uint8)(x1 >> 16)][1] ^ T[3][(uint8)(x1 >> 24)][1] ^ T[4][(uint8)(x1 >> 32)][1] ^ T[5][(uint8)(x1 >> 40)][1] ^ \ + T[6][(uint8)(x1 >> 48)][1] ^ T[7][(uint8)(x1 >> 56)][1] ^ T[8][(uint8)(x2)][1] ^ T[9][(uint8)(x2 >> 8)][1] ^ T[10][(uint8)(x2 >> 16)][1] ^ T[11][(uint8)(x2 >> 24)][1] ^ \ + T[12][(uint8)(x2 >> 32)][1] ^ T[13][(uint8)(x2 >> 40)][1] ^ T[14][(uint8)(x2 >> 48)][1] ^ T[15][(uint8)(x2 >> 56)][1]; \ } - static inline void ILS(uint64 x1, uint64 x2, uint64* t1, uint64* t2) - { - *t1 = IT[0][(byte)(x1)][0] ^ IT[1][(byte)(x1 >> 8)][0] ^ IT[2][(byte)(x1 >> 16)][0] ^ IT[3][(byte)(x1 >> 24)][0] ^ IT[4][(byte)(x1 >> 32)][0] ^ IT[5][(byte)(x1 >> 40)][0] ^ - IT[6][(byte)(x1 >> 48)][0] ^ IT[7][(byte)(x1 >> 56)][0] ^ IT[8][(byte)(x2)][0] ^ IT[9][(byte)(x2 >> 8)][0] ^ IT[10][(byte)(x2 >> 16)][0] ^ IT[11][(byte)(x2 >> 24)][0] ^ - IT[12][(byte)(x2 >> 32)][0] ^ IT[13][(byte)(x2 >> 40)][0] ^ IT[14][(byte)(x2 >> 48)][0] ^ IT[15][(byte)(x2 >> 56)][0]; - *t2 = IT[0][(byte)(x1)][1] ^ IT[1][(byte)(x1 >> 8)][1] ^ IT[2][(byte)(x1 >> 16)][1] ^ IT[3][(byte)(x1 >> 24)][1] ^ IT[4][(byte)(x1 >> 32)][1] ^ IT[5][(byte)(x1 >> 40)][1] ^ - IT[6][(byte)(x1 >> 48)][1] ^ IT[7][(byte)(x1 >> 56)][1] ^ IT[8][(byte)(x2)][1] ^ IT[9][(byte)(x2 >> 8)][1] ^ IT[10][(byte)(x2 >> 16)][1] ^ IT[11][(byte)(x2 >> 24)][1] ^ - IT[12][(byte)(x2 >> 32)][1] ^ IT[13][(byte)(x2 >> 40)][1] ^ IT[14][(byte)(x2 >> 48)][1] ^ IT[15][(byte)(x2 >> 56)][1]; +#define ILS(x1,x2,t1,t2) { \ + t1 = IT[0][(uint8)(x1)][0] ^ IT[1][(uint8)(x1 >> 8)][0] ^ IT[2][(uint8)(x1 >> 16)][0] ^ IT[3][(uint8)(x1 >> 24)][0] ^ IT[4][(uint8)(x1 >> 32)][0] ^ 
IT[5][(uint8)(x1 >> 40)][0] ^ \ + IT[6][(uint8)(x1 >> 48)][0] ^ IT[7][(uint8)(x1 >> 56)][0] ^ IT[8][(uint8)(x2)][0] ^ IT[9][(uint8)(x2 >> 8)][0] ^ IT[10][(uint8)(x2 >> 16)][0] ^ IT[11][(uint8)(x2 >> 24)][0] ^ \ + IT[12][(uint8)(x2 >> 32)][0] ^ IT[13][(uint8)(x2 >> 40)][0] ^ IT[14][(uint8)(x2 >> 48)][0] ^ IT[15][(uint8)(x2 >> 56)][0]; \ + t2 = IT[0][(uint8)(x1)][1] ^ IT[1][(uint8)(x1 >> 8)][1] ^ IT[2][(uint8)(x1 >> 16)][1] ^ IT[3][(uint8)(x1 >> 24)][1] ^ IT[4][(uint8)(x1 >> 32)][1] ^ IT[5][(uint8)(x1 >> 40)][1] ^ \ + IT[6][(uint8)(x1 >> 48)][1] ^ IT[7][(uint8)(x1 >> 56)][1] ^ IT[8][(uint8)(x2)][1] ^ IT[9][(uint8)(x2 >> 8)][1] ^ IT[10][(uint8)(x2 >> 16)][1] ^ IT[11][(uint8)(x2 >> 24)][1] ^ \ + IT[12][(uint8)(x2 >> 32)][1] ^ IT[13][(uint8)(x2 >> 40)][1] ^ IT[14][(uint8)(x2 >> 48)][1] ^ IT[15][(uint8)(x2 >> 56)][1]; \ } - static inline void ILSS(uint64 x1, uint64 x2, uint64* t1, uint64* t2) - { - *t1 = IT[0][S[(byte)(x1)]][0] ^ IT[1][S[(byte)(x1 >> 8)]][0] ^ IT[2][S[(byte)(x1 >> 16)]][0] ^ IT[3][S[(byte)(x1 >> 24)]][0] ^ IT[4][S[(byte)(x1 >> 32)]][0] ^ IT[5][S[(byte)(x1 >> 40)]][0] ^ - IT[6][S[(byte)(x1 >> 48)]][0] ^ IT[7][S[(byte)(x1 >> 56)]][0] ^ IT[8][S[(byte)(x2)]][0] ^ IT[9][S[(byte)(x2 >> 8)]][0] ^ IT[10][S[(byte)(x2 >> 16)]][0] ^ IT[11][S[(byte)(x2 >> 24)]][0] ^ - IT[12][S[(byte)(x2 >> 32)]][0] ^ IT[13][S[(byte)(x2 >> 40)]][0] ^ IT[14][S[(byte)(x2 >> 48)]][0] ^ IT[15][S[(byte)(x2 >> 56)]][0]; - *t2 = IT[0][S[(byte)(x1)]][1] ^ IT[1][S[(byte)(x1 >> 8)]][1] ^ IT[2][S[(byte)(x1 >> 16)]][1] ^ IT[3][S[(byte)(x1 >> 24)]][1] ^ IT[4][S[(byte)(x1 >> 32)]][1] ^ IT[5][S[(byte)(x1 >> 40)]][1] ^ - IT[6][S[(byte)(x1 >> 48)]][1] ^ IT[7][S[(byte)(x1 >> 56)]][1] ^ IT[8][S[(byte)(x2)]][1] ^ IT[9][S[(byte)(x2 >> 8)]][1] ^ IT[10][S[(byte)(x2 >> 16)]][1] ^ IT[11][S[(byte)(x2 >> 24)]][1] ^ - IT[12][S[(byte)(x2 >> 32)]][1] ^ IT[13][S[(byte)(x2 >> 40)]][1] ^ IT[14][S[(byte)(x2 >> 48)]][1] ^ IT[15][S[(byte)(x2 >> 56)]][1]; +#define ILSS(x1,x2,t1,t2) { \ + t1 = IT[0][S[(uint8)(x1)]][0] ^ IT[1][S[(uint8)(x1 >> 8)]][0] ^ IT[2][S[(uint8)(x1 >> 16)]][0] ^ IT[3][S[(uint8)(x1 >> 24)]][0] ^ IT[4][S[(uint8)(x1 >> 32)]][0] ^ IT[5][S[(uint8)(x1 >> 40)]][0] ^ \ + IT[6][S[(uint8)(x1 >> 48)]][0] ^ IT[7][S[(uint8)(x1 >> 56)]][0] ^ IT[8][S[(uint8)(x2)]][0] ^ IT[9][S[(uint8)(x2 >> 8)]][0] ^ IT[10][S[(uint8)(x2 >> 16)]][0] ^ IT[11][S[(uint8)(x2 >> 24)]][0] ^ \ + IT[12][S[(uint8)(x2 >> 32)]][0] ^ IT[13][S[(uint8)(x2 >> 40)]][0] ^ IT[14][S[(uint8)(x2 >> 48)]][0] ^ IT[15][S[(uint8)(x2 >> 56)]][0]; \ + t2 = IT[0][S[(uint8)(x1)]][1] ^ IT[1][S[(uint8)(x1 >> 8)]][1] ^ IT[2][S[(uint8)(x1 >> 16)]][1] ^ IT[3][S[(uint8)(x1 >> 24)]][1] ^ IT[4][S[(uint8)(x1 >> 32)]][1] ^ IT[5][S[(uint8)(x1 >> 40)]][1] ^ \ + IT[6][S[(uint8)(x1 >> 48)]][1] ^ IT[7][S[(uint8)(x1 >> 56)]][1] ^ IT[8][S[(uint8)(x2)]][1] ^ IT[9][S[(uint8)(x2 >> 8)]][1] ^ IT[10][S[(uint8)(x2 >> 16)]][1] ^ IT[11][S[(uint8)(x2 >> 24)]][1] ^ \ + IT[12][S[(uint8)(x2 >> 32)]][1] ^ IT[13][S[(uint8)(x2 >> 40)]][1] ^ IT[14][S[(uint8)(x2 >> 48)]][1] ^ IT[15][S[(uint8)(x2 >> 56)]][1]; \ } - static inline void ISI(byte* val) - { - val[0] = IS[val[0]]; - val[1] = IS[val[1]]; - val[2] = IS[val[2]]; - val[3] = IS[val[3]]; - val[4] = IS[val[4]]; - val[5] = IS[val[5]]; - val[6] = IS[val[6]]; - val[7] = IS[val[7]]; +#define ISI(val) { \ + (val)[0] = IS[(val)[0]]; \ + (val)[1] = IS[(val)[1]]; \ + (val)[2] = IS[(val)[2]]; \ + (val)[3] = IS[(val)[3]]; \ + (val)[4] = IS[(val)[4]]; \ + (val)[5] = IS[(val)[5]]; \ + (val)[6] = IS[(val)[6]]; \ + (val)[7] = IS[(val)[7]]; \ } - static inline void F(uint64 k00, 
uint64 k01, uint64 k10, uint64 k11, int i, uint64* o00, uint64* o01, uint64* o10, uint64* o11) - { - *o10 = k00; - *o11 = k01; - k00 ^= C[i][0]; - k01 ^= C[i][1]; - LS(k00, k01, o00, o01); - *o00 ^= k10; - *o01 ^= k11; +#define F(k00,k01,k10,k11,i,o00,o01,o10,o11) { \ + o10 = k00; \ + o11 = k01; \ + k00 ^= C[i][0]; \ + k01 ^= C[i][1]; \ + LS(k00, k01, o00, o01); \ + o00 ^= k10; \ + o01 ^= k11; \ + } + +#define FK(k00,k01,k10,k11,ist) { \ + for (i = 0; i < 8; i += 2) \ + { \ + F(k00, k01, k10, k11, i + ist, t00, t01, t10, t11); \ + F(t00, t01, t10, t11, i + 1 + ist, k00, k01, k10, k11); \ + } \ } - static inline void FK(uint64* k00, uint64* k01, uint64* k10, uint64* k11, int ist) + void kuznyechik_set_key(const uint8* key, kuznyechik_kds* kds) { - uint64 t00, t01, t10, t11; - int i; - for (i = 0; i < 8; i += 2) +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE && !defined(_UEFI) && (!defined (TC_WINDOWS_DRIVER) || (!defined (DEBUG))) + if(HasSSE2()) { - F(*k00, *k01, *k10, *k11, i + ist, &t00, &t01, &t10, &t11); - F(t00, t01, t10, t11, i + 1 + ist, k00, k01, k10, k11); + kuznyechik_set_key_simd (key, kds); } - } - - void kuznyechik_set_key(const byte* key, kuznyechik_kds* kds) - { - int i; - uint64 k00 = *(const uint64*)key; - uint64 k01 = *(((const uint64*)key) + 1); - uint64 k10 = *(((const uint64*)key) + 2); - uint64 k11 = *(((const uint64*)key) + 3); + else +#endif + { + int i; + uint64 k00 = *(const uint64*)key; + uint64 k01 = *(((const uint64*)key) + 1); + uint64 k10 = *(((const uint64*)key) + 2); + uint64 k11 = *(((const uint64*)key) + 3); + uint64 t00, t01, t10, t11; - kds->rke[0][0] = k00; - kds->rke[0][1] = k01; - kds->rke[1][0] = k10; - kds->rke[1][1] = k11; - FK(&k00, &k01, &k10, &k11, 0); - kds->rke[2][0] = k00; - kds->rke[2][1] = k01; - kds->rke[3][0] = k10; - kds->rke[3][1] = k11; - FK(&k00, &k01, &k10, &k11, 8); - kds->rke[4][0] = k00; - kds->rke[4][1] = k01; - kds->rke[5][0] = k10; - kds->rke[5][1] = k11; - FK(&k00, &k01, &k10, &k11, 16); - kds->rke[6][0] = k00; - kds->rke[6][1] = k01; - kds->rke[7][0] = k10; - kds->rke[7][1] = k11; - FK(&k00, &k01, &k10, &k11, 24); - kds->rke[8][0] = k00; - kds->rke[8][1] = k01; - kds->rke[9][0] = k10; - kds->rke[9][1] = k11; + kds->rke[0] = k00; + kds->rke[1] = k01; + kds->rke[2] = k10; + kds->rke[3] = k11; + FK(k00, k01, k10, k11, 0); + kds->rke[4] = k00; + kds->rke[5] = k01; + kds->rke[6] = k10; + kds->rke[7] = k11; + FK(k00, k01, k10, k11, 8); + kds->rke[8] = k00; + kds->rke[9] = k01; + kds->rke[10] = k10; + kds->rke[11] = k11; + FK(k00, k01, k10, k11, 16); + kds->rke[12] = k00; + kds->rke[13] = k01; + kds->rke[14] = k10; + kds->rke[15] = k11; + FK(k00, k01, k10, k11, 24); + kds->rke[16] = k00; + kds->rke[17] = k01; + kds->rke[18] = k10; + kds->rke[19] = k11; - kds->rkd[0][0] = kds->rke[0][0]; - kds->rkd[0][1] = kds->rke[0][1]; + kds->rkd[0] = kds->rke[0]; + kds->rkd[1] = kds->rke[1]; - for (i = 1; i < 10; i++) - { - uint64 t1 = kds->rke[i][0], t2 = kds->rke[i][1]; - kds->rkd[i][0] = t1; kds->rkd[i][1] = t2; - ILSS(t1, t2, &kds->rkd[i][0], &kds->rkd[i][1]); + for (i = 1; i < 10; i++) + { + uint64 t1 = kds->rke[2*i], t2 = kds->rke[2*i+1]; + kds->rkd[2*i] = t1; kds->rkd[2*i + 1] = t2; + ILSS(t1, t2, kds->rkd[2*i], kds->rkd[2*i+1]); + } } - #ifdef CPPCRYPTO_DEBUG for(int i = 0; i < 10; i++) - printf("key[%d]: { 0x%016I64X, 0x%016I64X }\n", i, kds->rke[i][0], kds->rke[i][1]); + printf("key[%d]: { 0x%016I64X, 0x%016I64X }\n", i, kds->rke[2*i], kds->rke[2*i+1]); #endif } - void kuznyechik_encrypt_block(byte* out, const byte* in, kuznyechik_kds* 
kds) + void kuznyechik_encrypt_block(uint8* out, const uint8* in, kuznyechik_kds* kds) + { +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE && !defined(_UEFI) && (!defined (TC_WINDOWS_DRIVER) || (!defined (DEBUG))) + if(HasSSE2()) + { + kuznyechik_encrypt_block_simd (out, in, kds); + } + else +#endif + { + uint64 x1 = *(const uint64*)in; + uint64 x2 = *(((const uint64*)in)+1); + uint64 t1, t2; + x1 ^= kds->rke[0]; + x2 ^= kds->rke[1]; + LS(x1, x2, t1, t2); + t1 ^= kds->rke[2]; + t2 ^= kds->rke[3]; + LS(t1, t2, x1, x2); + x1 ^= kds->rke[4]; + x2 ^= kds->rke[5]; + LS(x1, x2, t1, t2); + t1 ^= kds->rke[6]; + t2 ^= kds->rke[7]; + LS(t1, t2, x1, x2); + x1 ^= kds->rke[8]; + x2 ^= kds->rke[9]; + LS(x1, x2, t1, t2); + t1 ^= kds->rke[10]; + t2 ^= kds->rke[11]; + LS(t1, t2, x1, x2); + x1 ^= kds->rke[12]; + x2 ^= kds->rke[13]; + LS(x1, x2, t1, t2); + t1 ^= kds->rke[14]; + t2 ^= kds->rke[15]; + LS(t1, t2, x1, x2); + x1 ^= kds->rke[16]; + x2 ^= kds->rke[17]; + LS(x1, x2, t1, t2); + t1 ^= kds->rke[18]; + t2 ^= kds->rke[19]; + *(uint64*)out = t1; + *(((uint64*)out) + 1) = t2; + } + } + + void kuznyechik_encrypt_blocks(uint8* out, const uint8* in, size_t blocks, kuznyechik_kds* kds) { - uint64 x1 = *(const uint64*)in; - uint64 x2 = *(((const uint64*)in)+1); - uint64 t1, t2; - x1 ^= kds->rke[0][0]; - x2 ^= kds->rke[0][1]; - LS(x1, x2, &t1, &t2); - t1 ^= kds->rke[1][0]; - t2 ^= kds->rke[1][1]; - LS(t1, t2, &x1, &x2); - x1 ^= kds->rke[2][0]; - x2 ^= kds->rke[2][1]; - LS(x1, x2, &t1, &t2); - t1 ^= kds->rke[3][0]; - t2 ^= kds->rke[3][1]; - LS(t1, t2, &x1, &x2); - x1 ^= kds->rke[4][0]; - x2 ^= kds->rke[4][1]; - LS(x1, x2, &t1, &t2); - t1 ^= kds->rke[5][0]; - t2 ^= kds->rke[5][1]; - LS(t1, t2, &x1, &x2); - x1 ^= kds->rke[6][0]; - x2 ^= kds->rke[6][1]; - LS(x1, x2, &t1, &t2); - t1 ^= kds->rke[7][0]; - t2 ^= kds->rke[7][1]; - LS(t1, t2, &x1, &x2); - x1 ^= kds->rke[8][0]; - x2 ^= kds->rke[8][1]; - LS(x1, x2, &t1, &t2); - t1 ^= kds->rke[9][0]; - t2 ^= kds->rke[9][1]; - *(uint64*)out = t1; - *(((uint64*)out) + 1) = t2; +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE && !defined(_UEFI) && (!defined (DEBUG) || !defined (TC_WINDOWS_DRIVER)) + if(HasSSE2()) + { + kuznyechik_encrypt_blocks_simd (out, in, blocks, kds); + } + else +#endif + { + while (blocks) + { + kuznyechik_encrypt_block (out, in, kds); + in += 16; + out += 16; + blocks--; + } + } } - void kuznyechik_decrypt_block(byte* out, const byte* in, kuznyechik_kds* kds) + void kuznyechik_decrypt_block(uint8* out, const uint8* in, kuznyechik_kds* kds) { - uint64 x1 = *(const uint64*)in; - uint64 x2 = *(((const uint64*)in) + 1); - uint64 t1, t2; +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE && !defined(_UEFI) && (!defined (TC_WINDOWS_DRIVER) || (!defined (DEBUG))) + if(HasSSE2()) + { + kuznyechik_decrypt_block_simd (out, in, kds); + } + else +#endif + { + uint64 x1 = *(const uint64*)in; + uint64 x2 = *(((const uint64*)in) + 1); + uint64 t1, t2; - ILSS(x1, x2, &t1, &t2); - t1 ^= kds->rkd[9][0]; - t2 ^= kds->rkd[9][1]; - ILS(t1, t2, &x1, &x2); - x1 ^= kds->rkd[8][0]; - x2 ^= kds->rkd[8][1]; - ILS(x1, x2, &t1, &t2); - t1 ^= kds->rkd[7][0]; - t2 ^= kds->rkd[7][1]; - ILS(t1, t2, &x1, &x2); - x1 ^= kds->rkd[6][0]; - x2 ^= kds->rkd[6][1]; - ILS(x1, x2, &t1, &t2); - t1 ^= kds->rkd[5][0]; - t2 ^= kds->rkd[5][1]; - ILS(t1, t2, &x1, &x2); - x1 ^= kds->rkd[4][0]; - x2 ^= kds->rkd[4][1]; - ILS(x1, x2, &t1, &t2); - t1 ^= kds->rkd[3][0]; - t2 ^= kds->rkd[3][1]; - ILS(t1, t2, &x1, &x2); - x1 ^= kds->rkd[2][0]; - x2 ^= kds->rkd[2][1]; - ILS(x1, x2, &t1, &t2); - t1 ^= kds->rkd[1][0]; - t2 ^= 
kds->rkd[1][1]; - ISI((byte*)&t1); - ISI((byte*)&t2); - t1 ^= kds->rkd[0][0]; - t2 ^= kds->rkd[0][1]; - *(uint64*)out = t1; - *(((uint64*)out) + 1) = t2; + ILSS(x1, x2, t1, t2); + t1 ^= kds->rkd[18]; + t2 ^= kds->rkd[19]; + ILS(t1, t2, x1, x2); + x1 ^= kds->rkd[16]; + x2 ^= kds->rkd[17]; + ILS(x1, x2, t1, t2); + t1 ^= kds->rkd[14]; + t2 ^= kds->rkd[15]; + ILS(t1, t2, x1, x2); + x1 ^= kds->rkd[12]; + x2 ^= kds->rkd[13]; + ILS(x1, x2, t1, t2); + t1 ^= kds->rkd[10]; + t2 ^= kds->rkd[11]; + ILS(t1, t2, x1, x2); + x1 ^= kds->rkd[8]; + x2 ^= kds->rkd[9]; + ILS(x1, x2, t1, t2); + t1 ^= kds->rkd[6]; + t2 ^= kds->rkd[7]; + ILS(t1, t2, x1, x2); + x1 ^= kds->rkd[4]; + x2 ^= kds->rkd[5]; + ILS(x1, x2, t1, t2); + t1 ^= kds->rkd[2]; + t2 ^= kds->rkd[3]; + ISI((uint8*)&t1); + ISI((uint8*)&t2); + t1 ^= kds->rkd[0]; + t2 ^= kds->rkd[1]; + *(uint64*)out = t1; + *(((uint64*)out) + 1) = t2; + } } + void kuznyechik_decrypt_blocks(uint8* out, const uint8* in, size_t blocks, kuznyechik_kds* kds) + { +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE && !defined(_UEFI) && (!defined (DEBUG) || !defined (TC_WINDOWS_DRIVER)) + if(HasSSE2()) + { + kuznyechik_decrypt_blocks_simd (out, in, blocks, kds); + } + else +#endif + { + while (blocks) + { + kuznyechik_decrypt_block (out, in, kds); + in += 16; + out += 16; + blocks--; + } + } + } #if 0 static inline uint8_t mul_gf(uint8_t x, uint8_t y, uint16_t p) { uint8_t r = 0; diff --git a/src/Crypto/kuznyechik.h b/src/Crypto/kuznyechik.h index 79ca539b..05dc6e0e 100644 --- a/src/Crypto/kuznyechik.h +++ b/src/Crypto/kuznyechik.h @@ -15,17 +15,19 @@ extern "C" { #endif typedef struct _kuznyechik_kds { - uint64 rke[10][2]; - uint64 rkd[10][2]; + uint64 rke[20]; + uint64 rkd[20]; } kuznyechik_kds; #define KUZNYECHIK_KS (sizeof(kuznyechik_kds)) -void kuznyechik_encrypt_block(byte* out, const byte* in, kuznyechik_kds* kds); -void kuznyechik_decrypt_block(byte* out, const byte* in, kuznyechik_kds* kds); -void kuznyechik_set_key(const byte* key, kuznyechik_kds *kds); +void kuznyechik_encrypt_block(uint8* out, const uint8* in, kuznyechik_kds* kds); +void kuznyechik_encrypt_blocks(uint8* out, const uint8* in, size_t blocks, kuznyechik_kds* kds); +void kuznyechik_decrypt_block(uint8* out, const uint8* in, kuznyechik_kds* kds); +void kuznyechik_decrypt_blocks(uint8* out, const uint8* in, size_t blocks, kuznyechik_kds* kds); +void kuznyechik_set_key(const uint8* key, kuznyechik_kds *kds); #ifdef __cplusplus } #endif diff --git a/src/Crypto/kuznyechik_simd.c b/src/Crypto/kuznyechik_simd.c new file mode 100644 index 00000000..a7391908 --- /dev/null +++ b/src/Crypto/kuznyechik_simd.c @@ -0,0 +1,9517 @@ +/* + Based on implementation from https://github.com/aprelev/libgost15 + + Parallelization and speed optimizations for VeraCrypt use. 
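+
+	A minimal usage sketch of the portable API declared in kuznyechik.h
+	(editorial illustration only; key is assumed to point to 32 bytes,
+	in and out to 16-byte blocks provided by the caller):
+
+		kuznyechik_kds kds;
+		kuznyechik_set_key(key, &kds);
+		kuznyechik_encrypt_block(out, in, &kds);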
+*/ + +#include "kuznyechik.h" +#include "cpu.h" +#include "misc.h" + +#if CRYPTOPP_BOOL_SSE2_INTRINSICS_AVAILABLE + +#if defined (_MSC_VER) && (_MSC_VER <= 1600) +#define UNROLL_LOOPS +#endif + + +enum { + NumberOfRounds = 10, + NumberOfRoundsInKeySchedule = 8, + + BlockLengthInBytes = 128 / 8, + KeyLengthInBytes = 256 / 8, +}; + +typedef union { + uint_8t asBytes[BlockLengthInBytes]; + __m128i asXMMWord; +} block_t; + +CRYPTOPP_ALIGN_DATA(16) const uint_8t roundConstants[512] CRYPTOPP_SECTION_ALIGN16 = { + 0x6e, 0xa2, 0x76, 0x72, 0x6c, 0x48, 0x7a, 0xb8, 0x5d, 0x27, 0xbd, 0x10, 0xdd, 0x84, 0x94, 0x01, + 0xdc, 0x87, 0xec, 0xe4, 0xd8, 0x90, 0xf4, 0xb3, 0xba, 0x4e, 0xb9, 0x20, 0x79, 0xcb, 0xeb, 0x02, + 0xb2, 0x25, 0x9a, 0x96, 0xb4, 0xd8, 0x8e, 0x0b, 0xe7, 0x69, 0x04, 0x30, 0xa4, 0x4f, 0x7f, 0x03, + 0x7b, 0xcd, 0x1b, 0x0b, 0x73, 0xe3, 0x2b, 0xa5, 0xb7, 0x9c, 0xb1, 0x40, 0xf2, 0x55, 0x15, 0x04, + 0x15, 0x6f, 0x6d, 0x79, 0x1f, 0xab, 0x51, 0x1d, 0xea, 0xbb, 0x0c, 0x50, 0x2f, 0xd1, 0x81, 0x05, + 0xa7, 0x4a, 0xf7, 0xef, 0xab, 0x73, 0xdf, 0x16, 0x0d, 0xd2, 0x08, 0x60, 0x8b, 0x9e, 0xfe, 0x06, + 0xc9, 0xe8, 0x81, 0x9d, 0xc7, 0x3b, 0xa5, 0xae, 0x50, 0xf5, 0xb5, 0x70, 0x56, 0x1a, 0x6a, 0x07, + 0xf6, 0x59, 0x36, 0x16, 0xe6, 0x05, 0x56, 0x89, 0xad, 0xfb, 0xa1, 0x80, 0x27, 0xaa, 0x2a, 0x08, + 0x98, 0xfb, 0x40, 0x64, 0x8a, 0x4d, 0x2c, 0x31, 0xf0, 0xdc, 0x1c, 0x90, 0xfa, 0x2e, 0xbe, 0x09, + 0x2a, 0xde, 0xda, 0xf2, 0x3e, 0x95, 0xa2, 0x3a, 0x17, 0xb5, 0x18, 0xa0, 0x5e, 0x61, 0xc1, 0x0a, + 0x44, 0x7c, 0xac, 0x80, 0x52, 0xdd, 0xd8, 0x82, 0x4a, 0x92, 0xa5, 0xb0, 0x83, 0xe5, 0x55, 0x0b, + 0x8d, 0x94, 0x2d, 0x1d, 0x95, 0xe6, 0x7d, 0x2c, 0x1a, 0x67, 0x10, 0xc0, 0xd5, 0xff, 0x3f, 0x0c, + 0xe3, 0x36, 0x5b, 0x6f, 0xf9, 0xae, 0x07, 0x94, 0x47, 0x40, 0xad, 0xd0, 0x08, 0x7b, 0xab, 0x0d, + 0x51, 0x13, 0xc1, 0xf9, 0x4d, 0x76, 0x89, 0x9f, 0xa0, 0x29, 0xa9, 0xe0, 0xac, 0x34, 0xd4, 0x0e, + 0x3f, 0xb1, 0xb7, 0x8b, 0x21, 0x3e, 0xf3, 0x27, 0xfd, 0x0e, 0x14, 0xf0, 0x71, 0xb0, 0x40, 0x0f, + 0x2f, 0xb2, 0x6c, 0x2c, 0x0f, 0x0a, 0xac, 0xd1, 0x99, 0x35, 0x81, 0xc3, 0x4e, 0x97, 0x54, 0x10, + 0x41, 0x10, 0x1a, 0x5e, 0x63, 0x42, 0xd6, 0x69, 0xc4, 0x12, 0x3c, 0xd3, 0x93, 0x13, 0xc0, 0x11, + 0xf3, 0x35, 0x80, 0xc8, 0xd7, 0x9a, 0x58, 0x62, 0x23, 0x7b, 0x38, 0xe3, 0x37, 0x5c, 0xbf, 0x12, + 0x9d, 0x97, 0xf6, 0xba, 0xbb, 0xd2, 0x22, 0xda, 0x7e, 0x5c, 0x85, 0xf3, 0xea, 0xd8, 0x2b, 0x13, + 0x54, 0x7f, 0x77, 0x27, 0x7c, 0xe9, 0x87, 0x74, 0x2e, 0xa9, 0x30, 0x83, 0xbc, 0xc2, 0x41, 0x14, + 0x3a, 0xdd, 0x01, 0x55, 0x10, 0xa1, 0xfd, 0xcc, 0x73, 0x8e, 0x8d, 0x93, 0x61, 0x46, 0xd5, 0x15, + 0x88, 0xf8, 0x9b, 0xc3, 0xa4, 0x79, 0x73, 0xc7, 0x94, 0xe7, 0x89, 0xa3, 0xc5, 0x09, 0xaa, 0x16, + 0xe6, 0x5a, 0xed, 0xb1, 0xc8, 0x31, 0x09, 0x7f, 0xc9, 0xc0, 0x34, 0xb3, 0x18, 0x8d, 0x3e, 0x17, + 0xd9, 0xeb, 0x5a, 0x3a, 0xe9, 0x0f, 0xfa, 0x58, 0x34, 0xce, 0x20, 0x43, 0x69, 0x3d, 0x7e, 0x18, + 0xb7, 0x49, 0x2c, 0x48, 0x85, 0x47, 0x80, 0xe0, 0x69, 0xe9, 0x9d, 0x53, 0xb4, 0xb9, 0xea, 0x19, + 0x05, 0x6c, 0xb6, 0xde, 0x31, 0x9f, 0x0e, 0xeb, 0x8e, 0x80, 0x99, 0x63, 0x10, 0xf6, 0x95, 0x1a, + 0x6b, 0xce, 0xc0, 0xac, 0x5d, 0xd7, 0x74, 0x53, 0xd3, 0xa7, 0x24, 0x73, 0xcd, 0x72, 0x01, 0x1b, + 0xa2, 0x26, 0x41, 0x31, 0x9a, 0xec, 0xd1, 0xfd, 0x83, 0x52, 0x91, 0x03, 0x9b, 0x68, 0x6b, 0x1c, + 0xcc, 0x84, 0x37, 0x43, 0xf6, 0xa4, 0xab, 0x45, 0xde, 0x75, 0x2c, 0x13, 0x46, 0xec, 0xff, 0x1d, + 0x7e, 0xa1, 0xad, 0xd5, 0x42, 0x7c, 0x25, 0x4e, 0x39, 0x1c, 0x28, 0x23, 0xe2, 0xa3, 0x80, 0x1e, + 0x10, 0x03, 0xdb, 0xa7, 0x2e, 0x34, 0x5f, 0xf6, 0x64, 0x3b, 0x95, 0x33, 0x3f, 0x27, 0x14, 0x1f, + 
0x5e, 0xa7, 0xd8, 0x58, 0x1e, 0x14, 0x9b, 0x61, 0xf1, 0x6a, 0xc1, 0x45, 0x9c, 0xed, 0xa8, 0x20, +}; + +/** Exponents table (generator = 0x02 in GF(256) modulo g(x) = 111000011). */ +CRYPTOPP_ALIGN_DATA(16) const uint_8t exponentialTable[256] CRYPTOPP_SECTION_ALIGN16 = { + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0xc3, 0x45, 0x8a, 0xd7, 0x6d, 0xda, 0x77, 0xee, + 0x1f, 0x3e, 0x7c, 0xf8, 0x33, 0x66, 0xcc, 0x5b, 0xb6, 0xaf, 0x9d, 0xf9, 0x31, 0x62, 0xc4, 0x4b, + 0x96, 0xef, 0x1d, 0x3a, 0x74, 0xe8, 0x13, 0x26, 0x4c, 0x98, 0xf3, 0x25, 0x4a, 0x94, 0xeb, 0x15, + 0x2a, 0x54, 0xa8, 0x93, 0xe5, 0x09, 0x12, 0x24, 0x48, 0x90, 0xe3, 0x05, 0x0a, 0x14, 0x28, 0x50, + 0xa0, 0x83, 0xc5, 0x49, 0x92, 0xe7, 0x0d, 0x1a, 0x34, 0x68, 0xd0, 0x63, 0xc6, 0x4f, 0x9e, 0xff, + 0x3d, 0x7a, 0xf4, 0x2b, 0x56, 0xac, 0x9b, 0xf5, 0x29, 0x52, 0xa4, 0x8b, 0xd5, 0x69, 0xd2, 0x67, + 0xce, 0x5f, 0xbe, 0xbf, 0xbd, 0xb9, 0xb1, 0xa1, 0x81, 0xc1, 0x41, 0x82, 0xc7, 0x4d, 0x9a, 0xf7, + 0x2d, 0x5a, 0xb4, 0xab, 0x95, 0xe9, 0x11, 0x22, 0x44, 0x88, 0xd3, 0x65, 0xca, 0x57, 0xae, 0x9f, + 0xfd, 0x39, 0x72, 0xe4, 0x0b, 0x16, 0x2c, 0x58, 0xb0, 0xa3, 0x85, 0xc9, 0x51, 0xa2, 0x87, 0xcd, + 0x59, 0xb2, 0xa7, 0x8d, 0xd9, 0x71, 0xe2, 0x07, 0x0e, 0x1c, 0x38, 0x70, 0xe0, 0x03, 0x06, 0x0c, + 0x18, 0x30, 0x60, 0xc0, 0x43, 0x86, 0xcf, 0x5d, 0xba, 0xb7, 0xad, 0x99, 0xf1, 0x21, 0x42, 0x84, + 0xcb, 0x55, 0xaa, 0x97, 0xed, 0x19, 0x32, 0x64, 0xc8, 0x53, 0xa6, 0x8f, 0xdd, 0x79, 0xf2, 0x27, + 0x4e, 0x9c, 0xfb, 0x35, 0x6a, 0xd4, 0x6b, 0xd6, 0x6f, 0xde, 0x7f, 0xfe, 0x3f, 0x7e, 0xfc, 0x3b, + 0x76, 0xec, 0x1b, 0x36, 0x6c, 0xd8, 0x73, 0xe6, 0x0f, 0x1e, 0x3c, 0x78, 0xf0, 0x23, 0x46, 0x8c, + 0xdb, 0x75, 0xea, 0x17, 0x2e, 0x5c, 0xb8, 0xb3, 0xa5, 0x89, 0xd1, 0x61, 0xc2, 0x47, 0x8e, 0xdf, + 0x7d, 0xfa, 0x37, 0x6e, 0xdc, 0x7b, 0xf6, 0x2f, 0x5e, 0xbc, 0xbb, 0xb5, 0xa9, 0x91, 0xe1, 0x01 +}; + + +/** Discrete logarithms table (generator = 0x02 in GF(256) modulo g(x) = 111000011). 
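+ * Illustrative use (editorial note, not part of the original sources):
+ * together with exponentialTable above, the product of two non-zero
+ * field elements a and b can be computed as
+ *     exponentialTable[(logarithmicTable[a] + logarithmicTable[b]) % 255]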
*/ +CRYPTOPP_ALIGN_DATA(16) const uint_8t logarithmicTable[256] CRYPTOPP_SECTION_ALIGN16 = { + 0x00, 0x00, 0x01, 0x9d, 0x02, 0x3b, 0x9e, 0x97, 0x03, 0x35, 0x3c, 0x84, 0x9f, 0x46, 0x98, 0xd8, + 0x04, 0x76, 0x36, 0x26, 0x3d, 0x2f, 0x85, 0xe3, 0xa0, 0xb5, 0x47, 0xd2, 0x99, 0x22, 0xd9, 0x10, + 0x05, 0xad, 0x77, 0xdd, 0x37, 0x2b, 0x27, 0xbf, 0x3e, 0x58, 0x30, 0x53, 0x86, 0x70, 0xe4, 0xf7, + 0xa1, 0x1c, 0xb6, 0x14, 0x48, 0xc3, 0xd3, 0xf2, 0x9a, 0x81, 0x23, 0xcf, 0xda, 0x50, 0x11, 0xcc, + 0x06, 0x6a, 0xae, 0xa4, 0x78, 0x09, 0xde, 0xed, 0x38, 0x43, 0x2c, 0x1f, 0x28, 0x6d, 0xc0, 0x4d, + 0x3f, 0x8c, 0x59, 0xb9, 0x31, 0xb1, 0x54, 0x7d, 0x87, 0x90, 0x71, 0x17, 0xe5, 0xa7, 0xf8, 0x61, + 0xa2, 0xeb, 0x1d, 0x4b, 0xb7, 0x7b, 0x15, 0x5f, 0x49, 0x5d, 0xc4, 0xc6, 0xd4, 0x0c, 0xf3, 0xc8, + 0x9b, 0x95, 0x82, 0xd6, 0x24, 0xe1, 0xd0, 0x0e, 0xdb, 0xbd, 0x51, 0xf5, 0x12, 0xf0, 0xcd, 0xca, + 0x07, 0x68, 0x6b, 0x41, 0xaf, 0x8a, 0xa5, 0x8e, 0x79, 0xe9, 0x0a, 0x5b, 0xdf, 0x93, 0xee, 0xbb, + 0x39, 0xfd, 0x44, 0x33, 0x2d, 0x74, 0x20, 0xb3, 0x29, 0xab, 0x6e, 0x56, 0xc1, 0x1a, 0x4e, 0x7f, + 0x40, 0x67, 0x8d, 0x89, 0x5a, 0xe8, 0xba, 0x92, 0x32, 0xfc, 0xb2, 0x73, 0x55, 0xaa, 0x7e, 0x19, + 0x88, 0x66, 0x91, 0xe7, 0x72, 0xfb, 0x18, 0xa9, 0xe6, 0x65, 0xa8, 0xfa, 0xf9, 0x64, 0x62, 0x63, + 0xa3, 0x69, 0xec, 0x08, 0x1e, 0x42, 0x4c, 0x6c, 0xb8, 0x8b, 0x7c, 0xb0, 0x16, 0x8f, 0x60, 0xa6, + 0x4a, 0xea, 0x5e, 0x7a, 0xc5, 0x5c, 0xc7, 0x0b, 0xd5, 0x94, 0x0d, 0xe0, 0xf4, 0xbc, 0xc9, 0xef, + 0x9c, 0xfe, 0x96, 0x3a, 0x83, 0x34, 0xd7, 0x45, 0x25, 0x75, 0xe2, 0x2e, 0xd1, 0xb4, 0x0f, 0x21, + 0xdc, 0xac, 0xbe, 0x2a, 0x52, 0x57, 0xf6, 0x6f, 0x13, 0x1b, 0xf1, 0xc2, 0xce, 0x80, 0xcb, 0x4f +}; + +/** Pi substitution box defined by GOST 34.12 '2015. */ +CRYPTOPP_ALIGN_DATA(16) const uint_8t Pi[256] CRYPTOPP_SECTION_ALIGN16 = { + 0xfc, 0xee, 0xdd, 0x11, 0xcf, 0x6e, 0x31, 0x16, 0xfb, 0xc4, 0xfa, 0xda, 0x23, 0xc5, 0x04, 0x4d, + 0xe9, 0x77, 0xf0, 0xdb, 0x93, 0x2e, 0x99, 0xba, 0x17, 0x36, 0xf1, 0xbb, 0x14, 0xcd, 0x5f, 0xc1, + 0xf9, 0x18, 0x65, 0x5a, 0xe2, 0x5c, 0xef, 0x21, 0x81, 0x1c, 0x3c, 0x42, 0x8b, 0x01, 0x8e, 0x4f, + 0x05, 0x84, 0x02, 0xae, 0xe3, 0x6a, 0x8f, 0xa0, 0x06, 0x0b, 0xed, 0x98, 0x7f, 0xd4, 0xd3, 0x1f, + 0xeb, 0x34, 0x2c, 0x51, 0xea, 0xc8, 0x48, 0xab, 0xf2, 0x2a, 0x68, 0xa2, 0xfd, 0x3a, 0xce, 0xcc, + 0xb5, 0x70, 0x0e, 0x56, 0x08, 0x0c, 0x76, 0x12, 0xbf, 0x72, 0x13, 0x47, 0x9c, 0xb7, 0x5d, 0x87, + 0x15, 0xa1, 0x96, 0x29, 0x10, 0x7b, 0x9a, 0xc7, 0xf3, 0x91, 0x78, 0x6f, 0x9d, 0x9e, 0xb2, 0xb1, + 0x32, 0x75, 0x19, 0x3d, 0xff, 0x35, 0x8a, 0x7e, 0x6d, 0x54, 0xc6, 0x80, 0xc3, 0xbd, 0x0d, 0x57, + 0xdf, 0xf5, 0x24, 0xa9, 0x3e, 0xa8, 0x43, 0xc9, 0xd7, 0x79, 0xd6, 0xf6, 0x7c, 0x22, 0xb9, 0x03, + 0xe0, 0x0f, 0xec, 0xde, 0x7a, 0x94, 0xb0, 0xbc, 0xdc, 0xe8, 0x28, 0x50, 0x4e, 0x33, 0x0a, 0x4a, + 0xa7, 0x97, 0x60, 0x73, 0x1e, 0x00, 0x62, 0x44, 0x1a, 0xb8, 0x38, 0x82, 0x64, 0x9f, 0x26, 0x41, + 0xad, 0x45, 0x46, 0x92, 0x27, 0x5e, 0x55, 0x2f, 0x8c, 0xa3, 0xa5, 0x7d, 0x69, 0xd5, 0x95, 0x3b, + 0x07, 0x58, 0xb3, 0x40, 0x86, 0xac, 0x1d, 0xf7, 0x30, 0x37, 0x6b, 0xe4, 0x88, 0xd9, 0xe7, 0x89, + 0xe1, 0x1b, 0x83, 0x49, 0x4c, 0x3f, 0xf8, 0xfe, 0x8d, 0x53, 0xaa, 0x90, 0xca, 0xd8, 0x85, 0x61, + 0x20, 0x71, 0x67, 0xa4, 0x2d, 0x2b, 0x09, 0x5b, 0xcb, 0x9b, 0x25, 0xd0, 0xbe, 0xe5, 0x6c, 0x52, + 0x59, 0xa6, 0x74, 0xd2, 0xe6, 0xf4, 0xb4, 0xc0, 0xd1, 0x66, 0xaf, 0xc2, 0x39, 0x4b, 0x63, 0xb6, +}; + +/** Inversed Pi substitution box. 
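+ * Editorial note: InversedPi below is the inverse permutation of the Pi
+ * box above, i.e. InversedPi[Pi[x]] == x for every byte value x.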
*/ +CRYPTOPP_ALIGN_DATA(16) const uint_8t InversedPi[256] CRYPTOPP_SECTION_ALIGN16 = { + 0xa5, 0x2d, 0x32, 0x8f, 0x0e, 0x30, 0x38, 0xc0, 0x54, 0xe6, 0x9e, 0x39, 0x55, 0x7e, 0x52, 0x91, + 0x64, 0x03, 0x57, 0x5a, 0x1c, 0x60, 0x07, 0x18, 0x21, 0x72, 0xa8, 0xd1, 0x29, 0xc6, 0xa4, 0x3f, + 0xe0, 0x27, 0x8d, 0x0c, 0x82, 0xea, 0xae, 0xb4, 0x9a, 0x63, 0x49, 0xe5, 0x42, 0xe4, 0x15, 0xb7, + 0xc8, 0x06, 0x70, 0x9d, 0x41, 0x75, 0x19, 0xc9, 0xaa, 0xfc, 0x4d, 0xbf, 0x2a, 0x73, 0x84, 0xd5, + 0xc3, 0xaf, 0x2b, 0x86, 0xa7, 0xb1, 0xb2, 0x5b, 0x46, 0xd3, 0x9f, 0xfd, 0xd4, 0x0f, 0x9c, 0x2f, + 0x9b, 0x43, 0xef, 0xd9, 0x79, 0xb6, 0x53, 0x7f, 0xc1, 0xf0, 0x23, 0xe7, 0x25, 0x5e, 0xb5, 0x1e, + 0xa2, 0xdf, 0xa6, 0xfe, 0xac, 0x22, 0xf9, 0xe2, 0x4a, 0xbc, 0x35, 0xca, 0xee, 0x78, 0x05, 0x6b, + 0x51, 0xe1, 0x59, 0xa3, 0xf2, 0x71, 0x56, 0x11, 0x6a, 0x89, 0x94, 0x65, 0x8c, 0xbb, 0x77, 0x3c, + 0x7b, 0x28, 0xab, 0xd2, 0x31, 0xde, 0xc4, 0x5f, 0xcc, 0xcf, 0x76, 0x2c, 0xb8, 0xd8, 0x2e, 0x36, + 0xdb, 0x69, 0xb3, 0x14, 0x95, 0xbe, 0x62, 0xa1, 0x3b, 0x16, 0x66, 0xe9, 0x5c, 0x6c, 0x6d, 0xad, + 0x37, 0x61, 0x4b, 0xb9, 0xe3, 0xba, 0xf1, 0xa0, 0x85, 0x83, 0xda, 0x47, 0xc5, 0xb0, 0x33, 0xfa, + 0x96, 0x6f, 0x6e, 0xc2, 0xf6, 0x50, 0xff, 0x5d, 0xa9, 0x8e, 0x17, 0x1b, 0x97, 0x7d, 0xec, 0x58, + 0xf7, 0x1f, 0xfb, 0x7c, 0x09, 0x0d, 0x7a, 0x67, 0x45, 0x87, 0xdc, 0xe8, 0x4f, 0x1d, 0x4e, 0x04, + 0xeb, 0xf8, 0xf3, 0x3e, 0x3d, 0xbd, 0x8a, 0x88, 0xdd, 0xcd, 0x0b, 0x13, 0x98, 0x02, 0x93, 0x80, + 0x90, 0xd0, 0x24, 0x34, 0xcb, 0xed, 0xf4, 0xce, 0x99, 0x10, 0x44, 0x40, 0x92, 0x3a, 0x01, 0x26, + 0x12, 0x1a, 0x48, 0x68, 0xf5, 0x81, 0x8b, 0xc7, 0xd6, 0x20, 0x0a, 0x08, 0x00, 0x4c, 0xd7, 0x74, +}; + +/** L transformation matrix over GF(256). */ +CRYPTOPP_ALIGN_DATA(16) const uint_8t LTransformationMatrix[16][16] CRYPTOPP_SECTION_ALIGN16 = { + {0xcf, 0x6e, 0xa2, 0x76, 0x72, 0x6c, 0x48, 0x7a, 0xb8, 0x5d, 0x27, 0xbd, 0x10, 0xdd, 0x84, 0x94,}, + {0x98, 0x20, 0xc8, 0x33, 0xf2, 0x76, 0xd5, 0xe6, 0x49, 0xd4, 0x9f, 0x95, 0xe9, 0x99, 0x2d, 0x20,}, + {0x74, 0xc6, 0x87, 0x10, 0x6b, 0xec, 0x62, 0x4e, 0x87, 0xb8, 0xbe, 0x5e, 0xd0, 0x75, 0x74, 0x85,}, + {0xbf, 0xda, 0x70, 0x0c, 0xca, 0x0c, 0x17, 0x1a, 0x14, 0x2f, 0x68, 0x30, 0xd9, 0xca, 0x96, 0x10,}, + {0x93, 0x90, 0x68, 0x1c, 0x20, 0xc5, 0x06, 0xbb, 0xcb, 0x8d, 0x1a, 0xe9, 0xf3, 0x97, 0x5d, 0xc2,}, + {0x8e, 0x48, 0x43, 0x11, 0xeb, 0xbc, 0x2d, 0x2e, 0x8d, 0x12, 0x7c, 0x60, 0x94, 0x44, 0x77, 0xc0,}, + {0xf2, 0x89, 0x1c, 0xd6, 0x02, 0xaf, 0xc4, 0xf1, 0xab, 0xee, 0xad, 0xbf, 0x3d, 0x5a, 0x6f, 0x01,}, + {0xf3, 0x9c, 0x2b, 0x6a, 0xa4, 0x6e, 0xe7, 0xbe, 0x49, 0xf6, 0xc9, 0x10, 0xaf, 0xe0, 0xde, 0xfb,}, + {0x0a, 0xc1, 0xa1, 0xa6, 0x8d, 0xa3, 0xd5, 0xd4, 0x09, 0x08, 0x84, 0xef, 0x7b, 0x30, 0x54, 0x01,}, + {0xbf, 0x64, 0x63, 0xd7, 0xd4, 0xe1, 0xeb, 0xaf, 0x6c, 0x54, 0x2f, 0x39, 0xff, 0xa6, 0xb4, 0xc0,}, + {0xf6, 0xb8, 0x30, 0xf6, 0xc4, 0x90, 0x99, 0x37, 0x2a, 0x0f, 0xeb, 0xec, 0x64, 0x31, 0x8d, 0xc2,}, + {0xa9, 0x2d, 0x6b, 0x49, 0x01, 0x58, 0x78, 0xb1, 0x01, 0xf3, 0xfe, 0x91, 0x91, 0xd3, 0xd1, 0x10,}, + {0xea, 0x86, 0x9f, 0x07, 0x65, 0x0e, 0x52, 0xd4, 0x60, 0x98, 0xc6, 0x7f, 0x52, 0xdf, 0x44, 0x85,}, + {0x8e, 0x44, 0x30, 0x14, 0xdd, 0x02, 0xf5, 0x2a, 0x8e, 0xc8, 0x48, 0x48, 0xf8, 0x48, 0x3c, 0x20,}, + {0x4d, 0xd0, 0xe3, 0xe8, 0x4c, 0xc3, 0x16, 0x6e, 0x4b, 0x7f, 0xa2, 0x89, 0x0d, 0x64, 0xa5, 0x94,}, + {0x6e, 0xa2, 0x76, 0x72, 0x6c, 0x48, 0x7a, 0xb8, 0x5d, 0x27, 0xbd, 0x10, 0xdd, 0x84, 0x94, 0x01,}, +}; + +/** Inversed L transformation matrix over GF(256). 
*/ +CRYPTOPP_ALIGN_DATA(16) const uint_8t inversedLTransformationMatrix[16][16] CRYPTOPP_SECTION_ALIGN16 = { + {0x01, 0x94, 0x84, 0xdd, 0x10, 0xbd, 0x27, 0x5d, 0xb8, 0x7a, 0x48, 0x6c, 0x72, 0x76, 0xa2, 0x6e,}, + {0x94, 0xa5, 0x64, 0x0d, 0x89, 0xa2, 0x7f, 0x4b, 0x6e, 0x16, 0xc3, 0x4c, 0xe8, 0xe3, 0xd0, 0x4d,}, + {0x20, 0x3c, 0x48, 0xf8, 0x48, 0x48, 0xc8, 0x8e, 0x2a, 0xf5, 0x02, 0xdd, 0x14, 0x30, 0x44, 0x8e,}, + {0x85, 0x44, 0xdf, 0x52, 0x7f, 0xc6, 0x98, 0x60, 0xd4, 0x52, 0x0e, 0x65, 0x07, 0x9f, 0x86, 0xea,}, + {0x10, 0xd1, 0xd3, 0x91, 0x91, 0xfe, 0xf3, 0x01, 0xb1, 0x78, 0x58, 0x01, 0x49, 0x6b, 0x2d, 0xa9,}, + {0xc2, 0x8d, 0x31, 0x64, 0xec, 0xeb, 0x0f, 0x2a, 0x37, 0x99, 0x90, 0xc4, 0xf6, 0x30, 0xb8, 0xf6,}, + {0xc0, 0xb4, 0xa6, 0xff, 0x39, 0x2f, 0x54, 0x6c, 0xaf, 0xeb, 0xe1, 0xd4, 0xd7, 0x63, 0x64, 0xbf,}, + {0x01, 0x54, 0x30, 0x7b, 0xef, 0x84, 0x08, 0x09, 0xd4, 0xd5, 0xa3, 0x8d, 0xa6, 0xa1, 0xc1, 0x0a,}, + {0xfb, 0xde, 0xe0, 0xaf, 0x10, 0xc9, 0xf6, 0x49, 0xbe, 0xe7, 0x6e, 0xa4, 0x6a, 0x2b, 0x9c, 0xf3,}, + {0x01, 0x6f, 0x5a, 0x3d, 0xbf, 0xad, 0xee, 0xab, 0xf1, 0xc4, 0xaf, 0x02, 0xd6, 0x1c, 0x89, 0xf2,}, + {0xc0, 0x77, 0x44, 0x94, 0x60, 0x7c, 0x12, 0x8d, 0x2e, 0x2d, 0xbc, 0xeb, 0x11, 0x43, 0x48, 0x8e,}, + {0xc2, 0x5d, 0x97, 0xf3, 0xe9, 0x1a, 0x8d, 0xcb, 0xbb, 0x06, 0xc5, 0x20, 0x1c, 0x68, 0x90, 0x93,}, + {0x10, 0x96, 0xca, 0xd9, 0x30, 0x68, 0x2f, 0x14, 0x1a, 0x17, 0x0c, 0xca, 0x0c, 0x70, 0xda, 0xbf,}, + {0x85, 0x74, 0x75, 0xd0, 0x5e, 0xbe, 0xb8, 0x87, 0x4e, 0x62, 0xec, 0x6b, 0x10, 0x87, 0xc6, 0x74,}, + {0x20, 0x2d, 0x99, 0xe9, 0x95, 0x9f, 0xd4, 0x49, 0xe6, 0xd5, 0x76, 0xf2, 0x33, 0xc8, 0x20, 0x98,}, + {0x94, 0x84, 0xdd, 0x10, 0xbd, 0x27, 0x5d, 0xb8, 0x7a, 0x48, 0x6c, 0x72, 0x76, 0xa2, 0x6e, 0xcf,}, +}; + +CRYPTOPP_ALIGN_DATA(16) const uint_8t bitmask[16] CRYPTOPP_SECTION_ALIGN16 = { + 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, +}; + +CRYPTOPP_ALIGN_DATA(16) const uint_8t precomputedInversedLSTable[16 * 256 * 16] CRYPTOPP_SECTION_ALIGN16 = { + /* Matrix row: 0 */ + 0xa5, 0xcc, 0x0e, 0x86, 0xc2, 0x4f, 0xba, 0x59, 0x3b, 0xe3, 0xef, 0x79, 0x82, 0x53, 0x11, 0xf0, /* Byte value: 0x00 */ + 0x2d, 0x03, 0x96, 0x94, 0x95, 0x6c, 0x2a, 0xb6, 0xf5, 0x9c, 0xba, 0xe7, 0x37, 0x83, 0x91, 0xbd, /* Byte value: 0x01 */ + 0x32, 0x17, 0xb1, 0xab, 0xa6, 0xf9, 0x11, 0xd2, 0x03, 0xc3, 0x8e, 0xc9, 0x90, 0x58, 0x92, 0xad, /* Byte value: 0x02 */ + 0x8f, 0xa5, 0x82, 0x44, 0x27, 0x96, 0x65, 0xbf, 0x60, 0xda, 0x6e, 0x59, 0x28, 0x51, 0x68, 0x84, /* Byte value: 0x03 */ + 0x0e, 0xd4, 0x34, 0xac, 0xe0, 0xa9, 0x29, 0xa0, 0x9f, 0x89, 0x76, 0x4d, 0xf9, 0xc1, 0x13, 0x51, /* Byte value: 0x04 */ + 0x30, 0xfc, 0x7a, 0xd2, 0x86, 0x40, 0x5f, 0x68, 0xb0, 0x37, 0x1e, 0x11, 0x74, 0xb4, 0x15, 0x71, /* Byte value: 0x05 */ + 0x38, 0xd6, 0xd0, 0xf5, 0x06, 0xe1, 0xa4, 0xc5, 0x39, 0x61, 0x1b, 0xf7, 0x62, 0x82, 0x4c, 0x87, /* Byte value: 0x06 */ + 0xc0, 0x76, 0x2b, 0xce, 0x5d, 0xc3, 0xbf, 0x63, 0x85, 0xdc, 0x78, 0x44, 0x13, 0x95, 0x54, 0x07, /* Byte value: 0x07 */ + 0x54, 0xd2, 0xdb, 0x47, 0x09, 0x71, 0x7d, 0x0f, 0xb6, 0x72, 0xc1, 0x40, 0x97, 0x04, 0xf2, 0xe8, /* Byte value: 0x08 */ + 0xe6, 0x20, 0x58, 0xd9, 0x78, 0x0a, 0x07, 0x9f, 0xf2, 0x98, 0x1f, 0xf1, 0xa4, 0xba, 0xb9, 0xfe, /* Byte value: 0x09 */ + 0x9e, 0x65, 0x91, 0xd7, 0xf4, 0xaa, 0x77, 0x7b, 0x09, 0x0c, 0x2c, 0x3a, 0x76, 0x4b, 0x78, 0xc5, /* Byte value: 0x0a */ + 0x39, 0x42, 0x54, 0x28, 0x16, 0x5c, 0x83, 0x98, 0x81, 0x1b, 0x53, 0x9b, 0x10, 0xf4, 0xee, 0xe9, /* Byte value: 0x0b */ + 0x55, 0x46, 0x5f, 0x9a, 0x19, 0xcc, 0x5a, 0x52, 0x0e, 
0x08, 0x89, 0x2c, 0xe5, 0x72, 0x50, 0x86, /* Byte value: 0x0c */ + 0x7e, 0xbb, 0x57, 0x85, 0xec, 0xa8, 0xa2, 0xe9, 0xed, 0x4b, 0x40, 0x60, 0x3d, 0x06, 0x8b, 0x9c, /* Byte value: 0x0d */ + 0x52, 0x2c, 0x45, 0xcc, 0x69, 0x79, 0xaf, 0x02, 0xa0, 0xad, 0xb2, 0xeb, 0x78, 0xf3, 0xb8, 0x4f, /* Byte value: 0x0e */ + 0x91, 0x25, 0x21, 0xa6, 0x04, 0xbe, 0x79, 0x86, 0x2e, 0xff, 0x12, 0x1b, 0xfd, 0xfc, 0xc9, 0xfa, /* Byte value: 0x0f */ + 0x64, 0x2e, 0xa1, 0x95, 0x8f, 0x31, 0x22, 0x67, 0x06, 0x45, 0xdf, 0x51, 0xe3, 0xb0, 0xe7, 0x99, /* Byte value: 0x10 */ + 0x03, 0x7f, 0x4f, 0xa4, 0x30, 0x04, 0x69, 0xe7, 0x0b, 0x8e, 0xd8, 0xb4, 0x96, 0x9a, 0x25, 0xb2, /* Byte value: 0x11 */ + 0x57, 0xad, 0x94, 0xe3, 0x39, 0x75, 0x14, 0xe8, 0xbd, 0xfc, 0x19, 0xf4, 0x01, 0x9e, 0xd7, 0x5a, /* Byte value: 0x12 */ + 0x5a, 0x06, 0xef, 0xeb, 0xe9, 0xd8, 0x54, 0xaf, 0x29, 0xfb, 0xb7, 0x0d, 0x6e, 0xc5, 0xe1, 0xb9, /* Byte value: 0x13 */ + 0x1c, 0x6b, 0x68, 0x9b, 0x03, 0x91, 0x52, 0x83, 0xfd, 0xd1, 0xec, 0x9a, 0x31, 0x41, 0x26, 0xa2, /* Byte value: 0x14 */ + 0x60, 0x3b, 0xf4, 0x67, 0xcf, 0x80, 0xbe, 0xd0, 0xa3, 0x6e, 0x3c, 0x22, 0xe8, 0xab, 0x2a, 0xe2, /* Byte value: 0x15 */ + 0x07, 0x6a, 0x1a, 0x56, 0x70, 0xb5, 0xf5, 0x50, 0xae, 0xa5, 0x3b, 0xc7, 0x9d, 0x81, 0xe8, 0xc9, /* Byte value: 0x16 */ + 0x18, 0x7e, 0x3d, 0x69, 0x43, 0x20, 0xce, 0x34, 0x58, 0xfa, 0x0f, 0xe9, 0x3a, 0x5a, 0xeb, 0xd9, /* Byte value: 0x17 */ + 0x21, 0x3c, 0x69, 0x41, 0x55, 0x7c, 0x4d, 0xac, 0xd9, 0xe1, 0x5c, 0x72, 0x2a, 0xae, 0x05, 0x30, /* Byte value: 0x18 */ + 0x72, 0x84, 0xa8, 0x50, 0x2c, 0xb8, 0xc5, 0xf3, 0xc1, 0x36, 0xa6, 0xf5, 0x20, 0x2b, 0x1f, 0x11, /* Byte value: 0x19 */ + 0xa8, 0x67, 0x75, 0x8e, 0x12, 0xe2, 0xfa, 0x1e, 0xaf, 0xe4, 0x41, 0x80, 0xed, 0x08, 0x27, 0x13, /* Byte value: 0x1a */ + 0xd1, 0xb6, 0x38, 0x5d, 0x8e, 0xff, 0xad, 0xa7, 0xec, 0x0a, 0x3a, 0x27, 0x4d, 0x8f, 0x44, 0x46, /* Byte value: 0x1b */ + 0x29, 0x16, 0xc3, 0x66, 0xd5, 0xdd, 0xb6, 0x01, 0x50, 0xb7, 0x59, 0x94, 0x3c, 0x98, 0x5c, 0xc6, /* Byte value: 0x1c */ + 0xc6, 0x88, 0xb5, 0x45, 0x3d, 0xcb, 0x6d, 0x6e, 0x93, 0x03, 0x0b, 0xef, 0xfc, 0x62, 0x1e, 0xa0, /* Byte value: 0x1d */ + 0xa4, 0x58, 0x8a, 0x5b, 0xd2, 0xf2, 0x9d, 0x04, 0x83, 0x99, 0xa7, 0x15, 0xf0, 0x25, 0xb3, 0x9e, /* Byte value: 0x1e */ + 0x3f, 0xbc, 0xca, 0xa3, 0x76, 0x54, 0x51, 0x95, 0x97, 0xc4, 0x20, 0x30, 0xff, 0x03, 0xa4, 0x4e, /* Byte value: 0x1f */ + 0xe0, 0xde, 0xc6, 0x52, 0x18, 0x02, 0xd5, 0x92, 0xe4, 0x47, 0x6c, 0x5a, 0x4b, 0x4d, 0xf3, 0x59, /* Byte value: 0x20 */ + 0x27, 0xc2, 0xf7, 0xca, 0x35, 0x74, 0x9f, 0xa1, 0xcf, 0x3e, 0x2f, 0xd9, 0xc5, 0x59, 0x4f, 0x97, /* Byte value: 0x21 */ + 0x8d, 0x4e, 0x49, 0x3d, 0x07, 0x2f, 0x2b, 0x05, 0xd3, 0x2e, 0xfe, 0x81, 0xcc, 0xbd, 0xef, 0x58, /* Byte value: 0x22 */ + 0x0c, 0x3f, 0xff, 0xd5, 0xc0, 0x10, 0x67, 0x1a, 0x2c, 0x7d, 0xe6, 0x95, 0x1d, 0x2d, 0x94, 0x8d, /* Byte value: 0x23 */ + 0x82, 0x0e, 0xf9, 0x4c, 0xf7, 0x3b, 0x25, 0xf8, 0xf4, 0xdd, 0xc0, 0xa0, 0x47, 0x0a, 0x5e, 0x67, /* Byte value: 0x24 */ + 0xea, 0x1f, 0xa7, 0x0c, 0xb8, 0x1a, 0x60, 0x85, 0xde, 0xe5, 0xf9, 0x64, 0xb9, 0x97, 0x2d, 0x73, /* Byte value: 0x25 */ + 0xae, 0x99, 0xeb, 0x05, 0x72, 0xea, 0x28, 0x13, 0xb9, 0x3b, 0x32, 0x2b, 0x02, 0xff, 0x6d, 0xb4, /* Byte value: 0x26 */ + 0xb4, 0x0c, 0x1d, 0x15, 0x11, 0x73, 0xa8, 0x9d, 0x52, 0x35, 0xad, 0x1a, 0xdc, 0x49, 0x01, 0xb1, /* Byte value: 0x27 */ + 0x9a, 0x70, 0xc4, 0x25, 0xb4, 0x1b, 0xeb, 0xcc, 0xac, 0x27, 0xcf, 0x49, 0x7d, 0x50, 0xb5, 0xbe, /* Byte value: 0x28 */ + 0x63, 0x44, 0xbb, 0xc3, 0xff, 0x84, 0xd7, 0x37, 0xa8, 0xe0, 0xe4, 0x96, 0x7e, 0x31, 0x0f, 0x50, /* 
Byte value: 0x29 */ + 0x49, 0x2d, 0x37, 0x01, 0x1a, 0x5d, 0x08, 0xd1, 0xf3, 0xd9, 0x65, 0xb6, 0xd4, 0x33, 0x76, 0x24, /* Byte value: 0x2a */ + 0xe5, 0x5f, 0x17, 0x7d, 0x48, 0x0e, 0x6e, 0x78, 0xf9, 0x16, 0xc7, 0x45, 0x32, 0x20, 0x9c, 0x4c, /* Byte value: 0x2b */ + 0x42, 0x78, 0xd2, 0x82, 0xaa, 0xf8, 0x9a, 0x9b, 0x71, 0x01, 0xb8, 0xe4, 0x54, 0x9f, 0x0a, 0x60, /* Byte value: 0x2c */ + 0xe4, 0xcb, 0x93, 0xa0, 0x58, 0xb3, 0x49, 0x25, 0x41, 0x6c, 0x8f, 0x29, 0x40, 0x56, 0x3e, 0x22, /* Byte value: 0x2d */ + 0x15, 0xd5, 0x46, 0x61, 0x93, 0x8d, 0x8e, 0x73, 0xcc, 0xfd, 0xa1, 0x10, 0x55, 0x01, 0xdd, 0x3a, /* Byte value: 0x2e */ + 0xb7, 0x73, 0x52, 0xb1, 0x21, 0x77, 0xc1, 0x7a, 0x59, 0xbb, 0x75, 0xae, 0x4a, 0xd3, 0x24, 0x03, /* Byte value: 0x2f */ + 0xc8, 0x5c, 0x81, 0xe9, 0xdd, 0x62, 0x44, 0xce, 0x0c, 0x8a, 0x7d, 0xa2, 0x05, 0xa3, 0x0d, 0xf1, /* Byte value: 0x30 */ + 0x06, 0xfe, 0x9e, 0x8b, 0x60, 0x08, 0xd2, 0x0d, 0x16, 0xdf, 0x73, 0xab, 0xef, 0xf7, 0x4a, 0xa7, /* Byte value: 0x31 */ + 0x70, 0x6f, 0x63, 0x29, 0x0c, 0x01, 0x8b, 0x49, 0x72, 0xc2, 0x36, 0x2d, 0xc4, 0xc7, 0x98, 0xcd, /* Byte value: 0x32 */ + 0x9d, 0x1a, 0xde, 0x73, 0xc4, 0xae, 0x1e, 0x9c, 0x02, 0x82, 0xf4, 0x8e, 0xe0, 0xd1, 0x5d, 0x77, /* Byte value: 0x33 */ + 0x41, 0x07, 0x9d, 0x26, 0x9a, 0xfc, 0xf3, 0x7c, 0x7a, 0x8f, 0x60, 0x50, 0xc2, 0x05, 0x2f, 0xd2, /* Byte value: 0x34 */ + 0x75, 0xee, 0xb2, 0x06, 0x5c, 0x0d, 0x30, 0xa3, 0x6f, 0x93, 0x9d, 0x32, 0xbd, 0xaa, 0xf7, 0xd8, /* Byte value: 0x35 */ + 0x19, 0xea, 0xb9, 0xb4, 0x53, 0x9d, 0xe9, 0x69, 0xe0, 0x80, 0x47, 0x85, 0x48, 0x2c, 0x49, 0xb7, /* Byte value: 0x36 */ + 0xc9, 0xc8, 0x05, 0x34, 0xcd, 0xdf, 0x63, 0x93, 0xb4, 0xf0, 0x35, 0xce, 0x77, 0xd5, 0xaf, 0x9f, /* Byte value: 0x37 */ + 0xaa, 0x8c, 0xbe, 0xf7, 0x32, 0x5b, 0xb4, 0xa4, 0x1c, 0x10, 0xd1, 0x58, 0x09, 0xe4, 0xa0, 0xcf, /* Byte value: 0x38 */ + 0xfc, 0xb5, 0xae, 0xc9, 0x1b, 0x93, 0x87, 0x11, 0x19, 0x96, 0x80, 0xc0, 0x7a, 0x0c, 0xd5, 0xfb, /* Byte value: 0x39 */ + 0x4d, 0x38, 0x62, 0xf3, 0x5a, 0xec, 0x94, 0x66, 0x56, 0xf2, 0x86, 0xc5, 0xdf, 0x28, 0xbb, 0x5f, /* Byte value: 0x3a */ + 0xbf, 0x59, 0xf8, 0x96, 0xa1, 0xd6, 0x3a, 0xd7, 0xd0, 0xed, 0x70, 0x48, 0x5c, 0xe5, 0x7d, 0xf5, /* Byte value: 0x3b */ + 0x2a, 0x69, 0x8c, 0xc2, 0xe5, 0xd9, 0xdf, 0xe6, 0x5b, 0x39, 0x81, 0x20, 0xaa, 0x02, 0x79, 0x74, /* Byte value: 0x3c */ + 0x73, 0x10, 0x2c, 0x8d, 0x3c, 0x05, 0xe2, 0xae, 0x79, 0x4c, 0xee, 0x99, 0x52, 0x5d, 0xbd, 0x7f, /* Byte value: 0x3d */ + 0x84, 0xf0, 0x67, 0xc7, 0x97, 0x33, 0xf7, 0xf5, 0xe2, 0x02, 0xb3, 0x0b, 0xa8, 0xfd, 0x14, 0xc0, /* Byte value: 0x3e */ + 0xd5, 0xa3, 0x6d, 0xaf, 0xce, 0x4e, 0x31, 0x10, 0x49, 0x21, 0xd9, 0x54, 0x46, 0x94, 0x89, 0x3d, /* Byte value: 0x3f */ + 0xc3, 0x09, 0x64, 0x6a, 0x6d, 0xc7, 0xd6, 0x84, 0x8e, 0x52, 0xa0, 0xf0, 0x85, 0x0f, 0x71, 0xb5, /* Byte value: 0x40 */ + 0xaf, 0x0d, 0x6f, 0xd8, 0x62, 0x57, 0x0f, 0x4e, 0x01, 0x41, 0x7a, 0x47, 0x70, 0x89, 0xcf, 0xda, /* Byte value: 0x41 */ + 0x2b, 0xfd, 0x08, 0x1f, 0xf5, 0x64, 0xf8, 0xbb, 0xe3, 0x43, 0xc9, 0x4c, 0xd8, 0x74, 0xdb, 0x1a, /* Byte value: 0x42 */ + 0x86, 0x1b, 0xac, 0xbe, 0xb7, 0x8a, 0xb9, 0x4f, 0x51, 0xf6, 0x23, 0xd3, 0x4c, 0x11, 0x93, 0x1c, /* Byte value: 0x43 */ + 0xa7, 0x27, 0xc5, 0xff, 0xe2, 0xf6, 0xf4, 0xe3, 0x88, 0x17, 0x7f, 0xa1, 0x66, 0xbf, 0x96, 0x2c, /* Byte value: 0x44 */ + 0xb1, 0x8d, 0xcc, 0x3a, 0x41, 0x7f, 0x13, 0x77, 0x4f, 0x64, 0x06, 0x05, 0xa5, 0x24, 0x6e, 0xa4, /* Byte value: 0x45 */ + 0xb2, 0xf2, 0x83, 0x9e, 0x71, 0x7b, 0x7a, 0x90, 0x44, 0xea, 0xde, 0xb1, 0x33, 0xbe, 0x4b, 0x16, /* Byte value: 0x46 */ + 0x5b, 0x92, 0x6b, 0x36, 
0xf9, 0x65, 0x73, 0xf2, 0x91, 0x81, 0xff, 0x61, 0x1c, 0xb3, 0x43, 0xd7, /* Byte value: 0x47 */ + 0x46, 0x6d, 0x87, 0x70, 0xea, 0x49, 0x06, 0x2c, 0xd4, 0x2a, 0x5b, 0x97, 0x5f, 0x84, 0xc7, 0x1b, /* Byte value: 0x48 */ + 0xd3, 0x5d, 0xf3, 0x24, 0xae, 0x46, 0xe3, 0x1d, 0x5f, 0xfe, 0xaa, 0xff, 0xa9, 0x63, 0xc3, 0x9a, /* Byte value: 0x49 */ + 0x9f, 0xf1, 0x15, 0x0a, 0xe4, 0x17, 0x50, 0x26, 0xb1, 0x76, 0x64, 0x56, 0x04, 0x3d, 0xda, 0xab, /* Byte value: 0x4a */ + 0xfd, 0x21, 0x2a, 0x14, 0x0b, 0x2e, 0xa0, 0x4c, 0xa1, 0xec, 0xc8, 0xac, 0x08, 0x7a, 0x77, 0x95, /* Byte value: 0x4b */ + 0xd4, 0x37, 0xe9, 0x72, 0xde, 0xf3, 0x16, 0x4d, 0xf1, 0x5b, 0x91, 0x38, 0x34, 0xe2, 0x2b, 0x53, /* Byte value: 0x4c */ + 0x0f, 0x40, 0xb0, 0x71, 0xf0, 0x14, 0x0e, 0xfd, 0x27, 0xf3, 0x3e, 0x21, 0x8b, 0xb7, 0xb1, 0x3f, /* Byte value: 0x4d */ + 0x9c, 0x8e, 0x5a, 0xae, 0xd4, 0x13, 0x39, 0xc1, 0xba, 0xf8, 0xbc, 0xe2, 0x92, 0xa7, 0xff, 0x19, /* Byte value: 0x4e */ + 0x2f, 0xe8, 0x5d, 0xed, 0xb5, 0xd5, 0x64, 0x0c, 0x46, 0x68, 0x2a, 0x3f, 0xd3, 0x6f, 0x16, 0x61, /* Byte value: 0x4f */ + 0x9b, 0xe4, 0x40, 0xf8, 0xa4, 0xa6, 0xcc, 0x91, 0x14, 0x5d, 0x87, 0x25, 0x0f, 0x26, 0x17, 0xd0, /* Byte value: 0x50 */ + 0x43, 0xec, 0x56, 0x5f, 0xba, 0x45, 0xbd, 0xc6, 0xc9, 0x7b, 0xf0, 0x88, 0x26, 0xe9, 0xa8, 0x0e, /* Byte value: 0x51 */ + 0xef, 0x9e, 0x76, 0x23, 0xe8, 0x16, 0xdb, 0x6f, 0xc3, 0xb4, 0x52, 0x7b, 0xc0, 0xfa, 0x42, 0x66, /* Byte value: 0x52 */ + 0xd9, 0x9c, 0x92, 0x7a, 0x0e, 0x5e, 0x56, 0x0a, 0x65, 0x5c, 0x3f, 0xc1, 0x5b, 0xb9, 0x1d, 0xb0, /* Byte value: 0x53 */ + 0x79, 0xd1, 0x4d, 0xd3, 0x9c, 0x1d, 0x57, 0xb9, 0x43, 0xee, 0x7b, 0xa7, 0xa0, 0x87, 0x63, 0x55, /* Byte value: 0x54 */ + 0xb6, 0xe7, 0xd6, 0x6c, 0x31, 0xca, 0xe6, 0x27, 0xe1, 0xc1, 0x3d, 0xc2, 0x38, 0xa5, 0x86, 0x6d, /* Byte value: 0x55 */ + 0x53, 0xb8, 0xc1, 0x11, 0x79, 0xc4, 0x88, 0x5f, 0x18, 0xd7, 0xfa, 0x87, 0x0a, 0x85, 0x1a, 0x21, /* Byte value: 0x56 */ + 0x7f, 0x2f, 0xd3, 0x58, 0xfc, 0x15, 0x85, 0xb4, 0x55, 0x31, 0x08, 0x0c, 0x4f, 0x70, 0x29, 0xf2, /* Byte value: 0x57 */ + 0xc1, 0xe2, 0xaf, 0x13, 0x4d, 0x7e, 0x98, 0x3e, 0x3d, 0xa6, 0x30, 0x28, 0x61, 0xe3, 0xf6, 0x69, /* Byte value: 0x58 */ + 0xf0, 0x8a, 0x51, 0x1c, 0xdb, 0x83, 0xe0, 0x0b, 0x35, 0xeb, 0x66, 0x55, 0x67, 0x21, 0x41, 0x76, /* Byte value: 0x59 */ + 0x23, 0xd7, 0xa2, 0x38, 0x75, 0xc5, 0x03, 0x16, 0x6a, 0x15, 0xcc, 0xaa, 0xce, 0x42, 0x82, 0xec, /* Byte value: 0x5a */ + 0xe7, 0xb4, 0xdc, 0x04, 0x68, 0xb7, 0x20, 0xc2, 0x4a, 0xe2, 0x57, 0x9d, 0xd6, 0xcc, 0x1b, 0x90, /* Byte value: 0x5b */ + 0x25, 0x29, 0x3c, 0xb3, 0x15, 0xcd, 0xd1, 0x1b, 0x7c, 0xca, 0xbf, 0x01, 0x21, 0xb5, 0xc8, 0x4b, /* Byte value: 0x5c */ + 0x5e, 0x13, 0xba, 0x19, 0xa9, 0x69, 0xc8, 0x18, 0x8c, 0xd0, 0x54, 0x7e, 0x65, 0xde, 0x2c, 0xc2, /* Byte value: 0x5d */ + 0xb5, 0x98, 0x99, 0xc8, 0x01, 0xce, 0x8f, 0xc0, 0xea, 0x4f, 0xe5, 0x76, 0xae, 0x3f, 0xa3, 0xdf, /* Byte value: 0x5e */ + 0x1e, 0x80, 0xa3, 0xe2, 0x23, 0x28, 0x1c, 0x39, 0x4e, 0x25, 0x7c, 0x42, 0xd5, 0xad, 0xa1, 0x7e, /* Byte value: 0x5f */ + 0xa2, 0xa6, 0x14, 0xd0, 0xb2, 0xfa, 0x4f, 0x09, 0x95, 0x46, 0xd4, 0xbe, 0x1f, 0xd2, 0xf9, 0x39, /* Byte value: 0x60 */ + 0xdf, 0x62, 0x0c, 0xf1, 0x6e, 0x56, 0x84, 0x07, 0x73, 0x83, 0x4c, 0x6a, 0xb4, 0x4e, 0x57, 0x17, /* Byte value: 0x61 */ + 0xa6, 0xb3, 0x41, 0x22, 0xf2, 0x4b, 0xd3, 0xbe, 0x30, 0x6d, 0x37, 0xcd, 0x14, 0xc9, 0x34, 0x42, /* Byte value: 0x62 */ + 0xfe, 0x5e, 0x65, 0xb0, 0x3b, 0x2a, 0xc9, 0xab, 0xaa, 0x62, 0x10, 0x18, 0x9e, 0xe0, 0x52, 0x27, /* Byte value: 0x63 */ + 0xac, 0x72, 0x20, 0x7c, 0x52, 0x53, 0x66, 0xa9, 0x0a, 0xcf, 0xa2, 
0xf3, 0xe6, 0x13, 0xea, 0x68, /* Byte value: 0x64 */ + 0x22, 0x43, 0x26, 0xe5, 0x65, 0x78, 0x24, 0x4b, 0xd2, 0x6f, 0x84, 0xc6, 0xbc, 0x34, 0x20, 0x82, /* Byte value: 0x65 */ + 0xf9, 0x34, 0x7f, 0xe6, 0x4b, 0x9f, 0x3c, 0xfb, 0x04, 0xc7, 0x2b, 0xdf, 0x03, 0x61, 0xba, 0xee, /* Byte value: 0x66 */ + 0xe2, 0x35, 0x0d, 0x2b, 0x38, 0xbb, 0x9b, 0x28, 0x57, 0xb3, 0xfc, 0x82, 0xaf, 0xa1, 0x74, 0x85, /* Byte value: 0x67 */ + 0x4a, 0x52, 0x78, 0xa5, 0x2a, 0x59, 0x61, 0x36, 0xf8, 0x57, 0xbd, 0x02, 0x42, 0xa9, 0x53, 0x96, /* Byte value: 0x68 */ + 0xbc, 0x26, 0xb7, 0x32, 0x91, 0xd2, 0x53, 0x30, 0xdb, 0x63, 0xa8, 0xfc, 0xca, 0x7f, 0x58, 0x47, /* Byte value: 0x69 */ + 0x35, 0x7d, 0xab, 0xfd, 0xd6, 0x4c, 0xe4, 0x82, 0xad, 0x66, 0xb5, 0x0e, 0x0d, 0xd9, 0x7a, 0x64, /* Byte value: 0x6a */ + 0xca, 0xb7, 0x4a, 0x90, 0xfd, 0xdb, 0x0a, 0x74, 0xbf, 0x7e, 0xed, 0x7a, 0xe1, 0x4f, 0x8a, 0x2d, /* Byte value: 0x6b */ + 0xee, 0x0a, 0xf2, 0xfe, 0xf8, 0xab, 0xfc, 0x32, 0x7b, 0xce, 0x1a, 0x17, 0xb2, 0x8c, 0xe0, 0x08, /* Byte value: 0x6c */ + 0x78, 0x45, 0xc9, 0x0e, 0x8c, 0xa0, 0x70, 0xe4, 0xfb, 0x94, 0x33, 0xcb, 0xd2, 0xf1, 0xc1, 0x3b, /* Byte value: 0x6d */ + 0x05, 0x81, 0xd1, 0x2f, 0x50, 0x0c, 0xbb, 0xea, 0x1d, 0x51, 0xab, 0x1f, 0x79, 0x6d, 0x6f, 0x15, /* Byte value: 0x6e */ + 0x6b, 0x6e, 0x11, 0xe4, 0x7f, 0x25, 0x2c, 0x9a, 0x21, 0xb6, 0xe1, 0x70, 0x68, 0x07, 0x56, 0xa6, /* Byte value: 0x6f */ + 0x51, 0x53, 0x0a, 0x68, 0x59, 0x7d, 0xc6, 0xe5, 0xab, 0x23, 0x6a, 0x5f, 0xee, 0x69, 0x9d, 0xfd, /* Byte value: 0x70 */ + 0xe1, 0x4a, 0x42, 0x8f, 0x08, 0xbf, 0xf2, 0xcf, 0x5c, 0x3d, 0x24, 0x36, 0x39, 0x3b, 0x51, 0x37, /* Byte value: 0x71 */ + 0x59, 0x79, 0xa0, 0x4f, 0xd9, 0xdc, 0x3d, 0x48, 0x22, 0x75, 0x6f, 0xb9, 0xf8, 0x5f, 0xc4, 0x0b, /* Byte value: 0x72 */ + 0xa3, 0x32, 0x90, 0x0d, 0xa2, 0x47, 0x68, 0x54, 0x2d, 0x3c, 0x9c, 0xd2, 0x6d, 0xa4, 0x5b, 0x57, /* Byte value: 0x73 */ + 0xf2, 0x61, 0x9a, 0x65, 0xfb, 0x3a, 0xae, 0xb1, 0x86, 0x1f, 0xf6, 0x8d, 0x83, 0xcd, 0xc6, 0xaa, /* Byte value: 0x74 */ + 0x71, 0xfb, 0xe7, 0xf4, 0x1c, 0xbc, 0xac, 0x14, 0xca, 0xb8, 0x7e, 0x41, 0xb6, 0xb1, 0x3a, 0xa3, /* Byte value: 0x75 */ + 0x56, 0x39, 0x10, 0x3e, 0x29, 0xc8, 0x33, 0xb5, 0x05, 0x86, 0x51, 0x98, 0x73, 0xe8, 0x75, 0x34, /* Byte value: 0x76 */ + 0x11, 0xc0, 0x13, 0x93, 0xd3, 0x3c, 0x12, 0xc4, 0x69, 0xd6, 0x42, 0x63, 0x5e, 0x1a, 0x10, 0x41, /* Byte value: 0x77 */ + 0x6a, 0xfa, 0x95, 0x39, 0x6f, 0x98, 0x0b, 0xc7, 0x99, 0xcc, 0xa9, 0x1c, 0x1a, 0x71, 0xf4, 0xc8, /* Byte value: 0x78 */ + 0x89, 0x5b, 0x1c, 0xcf, 0x47, 0x9e, 0xb7, 0xb2, 0x76, 0x05, 0x1d, 0xf2, 0xc7, 0xa6, 0x22, 0x23, /* Byte value: 0x79 */ + 0x94, 0xa4, 0xf0, 0x89, 0x54, 0xb2, 0xc2, 0x6c, 0x33, 0xae, 0xb9, 0x04, 0x84, 0x91, 0xa6, 0xef, /* Byte value: 0x7a */ + 0x65, 0xba, 0x25, 0x48, 0x9f, 0x8c, 0x05, 0x3a, 0xbe, 0x3f, 0x97, 0x3d, 0x91, 0xc6, 0x45, 0xf7, /* Byte value: 0x7b */ + 0x8c, 0xda, 0xcd, 0xe0, 0x17, 0x92, 0x0c, 0x58, 0x6b, 0x54, 0xb6, 0xed, 0xbe, 0xcb, 0x4d, 0x36, /* Byte value: 0x7c */ + 0xbb, 0x4c, 0xad, 0x64, 0xe1, 0x67, 0xa6, 0x60, 0x75, 0xc6, 0x93, 0x3b, 0x57, 0xfe, 0xb0, 0x8e, /* Byte value: 0x7d */ + 0x77, 0x05, 0x79, 0x7f, 0x7c, 0xb4, 0x7e, 0x19, 0xdc, 0x67, 0x0d, 0xea, 0x59, 0x46, 0x70, 0x04, /* Byte value: 0x7e */ + 0x3c, 0xc3, 0x85, 0x07, 0x46, 0x50, 0x38, 0x72, 0x9c, 0x4a, 0xf8, 0x84, 0x69, 0x99, 0x81, 0xfc, /* Byte value: 0x7f */ + 0x7b, 0x3a, 0x86, 0xaa, 0xbc, 0xa4, 0x19, 0x03, 0xf0, 0x1a, 0xeb, 0x7f, 0x44, 0x6b, 0xe4, 0x89, /* Byte value: 0x80 */ + 0x28, 0x82, 0x47, 0xbb, 0xc5, 0x60, 0x91, 0x5c, 0xe8, 0xcd, 0x11, 0xf8, 0x4e, 0xee, 0xfe, 0xa8, /* Byte value: 
0x81 */ + 0xab, 0x18, 0x3a, 0x2a, 0x22, 0xe6, 0x93, 0xf9, 0xa4, 0x6a, 0x99, 0x34, 0x7b, 0x92, 0x02, 0xa1, /* Byte value: 0x82 */ + 0xd2, 0xc9, 0x77, 0xf9, 0xbe, 0xfb, 0xc4, 0x40, 0xe7, 0x84, 0xe2, 0x93, 0xdb, 0x15, 0x61, 0xf4, /* Byte value: 0x83 */ + 0x31, 0x68, 0xfe, 0x0f, 0x96, 0xfd, 0x78, 0x35, 0x08, 0x4d, 0x56, 0x7d, 0x06, 0xc2, 0xb7, 0x1f, /* Byte value: 0x84 */ + 0xde, 0xf6, 0x88, 0x2c, 0x7e, 0xeb, 0xa3, 0x5a, 0xcb, 0xf9, 0x04, 0x06, 0xc6, 0x38, 0xf5, 0x79, /* Byte value: 0x85 */ + 0xc4, 0x63, 0x7e, 0x3c, 0x1d, 0x72, 0x23, 0xd4, 0x20, 0xf7, 0x9b, 0x37, 0x18, 0x8e, 0x99, 0x7c, /* Byte value: 0x86 */ + 0x5f, 0x87, 0x3e, 0xc4, 0xb9, 0xd4, 0xef, 0x45, 0x34, 0xaa, 0x1c, 0x12, 0x17, 0xa8, 0x8e, 0xac, /* Byte value: 0x87 */ + 0xcc, 0x49, 0xd4, 0x1b, 0x9d, 0xd3, 0xd8, 0x79, 0xa9, 0xa1, 0x9e, 0xd1, 0x0e, 0xb8, 0xc0, 0x8a, /* Byte value: 0x88 */ + 0xcf, 0x36, 0x9b, 0xbf, 0xad, 0xd7, 0xb1, 0x9e, 0xa2, 0x2f, 0x46, 0x65, 0x98, 0x22, 0xe5, 0x38, /* Byte value: 0x89 */ + 0x76, 0x91, 0xfd, 0xa2, 0x6c, 0x09, 0x59, 0x44, 0x64, 0x1d, 0x45, 0x86, 0x2b, 0x30, 0xd2, 0x6a, /* Byte value: 0x8a */ + 0x2c, 0x97, 0x12, 0x49, 0x85, 0xd1, 0x0d, 0xeb, 0x4d, 0xe6, 0xf2, 0x8b, 0x45, 0xf5, 0x33, 0xd3, /* Byte value: 0x8b */ + 0xb8, 0x33, 0xe2, 0xc0, 0xd1, 0x63, 0xcf, 0x87, 0x7e, 0x48, 0x4b, 0x8f, 0xc1, 0x64, 0x95, 0x3c, /* Byte value: 0x8c */ + 0xd8, 0x08, 0x16, 0xa7, 0x1e, 0xe3, 0x71, 0x57, 0xdd, 0x26, 0x77, 0xad, 0x29, 0xcf, 0xbf, 0xde, /* Byte value: 0x8d */ + 0x2e, 0x7c, 0xd9, 0x30, 0xa5, 0x68, 0x43, 0x51, 0xfe, 0x12, 0x62, 0x53, 0xa1, 0x19, 0xb4, 0x0f, /* Byte value: 0x8e */ + 0x36, 0x02, 0xe4, 0x59, 0xe6, 0x48, 0x8d, 0x65, 0xa6, 0xe8, 0x6d, 0xba, 0x9b, 0x43, 0x5f, 0xd6, /* Byte value: 0x8f */ + 0xdb, 0x77, 0x59, 0x03, 0x2e, 0xe7, 0x18, 0xb0, 0xd6, 0xa8, 0xaf, 0x19, 0xbf, 0x55, 0x9a, 0x6c, /* Byte value: 0x90 */ + 0x69, 0x85, 0xda, 0x9d, 0x5f, 0x9c, 0x62, 0x20, 0x92, 0x42, 0x71, 0xa8, 0x8c, 0xeb, 0xd1, 0x7a, /* Byte value: 0x91 */ + 0xb3, 0x66, 0x07, 0x43, 0x61, 0xc6, 0x5d, 0xcd, 0xfc, 0x90, 0x96, 0xdd, 0x41, 0xc8, 0xe9, 0x78, /* Byte value: 0x92 */ + 0x14, 0x41, 0xc2, 0xbc, 0x83, 0x30, 0xa9, 0x2e, 0x74, 0x87, 0xe9, 0x7c, 0x27, 0x77, 0x7f, 0x54, /* Byte value: 0x93 */ + 0x95, 0x30, 0x74, 0x54, 0x44, 0x0f, 0xe5, 0x31, 0x8b, 0xd4, 0xf1, 0x68, 0xf6, 0xe7, 0x04, 0x81, /* Byte value: 0x94 */ + 0xbe, 0xcd, 0x7c, 0x4b, 0xb1, 0x6b, 0x1d, 0x8a, 0x68, 0x97, 0x38, 0x24, 0x2e, 0x93, 0xdf, 0x9b, /* Byte value: 0x95 */ + 0x62, 0xd0, 0x3f, 0x1e, 0xef, 0x39, 0xf0, 0x6a, 0x10, 0x9a, 0xac, 0xfa, 0x0c, 0x47, 0xad, 0x3e, /* Byte value: 0x96 */ + 0xa1, 0xd9, 0x5b, 0x74, 0x82, 0xfe, 0x26, 0xee, 0x9e, 0xc8, 0x0c, 0x0a, 0x89, 0x48, 0xdc, 0x8b, /* Byte value: 0x97 */ + 0x3b, 0xa9, 0x9f, 0x51, 0x36, 0xe5, 0xcd, 0x22, 0x32, 0xef, 0xc3, 0x43, 0xf4, 0x18, 0x69, 0x35, /* Byte value: 0x98 */ + 0x16, 0xaa, 0x09, 0xc5, 0xa3, 0x89, 0xe7, 0x94, 0xc7, 0x73, 0x79, 0xa4, 0xc3, 0x9b, 0xf8, 0x88, /* Byte value: 0x99 */ + 0x66, 0xc5, 0x6a, 0xec, 0xaf, 0x88, 0x6c, 0xdd, 0xb5, 0xb1, 0x4f, 0x89, 0x07, 0x5c, 0x60, 0x45, /* Byte value: 0x9a */ + 0xe9, 0x60, 0xe8, 0xa8, 0x88, 0x1e, 0x09, 0x62, 0xd5, 0x6b, 0x21, 0xd0, 0x2f, 0x0d, 0x08, 0xc1, /* Byte value: 0x9b */ + 0x5c, 0xf8, 0x71, 0x60, 0x89, 0xd0, 0x86, 0xa2, 0x3f, 0x24, 0xc4, 0xa6, 0x81, 0x32, 0xab, 0x1e, /* Byte value: 0x9c */ + 0x6c, 0x04, 0x0b, 0xb2, 0x0f, 0x90, 0xd9, 0xca, 0x8f, 0x13, 0xda, 0xb7, 0xf5, 0x86, 0xbe, 0x6f, /* Byte value: 0x9d */ + 0x6d, 0x90, 0x8f, 0x6f, 0x1f, 0x2d, 0xfe, 0x97, 0x37, 0x69, 0x92, 0xdb, 0x87, 0xf0, 0x1c, 0x01, /* Byte value: 0x9e */ + 0xad, 0xe6, 0xa4, 0xa1, 0x42, 0xee, 
0x41, 0xf4, 0xb2, 0xb5, 0xea, 0x9f, 0x94, 0x65, 0x48, 0x06, /* Byte value: 0x9f */ + 0x37, 0x96, 0x60, 0x84, 0xf6, 0xf5, 0xaa, 0x38, 0x1e, 0x92, 0x25, 0xd6, 0xe9, 0x35, 0xfd, 0xb8, /* Byte value: 0xa0 */ + 0x61, 0xaf, 0x70, 0xba, 0xdf, 0x3d, 0x99, 0x8d, 0x1b, 0x14, 0x74, 0x4e, 0x9a, 0xdd, 0x88, 0x8c, /* Byte value: 0xa1 */ + 0x4b, 0xc6, 0xfc, 0x78, 0x3a, 0xe4, 0x46, 0x6b, 0x40, 0x2d, 0xf5, 0x6e, 0x30, 0xdf, 0xf1, 0xf8, /* Byte value: 0xa2 */ + 0xb9, 0xa7, 0x66, 0x1d, 0xc1, 0xde, 0xe8, 0xda, 0xc6, 0x32, 0x03, 0xe3, 0xb3, 0x12, 0x37, 0x52, /* Byte value: 0xa3 */ + 0xe3, 0xa1, 0x89, 0xf6, 0x28, 0x06, 0xbc, 0x75, 0xef, 0xc9, 0xb4, 0xee, 0xdd, 0xd7, 0xd6, 0xeb, /* Byte value: 0xa4 */ + 0xba, 0xd8, 0x29, 0xb9, 0xf1, 0xda, 0x81, 0x3d, 0xcd, 0xbc, 0xdb, 0x57, 0x25, 0x88, 0x12, 0xe0, /* Byte value: 0xa5 */ + 0xf1, 0x1e, 0xd5, 0xc1, 0xcb, 0x3e, 0xc7, 0x56, 0x8d, 0x91, 0x2e, 0x39, 0x15, 0x57, 0xe3, 0x18, /* Byte value: 0xa6 */ + 0xa0, 0x4d, 0xdf, 0xa9, 0x92, 0x43, 0x01, 0xb3, 0x26, 0xb2, 0x44, 0x66, 0xfb, 0x3e, 0x7e, 0xe5, /* Byte value: 0xa7 */ + 0x85, 0x64, 0xe3, 0x1a, 0x87, 0x8e, 0xd0, 0xa8, 0x5a, 0x78, 0xfb, 0x67, 0xda, 0x8b, 0xb6, 0xae, /* Byte value: 0xa8 */ + 0x83, 0x9a, 0x7d, 0x91, 0xe7, 0x86, 0x02, 0xa5, 0x4c, 0xa7, 0x88, 0xcc, 0x35, 0x7c, 0xfc, 0x09, /* Byte value: 0xa9 */ + 0xda, 0xe3, 0xdd, 0xde, 0x3e, 0x5a, 0x3f, 0xed, 0x6e, 0xd2, 0xe7, 0x75, 0xcd, 0x23, 0x38, 0x02, /* Byte value: 0xaa */ + 0x47, 0xf9, 0x03, 0xad, 0xfa, 0xf4, 0x21, 0x71, 0x6c, 0x50, 0x13, 0xfb, 0x2d, 0xf2, 0x65, 0x75, /* Byte value: 0xab */ + 0xc5, 0xf7, 0xfa, 0xe1, 0x0d, 0xcf, 0x04, 0x89, 0x98, 0x8d, 0xd3, 0x5b, 0x6a, 0xf8, 0x3b, 0x12, /* Byte value: 0xac */ + 0xb0, 0x19, 0x48, 0xe7, 0x51, 0xc2, 0x34, 0x2a, 0xf7, 0x1e, 0x4e, 0x69, 0xd7, 0x52, 0xcc, 0xca, /* Byte value: 0xad */ + 0x33, 0x83, 0x35, 0x76, 0xb6, 0x44, 0x36, 0x8f, 0xbb, 0xb9, 0xc6, 0xa5, 0xe2, 0x2e, 0x30, 0xc3, /* Byte value: 0xae */ + 0xfa, 0x4b, 0x30, 0x42, 0x7b, 0x9b, 0x55, 0x1c, 0x0f, 0x49, 0xf3, 0x6b, 0x95, 0xfb, 0x9f, 0x5c, /* Byte value: 0xaf */ + 0x96, 0x4f, 0x3b, 0xf0, 0x74, 0x0b, 0x8c, 0xd6, 0x80, 0x5a, 0x29, 0xdc, 0x60, 0x7d, 0x21, 0x33, /* Byte value: 0xb0 */ + 0x6f, 0x7b, 0x44, 0x16, 0x3f, 0x94, 0xb0, 0x2d, 0x84, 0x9d, 0x02, 0x03, 0x63, 0x1c, 0x9b, 0xdd, /* Byte value: 0xb1 */ + 0x6e, 0xef, 0xc0, 0xcb, 0x2f, 0x29, 0x97, 0x70, 0x3c, 0xe7, 0x4a, 0x6f, 0x11, 0x6a, 0x39, 0xb3, /* Byte value: 0xb2 */ + 0xc2, 0x9d, 0xe0, 0xb7, 0x7d, 0x7a, 0xf1, 0xd9, 0x36, 0x28, 0xe8, 0x9c, 0xf7, 0x79, 0xd3, 0xdb, /* Byte value: 0xb3 */ + 0xf6, 0x74, 0xcf, 0x97, 0xbb, 0x8b, 0x32, 0x06, 0x23, 0x34, 0x15, 0xfe, 0x88, 0xd6, 0x0b, 0xd1, /* Byte value: 0xb4 */ + 0x50, 0xc7, 0x8e, 0xb5, 0x49, 0xc0, 0xe1, 0xb8, 0x13, 0x59, 0x22, 0x33, 0x9c, 0x1f, 0x3f, 0x93, /* Byte value: 0xb5 */ + 0xff, 0xca, 0xe1, 0x6d, 0x2b, 0x97, 0xee, 0xf6, 0x12, 0x18, 0x58, 0x74, 0xec, 0x96, 0xf0, 0x49, /* Byte value: 0xb6 */ + 0x5d, 0x6c, 0xf5, 0xbd, 0x99, 0x6d, 0xa1, 0xff, 0x87, 0x5e, 0x8c, 0xca, 0xf3, 0x44, 0x09, 0x70, /* Byte value: 0xb7 */ + 0xa9, 0xf3, 0xf1, 0x53, 0x02, 0x5f, 0xdd, 0x43, 0x17, 0x9e, 0x09, 0xec, 0x9f, 0x7e, 0x85, 0x7d, /* Byte value: 0xb8 */ + 0x8e, 0x31, 0x06, 0x99, 0x37, 0x2b, 0x42, 0xe2, 0xd8, 0xa0, 0x26, 0x35, 0x5a, 0x27, 0xca, 0xea, /* Byte value: 0xb9 */ + 0x17, 0x3e, 0x8d, 0x18, 0xb3, 0x34, 0xc0, 0xc9, 0x7f, 0x09, 0x31, 0xc8, 0xb1, 0xed, 0x5a, 0xe6, /* Byte value: 0xba */ + 0x1b, 0x01, 0x72, 0xcd, 0x73, 0x24, 0xa7, 0xd3, 0x53, 0x74, 0xd7, 0x5d, 0xac, 0xc0, 0xce, 0x6b, /* Byte value: 0xbb */ + 0x97, 0xdb, 0xbf, 0x2d, 0x64, 0xb6, 0xab, 0x8b, 0x38, 0x20, 0x61, 0xb0, 0x12, 
0x0b, 0x83, 0x5d, /* Byte value: 0xbc */ + 0x7d, 0xc4, 0x18, 0x21, 0xdc, 0xac, 0xcb, 0x0e, 0xe6, 0xc5, 0x98, 0xd4, 0xab, 0x9c, 0xae, 0x2e, /* Byte value: 0xbd */ + 0xec, 0xe1, 0x39, 0x87, 0xd8, 0x12, 0xb2, 0x88, 0xc8, 0x3a, 0x8a, 0xcf, 0x56, 0x60, 0x67, 0xd4, /* Byte value: 0xbe */ + 0x58, 0xed, 0x24, 0x92, 0xc9, 0x61, 0x1a, 0x15, 0x9a, 0x0f, 0x27, 0xd5, 0x8a, 0x29, 0x66, 0x65, /* Byte value: 0xbf */ + 0xf7, 0xe0, 0x4b, 0x4a, 0xab, 0x36, 0x15, 0x5b, 0x9b, 0x4e, 0x5d, 0x92, 0xfa, 0xa0, 0xa9, 0xbf, /* Byte value: 0xc0 */ + 0x1f, 0x14, 0x27, 0x3f, 0x33, 0x95, 0x3b, 0x64, 0xf6, 0x5f, 0x34, 0x2e, 0xa7, 0xdb, 0x03, 0x10, /* Byte value: 0xc1 */ + 0xfb, 0xdf, 0xb4, 0x9f, 0x6b, 0x26, 0x72, 0x41, 0xb7, 0x33, 0xbb, 0x07, 0xe7, 0x8d, 0x3d, 0x32, /* Byte value: 0xc2 */ + 0x7c, 0x50, 0x9c, 0xfc, 0xcc, 0x11, 0xec, 0x53, 0x5e, 0xbf, 0xd0, 0xb8, 0xd9, 0xea, 0x0c, 0x40, /* Byte value: 0xc3 */ + 0x09, 0xbe, 0x2e, 0xfa, 0x90, 0x1c, 0xdc, 0xf0, 0x31, 0x2c, 0x4d, 0x8a, 0x64, 0x40, 0xfb, 0x98, /* Byte value: 0xc4 */ + 0x0d, 0xab, 0x7b, 0x08, 0xd0, 0xad, 0x40, 0x47, 0x94, 0x07, 0xae, 0xf9, 0x6f, 0x5b, 0x36, 0xe3, /* Byte value: 0xc5 */ + 0x7a, 0xae, 0x02, 0x77, 0xac, 0x19, 0x3e, 0x5e, 0x48, 0x60, 0xa3, 0x13, 0x36, 0x1d, 0x46, 0xe7, /* Byte value: 0xc6 */ + 0x67, 0x51, 0xee, 0x31, 0xbf, 0x35, 0x4b, 0x80, 0x0d, 0xcb, 0x07, 0xe5, 0x75, 0x2a, 0xc2, 0x2b, /* Byte value: 0xc7 */ + 0x45, 0x12, 0xc8, 0xd4, 0xda, 0x4d, 0x6f, 0xcb, 0xdf, 0xa4, 0x83, 0x23, 0xc9, 0x1e, 0xe2, 0xa9, /* Byte value: 0xc8 */ + 0x87, 0x8f, 0x28, 0x63, 0xa7, 0x37, 0x9e, 0x12, 0xe9, 0x8c, 0x6b, 0xbf, 0x3e, 0x67, 0x31, 0x72, /* Byte value: 0xc9 */ + 0xdc, 0x1d, 0x43, 0x55, 0x5e, 0x52, 0xed, 0xe0, 0x78, 0x0d, 0x94, 0xde, 0x22, 0xd4, 0x72, 0xa5, /* Byte value: 0xca */ + 0xe8, 0xf4, 0x6c, 0x75, 0x98, 0xa3, 0x2e, 0x3f, 0x6d, 0x11, 0x69, 0xbc, 0x5d, 0x7b, 0xaa, 0xaf, /* Byte value: 0xcb */ + 0x4f, 0xd3, 0xa9, 0x8a, 0x7a, 0x55, 0xda, 0xdc, 0xe5, 0x06, 0x16, 0x1d, 0x3b, 0xc4, 0x3c, 0x83, /* Byte value: 0xcc */ + 0x1d, 0xff, 0xec, 0x46, 0x13, 0x2c, 0x75, 0xde, 0x45, 0xab, 0xa4, 0xf6, 0x43, 0x37, 0x84, 0xcc, /* Byte value: 0xcd */ + 0x4e, 0x47, 0x2d, 0x57, 0x6a, 0xe8, 0xfd, 0x81, 0x5d, 0x7c, 0x5e, 0x71, 0x49, 0xb2, 0x9e, 0xed, /* Byte value: 0xce */ + 0x04, 0x15, 0x55, 0xf2, 0x40, 0xb1, 0x9c, 0xb7, 0xa5, 0x2b, 0xe3, 0x73, 0x0b, 0x1b, 0xcd, 0x7b, /* Byte value: 0xcf */ + 0xeb, 0x8b, 0x23, 0xd1, 0xa8, 0xa7, 0x47, 0xd8, 0x66, 0x9f, 0xb1, 0x08, 0xcb, 0xe1, 0x8f, 0x1d, /* Byte value: 0xd0 */ + 0xf8, 0xa0, 0xfb, 0x3b, 0x5b, 0x22, 0x1b, 0xa6, 0xbc, 0xbd, 0x63, 0xb3, 0x71, 0x17, 0x18, 0x80, /* Byte value: 0xd1 */ + 0xf3, 0xf5, 0x1e, 0xb8, 0xeb, 0x87, 0x89, 0xec, 0x3e, 0x65, 0xbe, 0xe1, 0xf1, 0xbb, 0x64, 0xc4, /* Byte value: 0xd2 */ + 0x3e, 0x28, 0x4e, 0x7e, 0x66, 0xe9, 0x76, 0xc8, 0x2f, 0xbe, 0x68, 0x5c, 0x8d, 0x75, 0x06, 0x20, /* Byte value: 0xd3 */ + 0x3d, 0x57, 0x01, 0xda, 0x56, 0xed, 0x1f, 0x2f, 0x24, 0x30, 0xb0, 0xe8, 0x1b, 0xef, 0x23, 0x92, /* Byte value: 0xd4 */ + 0xbd, 0xb2, 0x33, 0xef, 0x81, 0x6f, 0x74, 0x6d, 0x63, 0x19, 0xe0, 0x90, 0xb8, 0x09, 0xfa, 0x29, /* Byte value: 0xd5 */ + 0x8a, 0x24, 0x53, 0x6b, 0x77, 0x9a, 0xde, 0x55, 0x7d, 0x8b, 0xc5, 0x46, 0x51, 0x3c, 0x07, 0x91, /* Byte value: 0xd6 */ + 0x88, 0xcf, 0x98, 0x12, 0x57, 0x23, 0x90, 0xef, 0xce, 0x7f, 0x55, 0x9e, 0xb5, 0xd0, 0x80, 0x4d, /* Byte value: 0xd7 */ + 0xdd, 0x89, 0xc7, 0x88, 0x4e, 0xef, 0xca, 0xbd, 0xc0, 0x77, 0xdc, 0xb2, 0x50, 0xa2, 0xd0, 0xcb, /* Byte value: 0xd8 */ + 0xcd, 0xdd, 0x50, 0xc6, 0x8d, 0x6e, 0xff, 0x24, 0x11, 0xdb, 0xd6, 0xbd, 0x7c, 0xce, 0x62, 0xe4, /* Byte value: 0xd9 */ + 
0x0b, 0x55, 0xe5, 0x83, 0xb0, 0xa5, 0x92, 0x4a, 0x82, 0xd8, 0xdd, 0x52, 0x80, 0xac, 0x7c, 0x44, /* Byte value: 0xda */ + 0x13, 0x2b, 0xd8, 0xea, 0xf3, 0x85, 0x5c, 0x7e, 0xda, 0x22, 0xd2, 0xbb, 0xba, 0xf6, 0x97, 0x9d, /* Byte value: 0xdb */ + 0x98, 0x9b, 0x0f, 0x5c, 0x94, 0xa2, 0xa5, 0x76, 0x1f, 0xd3, 0x5f, 0x91, 0x99, 0xbc, 0x32, 0x62, /* Byte value: 0xdc */ + 0x02, 0xeb, 0xcb, 0x79, 0x20, 0xb9, 0x4e, 0xba, 0xb3, 0xf4, 0x90, 0xd8, 0xe4, 0xec, 0x87, 0xdc, /* Byte value: 0xdd */ + 0x93, 0xce, 0xea, 0xdf, 0x24, 0x07, 0x37, 0x3c, 0x9d, 0x0b, 0x82, 0xc3, 0x19, 0x10, 0x4e, 0x26, /* Byte value: 0xde */ + 0x80, 0xe5, 0x32, 0x35, 0xd7, 0x82, 0x6b, 0x42, 0x47, 0x29, 0x50, 0x78, 0xa3, 0xe6, 0xd9, 0xbb, /* Byte value: 0xdf */ + 0x90, 0xb1, 0xa5, 0x7b, 0x14, 0x03, 0x5e, 0xdb, 0x96, 0x85, 0x5a, 0x77, 0x8f, 0x8a, 0x6b, 0x94, /* Byte value: 0xe0 */ + 0xd0, 0x22, 0xbc, 0x80, 0x9e, 0x42, 0x8a, 0xfa, 0x54, 0x70, 0x72, 0x4b, 0x3f, 0xf9, 0xe6, 0x28, /* Byte value: 0xe1 */ + 0x24, 0xbd, 0xb8, 0x6e, 0x05, 0x70, 0xf6, 0x46, 0xc4, 0xb0, 0xf7, 0x6d, 0x53, 0xc3, 0x6a, 0x25, /* Byte value: 0xe2 */ + 0x34, 0xe9, 0x2f, 0x20, 0xc6, 0xf1, 0xc3, 0xdf, 0x15, 0x1c, 0xfd, 0x62, 0x7f, 0xaf, 0xd8, 0x0a, /* Byte value: 0xe3 */ + 0xcb, 0x23, 0xce, 0x4d, 0xed, 0x66, 0x2d, 0x29, 0x07, 0x04, 0xa5, 0x16, 0x93, 0x39, 0x28, 0x43, /* Byte value: 0xe4 */ + 0xed, 0x75, 0xbd, 0x5a, 0xc8, 0xaf, 0x95, 0xd5, 0x70, 0x40, 0xc2, 0xa3, 0x24, 0x16, 0xc5, 0xba, /* Byte value: 0xe5 */ + 0xf4, 0x9f, 0x04, 0xee, 0x9b, 0x32, 0x7c, 0xbc, 0x90, 0xc0, 0x85, 0x26, 0x6c, 0x3a, 0x8c, 0x0d, /* Byte value: 0xe6 */ + 0xce, 0xa2, 0x1f, 0x62, 0xbd, 0x6a, 0x96, 0xc3, 0x1a, 0x55, 0x0e, 0x09, 0xea, 0x54, 0x47, 0x56, /* Byte value: 0xe7 */ + 0x99, 0x0f, 0x8b, 0x81, 0x84, 0x1f, 0x82, 0x2b, 0xa7, 0xa9, 0x17, 0xfd, 0xeb, 0xca, 0x90, 0x0c, /* Byte value: 0xe8 */ + 0x10, 0x54, 0x97, 0x4e, 0xc3, 0x81, 0x35, 0x99, 0xd1, 0xac, 0x0a, 0x0f, 0x2c, 0x6c, 0xb2, 0x2f, /* Byte value: 0xe9 */ + 0x44, 0x86, 0x4c, 0x09, 0xca, 0xf0, 0x48, 0x96, 0x67, 0xde, 0xcb, 0x4f, 0xbb, 0x68, 0x40, 0xc7, /* Byte value: 0xea */ + 0x40, 0x93, 0x19, 0xfb, 0x8a, 0x41, 0xd4, 0x21, 0xc2, 0xf5, 0x28, 0x3c, 0xb0, 0x73, 0x8d, 0xbc, /* Byte value: 0xeb */ + 0x92, 0x5a, 0x6e, 0x02, 0x34, 0xba, 0x10, 0x61, 0x25, 0x71, 0xca, 0xaf, 0x6b, 0x66, 0xec, 0x48, /* Byte value: 0xec */ + 0x3a, 0x3d, 0x1b, 0x8c, 0x26, 0x58, 0xea, 0x7f, 0x8a, 0x95, 0x8b, 0x2f, 0x86, 0x6e, 0xcb, 0x5b, /* Byte value: 0xed */ + 0x01, 0x94, 0x84, 0xdd, 0x10, 0xbd, 0x27, 0x5d, 0xb8, 0x7a, 0x48, 0x6c, 0x72, 0x76, 0xa2, 0x6e, /* Byte value: 0xee */ + 0x26, 0x56, 0x73, 0x17, 0x25, 0xc9, 0xb8, 0xfc, 0x77, 0x44, 0x67, 0xb5, 0xb7, 0x2f, 0xed, 0xf9, /* Byte value: 0xef */ + 0x12, 0xbf, 0x5c, 0x37, 0xe3, 0x38, 0x7b, 0x23, 0x62, 0x58, 0x9a, 0xd7, 0xc8, 0x80, 0x35, 0xf3, /* Byte value: 0xf0 */ + 0x1a, 0x95, 0xf6, 0x10, 0x63, 0x99, 0x80, 0x8e, 0xeb, 0x0e, 0x9f, 0x31, 0xde, 0xb6, 0x6c, 0x05, /* Byte value: 0xf1 */ + 0x48, 0xb9, 0xb3, 0xdc, 0x0a, 0xe0, 0x2f, 0x8c, 0x4b, 0xa3, 0x2d, 0xda, 0xa6, 0x45, 0xd4, 0x4a, /* Byte value: 0xf2 */ + 0x68, 0x11, 0x5e, 0x40, 0x4f, 0x21, 0x45, 0x7d, 0x2a, 0x38, 0x39, 0xc4, 0xfe, 0x9d, 0x73, 0x14, /* Byte value: 0xf3 */ + 0xf5, 0x0b, 0x80, 0x33, 0x8b, 0x8f, 0x5b, 0xe1, 0x28, 0xba, 0xcd, 0x4a, 0x1e, 0x4c, 0x2e, 0x63, /* Byte value: 0xf4 */ + 0x81, 0x71, 0xb6, 0xe8, 0xc7, 0x3f, 0x4c, 0x1f, 0xff, 0x53, 0x18, 0x14, 0xd1, 0x90, 0x7b, 0xd5, /* Byte value: 0xf5 */ + 0x8b, 0xb0, 0xd7, 0xb6, 0x67, 0x27, 0xf9, 0x08, 0xc5, 0xf1, 0x8d, 0x2a, 0x23, 0x4a, 0xa5, 0xff, /* Byte value: 0xf6 */ + 0xc7, 0x1c, 0x31, 0x98, 0x2d, 0x76, 0x4a, 
0x33, 0x2b, 0x79, 0x43, 0x83, 0x8e, 0x14, 0xbc, 0xce, /* Byte value: 0xf7 */ + 0xd6, 0xdc, 0x22, 0x0b, 0xfe, 0x4a, 0x58, 0xf7, 0x42, 0xaf, 0x01, 0xe0, 0xd0, 0x0e, 0xac, 0x8f, /* Byte value: 0xf8 */ + 0x20, 0xa8, 0xed, 0x9c, 0x45, 0xc1, 0x6a, 0xf1, 0x61, 0x9b, 0x14, 0x1e, 0x58, 0xd8, 0xa7, 0x5e, /* Byte value: 0xf9 */ + 0x0a, 0xc1, 0x61, 0x5e, 0xa0, 0x18, 0xb5, 0x17, 0x3a, 0xa2, 0x95, 0x3e, 0xf2, 0xda, 0xde, 0x2a, /* Byte value: 0xfa */ + 0x08, 0x2a, 0xaa, 0x27, 0x80, 0xa1, 0xfb, 0xad, 0x89, 0x56, 0x05, 0xe6, 0x16, 0x36, 0x59, 0xf6, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0x4c, 0xac, 0xe6, 0x2e, 0x4a, 0x51, 0xb3, 0x3b, 0xee, 0x88, 0xce, 0xa9, 0xad, 0x5e, 0x19, 0x31, /* Byte value: 0xfd */ + 0xd7, 0x48, 0xa6, 0xd6, 0xee, 0xf7, 0x7f, 0xaa, 0xfa, 0xd5, 0x49, 0x8c, 0xa2, 0x78, 0x0e, 0xe1, /* Byte value: 0xfe */ + 0x74, 0x7a, 0x36, 0xdb, 0x4c, 0xb0, 0x17, 0xfe, 0xd7, 0xe9, 0xd5, 0x5e, 0xcf, 0xdc, 0x55, 0xb6, /* Byte value: 0xff */ + + /* Matrix row: 1 */ + 0xcc, 0xec, 0x18, 0x15, 0x1b, 0x11, 0x97, 0xc3, 0xf0, 0x9a, 0x7d, 0x3e, 0x77, 0x3a, 0x93, 0x9b, /* Byte value: 0x00 */ + 0x03, 0x52, 0x4c, 0x32, 0xa4, 0x91, 0x05, 0xcd, 0xbd, 0x7b, 0x44, 0x0e, 0x71, 0xad, 0xa6, 0x23, /* Byte value: 0x01 */ + 0x17, 0x0c, 0x9a, 0xa9, 0x18, 0x92, 0x39, 0xd8, 0xad, 0x0a, 0xf2, 0x46, 0x78, 0x7d, 0x02, 0x74, /* Byte value: 0x02 */ + 0xa5, 0x43, 0xab, 0x04, 0x86, 0x68, 0x2c, 0x3c, 0x84, 0x83, 0x35, 0x17, 0xdb, 0x7b, 0x40, 0x98, /* Byte value: 0x03 */ + 0xd4, 0x39, 0x3d, 0x46, 0x72, 0x13, 0xbf, 0x64, 0x51, 0xc4, 0x18, 0x4e, 0x79, 0x1b, 0xea, 0x40, /* Byte value: 0x04 */ + 0xfc, 0x85, 0x52, 0xb3, 0xc9, 0x15, 0xc7, 0x4e, 0x71, 0x26, 0xb7, 0xde, 0x6b, 0x78, 0x61, 0xee, /* Byte value: 0x05 */ + 0xd6, 0xe4, 0xf4, 0xdb, 0x0b, 0x4c, 0xb9, 0x53, 0x87, 0x96, 0x60, 0xfb, 0x27, 0x6c, 0x2e, 0xc3, /* Byte value: 0x06 */ + 0x76, 0x51, 0x8b, 0x89, 0xa2, 0x54, 0x9a, 0xfb, 0x07, 0x98, 0x99, 0xfe, 0x6f, 0x23, 0x47, 0x3e, /* Byte value: 0x07 */ + 0xd2, 0x9d, 0xa5, 0x22, 0xf9, 0xf2, 0xb5, 0x3d, 0xe8, 0x32, 0x90, 0x52, 0x9b, 0x82, 0x65, 0x06, /* Byte value: 0x08 */ + 0x20, 0x4e, 0xcd, 0xc4, 0x9c, 0xb9, 0x60, 0xf6, 0xfe, 0x69, 0x8c, 0x01, 0xa9, 0x7c, 0x1d, 0xe7, /* Byte value: 0x09 */ + 0x65, 0x24, 0x40, 0xd9, 0x48, 0x78, 0xaf, 0x4d, 0xc5, 0x36, 0x9b, 0x11, 0xab, 0xb0, 0x0e, 0x8f, /* Byte value: 0x0a */ + 0x42, 0x41, 0x90, 0xd6, 0x82, 0xee, 0xc6, 0x18, 0xe9, 0x80, 0xa3, 0xb7, 0xcf, 0x8f, 0xfe, 0x8e, /* Byte value: 0x0b */ + 0x46, 0x38, 0xc1, 0x2f, 0x70, 0x50, 0xca, 0x76, 0x86, 0x24, 0x53, 0x1e, 0x73, 0x61, 0xb5, 0x4b, /* Byte value: 0x0c */ + 0xbb, 0x32, 0x16, 0x33, 0x64, 0x8b, 0x0e, 0xc2, 0x9c, 0x2b, 0xd8, 0x7b, 0x37, 0xc3, 0xb6, 0x05, /* Byte value: 0x0d */ + 0x2c, 0xc5, 0x3e, 0x0c, 0x49, 0xb8, 0x74, 0x44, 0x4f, 0x46, 0x5f, 0x39, 0xae, 0x8d, 0xc0, 0x6b, /* Byte value: 0x0e */ + 0x25, 0xb8, 0x19, 0x92, 0xb3, 0xc9, 0x6f, 0x62, 0xfa, 0xe4, 0x40, 0x13, 0x3a, 0x48, 0x34, 0x82, /* Byte value: 0x0f */ + 0x2e, 0x18, 0xf7, 0x91, 0x30, 0xe7, 0x72, 0x73, 0x99, 0x14, 0x27, 0x8c, 0xf0, 0xfa, 0x04, 0xe8, /* Byte value: 0x10 */ + 0x7f, 0x2c, 0xac, 0x17, 0x58, 0x25, 0x81, 0xdd, 0xb2, 0x3a, 0x86, 0xd4, 0xfb, 0xe6, 0xb3, 0xd7, /* Byte value: 0x11 */ + 0xad, 0xb1, 0x09, 0x35, 0xa1, 0xd7, 0x34, 0xe0, 0x5a, 0x08, 0x16, 0x86, 0x60, 0x64, 0xd6, 0xd1, /* Byte value: 0x12 */ + 0x06, 0xa4, 0x98, 0x64, 0x8b, 0xe1, 0x0a, 0x59, 0xb9, 0xf6, 0x88, 0x1c, 0xe2, 0x99, 0x8f, 0x46, /* Byte value: 0x13 */ + 0x6b, 0x72, 0x7a, 0x8c, 0xe4, 0x26, 0xbd, 0xc8, 0xa2, 0x4b, 
0x30, 0x9c, 0xf2, 0x36, 0x17, 0x80, /* Byte value: 0x14 */ + 0x3b, 0xc9, 0xa4, 0xa5, 0x51, 0x2a, 0x4d, 0x9c, 0xe2, 0x4c, 0xad, 0x7f, 0xd6, 0xf0, 0xc2, 0x1f, /* Byte value: 0x15 */ + 0x6a, 0xfd, 0xff, 0x23, 0x39, 0xe8, 0xbe, 0x32, 0xc9, 0x62, 0x0c, 0x27, 0xdd, 0xec, 0x75, 0x20, /* Byte value: 0x16 */ + 0x7e, 0xa3, 0x29, 0xb8, 0x85, 0xeb, 0x82, 0x27, 0xd9, 0x13, 0xba, 0x6f, 0xd4, 0x3c, 0xd1, 0x77, /* Byte value: 0x17 */ + 0x3c, 0xe2, 0xb9, 0x6e, 0x07, 0x05, 0x44, 0x3f, 0x30, 0x93, 0x19, 0xd8, 0x1b, 0xb3, 0x2f, 0xf9, /* Byte value: 0x18 */ + 0x84, 0x82, 0xe3, 0x6f, 0xc7, 0x1f, 0x4f, 0x30, 0x11, 0xc3, 0x85, 0xad, 0x5d, 0xdd, 0x3f, 0xdf, /* Byte value: 0x19 */ + 0x67, 0xf9, 0x89, 0x44, 0x31, 0x27, 0xa9, 0x7a, 0x13, 0x64, 0xe3, 0xa4, 0xf5, 0xc7, 0xca, 0x0c, /* Byte value: 0x1a */ + 0xb6, 0x36, 0x60, 0x54, 0x6c, 0x44, 0x19, 0x8a, 0x46, 0x2d, 0x37, 0xf8, 0x1f, 0xe8, 0x09, 0x29, /* Byte value: 0x1b */ + 0x16, 0x83, 0x1f, 0x06, 0xc5, 0x5c, 0x3a, 0x22, 0xc6, 0x23, 0xce, 0xfd, 0x57, 0xa7, 0x60, 0xd4, /* Byte value: 0x1c */ + 0x88, 0x09, 0x10, 0xa7, 0x12, 0x1e, 0x5b, 0x82, 0xa0, 0xec, 0x56, 0x95, 0x5a, 0x2c, 0xe2, 0x53, /* Byte value: 0x1d */ + 0x58, 0x49, 0x7c, 0x18, 0x92, 0xb3, 0xe8, 0x88, 0x9e, 0x8c, 0xbe, 0x72, 0x9f, 0xd9, 0x43, 0xd6, /* Byte value: 0x1e */ + 0xbc, 0x19, 0x0b, 0xf8, 0x32, 0xa4, 0x07, 0x61, 0x4e, 0xf4, 0x6c, 0xdc, 0xfa, 0x80, 0x5b, 0xe3, /* Byte value: 0x1f */ + 0xde, 0x16, 0x56, 0xea, 0x2c, 0xf3, 0xa1, 0x8f, 0x59, 0x1d, 0x43, 0x6a, 0x9c, 0x73, 0xb8, 0x8a, /* Byte value: 0x20 */ + 0xc2, 0xba, 0x22, 0x40, 0xb7, 0x4f, 0x85, 0x46, 0x97, 0xe7, 0xd6, 0xb3, 0x2e, 0xbc, 0x8a, 0x94, /* Byte value: 0x21 */ + 0x4e, 0xca, 0x63, 0x1e, 0x57, 0xef, 0xd2, 0xaa, 0x58, 0xaf, 0x70, 0x8f, 0xc8, 0x7e, 0x23, 0x02, /* Byte value: 0x22 */ + 0x3f, 0xb0, 0xf5, 0x5c, 0xa3, 0x94, 0x41, 0xf2, 0x8d, 0xe8, 0x5d, 0xd6, 0x6a, 0x1e, 0x89, 0xda, /* Byte value: 0x23 */ + 0x0e, 0x56, 0x3a, 0x55, 0xac, 0x5e, 0x12, 0x85, 0x67, 0x7d, 0xab, 0x8d, 0x59, 0x86, 0x19, 0x0f, /* Byte value: 0x24 */ + 0x1f, 0xfe, 0x38, 0x98, 0x3f, 0x2d, 0x21, 0x04, 0x73, 0x81, 0xd1, 0xd7, 0xc3, 0x62, 0x94, 0x3d, /* Byte value: 0x25 */ + 0x99, 0xa1, 0x12, 0x6a, 0x81, 0x6d, 0x68, 0x03, 0xb4, 0x10, 0x2c, 0xcf, 0xc0, 0xc8, 0x6f, 0x61, /* Byte value: 0x26 */ + 0x0c, 0x8b, 0xf3, 0xc8, 0xd5, 0x01, 0x14, 0xb2, 0xb1, 0x2f, 0xd3, 0x38, 0x07, 0xf1, 0xdd, 0x8c, /* Byte value: 0x27 */ + 0x70, 0xf5, 0x13, 0xed, 0x29, 0xb5, 0x90, 0xa2, 0xbe, 0x6e, 0x11, 0xe2, 0x8d, 0xba, 0xc8, 0x78, /* Byte value: 0x28 */ + 0x44, 0xe5, 0x08, 0xb2, 0x09, 0x0f, 0xcc, 0x41, 0x50, 0x76, 0x2b, 0xab, 0x2d, 0x16, 0x71, 0xc8, /* Byte value: 0x29 */ + 0x2d, 0x4a, 0xbb, 0xa3, 0x94, 0x76, 0x77, 0xbe, 0x24, 0x6f, 0x63, 0x82, 0x81, 0x57, 0xa2, 0xcb, /* Byte value: 0x2a */ + 0x5f, 0x62, 0x61, 0xd3, 0xc4, 0x9c, 0xe1, 0x2b, 0x4c, 0x53, 0x0a, 0xd5, 0x52, 0x9a, 0xae, 0x30, /* Byte value: 0x2b */ + 0x78, 0x07, 0xb1, 0xdc, 0x0e, 0x0a, 0x88, 0x7e, 0x60, 0xe5, 0x32, 0x73, 0x36, 0xa5, 0x5e, 0x31, /* Byte value: 0x2c */ + 0xcb, 0xc7, 0x05, 0xde, 0x4d, 0x3e, 0x9e, 0x60, 0x22, 0x45, 0xc9, 0x99, 0xba, 0x79, 0x7e, 0x7d, /* Byte value: 0x2d */ + 0xd5, 0xb6, 0xb8, 0xe9, 0xaf, 0xdd, 0xbc, 0x9e, 0x3a, 0xed, 0x24, 0xf5, 0x56, 0xc1, 0x88, 0xe0, /* Byte value: 0x2e */ + 0x73, 0xa7, 0x5f, 0xdf, 0x8d, 0x24, 0x95, 0x6f, 0x03, 0x15, 0x55, 0xec, 0xfc, 0x17, 0x6e, 0x5b, /* Byte value: 0x2f */ + 0x5c, 0x30, 0x2d, 0xe1, 0x60, 0x0d, 0xe4, 0xe6, 0xf1, 0x28, 0x4e, 0xdb, 0x23, 0x37, 0x08, 0x13, /* Byte value: 0x30 */ + 0xfe, 0x58, 0x9b, 0x2e, 0xb0, 0x4a, 0xc1, 0x79, 0xa7, 0x74, 0xcf, 0x6b, 0x35, 0x0f, 0xa5, 0x6d, /* Byte 
value: 0x31 */ + 0x6f, 0x0b, 0x2b, 0x75, 0x16, 0x98, 0xb1, 0xa6, 0xcd, 0xef, 0xc0, 0x35, 0x4e, 0xd8, 0x5c, 0x45, /* Byte value: 0x32 */ + 0x1a, 0x08, 0xec, 0xce, 0x10, 0x5d, 0x2e, 0x90, 0x77, 0x0c, 0x1d, 0xc5, 0x50, 0x56, 0xbd, 0x58, /* Byte value: 0x33 */ + 0x07, 0x2b, 0x1d, 0xcb, 0x56, 0x2f, 0x09, 0xa3, 0xd2, 0xdf, 0xb4, 0xa7, 0xcd, 0x43, 0xed, 0xe6, /* Byte value: 0x34 */ + 0xee, 0x7f, 0x1c, 0x4c, 0xfe, 0xf7, 0xf1, 0x02, 0xd8, 0xa1, 0x89, 0x8a, 0x80, 0x31, 0x4a, 0xff, /* Byte value: 0x35 */ + 0xea, 0x06, 0x4d, 0xb5, 0x0c, 0x49, 0xfd, 0x6c, 0xb7, 0x05, 0x79, 0x23, 0x3c, 0xdf, 0x01, 0x3a, /* Byte value: 0x36 */ + 0xc8, 0x95, 0x49, 0xec, 0xe9, 0xaf, 0x9b, 0xad, 0x9f, 0x3e, 0x8d, 0x97, 0xcb, 0xd4, 0xd8, 0x5e, /* Byte value: 0x37 */ + 0x8c, 0x70, 0x41, 0x5e, 0xe0, 0xa0, 0x57, 0xec, 0xcf, 0x48, 0xa6, 0x3c, 0xe6, 0xc2, 0xa9, 0x96, /* Byte value: 0x38 */ + 0xb5, 0x64, 0x2c, 0x66, 0xc8, 0xd5, 0x1c, 0x47, 0xfb, 0x56, 0x73, 0xf6, 0x6e, 0x45, 0xaf, 0x0a, /* Byte value: 0x39 */ + 0x38, 0x9b, 0xe8, 0x97, 0xf5, 0xbb, 0x48, 0x51, 0x5f, 0x37, 0xe9, 0x71, 0xa7, 0x5d, 0x64, 0x3c, /* Byte value: 0x3a */ + 0x59, 0xc6, 0xf9, 0xb7, 0x4f, 0x7d, 0xeb, 0x72, 0xf5, 0xa5, 0x82, 0xc9, 0xb0, 0x03, 0x21, 0x76, /* Byte value: 0x3b */ + 0x69, 0xaf, 0xb3, 0x11, 0x9d, 0x79, 0xbb, 0xff, 0x74, 0x19, 0x48, 0x29, 0xac, 0x41, 0xd3, 0x03, /* Byte value: 0x3c */ + 0x10, 0x27, 0x87, 0x62, 0x4e, 0xbd, 0x30, 0x7b, 0x7f, 0xd5, 0x46, 0xe1, 0xb5, 0x3e, 0xef, 0x92, /* Byte value: 0x3d */ + 0xf0, 0x0e, 0xa1, 0x7b, 0x1c, 0x14, 0xd3, 0xfc, 0xc0, 0x09, 0x64, 0xe6, 0x6c, 0x89, 0xbc, 0x62, /* Byte value: 0x3e */ + 0xa3, 0xe7, 0x33, 0x60, 0x0d, 0x89, 0x26, 0x65, 0x3d, 0x75, 0xbd, 0x0b, 0x39, 0xe2, 0xcf, 0xde, /* Byte value: 0x3f */ + 0x09, 0x7d, 0x27, 0x9e, 0xfa, 0x71, 0x1b, 0x26, 0xb5, 0xa2, 0x1f, 0x2a, 0x94, 0xc5, 0xf4, 0xe9, /* Byte value: 0x40 */ + 0x0d, 0x04, 0x76, 0x67, 0x08, 0xcf, 0x17, 0x48, 0xda, 0x06, 0xef, 0x83, 0x28, 0x2b, 0xbf, 0x2c, /* Byte value: 0x41 */ + 0xfd, 0x0a, 0xd7, 0x1c, 0x14, 0xdb, 0xc4, 0xb4, 0x1a, 0x0f, 0x8b, 0x65, 0x44, 0xa2, 0x03, 0x4e, /* Byte value: 0x42 */ + 0x1b, 0x87, 0x69, 0x61, 0xcd, 0x93, 0x2d, 0x6a, 0x1c, 0x25, 0x21, 0x7e, 0x7f, 0x8c, 0xdf, 0xf8, /* Byte value: 0x43 */ + 0x27, 0x65, 0xd0, 0x0f, 0xca, 0x96, 0x69, 0x55, 0x2c, 0xb6, 0x38, 0xa6, 0x64, 0x3f, 0xf0, 0x01, /* Byte value: 0x44 */ + 0x8d, 0xff, 0xc4, 0xf1, 0x3d, 0x6e, 0x54, 0x16, 0xa4, 0x61, 0x9a, 0x87, 0xc9, 0x18, 0xcb, 0x36, /* Byte value: 0x45 */ + 0xf2, 0xd3, 0x68, 0xe6, 0x65, 0x4b, 0xd5, 0xcb, 0x16, 0x5b, 0x1c, 0x53, 0x32, 0xfe, 0x78, 0xe1, /* Byte value: 0x46 */ + 0x92, 0x01, 0xfc, 0x69, 0x02, 0x43, 0x75, 0x12, 0xd7, 0xe0, 0x4b, 0x50, 0x0a, 0x7a, 0x5f, 0x0b, /* Byte value: 0x47 */ + 0x6d, 0xd6, 0xe2, 0xe8, 0x6f, 0xc7, 0xb7, 0x91, 0x1b, 0xbd, 0xb8, 0x80, 0x10, 0xaf, 0x98, 0xc6, /* Byte value: 0x48 */ + 0x5d, 0xbf, 0xa8, 0x4e, 0xbd, 0xc3, 0xe7, 0x1c, 0x9a, 0x01, 0x72, 0x60, 0x0c, 0xed, 0x6a, 0xb3, /* Byte value: 0x49 */ + 0xf1, 0x81, 0x24, 0xd4, 0xc1, 0xda, 0xd0, 0x06, 0xab, 0x20, 0x58, 0x5d, 0x43, 0x53, 0xde, 0xc2, /* Byte value: 0x4a */ + 0x21, 0xc1, 0x48, 0x6b, 0x41, 0x77, 0x63, 0x0c, 0x95, 0x40, 0xb0, 0xba, 0x86, 0xa6, 0x7f, 0x47, /* Byte value: 0x4b */ + 0x37, 0x42, 0x57, 0x6d, 0x84, 0x2b, 0x59, 0x2e, 0x53, 0x63, 0x7e, 0x47, 0xd1, 0x01, 0x1f, 0x93, /* Byte value: 0x4c */ + 0x40, 0x9c, 0x59, 0x4b, 0xfb, 0xb1, 0xc0, 0x2f, 0x3f, 0xd2, 0xdb, 0x02, 0x91, 0xf8, 0x3a, 0x0d, /* Byte value: 0x4d */ + 0x8e, 0xad, 0x88, 0xc3, 0x99, 0xff, 0x51, 0xdb, 0x19, 0x1a, 0xde, 0x89, 0xb8, 0xb5, 0x6d, 0x15, /* Byte value: 0x4e */ + 0xe8, 0xdb, 0x84, 0x28, 
0x75, 0x16, 0xfb, 0x5b, 0x61, 0x57, 0x01, 0x96, 0x62, 0xa8, 0xc5, 0xb9, /* Byte value: 0x4f */ + 0xe4, 0x50, 0x77, 0xe0, 0xa0, 0x17, 0xef, 0xe9, 0xd0, 0x78, 0xd2, 0xae, 0x65, 0x59, 0x18, 0x35, /* Byte value: 0x50 */ + 0xec, 0xa2, 0xd5, 0xd1, 0x87, 0xa8, 0xf7, 0x35, 0x0e, 0xf3, 0xf1, 0x3f, 0xde, 0x46, 0x8e, 0x7c, /* Byte value: 0x51 */ + 0x9e, 0x8a, 0x0f, 0xa1, 0xd7, 0x42, 0x61, 0xa0, 0x66, 0xcf, 0x98, 0x68, 0x0d, 0x8b, 0x82, 0x87, /* Byte value: 0x52 */ + 0x9c, 0x57, 0xc6, 0x3c, 0xae, 0x1d, 0x67, 0x97, 0xb0, 0x9d, 0xe0, 0xdd, 0x53, 0xfc, 0x46, 0x04, /* Byte value: 0x53 */ + 0xd1, 0xcf, 0xe9, 0x10, 0x5d, 0x63, 0xb0, 0xf0, 0x55, 0x49, 0xd4, 0x5c, 0xea, 0x2f, 0xc3, 0x25, /* Byte value: 0x54 */ + 0xe7, 0x02, 0x3b, 0xd2, 0x04, 0x86, 0xea, 0x24, 0x6d, 0x03, 0x96, 0xa0, 0x14, 0xf4, 0xbe, 0x16, /* Byte value: 0x55 */ + 0xb8, 0x60, 0x5a, 0x01, 0xc0, 0x1a, 0x0b, 0x0f, 0x21, 0x50, 0x9c, 0x75, 0x46, 0x6e, 0x10, 0x26, /* Byte value: 0x56 */ + 0x2f, 0x97, 0x72, 0x3e, 0xed, 0x29, 0x71, 0x89, 0xf2, 0x3d, 0x1b, 0x37, 0xdf, 0x20, 0x66, 0x48, /* Byte value: 0x57 */ + 0xe2, 0xf4, 0xef, 0x84, 0x2b, 0xf6, 0xe5, 0xb0, 0x69, 0x8e, 0x5a, 0xb2, 0x87, 0xc0, 0x97, 0x73, /* Byte value: 0x58 */ + 0x8a, 0xd4, 0xd9, 0x3a, 0x6b, 0x41, 0x5d, 0xb5, 0x76, 0xbe, 0x2e, 0x20, 0x04, 0x5b, 0x26, 0xd0, /* Byte value: 0x59 */ + 0xd7, 0x6b, 0x71, 0x74, 0xd6, 0x82, 0xba, 0xa9, 0xec, 0xbf, 0x5c, 0x40, 0x08, 0xb6, 0x4c, 0x63, /* Byte value: 0x5a */ + 0xb4, 0xeb, 0xa9, 0xc9, 0x15, 0x1b, 0x1f, 0xbd, 0x90, 0x7f, 0x4f, 0x4d, 0x41, 0x9f, 0xcd, 0xaa, /* Byte value: 0x5b */ + 0x29, 0x33, 0xea, 0x5a, 0x66, 0xc8, 0x7b, 0xd0, 0x4b, 0xcb, 0x93, 0x2b, 0x3d, 0xb9, 0xe9, 0x0e, /* Byte value: 0x5c */ + 0x13, 0x75, 0xcb, 0x50, 0xea, 0x2c, 0x35, 0xb6, 0xc2, 0xae, 0x02, 0xef, 0xc4, 0x93, 0x49, 0xb1, /* Byte value: 0x5d */ + 0x98, 0x2e, 0x97, 0xc5, 0x5c, 0xa3, 0x6b, 0xf9, 0xdf, 0x39, 0x10, 0x74, 0xef, 0x12, 0x0d, 0xc1, /* Byte value: 0x5e */ + 0x80, 0xfb, 0xb2, 0x96, 0x35, 0xa1, 0x43, 0x5e, 0x7e, 0x67, 0x75, 0x04, 0xe1, 0x33, 0x74, 0x1a, /* Byte value: 0x5f */ + 0xa6, 0x11, 0xe7, 0x36, 0x22, 0xf9, 0x29, 0xf1, 0x39, 0xf8, 0x71, 0x19, 0xaa, 0xd6, 0xe6, 0xbb, /* Byte value: 0x60 */ + 0x62, 0x0f, 0x5d, 0x12, 0x1e, 0x57, 0xa6, 0xee, 0x17, 0xe9, 0x2f, 0xb6, 0x66, 0xf3, 0xe3, 0x69, /* Byte value: 0x61 */ + 0xb3, 0xc0, 0xb4, 0x02, 0x43, 0x34, 0x16, 0x1e, 0x42, 0xa0, 0xfb, 0xea, 0x8c, 0xdc, 0x20, 0x4c, /* Byte value: 0x62 */ + 0x5e, 0xed, 0xe4, 0x7c, 0x19, 0x52, 0xe2, 0xd1, 0x27, 0x7a, 0x36, 0x6e, 0x7d, 0x40, 0xcc, 0x90, /* Byte value: 0x63 */ + 0x72, 0x28, 0xda, 0x70, 0x50, 0xea, 0x96, 0x95, 0x68, 0x3c, 0x69, 0x57, 0xd3, 0xcd, 0x0c, 0xfb, /* Byte value: 0x64 */ + 0x43, 0xce, 0x15, 0x79, 0x5f, 0x20, 0xc5, 0xe2, 0x82, 0xa9, 0x9f, 0x0c, 0xe0, 0x55, 0x9c, 0x2e, /* Byte value: 0x65 */ + 0x34, 0x10, 0x1b, 0x5f, 0x20, 0xba, 0x5c, 0xe3, 0xee, 0x18, 0x3a, 0x49, 0xa0, 0xac, 0xb9, 0xb0, /* Byte value: 0x66 */ + 0x35, 0x9f, 0x9e, 0xf0, 0xfd, 0x74, 0x5f, 0x19, 0x85, 0x31, 0x06, 0xf2, 0x8f, 0x76, 0xdb, 0x10, /* Byte value: 0x67 */ + 0x52, 0x66, 0x17, 0xb4, 0xcc, 0x53, 0xf6, 0x63, 0x96, 0x55, 0xe5, 0x56, 0x7a, 0xb1, 0x11, 0x1c, /* Byte value: 0x68 */ + 0x26, 0xea, 0x55, 0xa0, 0x17, 0x58, 0x6a, 0xaf, 0x47, 0x9f, 0x04, 0x1d, 0x4b, 0xe5, 0x92, 0xa1, /* Byte value: 0x69 */ + 0x7d, 0xf1, 0x65, 0x8a, 0x21, 0x7a, 0x87, 0xea, 0x64, 0x68, 0xfe, 0x61, 0xa5, 0x91, 0x77, 0x54, /* Byte value: 0x6a */ + 0xb7, 0xb9, 0xe5, 0xfb, 0xb1, 0x8a, 0x1a, 0x70, 0x2d, 0x04, 0x0b, 0x43, 0x30, 0x32, 0x6b, 0x89, /* Byte value: 0x6b */ + 0x0a, 0x2f, 0x6b, 0xac, 0x5e, 0xe0, 0x1e, 0xeb, 0x08, 0xd9, 0x5b, 
0x24, 0xe5, 0x68, 0x52, 0xca, /* Byte value: 0x6c */ + 0x45, 0x6a, 0x8d, 0x1d, 0xd4, 0xc1, 0xcf, 0xbb, 0x3b, 0x5f, 0x17, 0x10, 0x02, 0xcc, 0x13, 0x68, /* Byte value: 0x6d */ + 0x81, 0x74, 0x37, 0x39, 0xe8, 0x6f, 0x40, 0xa4, 0x15, 0x4e, 0x49, 0xbf, 0xce, 0xe9, 0x16, 0xba, /* Byte value: 0x6e */ + 0x6e, 0x84, 0xae, 0xda, 0xcb, 0x56, 0xb2, 0x5c, 0xa6, 0xc6, 0xfc, 0x8e, 0x61, 0x02, 0x3e, 0xe5, /* Byte value: 0x6f */ + 0x53, 0xe9, 0x92, 0x1b, 0x11, 0x9d, 0xf5, 0x99, 0xfd, 0x7c, 0xd9, 0xed, 0x55, 0x6b, 0x73, 0xbc, /* Byte value: 0x70 */ + 0x4a, 0xb3, 0x32, 0xe7, 0xa5, 0x51, 0xde, 0xc4, 0x37, 0x0b, 0x80, 0x26, 0x74, 0x90, 0x68, 0xc7, /* Byte value: 0x71 */ + 0x79, 0x88, 0x34, 0x73, 0xd3, 0xc4, 0x8b, 0x84, 0x0b, 0xcc, 0x0e, 0xc8, 0x19, 0x7f, 0x3c, 0x91, /* Byte value: 0x72 */ + 0x32, 0xb4, 0x83, 0x3b, 0xab, 0x5b, 0x56, 0xba, 0x57, 0xee, 0xb2, 0x55, 0x42, 0x35, 0x36, 0xf6, /* Byte value: 0x73 */ + 0x61, 0x5d, 0x11, 0x20, 0xba, 0xc6, 0xa3, 0x23, 0xaa, 0x92, 0x6b, 0xb8, 0x17, 0x5e, 0x45, 0x4a, /* Byte value: 0x74 */ + 0xfb, 0xae, 0x4f, 0x78, 0x9f, 0x3a, 0xce, 0xed, 0xa3, 0xf9, 0x03, 0x79, 0xa6, 0x3b, 0x8c, 0x08, /* Byte value: 0x75 */ + 0x39, 0x14, 0x6d, 0x38, 0x28, 0x75, 0x4b, 0xab, 0x34, 0x1e, 0xd5, 0xca, 0x88, 0x87, 0x06, 0x9c, /* Byte value: 0x76 */ + 0xc0, 0x67, 0xeb, 0xdd, 0xce, 0x10, 0x83, 0x71, 0x41, 0xb5, 0xae, 0x06, 0x70, 0xcb, 0x4e, 0x17, /* Byte value: 0x77 */ + 0xfa, 0x21, 0xca, 0xd7, 0x42, 0xf4, 0xcd, 0x17, 0xc8, 0xd0, 0x3f, 0xc2, 0x89, 0xe1, 0xee, 0xa8, /* Byte value: 0x78 */ + 0x5b, 0x1b, 0x30, 0x2a, 0x36, 0x22, 0xed, 0x45, 0x23, 0xf7, 0xfa, 0x7c, 0xee, 0x74, 0xe5, 0xf5, /* Byte value: 0x79 */ + 0xa4, 0xcc, 0x2e, 0xab, 0x5b, 0xa6, 0x2f, 0xc6, 0xef, 0xaa, 0x09, 0xac, 0xf4, 0xa1, 0x22, 0x38, /* Byte value: 0x7a */ + 0xba, 0xbd, 0x93, 0x9c, 0xb9, 0x45, 0x0d, 0x38, 0xf7, 0x02, 0xe4, 0xc0, 0x18, 0x19, 0xd4, 0xa5, /* Byte value: 0x7b */ + 0xda, 0x6f, 0x07, 0x13, 0xde, 0x4d, 0xad, 0xe1, 0x36, 0xb9, 0xb3, 0xc3, 0x20, 0x9d, 0xf3, 0x4f, /* Byte value: 0x7c */ + 0x4c, 0x17, 0xaa, 0x83, 0x2e, 0xb0, 0xd4, 0x9d, 0x8e, 0xfd, 0x08, 0x3a, 0x96, 0x09, 0xe7, 0x81, /* Byte value: 0x7d */ + 0x05, 0xf6, 0xd4, 0x56, 0x2f, 0x70, 0x0f, 0x94, 0x04, 0x8d, 0xcc, 0x12, 0x93, 0x34, 0x29, 0x65, /* Byte value: 0x7e */ + 0xc3, 0x35, 0xa7, 0xef, 0x6a, 0x81, 0x86, 0xbc, 0xfc, 0xce, 0xea, 0x08, 0x01, 0x66, 0xe8, 0x34, /* Byte value: 0x7f */ + 0x3a, 0x46, 0x21, 0x0a, 0x8c, 0xe4, 0x4e, 0x66, 0x89, 0x65, 0x91, 0xc4, 0xf9, 0x2a, 0xa0, 0xbf, /* Byte value: 0x80 */ + 0x82, 0x26, 0x7b, 0x0b, 0x4c, 0xfe, 0x45, 0x69, 0xa8, 0x35, 0x0d, 0xb1, 0xbf, 0x44, 0xb0, 0x99, /* Byte value: 0x81 */ + 0x18, 0xd5, 0x25, 0x53, 0x69, 0x02, 0x28, 0xa7, 0xa1, 0x5e, 0x65, 0x70, 0x0e, 0x21, 0x79, 0xdb, /* Byte value: 0x82 */ + 0xc9, 0x1a, 0xcc, 0x43, 0x34, 0x61, 0x98, 0x57, 0xf4, 0x17, 0xb1, 0x2c, 0xe4, 0x0e, 0xba, 0xfe, /* Byte value: 0x83 */ + 0x68, 0x20, 0x36, 0xbe, 0x40, 0xb7, 0xb8, 0x05, 0x1f, 0x30, 0x74, 0x92, 0x83, 0x9b, 0xb1, 0xa3, /* Byte value: 0x84 */ + 0xf6, 0xaa, 0x39, 0x1f, 0x97, 0xf5, 0xd9, 0xa5, 0x79, 0xff, 0xec, 0xfa, 0x8e, 0x10, 0x33, 0x24, /* Byte value: 0x85 */ + 0x63, 0x80, 0xd8, 0xbd, 0xc3, 0x99, 0xa5, 0x14, 0x7c, 0xc0, 0x13, 0x0d, 0x49, 0x29, 0x81, 0xc9, /* Byte value: 0x86 */ + 0x87, 0xd0, 0xaf, 0x5d, 0x63, 0x8e, 0x4a, 0xfd, 0xac, 0xb8, 0xc1, 0xa3, 0x2c, 0x70, 0x99, 0xfc, /* Byte value: 0x87 */ + 0x49, 0xe1, 0x7e, 0xd5, 0x01, 0xc0, 0xdb, 0x09, 0x8a, 0x70, 0xc4, 0x28, 0x05, 0x3d, 0xce, 0xe4, /* Byte value: 0x88 */ + 0x36, 0xcd, 0xd2, 0xc2, 0x59, 0xe5, 0x5a, 0xd4, 0x38, 0x4a, 0x42, 0xfc, 0xfe, 0xdb, 0x7d, 0x33, /* Byte value: 
0x89 */ + 0x91, 0x53, 0xb0, 0x5b, 0xa6, 0xd2, 0x70, 0xdf, 0x6a, 0x9b, 0x0f, 0x5e, 0x7b, 0xd7, 0xf9, 0x28, /* Byte value: 0x8a */ + 0x97, 0xf7, 0x28, 0x3f, 0x2d, 0x33, 0x7a, 0x86, 0xd3, 0x6d, 0x87, 0x42, 0x99, 0x4e, 0x76, 0x6e, /* Byte value: 0x8b */ + 0x33, 0x3b, 0x06, 0x94, 0x76, 0x95, 0x55, 0x40, 0x3c, 0xc7, 0x8e, 0xee, 0x6d, 0xef, 0x54, 0x56, /* Byte value: 0x8c */ + 0x08, 0xf2, 0xa2, 0x31, 0x27, 0xbf, 0x18, 0xdc, 0xde, 0x8b, 0x23, 0x91, 0xbb, 0x1f, 0x96, 0x49, /* Byte value: 0x8d */ + 0x7c, 0x7e, 0xe0, 0x25, 0xfc, 0xb4, 0x84, 0x10, 0x0f, 0x41, 0xc2, 0xda, 0x8a, 0x4b, 0x15, 0xf4, /* Byte value: 0x8e */ + 0x02, 0xdd, 0xc9, 0x9d, 0x79, 0x5f, 0x06, 0x37, 0xd6, 0x52, 0x78, 0xb5, 0x5e, 0x77, 0xc4, 0x83, /* Byte value: 0x8f */ + 0x77, 0xde, 0x0e, 0x26, 0x7f, 0x9a, 0x99, 0x01, 0x6c, 0xb1, 0xa5, 0x45, 0x40, 0xf9, 0x25, 0x9e, /* Byte value: 0x90 */ + 0x85, 0x0d, 0x66, 0xc0, 0x1a, 0xd1, 0x4c, 0xca, 0x7a, 0xea, 0xb9, 0x16, 0x72, 0x07, 0x5d, 0x7f, /* Byte value: 0x91 */ + 0x66, 0x76, 0x0c, 0xeb, 0xec, 0xe9, 0xaa, 0x80, 0x78, 0x4d, 0xdf, 0x1f, 0xda, 0x1d, 0xa8, 0xac, /* Byte value: 0x92 */ + 0x41, 0x13, 0xdc, 0xe4, 0x26, 0x7f, 0xc3, 0xd5, 0x54, 0xfb, 0xe7, 0xb9, 0xbe, 0x22, 0x58, 0xad, /* Byte value: 0x93 */ + 0x30, 0x69, 0x4a, 0xa6, 0xd2, 0x04, 0x50, 0x8d, 0x81, 0xbc, 0xca, 0xe0, 0x1c, 0x42, 0xf2, 0x75, /* Byte value: 0x94 */ + 0xcd, 0x63, 0x9d, 0xba, 0xc6, 0xdf, 0x94, 0x39, 0x9b, 0xb3, 0x41, 0x85, 0x58, 0xe0, 0xf1, 0x3b, /* Byte value: 0x95 */ + 0xd0, 0x40, 0x6c, 0xbf, 0x80, 0xad, 0xb3, 0x0a, 0x3e, 0x60, 0xe8, 0xe7, 0xc5, 0xf5, 0xa1, 0x85, /* Byte value: 0x96 */ + 0xd9, 0x3d, 0x4b, 0x21, 0x7a, 0xdc, 0xa8, 0x2c, 0x8b, 0xc2, 0xf7, 0xcd, 0x51, 0x30, 0x55, 0x6c, /* Byte value: 0x97 */ + 0xa9, 0xc8, 0x58, 0xcc, 0x53, 0x69, 0x38, 0x8e, 0x35, 0xac, 0xe6, 0x2f, 0xdc, 0x8a, 0x9d, 0x14, /* Byte value: 0x98 */ + 0xaa, 0x9a, 0x14, 0xfe, 0xf7, 0xf8, 0x3d, 0x43, 0x88, 0xd7, 0xa2, 0x21, 0xad, 0x27, 0x3b, 0x37, /* Byte value: 0x99 */ + 0xc5, 0x91, 0x3f, 0x8b, 0xe1, 0x60, 0x8c, 0xe5, 0x45, 0x38, 0x62, 0x14, 0xe3, 0xff, 0x67, 0x72, /* Byte value: 0x9a */ + 0x60, 0xd2, 0x94, 0x8f, 0x67, 0x08, 0xa0, 0xd9, 0xc1, 0xbb, 0x57, 0x03, 0x38, 0x84, 0x27, 0xea, /* Byte value: 0x9b */ + 0xf8, 0xfc, 0x03, 0x4a, 0x3b, 0xab, 0xcb, 0x20, 0x1e, 0x82, 0x47, 0x77, 0xd7, 0x96, 0x2a, 0x2b, /* Byte value: 0x9c */ + 0x04, 0x79, 0x51, 0xf9, 0xf2, 0xbe, 0x0c, 0x6e, 0x6f, 0xa4, 0xf0, 0xa9, 0xbc, 0xee, 0x4b, 0xc5, /* Byte value: 0x9d */ + 0x90, 0xdc, 0x35, 0xf4, 0x7b, 0x1c, 0x73, 0x25, 0x01, 0xb2, 0x33, 0xe5, 0x54, 0x0d, 0x9b, 0x88, /* Byte value: 0x9e */ + 0xe6, 0x8d, 0xbe, 0x7d, 0xd9, 0x48, 0xe9, 0xde, 0x06, 0x2a, 0xaa, 0x1b, 0x3b, 0x2e, 0xdc, 0xb6, /* Byte value: 0x9f */ + 0x96, 0x78, 0xad, 0x90, 0xf0, 0xfd, 0x79, 0x7c, 0xb8, 0x44, 0xbb, 0xf9, 0xb6, 0x94, 0x14, 0xce, /* Byte value: 0xa0 */ + 0xaf, 0x6c, 0xc0, 0xa8, 0xd8, 0x88, 0x32, 0xd7, 0x8c, 0x5a, 0x6e, 0x33, 0x3e, 0x13, 0x12, 0x52, /* Byte value: 0xa1 */ + 0xc6, 0xc3, 0x73, 0xb9, 0x45, 0xf1, 0x89, 0x28, 0xf8, 0x43, 0x26, 0x1a, 0x92, 0x52, 0xc1, 0x51, /* Byte value: 0xa2 */ + 0xa7, 0x9e, 0x62, 0x99, 0xff, 0x37, 0x2a, 0x0b, 0x52, 0xd1, 0x4d, 0xa2, 0x85, 0x0c, 0x84, 0x1b, /* Byte value: 0xa3 */ + 0xa1, 0x3a, 0xfa, 0xfd, 0x74, 0xd6, 0x20, 0x52, 0xeb, 0x27, 0xc5, 0xbe, 0x67, 0x95, 0x0b, 0x5d, /* Byte value: 0xa4 */ + 0xd8, 0xb2, 0xce, 0x8e, 0xa7, 0x12, 0xab, 0xd6, 0xe0, 0xeb, 0xcb, 0x76, 0x7e, 0xea, 0x37, 0xcc, /* Byte value: 0xa5 */ + 0x1e, 0x71, 0xbd, 0x37, 0xe2, 0xe3, 0x22, 0xfe, 0x18, 0xa8, 0xed, 0x6c, 0xec, 0xb8, 0xf6, 0x9d, /* Byte value: 0xa6 */ + 0x4d, 0x98, 0x2f, 0x2c, 0xf3, 0x7e, 
0xd7, 0x67, 0xe5, 0xd4, 0x34, 0x81, 0xb9, 0xd3, 0x85, 0x21, /* Byte value: 0xa7 */ + 0x64, 0xab, 0xc5, 0x76, 0x95, 0xb6, 0xac, 0xb7, 0xae, 0x1f, 0xa7, 0xaa, 0x84, 0x6a, 0x6c, 0x2f, /* Byte value: 0xa8 */ + 0x9a, 0xf3, 0x5e, 0x58, 0x25, 0xfc, 0x6d, 0xce, 0x09, 0x6b, 0x68, 0xc1, 0xb1, 0x65, 0xc9, 0x42, /* Byte value: 0xa9 */ + 0xe3, 0x7b, 0x6a, 0x2b, 0xf6, 0x38, 0xe6, 0x4a, 0x02, 0xa7, 0x66, 0x09, 0xa8, 0x1a, 0xf5, 0xd3, /* Byte value: 0xaa */ + 0xf9, 0x73, 0x86, 0xe5, 0xe6, 0x65, 0xc8, 0xda, 0x75, 0xab, 0x7b, 0xcc, 0xf8, 0x4c, 0x48, 0x8b, /* Byte value: 0xab */ + 0xf7, 0x25, 0xbc, 0xb0, 0x4a, 0x3b, 0xda, 0x5f, 0x12, 0xd6, 0xd0, 0x41, 0xa1, 0xca, 0x51, 0x84, /* Byte value: 0xac */ + 0x19, 0x5a, 0xa0, 0xfc, 0xb4, 0xcc, 0x2b, 0x5d, 0xca, 0x77, 0x59, 0xcb, 0x21, 0xfb, 0x1b, 0x7b, /* Byte value: 0xad */ + 0x83, 0xa9, 0xfe, 0xa4, 0x91, 0x30, 0x46, 0x93, 0xc3, 0x1c, 0x31, 0x0a, 0x90, 0x9e, 0xd2, 0x39, /* Byte value: 0xae */ + 0x4b, 0x3c, 0xb7, 0x48, 0x78, 0x9f, 0xdd, 0x3e, 0x5c, 0x22, 0xbc, 0x9d, 0x5b, 0x4a, 0x0a, 0x67, /* Byte value: 0xaf */ + 0x4f, 0x45, 0xe6, 0xb1, 0x8a, 0x21, 0xd1, 0x50, 0x33, 0x86, 0x4c, 0x34, 0xe7, 0xa4, 0x41, 0xa2, /* Byte value: 0xb0 */ + 0x7b, 0x55, 0xfd, 0xee, 0xaa, 0x9b, 0x8d, 0xb3, 0xdd, 0x9e, 0x76, 0x7d, 0x47, 0x08, 0xf8, 0x12, /* Byte value: 0xb1 */ + 0xef, 0xf0, 0x99, 0xe3, 0x23, 0x39, 0xf2, 0xf8, 0xb3, 0x88, 0xb5, 0x31, 0xaf, 0xeb, 0x28, 0x5f, /* Byte value: 0xb2 */ + 0x9d, 0xd8, 0x43, 0x93, 0x73, 0xd3, 0x64, 0x6d, 0xdb, 0xb4, 0xdc, 0x66, 0x7c, 0x26, 0x24, 0xa4, /* Byte value: 0xb3 */ + 0x74, 0x8c, 0x42, 0x14, 0xdb, 0x0b, 0x9c, 0xcc, 0xd1, 0xca, 0xe1, 0x4b, 0x31, 0x54, 0x83, 0xbd, /* Byte value: 0xb4 */ + 0xc7, 0x4c, 0xf6, 0x16, 0x98, 0x3f, 0x8a, 0xd2, 0x93, 0x6a, 0x1a, 0xa1, 0xbd, 0x88, 0xa3, 0xf1, /* Byte value: 0xb5 */ + 0xca, 0x48, 0x80, 0x71, 0x90, 0xf0, 0x9d, 0x9a, 0x49, 0x6c, 0xf5, 0x22, 0x95, 0xa3, 0x1c, 0xdd, /* Byte value: 0xb6 */ + 0x6c, 0x59, 0x67, 0x47, 0xb2, 0x09, 0xb4, 0x6b, 0x70, 0x94, 0x84, 0x3b, 0x3f, 0x75, 0xfa, 0x66, /* Byte value: 0xb7 */ + 0xf3, 0x5c, 0xed, 0x49, 0xb8, 0x85, 0xd6, 0x31, 0x7d, 0x72, 0x20, 0xe8, 0x1d, 0x24, 0x1a, 0x41, /* Byte value: 0xb8 */ + 0x31, 0xe6, 0xcf, 0x09, 0x0f, 0xca, 0x53, 0x77, 0xea, 0x95, 0xf6, 0x5b, 0x33, 0x98, 0x90, 0xd5, /* Byte value: 0xb9 */ + 0x3e, 0x3f, 0x70, 0xf3, 0x7e, 0x5a, 0x42, 0x08, 0xe6, 0xc1, 0x61, 0x6d, 0x45, 0xc4, 0xeb, 0x7a, /* Byte value: 0xba */ + 0x01, 0x8f, 0x85, 0xaf, 0xdd, 0xce, 0x03, 0xfa, 0x6b, 0x29, 0x3c, 0xbb, 0x2f, 0xda, 0x62, 0xa0, /* Byte value: 0xbb */ + 0xdb, 0xe0, 0x82, 0xbc, 0x03, 0x83, 0xae, 0x1b, 0x5d, 0x90, 0x8f, 0x78, 0x0f, 0x47, 0x91, 0xef, /* Byte value: 0xbc */ + 0xc4, 0x1e, 0xba, 0x24, 0x3c, 0xae, 0x8f, 0x1f, 0x2e, 0x11, 0x5e, 0xaf, 0xcc, 0x25, 0x05, 0xd2, /* Byte value: 0xbd */ + 0xe1, 0xa6, 0xa3, 0xb6, 0x8f, 0x67, 0xe0, 0x7d, 0xd4, 0xf5, 0x1e, 0xbc, 0xf6, 0x6d, 0x31, 0x50, /* Byte value: 0xbe */ + 0xed, 0x2d, 0x50, 0x7e, 0x5a, 0x66, 0xf4, 0xcf, 0x65, 0xda, 0xcd, 0x84, 0xf1, 0x9c, 0xec, 0xdc, /* Byte value: 0xbf */ + 0xe0, 0x29, 0x26, 0x19, 0x52, 0xa9, 0xe3, 0x87, 0xbf, 0xdc, 0x22, 0x07, 0xd9, 0xb7, 0x53, 0xf0, /* Byte value: 0xc0 */ + 0x14, 0x5e, 0xd6, 0x9b, 0xbc, 0x03, 0x3c, 0x15, 0x10, 0x71, 0xb6, 0x48, 0x09, 0xd0, 0xa4, 0x57, /* Byte value: 0xc1 */ + 0xdf, 0x99, 0xd3, 0x45, 0xf1, 0x3d, 0xa2, 0x75, 0x32, 0x34, 0x7f, 0xd1, 0xb3, 0xa9, 0xda, 0x2a, /* Byte value: 0xc2 */ + 0x50, 0xbb, 0xde, 0x29, 0xb5, 0x0c, 0xf0, 0x54, 0x40, 0x07, 0x9d, 0xe3, 0x24, 0xc6, 0xd5, 0x9f, /* Byte value: 0xc3 */ + 0xbe, 0xc4, 0xc2, 0x65, 0x4b, 0xfb, 0x01, 0x56, 0x98, 0xa6, 0x14, 0x69, 0xa4, 
0xf7, 0x9f, 0x60, /* Byte value: 0xc4 */ + 0xab, 0x15, 0x91, 0x51, 0x2a, 0x36, 0x3e, 0xb9, 0xe3, 0xfe, 0x9e, 0x9a, 0x82, 0xfd, 0x59, 0x97, /* Byte value: 0xc5 */ + 0xae, 0xe3, 0x45, 0x07, 0x05, 0x46, 0x31, 0x2d, 0xe7, 0x73, 0x52, 0x88, 0x11, 0xc9, 0x70, 0xf2, /* Byte value: 0xc6 */ + 0x51, 0x34, 0x5b, 0x86, 0x68, 0xc2, 0xf3, 0xae, 0x2b, 0x2e, 0xa1, 0x58, 0x0b, 0x1c, 0xb7, 0x3f, /* Byte value: 0xc7 */ + 0x12, 0xfa, 0x4e, 0xff, 0x37, 0xe2, 0x36, 0x4c, 0xa9, 0x87, 0x3e, 0x54, 0xeb, 0x49, 0x2b, 0x11, /* Byte value: 0xc8 */ + 0x8f, 0x22, 0x0d, 0x6c, 0x44, 0x31, 0x52, 0x21, 0x72, 0x33, 0xe2, 0x32, 0x97, 0x6f, 0x0f, 0xb5, /* Byte value: 0xc9 */ + 0x1d, 0x23, 0xf1, 0x05, 0x46, 0x72, 0x27, 0x33, 0xa5, 0xd3, 0xa9, 0x62, 0x9d, 0x15, 0x50, 0xbe, /* Byte value: 0xca */ + 0xf4, 0x77, 0xf0, 0x82, 0xee, 0xaa, 0xdf, 0x92, 0xaf, 0xad, 0x94, 0x4f, 0xd0, 0x67, 0xf7, 0xa7, /* Byte value: 0xcb */ + 0xd3, 0x12, 0x20, 0x8d, 0x24, 0x3c, 0xb6, 0xc7, 0x83, 0x1b, 0xac, 0xe9, 0xb4, 0x58, 0x07, 0xa6, /* Byte value: 0xcc */ + 0xff, 0xd7, 0x1e, 0x81, 0x6d, 0x84, 0xc2, 0x83, 0xcc, 0x5d, 0xf3, 0xd0, 0x1a, 0xd5, 0xc7, 0xcd, /* Byte value: 0xcd */ + 0x47, 0xb7, 0x44, 0x80, 0xad, 0x9e, 0xc9, 0x8c, 0xed, 0x0d, 0x6f, 0xa5, 0x5c, 0xbb, 0xd7, 0xeb, /* Byte value: 0xce */ + 0x15, 0xd1, 0x53, 0x34, 0x61, 0xcd, 0x3f, 0xef, 0x7b, 0x58, 0x8a, 0xf3, 0x26, 0x0a, 0xc6, 0xf7, /* Byte value: 0xcf */ + 0x8b, 0x5b, 0x5c, 0x95, 0xb6, 0x8f, 0x5e, 0x4f, 0x1d, 0x97, 0x12, 0x9b, 0x2b, 0x81, 0x44, 0x70, /* Byte value: 0xd0 */ + 0xa0, 0xb5, 0x7f, 0x52, 0xa9, 0x18, 0x23, 0xa8, 0x80, 0x0e, 0xf9, 0x05, 0x48, 0x4f, 0x69, 0xfd, /* Byte value: 0xd1 */ + 0xf5, 0xf8, 0x75, 0x2d, 0x33, 0x64, 0xdc, 0x68, 0xc4, 0x84, 0xa8, 0xf4, 0xff, 0xbd, 0x95, 0x07, /* Byte value: 0xd2 */ + 0x28, 0xbc, 0x6f, 0xf5, 0xbb, 0x06, 0x78, 0x2a, 0x20, 0xe2, 0xaf, 0x90, 0x12, 0x63, 0x8b, 0xae, /* Byte value: 0xd3 */ + 0x57, 0x90, 0xc3, 0xe2, 0xe3, 0x23, 0xf9, 0xf7, 0x92, 0xd8, 0x29, 0x44, 0xe9, 0x85, 0x38, 0x79, /* Byte value: 0xd4 */ + 0xb2, 0x4f, 0x31, 0xad, 0x9e, 0xfa, 0x15, 0xe4, 0x29, 0x89, 0xc7, 0x51, 0xa3, 0x06, 0x42, 0xec, /* Byte value: 0xd5 */ + 0x24, 0x37, 0x9c, 0x3d, 0x6e, 0x07, 0x6c, 0x98, 0x91, 0xcd, 0x7c, 0xa8, 0x15, 0x92, 0x56, 0x22, /* Byte value: 0xd6 */ + 0xcf, 0xbe, 0x54, 0x27, 0xbf, 0x80, 0x92, 0x0e, 0x4d, 0xe1, 0x39, 0x30, 0x06, 0x97, 0x35, 0xb8, /* Byte value: 0xd7 */ + 0x89, 0x86, 0x95, 0x08, 0xcf, 0xd0, 0x58, 0x78, 0xcb, 0xc5, 0x6a, 0x2e, 0x75, 0xf6, 0x80, 0xf3, /* Byte value: 0xd8 */ + 0xdd, 0x44, 0x1a, 0xd8, 0x88, 0x62, 0xa4, 0x42, 0xe4, 0x66, 0x07, 0x64, 0xed, 0xde, 0x1e, 0xa9, /* Byte value: 0xd9 */ + 0x55, 0x4d, 0x0a, 0x7f, 0x9a, 0x7c, 0xff, 0xc0, 0x44, 0x8a, 0x51, 0xf1, 0xb7, 0xf2, 0xfc, 0xfa, /* Byte value: 0xda */ + 0x2b, 0xee, 0x23, 0xc7, 0x1f, 0x97, 0x7d, 0xe7, 0x9d, 0x99, 0xeb, 0x9e, 0x63, 0xce, 0x2d, 0x8d, /* Byte value: 0xdb */ + 0x9b, 0x7c, 0xdb, 0xf7, 0xf8, 0x32, 0x6e, 0x34, 0x62, 0x42, 0x54, 0x7a, 0x9e, 0xbf, 0xab, 0xe2, /* Byte value: 0xdc */ + 0xeb, 0x89, 0xc8, 0x1a, 0xd1, 0x87, 0xfe, 0x96, 0xdc, 0x2c, 0x45, 0x98, 0x13, 0x05, 0x63, 0x9a, /* Byte value: 0xdd */ + 0xce, 0x31, 0xd1, 0x88, 0x62, 0x4e, 0x91, 0xf4, 0x26, 0xc8, 0x05, 0x8b, 0x29, 0x4d, 0x57, 0x18, /* Byte value: 0xde */ + 0xe5, 0xdf, 0xf2, 0x4f, 0x7d, 0xd9, 0xec, 0x13, 0xbb, 0x51, 0xee, 0x15, 0x4a, 0x83, 0x7a, 0x95, /* Byte value: 0xdf */ + 0xb1, 0x1d, 0x7d, 0x9f, 0x3a, 0x6b, 0x10, 0x29, 0x94, 0xf2, 0x83, 0x5f, 0xd2, 0xab, 0xe4, 0xcf, /* Byte value: 0xe0 */ + 0x22, 0x93, 0x04, 0x59, 0xe5, 0xe6, 0x66, 0xc1, 0x28, 0x3b, 0xf4, 0xb4, 0xf7, 0x0b, 0xd9, 0x64, /* Byte value: 0xe1 */ + 
0xbd, 0x96, 0x8e, 0x57, 0xef, 0x6a, 0x04, 0x9b, 0x25, 0xdd, 0x50, 0x67, 0xd5, 0x5a, 0x39, 0x43, /* Byte value: 0xe2 */ + 0xe9, 0x54, 0x01, 0x87, 0xa8, 0xd8, 0xf8, 0xa1, 0x0a, 0x7e, 0x3d, 0x2d, 0x4d, 0x72, 0xa7, 0x19, /* Byte value: 0xe3 */ + 0x23, 0x1c, 0x81, 0xf6, 0x38, 0x28, 0x65, 0x3b, 0x43, 0x12, 0xc8, 0x0f, 0xd8, 0xd1, 0xbb, 0xc4, /* Byte value: 0xe4 */ + 0x75, 0x03, 0xc7, 0xbb, 0x06, 0xc5, 0x9f, 0x36, 0xba, 0xe3, 0xdd, 0xf0, 0x1e, 0x8e, 0xe1, 0x1d, /* Byte value: 0xe5 */ + 0x9f, 0x05, 0x8a, 0x0e, 0x0a, 0x8c, 0x62, 0x5a, 0x0d, 0xe6, 0xa4, 0xd3, 0x22, 0x51, 0xe0, 0x27, /* Byte value: 0xe6 */ + 0xa2, 0x68, 0xb6, 0xcf, 0xd0, 0x47, 0x25, 0x9f, 0x56, 0x5c, 0x81, 0xb0, 0x16, 0x38, 0xad, 0x7e, /* Byte value: 0xe7 */ + 0x0f, 0xd9, 0xbf, 0xfa, 0x71, 0x90, 0x11, 0x7f, 0x0c, 0x54, 0x97, 0x36, 0x76, 0x5c, 0x7b, 0xaf, /* Byte value: 0xe8 */ + 0x54, 0xc2, 0x8f, 0xd0, 0x47, 0xb2, 0xfc, 0x3a, 0x2f, 0xa3, 0x6d, 0x4a, 0x98, 0x28, 0x9e, 0x5a, /* Byte value: 0xe9 */ + 0x86, 0x5f, 0x2a, 0xf2, 0xbe, 0x40, 0x49, 0x07, 0xc7, 0x91, 0xfd, 0x18, 0x03, 0xaa, 0xfb, 0x5c, /* Byte value: 0xea */ + 0x93, 0x8e, 0x79, 0xc6, 0xdf, 0x8d, 0x76, 0xe8, 0xbc, 0xc9, 0x77, 0xeb, 0x25, 0xa0, 0x3d, 0xab, /* Byte value: 0xeb */ + 0x5a, 0x94, 0xb5, 0x85, 0xeb, 0xec, 0xee, 0xbf, 0x48, 0xde, 0xc6, 0xc7, 0xc1, 0xae, 0x87, 0x55, /* Byte value: 0xec */ + 0x3d, 0x6d, 0x3c, 0xc1, 0xda, 0xcb, 0x47, 0xc5, 0x5b, 0xba, 0x25, 0x63, 0x34, 0x69, 0x4d, 0x59, /* Byte value: 0xed */ + 0x94, 0xa5, 0x64, 0x0d, 0x89, 0xa2, 0x7f, 0x4b, 0x6e, 0x16, 0xc3, 0x4c, 0xe8, 0xe3, 0xd0, 0x4d, /* Byte value: 0xee */ + 0x56, 0x1f, 0x46, 0x4d, 0x3e, 0xed, 0xfa, 0x0d, 0xf9, 0xf1, 0x15, 0xff, 0xc6, 0x5f, 0x5a, 0xd9, /* Byte value: 0xef */ + 0xbf, 0x4b, 0x47, 0xca, 0x96, 0x35, 0x02, 0xac, 0xf3, 0x8f, 0x28, 0xd2, 0x8b, 0x2d, 0xfd, 0xc0, /* Byte value: 0xf0 */ + 0x95, 0x2a, 0xe1, 0xa2, 0x54, 0x6c, 0x7c, 0xb1, 0x05, 0x3f, 0xff, 0xf7, 0xc7, 0x39, 0xb2, 0xed, /* Byte value: 0xf1 */ + 0xb9, 0xef, 0xdf, 0xae, 0x1d, 0xd4, 0x08, 0xf5, 0x4a, 0x79, 0xa0, 0xce, 0x69, 0xb4, 0x72, 0x86, /* Byte value: 0xf2 */ + 0x11, 0xa8, 0x02, 0xcd, 0x93, 0x73, 0x33, 0x81, 0x14, 0xfc, 0x7a, 0x5a, 0x9a, 0xe4, 0x8d, 0x32, /* Byte value: 0xf3 */ + 0x0b, 0xa0, 0xee, 0x03, 0x83, 0x2e, 0x1d, 0x11, 0x63, 0xf0, 0x67, 0x9f, 0xca, 0xb2, 0x30, 0x6a, /* Byte value: 0xf4 */ + 0x71, 0x7a, 0x96, 0x42, 0xf4, 0x7b, 0x93, 0x58, 0xd5, 0x47, 0x2d, 0x59, 0xa2, 0x60, 0xaa, 0xd8, /* Byte value: 0xf5 */ + 0xb0, 0x92, 0xf8, 0x30, 0xe7, 0xa5, 0x13, 0xd3, 0xff, 0xdb, 0xbf, 0xe4, 0xfd, 0x71, 0x86, 0x6f, /* Byte value: 0xf6 */ + 0x1c, 0xac, 0x74, 0xaa, 0x9b, 0xbc, 0x24, 0xc9, 0xce, 0xfa, 0x95, 0xd9, 0xb2, 0xcf, 0x32, 0x1e, /* Byte value: 0xf7 */ + 0xdc, 0xcb, 0x9f, 0x77, 0x55, 0xac, 0xa7, 0xb8, 0x8f, 0x4f, 0x3b, 0xdf, 0xc2, 0x04, 0x7c, 0x09, /* Byte value: 0xf8 */ + 0xa8, 0x47, 0xdd, 0x63, 0x8e, 0xa7, 0x3b, 0x74, 0x5e, 0x85, 0xda, 0x94, 0xf3, 0x50, 0xff, 0xb4, /* Byte value: 0xf9 */ + 0xc1, 0xe8, 0x6e, 0x72, 0x13, 0xde, 0x80, 0x8b, 0x2a, 0x9c, 0x92, 0xbd, 0x5f, 0x11, 0x2c, 0xb7, /* Byte value: 0xfa */ + 0x2a, 0x61, 0xa6, 0x68, 0xc2, 0x59, 0x7e, 0x1d, 0xf6, 0xb0, 0xd7, 0x25, 0x4c, 0x14, 0x4f, 0x2d, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0xac, 0x3e, 0x8c, 0x9a, 0x7c, 0x19, 0x37, 0x1a, 0x31, 0x21, 0x2a, 0x3d, 0x4f, 0xbe, 0xb4, 0x71, /* Byte value: 0xfd */ + 0x48, 0x6e, 0xfb, 0x7a, 0xdc, 0x0e, 0xd8, 0xf3, 0xe1, 0x59, 0xf8, 0x93, 0x2a, 0xe7, 0xac, 0x44, /* Byte value: 0xfe */ + 0x7a, 0xda, 0x78, 0x41, 0x77, 0x55, 0x8e, 
0x49, 0xb6, 0xb7, 0x4a, 0xc6, 0x68, 0xd2, 0x9a, 0xb2, /* Byte value: 0xff */ + + /* Matrix row: 2 */ + 0x47, 0x35, 0xef, 0xb5, 0xef, 0xef, 0x30, 0xe6, 0xaf, 0xa0, 0x89, 0x86, 0x13, 0x85, 0x5f, 0xe6, /* Byte value: 0x00 */ + 0xe9, 0x63, 0xba, 0xe4, 0xba, 0xba, 0x98, 0x67, 0x18, 0xd6, 0x5a, 0x94, 0x21, 0x7c, 0xa5, 0x67, /* Byte value: 0x01 */ + 0x8f, 0xb2, 0x8e, 0xde, 0x8e, 0x8e, 0xf7, 0x86, 0xb8, 0x77, 0x64, 0xab, 0x6e, 0x29, 0x15, 0x86, /* Byte value: 0x02 */ + 0x4e, 0xe2, 0x6e, 0xfc, 0x6e, 0x6e, 0x95, 0xad, 0x61, 0xf8, 0xdd, 0x44, 0x5e, 0x69, 0xe5, 0xad, /* Byte value: 0x03 */ + 0x03, 0xab, 0x76, 0x99, 0x76, 0x76, 0x7a, 0x58, 0x6f, 0xdf, 0x1c, 0xac, 0xd8, 0xe3, 0x3e, 0x58, /* Byte value: 0x04 */ + 0xcf, 0xca, 0x1e, 0xed, 0x1e, 0x1e, 0xa4, 0x59, 0xec, 0x5e, 0x60, 0xd2, 0x46, 0x49, 0x9d, 0x59, /* Byte value: 0x05 */ + 0x0c, 0xe9, 0x1b, 0x21, 0x1b, 0x1b, 0x2b, 0xa3, 0x7f, 0xfa, 0x70, 0xf5, 0xe6, 0x0a, 0xf8, 0xa3, /* Byte value: 0x06 */ + 0xba, 0xae, 0x78, 0x32, 0x78, 0x78, 0xd5, 0xa7, 0x36, 0xbb, 0x43, 0xce, 0xdb, 0xe7, 0x31, 0xa7, /* Byte value: 0x07 */ + 0x12, 0x6d, 0xc1, 0x92, 0xc1, 0xc1, 0x89, 0x96, 0x5f, 0xb0, 0xa8, 0x47, 0x9a, 0x1b, 0xb7, 0x96, /* Byte value: 0x08 */ + 0xf0, 0xaa, 0x1f, 0xd1, 0x1f, 0x1f, 0x59, 0x6b, 0xc3, 0x15, 0x0f, 0xd9, 0x66, 0x88, 0x3d, 0x6b, /* Byte value: 0x09 */ + 0x2b, 0x98, 0x2c, 0x5f, 0x2c, 0x2c, 0x80, 0x14, 0xae, 0x86, 0xff, 0xd7, 0xc9, 0xdf, 0x6b, 0x14, /* Byte value: 0x0a */ + 0x2c, 0xd5, 0x53, 0xd9, 0x53, 0x53, 0xe3, 0x2d, 0x55, 0x0f, 0x72, 0x28, 0xf2, 0x3a, 0xbc, 0x2d, /* Byte value: 0x0b */ + 0x32, 0x51, 0x89, 0x6a, 0x89, 0x89, 0x41, 0x18, 0x75, 0x45, 0xaa, 0x9a, 0x8e, 0x2b, 0xf3, 0x18, /* Byte value: 0x0c */ + 0x1b, 0xba, 0x40, 0xdb, 0x40, 0x40, 0x2c, 0xdd, 0x91, 0xe8, 0xfc, 0x85, 0xd7, 0xf7, 0x0d, 0xdd, /* Byte value: 0x0d */ + 0xd2, 0xe5, 0xb2, 0xc7, 0xb2, 0xb2, 0x7c, 0x34, 0xa3, 0xcb, 0xa4, 0xcc, 0xe2, 0xbb, 0xec, 0x34, /* Byte value: 0x0e */ + 0x08, 0x0f, 0x12, 0x3e, 0x12, 0x12, 0x32, 0xc2, 0xeb, 0xac, 0xe1, 0xa6, 0x05, 0x0c, 0x11, 0xc2, /* Byte value: 0x0f */ + 0xdd, 0xa7, 0xdf, 0x7f, 0xdf, 0xdf, 0x2d, 0xcf, 0xb3, 0xee, 0xc8, 0x95, 0xdc, 0x52, 0x2a, 0xcf, /* Byte value: 0x10 */ + 0x60, 0x44, 0xd8, 0xcb, 0xd8, 0xd8, 0x9b, 0x51, 0x7e, 0xdc, 0x06, 0xa4, 0x3c, 0x50, 0xcc, 0x51, /* Byte value: 0x11 */ + 0x72, 0x29, 0x19, 0x59, 0x19, 0x19, 0x12, 0xc7, 0x21, 0x6c, 0xae, 0xe3, 0xa6, 0x4b, 0x7b, 0xc7, /* Byte value: 0x12 */ + 0x11, 0xc6, 0xb7, 0x0b, 0xb7, 0xb7, 0xf3, 0xce, 0x30, 0x6f, 0xb4, 0xeb, 0x42, 0xf8, 0x89, 0xce, /* Byte value: 0x13 */ + 0x06, 0x95, 0xec, 0xf1, 0xec, 0xec, 0xf4, 0xb0, 0xde, 0x7d, 0x38, 0x9b, 0x73, 0x05, 0x7c, 0xb0, /* Byte value: 0x14 */ + 0x5d, 0x57, 0x3c, 0x19, 0x3c, 0x3c, 0x8b, 0xb2, 0x1b, 0xbc, 0xc0, 0x67, 0x8c, 0x92, 0xf9, 0xb2, /* Byte value: 0x15 */ + 0xe0, 0xb4, 0x3b, 0xad, 0x3b, 0x3b, 0x3d, 0x2c, 0xd6, 0x8e, 0x0e, 0x56, 0x6c, 0x90, 0x1f, 0x2c, /* Byte value: 0x16 */ + 0x86, 0x65, 0x0f, 0x97, 0x0f, 0x0f, 0x52, 0xcd, 0x76, 0x2f, 0x30, 0x69, 0x23, 0xc5, 0xaf, 0xcd, /* Byte value: 0x17 */ + 0xaa, 0xb0, 0x5c, 0x4e, 0x5c, 0x5c, 0xb1, 0xe0, 0x23, 0x20, 0x42, 0x41, 0xd1, 0xff, 0x13, 0xe0, /* Byte value: 0x18 */ + 0x58, 0x69, 0xa6, 0x71, 0xa6, 0xa6, 0x05, 0x5a, 0xaa, 0x1e, 0xe4, 0x50, 0x27, 0x74, 0xbb, 0x5a, /* Byte value: 0x19 */ + 0x24, 0xda, 0x41, 0xe7, 0x41, 0x41, 0xd1, 0xef, 0xbe, 0xa3, 0x93, 0x8e, 0xf7, 0x36, 0xad, 0xef, /* Byte value: 0x1a */ + 0xdf, 0xd4, 0x3a, 0x91, 0x3a, 0x3a, 0xc0, 0x1e, 0xf9, 0xc5, 0x61, 0x5d, 0x4c, 0x51, 0xbf, 0x1e, /* Byte value: 0x1b */ + 0x69, 0x93, 0x59, 0x82, 0x59, 0x59, 0x3e, 0x1a, 0xb0, 0x84, 
0x52, 0x66, 0x71, 0xbc, 0x76, 0x1a, /* Byte value: 0x1c */ + 0x7a, 0x26, 0x0b, 0x67, 0x0b, 0x0b, 0x20, 0x05, 0xca, 0xc0, 0x4f, 0x45, 0xa3, 0x47, 0x6a, 0x05, /* Byte value: 0x1d */ + 0x67, 0x09, 0xa7, 0x4d, 0xa7, 0xa7, 0xf8, 0x68, 0x85, 0x55, 0x8b, 0x5b, 0x07, 0xb5, 0x1b, 0x68, /* Byte value: 0x1e */ + 0xec, 0x5d, 0x20, 0x8c, 0x20, 0x20, 0x16, 0x8f, 0xa9, 0x74, 0x7e, 0xa3, 0x8a, 0x9a, 0xe7, 0x8f, /* Byte value: 0x1f */ + 0x30, 0x22, 0x6c, 0x84, 0x6c, 0x6c, 0xac, 0xc9, 0x3f, 0x6e, 0x03, 0x52, 0x1e, 0x28, 0x66, 0xc9, /* Byte value: 0x20 */ + 0x6a, 0x38, 0x2f, 0x1b, 0x2f, 0x2f, 0x44, 0x42, 0xdf, 0x5b, 0x4e, 0xca, 0xa9, 0x5f, 0x48, 0x42, /* Byte value: 0x21 */ + 0x0e, 0x9a, 0xfe, 0xcf, 0xfe, 0xfe, 0xc6, 0x72, 0x35, 0xd1, 0xd9, 0x3d, 0x76, 0x09, 0x6d, 0x72, /* Byte value: 0x22 */ + 0x43, 0xd3, 0xe6, 0xaa, 0xe6, 0xe6, 0x29, 0x87, 0x3b, 0xf6, 0x18, 0xd5, 0xf0, 0x83, 0xb6, 0x87, /* Byte value: 0x23 */ + 0x2d, 0x0d, 0xc0, 0xae, 0xc0, 0xc0, 0x74, 0xa4, 0x70, 0xfb, 0xc7, 0x4c, 0xba, 0xda, 0x17, 0xa4, /* Byte value: 0x24 */ + 0xb3, 0x79, 0xf9, 0x7b, 0xf9, 0xf9, 0x70, 0xec, 0xf8, 0xe3, 0x17, 0x0c, 0x96, 0x0b, 0x8b, 0xec, /* Byte value: 0x25 */ + 0xe4, 0x52, 0x32, 0xb2, 0x32, 0x32, 0x24, 0x4d, 0x42, 0xd8, 0x9f, 0x05, 0x8f, 0x96, 0xf6, 0x4d, /* Byte value: 0x26 */ + 0x22, 0x4f, 0xad, 0x16, 0xad, 0xad, 0x25, 0x5f, 0x60, 0xde, 0xab, 0x15, 0x84, 0x33, 0xd1, 0x5f, /* Byte value: 0x27 */ + 0xab, 0x68, 0xcf, 0x39, 0xcf, 0xcf, 0x26, 0x69, 0x06, 0xd4, 0xf7, 0x25, 0x99, 0x1f, 0xb8, 0x69, /* Byte value: 0x28 */ + 0x3d, 0x13, 0xe4, 0xd2, 0xe4, 0xe4, 0x10, 0xe3, 0x65, 0x60, 0xc6, 0xc3, 0xb0, 0xc2, 0x35, 0xe3, /* Byte value: 0x29 */ + 0x34, 0xc4, 0x65, 0x9b, 0x65, 0x65, 0xb5, 0xa8, 0xab, 0x38, 0x92, 0x01, 0xfd, 0x2e, 0x8f, 0xa8, /* Byte value: 0x2a */ + 0x90, 0xee, 0xc7, 0x1a, 0xc7, 0xc7, 0xc2, 0x3a, 0xbd, 0xc9, 0x09, 0x7d, 0x5a, 0xd8, 0xf1, 0x3a, /* Byte value: 0x2b */ + 0x97, 0xa3, 0xb8, 0x9c, 0xb8, 0xb8, 0xa1, 0x03, 0x46, 0x40, 0x84, 0x82, 0x61, 0x3d, 0x26, 0x03, /* Byte value: 0x2c */ + 0xb0, 0xd2, 0x8f, 0xe2, 0x8f, 0x8f, 0x0a, 0xb4, 0x97, 0x3c, 0x0b, 0xa0, 0x4e, 0xe8, 0xb5, 0xb4, /* Byte value: 0x2d */ + 0xe5, 0x8a, 0xa1, 0xc5, 0xa1, 0xa1, 0xb3, 0xc4, 0x67, 0x2c, 0x2a, 0x61, 0xc7, 0x76, 0x5d, 0xc4, /* Byte value: 0x2e */ + 0x42, 0x0b, 0x75, 0xdd, 0x75, 0x75, 0xbe, 0x0e, 0x1e, 0x02, 0xad, 0xb1, 0xb8, 0x63, 0x1d, 0x0e, /* Byte value: 0x2f */ + 0x79, 0x8d, 0x7d, 0xfe, 0x7d, 0x7d, 0x5a, 0x5d, 0xa5, 0x1f, 0x53, 0xe9, 0x7b, 0xa4, 0x54, 0x5d, /* Byte value: 0x30 */ + 0xc0, 0x88, 0x73, 0x55, 0x73, 0x73, 0xf5, 0xa2, 0xfc, 0x7b, 0x0c, 0x8b, 0x78, 0xa0, 0x5b, 0xa2, /* Byte value: 0x31 */ + 0x18, 0x11, 0x36, 0x42, 0x36, 0x36, 0x56, 0x85, 0xfe, 0x37, 0xe0, 0x29, 0x0f, 0x14, 0x33, 0x85, /* Byte value: 0x32 */ + 0x4b, 0xdc, 0xf4, 0x94, 0xf4, 0xf4, 0x1b, 0x45, 0xd0, 0x5a, 0xf9, 0x73, 0xf5, 0x8f, 0xa7, 0x45, /* Byte value: 0x33 */ + 0xf7, 0xe7, 0x60, 0x57, 0x60, 0x60, 0x3a, 0x52, 0x38, 0x9c, 0x82, 0x26, 0x5d, 0x6d, 0xea, 0x52, /* Byte value: 0x34 */ + 0xb8, 0xdd, 0x9d, 0xdc, 0x9d, 0x9d, 0x38, 0x76, 0x7c, 0x90, 0xea, 0x06, 0x4b, 0xe4, 0xa4, 0x76, /* Byte value: 0x35 */ + 0xa6, 0x59, 0x47, 0x6f, 0x47, 0x47, 0x9a, 0x43, 0x5c, 0xda, 0x32, 0xb4, 0x37, 0xf5, 0xeb, 0x43, /* Byte value: 0x36 */ + 0x59, 0xb1, 0x35, 0x06, 0x35, 0x35, 0x92, 0xd3, 0x8f, 0xea, 0x51, 0x34, 0x6f, 0x94, 0x10, 0xd3, /* Byte value: 0x37 */ + 0x64, 0xa2, 0xd1, 0xd4, 0xd1, 0xd1, 0x82, 0x30, 0xea, 0x8a, 0x97, 0xf7, 0xdf, 0x56, 0x25, 0x30, /* Byte value: 0x38 */ + 0x36, 0xb7, 0x80, 0x75, 0x80, 0x80, 0x58, 0x79, 0xe1, 0x13, 0x3b, 0xc9, 0x6d, 0x2d, 0x1a, 0x79, /* Byte 
value: 0x39 */ + 0xb4, 0x34, 0x86, 0xfd, 0x86, 0x86, 0x13, 0xd5, 0x03, 0x6a, 0x9a, 0xf3, 0xad, 0xee, 0x5c, 0xd5, /* Byte value: 0x3a */ + 0x81, 0x28, 0x70, 0x11, 0x70, 0x70, 0x31, 0xf4, 0x8d, 0xa6, 0xbd, 0x96, 0x18, 0x20, 0x78, 0xf4, /* Byte value: 0x3b */ + 0x09, 0xd7, 0x81, 0x49, 0x81, 0x81, 0xa5, 0x4b, 0xce, 0x58, 0x54, 0xc2, 0x4d, 0xec, 0xba, 0x4b, /* Byte value: 0x3c */ + 0x78, 0x55, 0xee, 0x89, 0xee, 0xee, 0xcd, 0xd4, 0x80, 0xeb, 0xe6, 0x8d, 0x33, 0x44, 0xff, 0xd4, /* Byte value: 0x3d */ + 0xed, 0x85, 0xb3, 0xfb, 0xb3, 0xb3, 0x81, 0x06, 0x8c, 0x80, 0xcb, 0xc7, 0xc2, 0x7a, 0x4c, 0x06, /* Byte value: 0x3e */ + 0x5f, 0x24, 0xd9, 0xf7, 0xd9, 0xd9, 0x66, 0x63, 0x51, 0x97, 0x69, 0xaf, 0x1c, 0x91, 0x6c, 0x63, /* Byte value: 0x3f */ + 0xda, 0xea, 0xa0, 0xf9, 0xa0, 0xa0, 0x4e, 0xf6, 0x48, 0x67, 0x45, 0x6a, 0xe7, 0xb7, 0xfd, 0xf6, /* Byte value: 0x40 */ + 0xc4, 0x6e, 0x7a, 0x4a, 0x7a, 0x7a, 0xec, 0xc3, 0x68, 0x2d, 0x9d, 0xd8, 0x9b, 0xa6, 0xb2, 0xc3, /* Byte value: 0x41 */ + 0x29, 0xeb, 0xc9, 0xb1, 0xc9, 0xc9, 0x6d, 0xc5, 0xe4, 0xad, 0x56, 0x1f, 0x59, 0xdc, 0xfe, 0xc5, /* Byte value: 0x42 */ + 0xad, 0xfd, 0x23, 0xc8, 0x23, 0x23, 0xd2, 0xd9, 0xd8, 0xa9, 0xcf, 0xbe, 0xea, 0x1a, 0xc4, 0xd9, /* Byte value: 0x43 */ + 0x07, 0x4d, 0x7f, 0x86, 0x7f, 0x7f, 0x63, 0x39, 0xfb, 0x89, 0x8d, 0xff, 0x3b, 0xe5, 0xd7, 0x39, /* Byte value: 0x44 */ + 0x82, 0x83, 0x06, 0x88, 0x06, 0x06, 0x4b, 0xac, 0xe2, 0x79, 0xa1, 0x3a, 0xc0, 0xc3, 0x46, 0xac, /* Byte value: 0x45 */ + 0xe2, 0xc7, 0xde, 0x43, 0xde, 0xde, 0xd0, 0xfd, 0x9c, 0xa5, 0xa7, 0x9e, 0xfc, 0x93, 0x8a, 0xfd, /* Byte value: 0x46 */ + 0x31, 0xfa, 0xff, 0xf3, 0xff, 0xff, 0x3b, 0x40, 0x1a, 0x9a, 0xb6, 0x36, 0x56, 0xc8, 0xcd, 0x40, /* Byte value: 0x47 */ + 0x17, 0x53, 0x5b, 0xfa, 0x5b, 0x5b, 0x07, 0x7e, 0xee, 0x12, 0x8c, 0x70, 0x31, 0xfd, 0xf5, 0x7e, /* Byte value: 0x48 */ + 0x9f, 0xac, 0xaa, 0xa2, 0xaa, 0xaa, 0x93, 0xc1, 0xad, 0xec, 0x65, 0x24, 0x64, 0x31, 0x37, 0xc1, /* Byte value: 0x49 */ + 0x0b, 0xa4, 0x64, 0xa7, 0x64, 0x64, 0x48, 0x9a, 0x84, 0x73, 0xfd, 0x0a, 0xdd, 0xef, 0x2f, 0x9a, /* Byte value: 0x4a */ + 0x16, 0x8b, 0xc8, 0x8d, 0xc8, 0xc8, 0x90, 0xf7, 0xcb, 0xe6, 0x39, 0x14, 0x79, 0x1d, 0x5e, 0xf7, /* Byte value: 0x4b */ + 0x7f, 0x18, 0x91, 0x0f, 0x91, 0x91, 0xae, 0xed, 0x7b, 0x62, 0x6b, 0x72, 0x08, 0xa1, 0x28, 0xed, /* Byte value: 0x4c */ + 0x23, 0x97, 0x3e, 0x61, 0x3e, 0x3e, 0xb2, 0xd6, 0x45, 0x2a, 0x1e, 0x71, 0xcc, 0xd3, 0x7a, 0xd6, /* Byte value: 0x4d */ + 0x6b, 0xe0, 0xbc, 0x6c, 0xbc, 0xbc, 0xd3, 0xcb, 0xfa, 0xaf, 0xfb, 0xae, 0xe1, 0xbf, 0xe3, 0xcb, /* Byte value: 0x4e */ + 0xa9, 0x1b, 0x2a, 0xd7, 0x2a, 0x2a, 0xcb, 0xb8, 0x4c, 0xff, 0x5e, 0xed, 0x09, 0x1c, 0x2d, 0xb8, /* Byte value: 0x4f */ + 0x8b, 0x54, 0x87, 0xc1, 0x87, 0x87, 0xee, 0xe7, 0x2c, 0x21, 0xf5, 0xf8, 0x8d, 0x2f, 0xfc, 0xe7, /* Byte value: 0x50 */ + 0xb7, 0x9f, 0xf0, 0x64, 0xf0, 0xf0, 0x69, 0x8d, 0x6c, 0xb5, 0x86, 0x5f, 0x75, 0x0d, 0x62, 0x8d, /* Byte value: 0x51 */ + 0x13, 0xb5, 0x52, 0xe5, 0x52, 0x52, 0x1e, 0x1f, 0x7a, 0x44, 0x1d, 0x23, 0xd2, 0xfb, 0x1c, 0x1f, /* Byte value: 0x52 */ + 0x1c, 0xf7, 0x3f, 0x5d, 0x3f, 0x3f, 0x4f, 0xe4, 0x6a, 0x61, 0x71, 0x7a, 0xec, 0x12, 0xda, 0xe4, /* Byte value: 0x53 */ + 0xfb, 0x0e, 0x7b, 0x76, 0x7b, 0x7b, 0x11, 0xf1, 0x47, 0x66, 0xf2, 0xd3, 0xbb, 0x67, 0x12, 0xf1, /* Byte value: 0x54 */ + 0x62, 0x37, 0x3d, 0x25, 0x3d, 0x3d, 0x76, 0x80, 0x34, 0xf7, 0xaf, 0x6c, 0xac, 0x53, 0x59, 0x80, /* Byte value: 0x55 */ + 0xf2, 0xd9, 0xfa, 0x3f, 0xfa, 0xfa, 0xb4, 0xba, 0x89, 0x3e, 0xa6, 0x11, 0xf6, 0x8b, 0xa8, 0xba, /* Byte value: 0x56 */ + 0x3b, 0x86, 0x08, 0x23, 
0x08, 0x08, 0xe4, 0x53, 0xbb, 0x1d, 0xfe, 0x58, 0xc3, 0xc7, 0x49, 0x53, /* Byte value: 0x57 */ + 0x9a, 0x92, 0x30, 0xca, 0x30, 0x30, 0x1d, 0x29, 0x1c, 0x4e, 0x41, 0x13, 0xcf, 0xd7, 0x75, 0x29, /* Byte value: 0x58 */ + 0x75, 0x64, 0x66, 0xdf, 0x66, 0x66, 0x71, 0xfe, 0xda, 0xe5, 0x23, 0x1c, 0x9d, 0xae, 0xac, 0xfe, /* Byte value: 0x59 */ + 0xea, 0xc8, 0xcc, 0x7d, 0xcc, 0xcc, 0xe2, 0x3f, 0x77, 0x09, 0x46, 0x38, 0xf9, 0x9f, 0x9b, 0x3f, /* Byte value: 0x5a */ + 0xd0, 0x96, 0x57, 0x29, 0x57, 0x57, 0x91, 0xe5, 0xe9, 0xe0, 0x0d, 0x04, 0x72, 0xb8, 0x79, 0xe5, /* Byte value: 0x5b */ + 0x2a, 0x40, 0xbf, 0x28, 0xbf, 0xbf, 0x17, 0x9d, 0x8b, 0x72, 0x4a, 0xb3, 0x81, 0x3f, 0xc0, 0x9d, /* Byte value: 0x5c */ + 0x91, 0x36, 0x54, 0x6d, 0x54, 0x54, 0x55, 0xb3, 0x98, 0x3d, 0xbc, 0x19, 0x12, 0x38, 0x5a, 0xb3, /* Byte value: 0x5d */ + 0x02, 0x73, 0xe5, 0xee, 0xe5, 0xe5, 0xed, 0xd1, 0x4a, 0x2b, 0xa9, 0xc8, 0x90, 0x03, 0x95, 0xd1, /* Byte value: 0x5e */ + 0x46, 0xed, 0x7c, 0xc2, 0x7c, 0x7c, 0xa7, 0x6f, 0x8a, 0x54, 0x3c, 0xe2, 0x5b, 0x65, 0xf4, 0x6f, /* Byte value: 0x5f */ + 0xa7, 0x81, 0xd4, 0x18, 0xd4, 0xd4, 0x0d, 0xca, 0x79, 0x2e, 0x87, 0xd0, 0x7f, 0x15, 0x40, 0xca, /* Byte value: 0x60 */ + 0xdc, 0x7f, 0x4c, 0x08, 0x4c, 0x4c, 0xba, 0x46, 0x96, 0x1a, 0x7d, 0xf1, 0x94, 0xb2, 0x81, 0x46, /* Byte value: 0x61 */ + 0x27, 0x71, 0x37, 0x7e, 0x37, 0x37, 0xab, 0xb7, 0xd1, 0x7c, 0x8f, 0x22, 0x2f, 0xd5, 0x93, 0xb7, /* Byte value: 0x62 */ + 0x76, 0xcf, 0x10, 0x46, 0x10, 0x10, 0x0b, 0xa6, 0xb5, 0x3a, 0x3f, 0xb0, 0x45, 0x4d, 0x92, 0xa6, /* Byte value: 0x63 */ + 0xa4, 0x2a, 0xa2, 0x81, 0xa2, 0xa2, 0x77, 0x92, 0x16, 0xf1, 0x9b, 0x7c, 0xa7, 0xf6, 0x7e, 0x92, /* Byte value: 0x64 */ + 0xca, 0xf4, 0x84, 0x85, 0x84, 0x84, 0x2a, 0xb1, 0x5d, 0xfc, 0x44, 0xe5, 0xed, 0xaf, 0xdf, 0xb1, /* Byte value: 0x65 */ + 0x96, 0x7b, 0x2b, 0xeb, 0x2b, 0x2b, 0x36, 0x8a, 0x63, 0xb4, 0x31, 0xe6, 0x29, 0xdd, 0x8d, 0x8a, /* Byte value: 0x66 */ + 0x70, 0x5a, 0xfc, 0xb7, 0xfc, 0xfc, 0xff, 0x16, 0x6b, 0x47, 0x07, 0x2b, 0x36, 0x48, 0xee, 0x16, /* Byte value: 0x67 */ + 0x54, 0x80, 0xbd, 0x50, 0xbd, 0xbd, 0x2e, 0xf9, 0xd5, 0xe4, 0x94, 0xa5, 0xc1, 0x7e, 0x43, 0xf9, /* Byte value: 0x68 */ + 0xe1, 0x6c, 0xa8, 0xda, 0xa8, 0xa8, 0xaa, 0xa5, 0xf3, 0x7a, 0xbb, 0x32, 0x24, 0x70, 0xb4, 0xa5, /* Byte value: 0x69 */ + 0x6f, 0x06, 0xb5, 0x73, 0xb5, 0xb5, 0xca, 0xaa, 0x6e, 0xf9, 0x6a, 0xfd, 0x02, 0xb9, 0x0a, 0xaa, /* Byte value: 0x6a */ + 0x39, 0xf5, 0xed, 0xcd, 0xed, 0xed, 0x09, 0x82, 0xf1, 0x36, 0x57, 0x90, 0x53, 0xc4, 0xdc, 0x82, /* Byte value: 0x6b */ + 0x33, 0x89, 0x1a, 0x1d, 0x1a, 0x1a, 0xd6, 0x91, 0x50, 0xb1, 0x1f, 0xfe, 0xc6, 0xcb, 0x58, 0x91, /* Byte value: 0x6c */ + 0xdb, 0x32, 0x33, 0x8e, 0x33, 0x33, 0xd9, 0x7f, 0x6d, 0x93, 0xf0, 0x0e, 0xaf, 0x57, 0x56, 0x7f, /* Byte value: 0x6d */ + 0xa0, 0xcc, 0xab, 0x9e, 0xab, 0xab, 0x6e, 0xf3, 0x82, 0xa7, 0x0a, 0x2f, 0x44, 0xf0, 0x97, 0xf3, /* Byte value: 0x6e */ + 0xfe, 0x30, 0xe1, 0x1e, 0xe1, 0xe1, 0x9f, 0x19, 0xf6, 0xc4, 0xd6, 0xe4, 0x10, 0x81, 0x50, 0x19, /* Byte value: 0x6f */ + 0xb2, 0xa1, 0x6a, 0x0c, 0x6a, 0x6a, 0xe7, 0x65, 0xdd, 0x17, 0xa2, 0x68, 0xde, 0xeb, 0x20, 0x65, /* Byte value: 0x70 */ + 0x10, 0x1e, 0x24, 0x7c, 0x24, 0x24, 0x64, 0x47, 0x15, 0x9b, 0x01, 0x8f, 0x0a, 0x18, 0x22, 0x47, /* Byte value: 0x71 */ + 0x71, 0x82, 0x6f, 0xc0, 0x6f, 0x6f, 0x68, 0x9f, 0x4e, 0xb3, 0xb2, 0x4f, 0x7e, 0xa8, 0x45, 0x9f, /* Byte value: 0x72 */ + 0x87, 0xbd, 0x9c, 0xe0, 0x9c, 0x9c, 0xc5, 0x44, 0x53, 0xdb, 0x85, 0x0d, 0x6b, 0x25, 0x04, 0x44, /* Byte value: 0x73 */ + 0x35, 0x1c, 0xf6, 0xec, 0xf6, 0xf6, 0x22, 0x21, 0x8e, 0xcc, 0x27, 
0x65, 0xb5, 0xce, 0x24, 0x21, /* Byte value: 0x74 */ + 0x38, 0x2d, 0x7e, 0xba, 0x7e, 0x7e, 0x9e, 0x0b, 0xd4, 0xc2, 0xe2, 0xf4, 0x1b, 0x24, 0x77, 0x0b, /* Byte value: 0x75 */ + 0x52, 0x15, 0x51, 0xa1, 0x51, 0x51, 0xda, 0x49, 0x0b, 0x99, 0xac, 0x3e, 0xb2, 0x7b, 0x3f, 0x49, /* Byte value: 0x76 */ + 0x65, 0x7a, 0x42, 0xa3, 0x42, 0x42, 0x15, 0xb9, 0xcf, 0x7e, 0x22, 0x93, 0x97, 0xb6, 0x8e, 0xb9, /* Byte value: 0x77 */ + 0xde, 0x0c, 0xa9, 0xe6, 0xa9, 0xa9, 0x57, 0x97, 0xdc, 0x31, 0xd4, 0x39, 0x04, 0xb1, 0x14, 0x97, /* Byte value: 0x78 */ + 0x8e, 0x6a, 0x1d, 0xa9, 0x1d, 0x1d, 0x60, 0x0f, 0x9d, 0x83, 0xd1, 0xcf, 0x26, 0xc9, 0xbe, 0x0f, /* Byte value: 0x79 */ + 0xa8, 0xc3, 0xb9, 0xa0, 0xb9, 0xb9, 0x5c, 0x31, 0x69, 0x0b, 0xeb, 0x89, 0x41, 0xfc, 0x86, 0x31, /* Byte value: 0x7a */ + 0xfd, 0x9b, 0x97, 0x87, 0x97, 0x97, 0xe5, 0x41, 0x99, 0x1b, 0xca, 0x48, 0xc8, 0x62, 0x6e, 0x41, /* Byte value: 0x7b */ + 0x2e, 0xa6, 0xb6, 0x37, 0xb6, 0xb6, 0x0e, 0xfc, 0x1f, 0x24, 0xdb, 0xe0, 0x62, 0x39, 0x29, 0xfc, /* Byte value: 0x7c */ + 0x01, 0xd8, 0x93, 0x77, 0x93, 0x93, 0x97, 0x89, 0x25, 0xf4, 0xb5, 0x64, 0x48, 0xe0, 0xab, 0x89, /* Byte value: 0x7d */ + 0xf8, 0xa5, 0x0d, 0xef, 0x0d, 0x0d, 0x6b, 0xa9, 0x28, 0xb9, 0xee, 0x7f, 0x63, 0x84, 0x2c, 0xa9, /* Byte value: 0x7e */ + 0x8c, 0x19, 0xf8, 0x47, 0xf8, 0xf8, 0x8d, 0xde, 0xd7, 0xa8, 0x78, 0x07, 0xb6, 0xca, 0x2b, 0xde, /* Byte value: 0x7f */ + 0xbb, 0x76, 0xeb, 0x45, 0xeb, 0xeb, 0x42, 0x2e, 0x13, 0x4f, 0xf6, 0xaa, 0x93, 0x07, 0x9a, 0x2e, /* Byte value: 0x80 */ + 0x49, 0xaf, 0x11, 0x7a, 0x11, 0x11, 0xf6, 0x94, 0x9a, 0x71, 0x50, 0xbb, 0x65, 0x8c, 0x32, 0x94, /* Byte value: 0x81 */ + 0x44, 0x9e, 0x99, 0x2c, 0x99, 0x99, 0x4a, 0xbe, 0xc0, 0x7f, 0x95, 0x2a, 0xcb, 0x66, 0x61, 0xbe, /* Byte value: 0x82 */ + 0xbf, 0x90, 0xe2, 0x5a, 0xe2, 0xe2, 0x5b, 0x4f, 0x87, 0x19, 0x67, 0xf9, 0x70, 0x01, 0x73, 0x4f, /* Byte value: 0x83 */ + 0xef, 0xf6, 0x56, 0x15, 0x56, 0x56, 0x6c, 0xd7, 0xc6, 0xab, 0x62, 0x0f, 0x52, 0x79, 0xd9, 0xd7, /* Byte value: 0x84 */ + 0xfc, 0x43, 0x04, 0xf0, 0x04, 0x04, 0x72, 0xc8, 0xbc, 0xef, 0x7f, 0x2c, 0x80, 0x82, 0xc5, 0xc8, /* Byte value: 0x85 */ + 0x3a, 0x5e, 0x9b, 0x54, 0x9b, 0x9b, 0x73, 0xda, 0x9e, 0xe9, 0x4b, 0x3c, 0x8b, 0x27, 0xe2, 0xda, /* Byte value: 0x86 */ + 0xb1, 0x0a, 0x1c, 0x95, 0x1c, 0x1c, 0x9d, 0x3d, 0xb2, 0xc8, 0xbe, 0xc4, 0x06, 0x08, 0x1e, 0x3d, /* Byte value: 0x87 */ + 0xf9, 0x7d, 0x9e, 0x98, 0x9e, 0x9e, 0xfc, 0x20, 0x0d, 0x4d, 0x5b, 0x1b, 0x2b, 0x64, 0x87, 0x20, /* Byte value: 0x88 */ + 0x99, 0x39, 0x46, 0x53, 0x46, 0x46, 0x67, 0x71, 0x73, 0x91, 0x5d, 0xbf, 0x17, 0x34, 0x4b, 0x71, /* Byte value: 0x89 */ + 0xd8, 0x99, 0x45, 0x17, 0x45, 0x45, 0xa3, 0x27, 0x02, 0x4c, 0xec, 0xa2, 0x77, 0xb4, 0x68, 0x27, /* Byte value: 0x8a */ + 0xc9, 0x5f, 0xf2, 0x1c, 0xf2, 0xf2, 0x50, 0xe9, 0x32, 0x23, 0x58, 0x49, 0x35, 0x4c, 0xe1, 0xe9, /* Byte value: 0x8b */ + 0x61, 0x9c, 0x4b, 0xbc, 0x4b, 0x4b, 0x0c, 0xd8, 0x5b, 0x28, 0xb3, 0xc0, 0x74, 0xb0, 0x67, 0xd8, /* Byte value: 0x8c */ + 0x3c, 0xcb, 0x77, 0xa5, 0x77, 0x77, 0x87, 0x6a, 0x40, 0x94, 0x73, 0xa7, 0xf8, 0x22, 0x9e, 0x6a, /* Byte value: 0x8d */ + 0x89, 0x27, 0x62, 0x2f, 0x62, 0x62, 0x03, 0x36, 0x66, 0x0a, 0x5c, 0x30, 0x1d, 0x2c, 0x69, 0x36, /* Byte value: 0x8e */ + 0x0f, 0x42, 0x6d, 0xb8, 0x6d, 0x6d, 0x51, 0xfb, 0x10, 0x25, 0x6c, 0x59, 0x3e, 0xe9, 0xc6, 0xfb, /* Byte value: 0x8f */ + 0x5c, 0x8f, 0xaf, 0x6e, 0xaf, 0xaf, 0x1c, 0x3b, 0x3e, 0x48, 0x75, 0x03, 0xc4, 0x72, 0x52, 0x3b, /* Byte value: 0x90 */ + 0xbe, 0x48, 0x71, 0x2d, 0x71, 0x71, 0xcc, 0xc6, 0xa2, 0xed, 0xd2, 0x9d, 0x38, 0xe1, 0xd8, 0xc6, /* Byte value: 
0x91 */ + 0xc2, 0xfb, 0x96, 0xbb, 0x96, 0x96, 0x18, 0x73, 0xb6, 0x50, 0xa5, 0x43, 0xe8, 0xa3, 0xce, 0x73, /* Byte value: 0x92 */ + 0xc5, 0xb6, 0xe9, 0x3d, 0xe9, 0xe9, 0x7b, 0x4a, 0x4d, 0xd9, 0x28, 0xbc, 0xd3, 0x46, 0x19, 0x4a, /* Byte value: 0x93 */ + 0x88, 0xff, 0xf1, 0x58, 0xf1, 0xf1, 0x94, 0xbf, 0x43, 0xfe, 0xe9, 0x54, 0x55, 0xcc, 0xc2, 0xbf, /* Byte value: 0x94 */ + 0xa1, 0x14, 0x38, 0xe9, 0x38, 0x38, 0xf9, 0x7a, 0xa7, 0x53, 0xbf, 0x4b, 0x0c, 0x10, 0x3c, 0x7a, /* Byte value: 0x95 */ + 0x1d, 0x2f, 0xac, 0x2a, 0xac, 0xac, 0xd8, 0x6d, 0x4f, 0x95, 0xc4, 0x1e, 0xa4, 0xf2, 0x71, 0x6d, /* Byte value: 0x96 */ + 0xc7, 0xc5, 0x0c, 0xd3, 0x0c, 0x0c, 0x96, 0x9b, 0x07, 0xf2, 0x81, 0x74, 0x43, 0x45, 0x8c, 0x9b, /* Byte value: 0x97 */ + 0x6c, 0xad, 0xc3, 0xea, 0xc3, 0xc3, 0xb0, 0xf2, 0x01, 0x26, 0x76, 0x51, 0xda, 0x5a, 0x34, 0xf2, /* Byte value: 0x98 */ + 0x85, 0xce, 0x79, 0x0e, 0x79, 0x79, 0x28, 0x95, 0x19, 0xf0, 0x2c, 0xc5, 0xfb, 0x26, 0x91, 0x95, /* Byte value: 0x99 */ + 0x9d, 0xdf, 0x4f, 0x4c, 0x4f, 0x4f, 0x7e, 0x10, 0xe7, 0xc7, 0xcc, 0xec, 0xf4, 0x32, 0xa2, 0x10, /* Byte value: 0x9a */ + 0xd3, 0x3d, 0x21, 0xb0, 0x21, 0x21, 0xeb, 0xbd, 0x86, 0x3f, 0x11, 0xa8, 0xaa, 0x5b, 0x47, 0xbd, /* Byte value: 0x9b */ + 0xd1, 0x4e, 0xc4, 0x5e, 0xc4, 0xc4, 0x06, 0x6c, 0xcc, 0x14, 0xb8, 0x60, 0x3a, 0x58, 0xd2, 0x6c, /* Byte value: 0x9c */ + 0x1e, 0x84, 0xda, 0xb3, 0xda, 0xda, 0xa2, 0x35, 0x20, 0x4a, 0xd8, 0xb2, 0x7c, 0x11, 0x4f, 0x35, /* Byte value: 0x9d */ + 0x3e, 0xb8, 0x92, 0x4b, 0x92, 0x92, 0x6a, 0xbb, 0x0a, 0xbf, 0xda, 0x6f, 0x68, 0x21, 0x0b, 0xbb, /* Byte value: 0x9e */ + 0x84, 0x16, 0xea, 0x79, 0xea, 0xea, 0xbf, 0x1c, 0x3c, 0x04, 0x99, 0xa1, 0xb3, 0xc6, 0x3a, 0x1c, /* Byte value: 0x9f */ + 0x2f, 0x7e, 0x25, 0x40, 0x25, 0x25, 0x99, 0x75, 0x3a, 0xd0, 0x6e, 0x84, 0x2a, 0xd9, 0x82, 0x75, /* Byte value: 0xa0 */ + 0x7d, 0x6b, 0x74, 0xe1, 0x74, 0x74, 0x43, 0x3c, 0x31, 0x49, 0xc2, 0xba, 0x98, 0xa2, 0xbd, 0x3c, /* Byte value: 0xa1 */ + 0x74, 0xbc, 0xf5, 0xa8, 0xf5, 0xf5, 0xe6, 0x77, 0xff, 0x11, 0x96, 0x78, 0xd5, 0x4e, 0x07, 0x77, /* Byte value: 0xa2 */ + 0x41, 0xa0, 0x03, 0x44, 0x03, 0x03, 0xc4, 0x56, 0x71, 0xdd, 0xb1, 0x1d, 0x60, 0x80, 0x23, 0x56, /* Byte value: 0xa3 */ + 0x50, 0x66, 0xb4, 0x4f, 0xb4, 0xb4, 0x37, 0x98, 0x41, 0xb2, 0x05, 0xf6, 0x22, 0x78, 0xaa, 0x98, /* Byte value: 0xa4 */ + 0x21, 0xe4, 0xdb, 0x8f, 0xdb, 0xdb, 0x5f, 0x07, 0x0f, 0x01, 0xb7, 0xb9, 0x5c, 0xd0, 0xef, 0x07, /* Byte value: 0xa5 */ + 0x55, 0x58, 0x2e, 0x27, 0x2e, 0x2e, 0xb9, 0x70, 0xf0, 0x10, 0x21, 0xc1, 0x89, 0x9e, 0xe8, 0x70, /* Byte value: 0xa6 */ + 0xe7, 0xf9, 0x44, 0x2b, 0x44, 0x44, 0x5e, 0x15, 0x2d, 0x07, 0x83, 0xa9, 0x57, 0x75, 0xc8, 0x15, /* Byte value: 0xa7 */ + 0xcd, 0xb9, 0xfb, 0x03, 0xfb, 0xfb, 0x49, 0x88, 0xa6, 0x75, 0xc9, 0x1a, 0xd6, 0x4a, 0x08, 0x88, /* Byte value: 0xa8 */ + 0x0d, 0x31, 0x88, 0x56, 0x88, 0x88, 0xbc, 0x2a, 0x5a, 0x0e, 0xc5, 0x91, 0xae, 0xea, 0x53, 0x2a, /* Byte value: 0xa9 */ + 0x7c, 0xb3, 0xe7, 0x96, 0xe7, 0xe7, 0xd4, 0xb5, 0x14, 0xbd, 0x77, 0xde, 0xd0, 0x42, 0x16, 0xb5, /* Byte value: 0xaa */ + 0x37, 0x6f, 0x13, 0x02, 0x13, 0x13, 0xcf, 0xf0, 0xc4, 0xe7, 0x8e, 0xad, 0x25, 0xcd, 0xb1, 0xf0, /* Byte value: 0xab */ + 0x1a, 0x62, 0xd3, 0xac, 0xd3, 0xd3, 0xbb, 0x54, 0xb4, 0x1c, 0x49, 0xe1, 0x9f, 0x17, 0xa6, 0x54, /* Byte value: 0xac */ + 0xa2, 0xbf, 0x4e, 0x70, 0x4e, 0x4e, 0x83, 0x22, 0xc8, 0x8c, 0xa3, 0xe7, 0xd4, 0xf3, 0x02, 0x22, /* Byte value: 0xad */ + 0xaf, 0x8e, 0xc6, 0x26, 0xc6, 0xc6, 0x3f, 0x08, 0x92, 0x82, 0x66, 0x76, 0x7a, 0x19, 0x51, 0x08, /* Byte value: 0xae */ + 0xf6, 0x3f, 0xf3, 0x20, 0xf3, 0xf3, 
0xad, 0xdb, 0x1d, 0x68, 0x37, 0x42, 0x15, 0x8d, 0x41, 0xdb, /* Byte value: 0xaf */ + 0xe8, 0xbb, 0x29, 0x93, 0x29, 0x29, 0x0f, 0xee, 0x3d, 0x22, 0xef, 0xf0, 0x69, 0x9c, 0x0e, 0xee, /* Byte value: 0xb0 */ + 0x7e, 0xc0, 0x02, 0x78, 0x02, 0x02, 0x39, 0x64, 0x5e, 0x96, 0xde, 0x16, 0x40, 0x41, 0x83, 0x64, /* Byte value: 0xb1 */ + 0x5e, 0xfc, 0x4a, 0x80, 0x4a, 0x4a, 0xf1, 0xea, 0x74, 0x63, 0xdc, 0xcb, 0x54, 0x71, 0xc7, 0xea, /* Byte value: 0xb2 */ + 0xfa, 0xd6, 0xe8, 0x01, 0xe8, 0xe8, 0x86, 0x78, 0x62, 0x92, 0x47, 0xb7, 0xf3, 0x87, 0xb9, 0x78, /* Byte value: 0xb3 */ + 0xb5, 0xec, 0x15, 0x8a, 0x15, 0x15, 0x84, 0x5c, 0x26, 0x9e, 0x2f, 0x97, 0xe5, 0x0e, 0xf7, 0x5c, /* Byte value: 0xb4 */ + 0x92, 0x9d, 0x22, 0xf4, 0x22, 0x22, 0x2f, 0xeb, 0xf7, 0xe2, 0xa0, 0xb5, 0xca, 0xdb, 0x64, 0xeb, /* Byte value: 0xb5 */ + 0x56, 0xf3, 0x58, 0xbe, 0x58, 0x58, 0xc3, 0x28, 0x9f, 0xcf, 0x3d, 0x6d, 0x51, 0x7d, 0xd6, 0x28, /* Byte value: 0xb6 */ + 0xf1, 0x72, 0x8c, 0xa6, 0x8c, 0x8c, 0xce, 0xe2, 0xe6, 0xe1, 0xba, 0xbd, 0x2e, 0x68, 0x96, 0xe2, /* Byte value: 0xb7 */ + 0x04, 0xe6, 0x09, 0x1f, 0x09, 0x09, 0x19, 0x61, 0x94, 0x56, 0x91, 0x53, 0xe3, 0x06, 0xe9, 0x61, /* Byte value: 0xb8 */ + 0x6e, 0xde, 0x26, 0x04, 0x26, 0x26, 0x5d, 0x23, 0x4b, 0x0d, 0xdf, 0x99, 0x4a, 0x59, 0xa1, 0x23, /* Byte value: 0xb9 */ + 0xa5, 0xf2, 0x31, 0xf6, 0x31, 0x31, 0xe0, 0x1b, 0x33, 0x05, 0x2e, 0x18, 0xef, 0x16, 0xd5, 0x1b, /* Byte value: 0xba */ + 0xe6, 0x21, 0xd7, 0x5c, 0xd7, 0xd7, 0xc9, 0x9c, 0x08, 0xf3, 0x36, 0xcd, 0x1f, 0x95, 0x63, 0x9c, /* Byte value: 0xbb */ + 0xc8, 0x87, 0x61, 0x6b, 0x61, 0x61, 0xc7, 0x60, 0x17, 0xd7, 0xed, 0x2d, 0x7d, 0xac, 0x4a, 0x60, /* Byte value: 0xbc */ + 0x7b, 0xfe, 0x98, 0x10, 0x98, 0x98, 0xb7, 0x8c, 0xef, 0x34, 0xfa, 0x21, 0xeb, 0xa7, 0xc1, 0x8c, /* Byte value: 0xbd */ + 0x73, 0xf1, 0x8a, 0x2e, 0x8a, 0x8a, 0x85, 0x4e, 0x04, 0x98, 0x1b, 0x87, 0xee, 0xab, 0xd0, 0x4e, /* Byte value: 0xbe */ + 0x51, 0xbe, 0x27, 0x38, 0x27, 0x27, 0xa0, 0x11, 0x64, 0x46, 0xb0, 0x92, 0x6a, 0x98, 0x01, 0x11, /* Byte value: 0xbf */ + 0x95, 0xd0, 0x5d, 0x72, 0x5d, 0x5d, 0x4c, 0xd2, 0x0c, 0x6b, 0x2d, 0x4a, 0xf1, 0x3e, 0xb3, 0xd2, /* Byte value: 0xc0 */ + 0x66, 0xd1, 0x34, 0x3a, 0x34, 0x34, 0x6f, 0xe1, 0xa0, 0xa1, 0x3e, 0x3f, 0x4f, 0x55, 0xb0, 0xe1, /* Byte value: 0xc1 */ + 0xd6, 0x03, 0xbb, 0xd8, 0xbb, 0xbb, 0x65, 0x55, 0x37, 0x9d, 0x35, 0x9f, 0x01, 0xbd, 0x05, 0x55, /* Byte value: 0xc2 */ + 0x5b, 0xc2, 0xd0, 0xe8, 0xd0, 0xd0, 0x7f, 0x02, 0xc5, 0xc1, 0xf8, 0xfc, 0xff, 0x97, 0x85, 0x02, /* Byte value: 0xc3 */ + 0xe3, 0x1f, 0x4d, 0x34, 0x4d, 0x4d, 0x47, 0x74, 0xb9, 0x51, 0x12, 0xfa, 0xb4, 0x73, 0x21, 0x74, /* Byte value: 0xc4 */ + 0x63, 0xef, 0xae, 0x52, 0xae, 0xae, 0xe1, 0x09, 0x11, 0x03, 0x1a, 0x08, 0xe4, 0xb3, 0xf2, 0x09, /* Byte value: 0xc5 */ + 0x9b, 0x4a, 0xa3, 0xbd, 0xa3, 0xa3, 0x8a, 0xa0, 0x39, 0xba, 0xf4, 0x77, 0x87, 0x37, 0xde, 0xa0, /* Byte value: 0xc6 */ + 0xbd, 0xe3, 0x07, 0xb4, 0x07, 0x07, 0xb6, 0x9e, 0xcd, 0x32, 0xce, 0x31, 0xe0, 0x02, 0xe6, 0x9e, /* Byte value: 0xc7 */ + 0x77, 0x17, 0x83, 0x31, 0x83, 0x83, 0x9c, 0x2f, 0x90, 0xce, 0x8a, 0xd4, 0x0d, 0xad, 0x39, 0x2f, /* Byte value: 0xc8 */ + 0x8d, 0xc1, 0x6b, 0x30, 0x6b, 0x6b, 0x1a, 0x57, 0xf2, 0x5c, 0xcd, 0x63, 0xfe, 0x2a, 0x80, 0x57, /* Byte value: 0xc9 */ + 0xbc, 0x3b, 0x94, 0xc3, 0x94, 0x94, 0x21, 0x17, 0xe8, 0xc6, 0x7b, 0x55, 0xa8, 0xe2, 0x4d, 0x17, /* Byte value: 0xca */ + 0xf3, 0x01, 0x69, 0x48, 0x69, 0x69, 0x23, 0x33, 0xac, 0xca, 0x13, 0x75, 0xbe, 0x6b, 0x03, 0x33, /* Byte value: 0xcb */ + 0xf4, 0x4c, 0x16, 0xce, 0x16, 0x16, 0x40, 0x0a, 0x57, 0x43, 0x9e, 0x8a, 0x85, 
0x8e, 0xd4, 0x0a, /* Byte value: 0xcc */ + 0x26, 0xa9, 0xa4, 0x09, 0xa4, 0xa4, 0x3c, 0x3e, 0xf4, 0x88, 0x3a, 0x46, 0x67, 0x35, 0x38, 0x3e, /* Byte value: 0xcd */ + 0xd4, 0x70, 0x5e, 0x36, 0x5e, 0x5e, 0x88, 0x84, 0x7d, 0xb6, 0x9c, 0x57, 0x91, 0xbe, 0x90, 0x84, /* Byte value: 0xce */ + 0x80, 0xf0, 0xe3, 0x66, 0xe3, 0xe3, 0xa6, 0x7d, 0xa8, 0x52, 0x08, 0xf2, 0x50, 0xc0, 0xd3, 0x7d, /* Byte value: 0xcf */ + 0x93, 0x45, 0xb1, 0x83, 0xb1, 0xb1, 0xb8, 0x62, 0xd2, 0x16, 0x15, 0xd1, 0x82, 0x3b, 0xcf, 0x62, /* Byte value: 0xd0 */ + 0xb6, 0x47, 0x63, 0x13, 0x63, 0x63, 0xfe, 0x04, 0x49, 0x41, 0x33, 0x3b, 0x3d, 0xed, 0xc9, 0x04, /* Byte value: 0xd1 */ + 0x15, 0x20, 0xbe, 0x14, 0xbe, 0xbe, 0xea, 0xaf, 0xa4, 0x39, 0x25, 0xb8, 0xa1, 0xfe, 0x60, 0xaf, /* Byte value: 0xd2 */ + 0xcc, 0x61, 0x68, 0x74, 0x68, 0x68, 0xde, 0x01, 0x83, 0x81, 0x7c, 0x7e, 0x9e, 0xaa, 0xa3, 0x01, /* Byte value: 0xd3 */ + 0xac, 0x25, 0xb0, 0xbf, 0xb0, 0xb0, 0x45, 0x50, 0xfd, 0x5d, 0x7a, 0xda, 0xa2, 0xfa, 0x6f, 0x50, /* Byte value: 0xd4 */ + 0xc1, 0x50, 0xe0, 0x22, 0xe0, 0xe0, 0x62, 0x2b, 0xd9, 0x8f, 0xb9, 0xef, 0x30, 0x40, 0xf0, 0x2b, /* Byte value: 0xd5 */ + 0xee, 0x2e, 0xc5, 0x62, 0xc5, 0xc5, 0xfb, 0x5e, 0xe3, 0x5f, 0xd7, 0x6b, 0x1a, 0x99, 0x72, 0x5e, /* Byte value: 0xd6 */ + 0xae, 0x56, 0x55, 0x51, 0x55, 0x55, 0xa8, 0x81, 0xb7, 0x76, 0xd3, 0x12, 0x32, 0xf9, 0xfa, 0x81, /* Byte value: 0xd7 */ + 0x9c, 0x07, 0xdc, 0x3b, 0xdc, 0xdc, 0xe9, 0x99, 0xc2, 0x33, 0x79, 0x88, 0xbc, 0xd2, 0x09, 0x99, /* Byte value: 0xd8 */ + 0xd9, 0x41, 0xd6, 0x60, 0xd6, 0xd6, 0x34, 0xae, 0x27, 0xb8, 0x59, 0xc6, 0x3f, 0x54, 0xc3, 0xae, /* Byte value: 0xd9 */ + 0xa3, 0x67, 0xdd, 0x07, 0xdd, 0xdd, 0x14, 0xab, 0xed, 0x78, 0x16, 0x83, 0x9c, 0x13, 0xa9, 0xab, /* Byte value: 0xda */ + 0x25, 0x02, 0xd2, 0x90, 0xd2, 0xd2, 0x46, 0x66, 0x9b, 0x57, 0x26, 0xea, 0xbf, 0xd6, 0x06, 0x66, /* Byte value: 0xdb */ + 0xeb, 0x10, 0x5f, 0x0a, 0x5f, 0x5f, 0x75, 0xb6, 0x52, 0xfd, 0xf3, 0x5c, 0xb1, 0x7f, 0x30, 0xb6, /* Byte value: 0xdc */ + 0x40, 0x78, 0x90, 0x33, 0x90, 0x90, 0x53, 0xdf, 0x54, 0x29, 0x04, 0x79, 0x28, 0x60, 0x88, 0xdf, /* Byte value: 0xdd */ + 0x48, 0x77, 0x82, 0x0d, 0x82, 0x82, 0x61, 0x1d, 0xbf, 0x85, 0xe5, 0xdf, 0x2d, 0x6c, 0x99, 0x1d, /* Byte value: 0xde */ + 0x6d, 0x75, 0x50, 0x9d, 0x50, 0x50, 0x27, 0x7b, 0x24, 0xd2, 0xc3, 0x35, 0x92, 0xba, 0x9f, 0x7b, /* Byte value: 0xdf */ + 0x28, 0x33, 0x5a, 0xc6, 0x5a, 0x5a, 0xfa, 0x4c, 0xc1, 0x59, 0xe3, 0x7b, 0x11, 0x3c, 0x55, 0x4c, /* Byte value: 0xe0 */ + 0xff, 0xe8, 0x72, 0x69, 0x72, 0x72, 0x08, 0x90, 0xd3, 0x30, 0x63, 0x80, 0x58, 0x61, 0xfb, 0x90, /* Byte value: 0xe1 */ + 0x0a, 0x7c, 0xf7, 0xd0, 0xf7, 0xf7, 0xdf, 0x13, 0xa1, 0x87, 0x48, 0x6e, 0x95, 0x0f, 0x84, 0x13, /* Byte value: 0xe2 */ + 0x4f, 0x3a, 0xfd, 0x8b, 0xfd, 0xfd, 0x02, 0x24, 0x44, 0x0c, 0x68, 0x20, 0x16, 0x89, 0x4e, 0x24, /* Byte value: 0xe3 */ + 0x19, 0xc9, 0xa5, 0x35, 0xa5, 0xa5, 0xc1, 0x0c, 0xdb, 0xc3, 0x55, 0x4d, 0x47, 0xf4, 0x98, 0x0c, /* Byte value: 0xe4 */ + 0x53, 0xcd, 0xc2, 0xd6, 0xc2, 0xc2, 0x4d, 0xc0, 0x2e, 0x6d, 0x19, 0x5a, 0xfa, 0x9b, 0x94, 0xc0, /* Byte value: 0xe5 */ + 0xf5, 0x94, 0x85, 0xb9, 0x85, 0x85, 0xd7, 0x83, 0x72, 0xb7, 0x2b, 0xee, 0xcd, 0x6e, 0x7f, 0x83, /* Byte value: 0xe6 */ + 0xb9, 0x05, 0x0e, 0xab, 0x0e, 0x0e, 0xaf, 0xff, 0x59, 0x64, 0x5f, 0x62, 0x03, 0x04, 0x0f, 0xff, /* Byte value: 0xe7 */ + 0xcb, 0x2c, 0x17, 0xf2, 0x17, 0x17, 0xbd, 0x38, 0x78, 0x08, 0xf1, 0x81, 0xa5, 0x4f, 0x74, 0x38, /* Byte value: 0xe8 */ + 0x45, 0x46, 0x0a, 0x5b, 0x0a, 0x0a, 0xdd, 0x37, 0xe5, 0x8b, 0x20, 0x4e, 0x83, 0x86, 0xca, 0x37, /* Byte value: 0xe9 */ + 
0x57, 0x2b, 0xcb, 0xc9, 0xcb, 0xcb, 0x54, 0xa1, 0xba, 0x3b, 0x88, 0x09, 0x19, 0x9d, 0x7d, 0xa1, /* Byte value: 0xea */ + 0xd7, 0xdb, 0x28, 0xaf, 0x28, 0x28, 0xf2, 0xdc, 0x12, 0x69, 0x80, 0xfb, 0x49, 0x5d, 0xae, 0xdc, /* Byte value: 0xeb */ + 0x68, 0x4b, 0xca, 0xf5, 0xca, 0xca, 0xa9, 0x93, 0x95, 0x70, 0xe7, 0x02, 0x39, 0x5c, 0xdd, 0x93, /* Byte value: 0xec */ + 0x4c, 0x91, 0x8b, 0x12, 0x8b, 0x8b, 0x78, 0x7c, 0x2b, 0xd3, 0x74, 0x8c, 0xce, 0x6a, 0x70, 0x7c, /* Byte value: 0xed */ + 0x20, 0x3c, 0x48, 0xf8, 0x48, 0x48, 0xc8, 0x8e, 0x2a, 0xf5, 0x02, 0xdd, 0x14, 0x30, 0x44, 0x8e, /* Byte value: 0xee */ + 0x4a, 0x04, 0x67, 0xe3, 0x67, 0x67, 0x8c, 0xcc, 0xf5, 0xae, 0x4c, 0x17, 0xbd, 0x6f, 0x0c, 0xcc, /* Byte value: 0xef */ + 0x05, 0x3e, 0x9a, 0x68, 0x9a, 0x9a, 0x8e, 0xe8, 0xb1, 0xa2, 0x24, 0x37, 0xab, 0xe6, 0x42, 0xe8, /* Byte value: 0xf0 */ + 0xc6, 0x1d, 0x9f, 0xa4, 0x9f, 0x9f, 0x01, 0x12, 0x22, 0x06, 0x34, 0x10, 0x0b, 0xa5, 0x27, 0x12, /* Byte value: 0xf1 */ + 0x14, 0xf8, 0x2d, 0x63, 0x2d, 0x2d, 0x7d, 0x26, 0x81, 0xcd, 0x90, 0xdc, 0xe9, 0x1e, 0xcb, 0x26, /* Byte value: 0xf2 */ + 0x9e, 0x74, 0x39, 0xd5, 0x39, 0x39, 0x04, 0x48, 0x88, 0x18, 0xd0, 0x40, 0x2c, 0xd1, 0x9c, 0x48, /* Byte value: 0xf3 */ + 0xd5, 0xa8, 0xcd, 0x41, 0xcd, 0xcd, 0x1f, 0x0d, 0x58, 0x42, 0x29, 0x33, 0xd9, 0x5e, 0x3b, 0x0d, /* Byte value: 0xf4 */ + 0x4d, 0x49, 0x18, 0x65, 0x18, 0x18, 0xef, 0xf5, 0x0e, 0x27, 0xc1, 0xe8, 0x86, 0x8a, 0xdb, 0xf5, /* Byte value: 0xf5 */ + 0xce, 0x12, 0x8d, 0x9a, 0x8d, 0x8d, 0x33, 0xd0, 0xc9, 0xaa, 0xd5, 0xb6, 0x0e, 0xa9, 0x36, 0xd0, /* Byte value: 0xf6 */ + 0x5a, 0x1a, 0x43, 0x9f, 0x43, 0x43, 0xe8, 0x8b, 0xe0, 0x35, 0x4d, 0x98, 0xb7, 0x77, 0x2e, 0x8b, /* Byte value: 0xf7 */ + 0x3f, 0x60, 0x01, 0x3c, 0x01, 0x01, 0xfd, 0x32, 0x2f, 0x4b, 0x6f, 0x0b, 0x20, 0xc1, 0xa0, 0x32, /* Byte value: 0xf8 */ + 0x8a, 0x8c, 0x14, 0xb6, 0x14, 0x14, 0x79, 0x6e, 0x09, 0xd5, 0x40, 0x9c, 0xc5, 0xcf, 0x57, 0x6e, /* Byte value: 0xf9 */ + 0x83, 0x5b, 0x95, 0xff, 0x95, 0x95, 0xdc, 0x25, 0xc7, 0x8d, 0x14, 0x5e, 0x88, 0x23, 0xed, 0x25, /* Byte value: 0xfa */ + 0xc3, 0x23, 0x05, 0xcc, 0x05, 0x05, 0x8f, 0xfa, 0x93, 0xa4, 0x10, 0x27, 0xa0, 0x43, 0x65, 0xfa, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0x94, 0x08, 0xce, 0x05, 0xce, 0xce, 0xdb, 0x5b, 0x29, 0x9f, 0x98, 0x2e, 0xb9, 0xde, 0x18, 0x5b, /* Byte value: 0xfd */ + 0x1f, 0x5c, 0x49, 0xc4, 0x49, 0x49, 0x35, 0xbc, 0x05, 0xbe, 0x6d, 0xd6, 0x34, 0xf1, 0xe4, 0xbc, /* Byte value: 0xfe */ + 0x98, 0xe1, 0xd5, 0x24, 0xd5, 0xd5, 0xf0, 0xf8, 0x56, 0x65, 0xe8, 0xdb, 0x5f, 0xd4, 0xe0, 0xf8, /* Byte value: 0xff */ + + /* Matrix row: 3 */ + 0xab, 0x5f, 0x0f, 0xc5, 0x97, 0x09, 0x7c, 0xc9, 0x42, 0xc5, 0x39, 0xbd, 0xfd, 0x81, 0x87, 0xfe, /* Byte value: 0x00 */ + 0xbb, 0xa5, 0xce, 0xde, 0x05, 0xdd, 0x1c, 0xf8, 0x12, 0xde, 0x45, 0x61, 0xc3, 0xdf, 0xcc, 0x2b, /* Byte value: 0x01 */ + 0x83, 0x15, 0xcf, 0x1f, 0x39, 0x08, 0x8c, 0x52, 0xca, 0x1f, 0xff, 0xa8, 0x9e, 0x12, 0xd5, 0x1c, /* Byte value: 0x02 */ + 0x0d, 0xe5, 0x99, 0x66, 0x2c, 0xc3, 0x2e, 0xd2, 0x39, 0x66, 0x56, 0x24, 0x2b, 0x05, 0x5f, 0x06, /* Byte value: 0x03 */ + 0x3a, 0x3e, 0xb0, 0xfa, 0xbf, 0x2e, 0x9c, 0x05, 0xd2, 0xfa, 0x54, 0x33, 0x2a, 0xb6, 0x28, 0x65, /* Byte value: 0x04 */ + 0x4a, 0x9d, 0xb2, 0xbb, 0xc7, 0x47, 0x7f, 0x92, 0xa1, 0xbb, 0xe3, 0x62, 0x90, 0xef, 0x1a, 0x0b, /* Byte value: 0x05 */ + 0xe8, 0xf8, 0x85, 0x6e, 0xb9, 0xb8, 0x35, 0x14, 0xce, 0x6e, 0x93, 0xcc, 0xa8, 0x9d, 0xa0, 0x57, /* Byte value: 0x06 */ + 0xeb, 0x31, 0x8d, 
0xa9, 0x9a, 0xdf, 0x3f, 0x0d, 0xc1, 0xa9, 0x0a, 0x4b, 0x05, 0x3a, 0x68, 0x2c, /* Byte value: 0x07 */ + 0x8f, 0xb7, 0xef, 0x85, 0xb5, 0x57, 0xa4, 0x36, 0xf6, 0x85, 0xde, 0xf1, 0x6f, 0xcb, 0x73, 0x33, /* Byte value: 0x08 */ + 0xbe, 0x3d, 0xd6, 0x54, 0x60, 0x74, 0x02, 0xd3, 0x03, 0x54, 0x2d, 0x2b, 0xf7, 0xf5, 0x57, 0xa6, /* Byte value: 0x09 */ + 0x0f, 0x6b, 0x28, 0x5d, 0xaf, 0x38, 0x22, 0x7d, 0x33, 0x5d, 0xb8, 0xde, 0x5c, 0x7e, 0x6e, 0x54, /* Byte value: 0x0a */ + 0x6d, 0xbc, 0x5a, 0x3c, 0xc6, 0x7e, 0xad, 0x74, 0x1a, 0x3c, 0x9d, 0xa9, 0xaf, 0x02, 0x26, 0xbd, /* Byte value: 0x0b */ + 0x0a, 0xf3, 0x30, 0xd7, 0xca, 0x91, 0x3c, 0x56, 0x22, 0xd7, 0xd0, 0x94, 0x68, 0x54, 0xf5, 0xd9, /* Byte value: 0x0c */ + 0x29, 0x0d, 0x79, 0x26, 0x0e, 0x9d, 0xf6, 0x2d, 0x8d, 0x26, 0xb1, 0x68, 0xb9, 0x4f, 0xab, 0xcb, /* Byte value: 0x0d */ + 0x17, 0xec, 0x68, 0xaa, 0x74, 0x86, 0x72, 0xb5, 0x4b, 0xaa, 0xfa, 0x6c, 0x7d, 0x0f, 0xe1, 0x0a, /* Byte value: 0x0e */ + 0xb0, 0x11, 0x47, 0xf5, 0x6f, 0xd0, 0x26, 0x18, 0x35, 0xf5, 0xe2, 0x88, 0x71, 0x57, 0xc0, 0xdb, /* Byte value: 0x0f */ + 0xc5, 0x2a, 0x5d, 0x3e, 0x72, 0x10, 0xdb, 0xa4, 0x57, 0x3e, 0x3d, 0x93, 0xff, 0x24, 0x69, 0x38, /* Byte value: 0x10 */ + 0x4c, 0xcc, 0xa2, 0xf6, 0x81, 0x89, 0x6b, 0xa0, 0xbf, 0xf6, 0x12, 0xaf, 0x09, 0x62, 0x49, 0xfd, /* Byte value: 0x11 */ + 0xc3, 0x7b, 0x4d, 0x73, 0x34, 0xde, 0xcf, 0x96, 0x49, 0x73, 0xcc, 0x5e, 0x66, 0xa9, 0x3a, 0xce, /* Byte value: 0x12 */ + 0xb5, 0x89, 0x5f, 0x7f, 0x0a, 0x79, 0x38, 0x33, 0x24, 0x7f, 0x8a, 0xc2, 0x45, 0x7d, 0x5b, 0x56, /* Byte value: 0x13 */ + 0x74, 0x7c, 0xa3, 0x37, 0xbd, 0x5c, 0xfb, 0x0a, 0x67, 0x37, 0xa8, 0x66, 0x54, 0xaf, 0x50, 0xca, /* Byte value: 0x14 */ + 0x94, 0xf9, 0xa7, 0xb5, 0x4d, 0x8e, 0xfe, 0xe7, 0x81, 0xb5, 0x05, 0xc4, 0xe3, 0x1d, 0x34, 0x16, /* Byte value: 0x15 */ + 0x1d, 0x1f, 0x58, 0x7d, 0xbe, 0x17, 0x4e, 0xe3, 0x69, 0x7d, 0x2a, 0xf8, 0x15, 0x5b, 0x14, 0xd3, /* Byte value: 0x16 */ + 0x25, 0xaf, 0x59, 0xbc, 0x82, 0xc2, 0xde, 0x49, 0xb1, 0xbc, 0x90, 0x31, 0x48, 0x96, 0x0d, 0xe4, /* Byte value: 0x17 */ + 0x48, 0x13, 0x03, 0x80, 0x44, 0xbc, 0x73, 0x3d, 0xab, 0x80, 0x0d, 0x98, 0xe7, 0x94, 0x2b, 0x59, /* Byte value: 0x18 */ + 0xda, 0xbb, 0xb4, 0x78, 0x4f, 0xfc, 0x99, 0xe8, 0x34, 0x78, 0xf9, 0x91, 0x9d, 0x04, 0x4c, 0xb9, /* Byte value: 0x19 */ + 0xdd, 0xad, 0x1d, 0xc9, 0xa9, 0xae, 0x8b, 0x6c, 0x2f, 0xc9, 0x7f, 0x21, 0xde, 0x55, 0xe6, 0x66, /* Byte value: 0x1a */ + 0xe9, 0xbf, 0x3c, 0x92, 0x19, 0x24, 0x33, 0xa2, 0xcb, 0x92, 0xe4, 0xb1, 0x72, 0x41, 0x59, 0x7e, /* Byte value: 0x1b */ + 0xea, 0x76, 0x34, 0x55, 0x3a, 0x43, 0x39, 0xbb, 0xc4, 0x55, 0x7d, 0x36, 0xdf, 0xe6, 0x91, 0x05, /* Byte value: 0x1c */ + 0x73, 0x6a, 0x0a, 0x86, 0x5b, 0x0e, 0xe9, 0x8e, 0x7c, 0x86, 0x2e, 0xd6, 0x17, 0xfe, 0xfa, 0x15, /* Byte value: 0x1d */ + 0x2e, 0x1b, 0xd0, 0x97, 0xe8, 0xcf, 0xe4, 0xa9, 0x96, 0x97, 0x37, 0xd8, 0xfa, 0x1e, 0x01, 0x14, /* Byte value: 0x1e */ + 0xf5, 0xe7, 0xdd, 0x13, 0x07, 0xaf, 0x7b, 0xf7, 0xa7, 0x13, 0xb9, 0x34, 0xbd, 0xc6, 0xb4, 0x84, /* Byte value: 0x1f */ + 0x26, 0x66, 0x51, 0x7b, 0xa1, 0xa5, 0xd4, 0x50, 0xbe, 0x7b, 0x09, 0xb6, 0xe5, 0x31, 0xc5, 0x9f, /* Byte value: 0x20 */ + 0xd0, 0x48, 0x84, 0xaf, 0x85, 0x6d, 0xa5, 0xbe, 0x16, 0xaf, 0x29, 0x05, 0xf5, 0x50, 0xb9, 0x60, /* Byte value: 0x21 */ + 0xc4, 0x6d, 0xe4, 0xc2, 0xd2, 0x8c, 0xdd, 0x12, 0x52, 0xc2, 0x4a, 0xee, 0x25, 0xf8, 0x90, 0x11, /* Byte value: 0x22 */ + 0xf3, 0xb6, 0xcd, 0x5e, 0x41, 0x61, 0x6f, 0xc5, 0xb9, 0x5e, 0x48, 0xf9, 0x24, 0x4b, 0xe7, 0x72, /* Byte value: 0x23 */ + 0x7b, 0x17, 0x8b, 0x6a, 0x12, 0x64, 0xd9, 0x77, 0x54, 0x6a, 
0x10, 0xb8, 0x08, 0xd1, 0x3e, 0x9e, /* Byte value: 0x24 */ + 0x4d, 0x8b, 0x1b, 0x0a, 0x21, 0x15, 0x6d, 0x16, 0xba, 0x0a, 0x65, 0xd2, 0xd3, 0xbe, 0xb0, 0xd4, /* Byte value: 0x25 */ + 0x45, 0xf6, 0x9a, 0xe6, 0x68, 0x7f, 0x5d, 0xef, 0x92, 0xe6, 0x5b, 0xbc, 0xcc, 0x91, 0x74, 0x5f, /* Byte value: 0x26 */ + 0xa9, 0xd1, 0xbe, 0xfe, 0x14, 0xf2, 0x70, 0x66, 0x48, 0xfe, 0xd7, 0x47, 0x8a, 0xfa, 0xb6, 0xac, /* Byte value: 0x27 */ + 0x5e, 0xb8, 0xd2, 0xd6, 0x90, 0xa6, 0x07, 0x3e, 0xe5, 0xd6, 0x80, 0x89, 0x40, 0x47, 0x33, 0x7a, /* Byte value: 0x28 */ + 0xd8, 0x35, 0x05, 0x43, 0xcc, 0x07, 0x95, 0x47, 0x3e, 0x43, 0x17, 0x6b, 0xea, 0x7f, 0x7d, 0xeb, /* Byte value: 0x29 */ + 0x7e, 0x8f, 0x93, 0xe0, 0x77, 0xcd, 0xc7, 0x5c, 0x45, 0xe0, 0x78, 0xf2, 0x3c, 0xfb, 0xa5, 0x13, /* Byte value: 0x2a */ + 0xf2, 0xf1, 0x74, 0xa2, 0xe1, 0xfd, 0x69, 0x73, 0xbc, 0xa2, 0x3f, 0x84, 0xfe, 0x97, 0x1e, 0x5b, /* Byte value: 0x2b */ + 0x90, 0x26, 0x06, 0xc3, 0x88, 0xbb, 0xe6, 0x7a, 0x95, 0xc3, 0x1a, 0xf3, 0x0d, 0xeb, 0x56, 0xb2, /* Byte value: 0x2c */ + 0x77, 0xb5, 0xab, 0xf0, 0x9e, 0x3b, 0xf1, 0x13, 0x68, 0xf0, 0x31, 0xe1, 0xf9, 0x08, 0x98, 0xb1, /* Byte value: 0x2d */ + 0x53, 0x5d, 0x4b, 0xb0, 0xbc, 0x65, 0x29, 0xec, 0xdc, 0xb0, 0xd6, 0xad, 0x6b, 0x42, 0x6c, 0x7c, /* Byte value: 0x2e */ + 0xe5, 0x1d, 0x1c, 0x08, 0x95, 0x7b, 0x1b, 0xc6, 0xf7, 0x08, 0xc5, 0xe8, 0x83, 0x98, 0xff, 0x51, /* Byte value: 0x2f */ + 0x49, 0x54, 0xba, 0x7c, 0xe4, 0x20, 0x75, 0x8b, 0xae, 0x7c, 0x7a, 0xe5, 0x3d, 0x48, 0xd2, 0x70, /* Byte value: 0x30 */ + 0x98, 0x5b, 0x87, 0x2f, 0xc1, 0xd1, 0xd6, 0x83, 0xbd, 0x2f, 0x24, 0x9d, 0x12, 0xc4, 0x92, 0x39, /* Byte value: 0x31 */ + 0x13, 0x33, 0xc9, 0xdc, 0xb1, 0xb3, 0x6a, 0x28, 0x5f, 0xdc, 0xe5, 0x5b, 0x93, 0xf9, 0x83, 0xae, /* Byte value: 0x32 */ + 0x43, 0xa7, 0x8a, 0xab, 0x2e, 0xb1, 0x49, 0xdd, 0x8c, 0xab, 0xaa, 0x71, 0x55, 0x1c, 0x27, 0xa9, /* Byte value: 0x33 */ + 0xdc, 0xea, 0xa4, 0x35, 0x09, 0x32, 0x8d, 0xda, 0x2a, 0x35, 0x08, 0x5c, 0x04, 0x89, 0x1f, 0x4f, /* Byte value: 0x34 */ + 0xc7, 0xa4, 0xec, 0x05, 0xf1, 0xeb, 0xd7, 0x0b, 0x5d, 0x05, 0xd3, 0x69, 0x88, 0x5f, 0x58, 0x6a, /* Byte value: 0x35 */ + 0xa0, 0xeb, 0x86, 0xee, 0xfd, 0x04, 0x46, 0x29, 0x65, 0xee, 0x9e, 0x54, 0x4f, 0x09, 0x8b, 0x0e, /* Byte value: 0x36 */ + 0xcc, 0x10, 0x65, 0x2e, 0x9b, 0xe6, 0xed, 0xeb, 0x7a, 0x2e, 0x74, 0x80, 0x3a, 0xd7, 0x54, 0x9a, /* Byte value: 0x37 */ + 0x14, 0x25, 0x60, 0x6d, 0x57, 0xe1, 0x78, 0xac, 0x44, 0x6d, 0x63, 0xeb, 0xd0, 0xa8, 0x29, 0x71, /* Byte value: 0x38 */ + 0x52, 0x1a, 0xf2, 0x4c, 0x1c, 0xf9, 0x2f, 0x5a, 0xd9, 0x4c, 0xa1, 0xd0, 0xb1, 0x9e, 0x95, 0x55, /* Byte value: 0x39 */ + 0x2f, 0x5c, 0x69, 0x6b, 0x48, 0x53, 0xe2, 0x1f, 0x93, 0x6b, 0x40, 0xa5, 0x20, 0xc2, 0xf8, 0x3d, /* Byte value: 0x3a */ + 0x47, 0x78, 0x2b, 0xdd, 0xeb, 0x84, 0x51, 0x40, 0x98, 0xdd, 0xb5, 0x46, 0xbb, 0xea, 0x45, 0x0d, /* Byte value: 0x3b */ + 0xa6, 0xba, 0x96, 0xa3, 0xbb, 0xca, 0x52, 0x1b, 0x7b, 0xa3, 0x6f, 0x99, 0xd6, 0x84, 0xd8, 0xf8, /* Byte value: 0x3c */ + 0x5f, 0xff, 0x6b, 0x2a, 0x30, 0x3a, 0x01, 0x88, 0xe0, 0x2a, 0xf7, 0xf4, 0x9a, 0x9b, 0xca, 0x53, /* Byte value: 0x3d */ + 0xe3, 0x4c, 0x0c, 0x45, 0xd3, 0xb5, 0x0f, 0xf4, 0xe9, 0x45, 0x34, 0x25, 0x1a, 0x15, 0xac, 0xa7, /* Byte value: 0x3e */ + 0xb8, 0x6c, 0xc6, 0x19, 0x26, 0xba, 0x16, 0xe1, 0x1d, 0x19, 0xdc, 0xe6, 0x6e, 0x78, 0x04, 0x50, /* Byte value: 0x3f */ + 0xa7, 0xfd, 0x2f, 0x5f, 0x1b, 0x56, 0x54, 0xad, 0x7e, 0x5f, 0x18, 0xe4, 0x0c, 0x58, 0x21, 0xd1, /* Byte value: 0x40 */ + 0xc0, 0xb2, 0x45, 0xb4, 0x17, 0xb9, 0xc5, 0x8f, 0x46, 0xb4, 0x55, 0xd9, 0xcb, 0x0e, 0xf2, 0xb5, /* Byte 
value: 0x41 */ + 0x23, 0xfe, 0x49, 0xf1, 0xc4, 0x0c, 0xca, 0x7b, 0xaf, 0xf1, 0x61, 0xfc, 0xd1, 0x1b, 0x5e, 0x12, /* Byte value: 0x42 */ + 0x2a, 0xc4, 0x71, 0xe1, 0x2d, 0xfa, 0xfc, 0x34, 0x82, 0xe1, 0x28, 0xef, 0x14, 0xe8, 0x63, 0xb0, /* Byte value: 0x43 */ + 0x62, 0xd7, 0x72, 0x61, 0x69, 0x46, 0x8f, 0x09, 0x29, 0x61, 0x25, 0x77, 0xf3, 0x7c, 0x48, 0xe9, /* Byte value: 0x44 */ + 0x7d, 0x46, 0x9b, 0x27, 0x54, 0xaa, 0xcd, 0x45, 0x4a, 0x27, 0xe1, 0x75, 0x91, 0x5c, 0x6d, 0x68, /* Byte value: 0x45 */ + 0x31, 0x8a, 0x39, 0xd1, 0xd5, 0x23, 0xa6, 0xe5, 0xf5, 0xd1, 0xf3, 0xda, 0x98, 0x3e, 0x24, 0x95, /* Byte value: 0x46 */ + 0x30, 0xcd, 0x80, 0x2d, 0x75, 0xbf, 0xa0, 0x53, 0xf0, 0x2d, 0x84, 0xa7, 0x42, 0xe2, 0xdd, 0xbc, /* Byte value: 0x47 */ + 0xc1, 0xf5, 0xfc, 0x48, 0xb7, 0x25, 0xc3, 0x39, 0x43, 0x48, 0x22, 0xa4, 0x11, 0xd2, 0x0b, 0x9c, /* Byte value: 0x48 */ + 0x20, 0x37, 0x41, 0x36, 0xe7, 0x6b, 0xc0, 0x62, 0xa0, 0x36, 0xf8, 0x7b, 0x7c, 0xbc, 0x96, 0x69, /* Byte value: 0x49 */ + 0x8a, 0x2f, 0xf7, 0x0f, 0xd0, 0xfe, 0xba, 0x1d, 0xe7, 0x0f, 0xb6, 0xbb, 0x5b, 0xe1, 0xe8, 0xbe, /* Byte value: 0x4a */ + 0xd7, 0x5e, 0x2d, 0x1e, 0x63, 0x3f, 0xb7, 0x3a, 0x0d, 0x1e, 0xaf, 0xb5, 0xb6, 0x01, 0x13, 0xbf, /* Byte value: 0x4b */ + 0x3d, 0x28, 0x19, 0x4b, 0x59, 0x7c, 0x8e, 0x81, 0xc9, 0x4b, 0xd2, 0x83, 0x69, 0xe7, 0x82, 0xba, /* Byte value: 0x4c */ + 0xbf, 0x7a, 0x6f, 0xa8, 0xc0, 0xe8, 0x04, 0x65, 0x06, 0xa8, 0x5a, 0x56, 0x2d, 0x29, 0xae, 0x8f, /* Byte value: 0x4d */ + 0xc6, 0xe3, 0x55, 0xf9, 0x51, 0x77, 0xd1, 0xbd, 0x58, 0xf9, 0xa4, 0x14, 0x52, 0x83, 0xa1, 0x43, /* Byte value: 0x4e */ + 0x72, 0x2d, 0xb3, 0x7a, 0xfb, 0x92, 0xef, 0x38, 0x79, 0x7a, 0x59, 0xab, 0xcd, 0x22, 0x03, 0x3c, /* Byte value: 0x4f */ + 0xdb, 0xfc, 0x0d, 0x84, 0xef, 0x60, 0x9f, 0x5e, 0x31, 0x84, 0x8e, 0xec, 0x47, 0xd8, 0xb5, 0x90, /* Byte value: 0x50 */ + 0x15, 0x62, 0xd9, 0x91, 0xf7, 0x7d, 0x7e, 0x1a, 0x41, 0x91, 0x14, 0x96, 0x0a, 0x74, 0xd0, 0x58, /* Byte value: 0x51 */ + 0x99, 0x1c, 0x3e, 0xd3, 0x61, 0x4d, 0xd0, 0x35, 0xb8, 0xd3, 0x53, 0xe0, 0xc8, 0x18, 0x6b, 0x10, /* Byte value: 0x52 */ + 0x4b, 0xda, 0x0b, 0x47, 0x67, 0xdb, 0x79, 0x24, 0xa4, 0x47, 0x94, 0x1f, 0x4a, 0x33, 0xe3, 0x22, /* Byte value: 0x53 */ + 0x34, 0x12, 0x21, 0x5b, 0xb0, 0x8a, 0xb8, 0xce, 0xe4, 0x5b, 0x9b, 0x90, 0xac, 0x14, 0xbf, 0x18, /* Byte value: 0x54 */ + 0x60, 0x59, 0xc3, 0x5a, 0xea, 0xbd, 0x83, 0xa6, 0x23, 0x5a, 0xcb, 0x8d, 0x84, 0x07, 0x79, 0xbb, /* Byte value: 0x55 */ + 0x92, 0xa8, 0xb7, 0xf8, 0x0b, 0x40, 0xea, 0xd5, 0x9f, 0xf8, 0xf4, 0x09, 0x7a, 0x90, 0x67, 0xe0, /* Byte value: 0x56 */ + 0xac, 0x49, 0xa6, 0x74, 0x71, 0x5b, 0x6e, 0x4d, 0x59, 0x74, 0xbf, 0x0d, 0xbe, 0xd0, 0x2d, 0x21, /* Byte value: 0x57 */ + 0x6e, 0x75, 0x52, 0xfb, 0xe5, 0x19, 0xa7, 0x6d, 0x15, 0xfb, 0x04, 0x2e, 0x02, 0xa5, 0xee, 0xc6, /* Byte value: 0x58 */ + 0xa1, 0xac, 0x3f, 0x12, 0x5d, 0x98, 0x40, 0x9f, 0x60, 0x12, 0xe9, 0x29, 0x95, 0xd5, 0x72, 0x27, /* Byte value: 0x59 */ + 0x81, 0x9b, 0x7e, 0x24, 0xba, 0xf3, 0x80, 0xfd, 0xc0, 0x24, 0x11, 0x52, 0xe9, 0x69, 0xe4, 0x4e, /* Byte value: 0x5a */ + 0x3b, 0x79, 0x09, 0x06, 0x1f, 0xb2, 0x9a, 0xb3, 0xd7, 0x06, 0x23, 0x4e, 0xf0, 0x6a, 0xd1, 0x4c, /* Byte value: 0x5b */ + 0x19, 0xc0, 0xf9, 0x0b, 0x7b, 0x22, 0x56, 0x7e, 0x7d, 0x0b, 0x35, 0xcf, 0xfb, 0xad, 0x76, 0x77, /* Byte value: 0x5c */ + 0xe4, 0x5a, 0xa5, 0xf4, 0x35, 0xe7, 0x1d, 0x70, 0xf2, 0xf4, 0xb2, 0x95, 0x59, 0x44, 0x06, 0x78, /* Byte value: 0x5d */ + 0x2c, 0x95, 0x61, 0xac, 0x6b, 0x34, 0xe8, 0x06, 0x9c, 0xac, 0xd9, 0x22, 0x8d, 0x65, 0x30, 0x46, /* Byte value: 0x5e */ + 0xbd, 0xf4, 0xde, 0x93, 
0x43, 0x13, 0x08, 0xca, 0x0c, 0x93, 0xb4, 0xac, 0x5a, 0x52, 0x9f, 0xdd, /* Byte value: 0x5f */ + 0xb6, 0x40, 0x57, 0xb8, 0x29, 0x1e, 0x32, 0x2a, 0x2b, 0xb8, 0x13, 0x45, 0xe8, 0xda, 0x93, 0x2d, /* Byte value: 0x60 */ + 0xd3, 0x81, 0x8c, 0x68, 0xa6, 0x0a, 0xaf, 0xa7, 0x19, 0x68, 0xb0, 0x82, 0x58, 0xf7, 0x71, 0x1b, /* Byte value: 0x61 */ + 0xe7, 0x93, 0xad, 0x33, 0x16, 0x80, 0x17, 0x69, 0xfd, 0x33, 0x2b, 0x12, 0xf4, 0xe3, 0xce, 0x03, /* Byte value: 0x62 */ + 0x9b, 0x92, 0x8f, 0xe8, 0xe2, 0xb6, 0xdc, 0x9a, 0xb2, 0xe8, 0xbd, 0x1a, 0xbf, 0x63, 0x5a, 0x42, /* Byte value: 0x63 */ + 0x8c, 0x7e, 0xe7, 0x42, 0x96, 0x30, 0xae, 0x2f, 0xf9, 0x42, 0x47, 0x76, 0xc2, 0x6c, 0xbb, 0x48, /* Byte value: 0x64 */ + 0x04, 0xdf, 0xa1, 0x76, 0xc5, 0x35, 0x18, 0x9d, 0x14, 0x76, 0x1f, 0x37, 0xee, 0xf6, 0x62, 0xa4, /* Byte value: 0x65 */ + 0x86, 0x8d, 0xd7, 0x95, 0x5c, 0xa1, 0x92, 0x79, 0xdb, 0x95, 0x97, 0xe2, 0xaa, 0x38, 0x4e, 0x91, /* Byte value: 0x66 */ + 0xef, 0xee, 0x2c, 0xdf, 0x5f, 0xea, 0x27, 0x90, 0xd5, 0xdf, 0x15, 0x7c, 0xeb, 0xcc, 0x0a, 0x88, /* Byte value: 0x67 */ + 0x32, 0x43, 0x31, 0x16, 0xf6, 0x44, 0xac, 0xfc, 0xfa, 0x16, 0x6a, 0x5d, 0x35, 0x99, 0xec, 0xee, /* Byte value: 0x68 */ + 0x0b, 0xb4, 0x89, 0x2b, 0x6a, 0x0d, 0x3a, 0xe0, 0x27, 0x2b, 0xa7, 0xe9, 0xb2, 0x88, 0x0c, 0xf0, /* Byte value: 0x69 */ + 0x9e, 0x0a, 0x97, 0x62, 0x87, 0x1f, 0xc2, 0xb1, 0xa3, 0x62, 0xd5, 0x50, 0x8b, 0x49, 0xc1, 0xcf, /* Byte value: 0x6a */ + 0x80, 0xdc, 0xc7, 0xd8, 0x1a, 0x6f, 0x86, 0x4b, 0xc5, 0xd8, 0x66, 0x2f, 0x33, 0xb5, 0x1d, 0x67, /* Byte value: 0x6b */ + 0x1c, 0x58, 0xe1, 0x81, 0x1e, 0x8b, 0x48, 0x55, 0x6c, 0x81, 0x5d, 0x85, 0xcf, 0x87, 0xed, 0xfa, /* Byte value: 0x6c */ + 0xb1, 0x56, 0xfe, 0x09, 0xcf, 0x4c, 0x20, 0xae, 0x30, 0x09, 0x95, 0xf5, 0xab, 0x8b, 0x39, 0xf2, /* Byte value: 0x6d */ + 0xd4, 0x97, 0x25, 0xd9, 0x40, 0x58, 0xbd, 0x23, 0x02, 0xd9, 0x36, 0x32, 0x1b, 0xa6, 0xdb, 0xc4, /* Byte value: 0x6e */ + 0x7a, 0x50, 0x32, 0x96, 0xb2, 0xf8, 0xdf, 0xc1, 0x51, 0x96, 0x67, 0xc5, 0xd2, 0x0d, 0xc7, 0xb7, /* Byte value: 0x6f */ + 0x5b, 0x20, 0xca, 0x5c, 0xf5, 0x0f, 0x19, 0x15, 0xf4, 0x5c, 0xe8, 0xc3, 0x74, 0x6d, 0xa8, 0xf7, /* Byte value: 0x70 */ + 0xa3, 0x22, 0x8e, 0x29, 0xde, 0x63, 0x4c, 0x30, 0x6a, 0x29, 0x07, 0xd3, 0xe2, 0xae, 0x43, 0x75, /* Byte value: 0x71 */ + 0xf9, 0x45, 0xfd, 0x89, 0x8b, 0xf0, 0x53, 0x93, 0x9b, 0x89, 0x98, 0x6d, 0x4c, 0x1f, 0x12, 0xab, /* Byte value: 0x72 */ + 0x33, 0x04, 0x88, 0xea, 0x56, 0xd8, 0xaa, 0x4a, 0xff, 0xea, 0x1d, 0x20, 0xef, 0x45, 0x15, 0xc7, /* Byte value: 0x73 */ + 0x68, 0x24, 0x42, 0xb6, 0xa3, 0xd7, 0xb3, 0x5f, 0x0b, 0xb6, 0xf5, 0xe3, 0x9b, 0x28, 0xbd, 0x30, /* Byte value: 0x74 */ + 0x96, 0x77, 0x16, 0x8e, 0xce, 0x75, 0xf2, 0x48, 0x8b, 0x8e, 0xeb, 0x3e, 0x94, 0x66, 0x05, 0x44, /* Byte value: 0x75 */ + 0x46, 0x3f, 0x92, 0x21, 0x4b, 0x18, 0x57, 0xf6, 0x9d, 0x21, 0xc2, 0x3b, 0x61, 0x36, 0xbc, 0x24, /* Byte value: 0x76 */ + 0x02, 0x8e, 0xb1, 0x3b, 0x83, 0xfb, 0x0c, 0xaf, 0x0a, 0x3b, 0xee, 0xfa, 0x77, 0x7b, 0x31, 0x52, /* Byte value: 0x77 */ + 0xff, 0x14, 0xed, 0xc4, 0xcd, 0x3e, 0x47, 0xa1, 0x85, 0xc4, 0x69, 0xa0, 0xd5, 0x92, 0x41, 0x5d, /* Byte value: 0x78 */ + 0x95, 0xbe, 0x1e, 0x49, 0xed, 0x12, 0xf8, 0x51, 0x84, 0x49, 0x72, 0xb9, 0x39, 0xc1, 0xcd, 0x3f, /* Byte value: 0x79 */ + 0x64, 0x86, 0x62, 0x2c, 0x2f, 0x88, 0x9b, 0x3b, 0x37, 0x2c, 0xd4, 0xba, 0x6a, 0xf1, 0x1b, 0x1f, /* Byte value: 0x7a */ + 0x40, 0x6e, 0x82, 0x6c, 0x0d, 0xd6, 0x43, 0xc4, 0x83, 0x6c, 0x33, 0xf6, 0xf8, 0xbb, 0xef, 0xd2, /* Byte value: 0x7b */ + 0x41, 0x29, 0x3b, 0x90, 0xad, 0x4a, 0x45, 0x72, 0x86, 0x90, 0x44, 
0x8b, 0x22, 0x67, 0x16, 0xfb, /* Byte value: 0x7c */ + 0x16, 0xab, 0xd1, 0x56, 0xd4, 0x1a, 0x74, 0x03, 0x4e, 0x56, 0x8d, 0x11, 0xa7, 0xd3, 0x18, 0x23, /* Byte value: 0x7d */ + 0x0e, 0x2c, 0x91, 0xa1, 0x0f, 0xa4, 0x24, 0xcb, 0x36, 0xa1, 0xcf, 0xa3, 0x86, 0xa2, 0x97, 0x7d, /* Byte value: 0x7e */ + 0xb9, 0x2b, 0x7f, 0xe5, 0x86, 0x26, 0x10, 0x57, 0x18, 0xe5, 0xab, 0x9b, 0xb4, 0xa4, 0xfd, 0x79, /* Byte value: 0x7f */ + 0xfd, 0x9a, 0x5c, 0xff, 0x4e, 0xc5, 0x4b, 0x0e, 0x8f, 0xff, 0x87, 0x5a, 0xa2, 0xe9, 0x70, 0x0f, /* Byte value: 0x80 */ + 0x6f, 0x32, 0xeb, 0x07, 0x45, 0x85, 0xa1, 0xdb, 0x10, 0x07, 0x73, 0x53, 0xd8, 0x79, 0x17, 0xef, /* Byte value: 0x81 */ + 0x91, 0x61, 0xbf, 0x3f, 0x28, 0x27, 0xe0, 0xcc, 0x90, 0x3f, 0x6d, 0x8e, 0xd7, 0x37, 0xaf, 0x9b, /* Byte value: 0x82 */ + 0xa5, 0x73, 0x9e, 0x64, 0x98, 0xad, 0x58, 0x02, 0x74, 0x64, 0xf6, 0x1e, 0x7b, 0x23, 0x10, 0x83, /* Byte value: 0x83 */ + 0xcf, 0xd9, 0x6d, 0xe9, 0xb8, 0x81, 0xe7, 0xf2, 0x75, 0xe9, 0xed, 0x07, 0x97, 0x70, 0x9c, 0xe1, /* Byte value: 0x84 */ + 0x56, 0xc5, 0x53, 0x3a, 0xd9, 0xcc, 0x37, 0xc7, 0xcd, 0x3a, 0xbe, 0xe7, 0x5f, 0x68, 0xf7, 0xf1, /* Byte value: 0x85 */ + 0xba, 0xe2, 0x77, 0x22, 0xa5, 0x41, 0x1a, 0x4e, 0x17, 0x22, 0x32, 0x1c, 0x19, 0x03, 0x35, 0x02, /* Byte value: 0x86 */ + 0x61, 0x1e, 0x7a, 0xa6, 0x4a, 0x21, 0x85, 0x10, 0x26, 0xa6, 0xbc, 0xf0, 0x5e, 0xdb, 0x80, 0x92, /* Byte value: 0x87 */ + 0x18, 0x87, 0x40, 0xf7, 0xdb, 0xbe, 0x50, 0xc8, 0x78, 0xf7, 0x42, 0xb2, 0x21, 0x71, 0x8f, 0x5e, /* Byte value: 0x88 */ + 0x54, 0x4b, 0xe2, 0x01, 0x5a, 0x37, 0x3b, 0x68, 0xc7, 0x01, 0x50, 0x1d, 0x28, 0x13, 0xc6, 0xa3, /* Byte value: 0x89 */ + 0x8b, 0x68, 0x4e, 0xf3, 0x70, 0x62, 0xbc, 0xab, 0xe2, 0xf3, 0xc1, 0xc6, 0x81, 0x3d, 0x11, 0x97, /* Byte value: 0x8a */ + 0x3e, 0xe1, 0x11, 0x8c, 0x7a, 0x1b, 0x84, 0x98, 0xc6, 0x8c, 0x4b, 0x04, 0xc4, 0x40, 0x4a, 0xc1, /* Byte value: 0x8b */ + 0x5a, 0x67, 0x73, 0xa0, 0x55, 0x93, 0x1f, 0xa3, 0xf1, 0xa0, 0x9f, 0xbe, 0xae, 0xb1, 0x51, 0xde, /* Byte value: 0x8c */ + 0xce, 0x9e, 0xd4, 0x15, 0x18, 0x1d, 0xe1, 0x44, 0x70, 0x15, 0x9a, 0x7a, 0x4d, 0xac, 0x65, 0xc8, /* Byte value: 0x8d */ + 0xf7, 0x69, 0x6c, 0x28, 0x84, 0x54, 0x77, 0x58, 0xad, 0x28, 0x57, 0xce, 0xca, 0xbd, 0x85, 0xd6, /* Byte value: 0x8e */ + 0xd2, 0xc6, 0x35, 0x94, 0x06, 0x96, 0xa9, 0x11, 0x1c, 0x94, 0xc7, 0xff, 0x82, 0x2b, 0x88, 0x32, /* Byte value: 0x8f */ + 0x82, 0x52, 0x76, 0xe3, 0x99, 0x94, 0x8a, 0xe4, 0xcf, 0xe3, 0x88, 0xd5, 0x44, 0xce, 0x2c, 0x35, /* Byte value: 0x90 */ + 0xb3, 0xd8, 0x4f, 0x32, 0x4c, 0xb7, 0x2c, 0x01, 0x3a, 0x32, 0x7b, 0x0f, 0xdc, 0xf0, 0x08, 0xa0, /* Byte value: 0x91 */ + 0xb4, 0xce, 0xe6, 0x83, 0xaa, 0xe5, 0x3e, 0x85, 0x21, 0x83, 0xfd, 0xbf, 0x9f, 0xa1, 0xa2, 0x7f, /* Byte value: 0x92 */ + 0xd6, 0x19, 0x94, 0xe2, 0xc3, 0xa3, 0xb1, 0x8c, 0x08, 0xe2, 0xd8, 0xc8, 0x6c, 0xdd, 0xea, 0x96, /* Byte value: 0x93 */ + 0xe1, 0xc2, 0xbd, 0x7e, 0x50, 0x4e, 0x03, 0x5b, 0xe3, 0x7e, 0xda, 0xdf, 0x6d, 0x6e, 0x9d, 0xf5, /* Byte value: 0x94 */ + 0xc2, 0x3c, 0xf4, 0x8f, 0x94, 0x42, 0xc9, 0x20, 0x4c, 0x8f, 0xbb, 0x23, 0xbc, 0x75, 0xc3, 0xe7, /* Byte value: 0x95 */ + 0x5d, 0x71, 0xda, 0x11, 0xb3, 0xc1, 0x0d, 0x27, 0xea, 0x11, 0x19, 0x0e, 0xed, 0xe0, 0xfb, 0x01, /* Byte value: 0x96 */ + 0xfa, 0x8c, 0xf5, 0x4e, 0xa8, 0x97, 0x59, 0x8a, 0x94, 0x4e, 0x01, 0xea, 0xe1, 0xb8, 0xda, 0xd0, /* Byte value: 0x97 */ + 0xa4, 0x34, 0x27, 0x98, 0x38, 0x31, 0x5e, 0xb4, 0x71, 0x98, 0x81, 0x63, 0xa1, 0xff, 0xe9, 0xaa, /* Byte value: 0x98 */ + 0x1f, 0x91, 0xe9, 0x46, 0x3d, 0xec, 0x42, 0x4c, 0x63, 0x46, 0xc4, 0x02, 0x62, 0x20, 0x25, 0x81, /* Byte value: 
0x99 */ + 0x0c, 0xa2, 0x20, 0x9a, 0x8c, 0x5f, 0x28, 0x64, 0x3c, 0x9a, 0x21, 0x59, 0xf1, 0xd9, 0xa6, 0x2f, /* Byte value: 0x9a */ + 0x01, 0x47, 0xb9, 0xfc, 0xa0, 0x9c, 0x06, 0xb6, 0x05, 0xfc, 0x77, 0x7d, 0xda, 0xdc, 0xf9, 0x29, /* Byte value: 0x9b */ + 0x2d, 0xd2, 0xd8, 0x50, 0xcb, 0xa8, 0xee, 0xb0, 0x99, 0x50, 0xae, 0x5f, 0x57, 0xb9, 0xc9, 0x6f, /* Byte value: 0x9c */ + 0x67, 0x4f, 0x6a, 0xeb, 0x0c, 0xef, 0x91, 0x22, 0x38, 0xeb, 0x4d, 0x3d, 0xc7, 0x56, 0xd3, 0x64, /* Byte value: 0x9d */ + 0xe2, 0x0b, 0xb5, 0xb9, 0x73, 0x29, 0x09, 0x42, 0xec, 0xb9, 0x43, 0x58, 0xc0, 0xc9, 0x55, 0x8e, /* Byte value: 0x9e */ + 0x09, 0x3a, 0x38, 0x10, 0xe9, 0xf6, 0x36, 0x4f, 0x2d, 0x10, 0x49, 0x13, 0xc5, 0xf3, 0x3d, 0xa2, /* Byte value: 0x9f */ + 0x57, 0x82, 0xea, 0xc6, 0x79, 0x50, 0x31, 0x71, 0xc8, 0xc6, 0xc9, 0x9a, 0x85, 0xb4, 0x0e, 0xd8, /* Byte value: 0xa0 */ + 0x11, 0xbd, 0x78, 0xe7, 0x32, 0x48, 0x66, 0x87, 0x55, 0xe7, 0x0b, 0xa1, 0xe4, 0x82, 0xb2, 0xfc, /* Byte value: 0xa1 */ + 0xb7, 0x07, 0xee, 0x44, 0x89, 0x82, 0x34, 0x9c, 0x2e, 0x44, 0x64, 0x38, 0x32, 0x06, 0x6a, 0x04, /* Byte value: 0xa2 */ + 0xdf, 0x23, 0xac, 0xf2, 0x2a, 0x55, 0x87, 0xc3, 0x25, 0xf2, 0x91, 0xdb, 0xa9, 0x2e, 0xd7, 0x34, /* Byte value: 0xa3 */ + 0x6a, 0xaa, 0xf3, 0x8d, 0x20, 0x2c, 0xbf, 0xf0, 0x01, 0x8d, 0x1b, 0x19, 0xec, 0x53, 0x8c, 0x62, /* Byte value: 0xa4 */ + 0x93, 0xef, 0x0e, 0x04, 0xab, 0xdc, 0xec, 0x63, 0x9a, 0x04, 0x83, 0x74, 0xa0, 0x4c, 0x9e, 0xc9, /* Byte value: 0xa5 */ + 0x24, 0xe8, 0xe0, 0x40, 0x22, 0x5e, 0xd8, 0xff, 0xb4, 0x40, 0xe7, 0x4c, 0x92, 0x4a, 0xf4, 0xcd, /* Byte value: 0xa6 */ + 0x7f, 0xc8, 0x2a, 0x1c, 0xd7, 0x51, 0xc1, 0xea, 0x40, 0x1c, 0x0f, 0x8f, 0xe6, 0x27, 0x5c, 0x3a, /* Byte value: 0xa7 */ + 0x66, 0x08, 0xd3, 0x17, 0xac, 0x73, 0x97, 0x94, 0x3d, 0x17, 0x3a, 0x40, 0x1d, 0x8a, 0x2a, 0x4d, /* Byte value: 0xa8 */ + 0xfe, 0x53, 0x54, 0x38, 0x6d, 0xa2, 0x41, 0x17, 0x80, 0x38, 0x1e, 0xdd, 0x0f, 0x4e, 0xb8, 0x74, /* Byte value: 0xa9 */ + 0x07, 0x16, 0xa9, 0xb1, 0xe6, 0x52, 0x12, 0x84, 0x1b, 0xb1, 0x86, 0xb0, 0x43, 0x51, 0xaa, 0xdf, /* Byte value: 0xaa */ + 0x44, 0xb1, 0x23, 0x1a, 0xc8, 0xe3, 0x5b, 0x59, 0x97, 0x1a, 0x2c, 0xc1, 0x16, 0x4d, 0x8d, 0x76, /* Byte value: 0xab */ + 0x3f, 0xa6, 0xa8, 0x70, 0xda, 0x87, 0x82, 0x2e, 0xc3, 0x70, 0x3c, 0x79, 0x1e, 0x9c, 0xb3, 0xe8, /* Byte value: 0xac */ + 0xf8, 0x02, 0x44, 0x75, 0x2b, 0x6c, 0x55, 0x25, 0x9e, 0x75, 0xef, 0x10, 0x96, 0xc3, 0xeb, 0x82, /* Byte value: 0xad */ + 0x06, 0x51, 0x10, 0x4d, 0x46, 0xce, 0x14, 0x32, 0x1e, 0x4d, 0xf1, 0xcd, 0x99, 0x8d, 0x53, 0xf6, /* Byte value: 0xae */ + 0xca, 0x41, 0x75, 0x63, 0xdd, 0x28, 0xf9, 0xd9, 0x64, 0x63, 0x85, 0x4d, 0xa3, 0x5a, 0x07, 0x6c, /* Byte value: 0xaf */ + 0xad, 0x0e, 0x1f, 0x88, 0xd1, 0xc7, 0x68, 0xfb, 0x5c, 0x88, 0xc8, 0x70, 0x64, 0x0c, 0xd4, 0x08, /* Byte value: 0xb0 */ + 0x2b, 0x83, 0xc8, 0x1d, 0x8d, 0x66, 0xfa, 0x82, 0x87, 0x1d, 0x5f, 0x92, 0xce, 0x34, 0x9a, 0x99, /* Byte value: 0xb1 */ + 0xae, 0xc7, 0x17, 0x4f, 0xf2, 0xa0, 0x62, 0xe2, 0x53, 0x4f, 0x51, 0xf7, 0xc9, 0xab, 0x1c, 0x73, /* Byte value: 0xb2 */ + 0x22, 0xb9, 0xf0, 0x0d, 0x64, 0x90, 0xcc, 0xcd, 0xaa, 0x0d, 0x16, 0x81, 0x0b, 0xc7, 0xa7, 0x3b, /* Byte value: 0xb3 */ + 0x39, 0xf7, 0xb8, 0x3d, 0x9c, 0x49, 0x96, 0x1c, 0xdd, 0x3d, 0xcd, 0xb4, 0x87, 0x11, 0xe0, 0x1e, /* Byte value: 0xb4 */ + 0xde, 0x64, 0x15, 0x0e, 0x8a, 0xc9, 0x81, 0x75, 0x20, 0x0e, 0xe6, 0xa6, 0x73, 0xf2, 0x2e, 0x1d, /* Byte value: 0xb5 */ + 0x1e, 0xd6, 0x50, 0xba, 0x9d, 0x70, 0x44, 0xfa, 0x66, 0xba, 0xb3, 0x7f, 0xb8, 0xfc, 0xdc, 0xa8, /* Byte value: 0xb6 */ + 0xa8, 0x96, 0x07, 0x02, 0xb4, 0x6e, 
0x76, 0xd0, 0x4d, 0x02, 0xa0, 0x3a, 0x50, 0x26, 0x4f, 0x85, /* Byte value: 0xb7 */ + 0x58, 0xe9, 0xc2, 0x9b, 0xd6, 0x68, 0x13, 0x0c, 0xfb, 0x9b, 0x71, 0x44, 0xd9, 0xca, 0x60, 0x8c, /* Byte value: 0xb8 */ + 0x88, 0xa1, 0x46, 0x34, 0x53, 0x05, 0xb6, 0xb2, 0xed, 0x34, 0x58, 0x41, 0x2c, 0x9a, 0xd9, 0xec, /* Byte value: 0xb9 */ + 0x9a, 0xd5, 0x36, 0x14, 0x42, 0x2a, 0xda, 0x2c, 0xb7, 0x14, 0xca, 0x67, 0x65, 0xbf, 0xa3, 0x6b, /* Byte value: 0xba */ + 0x69, 0x63, 0xfb, 0x4a, 0x03, 0x4b, 0xb5, 0xe9, 0x0e, 0x4a, 0x82, 0x9e, 0x41, 0xf4, 0x44, 0x19, /* Byte value: 0xbb */ + 0x28, 0x4a, 0xc0, 0xda, 0xae, 0x01, 0xf0, 0x9b, 0x88, 0xda, 0xc6, 0x15, 0x63, 0x93, 0x52, 0xe2, /* Byte value: 0xbc */ + 0x65, 0xc1, 0xdb, 0xd0, 0x8f, 0x14, 0x9d, 0x8d, 0x32, 0xd0, 0xa3, 0xc7, 0xb0, 0x2d, 0xe2, 0x36, /* Byte value: 0xbd */ + 0xd5, 0xd0, 0x9c, 0x25, 0xe0, 0xc4, 0xbb, 0x95, 0x07, 0x25, 0x41, 0x4f, 0xc1, 0x7a, 0x22, 0xed, /* Byte value: 0xbe */ + 0x7c, 0x01, 0x22, 0xdb, 0xf4, 0x36, 0xcb, 0xf3, 0x4f, 0xdb, 0x96, 0x08, 0x4b, 0x80, 0x94, 0x41, /* Byte value: 0xbf */ + 0xbc, 0xb3, 0x67, 0x6f, 0xe3, 0x8f, 0x0e, 0x7c, 0x09, 0x6f, 0xc3, 0xd1, 0x80, 0x8e, 0x66, 0xf4, /* Byte value: 0xc0 */ + 0x38, 0xb0, 0x01, 0xc1, 0x3c, 0xd5, 0x90, 0xaa, 0xd8, 0xc1, 0xba, 0xc9, 0x5d, 0xcd, 0x19, 0x37, /* Byte value: 0xc1 */ + 0x4f, 0x05, 0xaa, 0x31, 0xa2, 0xee, 0x61, 0xb9, 0xb0, 0x31, 0x8b, 0x28, 0xa4, 0xc5, 0x81, 0x86, /* Byte value: 0xc2 */ + 0xe0, 0x85, 0x04, 0x82, 0xf0, 0xd2, 0x05, 0xed, 0xe6, 0x82, 0xad, 0xa2, 0xb7, 0xb2, 0x64, 0xdc, /* Byte value: 0xc3 */ + 0x27, 0x21, 0xe8, 0x87, 0x01, 0x39, 0xd2, 0xe6, 0xbb, 0x87, 0x7e, 0xcb, 0x3f, 0xed, 0x3c, 0xb6, /* Byte value: 0xc4 */ + 0x76, 0xf2, 0x12, 0x0c, 0x3e, 0xa7, 0xf7, 0xa5, 0x6d, 0x0c, 0x46, 0x9c, 0x23, 0xd4, 0x61, 0x98, /* Byte value: 0xc5 */ + 0x78, 0xde, 0x83, 0xad, 0x31, 0x03, 0xd3, 0x6e, 0x5b, 0xad, 0x89, 0x3f, 0xa5, 0x76, 0xf6, 0xe5, /* Byte value: 0xc6 */ + 0x89, 0xe6, 0xff, 0xc8, 0xf3, 0x99, 0xb0, 0x04, 0xe8, 0xc8, 0x2f, 0x3c, 0xf6, 0x46, 0x20, 0xc5, /* Byte value: 0xc7 */ + 0x8d, 0x39, 0x5e, 0xbe, 0x36, 0xac, 0xa8, 0x99, 0xfc, 0xbe, 0x30, 0x0b, 0x18, 0xb0, 0x42, 0x61, /* Byte value: 0xc8 */ + 0xaf, 0x80, 0xae, 0xb3, 0x52, 0x3c, 0x64, 0x54, 0x56, 0xb3, 0x26, 0x8a, 0x13, 0x77, 0xe5, 0x5a, /* Byte value: 0xc9 */ + 0x9f, 0x4d, 0x2e, 0x9e, 0x27, 0x83, 0xc4, 0x07, 0xa6, 0x9e, 0xa2, 0x2d, 0x51, 0x95, 0x38, 0xe6, /* Byte value: 0xca */ + 0x84, 0x03, 0x66, 0xae, 0xdf, 0x5a, 0x9e, 0xd6, 0xd1, 0xae, 0x79, 0x18, 0xdd, 0x43, 0x7f, 0xc3, /* Byte value: 0xcb */ + 0xe6, 0xd4, 0x14, 0xcf, 0xb6, 0x1c, 0x11, 0xdf, 0xf8, 0xcf, 0x5c, 0x6f, 0x2e, 0x3f, 0x37, 0x2a, /* Byte value: 0xcc */ + 0xf1, 0x38, 0x7c, 0x65, 0xc2, 0x9a, 0x63, 0x6a, 0xb3, 0x65, 0xa6, 0x03, 0x53, 0x30, 0xd6, 0x20, /* Byte value: 0xcd */ + 0x63, 0x90, 0xcb, 0x9d, 0xc9, 0xda, 0x89, 0xbf, 0x2c, 0x9d, 0x52, 0x0a, 0x29, 0xa0, 0xb1, 0xc0, /* Byte value: 0xce */ + 0x51, 0xd3, 0xfa, 0x8b, 0x3f, 0x9e, 0x25, 0x43, 0xd6, 0x8b, 0x38, 0x57, 0x1c, 0x39, 0x5d, 0x2e, /* Byte value: 0xcf */ + 0xc8, 0xcf, 0xc4, 0x58, 0x5e, 0xd3, 0xf5, 0x76, 0x6e, 0x58, 0x6b, 0xb7, 0xd4, 0x21, 0x36, 0x3e, /* Byte value: 0xd0 */ + 0x03, 0xc9, 0x08, 0xc7, 0x23, 0x67, 0x0a, 0x19, 0x0f, 0xc7, 0x99, 0x87, 0xad, 0xa7, 0xc8, 0x7b, /* Byte value: 0xd1 */ + 0xed, 0x60, 0x9d, 0xe4, 0xdc, 0x11, 0x2b, 0x3f, 0xdf, 0xe4, 0xfb, 0x86, 0x9c, 0xb7, 0x3b, 0xda, /* Byte value: 0xd2 */ + 0x70, 0xa3, 0x02, 0x41, 0x78, 0x69, 0xe3, 0x97, 0x73, 0x41, 0xb7, 0x51, 0xba, 0x59, 0x32, 0x6e, /* Byte value: 0xd3 */ + 0x3c, 0x6f, 0xa0, 0xb7, 0xf9, 0xe0, 0x88, 0x37, 0xcc, 0xb7, 0xa5, 0xfe, 0xb3, 
0x3b, 0x7b, 0x93, /* Byte value: 0xd4 */ + 0x8e, 0xf0, 0x56, 0x79, 0x15, 0xcb, 0xa2, 0x80, 0xf3, 0x79, 0xa9, 0x8c, 0xb5, 0x17, 0x8a, 0x1a, /* Byte value: 0xd5 */ + 0xd9, 0x72, 0xbc, 0xbf, 0x6c, 0x9b, 0x93, 0xf1, 0x3b, 0xbf, 0x60, 0x16, 0x30, 0xa3, 0x84, 0xc2, /* Byte value: 0xd6 */ + 0x10, 0xfa, 0xc1, 0x1b, 0x92, 0xd4, 0x60, 0x31, 0x50, 0x1b, 0x7c, 0xdc, 0x3e, 0x5e, 0x4b, 0xd5, /* Byte value: 0xd7 */ + 0x1a, 0x09, 0xf1, 0xcc, 0x58, 0x45, 0x5c, 0x67, 0x72, 0xcc, 0xac, 0x48, 0x56, 0x0a, 0xbe, 0x0c, /* Byte value: 0xd8 */ + 0x9d, 0xc3, 0x9f, 0xa5, 0xa4, 0x78, 0xc8, 0xa8, 0xac, 0xa5, 0x4c, 0xd7, 0x26, 0xee, 0x09, 0xb4, /* Byte value: 0xd9 */ + 0xee, 0xa9, 0x95, 0x23, 0xff, 0x76, 0x21, 0x26, 0xd0, 0x23, 0x62, 0x01, 0x31, 0x10, 0xf3, 0xa1, /* Byte value: 0xda */ + 0xcb, 0x06, 0xcc, 0x9f, 0x7d, 0xb4, 0xff, 0x6f, 0x61, 0x9f, 0xf2, 0x30, 0x79, 0x86, 0xfe, 0x45, /* Byte value: 0xdb */ + 0x97, 0x30, 0xaf, 0x72, 0x6e, 0xe9, 0xf4, 0xfe, 0x8e, 0x72, 0x9c, 0x43, 0x4e, 0xba, 0xfc, 0x6d, /* Byte value: 0xdc */ + 0xc9, 0x88, 0x7d, 0xa4, 0xfe, 0x4f, 0xf3, 0xc0, 0x6b, 0xa4, 0x1c, 0xca, 0x0e, 0xfd, 0xcf, 0x17, /* Byte value: 0xdd */ + 0x79, 0x99, 0x3a, 0x51, 0x91, 0x9f, 0xd5, 0xd8, 0x5e, 0x51, 0xfe, 0x42, 0x7f, 0xaa, 0x0f, 0xcc, /* Byte value: 0xde */ + 0xb2, 0x9f, 0xf6, 0xce, 0xec, 0x2b, 0x2a, 0xb7, 0x3f, 0xce, 0x0c, 0x72, 0x06, 0x2c, 0xf1, 0x89, /* Byte value: 0xdf */ + 0x35, 0x55, 0x98, 0xa7, 0x10, 0x16, 0xbe, 0x78, 0xe1, 0xa7, 0xec, 0xed, 0x76, 0xc8, 0x46, 0x31, /* Byte value: 0xe0 */ + 0x6c, 0xfb, 0xe3, 0xc0, 0x66, 0xe2, 0xab, 0xc2, 0x1f, 0xc0, 0xea, 0xd4, 0x75, 0xde, 0xdf, 0x94, /* Byte value: 0xe1 */ + 0x9c, 0x84, 0x26, 0x59, 0x04, 0xe4, 0xce, 0x1e, 0xa9, 0x59, 0x3b, 0xaa, 0xfc, 0x32, 0xf0, 0x9d, /* Byte value: 0xe2 */ + 0x1b, 0x4e, 0x48, 0x30, 0xf8, 0xd9, 0x5a, 0xd1, 0x77, 0x30, 0xdb, 0x35, 0x8c, 0xd6, 0x47, 0x25, /* Byte value: 0xe3 */ + 0x05, 0x98, 0x18, 0x8a, 0x65, 0xa9, 0x1e, 0x2b, 0x11, 0x8a, 0x68, 0x4a, 0x34, 0x2a, 0x9b, 0x8d, /* Byte value: 0xe4 */ + 0x50, 0x94, 0x43, 0x77, 0x9f, 0x02, 0x23, 0xf5, 0xd3, 0x77, 0x4f, 0x2a, 0xc6, 0xe5, 0xa4, 0x07, /* Byte value: 0xe5 */ + 0xf0, 0x7f, 0xc5, 0x99, 0x62, 0x06, 0x65, 0xdc, 0xb6, 0x99, 0xd1, 0x7e, 0x89, 0xec, 0x2f, 0x09, /* Byte value: 0xe6 */ + 0xd1, 0x0f, 0x3d, 0x53, 0x25, 0xf1, 0xa3, 0x08, 0x13, 0x53, 0x5e, 0x78, 0x2f, 0x8c, 0x40, 0x49, /* Byte value: 0xe7 */ + 0x12, 0x74, 0x70, 0x20, 0x11, 0x2f, 0x6c, 0x9e, 0x5a, 0x20, 0x92, 0x26, 0x49, 0x25, 0x7a, 0x87, /* Byte value: 0xe8 */ + 0x87, 0xca, 0x6e, 0x69, 0xfc, 0x3d, 0x94, 0xcf, 0xde, 0x69, 0xe0, 0x9f, 0x70, 0xe4, 0xb7, 0xb8, /* Byte value: 0xe9 */ + 0x08, 0x7d, 0x81, 0xec, 0x49, 0x6a, 0x30, 0xf9, 0x28, 0xec, 0x3e, 0x6e, 0x1f, 0x2f, 0xc4, 0x8b, /* Byte value: 0xea */ + 0x59, 0xae, 0x7b, 0x67, 0x76, 0xf4, 0x15, 0xba, 0xfe, 0x67, 0x06, 0x39, 0x03, 0x16, 0x99, 0xa5, /* Byte value: 0xeb */ + 0xfc, 0xdd, 0xe5, 0x03, 0xee, 0x59, 0x4d, 0xb8, 0x8a, 0x03, 0xf0, 0x27, 0x78, 0x35, 0x89, 0x26, /* Byte value: 0xec */ + 0x21, 0x70, 0xf8, 0xca, 0x47, 0xf7, 0xc6, 0xd4, 0xa5, 0xca, 0x8f, 0x06, 0xa6, 0x60, 0x6f, 0x40, /* Byte value: 0xed */ + 0x85, 0x44, 0xdf, 0x52, 0x7f, 0xc6, 0x98, 0x60, 0xd4, 0x52, 0x0e, 0x65, 0x07, 0x9f, 0x86, 0xea, /* Byte value: 0xee */ + 0x55, 0x0c, 0x5b, 0xfd, 0xfa, 0xab, 0x3d, 0xde, 0xc2, 0xfd, 0x27, 0x60, 0xf2, 0xcf, 0x3f, 0x8a, /* Byte value: 0xef */ + 0x4e, 0x42, 0x13, 0xcd, 0x02, 0x72, 0x67, 0x0f, 0xb5, 0xcd, 0xfc, 0x55, 0x7e, 0x19, 0x78, 0xaf, /* Byte value: 0xf0 */ + 0xec, 0x27, 0x24, 0x18, 0x7c, 0x8d, 0x2d, 0x89, 0xda, 0x18, 0x8c, 0xfb, 0x46, 0x6b, 0xc2, 0xf3, /* Byte value: 0xf1 */ + 
0xfb, 0xcb, 0x4c, 0xb2, 0x08, 0x0b, 0x5f, 0x3c, 0x91, 0xb2, 0x76, 0x97, 0x3b, 0x64, 0x23, 0xf9, /* Byte value: 0xf2 */ + 0x36, 0x9c, 0x90, 0x60, 0x33, 0x71, 0xb4, 0x61, 0xee, 0x60, 0x75, 0x6a, 0xdb, 0x6f, 0x8e, 0x4a, /* Byte value: 0xf3 */ + 0x75, 0x3b, 0x1a, 0xcb, 0x1d, 0xc0, 0xfd, 0xbc, 0x62, 0xcb, 0xdf, 0x1b, 0x8e, 0x73, 0xa9, 0xe3, /* Byte value: 0xf4 */ + 0x37, 0xdb, 0x29, 0x9c, 0x93, 0xed, 0xb2, 0xd7, 0xeb, 0x9c, 0x02, 0x17, 0x01, 0xb3, 0x77, 0x63, /* Byte value: 0xf5 */ + 0x5c, 0x36, 0x63, 0xed, 0x13, 0x5d, 0x0b, 0x91, 0xef, 0xed, 0x6e, 0x73, 0x37, 0x3c, 0x02, 0x28, /* Byte value: 0xf6 */ + 0xf6, 0x2e, 0xd5, 0xd4, 0x24, 0xc8, 0x71, 0xee, 0xa8, 0xd4, 0x20, 0xb3, 0x10, 0x61, 0x7c, 0xff, /* Byte value: 0xf7 */ + 0xf4, 0xa0, 0x64, 0xef, 0xa7, 0x33, 0x7d, 0x41, 0xa2, 0xef, 0xce, 0x49, 0x67, 0x1a, 0x4d, 0xad, /* Byte value: 0xf8 */ + 0xcd, 0x57, 0xdc, 0xd2, 0x3b, 0x7a, 0xeb, 0x5d, 0x7f, 0xd2, 0x03, 0xfd, 0xe0, 0x0b, 0xad, 0xb3, /* Byte value: 0xf9 */ + 0x6b, 0xed, 0x4a, 0x71, 0x80, 0xb0, 0xb9, 0x46, 0x04, 0x71, 0x6c, 0x64, 0x36, 0x8f, 0x75, 0x4b, /* Byte value: 0xfa */ + 0xa2, 0x65, 0x37, 0xd5, 0x7e, 0xff, 0x4a, 0x86, 0x6f, 0xd5, 0x70, 0xae, 0x38, 0x72, 0xba, 0x5c, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0xaa, 0x18, 0xb6, 0x39, 0x37, 0x95, 0x7a, 0x7f, 0x47, 0x39, 0x4e, 0xc0, 0x27, 0x5d, 0x7e, 0xd7, /* Byte value: 0xfd */ + 0x71, 0xe4, 0xbb, 0xbd, 0xd8, 0xf5, 0xe5, 0x21, 0x76, 0xbd, 0xc0, 0x2c, 0x60, 0x85, 0xcb, 0x47, /* Byte value: 0xfe */ + 0x42, 0xe0, 0x33, 0x57, 0x8e, 0x2d, 0x4f, 0x6b, 0x89, 0x57, 0xdd, 0x0c, 0x8f, 0xc0, 0xde, 0x80, /* Byte value: 0xff */ + + /* Matrix row: 4 */ + 0xc2, 0x36, 0xbf, 0xb8, 0xb8, 0xed, 0xf8, 0xa5, 0xff, 0x6a, 0x2d, 0xa5, 0x4a, 0x84, 0x52, 0x5c, /* Byte value: 0x00 */ + 0x95, 0x8b, 0xd1, 0x9a, 0x9a, 0x0a, 0x38, 0x2d, 0x73, 0xc6, 0x2f, 0x2d, 0x97, 0x24, 0xdb, 0x4d, /* Byte value: 0x01 */ + 0xa6, 0x30, 0x54, 0xed, 0xed, 0x72, 0xdb, 0x32, 0x62, 0xa7, 0x28, 0x32, 0xbc, 0x57, 0x26, 0x97, /* Byte value: 0x02 */ + 0x27, 0xcf, 0x12, 0x53, 0x53, 0x58, 0x5c, 0x8f, 0x1d, 0x07, 0x49, 0x8f, 0xe1, 0x72, 0x4a, 0xc8, /* Byte value: 0x03 */ + 0xe0, 0xe4, 0xf8, 0xe2, 0xe2, 0xbd, 0xfb, 0x0e, 0xe1, 0x95, 0x96, 0x0e, 0x78, 0x67, 0x45, 0x71, /* Byte value: 0x04 */ + 0x86, 0x51, 0x31, 0x0c, 0x0c, 0x4d, 0xfe, 0x30, 0xc3, 0x57, 0x98, 0x30, 0x2e, 0x81, 0x7c, 0x06, /* Byte value: 0x05 */ + 0x06, 0x16, 0x66, 0x0e, 0x0e, 0xb1, 0x6a, 0x38, 0x02, 0x11, 0x1d, 0x38, 0x23, 0x5f, 0xd7, 0x07, /* Byte value: 0x06 */ + 0x5d, 0x87, 0xc4, 0x30, 0x30, 0xf7, 0x7e, 0xc0, 0x8a, 0x9f, 0x25, 0xc0, 0xb8, 0x41, 0x33, 0x18, /* Byte value: 0x07 */ + 0x09, 0x31, 0x99, 0x15, 0x15, 0xa9, 0x8b, 0x54, 0x07, 0xda, 0xc8, 0x54, 0x95, 0x2f, 0x30, 0xeb, /* Byte value: 0x08 */ + 0x78, 0xfb, 0xf4, 0xd8, 0xd8, 0xc0, 0x04, 0xe6, 0x28, 0x97, 0x67, 0xe6, 0xf9, 0x06, 0x34, 0x6c, /* Byte value: 0x09 */ + 0xf4, 0x90, 0x6f, 0xc6, 0xc6, 0x9d, 0x44, 0x9e, 0xed, 0xf3, 0xd8, 0x9e, 0xb2, 0x66, 0xf2, 0x63, /* Byte value: 0x0a */ + 0x16, 0xc7, 0xb5, 0x9f, 0x9f, 0x4f, 0x99, 0x39, 0xb3, 0x69, 0x45, 0x39, 0x6a, 0x34, 0xfa, 0xae, /* Byte value: 0x0b */ + 0x19, 0xe0, 0x4a, 0x84, 0x84, 0x57, 0x78, 0x55, 0xb6, 0xa2, 0x90, 0x55, 0xdc, 0x44, 0x1d, 0x42, /* Byte value: 0x0c */ + 0xec, 0xc8, 0x34, 0xfe, 0xfe, 0x1c, 0x2f, 0x7e, 0xe5, 0xb7, 0xac, 0x7e, 0x3e, 0xd9, 0x28, 0x7f, /* Byte value: 0x0d */ + 0x69, 0x92, 0x36, 0xf5, 0xf5, 0xe8, 0xe4, 0x52, 0x27, 0x09, 0xdb, 0x52, 0xe0, 0x96, 0xde, 0x9b, /* Byte value: 0x0e */ + 0x04, 0xa5, 0x44, 
0xb5, 0xb5, 0xde, 0x4c, 0x91, 0xbd, 0x1e, 0x16, 0x91, 0x83, 0x6a, 0x9a, 0xbb, /* Byte value: 0x0f */ + 0x8f, 0x60, 0xa8, 0x19, 0x19, 0xe4, 0x75, 0x64, 0xc4, 0x8d, 0x50, 0x64, 0xbb, 0xae, 0x4c, 0xed, /* Byte value: 0x10 */ + 0x30, 0xb0, 0xb6, 0x70, 0x70, 0xc1, 0xd6, 0x03, 0x10, 0x88, 0xe8, 0x03, 0xdb, 0xbd, 0x77, 0x38, /* Byte value: 0x11 */ + 0x39, 0x81, 0x2f, 0x65, 0x65, 0x68, 0x5d, 0x57, 0x17, 0x52, 0x20, 0x57, 0x4e, 0x92, 0x47, 0xd3, /* Byte value: 0x12 */ + 0xe9, 0xd5, 0x61, 0xf7, 0xf7, 0x14, 0x70, 0x5a, 0xe6, 0x4f, 0x5e, 0x5a, 0xed, 0x48, 0x75, 0x9a, /* Byte value: 0x13 */ + 0x03, 0x0b, 0x33, 0x07, 0x07, 0xb9, 0x35, 0x1c, 0x01, 0xe9, 0xef, 0x1c, 0xf0, 0xce, 0x8a, 0xe2, /* Byte value: 0x14 */ + 0xcf, 0xa2, 0x62, 0x18, 0x18, 0x9a, 0x3f, 0x60, 0x45, 0xae, 0xf3, 0x60, 0x5c, 0xc1, 0xf8, 0x0c, /* Byte value: 0x15 */ + 0x70, 0x72, 0x7c, 0x71, 0x71, 0xbf, 0x9c, 0x07, 0x91, 0xab, 0x4b, 0x07, 0x3c, 0xd2, 0xc3, 0xd9, /* Byte value: 0x16 */ + 0x43, 0xc9, 0xf9, 0x06, 0x06, 0xc7, 0x7f, 0x18, 0x80, 0xca, 0x4c, 0x18, 0x17, 0xa1, 0x3e, 0x03, /* Byte value: 0x17 */ + 0x55, 0x0e, 0x4c, 0x99, 0x99, 0x88, 0xe6, 0x21, 0x33, 0xa3, 0x09, 0x21, 0x7d, 0x95, 0xc4, 0xad, /* Byte value: 0x18 */ + 0x2c, 0x4d, 0xa9, 0xfd, 0xfd, 0x9e, 0xf1, 0x72, 0xa5, 0xd2, 0x8a, 0x72, 0xd4, 0x68, 0x37, 0x9f, /* Byte value: 0x19 */ + 0x12, 0x62, 0xf1, 0x2a, 0x2a, 0x91, 0xd5, 0xa8, 0x0e, 0x77, 0x53, 0xa8, 0xe9, 0x5e, 0x60, 0x15, /* Byte value: 0x1a */ + 0x8e, 0xd8, 0xb9, 0xa5, 0xa5, 0x32, 0x66, 0xd1, 0x7a, 0x6b, 0xb4, 0xd1, 0xeb, 0x55, 0x8b, 0xb3, /* Byte value: 0x1b */ + 0xd5, 0x49, 0x1b, 0x9b, 0x9b, 0x74, 0x72, 0x29, 0xf2, 0xe5, 0x8c, 0x29, 0x70, 0x4b, 0x6f, 0xac, /* Byte value: 0x1c */ + 0x3d, 0x24, 0x6b, 0xd0, 0xd0, 0xb6, 0x11, 0xc6, 0xaa, 0x4c, 0x36, 0xc6, 0xcd, 0xf8, 0xdd, 0x68, /* Byte value: 0x1d */ + 0xd2, 0xe7, 0x6c, 0x29, 0x29, 0x13, 0x0b, 0xa4, 0x4e, 0x12, 0x75, 0xa4, 0x03, 0xef, 0x7f, 0xf5, /* Byte value: 0x1e */ + 0x76, 0x64, 0x1a, 0x7f, 0x7f, 0x0e, 0xf6, 0x3f, 0x93, 0xba, 0x56, 0x3f, 0x1f, 0x8d, 0x14, 0xde, /* Byte value: 0x1f */ + 0x18, 0x58, 0x5b, 0x38, 0x38, 0x81, 0x6b, 0xe0, 0x08, 0x44, 0x74, 0xe0, 0x8c, 0xbf, 0xda, 0x1c, /* Byte value: 0x20 */ + 0x35, 0xad, 0xe3, 0x79, 0x79, 0xc9, 0x89, 0x27, 0x13, 0x70, 0x1a, 0x27, 0x08, 0x2c, 0x2a, 0xdd, /* Byte value: 0x21 */ + 0x07, 0xae, 0x77, 0xb2, 0xb2, 0x67, 0x79, 0x8d, 0xbc, 0xf7, 0xf9, 0x8d, 0x73, 0xa4, 0x10, 0x59, /* Byte value: 0x22 */ + 0xc0, 0x85, 0x9d, 0x03, 0x03, 0x82, 0xde, 0x0c, 0x40, 0x65, 0x26, 0x0c, 0xea, 0xb1, 0x1f, 0xe0, /* Byte value: 0x23 */ + 0xf7, 0x9b, 0x5c, 0xc1, 0xc1, 0x24, 0x71, 0x82, 0xec, 0x1a, 0x37, 0x82, 0x42, 0xa8, 0x78, 0x81, /* Byte value: 0x24 */ + 0xb8, 0x7e, 0x69, 0xdb, 0xdb, 0x42, 0xda, 0xea, 0x68, 0xf2, 0x41, 0xea, 0x13, 0xb7, 0x2b, 0x8c, /* Byte value: 0x25 */ + 0x72, 0xc1, 0x5e, 0xca, 0xca, 0xd0, 0xba, 0xae, 0x2e, 0xa4, 0x40, 0xae, 0x9c, 0xe7, 0x8e, 0x65, /* Byte value: 0x26 */ + 0x11, 0x69, 0xc2, 0x2d, 0x2d, 0x28, 0xe0, 0xb4, 0x0f, 0x9e, 0xbc, 0xb4, 0x19, 0x90, 0xea, 0xf7, /* Byte value: 0x27 */ + 0xb4, 0x52, 0xa5, 0xc7, 0xc7, 0xe3, 0x0e, 0x9a, 0x6c, 0xd0, 0x7b, 0x9a, 0x55, 0x09, 0x46, 0x82, /* Byte value: 0x28 */ + 0xff, 0x12, 0xd4, 0x68, 0x68, 0x5b, 0xe9, 0x63, 0x55, 0x26, 0x1b, 0x63, 0x87, 0x7c, 0x8f, 0x34, /* Byte value: 0x29 */ + 0x1a, 0xeb, 0x79, 0x83, 0x83, 0xee, 0x4d, 0x49, 0xb7, 0x4b, 0x7f, 0x49, 0x2c, 0x8a, 0x97, 0xa0, /* Byte value: 0x2a */ + 0x48, 0x4b, 0x42, 0xa8, 0xa8, 0x01, 0xd2, 0xe5, 0x38, 0x1f, 0x8f, 0xe5, 0x22, 0xbb, 0x43, 0x54, /* Byte value: 0x2b */ + 0xaa, 0x1c, 0x98, 0xf1, 0xf1, 0xd3, 0x0f, 0x42, 0x66, 0x85, 
0x12, 0x42, 0xfa, 0xe9, 0x4b, 0x99, /* Byte value: 0x2c */ + 0x58, 0x9a, 0x91, 0x39, 0x39, 0xff, 0x21, 0xe4, 0x89, 0x67, 0xd7, 0xe4, 0x6b, 0xd0, 0x6e, 0xfd, /* Byte value: 0x2d */ + 0x93, 0x9d, 0xb7, 0x94, 0x94, 0xbb, 0x52, 0x15, 0x71, 0xd7, 0x32, 0x15, 0xb4, 0x7b, 0x0c, 0x4a, /* Byte value: 0x2e */ + 0x21, 0xd9, 0x74, 0x5d, 0x5d, 0xe9, 0x36, 0xb7, 0x1f, 0x16, 0x54, 0xb7, 0xc2, 0x2d, 0x9d, 0xcf, /* Byte value: 0x2f */ + 0xdd, 0xc0, 0x93, 0x32, 0x32, 0x0b, 0xea, 0xc8, 0x4b, 0xd9, 0xa0, 0xc8, 0xb5, 0x9f, 0x98, 0x19, /* Byte value: 0x30 */ + 0x60, 0xa3, 0xaf, 0xe0, 0xe0, 0x41, 0x6f, 0x06, 0x20, 0xd3, 0x13, 0x06, 0x75, 0xb9, 0xee, 0x70, /* Byte value: 0x31 */ + 0x0c, 0x2c, 0xcc, 0x1c, 0x1c, 0xa1, 0xd4, 0x70, 0x04, 0x22, 0x3a, 0x70, 0x46, 0xbe, 0x6d, 0x0e, /* Byte value: 0x32 */ + 0xc4, 0x20, 0xd9, 0xb6, 0xb6, 0x5c, 0x92, 0x9d, 0xfd, 0x7b, 0x30, 0x9d, 0x69, 0xdb, 0x85, 0x5b, /* Byte value: 0x33 */ + 0x9a, 0xac, 0x2e, 0x81, 0x81, 0x12, 0xd9, 0x41, 0x76, 0x0d, 0xfa, 0x41, 0x21, 0x54, 0x3c, 0xa1, /* Byte value: 0x34 */ + 0x5c, 0x3f, 0xd5, 0x8c, 0x8c, 0x21, 0x6d, 0x75, 0x34, 0x79, 0xc1, 0x75, 0xe8, 0xba, 0xf4, 0x46, /* Byte value: 0x35 */ + 0x53, 0x18, 0x2a, 0x97, 0x97, 0x39, 0x8c, 0x19, 0x31, 0xb2, 0x14, 0x19, 0x5e, 0xca, 0x13, 0xaa, /* Byte value: 0x36 */ + 0xcd, 0x11, 0x40, 0xa3, 0xa3, 0xf5, 0x19, 0xc9, 0xfa, 0xa1, 0xf8, 0xc9, 0xfc, 0xf4, 0xb5, 0xb0, /* Byte value: 0x37 */ + 0x32, 0x03, 0x94, 0xcb, 0xcb, 0xae, 0xf0, 0xaa, 0xaf, 0x87, 0xe3, 0xaa, 0x7b, 0x88, 0x3a, 0x84, /* Byte value: 0x38 */ + 0x1b, 0x53, 0x68, 0x3f, 0x3f, 0x38, 0x5e, 0xfc, 0x09, 0xad, 0x9b, 0xfc, 0x7c, 0x71, 0x50, 0xfe, /* Byte value: 0x39 */ + 0x5a, 0x29, 0xb3, 0x82, 0x82, 0x90, 0x07, 0x4d, 0x36, 0x68, 0xdc, 0x4d, 0xcb, 0xe5, 0x23, 0x41, /* Byte value: 0x3a */ + 0xa1, 0x9e, 0x23, 0x5f, 0x5f, 0x15, 0xa2, 0xbf, 0xde, 0x50, 0xd1, 0xbf, 0xcf, 0xf3, 0x36, 0xce, /* Byte value: 0x3b */ + 0xe5, 0xf9, 0xad, 0xeb, 0xeb, 0xb5, 0xa4, 0x2a, 0xe2, 0x6d, 0x64, 0x2a, 0xab, 0xf6, 0x18, 0x94, /* Byte value: 0x3c */ + 0x3c, 0x9c, 0x7a, 0x6c, 0x6c, 0x60, 0x02, 0x73, 0x14, 0xaa, 0xd2, 0x73, 0x9d, 0x03, 0x1a, 0x36, /* Byte value: 0x3d */ + 0x97, 0x38, 0xf3, 0x21, 0x21, 0x65, 0x1e, 0x84, 0xcc, 0xc9, 0x24, 0x84, 0x37, 0x11, 0x96, 0xf1, /* Byte value: 0x3e */ + 0xce, 0x1a, 0x73, 0xa4, 0xa4, 0x4c, 0x2c, 0xd5, 0xfb, 0x48, 0x17, 0xd5, 0x0c, 0x3a, 0x3f, 0x52, /* Byte value: 0x3f */ + 0x6d, 0x37, 0x72, 0x40, 0x40, 0x36, 0xa8, 0xc3, 0x9a, 0x17, 0xcd, 0xc3, 0x63, 0xfc, 0x44, 0x20, /* Byte value: 0x40 */ + 0x62, 0x10, 0x8d, 0x5b, 0x5b, 0x2e, 0x49, 0xaf, 0x9f, 0xdc, 0x18, 0xaf, 0xd5, 0x8c, 0xa3, 0xcc, /* Byte value: 0x41 */ + 0xf5, 0x28, 0x7e, 0x7a, 0x7a, 0x4b, 0x57, 0x2b, 0x53, 0x15, 0x3c, 0x2b, 0xe2, 0x9d, 0x35, 0x3d, /* Byte value: 0x42 */ + 0xb7, 0x59, 0x96, 0xc0, 0xc0, 0x5a, 0x3b, 0x86, 0x6d, 0x39, 0x94, 0x86, 0xa5, 0xc7, 0xcc, 0x60, /* Byte value: 0x43 */ + 0xe2, 0x57, 0xda, 0x59, 0x59, 0xd2, 0xdd, 0xa7, 0x5e, 0x9a, 0x9d, 0xa7, 0xd8, 0x52, 0x08, 0xcd, /* Byte value: 0x44 */ + 0x41, 0x7a, 0xdb, 0xbd, 0xbd, 0xa8, 0x59, 0xb1, 0x3f, 0xc5, 0x47, 0xb1, 0xb7, 0x94, 0x73, 0xbf, /* Byte value: 0x45 */ + 0x71, 0xca, 0x6d, 0xcd, 0xcd, 0x69, 0x8f, 0xb2, 0x2f, 0x4d, 0xaf, 0xb2, 0x6c, 0x29, 0x04, 0x87, /* Byte value: 0x46 */ + 0xf9, 0x04, 0xb2, 0x66, 0x66, 0xea, 0x83, 0x5b, 0x57, 0x37, 0x06, 0x5b, 0xa4, 0x23, 0x58, 0x33, /* Byte value: 0x47 */ + 0xea, 0xde, 0x52, 0xf0, 0xf0, 0xad, 0x45, 0x46, 0xe7, 0xa6, 0xb1, 0x46, 0x1d, 0x86, 0xff, 0x78, /* Byte value: 0x48 */ + 0xae, 0xb9, 0xdc, 0x44, 0x44, 0x0d, 0x43, 0xd3, 0xdb, 0x9b, 0x04, 0xd3, 0x79, 0x83, 0xd1, 0x22, /* Byte 
value: 0x49 */ + 0xe4, 0x41, 0xbc, 0x57, 0x57, 0x63, 0xb7, 0x9f, 0x5c, 0x8b, 0x80, 0x9f, 0xfb, 0x0d, 0xdf, 0xca, /* Byte value: 0x4a */ + 0x0b, 0x82, 0xbb, 0xae, 0xae, 0xc6, 0xad, 0xfd, 0xb8, 0xd5, 0xc3, 0xfd, 0x35, 0x1a, 0x7d, 0x57, /* Byte value: 0x4b */ + 0xde, 0xcb, 0xa0, 0x35, 0x35, 0xb2, 0xdf, 0xd4, 0x4a, 0x30, 0x4f, 0xd4, 0x45, 0x51, 0x12, 0xfb, /* Byte value: 0x4c */ + 0xf0, 0x35, 0x2b, 0x73, 0x73, 0x43, 0x08, 0x0f, 0x50, 0xed, 0xce, 0x0f, 0x31, 0x0c, 0x68, 0xd8, /* Byte value: 0x4d */ + 0xd4, 0xf1, 0x0a, 0x27, 0x27, 0xa2, 0x61, 0x9c, 0x4c, 0x03, 0x68, 0x9c, 0x20, 0xb0, 0xa8, 0xf2, /* Byte value: 0x4e */ + 0xb5, 0xea, 0xb4, 0x7b, 0x7b, 0x35, 0x1d, 0x2f, 0xd2, 0x36, 0x9f, 0x2f, 0x05, 0xf2, 0x81, 0xdc, /* Byte value: 0x4f */ + 0xa4, 0x83, 0x76, 0x56, 0x56, 0x1d, 0xfd, 0x9b, 0xdd, 0xa8, 0x23, 0x9b, 0x1c, 0x62, 0x6b, 0x2b, /* Byte value: 0x50 */ + 0xba, 0xcd, 0x4b, 0x60, 0x60, 0x2d, 0xfc, 0x43, 0xd7, 0xfd, 0x4a, 0x43, 0xb3, 0x82, 0x66, 0x30, /* Byte value: 0x51 */ + 0xe8, 0x6d, 0x70, 0x4b, 0x4b, 0xc2, 0x63, 0xef, 0x58, 0xa9, 0xba, 0xef, 0xbd, 0xb3, 0xb2, 0xc4, /* Byte value: 0x52 */ + 0x0e, 0x9f, 0xee, 0xa7, 0xa7, 0xce, 0xf2, 0xd9, 0xbb, 0x2d, 0x31, 0xd9, 0xe6, 0x8b, 0x20, 0xb2, /* Byte value: 0x53 */ + 0x9c, 0xba, 0x48, 0x8f, 0x8f, 0xa3, 0xb3, 0x79, 0x74, 0x1c, 0xe7, 0x79, 0x02, 0x0b, 0xeb, 0xa6, /* Byte value: 0x54 */ + 0x31, 0x08, 0xa7, 0xcc, 0xcc, 0x17, 0xc5, 0xb6, 0xae, 0x6e, 0x0c, 0xb6, 0x8b, 0x46, 0xb0, 0x66, /* Byte value: 0x55 */ + 0x79, 0x43, 0xe5, 0x64, 0x64, 0x16, 0x17, 0x53, 0x96, 0x71, 0x83, 0x53, 0xa9, 0xfd, 0xf3, 0x32, /* Byte value: 0x56 */ + 0xfc, 0x19, 0xe7, 0x6f, 0x6f, 0xe2, 0xdc, 0x7f, 0x54, 0xcf, 0xf4, 0x7f, 0x77, 0xb2, 0x05, 0xd6, /* Byte value: 0x57 */ + 0x4d, 0x56, 0x17, 0xa1, 0xa1, 0x09, 0x8d, 0xc1, 0x3b, 0xe7, 0x7d, 0xc1, 0xf1, 0x2a, 0x1e, 0xb1, /* Byte value: 0x58 */ + 0xdb, 0xd6, 0xf5, 0x3c, 0x3c, 0xba, 0x80, 0xf0, 0x49, 0xc8, 0xbd, 0xf0, 0x96, 0xc0, 0x4f, 0x1e, /* Byte value: 0x59 */ + 0x75, 0x6f, 0x29, 0x78, 0x78, 0xb7, 0xc3, 0x23, 0x92, 0x53, 0xb9, 0x23, 0xef, 0x43, 0x9e, 0x3c, /* Byte value: 0x5a */ + 0x68, 0x2a, 0x27, 0x49, 0x49, 0x3e, 0xf7, 0xe7, 0x99, 0xef, 0x3f, 0xe7, 0xb0, 0x6d, 0x19, 0xc5, /* Byte value: 0x5b */ + 0x15, 0xcc, 0x86, 0x98, 0x98, 0xf6, 0xac, 0x25, 0xb2, 0x80, 0xaa, 0x25, 0x9a, 0xfa, 0x70, 0x4c, /* Byte value: 0x5c */ + 0xa9, 0x17, 0xab, 0xf6, 0xf6, 0x6a, 0x3a, 0x5e, 0x67, 0x6c, 0xfd, 0x5e, 0x0a, 0x27, 0xc1, 0x7b, /* Byte value: 0x5d */ + 0x01, 0xb8, 0x11, 0xbc, 0xbc, 0xd6, 0x13, 0xb5, 0xbe, 0xe6, 0xe4, 0xb5, 0x50, 0xfb, 0xc7, 0x5e, /* Byte value: 0x5e */ + 0x23, 0x6a, 0x56, 0xe6, 0xe6, 0x86, 0x10, 0x1e, 0xa0, 0x19, 0x5f, 0x1e, 0x62, 0x18, 0xd0, 0x73, /* Byte value: 0x5f */ + 0xb2, 0x44, 0xc3, 0xc9, 0xc9, 0x52, 0x64, 0xa2, 0x6e, 0xc1, 0x66, 0xa2, 0x76, 0x56, 0x91, 0x85, /* Byte value: 0x60 */ + 0x6e, 0x3c, 0x41, 0x47, 0x47, 0x8f, 0x9d, 0xdf, 0x9b, 0xfe, 0x22, 0xdf, 0x93, 0x32, 0xce, 0xc2, /* Byte value: 0x61 */ + 0xf2, 0x86, 0x09, 0xc8, 0xc8, 0x2c, 0x2e, 0xa6, 0xef, 0xe2, 0xc5, 0xa6, 0x91, 0x39, 0x25, 0x64, /* Byte value: 0x62 */ + 0x3b, 0x32, 0x0d, 0xde, 0xde, 0x07, 0x7b, 0xfe, 0xa8, 0x5d, 0x2b, 0xfe, 0xee, 0xa7, 0x0a, 0x6f, /* Byte value: 0x63 */ + 0x52, 0xa0, 0x3b, 0x2b, 0x2b, 0xef, 0x9f, 0xac, 0x8f, 0x54, 0xf0, 0xac, 0x0e, 0x31, 0xd4, 0xf4, /* Byte value: 0x64 */ + 0x65, 0xbe, 0xfa, 0xe9, 0xe9, 0x49, 0x30, 0x22, 0x23, 0x2b, 0xe1, 0x22, 0xa6, 0x28, 0xb3, 0x95, /* Byte value: 0x65 */ + 0x4b, 0x40, 0x71, 0xaf, 0xaf, 0xb8, 0xe7, 0xf9, 0x39, 0xf6, 0x60, 0xf9, 0xd2, 0x75, 0xc9, 0xb6, /* Byte value: 0x66 */ + 0x38, 0x39, 0x3e, 0xd9, 
0xd9, 0xbe, 0x4e, 0xe2, 0xa9, 0xb4, 0xc4, 0xe2, 0x1e, 0x69, 0x80, 0x8d, /* Byte value: 0x67 */ + 0x2a, 0x5b, 0xcf, 0xf3, 0xf3, 0x2f, 0x9b, 0x4a, 0xa7, 0xc3, 0x97, 0x4a, 0xf7, 0x37, 0xe0, 0x98, /* Byte value: 0x68 */ + 0x91, 0x2e, 0x95, 0x2f, 0x2f, 0xd4, 0x74, 0xbc, 0xce, 0xd8, 0x39, 0xbc, 0x14, 0x4e, 0x41, 0xf6, /* Byte value: 0x69 */ + 0xd6, 0x42, 0x28, 0x9c, 0x9c, 0xcd, 0x47, 0x35, 0xf3, 0x0c, 0x63, 0x35, 0x80, 0x85, 0xe5, 0x4e, /* Byte value: 0x6a */ + 0xfd, 0xa1, 0xf6, 0xd3, 0xd3, 0x34, 0xcf, 0xca, 0xea, 0x29, 0x10, 0xca, 0x27, 0x49, 0xc2, 0x88, /* Byte value: 0x6b */ + 0xf8, 0xbc, 0xa3, 0xda, 0xda, 0x3c, 0x90, 0xee, 0xe9, 0xd1, 0xe2, 0xee, 0xf4, 0xd8, 0x9f, 0x6d, /* Byte value: 0x6c */ + 0x8c, 0x6b, 0x9b, 0x1e, 0x1e, 0x5d, 0x40, 0x78, 0xc5, 0x64, 0xbf, 0x78, 0x4b, 0x60, 0xc6, 0x0f, /* Byte value: 0x6d */ + 0x50, 0x13, 0x19, 0x90, 0x90, 0x80, 0xb9, 0x05, 0x30, 0x5b, 0xfb, 0x05, 0xae, 0x04, 0x99, 0x48, /* Byte value: 0x6e */ + 0x7f, 0x55, 0x83, 0x6a, 0x6a, 0xa7, 0x7d, 0x6b, 0x94, 0x60, 0x9e, 0x6b, 0x8a, 0xa2, 0x24, 0x35, /* Byte value: 0x6f */ + 0x59, 0x22, 0x80, 0x85, 0x85, 0x29, 0x32, 0x51, 0x37, 0x81, 0x33, 0x51, 0x3b, 0x2b, 0xa9, 0xa3, /* Byte value: 0x70 */ + 0x08, 0x89, 0x88, 0xa9, 0xa9, 0x7f, 0x98, 0xe1, 0xb9, 0x3c, 0x2c, 0xe1, 0xc5, 0xd4, 0xf7, 0xb5, /* Byte value: 0x71 */ + 0xd9, 0x65, 0xd7, 0x87, 0x87, 0xd5, 0xa6, 0x59, 0xf6, 0xc7, 0xb6, 0x59, 0x36, 0xf5, 0x02, 0xa2, /* Byte value: 0x72 */ + 0xa2, 0x95, 0x10, 0x58, 0x58, 0xac, 0x97, 0xa3, 0xdf, 0xb9, 0x3e, 0xa3, 0x3f, 0x3d, 0xbc, 0x2c, /* Byte value: 0x73 */ + 0xfb, 0xb7, 0x90, 0xdd, 0xdd, 0x85, 0xa5, 0xf2, 0xe8, 0x38, 0x0d, 0xf2, 0x04, 0x16, 0x15, 0x8f, /* Byte value: 0x74 */ + 0x1c, 0xfd, 0x1f, 0x8d, 0x8d, 0x5f, 0x27, 0x71, 0xb5, 0x5a, 0x62, 0x71, 0x0f, 0xd5, 0x40, 0xa7, /* Byte value: 0x75 */ + 0x29, 0x50, 0xfc, 0xf4, 0xf4, 0x96, 0xae, 0x56, 0xa6, 0x2a, 0x78, 0x56, 0x07, 0xf9, 0x6a, 0x7a, /* Byte value: 0x76 */ + 0xd3, 0x5f, 0x7d, 0x95, 0x95, 0xc5, 0x18, 0x11, 0xf0, 0xf4, 0x91, 0x11, 0x53, 0x14, 0xb8, 0xab, /* Byte value: 0x77 */ + 0x6f, 0x84, 0x50, 0xfb, 0xfb, 0x59, 0x8e, 0x6a, 0x25, 0x18, 0xc6, 0x6a, 0xc3, 0xc9, 0x09, 0x9c, /* Byte value: 0x78 */ + 0x47, 0x6c, 0xbd, 0xb3, 0xb3, 0x19, 0x33, 0x89, 0x3d, 0xd4, 0x5a, 0x89, 0x94, 0xcb, 0xa4, 0xb8, /* Byte value: 0x79 */ + 0x54, 0xb6, 0x5d, 0x25, 0x25, 0x5e, 0xf5, 0x94, 0x8d, 0x45, 0xed, 0x94, 0x2d, 0x6e, 0x03, 0xf3, /* Byte value: 0x7a */ + 0x9f, 0xb1, 0x7b, 0x88, 0x88, 0x1a, 0x86, 0x65, 0x75, 0xf5, 0x08, 0x65, 0xf2, 0xc5, 0x61, 0x44, /* Byte value: 0x7b */ + 0x17, 0x7f, 0xa4, 0x23, 0x23, 0x99, 0x8a, 0x8c, 0x0d, 0x8f, 0xa1, 0x8c, 0x3a, 0xcf, 0x3d, 0xf0, /* Byte value: 0x7c */ + 0xe1, 0x5c, 0xe9, 0x5e, 0x5e, 0x6b, 0xe8, 0xbb, 0x5f, 0x73, 0x72, 0xbb, 0x28, 0x9c, 0x82, 0x2f, /* Byte value: 0x7d */ + 0x7c, 0x5e, 0xb0, 0x6d, 0x6d, 0x1e, 0x48, 0x77, 0x95, 0x89, 0x71, 0x77, 0x7a, 0x6c, 0xae, 0xd7, /* Byte value: 0x7e */ + 0x46, 0xd4, 0xac, 0x0f, 0x0f, 0xcf, 0x20, 0x3c, 0x83, 0x32, 0xbe, 0x3c, 0xc4, 0x30, 0x63, 0xe6, /* Byte value: 0x7f */ + 0xbc, 0xdb, 0x2d, 0x6e, 0x6e, 0x9c, 0x96, 0x7b, 0xd5, 0xec, 0x57, 0x7b, 0x90, 0xdd, 0xb1, 0x37, /* Byte value: 0x80 */ + 0xc5, 0x98, 0xc8, 0x0a, 0x0a, 0x8a, 0x81, 0x28, 0x43, 0x9d, 0xd4, 0x28, 0x39, 0x20, 0x42, 0x05, /* Byte value: 0x81 */ + 0x22, 0xd2, 0x47, 0x5a, 0x5a, 0x50, 0x03, 0xab, 0x1e, 0xff, 0xbb, 0xab, 0x32, 0xe3, 0x17, 0x2d, /* Byte value: 0x82 */ + 0xbe, 0x68, 0x0f, 0xd5, 0xd5, 0xf3, 0xb0, 0xd2, 0x6a, 0xe3, 0x5c, 0xd2, 0x30, 0xe8, 0xfc, 0x8b, /* Byte value: 0x83 */ + 0x96, 0x80, 0xe2, 0x9d, 0x9d, 0xb3, 0x0d, 0x31, 0x72, 0x2f, 0xc0, 
0x31, 0x67, 0xea, 0x51, 0xaf, /* Byte value: 0x84 */ + 0x7e, 0xed, 0x92, 0xd6, 0xd6, 0x71, 0x6e, 0xde, 0x2a, 0x86, 0x7a, 0xde, 0xda, 0x59, 0xe3, 0x6b, /* Byte value: 0x85 */ + 0x1d, 0x45, 0x0e, 0x31, 0x31, 0x89, 0x34, 0xc4, 0x0b, 0xbc, 0x86, 0xc4, 0x5f, 0x2e, 0x87, 0xf9, /* Byte value: 0x86 */ + 0xb9, 0xc6, 0x78, 0x67, 0x67, 0x94, 0xc9, 0x5f, 0xd6, 0x14, 0xa5, 0x5f, 0x43, 0x4c, 0xec, 0xd2, /* Byte value: 0x87 */ + 0x9d, 0x02, 0x59, 0x33, 0x33, 0x75, 0xa0, 0xcc, 0xca, 0xfa, 0x03, 0xcc, 0x52, 0xf0, 0x2c, 0xf8, /* Byte value: 0x88 */ + 0xad, 0xb2, 0xef, 0x43, 0x43, 0xb4, 0x76, 0xcf, 0xda, 0x72, 0xeb, 0xcf, 0x89, 0x4d, 0x5b, 0xc0, /* Byte value: 0x89 */ + 0x6c, 0x8f, 0x63, 0xfc, 0xfc, 0xe0, 0xbb, 0x76, 0x24, 0xf1, 0x29, 0x76, 0x33, 0x07, 0x83, 0x7e, /* Byte value: 0x8a */ + 0x85, 0x5a, 0x02, 0x0b, 0x0b, 0xf4, 0xcb, 0x2c, 0xc2, 0xbe, 0x77, 0x2c, 0xde, 0x4f, 0xf6, 0xe4, /* Byte value: 0x8b */ + 0xd1, 0xec, 0x5f, 0x2e, 0x2e, 0xaa, 0x3e, 0xb8, 0x4f, 0xfb, 0x9a, 0xb8, 0xf3, 0x21, 0xf5, 0x17, /* Byte value: 0x8c */ + 0x1e, 0x4e, 0x3d, 0x36, 0x36, 0x30, 0x01, 0xd8, 0x0a, 0x55, 0x69, 0xd8, 0xaf, 0xe0, 0x0d, 0x1b, /* Byte value: 0x8d */ + 0xa5, 0x3b, 0x67, 0xea, 0xea, 0xcb, 0xee, 0x2e, 0x63, 0x4e, 0xc7, 0x2e, 0x4c, 0x99, 0xac, 0x75, /* Byte value: 0x8e */ + 0xe6, 0xf2, 0x9e, 0xec, 0xec, 0x0c, 0x91, 0x36, 0xe3, 0x84, 0x8b, 0x36, 0x5b, 0x38, 0x92, 0x76, /* Byte value: 0x8f */ + 0x2e, 0xfe, 0x8b, 0x46, 0x46, 0xf1, 0xd7, 0xdb, 0x1a, 0xdd, 0x81, 0xdb, 0x74, 0x5d, 0x7a, 0x23, /* Byte value: 0x90 */ + 0x5f, 0x34, 0xe6, 0x8b, 0x8b, 0x98, 0x58, 0x69, 0x35, 0x90, 0x2e, 0x69, 0x18, 0x74, 0x7e, 0xa4, /* Byte value: 0x91 */ + 0x61, 0x1b, 0xbe, 0x5c, 0x5c, 0x97, 0x7c, 0xb3, 0x9e, 0x35, 0xf7, 0xb3, 0x25, 0x42, 0x29, 0x2e, /* Byte value: 0x92 */ + 0x83, 0x4c, 0x64, 0x05, 0x05, 0x45, 0xa1, 0x14, 0xc0, 0xaf, 0x6a, 0x14, 0xfd, 0x10, 0x21, 0xe3, /* Byte value: 0x93 */ + 0x44, 0x67, 0x8e, 0xb4, 0xb4, 0xa0, 0x06, 0x95, 0x3c, 0x3d, 0xb5, 0x95, 0x64, 0x05, 0x2e, 0x5a, /* Byte value: 0x94 */ + 0xb1, 0x4f, 0xf0, 0xce, 0xce, 0xeb, 0x51, 0xbe, 0x6f, 0x28, 0x89, 0xbe, 0x86, 0x98, 0x1b, 0x67, /* Byte value: 0x95 */ + 0xef, 0xc3, 0x07, 0xf9, 0xf9, 0xa5, 0x1a, 0x62, 0xe4, 0x5e, 0x43, 0x62, 0xce, 0x17, 0xa2, 0x9d, /* Byte value: 0x96 */ + 0x82, 0xf4, 0x75, 0xb9, 0xb9, 0x93, 0xb2, 0xa1, 0x7e, 0x49, 0x8e, 0xa1, 0xad, 0xeb, 0xe6, 0xbd, /* Byte value: 0x97 */ + 0x36, 0xa6, 0xd0, 0x7e, 0x7e, 0x70, 0xbc, 0x3b, 0x12, 0x99, 0xf5, 0x3b, 0xf8, 0xe2, 0xa0, 0x3f, /* Byte value: 0x98 */ + 0xa3, 0x2d, 0x01, 0xe4, 0xe4, 0x7a, 0x84, 0x16, 0x61, 0x5f, 0xda, 0x16, 0x6f, 0xc6, 0x7b, 0x72, /* Byte value: 0x99 */ + 0xaf, 0x01, 0xcd, 0xf8, 0xf8, 0xdb, 0x50, 0x66, 0x65, 0x7d, 0xe0, 0x66, 0x29, 0x78, 0x16, 0x7c, /* Byte value: 0x9a */ + 0x88, 0xce, 0xdf, 0xab, 0xab, 0x83, 0x0c, 0xe9, 0x78, 0x7a, 0xa9, 0xe9, 0xc8, 0x0a, 0x5c, 0xb4, /* Byte value: 0x9b */ + 0x89, 0x76, 0xce, 0x17, 0x17, 0x55, 0x1f, 0x5c, 0xc6, 0x9c, 0x4d, 0x5c, 0x98, 0xf1, 0x9b, 0xea, /* Byte value: 0x9c */ + 0x0f, 0x27, 0xff, 0x1b, 0x1b, 0x18, 0xe1, 0x6c, 0x05, 0xcb, 0xd5, 0x6c, 0xb6, 0x70, 0xe7, 0xec, /* Byte value: 0x9d */ + 0x1f, 0xf6, 0x2c, 0x8a, 0x8a, 0xe6, 0x12, 0x6d, 0xb4, 0xb3, 0x8d, 0x6d, 0xff, 0x1b, 0xca, 0x45, /* Byte value: 0x9e */ + 0x42, 0x71, 0xe8, 0xba, 0xba, 0x11, 0x6c, 0xad, 0x3e, 0x2c, 0xa8, 0xad, 0x47, 0x5a, 0xf9, 0x5d, /* Byte value: 0x9f */ + 0xf6, 0x23, 0x4d, 0x7d, 0x7d, 0xf2, 0x62, 0x37, 0x52, 0xfc, 0xd3, 0x37, 0x12, 0x53, 0xbf, 0xdf, /* Byte value: 0xa0 */ + 0xdf, 0x73, 0xb1, 0x89, 0x89, 0x64, 0xcc, 0x61, 0xf4, 0xd6, 0xab, 0x61, 0x15, 0xaa, 0xd5, 0xa5, /* Byte value: 
0xa1 */ + 0x3a, 0x8a, 0x1c, 0x62, 0x62, 0xd1, 0x68, 0x4b, 0x16, 0xbb, 0xcf, 0x4b, 0xbe, 0x5c, 0xcd, 0x31, /* Byte value: 0xa2 */ + 0xc1, 0x3d, 0x8c, 0xbf, 0xbf, 0x54, 0xcd, 0xb9, 0xfe, 0x83, 0xc2, 0xb9, 0xba, 0x4a, 0xd8, 0xbe, /* Byte value: 0xa3 */ + 0x28, 0xe8, 0xed, 0x48, 0x48, 0x40, 0xbd, 0xe3, 0x18, 0xcc, 0x9c, 0xe3, 0x57, 0x02, 0xad, 0x24, /* Byte value: 0xa4 */ + 0xf1, 0x8d, 0x3a, 0xcf, 0xcf, 0x95, 0x1b, 0xba, 0xee, 0x0b, 0x2a, 0xba, 0x61, 0xf7, 0xaf, 0x86, /* Byte value: 0xa5 */ + 0xcb, 0x07, 0x26, 0xad, 0xad, 0x44, 0x73, 0xf1, 0xf8, 0xb0, 0xe5, 0xf1, 0xdf, 0xab, 0x62, 0xb7, /* Byte value: 0xa6 */ + 0x92, 0x25, 0xa6, 0x28, 0x28, 0x6d, 0x41, 0xa0, 0xcf, 0x31, 0xd6, 0xa0, 0xe4, 0x80, 0xcb, 0x14, /* Byte value: 0xa7 */ + 0x87, 0xe9, 0x20, 0xb0, 0xb0, 0x9b, 0xed, 0x85, 0x7d, 0xb1, 0x7c, 0x85, 0x7e, 0x7a, 0xbb, 0x58, /* Byte value: 0xa8 */ + 0xe7, 0x4a, 0x8f, 0x50, 0x50, 0xda, 0x82, 0x83, 0x5d, 0x62, 0x6f, 0x83, 0x0b, 0xc3, 0x55, 0x28, /* Byte value: 0xa9 */ + 0x3e, 0x2f, 0x58, 0xd7, 0xd7, 0x0f, 0x24, 0xda, 0xab, 0xa5, 0xd9, 0xda, 0x3d, 0x36, 0x57, 0x8a, /* Byte value: 0xaa */ + 0xfa, 0x0f, 0x81, 0x61, 0x61, 0x53, 0xb6, 0x47, 0x56, 0xde, 0xe9, 0x47, 0x54, 0xed, 0xd2, 0xd1, /* Byte value: 0xab */ + 0x0d, 0x94, 0xdd, 0xa0, 0xa0, 0x77, 0xc7, 0xc5, 0xba, 0xc4, 0xde, 0xc5, 0x16, 0x45, 0xaa, 0x50, /* Byte value: 0xac */ + 0x51, 0xab, 0x08, 0x2c, 0x2c, 0x56, 0xaa, 0xb0, 0x8e, 0xbd, 0x1f, 0xb0, 0xfe, 0xff, 0x5e, 0x16, /* Byte value: 0xad */ + 0xb6, 0xe1, 0x87, 0x7c, 0x7c, 0x8c, 0x28, 0x33, 0xd3, 0xdf, 0x70, 0x33, 0xf5, 0x3c, 0x0b, 0x3e, /* Byte value: 0xae */ + 0x7b, 0xf0, 0xc7, 0xdf, 0xdf, 0x79, 0x31, 0xfa, 0x29, 0x7e, 0x88, 0xfa, 0x09, 0xc8, 0xbe, 0x8e, /* Byte value: 0xaf */ + 0x74, 0xd7, 0x38, 0xc4, 0xc4, 0x61, 0xd0, 0x96, 0x2c, 0xb5, 0x5d, 0x96, 0xbf, 0xb8, 0x59, 0x62, /* Byte value: 0xb0 */ + 0x3f, 0x97, 0x49, 0x6b, 0x6b, 0xd9, 0x37, 0x6f, 0x15, 0x43, 0x3d, 0x6f, 0x6d, 0xcd, 0x90, 0xd4, /* Byte value: 0xb1 */ + 0x2f, 0x46, 0x9a, 0xfa, 0xfa, 0x27, 0xc4, 0x6e, 0xa4, 0x3b, 0x65, 0x6e, 0x24, 0xa6, 0xbd, 0x7d, /* Byte value: 0xb2 */ + 0x7d, 0xe6, 0xa1, 0xd1, 0xd1, 0xc8, 0x5b, 0xc2, 0x2b, 0x6f, 0x95, 0xc2, 0x2a, 0x97, 0x69, 0x89, /* Byte value: 0xb3 */ + 0xbb, 0x75, 0x5a, 0xdc, 0xdc, 0xfb, 0xef, 0xf6, 0x69, 0x1b, 0xae, 0xf6, 0xe3, 0x79, 0xa1, 0x6e, /* Byte value: 0xb4 */ + 0x49, 0xf3, 0x53, 0x14, 0x14, 0xd7, 0xc1, 0x50, 0x86, 0xf9, 0x6b, 0x50, 0x72, 0x40, 0x84, 0x0a, /* Byte value: 0xb5 */ + 0x2b, 0xe3, 0xde, 0x4f, 0x4f, 0xf9, 0x88, 0xff, 0x19, 0x25, 0x73, 0xff, 0xa7, 0xcc, 0x27, 0xc6, /* Byte value: 0xb6 */ + 0x99, 0xa7, 0x1d, 0x86, 0x86, 0xab, 0xec, 0x5d, 0x77, 0xe4, 0x15, 0x5d, 0xd1, 0x9a, 0xb6, 0x43, /* Byte value: 0xb7 */ + 0x02, 0xb3, 0x22, 0xbb, 0xbb, 0x6f, 0x26, 0xa9, 0xbf, 0x0f, 0x0b, 0xa9, 0xa0, 0x35, 0x4d, 0xbc, /* Byte value: 0xb8 */ + 0x37, 0x1e, 0xc1, 0xc2, 0xc2, 0xa6, 0xaf, 0x8e, 0xac, 0x7f, 0x11, 0x8e, 0xa8, 0x19, 0x67, 0x61, /* Byte value: 0xb9 */ + 0xb3, 0xfc, 0xd2, 0x75, 0x75, 0x84, 0x77, 0x17, 0xd0, 0x27, 0x82, 0x17, 0x26, 0xad, 0x56, 0xdb, /* Byte value: 0xba */ + 0x73, 0x79, 0x4f, 0x76, 0x76, 0x06, 0xa9, 0x1b, 0x90, 0x42, 0xa4, 0x1b, 0xcc, 0x1c, 0x49, 0x3b, /* Byte value: 0xbb */ + 0x64, 0x06, 0xeb, 0x55, 0x55, 0x9f, 0x23, 0x97, 0x9d, 0xcd, 0x05, 0x97, 0xf6, 0xd3, 0x74, 0xcb, /* Byte value: 0xbc */ + 0xdc, 0x78, 0x82, 0x8e, 0x8e, 0xdd, 0xf9, 0x7d, 0xf5, 0x3f, 0x44, 0x7d, 0xe5, 0x64, 0x5f, 0x47, /* Byte value: 0xbd */ + 0xd8, 0xdd, 0xc6, 0x3b, 0x3b, 0x03, 0xb5, 0xec, 0x48, 0x21, 0x52, 0xec, 0x66, 0x0e, 0xc5, 0xfc, /* Byte value: 0xbe */ + 0xc9, 0xb4, 0x04, 0x16, 0x16, 0x2b, 
0x55, 0x58, 0x47, 0xbf, 0xee, 0x58, 0x7f, 0x9e, 0x2f, 0x0b, /* Byte value: 0xbf */ + 0xab, 0xa4, 0x89, 0x4d, 0x4d, 0x05, 0x1c, 0xf7, 0xd8, 0x63, 0xf6, 0xf7, 0xaa, 0x12, 0x8c, 0xc7, /* Byte value: 0xc0 */ + 0x33, 0xbb, 0x85, 0x77, 0x77, 0x78, 0xe3, 0x1f, 0x11, 0x61, 0x07, 0x1f, 0x2b, 0x73, 0xfd, 0xda, /* Byte value: 0xc1 */ + 0x6b, 0x21, 0x14, 0x4e, 0x4e, 0x87, 0xc2, 0xfb, 0x98, 0x06, 0xd0, 0xfb, 0x40, 0xa3, 0x93, 0x27, /* Byte value: 0xc2 */ + 0xcc, 0xa9, 0x51, 0x1f, 0x1f, 0x23, 0x0a, 0x7c, 0x44, 0x47, 0x1c, 0x7c, 0xac, 0x0f, 0x72, 0xee, /* Byte value: 0xc3 */ + 0x90, 0x96, 0x84, 0x93, 0x93, 0x02, 0x67, 0x09, 0x70, 0x3e, 0xdd, 0x09, 0x44, 0xb5, 0x86, 0xa8, /* Byte value: 0xc4 */ + 0xd0, 0x54, 0x4e, 0x92, 0x92, 0x7c, 0x2d, 0x0d, 0xf1, 0x1d, 0x7e, 0x0d, 0xa3, 0xda, 0x32, 0x49, /* Byte value: 0xc5 */ + 0xac, 0x0a, 0xfe, 0xff, 0xff, 0x62, 0x65, 0x7a, 0x64, 0x94, 0x0f, 0x7a, 0xd9, 0xb6, 0x9c, 0x9e, /* Byte value: 0xc6 */ + 0xbf, 0xd0, 0x1e, 0x69, 0x69, 0x25, 0xa3, 0x67, 0xd4, 0x05, 0xb8, 0x67, 0x60, 0x13, 0x3b, 0xd5, /* Byte value: 0xc7 */ + 0xda, 0x6e, 0xe4, 0x80, 0x80, 0x6c, 0x93, 0x45, 0xf7, 0x2e, 0x59, 0x45, 0xc6, 0x3b, 0x88, 0x40, /* Byte value: 0xc8 */ + 0xa7, 0x88, 0x45, 0x51, 0x51, 0xa4, 0xc8, 0x87, 0xdc, 0x41, 0xcc, 0x87, 0xec, 0xac, 0xe1, 0xc9, /* Byte value: 0xc9 */ + 0x5e, 0x8c, 0xf7, 0x37, 0x37, 0x4e, 0x4b, 0xdc, 0x8b, 0x76, 0xca, 0xdc, 0x48, 0x8f, 0xb9, 0xfa, /* Byte value: 0xca */ + 0x98, 0x1f, 0x0c, 0x3a, 0x3a, 0x7d, 0xff, 0xe8, 0xc9, 0x02, 0xf1, 0xe8, 0x81, 0x61, 0x71, 0x1d, /* Byte value: 0xcb */ + 0x7a, 0x48, 0xd6, 0x63, 0x63, 0xaf, 0x22, 0x4f, 0x97, 0x98, 0x6c, 0x4f, 0x59, 0x33, 0x79, 0xd0, /* Byte value: 0xcc */ + 0x13, 0xda, 0xe0, 0x96, 0x96, 0x47, 0xc6, 0x1d, 0xb0, 0x91, 0xb7, 0x1d, 0xb9, 0xa5, 0xa7, 0x4b, /* Byte value: 0xcd */ + 0x6a, 0x99, 0x05, 0xf2, 0xf2, 0x51, 0xd1, 0x4e, 0x26, 0xe0, 0x34, 0x4e, 0x10, 0x58, 0x54, 0x79, /* Byte value: 0xce */ + 0x40, 0xc2, 0xca, 0x01, 0x01, 0x7e, 0x4a, 0x04, 0x81, 0x23, 0xa3, 0x04, 0xe7, 0x6f, 0xb4, 0xe1, /* Byte value: 0xcf */ + 0xa8, 0xaf, 0xba, 0x4a, 0x4a, 0xbc, 0x29, 0xeb, 0xd9, 0x8a, 0x19, 0xeb, 0x5a, 0xdc, 0x06, 0x25, /* Byte value: 0xd0 */ + 0x5b, 0x91, 0xa2, 0x3e, 0x3e, 0x46, 0x14, 0xf8, 0x88, 0x8e, 0x38, 0xf8, 0x9b, 0x1e, 0xe4, 0x1f, /* Byte value: 0xd1 */ + 0xeb, 0x66, 0x43, 0x4c, 0x4c, 0x7b, 0x56, 0xf3, 0x59, 0x40, 0x55, 0xf3, 0x4d, 0x7d, 0x38, 0x26, /* Byte value: 0xd2 */ + 0x66, 0xb5, 0xc9, 0xee, 0xee, 0xf0, 0x05, 0x3e, 0x22, 0xc2, 0x0e, 0x3e, 0x56, 0xe6, 0x39, 0x77, /* Byte value: 0xd3 */ + 0x56, 0x05, 0x7f, 0x9e, 0x9e, 0x31, 0xd3, 0x3d, 0x32, 0x4a, 0xe6, 0x3d, 0x8d, 0x5b, 0x4e, 0x4f, /* Byte value: 0xd4 */ + 0x81, 0xff, 0x46, 0xbe, 0xbe, 0x2a, 0x87, 0xbd, 0x7f, 0xa0, 0x61, 0xbd, 0x5d, 0x25, 0x6c, 0x5f, /* Byte value: 0xd5 */ + 0x77, 0xdc, 0x0b, 0xc3, 0xc3, 0xd8, 0xe5, 0x8a, 0x2d, 0x5c, 0xb2, 0x8a, 0x4f, 0x76, 0xd3, 0x80, /* Byte value: 0xd6 */ + 0x57, 0xbd, 0x6e, 0x22, 0x22, 0xe7, 0xc0, 0x88, 0x8c, 0xac, 0x02, 0x88, 0xdd, 0xa0, 0x89, 0x11, /* Byte value: 0xd7 */ + 0x4e, 0x5d, 0x24, 0xa6, 0xa6, 0xb0, 0xb8, 0xdd, 0x3a, 0x0e, 0x92, 0xdd, 0x01, 0xe4, 0x94, 0x53, /* Byte value: 0xd8 */ + 0x8d, 0xd3, 0x8a, 0xa2, 0xa2, 0x8b, 0x53, 0xcd, 0x7b, 0x82, 0x5b, 0xcd, 0x1b, 0x9b, 0x01, 0x51, /* Byte value: 0xd9 */ + 0xb0, 0xf7, 0xe1, 0x72, 0x72, 0x3d, 0x42, 0x0b, 0xd1, 0xce, 0x6d, 0x0b, 0xd6, 0x63, 0xdc, 0x39, /* Byte value: 0xda */ + 0xf3, 0x3e, 0x18, 0x74, 0x74, 0xfa, 0x3d, 0x13, 0x51, 0x04, 0x21, 0x13, 0xc1, 0xc2, 0xe2, 0x3a, /* Byte value: 0xdb */ + 0x94, 0x33, 0xc0, 0x26, 0x26, 0xdc, 0x2b, 0x98, 0xcd, 0x20, 0xcb, 0x98, 0xc7, 
0xdf, 0x1c, 0x13, /* Byte value: 0xdc */ + 0x20, 0x61, 0x65, 0xe1, 0xe1, 0x3f, 0x25, 0x02, 0xa1, 0xf0, 0xb0, 0x02, 0x92, 0xd6, 0x5a, 0x91, /* Byte value: 0xdd */ + 0x24, 0xc4, 0x21, 0x54, 0x54, 0xe1, 0x69, 0x93, 0x1c, 0xee, 0xa6, 0x93, 0x11, 0xbc, 0xc0, 0x2a, /* Byte value: 0xde */ + 0xd7, 0xfa, 0x39, 0x20, 0x20, 0x1b, 0x54, 0x80, 0x4d, 0xea, 0x87, 0x80, 0xd0, 0x7e, 0x22, 0x10, /* Byte value: 0xdf */ + 0x14, 0x74, 0x97, 0x24, 0x24, 0x20, 0xbf, 0x90, 0x0c, 0x66, 0x4e, 0x90, 0xca, 0x01, 0xb7, 0x12, /* Byte value: 0xe0 */ + 0x9e, 0x09, 0x6a, 0x34, 0x34, 0xcc, 0x95, 0xd0, 0xcb, 0x13, 0xec, 0xd0, 0xa2, 0x3e, 0xa6, 0x1a, /* Byte value: 0xe1 */ + 0x05, 0x1d, 0x55, 0x09, 0x09, 0x08, 0x5f, 0x24, 0x03, 0xf8, 0xf2, 0x24, 0xd3, 0x91, 0x5d, 0xe5, /* Byte value: 0xe2 */ + 0xc6, 0x93, 0xfb, 0x0d, 0x0d, 0x33, 0xb4, 0x34, 0x42, 0x74, 0x3b, 0x34, 0xc9, 0xee, 0xc8, 0xe7, /* Byte value: 0xe3 */ + 0xed, 0x70, 0x25, 0x42, 0x42, 0xca, 0x3c, 0xcb, 0x5b, 0x51, 0x48, 0xcb, 0x6e, 0x22, 0xef, 0x21, /* Byte value: 0xe4 */ + 0xc8, 0x0c, 0x15, 0xaa, 0xaa, 0xfd, 0x46, 0xed, 0xf9, 0x59, 0x0a, 0xed, 0x2f, 0x65, 0xe8, 0x55, /* Byte value: 0xe5 */ + 0x9b, 0x14, 0x3f, 0x3d, 0x3d, 0xc4, 0xca, 0xf4, 0xc8, 0xeb, 0x1e, 0xf4, 0x71, 0xaf, 0xfb, 0xff, /* Byte value: 0xe6 */ + 0xbd, 0x63, 0x3c, 0xd2, 0xd2, 0x4a, 0x85, 0xce, 0x6b, 0x0a, 0xb3, 0xce, 0xc0, 0x26, 0x76, 0x69, /* Byte value: 0xe7 */ + 0x84, 0xe2, 0x13, 0xb7, 0xb7, 0x22, 0xd8, 0x99, 0x7c, 0x58, 0x93, 0x99, 0x8e, 0xb4, 0x31, 0xba, /* Byte value: 0xe8 */ + 0xc3, 0x8e, 0xae, 0x04, 0x04, 0x3b, 0xeb, 0x10, 0x41, 0x8c, 0xc9, 0x10, 0x1a, 0x7f, 0x95, 0x02, /* Byte value: 0xe9 */ + 0xca, 0xbf, 0x37, 0x11, 0x11, 0x92, 0x60, 0x44, 0x46, 0x56, 0x01, 0x44, 0x8f, 0x50, 0xa5, 0xe9, /* Byte value: 0xea */ + 0x8a, 0x7d, 0xfd, 0x10, 0x10, 0xec, 0x2a, 0x40, 0xc7, 0x75, 0xa2, 0x40, 0x68, 0x3f, 0x11, 0x08, /* Byte value: 0xeb */ + 0x34, 0x15, 0xf2, 0xc5, 0xc5, 0x1f, 0x9a, 0x92, 0xad, 0x96, 0xfe, 0x92, 0x58, 0xd7, 0xed, 0x83, /* Byte value: 0xec */ + 0x26, 0x77, 0x03, 0xef, 0xef, 0x8e, 0x4f, 0x3a, 0xa3, 0xe1, 0xad, 0x3a, 0xb1, 0x89, 0x8d, 0x96, /* Byte value: 0xed */ + 0x10, 0xd1, 0xd3, 0x91, 0x91, 0xfe, 0xf3, 0x01, 0xb1, 0x78, 0x58, 0x01, 0x49, 0x6b, 0x2d, 0xa9, /* Byte value: 0xee */ + 0x25, 0x7c, 0x30, 0xe8, 0xe8, 0x37, 0x7a, 0x26, 0xa2, 0x08, 0x42, 0x26, 0x41, 0x47, 0x07, 0x74, /* Byte value: 0xef */ + 0xe3, 0xef, 0xcb, 0xe5, 0xe5, 0x04, 0xce, 0x12, 0xe0, 0x7c, 0x79, 0x12, 0x88, 0xa9, 0xcf, 0x93, /* Byte value: 0xf0 */ + 0x63, 0xa8, 0x9c, 0xe7, 0xe7, 0xf8, 0x5a, 0x1a, 0x21, 0x3a, 0xfc, 0x1a, 0x85, 0x77, 0x64, 0x92, /* Byte value: 0xf1 */ + 0x0a, 0x3a, 0xaa, 0x12, 0x12, 0x10, 0xbe, 0x48, 0x06, 0x33, 0x27, 0x48, 0x65, 0xe1, 0xba, 0x09, /* Byte value: 0xf2 */ + 0x4f, 0xe5, 0x35, 0x1a, 0x1a, 0x66, 0xab, 0x68, 0x84, 0xe8, 0x76, 0x68, 0x51, 0x1f, 0x53, 0x0d, /* Byte value: 0xf3 */ + 0x8b, 0xc5, 0xec, 0xac, 0xac, 0x3a, 0x39, 0xf5, 0x79, 0x93, 0x46, 0xf5, 0x38, 0xc4, 0xd6, 0x56, /* Byte value: 0xf4 */ + 0xc7, 0x2b, 0xea, 0xb1, 0xb1, 0xe5, 0xa7, 0x81, 0xfc, 0x92, 0xdf, 0x81, 0x99, 0x15, 0x0f, 0xb9, /* Byte value: 0xf5 */ + 0x67, 0x0d, 0xd8, 0x52, 0x52, 0x26, 0x16, 0x8b, 0x9c, 0x24, 0xea, 0x8b, 0x06, 0x1d, 0xfe, 0x29, /* Byte value: 0xf6 */ + 0x2d, 0xf5, 0xb8, 0x41, 0x41, 0x48, 0xe2, 0xc7, 0x1b, 0x34, 0x6e, 0xc7, 0x84, 0x93, 0xf0, 0xc1, /* Byte value: 0xf7 */ + 0xfe, 0xaa, 0xc5, 0xd4, 0xd4, 0x8d, 0xfa, 0xd6, 0xeb, 0xc0, 0xff, 0xd6, 0xd7, 0x87, 0x48, 0x6a, /* Byte value: 0xf8 */ + 0x45, 0xdf, 0x9f, 0x08, 0x08, 0x76, 0x15, 0x20, 0x82, 0xdb, 0x51, 0x20, 0x34, 0xfe, 0xe9, 0x04, /* Byte value: 0xf9 */ + 
0xa0, 0x26, 0x32, 0xe3, 0xe3, 0xc3, 0xb1, 0x0a, 0x60, 0xb6, 0x35, 0x0a, 0x9f, 0x08, 0xf1, 0x90, /* Byte value: 0xfa */ + 0x80, 0x47, 0x57, 0x02, 0x02, 0xfc, 0x94, 0x08, 0xc1, 0x46, 0x85, 0x08, 0x0d, 0xde, 0xab, 0x01, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0x4a, 0xf8, 0x60, 0x13, 0x13, 0x6e, 0xf4, 0x4c, 0x87, 0x10, 0x84, 0x4c, 0x82, 0x8e, 0x0e, 0xe8, /* Byte value: 0xfd */ + 0xee, 0x7b, 0x16, 0x45, 0x45, 0x73, 0x09, 0xd7, 0x5a, 0xb8, 0xa7, 0xd7, 0x9e, 0xec, 0x65, 0xc3, /* Byte value: 0xfe */ + 0x4c, 0xee, 0x06, 0x1d, 0x1d, 0xdf, 0x9e, 0x74, 0x85, 0x01, 0x99, 0x74, 0xa1, 0xd1, 0xd9, 0xef, /* Byte value: 0xff */ + + /* Matrix row: 5 */ + 0xd8, 0xca, 0x20, 0x18, 0xa6, 0x5b, 0x9c, 0xaf, 0x78, 0xd9, 0x1d, 0x80, 0x8c, 0x85, 0x3b, 0x8c, /* Byte value: 0x00 */ + 0x69, 0x10, 0x51, 0x4c, 0xc5, 0x06, 0x68, 0x18, 0xbf, 0x31, 0xb7, 0x87, 0xa1, 0x7c, 0xf5, 0xa1, /* Byte value: 0x01 */ + 0xc0, 0xd0, 0x1b, 0x9a, 0xb0, 0x2e, 0xcd, 0xb8, 0xb7, 0xbe, 0xdf, 0x6c, 0x21, 0x29, 0x03, 0x21, /* Byte value: 0x02 */ + 0xba, 0xff, 0xe6, 0xab, 0xa2, 0x89, 0xd9, 0x61, 0x42, 0xa1, 0xdc, 0x1e, 0xaa, 0x69, 0x60, 0xaa, /* Byte value: 0x03 */ + 0x16, 0x4a, 0xed, 0x3d, 0x41, 0x6b, 0x5a, 0x6f, 0xc9, 0x92, 0xec, 0x32, 0xcd, 0xe3, 0x9f, 0xcd, /* Byte value: 0x04 */ + 0x87, 0x09, 0x79, 0x52, 0xab, 0x3b, 0xd3, 0xec, 0xd9, 0x4f, 0x3c, 0x27, 0x0e, 0x49, 0xb0, 0x0e, /* Byte value: 0x05 */ + 0x58, 0xeb, 0x32, 0xf4, 0xc7, 0x6f, 0xab, 0x7f, 0xa2, 0x0d, 0x36, 0xc8, 0xb2, 0x0a, 0x39, 0xb2, /* Byte value: 0x06 */ + 0x59, 0x24, 0x27, 0x8b, 0xe9, 0xec, 0xca, 0x36, 0xe2, 0xff, 0xf0, 0x9c, 0x38, 0xe7, 0x85, 0x38, /* Byte value: 0x07 */ + 0xc4, 0x6a, 0x4f, 0xa5, 0x08, 0x67, 0x8a, 0x5f, 0x74, 0xf0, 0x41, 0xff, 0x4c, 0x1b, 0xb6, 0x4c, /* Byte value: 0x08 */ + 0x6a, 0x82, 0x6e, 0xcd, 0xb7, 0x40, 0xcb, 0xc3, 0x7f, 0xe4, 0x3e, 0x7b, 0xfc, 0x88, 0xf2, 0xfc, /* Byte value: 0x09 */ + 0x05, 0x75, 0x41, 0x40, 0x96, 0xca, 0x26, 0xae, 0x83, 0xbc, 0x58, 0xc7, 0xe7, 0xdf, 0x09, 0xe7, /* Byte value: 0x0a */ + 0x9a, 0x66, 0x03, 0x90, 0x2b, 0x84, 0xa4, 0x55, 0x95, 0x94, 0xa6, 0x0c, 0x44, 0x3a, 0x81, 0x44, /* Byte value: 0x0b */ + 0x06, 0xe7, 0x7e, 0xc1, 0xe4, 0x8c, 0x85, 0x75, 0x43, 0x69, 0xd1, 0x3b, 0xba, 0x2b, 0x0e, 0xba, /* Byte value: 0x0c */ + 0xa6, 0x5f, 0x89, 0x16, 0x0c, 0xb5, 0xcf, 0x91, 0x4e, 0x88, 0x80, 0x61, 0x6a, 0xf7, 0xed, 0x6a, /* Byte value: 0x0d */ + 0x0d, 0xc2, 0xe9, 0x3e, 0x25, 0x58, 0xa8, 0xa3, 0xc6, 0x20, 0xa7, 0x22, 0x3d, 0xbb, 0xa0, 0x3d, /* Byte value: 0x0e */ + 0xd1, 0xb2, 0x9d, 0x19, 0x3b, 0x4a, 0x73, 0xeb, 0x7d, 0xb7, 0x24, 0x31, 0xdc, 0x0c, 0x2e, 0xdc, /* Byte value: 0x0f */ + 0x43, 0x63, 0x36, 0xf7, 0xa3, 0x5c, 0x59, 0xb3, 0xad, 0xbf, 0x7d, 0xd8, 0x42, 0x52, 0x06, 0x42, /* Byte value: 0x10 */ + 0x85, 0x54, 0x53, 0xac, 0xf7, 0xfe, 0x11, 0x7e, 0x59, 0x68, 0x73, 0x8f, 0xd9, 0x50, 0x0b, 0xd9, /* Byte value: 0x11 */ + 0x41, 0x3e, 0x1c, 0x09, 0xff, 0x99, 0x9b, 0x21, 0x2d, 0x98, 0x32, 0x70, 0x95, 0x4b, 0xbd, 0x95, /* Byte value: 0x12 */ + 0xd2, 0x20, 0xa2, 0x98, 0x49, 0x0c, 0xd0, 0x30, 0xbd, 0x62, 0xad, 0xcd, 0x81, 0xf8, 0x29, 0x81, /* Byte value: 0x13 */ + 0x2c, 0x94, 0x19, 0x7a, 0x82, 0xd6, 0xb4, 0xde, 0x51, 0xe7, 0x1b, 0x64, 0x59, 0x05, 0xfd, 0x59, /* Byte value: 0x14 */ + 0xcd, 0x12, 0xf2, 0xa4, 0x95, 0x76, 0x65, 0x1b, 0x71, 0x9e, 0x78, 0x4e, 0x1c, 0x92, 0xa3, 0x1c, /* Byte value: 0x15 */ + 0x0b, 0x25, 0x97, 0xff, 0xc1, 0xd4, 0x2d, 0xd6, 0x85, 0x49, 0x76, 0x19, 0x87, 0x90, 0xae, 0x87, /* Byte value: 0x16 */ + 0xa2, 0xe5, 0xdd, 
0x29, 0xb4, 0xfc, 0x88, 0x76, 0x8d, 0xc6, 0x1e, 0xf2, 0x07, 0xc5, 0x58, 0x07, /* Byte value: 0x17 */ + 0x38, 0x83, 0xde, 0xb9, 0x9f, 0x78, 0x2c, 0x23, 0x18, 0x52, 0xb8, 0xfe, 0x43, 0xff, 0xd9, 0x43, /* Byte value: 0x18 */ + 0xf7, 0xcc, 0x06, 0xe3, 0x56, 0xcb, 0x8b, 0xaa, 0xe9, 0xeb, 0x8f, 0x18, 0x88, 0x74, 0xc1, 0x88, /* Byte value: 0x19 */ + 0x4b, 0xd4, 0x9e, 0x89, 0x10, 0xce, 0xd7, 0xbe, 0xe8, 0x23, 0x82, 0x3d, 0x98, 0x36, 0xaf, 0x98, /* Byte value: 0x1a */ + 0xe6, 0xae, 0x80, 0x60, 0xdd, 0xaf, 0x35, 0xf9, 0x23, 0xe2, 0x74, 0x45, 0x75, 0x51, 0xec, 0x75, /* Byte value: 0x1b */ + 0xe7, 0x61, 0x95, 0x1f, 0xf3, 0x2c, 0x54, 0xb0, 0x63, 0x10, 0xb2, 0x11, 0xff, 0xbc, 0x50, 0xff, /* Byte value: 0x1c */ + 0x90, 0x8c, 0x81, 0x10, 0xc4, 0xd3, 0xe8, 0xca, 0x50, 0x2f, 0x16, 0x41, 0x49, 0x47, 0x93, 0x49, /* Byte value: 0x1d */ + 0x1a, 0x47, 0x11, 0x7c, 0x4a, 0xb0, 0x93, 0x85, 0x4f, 0x40, 0x8d, 0x44, 0x7a, 0xb5, 0x83, 0x7a, /* Byte value: 0x1e */ + 0x53, 0xce, 0xa5, 0x0b, 0x06, 0xbb, 0x86, 0xa9, 0x27, 0x44, 0x40, 0xd1, 0x35, 0x9a, 0x97, 0x35, /* Byte value: 0x1f */ + 0xa3, 0x2a, 0xc8, 0x56, 0x9a, 0x7f, 0xe9, 0x3f, 0xcd, 0x34, 0xd8, 0xa6, 0x8d, 0x28, 0xe4, 0x8d, /* Byte value: 0x20 */ + 0xf1, 0x2b, 0x78, 0x22, 0xb2, 0x47, 0x0e, 0xdf, 0xaa, 0x82, 0x5e, 0x23, 0x32, 0x5f, 0xcf, 0x32, /* Byte value: 0x21 */ + 0xfd, 0x26, 0x84, 0x63, 0xb9, 0x9c, 0xc7, 0x35, 0x2c, 0x50, 0x3f, 0x55, 0x85, 0x09, 0xd3, 0x85, /* Byte value: 0x22 */ + 0x51, 0x93, 0x8f, 0xf5, 0x5a, 0x7e, 0x44, 0x3b, 0xa7, 0x63, 0x0f, 0x79, 0xe2, 0x83, 0x2c, 0xe2, /* Byte value: 0x23 */ + 0x29, 0xe1, 0x58, 0x3a, 0x14, 0x1c, 0x92, 0x70, 0xd2, 0x5b, 0x43, 0xa3, 0xbe, 0xda, 0xf4, 0xbe, /* Byte value: 0x24 */ + 0x3b, 0x11, 0xe1, 0x38, 0xed, 0x3e, 0x8f, 0xf8, 0xd8, 0x87, 0x31, 0x02, 0x1e, 0x0b, 0xde, 0x1e, /* Byte value: 0x25 */ + 0x82, 0x7c, 0x38, 0x12, 0x3d, 0xf1, 0xf5, 0x42, 0x5a, 0xf3, 0x64, 0xe0, 0xe9, 0x96, 0xb9, 0xe9, /* Byte value: 0x26 */ + 0x67, 0x40, 0x87, 0xf3, 0x92, 0x18, 0x63, 0x60, 0xb9, 0xc4, 0x99, 0x59, 0xc1, 0x33, 0x52, 0xc1, /* Byte value: 0x27 */ + 0x8b, 0x04, 0x85, 0x13, 0xa0, 0xe0, 0x1a, 0x06, 0x5f, 0x9d, 0x5d, 0x51, 0xb9, 0x1f, 0xac, 0xb9, /* Byte value: 0x28 */ + 0x48, 0x46, 0xa1, 0x08, 0x62, 0x88, 0x74, 0x65, 0x28, 0xf6, 0x0b, 0xc1, 0xc5, 0xc2, 0xa8, 0xc5, /* Byte value: 0x29 */ + 0x2a, 0x73, 0x67, 0xbb, 0x66, 0x5a, 0x31, 0xab, 0x12, 0x8e, 0xca, 0x5f, 0xe3, 0x2e, 0xf3, 0xe3, /* Byte value: 0x2a */ + 0xef, 0xd6, 0x3d, 0x61, 0x40, 0xbe, 0xda, 0xbd, 0x26, 0x8c, 0x4d, 0xf4, 0x25, 0xd8, 0xf9, 0x25, /* Byte value: 0x2b */ + 0x70, 0xc5, 0x7f, 0xb1, 0xfd, 0xf0, 0x58, 0x46, 0x30, 0xa4, 0xb3, 0x3f, 0x86, 0x3d, 0x71, 0x86, /* Byte value: 0x2c */ + 0x2d, 0x5b, 0x0c, 0x05, 0xac, 0x55, 0xd5, 0x97, 0x11, 0x15, 0xdd, 0x30, 0xd3, 0xe8, 0x41, 0xd3, /* Byte value: 0x2d */ + 0x31, 0xfb, 0x63, 0xb8, 0x02, 0x69, 0xc3, 0x67, 0x1d, 0x3c, 0x81, 0x4f, 0x13, 0x76, 0xcc, 0x13, /* Byte value: 0x2e */ + 0xe2, 0x14, 0xd4, 0x5f, 0x65, 0xe6, 0x72, 0x1e, 0xe0, 0xac, 0xea, 0xd6, 0x18, 0x63, 0x59, 0x18, /* Byte value: 0x2f */ + 0x86, 0xc6, 0x6c, 0x2d, 0x85, 0xb8, 0xb2, 0xa5, 0x99, 0xbd, 0xfa, 0x73, 0x84, 0xa4, 0x0c, 0x84, /* Byte value: 0x30 */ + 0xc9, 0xa8, 0xa6, 0x9b, 0x2d, 0x3f, 0x22, 0xfc, 0xb2, 0xd0, 0xe6, 0xdd, 0x71, 0xa0, 0x16, 0x71, /* Byte value: 0x31 */ + 0xb0, 0x15, 0x64, 0x2b, 0x4d, 0xde, 0x95, 0xfe, 0x87, 0x1a, 0x6c, 0x53, 0xa7, 0x14, 0x72, 0xa7, /* Byte value: 0x32 */ + 0x80, 0x21, 0x12, 0xec, 0x61, 0x34, 0x37, 0xd0, 0xda, 0xd4, 0x2b, 0x48, 0x3e, 0x8f, 0x02, 0x3e, /* Byte value: 0x33 */ + 0xf5, 0x91, 0x2c, 0x1d, 0x0a, 0x0e, 0x49, 0x38, 0x69, 0xcc, 
0xc0, 0xb0, 0x5f, 0x6d, 0x7a, 0x5f, /* Byte value: 0x34 */ + 0xfc, 0xe9, 0x91, 0x1c, 0x97, 0x1f, 0xa6, 0x7c, 0x6c, 0xa2, 0xf9, 0x01, 0x0f, 0xe4, 0x6f, 0x0f, /* Byte value: 0x35 */ + 0x60, 0x68, 0xec, 0x4d, 0x58, 0x17, 0x87, 0x5c, 0xba, 0x5f, 0x8e, 0x36, 0xf1, 0xf5, 0xe0, 0xf1, /* Byte value: 0x36 */ + 0x44, 0x4b, 0x5d, 0x49, 0x69, 0x53, 0xbd, 0x8f, 0xae, 0x24, 0x6a, 0xb7, 0x72, 0x94, 0xb4, 0x72, /* Byte value: 0x37 */ + 0x0c, 0x0d, 0xfc, 0x41, 0x0b, 0xdb, 0xc9, 0xea, 0x86, 0xd2, 0x61, 0x76, 0xb7, 0x56, 0x1c, 0xb7, /* Byte value: 0x38 */ + 0x8f, 0xbe, 0xd1, 0x2c, 0x18, 0xa9, 0x5d, 0xe1, 0x9c, 0xd3, 0xc3, 0xc2, 0xd4, 0x2d, 0x19, 0xd4, /* Byte value: 0x39 */ + 0xa4, 0x02, 0xa3, 0xe8, 0x50, 0x70, 0x0d, 0x03, 0xce, 0xaf, 0xcf, 0xc9, 0xbd, 0xee, 0x56, 0xbd, /* Byte value: 0x3a */ + 0x3d, 0xf6, 0x9f, 0xf9, 0x09, 0xb2, 0x0a, 0x8d, 0x9b, 0xee, 0xe0, 0x39, 0xa4, 0x20, 0xd0, 0xa4, /* Byte value: 0x3b */ + 0x62, 0x35, 0xc6, 0xb3, 0x04, 0xd2, 0x45, 0xce, 0x3a, 0x78, 0xc1, 0x9e, 0x26, 0xec, 0x5b, 0x26, /* Byte value: 0x3c */ + 0x35, 0x41, 0x37, 0x87, 0xba, 0x20, 0x84, 0x80, 0xde, 0x72, 0x1f, 0xdc, 0x7e, 0x44, 0x79, 0x7e, /* Byte value: 0x3d */ + 0xe0, 0x49, 0xfe, 0xa1, 0x39, 0x23, 0xb0, 0x8c, 0x60, 0x8b, 0xa5, 0x7e, 0xcf, 0x7a, 0xe2, 0xcf, /* Byte value: 0x3e */ + 0x68, 0xdf, 0x44, 0x33, 0xeb, 0x85, 0x09, 0x51, 0xff, 0xc3, 0x71, 0xd3, 0x2b, 0x91, 0x49, 0x2b, /* Byte value: 0x3f */ + 0xdc, 0x70, 0x74, 0x27, 0x1e, 0x12, 0xdb, 0x48, 0xbb, 0x97, 0x83, 0x13, 0xe1, 0xb7, 0x8e, 0xe1, /* Byte value: 0x40 */ + 0x40, 0xf1, 0x09, 0x76, 0xd1, 0x1a, 0xfa, 0x68, 0x6d, 0x6a, 0xf4, 0x24, 0x1f, 0xa6, 0x01, 0x1f, /* Byte value: 0x41 */ + 0xa0, 0xb8, 0xf7, 0xd7, 0xe8, 0x39, 0x4a, 0xe4, 0x0d, 0xe1, 0x51, 0x5a, 0xd0, 0xdc, 0xe3, 0xd0, /* Byte value: 0x42 */ + 0xa7, 0x90, 0x9c, 0x69, 0x22, 0x36, 0xae, 0xd8, 0x0e, 0x7a, 0x46, 0x35, 0xe0, 0x1a, 0x51, 0xe0, /* Byte value: 0x43 */ + 0x9f, 0x13, 0x42, 0xd0, 0xbd, 0x4e, 0x82, 0xfb, 0x16, 0x28, 0xfe, 0xcb, 0xa3, 0xe5, 0x88, 0xa3, /* Byte value: 0x44 */ + 0x2b, 0xbc, 0x72, 0xc4, 0x48, 0xd9, 0x50, 0xe2, 0x52, 0x7c, 0x0c, 0x0b, 0x69, 0xc3, 0x4f, 0x69, /* Byte value: 0x45 */ + 0xae, 0xe8, 0x21, 0x68, 0xbf, 0x27, 0x41, 0x9c, 0x0b, 0x14, 0x7f, 0x84, 0xb0, 0x93, 0x44, 0xb0, /* Byte value: 0x46 */ + 0x10, 0xad, 0x93, 0xfc, 0xa5, 0xe7, 0xdf, 0x1a, 0x8a, 0xfb, 0x3d, 0x09, 0x77, 0xc8, 0x91, 0x77, /* Byte value: 0x47 */ + 0xfe, 0xb4, 0xbb, 0xe2, 0xcb, 0xda, 0x64, 0xee, 0xec, 0x85, 0xb6, 0xa9, 0xd8, 0xfd, 0xd4, 0xd8, /* Byte value: 0x48 */ + 0xa1, 0x77, 0xe2, 0xa8, 0xc6, 0xba, 0x2b, 0xad, 0x4d, 0x13, 0x97, 0x0e, 0x5a, 0x31, 0x5f, 0x5a, /* Byte value: 0x49 */ + 0xc7, 0xf8, 0x70, 0x24, 0x7a, 0x21, 0x29, 0x84, 0xb4, 0x25, 0xc8, 0x03, 0x11, 0xef, 0xb1, 0x11, /* Byte value: 0x4a */ + 0x4d, 0x33, 0xe0, 0x48, 0xf4, 0x42, 0x52, 0xcb, 0xab, 0x4a, 0x53, 0x06, 0x22, 0x1d, 0xa1, 0x22, /* Byte value: 0x4b */ + 0xaa, 0x52, 0x75, 0x57, 0x07, 0x6e, 0x06, 0x7b, 0xc8, 0x5a, 0xe1, 0x17, 0xdd, 0xa1, 0xf1, 0xdd, /* Byte value: 0x4c */ + 0xd4, 0xc7, 0xdc, 0x59, 0xad, 0x80, 0x55, 0x45, 0xfe, 0x0b, 0x7c, 0xf6, 0x3b, 0xd3, 0x27, 0x3b, /* Byte value: 0x4d */ + 0x42, 0xac, 0x23, 0x88, 0x8d, 0xdf, 0x38, 0xfa, 0xed, 0x4d, 0xbb, 0x8c, 0xc8, 0xbf, 0xba, 0xc8, /* Byte value: 0x4e */ + 0x2e, 0xc9, 0x33, 0x84, 0xde, 0x13, 0x76, 0x4c, 0xd1, 0xc0, 0x54, 0xcc, 0x8e, 0x1c, 0x46, 0x8e, /* Byte value: 0x4f */ + 0x49, 0x89, 0xb4, 0x77, 0x4c, 0x0b, 0x15, 0x2c, 0x68, 0x04, 0xcd, 0x95, 0x4f, 0x2f, 0x14, 0x4f, /* Byte value: 0x50 */ + 0xb2, 0x48, 0x4e, 0xd5, 0x11, 0x1b, 0x57, 0x6c, 0x07, 0x3d, 0x23, 0xfb, 0x70, 0x0d, 0xc9, 0x70, /* Byte 
value: 0x51 */ + 0x77, 0xed, 0x14, 0x0f, 0x37, 0xff, 0xbc, 0x7a, 0x33, 0x3f, 0xa4, 0x50, 0xb6, 0xfb, 0xc3, 0xb6, /* Byte value: 0x52 */ + 0x39, 0x4c, 0xcb, 0xc6, 0xb1, 0xfb, 0x4d, 0x6a, 0x58, 0xa0, 0x7e, 0xaa, 0xc9, 0x12, 0x65, 0xc9, /* Byte value: 0x53 */ + 0xad, 0x7a, 0x1e, 0xe9, 0xcd, 0x61, 0xe2, 0x47, 0xcb, 0xc1, 0xf6, 0x78, 0xed, 0x67, 0x43, 0xed, /* Byte value: 0x54 */ + 0x20, 0x99, 0xe5, 0x3b, 0x89, 0x0d, 0x7d, 0x34, 0xd7, 0x35, 0x7a, 0x12, 0xee, 0x53, 0xe1, 0xee, /* Byte value: 0x55 */ + 0xcf, 0x4f, 0xd8, 0x5a, 0xc9, 0xb3, 0xa7, 0x89, 0xf1, 0xb9, 0x37, 0xe6, 0xcb, 0x8b, 0x18, 0xcb, /* Byte value: 0x56 */ + 0x64, 0xd2, 0xb8, 0x72, 0xe0, 0x5e, 0xc0, 0xbb, 0x79, 0x11, 0x10, 0xa5, 0x9c, 0xc7, 0x55, 0x9c, /* Byte value: 0x57 */ + 0x9b, 0xa9, 0x16, 0xef, 0x05, 0x07, 0xc5, 0x1c, 0xd5, 0x66, 0x60, 0x58, 0xce, 0xd7, 0x3d, 0xce, /* Byte value: 0x58 */ + 0xde, 0x2d, 0x5e, 0xd9, 0x42, 0xd7, 0x19, 0xda, 0x3b, 0xb0, 0xcc, 0xbb, 0x36, 0xae, 0x35, 0x36, /* Byte value: 0x59 */ + 0x7f, 0x5a, 0xbc, 0x71, 0x84, 0x6d, 0x32, 0x77, 0x76, 0xa3, 0x5b, 0xb5, 0x6c, 0x9f, 0x6a, 0x6c, /* Byte value: 0x5a */ + 0xa8, 0x0f, 0x5f, 0xa9, 0x5b, 0xab, 0xc4, 0xe9, 0x48, 0x7d, 0xae, 0xbf, 0x0a, 0xb8, 0x4a, 0x0a, /* Byte value: 0x5b */ + 0xb6, 0xf2, 0x1a, 0xea, 0xa9, 0x52, 0x10, 0x8b, 0xc4, 0x73, 0xbd, 0x68, 0x1d, 0x3f, 0x7c, 0x1d, /* Byte value: 0x5c */ + 0x5c, 0x51, 0x66, 0xcb, 0x7f, 0x26, 0xec, 0x98, 0x61, 0x43, 0xa8, 0x5b, 0xdf, 0x38, 0x8c, 0xdf, /* Byte value: 0x5d */ + 0xa5, 0xcd, 0xb6, 0x97, 0x7e, 0xf3, 0x6c, 0x4a, 0x8e, 0x5d, 0x09, 0x9d, 0x37, 0x03, 0xea, 0x37, /* Byte value: 0x5e */ + 0x6b, 0x4d, 0x7b, 0xb2, 0x99, 0xc3, 0xaa, 0x8a, 0x3f, 0x16, 0xf8, 0x2f, 0x76, 0x65, 0x4e, 0x76, /* Byte value: 0x5f */ + 0xd3, 0xef, 0xb7, 0xe7, 0x67, 0x8f, 0xb1, 0x79, 0xfd, 0x90, 0x6b, 0x99, 0x0b, 0x15, 0x95, 0x0b, /* Byte value: 0x60 */ + 0xf0, 0xe4, 0x6d, 0x5d, 0x9c, 0xc4, 0x6f, 0x96, 0xea, 0x70, 0x98, 0x77, 0xb8, 0xb2, 0x73, 0xb8, /* Byte value: 0x61 */ + 0x5d, 0x9e, 0x73, 0xb4, 0x51, 0xa5, 0x8d, 0xd1, 0x21, 0xb1, 0x6e, 0x0f, 0x55, 0xd5, 0x30, 0x55, /* Byte value: 0x62 */ + 0xc8, 0x67, 0xb3, 0xe4, 0x03, 0xbc, 0x43, 0xb5, 0xf2, 0x22, 0x20, 0x89, 0xfb, 0x4d, 0xaa, 0xfb, /* Byte value: 0x63 */ + 0xc5, 0xa5, 0x5a, 0xda, 0x26, 0xe4, 0xeb, 0x16, 0x34, 0x02, 0x87, 0xab, 0xc6, 0xf6, 0x0a, 0xc6, /* Byte value: 0x64 */ + 0xbd, 0xd7, 0x8d, 0x15, 0x68, 0x86, 0x3d, 0x5d, 0x41, 0x3a, 0xcb, 0x71, 0x9a, 0xaf, 0xd2, 0x9a, /* Byte value: 0x65 */ + 0xc3, 0x42, 0x24, 0x1b, 0xc2, 0x68, 0x6e, 0x63, 0x77, 0x6b, 0x56, 0x90, 0x7c, 0xdd, 0x04, 0x7c, /* Byte value: 0x66 */ + 0xe4, 0xf3, 0xaa, 0x9e, 0x81, 0x6a, 0xf7, 0x6b, 0xa3, 0xc5, 0x3b, 0xed, 0xa2, 0x48, 0x57, 0xa2, /* Byte value: 0x67 */ + 0xaf, 0x27, 0x34, 0x17, 0x91, 0xa4, 0x20, 0xd5, 0x4b, 0xe6, 0xb9, 0xd0, 0x3a, 0x7e, 0xf8, 0x3a, /* Byte value: 0x68 */ + 0xb8, 0xa2, 0xcc, 0x55, 0xfe, 0x4c, 0x1b, 0xf3, 0xc2, 0x86, 0x93, 0xb6, 0x7d, 0x70, 0xdb, 0x7d, /* Byte value: 0x69 */ + 0xcb, 0xf5, 0x8c, 0x65, 0x71, 0xfa, 0xe0, 0x6e, 0x32, 0xf7, 0xa9, 0x75, 0xa6, 0xb9, 0xad, 0xa6, /* Byte value: 0x6a */ + 0xc1, 0x1f, 0x0e, 0xe5, 0x9e, 0xad, 0xac, 0xf1, 0xf7, 0x4c, 0x19, 0x38, 0xab, 0xc4, 0xbf, 0xab, /* Byte value: 0x6b */ + 0xb5, 0x60, 0x25, 0x6b, 0xdb, 0x14, 0xb3, 0x50, 0x04, 0xa6, 0x34, 0x94, 0x40, 0xcb, 0x7b, 0x40, /* Byte value: 0x6c */ + 0x6f, 0xf7, 0x2f, 0x8d, 0x21, 0x8a, 0xed, 0x6d, 0xfc, 0x58, 0x66, 0xbc, 0x1b, 0x57, 0xfb, 0x1b, /* Byte value: 0x6d */ + 0x4c, 0xfc, 0xf5, 0x37, 0xda, 0xc1, 0x33, 0x82, 0xeb, 0xb8, 0x95, 0x52, 0xa8, 0xf0, 0x1d, 0xa8, /* Byte value: 0x6e */ + 0x97, 0xa4, 0xea, 0xae, 
0x0e, 0xdc, 0x0c, 0xf6, 0x53, 0xb4, 0x01, 0x2e, 0x79, 0x81, 0x21, 0x79, /* Byte value: 0x6f */ + 0x88, 0x96, 0xba, 0x92, 0xd2, 0xa6, 0xb9, 0xdd, 0x9f, 0x48, 0xd4, 0xad, 0xe4, 0xeb, 0xab, 0xe4, /* Byte value: 0x70 */ + 0x61, 0xa7, 0xf9, 0x32, 0x76, 0x94, 0xe6, 0x15, 0xfa, 0xad, 0x48, 0x62, 0x7b, 0x18, 0x5c, 0x7b, /* Byte value: 0x71 */ + 0x57, 0x74, 0xf1, 0x34, 0xbe, 0xf2, 0xc1, 0x4e, 0xe4, 0x0a, 0xde, 0x42, 0x58, 0xa8, 0x22, 0x58, /* Byte value: 0x72 */ + 0x11, 0x62, 0x86, 0x83, 0x8b, 0x64, 0xbe, 0x53, 0xca, 0x09, 0xfb, 0x5d, 0xfd, 0x25, 0x2d, 0xfd, /* Byte value: 0x73 */ + 0x99, 0xf4, 0x3c, 0x11, 0x59, 0xc2, 0x07, 0x8e, 0x55, 0x41, 0x2f, 0xf0, 0x19, 0xce, 0x86, 0x19, /* Byte value: 0x74 */ + 0x72, 0x98, 0x55, 0x4f, 0xa1, 0x35, 0x9a, 0xd4, 0xb0, 0x83, 0xfc, 0x97, 0x51, 0x24, 0xca, 0x51, /* Byte value: 0x75 */ + 0x83, 0xb3, 0x2d, 0x6d, 0x13, 0x72, 0x94, 0x0b, 0x1a, 0x01, 0xa2, 0xb4, 0x63, 0x7b, 0x05, 0x63, /* Byte value: 0x76 */ + 0xbf, 0x8a, 0xa7, 0xeb, 0x34, 0x43, 0xff, 0xcf, 0xc1, 0x1d, 0x84, 0xd9, 0x4d, 0xb6, 0x69, 0x4d, /* Byte value: 0x77 */ + 0x55, 0x29, 0xdb, 0xca, 0xe2, 0x37, 0x03, 0xdc, 0x64, 0x2d, 0x91, 0xea, 0x8f, 0xb1, 0x99, 0x8f, /* Byte value: 0x78 */ + 0x73, 0x57, 0x40, 0x30, 0x8f, 0xb6, 0xfb, 0x9d, 0xf0, 0x71, 0x3a, 0xc3, 0xdb, 0xc9, 0x76, 0xdb, /* Byte value: 0x79 */ + 0x9d, 0x4e, 0x68, 0x2e, 0xe1, 0x8b, 0x40, 0x69, 0x96, 0x0f, 0xb1, 0x63, 0x74, 0xfc, 0x33, 0x74, /* Byte value: 0x7a */ + 0x81, 0xee, 0x07, 0x93, 0x4f, 0xb7, 0x56, 0x99, 0x9a, 0x26, 0xed, 0x1c, 0xb4, 0x62, 0xbe, 0xb4, /* Byte value: 0x7b */ + 0x3f, 0xab, 0xb5, 0x07, 0x55, 0x77, 0xc8, 0x1f, 0x1b, 0xc9, 0xaf, 0x91, 0x73, 0x39, 0x6b, 0x73, /* Byte value: 0x7c */ + 0xb3, 0x87, 0x5b, 0xaa, 0x3f, 0x98, 0x36, 0x25, 0x47, 0xcf, 0xe5, 0xaf, 0xfa, 0xe0, 0x75, 0xfa, /* Byte value: 0x7d */ + 0xbb, 0x30, 0xf3, 0xd4, 0x8c, 0x0a, 0xb8, 0x28, 0x02, 0x53, 0x1a, 0x4a, 0x20, 0x84, 0xdc, 0x20, /* Byte value: 0x7e */ + 0xd6, 0x9a, 0xf6, 0xa7, 0xf1, 0x45, 0x97, 0xd7, 0x7e, 0x2c, 0x33, 0x5e, 0xec, 0xca, 0x9c, 0xec, /* Byte value: 0x7f */ + 0xea, 0xa3, 0x7c, 0x21, 0xd6, 0x74, 0xfc, 0x13, 0xa5, 0x30, 0x15, 0x33, 0xc2, 0x07, 0xf0, 0xc2, /* Byte value: 0x80 */ + 0x25, 0xec, 0xa4, 0x7b, 0x1f, 0xc7, 0x5b, 0x9a, 0x54, 0x89, 0x22, 0xd5, 0x09, 0x8c, 0xe8, 0x09, /* Byte value: 0x81 */ + 0xce, 0x80, 0xcd, 0x25, 0xe7, 0x30, 0xc6, 0xc0, 0xb1, 0x4b, 0xf1, 0xb2, 0x41, 0x66, 0xa4, 0x41, /* Byte value: 0x82 */ + 0x63, 0xfa, 0xd3, 0xcc, 0x2a, 0x51, 0x24, 0x87, 0x7a, 0x8a, 0x07, 0xca, 0xac, 0x01, 0xe7, 0xac, /* Byte value: 0x83 */ + 0x45, 0x84, 0x48, 0x36, 0x47, 0xd0, 0xdc, 0xc6, 0xee, 0xd6, 0xac, 0xe3, 0xf8, 0x79, 0x08, 0xf8, /* Byte value: 0x84 */ + 0x32, 0x69, 0x5c, 0x39, 0x70, 0x2f, 0x60, 0xbc, 0xdd, 0xe9, 0x08, 0xb3, 0x4e, 0x82, 0xcb, 0x4e, /* Byte value: 0x85 */ + 0xd7, 0x55, 0xe3, 0xd8, 0xdf, 0xc6, 0xf6, 0x9e, 0x3e, 0xde, 0xf5, 0x0a, 0x66, 0x27, 0x20, 0x66, /* Byte value: 0x86 */ + 0x9e, 0xdc, 0x57, 0xaf, 0x93, 0xcd, 0xe3, 0xb2, 0x56, 0xda, 0x38, 0x9f, 0x29, 0x08, 0x34, 0x29, /* Byte value: 0x87 */ + 0x08, 0xb7, 0xa8, 0x7e, 0xb3, 0x92, 0x8e, 0x0d, 0x45, 0x9c, 0xff, 0xe5, 0xda, 0x64, 0xa9, 0xda, /* Byte value: 0x88 */ + 0x8d, 0xe3, 0xfb, 0xd2, 0x44, 0x6c, 0x9f, 0x73, 0x1c, 0xf4, 0x8c, 0x6a, 0x03, 0x34, 0xa2, 0x03, /* Byte value: 0x89 */ + 0x79, 0xbd, 0xc2, 0xb0, 0x60, 0xe1, 0xb7, 0x02, 0x35, 0xca, 0x8a, 0x8e, 0xd6, 0xb4, 0x64, 0xd6, /* Byte value: 0x8a */ + 0xab, 0x9d, 0x60, 0x28, 0x29, 0xed, 0x67, 0x32, 0x88, 0xa8, 0x27, 0x43, 0x57, 0x4c, 0x4d, 0x57, /* Byte value: 0x8b */ + 0x36, 0xd3, 0x08, 0x06, 0xc8, 0x66, 0x27, 0x5b, 0x1e, 0xa7, 0x96, 
0x20, 0x23, 0xb0, 0x7e, 0x23, /* Byte value: 0x8c */ + 0xfb, 0xc1, 0xfa, 0xa2, 0x5d, 0x10, 0x42, 0x40, 0x6f, 0x39, 0xee, 0x6e, 0x3f, 0x22, 0xdd, 0x3f, /* Byte value: 0x8d */ + 0xec, 0x44, 0x02, 0xe0, 0x32, 0xf8, 0x79, 0x66, 0xe6, 0x59, 0xc4, 0x08, 0x78, 0x2c, 0xfe, 0x78, /* Byte value: 0x8e */ + 0x4e, 0xa1, 0xdf, 0xc9, 0x86, 0x04, 0xf1, 0x10, 0x6b, 0x9f, 0xda, 0xfa, 0x7f, 0xe9, 0xa6, 0x7f, /* Byte value: 0x8f */ + 0x7e, 0x95, 0xa9, 0x0e, 0xaa, 0xee, 0x53, 0x3e, 0x36, 0x51, 0x9d, 0xe1, 0xe6, 0x72, 0xd6, 0xe6, /* Byte value: 0x90 */ + 0xd0, 0x7d, 0x88, 0x66, 0x15, 0xc9, 0x12, 0xa2, 0x3d, 0x45, 0xe2, 0x65, 0x56, 0xe1, 0x92, 0x56, /* Byte value: 0x91 */ + 0x6c, 0x65, 0x10, 0x0c, 0x53, 0xcc, 0x4e, 0xb6, 0x3c, 0x8d, 0xef, 0x40, 0x46, 0xa3, 0xfc, 0x46, /* Byte value: 0x92 */ + 0xf3, 0x76, 0x52, 0xdc, 0xee, 0x82, 0xcc, 0x4d, 0x2a, 0xa5, 0x11, 0x8b, 0xe5, 0x46, 0x74, 0xe5, /* Byte value: 0x93 */ + 0x5f, 0xc3, 0x59, 0x4a, 0x0d, 0x60, 0x4f, 0x43, 0xa1, 0x96, 0x21, 0xa7, 0x82, 0xcc, 0x8b, 0x82, /* Byte value: 0x94 */ + 0xff, 0x7b, 0xae, 0x9d, 0xe5, 0x59, 0x05, 0xa7, 0xac, 0x77, 0x70, 0xfd, 0x52, 0x10, 0x68, 0x52, /* Byte value: 0x95 */ + 0x8a, 0xcb, 0x90, 0x6c, 0x8e, 0x63, 0x7b, 0x4f, 0x1f, 0x6f, 0x9b, 0x05, 0x33, 0xf2, 0x10, 0x33, /* Byte value: 0x96 */ + 0x56, 0xbb, 0xe4, 0x4b, 0x90, 0x71, 0xa0, 0x07, 0xa4, 0xf8, 0x18, 0x16, 0xd2, 0x45, 0x9e, 0xd2, /* Byte value: 0x97 */ + 0xdd, 0xbf, 0x61, 0x58, 0x30, 0x91, 0xba, 0x01, 0xfb, 0x65, 0x45, 0x47, 0x6b, 0x5a, 0x32, 0x6b, /* Byte value: 0x98 */ + 0xb4, 0xaf, 0x30, 0x14, 0xf5, 0x97, 0xd2, 0x19, 0x44, 0x54, 0xf2, 0xc0, 0xca, 0x26, 0xc7, 0xca, /* Byte value: 0x99 */ + 0x04, 0xba, 0x54, 0x3f, 0xb8, 0x49, 0x47, 0xe7, 0xc3, 0x4e, 0x9e, 0x93, 0x6d, 0x32, 0xb5, 0x6d, /* Byte value: 0x9a */ + 0xbe, 0x45, 0xb2, 0x94, 0x1a, 0xc0, 0x9e, 0x86, 0x81, 0xef, 0x42, 0x8d, 0xc7, 0x5b, 0xd5, 0xc7, /* Byte value: 0x9b */ + 0x1b, 0x88, 0x04, 0x03, 0x64, 0x33, 0xf2, 0xcc, 0x0f, 0xb2, 0x4b, 0x10, 0xf0, 0x58, 0x3f, 0xf0, /* Byte value: 0x9c */ + 0x9c, 0x81, 0x7d, 0x51, 0xcf, 0x08, 0x21, 0x20, 0xd6, 0xfd, 0x77, 0x37, 0xfe, 0x11, 0x8f, 0xfe, /* Byte value: 0x9d */ + 0x5e, 0x0c, 0x4c, 0x35, 0x23, 0xe3, 0x2e, 0x0a, 0xe1, 0x64, 0xe7, 0xf3, 0x08, 0x21, 0x37, 0x08, /* Byte value: 0x9e */ + 0x07, 0x28, 0x6b, 0xbe, 0xca, 0x0f, 0xe4, 0x3c, 0x03, 0x9b, 0x17, 0x6f, 0x30, 0xc6, 0xb2, 0x30, /* Byte value: 0x9f */ + 0x8c, 0x2c, 0xee, 0xad, 0x6a, 0xef, 0xfe, 0x3a, 0x5c, 0x06, 0x4a, 0x3e, 0x89, 0xd9, 0x1e, 0x89, /* Byte value: 0xa0 */ + 0x0f, 0x9f, 0xc3, 0xc0, 0x79, 0x9d, 0x6a, 0x31, 0x46, 0x07, 0xe8, 0x8a, 0xea, 0xa2, 0x1b, 0xea, /* Byte value: 0xa1 */ + 0x6d, 0xaa, 0x05, 0x73, 0x7d, 0x4f, 0x2f, 0xff, 0x7c, 0x7f, 0x29, 0x14, 0xcc, 0x4e, 0x40, 0xcc, /* Byte value: 0xa2 */ + 0xf4, 0x5e, 0x39, 0x62, 0x24, 0x8d, 0x28, 0x71, 0x29, 0x3e, 0x06, 0xe4, 0xd5, 0x80, 0xc6, 0xd5, /* Byte value: 0xa3 */ + 0x26, 0x7e, 0x9b, 0xfa, 0x6d, 0x81, 0xf8, 0x41, 0x94, 0x5c, 0xab, 0x29, 0x54, 0x78, 0xef, 0x54, /* Byte value: 0xa4 */ + 0x71, 0x0a, 0x6a, 0xce, 0xd3, 0x73, 0x39, 0x0f, 0x70, 0x56, 0x75, 0x6b, 0x0c, 0xd0, 0xcd, 0x0c, /* Byte value: 0xa5 */ + 0x1c, 0xa0, 0x6f, 0xbd, 0xae, 0x3c, 0x16, 0xf0, 0x0c, 0x29, 0x5c, 0x7f, 0xc0, 0x9e, 0x8d, 0xc0, /* Byte value: 0xa6 */ + 0x94, 0x36, 0xd5, 0x2f, 0x7c, 0x9a, 0xaf, 0x2d, 0x93, 0x61, 0x88, 0xd2, 0x24, 0x75, 0x26, 0x24, /* Byte value: 0xa7 */ + 0x22, 0xc4, 0xcf, 0xc5, 0xd5, 0xc8, 0xbf, 0xa6, 0x57, 0x12, 0x35, 0xba, 0x39, 0x4a, 0x5a, 0x39, /* Byte value: 0xa8 */ + 0xeb, 0x6c, 0x69, 0x5e, 0xf8, 0xf7, 0x9d, 0x5a, 0xe5, 0xc2, 0xd3, 0x67, 0x48, 0xea, 0x4c, 0x48, /* Byte value: 
0xa9 */ + 0xbc, 0x18, 0x98, 0x6a, 0x46, 0x05, 0x5c, 0x14, 0x01, 0xc8, 0x0d, 0x25, 0x10, 0x42, 0x6e, 0x10, /* Byte value: 0xaa */ + 0x3c, 0x39, 0x8a, 0x86, 0x27, 0x31, 0x6b, 0xc4, 0xdb, 0x1c, 0x26, 0x6d, 0x2e, 0xcd, 0x6c, 0x2e, /* Byte value: 0xab */ + 0x15, 0xd8, 0xd2, 0xbc, 0x33, 0x2d, 0xf9, 0xb4, 0x09, 0x47, 0x65, 0xce, 0x90, 0x17, 0x98, 0x90, /* Byte value: 0xac */ + 0xe9, 0x31, 0x43, 0xa0, 0xa4, 0x32, 0x5f, 0xc8, 0x65, 0xe5, 0x9c, 0xcf, 0x9f, 0xf3, 0xf7, 0x9f, /* Byte value: 0xad */ + 0x02, 0x5d, 0x2a, 0xfe, 0x5c, 0xc5, 0xc2, 0x92, 0x80, 0x27, 0x4f, 0xa8, 0xd7, 0x19, 0xbb, 0xd7, /* Byte value: 0xae */ + 0x46, 0x16, 0x77, 0xb7, 0x35, 0x96, 0x7f, 0x1d, 0x2e, 0x03, 0x25, 0x1f, 0xa5, 0x8d, 0x0f, 0xa5, /* Byte value: 0xaf */ + 0xda, 0x97, 0x0a, 0xe6, 0xfa, 0x9e, 0x5e, 0x3d, 0xf8, 0xfe, 0x52, 0x28, 0x5b, 0x9c, 0x80, 0x5b, /* Byte value: 0xb0 */ + 0x19, 0xd5, 0x2e, 0xfd, 0x38, 0xf6, 0x30, 0x5e, 0x8f, 0x95, 0x04, 0xb8, 0x27, 0x41, 0x84, 0x27, /* Byte value: 0xb1 */ + 0xdb, 0x58, 0x1f, 0x99, 0xd4, 0x1d, 0x3f, 0x74, 0xb8, 0x0c, 0x94, 0x7c, 0xd1, 0x71, 0x3c, 0xd1, /* Byte value: 0xb2 */ + 0x1e, 0xfd, 0x45, 0x43, 0xf2, 0xf9, 0xd4, 0x62, 0x8c, 0x0e, 0x13, 0xd7, 0x17, 0x87, 0x36, 0x17, /* Byte value: 0xb3 */ + 0x17, 0x85, 0xf8, 0x42, 0x6f, 0xe8, 0x3b, 0x26, 0x89, 0x60, 0x2a, 0x66, 0x47, 0x0e, 0x23, 0x47, /* Byte value: 0xb4 */ + 0x4a, 0x1b, 0x8b, 0xf6, 0x3e, 0x4d, 0xb6, 0xf7, 0xa8, 0xd1, 0x44, 0x69, 0x12, 0xdb, 0x13, 0x12, /* Byte value: 0xb5 */ + 0x0a, 0xea, 0x82, 0x80, 0xef, 0x57, 0x4c, 0x9f, 0xc5, 0xbb, 0xb0, 0x4d, 0x0d, 0x7d, 0x12, 0x0d, /* Byte value: 0xb6 */ + 0xd9, 0x05, 0x35, 0x67, 0x88, 0xd8, 0xfd, 0xe6, 0x38, 0x2b, 0xdb, 0xd4, 0x06, 0x68, 0x87, 0x06, /* Byte value: 0xb7 */ + 0x89, 0x59, 0xaf, 0xed, 0xfc, 0x25, 0xd8, 0x94, 0xdf, 0xba, 0x12, 0xf9, 0x6e, 0x06, 0x17, 0x6e, /* Byte value: 0xb8 */ + 0x78, 0x72, 0xd7, 0xcf, 0x4e, 0x62, 0xd6, 0x4b, 0x75, 0x38, 0x4c, 0xda, 0x5c, 0x59, 0xd8, 0x5c, /* Byte value: 0xb9 */ + 0x76, 0x22, 0x01, 0x70, 0x19, 0x7c, 0xdd, 0x33, 0x73, 0xcd, 0x62, 0x04, 0x3c, 0x16, 0x7f, 0x3c, /* Byte value: 0xba */ + 0x27, 0xb1, 0x8e, 0x85, 0x43, 0x02, 0x99, 0x08, 0xd4, 0xae, 0x6d, 0x7d, 0xde, 0x95, 0x53, 0xde, /* Byte value: 0xbb */ + 0x18, 0x1a, 0x3b, 0x82, 0x16, 0x75, 0x51, 0x17, 0xcf, 0x67, 0xc2, 0xec, 0xad, 0xac, 0x38, 0xad, /* Byte value: 0xbc */ + 0x23, 0x0b, 0xda, 0xba, 0xfb, 0x4b, 0xde, 0xef, 0x17, 0xe0, 0xf3, 0xee, 0xb3, 0xa7, 0xe6, 0xb3, /* Byte value: 0xbd */ + 0xf2, 0xb9, 0x47, 0xa3, 0xc0, 0x01, 0xad, 0x04, 0x6a, 0x57, 0xd7, 0xdf, 0x6f, 0xab, 0xc8, 0x6f, /* Byte value: 0xbe */ + 0x95, 0xf9, 0xc0, 0x50, 0x52, 0x19, 0xce, 0x64, 0xd3, 0x93, 0x4e, 0x86, 0xae, 0x98, 0x9a, 0xae, /* Byte value: 0xbf */ + 0xd5, 0x08, 0xc9, 0x26, 0x83, 0x03, 0x34, 0x0c, 0xbe, 0xf9, 0xba, 0xa2, 0xb1, 0x3e, 0x9b, 0xb1, /* Byte value: 0xc0 */ + 0xa9, 0xc0, 0x4a, 0xd6, 0x75, 0x28, 0xa5, 0xa0, 0x08, 0x8f, 0x68, 0xeb, 0x80, 0x55, 0xf6, 0x80, /* Byte value: 0xc1 */ + 0x84, 0x9b, 0x46, 0xd3, 0xd9, 0x7d, 0x70, 0x37, 0x19, 0x9a, 0xb5, 0xdb, 0x53, 0xbd, 0xb7, 0x53, /* Byte value: 0xc2 */ + 0xe1, 0x86, 0xeb, 0xde, 0x17, 0xa0, 0xd1, 0xc5, 0x20, 0x79, 0x63, 0x2a, 0x45, 0x97, 0x5e, 0x45, /* Byte value: 0xc3 */ + 0x1d, 0x6f, 0x7a, 0xc2, 0x80, 0xbf, 0x77, 0xb9, 0x4c, 0xdb, 0x9a, 0x2b, 0x4a, 0x73, 0x31, 0x4a, /* Byte value: 0xc4 */ + 0x93, 0x1e, 0xbe, 0x91, 0xb6, 0x95, 0x4b, 0x11, 0x90, 0xfa, 0x9f, 0xbd, 0x14, 0xb3, 0x94, 0x14, /* Byte value: 0xc5 */ + 0x28, 0x2e, 0x4d, 0x45, 0x3a, 0x9f, 0xf3, 0x39, 0x92, 0xa9, 0x85, 0xf7, 0x34, 0x37, 0x48, 0x34, /* Byte value: 0xc6 */ + 0xc6, 0x37, 0x65, 0x5b, 0x54, 0xa2, 
0x48, 0xcd, 0xf4, 0xd7, 0x0e, 0x57, 0x9b, 0x02, 0x0d, 0x9b, /* Byte value: 0xc7 */ + 0x7b, 0xe0, 0xe8, 0x4e, 0x3c, 0x24, 0x75, 0x90, 0xb5, 0xed, 0xc5, 0x26, 0x01, 0xad, 0xdf, 0x01, /* Byte value: 0xc8 */ + 0x65, 0x1d, 0xad, 0x0d, 0xce, 0xdd, 0xa1, 0xf2, 0x39, 0xe3, 0xd6, 0xf1, 0x16, 0x2a, 0xe9, 0x16, /* Byte value: 0xc9 */ + 0x75, 0xb0, 0x3e, 0xf1, 0x6b, 0x3a, 0x7e, 0xe8, 0xb3, 0x18, 0xeb, 0xf8, 0x61, 0xe2, 0x78, 0x61, /* Byte value: 0xca */ + 0x7c, 0xc8, 0x83, 0xf0, 0xf6, 0x2b, 0x91, 0xac, 0xb6, 0x76, 0xd2, 0x49, 0x31, 0x6b, 0x6d, 0x31, /* Byte value: 0xcb */ + 0xe3, 0xdb, 0xc1, 0x20, 0x4b, 0x65, 0x13, 0x57, 0xa0, 0x5e, 0x2c, 0x82, 0x92, 0x8e, 0xe5, 0x92, /* Byte value: 0xcc */ + 0xee, 0x19, 0x28, 0x1e, 0x6e, 0x3d, 0xbb, 0xf4, 0x66, 0x7e, 0x8b, 0xa0, 0xaf, 0x35, 0x45, 0xaf, /* Byte value: 0xcd */ + 0x21, 0x56, 0xf0, 0x44, 0xa7, 0x8e, 0x1c, 0x7d, 0x97, 0xc7, 0xbc, 0x46, 0x64, 0xbe, 0x5d, 0x64, /* Byte value: 0xce */ + 0x8e, 0x71, 0xc4, 0x53, 0x36, 0x2a, 0x3c, 0xa8, 0xdc, 0x21, 0x05, 0x96, 0x5e, 0xc0, 0xa5, 0x5e, /* Byte value: 0xcf */ + 0xf9, 0x9c, 0xd0, 0x5c, 0x01, 0xd5, 0x80, 0xd2, 0xef, 0x1e, 0xa1, 0xc6, 0xe8, 0x3b, 0x66, 0xe8, /* Byte value: 0xd0 */ + 0x01, 0xcf, 0x15, 0x7f, 0x2e, 0x83, 0x61, 0x49, 0x40, 0xf2, 0xc6, 0x54, 0x8a, 0xed, 0xbc, 0x8a, /* Byte value: 0xd1 */ + 0x5b, 0x79, 0x0d, 0x75, 0xb5, 0x29, 0x08, 0xa4, 0x62, 0xd8, 0xbf, 0x34, 0xef, 0xfe, 0x3e, 0xef, /* Byte value: 0xd2 */ + 0x91, 0x43, 0x94, 0x6f, 0xea, 0x50, 0x89, 0x83, 0x10, 0xdd, 0xd0, 0x15, 0xc3, 0xaa, 0x2f, 0xc3, /* Byte value: 0xd3 */ + 0x14, 0x17, 0xc7, 0xc3, 0x1d, 0xae, 0x98, 0xfd, 0x49, 0xb5, 0xa3, 0x9a, 0x1a, 0xfa, 0x24, 0x1a, /* Byte value: 0xd4 */ + 0x7a, 0x2f, 0xfd, 0x31, 0x12, 0xa7, 0x14, 0xd9, 0xf5, 0x1f, 0x03, 0x72, 0x8b, 0x40, 0x63, 0x8b, /* Byte value: 0xd5 */ + 0xf6, 0x03, 0x13, 0x9c, 0x78, 0x48, 0xea, 0xe3, 0xa9, 0x19, 0x49, 0x4c, 0x02, 0x99, 0x7d, 0x02, /* Byte value: 0xd6 */ + 0xb1, 0xda, 0x71, 0x54, 0x63, 0x5d, 0xf4, 0xb7, 0xc7, 0xe8, 0xaa, 0x07, 0x2d, 0xf9, 0xce, 0x2d, /* Byte value: 0xd7 */ + 0xb7, 0x3d, 0x0f, 0x95, 0x87, 0xd1, 0x71, 0xc2, 0x84, 0x81, 0x7b, 0x3c, 0x97, 0xd2, 0xc0, 0x97, /* Byte value: 0xd8 */ + 0xca, 0x3a, 0x99, 0x1a, 0x5f, 0x79, 0x81, 0x27, 0x72, 0x05, 0x6f, 0x21, 0x2c, 0x54, 0x11, 0x2c, /* Byte value: 0xd9 */ + 0x5a, 0xb6, 0x18, 0x0a, 0x9b, 0xaa, 0x69, 0xed, 0x22, 0x2a, 0x79, 0x60, 0x65, 0x13, 0x82, 0x65, /* Byte value: 0xda */ + 0xf8, 0x53, 0xc5, 0x23, 0x2f, 0x56, 0xe1, 0x9b, 0xaf, 0xec, 0x67, 0x92, 0x62, 0xd6, 0xda, 0x62, /* Byte value: 0xdb */ + 0xcc, 0xdd, 0xe7, 0xdb, 0xbb, 0xf5, 0x04, 0x52, 0x31, 0x6c, 0xbe, 0x1a, 0x96, 0x7f, 0x1f, 0x96, /* Byte value: 0xdc */ + 0x47, 0xd9, 0x62, 0xc8, 0x1b, 0x15, 0x1e, 0x54, 0x6e, 0xf1, 0xe3, 0x4b, 0x2f, 0x60, 0xb3, 0x2f, /* Byte value: 0xdd */ + 0x96, 0x6b, 0xff, 0xd1, 0x20, 0x5f, 0x6d, 0xbf, 0x13, 0x46, 0xc7, 0x7a, 0xf3, 0x6c, 0x9d, 0xf3, /* Byte value: 0xde */ + 0x6e, 0x38, 0x3a, 0xf2, 0x0f, 0x09, 0x8c, 0x24, 0xbc, 0xaa, 0xa0, 0xe8, 0x91, 0xba, 0x47, 0x91, /* Byte value: 0xdf */ + 0x13, 0x3f, 0xac, 0x7d, 0xd7, 0xa1, 0x7c, 0xc1, 0x4a, 0x2e, 0xb4, 0xf5, 0x2a, 0x3c, 0x96, 0x2a, /* Byte value: 0xe0 */ + 0x24, 0x23, 0xb1, 0x04, 0x31, 0x44, 0x3a, 0xd3, 0x14, 0x7b, 0xe4, 0x81, 0x83, 0x61, 0x54, 0x83, /* Byte value: 0xe1 */ + 0x74, 0x7f, 0x2b, 0x8e, 0x45, 0xb9, 0x1f, 0xa1, 0xf3, 0xea, 0x2d, 0xac, 0xeb, 0x0f, 0xc4, 0xeb, /* Byte value: 0xe2 */ + 0x09, 0x78, 0xbd, 0x01, 0x9d, 0x11, 0xef, 0x44, 0x05, 0x6e, 0x39, 0xb1, 0x50, 0x89, 0x15, 0x50, /* Byte value: 0xe3 */ + 0x03, 0x92, 0x3f, 0x81, 0x72, 0x46, 0xa3, 0xdb, 0xc0, 0xd5, 0x89, 0xfc, 0x5d, 
0xf4, 0x07, 0x5d, /* Byte value: 0xe4 */ + 0x30, 0x34, 0x76, 0xc7, 0x2c, 0xea, 0xa2, 0x2e, 0x5d, 0xce, 0x47, 0x1b, 0x99, 0x9b, 0x70, 0x99, /* Byte value: 0xe5 */ + 0x50, 0x5c, 0x9a, 0x8a, 0x74, 0xfd, 0x25, 0x72, 0xe7, 0x91, 0xc9, 0x2d, 0x68, 0x6e, 0x90, 0x68, /* Byte value: 0xe6 */ + 0x4f, 0x6e, 0xca, 0xb6, 0xa8, 0x87, 0x90, 0x59, 0x2b, 0x6d, 0x1c, 0xae, 0xf5, 0x04, 0x1a, 0xf5, /* Byte value: 0xe7 */ + 0x0e, 0x50, 0xd6, 0xbf, 0x57, 0x1e, 0x0b, 0x78, 0x06, 0xf5, 0x2e, 0xde, 0x60, 0x4f, 0xa7, 0x60, /* Byte value: 0xe8 */ + 0x7d, 0x07, 0x96, 0x8f, 0xd8, 0xa8, 0xf0, 0xe5, 0xf6, 0x84, 0x14, 0x1d, 0xbb, 0x86, 0xd1, 0xbb, /* Byte value: 0xe9 */ + 0xb9, 0x6d, 0xd9, 0x2a, 0xd0, 0xcf, 0x7a, 0xba, 0x82, 0x74, 0x55, 0xe2, 0xf7, 0x9d, 0x67, 0xf7, /* Byte value: 0xea */ + 0x37, 0x1c, 0x1d, 0x79, 0xe6, 0xe5, 0x46, 0x12, 0x5e, 0x55, 0x50, 0x74, 0xa9, 0x5d, 0xc2, 0xa9, /* Byte value: 0xeb */ + 0x54, 0xe6, 0xce, 0xb5, 0xcc, 0xb4, 0x62, 0x95, 0x24, 0xdf, 0x57, 0xbe, 0x05, 0x5c, 0x25, 0x05, /* Byte value: 0xec */ + 0x1f, 0x32, 0x50, 0x3c, 0xdc, 0x7a, 0xb5, 0x2b, 0xcc, 0xfc, 0xd5, 0x83, 0x9d, 0x6a, 0x8a, 0x9d, /* Byte value: 0xed */ + 0xc2, 0x8d, 0x31, 0x64, 0xec, 0xeb, 0x0f, 0x2a, 0x37, 0x99, 0x90, 0xc4, 0xf6, 0x30, 0xb8, 0xf6, /* Byte value: 0xee */ + 0x33, 0xa6, 0x49, 0x46, 0x5e, 0xac, 0x01, 0xf5, 0x9d, 0x1b, 0xce, 0xe7, 0xc4, 0x6f, 0x77, 0xc4, /* Byte value: 0xef */ + 0x3a, 0xde, 0xf4, 0x47, 0xc3, 0xbd, 0xee, 0xb1, 0x98, 0x75, 0xf7, 0x56, 0x94, 0xe6, 0x62, 0x94, /* Byte value: 0xf0 */ + 0xe5, 0x3c, 0xbf, 0xe1, 0xaf, 0xe9, 0x96, 0x22, 0xe3, 0x37, 0xfd, 0xb9, 0x28, 0xa5, 0xeb, 0x28, /* Byte value: 0xf1 */ + 0xe8, 0xfe, 0x56, 0xdf, 0x8a, 0xb1, 0x3e, 0x81, 0x25, 0x17, 0x5a, 0x9b, 0x15, 0x1e, 0x4b, 0x15, /* Byte value: 0xf2 */ + 0x12, 0xf0, 0xb9, 0x02, 0xf9, 0x22, 0x1d, 0x88, 0x0a, 0xdc, 0x72, 0xa1, 0xa0, 0xd1, 0x2a, 0xa0, /* Byte value: 0xf3 */ + 0x92, 0xd1, 0xab, 0xee, 0x98, 0x16, 0x2a, 0x58, 0xd0, 0x08, 0x59, 0xe9, 0x9e, 0x5e, 0x28, 0x9e, /* Byte value: 0xf4 */ + 0xac, 0xb5, 0x0b, 0x96, 0xe3, 0xe2, 0x83, 0x0e, 0x8b, 0x33, 0x30, 0x2c, 0x67, 0x8a, 0xff, 0x67, /* Byte value: 0xf5 */ + 0x34, 0x8e, 0x22, 0xf8, 0x94, 0xa3, 0xe5, 0xc9, 0x9e, 0x80, 0xd9, 0x88, 0xf4, 0xa9, 0xc5, 0xf4, /* Byte value: 0xf6 */ + 0x52, 0x01, 0xb0, 0x74, 0x28, 0x38, 0xe7, 0xe0, 0x67, 0xb6, 0x86, 0x85, 0xbf, 0x77, 0x2b, 0xbf, /* Byte value: 0xf7 */ + 0xed, 0x8b, 0x17, 0x9f, 0x1c, 0x7b, 0x18, 0x2f, 0xa6, 0xab, 0x02, 0x5c, 0xf2, 0xc1, 0x42, 0xf2, /* Byte value: 0xf8 */ + 0xfa, 0x0e, 0xef, 0xdd, 0x73, 0x93, 0x23, 0x09, 0x2f, 0xcb, 0x28, 0x3a, 0xb5, 0xcf, 0x61, 0xb5, /* Byte value: 0xf9 */ + 0x98, 0x3b, 0x29, 0x6e, 0x77, 0x41, 0x66, 0xc7, 0x15, 0xb3, 0xe9, 0xa4, 0x93, 0x23, 0x3a, 0x93, /* Byte value: 0xfa */ + 0xdf, 0xe2, 0x4b, 0xa6, 0x6c, 0x54, 0x78, 0x93, 0x7b, 0x42, 0x0a, 0xef, 0xbc, 0x43, 0x89, 0xbc, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0x66, 0x8f, 0x92, 0x8c, 0xbc, 0x9b, 0x02, 0x29, 0xf9, 0x36, 0x5f, 0x0d, 0x4b, 0xde, 0xee, 0x4b, /* Byte value: 0xfd */ + 0x2f, 0x06, 0x26, 0xfb, 0xf0, 0x90, 0x17, 0x05, 0x91, 0x32, 0x92, 0x98, 0x04, 0xf1, 0xfa, 0x04, /* Byte value: 0xfe */ + 0x3e, 0x64, 0xa0, 0x78, 0x7b, 0xf4, 0xa9, 0x56, 0x5b, 0x3b, 0x69, 0xc5, 0xf9, 0xd4, 0xd7, 0xf9, /* Byte value: 0xff */ + + /* Matrix row: 6 */ + 0x51, 0x8b, 0xc0, 0x48, 0x41, 0xdb, 0x9d, 0x79, 0x04, 0x5b, 0xb3, 0x42, 0x6e, 0xe5, 0x18, 0xc6, /* Byte value: 0x00 */ + 0x33, 0xea, 0x25, 0x27, 0xfa, 0x81, 0x30, 0xe7, 0xa3, 0x06, 0xf7, 0x12, 0x65, 0x8f, 0x4c, 0x36, /* 
Byte value: 0x01 */ + 0xa4, 0x98, 0x5a, 0x40, 0x48, 0x42, 0xb3, 0xc9, 0x3b, 0x2e, 0x19, 0xca, 0x9c, 0x04, 0x9a, 0x9d, /* Byte value: 0x02 */ + 0x67, 0xeb, 0x11, 0xd7, 0x14, 0x97, 0xc2, 0x59, 0x6c, 0x89, 0xa6, 0x39, 0x6b, 0x80, 0xab, 0x4b, /* Byte value: 0x03 */ + 0x0a, 0xd7, 0x2b, 0xb3, 0x9d, 0x59, 0xde, 0x4d, 0x55, 0x6b, 0x07, 0xd2, 0xc0, 0x17, 0x3d, 0xb5, /* Byte value: 0x04 */ + 0xe7, 0x33, 0xd5, 0x7d, 0x3a, 0x1c, 0x1b, 0x11, 0xa6, 0x3b, 0x18, 0xa1, 0xf1, 0xc2, 0x52, 0x20, /* Byte value: 0x05 */ + 0x28, 0xda, 0xac, 0x89, 0x31, 0xa7, 0xfe, 0xf7, 0x97, 0x6f, 0x1c, 0xce, 0x86, 0x5c, 0xf4, 0x91, /* Byte value: 0x06 */ + 0x1a, 0xcc, 0xd2, 0x37, 0xe8, 0x70, 0x6c, 0x44, 0xdd, 0xec, 0x60, 0xc1, 0x42, 0x8e, 0x8b, 0x80, /* Byte value: 0x07 */ + 0x6c, 0xc0, 0x61, 0xfd, 0xaa, 0x98, 0xbe, 0x40, 0xd0, 0x67, 0x2a, 0xf6, 0x0a, 0xca, 0xa5, 0xd9, /* Byte value: 0x08 */ + 0x65, 0xd0, 0xa7, 0x26, 0x52, 0x3b, 0x45, 0xf1, 0x7d, 0x40, 0x73, 0x03, 0xea, 0x3a, 0xcd, 0x05, /* Byte value: 0x09 */ + 0xfa, 0x4e, 0x45, 0x03, 0x3b, 0x0d, 0x9f, 0x3a, 0xa1, 0xca, 0x4f, 0x33, 0x52, 0x1c, 0x40, 0x55, /* Byte value: 0x0a */ + 0xe8, 0x6e, 0x0a, 0x76, 0x08, 0x88, 0xaa, 0x9b, 0x38, 0x84, 0xfd, 0x1a, 0x51, 0x3f, 0x90, 0x2e, /* Byte value: 0x0b */ + 0xac, 0x74, 0xc7, 0x02, 0x93, 0xb7, 0xea, 0x2c, 0x7f, 0x8c, 0xcb, 0x22, 0xdd, 0xa9, 0xc1, 0x66, /* Byte value: 0x0c */ + 0x5a, 0xa0, 0xb0, 0x62, 0xff, 0xd4, 0xe1, 0x60, 0xb8, 0xb5, 0x3f, 0x8d, 0x0f, 0xaf, 0x16, 0x54, /* Byte value: 0x0d */ + 0xa9, 0xfe, 0x33, 0xba, 0x3c, 0x7a, 0x85, 0xeb, 0xb4, 0x58, 0x29, 0x4b, 0xbd, 0x43, 0x3e, 0xdd, /* Byte value: 0x0e */ + 0x30, 0x2d, 0xc8, 0x4f, 0x9f, 0x7b, 0x15, 0x1b, 0x5b, 0x4a, 0xa9, 0x35, 0x45, 0x68, 0x19, 0x5f, /* Byte value: 0x0f */ + 0x8b, 0xf3, 0xb4, 0x80, 0x90, 0x84, 0xa5, 0x51, 0x76, 0x5c, 0x32, 0x57, 0xfb, 0x08, 0xf7, 0xf9, /* Byte value: 0x10 */ + 0x83, 0x1f, 0x29, 0xc2, 0x4b, 0x71, 0xfc, 0xb4, 0x32, 0xfe, 0xe0, 0xbf, 0xba, 0xa5, 0xac, 0x02, /* Byte value: 0x11 */ + 0xef, 0xdf, 0x48, 0x3f, 0xe1, 0xe9, 0x42, 0xf4, 0xe2, 0x99, 0xca, 0x49, 0xb0, 0x6f, 0x09, 0xdb, /* Byte value: 0x12 */ + 0x66, 0x17, 0x4a, 0x4e, 0x37, 0xc1, 0x60, 0x0d, 0x85, 0x0c, 0x2d, 0x24, 0xca, 0xdd, 0x98, 0x6c, /* Byte value: 0x13 */ + 0x14, 0x6d, 0x56, 0xa5, 0xf9, 0xb2, 0x7f, 0x9a, 0xaa, 0xd6, 0x0e, 0x67, 0x43, 0x2e, 0x7a, 0xa9, /* Byte value: 0x14 */ + 0x0d, 0x66, 0x69, 0xfa, 0x74, 0x38, 0x36, 0x22, 0x8f, 0x76, 0x30, 0x81, 0x21, 0x47, 0xa4, 0x40, /* Byte value: 0x15 */ + 0x05, 0x8a, 0xf4, 0xb8, 0xaf, 0xcd, 0x6f, 0xc7, 0xcb, 0xd4, 0xe2, 0x69, 0x60, 0xea, 0xff, 0xbb, /* Byte value: 0x16 */ + 0x92, 0xf8, 0x8b, 0xdf, 0x1d, 0x0e, 0xec, 0xe9, 0x53, 0xfc, 0x0c, 0xb1, 0x99, 0x61, 0x29, 0x10, /* Byte value: 0x17 */ + 0x7a, 0x96, 0x81, 0xa9, 0x15, 0x86, 0x46, 0x72, 0x6b, 0x78, 0xf1, 0xab, 0xc8, 0x5e, 0xb9, 0x3e, /* Byte value: 0x18 */ + 0x13, 0xdc, 0x14, 0xec, 0x10, 0xd3, 0x97, 0xf5, 0x70, 0xcb, 0x39, 0x34, 0xa2, 0x7e, 0xe3, 0x5c, /* Byte value: 0x19 */ + 0xd8, 0x43, 0xc2, 0x39, 0x97, 0xf3, 0xbf, 0x80, 0x63, 0xce, 0x54, 0x2f, 0x14, 0x57, 0x89, 0x71, /* Byte value: 0x1a */ + 0x87, 0x69, 0x86, 0xe3, 0xc7, 0xea, 0x31, 0x27, 0x10, 0xaf, 0x89, 0xcb, 0x7b, 0x12, 0x60, 0x9e, /* Byte value: 0x1b */ + 0xb5, 0x7f, 0xf8, 0x5d, 0x1e, 0x3d, 0xa3, 0x94, 0x5a, 0x2c, 0xf5, 0xc4, 0xbf, 0xc0, 0x1f, 0x8f, /* Byte value: 0x1c */ + 0xdf, 0xf2, 0x80, 0x70, 0x7e, 0x92, 0x57, 0xef, 0xb9, 0xd3, 0x63, 0x7c, 0xf5, 0x07, 0x10, 0x84, /* Byte value: 0x1d */ + 0x91, 0x3f, 0x66, 0xb7, 0x78, 0xf4, 0xc9, 0x15, 0xab, 0xb0, 0x52, 0x96, 0xb9, 0x86, 0x7c, 0x79, /* Byte value: 0x1e */ + 0x2d, 0x50, 0x58, 0x31, 
0x9e, 0x6a, 0x91, 0x30, 0x5c, 0xbb, 0xfe, 0xa7, 0xe6, 0xb6, 0x0b, 0x2a, /* Byte value: 0x1f */ + 0xa0, 0xee, 0xf5, 0x61, 0xc4, 0xd9, 0x7e, 0x5a, 0x19, 0x7f, 0x70, 0xbe, 0x5d, 0xb3, 0x56, 0x01, /* Byte value: 0x20 */ + 0xbf, 0xa8, 0xd3, 0xee, 0x83, 0x64, 0x7d, 0xd9, 0x0f, 0x47, 0xf2, 0x16, 0x7f, 0xd7, 0x22, 0x3a, /* Byte value: 0x21 */ + 0x24, 0x40, 0x9e, 0xea, 0x66, 0xc9, 0x6a, 0x81, 0xf1, 0x9c, 0xa7, 0x52, 0x06, 0x46, 0x63, 0xf6, /* Byte value: 0x22 */ + 0x49, 0x7c, 0xa4, 0x8e, 0xef, 0x07, 0x76, 0x95, 0xc8, 0x7e, 0x06, 0xb9, 0xad, 0xd1, 0xf5, 0x08, /* Byte value: 0x23 */ + 0xee, 0x23, 0x13, 0xa6, 0xc2, 0xbf, 0xe0, 0xa0, 0x0b, 0x1c, 0x41, 0x54, 0x11, 0x32, 0x3a, 0xfc, /* Byte value: 0x24 */ + 0x2c, 0xac, 0x03, 0xa8, 0xbd, 0x3c, 0x33, 0x64, 0xb5, 0x3e, 0x75, 0xba, 0x47, 0xeb, 0x38, 0x0d, /* Byte value: 0x25 */ + 0x1d, 0x7d, 0x90, 0x7e, 0x01, 0x11, 0x84, 0x2b, 0x07, 0xf1, 0x57, 0x92, 0xa3, 0xde, 0x12, 0x75, /* Byte value: 0x26 */ + 0xcc, 0x2e, 0x94, 0x9c, 0x6e, 0x41, 0xc0, 0x1a, 0xc9, 0x18, 0x5a, 0x48, 0x57, 0x79, 0xf3, 0xd8, /* Byte value: 0x27 */ + 0x7c, 0xdb, 0x98, 0x79, 0xdf, 0xb1, 0x0c, 0x49, 0x58, 0xe0, 0x4d, 0xe5, 0x88, 0x53, 0x13, 0xec, /* Byte value: 0x28 */ + 0x8e, 0x79, 0x40, 0x38, 0x3f, 0x49, 0xca, 0x96, 0xbd, 0x88, 0xd0, 0x3e, 0x9b, 0xe2, 0x08, 0x42, /* Byte value: 0x29 */ + 0xb8, 0x19, 0x91, 0xa7, 0x6a, 0x05, 0x95, 0xb6, 0xd5, 0x5a, 0xc5, 0x45, 0x9e, 0x87, 0xbb, 0xcf, /* Byte value: 0x2a */ + 0xe6, 0xcf, 0x8e, 0xe4, 0x19, 0x4a, 0xb9, 0x45, 0x4f, 0xbe, 0x93, 0xbc, 0x50, 0x9f, 0x61, 0x07, /* Byte value: 0x2b */ + 0xf4, 0xef, 0xc1, 0x91, 0x2a, 0xcf, 0x8c, 0xe4, 0xd6, 0xf0, 0x21, 0x95, 0x53, 0xbc, 0xb1, 0x7c, /* Byte value: 0x2c */ + 0x26, 0x7b, 0x28, 0x1b, 0x20, 0x65, 0xed, 0x29, 0xe0, 0x55, 0x72, 0x68, 0x87, 0xfc, 0x05, 0xb8, /* Byte value: 0x2d */ + 0x1b, 0x30, 0x89, 0xae, 0xcb, 0x26, 0xce, 0x10, 0x34, 0x69, 0xeb, 0xdc, 0xe3, 0xd3, 0xb8, 0xa7, /* Byte value: 0x2e */ + 0x4f, 0x31, 0xbd, 0x5e, 0x25, 0x30, 0x3c, 0xae, 0xfb, 0xe6, 0xba, 0xf7, 0xed, 0xdc, 0x5f, 0xda, /* Byte value: 0x2f */ + 0xd5, 0x25, 0xab, 0xc3, 0xe3, 0xcb, 0x89, 0xa2, 0xec, 0xb8, 0x64, 0xae, 0x35, 0x10, 0x2d, 0x31, /* Byte value: 0x30 */ + 0xc5, 0x3e, 0x52, 0x47, 0x96, 0xe2, 0x3b, 0xab, 0x64, 0x3f, 0x03, 0xbd, 0xb7, 0x89, 0x9b, 0x04, /* Byte value: 0x31 */ + 0x50, 0x77, 0x9b, 0xd1, 0x62, 0x8d, 0x3f, 0x2d, 0xed, 0xde, 0x38, 0x5f, 0xcf, 0xb8, 0x2b, 0xe1, /* Byte value: 0x32 */ + 0x79, 0x51, 0x6c, 0xc1, 0x70, 0x7c, 0x63, 0x8e, 0x93, 0x34, 0xaf, 0x8c, 0xe8, 0xb9, 0xec, 0x57, /* Byte value: 0x33 */ + 0x77, 0xf0, 0xe8, 0x53, 0x61, 0xbe, 0x70, 0x50, 0xe4, 0x0e, 0xc1, 0x2a, 0xe9, 0x19, 0x1d, 0x7e, /* Byte value: 0x34 */ + 0x16, 0x56, 0xe0, 0x54, 0xbf, 0x1e, 0xf8, 0x32, 0xbb, 0x1f, 0xdb, 0x5d, 0xc2, 0x94, 0x1c, 0xe7, /* Byte value: 0x35 */ + 0x52, 0x4c, 0x2d, 0x20, 0x24, 0x21, 0xb8, 0x85, 0xfc, 0x17, 0xed, 0x65, 0x4e, 0x02, 0x4d, 0xaf, /* Byte value: 0x36 */ + 0x15, 0x91, 0x0d, 0x3c, 0xda, 0xe4, 0xdd, 0xce, 0x43, 0x53, 0x85, 0x7a, 0xe2, 0x73, 0x49, 0x8e, /* Byte value: 0x37 */ + 0x9b, 0xe8, 0x4d, 0x04, 0xe5, 0xad, 0x17, 0x58, 0xfe, 0xdb, 0x55, 0x44, 0x79, 0x91, 0x41, 0xcc, /* Byte value: 0x38 */ + 0xb4, 0x83, 0xa3, 0xc4, 0x3d, 0x6b, 0x01, 0xc0, 0xb3, 0xa9, 0x7e, 0xd9, 0x1e, 0x9d, 0x2c, 0xa8, /* Byte value: 0x39 */ + 0x3e, 0x8c, 0x4c, 0xdd, 0x8e, 0xb9, 0x06, 0xc5, 0x2c, 0x70, 0xc7, 0x93, 0x44, 0xc8, 0xe8, 0x76, /* Byte value: 0x3a */ + 0x80, 0xd8, 0xc4, 0xaa, 0x2e, 0x8b, 0xd9, 0x48, 0xca, 0xb2, 0xbe, 0x98, 0x9a, 0x42, 0xf9, 0x6b, /* Byte value: 0x3b */ + 0x36, 0x60, 0xd1, 0x9f, 0x55, 0x4c, 0x5f, 0x20, 0x68, 0xd2, 0x15, 
0x7b, 0x05, 0x65, 0xb3, 0x8d, /* Byte value: 0x3c */ + 0xd3, 0x68, 0xb2, 0x13, 0x29, 0xfc, 0xc3, 0x99, 0xdf, 0x20, 0xd8, 0xe0, 0x75, 0x1d, 0x87, 0xe3, /* Byte value: 0x3d */ + 0x2b, 0x1d, 0x41, 0xe1, 0x54, 0x5d, 0xdb, 0x0b, 0x6f, 0x23, 0x42, 0xe9, 0xa6, 0xbb, 0xa1, 0xf8, /* Byte value: 0x3e */ + 0x01, 0xfc, 0x5b, 0x99, 0x23, 0x56, 0xa2, 0x54, 0xe9, 0x85, 0x8b, 0x1d, 0xa1, 0x5d, 0x33, 0x27, /* Byte value: 0x3f */ + 0x99, 0xd3, 0xfb, 0xf5, 0xa3, 0x01, 0x90, 0xf0, 0xef, 0x12, 0x80, 0x7e, 0xf8, 0x2b, 0x27, 0x82, /* Byte value: 0x40 */ + 0xdd, 0xc9, 0x36, 0x81, 0x38, 0x3e, 0xd0, 0x47, 0xa8, 0x1a, 0xb6, 0x46, 0x74, 0xbd, 0x76, 0xca, /* Byte value: 0x41 */ + 0xf6, 0xd4, 0x77, 0x60, 0x6c, 0x63, 0x0b, 0x4c, 0xc7, 0x39, 0xf4, 0xaf, 0xd2, 0x06, 0xd7, 0x32, /* Byte value: 0x42 */ + 0x68, 0xb6, 0xce, 0xdc, 0x26, 0x03, 0x73, 0xd3, 0xf2, 0x36, 0x43, 0x82, 0xcb, 0x7d, 0x69, 0x45, /* Byte value: 0x43 */ + 0x12, 0x20, 0x4f, 0x75, 0x33, 0x85, 0x35, 0xa1, 0x99, 0x4e, 0xb2, 0x29, 0x03, 0x23, 0xd0, 0x7b, /* Byte value: 0x44 */ + 0x8a, 0x0f, 0xef, 0x19, 0xb3, 0xd2, 0x07, 0x05, 0x9f, 0xd9, 0xb9, 0x4a, 0x5a, 0x55, 0xc4, 0xde, /* Byte value: 0x45 */ + 0x09, 0x10, 0xc6, 0xdb, 0xf8, 0xa3, 0xfb, 0xb1, 0xad, 0x27, 0x59, 0xf5, 0xe0, 0xf0, 0x68, 0xdc, /* Byte value: 0x46 */ + 0xa6, 0xa3, 0xec, 0xb1, 0x0e, 0xee, 0x34, 0x61, 0x2a, 0xe7, 0xcc, 0xf0, 0x1d, 0xbe, 0xfc, 0xd3, /* Byte value: 0x47 */ + 0x72, 0x7a, 0x1c, 0xeb, 0xce, 0x73, 0x1f, 0x97, 0x2f, 0xda, 0x23, 0x43, 0x89, 0xf3, 0xe2, 0xc5, /* Byte value: 0x48 */ + 0xc4, 0xc2, 0x09, 0xde, 0xb5, 0xb4, 0x99, 0xff, 0x8d, 0xba, 0x88, 0xa0, 0x16, 0xd4, 0xa8, 0x23, /* Byte value: 0x49 */ + 0x3a, 0xfa, 0xe3, 0xfc, 0x02, 0x22, 0xcb, 0x56, 0x0e, 0x21, 0xae, 0xe7, 0x85, 0x7f, 0x24, 0xea, /* Byte value: 0x4a */ + 0x74, 0x37, 0x05, 0x3b, 0x04, 0x44, 0x55, 0xac, 0x1c, 0x42, 0x9f, 0x0d, 0xc9, 0xfe, 0x48, 0x17, /* Byte value: 0x4b */ + 0xc1, 0x48, 0xfd, 0x66, 0x1a, 0x79, 0xf6, 0x38, 0x46, 0x6e, 0x6a, 0xc9, 0x76, 0x3e, 0x57, 0x98, /* Byte value: 0x4c */ + 0xca, 0x63, 0x8d, 0x4c, 0xa4, 0x76, 0x8a, 0x21, 0xfa, 0x80, 0xe6, 0x06, 0x17, 0x74, 0x59, 0x0a, /* Byte value: 0x4d */ + 0xb9, 0xe5, 0xca, 0x3e, 0x49, 0x53, 0x37, 0xe2, 0x3c, 0xdf, 0x4e, 0x58, 0x3f, 0xda, 0x88, 0xe8, /* Byte value: 0x4e */ + 0x70, 0x41, 0xaa, 0x1a, 0x88, 0xdf, 0x98, 0x3f, 0x3e, 0x13, 0xf6, 0x79, 0x08, 0x49, 0x84, 0x8b, /* Byte value: 0x4f */ + 0xbc, 0x6f, 0x3e, 0x86, 0xe6, 0x9e, 0x58, 0x25, 0xf7, 0x0b, 0xac, 0x31, 0x5f, 0x30, 0x77, 0x53, /* Byte value: 0x50 */ + 0x34, 0x5b, 0x67, 0x6e, 0x13, 0xe0, 0xd8, 0x88, 0x79, 0x1b, 0xc0, 0x41, 0x84, 0xdf, 0xd5, 0xc3, /* Byte value: 0x51 */ + 0x6a, 0x8d, 0x78, 0x2d, 0x60, 0xaf, 0xf4, 0x7b, 0xe3, 0xff, 0x96, 0xb8, 0x4a, 0xc7, 0x0f, 0x0b, /* Byte value: 0x52 */ + 0x48, 0x80, 0xff, 0x17, 0xcc, 0x51, 0xd4, 0xc1, 0x21, 0xfb, 0x8d, 0xa4, 0x0c, 0x8c, 0xc6, 0x2f, /* Byte value: 0x53 */ + 0x5f, 0x2a, 0x44, 0xda, 0x50, 0x19, 0x8e, 0xa7, 0x73, 0x61, 0xdd, 0xe4, 0x6f, 0x45, 0xe9, 0xef, /* Byte value: 0x54 */ + 0x8f, 0x85, 0x1b, 0xa1, 0x1c, 0x1f, 0x68, 0xc2, 0x54, 0x0d, 0x5b, 0x23, 0x3a, 0xbf, 0x3b, 0x65, /* Byte value: 0x55 */ + 0x69, 0x4a, 0x95, 0x45, 0x05, 0x55, 0xd1, 0x87, 0x1b, 0xb3, 0xc8, 0x9f, 0x6a, 0x20, 0x5a, 0x62, /* Byte value: 0x56 */ + 0x9a, 0x14, 0x16, 0x9d, 0xc6, 0xfb, 0xb5, 0x0c, 0x17, 0x5e, 0xde, 0x59, 0xd8, 0xcc, 0x72, 0xeb, /* Byte value: 0x57 */ + 0xda, 0x78, 0x74, 0xc8, 0xd1, 0x5f, 0x38, 0x28, 0x72, 0x07, 0x81, 0x15, 0x95, 0xed, 0xef, 0x3f, /* Byte value: 0x58 */ + 0xfd, 0xff, 0x07, 0x4a, 0xd2, 0x6c, 0x77, 0x55, 0x7b, 0xd7, 0x78, 0x60, 0xb3, 0x4c, 0xd9, 0xa0, /* Byte value: 
0x59 */ + 0x39, 0x3d, 0x0e, 0x94, 0x67, 0xd8, 0xee, 0xaa, 0xf6, 0x6d, 0xf0, 0xc0, 0xa5, 0x98, 0x71, 0x83, /* Byte value: 0x5a */ + 0xa5, 0x64, 0x01, 0xd9, 0x6b, 0x14, 0x11, 0x9d, 0xd2, 0xab, 0x92, 0xd7, 0x3d, 0x59, 0xa9, 0xba, /* Byte value: 0x5b */ + 0xfc, 0x03, 0x5c, 0xd3, 0xf1, 0x3a, 0xd5, 0x01, 0x92, 0x52, 0xf3, 0x7d, 0x12, 0x11, 0xea, 0x87, /* Byte value: 0x5c */ + 0xe0, 0x82, 0x97, 0x34, 0xd3, 0x7d, 0xf3, 0x7e, 0x7c, 0x26, 0x2f, 0xf2, 0x10, 0x92, 0xcb, 0xd5, /* Byte value: 0x5d */ + 0x0c, 0x9a, 0x32, 0x63, 0x57, 0x6e, 0x94, 0x76, 0x66, 0xf3, 0xbb, 0x9c, 0x80, 0x1a, 0x97, 0x67, /* Byte value: 0x5e */ + 0x57, 0xc6, 0xd9, 0x98, 0x8b, 0xec, 0xd7, 0x42, 0x37, 0xc3, 0x0f, 0x0c, 0x2e, 0xe8, 0xb2, 0x14, /* Byte value: 0x5f */ + 0x54, 0x01, 0x34, 0xf0, 0xee, 0x16, 0xf2, 0xbe, 0xcf, 0x8f, 0x51, 0x2b, 0x0e, 0x0f, 0xe7, 0x7d, /* Byte value: 0x60 */ + 0x8d, 0xbe, 0xad, 0x50, 0x5a, 0xb3, 0xef, 0x6a, 0x45, 0xc4, 0x8e, 0x19, 0xbb, 0x05, 0x5d, 0x2b, /* Byte value: 0x61 */ + 0xd2, 0x94, 0xe9, 0x8a, 0x0a, 0xaa, 0x61, 0xcd, 0x36, 0xa5, 0x53, 0xfd, 0xd4, 0x40, 0xb4, 0xc4, /* Byte value: 0x62 */ + 0xf7, 0x28, 0x2c, 0xf9, 0x4f, 0x35, 0xa9, 0x18, 0x2e, 0xbc, 0x7f, 0xb2, 0x73, 0x5b, 0xe4, 0x15, /* Byte value: 0x63 */ + 0x5e, 0xd6, 0x1f, 0x43, 0x73, 0x4f, 0x2c, 0xf3, 0x9a, 0xe4, 0x56, 0xf9, 0xce, 0x18, 0xda, 0xc8, /* Byte value: 0x64 */ + 0xf9, 0x89, 0xa8, 0x6b, 0x5e, 0xf7, 0xba, 0xc6, 0x59, 0x86, 0x11, 0x14, 0x72, 0xfb, 0x15, 0x3c, /* Byte value: 0x65 */ + 0xf2, 0xa2, 0xd8, 0x41, 0xe0, 0xf8, 0xc6, 0xdf, 0xe5, 0x68, 0x9d, 0xdb, 0x13, 0xb1, 0x1b, 0xae, /* Byte value: 0x66 */ + 0xe3, 0x45, 0x7a, 0x5c, 0xb6, 0x87, 0xd6, 0x82, 0x84, 0x6a, 0x71, 0xd5, 0x30, 0x75, 0x9e, 0xbc, /* Byte value: 0x67 */ + 0x3b, 0x06, 0xb8, 0x65, 0x21, 0x74, 0x69, 0x02, 0xe7, 0xa4, 0x25, 0xfa, 0x24, 0x22, 0x17, 0xcd, /* Byte value: 0x68 */ + 0x03, 0xc7, 0xed, 0x68, 0x65, 0xfa, 0x25, 0xfc, 0xf8, 0x4c, 0x5e, 0x27, 0x20, 0xe7, 0x55, 0x69, /* Byte value: 0x69 */ + 0xa1, 0x12, 0xae, 0xf8, 0xe7, 0x8f, 0xdc, 0x0e, 0xf0, 0xfa, 0xfb, 0xa3, 0xfc, 0xee, 0x65, 0x26, /* Byte value: 0x6a */ + 0x96, 0x8e, 0x24, 0xfe, 0x91, 0x95, 0x21, 0x7a, 0x71, 0xad, 0x65, 0xc5, 0x58, 0xd6, 0xe5, 0x8c, /* Byte value: 0x6b */ + 0xaa, 0x39, 0xde, 0xd2, 0x59, 0x80, 0xa0, 0x17, 0x4c, 0x14, 0x77, 0x6c, 0x9d, 0xa4, 0x6b, 0xb4, /* Byte value: 0x6c */ + 0x9f, 0x9e, 0xe2, 0x25, 0x69, 0x36, 0xda, 0xcb, 0xdc, 0x8a, 0x3c, 0x30, 0xb8, 0x26, 0x8d, 0x50, /* Byte value: 0x6d */ + 0x46, 0x21, 0x7b, 0x85, 0xdd, 0x93, 0xc7, 0x1f, 0x56, 0xc1, 0xe3, 0x02, 0x0d, 0x2c, 0x37, 0x06, /* Byte value: 0x6e */ + 0x41, 0x90, 0x39, 0xcc, 0x34, 0xf2, 0x2f, 0x70, 0x8c, 0xdc, 0xd4, 0x51, 0xec, 0x7c, 0xae, 0xf3, /* Byte value: 0x6f */ + 0x2a, 0xe1, 0x1a, 0x78, 0x77, 0x0b, 0x79, 0x5f, 0x86, 0xa6, 0xc9, 0xf4, 0x07, 0xe6, 0x92, 0xdf, /* Byte value: 0x70 */ + 0x60, 0x5a, 0x53, 0x9e, 0xfd, 0xf6, 0x2a, 0x36, 0xb6, 0x94, 0x91, 0x6a, 0x8a, 0xd0, 0x32, 0xbe, /* Byte value: 0x71 */ + 0xe5, 0x08, 0x63, 0x8c, 0x7c, 0xb0, 0x9c, 0xb9, 0xb7, 0xf2, 0xcd, 0x9b, 0x70, 0x78, 0x34, 0x6e, /* Byte value: 0x72 */ + 0x94, 0xb5, 0x92, 0x0f, 0xd7, 0x39, 0xa6, 0xd2, 0x60, 0x64, 0xb0, 0xff, 0xd9, 0x6c, 0x83, 0xc2, /* Byte value: 0x73 */ + 0xbe, 0x54, 0x88, 0x77, 0xa0, 0x32, 0xdf, 0x8d, 0xe6, 0xc2, 0x79, 0x0b, 0xde, 0x8a, 0x11, 0x1d, /* Byte value: 0x74 */ + 0x90, 0xc3, 0x3d, 0x2e, 0x5b, 0xa2, 0x6b, 0x41, 0x42, 0x35, 0xd9, 0x8b, 0x18, 0xdb, 0x4f, 0x5e, /* Byte value: 0x75 */ + 0x2f, 0x6b, 0xee, 0xc0, 0xd8, 0xc6, 0x16, 0x98, 0x4d, 0x72, 0x2b, 0x9d, 0x67, 0x0c, 0x6d, 0x64, /* Byte value: 0x76 */ + 0x9d, 0xa5, 0x54, 0xd4, 0x2f, 0x9a, 
0x5d, 0x63, 0xcd, 0x43, 0xe9, 0x0a, 0x39, 0x9c, 0xeb, 0x1e, /* Byte value: 0x77 */ + 0x81, 0x24, 0x9f, 0x33, 0x0d, 0xdd, 0x7b, 0x1c, 0x23, 0x37, 0x35, 0x85, 0x3b, 0x1f, 0xca, 0x4c, /* Byte value: 0x78 */ + 0xa2, 0xd5, 0x43, 0x90, 0x82, 0x75, 0xf9, 0xf2, 0x08, 0xb6, 0xa5, 0x84, 0xdc, 0x09, 0x30, 0x4f, /* Byte value: 0x79 */ + 0x76, 0x0c, 0xb3, 0xca, 0x42, 0xe8, 0xd2, 0x04, 0x0d, 0x8b, 0x4a, 0x37, 0x48, 0x44, 0x2e, 0x59, /* Byte value: 0x7a */ + 0x4b, 0x47, 0x12, 0x7f, 0xa9, 0xab, 0xf1, 0x3d, 0xd9, 0xb7, 0xd3, 0x83, 0x2c, 0x6b, 0x93, 0x46, /* Byte value: 0x7b */ + 0xe4, 0xf4, 0x38, 0x15, 0x5f, 0xe6, 0x3e, 0xed, 0x5e, 0x77, 0x46, 0x86, 0xd1, 0x25, 0x07, 0x49, /* Byte value: 0x7c */ + 0x06, 0x4d, 0x19, 0xd0, 0xca, 0x37, 0x4a, 0x3b, 0x33, 0x98, 0xbc, 0x4e, 0x40, 0x0d, 0xaa, 0xd2, /* Byte value: 0x7d */ + 0x55, 0xfd, 0x6f, 0x69, 0xcd, 0x40, 0x50, 0xea, 0x26, 0x0a, 0xda, 0x36, 0xaf, 0x52, 0xd4, 0x5a, /* Byte value: 0x7e */ + 0xae, 0x4f, 0x71, 0xf3, 0xd5, 0x1b, 0x6d, 0x84, 0x6e, 0x45, 0x1e, 0x18, 0x5c, 0x13, 0xa7, 0x28, /* Byte value: 0x7f */ + 0x1c, 0x81, 0xcb, 0xe7, 0x22, 0x47, 0x26, 0x7f, 0xee, 0x74, 0xdc, 0x8f, 0x02, 0x83, 0x21, 0x52, /* Byte value: 0x80 */ + 0x75, 0xcb, 0x5e, 0xa2, 0x27, 0x12, 0xf7, 0xf8, 0xf5, 0xc7, 0x14, 0x10, 0x68, 0xa3, 0x7b, 0x30, /* Byte value: 0x81 */ + 0x5b, 0x5c, 0xeb, 0xfb, 0xdc, 0x82, 0x43, 0x34, 0x51, 0x30, 0xb4, 0x90, 0xae, 0xf2, 0x25, 0x73, /* Byte value: 0x82 */ + 0x04, 0x76, 0xaf, 0x21, 0x8c, 0x9b, 0xcd, 0x93, 0x22, 0x51, 0x69, 0x74, 0xc1, 0xb7, 0xcc, 0x9c, /* Byte value: 0x83 */ + 0x27, 0x87, 0x73, 0x82, 0x03, 0x33, 0x4f, 0x7d, 0x09, 0xd0, 0xf9, 0x75, 0x26, 0xa1, 0x36, 0x9f, /* Byte value: 0x84 */ + 0x4d, 0x0a, 0x0b, 0xaf, 0x63, 0x9c, 0xbb, 0x06, 0xea, 0x2f, 0x6f, 0xcd, 0x6c, 0x66, 0x39, 0x94, /* Byte value: 0x85 */ + 0x9c, 0x59, 0x0f, 0x4d, 0x0c, 0xcc, 0xff, 0x37, 0x24, 0xc6, 0x62, 0x17, 0x98, 0xc1, 0xd8, 0x39, /* Byte value: 0x86 */ + 0x20, 0x36, 0x31, 0xcb, 0xea, 0x52, 0xa7, 0x12, 0xd3, 0xcd, 0xce, 0x26, 0xc7, 0xf1, 0xaf, 0x6a, /* Byte value: 0x87 */ + 0x53, 0xb0, 0x76, 0xb9, 0x07, 0x77, 0x1a, 0xd1, 0x15, 0x92, 0x66, 0x78, 0xef, 0x5f, 0x7e, 0x88, /* Byte value: 0x88 */ + 0xd0, 0xaf, 0x5f, 0x7b, 0x4c, 0x06, 0xe6, 0x65, 0x27, 0x6c, 0x86, 0xc7, 0x55, 0xfa, 0xd2, 0x8a, /* Byte value: 0x89 */ + 0x95, 0x49, 0xc9, 0x96, 0xf4, 0x6f, 0x04, 0x86, 0x89, 0xe1, 0x3b, 0xe2, 0x78, 0x31, 0xb0, 0xe5, /* Byte value: 0x8a */ + 0xf3, 0x5e, 0x83, 0xd8, 0xc3, 0xae, 0x64, 0x8b, 0x0c, 0xed, 0x16, 0xc6, 0xb2, 0xec, 0x28, 0x89, /* Byte value: 0x8b */ + 0x85, 0x52, 0x30, 0x12, 0x81, 0x46, 0xb6, 0x8f, 0x01, 0x66, 0x5c, 0xf1, 0xfa, 0xa8, 0x06, 0xd0, /* Byte value: 0x8c */ + 0x88, 0x34, 0x59, 0xe8, 0xf5, 0x7e, 0x80, 0xad, 0x8e, 0x10, 0x6c, 0x70, 0xdb, 0xef, 0xa2, 0x90, /* Byte value: 0x8d */ + 0xb0, 0xf5, 0x0c, 0xe5, 0xb1, 0xf0, 0xcc, 0x53, 0x91, 0xf8, 0x17, 0xad, 0xdf, 0x2a, 0xe0, 0x34, /* Byte value: 0x8e */ + 0x22, 0x0d, 0x87, 0x3a, 0xac, 0xfe, 0x20, 0xba, 0xc2, 0x04, 0x1b, 0x1c, 0x46, 0x4b, 0xc9, 0x24, /* Byte value: 0x8f */ + 0x0b, 0x2b, 0x70, 0x2a, 0xbe, 0x0f, 0x7c, 0x19, 0xbc, 0xee, 0x8c, 0xcf, 0x61, 0x4a, 0x0e, 0x92, /* Byte value: 0x90 */ + 0x02, 0x3b, 0xb6, 0xf1, 0x46, 0xac, 0x87, 0xa8, 0x11, 0xc9, 0xd5, 0x3a, 0x81, 0xba, 0x66, 0x4e, /* Byte value: 0x91 */ + 0xc9, 0xa4, 0x60, 0x24, 0xc1, 0x8c, 0xaf, 0xdd, 0x02, 0xcc, 0xb8, 0x21, 0x37, 0x93, 0x0c, 0x63, /* Byte value: 0x92 */ + 0xdb, 0x84, 0x2f, 0x51, 0xf2, 0x09, 0x9a, 0x7c, 0x9b, 0x82, 0x0a, 0x08, 0x34, 0xb0, 0xdc, 0x18, /* Byte value: 0x93 */ + 0xb6, 0xb8, 0x15, 0x35, 0x7b, 0xc7, 0x86, 0x68, 0xa2, 0x60, 0xab, 0xe3, 0x9f, 
0x27, 0x4a, 0xe6, /* Byte value: 0x94 */ + 0x40, 0x6c, 0x62, 0x55, 0x17, 0xa4, 0x8d, 0x24, 0x65, 0x59, 0x5f, 0x4c, 0x4d, 0x21, 0x9d, 0xd4, /* Byte value: 0x95 */ + 0x4e, 0xcd, 0xe6, 0xc7, 0x06, 0x66, 0x9e, 0xfa, 0x12, 0x63, 0x31, 0xea, 0x4c, 0x81, 0x6c, 0xfd, /* Byte value: 0x96 */ + 0xd7, 0x1e, 0x1d, 0x32, 0xa5, 0x67, 0x0e, 0x0a, 0xfd, 0x71, 0xb1, 0x94, 0xb4, 0xaa, 0x4b, 0x7f, /* Byte value: 0x97 */ + 0xab, 0xc5, 0x85, 0x4b, 0x7a, 0xd6, 0x02, 0x43, 0xa5, 0x91, 0xfc, 0x71, 0x3c, 0xf9, 0x58, 0x93, /* Byte value: 0x98 */ + 0x98, 0x2f, 0xa0, 0x6c, 0x80, 0x57, 0x32, 0xa4, 0x06, 0x97, 0x0b, 0x63, 0x59, 0x76, 0x14, 0xa5, /* Byte value: 0x99 */ + 0xc8, 0x58, 0x3b, 0xbd, 0xe2, 0xda, 0x0d, 0x89, 0xeb, 0x49, 0x33, 0x3c, 0x96, 0xce, 0x3f, 0x44, /* Byte value: 0x9a */ + 0xaf, 0xb3, 0x2a, 0x6a, 0xf6, 0x4d, 0xcf, 0xd0, 0x87, 0xc0, 0x95, 0x05, 0xfd, 0x4e, 0x94, 0x0f, /* Byte value: 0x9b */ + 0xa3, 0x29, 0x18, 0x09, 0xa1, 0x23, 0x5b, 0xa6, 0xe1, 0x33, 0x2e, 0x99, 0x7d, 0x54, 0x03, 0x68, /* Byte value: 0x9c */ + 0x44, 0x1a, 0xcd, 0x74, 0x9b, 0x3f, 0x40, 0xb7, 0x47, 0x08, 0x36, 0x38, 0x8c, 0x96, 0x51, 0x48, /* Byte value: 0x9d */ + 0x84, 0xae, 0x6b, 0x8b, 0xa2, 0x10, 0x14, 0xdb, 0xe8, 0xe3, 0xd7, 0xec, 0x5b, 0xf5, 0x35, 0xf7, /* Byte value: 0x9e */ + 0x9e, 0x62, 0xb9, 0xbc, 0x4a, 0x60, 0x78, 0x9f, 0x35, 0x0f, 0xb7, 0x2d, 0x19, 0x7b, 0xbe, 0x77, /* Byte value: 0x9f */ + 0xe2, 0xb9, 0x21, 0xc5, 0x95, 0xd1, 0x74, 0xd6, 0x6d, 0xef, 0xfa, 0xc8, 0x91, 0x28, 0xad, 0x9b, /* Byte value: 0xa0 */ + 0xcd, 0xd2, 0xcf, 0x05, 0x4d, 0x17, 0x62, 0x4e, 0x20, 0x9d, 0xd1, 0x55, 0xf6, 0x24, 0xc0, 0xff, /* Byte value: 0xa1 */ + 0xfb, 0xb2, 0x1e, 0x9a, 0x18, 0x5b, 0x3d, 0x6e, 0x48, 0x4f, 0xc4, 0x2e, 0xf3, 0x41, 0x73, 0x72, /* Byte value: 0xa2 */ + 0x45, 0xe6, 0x96, 0xed, 0xb8, 0x69, 0xe2, 0xe3, 0xae, 0x8d, 0xbd, 0x25, 0x2d, 0xcb, 0x62, 0x6f, /* Byte value: 0xa3 */ + 0x23, 0xf1, 0xdc, 0xa3, 0x8f, 0xa8, 0x82, 0xee, 0x2b, 0x81, 0x90, 0x01, 0xe7, 0x16, 0xfa, 0x03, /* Byte value: 0xa4 */ + 0xc6, 0xf9, 0xbf, 0x2f, 0xf3, 0x18, 0x1e, 0x57, 0x9c, 0x73, 0x5d, 0x9a, 0x97, 0x6e, 0xce, 0x6d, /* Byte value: 0xa5 */ + 0x3d, 0x4b, 0xa1, 0xb5, 0xeb, 0x43, 0x23, 0x39, 0xd4, 0x3c, 0x99, 0xb4, 0x64, 0x2f, 0xbd, 0x1f, /* Byte value: 0xa6 */ + 0x17, 0xaa, 0xbb, 0xcd, 0x9c, 0x48, 0x5a, 0x66, 0x52, 0x9a, 0x50, 0x40, 0x63, 0xc9, 0x2f, 0xc0, /* Byte value: 0xa7 */ + 0xeb, 0xa9, 0xe7, 0x1e, 0x6d, 0x72, 0x8f, 0x67, 0xc0, 0xc8, 0xa3, 0x3d, 0x71, 0xd8, 0xc5, 0x47, /* Byte value: 0xa8 */ + 0x2e, 0x97, 0xb5, 0x59, 0xfb, 0x90, 0xb4, 0xcc, 0xa4, 0xf7, 0xa0, 0x80, 0xc6, 0x51, 0x5e, 0x43, /* Byte value: 0xa9 */ + 0xcb, 0x9f, 0xd6, 0xd5, 0x87, 0x20, 0x28, 0x75, 0x13, 0x05, 0x6d, 0x1b, 0xb6, 0x29, 0x6a, 0x2d, /* Byte value: 0xaa */ + 0xb2, 0xce, 0xba, 0x14, 0xf7, 0x5c, 0x4b, 0xfb, 0x80, 0x31, 0xc2, 0x97, 0x5e, 0x90, 0x86, 0x7a, /* Byte value: 0xab */ + 0x5c, 0xed, 0xa9, 0xb2, 0x35, 0xe3, 0xab, 0x5b, 0x8b, 0x2d, 0x83, 0xc3, 0x4f, 0xa2, 0xbc, 0x86, /* Byte value: 0xac */ + 0x4a, 0xbb, 0x49, 0xe6, 0x8a, 0xfd, 0x53, 0x69, 0x30, 0x32, 0x58, 0x9e, 0x8d, 0x36, 0xa0, 0x61, /* Byte value: 0xad */ + 0x64, 0x2c, 0xfc, 0xbf, 0x71, 0x6d, 0xe7, 0xa5, 0x94, 0xc5, 0xf8, 0x1e, 0x4b, 0x67, 0xfe, 0x22, /* Byte value: 0xae */ + 0x71, 0xbd, 0xf1, 0x83, 0xab, 0x89, 0x3a, 0x6b, 0xd7, 0x96, 0x7d, 0x64, 0xa9, 0x14, 0xb7, 0xac, /* Byte value: 0xaf */ + 0x35, 0xa7, 0x3c, 0xf7, 0x30, 0xb6, 0x7a, 0xdc, 0x90, 0x9e, 0x4b, 0x5c, 0x25, 0x82, 0xe6, 0xe4, /* Byte value: 0xb0 */ + 0xc7, 0x05, 0xe4, 0xb6, 0xd0, 0x4e, 0xbc, 0x03, 0x75, 0xf6, 0xd6, 0x87, 0x36, 0x33, 0xfd, 0x4a, /* Byte value: 0xb1 */ + 
0x07, 0xb1, 0x42, 0x49, 0xe9, 0x61, 0xe8, 0x6f, 0xda, 0x1d, 0x37, 0x53, 0xe1, 0x50, 0x99, 0xf5, /* Byte value: 0xb2 */ + 0x59, 0x67, 0x5d, 0x0a, 0x9a, 0x2e, 0xc4, 0x9c, 0x40, 0xf9, 0x61, 0xaa, 0x2f, 0x48, 0x43, 0x3d, /* Byte value: 0xb3 */ + 0x38, 0xc1, 0x55, 0x0d, 0x44, 0x8e, 0x4c, 0xfe, 0x1f, 0xe8, 0x7b, 0xdd, 0x04, 0xc5, 0x42, 0xa4, /* Byte value: 0xb4 */ + 0xea, 0x55, 0xbc, 0x87, 0x4e, 0x24, 0x2d, 0x33, 0x29, 0x4d, 0x28, 0x20, 0xd0, 0x85, 0xf6, 0x60, /* Byte value: 0xb5 */ + 0x37, 0x9c, 0x8a, 0x06, 0x76, 0x1a, 0xfd, 0x74, 0x81, 0x57, 0x9e, 0x66, 0xa4, 0x38, 0x80, 0xaa, /* Byte value: 0xb6 */ + 0x63, 0x9d, 0xbe, 0xf6, 0x98, 0x0c, 0x0f, 0xca, 0x4e, 0xd8, 0xcf, 0x4d, 0xaa, 0x37, 0x67, 0xd7, /* Byte value: 0xb7 */ + 0x18, 0xf7, 0x64, 0xc6, 0xae, 0xdc, 0xeb, 0xec, 0xcc, 0x25, 0xb5, 0xfb, 0xc3, 0x34, 0xed, 0xce, /* Byte value: 0xb8 */ + 0xa7, 0x5f, 0xb7, 0x28, 0x2d, 0xb8, 0x96, 0x35, 0xc3, 0x62, 0x47, 0xed, 0xbc, 0xe3, 0xcf, 0xf4, /* Byte value: 0xb9 */ + 0x58, 0x9b, 0x06, 0x93, 0xb9, 0x78, 0x66, 0xc8, 0xa9, 0x7c, 0xea, 0xb7, 0x8e, 0x15, 0x70, 0x1a, /* Byte value: 0xba */ + 0x11, 0xe7, 0xa2, 0x1d, 0x56, 0x7f, 0x10, 0x5d, 0x61, 0x02, 0xec, 0x0e, 0x23, 0xc4, 0x85, 0x12, /* Byte value: 0xbb */ + 0xf5, 0x13, 0x9a, 0x08, 0x09, 0x99, 0x2e, 0xb0, 0x3f, 0x75, 0xaa, 0x88, 0xf2, 0xe1, 0x82, 0x5b, /* Byte value: 0xbc */ + 0xd9, 0xbf, 0x99, 0xa0, 0xb4, 0xa5, 0x1d, 0xd4, 0x8a, 0x4b, 0xdf, 0x32, 0xb5, 0x0a, 0xba, 0x56, /* Byte value: 0xbd */ + 0xe9, 0x92, 0x51, 0xef, 0x2b, 0xde, 0x08, 0xcf, 0xd1, 0x01, 0x76, 0x07, 0xf0, 0x62, 0xa3, 0x09, /* Byte value: 0xbe */ + 0x25, 0xbc, 0xc5, 0x73, 0x45, 0x9f, 0xc8, 0xd5, 0x18, 0x19, 0x2c, 0x4f, 0xa7, 0x1b, 0x50, 0xd1, /* Byte value: 0xbf */ + 0xf8, 0x75, 0xf3, 0xf2, 0x7d, 0xa1, 0x18, 0x92, 0xb0, 0x03, 0x9a, 0x09, 0xd3, 0xa6, 0x26, 0x1b, /* Byte value: 0xc0 */ + 0x97, 0x72, 0x7f, 0x67, 0xb2, 0xc3, 0x83, 0x2e, 0x98, 0x28, 0xee, 0xd8, 0xf9, 0x8b, 0xd6, 0xab, /* Byte value: 0xc1 */ + 0xb1, 0x09, 0x57, 0x7c, 0x92, 0xa6, 0x6e, 0x07, 0x78, 0x7d, 0x9c, 0xb0, 0x7e, 0x77, 0xd3, 0x13, /* Byte value: 0xc2 */ + 0x19, 0x0b, 0x3f, 0x5f, 0x8d, 0x8a, 0x49, 0xb8, 0x25, 0xa0, 0x3e, 0xe6, 0x62, 0x69, 0xde, 0xe9, /* Byte value: 0xc3 */ + 0x0f, 0x5d, 0xdf, 0x0b, 0x32, 0x94, 0xb1, 0x8a, 0x9e, 0xbf, 0xe5, 0xbb, 0xa0, 0xfd, 0xc2, 0x0e, /* Byte value: 0xc4 */ + 0x89, 0xc8, 0x02, 0x71, 0xd6, 0x28, 0x22, 0xf9, 0x67, 0x95, 0xe7, 0x6d, 0x7a, 0xb2, 0x91, 0xb7, /* Byte value: 0xc5 */ + 0xdc, 0x35, 0x6d, 0x18, 0x1b, 0x68, 0x72, 0x13, 0x41, 0x9f, 0x3d, 0x5b, 0xd5, 0xe0, 0x45, 0xed, /* Byte value: 0xc6 */ + 0x08, 0xec, 0x9d, 0x42, 0xdb, 0xf5, 0x59, 0xe5, 0x44, 0xa2, 0xd2, 0xe8, 0x41, 0xad, 0x5b, 0xfb, /* Byte value: 0xc7 */ + 0xf1, 0x65, 0x35, 0x29, 0x85, 0x02, 0xe3, 0x23, 0x1d, 0x24, 0xc3, 0xfc, 0x33, 0x56, 0x4e, 0xc7, /* Byte value: 0xc8 */ + 0xa8, 0x02, 0x68, 0x23, 0x1f, 0x2c, 0x27, 0xbf, 0x5d, 0xdd, 0xa2, 0x56, 0x1c, 0x1e, 0x0d, 0xfa, /* Byte value: 0xc9 */ + 0x0e, 0xa1, 0x84, 0x92, 0x11, 0xc2, 0x13, 0xde, 0x77, 0x3a, 0x6e, 0xa6, 0x01, 0xa0, 0xf1, 0x29, /* Byte value: 0xca */ + 0x6f, 0x07, 0x8c, 0x95, 0xcf, 0x62, 0x9b, 0xbc, 0x28, 0x2b, 0x74, 0xd1, 0x2a, 0x2d, 0xf0, 0xb0, /* Byte value: 0xcb */ + 0x7d, 0x27, 0xc3, 0xe0, 0xfc, 0xe7, 0xae, 0x1d, 0xb1, 0x65, 0xc6, 0xf8, 0x29, 0x0e, 0x20, 0xcb, /* Byte value: 0xcc */ + 0xd4, 0xd9, 0xf0, 0x5a, 0xc0, 0x9d, 0x2b, 0xf6, 0x05, 0x3d, 0xef, 0xb3, 0x94, 0x4d, 0x1e, 0x16, /* Byte value: 0xcd */ + 0xbd, 0x93, 0x65, 0x1f, 0xc5, 0xc8, 0xfa, 0x71, 0x1e, 0x8e, 0x27, 0x2c, 0xfe, 0x6d, 0x44, 0x74, /* Byte value: 0xce */ + 0x86, 0x95, 0xdd, 0x7a, 0xe4, 0xbc, 0x93, 
0x73, 0xf9, 0x2a, 0x02, 0xd6, 0xda, 0x4f, 0x53, 0xb9, /* Byte value: 0xcf */ + 0xec, 0x18, 0xa5, 0x57, 0x84, 0x13, 0x67, 0x08, 0x1a, 0xd5, 0x94, 0x6e, 0x90, 0x88, 0x5c, 0xb2, /* Byte value: 0xd0 */ + 0x32, 0x16, 0x7e, 0xbe, 0xd9, 0xd7, 0x92, 0xb3, 0x4a, 0x83, 0x7c, 0x0f, 0xc4, 0xd2, 0x7f, 0x11, /* Byte value: 0xd1 */ + 0x7e, 0xe0, 0x2e, 0x88, 0x99, 0x1d, 0x8b, 0xe1, 0x49, 0x29, 0x98, 0xdf, 0x09, 0xe9, 0x75, 0xa2, /* Byte value: 0xd2 */ + 0xed, 0xe4, 0xfe, 0xce, 0xa7, 0x45, 0xc5, 0x5c, 0xf3, 0x50, 0x1f, 0x73, 0x31, 0xd5, 0x6f, 0x95, /* Byte value: 0xd3 */ + 0x6e, 0xfb, 0xd7, 0x0c, 0xec, 0x34, 0x39, 0xe8, 0xc1, 0xae, 0xff, 0xcc, 0x8b, 0x70, 0xc3, 0x97, /* Byte value: 0xd4 */ + 0xc3, 0x73, 0x4b, 0x97, 0x5c, 0xd5, 0x71, 0x90, 0x57, 0xa7, 0xbf, 0xf3, 0xf7, 0x84, 0x31, 0xd6, /* Byte value: 0xd5 */ + 0x21, 0xca, 0x6a, 0x52, 0xc9, 0x04, 0x05, 0x46, 0x3a, 0x48, 0x45, 0x3b, 0x66, 0xac, 0x9c, 0x4d, /* Byte value: 0xd6 */ + 0x62, 0x61, 0xe5, 0x6f, 0xbb, 0x5a, 0xad, 0x9e, 0xa7, 0x5d, 0x44, 0x50, 0x0b, 0x6a, 0x54, 0xf0, /* Byte value: 0xd7 */ + 0xce, 0x15, 0x22, 0x6d, 0x28, 0xed, 0x47, 0xb2, 0xd8, 0xd1, 0x8f, 0x72, 0xd6, 0xc3, 0x95, 0x96, /* Byte value: 0xd8 */ + 0x93, 0x04, 0xd0, 0x46, 0x3e, 0x58, 0x4e, 0xbd, 0xba, 0x79, 0x87, 0xac, 0x38, 0x3c, 0x1a, 0x37, /* Byte value: 0xd9 */ + 0x4c, 0xf6, 0x50, 0x36, 0x40, 0xca, 0x19, 0x52, 0x03, 0xaa, 0xe4, 0xd0, 0xcd, 0x3b, 0x0a, 0xb3, /* Byte value: 0xda */ + 0xde, 0x0e, 0xdb, 0xe9, 0x5d, 0xc4, 0xf5, 0xbb, 0x50, 0x56, 0xe8, 0x61, 0x54, 0x5a, 0x23, 0xa3, /* Byte value: 0xdb */ + 0x3f, 0x70, 0x17, 0x44, 0xad, 0xef, 0xa4, 0x91, 0xc5, 0xf5, 0x4c, 0x8e, 0xe5, 0x95, 0xdb, 0x51, /* Byte value: 0xdc */ + 0x43, 0xab, 0x8f, 0x3d, 0x72, 0x5e, 0xa8, 0xd8, 0x9d, 0x15, 0x01, 0x6b, 0x6d, 0xc6, 0xc8, 0xbd, /* Byte value: 0xdd */ + 0x73, 0x86, 0x47, 0x72, 0xed, 0x25, 0xbd, 0xc3, 0xc6, 0x5f, 0xa8, 0x5e, 0x28, 0xae, 0xd1, 0xe2, /* Byte value: 0xde */ + 0xad, 0x88, 0x9c, 0x9b, 0xb0, 0xe1, 0x48, 0x78, 0x96, 0x09, 0x40, 0x3f, 0x7c, 0xf4, 0xf2, 0x41, /* Byte value: 0xdf */ + 0xf0, 0x99, 0x6e, 0xb0, 0xa6, 0x54, 0x41, 0x77, 0xf4, 0xa1, 0x48, 0xe1, 0x92, 0x0b, 0x7d, 0xe0, /* Byte value: 0xe0 */ + 0x47, 0xdd, 0x20, 0x1c, 0xfe, 0xc5, 0x65, 0x4b, 0xbf, 0x44, 0x68, 0x1f, 0xac, 0x71, 0x04, 0x21, /* Byte value: 0xe1 */ + 0x3c, 0xb7, 0xfa, 0x2c, 0xc8, 0x15, 0x81, 0x6d, 0x3d, 0xb9, 0x12, 0xa9, 0xc5, 0x72, 0x8e, 0x38, /* Byte value: 0xe2 */ + 0x61, 0xa6, 0x08, 0x07, 0xde, 0xa0, 0x88, 0x62, 0x5f, 0x11, 0x1a, 0x77, 0x2b, 0x8d, 0x01, 0x99, /* Byte value: 0xe3 */ + 0x56, 0x3a, 0x82, 0x01, 0xa8, 0xba, 0x75, 0x16, 0xde, 0x46, 0x84, 0x11, 0x8f, 0xb5, 0x81, 0x33, /* Byte value: 0xe4 */ + 0x29, 0x26, 0xf7, 0x10, 0x12, 0xf1, 0x5c, 0xa3, 0x7e, 0xea, 0x97, 0xd3, 0x27, 0x01, 0xc7, 0xb6, /* Byte value: 0xe5 */ + 0x7b, 0x6a, 0xda, 0x30, 0x36, 0xd0, 0xe4, 0x26, 0x82, 0xfd, 0x7a, 0xb6, 0x69, 0x03, 0x8a, 0x19, /* Byte value: 0xe6 */ + 0x10, 0x1b, 0xf9, 0x84, 0x75, 0x29, 0xb2, 0x09, 0x88, 0x87, 0x67, 0x13, 0x82, 0x99, 0xb6, 0x35, /* Byte value: 0xe7 */ + 0xff, 0xc4, 0xb1, 0xbb, 0x94, 0xc0, 0xf0, 0xfd, 0x6a, 0x1e, 0xad, 0x5a, 0x32, 0xf6, 0xbf, 0xee, /* Byte value: 0xe8 */ + 0x5d, 0x11, 0xf2, 0x2b, 0x16, 0xb5, 0x09, 0x0f, 0x62, 0xa8, 0x08, 0xde, 0xee, 0xff, 0x8f, 0xa1, /* Byte value: 0xe9 */ + 0x31, 0xd1, 0x93, 0xd6, 0xbc, 0x2d, 0xb7, 0x4f, 0xb2, 0xcf, 0x22, 0x28, 0xe4, 0x35, 0x2a, 0x78, /* Byte value: 0xea */ + 0xb7, 0x44, 0x4e, 0xac, 0x58, 0x91, 0x24, 0x3c, 0x4b, 0xe5, 0x20, 0xfe, 0x3e, 0x7a, 0x79, 0xc1, /* Byte value: 0xeb */ + 0xb3, 0x32, 0xe1, 0x8d, 0xd4, 0x0a, 0xe9, 0xaf, 0x69, 0xb4, 0x49, 0x8a, 0xff, 0xcd, 
0xb5, 0x5d, /* Byte value: 0xec */ + 0x6b, 0x71, 0x23, 0xb4, 0x43, 0xf9, 0x56, 0x2f, 0x0a, 0x7a, 0x1d, 0xa5, 0xeb, 0x9a, 0x3c, 0x2c, /* Byte value: 0xed */ + 0xc0, 0xb4, 0xa6, 0xff, 0x39, 0x2f, 0x54, 0x6c, 0xaf, 0xeb, 0xe1, 0xd4, 0xd7, 0x63, 0x64, 0xbf, /* Byte value: 0xee */ + 0x7f, 0x1c, 0x75, 0x11, 0xba, 0x4b, 0x29, 0xb5, 0xa0, 0xac, 0x13, 0xc2, 0xa8, 0xb4, 0x46, 0x85, /* Byte value: 0xef */ + 0x1e, 0xba, 0x7d, 0x16, 0x64, 0xeb, 0xa1, 0xd7, 0xff, 0xbd, 0x09, 0xb5, 0x83, 0x39, 0x47, 0x1c, /* Byte value: 0xf0 */ + 0xd1, 0x53, 0x04, 0xe2, 0x6f, 0x50, 0x44, 0x31, 0xce, 0xe9, 0x0d, 0xda, 0xf4, 0xa7, 0xe1, 0xad, /* Byte value: 0xf1 */ + 0x78, 0xad, 0x37, 0x58, 0x53, 0x2a, 0xc1, 0xda, 0x7a, 0xb1, 0x24, 0x91, 0x49, 0xe4, 0xdf, 0x70, /* Byte value: 0xf2 */ + 0xc2, 0x8f, 0x10, 0x0e, 0x7f, 0x83, 0xd3, 0xc4, 0xbe, 0x22, 0x34, 0xee, 0x56, 0xd9, 0x02, 0xf1, /* Byte value: 0xf3 */ + 0xbb, 0xde, 0x7c, 0xcf, 0x0f, 0xff, 0xb0, 0x4a, 0x2d, 0x16, 0x9b, 0x62, 0xbe, 0x60, 0xee, 0xa6, /* Byte value: 0xf4 */ + 0x6d, 0x3c, 0x3a, 0x64, 0x89, 0xce, 0x1c, 0x14, 0x39, 0xe2, 0xa1, 0xeb, 0xab, 0x97, 0x96, 0xfe, /* Byte value: 0xf5 */ + 0xe1, 0x7e, 0xcc, 0xad, 0xf0, 0x2b, 0x51, 0x2a, 0x95, 0xa3, 0xa4, 0xef, 0xb1, 0xcf, 0xf8, 0xf2, /* Byte value: 0xf6 */ + 0x1f, 0x46, 0x26, 0x8f, 0x47, 0xbd, 0x03, 0x83, 0x16, 0x38, 0x82, 0xa8, 0x22, 0x64, 0x74, 0x3b, /* Byte value: 0xf7 */ + 0x82, 0xe3, 0x72, 0x5b, 0x68, 0x27, 0x5e, 0xe0, 0xdb, 0x7b, 0x6b, 0xa2, 0x1b, 0xf8, 0x9f, 0x25, /* Byte value: 0xf8 */ + 0xba, 0x22, 0x27, 0x56, 0x2c, 0xa9, 0x12, 0x1e, 0xc4, 0x93, 0x10, 0x7f, 0x1f, 0x3d, 0xdd, 0x81, /* Byte value: 0xf9 */ + 0x8c, 0x42, 0xf6, 0xc9, 0x79, 0xe5, 0x4d, 0x3e, 0xac, 0x41, 0x05, 0x04, 0x1a, 0x58, 0x6e, 0x0c, /* Byte value: 0xfa */ + 0xcf, 0xe9, 0x79, 0xf4, 0x0b, 0xbb, 0xe5, 0xe6, 0x31, 0x54, 0x04, 0x6f, 0x77, 0x9e, 0xa6, 0xb1, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0xfe, 0x38, 0xea, 0x22, 0xb7, 0x96, 0x52, 0xa9, 0x83, 0x9b, 0x26, 0x47, 0x93, 0xab, 0x8c, 0xc9, /* Byte value: 0xfd */ + 0x42, 0x57, 0xd4, 0xa4, 0x51, 0x08, 0x0a, 0x8c, 0x74, 0x90, 0x8a, 0x76, 0xcc, 0x9b, 0xfb, 0x9a, /* Byte value: 0xfe */ + 0xd6, 0xe2, 0x46, 0xab, 0x86, 0x31, 0xac, 0x5e, 0x14, 0xf4, 0x3a, 0x89, 0x15, 0xf7, 0x78, 0x58, /* Byte value: 0xff */ + + /* Matrix row: 7 */ + 0xa5, 0x9d, 0x85, 0x46, 0x8a, 0x0e, 0x61, 0xc4, 0x42, 0xe7, 0xb4, 0xca, 0xc0, 0x3d, 0xf4, 0xe8, /* Byte value: 0x00 */ + 0x2d, 0x30, 0x7c, 0xb1, 0xb2, 0x96, 0xab, 0x86, 0x12, 0x3f, 0xbc, 0x10, 0x25, 0xe6, 0x1e, 0xf1, /* Byte value: 0x01 */ + 0x32, 0xb3, 0x29, 0xf1, 0xe6, 0xb1, 0x53, 0x61, 0xca, 0xf8, 0xa0, 0xd0, 0x5a, 0xc4, 0x96, 0x37, /* Byte value: 0x02 */ + 0x8f, 0xc2, 0x69, 0x55, 0xf0, 0x82, 0xf2, 0x7d, 0x39, 0xb6, 0xe7, 0xff, 0x11, 0x3a, 0xe8, 0x2f, /* Byte value: 0x03 */ + 0x0e, 0xde, 0xe3, 0x87, 0x53, 0x34, 0x70, 0x7e, 0xd2, 0xdc, 0x1d, 0x4a, 0x2b, 0x01, 0x04, 0x6c, /* Byte value: 0x04 */ + 0x30, 0x1b, 0x49, 0x07, 0xfb, 0x7a, 0x43, 0x73, 0xa1, 0x91, 0x25, 0x09, 0xd5, 0x45, 0xd7, 0x23, /* Byte value: 0x05 */ + 0x38, 0xfe, 0x0a, 0x59, 0x8f, 0xd0, 0x03, 0x3b, 0xce, 0xf6, 0x74, 0xeb, 0xac, 0x04, 0x10, 0x73, /* Byte value: 0x06 */ + 0xc0, 0x6c, 0xe7, 0x1c, 0x6a, 0x2b, 0xcf, 0x0f, 0xc1, 0x01, 0x94, 0x24, 0xd2, 0xd7, 0xda, 0x8c, /* Byte value: 0x07 */ + 0x54, 0xbe, 0x1b, 0x26, 0xf4, 0xdb, 0xe5, 0xb1, 0xf6, 0xa2, 0xa6, 0x6a, 0x61, 0x0e, 0x38, 0x4d, /* Byte value: 0x08 */ + 0xe6, 0x45, 0x88, 0x7e, 0x5e, 0x58, 0x3c, 0xda, 0x03, 0xe5, 0x5f, 0x82, 0xa7, 0x50, 0x83, 0x33, /* Byte 
value: 0x09 */ + 0x9e, 0x9f, 0xdf, 0x92, 0xf7, 0x91, 0x7a, 0xe4, 0x33, 0xad, 0xe6, 0x75, 0x45, 0x19, 0x64, 0x85, /* Byte value: 0x0a */ + 0x39, 0xaa, 0x3a, 0x22, 0x60, 0x54, 0x0b, 0x32, 0x1a, 0x23, 0xd7, 0x66, 0x0a, 0xa5, 0xd1, 0x79, /* Byte value: 0x0b */ + 0x55, 0xea, 0x2b, 0x5d, 0x1b, 0x5f, 0xed, 0xb8, 0x22, 0x77, 0x05, 0xe7, 0xc7, 0xaf, 0xf9, 0x47, /* Byte value: 0x0c */ + 0x7e, 0xe1, 0xf7, 0x35, 0x8e, 0x57, 0x76, 0x08, 0x8d, 0xf3, 0xf5, 0x5f, 0xb0, 0x09, 0x24, 0x8a, /* Byte value: 0x0d */ + 0x52, 0x85, 0xbb, 0xff, 0xd3, 0x45, 0xd5, 0x87, 0x4b, 0x19, 0xea, 0xc2, 0x33, 0x4e, 0xfb, 0x71, /* Byte value: 0x0e */ + 0x91, 0x15, 0x0c, 0x6e, 0x4b, 0x21, 0x02, 0x93, 0x35, 0xa4, 0x58, 0xb2, 0xc8, 0xb9, 0xa1, 0xe3, /* Byte value: 0x0f */ + 0x64, 0xa5, 0x52, 0x21, 0x0f, 0xa1, 0xa6, 0xc2, 0x57, 0x33, 0x83, 0x63, 0xb4, 0x4b, 0xef, 0x6e, /* Byte value: 0x10 */ + 0x03, 0xfc, 0x50, 0x8d, 0xf2, 0x4f, 0x18, 0x1b, 0xbf, 0xbc, 0x26, 0x54, 0x29, 0x20, 0x80, 0x1e, /* Byte value: 0x11 */ + 0x57, 0x42, 0x4b, 0xab, 0x06, 0x94, 0xfd, 0xaa, 0x49, 0x1e, 0x80, 0x3e, 0x48, 0x2e, 0xb8, 0x53, /* Byte value: 0x12 */ + 0x5a, 0x60, 0xf8, 0xa1, 0xa7, 0xef, 0x95, 0xcf, 0x24, 0x7e, 0xbb, 0x20, 0x4a, 0x0f, 0x3c, 0x21, /* Byte value: 0x13 */ + 0x1c, 0x7f, 0x05, 0xcd, 0xa6, 0x68, 0xe0, 0xfc, 0x67, 0x7b, 0x3a, 0x94, 0x56, 0x02, 0x08, 0xd8, /* Byte value: 0x14 */ + 0x60, 0x36, 0x92, 0x0e, 0x35, 0xf4, 0x86, 0xe6, 0x81, 0xe1, 0x4a, 0x12, 0x69, 0x8a, 0x6d, 0x46, /* Byte value: 0x15 */ + 0x07, 0x6f, 0x90, 0xa2, 0xc8, 0x1a, 0x38, 0x3f, 0x69, 0x6e, 0xef, 0x25, 0xf4, 0xe1, 0x02, 0x36, /* Byte value: 0x16 */ + 0x18, 0xec, 0xc5, 0xe2, 0x9c, 0x3d, 0xc0, 0xd8, 0xb1, 0xa9, 0xf3, 0xe5, 0x8b, 0xc3, 0x8a, 0xf0, /* Byte value: 0x17 */ + 0x21, 0x46, 0xff, 0xc0, 0xfc, 0x69, 0xcb, 0xea, 0xab, 0x8a, 0x24, 0x83, 0x81, 0x66, 0x5b, 0x89, /* Byte value: 0x18 */ + 0x72, 0x97, 0x74, 0x44, 0xc0, 0xa8, 0x16, 0x64, 0x34, 0x46, 0x6d, 0xcc, 0x14, 0x89, 0x61, 0xf2, /* Byte value: 0x19 */ + 0xa8, 0xbf, 0x36, 0x4c, 0x2b, 0x75, 0x09, 0xa1, 0x2f, 0x87, 0x8f, 0xd4, 0xc2, 0x1c, 0x70, 0x9a, /* Byte value: 0x1a */ + 0xd1, 0x31, 0x51, 0xdb, 0x6d, 0x38, 0x47, 0x96, 0xcb, 0x1a, 0x95, 0xae, 0x86, 0xf4, 0x56, 0x26, /* Byte value: 0x1b */ + 0x29, 0xa3, 0xbc, 0x9e, 0x88, 0xc3, 0x8b, 0xa2, 0xc4, 0xed, 0x75, 0x61, 0xf8, 0x27, 0x9c, 0xd9, /* Byte value: 0x1c */ + 0xc6, 0x57, 0x47, 0xc5, 0x4d, 0xb5, 0xff, 0x39, 0x7c, 0xba, 0xd8, 0x8c, 0x80, 0x97, 0x19, 0xb0, /* Byte value: 0x1d */ + 0xa4, 0xc9, 0xb5, 0x3d, 0x65, 0x8a, 0x69, 0xcd, 0x96, 0x32, 0x17, 0x47, 0x66, 0x9c, 0x35, 0xe2, /* Byte value: 0x1e */ + 0x3f, 0x91, 0x9a, 0xfb, 0x47, 0xca, 0x3b, 0x04, 0xa7, 0x98, 0x9b, 0xce, 0x58, 0xe5, 0x12, 0x45, /* Byte value: 0x1f */ + 0xe0, 0x7e, 0x28, 0xa7, 0x79, 0xc6, 0x0c, 0xec, 0xbe, 0x5e, 0x13, 0x2a, 0xf5, 0x10, 0x40, 0x0f, /* Byte value: 0x20 */ + 0x27, 0x7d, 0x5f, 0x19, 0xdb, 0xf7, 0xfb, 0xdc, 0x16, 0x31, 0x68, 0x2b, 0xd3, 0x26, 0x98, 0xb5, /* Byte value: 0x21 */ + 0x8d, 0x6a, 0x09, 0xa3, 0xed, 0x49, 0xe2, 0x6f, 0x52, 0xdf, 0x62, 0x26, 0x9e, 0xbb, 0xa9, 0x3b, /* Byte value: 0x22 */ + 0x0c, 0x76, 0x83, 0x71, 0x4e, 0xff, 0x60, 0x6c, 0xb9, 0xb5, 0x98, 0x93, 0xa4, 0x80, 0x45, 0x78, /* Byte value: 0x23 */ + 0x82, 0xe0, 0xda, 0x5f, 0x51, 0xf9, 0x9a, 0x18, 0x54, 0xd6, 0xdc, 0xe1, 0x13, 0x1b, 0x6c, 0x5d, /* Byte value: 0x24 */ + 0xea, 0x33, 0x0b, 0x0f, 0x10, 0xa7, 0x5c, 0xb6, 0xba, 0x50, 0xc7, 0x11, 0x03, 0xd0, 0xc6, 0x4b, /* Byte value: 0x25 */ + 0xae, 0x84, 0x96, 0x95, 0x0c, 0xeb, 0x39, 0x97, 0x92, 0x3c, 0xc3, 0x7c, 0x90, 0x5c, 0xb3, 0xa6, /* Byte value: 0x26 */ + 0xb4, 0xc0, 0x33, 0x81, 
0x8d, 0x1d, 0xe9, 0x5d, 0x48, 0xfc, 0xb5, 0x40, 0x94, 0x1e, 0x78, 0x42, /* Byte value: 0x27 */ + 0x9a, 0x0c, 0x1f, 0xbd, 0xcd, 0xc4, 0x5a, 0xc0, 0xe5, 0x7f, 0x2f, 0x04, 0x98, 0xd8, 0xe6, 0xad, /* Byte value: 0x28 */ + 0x63, 0xca, 0xc2, 0x83, 0xc7, 0xbb, 0x9e, 0xfd, 0x3e, 0x5d, 0x6c, 0x46, 0x40, 0xaa, 0xed, 0x58, /* Byte value: 0x29 */ + 0x49, 0x95, 0x2e, 0x90, 0xbd, 0x37, 0x0d, 0x44, 0x45, 0x0c, 0x3f, 0x73, 0x91, 0xad, 0xf1, 0x9f, /* Byte value: 0x2a */ + 0xe5, 0xb9, 0xd8, 0xf3, 0xac, 0x17, 0x24, 0xc1, 0xbc, 0x59, 0x79, 0xd6, 0x8e, 0x70, 0x03, 0x2d, /* Byte value: 0x2b */ + 0x42, 0x8c, 0x3d, 0x43, 0x3b, 0xd2, 0x55, 0x17, 0x95, 0xd7, 0x48, 0xc5, 0xc1, 0xcc, 0xb6, 0xd1, /* Byte value: 0x2c */ + 0xe4, 0xed, 0xe8, 0x88, 0x43, 0x93, 0x2c, 0xc8, 0x68, 0x8c, 0xda, 0x5b, 0x28, 0xd1, 0xc2, 0x27, /* Byte value: 0x2d */ + 0x15, 0xce, 0x76, 0xe8, 0x3d, 0x46, 0xa8, 0xbd, 0xdc, 0xc9, 0xc8, 0xfb, 0x89, 0xe2, 0x0e, 0x82, /* Byte value: 0x2e */ + 0xb7, 0x3c, 0x63, 0x0c, 0x7f, 0x52, 0xf1, 0x46, 0xf7, 0x40, 0x93, 0x14, 0xbd, 0x3e, 0xf8, 0x5c, /* Byte value: 0x2f */ + 0xc8, 0x89, 0xa4, 0x42, 0x1e, 0x81, 0x8f, 0x47, 0xae, 0x66, 0xc5, 0xc6, 0xab, 0x96, 0x1d, 0xdc, /* Byte value: 0x30 */ + 0x06, 0x3b, 0xa0, 0xd9, 0x27, 0x9e, 0x30, 0x36, 0xbd, 0xbb, 0x4c, 0xa8, 0x52, 0x40, 0xc3, 0x3c, /* Byte value: 0x31 */ + 0x70, 0x3f, 0x14, 0xb2, 0xdd, 0x63, 0x06, 0x76, 0x5f, 0x2f, 0xe8, 0x15, 0x9b, 0x08, 0x20, 0xe6, /* Byte value: 0x32 */ + 0x9d, 0x63, 0x8f, 0x1f, 0x05, 0xde, 0x62, 0xff, 0x8c, 0x11, 0xc0, 0x21, 0x6c, 0x39, 0xe4, 0x9b, /* Byte value: 0x33 */ + 0x41, 0x70, 0x6d, 0xce, 0xc9, 0x9d, 0x4d, 0x0c, 0x2a, 0x6b, 0x6e, 0x91, 0xe8, 0xec, 0x36, 0xcf, /* Byte value: 0x34 */ + 0x75, 0xf8, 0xe4, 0xe6, 0x08, 0xb2, 0x2e, 0x5b, 0x5d, 0x28, 0x82, 0xe9, 0xe0, 0x68, 0x63, 0xc4, /* Byte value: 0x35 */ + 0x19, 0xb8, 0xf5, 0x99, 0x73, 0xb9, 0xc8, 0xd1, 0x65, 0x7c, 0x50, 0x68, 0x2d, 0x62, 0x4b, 0xfa, /* Byte value: 0x36 */ + 0xc9, 0xdd, 0x94, 0x39, 0xf1, 0x05, 0x87, 0x4e, 0x7a, 0xb3, 0x66, 0x4b, 0x0d, 0x37, 0xdc, 0xd6, /* Byte value: 0x37 */ + 0xaa, 0x17, 0x56, 0xba, 0x36, 0xbe, 0x19, 0xb3, 0x44, 0xee, 0x0a, 0x0d, 0x4d, 0x9d, 0x31, 0x8e, /* Byte value: 0x38 */ + 0xfc, 0x01, 0x2d, 0x6a, 0xdf, 0xae, 0xec, 0x10, 0xd9, 0x25, 0x29, 0xbe, 0xa3, 0x12, 0x48, 0xd7, /* Byte value: 0x39 */ + 0x4d, 0x06, 0xee, 0xbf, 0x87, 0x62, 0x2d, 0x60, 0x93, 0xde, 0xf6, 0x02, 0x4c, 0x6c, 0x73, 0xb7, /* Byte value: 0x3a */ + 0xbf, 0xd9, 0x20, 0x52, 0x0b, 0xf8, 0xb1, 0x0e, 0x98, 0x27, 0xc2, 0xf6, 0xc4, 0x7f, 0x3f, 0x0c, /* Byte value: 0x3b */ + 0x2a, 0x5f, 0xec, 0x13, 0x7a, 0x8c, 0x93, 0xb9, 0x7b, 0x51, 0x53, 0x35, 0xd1, 0x07, 0x1c, 0xc7, /* Byte value: 0x3c */ + 0x73, 0xc3, 0x44, 0x3f, 0x2f, 0x2c, 0x1e, 0x6d, 0xe0, 0x93, 0xce, 0x41, 0xb2, 0x28, 0xa0, 0xf8, /* Byte value: 0x3d */ + 0x84, 0xdb, 0x7a, 0x86, 0x76, 0x67, 0xaa, 0x2e, 0xe9, 0x6d, 0x90, 0x49, 0x41, 0x5b, 0xaf, 0x61, /* Byte value: 0x3e */ + 0xd5, 0xa2, 0x91, 0xf4, 0x57, 0x6d, 0x67, 0xb2, 0x1d, 0xc8, 0x5c, 0xdf, 0x5b, 0x35, 0xd4, 0x0e, /* Byte value: 0x3f */ + 0xc3, 0x90, 0xb7, 0x91, 0x98, 0x64, 0xd7, 0x14, 0x7e, 0xbd, 0xb2, 0x70, 0xfb, 0xf7, 0x5a, 0x92, /* Byte value: 0x40 */ + 0xaf, 0xd0, 0xa6, 0xee, 0xe3, 0x6f, 0x31, 0x9e, 0x46, 0xe9, 0x60, 0xf1, 0x36, 0xfd, 0x72, 0xac, /* Byte value: 0x41 */ + 0x2b, 0x0b, 0xdc, 0x68, 0x95, 0x08, 0x9b, 0xb0, 0xaf, 0x84, 0xf0, 0xb8, 0x77, 0xa6, 0xdd, 0xcd, /* Byte value: 0x42 */ + 0x86, 0x73, 0x1a, 0x70, 0x6b, 0xac, 0xba, 0x3c, 0x82, 0x04, 0x15, 0x90, 0xce, 0xda, 0xee, 0x75, /* Byte value: 0x43 */ + 0xa7, 0x35, 0xe5, 0xb0, 0x97, 0xc5, 0x71, 0xd6, 0x29, 0x8e, 0x31, 
0x13, 0x4f, 0xbc, 0xb5, 0xfc, /* Byte value: 0x44 */ + 0xb1, 0x07, 0xc3, 0xd5, 0x58, 0xcc, 0xc1, 0x70, 0x4a, 0xfb, 0xdf, 0xbc, 0xef, 0x7e, 0x3b, 0x60, /* Byte value: 0x45 */ + 0xb2, 0xfb, 0x93, 0x58, 0xaa, 0x83, 0xd9, 0x6b, 0xf5, 0x47, 0xf9, 0xe8, 0xc6, 0x5e, 0xbb, 0x7e, /* Byte value: 0x46 */ + 0x5b, 0x34, 0xc8, 0xda, 0x48, 0x6b, 0x9d, 0xc6, 0xf0, 0xab, 0x18, 0xad, 0xec, 0xae, 0xfd, 0x2b, /* Byte value: 0x47 */ + 0x46, 0x1f, 0xfd, 0x6c, 0x01, 0x87, 0x75, 0x33, 0x43, 0x05, 0x81, 0xb4, 0x1c, 0x0d, 0x34, 0xf9, /* Byte value: 0x48 */ + 0xd3, 0x99, 0x31, 0x2d, 0x70, 0xf3, 0x57, 0x84, 0xa0, 0x73, 0x10, 0x77, 0x09, 0x75, 0x17, 0x32, /* Byte value: 0x49 */ + 0x9f, 0xcb, 0xef, 0xe9, 0x18, 0x15, 0x72, 0xed, 0xe7, 0x78, 0x45, 0xf8, 0xe3, 0xb8, 0xa5, 0x8f, /* Byte value: 0x4a */ + 0xfd, 0x55, 0x1d, 0x11, 0x30, 0x2a, 0xe4, 0x19, 0x0d, 0xf0, 0x8a, 0x33, 0x05, 0xb3, 0x89, 0xdd, /* Byte value: 0x4b */ + 0xd4, 0xf6, 0xa1, 0x8f, 0xb8, 0xe9, 0x6f, 0xbb, 0xc9, 0x1d, 0xff, 0x52, 0xfd, 0x94, 0x15, 0x04, /* Byte value: 0x4c */ + 0x0f, 0x8a, 0xd3, 0xfc, 0xbc, 0xb0, 0x78, 0x77, 0x06, 0x09, 0xbe, 0xc7, 0x8d, 0xa0, 0xc5, 0x66, /* Byte value: 0x4d */ + 0x9c, 0x37, 0xbf, 0x64, 0xea, 0x5a, 0x6a, 0xf6, 0x58, 0xc4, 0x63, 0xac, 0xca, 0x98, 0x25, 0x91, /* Byte value: 0x4e */ + 0x2f, 0x98, 0x1c, 0x47, 0xaf, 0x5d, 0xbb, 0x94, 0x79, 0x56, 0x39, 0xc9, 0xaa, 0x67, 0x5f, 0xe5, /* Byte value: 0x4f */ + 0x9b, 0x58, 0x2f, 0xc6, 0x22, 0x40, 0x52, 0xc9, 0x31, 0xaa, 0x8c, 0x89, 0x3e, 0x79, 0x27, 0xa7, /* Byte value: 0x50 */ + 0x43, 0xd8, 0x0d, 0x38, 0xd4, 0x56, 0x5d, 0x1e, 0x41, 0x02, 0xeb, 0x48, 0x67, 0x6d, 0x77, 0xdb, /* Byte value: 0x51 */ + 0xef, 0xf4, 0xfb, 0x5b, 0xc5, 0x76, 0x74, 0x9b, 0xb8, 0x57, 0xad, 0xed, 0x78, 0xb0, 0x85, 0x69, /* Byte value: 0x52 */ + 0xd9, 0xd4, 0x12, 0x85, 0x19, 0x92, 0x07, 0xde, 0xa4, 0x7d, 0xc4, 0x4c, 0xff, 0xb5, 0x91, 0x76, /* Byte value: 0x53 */ + 0x79, 0x8e, 0x67, 0x97, 0x46, 0x4d, 0x4e, 0x37, 0xe4, 0x9d, 0x1a, 0x7a, 0x44, 0xe8, 0x26, 0xbc, /* Byte value: 0x54 */ + 0xb6, 0x68, 0x53, 0x77, 0x90, 0xd6, 0xf9, 0x4f, 0x23, 0x95, 0x30, 0x99, 0x1b, 0x9f, 0x39, 0x56, /* Byte value: 0x55 */ + 0x53, 0xd1, 0x8b, 0x84, 0x3c, 0xc1, 0xdd, 0x8e, 0x9f, 0xcc, 0x49, 0x4f, 0x95, 0xef, 0x3a, 0x7b, /* Byte value: 0x56 */ + 0x7f, 0xb5, 0xc7, 0x4e, 0x61, 0xd3, 0x7e, 0x01, 0x59, 0x26, 0x56, 0xd2, 0x16, 0xa8, 0xe5, 0x80, /* Byte value: 0x57 */ + 0xc1, 0x38, 0xd7, 0x67, 0x85, 0xaf, 0xc7, 0x06, 0x15, 0xd4, 0x37, 0xa9, 0x74, 0x76, 0x1b, 0x86, /* Byte value: 0x58 */ + 0xf0, 0x77, 0xae, 0x1b, 0x91, 0x51, 0x8c, 0x7c, 0x60, 0x90, 0xb1, 0x2d, 0x07, 0x92, 0x0d, 0xaf, /* Byte value: 0x59 */ + 0x23, 0xee, 0x9f, 0x36, 0xe1, 0xa2, 0xdb, 0xf8, 0xc0, 0xe3, 0xa1, 0x5a, 0x0e, 0xe7, 0x1a, 0x9d, /* Byte value: 0x5a */ + 0xe7, 0x11, 0xb8, 0x05, 0xb1, 0xdc, 0x34, 0xd3, 0xd7, 0x30, 0xfc, 0x0f, 0x01, 0xf1, 0x42, 0x39, /* Byte value: 0x5b */ + 0x25, 0xd5, 0x3f, 0xef, 0xc6, 0x3c, 0xeb, 0xce, 0x7d, 0x58, 0xed, 0xf2, 0x5c, 0xa7, 0xd9, 0xa1, /* Byte value: 0x5c */ + 0x5e, 0xf3, 0x38, 0x8e, 0x9d, 0xba, 0xb5, 0xeb, 0xf2, 0xac, 0x72, 0x51, 0x97, 0xce, 0xbe, 0x09, /* Byte value: 0x5d */ + 0xb5, 0x94, 0x03, 0xfa, 0x62, 0x99, 0xe1, 0x54, 0x9c, 0x29, 0x16, 0xcd, 0x32, 0xbf, 0xb9, 0x48, /* Byte value: 0x5e */ + 0x1e, 0xd7, 0x65, 0x3b, 0xbb, 0xa3, 0xf0, 0xee, 0x0c, 0x12, 0xbf, 0x4d, 0xd9, 0x83, 0x49, 0xcc, /* Byte value: 0x5f */ + 0xa2, 0xf2, 0x15, 0xe4, 0x42, 0x14, 0x59, 0xfb, 0x2b, 0x89, 0x5b, 0xef, 0x34, 0xdc, 0xf6, 0xde, /* Byte value: 0x60 */ + 0xdf, 0xef, 0xb2, 0x5c, 0x3e, 0x0c, 0x37, 0xe8, 0x19, 0xc6, 0x88, 0xe4, 0xad, 0xf5, 0x52, 0x4a, /* Byte value: 
0x61 */ + 0xa6, 0x61, 0xd5, 0xcb, 0x78, 0x41, 0x79, 0xdf, 0xfd, 0x5b, 0x92, 0x9e, 0xe9, 0x1d, 0x74, 0xf6, /* Byte value: 0x62 */ + 0xfe, 0xa9, 0x4d, 0x9c, 0xc2, 0x65, 0xfc, 0x02, 0xb2, 0x4c, 0xac, 0x67, 0x2c, 0x93, 0x09, 0xc3, /* Byte value: 0x63 */ + 0xac, 0x2c, 0xf6, 0x63, 0x11, 0x20, 0x29, 0x85, 0xf9, 0x55, 0x46, 0xa5, 0x1f, 0xdd, 0xf2, 0xb2, /* Byte value: 0x64 */ + 0x22, 0xba, 0xaf, 0x4d, 0x0e, 0x26, 0xd3, 0xf1, 0x14, 0x36, 0x02, 0xd7, 0xa8, 0x46, 0xdb, 0x97, /* Byte value: 0x65 */ + 0xf9, 0xc6, 0xdd, 0x3e, 0x0a, 0x7f, 0xc4, 0x3d, 0xdb, 0x22, 0x43, 0x42, 0xd8, 0x72, 0x0b, 0xf5, /* Byte value: 0x66 */ + 0xe2, 0xd6, 0x48, 0x51, 0x64, 0x0d, 0x1c, 0xfe, 0xd5, 0x37, 0x96, 0xf3, 0x7a, 0x91, 0x01, 0x1b, /* Byte value: 0x67 */ + 0x4a, 0x69, 0x7e, 0x1d, 0x4f, 0x78, 0x15, 0x5f, 0xfa, 0xb0, 0x19, 0x27, 0xb8, 0x8d, 0x71, 0x81, /* Byte value: 0x68 */ + 0xbc, 0x25, 0x70, 0xdf, 0xf9, 0xb7, 0xa9, 0x15, 0x27, 0x9b, 0xe4, 0xa2, 0xed, 0x5f, 0xbf, 0x12, /* Byte value: 0x69 */ + 0x35, 0xdc, 0xb9, 0x53, 0x2e, 0xab, 0x6b, 0x5e, 0xa3, 0x96, 0x4f, 0xf5, 0xae, 0x25, 0x94, 0x01, /* Byte value: 0x6a */ + 0xca, 0x21, 0xc4, 0xb4, 0x03, 0x4a, 0x9f, 0x55, 0xc5, 0x0f, 0x40, 0x1f, 0x24, 0x17, 0x5c, 0xc8, /* Byte value: 0x6b */ + 0xee, 0xa0, 0xcb, 0x20, 0x2a, 0xf2, 0x7c, 0x92, 0x6c, 0x82, 0x0e, 0x60, 0xde, 0x11, 0x44, 0x63, /* Byte value: 0x6c */ + 0x78, 0xda, 0x57, 0xec, 0xa9, 0xc9, 0x46, 0x3e, 0x30, 0x48, 0xb9, 0xf7, 0xe2, 0x49, 0xe7, 0xb6, /* Byte value: 0x6d */ + 0x05, 0xc7, 0xf0, 0x54, 0xd5, 0xd1, 0x28, 0x2d, 0x02, 0x07, 0x6a, 0xfc, 0x7b, 0x60, 0x43, 0x22, /* Byte value: 0x6e */ + 0x6b, 0x2f, 0x81, 0xdd, 0xb3, 0x11, 0xde, 0xb5, 0x51, 0x3a, 0x3d, 0xa4, 0x39, 0xeb, 0x2a, 0x08, /* Byte value: 0x6f */ + 0x51, 0x79, 0xeb, 0x72, 0x21, 0x0a, 0xcd, 0x9c, 0xf4, 0xa5, 0xcc, 0x96, 0x1a, 0x6e, 0x7b, 0x6f, /* Byte value: 0x70 */ + 0xe1, 0x2a, 0x18, 0xdc, 0x96, 0x42, 0x04, 0xe5, 0x6a, 0x8b, 0xb0, 0xa7, 0x53, 0xb1, 0x81, 0x05, /* Byte value: 0x71 */ + 0x59, 0x9c, 0xa8, 0x2c, 0x55, 0xa0, 0x8d, 0xd4, 0x9b, 0xc2, 0x9d, 0x74, 0x63, 0x2f, 0xbc, 0x3f, /* Byte value: 0x72 */ + 0xa3, 0xa6, 0x25, 0x9f, 0xad, 0x90, 0x51, 0xf2, 0xff, 0x5c, 0xf8, 0x62, 0x92, 0x7d, 0x37, 0xd4, /* Byte value: 0x73 */ + 0xf2, 0xdf, 0xce, 0xed, 0x8c, 0x9a, 0x9c, 0x6e, 0x0b, 0xf9, 0x34, 0xf4, 0x88, 0x13, 0x4c, 0xbb, /* Byte value: 0x74 */ + 0x71, 0x6b, 0x24, 0xc9, 0x32, 0xe7, 0x0e, 0x7f, 0x8b, 0xfa, 0x4b, 0x98, 0x3d, 0xa9, 0xe1, 0xec, /* Byte value: 0x75 */ + 0x56, 0x16, 0x7b, 0xd0, 0xe9, 0x10, 0xf5, 0xa3, 0x9d, 0xcb, 0x23, 0xb3, 0xee, 0x8f, 0x79, 0x59, /* Byte value: 0x76 */ + 0x11, 0x5d, 0xb6, 0xc7, 0x07, 0x13, 0x88, 0x99, 0x0a, 0x1b, 0x01, 0x8a, 0x54, 0x23, 0x8c, 0xaa, /* Byte value: 0x77 */ + 0x6a, 0x7b, 0xb1, 0xa6, 0x5c, 0x95, 0xd6, 0xbc, 0x85, 0xef, 0x9e, 0x29, 0x9f, 0x4a, 0xeb, 0x02, /* Byte value: 0x78 */ + 0x89, 0xf9, 0xc9, 0x8c, 0xd7, 0x1c, 0xc2, 0x4b, 0x84, 0x0d, 0xab, 0x57, 0x43, 0x7a, 0x2b, 0x13, /* Byte value: 0x79 */ + 0x94, 0xd2, 0xfc, 0x3a, 0x9e, 0xf0, 0x2a, 0xbe, 0x37, 0xa3, 0x32, 0x4e, 0xb3, 0xd9, 0xe2, 0xc1, /* Byte value: 0x7a */ + 0x65, 0xf1, 0x62, 0x5a, 0xe0, 0x25, 0xae, 0xcb, 0x83, 0xe6, 0x20, 0xee, 0x12, 0xea, 0x2e, 0x64, /* Byte value: 0x7b */ + 0x8c, 0x3e, 0x39, 0xd8, 0x02, 0xcd, 0xea, 0x66, 0x86, 0x0a, 0xc1, 0xab, 0x38, 0x1a, 0x68, 0x31, /* Byte value: 0x7c */ + 0xbb, 0x4a, 0xe0, 0x7d, 0x31, 0xad, 0x91, 0x2a, 0x4e, 0xf5, 0x0b, 0x87, 0x19, 0xbe, 0xbd, 0x24, /* Byte value: 0x7d */ + 0x77, 0x50, 0x84, 0x10, 0x15, 0x79, 0x3e, 0x49, 0x36, 0x41, 0x07, 0x30, 0x6f, 0xe9, 0x22, 0xd0, /* Byte value: 0x7e */ + 0x3c, 0x6d, 0xca, 0x76, 0xb5, 0x85, 
0x23, 0x1f, 0x18, 0x24, 0xbd, 0x9a, 0x71, 0xc5, 0x92, 0x5b, /* Byte value: 0x7f */ + 0x7b, 0x26, 0x07, 0x61, 0x5b, 0x86, 0x5e, 0x25, 0x8f, 0xf4, 0x9f, 0xa3, 0xcb, 0x69, 0x67, 0xa8, /* Byte value: 0x80 */ + 0x28, 0xf7, 0x8c, 0xe5, 0x67, 0x47, 0x83, 0xab, 0x10, 0x38, 0xd6, 0xec, 0x5e, 0x86, 0x5d, 0xd3, /* Byte value: 0x81 */ + 0xab, 0x43, 0x66, 0xc1, 0xd9, 0x3a, 0x11, 0xba, 0x90, 0x3b, 0xa9, 0x80, 0xeb, 0x3c, 0xf0, 0x84, /* Byte value: 0x82 */ + 0xd2, 0xcd, 0x01, 0x56, 0x9f, 0x77, 0x5f, 0x8d, 0x74, 0xa6, 0xb3, 0xfa, 0xaf, 0xd4, 0xd6, 0x38, /* Byte value: 0x83 */ + 0x31, 0x4f, 0x79, 0x7c, 0x14, 0xfe, 0x4b, 0x7a, 0x75, 0x44, 0x86, 0x84, 0x73, 0xe4, 0x16, 0x29, /* Byte value: 0x84 */ + 0xde, 0xbb, 0x82, 0x27, 0xd1, 0x88, 0x3f, 0xe1, 0xcd, 0x13, 0x2b, 0x69, 0x0b, 0x54, 0x93, 0x40, /* Byte value: 0x85 */ + 0xc4, 0xff, 0x27, 0x33, 0x50, 0x7e, 0xef, 0x2b, 0x17, 0xd3, 0x5d, 0x55, 0x0f, 0x16, 0x58, 0xa4, /* Byte value: 0x86 */ + 0x5f, 0xa7, 0x08, 0xf5, 0x72, 0x3e, 0xbd, 0xe2, 0x26, 0x79, 0xd1, 0xdc, 0x31, 0x6f, 0x7f, 0x03, /* Byte value: 0x87 */ + 0xcc, 0x1a, 0x64, 0x6d, 0x24, 0xd4, 0xaf, 0x63, 0x78, 0xb4, 0x0c, 0xb7, 0x76, 0x57, 0x9f, 0xf4, /* Byte value: 0x88 */ + 0xcf, 0xe6, 0x34, 0xe0, 0xd6, 0x9b, 0xb7, 0x78, 0xc7, 0x08, 0x2a, 0xe3, 0x5f, 0x77, 0x1f, 0xea, /* Byte value: 0x89 */ + 0x76, 0x04, 0xb4, 0x6b, 0xfa, 0xfd, 0x36, 0x40, 0xe2, 0x94, 0xa4, 0xbd, 0xc9, 0x48, 0xe3, 0xda, /* Byte value: 0x8a */ + 0x2c, 0x64, 0x4c, 0xca, 0x5d, 0x12, 0xa3, 0x8f, 0xc6, 0xea, 0x1f, 0x9d, 0x83, 0x47, 0xdf, 0xfb, /* Byte value: 0x8b */ + 0xb8, 0xb6, 0xb0, 0xf0, 0xc3, 0xe2, 0x89, 0x31, 0xf1, 0x49, 0x2d, 0xd3, 0x30, 0x9e, 0x3d, 0x3a, /* Byte value: 0x8c */ + 0xd8, 0x80, 0x22, 0xfe, 0xf6, 0x16, 0x0f, 0xd7, 0x70, 0xa8, 0x67, 0xc1, 0x59, 0x14, 0x50, 0x7c, /* Byte value: 0x8d */ + 0x2e, 0xcc, 0x2c, 0x3c, 0x40, 0xd9, 0xb3, 0x9d, 0xad, 0x83, 0x9a, 0x44, 0x0c, 0xc6, 0x9e, 0xef, /* Byte value: 0x8e */ + 0x36, 0x20, 0xe9, 0xde, 0xdc, 0xe4, 0x73, 0x45, 0x1c, 0x2a, 0x69, 0xa1, 0x87, 0x05, 0x14, 0x1f, /* Byte value: 0x8f */ + 0xdb, 0x7c, 0x72, 0x73, 0x04, 0x59, 0x17, 0xcc, 0xcf, 0x14, 0x41, 0x95, 0x70, 0x34, 0xd0, 0x62, /* Byte value: 0x90 */ + 0x69, 0x87, 0xe1, 0x2b, 0xae, 0xda, 0xce, 0xa7, 0x3a, 0x53, 0xb8, 0x7d, 0xb6, 0x6a, 0x6b, 0x1c, /* Byte value: 0x91 */ + 0xb3, 0xaf, 0xa3, 0x23, 0x45, 0x07, 0xd1, 0x62, 0x21, 0x92, 0x5a, 0x65, 0x60, 0xff, 0x7a, 0x74, /* Byte value: 0x92 */ + 0x14, 0x9a, 0x46, 0x93, 0xd2, 0xc2, 0xa0, 0xb4, 0x08, 0x1c, 0x6b, 0x76, 0x2f, 0x43, 0xcf, 0x88, /* Byte value: 0x93 */ + 0x95, 0x86, 0xcc, 0x41, 0x71, 0x74, 0x22, 0xb7, 0xe3, 0x76, 0x91, 0xc3, 0x15, 0x78, 0x23, 0xcb, /* Byte value: 0x94 */ + 0xbe, 0x8d, 0x10, 0x29, 0xe4, 0x7c, 0xb9, 0x07, 0x4c, 0xf2, 0x61, 0x7b, 0x62, 0xde, 0xfe, 0x06, /* Byte value: 0x95 */ + 0x62, 0x9e, 0xf2, 0xf8, 0x28, 0x3f, 0x96, 0xf4, 0xea, 0x88, 0xcf, 0xcb, 0xe6, 0x0b, 0x2c, 0x52, /* Byte value: 0x96 */ + 0xa1, 0x0e, 0x45, 0x69, 0xb0, 0x5b, 0x41, 0xe0, 0x94, 0x35, 0x7d, 0xbb, 0x1d, 0xfc, 0x76, 0xc0, /* Byte value: 0x97 */ + 0x3b, 0x02, 0x5a, 0xd4, 0x7d, 0x9f, 0x1b, 0x20, 0x71, 0x4a, 0x52, 0xbf, 0x85, 0x24, 0x90, 0x6d, /* Byte value: 0x98 */ + 0x16, 0x32, 0x26, 0x65, 0xcf, 0x09, 0xb0, 0xa6, 0x63, 0x75, 0xee, 0xaf, 0xa0, 0xc2, 0x8e, 0x9c, /* Byte value: 0x99 */ + 0x66, 0x0d, 0x32, 0xd7, 0x12, 0x6a, 0xb6, 0xd0, 0x3c, 0x5a, 0x06, 0xba, 0x3b, 0xca, 0xae, 0x7a, /* Byte value: 0x9a */ + 0xe9, 0xcf, 0x5b, 0x82, 0xe2, 0xe8, 0x44, 0xad, 0x05, 0xec, 0xe1, 0x45, 0x2a, 0xf0, 0x46, 0x55, /* Byte value: 0x9b */ + 0x5c, 0x5b, 0x58, 0x78, 0x80, 0x71, 0xa5, 0xf9, 0x99, 0xc5, 0xf7, 0x88, 0x18, 
0x4f, 0xff, 0x1d, /* Byte value: 0x9c */ + 0x6c, 0x40, 0x11, 0x7f, 0x7b, 0x0b, 0xe6, 0x8a, 0x38, 0x54, 0xd2, 0x81, 0xcd, 0x0a, 0x28, 0x3e, /* Byte value: 0x9d */ + 0x6d, 0x14, 0x21, 0x04, 0x94, 0x8f, 0xee, 0x83, 0xec, 0x81, 0x71, 0x0c, 0x6b, 0xab, 0xe9, 0x34, /* Byte value: 0x9e */ + 0xad, 0x78, 0xc6, 0x18, 0xfe, 0xa4, 0x21, 0x8c, 0x2d, 0x80, 0xe5, 0x28, 0xb9, 0x7c, 0x33, 0xb8, /* Byte value: 0x9f */ + 0x37, 0x74, 0xd9, 0xa5, 0x33, 0x60, 0x7b, 0x4c, 0xc8, 0xff, 0xca, 0x2c, 0x21, 0xa4, 0xd5, 0x15, /* Byte value: 0xa0 */ + 0x61, 0x62, 0xa2, 0x75, 0xda, 0x70, 0x8e, 0xef, 0x55, 0x34, 0xe9, 0x9f, 0xcf, 0x2b, 0xac, 0x4c, /* Byte value: 0xa1 */ + 0x4b, 0x3d, 0x4e, 0x66, 0xa0, 0xfc, 0x1d, 0x56, 0x2e, 0x65, 0xba, 0xaa, 0x1e, 0x2c, 0xb0, 0x8b, /* Byte value: 0xa2 */ + 0xb9, 0xe2, 0x80, 0x8b, 0x2c, 0x66, 0x81, 0x38, 0x25, 0x9c, 0x8e, 0x5e, 0x96, 0x3f, 0xfc, 0x30, /* Byte value: 0xa3 */ + 0xe3, 0x82, 0x78, 0x2a, 0x8b, 0x89, 0x14, 0xf7, 0x01, 0xe2, 0x35, 0x7e, 0xdc, 0x30, 0xc0, 0x11, /* Byte value: 0xa4 */ + 0xba, 0x1e, 0xd0, 0x06, 0xde, 0x29, 0x99, 0x23, 0x9a, 0x20, 0xa8, 0x0a, 0xbf, 0x1f, 0x7c, 0x2e, /* Byte value: 0xa5 */ + 0xf1, 0x23, 0x9e, 0x60, 0x7e, 0xd5, 0x84, 0x75, 0xb4, 0x45, 0x12, 0xa0, 0xa1, 0x33, 0xcc, 0xa5, /* Byte value: 0xa6 */ + 0xa0, 0x5a, 0x75, 0x12, 0x5f, 0xdf, 0x49, 0xe9, 0x40, 0xe0, 0xde, 0x36, 0xbb, 0x5d, 0xb7, 0xca, /* Byte value: 0xa7 */ + 0x85, 0x8f, 0x4a, 0xfd, 0x99, 0xe3, 0xa2, 0x27, 0x3d, 0xb8, 0x33, 0xc4, 0xe7, 0xfa, 0x6e, 0x6b, /* Byte value: 0xa8 */ + 0x83, 0xb4, 0xea, 0x24, 0xbe, 0x7d, 0x92, 0x11, 0x80, 0x03, 0x7f, 0x6c, 0xb5, 0xba, 0xad, 0x57, /* Byte value: 0xa9 */ + 0xda, 0x28, 0x42, 0x08, 0xeb, 0xdd, 0x1f, 0xc5, 0x1b, 0xc1, 0xe2, 0x18, 0xd6, 0x95, 0x11, 0x68, /* Byte value: 0xaa */ + 0x47, 0x4b, 0xcd, 0x17, 0xee, 0x03, 0x7d, 0x3a, 0x97, 0xd0, 0x22, 0x39, 0xba, 0xac, 0xf5, 0xf3, /* Byte value: 0xab */ + 0xc5, 0xab, 0x17, 0x48, 0xbf, 0xfa, 0xe7, 0x22, 0xc3, 0x06, 0xfe, 0xd8, 0xa9, 0xb7, 0x99, 0xae, /* Byte value: 0xac */ + 0xb0, 0x53, 0xf3, 0xae, 0xb7, 0x48, 0xc9, 0x79, 0x9e, 0x2e, 0x7c, 0x31, 0x49, 0xdf, 0xfa, 0x6a, /* Byte value: 0xad */ + 0x33, 0xe7, 0x19, 0x8a, 0x09, 0x35, 0x5b, 0x68, 0x1e, 0x2d, 0x03, 0x5d, 0xfc, 0x65, 0x57, 0x3d, /* Byte value: 0xae */ + 0xfa, 0x3a, 0x8d, 0xb3, 0xf8, 0x30, 0xdc, 0x26, 0x64, 0x9e, 0x65, 0x16, 0xf1, 0x52, 0x8b, 0xeb, /* Byte value: 0xaf */ + 0x96, 0x7a, 0x9c, 0xcc, 0x83, 0x3b, 0x3a, 0xac, 0x5c, 0xca, 0xb7, 0x97, 0x3c, 0x58, 0xa3, 0xd5, /* Byte value: 0xb0 */ + 0x6f, 0xbc, 0x41, 0xf2, 0x89, 0x44, 0xfe, 0x91, 0x87, 0xe8, 0xf4, 0xd5, 0xe4, 0x2a, 0xa8, 0x20, /* Byte value: 0xb1 */ + 0x6e, 0xe8, 0x71, 0x89, 0x66, 0xc0, 0xf6, 0x98, 0x53, 0x3d, 0x57, 0x58, 0x42, 0x8b, 0x69, 0x2a, /* Byte value: 0xb2 */ + 0xc2, 0xc4, 0x87, 0xea, 0x77, 0xe0, 0xdf, 0x1d, 0xaa, 0x68, 0x11, 0xfd, 0x5d, 0x56, 0x9b, 0x98, /* Byte value: 0xb3 */ + 0xf6, 0x4c, 0x0e, 0xc2, 0xb6, 0xcf, 0xbc, 0x4a, 0xdd, 0x2b, 0xfd, 0x85, 0x55, 0xd2, 0xce, 0x93, /* Byte value: 0xb4 */ + 0x50, 0x2d, 0xdb, 0x09, 0xce, 0x8e, 0xc5, 0x95, 0x20, 0x70, 0x6f, 0x1b, 0xbc, 0xcf, 0xba, 0x65, /* Byte value: 0xb5 */ + 0xff, 0xfd, 0x7d, 0xe7, 0x2d, 0xe1, 0xf4, 0x0b, 0x66, 0x99, 0x0f, 0xea, 0x8a, 0x32, 0xc8, 0xc9, /* Byte value: 0xb6 */ + 0x5d, 0x0f, 0x68, 0x03, 0x6f, 0xf5, 0xad, 0xf0, 0x4d, 0x10, 0x54, 0x05, 0xbe, 0xee, 0x3e, 0x17, /* Byte value: 0xb7 */ + 0xa9, 0xeb, 0x06, 0x37, 0xc4, 0xf1, 0x01, 0xa8, 0xfb, 0x52, 0x2c, 0x59, 0x64, 0xbd, 0xb1, 0x90, /* Byte value: 0xb8 */ + 0x8e, 0x96, 0x59, 0x2e, 0x1f, 0x06, 0xfa, 0x74, 0xed, 0x63, 0x44, 0x72, 0xb7, 0x9b, 0x29, 0x25, /* Byte value: 0xb9 */ + 
0x17, 0x66, 0x16, 0x1e, 0x20, 0x8d, 0xb8, 0xaf, 0xb7, 0xa0, 0x4d, 0x22, 0x06, 0x63, 0x4f, 0x96, /* Byte value: 0xba */ + 0x1b, 0x10, 0x95, 0x6f, 0x6e, 0x72, 0xd8, 0xc3, 0x0e, 0x15, 0xd5, 0xb1, 0xa2, 0xe3, 0x0a, 0xee, /* Byte value: 0xbb */ + 0x97, 0x2e, 0xac, 0xb7, 0x6c, 0xbf, 0x32, 0xa5, 0x88, 0x1f, 0x14, 0x1a, 0x9a, 0xf9, 0x62, 0xdf, /* Byte value: 0xbc */ + 0x7d, 0x1d, 0xa7, 0xb8, 0x7c, 0x18, 0x6e, 0x13, 0x32, 0x4f, 0xd3, 0x0b, 0x99, 0x29, 0xa4, 0x94, /* Byte value: 0xbd */ + 0xec, 0x08, 0xab, 0xd6, 0x37, 0x39, 0x6c, 0x80, 0x07, 0xeb, 0x8b, 0xb9, 0x51, 0x90, 0x05, 0x77, /* Byte value: 0xbe */ + 0x58, 0xc8, 0x98, 0x57, 0xba, 0x24, 0x85, 0xdd, 0x4f, 0x17, 0x3e, 0xf9, 0xc5, 0x8e, 0x7d, 0x35, /* Byte value: 0xbf */ + 0xf7, 0x18, 0x3e, 0xb9, 0x59, 0x4b, 0xb4, 0x43, 0x09, 0xfe, 0x5e, 0x08, 0xf3, 0x73, 0x0f, 0x99, /* Byte value: 0xc0 */ + 0x1f, 0x83, 0x55, 0x40, 0x54, 0x27, 0xf8, 0xe7, 0xd8, 0xc7, 0x1c, 0xc0, 0x7f, 0x22, 0x88, 0xc6, /* Byte value: 0xc1 */ + 0xfb, 0x6e, 0xbd, 0xc8, 0x17, 0xb4, 0xd4, 0x2f, 0xb0, 0x4b, 0xc6, 0x9b, 0x57, 0xf3, 0x4a, 0xe1, /* Byte value: 0xc2 */ + 0x7c, 0x49, 0x97, 0xc3, 0x93, 0x9c, 0x66, 0x1a, 0xe6, 0x9a, 0x70, 0x86, 0x3f, 0x88, 0x65, 0x9e, /* Byte value: 0xc3 */ + 0x09, 0xb1, 0x73, 0x25, 0x9b, 0x2e, 0x48, 0x41, 0xbb, 0xb2, 0xf2, 0x6f, 0xdf, 0xe0, 0x06, 0x5a, /* Byte value: 0xc4 */ + 0x0d, 0x22, 0xb3, 0x0a, 0xa1, 0x7b, 0x68, 0x65, 0x6d, 0x60, 0x3b, 0x1e, 0x02, 0x21, 0x84, 0x72, /* Byte value: 0xc5 */ + 0x7a, 0x72, 0x37, 0x1a, 0xb4, 0x02, 0x56, 0x2c, 0x5b, 0x21, 0x3c, 0x2e, 0x6d, 0xc8, 0xa6, 0xa2, /* Byte value: 0xc6 */ + 0x67, 0x59, 0x02, 0xac, 0xfd, 0xee, 0xbe, 0xd9, 0xe8, 0x8f, 0xa5, 0x37, 0x9d, 0x6b, 0x6f, 0x70, /* Byte value: 0xc7 */ + 0x45, 0xe3, 0xad, 0xe1, 0xf3, 0xc8, 0x6d, 0x28, 0xfc, 0xb9, 0xa7, 0xe0, 0x35, 0x2d, 0xb4, 0xe7, /* Byte value: 0xc8 */ + 0x87, 0x27, 0x2a, 0x0b, 0x84, 0x28, 0xb2, 0x35, 0x56, 0xd1, 0xb6, 0x1d, 0x68, 0x7b, 0x2f, 0x7f, /* Byte value: 0xc9 */ + 0xdc, 0x13, 0xe2, 0xd1, 0xcc, 0x43, 0x2f, 0xf3, 0xa6, 0x7a, 0xae, 0xb0, 0x84, 0xd5, 0xd2, 0x54, /* Byte value: 0xca */ + 0xe8, 0x9b, 0x6b, 0xf9, 0x0d, 0x6c, 0x4c, 0xa4, 0xd1, 0x39, 0x42, 0xc8, 0x8c, 0x51, 0x87, 0x5f, /* Byte value: 0xcb */ + 0x4f, 0xae, 0x8e, 0x49, 0x9a, 0xa9, 0x3d, 0x72, 0xf8, 0xb7, 0x73, 0xdb, 0xc3, 0xed, 0x32, 0xa3, /* Byte value: 0xcc */ + 0x1d, 0x2b, 0x35, 0xb6, 0x49, 0xec, 0xe8, 0xf5, 0xb3, 0xae, 0x99, 0x19, 0xf0, 0xa3, 0xc9, 0xd2, /* Byte value: 0xcd */ + 0x4e, 0xfa, 0xbe, 0x32, 0x75, 0x2d, 0x35, 0x7b, 0x2c, 0x62, 0xd0, 0x56, 0x65, 0x4c, 0xf3, 0xa9, /* Byte value: 0xce */ + 0x04, 0x93, 0xc0, 0x2f, 0x3a, 0x55, 0x20, 0x24, 0xd6, 0xd2, 0xc9, 0x71, 0xdd, 0xc1, 0x82, 0x28, /* Byte value: 0xcf */ + 0xeb, 0x67, 0x3b, 0x74, 0xff, 0x23, 0x54, 0xbf, 0x6e, 0x85, 0x64, 0x9c, 0xa5, 0x71, 0x07, 0x41, /* Byte value: 0xd0 */ + 0xf8, 0x92, 0xed, 0x45, 0xe5, 0xfb, 0xcc, 0x34, 0x0f, 0xf7, 0xe0, 0xcf, 0x7e, 0xd3, 0xca, 0xff, /* Byte value: 0xd1 */ + 0xf3, 0x8b, 0xfe, 0x96, 0x63, 0x1e, 0x94, 0x67, 0xdf, 0x2c, 0x97, 0x79, 0x2e, 0xb2, 0x8d, 0xb1, /* Byte value: 0xd2 */ + 0x3e, 0xc5, 0xaa, 0x80, 0xa8, 0x4e, 0x33, 0x0d, 0x73, 0x4d, 0x38, 0x43, 0xfe, 0x44, 0xd3, 0x4f, /* Byte value: 0xd3 */ + 0x3d, 0x39, 0xfa, 0x0d, 0x5a, 0x01, 0x2b, 0x16, 0xcc, 0xf1, 0x1e, 0x17, 0xd7, 0x64, 0x53, 0x51, /* Byte value: 0xd4 */ + 0xbd, 0x71, 0x40, 0xa4, 0x16, 0x33, 0xa1, 0x1c, 0xf3, 0x4e, 0x47, 0x2f, 0x4b, 0xfe, 0x7e, 0x18, /* Byte value: 0xd5 */ + 0x8a, 0x05, 0x99, 0x01, 0x25, 0x53, 0xda, 0x50, 0x3b, 0xb1, 0x8d, 0x03, 0x6a, 0x5a, 0xab, 0x0d, /* Byte value: 0xd6 */ + 0x88, 0xad, 0xf9, 0xf7, 0x38, 0x98, 0xca, 
0x42, 0x50, 0xd8, 0x08, 0xda, 0xe5, 0xdb, 0xea, 0x19, /* Byte value: 0xd7 */ + 0xdd, 0x47, 0xd2, 0xaa, 0x23, 0xc7, 0x27, 0xfa, 0x72, 0xaf, 0x0d, 0x3d, 0x22, 0x74, 0x13, 0x5e, /* Byte value: 0xd8 */ + 0xcd, 0x4e, 0x54, 0x16, 0xcb, 0x50, 0xa7, 0x6a, 0xac, 0x61, 0xaf, 0x3a, 0xd0, 0xf6, 0x5e, 0xfe, /* Byte value: 0xd9 */ + 0x0b, 0x19, 0x13, 0xd3, 0x86, 0xe5, 0x58, 0x53, 0xd0, 0xdb, 0x77, 0xb6, 0x50, 0x61, 0x47, 0x4e, /* Byte value: 0xda */ + 0x13, 0xf5, 0xd6, 0x31, 0x1a, 0xd8, 0x98, 0x8b, 0x61, 0x72, 0x84, 0x53, 0xdb, 0xa2, 0xcd, 0xbe, /* Byte value: 0xdb */ + 0x98, 0xa4, 0x7f, 0x4b, 0xd0, 0x0f, 0x4a, 0xd2, 0x8e, 0x16, 0xaa, 0xdd, 0x17, 0x59, 0xa7, 0xb9, /* Byte value: 0xdc */ + 0x02, 0xa8, 0x60, 0xf6, 0x1d, 0xcb, 0x10, 0x12, 0x6b, 0x69, 0x85, 0xd9, 0x8f, 0x81, 0x41, 0x14, /* Byte value: 0xdd */ + 0x93, 0xbd, 0x6c, 0x98, 0x56, 0xea, 0x12, 0x81, 0x5e, 0xcd, 0xdd, 0x6b, 0x47, 0x38, 0xe0, 0xf7, /* Byte value: 0xde */ + 0x80, 0x48, 0xba, 0xa9, 0x4c, 0x32, 0x8a, 0x0a, 0x3f, 0xbf, 0x59, 0x38, 0x9c, 0x9a, 0x2d, 0x49, /* Byte value: 0xdf */ + 0x90, 0x41, 0x3c, 0x15, 0xa4, 0xa5, 0x0a, 0x9a, 0xe1, 0x71, 0xfb, 0x3f, 0x6e, 0x18, 0x60, 0xe9, /* Byte value: 0xe0 */ + 0xd0, 0x65, 0x61, 0xa0, 0x82, 0xbc, 0x4f, 0x9f, 0x1f, 0xcf, 0x36, 0x23, 0x20, 0x55, 0x97, 0x2c, /* Byte value: 0xe1 */ + 0x24, 0x81, 0x0f, 0x94, 0x29, 0xb8, 0xe3, 0xc7, 0xa9, 0x8d, 0x4e, 0x7f, 0xfa, 0x06, 0x18, 0xab, /* Byte value: 0xe2 */ + 0x34, 0x88, 0x89, 0x28, 0xc1, 0x2f, 0x63, 0x57, 0x77, 0x43, 0xec, 0x78, 0x08, 0x84, 0x55, 0x0b, /* Byte value: 0xe3 */ + 0xcb, 0x75, 0xf4, 0xcf, 0xec, 0xce, 0x97, 0x5c, 0x11, 0xda, 0xe3, 0x92, 0x82, 0xb6, 0x9d, 0xc2, /* Byte value: 0xe4 */ + 0xed, 0x5c, 0x9b, 0xad, 0xd8, 0xbd, 0x64, 0x89, 0xd3, 0x3e, 0x28, 0x34, 0xf7, 0x31, 0xc4, 0x7d, /* Byte value: 0xe5 */ + 0xf4, 0xe4, 0x6e, 0x34, 0xab, 0x04, 0xac, 0x58, 0xb6, 0x42, 0x78, 0x5c, 0xda, 0x53, 0x8f, 0x87, /* Byte value: 0xe6 */ + 0xce, 0xb2, 0x04, 0x9b, 0x39, 0x1f, 0xbf, 0x71, 0x13, 0xdd, 0x89, 0x6e, 0xf9, 0xd6, 0xde, 0xe0, /* Byte value: 0xe7 */ + 0x99, 0xf0, 0x4f, 0x30, 0x3f, 0x8b, 0x42, 0xdb, 0x5a, 0xc3, 0x09, 0x50, 0xb1, 0xf8, 0x66, 0xb3, /* Byte value: 0xe8 */ + 0x10, 0x09, 0x86, 0xbc, 0xe8, 0x97, 0x80, 0x90, 0xde, 0xce, 0xa2, 0x07, 0xf2, 0x82, 0x4d, 0xa0, /* Byte value: 0xe9 */ + 0x44, 0xb7, 0x9d, 0x9a, 0x1c, 0x4c, 0x65, 0x21, 0x28, 0x6c, 0x04, 0x6d, 0x93, 0x8c, 0x75, 0xed, /* Byte value: 0xea */ + 0x40, 0x24, 0x5d, 0xb5, 0x26, 0x19, 0x45, 0x05, 0xfe, 0xbe, 0xcd, 0x1c, 0x4e, 0x4d, 0xf7, 0xc5, /* Byte value: 0xeb */ + 0x92, 0xe9, 0x5c, 0xe3, 0xb9, 0x6e, 0x1a, 0x88, 0x8a, 0x18, 0x7e, 0xe6, 0xe1, 0x99, 0x21, 0xfd, /* Byte value: 0xec */ + 0x3a, 0x56, 0x6a, 0xaf, 0x92, 0x1b, 0x13, 0x29, 0xa5, 0x9f, 0xf1, 0x32, 0x23, 0x85, 0x51, 0x67, /* Byte value: 0xed */ + 0x01, 0x54, 0x30, 0x7b, 0xef, 0x84, 0x08, 0x09, 0xd4, 0xd5, 0xa3, 0x8d, 0xa6, 0xa1, 0xc1, 0x0a, /* Byte value: 0xee */ + 0x26, 0x29, 0x6f, 0x62, 0x34, 0x73, 0xf3, 0xd5, 0xc2, 0xe4, 0xcb, 0xa6, 0x75, 0x87, 0x59, 0xbf, /* Byte value: 0xef */ + 0x12, 0xa1, 0xe6, 0x4a, 0xf5, 0x5c, 0x90, 0x82, 0xb5, 0xa7, 0x27, 0xde, 0x7d, 0x03, 0x0c, 0xb4, /* Byte value: 0xf0 */ + 0x1a, 0x44, 0xa5, 0x14, 0x81, 0xf6, 0xd0, 0xca, 0xda, 0xc0, 0x76, 0x3c, 0x04, 0x42, 0xcb, 0xe4, /* Byte value: 0xf1 */ + 0x48, 0xc1, 0x1e, 0xeb, 0x52, 0xb3, 0x05, 0x4d, 0x91, 0xd9, 0x9c, 0xfe, 0x37, 0x0c, 0x30, 0x95, /* Byte value: 0xf2 */ + 0x68, 0xd3, 0xd1, 0x50, 0x41, 0x5e, 0xc6, 0xae, 0xee, 0x86, 0x1b, 0xf0, 0x10, 0xcb, 0xaa, 0x16, /* Byte value: 0xf3 */ + 0xf5, 0xb0, 0x5e, 0x4f, 0x44, 0x80, 0xa4, 0x51, 0x62, 0x97, 0xdb, 0xd1, 0x7c, 0xf2, 
0x4e, 0x8d, /* Byte value: 0xf4 */ + 0x81, 0x1c, 0x8a, 0xd2, 0xa3, 0xb6, 0x82, 0x03, 0xeb, 0x6a, 0xfa, 0xb5, 0x3a, 0x3b, 0xec, 0x43, /* Byte value: 0xf5 */ + 0x8b, 0x51, 0xa9, 0x7a, 0xca, 0xd7, 0xd2, 0x59, 0xef, 0x64, 0x2e, 0x8e, 0xcc, 0xfb, 0x6a, 0x07, /* Byte value: 0xf6 */ + 0xc7, 0x03, 0x77, 0xbe, 0xa2, 0x31, 0xf7, 0x30, 0xa8, 0x6f, 0x7b, 0x01, 0x26, 0x36, 0xd8, 0xba, /* Byte value: 0xf7 */ + 0xd6, 0x5e, 0xc1, 0x79, 0xa5, 0x22, 0x7f, 0xa9, 0xa2, 0x74, 0x7a, 0x8b, 0x72, 0x15, 0x54, 0x10, /* Byte value: 0xf8 */ + 0x20, 0x12, 0xcf, 0xbb, 0x13, 0xed, 0xc3, 0xe3, 0x7f, 0x5f, 0x87, 0x0e, 0x27, 0xc7, 0x9a, 0x83, /* Byte value: 0xf9 */ + 0x0a, 0x4d, 0x23, 0xa8, 0x69, 0x61, 0x50, 0x5a, 0x04, 0x0e, 0xd4, 0x3b, 0xf6, 0xc0, 0x86, 0x44, /* Byte value: 0xfa */ + 0x08, 0xe5, 0x43, 0x5e, 0x74, 0xaa, 0x40, 0x48, 0x6f, 0x67, 0x51, 0xe2, 0x79, 0x41, 0xc7, 0x50, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0x4c, 0x52, 0xde, 0xc4, 0x68, 0xe6, 0x25, 0x69, 0x47, 0x0b, 0x55, 0x8f, 0xea, 0xcd, 0xb2, 0xbd, /* Byte value: 0xfd */ + 0xd7, 0x0a, 0xf1, 0x02, 0x4a, 0xa6, 0x77, 0xa0, 0x76, 0xa1, 0xd9, 0x06, 0xd4, 0xb4, 0x95, 0x1a, /* Byte value: 0xfe */ + 0x74, 0xac, 0xd4, 0x9d, 0xe7, 0x36, 0x26, 0x52, 0x89, 0xfd, 0x21, 0x64, 0x46, 0xc9, 0xa2, 0xce, /* Byte value: 0xff */ + + /* Matrix row: 8 */ + 0x99, 0xaa, 0x16, 0x04, 0xc2, 0x95, 0x8c, 0x4a, 0x63, 0xeb, 0xf0, 0x49, 0x21, 0x0a, 0xad, 0xf8, /* Byte value: 0x00 */ + 0x93, 0xe3, 0xda, 0xa3, 0x95, 0xb5, 0xa1, 0x97, 0x1b, 0x19, 0xbd, 0x7f, 0x09, 0x35, 0xa8, 0x38, /* Byte value: 0x01 */ + 0x88, 0xfd, 0x2b, 0x3b, 0xa6, 0xc5, 0x21, 0xbc, 0xaf, 0xb5, 0xad, 0x3e, 0x65, 0x8a, 0x44, 0xdb, /* Byte value: 0x02 */ + 0xae, 0x16, 0x29, 0x6c, 0x27, 0x1a, 0xaa, 0xe1, 0xc4, 0x02, 0x84, 0xcc, 0xfd, 0xee, 0x57, 0x5c, /* Byte value: 0x03 */ + 0x8b, 0xbe, 0x09, 0x55, 0xe0, 0x74, 0xcd, 0x78, 0xbb, 0x23, 0x51, 0x37, 0x69, 0x61, 0xa4, 0xfb, /* Byte value: 0x04 */ + 0xbd, 0x82, 0x28, 0xa6, 0x86, 0x94, 0x0e, 0x2e, 0x10, 0xb8, 0x71, 0xb5, 0xb1, 0xdc, 0xbf, 0xfe, /* Byte value: 0x05 */ + 0x69, 0xbd, 0x24, 0x97, 0x06, 0x13, 0xb2, 0x23, 0xa9, 0x8c, 0x87, 0xdc, 0x67, 0x47, 0xd5, 0x6a, /* Byte value: 0x06 */ + 0xb1, 0x4d, 0xa0, 0xdd, 0x5d, 0x15, 0x38, 0xb8, 0x40, 0xa5, 0x07, 0x91, 0x81, 0xf6, 0xb9, 0x7e, /* Byte value: 0x07 */ + 0x6e, 0xbb, 0x7e, 0xd0, 0x09, 0xdd, 0x4c, 0x95, 0x8d, 0x11, 0xe8, 0xc9, 0x7b, 0x0b, 0x37, 0x8b, /* Byte value: 0x08 */ + 0x38, 0x30, 0x95, 0x7d, 0x78, 0xbf, 0xfc, 0xf9, 0xe3, 0x62, 0xfe, 0xa8, 0xe0, 0x25, 0x1c, 0x04, /* Byte value: 0x09 */ + 0x3e, 0xb6, 0xd1, 0xa1, 0xf4, 0x1e, 0xe7, 0xb2, 0xcb, 0x8d, 0xc5, 0xba, 0xf8, 0x30, 0x1f, 0x44, /* Byte value: 0x0a */ + 0x92, 0x63, 0xc4, 0x38, 0x16, 0xda, 0x44, 0x6a, 0x17, 0x6b, 0xe9, 0x78, 0x0d, 0x6c, 0x49, 0x99, /* Byte value: 0x0b */ + 0x95, 0x65, 0x9e, 0x7f, 0x19, 0x14, 0xba, 0xdc, 0x33, 0xf6, 0x86, 0x6d, 0x11, 0x20, 0xab, 0x78, /* Byte value: 0x0c */ + 0x59, 0x07, 0x41, 0xb8, 0xec, 0x52, 0x6a, 0x3e, 0x2a, 0xf8, 0x9c, 0x4c, 0xa7, 0xef, 0xcd, 0x2f, /* Byte value: 0x0d */ + 0x31, 0x3a, 0x7b, 0xb4, 0x69, 0x2e, 0x3d, 0xe0, 0x8f, 0x06, 0x4f, 0x97, 0xc4, 0xf1, 0xf9, 0xe4, /* Byte value: 0x0e */ + 0x4e, 0xd6, 0x38, 0x5b, 0x04, 0xa3, 0xdc, 0x83, 0xce, 0x49, 0xfa, 0x29, 0xfb, 0x7a, 0x27, 0x4c, /* Byte value: 0x0f */ + 0xd3, 0x39, 0x56, 0x76, 0x8f, 0x49, 0x42, 0xbb, 0x9d, 0xa9, 0x99, 0x7c, 0xca, 0xd7, 0x88, 0x75, /* Byte value: 0x10 */ + 0xce, 0xa1, 0xe3, 0x32, 0x30, 0x98, 0xd9, 0xdb, 0x01, 0xea, 0xb2, 0x2f, 0xbe, 0x7d, 0x67, 0xd6, /* Byte 
value: 0x11 */ + 0xa0, 0x1a, 0x9d, 0xe2, 0x39, 0x45, 0x95, 0x4e, 0x8c, 0xfb, 0x5a, 0xe6, 0xc5, 0x76, 0x50, 0x5d, /* Byte value: 0x12 */ + 0xe5, 0x05, 0x77, 0x85, 0xe9, 0xa9, 0x81, 0xed, 0x36, 0x32, 0xb9, 0xfe, 0x12, 0x6a, 0x93, 0x70, /* Byte value: 0x13 */ + 0xd5, 0xbf, 0x12, 0xaa, 0x03, 0xe8, 0x59, 0xf0, 0xb5, 0x46, 0xa2, 0x6e, 0xd2, 0xc2, 0x8b, 0x35, /* Byte value: 0x14 */ + 0xb9, 0xc7, 0x50, 0x8f, 0xcf, 0xeb, 0x1c, 0x5c, 0x20, 0xb3, 0xe2, 0xa9, 0xa1, 0x7b, 0xbd, 0x3f, /* Byte value: 0x15 */ + 0xa4, 0x5f, 0xe5, 0xcb, 0x70, 0x3a, 0x87, 0x3c, 0xbc, 0xf0, 0xc9, 0xfa, 0xd5, 0xd1, 0x52, 0x9c, /* Byte value: 0x16 */ + 0xbf, 0x41, 0x14, 0x53, 0x43, 0x4a, 0x07, 0x17, 0x08, 0x5c, 0xd9, 0xbb, 0xb9, 0x6e, 0xbe, 0x7f, /* Byte value: 0x17 */ + 0x2d, 0x22, 0xd0, 0x6b, 0x55, 0x90, 0x43, 0x7d, 0x1f, 0x37, 0x30, 0xc3, 0xb4, 0x02, 0xf7, 0xe6, /* Byte value: 0x18 */ + 0xe7, 0xc6, 0x4b, 0x70, 0x2c, 0x77, 0x88, 0xd4, 0x2e, 0xd6, 0x11, 0xf0, 0x1a, 0xd8, 0x92, 0xf1, /* Byte value: 0x19 */ + 0xdc, 0xb5, 0xfc, 0x63, 0x12, 0x79, 0x98, 0xe9, 0xd9, 0x22, 0x13, 0x51, 0xf6, 0x16, 0x6e, 0xd5, /* Byte value: 0x1a */ + 0x21, 0xed, 0x58, 0x10, 0x8e, 0x11, 0x75, 0xeb, 0x4f, 0x2a, 0x46, 0xe7, 0x84, 0x28, 0xf1, 0x66, /* Byte value: 0x1b */ + 0xf9, 0x1d, 0xdc, 0x5a, 0xd5, 0x17, 0xff, 0x70, 0xa6, 0x03, 0xc6, 0xaa, 0x62, 0x99, 0x9d, 0x72, /* Byte value: 0x1c */ + 0xee, 0xcc, 0xa5, 0xb9, 0x3d, 0xe6, 0x49, 0xcd, 0x42, 0xb2, 0xa0, 0xcf, 0x3e, 0x0c, 0x77, 0x11, /* Byte value: 0x1d */ + 0x62, 0x74, 0xf6, 0xab, 0xd2, 0x5c, 0x7a, 0x03, 0xdd, 0x0c, 0x9e, 0xed, 0x4b, 0x21, 0x31, 0x0b, /* Byte value: 0x1e */ + 0xcd, 0xe2, 0xc1, 0x5c, 0x76, 0x29, 0x35, 0x1f, 0x15, 0x7c, 0x4e, 0x26, 0xb2, 0x96, 0x87, 0xf6, /* Byte value: 0x1f */ + 0x67, 0xb1, 0x90, 0x19, 0x18, 0x4c, 0x8d, 0x8c, 0xe1, 0x75, 0x59, 0xf6, 0x5f, 0xdf, 0xd2, 0x6b, /* Byte value: 0x20 */ + 0x72, 0xa3, 0xd5, 0x0f, 0x35, 0x63, 0x32, 0x08, 0x1d, 0x20, 0x97, 0x9d, 0x0b, 0xf8, 0x39, 0x89, /* Byte value: 0x21 */ + 0x9b, 0x69, 0x2a, 0xf1, 0x07, 0x4b, 0x85, 0x73, 0x7b, 0x0f, 0x58, 0x47, 0x29, 0xb8, 0xac, 0x79, /* Byte value: 0x22 */ + 0xbe, 0xc1, 0x0a, 0xc8, 0xc0, 0x25, 0xe2, 0xea, 0x04, 0x2e, 0x8d, 0xbc, 0xbd, 0x37, 0x5f, 0xde, /* Byte value: 0x23 */ + 0xeb, 0x09, 0xc3, 0x0b, 0xf7, 0xf6, 0xbe, 0x42, 0x7e, 0xcb, 0x67, 0xd4, 0x2a, 0xf2, 0x94, 0x71, /* Byte value: 0x24 */ + 0x86, 0xf1, 0x9f, 0xb5, 0xb8, 0x9a, 0x1e, 0x13, 0xe7, 0x4c, 0x73, 0x14, 0x5d, 0x12, 0x43, 0xda, /* Byte value: 0x25 */ + 0x83, 0x34, 0xf9, 0x07, 0x72, 0x8a, 0xe9, 0x9c, 0xdb, 0x35, 0xb4, 0x0f, 0x49, 0xec, 0xa0, 0xba, /* Byte value: 0x26 */ + 0x09, 0x0a, 0xee, 0xc9, 0x11, 0x91, 0xc1, 0x19, 0x6c, 0x64, 0xb1, 0x3f, 0x24, 0xd4, 0xe5, 0xe0, /* Byte value: 0x27 */ + 0x54, 0x48, 0xd7, 0x58, 0xb4, 0xbc, 0xb9, 0x55, 0x76, 0x97, 0xbe, 0x6f, 0x93, 0x9c, 0x2a, 0x0e, /* Byte value: 0x28 */ + 0x77, 0x66, 0xb3, 0xbd, 0xff, 0x73, 0xc5, 0x87, 0x21, 0x59, 0x50, 0x86, 0x1f, 0x06, 0xda, 0xe9, /* Byte value: 0x29 */ + 0x40, 0xda, 0x8c, 0xd5, 0x1a, 0xfc, 0xe3, 0x2c, 0x86, 0xb0, 0x24, 0x03, 0xc3, 0xe2, 0x20, 0x4d, /* Byte value: 0x2a */ + 0xf6, 0x91, 0x76, 0x4f, 0x48, 0x27, 0x25, 0x22, 0xe2, 0x88, 0x4c, 0x87, 0x5e, 0x58, 0x7b, 0xd2, /* Byte value: 0x2b */ + 0x5a, 0x44, 0x63, 0xd6, 0xaa, 0xe3, 0x86, 0xfa, 0x3e, 0x6e, 0x60, 0x45, 0xab, 0x04, 0x2d, 0x0f, /* Byte value: 0x2c */ + 0x0d, 0x4f, 0x96, 0xe0, 0x58, 0xee, 0xd3, 0x6b, 0x5c, 0x6f, 0x22, 0x23, 0x34, 0x73, 0xe7, 0x21, /* Byte value: 0x2d */ + 0xfa, 0x5e, 0xfe, 0x34, 0x93, 0xa6, 0x13, 0xb4, 0xb2, 0x95, 0x3a, 0xa3, 0x6e, 0x72, 0x7d, 0x52, /* Byte value: 0x2e */ + 0xc7, 0xab, 0x0d, 0xfb, 
0x21, 0x09, 0x18, 0xc2, 0x6d, 0x8e, 0x03, 0x10, 0x9a, 0xa9, 0x82, 0x36, /* Byte value: 0x2f */ + 0x65, 0x72, 0xac, 0xec, 0xdd, 0x92, 0x84, 0xb5, 0xf9, 0x91, 0xf1, 0xf8, 0x57, 0x6d, 0xd3, 0xea, /* Byte value: 0x30 */ + 0x5f, 0x81, 0x05, 0x64, 0x60, 0xf3, 0x71, 0x75, 0x02, 0x17, 0xa7, 0x5e, 0xbf, 0xfa, 0xce, 0x6f, /* Byte value: 0x31 */ + 0xd2, 0xb9, 0x48, 0xed, 0x0c, 0x26, 0xa7, 0x46, 0x91, 0xdb, 0xcd, 0x7b, 0xce, 0x8e, 0x69, 0xd4, /* Byte value: 0x32 */ + 0xf0, 0x17, 0x32, 0x93, 0xc4, 0x86, 0x3e, 0x69, 0xca, 0x67, 0x77, 0x95, 0x46, 0x4d, 0x78, 0x92, /* Byte value: 0x33 */ + 0x94, 0xe5, 0x80, 0xe4, 0x9a, 0x7b, 0x5f, 0x21, 0x3f, 0x84, 0xd2, 0x6a, 0x15, 0x79, 0x4a, 0xd9, /* Byte value: 0x34 */ + 0x43, 0x99, 0xae, 0xbb, 0x5c, 0x4d, 0x0f, 0xe8, 0x92, 0x26, 0xd8, 0x0a, 0xcf, 0x09, 0xc0, 0x6d, /* Byte value: 0x35 */ + 0x44, 0x9f, 0xf4, 0xfc, 0x53, 0x83, 0xf1, 0x5e, 0xb6, 0xbb, 0xb7, 0x1f, 0xd3, 0x45, 0x22, 0x8c, /* Byte value: 0x36 */ + 0x9e, 0xac, 0x4c, 0x43, 0xcd, 0x5b, 0x72, 0xfc, 0x47, 0x76, 0x9f, 0x5c, 0x3d, 0x46, 0x4f, 0x19, /* Byte value: 0x37 */ + 0xe9, 0xca, 0xff, 0xfe, 0x32, 0x28, 0xb7, 0x7b, 0x66, 0x2f, 0xcf, 0xda, 0x22, 0x40, 0x95, 0xf0, /* Byte value: 0x38 */ + 0xb2, 0x0e, 0x82, 0xb3, 0x1b, 0xa4, 0xd4, 0x7c, 0x54, 0x33, 0xfb, 0x98, 0x8d, 0x1d, 0x59, 0x5e, /* Byte value: 0x39 */ + 0x2a, 0x24, 0x8a, 0x2c, 0x5a, 0x5e, 0xbd, 0xcb, 0x3b, 0xaa, 0x5f, 0xd6, 0xa8, 0x4e, 0x15, 0x07, /* Byte value: 0x3a */ + 0x13, 0x94, 0x01, 0xca, 0xa1, 0x8e, 0xa4, 0xcf, 0xd4, 0xba, 0xf5, 0x79, 0x4c, 0x32, 0xe8, 0xa2, /* Byte value: 0x3b */ + 0x37, 0xbc, 0x3f, 0x68, 0xe5, 0x8f, 0x26, 0xab, 0xa7, 0xe9, 0x74, 0x85, 0xdc, 0xe4, 0xfa, 0xa4, /* Byte value: 0x3c */ + 0x1c, 0x18, 0xab, 0xdf, 0x3c, 0xbe, 0x7e, 0x9d, 0x90, 0x31, 0x7f, 0x54, 0x70, 0xf3, 0x0e, 0x02, /* Byte value: 0x3d */ + 0xb4, 0x88, 0xc6, 0x6f, 0x97, 0x05, 0xcf, 0x37, 0x7c, 0xdc, 0xc0, 0x8a, 0x95, 0x08, 0x5a, 0x1e, /* Byte value: 0x3e */ + 0x4b, 0x13, 0x5e, 0xe9, 0xce, 0xb3, 0x2b, 0x0c, 0xf2, 0x30, 0x3d, 0x32, 0xef, 0x84, 0xc4, 0x2c, /* Byte value: 0x3f */ + 0x7f, 0xec, 0x43, 0xef, 0x6d, 0x8d, 0xe1, 0x63, 0x41, 0x4f, 0xb5, 0xbe, 0x3f, 0x8b, 0xde, 0xa8, /* Byte value: 0x40 */ + 0x78, 0xea, 0x19, 0xa8, 0x62, 0x43, 0x1f, 0xd5, 0x65, 0xd2, 0xda, 0xab, 0x23, 0xc7, 0x3c, 0x49, /* Byte value: 0x41 */ + 0xcc, 0x62, 0xdf, 0xc7, 0xf5, 0x46, 0xd0, 0xe2, 0x19, 0x0e, 0x1a, 0x21, 0xb6, 0xcf, 0x66, 0x57, /* Byte value: 0x42 */ + 0x81, 0xf7, 0xc5, 0xf2, 0xb7, 0x54, 0xe0, 0xa5, 0xc3, 0xd1, 0x1c, 0x01, 0x41, 0x5e, 0xa1, 0x3b, /* Byte value: 0x43 */ + 0xac, 0xd5, 0x15, 0x99, 0xe2, 0xc4, 0xa3, 0xd8, 0xdc, 0xe6, 0x2c, 0xc2, 0xf5, 0x5c, 0x56, 0xdd, /* Byte value: 0x44 */ + 0x98, 0x2a, 0x08, 0x9f, 0x41, 0xfa, 0x69, 0xb7, 0x6f, 0x99, 0xa4, 0x4e, 0x25, 0x53, 0x4c, 0x59, /* Byte value: 0x45 */ + 0x56, 0x8b, 0xeb, 0xad, 0x71, 0x62, 0xb0, 0x6c, 0x6e, 0x73, 0x16, 0x61, 0x9b, 0x2e, 0x2b, 0x8f, /* Byte value: 0x46 */ + 0x1e, 0xdb, 0x97, 0x2a, 0xf9, 0x60, 0x77, 0xa4, 0x88, 0xd5, 0xd7, 0x5a, 0x78, 0x41, 0x0f, 0x83, /* Byte value: 0x47 */ + 0x30, 0xba, 0x65, 0x2f, 0xea, 0x41, 0xd8, 0x1d, 0x83, 0x74, 0x1b, 0x90, 0xc0, 0xa8, 0x18, 0x45, /* Byte value: 0x48 */ + 0x14, 0x92, 0x5b, 0x8d, 0xae, 0x40, 0x5a, 0x79, 0xf0, 0x27, 0x9a, 0x6c, 0x50, 0x7e, 0x0a, 0x43, /* Byte value: 0x49 */ + 0xc5, 0x68, 0x31, 0x0e, 0xe4, 0xd7, 0x11, 0xfb, 0x75, 0x6a, 0xab, 0x1e, 0x92, 0x1b, 0x83, 0xb7, /* Byte value: 0x4a */ + 0x49, 0xd0, 0x62, 0x1c, 0x0b, 0x6d, 0x22, 0x35, 0xea, 0xd4, 0x95, 0x3c, 0xe7, 0x36, 0xc5, 0xad, /* Byte value: 0x4b */ + 0xb0, 0xcd, 0xbe, 0x46, 0xde, 0x7a, 0xdd, 0x45, 0x4c, 0xd7, 0x53, 
0x96, 0x85, 0xaf, 0x58, 0xdf, /* Byte value: 0x4c */ + 0x70, 0x60, 0xe9, 0xfa, 0xf0, 0xbd, 0x3b, 0x31, 0x05, 0xc4, 0x3f, 0x93, 0x03, 0x4a, 0x38, 0x08, /* Byte value: 0x4d */ + 0x0b, 0xc9, 0xd2, 0x3c, 0xd4, 0x4f, 0xc8, 0x20, 0x74, 0x80, 0x19, 0x31, 0x2c, 0x66, 0xe4, 0x61, /* Byte value: 0x4e */ + 0xa6, 0x9c, 0xd9, 0x3e, 0xb5, 0xe4, 0x8e, 0x05, 0xa4, 0x14, 0x61, 0xf4, 0xdd, 0x63, 0x53, 0x1d, /* Byte value: 0x4f */ + 0xaf, 0x96, 0x37, 0xf7, 0xa4, 0x75, 0x4f, 0x1c, 0xc8, 0x70, 0xd0, 0xcb, 0xf9, 0xb7, 0xb6, 0xfd, /* Byte value: 0x50 */ + 0xa1, 0x9a, 0x83, 0x79, 0xba, 0x2a, 0x70, 0xb3, 0x80, 0x89, 0x0e, 0xe1, 0xc1, 0x2f, 0xb1, 0xfc, /* Byte value: 0x51 */ + 0x17, 0xd1, 0x79, 0xe3, 0xe8, 0xf1, 0xb6, 0xbd, 0xe4, 0xb1, 0x66, 0x65, 0x5c, 0x95, 0xea, 0x63, /* Byte value: 0x52 */ + 0xf5, 0xd2, 0x54, 0x21, 0x0e, 0x96, 0xc9, 0xe6, 0xf6, 0x1e, 0xb0, 0x8e, 0x52, 0xb3, 0x9b, 0xf2, /* Byte value: 0x53 */ + 0xfd, 0x58, 0xa4, 0x73, 0x9c, 0x68, 0xed, 0x02, 0x96, 0x08, 0x55, 0xb6, 0x72, 0x3e, 0x9f, 0xb3, /* Byte value: 0x54 */ + 0x3c, 0x75, 0xed, 0x54, 0x31, 0xc0, 0xee, 0x8b, 0xd3, 0x69, 0x6d, 0xb4, 0xf0, 0x82, 0x1e, 0xc5, /* Byte value: 0x55 */ + 0xca, 0xe4, 0x9b, 0x1b, 0x79, 0xe7, 0xcb, 0xa9, 0x31, 0xe1, 0x21, 0x33, 0xae, 0xda, 0x65, 0x17, /* Byte value: 0x56 */ + 0xa2, 0xd9, 0xa1, 0x17, 0xfc, 0x9b, 0x9c, 0x77, 0x94, 0x1f, 0xf2, 0xe8, 0xcd, 0xc4, 0x51, 0xdc, /* Byte value: 0x57 */ + 0x4a, 0x93, 0x40, 0x72, 0x4d, 0xdc, 0xce, 0xf1, 0xfe, 0x42, 0x69, 0x35, 0xeb, 0xdd, 0x25, 0x8d, /* Byte value: 0x58 */ + 0x0c, 0xcf, 0x88, 0x7b, 0xdb, 0x81, 0x36, 0x96, 0x50, 0x1d, 0x76, 0x24, 0x30, 0x2a, 0x06, 0x80, /* Byte value: 0x59 */ + 0x18, 0x5d, 0xd3, 0xf6, 0x75, 0xc1, 0x6c, 0xef, 0xa0, 0x3a, 0xec, 0x48, 0x60, 0x54, 0x0c, 0xc3, /* Byte value: 0x5a */ + 0xc3, 0xee, 0x75, 0xd2, 0x68, 0x76, 0x0a, 0xb0, 0x5d, 0x85, 0x90, 0x0c, 0x8a, 0x0e, 0x80, 0xf7, /* Byte value: 0x5b */ + 0x47, 0xdc, 0xd6, 0x92, 0x15, 0x32, 0x1d, 0x9a, 0xa2, 0x2d, 0x4b, 0x16, 0xdf, 0xae, 0xc2, 0xac, /* Byte value: 0x5c */ + 0x8f, 0xfb, 0x71, 0x7c, 0xa9, 0x0b, 0xdf, 0x0a, 0x8b, 0x28, 0xc2, 0x2b, 0x79, 0xc6, 0xa6, 0x3a, /* Byte value: 0x5d */ + 0xf2, 0xd4, 0x0e, 0x66, 0x01, 0x58, 0x37, 0x50, 0xd2, 0x83, 0xdf, 0x9b, 0x4e, 0xff, 0x79, 0x13, /* Byte value: 0x5e */ + 0xe0, 0xc0, 0x11, 0x37, 0x23, 0xb9, 0x76, 0x62, 0x0a, 0x4b, 0x7e, 0xe5, 0x06, 0x94, 0x70, 0x10, /* Byte value: 0x5f */ + 0x3d, 0xf5, 0xf3, 0xcf, 0xb2, 0xaf, 0x0b, 0x76, 0xdf, 0x1b, 0x39, 0xb3, 0xf4, 0xdb, 0xff, 0x64, /* Byte value: 0x60 */ + 0xaa, 0x53, 0x51, 0x45, 0x6e, 0x65, 0xb8, 0x93, 0xf4, 0x09, 0x17, 0xd0, 0xed, 0x49, 0x55, 0x9d, /* Byte value: 0x61 */ + 0x57, 0x0b, 0xf5, 0x36, 0xf2, 0x0d, 0x55, 0x91, 0x62, 0x01, 0x42, 0x66, 0x9f, 0x77, 0xca, 0x2e, /* Byte value: 0x62 */ + 0x87, 0x71, 0x81, 0x2e, 0x3b, 0xf5, 0xfb, 0xee, 0xeb, 0x3e, 0x27, 0x13, 0x59, 0x4b, 0xa2, 0x7b, /* Byte value: 0x63 */ + 0xb6, 0x4b, 0xfa, 0x9a, 0x52, 0xdb, 0xc6, 0x0e, 0x64, 0x38, 0x68, 0x84, 0x9d, 0xba, 0x5b, 0x9f, /* Byte value: 0x64 */ + 0xe3, 0x83, 0x33, 0x59, 0x65, 0x08, 0x9a, 0xa6, 0x1e, 0xdd, 0x82, 0xec, 0x0a, 0x7f, 0x90, 0x30, /* Byte value: 0x65 */ + 0x23, 0x2e, 0x64, 0xe5, 0x4b, 0xcf, 0x7c, 0xd2, 0x57, 0xce, 0xee, 0xe9, 0x8c, 0x9a, 0xf0, 0xe7, /* Byte value: 0x66 */ + 0x52, 0xce, 0x93, 0x84, 0x38, 0x1d, 0xa2, 0x1e, 0x5e, 0x78, 0x85, 0x7d, 0x8b, 0x89, 0x29, 0x4e, /* Byte value: 0x67 */ + 0x8e, 0x7b, 0x6f, 0xe7, 0x2a, 0x64, 0x3a, 0xf7, 0x87, 0x5a, 0x96, 0x2c, 0x7d, 0x9f, 0x47, 0x9b, /* Byte value: 0x68 */ + 0xdd, 0x35, 0xe2, 0xf8, 0x91, 0x16, 0x7d, 0x14, 0xd5, 0x50, 0x47, 0x56, 0xf2, 0x4f, 0x8f, 0x74, /* Byte value: 
0x69 */ + 0x2c, 0xa2, 0xce, 0xf0, 0xd6, 0xff, 0xa6, 0x80, 0x13, 0x45, 0x64, 0xc4, 0xb0, 0x5b, 0x16, 0x47, /* Byte value: 0x6a */ + 0x50, 0x0d, 0xaf, 0x71, 0xfd, 0xc3, 0xab, 0x27, 0x46, 0x9c, 0x2d, 0x73, 0x83, 0x3b, 0x28, 0xcf, /* Byte value: 0x6b */ + 0xec, 0x0f, 0x99, 0x4c, 0xf8, 0x38, 0x40, 0xf4, 0x5a, 0x56, 0x08, 0xc1, 0x36, 0xbe, 0x76, 0x90, /* Byte value: 0x6c */ + 0x06, 0x86, 0x44, 0xdc, 0x8c, 0xa1, 0x1b, 0x4b, 0x28, 0xef, 0x3b, 0x12, 0x18, 0x15, 0x03, 0x40, /* Byte value: 0x6d */ + 0x91, 0x20, 0xe6, 0x56, 0x50, 0x6b, 0xa8, 0xae, 0x03, 0xfd, 0x15, 0x71, 0x01, 0x87, 0xa9, 0xb9, /* Byte value: 0x6e */ + 0xa3, 0x59, 0xbf, 0x8c, 0x7f, 0xf4, 0x79, 0x8a, 0x98, 0x6d, 0xa6, 0xef, 0xc9, 0x9d, 0xb0, 0x7d, /* Byte value: 0x6f */ + 0xff, 0x9b, 0x98, 0x86, 0x59, 0xb6, 0xe4, 0x3b, 0x8e, 0xec, 0xfd, 0xb8, 0x7a, 0x8c, 0x9e, 0x32, /* Byte value: 0x70 */ + 0x9c, 0x6f, 0x70, 0xb6, 0x08, 0x85, 0x7b, 0xc5, 0x5f, 0x92, 0x37, 0x52, 0x35, 0xf4, 0x4e, 0x98, /* Byte value: 0x71 */ + 0x2b, 0xa4, 0x94, 0xb7, 0xd9, 0x31, 0x58, 0x36, 0x37, 0xd8, 0x0b, 0xd1, 0xac, 0x17, 0xf4, 0xa6, /* Byte value: 0x72 */ + 0xc6, 0x2b, 0x13, 0x60, 0xa2, 0x66, 0xfd, 0x3f, 0x61, 0xfc, 0x57, 0x17, 0x9e, 0xf0, 0x63, 0x97, /* Byte value: 0x73 */ + 0x39, 0xb0, 0x8b, 0xe6, 0xfb, 0xd0, 0x19, 0x04, 0xef, 0x10, 0xaa, 0xaf, 0xe4, 0x7c, 0xfd, 0xa5, /* Byte value: 0x74 */ + 0x29, 0x67, 0xa8, 0x42, 0x1c, 0xef, 0x51, 0x0f, 0x2f, 0x3c, 0xa3, 0xdf, 0xa4, 0xa5, 0xf5, 0x27, /* Byte value: 0x75 */ + 0x5b, 0xc4, 0x7d, 0x4d, 0x29, 0x8c, 0x63, 0x07, 0x32, 0x1c, 0x34, 0x42, 0xaf, 0x5d, 0xcc, 0xae, /* Byte value: 0x76 */ + 0x90, 0xa0, 0xf8, 0xcd, 0xd3, 0x04, 0x4d, 0x53, 0x0f, 0x8f, 0x41, 0x76, 0x05, 0xde, 0x48, 0x18, /* Byte value: 0x77 */ + 0x58, 0x87, 0x5f, 0x23, 0x6f, 0x3d, 0x8f, 0xc3, 0x26, 0x8a, 0xc8, 0x4b, 0xa3, 0xb6, 0x2c, 0x8e, /* Byte value: 0x78 */ + 0xf1, 0x97, 0x2c, 0x08, 0x47, 0xe9, 0xdb, 0x94, 0xc6, 0x15, 0x23, 0x92, 0x42, 0x14, 0x99, 0x33, /* Byte value: 0x79 */ + 0xdf, 0xf6, 0xde, 0x0d, 0x54, 0xc8, 0x74, 0x2d, 0xcd, 0xb4, 0xef, 0x58, 0xfa, 0xfd, 0x8e, 0xf5, /* Byte value: 0x7a */ + 0x28, 0xe7, 0xb6, 0xd9, 0x9f, 0x80, 0xb4, 0xf2, 0x23, 0x4e, 0xf7, 0xd8, 0xa0, 0xfc, 0x14, 0x86, /* Byte value: 0x7b */ + 0x60, 0xb7, 0xca, 0x5e, 0x17, 0x82, 0x73, 0x3a, 0xc5, 0xe8, 0x36, 0xe3, 0x43, 0x93, 0x30, 0x8a, /* Byte value: 0x7c */ + 0x79, 0x6a, 0x07, 0x33, 0xe1, 0x2c, 0xfa, 0x28, 0x69, 0xa0, 0x8e, 0xac, 0x27, 0x9e, 0xdd, 0xe8, /* Byte value: 0x7d */ + 0x76, 0xe6, 0xad, 0x26, 0x7c, 0x1c, 0x20, 0x7a, 0x2d, 0x2b, 0x04, 0x81, 0x1b, 0x5f, 0x3b, 0x48, /* Byte value: 0x7e */ + 0x03, 0x43, 0x22, 0x6e, 0x46, 0xb1, 0xec, 0xc4, 0x14, 0x96, 0xfc, 0x09, 0x0c, 0xeb, 0xe0, 0x20, /* Byte value: 0x7f */ + 0xc8, 0x27, 0xa7, 0xee, 0xbc, 0x39, 0xc2, 0x90, 0x29, 0x05, 0x89, 0x3d, 0xa6, 0x68, 0x64, 0x96, /* Byte value: 0x80 */ + 0x02, 0xc3, 0x3c, 0xf5, 0xc5, 0xde, 0x09, 0x39, 0x18, 0xe4, 0xa8, 0x0e, 0x08, 0xb2, 0x01, 0x81, /* Byte value: 0x81 */ + 0x12, 0x14, 0x1f, 0x51, 0x22, 0xe1, 0x41, 0x32, 0xd8, 0xc8, 0xa1, 0x7e, 0x48, 0x6b, 0x09, 0x03, /* Byte value: 0x82 */ + 0xef, 0x4c, 0xbb, 0x22, 0xbe, 0x89, 0xac, 0x30, 0x4e, 0xc0, 0xf4, 0xc8, 0x3a, 0x55, 0x96, 0xb0, /* Byte value: 0x83 */ + 0x46, 0x5c, 0xc8, 0x09, 0x96, 0x5d, 0xf8, 0x67, 0xae, 0x5f, 0x1f, 0x11, 0xdb, 0xf7, 0x23, 0x0d, /* Byte value: 0x84 */ + 0x51, 0x8d, 0xb1, 0xea, 0x7e, 0xac, 0x4e, 0xda, 0x4a, 0xee, 0x79, 0x74, 0x87, 0x62, 0xc9, 0x6e, /* Byte value: 0x85 */ + 0xdb, 0xb3, 0xa6, 0x24, 0x1d, 0xb7, 0x66, 0x5f, 0xfd, 0xbf, 0x7c, 0x44, 0xea, 0x5a, 0x8c, 0x34, /* Byte value: 0x86 */ + 0x74, 0x25, 0x91, 0xd3, 0xb9, 0xc2, 
0x29, 0x43, 0x35, 0xcf, 0xac, 0x8f, 0x13, 0xed, 0x3a, 0xc9, /* Byte value: 0x87 */ + 0x0f, 0x8c, 0xaa, 0x15, 0x9d, 0x30, 0xda, 0x52, 0x44, 0x8b, 0x8a, 0x2d, 0x3c, 0xc1, 0xe6, 0xa0, /* Byte value: 0x88 */ + 0xc1, 0x2d, 0x49, 0x27, 0xad, 0xa8, 0x03, 0x89, 0x45, 0x61, 0x38, 0x02, 0x82, 0xbc, 0x81, 0x76, /* Byte value: 0x89 */ + 0x8d, 0x38, 0x4d, 0x89, 0x6c, 0xd5, 0xd6, 0x33, 0x93, 0xcc, 0x6a, 0x25, 0x71, 0x74, 0xa7, 0xbb, /* Byte value: 0x8a */ + 0x68, 0x3d, 0x3a, 0x0c, 0x85, 0x7c, 0x57, 0xde, 0xa5, 0xfe, 0xd3, 0xdb, 0x63, 0x1e, 0x34, 0xcb, /* Byte value: 0x8b */ + 0xb7, 0xcb, 0xe4, 0x01, 0xd1, 0xb4, 0x23, 0xf3, 0x68, 0x4a, 0x3c, 0x83, 0x99, 0xe3, 0xba, 0x3e, /* Byte value: 0x8c */ + 0x0e, 0x0c, 0xb4, 0x8e, 0x1e, 0x5f, 0x3f, 0xaf, 0x48, 0xf9, 0xde, 0x2a, 0x38, 0x98, 0x07, 0x01, /* Byte value: 0x8d */ + 0x5d, 0x42, 0x39, 0x91, 0xa5, 0x2d, 0x78, 0x4c, 0x1a, 0xf3, 0x0f, 0x50, 0xb7, 0x48, 0xcf, 0xee, /* Byte value: 0x8e */ + 0xe2, 0x03, 0x2d, 0xc2, 0xe6, 0x67, 0x7f, 0x5b, 0x12, 0xaf, 0xd6, 0xeb, 0x0e, 0x26, 0x71, 0x91, /* Byte value: 0x8f */ + 0xc0, 0xad, 0x57, 0xbc, 0x2e, 0xc7, 0xe6, 0x74, 0x49, 0x13, 0x6c, 0x05, 0x86, 0xe5, 0x60, 0xd7, /* Byte value: 0x90 */ + 0x96, 0x26, 0xbc, 0x11, 0x5f, 0xa5, 0x56, 0x18, 0x27, 0x60, 0x7a, 0x64, 0x1d, 0xcb, 0x4b, 0x58, /* Byte value: 0x91 */ + 0xad, 0x55, 0x0b, 0x02, 0x61, 0xab, 0x46, 0x25, 0xd0, 0x94, 0x78, 0xc5, 0xf1, 0x05, 0xb7, 0x7c, /* Byte value: 0x92 */ + 0x01, 0x80, 0x1e, 0x9b, 0x83, 0x6f, 0xe5, 0xfd, 0x0c, 0x72, 0x54, 0x07, 0x04, 0x59, 0xe1, 0xa1, /* Byte value: 0x93 */ + 0x24, 0x28, 0x3e, 0xa2, 0x44, 0x01, 0x82, 0x64, 0x73, 0x53, 0x81, 0xfc, 0x90, 0xd6, 0x12, 0x06, /* Byte value: 0x94 */ + 0xe8, 0x4a, 0xe1, 0x65, 0xb1, 0x47, 0x52, 0x86, 0x6a, 0x5d, 0x9b, 0xdd, 0x26, 0x19, 0x74, 0x51, /* Byte value: 0x95 */ + 0x8c, 0xb8, 0x53, 0x12, 0xef, 0xba, 0x33, 0xce, 0x9f, 0xbe, 0x3e, 0x22, 0x75, 0x2d, 0x46, 0x1a, /* Byte value: 0x96 */ + 0xf3, 0x54, 0x10, 0xfd, 0x82, 0x37, 0xd2, 0xad, 0xde, 0xf1, 0x8b, 0x9c, 0x4a, 0xa6, 0x98, 0xb2, /* Byte value: 0x97 */ + 0xa7, 0x1c, 0xc7, 0xa5, 0x36, 0x8b, 0x6b, 0xf8, 0xa8, 0x66, 0x35, 0xf3, 0xd9, 0x3a, 0xb2, 0xbc, /* Byte value: 0x98 */ + 0x34, 0xff, 0x1d, 0x06, 0xa3, 0x3e, 0xca, 0x6f, 0xb3, 0x7f, 0x88, 0x8c, 0xd0, 0x0f, 0x1a, 0x84, /* Byte value: 0x99 */ + 0xe6, 0x46, 0x55, 0xeb, 0xaf, 0x18, 0x6d, 0x29, 0x22, 0xa4, 0x45, 0xf7, 0x1e, 0x81, 0x73, 0x50, /* Byte value: 0x9a */ + 0x48, 0x50, 0x7c, 0x87, 0x88, 0x02, 0xc7, 0xc8, 0xe6, 0xa6, 0xc1, 0x3b, 0xe3, 0x6f, 0x24, 0x0c, /* Byte value: 0x9b */ + 0xba, 0x84, 0x72, 0xe1, 0x89, 0x5a, 0xf0, 0x98, 0x34, 0x25, 0x1e, 0xa0, 0xad, 0x90, 0x5d, 0x1f, /* Byte value: 0x9c */ + 0x07, 0x06, 0x5a, 0x47, 0x0f, 0xce, 0xfe, 0xb6, 0x24, 0x9d, 0x6f, 0x15, 0x1c, 0x4c, 0xe2, 0xe1, /* Byte value: 0x9d */ + 0xfc, 0xd8, 0xba, 0xe8, 0x1f, 0x07, 0x08, 0xff, 0x9a, 0x7a, 0x01, 0xb1, 0x76, 0x67, 0x7e, 0x12, /* Byte value: 0x9e */ + 0x4d, 0x95, 0x1a, 0x35, 0x42, 0x12, 0x30, 0x47, 0xda, 0xdf, 0x06, 0x20, 0xf7, 0x91, 0xc7, 0x6c, /* Byte value: 0x9f */ + 0x19, 0xdd, 0xcd, 0x6d, 0xf6, 0xae, 0x89, 0x12, 0xac, 0x48, 0xb8, 0x4f, 0x64, 0x0d, 0xed, 0x62, /* Byte value: 0xa0 */ + 0x42, 0x19, 0xb0, 0x20, 0xdf, 0x22, 0xea, 0x15, 0x9e, 0x54, 0x8c, 0x0d, 0xcb, 0x50, 0x21, 0xcc, /* Byte value: 0xa1 */ + 0x75, 0xa5, 0x8f, 0x48, 0x3a, 0xad, 0xcc, 0xbe, 0x39, 0xbd, 0xf8, 0x88, 0x17, 0xb4, 0xdb, 0x68, /* Byte value: 0xa2 */ + 0x4c, 0x15, 0x04, 0xae, 0xc1, 0x7d, 0xd5, 0xba, 0xd6, 0xad, 0x52, 0x27, 0xf3, 0xc8, 0x26, 0xcd, /* Byte value: 0xa3 */ + 0xa9, 0x10, 0x73, 0x2b, 0x28, 0xd4, 0x54, 0x57, 0xe0, 0x9f, 0xeb, 0xd9, 0xe1, 
0xa2, 0xb5, 0xbd, /* Byte value: 0xa4 */ + 0x82, 0xb4, 0xe7, 0x9c, 0xf1, 0xe5, 0x0c, 0x61, 0xd7, 0x47, 0xe0, 0x08, 0x4d, 0xb5, 0x41, 0x1b, /* Byte value: 0xa5 */ + 0xf7, 0x11, 0x68, 0xd4, 0xcb, 0x48, 0xc0, 0xdf, 0xee, 0xfa, 0x18, 0x80, 0x5a, 0x01, 0x9a, 0x73, /* Byte value: 0xa6 */ + 0x08, 0x8a, 0xf0, 0x52, 0x92, 0xfe, 0x24, 0xe4, 0x60, 0x16, 0xe5, 0x38, 0x20, 0x8d, 0x04, 0x41, /* Byte value: 0xa7 */ + 0x4f, 0x56, 0x26, 0xc0, 0x87, 0xcc, 0x39, 0x7e, 0xc2, 0x3b, 0xae, 0x2e, 0xff, 0x23, 0xc6, 0xed, /* Byte value: 0xa8 */ + 0x10, 0xd7, 0x23, 0xa4, 0xe7, 0x3f, 0x48, 0x0b, 0xc0, 0x2c, 0x09, 0x70, 0x40, 0xd9, 0x08, 0x82, /* Byte value: 0xa9 */ + 0x3b, 0x73, 0xb7, 0x13, 0x3e, 0x0e, 0x10, 0x3d, 0xf7, 0xf4, 0x02, 0xa1, 0xec, 0xce, 0xfc, 0x24, /* Byte value: 0xaa */ + 0xcb, 0x64, 0x85, 0x80, 0xfa, 0x88, 0x2e, 0x54, 0x3d, 0x93, 0x75, 0x34, 0xaa, 0x83, 0x84, 0xb6, /* Byte value: 0xab */ + 0x20, 0x6d, 0x46, 0x8b, 0x0d, 0x7e, 0x90, 0x16, 0x43, 0x58, 0x12, 0xe0, 0x80, 0x71, 0x10, 0xc7, /* Byte value: 0xac */ + 0x63, 0xf4, 0xe8, 0x30, 0x51, 0x33, 0x9f, 0xfe, 0xd1, 0x7e, 0xca, 0xea, 0x4f, 0x78, 0xd0, 0xaa, /* Byte value: 0xad */ + 0x73, 0x23, 0xcb, 0x94, 0xb6, 0x0c, 0xd7, 0xf5, 0x11, 0x52, 0xc3, 0x9a, 0x0f, 0xa1, 0xd8, 0x28, /* Byte value: 0xae */ + 0xed, 0x8f, 0x87, 0xd7, 0x7b, 0x57, 0xa5, 0x09, 0x56, 0x24, 0x5c, 0xc6, 0x32, 0xe7, 0x97, 0x31, /* Byte value: 0xaf */ + 0xea, 0x89, 0xdd, 0x90, 0x74, 0x99, 0x5b, 0xbf, 0x72, 0xb9, 0x33, 0xd3, 0x2e, 0xab, 0x75, 0xd0, /* Byte value: 0xb0 */ + 0xc9, 0xa7, 0xb9, 0x75, 0x3f, 0x56, 0x27, 0x6d, 0x25, 0x77, 0xdd, 0x3a, 0xa2, 0x31, 0x85, 0x37, /* Byte value: 0xb1 */ + 0x32, 0x79, 0x59, 0xda, 0x2f, 0x9f, 0xd1, 0x24, 0x9b, 0x90, 0xb3, 0x9e, 0xc8, 0x1a, 0x19, 0xc4, /* Byte value: 0xb2 */ + 0x84, 0x32, 0xa3, 0x40, 0x7d, 0x44, 0x17, 0x2a, 0xff, 0xa8, 0xdb, 0x1a, 0x55, 0xa0, 0x42, 0x5b, /* Byte value: 0xb3 */ + 0x53, 0x4e, 0x8d, 0x1f, 0xbb, 0x72, 0x47, 0xe3, 0x52, 0x0a, 0xd1, 0x7a, 0x8f, 0xd0, 0xc8, 0xef, /* Byte value: 0xb4 */ + 0x04, 0x45, 0x78, 0x29, 0x49, 0x7f, 0x12, 0x72, 0x30, 0x0b, 0x93, 0x1c, 0x10, 0xa7, 0x02, 0xc1, /* Byte value: 0xb5 */ + 0x7c, 0xaf, 0x61, 0x81, 0x2b, 0x3c, 0x0d, 0xa7, 0x55, 0xd9, 0x49, 0xb7, 0x33, 0x60, 0x3e, 0x88, /* Byte value: 0xb6 */ + 0x41, 0x5a, 0x92, 0x4e, 0x99, 0x93, 0x06, 0xd1, 0x8a, 0xc2, 0x70, 0x04, 0xc7, 0xbb, 0xc1, 0xec, /* Byte value: 0xb7 */ + 0x27, 0x6b, 0x1c, 0xcc, 0x02, 0xb0, 0x6e, 0xa0, 0x67, 0xc5, 0x7d, 0xf5, 0x9c, 0x3d, 0xf2, 0x26, /* Byte value: 0xb8 */ + 0x55, 0xc8, 0xc9, 0xc3, 0x37, 0xd3, 0x5c, 0xa8, 0x7a, 0xe5, 0xea, 0x68, 0x97, 0xc5, 0xcb, 0xaf, /* Byte value: 0xb9 */ + 0xcf, 0x21, 0xfd, 0xa9, 0xb3, 0xf7, 0x3c, 0x26, 0x0d, 0x98, 0xe6, 0x28, 0xba, 0x24, 0x86, 0x77, /* Byte value: 0xba */ + 0x71, 0xe0, 0xf7, 0x61, 0x73, 0xd2, 0xde, 0xcc, 0x09, 0xb6, 0x6b, 0x94, 0x07, 0x13, 0xd9, 0xa9, /* Byte value: 0xbb */ + 0x11, 0x57, 0x3d, 0x3f, 0x64, 0x50, 0xad, 0xf6, 0xcc, 0x5e, 0x5d, 0x77, 0x44, 0x80, 0xe9, 0x23, /* Byte value: 0xbc */ + 0x97, 0xa6, 0xa2, 0x8a, 0xdc, 0xca, 0xb3, 0xe5, 0x2b, 0x12, 0x2e, 0x63, 0x19, 0x92, 0xaa, 0xf9, /* Byte value: 0xbd */ + 0xd9, 0x70, 0x9a, 0xd1, 0xd8, 0x69, 0x6f, 0x66, 0xe5, 0x5b, 0xd4, 0x4a, 0xe2, 0xe8, 0x8d, 0xb5, /* Byte value: 0xbe */ + 0xd0, 0x7a, 0x74, 0x18, 0xc9, 0xf8, 0xae, 0x7f, 0x89, 0x3f, 0x65, 0x75, 0xc6, 0x3c, 0x68, 0x55, /* Byte value: 0xbf */ + 0xa8, 0x90, 0x6d, 0xb0, 0xab, 0xbb, 0xb1, 0xaa, 0xec, 0xed, 0xbf, 0xde, 0xe5, 0xfb, 0x54, 0x1c, /* Byte value: 0xc0 */ + 0x1b, 0x1e, 0xf1, 0x98, 0x33, 0x70, 0x80, 0x2b, 0xb4, 0xac, 0x10, 0x41, 0x6c, 0xbf, 0xec, 0xe3, /* Byte value: 0xc1 */ + 
0x16, 0x51, 0x67, 0x78, 0x6b, 0x9e, 0x53, 0x40, 0xe8, 0xc3, 0x32, 0x62, 0x58, 0xcc, 0x0b, 0xc2, /* Byte value: 0xc2 */ + 0x6c, 0x78, 0x42, 0x25, 0xcc, 0x03, 0x45, 0xac, 0x95, 0xf5, 0x40, 0xc7, 0x73, 0xb9, 0x36, 0x0a, /* Byte value: 0xc3 */ + 0x2f, 0xe1, 0xec, 0x9e, 0x90, 0x4e, 0x4a, 0x44, 0x07, 0xd3, 0x98, 0xcd, 0xbc, 0xb0, 0xf6, 0x67, /* Byte value: 0xc4 */ + 0x45, 0x1f, 0xea, 0x67, 0xd0, 0xec, 0x14, 0xa3, 0xba, 0xc9, 0xe3, 0x18, 0xd7, 0x1c, 0xc3, 0x2d, /* Byte value: 0xc5 */ + 0x33, 0xf9, 0x47, 0x41, 0xac, 0xf0, 0x34, 0xd9, 0x97, 0xe2, 0xe7, 0x99, 0xcc, 0x43, 0xf8, 0x65, /* Byte value: 0xc6 */ + 0x1d, 0x98, 0xb5, 0x44, 0xbf, 0xd1, 0x9b, 0x60, 0x9c, 0x43, 0x2b, 0x53, 0x74, 0xaa, 0xef, 0xa3, /* Byte value: 0xc7 */ + 0xfe, 0x1b, 0x86, 0x1d, 0xda, 0xd9, 0x01, 0xc6, 0x82, 0x9e, 0xa9, 0xbf, 0x7e, 0xd5, 0x7f, 0x93, /* Byte value: 0xc8 */ + 0x7a, 0x29, 0x25, 0x5d, 0xa7, 0x9d, 0x16, 0xec, 0x7d, 0x36, 0x72, 0xa5, 0x2b, 0x75, 0x3d, 0xc8, /* Byte value: 0xc9 */ + 0x64, 0xf2, 0xb2, 0x77, 0x5e, 0xfd, 0x61, 0x48, 0xf5, 0xe3, 0xa5, 0xff, 0x53, 0x34, 0x32, 0x4b, /* Byte value: 0xca */ + 0xb3, 0x8e, 0x9c, 0x28, 0x98, 0xcb, 0x31, 0x81, 0x58, 0x41, 0xaf, 0x9f, 0x89, 0x44, 0xb8, 0xff, /* Byte value: 0xcb */ + 0x1f, 0x5b, 0x89, 0xb1, 0x7a, 0x0f, 0x92, 0x59, 0x84, 0xa7, 0x83, 0x5d, 0x7c, 0x18, 0xee, 0x22, /* Byte value: 0xcc */ + 0x2e, 0x61, 0xf2, 0x05, 0x13, 0x21, 0xaf, 0xb9, 0x0b, 0xa1, 0xcc, 0xca, 0xb8, 0xe9, 0x17, 0xc6, /* Byte value: 0xcd */ + 0xe4, 0x85, 0x69, 0x1e, 0x6a, 0xc6, 0x64, 0x10, 0x3a, 0x40, 0xed, 0xf9, 0x16, 0x33, 0x72, 0xd1, /* Byte value: 0xce */ + 0x6a, 0xfe, 0x06, 0xf9, 0x40, 0xa2, 0x5e, 0xe7, 0xbd, 0x1a, 0x7b, 0xd5, 0x6b, 0xac, 0x35, 0x4a, /* Byte value: 0xcf */ + 0x7d, 0x2f, 0x7f, 0x1a, 0xa8, 0x53, 0xe8, 0x5a, 0x59, 0xab, 0x1d, 0xb0, 0x37, 0x39, 0xdf, 0x29, /* Byte value: 0xd0 */ + 0xd8, 0xf0, 0x84, 0x4a, 0x5b, 0x06, 0x8a, 0x9b, 0xe9, 0x29, 0x80, 0x4d, 0xe6, 0xb1, 0x6c, 0x14, /* Byte value: 0xd1 */ + 0xc2, 0x6e, 0x6b, 0x49, 0xeb, 0x19, 0xef, 0x4d, 0x51, 0xf7, 0xc4, 0x0b, 0x8e, 0x57, 0x61, 0x56, /* Byte value: 0xd2 */ + 0x36, 0x3c, 0x21, 0xf3, 0x66, 0xe0, 0xc3, 0x56, 0xab, 0x9b, 0x20, 0x82, 0xd8, 0xbd, 0x1b, 0x05, /* Byte value: 0xd3 */ + 0xf8, 0x9d, 0xc2, 0xc1, 0x56, 0x78, 0x1a, 0x8d, 0xaa, 0x71, 0x92, 0xad, 0x66, 0xc0, 0x7c, 0xd3, /* Byte value: 0xd4 */ + 0x26, 0xeb, 0x02, 0x57, 0x81, 0xdf, 0x8b, 0x5d, 0x6b, 0xb7, 0x29, 0xf2, 0x98, 0x64, 0x13, 0x87, /* Byte value: 0xd5 */ + 0x3f, 0x36, 0xcf, 0x3a, 0x77, 0x71, 0x02, 0x4f, 0xc7, 0xff, 0x91, 0xbd, 0xfc, 0x69, 0xfe, 0xe5, /* Byte value: 0xd6 */ + 0x0a, 0x49, 0xcc, 0xa7, 0x57, 0x20, 0x2d, 0xdd, 0x78, 0xf2, 0x4d, 0x36, 0x28, 0x3f, 0x05, 0xc0, /* Byte value: 0xd7 */ + 0x9f, 0x2c, 0x52, 0xd8, 0x4e, 0x34, 0x97, 0x01, 0x4b, 0x04, 0xcb, 0x5b, 0x39, 0x1f, 0xae, 0xb8, /* Byte value: 0xd8 */ + 0xf4, 0x52, 0x4a, 0xba, 0x8d, 0xf9, 0x2c, 0x1b, 0xfa, 0x6c, 0xe4, 0x89, 0x56, 0xea, 0x7a, 0x53, /* Byte value: 0xd9 */ + 0x1a, 0x9e, 0xef, 0x03, 0xb0, 0x1f, 0x65, 0xd6, 0xb8, 0xde, 0x44, 0x46, 0x68, 0xe6, 0x0d, 0x42, /* Byte value: 0xda */ + 0xa5, 0xdf, 0xfb, 0x50, 0xf3, 0x55, 0x62, 0xc1, 0xb0, 0x82, 0x9d, 0xfd, 0xd1, 0x88, 0xb3, 0x3d, /* Byte value: 0xdb */ + 0x61, 0x37, 0xd4, 0xc5, 0x94, 0xed, 0x96, 0xc7, 0xc9, 0x9a, 0x62, 0xe4, 0x47, 0xca, 0xd1, 0x2b, /* Byte value: 0xdc */ + 0x35, 0x7f, 0x03, 0x9d, 0x20, 0x51, 0x2f, 0x92, 0xbf, 0x0d, 0xdc, 0x8b, 0xd4, 0x56, 0xfb, 0x25, /* Byte value: 0xdd */ + 0x7b, 0xa9, 0x3b, 0xc6, 0x24, 0xf2, 0xf3, 0x11, 0x71, 0x44, 0x26, 0xa2, 0x2f, 0x2c, 0xdc, 0x69, /* Byte value: 0xde */ + 0xde, 0x76, 0xc0, 0x96, 0xd7, 0xa7, 0x91, 
0xd0, 0xc1, 0xc6, 0xbb, 0x5f, 0xfe, 0xa4, 0x6f, 0x54, /* Byte value: 0xdf */ + 0xb5, 0x08, 0xd8, 0xf4, 0x14, 0x6a, 0x2a, 0xca, 0x70, 0xae, 0x94, 0x8d, 0x91, 0x51, 0xbb, 0xbf, /* Byte value: 0xe0 */ + 0xda, 0x33, 0xb8, 0xbf, 0x9e, 0xd8, 0x83, 0xa2, 0xf1, 0xcd, 0x28, 0x43, 0xee, 0x03, 0x6d, 0x95, /* Byte value: 0xe1 */ + 0xbc, 0x02, 0x36, 0x3d, 0x05, 0xfb, 0xeb, 0xd3, 0x1c, 0xca, 0x25, 0xb2, 0xb5, 0x85, 0x5e, 0x5f, /* Byte value: 0xe2 */ + 0xd7, 0x7c, 0x2e, 0x5f, 0xc6, 0x36, 0x50, 0xc9, 0xad, 0xa2, 0x0a, 0x60, 0xda, 0x70, 0x8a, 0xb4, /* Byte value: 0xe3 */ + 0xab, 0xd3, 0x4f, 0xde, 0xed, 0x0a, 0x5d, 0x6e, 0xf8, 0x7b, 0x43, 0xd7, 0xe9, 0x10, 0xb4, 0x3c, /* Byte value: 0xe4 */ + 0x22, 0xae, 0x7a, 0x7e, 0xc8, 0xa0, 0x99, 0x2f, 0x5b, 0xbc, 0xba, 0xee, 0x88, 0xc3, 0x11, 0x46, /* Byte value: 0xe5 */ + 0x66, 0x31, 0x8e, 0x82, 0x9b, 0x23, 0x68, 0x71, 0xed, 0x07, 0x0d, 0xf1, 0x5b, 0x86, 0x33, 0xca, /* Byte value: 0xe6 */ + 0x3a, 0xf3, 0xa9, 0x88, 0xbd, 0x61, 0xf5, 0xc0, 0xfb, 0x86, 0x56, 0xa6, 0xe8, 0x97, 0x1d, 0x85, /* Byte value: 0xe7 */ + 0x9a, 0xe9, 0x34, 0x6a, 0x84, 0x24, 0x60, 0x8e, 0x77, 0x7d, 0x0c, 0x40, 0x2d, 0xe1, 0x4d, 0xd8, /* Byte value: 0xe8 */ + 0x6b, 0x7e, 0x18, 0x62, 0xc3, 0xcd, 0xbb, 0x1a, 0xb1, 0x68, 0x2f, 0xd2, 0x6f, 0xf5, 0xd4, 0xeb, /* Byte value: 0xe9 */ + 0x05, 0xc5, 0x66, 0xb2, 0xca, 0x10, 0xf7, 0x8f, 0x3c, 0x79, 0xc7, 0x1b, 0x14, 0xfe, 0xe3, 0x60, /* Byte value: 0xea */ + 0x6f, 0x3b, 0x60, 0x4b, 0x8a, 0xb2, 0xa9, 0x68, 0x81, 0x63, 0xbc, 0xce, 0x7f, 0x52, 0xd6, 0x2a, /* Byte value: 0xeb */ + 0x80, 0x77, 0xdb, 0x69, 0x34, 0x3b, 0x05, 0x58, 0xcf, 0xa3, 0x48, 0x06, 0x45, 0x07, 0x40, 0x9a, /* Byte value: 0xec */ + 0x5c, 0xc2, 0x27, 0x0a, 0x26, 0x42, 0x9d, 0xb1, 0x16, 0x81, 0x5b, 0x57, 0xb3, 0x11, 0x2e, 0x4f, /* Byte value: 0xed */ + 0xfb, 0xde, 0xe0, 0xaf, 0x10, 0xc9, 0xf6, 0x49, 0xbe, 0xe7, 0x6e, 0xa4, 0x6a, 0x2b, 0x9c, 0xf3, /* Byte value: 0xee */ + 0x89, 0x7d, 0x35, 0xa0, 0x25, 0xaa, 0xc4, 0x41, 0xa3, 0xc7, 0xf9, 0x39, 0x61, 0xd3, 0xa5, 0x7a, /* Byte value: 0xef */ + 0x5e, 0x01, 0x1b, 0xff, 0xe3, 0x9c, 0x94, 0x88, 0x0e, 0x65, 0xf3, 0x59, 0xbb, 0xa3, 0x2f, 0xce, /* Byte value: 0xf0 */ + 0x8a, 0x3e, 0x17, 0xce, 0x63, 0x1b, 0x28, 0x85, 0xb7, 0x51, 0x05, 0x30, 0x6d, 0x38, 0x45, 0x5a, /* Byte value: 0xf1 */ + 0xbb, 0x04, 0x6c, 0x7a, 0x0a, 0x35, 0x15, 0x65, 0x38, 0x57, 0x4a, 0xa7, 0xa9, 0xc9, 0xbc, 0xbe, /* Byte value: 0xf2 */ + 0x6d, 0xf8, 0x5c, 0xbe, 0x4f, 0x6c, 0xa0, 0x51, 0x99, 0x87, 0x14, 0xc0, 0x77, 0xe0, 0xd7, 0xab, /* Byte value: 0xf3 */ + 0x9d, 0xef, 0x6e, 0x2d, 0x8b, 0xea, 0x9e, 0x38, 0x53, 0xe0, 0x63, 0x55, 0x31, 0xad, 0xaf, 0x39, /* Byte value: 0xf4 */ + 0x25, 0xa8, 0x20, 0x39, 0xc7, 0x6e, 0x67, 0x99, 0x7f, 0x21, 0xd5, 0xfb, 0x94, 0x8f, 0xf3, 0xa7, /* Byte value: 0xf5 */ + 0xc4, 0xe8, 0x2f, 0x95, 0x67, 0xb8, 0xf4, 0x06, 0x79, 0x18, 0xff, 0x19, 0x96, 0x42, 0x62, 0x16, /* Byte value: 0xf6 */ + 0x15, 0x12, 0x45, 0x16, 0x2d, 0x2f, 0xbf, 0x84, 0xfc, 0x55, 0xce, 0x6b, 0x54, 0x27, 0xeb, 0xe2, /* Byte value: 0xf7 */ + 0x85, 0xb2, 0xbd, 0xdb, 0xfe, 0x2b, 0xf2, 0xd7, 0xf3, 0xda, 0x8f, 0x1d, 0x51, 0xf9, 0xa3, 0xfa, /* Byte value: 0xf8 */ + 0xd6, 0xfc, 0x30, 0xc4, 0x45, 0x59, 0xb5, 0x34, 0xa1, 0xd0, 0x5e, 0x67, 0xde, 0x29, 0x6b, 0x15, /* Byte value: 0xf9 */ + 0xe1, 0x40, 0x0f, 0xac, 0xa0, 0xd6, 0x93, 0x9f, 0x06, 0x39, 0x2a, 0xe2, 0x02, 0xcd, 0x91, 0xb1, /* Byte value: 0xfa */ + 0xd4, 0x3f, 0x0c, 0x31, 0x80, 0x87, 0xbc, 0x0d, 0xb9, 0x34, 0xf6, 0x69, 0xd6, 0x9b, 0x6a, 0x94, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, /* Byte value: 0xfc */ + 0xd1, 0xfa, 0x6a, 0x83, 0x4a, 0x97, 0x4b, 0x82, 0x85, 0x4d, 0x31, 0x72, 0xc2, 0x65, 0x89, 0xf4, /* Byte value: 0xfd */ + 0x7e, 0x6c, 0x5d, 0x74, 0xee, 0xe2, 0x04, 0x9e, 0x4d, 0x3d, 0xe1, 0xb9, 0x3b, 0xd2, 0x3f, 0x09, /* Byte value: 0xfe */ + 0xb8, 0x47, 0x4e, 0x14, 0x4c, 0x84, 0xf9, 0xa1, 0x2c, 0xc1, 0xb6, 0xae, 0xa5, 0x22, 0x5c, 0x9e, /* Byte value: 0xff */ + + /* Matrix row: 9 */ + 0xa5, 0x55, 0xa4, 0x90, 0xc6, 0x8d, 0x2f, 0xd5, 0x71, 0x80, 0x04, 0x89, 0xcb, 0x72, 0x1b, 0x5d, /* Byte value: 0x00 */ + 0x2d, 0x90, 0x75, 0x4e, 0x36, 0xf9, 0x9f, 0x17, 0x62, 0x87, 0xa3, 0x5a, 0x48, 0x8a, 0xa4, 0x15, /* Byte value: 0x01 */ + 0x32, 0x9f, 0x4c, 0x80, 0x9d, 0x5f, 0xd4, 0xf3, 0xbf, 0x6c, 0x3b, 0x64, 0xae, 0x3d, 0x18, 0xe9, /* Byte value: 0x02 */ + 0x8f, 0x0b, 0x94, 0x6d, 0x4b, 0xb1, 0x7f, 0x15, 0x81, 0x1e, 0x6c, 0xdd, 0xe4, 0xac, 0x86, 0xd3, /* Byte value: 0x03 */ + 0x0e, 0x5f, 0x8a, 0xa5, 0xb5, 0x49, 0x5d, 0x6d, 0xe7, 0x32, 0x55, 0x1c, 0xce, 0xa8, 0x72, 0xf5, /* Byte value: 0x04 */ + 0x30, 0x41, 0xf8, 0xfa, 0x20, 0xc6, 0xcb, 0x66, 0x9e, 0x27, 0xa6, 0x60, 0xc1, 0x05, 0xc9, 0xce, /* Byte value: 0x05 */ + 0x38, 0xbf, 0x6d, 0xd1, 0x91, 0xe7, 0xb7, 0x77, 0x1a, 0xc8, 0x97, 0x70, 0xbe, 0xe5, 0x0b, 0x52, /* Byte value: 0x06 */ + 0xc0, 0xc7, 0x66, 0x6e, 0x80, 0x9e, 0xaa, 0x5b, 0x3d, 0x9c, 0xdd, 0x43, 0x82, 0x14, 0xa2, 0xbe, /* Byte value: 0x07 */ + 0x54, 0xbc, 0x60, 0x39, 0xd9, 0x78, 0xa0, 0x43, 0x23, 0xff, 0xd0, 0xa8, 0x5e, 0x7f, 0xf9, 0xdf, /* Byte value: 0x08 */ + 0xe6, 0x18, 0x68, 0x4c, 0x05, 0x72, 0xb8, 0x63, 0x0b, 0x7b, 0x7d, 0x0f, 0x0c, 0x5a, 0x9c, 0xe2, /* Byte value: 0x09 */ + 0x9e, 0x5b, 0x27, 0x06, 0x55, 0x5e, 0x69, 0x9c, 0xbb, 0xc7, 0xa1, 0xff, 0xcc, 0xb3, 0x48, 0xda, /* Byte value: 0x0a */ + 0x39, 0xd0, 0x37, 0xec, 0x2e, 0x4a, 0x59, 0xdc, 0xeb, 0x0c, 0x38, 0x72, 0x68, 0xf9, 0x82, 0xa0, /* Byte value: 0x0b */ + 0x55, 0xd3, 0x3a, 0x04, 0x66, 0xd5, 0x4e, 0xe8, 0xd2, 0x3b, 0x7f, 0xaa, 0x88, 0x63, 0x70, 0x2d, /* Byte value: 0x0c */ + 0x7e, 0xe2, 0x50, 0xc4, 0x54, 0x44, 0xf0, 0x83, 0xd3, 0x61, 0xb8, 0xfc, 0x71, 0xa1, 0x64, 0x51, /* Byte value: 0x0d */ + 0x52, 0x1d, 0x7f, 0xb7, 0xdd, 0x10, 0x81, 0x3f, 0x40, 0x22, 0xb4, 0xa4, 0xef, 0x37, 0x49, 0xb6, /* Byte value: 0x0e */ + 0x91, 0x6b, 0xf7, 0x9e, 0x5f, 0xba, 0xda, 0x5a, 0xad, 0x31, 0x5b, 0xe1, 0xd4, 0x07, 0xb3, 0xdd, /* Byte value: 0x0f */ + 0x64, 0xfd, 0x98, 0xc3, 0xf9, 0xbe, 0x6b, 0x25, 0xbd, 0xd8, 0x76, 0xc8, 0x9f, 0x7a, 0x30, 0x11, /* Byte value: 0x10 */ + 0x03, 0xb1, 0xee, 0x47, 0x02, 0x34, 0xf1, 0x3e, 0xd0, 0x8f, 0x32, 0x06, 0xb9, 0x24, 0x58, 0xd5, /* Byte value: 0x11 */ + 0x57, 0x0d, 0x8e, 0x7e, 0xdb, 0x4c, 0x51, 0x7d, 0xf3, 0x70, 0xe2, 0xae, 0xe7, 0x5b, 0xa1, 0x0a, /* Byte value: 0x12 */ + 0x5a, 0xe3, 0xea, 0x9c, 0x6c, 0x31, 0xfd, 0x2e, 0xc4, 0xcd, 0x85, 0xb4, 0x90, 0xd7, 0x8b, 0x2a, /* Byte value: 0x13 */ + 0x1c, 0xbe, 0xd7, 0x89, 0xa9, 0x92, 0xba, 0xda, 0x0d, 0x64, 0xaa, 0x38, 0x5f, 0x93, 0xe4, 0x29, /* Byte value: 0x14 */ + 0x60, 0x82, 0x33, 0x37, 0x40, 0x4f, 0x55, 0xcc, 0xff, 0x4e, 0x8f, 0xc0, 0x41, 0x0a, 0x51, 0x5f, /* Byte value: 0x15 */ + 0x07, 0xce, 0x45, 0xb3, 0xbb, 0xc5, 0xcf, 0xd7, 0x92, 0x19, 0xcb, 0x0e, 0x67, 0x54, 0x39, 0x9b, /* Byte value: 0x16 */ + 0x18, 0xc1, 0x7c, 0x7d, 0x10, 0x63, 0x84, 0x33, 0x4f, 0xf2, 0x53, 0x30, 0x81, 0xe3, 0x85, 0x67, /* Byte value: 0x17 */ + 0x21, 0x11, 0x4b, 0x91, 0x3e, 0x29, 0xdd, 0xef, 0xa4, 0xfe, 0x6b, 0x42, 0xe9, 0x1a, 0x07, 0xc7, /* Byte value: 0x18 */ + 0x72, 0x63, 0x6e, 0x1b, 0x5c, 0x94, 0xb2, 0x7b, 0x15, 0x18, 0x70, 0xe4, 0xd0, 0x31, 0xc7, 0x83, /* Byte 
value: 0x19 */ + 0xa8, 0xbb, 0xc0, 0x72, 0x71, 0xf0, 0x83, 0x86, 0x46, 0x3d, 0x63, 0x93, 0xbc, 0xfe, 0x31, 0x7d, /* Byte value: 0x1a */ + 0xd1, 0x97, 0xd5, 0x05, 0x9e, 0x71, 0xbc, 0xd2, 0x07, 0x45, 0x10, 0x61, 0xaa, 0x0b, 0x6c, 0xb7, /* Byte value: 0x1b */ + 0x29, 0xef, 0xde, 0xba, 0x8f, 0x08, 0xa1, 0xfe, 0x20, 0x11, 0x5a, 0x52, 0x96, 0xfa, 0xc5, 0x5b, /* Byte value: 0x1c */ + 0xc6, 0x66, 0x79, 0xe0, 0x84, 0xf6, 0x8b, 0x27, 0x5e, 0x41, 0xb9, 0x4f, 0x33, 0x5c, 0x12, 0xd7, /* Byte value: 0x1d */ + 0xa4, 0x3a, 0xfe, 0xad, 0x79, 0x20, 0xc1, 0x7e, 0x80, 0x44, 0xab, 0x8b, 0x1d, 0x6e, 0x92, 0xaf, /* Byte value: 0x1e */ + 0x3f, 0x71, 0x28, 0x62, 0x2a, 0x22, 0x78, 0xa0, 0x88, 0xd1, 0x5c, 0x7e, 0xd9, 0xb1, 0x32, 0xc9, /* Byte value: 0x1f */ + 0xe0, 0xb9, 0x77, 0xc2, 0x01, 0x1a, 0x99, 0x1f, 0x68, 0xa6, 0x19, 0x03, 0xbd, 0x12, 0x2c, 0x8b, /* Byte value: 0x20 */ + 0x27, 0xb0, 0x54, 0x1f, 0x3a, 0x41, 0xfc, 0x93, 0xc7, 0x23, 0x0f, 0x4e, 0x58, 0x52, 0xb7, 0xae, /* Byte value: 0x21 */ + 0x8d, 0xd5, 0x20, 0x17, 0xf6, 0x28, 0x60, 0x80, 0xa0, 0x55, 0xf1, 0xd9, 0x8b, 0x94, 0x57, 0xf4, /* Byte value: 0x22 */ + 0x0c, 0x81, 0x3e, 0xdf, 0x08, 0xd0, 0x42, 0xf8, 0xc6, 0x79, 0xc8, 0x18, 0xa1, 0x90, 0xa3, 0xd2, /* Byte value: 0x23 */ + 0x82, 0xe5, 0xf0, 0x8f, 0xfc, 0xcc, 0xd3, 0x46, 0xb6, 0xa3, 0x0b, 0xc7, 0x93, 0x20, 0xac, 0xf3, /* Byte value: 0x24 */ + 0xea, 0x99, 0x56, 0x93, 0x0d, 0xa2, 0xfa, 0x9b, 0xcd, 0x02, 0xb5, 0x17, 0xad, 0xca, 0x3f, 0x30, /* Byte value: 0x25 */ + 0xae, 0x1a, 0xdf, 0xfc, 0x75, 0x98, 0xa2, 0xfa, 0x25, 0xe0, 0x07, 0x9f, 0x0d, 0xb6, 0x81, 0x14, /* Byte value: 0x26 */ + 0xb4, 0x05, 0x17, 0xfb, 0xd8, 0x62, 0x39, 0x5c, 0x4b, 0x59, 0xc9, 0xab, 0xe3, 0x6d, 0xd5, 0x54, /* Byte value: 0x27 */ + 0x9a, 0x24, 0x8c, 0xf2, 0xec, 0xaf, 0x57, 0x75, 0xf9, 0x51, 0x58, 0xf7, 0x12, 0xc3, 0x29, 0x94, /* Byte value: 0x28 */ + 0x63, 0x33, 0xdd, 0x70, 0x42, 0x7b, 0xa4, 0xf2, 0x2f, 0xc1, 0xbd, 0xc6, 0xf8, 0x2e, 0x09, 0x8a, /* Byte value: 0x29 */ + 0x49, 0x6d, 0xed, 0x8d, 0xcf, 0x47, 0xf4, 0x32, 0xdf, 0x5f, 0xd5, 0x92, 0xd7, 0xf0, 0x94, 0x04, /* Byte value: 0x2a */ + 0xe5, 0xa9, 0x86, 0x0b, 0x07, 0x46, 0x49, 0x5d, 0xdb, 0xf4, 0x4f, 0x09, 0xb5, 0x7e, 0xc4, 0x37, /* Byte value: 0x2b */ + 0x42, 0x22, 0x96, 0xe1, 0x7c, 0x52, 0x79, 0x1d, 0x8b, 0x3f, 0xd6, 0x84, 0x11, 0x34, 0x0e, 0x4d, /* Byte value: 0x2c */ + 0xe4, 0xc6, 0xdc, 0x36, 0xb8, 0xeb, 0xa7, 0xf6, 0x2a, 0x30, 0xe0, 0x0b, 0x63, 0x62, 0x4d, 0xc5, /* Byte value: 0x2d */ + 0x15, 0x2f, 0x18, 0x9f, 0xa7, 0x1e, 0x28, 0x60, 0x78, 0x4f, 0x34, 0x2a, 0xf6, 0x6f, 0xaf, 0x47, /* Byte value: 0x2e */ + 0xb7, 0xb4, 0xf9, 0xbc, 0xda, 0x56, 0xc8, 0x62, 0x9b, 0xd6, 0xfb, 0xad, 0x5a, 0x49, 0x8d, 0x81, /* Byte value: 0x2f */ + 0xc8, 0x39, 0xf3, 0x45, 0x31, 0xbf, 0xd6, 0x4a, 0xb9, 0x73, 0xec, 0x53, 0xfd, 0xf4, 0x60, 0x22, /* Byte value: 0x30 */ + 0x06, 0xa1, 0x1f, 0x8e, 0x04, 0x68, 0x21, 0x7c, 0x63, 0xdd, 0x64, 0x0c, 0xb1, 0x48, 0xb0, 0x69, /* Byte value: 0x31 */ + 0x70, 0xbd, 0xda, 0x61, 0xe1, 0x0d, 0xad, 0xee, 0x34, 0x53, 0xed, 0xe0, 0xbf, 0x09, 0x16, 0xa4, /* Byte value: 0x32 */ + 0x9d, 0xea, 0xc9, 0x41, 0x57, 0x6a, 0x98, 0xa2, 0x6b, 0x48, 0x93, 0xf9, 0x75, 0x97, 0x10, 0x0f, /* Byte value: 0x33 */ + 0x41, 0x93, 0x78, 0xa6, 0x7e, 0x66, 0x88, 0x23, 0x5b, 0xb0, 0xe4, 0x82, 0xa8, 0x10, 0x56, 0x98, /* Byte value: 0x34 */ + 0x75, 0xad, 0x2b, 0xa8, 0xe7, 0x51, 0x7d, 0xac, 0x87, 0x01, 0xbb, 0xea, 0xb7, 0x65, 0xfe, 0x18, /* Byte value: 0x35 */ + 0x19, 0xae, 0x26, 0x40, 0xaf, 0xce, 0x6a, 0x98, 0xbe, 0x36, 0xfc, 0x32, 0x57, 0xff, 0x0c, 0x95, /* Byte value: 0x36 */ + 0xc9, 0x56, 0xa9, 0x78, 
0x8e, 0x12, 0x38, 0xe1, 0x48, 0xb7, 0x43, 0x51, 0x2b, 0xe8, 0xe9, 0xd0, /* Byte value: 0x37 */ + 0xaa, 0x65, 0x74, 0x08, 0xcc, 0x69, 0x9c, 0x13, 0x67, 0x76, 0xfe, 0x97, 0xd3, 0xc6, 0xe0, 0x5a, /* Byte value: 0x38 */ + 0xfc, 0x07, 0xa0, 0x4b, 0xa8, 0x88, 0x23, 0xc5, 0x65, 0xc2, 0xb3, 0x3b, 0xe2, 0x81, 0xc8, 0xa2, /* Byte value: 0x39 */ + 0x4d, 0x12, 0x46, 0x79, 0x76, 0xb6, 0xca, 0xdb, 0x9d, 0xc9, 0x2c, 0x9a, 0x09, 0x80, 0xf5, 0x4a, /* Byte value: 0x3a */ + 0xbf, 0x4a, 0x6c, 0x97, 0x6b, 0x77, 0xb4, 0x73, 0x1f, 0x39, 0xca, 0xbd, 0x25, 0xa9, 0x4f, 0x1d, /* Byte value: 0x3b */ + 0x2a, 0x5e, 0x30, 0xfd, 0x8d, 0x3c, 0x50, 0xc0, 0xf0, 0x9e, 0x68, 0x54, 0x2f, 0xde, 0x9d, 0x8e, /* Byte value: 0x3c */ + 0x73, 0x0c, 0x34, 0x26, 0xe3, 0x39, 0x5c, 0xd0, 0xe4, 0xdc, 0xdf, 0xe6, 0x06, 0x2d, 0x4e, 0x71, /* Byte value: 0x3d */ + 0x84, 0x44, 0xef, 0x01, 0xf8, 0xa4, 0xf2, 0x3a, 0xd5, 0x7e, 0x6f, 0xcb, 0x22, 0x68, 0x1c, 0x9a, /* Byte value: 0x3e */ + 0xd5, 0xe8, 0x7e, 0xf1, 0x27, 0x80, 0x82, 0x3b, 0x45, 0xd3, 0xe9, 0x69, 0x74, 0x7b, 0x0d, 0xf9, /* Byte value: 0x3f */ + 0xc3, 0x76, 0x88, 0x29, 0x82, 0xaa, 0x5b, 0x65, 0xed, 0x13, 0xef, 0x45, 0x3b, 0x30, 0xfa, 0x6b, /* Byte value: 0x40 */ + 0xaf, 0x75, 0x85, 0xc1, 0xca, 0x35, 0x4c, 0x51, 0xd4, 0x24, 0xa8, 0x9d, 0xdb, 0xaa, 0x08, 0xe6, /* Byte value: 0x41 */ + 0x2b, 0x31, 0x6a, 0xc0, 0x32, 0x91, 0xbe, 0x6b, 0x01, 0x5a, 0xc7, 0x56, 0xf9, 0xc2, 0x14, 0x7c, /* Byte value: 0x42 */ + 0x86, 0x9a, 0x5b, 0x7b, 0x45, 0x3d, 0xed, 0xaf, 0xf4, 0x35, 0xf2, 0xcf, 0x4d, 0x50, 0xcd, 0xbd, /* Byte value: 0x43 */ + 0xa7, 0x8b, 0x10, 0xea, 0x7b, 0x14, 0x30, 0x40, 0x50, 0xcb, 0x99, 0x8d, 0xa4, 0x4a, 0xca, 0x7a, /* Byte value: 0x44 */ + 0xb1, 0x15, 0xe6, 0x32, 0xde, 0x3e, 0xe9, 0x1e, 0xf8, 0x0b, 0x9f, 0xa1, 0xeb, 0x01, 0x3d, 0xe8, /* Byte value: 0x45 */ + 0xb2, 0xa4, 0x08, 0x75, 0xdc, 0x0a, 0x18, 0x20, 0x28, 0x84, 0xad, 0xa7, 0x52, 0x25, 0x65, 0x3d, /* Byte value: 0x46 */ + 0x5b, 0x8c, 0xb0, 0xa1, 0xd3, 0x9c, 0x13, 0x85, 0x35, 0x09, 0x2a, 0xb6, 0x46, 0xcb, 0x02, 0xd8, /* Byte value: 0x47 */ + 0x46, 0x5d, 0x3d, 0x15, 0xc5, 0xa3, 0x47, 0xf4, 0xc9, 0xa9, 0x2f, 0x8c, 0xcf, 0x44, 0x6f, 0x03, /* Byte value: 0x48 */ + 0xd3, 0x49, 0x61, 0x7f, 0x23, 0xe8, 0xa3, 0x47, 0x26, 0x0e, 0x8d, 0x65, 0xc5, 0x33, 0xbd, 0x90, /* Byte value: 0x49 */ + 0x9f, 0x34, 0x7d, 0x3b, 0xea, 0xf3, 0x87, 0x37, 0x4a, 0x03, 0x0e, 0xfd, 0x1a, 0xaf, 0xc1, 0x28, /* Byte value: 0x4a */ + 0xfd, 0x68, 0xfa, 0x76, 0x17, 0x25, 0xcd, 0x6e, 0x94, 0x06, 0x1c, 0x39, 0x34, 0x9d, 0x41, 0x50, /* Byte value: 0x4b */ + 0xd4, 0x87, 0x24, 0xcc, 0x98, 0x2d, 0x6c, 0x90, 0xb4, 0x17, 0x46, 0x6b, 0xa2, 0x67, 0x84, 0x0b, /* Byte value: 0x4c */ + 0x0f, 0x30, 0xd0, 0x98, 0x0a, 0xe4, 0xb3, 0xc6, 0x16, 0xf6, 0xfa, 0x1e, 0x18, 0xb4, 0xfb, 0x07, /* Byte value: 0x4d */ + 0x9c, 0x85, 0x93, 0x7c, 0xe8, 0xc7, 0x76, 0x09, 0x9a, 0x8c, 0x3c, 0xfb, 0xa3, 0x8b, 0x99, 0xfd, /* Byte value: 0x4e */ + 0x2f, 0x4e, 0xc1, 0x34, 0x8b, 0x60, 0x80, 0x82, 0x43, 0xcc, 0x3e, 0x5e, 0x27, 0xb2, 0x75, 0x32, /* Byte value: 0x4f */ + 0x9b, 0x4b, 0xd6, 0xcf, 0x53, 0x02, 0xb9, 0xde, 0x08, 0x95, 0xf7, 0xf5, 0xc4, 0xdf, 0xa0, 0x66, /* Byte value: 0x50 */ + 0x43, 0x4d, 0xcc, 0xdc, 0xc3, 0xff, 0x97, 0xb6, 0x7a, 0xfb, 0x79, 0x86, 0xc7, 0x28, 0x87, 0xbf, /* Byte value: 0x51 */ + 0xef, 0x89, 0xa7, 0x5a, 0x0b, 0xfe, 0x2a, 0xd9, 0x7e, 0x50, 0xe3, 0x1d, 0xa5, 0xa6, 0xd7, 0x8c, /* Byte value: 0x52 */ + 0xd9, 0x69, 0x40, 0x2e, 0x2f, 0x50, 0xc0, 0xc3, 0x83, 0xaa, 0x21, 0x71, 0xd5, 0xeb, 0xae, 0x2b, /* Byte value: 0x53 */ + 0x79, 0x2c, 0x15, 0x77, 0xef, 0x81, 0x3f, 0x54, 0x41, 0x78, 0x73, 
0xf2, 0x16, 0xf5, 0x5d, 0xca, /* Byte value: 0x54 */ + 0xb6, 0xdb, 0xa3, 0x81, 0x65, 0xfb, 0x26, 0xc9, 0x6a, 0x12, 0x54, 0xaf, 0x8c, 0x55, 0x04, 0x73, /* Byte value: 0x55 */ + 0x53, 0x72, 0x25, 0x8a, 0x62, 0xbd, 0x6f, 0x94, 0xb1, 0xe6, 0x1b, 0xa6, 0x39, 0x2b, 0xc0, 0x44, /* Byte value: 0x56 */ + 0x7f, 0x8d, 0x0a, 0xf9, 0xeb, 0xe9, 0x1e, 0x28, 0x22, 0xa5, 0x17, 0xfe, 0xa7, 0xbd, 0xed, 0xa3, /* Byte value: 0x57 */ + 0xc1, 0xa8, 0x3c, 0x53, 0x3f, 0x33, 0x44, 0xf0, 0xcc, 0x58, 0x72, 0x41, 0x54, 0x08, 0x2b, 0x4c, /* Byte value: 0x58 */ + 0xf0, 0x86, 0x9e, 0x94, 0xa0, 0x58, 0x61, 0x3d, 0xa3, 0xbb, 0x7b, 0x23, 0x43, 0x11, 0x6b, 0x70, /* Byte value: 0x59 */ + 0x23, 0xcf, 0xff, 0xeb, 0x83, 0xb0, 0xc2, 0x7a, 0x85, 0xb5, 0xf6, 0x46, 0x86, 0x22, 0xd6, 0xe0, /* Byte value: 0x5a */ + 0xe7, 0x77, 0x32, 0x71, 0xba, 0xdf, 0x56, 0xc8, 0xfa, 0xbf, 0xd2, 0x0d, 0xda, 0x46, 0x15, 0x10, /* Byte value: 0x5b */ + 0x25, 0x6e, 0xe0, 0x65, 0x87, 0xd8, 0xe3, 0x06, 0xe6, 0x68, 0x92, 0x4a, 0x37, 0x6a, 0x66, 0x89, /* Byte value: 0x5c */ + 0x5e, 0x9c, 0x41, 0x68, 0xd5, 0xc0, 0xc3, 0xc7, 0x86, 0x5b, 0x7c, 0xbc, 0x4e, 0xa7, 0xea, 0x64, /* Byte value: 0x5d */ + 0xb5, 0x6a, 0x4d, 0xc6, 0x67, 0xcf, 0xd7, 0xf7, 0xba, 0x9d, 0x66, 0xa9, 0x35, 0x71, 0x5c, 0xa6, /* Byte value: 0x5e */ + 0x1e, 0x60, 0x63, 0xf3, 0x14, 0x0b, 0xa5, 0x4f, 0x2c, 0x2f, 0x37, 0x3c, 0x30, 0xab, 0x35, 0x0e, /* Byte value: 0x5f */ + 0xa2, 0x9b, 0xe1, 0x23, 0x7d, 0x48, 0xe0, 0x02, 0xe3, 0x99, 0xcf, 0x87, 0xac, 0x26, 0x22, 0xc6, /* Byte value: 0x60 */ + 0xdf, 0xc8, 0x5f, 0xa0, 0x2b, 0x38, 0xe1, 0xbf, 0xe0, 0x77, 0x45, 0x7d, 0x64, 0xa3, 0x1e, 0x42, /* Byte value: 0x61 */ + 0xa6, 0xe4, 0x4a, 0xd7, 0xc4, 0xb9, 0xde, 0xeb, 0xa1, 0x0f, 0x36, 0x8f, 0x72, 0x56, 0x43, 0x88, /* Byte value: 0x62 */ + 0xfe, 0xd9, 0x14, 0x31, 0x15, 0x11, 0x3c, 0x50, 0x44, 0x89, 0x2e, 0x3f, 0x8d, 0xb9, 0x19, 0x85, /* Byte value: 0x63 */ + 0xac, 0xc4, 0x6b, 0x86, 0xc8, 0x01, 0xbd, 0x6f, 0x04, 0xab, 0x9a, 0x9b, 0x62, 0x8e, 0x50, 0x33, /* Byte value: 0x64 */ + 0x22, 0xa0, 0xa5, 0xd6, 0x3c, 0x1d, 0x2c, 0xd1, 0x74, 0x71, 0x59, 0x44, 0x50, 0x3e, 0x5f, 0x12, /* Byte value: 0x65 */ + 0xf9, 0x17, 0x51, 0x82, 0xae, 0xd4, 0xf3, 0x87, 0xd6, 0x90, 0xe5, 0x31, 0xea, 0xed, 0x20, 0x1e, /* Byte value: 0x66 */ + 0xe2, 0x67, 0xc3, 0xb8, 0xbc, 0x83, 0x86, 0x8a, 0x49, 0xed, 0x84, 0x07, 0xd2, 0x2a, 0xfd, 0xac, /* Byte value: 0x67 */ + 0x4a, 0xdc, 0x03, 0xca, 0xcd, 0x73, 0x05, 0x0c, 0x0f, 0xd0, 0xe7, 0x94, 0x6e, 0xd4, 0xcc, 0xd1, /* Byte value: 0x68 */ + 0xbc, 0xfb, 0x82, 0xd0, 0x69, 0x43, 0x45, 0x4d, 0xcf, 0xb6, 0xf8, 0xbb, 0x9c, 0x8d, 0x17, 0xc8, /* Byte value: 0x69 */ + 0x35, 0x51, 0x09, 0x33, 0x26, 0x9a, 0x1b, 0x24, 0x2d, 0x75, 0xf0, 0x6a, 0xc9, 0x69, 0x21, 0x72, /* Byte value: 0x6a */ + 0xca, 0xe7, 0x47, 0x3f, 0x8c, 0x26, 0xc9, 0xdf, 0x98, 0x38, 0x71, 0x57, 0x92, 0xcc, 0xb1, 0x05, /* Byte value: 0x6b */ + 0xee, 0xe6, 0xfd, 0x67, 0xb4, 0x53, 0xc4, 0x72, 0x8f, 0x94, 0x4c, 0x1f, 0x73, 0xba, 0x5e, 0x7e, /* Byte value: 0x6c */ + 0x78, 0x43, 0x4f, 0x4a, 0x50, 0x2c, 0xd1, 0xff, 0xb0, 0xbc, 0xdc, 0xf0, 0xc0, 0xe9, 0xd4, 0x38, /* Byte value: 0x6d */ + 0x05, 0x10, 0xf1, 0xc9, 0x06, 0x5c, 0xd0, 0x42, 0xb3, 0x52, 0x56, 0x0a, 0x08, 0x6c, 0xe8, 0xbc, /* Byte value: 0x6e */ + 0x6b, 0xcd, 0x48, 0x5b, 0xf3, 0x5a, 0xd8, 0xe3, 0xab, 0x2e, 0x8c, 0xd6, 0x87, 0xce, 0xcb, 0x16, /* Byte value: 0x6f */ + 0x51, 0xac, 0x91, 0xf0, 0xdf, 0x24, 0x70, 0x01, 0x90, 0xad, 0x86, 0xa2, 0x56, 0x13, 0x11, 0x63, /* Byte value: 0x70 */ + 0xe1, 0xd6, 0x2d, 0xff, 0xbe, 0xb7, 0x77, 0xb4, 0x99, 0x62, 0xb6, 0x01, 0x6b, 0x0e, 0xa5, 0x79, /* Byte value: 
0x71 */ + 0x59, 0x52, 0x04, 0xdb, 0x6e, 0x05, 0x0c, 0x10, 0x14, 0x42, 0xb7, 0xb2, 0x29, 0xf3, 0xd3, 0xff, /* Byte value: 0x72 */ + 0xa3, 0xf4, 0xbb, 0x1e, 0xc2, 0xe5, 0x0e, 0xa9, 0x12, 0x5d, 0x60, 0x85, 0x7a, 0x3a, 0xab, 0x34, /* Byte value: 0x73 */ + 0xf2, 0x58, 0x2a, 0xee, 0x1d, 0xc1, 0x7e, 0xa8, 0x82, 0xf0, 0xe6, 0x27, 0x2c, 0x29, 0xba, 0x57, /* Byte value: 0x74 */ + 0x71, 0xd2, 0x80, 0x5c, 0x5e, 0xa0, 0x43, 0x45, 0xc5, 0x97, 0x42, 0xe2, 0x69, 0x15, 0x9f, 0x56, /* Byte value: 0x75 */ + 0x56, 0x62, 0xd4, 0x43, 0x64, 0xe1, 0xbf, 0xd6, 0x02, 0xb4, 0x4d, 0xac, 0x31, 0x47, 0x28, 0xf8, /* Byte value: 0x76 */ + 0x11, 0x50, 0xb3, 0x6b, 0x1e, 0xef, 0x16, 0x89, 0x3a, 0xd9, 0xcd, 0x22, 0x28, 0x1f, 0xce, 0x09, /* Byte value: 0x77 */ + 0x6a, 0xa2, 0x12, 0x66, 0x4c, 0xf7, 0x36, 0x48, 0x5a, 0xea, 0x23, 0xd4, 0x51, 0xd2, 0x42, 0xe4, /* Byte value: 0x78 */ + 0x89, 0xaa, 0x8b, 0xe3, 0x4f, 0xd9, 0x5e, 0x69, 0xe2, 0xc3, 0x08, 0xd1, 0x55, 0xe4, 0x36, 0xba, /* Byte value: 0x79 */ + 0x94, 0x7b, 0x06, 0x57, 0x59, 0xe6, 0x0a, 0x18, 0x1e, 0x63, 0x0d, 0xeb, 0xdc, 0x6b, 0x5b, 0x61, /* Byte value: 0x7a */ + 0x65, 0x92, 0xc2, 0xfe, 0x46, 0x13, 0x85, 0x8e, 0x4c, 0x1c, 0xd9, 0xca, 0x49, 0x66, 0xb9, 0xe3, /* Byte value: 0x7b */ + 0x8c, 0xba, 0x7a, 0x2a, 0x49, 0x85, 0x8e, 0x2b, 0x51, 0x91, 0x5e, 0xdb, 0x5d, 0x88, 0xde, 0x06, /* Byte value: 0x7c */ + 0xbb, 0x35, 0xc7, 0x63, 0xd2, 0x86, 0x8a, 0x9a, 0x5d, 0xaf, 0x33, 0xb5, 0xfb, 0xd9, 0x2e, 0x53, /* Byte value: 0x7d */ + 0x77, 0x73, 0x9f, 0xd2, 0x5a, 0xc8, 0x62, 0x39, 0xa6, 0x4a, 0x26, 0xee, 0xd8, 0x5d, 0x2f, 0x3f, /* Byte value: 0x7e */ + 0x3c, 0xc0, 0xc6, 0x25, 0x28, 0x16, 0x89, 0x9e, 0x58, 0x5e, 0x6e, 0x78, 0x60, 0x95, 0x6a, 0x1c, /* Byte value: 0x7f */ + 0x7b, 0xf2, 0xa1, 0x0d, 0x52, 0x18, 0x20, 0xc1, 0x60, 0x33, 0xee, 0xf6, 0x79, 0xcd, 0x8c, 0xed, /* Byte value: 0x80 */ + 0x28, 0x80, 0x84, 0x87, 0x30, 0xa5, 0x4f, 0x55, 0xd1, 0xd5, 0xf5, 0x50, 0x40, 0xe6, 0x4c, 0xa9, /* Byte value: 0x81 */ + 0xab, 0x0a, 0x2e, 0x35, 0x73, 0xc4, 0x72, 0xb8, 0x96, 0xb2, 0x51, 0x95, 0x05, 0xda, 0x69, 0xa8, /* Byte value: 0x82 */ + 0xd2, 0x26, 0x3b, 0x42, 0x9c, 0x45, 0x4d, 0xec, 0xd7, 0xca, 0x22, 0x67, 0x13, 0x2f, 0x34, 0x62, /* Byte value: 0x83 */ + 0x31, 0x2e, 0xa2, 0xc7, 0x9f, 0x6b, 0x25, 0xcd, 0x6f, 0xe3, 0x09, 0x62, 0x17, 0x19, 0x40, 0x3c, /* Byte value: 0x84 */ + 0xde, 0xa7, 0x05, 0x9d, 0x94, 0x95, 0x0f, 0x14, 0x11, 0xb3, 0xea, 0x7f, 0xb2, 0xbf, 0x97, 0xb0, /* Byte value: 0x85 */ + 0xc4, 0xb8, 0xcd, 0x9a, 0x39, 0x6f, 0x94, 0xb2, 0x7f, 0x0a, 0x24, 0x4b, 0x5c, 0x64, 0xc3, 0xf0, /* Byte value: 0x86 */ + 0x5f, 0xf3, 0x1b, 0x55, 0x6a, 0x6d, 0x2d, 0x6c, 0x77, 0x9f, 0xd3, 0xbe, 0x98, 0xbb, 0x63, 0x96, /* Byte value: 0x87 */ + 0xcc, 0x46, 0x58, 0xb1, 0x88, 0x4e, 0xe8, 0xa3, 0xfb, 0xe5, 0x15, 0x5b, 0x23, 0x84, 0x01, 0x6c, /* Byte value: 0x88 */ + 0xcf, 0xf7, 0xb6, 0xf6, 0x8a, 0x7a, 0x19, 0x9d, 0x2b, 0x6a, 0x27, 0x5d, 0x9a, 0xa0, 0x59, 0xb9, /* Byte value: 0x89 */ + 0x76, 0x1c, 0xc5, 0xef, 0xe5, 0x65, 0x8c, 0x92, 0x57, 0x8e, 0x89, 0xec, 0x0e, 0x41, 0xa6, 0xcd, /* Byte value: 0x8a */ + 0x2c, 0xff, 0x2f, 0x73, 0x89, 0x54, 0x71, 0xbc, 0x93, 0x43, 0x0c, 0x58, 0x9e, 0x96, 0x2d, 0xe7, /* Byte value: 0x8b */ + 0xb8, 0x84, 0x29, 0x24, 0xd0, 0xb2, 0x7b, 0xa4, 0x8d, 0x20, 0x01, 0xb3, 0x42, 0xfd, 0x76, 0x86, /* Byte value: 0x8c */ + 0xd8, 0x06, 0x1a, 0x13, 0x90, 0xfd, 0x2e, 0x68, 0x72, 0x6e, 0x8e, 0x73, 0x03, 0xf7, 0x27, 0xd9, /* Byte value: 0x8d */ + 0x2e, 0x21, 0x9b, 0x09, 0x34, 0xcd, 0x6e, 0x29, 0xb2, 0x08, 0x91, 0x5c, 0xf1, 0xae, 0xfc, 0xc0, /* Byte value: 0x8e */ + 0x36, 0xe0, 0xe7, 0x74, 0x24, 0xae, 
0xea, 0x1a, 0xfd, 0xfa, 0xc2, 0x6c, 0x70, 0x4d, 0x79, 0xa7, /* Byte value: 0x8f */ + 0xdb, 0xb7, 0xf4, 0x54, 0x92, 0xc9, 0xdf, 0x56, 0xa2, 0xe1, 0xbc, 0x75, 0xba, 0xd3, 0x7f, 0x0c, /* Byte value: 0x90 */ + 0x69, 0x13, 0xfc, 0x21, 0x4e, 0xc3, 0xc7, 0x76, 0x8a, 0x65, 0x11, 0xd2, 0xe8, 0xf6, 0x1a, 0x31, /* Byte value: 0x91 */ + 0xb3, 0xcb, 0x52, 0x48, 0x63, 0xa7, 0xf6, 0x8b, 0xd9, 0x40, 0x02, 0xa5, 0x84, 0x39, 0xec, 0xcf, /* Byte value: 0x92 */ + 0x14, 0x40, 0x42, 0xa2, 0x18, 0xb3, 0xc6, 0xcb, 0x89, 0x8b, 0x9b, 0x28, 0x20, 0x73, 0x26, 0xb5, /* Byte value: 0x93 */ + 0x95, 0x14, 0x5c, 0x6a, 0xe6, 0x4b, 0xe4, 0xb3, 0xef, 0xa7, 0xa2, 0xe9, 0x0a, 0x77, 0xd2, 0x93, /* Byte value: 0x94 */ + 0xbe, 0x25, 0x36, 0xaa, 0xd4, 0xda, 0x5a, 0xd8, 0xee, 0xfd, 0x65, 0xbf, 0xf3, 0xb5, 0xc6, 0xef, /* Byte value: 0x95 */ + 0x62, 0x5c, 0x87, 0x4d, 0xfd, 0xd6, 0x4a, 0x59, 0xde, 0x05, 0x12, 0xc4, 0x2e, 0x32, 0x80, 0x78, /* Byte value: 0x96 */ + 0xa1, 0x2a, 0x0f, 0x64, 0x7f, 0x7c, 0x11, 0x3c, 0x33, 0x16, 0xfd, 0x81, 0x15, 0x02, 0x7a, 0x13, /* Byte value: 0x97 */ + 0x3b, 0x0e, 0x83, 0x96, 0x93, 0xd3, 0x46, 0x49, 0xca, 0x47, 0xa5, 0x76, 0x07, 0xc1, 0x53, 0x87, /* Byte value: 0x98 */ + 0x16, 0x9e, 0xf6, 0xd8, 0xa5, 0x2a, 0xd9, 0x5e, 0xa8, 0xc0, 0x06, 0x2c, 0x4f, 0x4b, 0xf7, 0x92, /* Byte value: 0x99 */ + 0x66, 0x23, 0x2c, 0xb9, 0x44, 0x27, 0x74, 0xb0, 0x9c, 0x93, 0xeb, 0xcc, 0xf0, 0x42, 0xe1, 0x36, /* Byte value: 0x9a */ + 0xe9, 0x28, 0xb8, 0xd4, 0x0f, 0x96, 0x0b, 0xa5, 0x1d, 0x8d, 0x87, 0x11, 0x14, 0xee, 0x67, 0xe5, /* Byte value: 0x9b */ + 0x5c, 0x42, 0xf5, 0x12, 0x68, 0x59, 0xdc, 0x52, 0xa7, 0x10, 0xe1, 0xb8, 0x21, 0x9f, 0x3b, 0x43, /* Byte value: 0x9c */ + 0x6c, 0x03, 0x0d, 0xe8, 0x48, 0x9f, 0x17, 0x34, 0x39, 0x37, 0x47, 0xd8, 0xe0, 0x9a, 0xf2, 0x8d, /* Byte value: 0x9d */ + 0x6d, 0x6c, 0x57, 0xd5, 0xf7, 0x32, 0xf9, 0x9f, 0xc8, 0xf3, 0xe8, 0xda, 0x36, 0x86, 0x7b, 0x7f, /* Byte value: 0x9e */ + 0xad, 0xab, 0x31, 0xbb, 0x77, 0xac, 0x53, 0xc4, 0xf5, 0x6f, 0x35, 0x99, 0xb4, 0x92, 0xd9, 0xc1, /* Byte value: 0x9f */ + 0x37, 0x8f, 0xbd, 0x49, 0x9b, 0x03, 0x04, 0xb1, 0x0c, 0x3e, 0x6d, 0x6e, 0xa6, 0x51, 0xf0, 0x55, /* Byte value: 0xa0 */ + 0x61, 0xed, 0x69, 0x0a, 0xff, 0xe2, 0xbb, 0x67, 0x0e, 0x8a, 0x20, 0xc2, 0x97, 0x16, 0xd8, 0xad, /* Byte value: 0xa1 */ + 0x4b, 0xb3, 0x59, 0xf7, 0x72, 0xde, 0xeb, 0xa7, 0xfe, 0x14, 0x48, 0x96, 0xb8, 0xc8, 0x45, 0x23, /* Byte value: 0xa2 */ + 0xb9, 0xeb, 0x73, 0x19, 0x6f, 0x1f, 0x95, 0x0f, 0x7c, 0xe4, 0xae, 0xb1, 0x94, 0xe1, 0xff, 0x74, /* Byte value: 0xa3 */ + 0xe3, 0x08, 0x99, 0x85, 0x03, 0x2e, 0x68, 0x21, 0xb8, 0x29, 0x2b, 0x05, 0x04, 0x36, 0x74, 0x5e, /* Byte value: 0xa4 */ + 0xba, 0x5a, 0x9d, 0x5e, 0x6d, 0x2b, 0x64, 0x31, 0xac, 0x6b, 0x9c, 0xb7, 0x2d, 0xc5, 0xa7, 0xa1, /* Byte value: 0xa5 */ + 0xf1, 0xe9, 0xc4, 0xa9, 0x1f, 0xf5, 0x8f, 0x96, 0x52, 0x7f, 0xd4, 0x21, 0x95, 0x0d, 0xe2, 0x82, /* Byte value: 0xa6 */ + 0xa0, 0x45, 0x55, 0x59, 0xc0, 0xd1, 0xff, 0x97, 0xc2, 0xd2, 0x52, 0x83, 0xc3, 0x1e, 0xf3, 0xe1, /* Byte value: 0xa7 */ + 0x85, 0x2b, 0xb5, 0x3c, 0x47, 0x09, 0x1c, 0x91, 0x24, 0xba, 0xc0, 0xc9, 0xf4, 0x74, 0x95, 0x68, /* Byte value: 0xa8 */ + 0x83, 0x8a, 0xaa, 0xb2, 0x43, 0x61, 0x3d, 0xed, 0x47, 0x67, 0xa4, 0xc5, 0x45, 0x3c, 0x25, 0x01, /* Byte value: 0xa9 */ + 0xda, 0xd8, 0xae, 0x69, 0x2d, 0x64, 0x31, 0xfd, 0x53, 0x25, 0x13, 0x77, 0x6c, 0xcf, 0xf6, 0xfe, /* Byte value: 0xaa */ + 0x47, 0x32, 0x67, 0x28, 0x7a, 0x0e, 0xa9, 0x5f, 0x38, 0x6d, 0x80, 0x8e, 0x19, 0x58, 0xe6, 0xf1, /* Byte value: 0xab */ + 0xc5, 0xd7, 0x97, 0xa7, 0x86, 0xc2, 0x7a, 0x19, 0x8e, 0xce, 0x8b, 0x49, 0x8a, 
0x78, 0x4a, 0x02, /* Byte value: 0xac */ + 0xb0, 0x7a, 0xbc, 0x0f, 0x61, 0x93, 0x07, 0xb5, 0x09, 0xcf, 0x30, 0xa3, 0x3d, 0x1d, 0xb4, 0x1a, /* Byte value: 0xad */ + 0x33, 0xf0, 0x16, 0xbd, 0x22, 0xf2, 0x3a, 0x58, 0x4e, 0xa8, 0x94, 0x66, 0x78, 0x21, 0x91, 0x1b, /* Byte value: 0xae */ + 0xfa, 0xa6, 0xbf, 0xc5, 0xac, 0xe0, 0x02, 0xb9, 0x06, 0x1f, 0xd7, 0x37, 0x53, 0xc9, 0x78, 0xcb, /* Byte value: 0xaf */ + 0x96, 0xa5, 0xb2, 0x2d, 0xe4, 0x7f, 0x15, 0x8d, 0x3f, 0x28, 0x90, 0xef, 0xb3, 0x53, 0x8a, 0x46, /* Byte value: 0xb0 */ + 0x6f, 0xb2, 0xe3, 0xaf, 0x4a, 0xab, 0xe6, 0x0a, 0xe9, 0xb8, 0x75, 0xde, 0x59, 0xbe, 0xaa, 0x58, /* Byte value: 0xb1 */ + 0x6e, 0xdd, 0xb9, 0x92, 0xf5, 0x06, 0x08, 0xa1, 0x18, 0x7c, 0xda, 0xdc, 0x8f, 0xa2, 0x23, 0xaa, /* Byte value: 0xb2 */ + 0xc2, 0x19, 0xd2, 0x14, 0x3d, 0x07, 0xb5, 0xce, 0x1c, 0xd7, 0x40, 0x47, 0xed, 0x2c, 0x73, 0x99, /* Byte value: 0xb3 */ + 0xf6, 0x27, 0x81, 0x1a, 0xa4, 0x30, 0x40, 0x41, 0xc0, 0x66, 0x1f, 0x2f, 0xf2, 0x59, 0xdb, 0x19, /* Byte value: 0xb4 */ + 0x50, 0xc3, 0xcb, 0xcd, 0x60, 0x89, 0x9e, 0xaa, 0x61, 0x69, 0x29, 0xa0, 0x80, 0x0f, 0x98, 0x91, /* Byte value: 0xb5 */ + 0xff, 0xb6, 0x4e, 0x0c, 0xaa, 0xbc, 0xd2, 0xfb, 0xb5, 0x4d, 0x81, 0x3d, 0x5b, 0xa5, 0x90, 0x77, /* Byte value: 0xb6 */ + 0x5d, 0x2d, 0xaf, 0x2f, 0xd7, 0xf4, 0x32, 0xf9, 0x56, 0xd4, 0x4e, 0xba, 0xf7, 0x83, 0xb2, 0xb1, /* Byte value: 0xb7 */ + 0xa9, 0xd4, 0x9a, 0x4f, 0xce, 0x5d, 0x6d, 0x2d, 0xb7, 0xf9, 0xcc, 0x91, 0x6a, 0xe2, 0xb8, 0x8f, /* Byte value: 0xb8 */ + 0x8e, 0x64, 0xce, 0x50, 0xf4, 0x1c, 0x91, 0xbe, 0x70, 0xda, 0xc3, 0xdf, 0x32, 0xb0, 0x0f, 0x21, /* Byte value: 0xb9 */ + 0x17, 0xf1, 0xac, 0xe5, 0x1a, 0x87, 0x37, 0xf5, 0x59, 0x04, 0xa9, 0x2e, 0x99, 0x57, 0x7e, 0x60, /* Byte value: 0xba */ + 0x1b, 0x70, 0x92, 0x3a, 0x12, 0x57, 0x75, 0x0d, 0x9f, 0x7d, 0x61, 0x36, 0x38, 0xc7, 0xdd, 0xb2, /* Byte value: 0xbb */ + 0x97, 0xca, 0xe8, 0x10, 0x5b, 0xd2, 0xfb, 0x26, 0xce, 0xec, 0x3f, 0xed, 0x65, 0x4f, 0x03, 0xb4, /* Byte value: 0xbc */ + 0x7d, 0x53, 0xbe, 0x83, 0x56, 0x70, 0x01, 0xbd, 0x03, 0xee, 0x8a, 0xfa, 0xc8, 0x85, 0x3c, 0x84, /* Byte value: 0xbd */ + 0xec, 0x38, 0x49, 0x1d, 0x09, 0xca, 0xdb, 0xe7, 0xae, 0xdf, 0xd1, 0x1b, 0x1c, 0x82, 0x8f, 0x59, /* Byte value: 0xbe */ + 0x58, 0x3d, 0x5e, 0xe6, 0xd1, 0xa8, 0xe2, 0xbb, 0xe5, 0x86, 0x18, 0xb0, 0xff, 0xef, 0x5a, 0x0d, /* Byte value: 0xbf */ + 0xf7, 0x48, 0xdb, 0x27, 0x1b, 0x9d, 0xae, 0xea, 0x31, 0xa2, 0xb0, 0x2d, 0x24, 0x45, 0x52, 0xeb, /* Byte value: 0xc0 */ + 0x1f, 0x0f, 0x39, 0xce, 0xab, 0xa6, 0x4b, 0xe4, 0xdd, 0xeb, 0x98, 0x3e, 0xe6, 0xb7, 0xbc, 0xfc, /* Byte value: 0xc1 */ + 0xfb, 0xc9, 0xe5, 0xf8, 0x13, 0x4d, 0xec, 0x12, 0xf7, 0xdb, 0x78, 0x35, 0x85, 0xd5, 0xf1, 0x39, /* Byte value: 0xc2 */ + 0x7c, 0x3c, 0xe4, 0xbe, 0xe9, 0xdd, 0xef, 0x16, 0xf2, 0x2a, 0x25, 0xf8, 0x1e, 0x99, 0xb5, 0x76, /* Byte value: 0xc3 */ + 0x09, 0x91, 0xcf, 0x16, 0x0e, 0x8c, 0x92, 0xba, 0x75, 0x2b, 0x9e, 0x12, 0xa9, 0xfc, 0x4b, 0x6e, /* Byte value: 0xc4 */ + 0x0d, 0xee, 0x64, 0xe2, 0xb7, 0x7d, 0xac, 0x53, 0x37, 0xbd, 0x67, 0x1a, 0x77, 0x8c, 0x2a, 0x20, /* Byte value: 0xc5 */ + 0x7a, 0x9d, 0xfb, 0x30, 0xed, 0xb5, 0xce, 0x6a, 0x91, 0xf7, 0x41, 0xf4, 0xaf, 0xd1, 0x05, 0x1f, /* Byte value: 0xc6 */ + 0x67, 0x4c, 0x76, 0x84, 0xfb, 0x8a, 0x9a, 0x1b, 0x6d, 0x57, 0x44, 0xce, 0x26, 0x5e, 0x68, 0xc4, /* Byte value: 0xc7 */ + 0x45, 0xec, 0xd3, 0x52, 0xc7, 0x97, 0xb6, 0xca, 0x19, 0x26, 0x1d, 0x8a, 0x76, 0x60, 0x37, 0xd6, /* Byte value: 0xc8 */ + 0x87, 0xf5, 0x01, 0x46, 0xfa, 0x90, 0x03, 0x04, 0x05, 0xf1, 0x5d, 0xcd, 0x9b, 0x4c, 0x44, 0x4f, /* Byte value: 0xc9 */ + 
0xdc, 0x79, 0xb1, 0xe7, 0x29, 0x0c, 0x10, 0x81, 0x30, 0xf8, 0x77, 0x7b, 0xdd, 0x87, 0x46, 0x97, /* Byte value: 0xca */ + 0xe8, 0x47, 0xe2, 0xe9, 0xb0, 0x3b, 0xe5, 0x0e, 0xec, 0x49, 0x28, 0x13, 0xc2, 0xf2, 0xee, 0x17, /* Byte value: 0xcb */ + 0x4f, 0xcc, 0xf2, 0x03, 0xcb, 0x2f, 0xd5, 0x4e, 0xbc, 0x82, 0xb1, 0x9e, 0x66, 0xb8, 0x24, 0x6d, /* Byte value: 0xcc */ + 0x1d, 0xd1, 0x8d, 0xb4, 0x16, 0x3f, 0x54, 0x71, 0xfc, 0xa0, 0x05, 0x3a, 0x89, 0x8f, 0x6d, 0xdb, /* Byte value: 0xcd */ + 0x4e, 0xa3, 0xa8, 0x3e, 0x74, 0x82, 0x3b, 0xe5, 0x4d, 0x46, 0x1e, 0x9c, 0xb0, 0xa4, 0xad, 0x9f, /* Byte value: 0xce */ + 0x04, 0x7f, 0xab, 0xf4, 0xb9, 0xf1, 0x3e, 0xe9, 0x42, 0x96, 0xf9, 0x08, 0xde, 0x70, 0x61, 0x4e, /* Byte value: 0xcf */ + 0xeb, 0xf6, 0x0c, 0xae, 0xb2, 0x0f, 0x14, 0x30, 0x3c, 0xc6, 0x1a, 0x15, 0x7b, 0xd6, 0xb6, 0xc2, /* Byte value: 0xd0 */ + 0xf8, 0x78, 0x0b, 0xbf, 0x11, 0x79, 0x1d, 0x2c, 0x27, 0x54, 0x4a, 0x33, 0x3c, 0xf1, 0xa9, 0xec, /* Byte value: 0xd1 */ + 0xf3, 0x37, 0x70, 0xd3, 0xa2, 0x6c, 0x90, 0x03, 0x73, 0x34, 0x49, 0x25, 0xfa, 0x35, 0x33, 0xa5, /* Byte value: 0xd2 */ + 0x3e, 0x1e, 0x72, 0x5f, 0x95, 0x8f, 0x96, 0x0b, 0x79, 0x15, 0xf3, 0x7c, 0x0f, 0xad, 0xbb, 0x3b, /* Byte value: 0xd3 */ + 0x3d, 0xaf, 0x9c, 0x18, 0x97, 0xbb, 0x67, 0x35, 0xa9, 0x9a, 0xc1, 0x7a, 0xb6, 0x89, 0xe3, 0xee, /* Byte value: 0xd4 */ + 0xbd, 0x94, 0xd8, 0xed, 0xd6, 0xee, 0xab, 0xe6, 0x3e, 0x72, 0x57, 0xb9, 0x4a, 0x91, 0x9e, 0x3a, /* Byte value: 0xd5 */ + 0x8a, 0x1b, 0x65, 0xa4, 0x4d, 0xed, 0xaf, 0x57, 0x32, 0x4c, 0x3a, 0xd7, 0xec, 0xc0, 0x6e, 0x6f, /* Byte value: 0xd6 */ + 0x88, 0xc5, 0xd1, 0xde, 0xf0, 0x74, 0xb0, 0xc2, 0x13, 0x07, 0xa7, 0xd3, 0x83, 0xf8, 0xbf, 0x48, /* Byte value: 0xd7 */ + 0xdd, 0x16, 0xeb, 0xda, 0x96, 0xa1, 0xfe, 0x2a, 0xc1, 0x3c, 0xd8, 0x79, 0x0b, 0x9b, 0xcf, 0x65, /* Byte value: 0xd8 */ + 0xcd, 0x29, 0x02, 0x8c, 0x37, 0xe3, 0x06, 0x08, 0x0a, 0x21, 0xba, 0x59, 0xf5, 0x98, 0x88, 0x9e, /* Byte value: 0xd9 */ + 0x0b, 0x4f, 0x7b, 0x6c, 0xb3, 0x15, 0x8d, 0x2f, 0x54, 0x60, 0x03, 0x16, 0xc6, 0xc4, 0x9a, 0x49, /* Byte value: 0xda */ + 0x13, 0x8e, 0x07, 0x11, 0xa3, 0x76, 0x09, 0x1c, 0x1b, 0x92, 0x50, 0x26, 0x47, 0x27, 0x1f, 0x2e, /* Byte value: 0xdb */ + 0x98, 0xfa, 0x38, 0x88, 0x51, 0x36, 0x48, 0xe0, 0xd8, 0x1a, 0xc5, 0xf3, 0x7d, 0xfb, 0xf8, 0xb3, /* Byte value: 0xdc */ + 0x02, 0xde, 0xb4, 0x7a, 0xbd, 0x99, 0x1f, 0x95, 0x21, 0x4b, 0x9d, 0x04, 0x6f, 0x38, 0xd1, 0x27, /* Byte value: 0xdd */ + 0x93, 0xb5, 0x43, 0xe4, 0xe2, 0x23, 0xc5, 0xcf, 0x8c, 0x7a, 0xc6, 0xe5, 0xbb, 0x3f, 0x62, 0xfa, /* Byte value: 0xde */ + 0x80, 0x3b, 0x44, 0xf5, 0x41, 0x55, 0xcc, 0xd3, 0x97, 0xe8, 0x96, 0xc3, 0xfc, 0x18, 0x7d, 0xd4, /* Byte value: 0xdf */ + 0x90, 0x04, 0xad, 0xa3, 0xe0, 0x17, 0x34, 0xf1, 0x5c, 0xf5, 0xf4, 0xe3, 0x02, 0x1b, 0x3a, 0x2f, /* Byte value: 0xe0 */ + 0xd0, 0xf8, 0x8f, 0x38, 0x21, 0xdc, 0x52, 0x79, 0xf6, 0x81, 0xbf, 0x63, 0x7c, 0x17, 0xe5, 0x45, /* Byte value: 0xe1 */ + 0x24, 0x01, 0xba, 0x58, 0x38, 0x75, 0x0d, 0xad, 0x17, 0xac, 0x3d, 0x48, 0xe1, 0x76, 0xef, 0x7b, /* Byte value: 0xe2 */ + 0x34, 0x3e, 0x53, 0x0e, 0x99, 0x37, 0xf5, 0x8f, 0xdc, 0xb1, 0x5f, 0x68, 0x1f, 0x75, 0xa8, 0x80, /* Byte value: 0xe3 */ + 0xcb, 0x88, 0x1d, 0x02, 0x33, 0x8b, 0x27, 0x74, 0x69, 0xfc, 0xde, 0x55, 0x44, 0xd0, 0x38, 0xf7, /* Byte value: 0xe4 */ + 0xed, 0x57, 0x13, 0x20, 0xb6, 0x67, 0x35, 0x4c, 0x5f, 0x1b, 0x7e, 0x19, 0xca, 0x9e, 0x06, 0xab, /* Byte value: 0xe5 */ + 0xf4, 0xf9, 0x35, 0x60, 0x19, 0xa9, 0x5f, 0xd4, 0xe1, 0x2d, 0x82, 0x2b, 0x9d, 0x61, 0x0a, 0x3e, /* Byte value: 0xe6 */ + 0xce, 0x98, 0xec, 0xcb, 0x35, 0xd7, 0xf7, 
0x36, 0xda, 0xae, 0x88, 0x5f, 0x4c, 0xbc, 0xd0, 0x4b, /* Byte value: 0xe7 */ + 0x99, 0x95, 0x62, 0xb5, 0xee, 0x9b, 0xa6, 0x4b, 0x29, 0xde, 0x6a, 0xf1, 0xab, 0xe7, 0x71, 0x41, /* Byte value: 0xe8 */ + 0x10, 0x3f, 0xe9, 0x56, 0xa1, 0x42, 0xf8, 0x22, 0xcb, 0x1d, 0x62, 0x20, 0xfe, 0x03, 0x47, 0xfb, /* Byte value: 0xe9 */ + 0x44, 0x83, 0x89, 0x6f, 0x78, 0x3a, 0x58, 0x61, 0xe8, 0xe2, 0xb2, 0x88, 0xa0, 0x7c, 0xbe, 0x24, /* Byte value: 0xea */ + 0x40, 0xfc, 0x22, 0x9b, 0xc1, 0xcb, 0x66, 0x88, 0xaa, 0x74, 0x4b, 0x80, 0x7e, 0x0c, 0xdf, 0x6a, /* Byte value: 0xeb */ + 0x92, 0xda, 0x19, 0xd9, 0x5d, 0x8e, 0x2b, 0x64, 0x7d, 0xbe, 0x69, 0xe7, 0x6d, 0x23, 0xeb, 0x08, /* Byte value: 0xec */ + 0x3a, 0x61, 0xd9, 0xab, 0x2c, 0x7e, 0xa8, 0xe2, 0x3b, 0x83, 0x0a, 0x74, 0xd1, 0xdd, 0xda, 0x75, /* Byte value: 0xed */ + 0x01, 0x6f, 0x5a, 0x3d, 0xbf, 0xad, 0xee, 0xab, 0xf1, 0xc4, 0xaf, 0x02, 0xd6, 0x1c, 0x89, 0xf2, /* Byte value: 0xee */ + 0x26, 0xdf, 0x0e, 0x22, 0x85, 0xec, 0x12, 0x38, 0x36, 0xe7, 0xa0, 0x4c, 0x8e, 0x4e, 0x3e, 0x5c, /* Byte value: 0xef */ + 0x12, 0xe1, 0x5d, 0x2c, 0x1c, 0xdb, 0xe7, 0xb7, 0xea, 0x56, 0xff, 0x24, 0x91, 0x3b, 0x96, 0xdc, /* Byte value: 0xf0 */ + 0x1a, 0x1f, 0xc8, 0x07, 0xad, 0xfa, 0x9b, 0xa6, 0x6e, 0xb9, 0xce, 0x34, 0xee, 0xdb, 0x54, 0x40, /* Byte value: 0xf1 */ + 0x48, 0x02, 0xb7, 0xb0, 0x70, 0xea, 0x1a, 0x99, 0x2e, 0x9b, 0x7a, 0x90, 0x01, 0xec, 0x1d, 0xf6, /* Byte value: 0xf2 */ + 0x68, 0x7c, 0xa6, 0x1c, 0xf1, 0x6e, 0x29, 0xdd, 0x7b, 0xa1, 0xbe, 0xd0, 0x3e, 0xea, 0x93, 0xc3, /* Byte value: 0xf3 */ + 0xf5, 0x96, 0x6f, 0x5d, 0xa6, 0x04, 0xb1, 0x7f, 0x10, 0xe9, 0x2d, 0x29, 0x4b, 0x7d, 0x83, 0xcc, /* Byte value: 0xf4 */ + 0x81, 0x54, 0x1e, 0xc8, 0xfe, 0xf8, 0x22, 0x78, 0x66, 0x2c, 0x39, 0xc1, 0x2a, 0x04, 0xf4, 0x26, /* Byte value: 0xf5 */ + 0x8b, 0x74, 0x3f, 0x99, 0xf2, 0x40, 0x41, 0xfc, 0xc3, 0x88, 0x95, 0xd5, 0x3a, 0xdc, 0xe7, 0x9d, /* Byte value: 0xf6 */ + 0xc7, 0x09, 0x23, 0xdd, 0x3b, 0x5b, 0x65, 0x8c, 0xaf, 0x85, 0x16, 0x4d, 0xe5, 0x40, 0x9b, 0x25, /* Byte value: 0xf7 */ + 0xd6, 0x59, 0x90, 0xb6, 0x25, 0xb4, 0x73, 0x05, 0x95, 0x5c, 0xdb, 0x6f, 0xcd, 0x5f, 0x55, 0x2c, /* Byte value: 0xf8 */ + 0x20, 0x7e, 0x11, 0xac, 0x81, 0x84, 0x33, 0x44, 0x55, 0x3a, 0xc4, 0x40, 0x3f, 0x06, 0x8e, 0x35, /* Byte value: 0xf9 */ + 0x0a, 0x20, 0x21, 0x51, 0x0c, 0xb8, 0x63, 0x84, 0xa5, 0xa4, 0xac, 0x14, 0x10, 0xd8, 0x13, 0xbb, /* Byte value: 0xfa */ + 0x08, 0xfe, 0x95, 0x2b, 0xb1, 0x21, 0x7c, 0x11, 0x84, 0xef, 0x31, 0x10, 0x7f, 0xe0, 0xc2, 0x9c, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0x4c, 0x7d, 0x1c, 0x44, 0xc9, 0x1b, 0x24, 0x70, 0x6c, 0x0d, 0x83, 0x98, 0xdf, 0x9c, 0x7c, 0xb8, /* Byte value: 0xfd */ + 0xd7, 0x36, 0xca, 0x8b, 0x9a, 0x19, 0x9d, 0xae, 0x64, 0x98, 0x74, 0x6d, 0x1b, 0x43, 0xdc, 0xde, /* Byte value: 0xfe */ + 0x74, 0xc2, 0x71, 0x95, 0x58, 0xfc, 0x93, 0x07, 0x76, 0xc5, 0x14, 0xe8, 0x61, 0x79, 0x77, 0xea, /* Byte value: 0xff */ + + /* Matrix row: 10 */ + 0x51, 0xf6, 0x5f, 0xcc, 0xc9, 0xbb, 0x4b, 0xca, 0x7e, 0x52, 0xea, 0x5b, 0x67, 0xa2, 0xef, 0xe6, /* Byte value: 0x00 */ + 0x33, 0xae, 0xa5, 0x03, 0xf8, 0x72, 0xcf, 0x10, 0xac, 0xdb, 0x41, 0x06, 0xb8, 0x66, 0xba, 0x67, /* Byte value: 0x01 */ + 0xa4, 0x6a, 0x15, 0x17, 0x52, 0x6f, 0xc2, 0xd0, 0x70, 0x26, 0xcb, 0x2e, 0x94, 0x8b, 0x8e, 0x86, /* Byte value: 0x02 */ + 0x67, 0xde, 0xe5, 0xa5, 0xd2, 0x7e, 0xfa, 0xff, 0x18, 0x4a, 0x19, 0x89, 0xa8, 0xce, 0x6e, 0xad, /* Byte value: 0x03 */ + 0x0a, 0xcf, 0x3e, 0xd4, 0x05, 0xad, 0xfc, 0x4a, 0x57, 0x45, 
0xa7, 0x6b, 0xee, 0x14, 0x76, 0x58, /* Byte value: 0x04 */ + 0xe7, 0x84, 0x9d, 0xfc, 0x92, 0x97, 0xe6, 0x09, 0x2c, 0x7c, 0x70, 0x3b, 0xb6, 0x0d, 0x1e, 0x59, /* Byte value: 0x05 */ + 0x28, 0xba, 0xf8, 0xd6, 0x14, 0xf1, 0x76, 0xeb, 0x9f, 0xd7, 0xd9, 0x6f, 0x3e, 0x50, 0x1b, 0xa3, /* Byte value: 0x06 */ + 0x1a, 0x55, 0x31, 0x76, 0x0d, 0x19, 0x1e, 0x24, 0xb0, 0x33, 0x03, 0xec, 0x9d, 0x34, 0x78, 0xa7, /* Byte value: 0x07 */ + 0x6c, 0x50, 0xb7, 0xd2, 0x36, 0x49, 0xa1, 0x6a, 0xcc, 0x30, 0x25, 0x67, 0x5d, 0xd8, 0xc1, 0x96, /* Byte value: 0x08 */ + 0x65, 0x5c, 0x3d, 0x20, 0xd3, 0x89, 0x77, 0x82, 0xdd, 0x34, 0xec, 0x40, 0x9e, 0xca, 0x1f, 0x6b, /* Byte value: 0x09 */ + 0xfa, 0xd5, 0x6b, 0x65, 0x7d, 0xce, 0x0b, 0x75, 0x93, 0xf2, 0x34, 0xca, 0x6a, 0x37, 0x2c, 0x14, /* Byte value: 0x0a */ + 0xe8, 0xcd, 0xbc, 0x42, 0x74, 0x8d, 0x64, 0x66, 0xb1, 0xfa, 0x65, 0x84, 0x2f, 0x13, 0x53, 0x2d, /* Byte value: 0x0b */ + 0xac, 0x27, 0xf3, 0x46, 0x56, 0x35, 0xb3, 0xe7, 0xe2, 0x1d, 0x99, 0x8c, 0x4c, 0x9b, 0x89, 0x18, /* Byte value: 0x0c */ + 0x5a, 0x78, 0x0d, 0xbb, 0x2d, 0x8c, 0x10, 0x5f, 0xaa, 0x28, 0xd6, 0xb5, 0x92, 0xb4, 0x40, 0xdd, /* Byte value: 0x0d */ + 0xa9, 0xa1, 0xec, 0x2c, 0xb5, 0x82, 0xcd, 0xc2, 0x28, 0xde, 0x2b, 0x58, 0x3b, 0x91, 0xb2, 0x34, /* Byte value: 0x0e */ + 0x30, 0x6d, 0x11, 0x25, 0x18, 0x1f, 0xe5, 0xb2, 0xea, 0x9a, 0x2f, 0x4a, 0x95, 0x60, 0x12, 0xc2, /* Byte value: 0x0f */ + 0x8b, 0xd4, 0x2a, 0x2e, 0xa4, 0xde, 0x47, 0x63, 0xe0, 0x4c, 0x55, 0x5c, 0xeb, 0xd5, 0xdf, 0xcf, /* Byte value: 0x10 */ + 0x83, 0x99, 0xcc, 0x7f, 0xa0, 0x84, 0x36, 0x54, 0x72, 0x77, 0x07, 0xfe, 0x33, 0xc5, 0xd8, 0x51, /* Byte value: 0x11 */ + 0xef, 0xc9, 0x7b, 0xad, 0x96, 0xcd, 0x97, 0x3e, 0xbe, 0x47, 0x22, 0x99, 0x6e, 0x1d, 0x19, 0xc7, /* Byte value: 0x12 */ + 0x66, 0x9f, 0x89, 0x06, 0x33, 0xe4, 0x5d, 0x20, 0x9b, 0x75, 0x82, 0x0c, 0xb3, 0xcc, 0xb7, 0xce, /* Byte value: 0x13 */ + 0x14, 0x5d, 0x7c, 0x6b, 0x0a, 0x99, 0x3b, 0x94, 0xae, 0x8a, 0x8d, 0xd6, 0x1f, 0x28, 0xec, 0xb0, /* Byte value: 0x14 */ + 0x0d, 0xcb, 0xf9, 0x3b, 0xe7, 0xed, 0x0f, 0x12, 0x58, 0xf8, 0xe0, 0x76, 0xaf, 0x1a, 0x3c, 0xb2, /* Byte value: 0x15 */ + 0x05, 0x86, 0x1f, 0x6a, 0xe3, 0xb7, 0x7e, 0x25, 0xca, 0xc3, 0xb2, 0xd4, 0x77, 0x0a, 0x3b, 0x2c, /* Byte value: 0x16 */ + 0x92, 0x42, 0xaf, 0x7e, 0x49, 0xaa, 0x73, 0xe5, 0x16, 0x3e, 0x38, 0xfc, 0x5b, 0xe7, 0x0f, 0xcd, /* Byte value: 0x17 */ + 0x7a, 0x8f, 0x13, 0x3c, 0x3d, 0x27, 0x17, 0x83, 0xa7, 0xc4, 0x5d, 0x78, 0x74, 0xf4, 0x5c, 0xe0, /* Byte value: 0x18 */ + 0x13, 0x59, 0xbb, 0x84, 0xe8, 0xd9, 0xc8, 0xcc, 0xa1, 0x37, 0xca, 0xcb, 0x5e, 0x26, 0xa6, 0x5a, /* Byte value: 0x19 */ + 0xd8, 0xa0, 0xad, 0x67, 0x6c, 0x92, 0x81, 0xd4, 0x5b, 0x60, 0x4a, 0xce, 0xba, 0x73, 0x41, 0xef, /* Byte value: 0x1a */ + 0x87, 0x5e, 0xbf, 0xb6, 0xa2, 0xa9, 0xef, 0xae, 0x3b, 0x8b, 0x2e, 0xaf, 0x5f, 0xcd, 0x3a, 0x1e, /* Byte value: 0x1b */ + 0xb5, 0xb1, 0x76, 0x16, 0xbb, 0x41, 0x87, 0x61, 0x14, 0x6f, 0xf4, 0x2c, 0xfc, 0xa9, 0x59, 0x1a, /* Byte value: 0x1c */ + 0xdf, 0xa4, 0x6a, 0x88, 0x8e, 0xd2, 0x72, 0x8c, 0x54, 0xdd, 0x0d, 0xd3, 0xfb, 0x7d, 0x0b, 0x05, /* Byte value: 0x1d */ + 0x91, 0x81, 0x1b, 0x58, 0xa9, 0xc7, 0x59, 0x47, 0x50, 0x7f, 0x56, 0xb0, 0x76, 0xe1, 0xa7, 0x68, /* Byte value: 0x1e */ + 0x2d, 0x3c, 0xe7, 0xbc, 0xf7, 0x46, 0x08, 0xce, 0x55, 0x14, 0x6b, 0xbb, 0x49, 0x5a, 0x20, 0x8f, /* Byte value: 0x1f */ + 0xa0, 0xad, 0x66, 0xde, 0x50, 0x42, 0x1b, 0x2a, 0x39, 0xda, 0xe2, 0x7f, 0xf8, 0x83, 0x6c, 0xc9, /* Byte value: 0x20 */ + 0xbf, 0x7e, 0x48, 0xc2, 0xbe, 0xec, 0x7b, 0x2b, 0x43, 0x2a, 0x53, 0x47, 0x12, 0xbd, 0x2f, 0x42, /* Byte 
value: 0x21 */ + 0x24, 0x30, 0x6d, 0x4e, 0x12, 0x86, 0xde, 0x26, 0x44, 0x10, 0xa2, 0x9c, 0x8a, 0x48, 0xfe, 0x72, /* Byte value: 0x22 */ + 0x49, 0x21, 0xb6, 0x3f, 0xc5, 0x55, 0xd8, 0x93, 0x0b, 0x1f, 0x1c, 0x7e, 0xcc, 0x92, 0xe6, 0x87, /* Byte value: 0x23 */ + 0xee, 0x88, 0x17, 0x0e, 0x77, 0x57, 0x30, 0xe1, 0x3d, 0x78, 0xb9, 0x1c, 0x75, 0x1f, 0xc0, 0xa4, /* Byte value: 0x24 */ + 0x2c, 0x7d, 0x8b, 0x1f, 0x16, 0xdc, 0xaf, 0x11, 0xd6, 0x2b, 0xf0, 0x3e, 0x52, 0x58, 0xf9, 0xec, /* Byte value: 0x25 */ + 0x1d, 0x51, 0xf6, 0x99, 0xef, 0x59, 0xed, 0x7c, 0xbf, 0x8e, 0x44, 0xf1, 0xdc, 0x3a, 0x32, 0x4d, /* Byte value: 0x26 */ + 0xcc, 0xfd, 0xd1, 0x0c, 0x66, 0x0b, 0xba, 0x40, 0xf5, 0xea, 0xc7, 0x18, 0xa5, 0x5b, 0xad, 0x5f, /* Byte value: 0x27 */ + 0x7c, 0xca, 0xb8, 0x70, 0x3e, 0xfd, 0x43, 0x04, 0x2b, 0x46, 0x81, 0xe0, 0x2e, 0xf8, 0xcf, 0x69, /* Byte value: 0x28 */ + 0x8e, 0x52, 0x35, 0x44, 0x47, 0x69, 0x39, 0x46, 0x2a, 0x8f, 0xe7, 0x88, 0x9c, 0xdf, 0xe4, 0xe3, /* Byte value: 0x29 */ + 0xb8, 0x7a, 0x8f, 0x2d, 0x5c, 0xac, 0x88, 0x73, 0x4c, 0x97, 0x14, 0x5a, 0x53, 0xb3, 0x65, 0xa8, /* Byte value: 0x2a */ + 0xe6, 0xc5, 0xf1, 0x5f, 0x73, 0x0d, 0x41, 0xd6, 0xaf, 0x43, 0xeb, 0xbe, 0xad, 0x0f, 0xc7, 0x3a, /* Byte value: 0x2b */ + 0xf4, 0xdd, 0x26, 0x78, 0x7a, 0x4e, 0x2e, 0xc5, 0x8d, 0x4b, 0xba, 0xf0, 0xe8, 0x2b, 0xb8, 0x03, /* Byte value: 0x2c */ + 0x26, 0xb2, 0xb5, 0xcb, 0x13, 0x71, 0x53, 0x5b, 0x81, 0x6e, 0x57, 0x55, 0xbc, 0x4c, 0x8f, 0xb4, /* Byte value: 0x2d */ + 0x1b, 0x14, 0x5d, 0xd5, 0xec, 0x83, 0xb9, 0xfb, 0x33, 0x0c, 0x98, 0x69, 0x86, 0x36, 0xa1, 0xc4, /* Byte value: 0x2e */ + 0x4f, 0x64, 0x1d, 0x73, 0xc6, 0x8f, 0x8c, 0x14, 0x87, 0x9d, 0xc0, 0xe6, 0x96, 0x9e, 0x75, 0x0e, /* Byte value: 0x2f */ + 0xd5, 0x6b, 0x54, 0x5c, 0x8b, 0x7f, 0x8e, 0xc6, 0x03, 0x98, 0xaa, 0xb8, 0x15, 0x69, 0x7d, 0x5d, /* Byte value: 0x30 */ + 0xc5, 0xf1, 0x5b, 0xfe, 0x83, 0xcb, 0x6c, 0xa8, 0xe4, 0xee, 0x0e, 0x3f, 0x66, 0x49, 0x73, 0xa2, /* Byte value: 0x31 */ + 0x50, 0xb7, 0x33, 0x6f, 0x28, 0x21, 0xec, 0x15, 0xfd, 0x6d, 0x71, 0xde, 0x7c, 0xa0, 0x36, 0x85, /* Byte value: 0x32 */ + 0x79, 0x4c, 0xa7, 0x1a, 0xdd, 0x4a, 0x3d, 0x21, 0xe1, 0x85, 0x33, 0x34, 0x59, 0xf2, 0xf4, 0x45, /* Byte value: 0x33 */ + 0x77, 0x44, 0xea, 0x07, 0xda, 0xca, 0x18, 0x91, 0xff, 0x3c, 0xbd, 0x0e, 0xdb, 0xee, 0x60, 0x52, /* Byte value: 0x34 */ + 0x16, 0xdf, 0xa4, 0xee, 0x0b, 0x6e, 0xb6, 0xe9, 0x6b, 0xf4, 0x78, 0x1f, 0x29, 0x2c, 0x9d, 0x76, /* Byte value: 0x35 */ + 0x52, 0x35, 0xeb, 0xea, 0x29, 0xd6, 0x61, 0x68, 0x38, 0x13, 0x84, 0x17, 0x4a, 0xa4, 0x47, 0x43, /* Byte value: 0x36 */ + 0x15, 0x1c, 0x10, 0xc8, 0xeb, 0x03, 0x9c, 0x4b, 0x2d, 0xb5, 0x16, 0x53, 0x04, 0x2a, 0x35, 0xd3, /* Byte value: 0x37 */ + 0x9b, 0x4e, 0x25, 0x8c, 0xac, 0x6a, 0xa5, 0x0d, 0x07, 0x3a, 0xf1, 0xdb, 0x98, 0xf5, 0xd1, 0x30, /* Byte value: 0x38 */ + 0xb4, 0xf0, 0x1a, 0xb5, 0x5a, 0xdb, 0x20, 0xbe, 0x97, 0x50, 0x6f, 0xa9, 0xe7, 0xab, 0x80, 0x79, /* Byte value: 0x39 */ + 0x3e, 0x65, 0x5c, 0x38, 0x1f, 0x9f, 0xc0, 0x02, 0xf4, 0x23, 0xa1, 0x70, 0x17, 0x7c, 0x86, 0xd5, /* Byte value: 0x3a */ + 0x80, 0x5a, 0x78, 0x59, 0x40, 0xe9, 0x1c, 0xf6, 0x34, 0x36, 0x69, 0xb2, 0x1e, 0xc3, 0x70, 0xf4, /* Byte value: 0x3b */ + 0x36, 0x28, 0xba, 0x69, 0x1b, 0xc5, 0xb1, 0x35, 0x66, 0x18, 0xf3, 0xd2, 0xcf, 0x6c, 0x81, 0x4b, /* Byte value: 0x3c */ + 0xd3, 0x2e, 0xff, 0x10, 0x88, 0xa5, 0xda, 0x41, 0x8f, 0x1a, 0x76, 0x20, 0x4f, 0x65, 0xee, 0xd4, /* Byte value: 0x3d */ + 0x2b, 0x79, 0x4c, 0xf0, 0xf4, 0x9c, 0x5c, 0x49, 0xd9, 0x96, 0xb7, 0x23, 0x13, 0x56, 0xb3, 0x06, /* Byte value: 0x3e */ + 0x01, 0x41, 0x6c, 0xa3, 
0xe1, 0x9a, 0xa7, 0xdf, 0x83, 0x3f, 0x9b, 0x85, 0x1b, 0x02, 0xd9, 0x63, /* Byte value: 0x3f */ + 0x99, 0xcc, 0xfd, 0x09, 0xad, 0x9d, 0x28, 0x70, 0xc2, 0x44, 0x04, 0x12, 0xae, 0xf1, 0xa0, 0xf6, /* Byte value: 0x40 */ + 0xdd, 0x26, 0xb2, 0x0d, 0x8f, 0x25, 0xff, 0xf1, 0x91, 0xa3, 0xf8, 0x1a, 0xcd, 0x79, 0x7a, 0xc3, /* Byte value: 0x41 */ + 0xf6, 0x5f, 0xfe, 0xfd, 0x7b, 0xb9, 0xa3, 0xb8, 0x48, 0x35, 0x4f, 0x39, 0xde, 0x2f, 0xc9, 0xc5, /* Byte value: 0x42 */ + 0x68, 0x97, 0xc4, 0x1b, 0x34, 0x64, 0x78, 0x90, 0x85, 0xcc, 0x0c, 0x36, 0x31, 0xd0, 0x23, 0xd9, /* Byte value: 0x43 */ + 0x12, 0x18, 0xd7, 0x27, 0x09, 0x43, 0x6f, 0x13, 0x22, 0x08, 0x51, 0x4e, 0x45, 0x24, 0x7f, 0x39, /* Byte value: 0x44 */ + 0x8a, 0x95, 0x46, 0x8d, 0x45, 0x44, 0xe0, 0xbc, 0x63, 0x73, 0xce, 0xd9, 0xf0, 0xd7, 0x06, 0xac, /* Byte value: 0x45 */ + 0x09, 0x0c, 0x8a, 0xf2, 0xe5, 0xc0, 0xd6, 0xe8, 0x11, 0x04, 0xc9, 0x27, 0xc3, 0x12, 0xde, 0xfd, /* Byte value: 0x46 */ + 0xa6, 0xe8, 0xcd, 0x92, 0x53, 0x98, 0x4f, 0xad, 0xb5, 0x58, 0x3e, 0xe7, 0xa2, 0x8f, 0xff, 0x40, /* Byte value: 0x47 */ + 0x72, 0xc2, 0xf5, 0x6d, 0x39, 0x7d, 0x66, 0xb4, 0x35, 0xff, 0x0f, 0xda, 0xac, 0xe4, 0x5b, 0x7e, /* Byte value: 0x48 */ + 0xc4, 0xb0, 0x37, 0x5d, 0x62, 0x51, 0xcb, 0x77, 0x67, 0xd1, 0x95, 0xba, 0x7d, 0x4b, 0xaa, 0xc1, /* Byte value: 0x49 */ + 0x3a, 0xa2, 0x2f, 0xf1, 0x1d, 0xb2, 0x19, 0xf8, 0xbd, 0xdf, 0x88, 0x21, 0x7b, 0x74, 0x64, 0x9a, /* Byte value: 0x4a */ + 0x74, 0x87, 0x5e, 0x21, 0x3a, 0xa7, 0x32, 0x33, 0xb9, 0x7d, 0xd3, 0x42, 0xf6, 0xe8, 0xc8, 0xf7, /* Byte value: 0x4b */ + 0xc1, 0x36, 0x28, 0x37, 0x81, 0xe6, 0xb5, 0x52, 0xad, 0x12, 0x27, 0x6e, 0x0a, 0x41, 0x91, 0xed, /* Byte value: 0x4c */ + 0xca, 0xb8, 0x7a, 0x40, 0x65, 0xd1, 0xee, 0xc7, 0x79, 0x68, 0x1b, 0x80, 0xff, 0x57, 0x3e, 0xd6, /* Byte value: 0x4d */ + 0xb9, 0x3b, 0xe3, 0x8e, 0xbd, 0x36, 0x2f, 0xac, 0xcf, 0xa8, 0x8f, 0xdf, 0x48, 0xb1, 0xbc, 0xcb, /* Byte value: 0x4e */ + 0x70, 0x40, 0x2d, 0xe8, 0x38, 0x8a, 0xeb, 0xc9, 0xf0, 0x81, 0xfa, 0x13, 0x9a, 0xe0, 0x2a, 0xb8, /* Byte value: 0x4f */ + 0xbc, 0xbd, 0xfc, 0xe4, 0x5e, 0x81, 0x51, 0x89, 0x05, 0x6b, 0x3d, 0x0b, 0x3f, 0xbb, 0x87, 0xe7, /* Byte value: 0x50 */ + 0x34, 0xaa, 0x62, 0xec, 0x1a, 0x32, 0x3c, 0x48, 0xa3, 0x66, 0x06, 0x1b, 0xf9, 0x68, 0xf0, 0x8d, /* Byte value: 0x51 */ + 0x6a, 0x15, 0x1c, 0x9e, 0x35, 0x93, 0xf5, 0xed, 0x40, 0xb2, 0xf9, 0xff, 0x07, 0xd4, 0x52, 0x1f, /* Byte value: 0x52 */ + 0x48, 0x60, 0xda, 0x9c, 0x24, 0xcf, 0x7f, 0x4c, 0x88, 0x20, 0x87, 0xfb, 0xd7, 0x90, 0x3f, 0xe4, /* Byte value: 0x53 */ + 0x5f, 0xfe, 0x12, 0xd1, 0xce, 0x3b, 0x6e, 0x7a, 0x60, 0xeb, 0x64, 0x61, 0xe5, 0xbe, 0x7b, 0xf1, /* Byte value: 0x54 */ + 0x8f, 0x13, 0x59, 0xe7, 0xa6, 0xf3, 0x9e, 0x99, 0xa9, 0xb0, 0x7c, 0x0d, 0x87, 0xdd, 0x3d, 0x80, /* Byte value: 0x55 */ + 0x69, 0xd6, 0xa8, 0xb8, 0xd5, 0xfe, 0xdf, 0x4f, 0x06, 0xf3, 0x97, 0xb3, 0x2a, 0xd2, 0xfa, 0xba, /* Byte value: 0x56 */ + 0x9a, 0x0f, 0x49, 0x2f, 0x4d, 0xf0, 0x02, 0xd2, 0x84, 0x05, 0x6a, 0x5e, 0x83, 0xf7, 0x08, 0x53, /* Byte value: 0x57 */ + 0xda, 0x22, 0x75, 0xe2, 0x6d, 0x65, 0x0c, 0xa9, 0x9e, 0x1e, 0xbf, 0x07, 0x8c, 0x77, 0x30, 0x29, /* Byte value: 0x58 */ + 0xfd, 0xd1, 0xac, 0x8a, 0x9f, 0x8e, 0xf8, 0x2d, 0x9c, 0x4f, 0x73, 0xd7, 0x2b, 0x39, 0x66, 0xfe, /* Byte value: 0x59 */ + 0x39, 0x61, 0x9b, 0xd7, 0xfd, 0xdf, 0x33, 0x5a, 0xfb, 0x9e, 0xe6, 0x6d, 0x56, 0x72, 0xcc, 0x3f, /* Byte value: 0x5a */ + 0xa5, 0x2b, 0x79, 0xb4, 0xb3, 0xf5, 0x65, 0x0f, 0xf3, 0x19, 0x50, 0xab, 0x8f, 0x89, 0x57, 0xe5, /* Byte value: 0x5b */ + 0xfc, 0x90, 0xc0, 0x29, 0x7e, 0x14, 0x5f, 0xf2, 0x1f, 0x70, 0xe8, 
0x52, 0x30, 0x3b, 0xbf, 0x9d, /* Byte value: 0x5c */ + 0xe0, 0x80, 0x5a, 0x13, 0x70, 0xd7, 0x15, 0x51, 0x23, 0xc1, 0x37, 0x26, 0xf7, 0x03, 0x54, 0xb3, /* Byte value: 0x5d */ + 0x0c, 0x8a, 0x95, 0x98, 0x06, 0x77, 0xa8, 0xcd, 0xdb, 0xc7, 0x7b, 0xf3, 0xb4, 0x18, 0xe5, 0xd1, /* Byte value: 0x5e */ + 0x57, 0xb3, 0xf4, 0x80, 0xca, 0x61, 0x1f, 0x4d, 0xf2, 0xd0, 0x36, 0xc3, 0x3d, 0xae, 0x7c, 0x6f, /* Byte value: 0x5f */ + 0x54, 0x70, 0x40, 0xa6, 0x2a, 0x0c, 0x35, 0xef, 0xb4, 0x91, 0x58, 0x8f, 0x10, 0xa8, 0xd4, 0xca, /* Byte value: 0x60 */ + 0x8d, 0x91, 0x81, 0x62, 0xa7, 0x04, 0x13, 0xe4, 0x6c, 0xce, 0x89, 0xc4, 0xb1, 0xd9, 0x4c, 0x46, /* Byte value: 0x61 */ + 0xd2, 0x6f, 0x93, 0xb3, 0x69, 0x3f, 0x7d, 0x9e, 0x0c, 0x25, 0xed, 0xa5, 0x54, 0x67, 0x37, 0xb7, /* Byte value: 0x62 */ + 0xf7, 0x1e, 0x92, 0x5e, 0x9a, 0x23, 0x04, 0x67, 0xcb, 0x0a, 0xd4, 0xbc, 0xc5, 0x2d, 0x10, 0xa6, /* Byte value: 0x63 */ + 0x5e, 0xbf, 0x7e, 0x72, 0x2f, 0xa1, 0xc9, 0xa5, 0xe3, 0xd4, 0xff, 0xe4, 0xfe, 0xbc, 0xa2, 0x92, /* Byte value: 0x64 */ + 0xf9, 0x16, 0xdf, 0x43, 0x9d, 0xa3, 0x21, 0xd7, 0xd5, 0xb3, 0x5a, 0x86, 0x47, 0x31, 0x84, 0xb1, /* Byte value: 0x65 */ + 0xf2, 0x98, 0x8d, 0x34, 0x79, 0x94, 0x7a, 0x42, 0x01, 0xc9, 0x66, 0x68, 0xb2, 0x27, 0x2b, 0x8a, /* Byte value: 0x66 */ + 0xe3, 0x43, 0xee, 0x35, 0x90, 0xba, 0x3f, 0xf3, 0x65, 0x80, 0x59, 0x6a, 0xda, 0x05, 0xfc, 0x16, /* Byte value: 0x67 */ + 0x3b, 0xe3, 0x43, 0x52, 0xfc, 0x28, 0xbe, 0x27, 0x3e, 0xe0, 0x13, 0xa4, 0x60, 0x76, 0xbd, 0xf9, /* Byte value: 0x68 */ + 0x03, 0xc3, 0xb4, 0x26, 0xe0, 0x6d, 0x2a, 0xa2, 0x46, 0x41, 0x6e, 0x4c, 0x2d, 0x06, 0xa8, 0xa5, /* Byte value: 0x69 */ + 0xa1, 0xec, 0x0a, 0x7d, 0xb1, 0xd8, 0xbc, 0xf5, 0xba, 0xe5, 0x79, 0xfa, 0xe3, 0x81, 0xb5, 0xaa, /* Byte value: 0x6a */ + 0x96, 0x85, 0xdc, 0xb7, 0x4b, 0x87, 0xaa, 0x1f, 0x5f, 0xc2, 0x11, 0xad, 0x37, 0xef, 0xed, 0x82, /* Byte value: 0x6b */ + 0xaa, 0x62, 0x58, 0x0a, 0x55, 0xef, 0xe7, 0x60, 0x6e, 0x9f, 0x45, 0x14, 0x16, 0x97, 0x1a, 0x91, /* Byte value: 0x6c */ + 0x9f, 0x89, 0x56, 0x45, 0xae, 0x47, 0x7c, 0xf7, 0x4e, 0xc6, 0xd8, 0x8a, 0xf4, 0xfd, 0x33, 0x7f, /* Byte value: 0x6d */ + 0x46, 0x68, 0x97, 0x81, 0x23, 0x4f, 0x5a, 0xfc, 0x96, 0x99, 0x09, 0xc1, 0x55, 0x8c, 0xab, 0xf3, /* Byte value: 0x6e */ + 0x41, 0x6c, 0x50, 0x6e, 0xc1, 0x0f, 0xa9, 0xa4, 0x99, 0x24, 0x4e, 0xdc, 0x14, 0x82, 0xe1, 0x19, /* Byte value: 0x6f */ + 0x2a, 0x38, 0x20, 0x53, 0x15, 0x06, 0xfb, 0x96, 0x5a, 0xa9, 0x2c, 0xa6, 0x08, 0x54, 0x6a, 0x65, /* Byte value: 0x70 */ + 0x60, 0xda, 0x22, 0x4a, 0x30, 0x3e, 0x09, 0xa7, 0x17, 0xf7, 0x5e, 0x94, 0xe9, 0xc0, 0x24, 0x47, /* Byte value: 0x71 */ + 0xe5, 0x06, 0x45, 0x79, 0x93, 0x60, 0x6b, 0x74, 0xe9, 0x02, 0x85, 0xf2, 0x80, 0x09, 0x6f, 0x9f, /* Byte value: 0x72 */ + 0x94, 0x07, 0x04, 0x32, 0x4a, 0x70, 0x27, 0x62, 0x9a, 0xbc, 0xe4, 0x64, 0x01, 0xeb, 0x9c, 0x44, /* Byte value: 0x73 */ + 0xbe, 0x3f, 0x24, 0x61, 0x5f, 0x76, 0xdc, 0xf4, 0xc0, 0x15, 0xc8, 0xc2, 0x09, 0xbf, 0xf6, 0x21, /* Byte value: 0x74 */ + 0x90, 0xc0, 0x77, 0xfb, 0x48, 0x5d, 0xfe, 0x98, 0xd3, 0x40, 0xcd, 0x35, 0x6d, 0xe3, 0x7e, 0x0b, /* Byte value: 0x75 */ + 0x2f, 0xbe, 0x3f, 0x39, 0xf6, 0xb1, 0x85, 0xb3, 0x90, 0x6a, 0x9e, 0x72, 0x7f, 0x5e, 0x51, 0x49, /* Byte value: 0x76 */ + 0x9d, 0x0b, 0x8e, 0xc0, 0xaf, 0xb0, 0xf1, 0x8a, 0x8b, 0xb8, 0x2d, 0x43, 0xc2, 0xf9, 0x42, 0xb9, /* Byte value: 0x77 */ + 0x81, 0x1b, 0x14, 0xfa, 0xa1, 0x73, 0xbb, 0x29, 0xb7, 0x09, 0xf2, 0x37, 0x05, 0xc1, 0xa9, 0x97, /* Byte value: 0x78 */ + 0xa2, 0x2f, 0xbe, 0x5b, 0x51, 0xb5, 0x96, 0x57, 0xfc, 0xa4, 0x17, 0xb6, 0xce, 0x87, 0x1d, 0x0f, /* Byte value: 
0x79 */ + 0x76, 0x05, 0x86, 0xa4, 0x3b, 0x50, 0xbf, 0x4e, 0x7c, 0x03, 0x26, 0x8b, 0xc0, 0xec, 0xb9, 0x31, /* Byte value: 0x7a */ + 0x4b, 0xa3, 0x6e, 0xba, 0xc4, 0xa2, 0x55, 0xee, 0xce, 0x61, 0xe9, 0xb7, 0xfa, 0x96, 0x97, 0x41, /* Byte value: 0x7b */ + 0xe4, 0x47, 0x29, 0xda, 0x72, 0xfa, 0xcc, 0xab, 0x6a, 0x3d, 0x1e, 0x77, 0x9b, 0x0b, 0xb6, 0xfc, /* Byte value: 0x7c */ + 0x06, 0x45, 0xab, 0x4c, 0x03, 0xda, 0x54, 0x87, 0x8c, 0x82, 0xdc, 0x98, 0x5a, 0x0c, 0x93, 0x89, /* Byte value: 0x7d */ + 0x55, 0x31, 0x2c, 0x05, 0xcb, 0x96, 0x92, 0x30, 0x37, 0xae, 0xc3, 0x0a, 0x0b, 0xaa, 0x0d, 0xa9, /* Byte value: 0x7e */ + 0xae, 0xa5, 0x2b, 0xc3, 0x57, 0xc2, 0x3e, 0x9a, 0x27, 0x63, 0x6c, 0x45, 0x7a, 0x9f, 0xf8, 0xde, /* Byte value: 0x7f */ + 0x1c, 0x10, 0x9a, 0x3a, 0x0e, 0xc3, 0x4a, 0xa3, 0x3c, 0xb1, 0xdf, 0x74, 0xc7, 0x38, 0xeb, 0x2e, /* Byte value: 0x80 */ + 0x75, 0xc6, 0x32, 0x82, 0xdb, 0x3d, 0x95, 0xec, 0x3a, 0x42, 0x48, 0xc7, 0xed, 0xea, 0x11, 0x94, /* Byte value: 0x81 */ + 0x5b, 0x39, 0x61, 0x18, 0xcc, 0x16, 0xb7, 0x80, 0x29, 0x17, 0x4d, 0x30, 0x89, 0xb6, 0x99, 0xbe, /* Byte value: 0x82 */ + 0x04, 0xc7, 0x73, 0xc9, 0x02, 0x2d, 0xd9, 0xfa, 0x49, 0xfc, 0x29, 0x51, 0x6c, 0x08, 0xe2, 0x4f, /* Byte value: 0x83 */ + 0x27, 0xf3, 0xd9, 0x68, 0xf2, 0xeb, 0xf4, 0x84, 0x02, 0x51, 0xcc, 0xd0, 0xa7, 0x4e, 0x56, 0xd7, /* Byte value: 0x84 */ + 0x4d, 0xe6, 0xc5, 0xf6, 0xc7, 0x78, 0x01, 0x69, 0x42, 0xe3, 0x35, 0x2f, 0xa0, 0x9a, 0x04, 0xc8, /* Byte value: 0x85 */ + 0x9c, 0x4a, 0xe2, 0x63, 0x4e, 0x2a, 0x56, 0x55, 0x08, 0x87, 0xb6, 0xc6, 0xd9, 0xfb, 0x9b, 0xda, /* Byte value: 0x86 */ + 0x20, 0xf7, 0x1e, 0x87, 0x10, 0xab, 0x07, 0xdc, 0x0d, 0xec, 0x8b, 0xcd, 0xe6, 0x40, 0x1c, 0x3d, /* Byte value: 0x87 */ + 0x53, 0x74, 0x87, 0x49, 0xc8, 0x4c, 0xc6, 0xb7, 0xbb, 0x2c, 0x1f, 0x92, 0x51, 0xa6, 0x9e, 0x20, /* Byte value: 0x88 */ + 0xd0, 0xed, 0x4b, 0x36, 0x68, 0xc8, 0xf0, 0xe3, 0xc9, 0x5b, 0x18, 0x6c, 0x62, 0x63, 0x46, 0x71, /* Byte value: 0x89 */ + 0x95, 0x46, 0x68, 0x91, 0xab, 0xea, 0x80, 0xbd, 0x19, 0x83, 0x7f, 0xe1, 0x1a, 0xe9, 0x45, 0x27, /* Byte value: 0x8a */ + 0xf3, 0xd9, 0xe1, 0x97, 0x98, 0x0e, 0xdd, 0x9d, 0x82, 0xf6, 0xfd, 0xed, 0xa9, 0x25, 0xf2, 0xe9, /* Byte value: 0x8b */ + 0x85, 0xdc, 0x67, 0x33, 0xa3, 0x5e, 0x62, 0xd3, 0xfe, 0xf5, 0xdb, 0x66, 0x69, 0xc9, 0x4b, 0xd8, /* Byte value: 0x8c */ + 0x88, 0x17, 0x9e, 0x08, 0x44, 0xb3, 0x6d, 0xc1, 0xa6, 0x0d, 0x3b, 0x10, 0xc6, 0xd3, 0x77, 0x6a, /* Byte value: 0x8d */ + 0xb0, 0x37, 0x69, 0x7c, 0x58, 0xf6, 0xf9, 0x44, 0xde, 0xac, 0x46, 0xf8, 0x8b, 0xa3, 0x62, 0x36, /* Byte value: 0x8e */ + 0x22, 0x75, 0xc6, 0x02, 0x11, 0x5c, 0x8a, 0xa1, 0xc8, 0x92, 0x7e, 0x04, 0xd0, 0x44, 0x6d, 0xfb, /* Byte value: 0x8f */ + 0x0b, 0x8e, 0x52, 0x77, 0xe4, 0x37, 0x5b, 0x95, 0xd4, 0x7a, 0x3c, 0xee, 0xf5, 0x16, 0xaf, 0x3b, /* Byte value: 0x90 */ + 0x02, 0x82, 0xd8, 0x85, 0x01, 0xf7, 0x8d, 0x7d, 0xc5, 0x7e, 0xf5, 0xc9, 0x36, 0x04, 0x71, 0xc6, /* Byte value: 0x91 */ + 0xc9, 0x7b, 0xce, 0x66, 0x85, 0xbc, 0xc4, 0x65, 0x3f, 0x29, 0x75, 0xcc, 0xd2, 0x51, 0x96, 0x73, /* Byte value: 0x92 */ + 0xdb, 0x63, 0x19, 0x41, 0x8c, 0xff, 0xab, 0x76, 0x1d, 0x21, 0x24, 0x82, 0x97, 0x75, 0xe9, 0x4a, /* Byte value: 0x93 */ + 0xb6, 0x72, 0xc2, 0x30, 0x5b, 0x2c, 0xad, 0xc3, 0x52, 0x2e, 0x9a, 0x60, 0xd1, 0xaf, 0xf1, 0xbf, /* Byte value: 0x94 */ + 0x40, 0x2d, 0x3c, 0xcd, 0x20, 0x95, 0x0e, 0x7b, 0x1a, 0x1b, 0xd5, 0x59, 0x0f, 0x80, 0x38, 0x7a, /* Byte value: 0x95 */ + 0x4e, 0x25, 0x71, 0xd0, 0x27, 0x15, 0x2b, 0xcb, 0x04, 0xa2, 0x5b, 0x63, 0x8d, 0x9c, 0xac, 0x6d, /* Byte value: 0x96 */ + 0xd7, 0xe9, 0x8c, 0xd9, 0x8a, 0x88, 
0x03, 0xbb, 0xc6, 0xe6, 0x5f, 0x71, 0x23, 0x6d, 0x0c, 0x9b, /* Byte value: 0x97 */ + 0xab, 0x23, 0x34, 0xa9, 0xb4, 0x75, 0x40, 0xbf, 0xed, 0xa0, 0xde, 0x91, 0x0d, 0x95, 0xc3, 0xf2, /* Byte value: 0x98 */ + 0x98, 0x8d, 0x91, 0xaa, 0x4c, 0x07, 0x8f, 0xaf, 0x41, 0x7b, 0x9f, 0x97, 0xb5, 0xf3, 0x79, 0x95, /* Byte value: 0x99 */ + 0xc8, 0x3a, 0xa2, 0xc5, 0x64, 0x26, 0x63, 0xba, 0xbc, 0x16, 0xee, 0x49, 0xc9, 0x53, 0x4f, 0x10, /* Byte value: 0x9a */ + 0xaf, 0xe4, 0x47, 0x60, 0xb6, 0x58, 0x99, 0x45, 0xa4, 0x5c, 0xf7, 0xc0, 0x61, 0x9d, 0x21, 0xbd, /* Byte value: 0x9b */ + 0xa3, 0x6e, 0xd2, 0xf8, 0xb0, 0x2f, 0x31, 0x88, 0x7f, 0x9b, 0x8c, 0x33, 0xd5, 0x85, 0xc4, 0x6c, /* Byte value: 0x9c */ + 0x44, 0xea, 0x4f, 0x04, 0x22, 0xb8, 0xd7, 0x81, 0x53, 0xe7, 0xfc, 0x08, 0x63, 0x88, 0xda, 0x35, /* Byte value: 0x9d */ + 0x84, 0x9d, 0x0b, 0x90, 0x42, 0xc4, 0xc5, 0x0c, 0x7d, 0xca, 0x40, 0xe3, 0x72, 0xcb, 0x92, 0xbb, /* Byte value: 0x9e */ + 0x9e, 0xc8, 0x3a, 0xe6, 0x4f, 0xdd, 0xdb, 0x28, 0xcd, 0xf9, 0x43, 0x0f, 0xef, 0xff, 0xea, 0x1c, /* Byte value: 0x9f */ + 0xe2, 0x02, 0x82, 0x96, 0x71, 0x20, 0x98, 0x2c, 0xe6, 0xbf, 0xc2, 0xef, 0xc1, 0x07, 0x25, 0x75, /* Byte value: 0xa0 */ + 0xcd, 0xbc, 0xbd, 0xaf, 0x87, 0x91, 0x1d, 0x9f, 0x76, 0xd5, 0x5c, 0x9d, 0xbe, 0x59, 0x74, 0x3c, /* Byte value: 0xa1 */ + 0xfb, 0x94, 0x07, 0xc6, 0x9c, 0x54, 0xac, 0xaa, 0x10, 0xcd, 0xaf, 0x4f, 0x71, 0x35, 0xf5, 0x77, /* Byte value: 0xa2 */ + 0x45, 0xab, 0x23, 0xa7, 0xc3, 0x22, 0x70, 0x5e, 0xd0, 0xd8, 0x67, 0x8d, 0x78, 0x8a, 0x03, 0x56, /* Byte value: 0xa3 */ + 0x23, 0x34, 0xaa, 0xa1, 0xf0, 0xc6, 0x2d, 0x7e, 0x4b, 0xad, 0xe5, 0x81, 0xcb, 0x46, 0xb4, 0x98, /* Byte value: 0xa4 */ + 0xc6, 0x32, 0xef, 0xd8, 0x63, 0xa6, 0x46, 0x0a, 0xa2, 0xaf, 0x60, 0x73, 0x4b, 0x4f, 0xdb, 0x07, /* Byte value: 0xa5 */ + 0x3d, 0xa6, 0xe8, 0x1e, 0xff, 0xf2, 0xea, 0xa0, 0xb2, 0x62, 0xcf, 0x3c, 0x3a, 0x7a, 0x2e, 0x70, /* Byte value: 0xa6 */ + 0x17, 0x9e, 0xc8, 0x4d, 0xea, 0xf4, 0x11, 0x36, 0xe8, 0xcb, 0xe3, 0x9a, 0x32, 0x2e, 0x44, 0x15, /* Byte value: 0xa7 */ + 0xeb, 0x0e, 0x08, 0x64, 0x94, 0xe0, 0x4e, 0xc4, 0xf7, 0xbb, 0x0b, 0xc8, 0x02, 0x15, 0xfb, 0x88, /* Byte value: 0xa8 */ + 0x2e, 0xff, 0x53, 0x9a, 0x17, 0x2b, 0x22, 0x6c, 0x13, 0x55, 0x05, 0xf7, 0x64, 0x5c, 0x88, 0x2a, /* Byte value: 0xa9 */ + 0xcb, 0xf9, 0x16, 0xe3, 0x84, 0x4b, 0x49, 0x18, 0xfa, 0x57, 0x80, 0x05, 0xe4, 0x55, 0xe7, 0xb5, /* Byte value: 0xaa */ + 0xb2, 0xb5, 0xb1, 0xf9, 0x59, 0x01, 0x74, 0x39, 0x1b, 0xd2, 0xb3, 0x31, 0xbd, 0xa7, 0x13, 0xf0, /* Byte value: 0xab */ + 0x5c, 0x3d, 0xa6, 0xf7, 0x2e, 0x56, 0x44, 0xd8, 0x26, 0xaa, 0x0a, 0x2d, 0xc8, 0xb8, 0xd3, 0x54, /* Byte value: 0xac */ + 0x4a, 0xe2, 0x02, 0x19, 0x25, 0x38, 0xf2, 0x31, 0x4d, 0x5e, 0x72, 0x32, 0xe1, 0x94, 0x4e, 0x22, /* Byte value: 0xad */ + 0x64, 0x1d, 0x51, 0x83, 0x32, 0x13, 0xd0, 0x5d, 0x5e, 0x0b, 0x77, 0xc5, 0x85, 0xc8, 0xc6, 0x08, /* Byte value: 0xae */ + 0x71, 0x01, 0x41, 0x4b, 0xd9, 0x10, 0x4c, 0x16, 0x73, 0xbe, 0x61, 0x96, 0x81, 0xe2, 0xf3, 0xdb, /* Byte value: 0xaf */ + 0x35, 0xeb, 0x0e, 0x4f, 0xfb, 0xa8, 0x9b, 0x97, 0x20, 0x59, 0x9d, 0x9e, 0xe2, 0x6a, 0x29, 0xee, /* Byte value: 0xb0 */ + 0xc7, 0x73, 0x83, 0x7b, 0x82, 0x3c, 0xe1, 0xd5, 0x21, 0x90, 0xfb, 0xf6, 0x50, 0x4d, 0x02, 0x64, /* Byte value: 0xb1 */ + 0x07, 0x04, 0xc7, 0xef, 0xe2, 0x40, 0xf3, 0x58, 0x0f, 0xbd, 0x47, 0x1d, 0x41, 0x0e, 0x4a, 0xea, /* Byte value: 0xb2 */ + 0x59, 0xbb, 0xb9, 0x9d, 0xcd, 0xe1, 0x3a, 0xfd, 0xec, 0x69, 0xb8, 0xf9, 0xbf, 0xb2, 0xe8, 0x78, /* Byte value: 0xb3 */ + 0x38, 0x20, 0xf7, 0x74, 0x1c, 0x45, 0x94, 0x85, 0x78, 0xa1, 0x7d, 0xe8, 0x4d, 
0x70, 0x15, 0x5c, /* Byte value: 0xb4 */ + 0xea, 0x4f, 0x64, 0xc7, 0x75, 0x7a, 0xe9, 0x1b, 0x74, 0x84, 0x90, 0x4d, 0x19, 0x17, 0x22, 0xeb, /* Byte value: 0xb5 */ + 0x37, 0x69, 0xd6, 0xca, 0xfa, 0x5f, 0x16, 0xea, 0xe5, 0x27, 0x68, 0x57, 0xd4, 0x6e, 0x58, 0x28, /* Byte value: 0xb6 */ + 0x63, 0x19, 0x96, 0x6c, 0xd0, 0x53, 0x23, 0x05, 0x51, 0xb6, 0x30, 0xd8, 0xc4, 0xc6, 0x8c, 0xe2, /* Byte value: 0xb7 */ + 0x18, 0xd7, 0xe9, 0xf3, 0x0c, 0xee, 0x93, 0x59, 0x75, 0x4d, 0xf6, 0x25, 0xab, 0x30, 0x09, 0x61, /* Byte value: 0xb8 */ + 0xa7, 0xa9, 0xa1, 0x31, 0xb2, 0x02, 0xe8, 0x72, 0x36, 0x67, 0xa5, 0x62, 0xb9, 0x8d, 0x26, 0x23, /* Byte value: 0xb9 */ + 0x58, 0xfa, 0xd5, 0x3e, 0x2c, 0x7b, 0x9d, 0x22, 0x6f, 0x56, 0x23, 0x7c, 0xa4, 0xb0, 0x31, 0x1b, /* Byte value: 0xba */ + 0x11, 0xdb, 0x63, 0x01, 0xe9, 0x2e, 0x45, 0xb1, 0x64, 0x49, 0x3f, 0x02, 0x68, 0x22, 0xd7, 0x9c, /* Byte value: 0xbb */ + 0xf5, 0x9c, 0x4a, 0xdb, 0x9b, 0xd4, 0x89, 0x1a, 0x0e, 0x74, 0x21, 0x75, 0xf3, 0x29, 0x61, 0x60, /* Byte value: 0xbc */ + 0xd9, 0xe1, 0xc1, 0xc4, 0x8d, 0x08, 0x26, 0x0b, 0xd8, 0x5f, 0xd1, 0x4b, 0xa1, 0x71, 0x98, 0x8c, /* Byte value: 0xbd */ + 0xe9, 0x8c, 0xd0, 0xe1, 0x95, 0x17, 0xc3, 0xb9, 0x32, 0xc5, 0xfe, 0x01, 0x34, 0x11, 0x8a, 0x4e, /* Byte value: 0xbe */ + 0x25, 0x71, 0x01, 0xed, 0xf3, 0x1c, 0x79, 0xf9, 0xc7, 0x2f, 0x39, 0x19, 0x91, 0x4a, 0x27, 0x11, /* Byte value: 0xbf */ + 0xf8, 0x57, 0xb3, 0xe0, 0x7c, 0x39, 0x86, 0x08, 0x56, 0x8c, 0xc1, 0x03, 0x5c, 0x33, 0x5d, 0xd2, /* Byte value: 0xc0 */ + 0x97, 0xc4, 0xb0, 0x14, 0xaa, 0x1d, 0x0d, 0xc0, 0xdc, 0xfd, 0x8a, 0x28, 0x2c, 0xed, 0x34, 0xe1, /* Byte value: 0xc1 */ + 0xb1, 0x76, 0x05, 0xdf, 0xb9, 0x6c, 0x5e, 0x9b, 0x5d, 0x93, 0xdd, 0x7d, 0x90, 0xa1, 0xbb, 0x55, /* Byte value: 0xc2 */ + 0x19, 0x96, 0x85, 0x50, 0xed, 0x74, 0x34, 0x86, 0xf6, 0x72, 0x6d, 0xa0, 0xb0, 0x32, 0xd0, 0x02, /* Byte value: 0xc3 */ + 0x0f, 0x49, 0x21, 0xbe, 0xe6, 0x1a, 0x82, 0x6f, 0x9d, 0x86, 0x15, 0xbf, 0x99, 0x1e, 0x4d, 0x74, /* Byte value: 0xc4 */ + 0x89, 0x56, 0xf2, 0xab, 0xa5, 0x29, 0xca, 0x1e, 0x25, 0x32, 0xa0, 0x95, 0xdd, 0xd1, 0xae, 0x09, /* Byte value: 0xc5 */ + 0xdc, 0x67, 0xde, 0xae, 0x6e, 0xbf, 0x58, 0x2e, 0x12, 0x9c, 0x63, 0x9f, 0xd6, 0x7b, 0xa3, 0xa0, /* Byte value: 0xc6 */ + 0x08, 0x4d, 0xe6, 0x51, 0x04, 0x5a, 0x71, 0x37, 0x92, 0x3b, 0x52, 0xa2, 0xd8, 0x10, 0x07, 0x9e, /* Byte value: 0xc7 */ + 0xf1, 0x5b, 0x39, 0x12, 0x99, 0xf9, 0x50, 0xe0, 0x47, 0x88, 0x08, 0x24, 0x9f, 0x21, 0x83, 0x2f, /* Byte value: 0xc8 */ + 0xa8, 0xe0, 0x80, 0x8f, 0x54, 0x18, 0x6a, 0x1d, 0xab, 0xe1, 0xb0, 0xdd, 0x20, 0x93, 0x6b, 0x57, /* Byte value: 0xc9 */ + 0x0e, 0x08, 0x4d, 0x1d, 0x07, 0x80, 0x25, 0xb0, 0x1e, 0xb9, 0x8e, 0x3a, 0x82, 0x1c, 0x94, 0x17, /* Byte value: 0xca */ + 0x6f, 0x93, 0x03, 0xf4, 0xd6, 0x24, 0x8b, 0xc8, 0x8a, 0x71, 0x4b, 0x2b, 0x70, 0xde, 0x69, 0x33, /* Byte value: 0xcb */ + 0x7d, 0x8b, 0xd4, 0xd3, 0xdf, 0x67, 0xe4, 0xdb, 0xa8, 0x79, 0x1a, 0x65, 0x35, 0xfa, 0x16, 0x0a, /* Byte value: 0xcc */ + 0xd4, 0x2a, 0x38, 0xff, 0x6a, 0xe5, 0x29, 0x19, 0x80, 0xa7, 0x31, 0x3d, 0x0e, 0x6b, 0xa4, 0x3e, /* Byte value: 0xcd */ + 0xbd, 0xfc, 0x90, 0x47, 0xbf, 0x1b, 0xf6, 0x56, 0x86, 0x54, 0xa6, 0x8e, 0x24, 0xb9, 0x5e, 0x84, /* Byte value: 0xce */ + 0x86, 0x1f, 0xd3, 0x15, 0x43, 0x33, 0x48, 0x71, 0xb8, 0xb4, 0xb5, 0x2a, 0x44, 0xcf, 0xe3, 0x7d, /* Byte value: 0xcf */ + 0xec, 0x0a, 0xcf, 0x8b, 0x76, 0xa0, 0xbd, 0x9c, 0xf8, 0x06, 0x4c, 0xd5, 0x43, 0x1b, 0xb1, 0x62, /* Byte value: 0xd0 */ + 0x32, 0xef, 0xc9, 0xa0, 0x19, 0xe8, 0x68, 0xcf, 0x2f, 0xe4, 0xda, 0x83, 0xa3, 0x64, 0x63, 0x04, /* Byte value: 0xd1 */ + 
0x7e, 0x48, 0x60, 0xf5, 0x3f, 0x0a, 0xce, 0x79, 0xee, 0x38, 0x74, 0x29, 0x18, 0xfc, 0xbe, 0xaf, /* Byte value: 0xd2 */ + 0xed, 0x4b, 0xa3, 0x28, 0x97, 0x3a, 0x1a, 0x43, 0x7b, 0x39, 0xd7, 0x50, 0x58, 0x19, 0x68, 0x01, /* Byte value: 0xd3 */ + 0x6e, 0xd2, 0x6f, 0x57, 0x37, 0xbe, 0x2c, 0x17, 0x09, 0x4e, 0xd0, 0xae, 0x6b, 0xdc, 0xb0, 0x50, /* Byte value: 0xd4 */ + 0xc3, 0xb4, 0xf0, 0xb2, 0x80, 0x11, 0x38, 0x2f, 0x68, 0x6c, 0xd2, 0xa7, 0x3c, 0x45, 0xe0, 0x2b, /* Byte value: 0xd5 */ + 0x21, 0xb6, 0x72, 0x24, 0xf1, 0x31, 0xa0, 0x03, 0x8e, 0xd3, 0x10, 0x48, 0xfd, 0x42, 0xc5, 0x5e, /* Byte value: 0xd6 */ + 0x62, 0x58, 0xfa, 0xcf, 0x31, 0xc9, 0x84, 0xda, 0xd2, 0x89, 0xab, 0x5d, 0xdf, 0xc4, 0x55, 0x81, /* Byte value: 0xd7 */ + 0xce, 0x7f, 0x09, 0x89, 0x67, 0xfc, 0x37, 0x3d, 0x30, 0x94, 0x32, 0xd1, 0x93, 0x5f, 0xdc, 0x99, /* Byte value: 0xd8 */ + 0x93, 0x03, 0xc3, 0xdd, 0xa8, 0x30, 0xd4, 0x3a, 0x95, 0x01, 0xa3, 0x79, 0x40, 0xe5, 0xd6, 0xae, /* Byte value: 0xd9 */ + 0x4c, 0xa7, 0xa9, 0x55, 0x26, 0xe2, 0xa6, 0xb6, 0xc1, 0xdc, 0xae, 0xaa, 0xbb, 0x98, 0xdd, 0xab, /* Byte value: 0xda */ + 0xde, 0xe5, 0x06, 0x2b, 0x6f, 0x48, 0xd5, 0x53, 0xd7, 0xe2, 0x96, 0x56, 0xe0, 0x7f, 0xd2, 0x66, /* Byte value: 0xdb */ + 0x3f, 0x24, 0x30, 0x9b, 0xfe, 0x05, 0x67, 0xdd, 0x77, 0x1c, 0x3a, 0xf5, 0x0c, 0x7e, 0x5f, 0xb6, /* Byte value: 0xdc */ + 0x43, 0xee, 0x88, 0xeb, 0xc0, 0xf8, 0x24, 0xd9, 0x5c, 0x5a, 0xbb, 0x15, 0x22, 0x86, 0x90, 0xdf, /* Byte value: 0xdd */ + 0x73, 0x83, 0x99, 0xce, 0xd8, 0xe7, 0xc1, 0x6b, 0xb6, 0xc0, 0x94, 0x5f, 0xb7, 0xe6, 0x82, 0x1d, /* Byte value: 0xde */ + 0xad, 0x66, 0x9f, 0xe5, 0xb7, 0xaf, 0x14, 0x38, 0x61, 0x22, 0x02, 0x09, 0x57, 0x99, 0x50, 0x7b, /* Byte value: 0xdf */ + 0xf0, 0x1a, 0x55, 0xb1, 0x78, 0x63, 0xf7, 0x3f, 0xc4, 0xb7, 0x93, 0xa1, 0x84, 0x23, 0x5a, 0x4c, /* Byte value: 0xe0 */ + 0x47, 0x29, 0xfb, 0x22, 0xc2, 0xd5, 0xfd, 0x23, 0x15, 0xa6, 0x92, 0x44, 0x4e, 0x8e, 0x72, 0x90, /* Byte value: 0xe1 */ + 0x3c, 0xe7, 0x84, 0xbd, 0x1e, 0x68, 0x4d, 0x7f, 0x31, 0x5d, 0x54, 0xb9, 0x21, 0x78, 0xf7, 0x13, /* Byte value: 0xe2 */ + 0x61, 0x9b, 0x4e, 0xe9, 0xd1, 0xa4, 0xae, 0x78, 0x94, 0xc8, 0xc5, 0x11, 0xf2, 0xc2, 0xfd, 0x24, /* Byte value: 0xe3 */ + 0x56, 0xf2, 0x98, 0x23, 0x2b, 0xfb, 0xb8, 0x92, 0x71, 0xef, 0xad, 0x46, 0x26, 0xac, 0xa5, 0x0c, /* Byte value: 0xe4 */ + 0x29, 0xfb, 0x94, 0x75, 0xf5, 0x6b, 0xd1, 0x34, 0x1c, 0xe8, 0x42, 0xea, 0x25, 0x52, 0xc2, 0xc0, /* Byte value: 0xe5 */ + 0x7b, 0xce, 0x7f, 0x9f, 0xdc, 0xbd, 0xb0, 0x5c, 0x24, 0xfb, 0xc6, 0xfd, 0x6f, 0xf6, 0x85, 0x83, /* Byte value: 0xe6 */ + 0x10, 0x9a, 0x0f, 0xa2, 0x08, 0xb4, 0xe2, 0x6e, 0xe7, 0x76, 0xa4, 0x87, 0x73, 0x20, 0x0e, 0xff, /* Byte value: 0xe7 */ + 0xff, 0x53, 0x74, 0x0f, 0x9e, 0x79, 0x75, 0x50, 0x59, 0x31, 0x86, 0x1e, 0x1d, 0x3d, 0x17, 0x38, /* Byte value: 0xe8 */ + 0x5d, 0x7c, 0xca, 0x54, 0xcf, 0xcc, 0xe3, 0x07, 0xa5, 0x95, 0x91, 0xa8, 0xd3, 0xba, 0x0a, 0x37, /* Byte value: 0xe9 */ + 0x31, 0x2c, 0x7d, 0x86, 0xf9, 0x85, 0x42, 0x6d, 0x69, 0xa5, 0xb4, 0xcf, 0x8e, 0x62, 0xcb, 0xa1, /* Byte value: 0xea */ + 0xb7, 0x33, 0xae, 0x93, 0xba, 0xb6, 0x0a, 0x1c, 0xd1, 0x11, 0x01, 0xe5, 0xca, 0xad, 0x28, 0xdc, /* Byte value: 0xeb */ + 0xb3, 0xf4, 0xdd, 0x5a, 0xb8, 0x9b, 0xd3, 0xe6, 0x98, 0xed, 0x28, 0xb4, 0xa6, 0xa5, 0xca, 0x93, /* Byte value: 0xec */ + 0x6b, 0x54, 0x70, 0x3d, 0xd4, 0x09, 0x52, 0x32, 0xc3, 0x8d, 0x62, 0x7a, 0x1c, 0xd6, 0x8b, 0x7c, /* Byte value: 0xed */ + 0xc0, 0x77, 0x44, 0x94, 0x60, 0x7c, 0x12, 0x8d, 0x2e, 0x2d, 0xbc, 0xeb, 0x11, 0x43, 0x48, 0x8e, /* Byte value: 0xee */ + 0x7f, 0x09, 0x0c, 0x56, 0xde, 0x90, 0x69, 
0xa6, 0x6d, 0x07, 0xef, 0xac, 0x03, 0xfe, 0x67, 0xcc, /* Byte value: 0xef */ + 0x1e, 0x92, 0x42, 0xbf, 0x0f, 0x34, 0xc7, 0xde, 0xf9, 0xcf, 0x2a, 0xbd, 0xf1, 0x3c, 0x9a, 0xe8, /* Byte value: 0xf0 */ + 0xd1, 0xac, 0x27, 0x95, 0x89, 0x52, 0x57, 0x3c, 0x4a, 0x64, 0x83, 0xe9, 0x79, 0x61, 0x9f, 0x12, /* Byte value: 0xf1 */ + 0x78, 0x0d, 0xcb, 0xb9, 0x3c, 0xd0, 0x9a, 0xfe, 0x62, 0xba, 0xa8, 0xb1, 0x42, 0xf0, 0x2d, 0x26, /* Byte value: 0xf2 */ + 0xc2, 0xf5, 0x9c, 0x11, 0x61, 0x8b, 0x9f, 0xf0, 0xeb, 0x53, 0x49, 0x22, 0x27, 0x47, 0x39, 0x48, /* Byte value: 0xf3 */ + 0xbb, 0xb9, 0x3b, 0x0b, 0xbc, 0xc1, 0xa2, 0xd1, 0x0a, 0xd6, 0x7a, 0x16, 0x7e, 0xb5, 0xcd, 0x0d, /* Byte value: 0xf4 */ + 0x6d, 0x11, 0xdb, 0x71, 0xd7, 0xd3, 0x06, 0xb5, 0x4f, 0x0f, 0xbe, 0xe2, 0x46, 0xda, 0x18, 0xf5, /* Byte value: 0xf5 */ + 0xe1, 0xc1, 0x36, 0xb0, 0x91, 0x4d, 0xb2, 0x8e, 0xa0, 0xfe, 0xac, 0xa3, 0xec, 0x01, 0x8d, 0xd0, /* Byte value: 0xf6 */ + 0x1f, 0xd3, 0x2e, 0x1c, 0xee, 0xae, 0x60, 0x01, 0x7a, 0xf0, 0xb1, 0x38, 0xea, 0x3e, 0x43, 0x8b, /* Byte value: 0xf7 */ + 0x82, 0xd8, 0xa0, 0xdc, 0x41, 0x1e, 0x91, 0x8b, 0xf1, 0x48, 0x9c, 0x7b, 0x28, 0xc7, 0x01, 0x32, /* Byte value: 0xf8 */ + 0xba, 0xf8, 0x57, 0xa8, 0x5d, 0x5b, 0x05, 0x0e, 0x89, 0xe9, 0xe1, 0x93, 0x65, 0xb7, 0x14, 0x6e, /* Byte value: 0xf9 */ + 0x8c, 0xd0, 0xed, 0xc1, 0x46, 0x9e, 0xb4, 0x3b, 0xef, 0xf1, 0x12, 0x41, 0xaa, 0xdb, 0x95, 0x25, /* Byte value: 0xfa */ + 0xcf, 0x3e, 0x65, 0x2a, 0x86, 0x66, 0x90, 0xe2, 0xb3, 0xab, 0xa9, 0x54, 0x88, 0x5d, 0x05, 0xfa, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0xfe, 0x12, 0x18, 0xac, 0x7f, 0xe3, 0xd2, 0x8f, 0xda, 0x0e, 0x1d, 0x9b, 0x06, 0x3f, 0xce, 0x5b, /* Byte value: 0xfd */ + 0x42, 0xaf, 0xe4, 0x48, 0x21, 0x62, 0x83, 0x06, 0xdf, 0x65, 0x20, 0x90, 0x39, 0x84, 0x49, 0xbc, /* Byte value: 0xfe */ + 0xd6, 0xa8, 0xe0, 0x7a, 0x6b, 0x12, 0xa4, 0x64, 0x45, 0xd9, 0xc4, 0xf4, 0x38, 0x6f, 0xd5, 0xf8, /* Byte value: 0xff */ + + /* Matrix row: 11 */ + 0xd8, 0x59, 0xe0, 0xf8, 0xd2, 0x2a, 0xca, 0x1c, 0x17, 0x58, 0x25, 0x47, 0x72, 0xa8, 0x1d, 0x31, /* Byte value: 0x00 */ + 0x69, 0xb6, 0x74, 0x38, 0x5c, 0x64, 0x10, 0xef, 0x82, 0xee, 0xaa, 0xe9, 0x8a, 0x53, 0xb7, 0xc0, /* Byte value: 0x01 */ + 0xc0, 0xd2, 0x41, 0xdb, 0x4a, 0x91, 0xd0, 0xa1, 0x55, 0xac, 0x5e, 0x8f, 0x3d, 0x01, 0xdf, 0x89, /* Byte value: 0x02 */ + 0xba, 0xbf, 0xf7, 0x5c, 0x54, 0x08, 0xff, 0xc7, 0x32, 0xa4, 0x91, 0x4e, 0xac, 0x20, 0xdc, 0x8e, /* Byte value: 0x03 */ + 0x16, 0xa0, 0xc6, 0xfb, 0x77, 0x8c, 0x4a, 0x68, 0x8d, 0x24, 0x3c, 0x03, 0xa8, 0x75, 0xec, 0xfe, /* Byte value: 0x04 */ + 0x87, 0x68, 0xac, 0xfe, 0x5b, 0xa5, 0x09, 0xf4, 0xe0, 0xa0, 0x17, 0xcf, 0x05, 0xd1, 0x3c, 0x6c, /* Byte value: 0x05 */ + 0x58, 0xc5, 0x9e, 0x6a, 0x1f, 0x75, 0xeb, 0x63, 0x71, 0x90, 0xf0, 0x0c, 0xe5, 0x17, 0x36, 0x7e, /* Byte value: 0x06 */ + 0x59, 0x63, 0xf5, 0x7e, 0xaf, 0xd1, 0x24, 0x56, 0x06, 0xc5, 0x5c, 0xba, 0x14, 0xc2, 0xf0, 0x73, /* Byte value: 0x07 */ + 0xc4, 0x0f, 0x2e, 0x8b, 0xcf, 0x44, 0x6a, 0x75, 0x4a, 0x3b, 0xab, 0x12, 0x7f, 0xd3, 0x41, 0xbd, /* Byte value: 0x08 */ + 0x6a, 0x9f, 0xc9, 0x04, 0x4f, 0x4b, 0x82, 0xb0, 0x1b, 0x11, 0x9d, 0xf0, 0x5a, 0xef, 0x3e, 0xd7, /* Byte value: 0x09 */ + 0x05, 0x7b, 0x04, 0x44, 0x35, 0x71, 0x75, 0xe1, 0x68, 0xc2, 0x59, 0x2b, 0xb3, 0x07, 0x58, 0x39, /* Byte value: 0x0a */ + 0x9a, 0x98, 0x09, 0x99, 0xf6, 0x6f, 0x66, 0xa8, 0xca, 0x96, 0x35, 0x2c, 0xf9, 0x7f, 0xa6, 0xed, /* Byte value: 0x0b */ + 0x06, 0x52, 0xb9, 0x78, 0x26, 0x5e, 0xe7, 0xbe, 0xf1, 0x3d, 
0x6e, 0x32, 0x63, 0xbb, 0xd1, 0x2e, /* Byte value: 0x0c */ + 0xa6, 0xe9, 0x39, 0x2f, 0x49, 0x66, 0x5f, 0xae, 0x6f, 0xc7, 0x1f, 0x1b, 0xa1, 0x5b, 0x80, 0x02, /* Byte value: 0x0d */ + 0x0d, 0x02, 0xda, 0xe4, 0xfc, 0x18, 0xc2, 0x8a, 0x56, 0x2f, 0x70, 0xd2, 0x37, 0x60, 0xa7, 0x51, /* Byte value: 0x0e */ + 0xd1, 0x86, 0x55, 0x4c, 0xab, 0xe7, 0xb2, 0x42, 0x5e, 0xe0, 0xa0, 0x08, 0x07, 0x1a, 0x24, 0x54, /* Byte value: 0x0f */ + 0x43, 0x67, 0x82, 0x75, 0x94, 0xe1, 0x63, 0x81, 0xaa, 0x9b, 0xbc, 0xdd, 0x7a, 0x02, 0x7d, 0xd1, /* Byte value: 0x10 */ + 0x85, 0xe7, 0x7a, 0xd6, 0xf8, 0x2e, 0x54, 0x9e, 0x0e, 0x0a, 0x8c, 0x60, 0x24, 0xb8, 0x73, 0x76, /* Byte value: 0x11 */ + 0x41, 0xe8, 0x54, 0x5d, 0x37, 0x6a, 0x3e, 0xeb, 0x44, 0x31, 0x27, 0x72, 0x5b, 0x6b, 0x32, 0xcb, /* Byte value: 0x12 */ + 0xd2, 0xaf, 0xe8, 0x70, 0xb8, 0xc8, 0x20, 0x1d, 0xc7, 0x1f, 0x97, 0x11, 0xd7, 0xa6, 0xad, 0x43, /* Byte value: 0x13 */ + 0x2c, 0x83, 0x4f, 0x35, 0xee, 0xdb, 0x94, 0xd0, 0xd9, 0x48, 0x78, 0x06, 0x93, 0xea, 0x1b, 0x3f, /* Byte value: 0x14 */ + 0xcd, 0xd0, 0x9b, 0x3f, 0xb6, 0x89, 0x12, 0x2b, 0x03, 0x83, 0x2e, 0x5d, 0x0a, 0x61, 0x78, 0xd8, /* Byte value: 0x15 */ + 0x0b, 0x50, 0x63, 0x9c, 0xda, 0x46, 0x25, 0x34, 0xa7, 0x12, 0x1e, 0xe0, 0x54, 0xdb, 0x76, 0x7f, /* Byte value: 0x16 */ + 0xa2, 0x34, 0x56, 0x7f, 0xcc, 0xb3, 0xe5, 0x7a, 0x70, 0x50, 0xea, 0x86, 0xe3, 0x89, 0x1e, 0x36, /* Byte value: 0x17 */ + 0x38, 0xac, 0x5f, 0xe6, 0x3a, 0xdc, 0x83, 0xd2, 0xba, 0xc6, 0xdf, 0xaa, 0x1a, 0xf6, 0xb8, 0xdb, /* Byte value: 0x18 */ + 0xf7, 0xf3, 0x12, 0xf1, 0x2f, 0xde, 0xcc, 0x93, 0x57, 0xef, 0x6a, 0x58, 0x31, 0xfe, 0x8f, 0x19, /* Byte value: 0x19 */ + 0x4b, 0x1e, 0x5c, 0xd5, 0x5d, 0x88, 0xd4, 0xea, 0x94, 0x76, 0x95, 0x24, 0xfe, 0x65, 0x82, 0xb9, /* Byte value: 0x1a */ + 0xe6, 0xa7, 0x06, 0x66, 0xce, 0xa8, 0xae, 0x70, 0x5c, 0xa3, 0x94, 0xdf, 0x0b, 0xe5, 0x74, 0xc4, /* Byte value: 0x1b */ + 0xe7, 0x01, 0x6d, 0x72, 0x7e, 0x0c, 0x61, 0x45, 0x2b, 0xf6, 0x38, 0x69, 0xfa, 0x30, 0xb2, 0xc9, /* Byte value: 0x1c */ + 0x90, 0x6e, 0x01, 0x11, 0x9c, 0x8d, 0x8c, 0xa9, 0x1a, 0xd1, 0x87, 0x7a, 0x5c, 0x71, 0x16, 0x9f, /* Byte value: 0x1d */ + 0x1a, 0x04, 0x77, 0x0b, 0x3b, 0x30, 0x47, 0xd7, 0xac, 0x5e, 0xe0, 0x67, 0x6e, 0xc0, 0x8d, 0xa2, /* Byte value: 0x1e */ + 0x53, 0x95, 0xfd, 0xf6, 0xc5, 0x33, 0xce, 0x57, 0xd6, 0x82, 0xee, 0xec, 0xb1, 0xcc, 0x40, 0x01, /* Byte value: 0x1f */ + 0xa3, 0x92, 0x3d, 0x6b, 0x7c, 0x17, 0x2a, 0x4f, 0x07, 0x05, 0x46, 0x30, 0x12, 0x5c, 0xd8, 0x3b, /* Byte value: 0x20 */ + 0xf1, 0xa1, 0xab, 0x89, 0x09, 0x80, 0x2b, 0x2d, 0xa6, 0xd2, 0x04, 0x6a, 0x52, 0x45, 0x5e, 0x37, /* Byte value: 0x21 */ + 0xfd, 0x05, 0x1a, 0x79, 0x45, 0x3c, 0x26, 0x92, 0x87, 0xa8, 0xd8, 0x0e, 0x94, 0xf0, 0x3f, 0x6b, /* Byte value: 0x22 */ + 0x51, 0x1a, 0x2b, 0xde, 0x66, 0xb8, 0x93, 0x3d, 0x38, 0x28, 0x75, 0x43, 0x90, 0xa5, 0x0f, 0x1b, /* Byte value: 0x23 */ + 0x29, 0xf8, 0x4b, 0x71, 0xdb, 0xaa, 0xe1, 0x31, 0xb1, 0x8a, 0x21, 0x2d, 0x20, 0xed, 0x43, 0x06, /* Byte value: 0x24 */ + 0x3b, 0x85, 0xe2, 0xda, 0x29, 0xf3, 0x11, 0x8d, 0x23, 0x39, 0xe8, 0xb3, 0xca, 0x4a, 0x31, 0xcc, /* Byte value: 0x25 */ + 0x82, 0x13, 0xa8, 0xba, 0x6e, 0xd4, 0x7c, 0x15, 0x88, 0x62, 0x4e, 0xe4, 0xb6, 0xd6, 0x64, 0x55, /* Byte value: 0x26 */ + 0x67, 0x9d, 0x13, 0xe0, 0xb3, 0x53, 0x40, 0x3a, 0x4d, 0x3e, 0xed, 0x22, 0x6d, 0x8f, 0x99, 0x86, /* Byte value: 0x27 */ + 0x8b, 0xcc, 0x1d, 0x0e, 0x17, 0x19, 0x04, 0x4b, 0xc1, 0xda, 0xcb, 0xab, 0xc3, 0x64, 0x5d, 0x30, /* Byte value: 0x28 */ + 0x48, 0x37, 0xe1, 0xe9, 0x4e, 0xa7, 0x46, 0xb5, 0x0d, 0x89, 0xa2, 0x3d, 0x2e, 0xd9, 0x0b, 0xae, /* Byte 
value: 0x29 */ + 0x2a, 0xd1, 0xf6, 0x4d, 0xc8, 0x85, 0x73, 0x6e, 0x28, 0x75, 0x16, 0x34, 0xf0, 0x51, 0xca, 0x11, /* Byte value: 0x2a */ + 0xef, 0x78, 0xb3, 0xd2, 0xb7, 0x65, 0xd6, 0x2e, 0x15, 0x1b, 0x11, 0x90, 0x7e, 0x57, 0x4d, 0xa1, /* Byte value: 0x2b */ + 0x70, 0x9b, 0xbe, 0x0f, 0x74, 0x7b, 0xc5, 0x67, 0xb7, 0x4f, 0x7d, 0x97, 0x34, 0x2f, 0xb3, 0x75, /* Byte value: 0x2c */ + 0x2d, 0x25, 0x24, 0x21, 0x5e, 0x7f, 0x5b, 0xe5, 0xae, 0x1d, 0xd4, 0xb0, 0x62, 0x3f, 0xdd, 0x32, /* Byte value: 0x2d */ + 0x31, 0x73, 0xea, 0x52, 0x43, 0x11, 0xfb, 0x8c, 0xf3, 0x7e, 0x5a, 0xe5, 0x6f, 0x44, 0x81, 0xbe, /* Byte value: 0x2e */ + 0xe2, 0x7a, 0x69, 0x36, 0x4b, 0x7d, 0x14, 0xa4, 0x43, 0x34, 0x61, 0x42, 0x49, 0x37, 0xea, 0xf0, /* Byte value: 0x2f */ + 0x86, 0xce, 0xc7, 0xea, 0xeb, 0x01, 0xc6, 0xc1, 0x97, 0xf5, 0xbb, 0x79, 0xf4, 0x04, 0xfa, 0x61, /* Byte value: 0x30 */ + 0xc9, 0x0d, 0xf4, 0x6f, 0x33, 0x5c, 0xa8, 0xff, 0x1c, 0x14, 0xdb, 0xc0, 0x48, 0xb3, 0xe6, 0xec, /* Byte value: 0x31 */ + 0xb0, 0x49, 0xff, 0xd4, 0x3e, 0xea, 0x15, 0xc6, 0xe2, 0xe3, 0x23, 0x18, 0x09, 0x2e, 0x6c, 0xfc, /* Byte value: 0x32 */ + 0x80, 0x9c, 0x7e, 0x92, 0xcd, 0x5f, 0x21, 0x7f, 0x66, 0xc8, 0xd5, 0x4b, 0x97, 0xbf, 0x2b, 0x4f, /* Byte value: 0x33 */ + 0xf5, 0x7c, 0xc4, 0xd9, 0x8c, 0x55, 0x91, 0xf9, 0xb9, 0x45, 0xf1, 0xf7, 0x10, 0x97, 0xc0, 0x03, /* Byte value: 0x34 */ + 0xfc, 0xa3, 0x71, 0x6d, 0xf5, 0x98, 0xe9, 0xa7, 0xf0, 0xfd, 0x74, 0xb8, 0x65, 0x25, 0xf9, 0x66, /* Byte value: 0x35 */ + 0x60, 0x69, 0xc1, 0x8c, 0x25, 0xa9, 0x68, 0xb1, 0xcb, 0x56, 0x2f, 0xa6, 0xff, 0xe1, 0x8e, 0xa5, /* Byte value: 0x36 */ + 0x44, 0x93, 0x50, 0x19, 0x02, 0x1b, 0x4b, 0x0a, 0x2c, 0xf3, 0x7e, 0x59, 0xe8, 0x6c, 0x6a, 0xf2, /* Byte value: 0x37 */ + 0x0c, 0xa4, 0xb1, 0xf0, 0x4c, 0xbc, 0x0d, 0xbf, 0x21, 0x7a, 0xdc, 0x64, 0xc6, 0xb5, 0x61, 0x5c, /* Byte value: 0x38 */ + 0x8f, 0x11, 0x72, 0x5e, 0x92, 0xcc, 0xbe, 0x9f, 0xde, 0x4d, 0x3e, 0x36, 0x81, 0xb6, 0xc3, 0x04, /* Byte value: 0x39 */ + 0xa4, 0x66, 0xef, 0x07, 0xea, 0xed, 0x02, 0xc4, 0x81, 0x6d, 0x84, 0xb4, 0x80, 0x32, 0xcf, 0x18, /* Byte value: 0x3a */ + 0x3d, 0xd7, 0x5b, 0xa2, 0x0f, 0xad, 0xf6, 0x33, 0xd2, 0x04, 0x86, 0x81, 0xa9, 0xf1, 0xe0, 0xe2, /* Byte value: 0x3b */ + 0x62, 0xe6, 0x17, 0xa4, 0x86, 0x22, 0x35, 0xdb, 0x25, 0xfc, 0xb4, 0x09, 0xde, 0x88, 0xc1, 0xbf, /* Byte value: 0x3c */ + 0x35, 0xae, 0x85, 0x02, 0xc6, 0xc4, 0x41, 0x58, 0xec, 0xe9, 0xaf, 0x78, 0x2d, 0x96, 0x1f, 0x8a, /* Byte value: 0x3d */ + 0xe0, 0xf5, 0xbf, 0x1e, 0xe8, 0xf6, 0x49, 0xce, 0xad, 0x9e, 0xfa, 0xed, 0x68, 0x5e, 0xa5, 0xea, /* Byte value: 0x3e */ + 0x68, 0x10, 0x1f, 0x2c, 0xec, 0xc0, 0xdf, 0xda, 0xf5, 0xbb, 0x06, 0x5f, 0x7b, 0x86, 0x71, 0xcd, /* Byte value: 0x3f */ + 0xdc, 0x84, 0x8f, 0xa8, 0x57, 0xff, 0x70, 0xc8, 0x08, 0xcf, 0xd0, 0xda, 0x30, 0x7a, 0x83, 0x05, /* Byte value: 0x40 */ + 0x40, 0x4e, 0x3f, 0x49, 0x87, 0xce, 0xf1, 0xde, 0x33, 0x64, 0x8b, 0xc4, 0xaa, 0xbe, 0xf4, 0xc6, /* Byte value: 0x41 */ + 0xa0, 0xbb, 0x80, 0x57, 0x6f, 0x38, 0xb8, 0x10, 0x9e, 0xfa, 0x71, 0x29, 0xc2, 0xe0, 0x51, 0x2c, /* Byte value: 0x42 */ + 0xa7, 0x4f, 0x52, 0x3b, 0xf9, 0xc2, 0x90, 0x9b, 0x18, 0x92, 0xb3, 0xad, 0x50, 0x8e, 0x46, 0x0f, /* Byte value: 0x43 */ + 0x9f, 0xe3, 0x0d, 0xdd, 0xc3, 0x1e, 0x13, 0x49, 0xa2, 0x54, 0x6c, 0x07, 0x4a, 0x78, 0xfe, 0xd4, /* Byte value: 0x44 */ + 0x2b, 0x77, 0x9d, 0x59, 0x78, 0x21, 0xbc, 0x5b, 0x5f, 0x20, 0xba, 0x82, 0x01, 0x84, 0x0c, 0x1c, /* Byte value: 0x45 */ + 0xae, 0x90, 0xe7, 0x8f, 0x80, 0x0f, 0xe8, 0xc5, 0x51, 0x2a, 0x36, 0xe2, 0x25, 0x3c, 0x7f, 0x6a, /* Byte value: 0x46 */ + 0x10, 0xf2, 0x7f, 0x83, 
0x51, 0xd2, 0xad, 0xd6, 0x7c, 0x19, 0x52, 0x31, 0xcb, 0xce, 0x3d, 0xd0, /* Byte value: 0x47 */ + 0xfe, 0x2c, 0xa7, 0x45, 0x56, 0x13, 0xb4, 0xcd, 0x1e, 0x57, 0xef, 0x17, 0x44, 0x4c, 0xb6, 0x7c, /* Byte value: 0x48 */ + 0xa1, 0x1d, 0xeb, 0x43, 0xdf, 0x9c, 0x77, 0x25, 0xe9, 0xaf, 0xdd, 0x9f, 0x33, 0x35, 0x97, 0x21, /* Byte value: 0x49 */ + 0xc7, 0x26, 0x93, 0xb7, 0xdc, 0x6b, 0xf8, 0x2a, 0xd3, 0xc4, 0x9c, 0x0b, 0xaf, 0x6f, 0xc8, 0xaa, /* Byte value: 0x4a */ + 0x4d, 0x4c, 0xe5, 0xad, 0x7b, 0xd6, 0x33, 0x54, 0x65, 0x4b, 0xfb, 0x16, 0x9d, 0xde, 0x53, 0x97, /* Byte value: 0x4b */ + 0xaa, 0x4d, 0x88, 0xdf, 0x05, 0xda, 0x52, 0x11, 0x4e, 0xbd, 0xc3, 0x7f, 0x67, 0xee, 0xe1, 0x5e, /* Byte value: 0x4c */ + 0xd4, 0xfd, 0x51, 0x08, 0x9e, 0x96, 0xc7, 0xa3, 0x36, 0x22, 0xf9, 0x23, 0xb4, 0x1d, 0x7c, 0x6d, /* Byte value: 0x4d */ + 0x42, 0xc1, 0xe9, 0x61, 0x24, 0x45, 0xac, 0xb4, 0xdd, 0xce, 0x10, 0x6b, 0x8b, 0xd7, 0xbb, 0xdc, /* Byte value: 0x4e */ + 0x2e, 0x0c, 0x99, 0x1d, 0x4d, 0x50, 0xc9, 0xba, 0x37, 0xe2, 0xe3, 0xa9, 0xb2, 0x83, 0x54, 0x25, /* Byte value: 0x4f */ + 0x49, 0x91, 0x8a, 0xfd, 0xfe, 0x03, 0x89, 0x80, 0x7a, 0xdc, 0x0e, 0x8b, 0xdf, 0x0c, 0xcd, 0xa3, /* Byte value: 0x50 */ + 0xb2, 0xc6, 0x29, 0xfc, 0x9d, 0x61, 0x48, 0xac, 0x0c, 0x49, 0xb8, 0xb7, 0x28, 0x47, 0x23, 0xe6, /* Byte value: 0x51 */ + 0x77, 0x6f, 0x6c, 0x63, 0xe2, 0x81, 0xed, 0xec, 0x31, 0x27, 0xbf, 0x13, 0xa6, 0x41, 0xa4, 0x56, /* Byte value: 0x52 */ + 0x39, 0x0a, 0x34, 0xf2, 0x8a, 0x78, 0x4c, 0xe7, 0xcd, 0x93, 0x73, 0x1c, 0xeb, 0x23, 0x7e, 0xd6, /* Byte value: 0x53 */ + 0xad, 0xb9, 0x5a, 0xb3, 0x93, 0x20, 0x7a, 0x9a, 0xc8, 0xd5, 0x01, 0xfb, 0xf5, 0x80, 0xf6, 0x7d, /* Byte value: 0x54 */ + 0x20, 0x27, 0xfe, 0xc5, 0xa2, 0x67, 0x99, 0x6f, 0xf8, 0x32, 0xa4, 0x62, 0x55, 0x5f, 0x7a, 0x63, /* Byte value: 0x55 */ + 0xcf, 0x5f, 0x4d, 0x17, 0x15, 0x02, 0x4f, 0x41, 0xed, 0x29, 0xb5, 0xf2, 0x2b, 0x08, 0x37, 0xc2, /* Byte value: 0x56 */ + 0x64, 0xb4, 0xae, 0xdc, 0xa0, 0x7c, 0xd2, 0x65, 0xd4, 0xc1, 0xda, 0x3b, 0xbd, 0x33, 0x10, 0x91, /* Byte value: 0x57 */ + 0x9b, 0x3e, 0x62, 0x8d, 0x46, 0xcb, 0xa9, 0x9d, 0xbd, 0xc3, 0x99, 0x9a, 0x08, 0xaa, 0x60, 0xe0, /* Byte value: 0x58 */ + 0xde, 0x0b, 0x59, 0x80, 0xf4, 0x74, 0x2d, 0xa2, 0xe6, 0x65, 0x4b, 0x75, 0x11, 0x13, 0xcc, 0x1f, /* Byte value: 0x59 */ + 0x7f, 0x16, 0xb2, 0xc3, 0x2b, 0xe8, 0x5a, 0x87, 0x0f, 0xca, 0x96, 0xea, 0x22, 0x26, 0x5b, 0x3e, /* Byte value: 0x5a */ + 0xa8, 0xc2, 0x5e, 0xf7, 0xa6, 0x51, 0x0f, 0x7b, 0xa0, 0x17, 0x58, 0xd0, 0x46, 0x87, 0xae, 0x44, /* Byte value: 0x5b */ + 0xb6, 0x1b, 0x46, 0xac, 0x18, 0xb4, 0xf2, 0x78, 0x13, 0xde, 0x4d, 0x2a, 0x6a, 0x95, 0xbd, 0xd2, /* Byte value: 0x5c */ + 0x5c, 0x18, 0xf1, 0x3a, 0x9a, 0xa0, 0x51, 0xb7, 0x6e, 0x07, 0x05, 0x91, 0xa7, 0xc5, 0xa8, 0x4a, /* Byte value: 0x5d */ + 0xa5, 0xc0, 0x84, 0x13, 0x5a, 0x49, 0xcd, 0xf1, 0xf6, 0x38, 0x28, 0x02, 0x71, 0xe7, 0x09, 0x15, /* Byte value: 0x5e */ + 0x6b, 0x39, 0xa2, 0x10, 0xff, 0xef, 0x4d, 0x85, 0x6c, 0x44, 0x31, 0x46, 0xab, 0x3a, 0xf8, 0xda, /* Byte value: 0x5f */ + 0xd3, 0x09, 0x83, 0x64, 0x08, 0x6c, 0xef, 0x28, 0xb0, 0x4a, 0x3b, 0xa7, 0x26, 0x73, 0x6b, 0x4e, /* Byte value: 0x60 */ + 0xf0, 0x07, 0xc0, 0x9d, 0xb9, 0x24, 0xe4, 0x18, 0xd1, 0x87, 0xa8, 0xdc, 0xa3, 0x90, 0x98, 0x3a, /* Byte value: 0x61 */ + 0x5d, 0xbe, 0x9a, 0x2e, 0x2a, 0x04, 0x9e, 0x82, 0x19, 0x52, 0xa9, 0x27, 0x56, 0x10, 0x6e, 0x47, /* Byte value: 0x62 */ + 0xc8, 0xab, 0x9f, 0x7b, 0x83, 0xf8, 0x67, 0xca, 0x6b, 0x41, 0x77, 0x76, 0xb9, 0x66, 0x20, 0xe1, /* Byte value: 0x63 */ + 0xc5, 0xa9, 0x45, 0x9f, 0x7f, 0xe0, 0xa5, 0x40, 0x3d, 0x6e, 0x07, 
0xa4, 0x8e, 0x06, 0x87, 0xb0, /* Byte value: 0x64 */ + 0xbd, 0x4b, 0x25, 0x30, 0xc2, 0xf2, 0xd7, 0x4c, 0xb4, 0xcc, 0x53, 0xca, 0x3e, 0x4e, 0xcb, 0xad, /* Byte value: 0x65 */ + 0xc3, 0xfb, 0xfc, 0xe7, 0x59, 0xbe, 0x42, 0xfe, 0xcc, 0x53, 0x69, 0x96, 0xed, 0xbd, 0x56, 0x9e, /* Byte value: 0x66 */ + 0xe4, 0x28, 0xd0, 0x4e, 0x6d, 0x23, 0xf3, 0x1a, 0xb2, 0x09, 0x0f, 0x70, 0x2a, 0x8c, 0x3b, 0xde, /* Byte value: 0x67 */ + 0xaf, 0x36, 0x8c, 0x9b, 0x30, 0xab, 0x27, 0xf0, 0x26, 0x7f, 0x9a, 0x54, 0xd4, 0xe9, 0xb9, 0x67, /* Byte value: 0x68 */ + 0xb8, 0x30, 0x21, 0x74, 0xf7, 0x83, 0xa2, 0xad, 0xdc, 0x0e, 0x0a, 0xe1, 0x8d, 0x49, 0x93, 0x94, /* Byte value: 0x69 */ + 0xcb, 0x82, 0x22, 0x47, 0x90, 0xd7, 0xf5, 0x95, 0xf2, 0xbe, 0x40, 0x6f, 0x69, 0xda, 0xa9, 0xf6, /* Byte value: 0x6a */ + 0xc1, 0x74, 0x2a, 0xcf, 0xfa, 0x35, 0x1f, 0x94, 0x22, 0xf9, 0xf2, 0x39, 0xcc, 0xd4, 0x19, 0x84, /* Byte value: 0x6b */ + 0xb5, 0x32, 0xfb, 0x90, 0x0b, 0x9b, 0x60, 0x27, 0x8a, 0x21, 0x7a, 0x33, 0xba, 0x29, 0x34, 0xc5, /* Byte value: 0x6c */ + 0x6f, 0xe4, 0xcd, 0x40, 0x7a, 0x3a, 0xf7, 0x51, 0x73, 0xd3, 0xc4, 0xdb, 0xe9, 0xe8, 0x66, 0xee, /* Byte value: 0x6d */ + 0x4c, 0xea, 0x8e, 0xb9, 0xcb, 0x72, 0xfc, 0x61, 0x12, 0x1e, 0x57, 0xa0, 0x6c, 0x0b, 0x95, 0x9a, /* Byte value: 0x6e */ + 0x97, 0x9a, 0xd3, 0x7d, 0x0a, 0x77, 0xa4, 0x22, 0x9c, 0xb9, 0x45, 0xfe, 0xce, 0x1f, 0x01, 0xbc, /* Byte value: 0x6f */ + 0x88, 0xe5, 0xa0, 0x32, 0x04, 0x36, 0x96, 0x14, 0x58, 0x25, 0xfc, 0xb2, 0x13, 0xd8, 0xd4, 0x27, /* Byte value: 0x70 */ + 0x61, 0xcf, 0xaa, 0x98, 0x95, 0x0d, 0xa7, 0x84, 0xbc, 0x03, 0x83, 0x10, 0x0e, 0x34, 0x48, 0xa8, /* Byte value: 0x71 */ + 0x57, 0x48, 0x92, 0xa6, 0x40, 0xe6, 0x74, 0x83, 0xc9, 0x15, 0x1b, 0x71, 0xf3, 0x1e, 0xde, 0x35, /* Byte value: 0x72 */ + 0x11, 0x54, 0x14, 0x97, 0xe1, 0x76, 0x62, 0xe3, 0x0b, 0x4c, 0xfe, 0x87, 0x3a, 0x1b, 0xfb, 0xdd, /* Byte value: 0x73 */ + 0x99, 0xb1, 0xb4, 0xa5, 0xe5, 0x40, 0xf4, 0xf7, 0x53, 0x69, 0x02, 0x35, 0x29, 0xc3, 0x2f, 0xfa, /* Byte value: 0x74 */ + 0x72, 0x14, 0x68, 0x27, 0xd7, 0xf0, 0x98, 0x0d, 0x59, 0xe5, 0xe6, 0x38, 0x15, 0x46, 0xfc, 0x6f, /* Byte value: 0x75 */ + 0x83, 0xb5, 0xc3, 0xae, 0xde, 0x70, 0xb3, 0x20, 0xff, 0x37, 0xe2, 0x52, 0x47, 0x03, 0xa2, 0x58, /* Byte value: 0x76 */ + 0xbf, 0xc4, 0xf3, 0x18, 0x61, 0x79, 0x8a, 0x26, 0x5a, 0x66, 0xc8, 0x65, 0x1f, 0x27, 0x84, 0xb7, /* Byte value: 0x77 */ + 0x55, 0xc7, 0x44, 0x8e, 0xe3, 0x6d, 0x29, 0xe9, 0x27, 0xbf, 0x80, 0xde, 0xd2, 0x77, 0x91, 0x2f, /* Byte value: 0x78 */ + 0x73, 0xb2, 0x03, 0x33, 0x67, 0x54, 0x57, 0x38, 0x2e, 0xb0, 0x4a, 0x8e, 0xe4, 0x93, 0x3a, 0x62, /* Byte value: 0x79 */ + 0x9d, 0x6c, 0xdb, 0xf5, 0x60, 0x95, 0x4e, 0x23, 0x4c, 0xfe, 0xf7, 0xa8, 0x6b, 0x11, 0xb1, 0xce, /* Byte value: 0x7a */ + 0x81, 0x3a, 0x15, 0x86, 0x7d, 0xfb, 0xee, 0x4a, 0x11, 0x9d, 0x79, 0xfd, 0x66, 0x6a, 0xed, 0x42, /* Byte value: 0x7b */ + 0x3f, 0x58, 0x8d, 0x8a, 0xac, 0x26, 0xab, 0x59, 0x3c, 0xae, 0x1d, 0x2e, 0x88, 0x98, 0xaf, 0xf8, /* Byte value: 0x7c */ + 0xb3, 0x60, 0x42, 0xe8, 0x2d, 0xc5, 0x87, 0x99, 0x7b, 0x1c, 0x14, 0x01, 0xd9, 0x92, 0xe5, 0xeb, /* Byte value: 0x7d */ + 0xbb, 0x19, 0x9c, 0x48, 0xe4, 0xac, 0x30, 0xf2, 0x45, 0xf1, 0x3d, 0xf8, 0x5d, 0xf5, 0x1a, 0x83, /* Byte value: 0x7e */ + 0xd6, 0x72, 0x87, 0x20, 0x3d, 0x1d, 0x9a, 0xc9, 0xd8, 0x88, 0x62, 0x8c, 0x95, 0x74, 0x33, 0x77, /* Byte value: 0x7f */ + 0xea, 0x03, 0xb7, 0x96, 0x82, 0x14, 0xa3, 0xcf, 0x7d, 0xd9, 0x48, 0xbb, 0xcd, 0x50, 0x15, 0x98, /* Byte value: 0x80 */ + 0x25, 0x5c, 0xfa, 0x81, 0x97, 0x16, 0xec, 0x8e, 0x90, 0xf0, 0xfd, 0x49, 0xe6, 0x58, 0x22, 0x5a, /* Byte value: 
0x81 */ + 0xce, 0xf9, 0x26, 0x03, 0xa5, 0xa6, 0x80, 0x74, 0x9a, 0x7c, 0x19, 0x44, 0xda, 0xdd, 0xf1, 0xcf, /* Byte value: 0x82 */ + 0x63, 0x40, 0x7c, 0xb0, 0x36, 0x86, 0xfa, 0xee, 0x52, 0xa9, 0x18, 0xbf, 0x2f, 0x5d, 0x07, 0xb2, /* Byte value: 0x83 */ + 0x45, 0x35, 0x3b, 0x0d, 0xb2, 0xbf, 0x84, 0x3f, 0x5b, 0xa6, 0xd2, 0xef, 0x19, 0xb9, 0xac, 0xff, /* Byte value: 0x84 */ + 0x32, 0x5a, 0x57, 0x6e, 0x50, 0x3e, 0x69, 0xd3, 0x6a, 0x81, 0x6d, 0xfc, 0xbf, 0xf8, 0x08, 0xa9, /* Byte value: 0x85 */ + 0xd7, 0xd4, 0xec, 0x34, 0x8d, 0xb9, 0x55, 0xfc, 0xaf, 0xdd, 0xce, 0x3a, 0x64, 0xa1, 0xf5, 0x7a, /* Byte value: 0x86 */ + 0x9e, 0x45, 0x66, 0xc9, 0x73, 0xba, 0xdc, 0x7c, 0xd5, 0x01, 0xc0, 0xb1, 0xbb, 0xad, 0x38, 0xd9, /* Byte value: 0x87 */ + 0x08, 0x79, 0xde, 0xa0, 0xc9, 0x69, 0xb7, 0x6b, 0x3e, 0xed, 0x29, 0xf9, 0x84, 0x67, 0xff, 0x68, /* Byte value: 0x88 */ + 0x8d, 0x9e, 0xa4, 0x76, 0x31, 0x47, 0xe3, 0xf5, 0x30, 0xe7, 0xa5, 0x99, 0xa0, 0xdf, 0x8c, 0x1e, /* Byte value: 0x89 */ + 0x79, 0x44, 0x0b, 0xbb, 0x0d, 0xb6, 0xbd, 0x39, 0xfe, 0xf7, 0xf8, 0xd8, 0x41, 0x9d, 0x8a, 0x10, /* Byte value: 0x8a */ + 0xab, 0xeb, 0xe3, 0xcb, 0xb5, 0x7e, 0x9d, 0x24, 0x39, 0xe8, 0x6f, 0xc9, 0x96, 0x3b, 0x27, 0x53, /* Byte value: 0x8b */ + 0x36, 0x87, 0x38, 0x3e, 0xd5, 0xeb, 0xd3, 0x07, 0x75, 0x16, 0x98, 0x61, 0xfd, 0x2a, 0x96, 0x9d, /* Byte value: 0x8c */ + 0xfb, 0x57, 0xa3, 0x01, 0x63, 0x62, 0xc1, 0x2c, 0x76, 0x95, 0xb6, 0x3c, 0xf7, 0x4b, 0xee, 0x45, /* Byte value: 0x8d */ + 0xec, 0x51, 0x0e, 0xee, 0xa4, 0x4a, 0x44, 0x71, 0x8c, 0xe4, 0x26, 0x89, 0xae, 0xeb, 0xc4, 0xb6, /* Byte value: 0x8e */ + 0x4e, 0x65, 0x58, 0x91, 0x68, 0xf9, 0xa1, 0x0b, 0xfc, 0xb4, 0xcc, 0x0f, 0x4d, 0x62, 0xda, 0x80, /* Byte value: 0x8f */ + 0x7e, 0xb0, 0xd9, 0xd7, 0x9b, 0x4c, 0x95, 0xb2, 0x78, 0x9f, 0x3a, 0x5c, 0xd3, 0xf3, 0x9d, 0x33, /* Byte value: 0x90 */ + 0xd0, 0x20, 0x3e, 0x58, 0x1b, 0x43, 0x7d, 0x77, 0x29, 0xb5, 0x0c, 0xbe, 0xf6, 0xcf, 0xe2, 0x59, /* Byte value: 0x91 */ + 0x6c, 0xcd, 0x70, 0x7c, 0x69, 0x15, 0x65, 0x0e, 0xea, 0x2c, 0xf3, 0xc2, 0x39, 0x54, 0xef, 0xf9, /* Byte value: 0x92 */ + 0xf3, 0x2e, 0x7d, 0xa1, 0xaa, 0x0b, 0x76, 0x47, 0x48, 0x78, 0x9f, 0xc5, 0x73, 0x2c, 0x11, 0x2d, /* Byte value: 0x93 */ + 0x5f, 0x31, 0x4c, 0x06, 0x89, 0x8f, 0xc3, 0xe8, 0xf7, 0xf8, 0x32, 0x88, 0x77, 0x79, 0x21, 0x5d, /* Byte value: 0x94 */ + 0xff, 0x8a, 0xcc, 0x51, 0xe6, 0xb7, 0x7b, 0xf8, 0x69, 0x02, 0x43, 0xa1, 0xb5, 0x99, 0x70, 0x71, /* Byte value: 0x95 */ + 0x8a, 0x6a, 0x76, 0x1a, 0xa7, 0xbd, 0xcb, 0x7e, 0xb6, 0x8f, 0x67, 0x1d, 0x32, 0xb1, 0x9b, 0x3d, /* Byte value: 0x96 */ + 0x56, 0xee, 0xf9, 0xb2, 0xf0, 0x42, 0xbb, 0xb6, 0xbe, 0x40, 0xb7, 0xc7, 0x02, 0xcb, 0x18, 0x38, /* Byte value: 0x97 */ + 0xdd, 0x22, 0xe4, 0xbc, 0xe7, 0x5b, 0xbf, 0xfd, 0x7f, 0x9a, 0x7c, 0x6c, 0xc1, 0xaf, 0x45, 0x08, /* Byte value: 0x98 */ + 0xb4, 0x94, 0x90, 0x84, 0xbb, 0x3f, 0xaf, 0x12, 0xfd, 0x74, 0xd6, 0x85, 0x4b, 0xfc, 0xf2, 0xc8, /* Byte value: 0x99 */ + 0x04, 0xdd, 0x6f, 0x50, 0x85, 0xd5, 0xba, 0xd4, 0x1f, 0x97, 0xf5, 0x9d, 0x42, 0xd2, 0x9e, 0x34, /* Byte value: 0x9a */ + 0xbe, 0x62, 0x98, 0x0c, 0xd1, 0xdd, 0x45, 0x13, 0x2d, 0x33, 0x64, 0xd3, 0xee, 0xf2, 0x42, 0xba, /* Byte value: 0x9b */ + 0x1b, 0xa2, 0x1c, 0x1f, 0x8b, 0x94, 0x88, 0xe2, 0xdb, 0x0b, 0x4c, 0xd1, 0x9f, 0x15, 0x4b, 0xaf, /* Byte value: 0x9c */ + 0x9c, 0xca, 0xb0, 0xe1, 0xd0, 0x31, 0x81, 0x16, 0x3b, 0xab, 0x5b, 0x1e, 0x9a, 0xc4, 0x77, 0xc3, /* Byte value: 0x9d */ + 0x5e, 0x97, 0x27, 0x12, 0x39, 0x2b, 0x0c, 0xdd, 0x80, 0xad, 0x9e, 0x3e, 0x86, 0xac, 0xe7, 0x50, /* Byte value: 0x9e */ + 0x07, 0xf4, 0xd2, 0x6c, 0x96, 0xfa, 
0x28, 0x8b, 0x86, 0x68, 0xc2, 0x84, 0x92, 0x6e, 0x17, 0x23, /* Byte value: 0x9f */ + 0x8c, 0x38, 0xcf, 0x62, 0x81, 0xe3, 0x2c, 0xc0, 0x47, 0xb2, 0x09, 0x2f, 0x51, 0x0a, 0x4a, 0x13, /* Byte value: 0xa0 */ + 0x0f, 0x8d, 0x0c, 0xcc, 0x5f, 0x93, 0x9f, 0xe0, 0xb8, 0x85, 0xeb, 0x7d, 0x16, 0x09, 0xe8, 0x4b, /* Byte value: 0xa1 */ + 0x6d, 0x6b, 0x1b, 0x68, 0xd9, 0xb1, 0xaa, 0x3b, 0x9d, 0x79, 0x5f, 0x74, 0xc8, 0x81, 0x29, 0xf4, /* Byte value: 0xa2 */ + 0xf4, 0xda, 0xaf, 0xcd, 0x3c, 0xf1, 0x5e, 0xcc, 0xce, 0x10, 0x5d, 0x41, 0xe1, 0x42, 0x06, 0x0e, /* Byte value: 0xa3 */ + 0x26, 0x75, 0x47, 0xbd, 0x84, 0x39, 0x7e, 0xd1, 0x09, 0x0f, 0xca, 0x50, 0x36, 0xe4, 0xab, 0x4d, /* Byte value: 0xa4 */ + 0x71, 0x3d, 0xd5, 0x1b, 0xc4, 0xdf, 0x0a, 0x52, 0xc0, 0x1a, 0xd1, 0x21, 0xc5, 0xfa, 0x75, 0x78, /* Byte value: 0xa5 */ + 0x1c, 0x56, 0xce, 0x73, 0x1d, 0x6e, 0xa0, 0x69, 0x5d, 0x63, 0x8e, 0x55, 0x0d, 0x7b, 0x5c, 0x8c, /* Byte value: 0xa6 */ + 0x94, 0xb3, 0x6e, 0x41, 0x19, 0x58, 0x36, 0x7d, 0x05, 0x46, 0x72, 0xe7, 0x1e, 0xa3, 0x88, 0xab, /* Byte value: 0xa7 */ + 0x22, 0xa8, 0x28, 0xed, 0x01, 0xec, 0xc4, 0x05, 0x16, 0x98, 0x3f, 0xcd, 0x74, 0x36, 0x35, 0x79, /* Byte value: 0xa8 */ + 0xeb, 0xa5, 0xdc, 0x82, 0x32, 0xb0, 0x6c, 0xfa, 0x0a, 0x8c, 0xe4, 0x0d, 0x3c, 0x85, 0xd3, 0x95, /* Byte value: 0xa9 */ + 0xbc, 0xed, 0x4e, 0x24, 0x72, 0x56, 0x18, 0x79, 0xc3, 0x99, 0xff, 0x7c, 0xcf, 0x9b, 0x0d, 0xa0, /* Byte value: 0xaa */ + 0x3c, 0x71, 0x30, 0xb6, 0xbf, 0x09, 0x39, 0x06, 0xa5, 0x51, 0x2a, 0x37, 0x58, 0x24, 0x26, 0xef, /* Byte value: 0xab */ + 0x15, 0x89, 0x7b, 0xc7, 0x64, 0xa3, 0xd8, 0x37, 0x14, 0xdb, 0x0b, 0x1a, 0x78, 0xc9, 0x65, 0xe9, /* Byte value: 0xac */ + 0xe9, 0x2a, 0x0a, 0xaa, 0x91, 0x3b, 0x31, 0x90, 0xe4, 0x26, 0x7f, 0xa2, 0x1d, 0xec, 0x9c, 0x8f, /* Byte value: 0xad */ + 0x02, 0x8f, 0xd6, 0x28, 0xa3, 0x8b, 0x5d, 0x6a, 0xee, 0xaa, 0x9b, 0xaf, 0x21, 0x69, 0x4f, 0x1a, /* Byte value: 0xae */ + 0x46, 0x1c, 0x86, 0x31, 0xa1, 0x90, 0x16, 0x60, 0xc2, 0x59, 0xe5, 0xf6, 0xc9, 0x05, 0x25, 0xe8, /* Byte value: 0xaf */ + 0xda, 0xd6, 0x36, 0xd0, 0x71, 0xa1, 0x97, 0x76, 0xf9, 0xf2, 0xbe, 0xe8, 0x53, 0xc1, 0x52, 0x2b, /* Byte value: 0xb0 */ + 0x19, 0x2d, 0xca, 0x37, 0x28, 0x1f, 0xd5, 0x88, 0x35, 0xa1, 0xd7, 0x7e, 0xbe, 0x7c, 0x04, 0xb5, /* Byte value: 0xb1 */ + 0xdb, 0x70, 0x5d, 0xc4, 0xc1, 0x05, 0x58, 0x43, 0x8e, 0xa7, 0x12, 0x5e, 0xa2, 0x14, 0x94, 0x26, /* Byte value: 0xb2 */ + 0x1e, 0xd9, 0x18, 0x5b, 0xbe, 0xe5, 0xfd, 0x03, 0xb3, 0xc9, 0x15, 0xfa, 0x2c, 0x12, 0x13, 0x96, /* Byte value: 0xb3 */ + 0x17, 0x06, 0xad, 0xef, 0xc7, 0x28, 0x85, 0x5d, 0xfa, 0x71, 0x90, 0xb5, 0x59, 0xa0, 0x2a, 0xf3, /* Byte value: 0xb4 */ + 0x4a, 0xb8, 0x37, 0xc1, 0xed, 0x2c, 0x1b, 0xdf, 0xe3, 0x23, 0x39, 0x92, 0x0f, 0xb0, 0x44, 0xb4, /* Byte value: 0xb5 */ + 0x0a, 0xf6, 0x08, 0x88, 0x6a, 0xe2, 0xea, 0x01, 0xd0, 0x47, 0xb2, 0x56, 0xa5, 0x0e, 0xb0, 0x72, /* Byte value: 0xb6 */ + 0xd9, 0xff, 0x8b, 0xec, 0x62, 0x8e, 0x05, 0x29, 0x60, 0x0d, 0x89, 0xf1, 0x83, 0x7d, 0xdb, 0x3c, /* Byte value: 0xb7 */ + 0x89, 0x43, 0xcb, 0x26, 0xb4, 0x92, 0x59, 0x21, 0x2f, 0x70, 0x50, 0x04, 0xe2, 0x0d, 0x12, 0x2a, /* Byte value: 0xb8 */ + 0x78, 0xe2, 0x60, 0xaf, 0xbd, 0x12, 0x72, 0x0c, 0x89, 0xa2, 0x54, 0x6e, 0xb0, 0x48, 0x4c, 0x1d, /* Byte value: 0xb9 */ + 0x76, 0xc9, 0x07, 0x77, 0x52, 0x25, 0x22, 0xd9, 0x46, 0x72, 0x13, 0xa5, 0x57, 0x94, 0x62, 0x5b, /* Byte value: 0xba */ + 0x27, 0xd3, 0x2c, 0xa9, 0x34, 0x9d, 0xb1, 0xe4, 0x7e, 0x5a, 0x66, 0xe6, 0xc7, 0x31, 0x6d, 0x40, /* Byte value: 0xbb */ + 0x18, 0x8b, 0xa1, 0x23, 0x98, 0xbb, 0x1a, 0xbd, 0x42, 0xf4, 0x7b, 0xc8, 0x4f, 
0xa9, 0xc2, 0xb8, /* Byte value: 0xbc */ + 0x23, 0x0e, 0x43, 0xf9, 0xb1, 0x48, 0x0b, 0x30, 0x61, 0xcd, 0x93, 0x7b, 0x85, 0xe3, 0xf3, 0x74, /* Byte value: 0xbd */ + 0xf2, 0x88, 0x16, 0xb5, 0x1a, 0xaf, 0xb9, 0x72, 0x3f, 0x2d, 0x33, 0x73, 0x82, 0xf9, 0xd7, 0x20, /* Byte value: 0xbe */ + 0x95, 0x15, 0x05, 0x55, 0xa9, 0xfc, 0xf9, 0x48, 0x72, 0x13, 0xde, 0x51, 0xef, 0x76, 0x4e, 0xa6, /* Byte value: 0xbf */ + 0xd5, 0x5b, 0x3a, 0x1c, 0x2e, 0x32, 0x08, 0x96, 0x41, 0x77, 0x55, 0x95, 0x45, 0xc8, 0xba, 0x60, /* Byte value: 0xc0 */ + 0xa9, 0x64, 0x35, 0xe3, 0x16, 0xf5, 0xc0, 0x4e, 0xd7, 0x42, 0xf4, 0x66, 0xb7, 0x52, 0x68, 0x49, /* Byte value: 0xc1 */ + 0x84, 0x41, 0x11, 0xc2, 0x48, 0x8a, 0x9b, 0xab, 0x79, 0x5f, 0x20, 0xd6, 0xd5, 0x6d, 0xb5, 0x7b, /* Byte value: 0xc2 */ + 0xe1, 0x53, 0xd4, 0x0a, 0x58, 0x52, 0x86, 0xfb, 0xda, 0xcb, 0x56, 0x5b, 0x99, 0x8b, 0x63, 0xe7, /* Byte value: 0xc3 */ + 0x1d, 0xf0, 0xa5, 0x67, 0xad, 0xca, 0x6f, 0x5c, 0x2a, 0x36, 0x22, 0xe3, 0xfc, 0xae, 0x9a, 0x81, /* Byte value: 0xc4 */ + 0x93, 0x47, 0xbc, 0x2d, 0x8f, 0xa2, 0x1e, 0xf6, 0x83, 0x2e, 0xb0, 0x63, 0x8c, 0xcd, 0x9f, 0x88, /* Byte value: 0xc5 */ + 0x28, 0x5e, 0x20, 0x65, 0x6b, 0x0e, 0x2e, 0x04, 0xc6, 0xdf, 0x8d, 0x9b, 0xd1, 0x38, 0x85, 0x0b, /* Byte value: 0xc6 */ + 0xc6, 0x80, 0xf8, 0xa3, 0x6c, 0xcf, 0x37, 0x1f, 0xa4, 0x91, 0x30, 0xbd, 0x5e, 0xba, 0x0e, 0xa7, /* Byte value: 0xc7 */ + 0x7b, 0xcb, 0xdd, 0x93, 0xae, 0x3d, 0xe0, 0x53, 0x10, 0x5d, 0x63, 0x77, 0x60, 0xf4, 0xc5, 0x0a, /* Byte value: 0xc8 */ + 0x65, 0x12, 0xc5, 0xc8, 0x10, 0xd8, 0x1d, 0x50, 0xa3, 0x94, 0x76, 0x8d, 0x4c, 0xe6, 0xd6, 0x9c, /* Byte value: 0xc9 */ + 0x75, 0xe0, 0xba, 0x4b, 0x41, 0x0a, 0xb0, 0x86, 0xdf, 0x8d, 0x24, 0xbc, 0x87, 0x28, 0xeb, 0x4c, /* Byte value: 0xca */ + 0x7c, 0x3f, 0x0f, 0xff, 0x38, 0xc7, 0xc8, 0xd8, 0x96, 0x35, 0xa1, 0xf3, 0xf2, 0x9a, 0xd2, 0x29, /* Byte value: 0xcb */ + 0xe3, 0xdc, 0x02, 0x22, 0xfb, 0xd9, 0xdb, 0x91, 0x34, 0x61, 0xcd, 0xf4, 0xb8, 0xe2, 0x2c, 0xfd, /* Byte value: 0xcc */ + 0xee, 0xde, 0xd8, 0xc6, 0x07, 0xc1, 0x19, 0x1b, 0x62, 0x4e, 0xbd, 0x26, 0x8f, 0x82, 0x8b, 0xac, /* Byte value: 0xcd */ + 0x21, 0x81, 0x95, 0xd1, 0x12, 0xc3, 0x56, 0x5a, 0x8f, 0x67, 0x08, 0xd4, 0xa4, 0x8a, 0xbc, 0x6e, /* Byte value: 0xce */ + 0x8e, 0xb7, 0x19, 0x4a, 0x22, 0x68, 0x71, 0xaa, 0xa9, 0x18, 0x92, 0x80, 0x70, 0x63, 0x05, 0x09, /* Byte value: 0xcf */ + 0xf9, 0xd8, 0x75, 0x29, 0xc0, 0xe9, 0x9c, 0x46, 0x98, 0x3f, 0x2d, 0x93, 0xd6, 0x22, 0xa1, 0x5f, /* Byte value: 0xd0 */ + 0x01, 0xa6, 0x6b, 0x14, 0xb0, 0xa4, 0xcf, 0x35, 0x77, 0x55, 0xac, 0xb6, 0xf1, 0xd5, 0xc6, 0x0d, /* Byte value: 0xd1 */ + 0x5b, 0xec, 0x23, 0x56, 0x0c, 0x5a, 0x79, 0x3c, 0xe8, 0x6f, 0xc7, 0x15, 0x35, 0xab, 0xbf, 0x69, /* Byte value: 0xd2 */ + 0x91, 0xc8, 0x6a, 0x05, 0x2c, 0x29, 0x43, 0x9c, 0x6d, 0x84, 0x2b, 0xcc, 0xad, 0xa4, 0xd0, 0x92, /* Byte value: 0xd3 */ + 0x14, 0x2f, 0x10, 0xd3, 0xd4, 0x07, 0x17, 0x02, 0x63, 0x8e, 0xa7, 0xac, 0x89, 0x1c, 0xa3, 0xe4, /* Byte value: 0xd4 */ + 0x7a, 0x6d, 0xb6, 0x87, 0x1e, 0x99, 0x2f, 0x66, 0x67, 0x08, 0xcf, 0xc1, 0x91, 0x21, 0x03, 0x07, /* Byte value: 0xd5 */ + 0xf6, 0x55, 0x79, 0xe5, 0x9f, 0x7a, 0x03, 0xa6, 0x20, 0xba, 0xc6, 0xee, 0xc0, 0x2b, 0x49, 0x14, /* Byte value: 0xd6 */ + 0xb1, 0xef, 0x94, 0xc0, 0x8e, 0x4e, 0xda, 0xf3, 0x95, 0xb6, 0x8f, 0xae, 0xf8, 0xfb, 0xaa, 0xf1, /* Byte value: 0xd7 */ + 0xb7, 0xbd, 0x2d, 0xb8, 0xa8, 0x10, 0x3d, 0x4d, 0x64, 0x8b, 0xe1, 0x9c, 0x9b, 0x40, 0x7b, 0xdf, /* Byte value: 0xd8 */ + 0xca, 0x24, 0x49, 0x53, 0x20, 0x73, 0x3a, 0xa0, 0x85, 0xeb, 0xec, 0xd9, 0x98, 0x0f, 0x6f, 0xfb, /* Byte value: 0xd9 */ + 
0x5a, 0x4a, 0x48, 0x42, 0xbc, 0xfe, 0xb6, 0x09, 0x9f, 0x3a, 0x6b, 0xa3, 0xc4, 0x7e, 0x79, 0x64, /* Byte value: 0xda */ + 0xf8, 0x7e, 0x1e, 0x3d, 0x70, 0x4d, 0x53, 0x73, 0xef, 0x6a, 0x81, 0x25, 0x27, 0xf7, 0x67, 0x52, /* Byte value: 0xdb */ + 0xcc, 0x76, 0xf0, 0x2b, 0x06, 0x2d, 0xdd, 0x1e, 0x74, 0xd6, 0x82, 0xeb, 0xfb, 0xb4, 0xbe, 0xd5, /* Byte value: 0xdc */ + 0x47, 0xba, 0xed, 0x25, 0x11, 0x34, 0xd9, 0x55, 0xb5, 0x0c, 0x49, 0x40, 0x38, 0xd0, 0xe3, 0xe5, /* Byte value: 0xdd */ + 0x96, 0x3c, 0xb8, 0x69, 0xba, 0xd3, 0x6b, 0x17, 0xeb, 0xec, 0xe9, 0x48, 0x3f, 0xca, 0xc7, 0xb1, /* Byte value: 0xde */ + 0x6e, 0x42, 0xa6, 0x54, 0xca, 0x9e, 0x38, 0x64, 0x04, 0x86, 0x68, 0x6d, 0x18, 0x3d, 0xa0, 0xe3, /* Byte value: 0xdf */ + 0x13, 0xdb, 0xc2, 0xbf, 0x42, 0xfd, 0x3f, 0x89, 0xe5, 0xe6, 0x65, 0x28, 0x1b, 0x72, 0xb4, 0xc7, /* Byte value: 0xe0 */ + 0x24, 0xfa, 0x91, 0x95, 0x27, 0xb2, 0x23, 0xbb, 0xe7, 0xa5, 0x51, 0xff, 0x17, 0x8d, 0xe4, 0x57, /* Byte value: 0xe1 */ + 0x74, 0x46, 0xd1, 0x5f, 0xf1, 0xae, 0x7f, 0xb3, 0xa8, 0xd8, 0x88, 0x0a, 0x76, 0xfd, 0x2d, 0x41, /* Byte value: 0xe2 */ + 0x09, 0xdf, 0xb5, 0xb4, 0x79, 0xcd, 0x78, 0x5e, 0x49, 0xb8, 0x85, 0x4f, 0x75, 0xb2, 0x39, 0x65, /* Byte value: 0xe3 */ + 0x03, 0x29, 0xbd, 0x3c, 0x13, 0x2f, 0x92, 0x5f, 0x99, 0xff, 0x37, 0x19, 0xd0, 0xbc, 0x89, 0x17, /* Byte value: 0xe4 */ + 0x30, 0xd5, 0x81, 0x46, 0xf3, 0xb5, 0x34, 0xb9, 0x84, 0x2b, 0xf6, 0x53, 0x9e, 0x91, 0x47, 0xb3, /* Byte value: 0xe5 */ + 0x50, 0xbc, 0x40, 0xca, 0xd6, 0x1c, 0x5c, 0x08, 0x4f, 0x7d, 0xd9, 0xf5, 0x61, 0x70, 0xc9, 0x16, /* Byte value: 0xe6 */ + 0x4f, 0xc3, 0x33, 0x85, 0xd8, 0x5d, 0x6e, 0x3e, 0x8b, 0xe1, 0x60, 0xb9, 0xbc, 0xb7, 0x1c, 0x8d, /* Byte value: 0xe7 */ + 0x0e, 0x2b, 0x67, 0xd8, 0xef, 0x37, 0x50, 0xd5, 0xcf, 0xd0, 0x47, 0xcb, 0xe7, 0xdc, 0x2e, 0x46, /* Byte value: 0xe8 */ + 0x7d, 0x99, 0x64, 0xeb, 0x88, 0x63, 0x07, 0xed, 0xe1, 0x60, 0x0d, 0x45, 0x03, 0x4f, 0x14, 0x24, /* Byte value: 0xe9 */ + 0xb9, 0x96, 0x4a, 0x60, 0x47, 0x27, 0x6d, 0x98, 0xab, 0x5b, 0xa6, 0x57, 0x7c, 0x9c, 0x55, 0x99, /* Byte value: 0xea */ + 0x37, 0x21, 0x53, 0x2a, 0x65, 0x4f, 0x1c, 0x32, 0x02, 0x43, 0x34, 0xd7, 0x0c, 0xff, 0x50, 0x90, /* Byte value: 0xeb */ + 0x54, 0x61, 0x2f, 0x9a, 0x53, 0xc9, 0xe6, 0xdc, 0x50, 0xea, 0x2c, 0x68, 0x23, 0xa2, 0x57, 0x22, /* Byte value: 0xec */ + 0x1f, 0x7f, 0x73, 0x4f, 0x0e, 0x41, 0x32, 0x36, 0xc4, 0x9c, 0xb9, 0x4c, 0xdd, 0xc7, 0xd5, 0x9b, /* Byte value: 0xed */ + 0xc2, 0x5d, 0x97, 0xf3, 0xe9, 0x1a, 0x8d, 0xcb, 0xbb, 0x06, 0xc5, 0x20, 0x1c, 0x68, 0x90, 0x93, /* Byte value: 0xee */ + 0x33, 0xfc, 0x3c, 0x7a, 0xe0, 0x9a, 0xa6, 0xe6, 0x1d, 0xd4, 0xc1, 0x4a, 0x4e, 0x2d, 0xce, 0xa4, /* Byte value: 0xef */ + 0x3a, 0x23, 0x89, 0xce, 0x99, 0x57, 0xde, 0xb8, 0x54, 0x6c, 0x44, 0x05, 0x3b, 0x9f, 0xf7, 0xc1, /* Byte value: 0xf0 */ + 0xe5, 0x8e, 0xbb, 0x5a, 0xdd, 0x87, 0x3c, 0x2f, 0xc5, 0x5c, 0xa3, 0xc6, 0xdb, 0x59, 0xfd, 0xd3, /* Byte value: 0xf1 */ + 0xe8, 0x8c, 0x61, 0xbe, 0x21, 0x9f, 0xfe, 0xa5, 0x93, 0x73, 0xd3, 0x14, 0xec, 0x39, 0x5a, 0x82, /* Byte value: 0xf2 */ + 0x12, 0x7d, 0xa9, 0xab, 0xf2, 0x59, 0xf0, 0xbc, 0x92, 0xb3, 0xc9, 0x9e, 0xea, 0xa7, 0x72, 0xca, /* Byte value: 0xf3 */ + 0x92, 0xe1, 0xd7, 0x39, 0x3f, 0x06, 0xd1, 0xc3, 0xf4, 0x7b, 0x1c, 0xd5, 0x7d, 0x18, 0x59, 0x85, /* Byte value: 0xf4 */ + 0xac, 0x1f, 0x31, 0xa7, 0x23, 0x84, 0xb5, 0xaf, 0xbf, 0x80, 0xad, 0x4d, 0x04, 0x55, 0x30, 0x70, /* Byte value: 0xf5 */ + 0x34, 0x08, 0xee, 0x16, 0x76, 0x60, 0x8e, 0x6d, 0x9b, 0xbc, 0x03, 0xce, 0xdc, 0x43, 0xd9, 0x87, /* Byte value: 0xf6 */ + 0x52, 0x33, 0x96, 0xe2, 0x75, 0x97, 0x01, 
0x62, 0xa1, 0xd7, 0x42, 0x5a, 0x40, 0x19, 0x86, 0x0c, /* Byte value: 0xf7 */ + 0xed, 0xf7, 0x65, 0xfa, 0x14, 0xee, 0x8b, 0x44, 0xfb, 0xb1, 0x8a, 0x3f, 0x5f, 0x3e, 0x02, 0xbb, /* Byte value: 0xf8 */ + 0xfa, 0xf1, 0xc8, 0x15, 0xd3, 0xc6, 0x0e, 0x19, 0x01, 0xc0, 0x1a, 0x8a, 0x06, 0x9e, 0x28, 0x48, /* Byte value: 0xf9 */ + 0x98, 0x17, 0xdf, 0xb1, 0x55, 0xe4, 0x3b, 0xc2, 0x24, 0x3c, 0xae, 0x83, 0xd8, 0x16, 0xe9, 0xf7, /* Byte value: 0xfa */ + 0xdf, 0xad, 0x32, 0x94, 0x44, 0xd0, 0xe2, 0x97, 0x91, 0x30, 0xe7, 0xc3, 0xe0, 0xc6, 0x0a, 0x12, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0x66, 0x3b, 0x78, 0xf4, 0x03, 0xf7, 0x8f, 0x0f, 0x3a, 0x6b, 0x41, 0x94, 0x9c, 0x5a, 0x5f, 0x8b, /* Byte value: 0xfd */ + 0x2f, 0xaa, 0xf2, 0x09, 0xfd, 0xf4, 0x06, 0x8f, 0x40, 0xb7, 0x4f, 0x1f, 0x43, 0x56, 0x92, 0x28, /* Byte value: 0xfe */ + 0x3e, 0xfe, 0xe6, 0x9e, 0x1c, 0x82, 0x64, 0x6c, 0x4b, 0xfb, 0xb1, 0x98, 0x79, 0x4d, 0x69, 0xf5, /* Byte value: 0xff */ + + /* Matrix row: 12 */ + 0xc2, 0x45, 0xb9, 0x57, 0x85, 0xa8, 0xdb, 0x13, 0x2a, 0x3f, 0xb0, 0xb9, 0xb0, 0x0b, 0x7b, 0xc6, /* Byte value: 0x00 */ + 0x95, 0x59, 0xc2, 0x20, 0x7c, 0x53, 0x81, 0x21, 0x64, 0x56, 0x1f, 0xc2, 0x1f, 0x6d, 0x57, 0x36, /* Byte value: 0x01 */ + 0xa6, 0x73, 0x93, 0x63, 0x29, 0x01, 0x42, 0x6e, 0x91, 0x38, 0x9b, 0x93, 0x9b, 0xf4, 0x35, 0x9d, /* Byte value: 0x02 */ + 0x27, 0x78, 0x48, 0x3d, 0x69, 0x20, 0x97, 0x5e, 0x08, 0x0c, 0x8b, 0x48, 0x8b, 0xf5, 0x6f, 0x4b, /* Byte value: 0x03 */ + 0xe0, 0xc8, 0x66, 0x94, 0xe3, 0x75, 0x59, 0xd8, 0x8c, 0xca, 0x48, 0x66, 0x48, 0xe5, 0x86, 0xb5, /* Byte value: 0x04 */ + 0x86, 0x9c, 0xc4, 0x12, 0x49, 0xd1, 0x1c, 0x46, 0xa5, 0x16, 0x83, 0xc4, 0x83, 0x14, 0x42, 0x20, /* Byte value: 0x05 */ + 0x06, 0xa6, 0x5b, 0x15, 0x0a, 0x17, 0xa7, 0xe6, 0x75, 0xae, 0xe3, 0x5b, 0xe3, 0x12, 0x5d, 0x91, /* Byte value: 0x06 */ + 0x5d, 0x35, 0x96, 0x48, 0xe7, 0xc2, 0x70, 0xdb, 0xd1, 0x58, 0x49, 0x96, 0x49, 0x50, 0xcb, 0x80, /* Byte value: 0x07 */ + 0x09, 0x7a, 0x21, 0xd4, 0x1b, 0xd3, 0x98, 0x9a, 0x44, 0x66, 0x76, 0x21, 0x76, 0x3f, 0x28, 0xd9, /* Byte value: 0x08 */ + 0x78, 0x2f, 0x56, 0xc7, 0x88, 0xef, 0x3b, 0x66, 0x4b, 0x8f, 0x22, 0x56, 0x22, 0xab, 0x2e, 0x05, /* Byte value: 0x09 */ + 0xf4, 0x9a, 0x7f, 0xea, 0xdf, 0x07, 0x0d, 0xc9, 0x71, 0xa8, 0x47, 0x7f, 0x47, 0x89, 0x8b, 0x55, /* Byte value: 0x0a */ + 0x16, 0x30, 0x91, 0xcc, 0x3a, 0x7f, 0x88, 0xf2, 0x6f, 0xb9, 0xef, 0x91, 0xef, 0x62, 0x87, 0x2e, /* Byte value: 0x0b */ + 0x19, 0xec, 0xeb, 0x0d, 0x2b, 0xbb, 0xb7, 0x8e, 0x5e, 0x71, 0x7a, 0xeb, 0x7a, 0x4f, 0xf2, 0x66, /* Byte value: 0x0c */ + 0xec, 0x47, 0xd0, 0xbe, 0xf7, 0x5b, 0xd4, 0xd7, 0x66, 0x55, 0x4d, 0xd0, 0x4d, 0xc1, 0x3c, 0x54, /* Byte value: 0x0d */ + 0x69, 0x88, 0xd8, 0x47, 0xbb, 0x60, 0x7a, 0xe2, 0x18, 0x14, 0x5e, 0xd8, 0x5e, 0xdc, 0xb1, 0xdd, /* Byte value: 0x0e */ + 0x04, 0xc4, 0xd3, 0xa7, 0x0c, 0x1a, 0x7b, 0x05, 0xe7, 0x75, 0x03, 0xd3, 0x03, 0x1c, 0xd7, 0x5f, /* Byte value: 0x0f */ + 0x8f, 0xe6, 0xe5, 0xc6, 0x52, 0x02, 0x84, 0xdc, 0xe1, 0x70, 0xf5, 0xe5, 0xf5, 0x2b, 0x6a, 0xf9, /* Byte value: 0x10 */ + 0x30, 0x79, 0x9d, 0xa8, 0x50, 0xb8, 0x71, 0x3c, 0x2e, 0x39, 0x14, 0x9d, 0x14, 0x90, 0xad, 0x02, /* Byte value: 0x11 */ + 0x39, 0x03, 0xbc, 0x7c, 0x4b, 0x6b, 0xe9, 0xa6, 0x6a, 0x5f, 0x62, 0xbc, 0x62, 0xaf, 0x85, 0xdb, /* Byte value: 0x12 */ + 0xe9, 0xb2, 0x47, 0x40, 0xf8, 0xa6, 0xc1, 0x42, 0xc8, 0xac, 0x3e, 0x47, 0x3e, 0xda, 0xae, 0x6c, /* Byte value: 0x13 */ + 0x03, 0x53, 0xcc, 0xeb, 0x05, 0xea, 0xb2, 0x73, 0xdb, 0x57, 
0x90, 0xcc, 0x90, 0x09, 0xcf, 0xa9, /* Byte value: 0x14 */ + 0xcf, 0xfb, 0x4b, 0x24, 0x92, 0x61, 0x38, 0x8c, 0x89, 0x2c, 0xc5, 0x4b, 0xc5, 0x28, 0x84, 0x40, /* Byte value: 0x15 */ + 0x70, 0x64, 0x33, 0x4a, 0x90, 0xdb, 0xcd, 0x6c, 0x46, 0x65, 0x24, 0x33, 0x24, 0x93, 0x43, 0xbb, /* Byte value: 0x16 */ + 0x43, 0x4e, 0x62, 0x09, 0xc5, 0x89, 0x0e, 0x23, 0xb3, 0x0b, 0xa0, 0x62, 0xa0, 0x0a, 0x21, 0x10, /* Byte value: 0x17 */ + 0x55, 0x7e, 0xf3, 0xc5, 0xff, 0xf6, 0x86, 0xd1, 0xdc, 0xb2, 0x4f, 0xf3, 0x4f, 0x68, 0xa6, 0x3e, /* Byte value: 0x18 */ + 0x2c, 0x60, 0xe1, 0x5b, 0x74, 0xfe, 0xd3, 0x27, 0xde, 0xb1, 0x1d, 0xe1, 0x1d, 0xc4, 0xcd, 0x5c, /* Byte value: 0x19 */ + 0x12, 0xf4, 0x42, 0x6b, 0x36, 0x65, 0xf3, 0xf7, 0x88, 0xcc, 0xec, 0x42, 0xec, 0x7e, 0x50, 0x71, /* Byte value: 0x1a */ + 0x8e, 0xd7, 0xa1, 0x9f, 0x51, 0xe5, 0xea, 0x4c, 0xa8, 0xfc, 0x85, 0xa1, 0x85, 0x2c, 0x2f, 0x9e, /* Byte value: 0x1b */ + 0xd5, 0x44, 0x6c, 0xc2, 0xbc, 0x30, 0x3d, 0x71, 0x0c, 0x0a, 0x2f, 0x6c, 0x2f, 0x6e, 0xb9, 0x8f, /* Byte value: 0x1c */ + 0x3d, 0xc7, 0x6f, 0xdb, 0x47, 0x71, 0x92, 0xa3, 0x8d, 0x2a, 0x61, 0x6f, 0x61, 0xb3, 0x52, 0x84, /* Byte value: 0x1d */ + 0xd2, 0xd3, 0x73, 0x8e, 0xb5, 0xc0, 0xf4, 0x07, 0x30, 0x28, 0xbc, 0x73, 0xbc, 0x7b, 0xa1, 0x79, /* Byte value: 0x1e */ + 0x76, 0xc2, 0x68, 0x5f, 0x9a, 0xcc, 0x6a, 0x8a, 0x33, 0xcb, 0xc7, 0x68, 0xc7, 0x81, 0x1e, 0x2a, /* Byte value: 0x1f */ + 0x18, 0xdd, 0xaf, 0x54, 0x28, 0x5c, 0xd9, 0x1e, 0x17, 0xfd, 0x0a, 0xaf, 0x0a, 0x48, 0xb7, 0x01, /* Byte value: 0x20 */ + 0x35, 0x8c, 0x0a, 0x56, 0x5f, 0x45, 0x64, 0xa9, 0x80, 0xc0, 0x67, 0x0a, 0x67, 0x8b, 0x3f, 0x3a, /* Byte value: 0x21 */ + 0x07, 0x97, 0x1f, 0x4c, 0x09, 0xf0, 0xc9, 0x76, 0x3c, 0x22, 0x93, 0x1f, 0x93, 0x15, 0x18, 0xf6, /* Byte value: 0x22 */ + 0xc0, 0x27, 0x31, 0xe5, 0x83, 0xa5, 0x07, 0xf0, 0xb8, 0xe4, 0x50, 0x31, 0x50, 0x05, 0xf1, 0x08, /* Byte value: 0x23 */ + 0xf7, 0xc9, 0xb3, 0x01, 0xda, 0xed, 0xbf, 0xba, 0xaa, 0xff, 0xd7, 0xb3, 0xd7, 0x80, 0x44, 0xfc, /* Byte value: 0x24 */ + 0xb8, 0x08, 0x67, 0x22, 0x0b, 0x4a, 0x3c, 0x96, 0xf3, 0x6b, 0x72, 0x67, 0x72, 0xae, 0xdf, 0x0d, /* Byte value: 0x25 */ + 0x72, 0x06, 0xbb, 0xf8, 0x96, 0xd6, 0x11, 0x8f, 0xd4, 0xbe, 0xc4, 0xbb, 0xc4, 0x9d, 0xc9, 0x75, /* Byte value: 0x26 */ + 0x11, 0xa7, 0x8e, 0x80, 0x33, 0x8f, 0x41, 0x84, 0x53, 0x9b, 0x7c, 0x8e, 0x7c, 0x77, 0x9f, 0xd8, /* Byte value: 0x27 */ + 0xb4, 0x87, 0xd1, 0x08, 0x1f, 0x64, 0xb1, 0x99, 0x19, 0xf4, 0x77, 0xd1, 0x77, 0x8a, 0x65, 0xec, /* Byte value: 0x28 */ + 0xff, 0x82, 0xd6, 0x8c, 0xc2, 0xd9, 0x49, 0xb0, 0xa7, 0x15, 0xd1, 0xd6, 0xd1, 0xb8, 0x29, 0x42, /* Byte value: 0x29 */ + 0x1a, 0xbf, 0x27, 0xe6, 0x2e, 0x51, 0x05, 0xfd, 0x85, 0x26, 0xea, 0x27, 0xea, 0x46, 0x3d, 0xcf, /* Byte value: 0x2a */ + 0x48, 0x56, 0xcb, 0x6f, 0xd8, 0x57, 0x4a, 0x5a, 0x65, 0xb6, 0x36, 0xcb, 0x36, 0x3b, 0x83, 0x07, /* Byte value: 0x2b */ + 0xaa, 0xfc, 0x25, 0x49, 0x3d, 0x2f, 0xcf, 0x61, 0x7b, 0xa7, 0x9e, 0x25, 0x9e, 0xd0, 0x8f, 0x7c, /* Byte value: 0x2c */ + 0x58, 0xc0, 0x01, 0xb6, 0xe8, 0x3f, 0x65, 0x4e, 0x7f, 0xa1, 0x3a, 0x01, 0x3a, 0x4b, 0x59, 0xb8, /* Byte value: 0x2d */ + 0x93, 0xff, 0x99, 0x35, 0x76, 0x44, 0x26, 0xc7, 0x11, 0xf8, 0xfc, 0x99, 0xfc, 0x7f, 0x0a, 0xa7, /* Byte value: 0x2e */ + 0x21, 0xde, 0x13, 0x28, 0x63, 0x37, 0x30, 0xb8, 0x7d, 0xa2, 0x68, 0x13, 0x68, 0xe7, 0x32, 0xda, /* Byte value: 0x2f */ + 0xdd, 0x0f, 0x09, 0x4f, 0xa4, 0x04, 0xcb, 0x7b, 0x01, 0xe0, 0x29, 0x09, 0x29, 0x56, 0xd4, 0x31, /* Byte value: 0x30 */ + 0x60, 0xf2, 0xf9, 0x93, 0xa0, 0xb3, 0xe2, 0x78, 0x5c, 0x72, 0x28, 0xf9, 0x28, 0xe3, 0x99, 0x04, /* Byte 
value: 0x31 */ + 0x0c, 0x8f, 0xb6, 0x2a, 0x14, 0x2e, 0x8d, 0x0f, 0xea, 0x9f, 0x05, 0xb6, 0x05, 0x24, 0xba, 0xe1, /* Byte value: 0x32 */ + 0xc4, 0xe3, 0xe2, 0x42, 0x8f, 0xbf, 0x7c, 0xf5, 0x5f, 0x91, 0x53, 0xe2, 0x53, 0x19, 0x26, 0x57, /* Byte value: 0x33 */ + 0x9a, 0x85, 0xb8, 0xe1, 0x6d, 0x97, 0xbe, 0x5d, 0x55, 0x9e, 0x8a, 0xb8, 0x8a, 0x40, 0x22, 0x7e, /* Byte value: 0x34 */ + 0x5c, 0x04, 0xd2, 0x11, 0xe4, 0x25, 0x1e, 0x4b, 0x98, 0xd4, 0x39, 0xd2, 0x39, 0x57, 0x8e, 0xe7, /* Byte value: 0x35 */ + 0x53, 0xd8, 0xa8, 0xd0, 0xf5, 0xe1, 0x21, 0x37, 0xa9, 0x1c, 0xac, 0xa8, 0xac, 0x7a, 0xfb, 0xaf, /* Byte value: 0x36 */ + 0xcd, 0x99, 0xc3, 0x96, 0x94, 0x6c, 0xe4, 0x6f, 0x1b, 0xf7, 0x25, 0xc3, 0x25, 0x26, 0x0e, 0x8e, /* Byte value: 0x37 */ + 0x32, 0x1b, 0x15, 0x1a, 0x56, 0xb5, 0xad, 0xdf, 0xbc, 0xe2, 0xf4, 0x15, 0xf4, 0x9e, 0x27, 0xcc, /* Byte value: 0x38 */ + 0x1b, 0x8e, 0x63, 0xbf, 0x2d, 0xb6, 0x6b, 0x6d, 0xcc, 0xaa, 0x9a, 0x63, 0x9a, 0x41, 0x78, 0xa8, /* Byte value: 0x39 */ + 0x5a, 0xa2, 0x89, 0x04, 0xee, 0x32, 0xb9, 0xad, 0xed, 0x7a, 0xda, 0x89, 0xda, 0x45, 0xd3, 0x76, /* Byte value: 0x3a */ + 0xa1, 0xe4, 0x8c, 0x2f, 0x20, 0xf1, 0x8b, 0x18, 0xad, 0x1a, 0x08, 0x8c, 0x08, 0xe1, 0x2d, 0x6b, /* Byte value: 0x3b */ + 0xe5, 0x3d, 0xf1, 0x6a, 0xec, 0x88, 0x4c, 0x4d, 0x22, 0x33, 0x3b, 0xf1, 0x3b, 0xfe, 0x14, 0x8d, /* Byte value: 0x3c */ + 0x3c, 0xf6, 0x2b, 0x82, 0x44, 0x96, 0xfc, 0x33, 0xc4, 0xa6, 0x11, 0x2b, 0x11, 0xb4, 0x17, 0xe3, /* Byte value: 0x3d */ + 0x97, 0x3b, 0x4a, 0x92, 0x7a, 0x5e, 0x5d, 0xc2, 0xf6, 0x8d, 0xff, 0x4a, 0xff, 0x63, 0xdd, 0xf8, /* Byte value: 0x3e */ + 0xce, 0xca, 0x0f, 0x7d, 0x91, 0x86, 0x56, 0x1c, 0xc0, 0xa0, 0xb5, 0x0f, 0xb5, 0x2f, 0xc1, 0x27, /* Byte value: 0x3f */ + 0x6d, 0x4c, 0x0b, 0xe0, 0xb7, 0x7a, 0x01, 0xe7, 0xff, 0x61, 0x5d, 0x0b, 0x5d, 0xc0, 0x66, 0x82, /* Byte value: 0x40 */ + 0x62, 0x90, 0x71, 0x21, 0xa6, 0xbe, 0x3e, 0x9b, 0xce, 0xa9, 0xc8, 0x71, 0xc8, 0xed, 0x13, 0xca, /* Byte value: 0x41 */ + 0xf5, 0xab, 0x3b, 0xb3, 0xdc, 0xe0, 0x63, 0x59, 0x38, 0x24, 0x37, 0x3b, 0x37, 0x8e, 0xce, 0x32, /* Byte value: 0x42 */ + 0xb7, 0xd4, 0x1d, 0xe3, 0x1a, 0x8e, 0x03, 0xea, 0xc2, 0xa3, 0xe7, 0x1d, 0xe7, 0x83, 0xaa, 0x45, /* Byte value: 0x43 */ + 0xe2, 0xaa, 0xee, 0x26, 0xe5, 0x78, 0x85, 0x3b, 0x1e, 0x11, 0xa8, 0xee, 0xa8, 0xeb, 0x0c, 0x7b, /* Byte value: 0x44 */ + 0x41, 0x2c, 0xea, 0xbb, 0xc3, 0x84, 0xd2, 0xc0, 0x21, 0xd0, 0x40, 0xea, 0x40, 0x04, 0xab, 0xde, /* Byte value: 0x45 */ + 0x71, 0x55, 0x77, 0x13, 0x93, 0x3c, 0xa3, 0xfc, 0x0f, 0xe9, 0x54, 0x77, 0x54, 0x94, 0x06, 0xdc, /* Byte value: 0x46 */ + 0xf9, 0x24, 0x8d, 0x99, 0xc8, 0xce, 0xee, 0x56, 0xd2, 0xbb, 0x32, 0x8d, 0x32, 0xaa, 0x74, 0xd3, /* Byte value: 0x47 */ + 0xea, 0xe1, 0x8b, 0xab, 0xfd, 0x4c, 0x73, 0x31, 0x13, 0xfb, 0xae, 0x8b, 0xae, 0xd3, 0x61, 0xc5, /* Byte value: 0x48 */ + 0xae, 0x38, 0xf6, 0xee, 0x31, 0x35, 0xb4, 0x64, 0x9c, 0xd2, 0x9d, 0xf6, 0x9d, 0xcc, 0x58, 0x23, /* Byte value: 0x49 */ + 0xe4, 0x0c, 0xb5, 0x33, 0xef, 0x6f, 0x22, 0xdd, 0x6b, 0xbf, 0x4b, 0xb5, 0x4b, 0xf9, 0x51, 0xea, /* Byte value: 0x4a */ + 0x0b, 0x18, 0xa9, 0x66, 0x1d, 0xde, 0x44, 0x79, 0xd6, 0xbd, 0x96, 0xa9, 0x96, 0x31, 0xa2, 0x17, /* Byte value: 0x4b */ + 0xde, 0x5c, 0xc5, 0xa4, 0xa1, 0xee, 0x79, 0x08, 0xda, 0xb7, 0xb9, 0xc5, 0xb9, 0x5f, 0x1b, 0x98, /* Byte value: 0x4c */ + 0xf0, 0x5e, 0xac, 0x4d, 0xd3, 0x1d, 0x76, 0xcc, 0x96, 0xdd, 0x44, 0xac, 0x44, 0x95, 0x5c, 0x0a, /* Byte value: 0x4d */ + 0xd4, 0x75, 0x28, 0x9b, 0xbf, 0xd7, 0x53, 0xe1, 0x45, 0x86, 0x5f, 0x28, 0x5f, 0x69, 0xfc, 0xe8, /* Byte value: 0x4e */ + 0xb5, 0xb6, 0x95, 0x51, 
0x1c, 0x83, 0xdf, 0x09, 0x50, 0x78, 0x07, 0x95, 0x07, 0x8d, 0x20, 0x8b, /* Byte value: 0x4f */ + 0xa4, 0x11, 0x1b, 0xd1, 0x2f, 0x0c, 0x9e, 0x8d, 0x03, 0xe3, 0x7b, 0x1b, 0x7b, 0xfa, 0xbf, 0x53, /* Byte value: 0x50 */ + 0xba, 0x6a, 0xef, 0x90, 0x0d, 0x47, 0xe0, 0x75, 0x61, 0xb0, 0x92, 0xef, 0x92, 0xa0, 0x55, 0xc3, /* Byte value: 0x51 */ + 0xe8, 0x83, 0x03, 0x19, 0xfb, 0x41, 0xaf, 0xd2, 0x81, 0x20, 0x4e, 0x03, 0x4e, 0xdd, 0xeb, 0x0b, /* Byte value: 0x52 */ + 0x0e, 0xed, 0x3e, 0x98, 0x12, 0x23, 0x51, 0xec, 0x78, 0x44, 0xe5, 0x3e, 0xe5, 0x2a, 0x30, 0x2f, /* Byte value: 0x53 */ + 0x9c, 0x23, 0xe3, 0xf4, 0x67, 0x80, 0x19, 0xbb, 0x20, 0x30, 0x69, 0xe3, 0x69, 0x52, 0x7f, 0xef, /* Byte value: 0x54 */ + 0x31, 0x48, 0xd9, 0xf1, 0x53, 0x5f, 0x1f, 0xac, 0x67, 0xb5, 0x64, 0xd9, 0x64, 0x97, 0xe8, 0x65, /* Byte value: 0x55 */ + 0x79, 0x1e, 0x12, 0x9e, 0x8b, 0x08, 0x55, 0xf6, 0x02, 0x03, 0x52, 0x12, 0x52, 0xac, 0x6b, 0x62, /* Byte value: 0x56 */ + 0xfc, 0xd1, 0x1a, 0x67, 0xc7, 0x33, 0xfb, 0xc3, 0x7c, 0x42, 0x41, 0x1a, 0x41, 0xb1, 0xe6, 0xeb, /* Byte value: 0x57 */ + 0x4d, 0xa3, 0x5c, 0x91, 0xd7, 0xaa, 0x5f, 0xcf, 0xcb, 0x4f, 0x45, 0x5c, 0x45, 0x20, 0x11, 0x3f, /* Byte value: 0x58 */ + 0xdb, 0xa9, 0x52, 0x5a, 0xae, 0x13, 0x6c, 0x9d, 0x74, 0x4e, 0xca, 0x52, 0xca, 0x44, 0x89, 0xa0, /* Byte value: 0x59 */ + 0x75, 0x91, 0xa4, 0xb4, 0x9f, 0x26, 0xd8, 0xf9, 0xe8, 0x9c, 0x57, 0xa4, 0x57, 0x88, 0xd1, 0x83, /* Byte value: 0x5a */ + 0x68, 0xb9, 0x9c, 0x1e, 0xb8, 0x87, 0x14, 0x72, 0x51, 0x98, 0x2e, 0x9c, 0x2e, 0xdb, 0xf4, 0xba, /* Byte value: 0x5b */ + 0x15, 0x63, 0x5d, 0x27, 0x3f, 0x95, 0x3a, 0x81, 0xb4, 0xee, 0x7f, 0x5d, 0x7f, 0x6b, 0x48, 0x87, /* Byte value: 0x5c */ + 0xa9, 0xaf, 0xe9, 0xa2, 0x38, 0xc5, 0x7d, 0x12, 0xa0, 0xf0, 0x0e, 0xe9, 0x0e, 0xd9, 0x40, 0xd5, /* Byte value: 0x5d */ + 0x01, 0x31, 0x44, 0x59, 0x03, 0xe7, 0x6e, 0x90, 0x49, 0x8c, 0x70, 0x44, 0x70, 0x07, 0x45, 0x67, /* Byte value: 0x5e */ + 0x23, 0xbc, 0x9b, 0x9a, 0x65, 0x3a, 0xec, 0x5b, 0xef, 0x79, 0x88, 0x9b, 0x88, 0xe9, 0xb8, 0x14, /* Byte value: 0x5f */ + 0xb2, 0x21, 0x8a, 0x1d, 0x15, 0x73, 0x16, 0x7f, 0x6c, 0x5a, 0x94, 0x8a, 0x94, 0x98, 0x38, 0x7d, /* Byte value: 0x60 */ + 0x6e, 0x1f, 0xc7, 0x0b, 0xb2, 0x90, 0xb3, 0x94, 0x24, 0x36, 0xcd, 0xc7, 0xcd, 0xc9, 0xa9, 0x2b, /* Byte value: 0x61 */ + 0xf2, 0x3c, 0x24, 0xff, 0xd5, 0x10, 0xaa, 0x2f, 0x04, 0x06, 0xa4, 0x24, 0xa4, 0x9b, 0xd6, 0xc4, /* Byte value: 0x62 */ + 0x3b, 0x61, 0x34, 0xce, 0x4d, 0x66, 0x35, 0x45, 0xf8, 0x84, 0x82, 0x34, 0x82, 0xa1, 0x0f, 0x15, /* Byte value: 0x63 */ + 0x52, 0xe9, 0xec, 0x89, 0xf6, 0x06, 0x4f, 0xa7, 0xe0, 0x90, 0xdc, 0xec, 0xdc, 0x7d, 0xbe, 0xc8, /* Byte value: 0x64 */ + 0x65, 0x07, 0x6e, 0x6d, 0xaf, 0x4e, 0xf7, 0xed, 0xf2, 0x8b, 0x5b, 0x6e, 0x5b, 0xf8, 0x0b, 0x3c, /* Byte value: 0x65 */ + 0x4b, 0x05, 0x07, 0x84, 0xdd, 0xbd, 0xf8, 0x29, 0xbe, 0xe1, 0xa6, 0x07, 0xa6, 0x32, 0x4c, 0xae, /* Byte value: 0x66 */ + 0x38, 0x32, 0xf8, 0x25, 0x48, 0x8c, 0x87, 0x36, 0x23, 0xd3, 0x12, 0xf8, 0x12, 0xa8, 0xc0, 0xbc, /* Byte value: 0x67 */ + 0x2a, 0xc6, 0xba, 0x4e, 0x7e, 0xe9, 0x74, 0xc1, 0xab, 0x1f, 0xfe, 0xba, 0xfe, 0xd6, 0x90, 0xcd, /* Byte value: 0x68 */ + 0x91, 0x9d, 0x11, 0x87, 0x70, 0x49, 0xfa, 0x24, 0x83, 0x23, 0x1c, 0x11, 0x1c, 0x71, 0x80, 0x69, /* Byte value: 0x69 */ + 0xd6, 0x17, 0xa0, 0x29, 0xb9, 0xda, 0x8f, 0x02, 0xd7, 0x5d, 0xbf, 0xa0, 0xbf, 0x67, 0x76, 0x26, /* Byte value: 0x6a */ + 0xfd, 0xe0, 0x5e, 0x3e, 0xc4, 0xd4, 0x95, 0x53, 0x35, 0xce, 0x31, 0x5e, 0x31, 0xb6, 0xa3, 0x8c, /* Byte value: 0x6b */ + 0xf8, 0x15, 0xc9, 0xc0, 0xcb, 0x29, 0x80, 0xc6, 0x9b, 0x37, 0x42, 
0xc9, 0x42, 0xad, 0x31, 0xb4, /* Byte value: 0x6c */ + 0x8c, 0xb5, 0x29, 0x2d, 0x57, 0xe8, 0x36, 0xaf, 0x3a, 0x27, 0x65, 0x29, 0x65, 0x22, 0xa5, 0x50, /* Byte value: 0x6d */ + 0x50, 0x8b, 0x64, 0x3b, 0xf0, 0x0b, 0x93, 0x44, 0x72, 0x4b, 0x3c, 0x64, 0x3c, 0x73, 0x34, 0x06, /* Byte value: 0x6e */ + 0x7f, 0xb8, 0x49, 0x8b, 0x81, 0x1f, 0xf2, 0x10, 0x77, 0xad, 0xb1, 0x49, 0xb1, 0xbe, 0x36, 0xf3, /* Byte value: 0x6f */ + 0x59, 0xf1, 0x45, 0xef, 0xeb, 0xd8, 0x0b, 0xde, 0x36, 0x2d, 0x4a, 0x45, 0x4a, 0x4c, 0x1c, 0xdf, /* Byte value: 0x70 */ + 0x08, 0x4b, 0x65, 0x8d, 0x18, 0x34, 0xf6, 0x0a, 0x0d, 0xea, 0x06, 0x65, 0x06, 0x38, 0x6d, 0xbe, /* Byte value: 0x71 */ + 0xd9, 0xcb, 0xda, 0xe8, 0xa8, 0x1e, 0xb0, 0x7e, 0xe6, 0x95, 0x2a, 0xda, 0x2a, 0x4a, 0x03, 0x6e, /* Byte value: 0x72 */ + 0xa2, 0xb7, 0x40, 0xc4, 0x25, 0x1b, 0x39, 0x6b, 0x76, 0x4d, 0x98, 0x40, 0x98, 0xe8, 0xe2, 0xc2, /* Byte value: 0x73 */ + 0xfb, 0x46, 0x05, 0x2b, 0xce, 0xc3, 0x32, 0xb5, 0x40, 0x60, 0xd2, 0x05, 0xd2, 0xa4, 0xfe, 0x1d, /* Byte value: 0x74 */ + 0x1c, 0x19, 0x7c, 0xf3, 0x24, 0x46, 0xa2, 0x1b, 0xf0, 0x88, 0x09, 0x7c, 0x09, 0x54, 0x60, 0x5e, /* Byte value: 0x75 */ + 0x29, 0x95, 0x76, 0xa5, 0x7b, 0x03, 0xc6, 0xb2, 0x70, 0x48, 0x6e, 0x76, 0x6e, 0xdf, 0x5f, 0x64, /* Byte value: 0x76 */ + 0xd3, 0xe2, 0x37, 0xd7, 0xb6, 0x27, 0x9a, 0x97, 0x79, 0xa4, 0xcc, 0x37, 0xcc, 0x7c, 0xe4, 0x1e, /* Byte value: 0x77 */ + 0x6f, 0x2e, 0x83, 0x52, 0xb1, 0x77, 0xdd, 0x04, 0x6d, 0xba, 0xbd, 0x83, 0xbd, 0xce, 0xec, 0x4c, /* Byte value: 0x78 */ + 0x47, 0x8a, 0xb1, 0xae, 0xc9, 0x93, 0x75, 0x26, 0x54, 0x7e, 0xa3, 0xb1, 0xa3, 0x16, 0xf6, 0x4f, /* Byte value: 0x79 */ + 0x54, 0x4f, 0xb7, 0x9c, 0xfc, 0x11, 0xe8, 0x41, 0x95, 0x3e, 0x3f, 0xb7, 0x3f, 0x6f, 0xe3, 0x59, /* Byte value: 0x7a */ + 0x9f, 0x70, 0x2f, 0x1f, 0x62, 0x6a, 0xab, 0xc8, 0xfb, 0x67, 0xf9, 0x2f, 0xf9, 0x5b, 0xb0, 0x46, /* Byte value: 0x7b */ + 0x17, 0x01, 0xd5, 0x95, 0x39, 0x98, 0xe6, 0x62, 0x26, 0x35, 0x9f, 0xd5, 0x9f, 0x65, 0xc2, 0x49, /* Byte value: 0x7c */ + 0xe1, 0xf9, 0x22, 0xcd, 0xe0, 0x92, 0x37, 0x48, 0xc5, 0x46, 0x38, 0x22, 0x38, 0xe2, 0xc3, 0xd2, /* Byte value: 0x7d */ + 0x7c, 0xeb, 0x85, 0x60, 0x84, 0xf5, 0x40, 0x63, 0xac, 0xfa, 0x21, 0x85, 0x21, 0xb7, 0xf9, 0x5a, /* Byte value: 0x7e */ + 0x46, 0xbb, 0xf5, 0xf7, 0xca, 0x74, 0x1b, 0xb6, 0x1d, 0xf2, 0xd3, 0xf5, 0xd3, 0x11, 0xb3, 0x28, /* Byte value: 0x7f */ + 0xbc, 0xcc, 0xb4, 0x85, 0x07, 0x50, 0x47, 0x93, 0x14, 0x1e, 0x71, 0xb4, 0x71, 0xb2, 0x08, 0x52, /* Byte value: 0x80 */ + 0xc5, 0xd2, 0xa6, 0x1b, 0x8c, 0x58, 0x12, 0x65, 0x16, 0x1d, 0x23, 0xa6, 0x23, 0x1e, 0x63, 0x30, /* Byte value: 0x81 */ + 0x22, 0x8d, 0xdf, 0xc3, 0x66, 0xdd, 0x82, 0xcb, 0xa6, 0xf5, 0xf8, 0xdf, 0xf8, 0xee, 0xfd, 0x73, /* Byte value: 0x82 */ + 0xbe, 0xae, 0x3c, 0x37, 0x01, 0x5d, 0x9b, 0x70, 0x86, 0xc5, 0x91, 0x3c, 0x91, 0xbc, 0x82, 0x9c, /* Byte value: 0x83 */ + 0x96, 0x0a, 0x0e, 0xcb, 0x79, 0xb9, 0x33, 0x52, 0xbf, 0x01, 0x8f, 0x0e, 0x8f, 0x64, 0x98, 0x9f, /* Byte value: 0x84 */ + 0x7e, 0x89, 0x0d, 0xd2, 0x82, 0xf8, 0x9c, 0x80, 0x3e, 0x21, 0xc1, 0x0d, 0xc1, 0xb9, 0x73, 0x94, /* Byte value: 0x85 */ + 0x1d, 0x28, 0x38, 0xaa, 0x27, 0xa1, 0xcc, 0x8b, 0xb9, 0x04, 0x79, 0x38, 0x79, 0x53, 0x25, 0x39, /* Byte value: 0x86 */ + 0xb9, 0x39, 0x23, 0x7b, 0x08, 0xad, 0x52, 0x06, 0xba, 0xe7, 0x02, 0x23, 0x02, 0xa9, 0x9a, 0x6a, /* Byte value: 0x87 */ + 0x9d, 0x12, 0xa7, 0xad, 0x64, 0x67, 0x77, 0x2b, 0x69, 0xbc, 0x19, 0xa7, 0x19, 0x55, 0x3a, 0x88, /* Byte value: 0x88 */ + 0xad, 0x6b, 0x3a, 0x05, 0x34, 0xdf, 0x06, 0x17, 0x47, 0x85, 0x0d, 0x3a, 0x0d, 0xc5, 0x97, 0x8a, /* Byte value: 
0x89 */ + 0x6c, 0x7d, 0x4f, 0xb9, 0xb4, 0x9d, 0x6f, 0x77, 0xb6, 0xed, 0x2d, 0x4f, 0x2d, 0xc7, 0x23, 0xe5, /* Byte value: 0x8a */ + 0x85, 0xcf, 0x08, 0xf9, 0x4c, 0x3b, 0xae, 0x35, 0x7e, 0x41, 0x13, 0x08, 0x13, 0x1d, 0x8d, 0x89, /* Byte value: 0x8b */ + 0xd1, 0x80, 0xbf, 0x65, 0xb0, 0x2a, 0x46, 0x74, 0xeb, 0x7f, 0x2c, 0xbf, 0x2c, 0x72, 0x6e, 0xd0, /* Byte value: 0x8c */ + 0x1e, 0x7b, 0xf4, 0x41, 0x22, 0x4b, 0x7e, 0xf8, 0x62, 0x53, 0xe9, 0xf4, 0xe9, 0x5a, 0xea, 0x90, /* Byte value: 0x8d */ + 0xa5, 0x20, 0x5f, 0x88, 0x2c, 0xeb, 0xf0, 0x1d, 0x4a, 0x6f, 0x0b, 0x5f, 0x0b, 0xfd, 0xfa, 0x34, /* Byte value: 0x8e */ + 0xe6, 0x6e, 0x3d, 0x81, 0xe9, 0x62, 0xfe, 0x3e, 0xf9, 0x64, 0xab, 0x3d, 0xab, 0xf7, 0xdb, 0x24, /* Byte value: 0x8f */ + 0x2e, 0x02, 0x69, 0xe9, 0x72, 0xf3, 0x0f, 0xc4, 0x4c, 0x6a, 0xfd, 0x69, 0xfd, 0xca, 0x47, 0x92, /* Byte value: 0x90 */ + 0x5f, 0x57, 0x1e, 0xfa, 0xe1, 0xcf, 0xac, 0x38, 0x43, 0x83, 0xa9, 0x1e, 0xa9, 0x5e, 0x41, 0x4e, /* Byte value: 0x91 */ + 0x61, 0xc3, 0xbd, 0xca, 0xa3, 0x54, 0x8c, 0xe8, 0x15, 0xfe, 0x58, 0xbd, 0x58, 0xe4, 0xdc, 0x63, /* Byte value: 0x92 */ + 0x83, 0x69, 0x53, 0xec, 0x46, 0x2c, 0x09, 0xd3, 0x0b, 0xef, 0xf0, 0x53, 0xf0, 0x0f, 0xd0, 0x18, /* Byte value: 0x93 */ + 0x44, 0xd9, 0x7d, 0x45, 0xcc, 0x79, 0xc7, 0x55, 0x8f, 0x29, 0x33, 0x7d, 0x33, 0x1f, 0x39, 0xe6, /* Byte value: 0x94 */ + 0xb1, 0x72, 0x46, 0xf6, 0x10, 0x99, 0xa4, 0x0c, 0xb7, 0x0d, 0x04, 0x46, 0x04, 0x91, 0xf7, 0xd4, /* Byte value: 0x95 */ + 0xef, 0x14, 0x1c, 0x55, 0xf2, 0xb1, 0x66, 0xa4, 0xbd, 0x02, 0xdd, 0x1c, 0xdd, 0xc8, 0xf3, 0xfd, /* Byte value: 0x96 */ + 0x82, 0x58, 0x17, 0xb5, 0x45, 0xcb, 0x67, 0x43, 0x42, 0x63, 0x80, 0x17, 0x80, 0x08, 0x95, 0x7f, /* Byte value: 0x97 */ + 0x36, 0xdf, 0xc6, 0xbd, 0x5a, 0xaf, 0xd6, 0xda, 0x5b, 0x97, 0xf7, 0xc6, 0xf7, 0x82, 0xf0, 0x93, /* Byte value: 0x98 */ + 0xa3, 0x86, 0x04, 0x9d, 0x26, 0xfc, 0x57, 0xfb, 0x3f, 0xc1, 0xe8, 0x04, 0xe8, 0xef, 0xa7, 0xa5, /* Byte value: 0x99 */ + 0xaf, 0x09, 0xb2, 0xb7, 0x32, 0xd2, 0xda, 0xf4, 0xd5, 0x5e, 0xed, 0xb2, 0xed, 0xcb, 0x1d, 0x44, /* Byte value: 0x9a */ + 0x88, 0x71, 0xfa, 0x8a, 0x5b, 0xf2, 0x4d, 0xaa, 0xdd, 0x52, 0x66, 0xfa, 0x66, 0x3e, 0x72, 0x0f, /* Byte value: 0x9b */ + 0x89, 0x40, 0xbe, 0xd3, 0x58, 0x15, 0x23, 0x3a, 0x94, 0xde, 0x16, 0xbe, 0x16, 0x39, 0x37, 0x68, /* Byte value: 0x9c */ + 0x0f, 0xdc, 0x7a, 0xc1, 0x11, 0xc4, 0x3f, 0x7c, 0x31, 0xc8, 0x95, 0x7a, 0x95, 0x2d, 0x75, 0x48, /* Byte value: 0x9d */ + 0x1f, 0x4a, 0xb0, 0x18, 0x21, 0xac, 0x10, 0x68, 0x2b, 0xdf, 0x99, 0xb0, 0x99, 0x5d, 0xaf, 0xf7, /* Byte value: 0x9e */ + 0x42, 0x7f, 0x26, 0x50, 0xc6, 0x6e, 0x60, 0xb3, 0xfa, 0x87, 0xd0, 0x26, 0xd0, 0x0d, 0x64, 0x77, /* Byte value: 0x9f */ + 0xf6, 0xf8, 0xf7, 0x58, 0xd9, 0x0a, 0xd1, 0x2a, 0xe3, 0x73, 0xa7, 0xf7, 0xa7, 0x87, 0x01, 0x9b, /* Byte value: 0xa0 */ + 0xdf, 0x6d, 0x81, 0xfd, 0xa2, 0x09, 0x17, 0x98, 0x93, 0x3b, 0xc9, 0x81, 0xc9, 0x58, 0x5e, 0xff, /* Byte value: 0xa1 */ + 0x3a, 0x50, 0x70, 0x97, 0x4e, 0x81, 0x5b, 0xd5, 0xb1, 0x08, 0xf2, 0x70, 0xf2, 0xa6, 0x4a, 0x72, /* Byte value: 0xa2 */ + 0xc1, 0x16, 0x75, 0xbc, 0x80, 0x42, 0x69, 0x60, 0xf1, 0x68, 0x20, 0x75, 0x20, 0x02, 0xb4, 0x6f, /* Byte value: 0xa3 */ + 0x28, 0xa4, 0x32, 0xfc, 0x78, 0xe4, 0xa8, 0x22, 0x39, 0xc4, 0x1e, 0x32, 0x1e, 0xd8, 0x1a, 0x03, /* Byte value: 0xa4 */ + 0xf1, 0x6f, 0xe8, 0x14, 0xd0, 0xfa, 0x18, 0x5c, 0xdf, 0x51, 0x34, 0xe8, 0x34, 0x92, 0x19, 0x6d, /* Byte value: 0xa5 */ + 0xcb, 0x3f, 0x98, 0x83, 0x9e, 0x7b, 0x43, 0x89, 0x6e, 0x59, 0xc6, 0x98, 0xc6, 0x34, 0x53, 0x1f, /* Byte value: 0xa6 */ + 0x92, 0xce, 0xdd, 0x6c, 0x75, 0xa3, 
0x48, 0x57, 0x58, 0x74, 0x8c, 0xdd, 0x8c, 0x78, 0x4f, 0xc0, /* Byte value: 0xa7 */ + 0x87, 0xad, 0x80, 0x4b, 0x4a, 0x36, 0x72, 0xd6, 0xec, 0x9a, 0xf3, 0x80, 0xf3, 0x13, 0x07, 0x47, /* Byte value: 0xa8 */ + 0xe7, 0x5f, 0x79, 0xd8, 0xea, 0x85, 0x90, 0xae, 0xb0, 0xe8, 0xdb, 0x79, 0xdb, 0xf0, 0x9e, 0x43, /* Byte value: 0xa9 */ + 0x3e, 0x94, 0xa3, 0x30, 0x42, 0x9b, 0x20, 0xd0, 0x56, 0x7d, 0xf1, 0xa3, 0xf1, 0xba, 0x9d, 0x2d, /* Byte value: 0xaa */ + 0xfa, 0x77, 0x41, 0x72, 0xcd, 0x24, 0x5c, 0x25, 0x09, 0xec, 0xa2, 0x41, 0xa2, 0xa3, 0xbb, 0x7a, /* Byte value: 0xab */ + 0x0d, 0xbe, 0xf2, 0x73, 0x17, 0xc9, 0xe3, 0x9f, 0xa3, 0x13, 0x75, 0xf2, 0x75, 0x23, 0xff, 0x86, /* Byte value: 0xac */ + 0x51, 0xba, 0x20, 0x62, 0xf3, 0xec, 0xfd, 0xd4, 0x3b, 0xc7, 0x4c, 0x20, 0x4c, 0x74, 0x71, 0x61, /* Byte value: 0xad */ + 0xb6, 0xe5, 0x59, 0xba, 0x19, 0x69, 0x6d, 0x7a, 0x8b, 0x2f, 0x97, 0x59, 0x97, 0x84, 0xef, 0x22, /* Byte value: 0xae */ + 0x7b, 0x7c, 0x9a, 0x2c, 0x8d, 0x05, 0x89, 0x15, 0x90, 0xd8, 0xb2, 0x9a, 0xb2, 0xa2, 0xe1, 0xac, /* Byte value: 0xaf */ + 0x74, 0xa0, 0xe0, 0xed, 0x9c, 0xc1, 0xb6, 0x69, 0xa1, 0x10, 0x27, 0xe0, 0x27, 0x8f, 0x94, 0xe4, /* Byte value: 0xb0 */ + 0x3f, 0xa5, 0xe7, 0x69, 0x41, 0x7c, 0x4e, 0x40, 0x1f, 0xf1, 0x81, 0xe7, 0x81, 0xbd, 0xd8, 0x4a, /* Byte value: 0xb1 */ + 0x2f, 0x33, 0x2d, 0xb0, 0x71, 0x14, 0x61, 0x54, 0x05, 0xe6, 0x8d, 0x2d, 0x8d, 0xcd, 0x02, 0xf5, /* Byte value: 0xb2 */ + 0x7d, 0xda, 0xc1, 0x39, 0x87, 0x12, 0x2e, 0xf3, 0xe5, 0x76, 0x51, 0xc1, 0x51, 0xb0, 0xbc, 0x3d, /* Byte value: 0xb3 */ + 0xbb, 0x5b, 0xab, 0xc9, 0x0e, 0xa0, 0x8e, 0xe5, 0x28, 0x3c, 0xe2, 0xab, 0xe2, 0xa7, 0x10, 0xa4, /* Byte value: 0xb4 */ + 0x49, 0x67, 0x8f, 0x36, 0xdb, 0xb0, 0x24, 0xca, 0x2c, 0x3a, 0x46, 0x8f, 0x46, 0x3c, 0xc6, 0x60, /* Byte value: 0xb5 */ + 0x2b, 0xf7, 0xfe, 0x17, 0x7d, 0x0e, 0x1a, 0x51, 0xe2, 0x93, 0x8e, 0xfe, 0x8e, 0xd1, 0xd5, 0xaa, /* Byte value: 0xb6 */ + 0x99, 0xd6, 0x74, 0x0a, 0x68, 0x7d, 0x0c, 0x2e, 0x8e, 0xc9, 0x1a, 0x74, 0x1a, 0x49, 0xed, 0xd7, /* Byte value: 0xb7 */ + 0x02, 0x62, 0x88, 0xb2, 0x06, 0x0d, 0xdc, 0xe3, 0x92, 0xdb, 0xe0, 0x88, 0xe0, 0x0e, 0x8a, 0xce, /* Byte value: 0xb8 */ + 0x37, 0xee, 0x82, 0xe4, 0x59, 0x48, 0xb8, 0x4a, 0x12, 0x1b, 0x87, 0x82, 0x87, 0x85, 0xb5, 0xf4, /* Byte value: 0xb9 */ + 0xb3, 0x10, 0xce, 0x44, 0x16, 0x94, 0x78, 0xef, 0x25, 0xd6, 0xe4, 0xce, 0xe4, 0x9f, 0x7d, 0x1a, /* Byte value: 0xba */ + 0x73, 0x37, 0xff, 0xa1, 0x95, 0x31, 0x7f, 0x1f, 0x9d, 0x32, 0xb4, 0xff, 0xb4, 0x9a, 0x8c, 0x12, /* Byte value: 0xbb */ + 0x64, 0x36, 0x2a, 0x34, 0xac, 0xa9, 0x99, 0x7d, 0xbb, 0x07, 0x2b, 0x2a, 0x2b, 0xff, 0x4e, 0x5b, /* Byte value: 0xbc */ + 0xdc, 0x3e, 0x4d, 0x16, 0xa7, 0xe3, 0xa5, 0xeb, 0x48, 0x6c, 0x59, 0x4d, 0x59, 0x51, 0x91, 0x56, /* Byte value: 0xbd */ + 0xd8, 0xfa, 0x9e, 0xb1, 0xab, 0xf9, 0xde, 0xee, 0xaf, 0x19, 0x5a, 0x9e, 0x5a, 0x4d, 0x46, 0x09, /* Byte value: 0xbe */ + 0xc9, 0x5d, 0x10, 0x31, 0x98, 0x76, 0x9f, 0x6a, 0xfc, 0x82, 0x26, 0x10, 0x26, 0x3a, 0xd9, 0xd1, /* Byte value: 0xbf */ + 0xab, 0xcd, 0x61, 0x10, 0x3e, 0xc8, 0xa1, 0xf1, 0x32, 0x2b, 0xee, 0x61, 0xee, 0xd7, 0xca, 0x1b, /* Byte value: 0xc0 */ + 0x33, 0x2a, 0x51, 0x43, 0x55, 0x52, 0xc3, 0x4f, 0xf5, 0x6e, 0x84, 0x51, 0x84, 0x99, 0x62, 0xab, /* Byte value: 0xc1 */ + 0x6b, 0xea, 0x50, 0xf5, 0xbd, 0x6d, 0xa6, 0x01, 0x8a, 0xcf, 0xbe, 0x50, 0xbe, 0xd2, 0x3b, 0x13, /* Byte value: 0xc2 */ + 0xcc, 0xa8, 0x87, 0xcf, 0x97, 0x8b, 0x8a, 0xff, 0x52, 0x7b, 0x55, 0x87, 0x55, 0x21, 0x4b, 0xe9, /* Byte value: 0xc3 */ + 0x90, 0xac, 0x55, 0xde, 0x73, 0xae, 0x94, 0xb4, 0xca, 0xaf, 0x6c, 0x55, 0x6c, 
0x76, 0xc5, 0x0e, /* Byte value: 0xc4 */ + 0xd0, 0xb1, 0xfb, 0x3c, 0xb3, 0xcd, 0x28, 0xe4, 0xa2, 0xf3, 0x5c, 0xfb, 0x5c, 0x75, 0x2b, 0xb7, /* Byte value: 0xc5 */ + 0xac, 0x5a, 0x7e, 0x5c, 0x37, 0x38, 0x68, 0x87, 0x0e, 0x09, 0x7d, 0x7e, 0x7d, 0xc2, 0xd2, 0xed, /* Byte value: 0xc6 */ + 0xbf, 0x9f, 0x78, 0x6e, 0x02, 0xba, 0xf5, 0xe0, 0xcf, 0x49, 0xe1, 0x78, 0xe1, 0xbb, 0xc7, 0xfb, /* Byte value: 0xc7 */ + 0xda, 0x98, 0x16, 0x03, 0xad, 0xf4, 0x02, 0x0d, 0x3d, 0xc2, 0xba, 0x16, 0xba, 0x43, 0xcc, 0xc7, /* Byte value: 0xc8 */ + 0xa7, 0x42, 0xd7, 0x3a, 0x2a, 0xe6, 0x2c, 0xfe, 0xd8, 0xb4, 0xeb, 0xd7, 0xeb, 0xf3, 0x70, 0xfa, /* Byte value: 0xc9 */ + 0x5e, 0x66, 0x5a, 0xa3, 0xe2, 0x28, 0xc2, 0xa8, 0x0a, 0x0f, 0xd9, 0x5a, 0xd9, 0x59, 0x04, 0x29, /* Byte value: 0xca */ + 0x98, 0xe7, 0x30, 0x53, 0x6b, 0x9a, 0x62, 0xbe, 0xc7, 0x45, 0x6a, 0x30, 0x6a, 0x4e, 0xa8, 0xb0, /* Byte value: 0xcb */ + 0x7a, 0x4d, 0xde, 0x75, 0x8e, 0xe2, 0xe7, 0x85, 0xd9, 0x54, 0xc2, 0xde, 0xc2, 0xa5, 0xa4, 0xcb, /* Byte value: 0xcc */ + 0x13, 0xc5, 0x06, 0x32, 0x35, 0x82, 0x9d, 0x67, 0xc1, 0x40, 0x9c, 0x06, 0x9c, 0x79, 0x15, 0x16, /* Byte value: 0xcd */ + 0x6a, 0xdb, 0x14, 0xac, 0xbe, 0x8a, 0xc8, 0x91, 0xc3, 0x43, 0xce, 0x14, 0xce, 0xd5, 0x7e, 0x74, /* Byte value: 0xce */ + 0x40, 0x1d, 0xae, 0xe2, 0xc0, 0x63, 0xbc, 0x50, 0x68, 0x5c, 0x30, 0xae, 0x30, 0x03, 0xee, 0xb9, /* Byte value: 0xcf */ + 0xa8, 0x9e, 0xad, 0xfb, 0x3b, 0x22, 0x13, 0x82, 0xe9, 0x7c, 0x7e, 0xad, 0x7e, 0xde, 0x05, 0xb2, /* Byte value: 0xd0 */ + 0x5b, 0x93, 0xcd, 0x5d, 0xed, 0xd5, 0xd7, 0x3d, 0xa4, 0xf6, 0xaa, 0xcd, 0xaa, 0x42, 0x96, 0x11, /* Byte value: 0xd1 */ + 0xeb, 0xd0, 0xcf, 0xf2, 0xfe, 0xab, 0x1d, 0xa1, 0x5a, 0x77, 0xde, 0xcf, 0xde, 0xd4, 0x24, 0xa2, /* Byte value: 0xd2 */ + 0x66, 0x54, 0xa2, 0x86, 0xaa, 0xa4, 0x45, 0x9e, 0x29, 0xdc, 0xcb, 0xa2, 0xcb, 0xf1, 0xc4, 0x95, /* Byte value: 0xd3 */ + 0x56, 0x2d, 0x3f, 0x2e, 0xfa, 0x1c, 0x34, 0xa2, 0x07, 0xe5, 0xdf, 0x3f, 0xdf, 0x61, 0x69, 0x97, /* Byte value: 0xd4 */ + 0x81, 0x0b, 0xdb, 0x5e, 0x40, 0x21, 0xd5, 0x30, 0x99, 0x34, 0x10, 0xdb, 0x10, 0x01, 0x5a, 0xd6, /* Byte value: 0xd5 */ + 0x77, 0xf3, 0x2c, 0x06, 0x99, 0x2b, 0x04, 0x1a, 0x7a, 0x47, 0xb7, 0x2c, 0xb7, 0x86, 0x5b, 0x4d, /* Byte value: 0xd6 */ + 0x57, 0x1c, 0x7b, 0x77, 0xf9, 0xfb, 0x5a, 0x32, 0x4e, 0x69, 0xaf, 0x7b, 0xaf, 0x66, 0x2c, 0xf0, /* Byte value: 0xd7 */ + 0x4e, 0xf0, 0x90, 0x7a, 0xd2, 0x40, 0xed, 0xbc, 0x10, 0x18, 0xd5, 0x90, 0xd5, 0x29, 0xde, 0x96, /* Byte value: 0xd8 */ + 0x8d, 0x84, 0x6d, 0x74, 0x54, 0x0f, 0x58, 0x3f, 0x73, 0xab, 0x15, 0x6d, 0x15, 0x25, 0xe0, 0x37, /* Byte value: 0xd9 */ + 0xb0, 0x43, 0x02, 0xaf, 0x13, 0x7e, 0xca, 0x9c, 0xfe, 0x81, 0x74, 0x02, 0x74, 0x96, 0xb2, 0xb3, /* Byte value: 0xda */ + 0xf3, 0x0d, 0x60, 0xa6, 0xd6, 0xf7, 0xc4, 0xbf, 0x4d, 0x8a, 0xd4, 0x60, 0xd4, 0x9c, 0x93, 0xa3, /* Byte value: 0xdb */ + 0x94, 0x68, 0x86, 0x79, 0x7f, 0xb4, 0xef, 0xb1, 0x2d, 0xda, 0x6f, 0x86, 0x6f, 0x6a, 0x12, 0x51, /* Byte value: 0xdc */ + 0x20, 0xef, 0x57, 0x71, 0x60, 0xd0, 0x5e, 0x28, 0x34, 0x2e, 0x18, 0x57, 0x18, 0xe0, 0x77, 0xbd, /* Byte value: 0xdd */ + 0x24, 0x2b, 0x84, 0xd6, 0x6c, 0xca, 0x25, 0x2d, 0xd3, 0x5b, 0x1b, 0x84, 0x1b, 0xfc, 0xa0, 0xe2, /* Byte value: 0xde */ + 0xd7, 0x26, 0xe4, 0x70, 0xba, 0x3d, 0xe1, 0x92, 0x9e, 0xd1, 0xcf, 0xe4, 0xcf, 0x60, 0x33, 0x41, /* Byte value: 0xdf */ + 0x14, 0x52, 0x19, 0x7e, 0x3c, 0x72, 0x54, 0x11, 0xfd, 0x62, 0x0f, 0x19, 0x0f, 0x6c, 0x0d, 0xe0, /* Byte value: 0xe0 */ + 0x9e, 0x41, 0x6b, 0x46, 0x61, 0x8d, 0xc5, 0x58, 0xb2, 0xeb, 0x89, 0x6b, 0x89, 0x5c, 0xf5, 0x21, /* Byte value: 0xe1 */ + 
0x05, 0xf5, 0x97, 0xfe, 0x0f, 0xfd, 0x15, 0x95, 0xae, 0xf9, 0x73, 0x97, 0x73, 0x1b, 0x92, 0x38, /* Byte value: 0xe2 */ + 0xc6, 0x81, 0x6a, 0xf0, 0x89, 0xb2, 0xa0, 0x16, 0xcd, 0x4a, 0xb3, 0x6a, 0xb3, 0x17, 0xac, 0x99, /* Byte value: 0xe3 */ + 0xed, 0x76, 0x94, 0xe7, 0xf4, 0xbc, 0xba, 0x47, 0x2f, 0xd9, 0x3d, 0x94, 0x3d, 0xc6, 0x79, 0x33, /* Byte value: 0xe4 */ + 0xc8, 0x6c, 0x54, 0x68, 0x9b, 0x91, 0xf1, 0xfa, 0xb5, 0x0e, 0x56, 0x54, 0x56, 0x3d, 0x9c, 0xb6, /* Byte value: 0xe5 */ + 0x9b, 0xb4, 0xfc, 0xb8, 0x6e, 0x70, 0xd0, 0xcd, 0x1c, 0x12, 0xfa, 0xfc, 0xfa, 0x47, 0x67, 0x19, /* Byte value: 0xe6 */ + 0xbd, 0xfd, 0xf0, 0xdc, 0x04, 0xb7, 0x29, 0x03, 0x5d, 0x92, 0x01, 0xf0, 0x01, 0xb5, 0x4d, 0x35, /* Byte value: 0xe7 */ + 0x84, 0xfe, 0x4c, 0xa0, 0x4f, 0xdc, 0xc0, 0xa5, 0x37, 0xcd, 0x63, 0x4c, 0x63, 0x1a, 0xc8, 0xee, /* Byte value: 0xe8 */ + 0xc3, 0x74, 0xfd, 0x0e, 0x86, 0x4f, 0xb5, 0x83, 0x63, 0xb3, 0xc0, 0xfd, 0xc0, 0x0c, 0x3e, 0xa1, /* Byte value: 0xe9 */ + 0xca, 0x0e, 0xdc, 0xda, 0x9d, 0x9c, 0x2d, 0x19, 0x27, 0xd5, 0xb6, 0xdc, 0xb6, 0x33, 0x16, 0x78, /* Byte value: 0xea */ + 0x8a, 0x13, 0x72, 0x38, 0x5d, 0xff, 0x91, 0x49, 0x4f, 0x89, 0x86, 0x72, 0x86, 0x30, 0xf8, 0xc1, /* Byte value: 0xeb */ + 0x34, 0xbd, 0x4e, 0x0f, 0x5c, 0xa2, 0x0a, 0x39, 0xc9, 0x4c, 0x17, 0x4e, 0x17, 0x8c, 0x7a, 0x5d, /* Byte value: 0xec */ + 0x26, 0x49, 0x0c, 0x64, 0x6a, 0xc7, 0xf9, 0xce, 0x41, 0x80, 0xfb, 0x0c, 0xfb, 0xf2, 0x2a, 0x2c, /* Byte value: 0xed */ + 0x10, 0x96, 0xca, 0xd9, 0x30, 0x68, 0x2f, 0x14, 0x1a, 0x17, 0x0c, 0xca, 0x0c, 0x70, 0xda, 0xbf, /* Byte value: 0xee */ + 0x25, 0x1a, 0xc0, 0x8f, 0x6f, 0x2d, 0x4b, 0xbd, 0x9a, 0xd7, 0x6b, 0xc0, 0x6b, 0xfb, 0xe5, 0x85, /* Byte value: 0xef */ + 0xe3, 0x9b, 0xaa, 0x7f, 0xe6, 0x9f, 0xeb, 0xab, 0x57, 0x9d, 0xd8, 0xaa, 0xd8, 0xec, 0x49, 0x1c, /* Byte value: 0xf0 */ + 0x63, 0xa1, 0x35, 0x78, 0xa5, 0x59, 0x50, 0x0b, 0x87, 0x25, 0xb8, 0x35, 0xb8, 0xea, 0x56, 0xad, /* Byte value: 0xf1 */ + 0x0a, 0x29, 0xed, 0x3f, 0x1e, 0x39, 0x2a, 0xe9, 0x9f, 0x31, 0xe6, 0xed, 0xe6, 0x36, 0xe7, 0x70, /* Byte value: 0xf2 */ + 0x4f, 0xc1, 0xd4, 0x23, 0xd1, 0xa7, 0x83, 0x2c, 0x59, 0x94, 0xa5, 0xd4, 0xa5, 0x2e, 0x9b, 0xf1, /* Byte value: 0xf3 */ + 0x8b, 0x22, 0x36, 0x61, 0x5e, 0x18, 0xff, 0xd9, 0x06, 0x05, 0xf6, 0x36, 0xf6, 0x37, 0xbd, 0xa6, /* Byte value: 0xf4 */ + 0xc7, 0xb0, 0x2e, 0xa9, 0x8a, 0x55, 0xce, 0x86, 0x84, 0xc6, 0xc3, 0x2e, 0xc3, 0x10, 0xe9, 0xfe, /* Byte value: 0xf5 */ + 0x67, 0x65, 0xe6, 0xdf, 0xa9, 0x43, 0x2b, 0x0e, 0x60, 0x50, 0xbb, 0xe6, 0xbb, 0xf6, 0x81, 0xf2, /* Byte value: 0xf6 */ + 0x2d, 0x51, 0xa5, 0x02, 0x77, 0x19, 0xbd, 0xb7, 0x97, 0x3d, 0x6d, 0xa5, 0x6d, 0xc3, 0x88, 0x3b, /* Byte value: 0xf7 */ + 0xfe, 0xb3, 0x92, 0xd5, 0xc1, 0x3e, 0x27, 0x20, 0xee, 0x99, 0xa1, 0x92, 0xa1, 0xbf, 0x6c, 0x25, /* Byte value: 0xf8 */ + 0x45, 0xe8, 0x39, 0x1c, 0xcf, 0x9e, 0xa9, 0xc5, 0xc6, 0xa5, 0x43, 0x39, 0x43, 0x18, 0x7c, 0x81, /* Byte value: 0xf9 */ + 0xa0, 0xd5, 0xc8, 0x76, 0x23, 0x16, 0xe5, 0x88, 0xe4, 0x96, 0x78, 0xc8, 0x78, 0xe6, 0x68, 0x0c, /* Byte value: 0xfa */ + 0x80, 0x3a, 0x9f, 0x07, 0x43, 0xc6, 0xbb, 0xa0, 0xd0, 0xb8, 0x60, 0x9f, 0x60, 0x06, 0x1f, 0xb1, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0x4a, 0x34, 0x43, 0xdd, 0xde, 0x5a, 0x96, 0xb9, 0xf7, 0x6d, 0xd6, 0x43, 0xd6, 0x35, 0x09, 0xc9, /* Byte value: 0xfd */ + 0xee, 0x25, 0x58, 0x0c, 0xf1, 0x56, 0x08, 0x34, 0xf4, 0x8e, 0xad, 0x58, 0xad, 0xcf, 0xb6, 0x9a, /* Byte value: 0xfe */ + 0x4c, 0x92, 0x18, 0xc8, 0xd4, 0x4d, 0x31, 
0x5f, 0x82, 0xc3, 0x35, 0x18, 0x35, 0x27, 0x54, 0x58, /* Byte value: 0xff */ + + /* Matrix row: 13 */ + 0xab, 0xda, 0x7f, 0x93, 0x75, 0x63, 0x3b, 0x22, 0xb7, 0x40, 0xa6, 0x84, 0xc2, 0x22, 0x09, 0xda, /* Byte value: 0x00 */ + 0xbb, 0xd9, 0xf4, 0xa6, 0xc1, 0x1b, 0xf5, 0xe1, 0x54, 0xa2, 0xc5, 0x24, 0x95, 0xe1, 0xdd, 0xd9, /* Byte value: 0x01 */ + 0x83, 0x3c, 0x0e, 0x02, 0x84, 0xaf, 0x03, 0xe7, 0x22, 0x36, 0xb0, 0x57, 0xa6, 0xe7, 0x08, 0x3c, /* Byte value: 0x02 */ + 0x0d, 0x8c, 0x03, 0x40, 0xed, 0xc4, 0x60, 0xd0, 0xca, 0x0f, 0xa2, 0x72, 0x27, 0xd0, 0xc3, 0x8c, /* Byte value: 0x03 */ + 0x3a, 0xdd, 0xd3, 0xea, 0xb2, 0xbb, 0x9f, 0x26, 0x52, 0x19, 0x41, 0x67, 0xe0, 0x26, 0x2e, 0xdd, /* Byte value: 0x04 */ + 0x4a, 0xd4, 0xe4, 0x61, 0x38, 0x10, 0xb0, 0x2a, 0xbe, 0xf2, 0xab, 0x81, 0x86, 0x2a, 0x47, 0xd4, /* Byte value: 0x05 */ + 0xe8, 0xf2, 0xca, 0x2e, 0x8d, 0xa9, 0x39, 0x98, 0x8b, 0x64, 0xc7, 0x5f, 0x06, 0x98, 0xb8, 0xf2, /* Byte value: 0x06 */ + 0xeb, 0xd6, 0x16, 0x47, 0xe0, 0x40, 0x85, 0xa8, 0xbd, 0x4e, 0xe9, 0x41, 0x5d, 0xa8, 0xdf, 0xd6, /* Byte value: 0x07 */ + 0x8f, 0xac, 0xf8, 0x65, 0xf3, 0x8d, 0xb6, 0x27, 0xfa, 0x9e, 0x08, 0x2f, 0x09, 0x27, 0x57, 0xac, /* Byte value: 0x08 */ + 0xbe, 0xb5, 0x53, 0x1d, 0x76, 0xe3, 0xf2, 0xb1, 0x0e, 0xdc, 0xb7, 0x06, 0x78, 0xb1, 0x74, 0xb5, /* Byte value: 0x09 */ + 0x0f, 0xb4, 0x2a, 0x0e, 0x1a, 0xcb, 0x09, 0xf0, 0xee, 0x82, 0x96, 0x66, 0xf4, 0xf0, 0x38, 0xb4, /* Byte value: 0x0a */ + 0x6d, 0x86, 0xbf, 0xfe, 0xd3, 0x17, 0x81, 0x1f, 0xc5, 0x06, 0x2b, 0x34, 0x16, 0x1f, 0x7e, 0x86, /* Byte value: 0x0b */ + 0x0a, 0xd8, 0x8d, 0xb5, 0xad, 0x33, 0x0e, 0xa0, 0xb4, 0xfc, 0xe4, 0x44, 0x19, 0xa0, 0x91, 0xd8, /* Byte value: 0x0c */ + 0x29, 0xfa, 0x84, 0xb6, 0x6b, 0x2a, 0xed, 0xd5, 0x87, 0xd1, 0x0c, 0xd9, 0xec, 0xd5, 0x9d, 0xfa, /* Byte value: 0x0d */ + 0x17, 0x57, 0x05, 0xc0, 0xf4, 0x8f, 0xa0, 0xb3, 0x9d, 0x11, 0x25, 0x96, 0x69, 0xb3, 0x86, 0x57, /* Byte value: 0x0e */ + 0xb0, 0x1d, 0x8c, 0x34, 0xf6, 0xce, 0x2e, 0x51, 0xf2, 0xf9, 0x3b, 0x6a, 0x04, 0x51, 0xd0, 0x1d, /* Byte value: 0x0f */ + 0xc5, 0x78, 0x1c, 0x04, 0xcb, 0x9d, 0x06, 0x0d, 0x44, 0x6c, 0xa3, 0xae, 0x8f, 0x0d, 0x10, 0x78, /* Byte value: 0x10 */ + 0x4c, 0x9c, 0x9f, 0xb3, 0xe2, 0x01, 0x0b, 0x4a, 0xd2, 0xa6, 0xf7, 0xbd, 0x30, 0x4a, 0x89, 0x9c, /* Byte value: 0x11 */ + 0xc3, 0x30, 0x67, 0xd6, 0x11, 0x8c, 0xbd, 0x6d, 0x28, 0x38, 0xff, 0x92, 0x39, 0x6d, 0xde, 0x30, /* Byte value: 0x12 */ + 0xb5, 0x71, 0x2b, 0x8f, 0x41, 0x36, 0x29, 0x01, 0xa8, 0x87, 0x49, 0x48, 0xe9, 0x01, 0x79, 0x71, /* Byte value: 0x13 */ + 0x74, 0x79, 0x65, 0x17, 0xa7, 0xb5, 0xfd, 0x4c, 0xa4, 0x32, 0x82, 0xce, 0x03, 0x4c, 0x5c, 0x79, /* Byte value: 0x14 */ + 0x94, 0x6b, 0x0b, 0xc2, 0x70, 0x20, 0xa3, 0x54, 0xbf, 0x27, 0x95, 0xc1, 0xcf, 0x54, 0x8e, 0x6b, /* Byte value: 0x15 */ + 0x1d, 0x8f, 0x88, 0x75, 0x59, 0xbc, 0xae, 0x13, 0x29, 0xed, 0xc1, 0xd2, 0x70, 0x13, 0x17, 0x8f, /* Byte value: 0x16 */ + 0x25, 0x6a, 0x72, 0xd1, 0x1c, 0x08, 0x58, 0x15, 0x5f, 0x79, 0xb4, 0xa1, 0x43, 0x15, 0xc2, 0x6a, /* Byte value: 0x17 */ + 0x48, 0xec, 0xcd, 0x2f, 0xcf, 0x1f, 0xd9, 0x0a, 0x9a, 0x7f, 0x9f, 0x95, 0x55, 0x0a, 0xbc, 0xec, /* Byte value: 0x18 */ + 0xda, 0xcf, 0xbd, 0x3f, 0x65, 0x2e, 0xc1, 0x3e, 0x49, 0x0c, 0x56, 0x68, 0x2c, 0x3e, 0xfc, 0xcf, /* Byte value: 0x19 */ + 0xdd, 0x9b, 0x33, 0xca, 0x25, 0xd9, 0xaf, 0x4e, 0x37, 0xff, 0x10, 0x5e, 0x12, 0x4e, 0xae, 0x9b, /* Byte value: 0x1a */ + 0xe9, 0xee, 0x3f, 0x09, 0x17, 0x4f, 0xec, 0x88, 0x99, 0xc3, 0xdd, 0x55, 0x8e, 0x88, 0x24, 0xee, /* Byte value: 0x1b */ + 0xea, 0xca, 0xe3, 0x60, 0x7a, 0xa6, 0x50, 0xb8, 0xaf, 0xe9, 
0xf3, 0x4b, 0xd5, 0xb8, 0x43, 0xca, /* Byte value: 0x1c */ + 0x73, 0x2d, 0xeb, 0xe2, 0xe7, 0x42, 0x93, 0x3c, 0xda, 0xc1, 0xc4, 0xf8, 0x3d, 0x3c, 0x0e, 0x2d, /* Byte value: 0x1d */ + 0x2e, 0xae, 0x0a, 0x43, 0x2b, 0xdd, 0x83, 0xa5, 0xf9, 0x22, 0x4a, 0xef, 0xd2, 0xa5, 0xcf, 0xae, /* Byte value: 0x1e */ + 0xf5, 0x7d, 0x42, 0x5b, 0xd4, 0x15, 0x97, 0x8b, 0xa2, 0x89, 0x06, 0x8d, 0x76, 0x8b, 0xaf, 0x7d, /* Byte value: 0x1f */ + 0x26, 0x4e, 0xae, 0xb8, 0x71, 0xe1, 0xe4, 0x25, 0x69, 0x53, 0x9a, 0xbf, 0x18, 0x25, 0xa5, 0x4e, /* Byte value: 0x20 */ + 0xd0, 0x17, 0x30, 0x8a, 0xc8, 0x1d, 0xcf, 0x9e, 0xfd, 0xf0, 0xb2, 0x2c, 0x35, 0x9e, 0x6d, 0x17, /* Byte value: 0x21 */ + 0xc4, 0x64, 0xe9, 0x23, 0x51, 0x7b, 0xd3, 0x1d, 0x56, 0xcb, 0xb9, 0xa4, 0x07, 0x1d, 0x8c, 0x64, /* Byte value: 0x22 */ + 0xf3, 0x35, 0x39, 0x89, 0x0e, 0x04, 0x2c, 0xeb, 0xce, 0xdd, 0x5a, 0xb1, 0xc0, 0xeb, 0x61, 0x35, /* Byte value: 0x23 */ + 0x7b, 0xcd, 0x4f, 0x19, 0xbd, 0x7e, 0xf4, 0xbc, 0x4a, 0xb0, 0x14, 0xa8, 0xf7, 0xbc, 0x64, 0xcd, /* Byte value: 0x24 */ + 0x4d, 0x80, 0x6a, 0x94, 0x78, 0xe7, 0xde, 0x5a, 0xc0, 0x01, 0xed, 0xb7, 0xb8, 0x5a, 0x15, 0x80, /* Byte value: 0x25 */ + 0x45, 0x60, 0xce, 0x6f, 0x22, 0xdb, 0xb9, 0xda, 0x50, 0x70, 0x3d, 0xe7, 0x72, 0xda, 0x7f, 0x60, /* Byte value: 0x26 */ + 0xa9, 0xe2, 0x56, 0xdd, 0x82, 0x6c, 0x52, 0x02, 0x93, 0xcd, 0x92, 0x90, 0x11, 0x02, 0xf2, 0xe2, /* Byte value: 0x27 */ + 0x5e, 0xa7, 0x3d, 0xc8, 0xa1, 0x76, 0xac, 0xa9, 0x15, 0xc9, 0xa0, 0x09, 0xb4, 0xa9, 0xa6, 0xa7, /* Byte value: 0x28 */ + 0xd8, 0xf7, 0x94, 0x71, 0x92, 0x21, 0xa8, 0x1e, 0x6d, 0x81, 0x62, 0x7c, 0xff, 0x1e, 0x07, 0xf7, /* Byte value: 0x29 */ + 0x7e, 0xa1, 0xe8, 0xa2, 0x0a, 0x86, 0xf3, 0xec, 0x10, 0xce, 0x66, 0x8a, 0x1a, 0xec, 0xcd, 0xa1, /* Byte value: 0x2a */ + 0xf2, 0x29, 0xcc, 0xae, 0x94, 0xe2, 0xf9, 0xfb, 0xdc, 0x7a, 0x40, 0xbb, 0x48, 0xfb, 0xfd, 0x29, /* Byte value: 0x2b */ + 0x90, 0x1b, 0x59, 0x5e, 0x5d, 0x3e, 0x71, 0x14, 0xf7, 0xfe, 0xfd, 0xe9, 0xaa, 0x14, 0xbb, 0x1b, /* Byte value: 0x2c */ + 0x77, 0x5d, 0xb9, 0x7e, 0xca, 0x5c, 0x41, 0x7c, 0x92, 0x18, 0xac, 0xd0, 0x58, 0x7c, 0x3b, 0x5d, /* Byte value: 0x2d */ + 0x53, 0x2b, 0x3e, 0x88, 0x4c, 0xb2, 0xcc, 0x79, 0xdf, 0xc6, 0x02, 0x7b, 0x93, 0x79, 0x65, 0x2b, /* Byte value: 0x2e */ + 0xe5, 0x7e, 0xc9, 0x6e, 0x60, 0x6d, 0x59, 0x48, 0x41, 0x6b, 0x65, 0x2d, 0x21, 0x48, 0x7b, 0x7e, /* Byte value: 0x2f */ + 0x49, 0xf0, 0x38, 0x08, 0x55, 0xf9, 0x0c, 0x1a, 0x88, 0xd8, 0x85, 0x9f, 0xdd, 0x1a, 0x20, 0xf0, /* Byte value: 0x30 */ + 0x98, 0xfb, 0xfd, 0xa5, 0x07, 0x02, 0x16, 0x94, 0x67, 0x8f, 0x2d, 0xb9, 0x60, 0x94, 0xd1, 0xfb, /* Byte value: 0x31 */ + 0x13, 0x27, 0x57, 0x5c, 0xd9, 0x91, 0x72, 0xf3, 0xd5, 0xc8, 0x4d, 0xbe, 0x0c, 0xf3, 0xb3, 0x27, /* Byte value: 0x32 */ + 0x43, 0x28, 0xb5, 0xbd, 0xf8, 0xca, 0x02, 0xba, 0x3c, 0x24, 0x61, 0xdb, 0xc4, 0xba, 0xb1, 0x28, /* Byte value: 0x33 */ + 0xdc, 0x87, 0xc6, 0xed, 0xbf, 0x3f, 0x7a, 0x5e, 0x25, 0x58, 0x0a, 0x54, 0x9a, 0x5e, 0x32, 0x87, /* Byte value: 0x34 */ + 0xc7, 0x40, 0x35, 0x4a, 0x3c, 0x92, 0x6f, 0x2d, 0x60, 0xe1, 0x97, 0xba, 0x5c, 0x2d, 0xeb, 0x40, /* Byte value: 0x35 */ + 0xa0, 0x1e, 0x07, 0x01, 0x42, 0xb6, 0xe0, 0x92, 0x11, 0x1b, 0x58, 0xca, 0x53, 0x92, 0x04, 0x1e, /* Byte value: 0x36 */ + 0xcc, 0x84, 0x4d, 0xd8, 0x0b, 0x47, 0xb4, 0x9d, 0xc6, 0xba, 0x69, 0xf4, 0xcd, 0x9d, 0xe6, 0x84, /* Byte value: 0x37 */ + 0x14, 0x73, 0xd9, 0xa9, 0x99, 0x66, 0x1c, 0x83, 0xab, 0x3b, 0x0b, 0x88, 0x32, 0x83, 0xe1, 0x73, /* Byte value: 0x38 */ + 0x52, 0x37, 0xcb, 0xaf, 0xd6, 0x54, 0x19, 0x69, 0xcd, 0x61, 0x18, 0x71, 0x1b, 0x69, 0xf9, 0x37, /* Byte 
value: 0x39 */ + 0x2f, 0xb2, 0xff, 0x64, 0xb1, 0x3b, 0x56, 0xb5, 0xeb, 0x85, 0x50, 0xe5, 0x5a, 0xb5, 0x53, 0xb2, /* Byte value: 0x3a */ + 0x47, 0x58, 0xe7, 0x21, 0xd5, 0xd4, 0xd0, 0xfa, 0x74, 0xfd, 0x09, 0xf3, 0xa1, 0xfa, 0x84, 0x58, /* Byte value: 0x3b */ + 0xa6, 0x56, 0x7c, 0xd3, 0x98, 0xa7, 0x5b, 0xf2, 0x7d, 0x4f, 0x04, 0xf6, 0xe5, 0xf2, 0xca, 0x56, /* Byte value: 0x3c */ + 0x5f, 0xbb, 0xc8, 0xef, 0x3b, 0x90, 0x79, 0xb9, 0x07, 0x6e, 0xba, 0x03, 0x3c, 0xb9, 0x3a, 0xbb, /* Byte value: 0x3d */ + 0xe3, 0x36, 0xb2, 0xbc, 0xba, 0x7c, 0xe2, 0x28, 0x2d, 0x3f, 0x39, 0x11, 0x97, 0x28, 0xb5, 0x36, /* Byte value: 0x3e */ + 0xb8, 0xfd, 0x28, 0xcf, 0xac, 0xf2, 0x49, 0xd1, 0x62, 0x88, 0xeb, 0x3a, 0xce, 0xd1, 0xba, 0xfd, /* Byte value: 0x3f */ + 0xa7, 0x4a, 0x89, 0xf4, 0x02, 0x41, 0x8e, 0xe2, 0x6f, 0xe8, 0x1e, 0xfc, 0x6d, 0xe2, 0x56, 0x4a, /* Byte value: 0x40 */ + 0xc0, 0x14, 0xbb, 0xbf, 0x7c, 0x65, 0x01, 0x5d, 0x1e, 0x12, 0xd1, 0x8c, 0x62, 0x5d, 0xb9, 0x14, /* Byte value: 0x41 */ + 0x23, 0x22, 0x09, 0x03, 0xc6, 0x19, 0xe3, 0x75, 0x33, 0x2d, 0xe8, 0x9d, 0xf5, 0x75, 0x0c, 0x22, /* Byte value: 0x42 */ + 0x2a, 0xde, 0x58, 0xdf, 0x06, 0xc3, 0x51, 0xe5, 0xb1, 0xfb, 0x22, 0xc7, 0xb7, 0xe5, 0xfa, 0xde, /* Byte value: 0x43 */ + 0x62, 0x32, 0x95, 0xf0, 0xc9, 0xdc, 0x88, 0xef, 0x2b, 0x84, 0xbd, 0x52, 0xe2, 0xef, 0x46, 0x32, /* Byte value: 0x44 */ + 0x7d, 0x85, 0x34, 0xcb, 0x67, 0x6f, 0x4f, 0xdc, 0x26, 0xe4, 0x48, 0x94, 0x41, 0xdc, 0xaa, 0x85, /* Byte value: 0x45 */ + 0x31, 0x19, 0xab, 0x78, 0x85, 0x6e, 0x44, 0x96, 0xf4, 0x42, 0xbf, 0x29, 0x71, 0x96, 0x23, 0x19, /* Byte value: 0x46 */ + 0x30, 0x05, 0x5e, 0x5f, 0x1f, 0x88, 0x91, 0x86, 0xe6, 0xe5, 0xa5, 0x23, 0xf9, 0x86, 0xbf, 0x05, /* Byte value: 0x47 */ + 0xc1, 0x08, 0x4e, 0x98, 0xe6, 0x83, 0xd4, 0x4d, 0x0c, 0xb5, 0xcb, 0x86, 0xea, 0x4d, 0x25, 0x08, /* Byte value: 0x48 */ + 0x20, 0x06, 0xd5, 0x6a, 0xab, 0xf0, 0x5f, 0x45, 0x05, 0x07, 0xc6, 0x83, 0xae, 0x45, 0x6b, 0x06, /* Byte value: 0x49 */ + 0x8a, 0xc0, 0x5f, 0xde, 0x44, 0x75, 0xb1, 0x77, 0xa0, 0xe0, 0x7a, 0x0d, 0xe4, 0x77, 0xfe, 0xc0, /* Byte value: 0x4a */ + 0xd7, 0x43, 0xbe, 0x7f, 0x88, 0xea, 0xa1, 0xee, 0x83, 0x03, 0xf4, 0x1a, 0x0b, 0xee, 0x3f, 0x43, /* Byte value: 0x4b */ + 0x3d, 0x89, 0x5d, 0x1f, 0xf2, 0x4c, 0xf1, 0x56, 0x2c, 0xea, 0x07, 0x51, 0xde, 0x56, 0x7c, 0x89, /* Byte value: 0x4c */ + 0xbf, 0xa9, 0xa6, 0x3a, 0xec, 0x05, 0x27, 0xa1, 0x1c, 0x7b, 0xad, 0x0c, 0xf0, 0xa1, 0xe8, 0xa9, /* Byte value: 0x4d */ + 0xc6, 0x5c, 0xc0, 0x6d, 0xa6, 0x74, 0xba, 0x3d, 0x72, 0x46, 0x8d, 0xb0, 0xd4, 0x3d, 0x77, 0x5c, /* Byte value: 0x4e */ + 0x72, 0x31, 0x1e, 0xc5, 0x7d, 0xa4, 0x46, 0x2c, 0xc8, 0x66, 0xde, 0xf2, 0xb5, 0x2c, 0x92, 0x31, /* Byte value: 0x4f */ + 0xdb, 0xd3, 0x48, 0x18, 0xff, 0xc8, 0x14, 0x2e, 0x5b, 0xab, 0x4c, 0x62, 0xa4, 0x2e, 0x60, 0xd3, /* Byte value: 0x50 */ + 0x15, 0x6f, 0x2c, 0x8e, 0x03, 0x80, 0xc9, 0x93, 0xb9, 0x9c, 0x11, 0x82, 0xba, 0x93, 0x7d, 0x6f, /* Byte value: 0x51 */ + 0x99, 0xe7, 0x08, 0x82, 0x9d, 0xe4, 0xc3, 0x84, 0x75, 0x28, 0x37, 0xb3, 0xe8, 0x84, 0x4d, 0xe7, /* Byte value: 0x52 */ + 0x4b, 0xc8, 0x11, 0x46, 0xa2, 0xf6, 0x65, 0x3a, 0xac, 0x55, 0xb1, 0x8b, 0x0e, 0x3a, 0xdb, 0xc8, /* Byte value: 0x53 */ + 0x34, 0x75, 0x0c, 0xc3, 0x32, 0x96, 0x43, 0xc6, 0xae, 0x3c, 0xcd, 0x0b, 0x9c, 0xc6, 0x8a, 0x75, /* Byte value: 0x54 */ + 0x60, 0x0a, 0xbc, 0xbe, 0x3e, 0xd3, 0xe1, 0xcf, 0x0f, 0x09, 0x89, 0x46, 0x31, 0xcf, 0xbd, 0x0a, /* Byte value: 0x55 */ + 0x92, 0x23, 0x70, 0x10, 0xaa, 0x31, 0x18, 0x34, 0xd3, 0x73, 0xc9, 0xfd, 0x79, 0x34, 0x40, 0x23, /* Byte value: 0x56 */ + 0xac, 0x8e, 0xf1, 0x66, 
0x35, 0x94, 0x55, 0x52, 0xc9, 0xb3, 0xe0, 0xb2, 0xfc, 0x52, 0x5b, 0x8e, /* Byte value: 0x57 */ + 0x6e, 0xa2, 0x63, 0x97, 0xbe, 0xfe, 0x3d, 0x2f, 0xf3, 0x2c, 0x05, 0x2a, 0x4d, 0x2f, 0x19, 0xa2, /* Byte value: 0x58 */ + 0xa1, 0x02, 0xf2, 0x26, 0xd8, 0x50, 0x35, 0x82, 0x03, 0xbc, 0x42, 0xc0, 0xdb, 0x82, 0x98, 0x02, /* Byte value: 0x59 */ + 0x81, 0x04, 0x27, 0x4c, 0x73, 0xa0, 0x6a, 0xc7, 0x06, 0xbb, 0x84, 0x43, 0x75, 0xc7, 0xf3, 0x04, /* Byte value: 0x5a */ + 0x3b, 0xc1, 0x26, 0xcd, 0x28, 0x5d, 0x4a, 0x36, 0x40, 0xbe, 0x5b, 0x6d, 0x68, 0x36, 0xb2, 0xc1, /* Byte value: 0x5b */ + 0x19, 0xff, 0xda, 0xe9, 0x74, 0xa2, 0x7c, 0x53, 0x61, 0x34, 0xa9, 0xfa, 0x15, 0x53, 0x22, 0xff, /* Byte value: 0x5c */ + 0xe4, 0x62, 0x3c, 0x49, 0xfa, 0x8b, 0x8c, 0x58, 0x53, 0xcc, 0x7f, 0x27, 0xa9, 0x58, 0xe7, 0x62, /* Byte value: 0x5d */ + 0x2c, 0x96, 0x23, 0x0d, 0xdc, 0xd2, 0xea, 0x85, 0xdd, 0xaf, 0x7e, 0xfb, 0x01, 0x85, 0x34, 0x96, /* Byte value: 0x5e */ + 0xbd, 0x91, 0x8f, 0x74, 0x1b, 0x0a, 0x4e, 0x81, 0x38, 0xf6, 0x99, 0x18, 0x23, 0x81, 0x13, 0x91, /* Byte value: 0x5f */ + 0xb6, 0x55, 0xf7, 0xe6, 0x2c, 0xdf, 0x95, 0x31, 0x9e, 0xad, 0x67, 0x56, 0xb2, 0x31, 0x1e, 0x55, /* Byte value: 0x60 */ + 0xd3, 0x33, 0xec, 0xe3, 0xa5, 0xf4, 0x73, 0xae, 0xcb, 0xda, 0x9c, 0x32, 0x6e, 0xae, 0x0a, 0x33, /* Byte value: 0x61 */ + 0xe7, 0x46, 0xe0, 0x20, 0x97, 0x62, 0x30, 0x68, 0x65, 0xe6, 0x51, 0x39, 0xf2, 0x68, 0x80, 0x46, /* Byte value: 0x62 */ + 0x9b, 0xdf, 0x21, 0xcc, 0x6a, 0xeb, 0xaa, 0xa4, 0x51, 0xa5, 0x03, 0xa7, 0x3b, 0xa4, 0xb6, 0xdf, /* Byte value: 0x63 */ + 0x8c, 0x88, 0x24, 0x0c, 0x9e, 0x64, 0x0a, 0x17, 0xcc, 0xb4, 0x26, 0x31, 0x52, 0x17, 0x30, 0x88, /* Byte value: 0x64 */ + 0x04, 0x70, 0x52, 0x9c, 0x2d, 0x1e, 0xd2, 0x40, 0x48, 0xd9, 0x68, 0x28, 0x65, 0x40, 0x35, 0x70, /* Byte value: 0x65 */ + 0x86, 0x50, 0xa9, 0xb9, 0x33, 0x57, 0x04, 0xb7, 0x78, 0x48, 0xc2, 0x75, 0x4b, 0xb7, 0xa1, 0x50, /* Byte value: 0x66 */ + 0xef, 0xa6, 0x44, 0xdb, 0xcd, 0x5e, 0x57, 0xe8, 0xf5, 0x97, 0x81, 0x69, 0x38, 0xe8, 0xea, 0xa6, /* Byte value: 0x67 */ + 0x32, 0x3d, 0x77, 0x11, 0xe8, 0x87, 0xf8, 0xa6, 0xc2, 0x68, 0x91, 0x37, 0x2a, 0xa6, 0x44, 0x3d, /* Byte value: 0x68 */ + 0x0b, 0xc4, 0x78, 0x92, 0x37, 0xd5, 0xdb, 0xb0, 0xa6, 0x5b, 0xfe, 0x4e, 0x91, 0xb0, 0x0d, 0xc4, /* Byte value: 0x69 */ + 0x9e, 0xb3, 0x86, 0x77, 0xdd, 0x13, 0xad, 0xf4, 0x0b, 0xdb, 0x71, 0x85, 0xd6, 0xf4, 0x1f, 0xb3, /* Byte value: 0x6a */ + 0x80, 0x18, 0xd2, 0x6b, 0xe9, 0x46, 0xbf, 0xd7, 0x14, 0x1c, 0x9e, 0x49, 0xfd, 0xd7, 0x6f, 0x18, /* Byte value: 0x6b */ + 0x1c, 0x93, 0x7d, 0x52, 0xc3, 0x5a, 0x7b, 0x03, 0x3b, 0x4a, 0xdb, 0xd8, 0xf8, 0x03, 0x8b, 0x93, /* Byte value: 0x6c */ + 0xb1, 0x01, 0x79, 0x13, 0x6c, 0x28, 0xfb, 0x41, 0xe0, 0x5e, 0x21, 0x60, 0x8c, 0x41, 0x4c, 0x01, /* Byte value: 0x6d */ + 0xd4, 0x67, 0x62, 0x16, 0xe5, 0x03, 0x1d, 0xde, 0xb5, 0x29, 0xda, 0x04, 0x50, 0xde, 0x58, 0x67, /* Byte value: 0x6e */ + 0x7a, 0xd1, 0xba, 0x3e, 0x27, 0x98, 0x21, 0xac, 0x58, 0x17, 0x0e, 0xa2, 0x7f, 0xac, 0xf8, 0xd1, /* Byte value: 0x6f */ + 0x5b, 0xcb, 0x9a, 0x73, 0x16, 0x8e, 0xab, 0xf9, 0x4f, 0xb7, 0xd2, 0x2b, 0x59, 0xf9, 0x0f, 0xcb, /* Byte value: 0x70 */ + 0xa3, 0x3a, 0xdb, 0x68, 0x2f, 0x5f, 0x5c, 0xa2, 0x27, 0x31, 0x76, 0xd4, 0x08, 0xa2, 0x63, 0x3a, /* Byte value: 0x71 */ + 0xf9, 0xed, 0xb4, 0x3c, 0xa3, 0x37, 0x22, 0x4b, 0x7a, 0x21, 0xbe, 0xf5, 0xd9, 0x4b, 0xf0, 0xed, /* Byte value: 0x72 */ + 0x33, 0x21, 0x82, 0x36, 0x72, 0x61, 0x2d, 0xb6, 0xd0, 0xcf, 0x8b, 0x3d, 0xa2, 0xb6, 0xd8, 0x21, /* Byte value: 0x73 */ + 0x68, 0xea, 0x18, 0x45, 0x64, 0xef, 0x86, 0x4f, 0x9f, 0x78, 0x59, 
0x16, 0xfb, 0x4f, 0xd7, 0xea, /* Byte value: 0x74 */ + 0x96, 0x53, 0x22, 0x8c, 0x87, 0x2f, 0xca, 0x74, 0x9b, 0xaa, 0xa1, 0xd5, 0x1c, 0x74, 0x75, 0x53, /* Byte value: 0x75 */ + 0x46, 0x44, 0x12, 0x06, 0x4f, 0x32, 0x05, 0xea, 0x66, 0x5a, 0x13, 0xf9, 0x29, 0xea, 0x18, 0x44, /* Byte value: 0x76 */ + 0x02, 0x38, 0x29, 0x4e, 0xf7, 0x0f, 0x69, 0x20, 0x24, 0x8d, 0x34, 0x14, 0xd3, 0x20, 0xfb, 0x38, /* Byte value: 0x77 */ + 0xff, 0xa5, 0xcf, 0xee, 0x79, 0x26, 0x99, 0x2b, 0x16, 0x75, 0xe2, 0xc9, 0x6f, 0x2b, 0x3e, 0xa5, /* Byte value: 0x78 */ + 0x95, 0x77, 0xfe, 0xe5, 0xea, 0xc6, 0x76, 0x44, 0xad, 0x80, 0x8f, 0xcb, 0x47, 0x44, 0x12, 0x77, /* Byte value: 0x79 */ + 0x64, 0x7a, 0xee, 0x22, 0x13, 0xcd, 0x33, 0x8f, 0x47, 0xd0, 0xe1, 0x6e, 0x54, 0x8f, 0x88, 0x7a, /* Byte value: 0x7a */ + 0x40, 0x0c, 0x69, 0xd4, 0x95, 0x23, 0xbe, 0x8a, 0x0a, 0x0e, 0x4f, 0xc5, 0x9f, 0x8a, 0xd6, 0x0c, /* Byte value: 0x7b */ + 0x41, 0x10, 0x9c, 0xf3, 0x0f, 0xc5, 0x6b, 0x9a, 0x18, 0xa9, 0x55, 0xcf, 0x17, 0x9a, 0x4a, 0x10, /* Byte value: 0x7c */ + 0x16, 0x4b, 0xf0, 0xe7, 0x6e, 0x69, 0x75, 0xa3, 0x8f, 0xb6, 0x3f, 0x9c, 0xe1, 0xa3, 0x1a, 0x4b, /* Byte value: 0x7d */ + 0x0e, 0xa8, 0xdf, 0x29, 0x80, 0x2d, 0xdc, 0xe0, 0xfc, 0x25, 0x8c, 0x6c, 0x7c, 0xe0, 0xa4, 0xa8, /* Byte value: 0x7e */ + 0xb9, 0xe1, 0xdd, 0xe8, 0x36, 0x14, 0x9c, 0xc1, 0x70, 0x2f, 0xf1, 0x30, 0x46, 0xc1, 0x26, 0xe1, /* Byte value: 0x7f */ + 0xfd, 0x9d, 0xe6, 0xa0, 0x8e, 0x29, 0xf0, 0x0b, 0x32, 0xf8, 0xd6, 0xdd, 0xbc, 0x0b, 0xc5, 0x9d, /* Byte value: 0x80 */ + 0x6f, 0xbe, 0x96, 0xb0, 0x24, 0x18, 0xe8, 0x3f, 0xe1, 0x8b, 0x1f, 0x20, 0xc5, 0x3f, 0x85, 0xbe, /* Byte value: 0x81 */ + 0x91, 0x07, 0xac, 0x79, 0xc7, 0xd8, 0xa4, 0x04, 0xe5, 0x59, 0xe7, 0xe3, 0x22, 0x04, 0x27, 0x07, /* Byte value: 0x82 */ + 0xa5, 0x72, 0xa0, 0xba, 0xf5, 0x4e, 0xe7, 0xc2, 0x4b, 0x65, 0x2a, 0xe8, 0xbe, 0xc2, 0xad, 0x72, /* Byte value: 0x83 */ + 0xcf, 0xa0, 0x91, 0xb1, 0x66, 0xae, 0x08, 0xad, 0xf0, 0x90, 0x47, 0xea, 0x96, 0xad, 0x81, 0xa0, /* Byte value: 0x84 */ + 0x56, 0x47, 0x99, 0x33, 0xfb, 0x4a, 0xcb, 0x29, 0x85, 0xb8, 0x70, 0x59, 0x7e, 0x29, 0xcc, 0x47, /* Byte value: 0x85 */ + 0xba, 0xc5, 0x01, 0x81, 0x5b, 0xfd, 0x20, 0xf1, 0x46, 0x05, 0xdf, 0x2e, 0x1d, 0xf1, 0x41, 0xc5, /* Byte value: 0x86 */ + 0x61, 0x16, 0x49, 0x99, 0xa4, 0x35, 0x34, 0xdf, 0x1d, 0xae, 0x93, 0x4c, 0xb9, 0xdf, 0x21, 0x16, /* Byte value: 0x87 */ + 0x18, 0xe3, 0x2f, 0xce, 0xee, 0x44, 0xa9, 0x43, 0x73, 0x93, 0xb3, 0xf0, 0x9d, 0x43, 0xbe, 0xe3, /* Byte value: 0x88 */ + 0x54, 0x7f, 0xb0, 0x7d, 0x0c, 0x45, 0xa2, 0x09, 0xa1, 0x35, 0x44, 0x4d, 0xad, 0x09, 0x37, 0x7f, /* Byte value: 0x89 */ + 0x8b, 0xdc, 0xaa, 0xf9, 0xde, 0x93, 0x64, 0x67, 0xb2, 0x47, 0x60, 0x07, 0x6c, 0x67, 0x62, 0xdc, /* Byte value: 0x8a */ + 0x3e, 0xad, 0x81, 0x76, 0x9f, 0xa5, 0x4d, 0x66, 0x1a, 0xc0, 0x29, 0x4f, 0x85, 0x66, 0x1b, 0xad, /* Byte value: 0x8b */ + 0x5a, 0xd7, 0x6f, 0x54, 0x8c, 0x68, 0x7e, 0xe9, 0x5d, 0x10, 0xc8, 0x21, 0xd1, 0xe9, 0x93, 0xd7, /* Byte value: 0x8c */ + 0xce, 0xbc, 0x64, 0x96, 0xfc, 0x48, 0xdd, 0xbd, 0xe2, 0x37, 0x5d, 0xe0, 0x1e, 0xbd, 0x1d, 0xbc, /* Byte value: 0x8d */ + 0xf7, 0x45, 0x6b, 0x15, 0x23, 0x1a, 0xfe, 0xab, 0x86, 0x04, 0x32, 0x99, 0xa5, 0xab, 0x54, 0x45, /* Byte value: 0x8e */ + 0xd2, 0x2f, 0x19, 0xc4, 0x3f, 0x12, 0xa6, 0xbe, 0xd9, 0x7d, 0x86, 0x38, 0xe6, 0xbe, 0x96, 0x2f, /* Byte value: 0x8f */ + 0x82, 0x20, 0xfb, 0x25, 0x1e, 0x49, 0xd6, 0xf7, 0x30, 0x91, 0xaa, 0x5d, 0x2e, 0xf7, 0x94, 0x20, /* Byte value: 0x90 */ + 0xb3, 0x39, 0x50, 0x5d, 0x9b, 0x27, 0x92, 0x61, 0xc4, 0xd3, 0x15, 0x74, 0x5f, 0x61, 0xb7, 0x39, /* Byte value: 
0x91 */ + 0xb4, 0x6d, 0xde, 0xa8, 0xdb, 0xd0, 0xfc, 0x11, 0xba, 0x20, 0x53, 0x42, 0x61, 0x11, 0xe5, 0x6d, /* Byte value: 0x92 */ + 0xd6, 0x5f, 0x4b, 0x58, 0x12, 0x0c, 0x74, 0xfe, 0x91, 0xa4, 0xee, 0x10, 0x83, 0xfe, 0xa3, 0x5f, /* Byte value: 0x93 */ + 0xe1, 0x0e, 0x9b, 0xf2, 0x4d, 0x73, 0x8b, 0x08, 0x09, 0xb2, 0x0d, 0x05, 0x44, 0x08, 0x4e, 0x0e, /* Byte value: 0x94 */ + 0xc2, 0x2c, 0x92, 0xf1, 0x8b, 0x6a, 0x68, 0x7d, 0x3a, 0x9f, 0xe5, 0x98, 0xb1, 0x7d, 0x42, 0x2c, /* Byte value: 0x95 */ + 0x5d, 0x83, 0xe1, 0xa1, 0xcc, 0x9f, 0x10, 0x99, 0x23, 0xe3, 0x8e, 0x17, 0xef, 0x99, 0xc1, 0x83, /* Byte value: 0x96 */ + 0xfa, 0xc9, 0x68, 0x55, 0xce, 0xde, 0x9e, 0x7b, 0x4c, 0x0b, 0x90, 0xeb, 0x82, 0x7b, 0x97, 0xc9, /* Byte value: 0x97 */ + 0xa4, 0x6e, 0x55, 0x9d, 0x6f, 0xa8, 0x32, 0xd2, 0x59, 0xc2, 0x30, 0xe2, 0x36, 0xd2, 0x31, 0x6e, /* Byte value: 0x98 */ + 0x1f, 0xb7, 0xa1, 0x3b, 0xae, 0xb3, 0xc7, 0x33, 0x0d, 0x60, 0xf5, 0xc6, 0xa3, 0x33, 0xec, 0xb7, /* Byte value: 0x99 */ + 0x0c, 0x90, 0xf6, 0x67, 0x77, 0x22, 0xb5, 0xc0, 0xd8, 0xa8, 0xb8, 0x78, 0xaf, 0xc0, 0x5f, 0x90, /* Byte value: 0x9a */ + 0x01, 0x1c, 0xf5, 0x27, 0x9a, 0xe6, 0xd5, 0x10, 0x12, 0xa7, 0x1a, 0x0a, 0x88, 0x10, 0x9c, 0x1c, /* Byte value: 0x9b */ + 0x2d, 0x8a, 0xd6, 0x2a, 0x46, 0x34, 0x3f, 0x95, 0xcf, 0x08, 0x64, 0xf1, 0x89, 0x95, 0xa8, 0x8a, /* Byte value: 0x9c */ + 0x67, 0x5e, 0x32, 0x4b, 0x7e, 0x24, 0x8f, 0xbf, 0x71, 0xfa, 0xcf, 0x70, 0x0f, 0xbf, 0xef, 0x5e, /* Byte value: 0x9d */ + 0xe2, 0x2a, 0x47, 0x9b, 0x20, 0x9a, 0x37, 0x38, 0x3f, 0x98, 0x23, 0x1b, 0x1f, 0x38, 0x29, 0x2a, /* Byte value: 0x9e */ + 0x09, 0xfc, 0x51, 0xdc, 0xc0, 0xda, 0xb2, 0x90, 0x82, 0xd6, 0xca, 0x5a, 0x42, 0x90, 0xf6, 0xfc, /* Byte value: 0x9f */ + 0x57, 0x5b, 0x6c, 0x14, 0x61, 0xac, 0x1e, 0x39, 0x97, 0x1f, 0x6a, 0x53, 0xf6, 0x39, 0x50, 0x5b, /* Byte value: 0xa0 */ + 0x11, 0x1f, 0x7e, 0x12, 0x2e, 0x9e, 0x1b, 0xd3, 0xf1, 0x45, 0x79, 0xaa, 0xdf, 0xd3, 0x48, 0x1f, /* Byte value: 0xa1 */ + 0xb7, 0x49, 0x02, 0xc1, 0xb6, 0x39, 0x40, 0x21, 0x8c, 0x0a, 0x7d, 0x5c, 0x3a, 0x21, 0x82, 0x49, /* Byte value: 0xa2 */ + 0xdf, 0xa3, 0x1a, 0x84, 0xd2, 0xd6, 0xc6, 0x6e, 0x13, 0x72, 0x24, 0x4a, 0xc1, 0x6e, 0x55, 0xa3, /* Byte value: 0xa3 */ + 0x6a, 0xd2, 0x31, 0x0b, 0x93, 0xe0, 0xef, 0x6f, 0xbb, 0xf5, 0x6d, 0x02, 0x28, 0x6f, 0x2c, 0xd2, /* Byte value: 0xa4 */ + 0x93, 0x3f, 0x85, 0x37, 0x30, 0xd7, 0xcd, 0x24, 0xc1, 0xd4, 0xd3, 0xf7, 0xf1, 0x24, 0xdc, 0x3f, /* Byte value: 0xa5 */ + 0x24, 0x76, 0x87, 0xf6, 0x86, 0xee, 0x8d, 0x05, 0x4d, 0xde, 0xae, 0xab, 0xcb, 0x05, 0x5e, 0x76, /* Byte value: 0xa6 */ + 0x7f, 0xbd, 0x1d, 0x85, 0x90, 0x60, 0x26, 0xfc, 0x02, 0x69, 0x7c, 0x80, 0x92, 0xfc, 0x51, 0xbd, /* Byte value: 0xa7 */ + 0x66, 0x42, 0xc7, 0x6c, 0xe4, 0xc2, 0x5a, 0xaf, 0x63, 0x5d, 0xd5, 0x7a, 0x87, 0xaf, 0x73, 0x42, /* Byte value: 0xa8 */ + 0xfe, 0xb9, 0x3a, 0xc9, 0xe3, 0xc0, 0x4c, 0x3b, 0x04, 0xd2, 0xf8, 0xc3, 0xe7, 0x3b, 0xa2, 0xb9, /* Byte value: 0xa9 */ + 0x07, 0x54, 0x8e, 0xf5, 0x40, 0xf7, 0x6e, 0x70, 0x7e, 0xf3, 0x46, 0x36, 0x3e, 0x70, 0x52, 0x54, /* Byte value: 0xaa */ + 0x44, 0x7c, 0x3b, 0x48, 0xb8, 0x3d, 0x6c, 0xca, 0x42, 0xd7, 0x27, 0xed, 0xfa, 0xca, 0xe3, 0x7c, /* Byte value: 0xab */ + 0x3f, 0xb1, 0x74, 0x51, 0x05, 0x43, 0x98, 0x76, 0x08, 0x67, 0x33, 0x45, 0x0d, 0x76, 0x87, 0xb1, /* Byte value: 0xac */ + 0xf8, 0xf1, 0x41, 0x1b, 0x39, 0xd1, 0xf7, 0x5b, 0x68, 0x86, 0xa4, 0xff, 0x51, 0x5b, 0x6c, 0xf1, /* Byte value: 0xad */ + 0x06, 0x48, 0x7b, 0xd2, 0xda, 0x11, 0xbb, 0x60, 0x6c, 0x54, 0x5c, 0x3c, 0xb6, 0x60, 0xce, 0x48, /* Byte value: 0xae */ + 0xca, 0xcc, 0x36, 0x0a, 0xd1, 0x56, 
0x0f, 0xfd, 0xaa, 0xee, 0x35, 0xc8, 0x7b, 0xfd, 0x28, 0xcc, /* Byte value: 0xaf */ + 0xad, 0x92, 0x04, 0x41, 0xaf, 0x72, 0x80, 0x42, 0xdb, 0x14, 0xfa, 0xb8, 0x74, 0x42, 0xc7, 0x92, /* Byte value: 0xb0 */ + 0x2b, 0xc2, 0xad, 0xf8, 0x9c, 0x25, 0x84, 0xf5, 0xa3, 0x5c, 0x38, 0xcd, 0x3f, 0xf5, 0x66, 0xc2, /* Byte value: 0xb1 */ + 0xae, 0xb6, 0xd8, 0x28, 0xc2, 0x9b, 0x3c, 0x72, 0xed, 0x3e, 0xd4, 0xa6, 0x2f, 0x72, 0xa0, 0xb6, /* Byte value: 0xb2 */ + 0x22, 0x3e, 0xfc, 0x24, 0x5c, 0xff, 0x36, 0x65, 0x21, 0x8a, 0xf2, 0x97, 0x7d, 0x65, 0x90, 0x3e, /* Byte value: 0xb3 */ + 0x39, 0xf9, 0x0f, 0x83, 0xdf, 0x52, 0x23, 0x16, 0x64, 0x33, 0x6f, 0x79, 0xbb, 0x16, 0x49, 0xf9, /* Byte value: 0xb4 */ + 0xde, 0xbf, 0xef, 0xa3, 0x48, 0x30, 0x13, 0x7e, 0x01, 0xd5, 0x3e, 0x40, 0x49, 0x7e, 0xc9, 0xbf, /* Byte value: 0xb5 */ + 0x1e, 0xab, 0x54, 0x1c, 0x34, 0x55, 0x12, 0x23, 0x1f, 0xc7, 0xef, 0xcc, 0x2b, 0x23, 0x70, 0xab, /* Byte value: 0xb6 */ + 0xa8, 0xfe, 0xa3, 0xfa, 0x18, 0x8a, 0x87, 0x12, 0x81, 0x6a, 0x88, 0x9a, 0x99, 0x12, 0x6e, 0xfe, /* Byte value: 0xb7 */ + 0x58, 0xef, 0x46, 0x1a, 0x7b, 0x67, 0x17, 0xc9, 0x79, 0x9d, 0xfc, 0x35, 0x02, 0xc9, 0x68, 0xef, /* Byte value: 0xb8 */ + 0x88, 0xf8, 0x76, 0x90, 0xb3, 0x7a, 0xd8, 0x57, 0x84, 0x6d, 0x4e, 0x19, 0x37, 0x57, 0x05, 0xf8, /* Byte value: 0xb9 */ + 0x9a, 0xc3, 0xd4, 0xeb, 0xf0, 0x0d, 0x7f, 0xb4, 0x43, 0x02, 0x19, 0xad, 0xb3, 0xb4, 0x2a, 0xc3, /* Byte value: 0xba */ + 0x69, 0xf6, 0xed, 0x62, 0xfe, 0x09, 0x53, 0x5f, 0x8d, 0xdf, 0x43, 0x1c, 0x73, 0x5f, 0x4b, 0xf6, /* Byte value: 0xbb */ + 0x28, 0xe6, 0x71, 0x91, 0xf1, 0xcc, 0x38, 0xc5, 0x95, 0x76, 0x16, 0xd3, 0x64, 0xc5, 0x01, 0xe6, /* Byte value: 0xbc */ + 0x65, 0x66, 0x1b, 0x05, 0x89, 0x2b, 0xe6, 0x9f, 0x55, 0x77, 0xfb, 0x64, 0xdc, 0x9f, 0x14, 0x66, /* Byte value: 0xbd */ + 0xd5, 0x7b, 0x97, 0x31, 0x7f, 0xe5, 0xc8, 0xce, 0xa7, 0x8e, 0xc0, 0x0e, 0xd8, 0xce, 0xc4, 0x7b, /* Byte value: 0xbe */ + 0x7c, 0x99, 0xc1, 0xec, 0xfd, 0x89, 0x9a, 0xcc, 0x34, 0x43, 0x52, 0x9e, 0xc9, 0xcc, 0x36, 0x99, /* Byte value: 0xbf */ + 0xbc, 0x8d, 0x7a, 0x53, 0x81, 0xec, 0x9b, 0x91, 0x2a, 0x51, 0x83, 0x12, 0xab, 0x91, 0x8f, 0x8d, /* Byte value: 0xc0 */ + 0x38, 0xe5, 0xfa, 0xa4, 0x45, 0xb4, 0xf6, 0x06, 0x76, 0x94, 0x75, 0x73, 0x33, 0x06, 0xd5, 0xe5, /* Byte value: 0xc1 */ + 0x4f, 0xb8, 0x43, 0xda, 0x8f, 0xe8, 0xb7, 0x7a, 0xe4, 0x8c, 0xd9, 0xa3, 0x6b, 0x7a, 0xee, 0xb8, /* Byte value: 0xc2 */ + 0xe0, 0x12, 0x6e, 0xd5, 0xd7, 0x95, 0x5e, 0x18, 0x1b, 0x15, 0x17, 0x0f, 0xcc, 0x18, 0xd2, 0x12, /* Byte value: 0xc3 */ + 0x27, 0x52, 0x5b, 0x9f, 0xeb, 0x07, 0x31, 0x35, 0x7b, 0xf4, 0x80, 0xb5, 0x90, 0x35, 0x39, 0x52, /* Byte value: 0xc4 */ + 0x76, 0x41, 0x4c, 0x59, 0x50, 0xba, 0x94, 0x6c, 0x80, 0xbf, 0xb6, 0xda, 0xd0, 0x6c, 0xa7, 0x41, /* Byte value: 0xc5 */ + 0x78, 0xe9, 0x93, 0x70, 0xd0, 0x97, 0x48, 0x8c, 0x7c, 0x9a, 0x3a, 0xb6, 0xac, 0x8c, 0x03, 0xe9, /* Byte value: 0xc6 */ + 0x89, 0xe4, 0x83, 0xb7, 0x29, 0x9c, 0x0d, 0x47, 0x96, 0xca, 0x54, 0x13, 0xbf, 0x47, 0x99, 0xe4, /* Byte value: 0xc7 */ + 0x8d, 0x94, 0xd1, 0x2b, 0x04, 0x82, 0xdf, 0x07, 0xde, 0x13, 0x3c, 0x3b, 0xda, 0x07, 0xac, 0x94, /* Byte value: 0xc8 */ + 0xaf, 0xaa, 0x2d, 0x0f, 0x58, 0x7d, 0xe9, 0x62, 0xff, 0x99, 0xce, 0xac, 0xa7, 0x62, 0x3c, 0xaa, /* Byte value: 0xc9 */ + 0x9f, 0xaf, 0x73, 0x50, 0x47, 0xf5, 0x78, 0xe4, 0x19, 0x7c, 0x6b, 0x8f, 0x5e, 0xe4, 0x83, 0xaf, /* Byte value: 0xca */ + 0x84, 0x68, 0x80, 0xf7, 0xc4, 0x58, 0x6d, 0x97, 0x5c, 0xc5, 0xf6, 0x61, 0x98, 0x97, 0x5a, 0x68, /* Byte value: 0xcb */ + 0xe6, 0x5a, 0x15, 0x07, 0x0d, 0x84, 0xe5, 0x78, 0x77, 0x41, 0x4b, 0x33, 0x7a, 
0x78, 0x1c, 0x5a, /* Byte value: 0xcc */ + 0xf1, 0x0d, 0x10, 0xc7, 0xf9, 0x0b, 0x45, 0xcb, 0xea, 0x50, 0x6e, 0xa5, 0x13, 0xcb, 0x9a, 0x0d, /* Byte value: 0xcd */ + 0x63, 0x2e, 0x60, 0xd7, 0x53, 0x3a, 0x5d, 0xff, 0x39, 0x23, 0xa7, 0x58, 0x6a, 0xff, 0xda, 0x2e, /* Byte value: 0xce */ + 0x51, 0x13, 0x17, 0xc6, 0xbb, 0xbd, 0xa5, 0x59, 0xfb, 0x4b, 0x36, 0x6f, 0x40, 0x59, 0x9e, 0x13, /* Byte value: 0xcf */ + 0xc8, 0xf4, 0x1f, 0x44, 0x26, 0x59, 0x66, 0xdd, 0x8e, 0x63, 0x01, 0xdc, 0xa8, 0xdd, 0xd3, 0xf4, /* Byte value: 0xd0 */ + 0x03, 0x24, 0xdc, 0x69, 0x6d, 0xe9, 0xbc, 0x30, 0x36, 0x2a, 0x2e, 0x1e, 0x5b, 0x30, 0x67, 0x24, /* Byte value: 0xd1 */ + 0xed, 0x9e, 0x6d, 0x95, 0x3a, 0x51, 0x3e, 0xc8, 0xd1, 0x1a, 0xb5, 0x7d, 0xeb, 0xc8, 0x11, 0x9e, /* Byte value: 0xd2 */ + 0x70, 0x09, 0x37, 0x8b, 0x8a, 0xab, 0x2f, 0x0c, 0xec, 0xeb, 0xea, 0xe6, 0x66, 0x0c, 0x69, 0x09, /* Byte value: 0xd3 */ + 0x3c, 0x95, 0xa8, 0x38, 0x68, 0xaa, 0x24, 0x46, 0x3e, 0x4d, 0x1d, 0x5b, 0x56, 0x46, 0xe0, 0x95, /* Byte value: 0xd4 */ + 0x8e, 0xb0, 0x0d, 0x42, 0x69, 0x6b, 0x63, 0x37, 0xe8, 0x39, 0x12, 0x25, 0x81, 0x37, 0xcb, 0xb0, /* Byte value: 0xd5 */ + 0xd9, 0xeb, 0x61, 0x56, 0x08, 0xc7, 0x7d, 0x0e, 0x7f, 0x26, 0x78, 0x76, 0x77, 0x0e, 0x9b, 0xeb, /* Byte value: 0xd6 */ + 0x10, 0x03, 0x8b, 0x35, 0xb4, 0x78, 0xce, 0xc3, 0xe3, 0xe2, 0x63, 0xa0, 0x57, 0xc3, 0xd4, 0x03, /* Byte value: 0xd7 */ + 0x1a, 0xdb, 0x06, 0x80, 0x19, 0x4b, 0xc0, 0x63, 0x57, 0x1e, 0x87, 0xe4, 0x4e, 0x63, 0x45, 0xdb, /* Byte value: 0xd8 */ + 0x9d, 0x97, 0x5a, 0x1e, 0xb0, 0xfa, 0x11, 0xc4, 0x3d, 0xf1, 0x5f, 0x9b, 0x8d, 0xc4, 0x78, 0x97, /* Byte value: 0xd9 */ + 0xee, 0xba, 0xb1, 0xfc, 0x57, 0xb8, 0x82, 0xf8, 0xe7, 0x30, 0x9b, 0x63, 0xb0, 0xf8, 0x76, 0xba, /* Byte value: 0xda */ + 0xcb, 0xd0, 0xc3, 0x2d, 0x4b, 0xb0, 0xda, 0xed, 0xb8, 0x49, 0x2f, 0xc2, 0xf3, 0xed, 0xb4, 0xd0, /* Byte value: 0xdb */ + 0x97, 0x4f, 0xd7, 0xab, 0x1d, 0xc9, 0x1f, 0x64, 0x89, 0x0d, 0xbb, 0xdf, 0x94, 0x64, 0xe9, 0x4f, /* Byte value: 0xdc */ + 0xc9, 0xe8, 0xea, 0x63, 0xbc, 0xbf, 0xb3, 0xcd, 0x9c, 0xc4, 0x1b, 0xd6, 0x20, 0xcd, 0x4f, 0xe8, /* Byte value: 0xdd */ + 0x79, 0xf5, 0x66, 0x57, 0x4a, 0x71, 0x9d, 0x9c, 0x6e, 0x3d, 0x20, 0xbc, 0x24, 0x9c, 0x9f, 0xf5, /* Byte value: 0xde */ + 0xb2, 0x25, 0xa5, 0x7a, 0x01, 0xc1, 0x47, 0x71, 0xd6, 0x74, 0x0f, 0x7e, 0xd7, 0x71, 0x2b, 0x25, /* Byte value: 0xdf */ + 0x35, 0x69, 0xf9, 0xe4, 0xa8, 0x70, 0x96, 0xd6, 0xbc, 0x9b, 0xd7, 0x01, 0x14, 0xd6, 0x16, 0x69, /* Byte value: 0xe0 */ + 0x6c, 0x9a, 0x4a, 0xd9, 0x49, 0xf1, 0x54, 0x0f, 0xd7, 0xa1, 0x31, 0x3e, 0x9e, 0x0f, 0xe2, 0x9a, /* Byte value: 0xe1 */ + 0x9c, 0x8b, 0xaf, 0x39, 0x2a, 0x1c, 0xc4, 0xd4, 0x2f, 0x56, 0x45, 0x91, 0x05, 0xd4, 0xe4, 0x8b, /* Byte value: 0xe2 */ + 0x1b, 0xc7, 0xf3, 0xa7, 0x83, 0xad, 0x15, 0x73, 0x45, 0xb9, 0x9d, 0xee, 0xc6, 0x73, 0xd9, 0xc7, /* Byte value: 0xe3 */ + 0x05, 0x6c, 0xa7, 0xbb, 0xb7, 0xf8, 0x07, 0x50, 0x5a, 0x7e, 0x72, 0x22, 0xed, 0x50, 0xa9, 0x6c, /* Byte value: 0xe4 */ + 0x50, 0x0f, 0xe2, 0xe1, 0x21, 0x5b, 0x70, 0x49, 0xe9, 0xec, 0x2c, 0x65, 0xc8, 0x49, 0x02, 0x0f, /* Byte value: 0xe5 */ + 0xf0, 0x11, 0xe5, 0xe0, 0x63, 0xed, 0x90, 0xdb, 0xf8, 0xf7, 0x74, 0xaf, 0x9b, 0xdb, 0x06, 0x11, /* Byte value: 0xe6 */ + 0xd1, 0x0b, 0xc5, 0xad, 0x52, 0xfb, 0x1a, 0x8e, 0xef, 0x57, 0xa8, 0x26, 0xbd, 0x8e, 0xf1, 0x0b, /* Byte value: 0xe7 */ + 0x12, 0x3b, 0xa2, 0x7b, 0x43, 0x77, 0xa7, 0xe3, 0xc7, 0x6f, 0x57, 0xb4, 0x84, 0xe3, 0x2f, 0x3b, /* Byte value: 0xe8 */ + 0x87, 0x4c, 0x5c, 0x9e, 0xa9, 0xb1, 0xd1, 0xa7, 0x6a, 0xef, 0xd8, 0x7f, 0xc3, 0xa7, 0x3d, 0x4c, /* Byte value: 0xe9 */ + 
0x08, 0xe0, 0xa4, 0xfb, 0x5a, 0x3c, 0x67, 0x80, 0x90, 0x71, 0xd0, 0x50, 0xca, 0x80, 0x6a, 0xe0, /* Byte value: 0xea */ + 0x59, 0xf3, 0xb3, 0x3d, 0xe1, 0x81, 0xc2, 0xd9, 0x6b, 0x3a, 0xe6, 0x3f, 0x8a, 0xd9, 0xf4, 0xf3, /* Byte value: 0xeb */ + 0xfc, 0x81, 0x13, 0x87, 0x14, 0xcf, 0x25, 0x1b, 0x20, 0x5f, 0xcc, 0xd7, 0x34, 0x1b, 0x59, 0x81, /* Byte value: 0xec */ + 0x21, 0x1a, 0x20, 0x4d, 0x31, 0x16, 0x8a, 0x55, 0x17, 0xa0, 0xdc, 0x89, 0x26, 0x55, 0xf7, 0x1a, /* Byte value: 0xed */ + 0x85, 0x74, 0x75, 0xd0, 0x5e, 0xbe, 0xb8, 0x87, 0x4e, 0x62, 0xec, 0x6b, 0x10, 0x87, 0xc6, 0x74, /* Byte value: 0xee */ + 0x55, 0x63, 0x45, 0x5a, 0x96, 0xa3, 0x77, 0x19, 0xb3, 0x92, 0x5e, 0x47, 0x25, 0x19, 0xab, 0x63, /* Byte value: 0xef */ + 0x4e, 0xa4, 0xb6, 0xfd, 0x15, 0x0e, 0x62, 0x6a, 0xf6, 0x2b, 0xc3, 0xa9, 0xe3, 0x6a, 0x72, 0xa4, /* Byte value: 0xf0 */ + 0xec, 0x82, 0x98, 0xb2, 0xa0, 0xb7, 0xeb, 0xd8, 0xc3, 0xbd, 0xaf, 0x77, 0x63, 0xd8, 0x8d, 0x82, /* Byte value: 0xf1 */ + 0xfb, 0xd5, 0x9d, 0x72, 0x54, 0x38, 0x4b, 0x6b, 0x5e, 0xac, 0x8a, 0xe1, 0x0a, 0x6b, 0x0b, 0xd5, /* Byte value: 0xf2 */ + 0x36, 0x4d, 0x25, 0x8d, 0xc5, 0x99, 0x2a, 0xe6, 0x8a, 0xb1, 0xf9, 0x1f, 0x4f, 0xe6, 0x71, 0x4d, /* Byte value: 0xf3 */ + 0x75, 0x65, 0x90, 0x30, 0x3d, 0x53, 0x28, 0x5c, 0xb6, 0x95, 0x98, 0xc4, 0x8b, 0x5c, 0xc0, 0x65, /* Byte value: 0xf4 */ + 0x37, 0x51, 0xd0, 0xaa, 0x5f, 0x7f, 0xff, 0xf6, 0x98, 0x16, 0xe3, 0x15, 0xc7, 0xf6, 0xed, 0x51, /* Byte value: 0xf5 */ + 0x5c, 0x9f, 0x14, 0x86, 0x56, 0x79, 0xc5, 0x89, 0x31, 0x44, 0x94, 0x1d, 0x67, 0x89, 0x5d, 0x9f, /* Byte value: 0xf6 */ + 0xf6, 0x59, 0x9e, 0x32, 0xb9, 0xfc, 0x2b, 0xbb, 0x94, 0xa3, 0x28, 0x93, 0x2d, 0xbb, 0xc8, 0x59, /* Byte value: 0xf7 */ + 0xf4, 0x61, 0xb7, 0x7c, 0x4e, 0xf3, 0x42, 0x9b, 0xb0, 0x2e, 0x1c, 0x87, 0xfe, 0x9b, 0x33, 0x61, /* Byte value: 0xf8 */ + 0xcd, 0x98, 0xb8, 0xff, 0x91, 0xa1, 0x61, 0x8d, 0xd4, 0x1d, 0x73, 0xfe, 0x45, 0x8d, 0x7a, 0x98, /* Byte value: 0xf9 */ + 0x6b, 0xce, 0xc4, 0x2c, 0x09, 0x06, 0x3a, 0x7f, 0xa9, 0x52, 0x77, 0x08, 0xa0, 0x7f, 0xb0, 0xce, /* Byte value: 0xfa */ + 0xa2, 0x26, 0x2e, 0x4f, 0xb5, 0xb9, 0x89, 0xb2, 0x35, 0x96, 0x6c, 0xde, 0x80, 0xb2, 0xff, 0x26, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0xaa, 0xc6, 0x8a, 0xb4, 0xef, 0x85, 0xee, 0x32, 0xa5, 0xe7, 0xbc, 0x8e, 0x4a, 0x32, 0x95, 0xc6, /* Byte value: 0xfd */ + 0x71, 0x15, 0xc2, 0xac, 0x10, 0x4d, 0xfa, 0x1c, 0xfe, 0x4c, 0xf0, 0xec, 0xee, 0x1c, 0xf5, 0x15, /* Byte value: 0xfe */ + 0x42, 0x34, 0x40, 0x9a, 0x62, 0x2c, 0xd7, 0xaa, 0x2e, 0x83, 0x7b, 0xd1, 0x4c, 0xaa, 0x2d, 0x34, /* Byte value: 0xff */ + + /* Matrix row: 14 */ + 0x47, 0x52, 0xd9, 0xd2, 0x69, 0x81, 0x42, 0x4a, 0x4e, 0xe7, 0x53, 0x5d, 0xa9, 0x30, 0x47, 0x7c, /* Byte value: 0x00 */ + 0xe9, 0xdb, 0x31, 0x5c, 0x2e, 0xdf, 0x12, 0x97, 0x34, 0x3f, 0x83, 0x15, 0x0b, 0x98, 0xe9, 0x1c, /* Byte value: 0x01 */ + 0x8f, 0x26, 0xbe, 0x4a, 0x25, 0x12, 0xca, 0xbc, 0x87, 0xf8, 0x58, 0xe9, 0x7f, 0xf7, 0x8f, 0x8c, /* Byte value: 0x02 */ + 0x4e, 0x4a, 0xa1, 0x54, 0x2a, 0x05, 0x39, 0xe1, 0x8d, 0xb6, 0x51, 0xd3, 0x3b, 0x95, 0x4e, 0x2e, /* Byte value: 0x03 */ + 0x03, 0x45, 0x92, 0x77, 0xda, 0xb6, 0xd2, 0x78, 0x2d, 0xdc, 0xc1, 0xf5, 0xf1, 0x7a, 0x03, 0x9c, /* Byte value: 0x04 */ + 0xcf, 0x7c, 0x4f, 0x5b, 0xcc, 0xef, 0xa1, 0x2e, 0x88, 0x91, 0xb4, 0xce, 0x19, 0xa4, 0xcf, 0x7f, /* Byte value: 0x05 */ + 0x0c, 0xd7, 0x0d, 0x1f, 0xee, 0x9d, 0xce, 0x23, 0xb4, 0xf6, 0x82, 0x52, 0x42, 0x2b, 0x0c, 0x35, /* Byte value: 0x06 */ + 0xba, 0x33, 0xff, 
0xaf, 0xb6, 0x3a, 0xc1, 0xb8, 0x65, 0x01, 0x95, 0xbe, 0x64, 0xd5, 0xba, 0x3f, /* Byte value: 0x07 */ + 0x12, 0x30, 0xf0, 0xcf, 0x86, 0xcb, 0xf6, 0x95, 0x45, 0xa2, 0x04, 0xdf, 0xe7, 0x89, 0x12, 0xa4, /* Byte value: 0x08 */ + 0xf0, 0x34, 0xe4, 0x4f, 0xc6, 0xf5, 0x03, 0xf9, 0x84, 0xe5, 0xba, 0xe2, 0x61, 0x59, 0xf0, 0x02, /* Byte value: 0x09 */ + 0x2b, 0xf2, 0xbc, 0x35, 0xfb, 0x7e, 0x33, 0xb2, 0x13, 0xad, 0x4b, 0xda, 0xbe, 0x80, 0x2b, 0x22, /* Byte value: 0x0a */ + 0x2c, 0xfa, 0x94, 0xf6, 0x7b, 0x02, 0x1a, 0x6a, 0x52, 0x23, 0xf4, 0xa0, 0x71, 0xe3, 0x2c, 0xad, /* Byte value: 0x0b */ + 0x32, 0x1d, 0x69, 0x26, 0x13, 0x54, 0x22, 0xdc, 0xa3, 0x77, 0x72, 0x2d, 0xd4, 0x41, 0x32, 0x3c, /* Byte value: 0x0c */ + 0x1b, 0x28, 0x88, 0x49, 0xc5, 0x4f, 0x8d, 0x3e, 0x86, 0xf3, 0x06, 0x51, 0x75, 0x2c, 0x1b, 0xf6, /* Byte value: 0x0d */ + 0xd2, 0xde, 0x20, 0xfc, 0x7e, 0x0f, 0x4b, 0xe0, 0x54, 0x19, 0xf3, 0xb6, 0x4d, 0x7c, 0xd2, 0x72, /* Byte value: 0x0e */ + 0x08, 0x9a, 0xb7, 0xab, 0xb4, 0x57, 0x35, 0x83, 0xd8, 0xa4, 0xfc, 0xdd, 0x7c, 0x32, 0x08, 0x26, /* Byte value: 0x0f */ + 0xdd, 0x4c, 0xbf, 0x94, 0x4a, 0x24, 0x57, 0xbb, 0xcd, 0x33, 0xb0, 0x11, 0xfe, 0x2d, 0xdd, 0xdb, /* Byte value: 0x10 */ + 0x60, 0x77, 0x68, 0xf8, 0x7c, 0x62, 0xbf, 0xdb, 0xe9, 0xbc, 0x9a, 0xd5, 0x55, 0x9b, 0x60, 0x6b, /* Byte value: 0x11 */ + 0x72, 0x47, 0x98, 0x37, 0xfa, 0xa9, 0x49, 0x4e, 0xac, 0x1e, 0x9e, 0x0a, 0xb2, 0x12, 0x72, 0xcf, /* Byte value: 0x12 */ + 0x11, 0x75, 0x62, 0xb8, 0x5c, 0x7d, 0x24, 0xed, 0x68, 0x7e, 0xc5, 0x2a, 0x16, 0xf3, 0x11, 0x38, /* Byte value: 0x13 */ + 0x06, 0x8a, 0xe7, 0xee, 0x77, 0xaf, 0x67, 0xf0, 0x5a, 0x7b, 0x41, 0x29, 0x21, 0xf4, 0x06, 0xfb, /* Byte value: 0x14 */ + 0x5d, 0xf8, 0x9e, 0xb6, 0x5b, 0x1d, 0x81, 0x5c, 0xd3, 0xe1, 0xab, 0x5f, 0x32, 0x8b, 0x5d, 0xfe, /* Byte value: 0x15 */ + 0xe0, 0xc3, 0x49, 0xda, 0x6d, 0x5b, 0x69, 0x3c, 0xf7, 0x6e, 0x81, 0x9b, 0x99, 0x3d, 0xe0, 0x4e, /* Byte value: 0x16 */ + 0x86, 0x3e, 0xc6, 0xcc, 0x66, 0x96, 0xb1, 0x17, 0x44, 0xa9, 0x5a, 0x67, 0xed, 0x52, 0x86, 0xde, /* Byte value: 0x17 */ + 0xaa, 0xc4, 0x52, 0x3a, 0x1d, 0x94, 0xab, 0x7d, 0x16, 0x8a, 0xae, 0xc7, 0x9c, 0xb1, 0xaa, 0x73, /* Byte value: 0x18 */ + 0x58, 0x37, 0xeb, 0x2f, 0xf6, 0x04, 0x34, 0xd4, 0xa4, 0x46, 0x2b, 0x83, 0xe2, 0x05, 0x58, 0x99, /* Byte value: 0x19 */ + 0x24, 0x60, 0x23, 0x5d, 0xcf, 0x55, 0x2f, 0xe9, 0x8a, 0x87, 0x08, 0x7d, 0x0d, 0xd1, 0x24, 0x8b, /* Byte value: 0x1a */ + 0xdf, 0x8b, 0xe2, 0xce, 0x67, 0x41, 0xcb, 0xeb, 0xfb, 0x1a, 0x8f, 0xb7, 0xe1, 0xc0, 0xdf, 0x33, /* Byte value: 0x1b */ + 0x69, 0x6f, 0x10, 0x7e, 0x3f, 0xe6, 0xc4, 0x70, 0x2a, 0xed, 0x98, 0x5b, 0xc7, 0x3e, 0x69, 0x39, /* Byte value: 0x1c */ + 0x7a, 0xdd, 0x2f, 0x9c, 0x4e, 0xfe, 0x7c, 0xcd, 0x74, 0xba, 0x62, 0xd7, 0xce, 0x20, 0x7a, 0xe9, /* Byte value: 0x1d */ + 0x67, 0x7f, 0x40, 0x3b, 0xfc, 0x1e, 0x96, 0x03, 0xa8, 0x32, 0x25, 0xaf, 0x9a, 0xf8, 0x67, 0xe4, /* Byte value: 0x1e */ + 0xec, 0x14, 0x44, 0xc5, 0x83, 0xc6, 0xa7, 0x1f, 0x43, 0x98, 0x03, 0xc9, 0xdb, 0x16, 0xec, 0x7b, /* Byte value: 0x1f */ + 0x30, 0xda, 0x34, 0x7c, 0x3e, 0x31, 0xbe, 0x8c, 0x95, 0x5e, 0x4d, 0x8b, 0xcb, 0xac, 0x30, 0xd4, /* Byte value: 0x20 */ + 0x6a, 0x2a, 0x82, 0x09, 0xe5, 0x50, 0x16, 0x08, 0x07, 0x31, 0x59, 0xae, 0x36, 0x44, 0x6a, 0xa5, /* Byte value: 0x21 */ + 0x0e, 0x10, 0x50, 0x45, 0xc3, 0xf8, 0x52, 0x73, 0x82, 0xdf, 0xbd, 0xf4, 0x5d, 0xc6, 0x0e, 0xdd, /* Byte value: 0x22 */ + 0x43, 0x1f, 0x63, 0x66, 0x33, 0x4b, 0xb9, 0xea, 0x22, 0xb5, 0x2d, 0xd2, 0x97, 0x29, 0x43, 0x6f, /* Byte value: 0x23 */ + 0x2d, 0x78, 0x5b, 0xdb, 0x8c, 0xd1, 0x54, 0x42, 0x49, 0xd6, 
0x0a, 0xf3, 0x9f, 0x74, 0x2d, 0xd9, /* Byte value: 0x24 */ + 0xb3, 0x2b, 0x87, 0x29, 0xf5, 0xbe, 0xba, 0x13, 0xa6, 0x50, 0x97, 0x30, 0xf6, 0x70, 0xb3, 0x6d, /* Byte value: 0x25 */ + 0xe4, 0x8e, 0xf3, 0x6e, 0x37, 0x91, 0x92, 0x9c, 0x9b, 0x3c, 0xff, 0x14, 0xa7, 0x24, 0xe4, 0x5d, /* Byte value: 0x26 */ + 0x22, 0xea, 0xc4, 0xb3, 0xb8, 0xfa, 0x48, 0x19, 0xd0, 0xfc, 0x49, 0x54, 0x2c, 0x25, 0x22, 0x70, /* Byte value: 0x27 */ + 0xab, 0x46, 0x9d, 0x17, 0xea, 0x47, 0xe5, 0x55, 0x0d, 0x7f, 0x50, 0x94, 0x72, 0x26, 0xab, 0x07, /* Byte value: 0x28 */ + 0x3d, 0x8f, 0xf6, 0x4e, 0x27, 0x7f, 0x3e, 0x87, 0x3a, 0x5d, 0x31, 0x8a, 0x67, 0x10, 0x3d, 0x95, /* Byte value: 0x29 */ + 0x34, 0x97, 0x8e, 0xc8, 0x64, 0xfb, 0x45, 0x2c, 0xf9, 0x0c, 0x33, 0x04, 0xf5, 0xb5, 0x34, 0xc7, /* Byte value: 0x2a */ + 0x90, 0x43, 0x8c, 0xb7, 0xba, 0x97, 0xbc, 0x22, 0x6d, 0x59, 0x20, 0x37, 0x34, 0xc2, 0x90, 0x69, /* Byte value: 0x2b */ + 0x97, 0x4b, 0xa4, 0x74, 0x3a, 0xeb, 0x95, 0xfa, 0x2c, 0xd7, 0x9f, 0x4d, 0xfb, 0xa1, 0x97, 0xe6, /* Byte value: 0x2c */ + 0xb0, 0x6e, 0x15, 0x5e, 0x2f, 0x08, 0x68, 0x6b, 0x8b, 0x8c, 0x56, 0xc5, 0x07, 0x0a, 0xb0, 0xf1, /* Byte value: 0x2d */ + 0xe5, 0x0c, 0x3c, 0x43, 0xc0, 0x42, 0xdc, 0xb4, 0x80, 0xc9, 0x01, 0x47, 0x49, 0xb3, 0xe5, 0x29, /* Byte value: 0x2e */ + 0x42, 0x9d, 0xac, 0x4b, 0xc4, 0x98, 0xf7, 0xc2, 0x39, 0x40, 0xd3, 0x81, 0x79, 0xbe, 0x42, 0x1b, /* Byte value: 0x2f */ + 0x79, 0x98, 0xbd, 0xeb, 0x94, 0x48, 0xae, 0xb5, 0x59, 0x66, 0xa3, 0x22, 0x3f, 0x5a, 0x79, 0x75, /* Byte value: 0x30 */ + 0xc0, 0xee, 0xd0, 0x33, 0xf8, 0xc4, 0xbd, 0x75, 0x11, 0xbb, 0xf7, 0x69, 0xaa, 0xf5, 0xc0, 0xd6, /* Byte value: 0x31 */ + 0x18, 0x6d, 0x1a, 0x3e, 0x1f, 0xf9, 0x5f, 0x46, 0xab, 0x2f, 0xc7, 0xa4, 0x84, 0x56, 0x18, 0x6a, /* Byte value: 0x32 */ + 0x4b, 0x85, 0xd4, 0xcd, 0x87, 0x1c, 0x8c, 0x69, 0xfa, 0x11, 0xd1, 0x0f, 0xeb, 0x1b, 0x4b, 0x49, /* Byte value: 0x33 */ + 0xf7, 0x3c, 0xcc, 0x8c, 0x46, 0x89, 0x2a, 0x21, 0xc5, 0x6b, 0x05, 0x98, 0xae, 0x3a, 0xf7, 0x8d, /* Byte value: 0x34 */ + 0xb8, 0xf4, 0xa2, 0xf5, 0x9b, 0x5f, 0x5d, 0xe8, 0x53, 0x28, 0xaa, 0x18, 0x7b, 0x38, 0xb8, 0xd7, /* Byte value: 0x35 */ + 0xa6, 0x13, 0x5f, 0x25, 0xf3, 0x09, 0x65, 0x5e, 0xa2, 0x7c, 0x2c, 0x95, 0xde, 0x9a, 0xa6, 0x46, /* Byte value: 0x36 */ + 0x59, 0xb5, 0x24, 0x02, 0x01, 0xd7, 0x7a, 0xfc, 0xbf, 0xb3, 0xd5, 0xd0, 0x0c, 0x92, 0x59, 0xed, /* Byte value: 0x37 */ + 0x64, 0x3a, 0xd2, 0x4c, 0x26, 0xa8, 0x44, 0x7b, 0x85, 0xee, 0xe4, 0x5a, 0x6b, 0x82, 0x64, 0x78, /* Byte value: 0x38 */ + 0x36, 0x50, 0xd3, 0x92, 0x49, 0x9e, 0xd9, 0x7c, 0xcf, 0x25, 0x0c, 0xa2, 0xea, 0x58, 0x36, 0x2f, /* Byte value: 0x39 */ + 0xb4, 0x23, 0xaf, 0xea, 0x75, 0xc2, 0x93, 0xcb, 0xe7, 0xde, 0x28, 0x4a, 0x39, 0x13, 0xb4, 0xe2, /* Byte value: 0x3a */ + 0x81, 0x36, 0xee, 0x0f, 0xe6, 0xea, 0x98, 0xcf, 0x05, 0x27, 0xe5, 0x1d, 0x22, 0x31, 0x81, 0x51, /* Byte value: 0x3b */ + 0x09, 0x18, 0x78, 0x86, 0x43, 0x84, 0x7b, 0xab, 0xc3, 0x51, 0x02, 0x8e, 0x92, 0xa5, 0x09, 0x52, /* Byte value: 0x3c */ + 0x78, 0x1a, 0x72, 0xc6, 0x63, 0x9b, 0xe0, 0x9d, 0x42, 0x93, 0x5d, 0x71, 0xd1, 0xcd, 0x78, 0x01, /* Byte value: 0x3d */ + 0xed, 0x96, 0x8b, 0xe8, 0x74, 0x15, 0xe9, 0x37, 0x58, 0x6d, 0xfd, 0x9a, 0x35, 0x81, 0xed, 0x0f, /* Byte value: 0x3e */ + 0x5f, 0x3f, 0xc3, 0xec, 0x76, 0x78, 0x1d, 0x0c, 0xe5, 0xc8, 0x94, 0xf9, 0x2d, 0x66, 0x5f, 0x16, /* Byte value: 0x3f */ + 0xda, 0x44, 0x97, 0x57, 0xca, 0x58, 0x7e, 0x63, 0x8c, 0xbd, 0x0f, 0x6b, 0x31, 0x4e, 0xda, 0x54, /* Byte value: 0x40 */ + 0xc4, 0xa3, 0x6a, 0x87, 0xa2, 0x0e, 0x46, 0xd5, 0x7d, 0xe9, 0x89, 0xe6, 0x94, 0xec, 0xc4, 0xc5, /* Byte 
value: 0x41 */ + 0x29, 0x35, 0xe1, 0x6f, 0xd6, 0x1b, 0xaf, 0xe2, 0x25, 0x84, 0x74, 0x7c, 0xa1, 0x6d, 0x29, 0xca, /* Byte value: 0x42 */ + 0xad, 0xcc, 0x7a, 0xf9, 0x9d, 0xe8, 0x82, 0xa5, 0x57, 0x04, 0x11, 0xbd, 0x53, 0xd2, 0xad, 0xfc, /* Byte value: 0x43 */ + 0x07, 0x08, 0x28, 0xc3, 0x80, 0x7c, 0x29, 0xd8, 0x41, 0x8e, 0xbf, 0x7a, 0xcf, 0x63, 0x07, 0x8f, /* Byte value: 0x44 */ + 0x82, 0x73, 0x7c, 0x78, 0x3c, 0x5c, 0x4a, 0xb7, 0x28, 0xfb, 0x24, 0xe8, 0xd3, 0x4b, 0x82, 0xcd, /* Byte value: 0x45 */ + 0xe2, 0x04, 0x14, 0x80, 0x40, 0x3e, 0xf5, 0x6c, 0xc1, 0x47, 0xbe, 0x3d, 0x86, 0xd0, 0xe2, 0xa6, /* Byte value: 0x46 */ + 0x31, 0x58, 0xfb, 0x51, 0xc9, 0xe2, 0xf0, 0xa4, 0x8e, 0xab, 0xb3, 0xd8, 0x25, 0x3b, 0x31, 0xa0, /* Byte value: 0x47 */ + 0x17, 0xff, 0x85, 0x56, 0x2b, 0xd2, 0x43, 0x1d, 0x32, 0x05, 0x84, 0x03, 0x37, 0x07, 0x17, 0xc3, /* Byte value: 0x48 */ + 0x9f, 0xd1, 0x13, 0xdf, 0x8e, 0xbc, 0xa0, 0x79, 0xf4, 0x73, 0x63, 0x90, 0x87, 0x93, 0x9f, 0xc0, /* Byte value: 0x49 */ + 0x0b, 0xdf, 0x25, 0xdc, 0x6e, 0xe1, 0xe7, 0xfb, 0xf5, 0x78, 0x3d, 0x28, 0x8d, 0x48, 0x0b, 0xba, /* Byte value: 0x4a */ + 0x16, 0x7d, 0x4a, 0x7b, 0xdc, 0x01, 0x0d, 0x35, 0x29, 0xf0, 0x7a, 0x50, 0xd9, 0x90, 0x16, 0xb7, /* Byte value: 0x4b */ + 0x7f, 0x12, 0x5a, 0x05, 0xe3, 0xe7, 0xc9, 0x45, 0x03, 0x1d, 0xe2, 0x0b, 0x1e, 0xae, 0x7f, 0x8e, /* Byte value: 0x4c */ + 0x23, 0x68, 0x0b, 0x9e, 0x4f, 0x29, 0x06, 0x31, 0xcb, 0x09, 0xb7, 0x07, 0xc2, 0xb2, 0x23, 0x04, /* Byte value: 0x4d */ + 0x6b, 0xa8, 0x4d, 0x24, 0x12, 0x83, 0x58, 0x20, 0x1c, 0xc4, 0xa7, 0xfd, 0xd8, 0xd3, 0x6b, 0xd1, /* Byte value: 0x4e */ + 0xa9, 0x81, 0xc0, 0x4d, 0xc7, 0x22, 0x79, 0x05, 0x3b, 0x56, 0x6f, 0x32, 0x6d, 0xcb, 0xa9, 0xef, /* Byte value: 0x4f */ + 0x8b, 0x6b, 0x04, 0xfe, 0x7f, 0xd8, 0x31, 0x1c, 0xeb, 0xaa, 0x26, 0x66, 0x41, 0xee, 0x8b, 0x9f, /* Byte value: 0x50 */ + 0xb7, 0x66, 0x3d, 0x9d, 0xaf, 0x74, 0x41, 0xb3, 0xca, 0x02, 0xe9, 0xbf, 0xc8, 0x69, 0xb7, 0x7e, /* Byte value: 0x51 */ + 0x13, 0xb2, 0x3f, 0xe2, 0x71, 0x18, 0xb8, 0xbd, 0x5e, 0x57, 0xfa, 0x8c, 0x09, 0x1e, 0x13, 0xd0, /* Byte value: 0x52 */ + 0x1c, 0x20, 0xa0, 0x8a, 0x45, 0x33, 0xa4, 0xe6, 0xc7, 0x7d, 0xb9, 0x2b, 0xba, 0x4f, 0x1c, 0x79, /* Byte value: 0x53 */ + 0xfb, 0xeb, 0xc1, 0x93, 0xa8, 0x14, 0xe4, 0x02, 0x71, 0x9d, 0x87, 0xca, 0xec, 0x11, 0xfb, 0xb8, /* Byte value: 0x54 */ + 0x62, 0xb0, 0x35, 0xa2, 0x51, 0x07, 0x23, 0x8b, 0xdf, 0x95, 0xa5, 0x73, 0x4a, 0x76, 0x62, 0x83, /* Byte value: 0x55 */ + 0xf2, 0xf3, 0xb9, 0x15, 0xeb, 0x90, 0x9f, 0xa9, 0xb2, 0xcc, 0x85, 0x44, 0x7e, 0xb4, 0xf2, 0xea, /* Byte value: 0x56 */ + 0x3b, 0x05, 0x11, 0xa0, 0x50, 0xd0, 0x59, 0x77, 0x60, 0x26, 0x70, 0xa3, 0x46, 0xe4, 0x3b, 0x6e, /* Byte value: 0x57 */ + 0x9a, 0x1e, 0x66, 0x46, 0x23, 0xa5, 0x15, 0xf1, 0x83, 0xd4, 0xe3, 0x4c, 0x57, 0x1d, 0x9a, 0xa7, /* Byte value: 0x58 */ + 0x75, 0x4f, 0xb0, 0xf4, 0x7a, 0xd5, 0x60, 0x96, 0xed, 0x90, 0x21, 0x70, 0x7d, 0x71, 0x75, 0x40, /* Byte value: 0x59 */ + 0xea, 0x9e, 0xa3, 0x2b, 0xf4, 0x69, 0xc0, 0xef, 0x19, 0xe3, 0x42, 0xe0, 0xfa, 0xe2, 0xea, 0x80, /* Byte value: 0x5a */ + 0xd0, 0x19, 0x7d, 0xa6, 0x53, 0x6a, 0xd7, 0xb0, 0x62, 0x30, 0xcc, 0x10, 0x52, 0x91, 0xd0, 0x9a, /* Byte value: 0x5b */ + 0x2a, 0x70, 0x73, 0x18, 0x0c, 0xad, 0x7d, 0x9a, 0x08, 0x58, 0xb5, 0x89, 0x50, 0x17, 0x2a, 0x56, /* Byte value: 0x5c */ + 0x91, 0xc1, 0x43, 0x9a, 0x4d, 0x44, 0xf2, 0x0a, 0x76, 0xac, 0xde, 0x64, 0xda, 0x55, 0x91, 0x1d, /* Byte value: 0x5d */ + 0x02, 0xc7, 0x5d, 0x5a, 0x2d, 0x65, 0x9c, 0x50, 0x36, 0x29, 0x3f, 0xa6, 0x1f, 0xed, 0x02, 0xe8, /* Byte value: 0x5e */ + 0x46, 0xd0, 0x16, 0xff, 
0x9e, 0x52, 0x0c, 0x62, 0x55, 0x12, 0xad, 0x0e, 0x47, 0xa7, 0x46, 0x08, /* Byte value: 0x5f */ + 0xa7, 0x91, 0x90, 0x08, 0x04, 0xda, 0x2b, 0x76, 0xb9, 0x89, 0xd2, 0xc6, 0x30, 0x0d, 0xa7, 0x32, /* Byte value: 0x60 */ + 0xdc, 0xce, 0x70, 0xb9, 0xbd, 0xf7, 0x19, 0x93, 0xd6, 0xc6, 0x4e, 0x42, 0x10, 0xba, 0xdc, 0xaf, /* Byte value: 0x61 */ + 0x27, 0x25, 0xb1, 0x2a, 0x15, 0xe3, 0xfd, 0x91, 0xa7, 0x5b, 0xc9, 0x88, 0xfc, 0xab, 0x27, 0x17, /* Byte value: 0x62 */ + 0x76, 0x0a, 0x22, 0x83, 0xa0, 0x63, 0xb2, 0xee, 0xc0, 0x4c, 0xe0, 0x85, 0x8c, 0x0b, 0x76, 0xdc, /* Byte value: 0x63 */ + 0xa4, 0xd4, 0x02, 0x7f, 0xde, 0x6c, 0xf9, 0x0e, 0x94, 0x55, 0x13, 0x33, 0xc1, 0x77, 0xa4, 0xae, /* Byte value: 0x64 */ + 0xca, 0xb3, 0x3a, 0xc2, 0x61, 0xf6, 0x14, 0xa6, 0xff, 0x36, 0x34, 0x12, 0xc9, 0x2a, 0xca, 0x18, /* Byte value: 0x65 */ + 0x96, 0xc9, 0x6b, 0x59, 0xcd, 0x38, 0xdb, 0xd2, 0x37, 0x22, 0x61, 0x1e, 0x15, 0x36, 0x96, 0x92, /* Byte value: 0x66 */ + 0x70, 0x80, 0xc5, 0x6d, 0xd7, 0xcc, 0xd5, 0x1e, 0x9a, 0x37, 0xa1, 0xac, 0xad, 0xff, 0x70, 0x27, /* Byte value: 0x67 */ + 0x54, 0xe0, 0xe6, 0x30, 0x18, 0x99, 0xfa, 0xf7, 0x10, 0xb0, 0xa9, 0xd1, 0xa0, 0x2e, 0x54, 0xac, /* Byte value: 0x68 */ + 0xe1, 0x41, 0x86, 0xf7, 0x9a, 0x88, 0x27, 0x14, 0xec, 0x9b, 0x7f, 0xc8, 0x77, 0xaa, 0xe1, 0x3a, /* Byte value: 0x69 */ + 0x6f, 0xe5, 0xf7, 0x90, 0x48, 0x49, 0xa3, 0x80, 0x70, 0x96, 0xd9, 0x72, 0xe6, 0xca, 0x6f, 0xc2, /* Byte value: 0x6a */ + 0x39, 0xc2, 0x4c, 0xfa, 0x7d, 0xb5, 0xc5, 0x27, 0x56, 0x0f, 0x4f, 0x05, 0x59, 0x09, 0x39, 0x86, /* Byte value: 0x6b */ + 0x33, 0x9f, 0xa6, 0x0b, 0xe4, 0x87, 0x6c, 0xf4, 0xb8, 0x82, 0x8c, 0x7e, 0x3a, 0xd6, 0x33, 0x48, /* Byte value: 0x6c */ + 0xdb, 0xc6, 0x58, 0x7a, 0x3d, 0x8b, 0x30, 0x4b, 0x97, 0x48, 0xf1, 0x38, 0xdf, 0xd9, 0xdb, 0x20, /* Byte value: 0x6d */ + 0xa0, 0x99, 0xb8, 0xcb, 0x84, 0xa6, 0x02, 0xae, 0xf8, 0x07, 0x6d, 0xbc, 0xff, 0x6e, 0xa0, 0xbd, /* Byte value: 0x6e */ + 0xfe, 0x24, 0xb4, 0x0a, 0x05, 0x0d, 0x51, 0x8a, 0x06, 0x3a, 0x07, 0x16, 0x3c, 0x9f, 0xfe, 0xdf, /* Byte value: 0x6f */ + 0xb2, 0xa9, 0x48, 0x04, 0x02, 0x6d, 0xf4, 0x3b, 0xbd, 0xa5, 0x69, 0x63, 0x18, 0xe7, 0xb2, 0x19, /* Byte value: 0x70 */ + 0x10, 0xf7, 0xad, 0x95, 0xab, 0xae, 0x6a, 0xc5, 0x73, 0x8b, 0x3b, 0x79, 0xf8, 0x64, 0x10, 0x4c, /* Byte value: 0x71 */ + 0x71, 0x02, 0x0a, 0x40, 0x20, 0x1f, 0x9b, 0x36, 0x81, 0xc2, 0x5f, 0xff, 0x43, 0x68, 0x71, 0x53, /* Byte value: 0x72 */ + 0x87, 0xbc, 0x09, 0xe1, 0x91, 0x45, 0xff, 0x3f, 0x5f, 0x5c, 0xa4, 0x34, 0x03, 0xc5, 0x87, 0xaa, /* Byte value: 0x73 */ + 0x35, 0x15, 0x41, 0xe5, 0x93, 0x28, 0x0b, 0x04, 0xe2, 0xf9, 0xcd, 0x57, 0x1b, 0x22, 0x35, 0xb3, /* Byte value: 0x74 */ + 0x38, 0x40, 0x83, 0xd7, 0x8a, 0x66, 0x8b, 0x0f, 0x4d, 0xfa, 0xb1, 0x56, 0xb7, 0x9e, 0x38, 0xf2, /* Byte value: 0x75 */ + 0x52, 0x6a, 0x01, 0xde, 0x6f, 0x36, 0x9d, 0x07, 0x4a, 0xcb, 0xe8, 0xf8, 0x81, 0xda, 0x52, 0x57, /* Byte value: 0x76 */ + 0x65, 0xb8, 0x1d, 0x61, 0xd1, 0x7b, 0x0a, 0x53, 0x9e, 0x1b, 0x1a, 0x09, 0x85, 0x15, 0x65, 0x0c, /* Byte value: 0x77 */ + 0xde, 0x09, 0x2d, 0xe3, 0x90, 0x92, 0x85, 0xc3, 0xe0, 0xef, 0x71, 0xe4, 0x0f, 0x57, 0xde, 0x47, /* Byte value: 0x78 */ + 0x8e, 0xa4, 0x71, 0x67, 0xd2, 0xc1, 0x84, 0x94, 0x9c, 0x0d, 0xa6, 0xba, 0x91, 0x60, 0x8e, 0xf8, /* Byte value: 0x79 */ + 0xa8, 0x03, 0x0f, 0x60, 0x30, 0xf1, 0x37, 0x2d, 0x20, 0xa3, 0x91, 0x61, 0x83, 0x5c, 0xa8, 0x9b, /* Byte value: 0x7a */ + 0xfd, 0x61, 0x26, 0x7d, 0xdf, 0xbb, 0x83, 0xf2, 0x2b, 0xe6, 0xc6, 0xe3, 0xcd, 0xe5, 0xfd, 0x43, /* Byte value: 0x7b */ + 0x2e, 0x3d, 0xc9, 0xac, 0x56, 0x67, 0x86, 0x3a, 0x64, 0x0a, 0xcb, 
0x06, 0x6e, 0x0e, 0x2e, 0x45, /* Byte value: 0x7c */ + 0x01, 0x82, 0xcf, 0x2d, 0xf7, 0xd3, 0x4e, 0x28, 0x1b, 0xf5, 0xfe, 0x53, 0xee, 0x97, 0x01, 0x74, /* Byte value: 0x7d */ + 0xf8, 0xae, 0x53, 0xe4, 0x72, 0xa2, 0x36, 0x7a, 0x5c, 0x41, 0x46, 0x3f, 0x1d, 0x6b, 0xf8, 0x24, /* Byte value: 0x7e */ + 0x8c, 0x63, 0x2c, 0x3d, 0xff, 0xa4, 0x18, 0xc4, 0xaa, 0x24, 0x99, 0x1c, 0x8e, 0x8d, 0x8c, 0x10, /* Byte value: 0x7f */ + 0xbb, 0xb1, 0x30, 0x82, 0x41, 0xe9, 0x8f, 0x90, 0x7e, 0xf4, 0x6b, 0xed, 0x8a, 0x42, 0xbb, 0x4b, /* Byte value: 0x80 */ + 0x49, 0x42, 0x89, 0x97, 0xaa, 0x79, 0x10, 0x39, 0xcc, 0x38, 0xee, 0xa9, 0xf4, 0xf6, 0x49, 0xa1, /* Byte value: 0x81 */ + 0x44, 0x17, 0x4b, 0xa5, 0xb3, 0x37, 0x90, 0x32, 0x63, 0x3b, 0x92, 0xa8, 0x58, 0x4a, 0x44, 0xe0, /* Byte value: 0x82 */ + 0xbf, 0xfc, 0x8a, 0x36, 0x1b, 0x23, 0x74, 0x30, 0x12, 0xa6, 0x15, 0x62, 0xb4, 0x5b, 0xbf, 0x58, /* Byte value: 0x83 */ + 0xef, 0x51, 0xd6, 0xb2, 0x59, 0x70, 0x75, 0x67, 0x6e, 0x44, 0xc2, 0x3c, 0x2a, 0x6c, 0xef, 0xe7, /* Byte value: 0x84 */ + 0xfc, 0xe3, 0xe9, 0x50, 0x28, 0x68, 0xcd, 0xda, 0x30, 0x13, 0x38, 0xb0, 0x23, 0x72, 0xfc, 0x37, /* Byte value: 0x85 */ + 0x3a, 0x87, 0xde, 0x8d, 0xa7, 0x03, 0x17, 0x5f, 0x7b, 0xd3, 0x8e, 0xf0, 0xa8, 0x73, 0x3a, 0x1a, /* Byte value: 0x86 */ + 0xb1, 0xec, 0xda, 0x73, 0xd8, 0xdb, 0x26, 0x43, 0x90, 0x79, 0xa8, 0x96, 0xe9, 0x9d, 0xb1, 0x85, /* Byte value: 0x87 */ + 0xf9, 0x2c, 0x9c, 0xc9, 0x85, 0x71, 0x78, 0x52, 0x47, 0xb4, 0xb8, 0x6c, 0xf3, 0xfc, 0xf9, 0x50, /* Byte value: 0x88 */ + 0x99, 0x5b, 0xf4, 0x31, 0xf9, 0x13, 0xc7, 0x89, 0xae, 0x08, 0x22, 0xb9, 0xa6, 0x67, 0x99, 0x3b, /* Byte value: 0x89 */ + 0xd8, 0x83, 0xca, 0x0d, 0xe7, 0x3d, 0xe2, 0x33, 0xba, 0x94, 0x30, 0xcd, 0x2e, 0xa3, 0xd8, 0xbc, /* Byte value: 0x8a */ + 0xc9, 0xf6, 0xa8, 0xb5, 0xbb, 0x40, 0xc6, 0xde, 0xd2, 0xea, 0xf5, 0xe7, 0x38, 0x50, 0xc9, 0x84, /* Byte value: 0x8b */ + 0x61, 0xf5, 0xa7, 0xd5, 0x8b, 0xb1, 0xf1, 0xf3, 0xf2, 0x49, 0x64, 0x86, 0xbb, 0x0c, 0x61, 0x1f, /* Byte value: 0x8c */ + 0x3c, 0x0d, 0x39, 0x63, 0xd0, 0xac, 0x70, 0xaf, 0x21, 0xa8, 0xcf, 0xd9, 0x89, 0x87, 0x3c, 0xe1, /* Byte value: 0x8d */ + 0x89, 0xac, 0x59, 0xa4, 0x52, 0xbd, 0xad, 0x4c, 0xdd, 0x83, 0x19, 0xc0, 0x5e, 0x03, 0x89, 0x77, /* Byte value: 0x8e */ + 0x0f, 0x92, 0x9f, 0x68, 0x34, 0x2b, 0x1c, 0x5b, 0x99, 0x2a, 0x43, 0xa7, 0xb3, 0x51, 0x0f, 0xa9, /* Byte value: 0x8f */ + 0x5c, 0x7a, 0x51, 0x9b, 0xac, 0xce, 0xcf, 0x74, 0xc8, 0x14, 0x55, 0x0c, 0xdc, 0x1c, 0x5c, 0x8a, /* Byte value: 0x90 */ + 0xbe, 0x7e, 0x45, 0x1b, 0xec, 0xf0, 0x3a, 0x18, 0x09, 0x53, 0xeb, 0x31, 0x5a, 0xcc, 0xbe, 0x2c, /* Byte value: 0x91 */ + 0xc2, 0x29, 0x8d, 0x69, 0xd5, 0xa1, 0x21, 0x25, 0x27, 0x92, 0xc8, 0xcf, 0xb5, 0x18, 0xc2, 0x3e, /* Byte value: 0x92 */ + 0xc5, 0x21, 0xa5, 0xaa, 0x55, 0xdd, 0x08, 0xfd, 0x66, 0x1c, 0x77, 0xb5, 0x7a, 0x7b, 0xc5, 0xb1, /* Byte value: 0x93 */ + 0x88, 0x2e, 0x96, 0x89, 0xa5, 0x6e, 0xe3, 0x64, 0xc6, 0x76, 0xe7, 0x93, 0xb0, 0x94, 0x88, 0x03, /* Byte value: 0x94 */ + 0xa1, 0x1b, 0x77, 0xe6, 0x73, 0x75, 0x4c, 0x86, 0xe3, 0xf2, 0x93, 0xef, 0x11, 0xf9, 0xa1, 0xc9, /* Byte value: 0x95 */ + 0x1d, 0xa2, 0x6f, 0xa7, 0xb2, 0xe0, 0xea, 0xce, 0xdc, 0x88, 0x47, 0x78, 0x54, 0xd8, 0x1d, 0x0d, /* Byte value: 0x96 */ + 0xc7, 0xe6, 0xf8, 0xf0, 0x78, 0xb8, 0x94, 0xad, 0x50, 0x35, 0x48, 0x13, 0x65, 0x96, 0xc7, 0x59, /* Byte value: 0x97 */ + 0x6c, 0xa0, 0x65, 0xe7, 0x92, 0xff, 0x71, 0xf8, 0x5d, 0x4a, 0x18, 0x87, 0x17, 0xb0, 0x6c, 0x5e, /* Byte value: 0x98 */ + 0x85, 0x7b, 0x54, 0xbb, 0xbc, 0x20, 0x63, 0x6f, 0x69, 0x75, 0x9b, 0x92, 0x1c, 0x28, 0x85, 0x42, /* Byte value: 
0x99 */ + 0x9d, 0x16, 0x4e, 0x85, 0xa3, 0xd9, 0x3c, 0x29, 0xc2, 0x5a, 0x5c, 0x36, 0x98, 0x7e, 0x9d, 0x28, /* Byte value: 0x9a */ + 0xd3, 0x5c, 0xef, 0xd1, 0x89, 0xdc, 0x05, 0xc8, 0x4f, 0xec, 0x0d, 0xe5, 0xa3, 0xeb, 0xd3, 0x06, /* Byte value: 0x9b */ + 0xd1, 0x9b, 0xb2, 0x8b, 0xa4, 0xb9, 0x99, 0x98, 0x79, 0xc5, 0x32, 0x43, 0xbc, 0x06, 0xd1, 0xee, /* Byte value: 0x9c */ + 0x1e, 0xe7, 0xfd, 0xd0, 0x68, 0x56, 0x38, 0xb6, 0xf1, 0x54, 0x86, 0x8d, 0xa5, 0xa2, 0x1e, 0x91, /* Byte value: 0x9d */ + 0x3e, 0xca, 0x64, 0x39, 0xfd, 0xc9, 0xec, 0xff, 0x17, 0x81, 0xf0, 0x7f, 0x96, 0x6a, 0x3e, 0x09, /* Byte value: 0x9e */ + 0x84, 0xf9, 0x9b, 0x96, 0x4b, 0xf3, 0x2d, 0x47, 0x72, 0x80, 0x65, 0xc1, 0xf2, 0xbf, 0x84, 0x36, /* Byte value: 0x9f */ + 0x2f, 0xbf, 0x06, 0x81, 0xa1, 0xb4, 0xc8, 0x12, 0x7f, 0xff, 0x35, 0x55, 0x80, 0x99, 0x2f, 0x31, /* Byte value: 0xa0 */ + 0x7d, 0xd5, 0x07, 0x5f, 0xce, 0x82, 0x55, 0x15, 0x35, 0x34, 0xdd, 0xad, 0x01, 0x43, 0x7d, 0x66, /* Byte value: 0xa1 */ + 0x74, 0xcd, 0x7f, 0xd9, 0x8d, 0x06, 0x2e, 0xbe, 0xf6, 0x65, 0xdf, 0x23, 0x93, 0xe6, 0x74, 0x34, /* Byte value: 0xa2 */ + 0x41, 0xd8, 0x3e, 0x3c, 0x1e, 0x2e, 0x25, 0xba, 0x14, 0x9c, 0x12, 0x74, 0x88, 0xc4, 0x41, 0x87, /* Byte value: 0xa3 */ + 0x50, 0xad, 0x5c, 0x84, 0x42, 0x53, 0x01, 0x57, 0x7c, 0xe2, 0xd7, 0x5e, 0x9e, 0x37, 0x50, 0xbf, /* Byte value: 0xa4 */ + 0x21, 0xaf, 0x56, 0xc4, 0x62, 0x4c, 0x9a, 0x61, 0xfd, 0x20, 0x88, 0xa1, 0xdd, 0x5f, 0x21, 0xec, /* Byte value: 0xa5 */ + 0x55, 0x62, 0x29, 0x1d, 0xef, 0x4a, 0xb4, 0xdf, 0x0b, 0x45, 0x57, 0x82, 0x4e, 0xb9, 0x55, 0xd8, /* Byte value: 0xa6 */ + 0xe7, 0xcb, 0x61, 0x19, 0xed, 0x27, 0x40, 0xe4, 0xb6, 0xe0, 0x3e, 0xe1, 0x56, 0x5e, 0xe7, 0xc1, /* Byte value: 0xa7 */ + 0xcd, 0xbb, 0x12, 0x01, 0xe1, 0x8a, 0x3d, 0x7e, 0xbe, 0xb8, 0x8b, 0x68, 0x06, 0x49, 0xcd, 0x97, /* Byte value: 0xa8 */ + 0x0d, 0x55, 0xc2, 0x32, 0x19, 0x4e, 0x80, 0x0b, 0xaf, 0x03, 0x7c, 0x01, 0xac, 0xbc, 0x0d, 0x41, /* Byte value: 0xa9 */ + 0x7c, 0x57, 0xc8, 0x72, 0x39, 0x51, 0x1b, 0x3d, 0x2e, 0xc1, 0x23, 0xfe, 0xef, 0xd4, 0x7c, 0x12, /* Byte value: 0xaa */ + 0x37, 0xd2, 0x1c, 0xbf, 0xbe, 0x4d, 0x97, 0x54, 0xd4, 0xd0, 0xf2, 0xf1, 0x04, 0xcf, 0x37, 0x5b, /* Byte value: 0xab */ + 0x1a, 0xaa, 0x47, 0x64, 0x32, 0x9c, 0xc3, 0x16, 0x9d, 0x06, 0xf8, 0x02, 0x9b, 0xbb, 0x1a, 0x82, /* Byte value: 0xac */ + 0xa2, 0x5e, 0xe5, 0x91, 0xa9, 0xc3, 0x9e, 0xfe, 0xce, 0x2e, 0x52, 0x1a, 0xe0, 0x83, 0xa2, 0x55, /* Byte value: 0xad */ + 0xaf, 0x0b, 0x27, 0xa3, 0xb0, 0x8d, 0x1e, 0xf5, 0x61, 0x2d, 0x2e, 0x1b, 0x4c, 0x3f, 0xaf, 0x14, /* Byte value: 0xae */ + 0xf6, 0xbe, 0x03, 0xa1, 0xb1, 0x5a, 0x64, 0x09, 0xde, 0x9e, 0xfb, 0xcb, 0x40, 0xad, 0xf6, 0xf9, /* Byte value: 0xaf */ + 0xe8, 0x59, 0xfe, 0x71, 0xd9, 0x0c, 0x5c, 0xbf, 0x2f, 0xca, 0x7d, 0x46, 0xe5, 0x0f, 0xe8, 0x68, /* Byte value: 0xb0 */ + 0x7e, 0x90, 0x95, 0x28, 0x14, 0x34, 0x87, 0x6d, 0x18, 0xe8, 0x1c, 0x58, 0xf0, 0x39, 0x7e, 0xfa, /* Byte value: 0xb1 */ + 0x5e, 0xbd, 0x0c, 0xc1, 0x81, 0xab, 0x53, 0x24, 0xfe, 0x3d, 0x6a, 0xaa, 0xc3, 0xf1, 0x5e, 0x62, /* Byte value: 0xb2 */ + 0xfa, 0x69, 0x0e, 0xbe, 0x5f, 0xc7, 0xaa, 0x2a, 0x6a, 0x68, 0x79, 0x99, 0x02, 0x86, 0xfa, 0xcc, /* Byte value: 0xb3 */ + 0xb5, 0xa1, 0x60, 0xc7, 0x82, 0x11, 0xdd, 0xe3, 0xfc, 0x2b, 0xd6, 0x19, 0xd7, 0x84, 0xb5, 0x96, /* Byte value: 0xb4 */ + 0x92, 0x84, 0xd1, 0xed, 0x97, 0xf2, 0x20, 0x72, 0x5b, 0x70, 0x1f, 0x91, 0x2b, 0x2f, 0x92, 0x81, /* Byte value: 0xb5 */ + 0x56, 0x27, 0xbb, 0x6a, 0x35, 0xfc, 0x66, 0xa7, 0x26, 0x99, 0x96, 0x77, 0xbf, 0xc3, 0x56, 0x44, /* Byte value: 0xb6 */ + 0xf1, 0xb6, 0x2b, 0x62, 0x31, 0x26, 
0x4d, 0xd1, 0x9f, 0x10, 0x44, 0xb1, 0x8f, 0xce, 0xf1, 0x76, /* Byte value: 0xb7 */ + 0x04, 0x4d, 0xba, 0xb4, 0x5a, 0xca, 0xfb, 0xa0, 0x6c, 0x52, 0x7e, 0x8f, 0x3e, 0x19, 0x04, 0x13, /* Byte value: 0xb8 */ + 0x6e, 0x67, 0x38, 0xbd, 0xbf, 0x9a, 0xed, 0xa8, 0x6b, 0x63, 0x27, 0x21, 0x08, 0x5d, 0x6e, 0xb6, /* Byte value: 0xb9 */ + 0xa5, 0x56, 0xcd, 0x52, 0x29, 0xbf, 0xb7, 0x26, 0x8f, 0xa0, 0xed, 0x60, 0x2f, 0xe0, 0xa5, 0xda, /* Byte value: 0xba */ + 0xe6, 0x49, 0xae, 0x34, 0x1a, 0xf4, 0x0e, 0xcc, 0xad, 0x15, 0xc0, 0xb2, 0xb8, 0xc9, 0xe6, 0xb5, /* Byte value: 0xbb */ + 0xc8, 0x74, 0x67, 0x98, 0x4c, 0x93, 0x88, 0xf6, 0xc9, 0x1f, 0x0b, 0xb4, 0xd6, 0xc7, 0xc8, 0xf0, /* Byte value: 0xbc */ + 0x7b, 0x5f, 0xe0, 0xb1, 0xb9, 0x2d, 0x32, 0xe5, 0x6f, 0x4f, 0x9c, 0x84, 0x20, 0xb7, 0x7b, 0x9d, /* Byte value: 0xbd */ + 0x73, 0xc5, 0x57, 0x1a, 0x0d, 0x7a, 0x07, 0x66, 0xb7, 0xeb, 0x60, 0x59, 0x5c, 0x85, 0x73, 0xbb, /* Byte value: 0xbe */ + 0x51, 0x2f, 0x93, 0xa9, 0xb5, 0x80, 0x4f, 0x7f, 0x67, 0x17, 0x29, 0x0d, 0x70, 0xa0, 0x51, 0xcb, /* Byte value: 0xbf */ + 0x95, 0x8c, 0xf9, 0x2e, 0x17, 0x8e, 0x09, 0xaa, 0x1a, 0xfe, 0xa0, 0xeb, 0xe4, 0x4c, 0x95, 0x0e, /* Byte value: 0xc0 */ + 0x66, 0xfd, 0x8f, 0x16, 0x0b, 0xcd, 0xd8, 0x2b, 0xb3, 0xc7, 0xdb, 0xfc, 0x74, 0x6f, 0x66, 0x90, /* Byte value: 0xc1 */ + 0xd6, 0x93, 0x9a, 0x48, 0x24, 0xc5, 0xb0, 0x40, 0x38, 0x4b, 0x8d, 0x39, 0x73, 0x65, 0xd6, 0x61, /* Byte value: 0xc2 */ + 0x5b, 0x72, 0x79, 0x58, 0x2c, 0xb2, 0xe6, 0xac, 0x89, 0x9a, 0xea, 0x76, 0x13, 0x7f, 0x5b, 0x05, /* Byte value: 0xc3 */ + 0xe3, 0x86, 0xdb, 0xad, 0xb7, 0xed, 0xbb, 0x44, 0xda, 0xb2, 0x40, 0x6e, 0x68, 0x47, 0xe3, 0xd2, /* Byte value: 0xc4 */ + 0x63, 0x32, 0xfa, 0x8f, 0xa6, 0xd4, 0x6d, 0xa3, 0xc4, 0x60, 0x5b, 0x20, 0xa4, 0xe1, 0x63, 0xf7, /* Byte value: 0xc5 */ + 0x9b, 0x9c, 0xa9, 0x6b, 0xd4, 0x76, 0x5b, 0xd9, 0x98, 0x21, 0x1d, 0x1f, 0xb9, 0x8a, 0x9b, 0xd3, /* Byte value: 0xc6 */ + 0xbd, 0x3b, 0xd7, 0x6c, 0x36, 0x46, 0xe8, 0x60, 0x24, 0x8f, 0x2a, 0xc4, 0xab, 0xb6, 0xbd, 0xb0, /* Byte value: 0xc7 */ + 0x77, 0x88, 0xed, 0xae, 0x57, 0xb0, 0xfc, 0xc6, 0xdb, 0xb9, 0x1e, 0xd6, 0x62, 0x9c, 0x77, 0xa8, /* Byte value: 0xc8 */ + 0x8d, 0xe1, 0xe3, 0x10, 0x08, 0x77, 0x56, 0xec, 0xb1, 0xd1, 0x67, 0x4f, 0x60, 0x1a, 0x8d, 0x64, /* Byte value: 0xc9 */ + 0xbc, 0xb9, 0x18, 0x41, 0xc1, 0x95, 0xa6, 0x48, 0x3f, 0x7a, 0xd4, 0x97, 0x45, 0x21, 0xbc, 0xc4, /* Byte value: 0xca */ + 0xf3, 0x71, 0x76, 0x38, 0x1c, 0x43, 0xd1, 0x81, 0xa9, 0x39, 0x7b, 0x17, 0x90, 0x23, 0xf3, 0x9e, /* Byte value: 0xcb */ + 0xf4, 0x79, 0x5e, 0xfb, 0x9c, 0x3f, 0xf8, 0x59, 0xe8, 0xb7, 0xc4, 0x6d, 0x5f, 0x40, 0xf4, 0x11, /* Byte value: 0xcc */ + 0x26, 0xa7, 0x7e, 0x07, 0xe2, 0x30, 0xb3, 0xb9, 0xbc, 0xae, 0x37, 0xdb, 0x12, 0x3c, 0x26, 0x63, /* Byte value: 0xcd */ + 0xd4, 0x54, 0xc7, 0x12, 0x09, 0xa0, 0x2c, 0x10, 0x0e, 0x62, 0xb2, 0x9f, 0x6c, 0x88, 0xd4, 0x89, /* Byte value: 0xce */ + 0x80, 0xb4, 0x21, 0x22, 0x11, 0x39, 0xd6, 0xe7, 0x1e, 0xd2, 0x1b, 0x4e, 0xcc, 0xa6, 0x80, 0x25, /* Byte value: 0xcf */ + 0x93, 0x06, 0x1e, 0xc0, 0x60, 0x21, 0x6e, 0x5a, 0x40, 0x85, 0xe1, 0xc2, 0xc5, 0xb8, 0x93, 0xf5, /* Byte value: 0xd0 */ + 0xb6, 0xe4, 0xf2, 0xb0, 0x58, 0xa7, 0x0f, 0x9b, 0xd1, 0xf7, 0x17, 0xec, 0x26, 0xfe, 0xb6, 0x0a, /* Byte value: 0xd1 */ + 0x15, 0x38, 0xd8, 0x0c, 0x06, 0xb7, 0xdf, 0x4d, 0x04, 0x2c, 0xbb, 0xa5, 0x28, 0xea, 0x15, 0x2b, /* Byte value: 0xd2 */ + 0xcc, 0x39, 0xdd, 0x2c, 0x16, 0x59, 0x73, 0x56, 0xa5, 0x4d, 0x75, 0x3b, 0xe8, 0xde, 0xcc, 0xe3, /* Byte value: 0xd3 */ + 0xac, 0x4e, 0xb5, 0xd4, 0x6a, 0x3b, 0xcc, 0x8d, 0x4c, 0xf1, 0xef, 0xee, 0xbd, 
0x45, 0xac, 0x88, /* Byte value: 0xd4 */ + 0xc1, 0x6c, 0x1f, 0x1e, 0x0f, 0x17, 0xf3, 0x5d, 0x0a, 0x4e, 0x09, 0x3a, 0x44, 0x62, 0xc1, 0xa2, /* Byte value: 0xd5 */ + 0xee, 0xd3, 0x19, 0x9f, 0xae, 0xa3, 0x3b, 0x4f, 0x75, 0xb1, 0x3c, 0x6f, 0xc4, 0xfb, 0xee, 0x93, /* Byte value: 0xd6 */ + 0xae, 0x89, 0xe8, 0x8e, 0x47, 0x5e, 0x50, 0xdd, 0x7a, 0xd8, 0xd0, 0x48, 0xa2, 0xa8, 0xae, 0x60, /* Byte value: 0xd7 */ + 0x9c, 0x94, 0x81, 0xa8, 0x54, 0x0a, 0x72, 0x01, 0xd9, 0xaf, 0xa2, 0x65, 0x76, 0xe9, 0x9c, 0x5c, /* Byte value: 0xd8 */ + 0xd9, 0x01, 0x05, 0x20, 0x10, 0xee, 0xac, 0x1b, 0xa1, 0x61, 0xce, 0x9e, 0xc0, 0x34, 0xd9, 0xc8, /* Byte value: 0xd9 */ + 0xa3, 0xdc, 0x2a, 0xbc, 0x5e, 0x10, 0xd0, 0xd6, 0xd5, 0xdb, 0xac, 0x49, 0x0e, 0x14, 0xa3, 0x21, /* Byte value: 0xda */ + 0x25, 0xe2, 0xec, 0x70, 0x38, 0x86, 0x61, 0xc1, 0x91, 0x72, 0xf6, 0x2e, 0xe3, 0x46, 0x25, 0xff, /* Byte value: 0xdb */ + 0xeb, 0x1c, 0x6c, 0x06, 0x03, 0xba, 0x8e, 0xc7, 0x02, 0x16, 0xbc, 0xb3, 0x14, 0x75, 0xeb, 0xf4, /* Byte value: 0xdc */ + 0x40, 0x5a, 0xf1, 0x11, 0xe9, 0xfd, 0x6b, 0x92, 0x0f, 0x69, 0xec, 0x27, 0x66, 0x53, 0x40, 0xf3, /* Byte value: 0xdd */ + 0x48, 0xc0, 0x46, 0xba, 0x5d, 0xaa, 0x5e, 0x11, 0xd7, 0xcd, 0x10, 0xfa, 0x1a, 0x61, 0x48, 0xd5, /* Byte value: 0xde */ + 0x6d, 0x22, 0xaa, 0xca, 0x65, 0x2c, 0x3f, 0xd0, 0x46, 0xbf, 0xe6, 0xd4, 0xf9, 0x27, 0x6d, 0x2a, /* Byte value: 0xdf */ + 0x28, 0xb7, 0x2e, 0x42, 0x21, 0xc8, 0xe1, 0xca, 0x3e, 0x71, 0x8a, 0x2f, 0x4f, 0xfa, 0x28, 0xbe, /* Byte value: 0xe0 */ + 0xff, 0xa6, 0x7b, 0x27, 0xf2, 0xde, 0x1f, 0xa2, 0x1d, 0xcf, 0xf9, 0x45, 0xd2, 0x08, 0xff, 0xab, /* Byte value: 0xe1 */ + 0x0a, 0x5d, 0xea, 0xf1, 0x99, 0x32, 0xa9, 0xd3, 0xee, 0x8d, 0xc3, 0x7b, 0x63, 0xdf, 0x0a, 0xce, /* Byte value: 0xe2 */ + 0x4f, 0xc8, 0x6e, 0x79, 0xdd, 0xd6, 0x77, 0xc9, 0x96, 0x43, 0xaf, 0x80, 0xd5, 0x02, 0x4f, 0x5a, /* Byte value: 0xe3 */ + 0x19, 0xef, 0xd5, 0x13, 0xe8, 0x2a, 0x11, 0x6e, 0xb0, 0xda, 0x39, 0xf7, 0x6a, 0xc1, 0x19, 0x1e, /* Byte value: 0xe4 */ + 0x53, 0xe8, 0xce, 0xf3, 0x98, 0xe5, 0xd3, 0x2f, 0x51, 0x3e, 0x16, 0xab, 0x6f, 0x4d, 0x53, 0x23, /* Byte value: 0xe5 */ + 0xf5, 0xfb, 0x91, 0xd6, 0x6b, 0xec, 0xb6, 0x71, 0xf3, 0x42, 0x3a, 0x3e, 0xb1, 0xd7, 0xf5, 0x65, /* Byte value: 0xe6 */ + 0xb9, 0x76, 0x6d, 0xd8, 0x6c, 0x8c, 0x13, 0xc0, 0x48, 0xdd, 0x54, 0x4b, 0x95, 0xaf, 0xb9, 0xa3, /* Byte value: 0xe7 */ + 0xcb, 0x31, 0xf5, 0xef, 0x96, 0x25, 0x5a, 0x8e, 0xe4, 0xc3, 0xca, 0x41, 0x27, 0xbd, 0xcb, 0x6c, /* Byte value: 0xe8 */ + 0x45, 0x95, 0x84, 0x88, 0x44, 0xe4, 0xde, 0x1a, 0x78, 0xce, 0x6c, 0xfb, 0xb6, 0xdd, 0x45, 0x94, /* Byte value: 0xe9 */ + 0x57, 0xa5, 0x74, 0x47, 0xc2, 0x2f, 0x28, 0x8f, 0x3d, 0x6c, 0x68, 0x24, 0x51, 0x54, 0x57, 0x30, /* Byte value: 0xea */ + 0xd7, 0x11, 0x55, 0x65, 0xd3, 0x16, 0xfe, 0x68, 0x23, 0xbe, 0x73, 0x6a, 0x9d, 0xf2, 0xd7, 0x15, /* Byte value: 0xeb */ + 0x68, 0xed, 0xdf, 0x53, 0xc8, 0x35, 0x8a, 0x58, 0x31, 0x18, 0x66, 0x08, 0x29, 0xa9, 0x68, 0x4d, /* Byte value: 0xec */ + 0x4c, 0x8d, 0xfc, 0x0e, 0x07, 0x60, 0xa5, 0xb1, 0xbb, 0x9f, 0x6e, 0x75, 0x24, 0x78, 0x4c, 0xc6, /* Byte value: 0xed */ + 0x20, 0x2d, 0x99, 0xe9, 0x95, 0x9f, 0xd4, 0x49, 0xe6, 0xd5, 0x76, 0xf2, 0x33, 0xc8, 0x20, 0x98, /* Byte value: 0xee */ + 0x4a, 0x07, 0x1b, 0xe0, 0x70, 0xcf, 0xc2, 0x41, 0xe1, 0xe4, 0x2f, 0x5c, 0x05, 0x8c, 0x4a, 0x3d, /* Byte value: 0xef */ + 0x05, 0xcf, 0x75, 0x99, 0xad, 0x19, 0xb5, 0x88, 0x77, 0xa7, 0x80, 0xdc, 0xd0, 0x8e, 0x05, 0x67, /* Byte value: 0xf0 */ + 0xc6, 0x64, 0x37, 0xdd, 0x8f, 0x6b, 0xda, 0x85, 0x4b, 0xc0, 0xb6, 0x40, 0x8b, 0x01, 0xc6, 0x2d, /* Byte value: 0xf1 */ + 
0x14, 0xba, 0x17, 0x21, 0xf1, 0x64, 0x91, 0x65, 0x1f, 0xd9, 0x45, 0xf6, 0xc6, 0x7d, 0x14, 0x5f, /* Byte value: 0xf2 */ + 0x9e, 0x53, 0xdc, 0xf2, 0x79, 0x6f, 0xee, 0x51, 0xef, 0x86, 0x9d, 0xc3, 0x69, 0x04, 0x9e, 0xb4, /* Byte value: 0xf3 */ + 0xd5, 0xd6, 0x08, 0x3f, 0xfe, 0x73, 0x62, 0x38, 0x15, 0x97, 0x4c, 0xcc, 0x82, 0x1f, 0xd5, 0xfd, /* Byte value: 0xf4 */ + 0x4d, 0x0f, 0x33, 0x23, 0xf0, 0xb3, 0xeb, 0x99, 0xa0, 0x6a, 0x90, 0x26, 0xca, 0xef, 0x4d, 0xb2, /* Byte value: 0xf5 */ + 0xce, 0xfe, 0x80, 0x76, 0x3b, 0x3c, 0xef, 0x06, 0x93, 0x64, 0x4a, 0x9d, 0xf7, 0x33, 0xce, 0x0b, /* Byte value: 0xf6 */ + 0x5a, 0xf0, 0xb6, 0x75, 0xdb, 0x61, 0xa8, 0x84, 0x92, 0x6f, 0x14, 0x25, 0xfd, 0xe8, 0x5a, 0x71, /* Byte value: 0xf7 */ + 0x3f, 0x48, 0xab, 0x14, 0x0a, 0x1a, 0xa2, 0xd7, 0x0c, 0x74, 0x0e, 0x2c, 0x78, 0xfd, 0x3f, 0x7d, /* Byte value: 0xf8 */ + 0x8a, 0xe9, 0xcb, 0xd3, 0x88, 0x0b, 0x7f, 0x34, 0xf0, 0x5f, 0xd8, 0x35, 0xaf, 0x79, 0x8a, 0xeb, /* Byte value: 0xf9 */ + 0x83, 0xf1, 0xb3, 0x55, 0xcb, 0x8f, 0x04, 0x9f, 0x33, 0x0e, 0xda, 0xbb, 0x3d, 0xdc, 0x83, 0xb9, /* Byte value: 0xfa */ + 0xc3, 0xab, 0x42, 0x44, 0x22, 0x72, 0x6f, 0x0d, 0x3c, 0x67, 0x36, 0x9c, 0x5b, 0x8f, 0xc3, 0x4a, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0x94, 0x0e, 0x36, 0x03, 0xe0, 0x5d, 0x47, 0x82, 0x01, 0x0b, 0x5e, 0xb8, 0x0a, 0xdb, 0x94, 0x7a, /* Byte value: 0xfd */ + 0x1f, 0x65, 0x32, 0xfd, 0x9f, 0x85, 0x76, 0x9e, 0xea, 0xa1, 0x78, 0xde, 0x4b, 0x35, 0x1f, 0xe5, /* Byte value: 0xfe */ + 0x98, 0xd9, 0x3b, 0x1c, 0x0e, 0xc0, 0x89, 0xa1, 0xb5, 0xfd, 0xdc, 0xea, 0x48, 0xf0, 0x98, 0x4f, /* Byte value: 0xff */ + + /* Matrix row: 15 */ + 0xcc, 0x0e, 0x86, 0xc2, 0x4f, 0xba, 0x59, 0x3b, 0xe3, 0xef, 0x79, 0x82, 0x53, 0x11, 0xf0, 0xcd, /* Byte value: 0x00 */ + 0x03, 0x96, 0x94, 0x95, 0x6c, 0x2a, 0xb6, 0xf5, 0x9c, 0xba, 0xe7, 0x37, 0x83, 0x91, 0xbd, 0x5b, /* Byte value: 0x01 */ + 0x17, 0xb1, 0xab, 0xa6, 0xf9, 0x11, 0xd2, 0x03, 0xc3, 0x8e, 0xc9, 0x90, 0x58, 0x92, 0xad, 0x69, /* Byte value: 0x02 */ + 0xa5, 0x82, 0x44, 0x27, 0x96, 0x65, 0xbf, 0x60, 0xda, 0x6e, 0x59, 0x28, 0x51, 0x68, 0x84, 0xbe, /* Byte value: 0x03 */ + 0xd4, 0x34, 0xac, 0xe0, 0xa9, 0x29, 0xa0, 0x9f, 0x89, 0x76, 0x4d, 0xf9, 0xc1, 0x13, 0x51, 0x50, /* Byte value: 0x04 */ + 0xfc, 0x7a, 0xd2, 0x86, 0x40, 0x5f, 0x68, 0xb0, 0x37, 0x1e, 0x11, 0x74, 0xb4, 0x15, 0x71, 0x34, /* Byte value: 0x05 */ + 0xd6, 0xd0, 0xf5, 0x06, 0xe1, 0xa4, 0xc5, 0x39, 0x61, 0x1b, 0xf7, 0x62, 0x82, 0x4c, 0x87, 0x83, /* Byte value: 0x06 */ + 0x76, 0x2b, 0xce, 0x5d, 0xc3, 0xbf, 0x63, 0x85, 0xdc, 0x78, 0x44, 0x13, 0x95, 0x54, 0x07, 0xd0, /* Byte value: 0x07 */ + 0xd2, 0xdb, 0x47, 0x09, 0x71, 0x7d, 0x0f, 0xb6, 0x72, 0xc1, 0x40, 0x97, 0x04, 0xf2, 0xe8, 0xe6, /* Byte value: 0x08 */ + 0x20, 0x58, 0xd9, 0x78, 0x0a, 0x07, 0x9f, 0xf2, 0x98, 0x1f, 0xf1, 0xa4, 0xba, 0xb9, 0xfe, 0xae, /* Byte value: 0x09 */ + 0x65, 0x91, 0xd7, 0xf4, 0xaa, 0x77, 0x7b, 0x09, 0x0c, 0x2c, 0x3a, 0x76, 0x4b, 0x78, 0xc5, 0xdc, /* Byte value: 0x0a */ + 0x42, 0x54, 0x28, 0x16, 0x5c, 0x83, 0x98, 0x81, 0x1b, 0x53, 0x9b, 0x10, 0xf4, 0xee, 0xe9, 0x4c, /* Byte value: 0x0b */ + 0x46, 0x5f, 0x9a, 0x19, 0xcc, 0x5a, 0x52, 0x0e, 0x08, 0x89, 0x2c, 0xe5, 0x72, 0x50, 0x86, 0x29, /* Byte value: 0x0c */ + 0xbb, 0x57, 0x85, 0xec, 0xa8, 0xa2, 0xe9, 0xed, 0x4b, 0x40, 0x60, 0x3d, 0x06, 0x8b, 0x9c, 0x95, /* Byte value: 0x0d */ + 0x2c, 0x45, 0xcc, 0x69, 0x79, 0xaf, 0x02, 0xa0, 0xad, 0xb2, 0xeb, 0x78, 0xf3, 0xb8, 0x4f, 0x01, /* Byte value: 0x0e */ + 0x25, 0x21, 0xa6, 
0x04, 0xbe, 0x79, 0x86, 0x2e, 0xff, 0x12, 0x1b, 0xfd, 0xfc, 0xc9, 0xfa, 0x43, /* Byte value: 0x0f */ + 0x2e, 0xa1, 0x95, 0x8f, 0x31, 0x22, 0x67, 0x06, 0x45, 0xdf, 0x51, 0xe3, 0xb0, 0xe7, 0x99, 0xd2, /* Byte value: 0x10 */ + 0x7f, 0x4f, 0xa4, 0x30, 0x04, 0x69, 0xe7, 0x0b, 0x8e, 0xd8, 0xb4, 0x96, 0x9a, 0x25, 0xb2, 0x92, /* Byte value: 0x11 */ + 0xad, 0x94, 0xe3, 0x39, 0x75, 0x14, 0xe8, 0xbd, 0xfc, 0x19, 0xf4, 0x01, 0x9e, 0xd7, 0x5a, 0x74, /* Byte value: 0x12 */ + 0x06, 0xef, 0xeb, 0xe9, 0xd8, 0x54, 0xaf, 0x29, 0xfb, 0xb7, 0x0d, 0x6e, 0xc5, 0xe1, 0xb9, 0xb6, /* Byte value: 0x13 */ + 0x6b, 0x68, 0x9b, 0x03, 0x91, 0x52, 0x83, 0xfd, 0xd1, 0xec, 0x9a, 0x31, 0x41, 0x26, 0xa2, 0xa0, /* Byte value: 0x14 */ + 0x3b, 0xf4, 0x67, 0xcf, 0x80, 0xbe, 0xd0, 0xa3, 0x6e, 0x3c, 0x22, 0xe8, 0xab, 0x2a, 0xe2, 0x68, /* Byte value: 0x15 */ + 0x6a, 0x1a, 0x56, 0x70, 0xb5, 0xf5, 0x50, 0xae, 0xa5, 0x3b, 0xc7, 0x9d, 0x81, 0xe8, 0xc9, 0x28, /* Byte value: 0x16 */ + 0x7e, 0x3d, 0x69, 0x43, 0x20, 0xce, 0x34, 0x58, 0xfa, 0x0f, 0xe9, 0x3a, 0x5a, 0xeb, 0xd9, 0x1a, /* Byte value: 0x17 */ + 0x3c, 0x69, 0x41, 0x55, 0x7c, 0x4d, 0xac, 0xd9, 0xe1, 0x5c, 0x72, 0x2a, 0xae, 0x05, 0x30, 0x56, /* Byte value: 0x18 */ + 0x84, 0xa8, 0x50, 0x2c, 0xb8, 0xc5, 0xf3, 0xc1, 0x36, 0xa6, 0xf5, 0x20, 0x2b, 0x1f, 0x11, 0x98, /* Byte value: 0x19 */ + 0x67, 0x75, 0x8e, 0x12, 0xe2, 0xfa, 0x1e, 0xaf, 0xe4, 0x41, 0x80, 0xed, 0x08, 0x27, 0x13, 0x0f, /* Byte value: 0x1a */ + 0xb6, 0x38, 0x5d, 0x8e, 0xff, 0xad, 0xa7, 0xec, 0x0a, 0x3a, 0x27, 0x4d, 0x8f, 0x44, 0x46, 0xb2, /* Byte value: 0x1b */ + 0x16, 0xc3, 0x66, 0xd5, 0xdd, 0xb6, 0x01, 0x50, 0xb7, 0x59, 0x94, 0x3c, 0x98, 0x5c, 0xc6, 0xe1, /* Byte value: 0x1c */ + 0x88, 0xb5, 0x45, 0x3d, 0xcb, 0x6d, 0x6e, 0x93, 0x03, 0x0b, 0xef, 0xfc, 0x62, 0x1e, 0xa0, 0x37, /* Byte value: 0x1d */ + 0x58, 0x8a, 0x5b, 0xd2, 0xf2, 0x9d, 0x04, 0x83, 0x99, 0xa7, 0x15, 0xf0, 0x25, 0xb3, 0x9e, 0x02, /* Byte value: 0x1e */ + 0xbc, 0xca, 0xa3, 0x76, 0x54, 0x51, 0x95, 0x97, 0xc4, 0x20, 0x30, 0xff, 0x03, 0xa4, 0x4e, 0xab, /* Byte value: 0x1f */ + 0xde, 0xc6, 0x52, 0x18, 0x02, 0xd5, 0x92, 0xe4, 0x47, 0x6c, 0x5a, 0x4b, 0x4d, 0xf3, 0x59, 0x49, /* Byte value: 0x20 */ + 0xc2, 0xf7, 0xca, 0x35, 0x74, 0x9f, 0xa1, 0xcf, 0x3e, 0x2f, 0xd9, 0xc5, 0x59, 0x4f, 0x97, 0xb1, /* Byte value: 0x21 */ + 0x4e, 0x49, 0x3d, 0x07, 0x2f, 0x2b, 0x05, 0xd3, 0x2e, 0xfe, 0x81, 0xcc, 0xbd, 0xef, 0x58, 0xe3, /* Byte value: 0x22 */ + 0x3f, 0xff, 0xd5, 0xc0, 0x10, 0x67, 0x1a, 0x2c, 0x7d, 0xe6, 0x95, 0x1d, 0x2d, 0x94, 0x8d, 0x0d, /* Byte value: 0x23 */ + 0x0e, 0xf9, 0x4c, 0xf7, 0x3b, 0x25, 0xf8, 0xf4, 0xdd, 0xc0, 0xa0, 0x47, 0x0a, 0x5e, 0x67, 0x7c, /* Byte value: 0x24 */ + 0x1f, 0xa7, 0x0c, 0xb8, 0x1a, 0x60, 0x85, 0xde, 0xe5, 0xf9, 0x64, 0xb9, 0x97, 0x2d, 0x73, 0xa3, /* Byte value: 0x25 */ + 0x99, 0xeb, 0x05, 0x72, 0xea, 0x28, 0x13, 0xb9, 0x3b, 0x32, 0x2b, 0x02, 0xff, 0x6d, 0xb4, 0xe8, /* Byte value: 0x26 */ + 0x0c, 0x1d, 0x15, 0x11, 0x73, 0xa8, 0x9d, 0x52, 0x35, 0xad, 0x1a, 0xdc, 0x49, 0x01, 0xb1, 0xaf, /* Byte value: 0x27 */ + 0x70, 0xc4, 0x25, 0xb4, 0x1b, 0xeb, 0xcc, 0xac, 0x27, 0xcf, 0x49, 0x7d, 0x50, 0xb5, 0xbe, 0x66, /* Byte value: 0x28 */ + 0x44, 0xbb, 0xc3, 0xff, 0x84, 0xd7, 0x37, 0xa8, 0xe0, 0xe4, 0x96, 0x7e, 0x31, 0x0f, 0x50, 0xfa, /* Byte value: 0x29 */ + 0x2d, 0x37, 0x01, 0x1a, 0x5d, 0x08, 0xd1, 0xf3, 0xd9, 0x65, 0xb6, 0xd4, 0x33, 0x76, 0x24, 0x89, /* Byte value: 0x2a */ + 0x5f, 0x17, 0x7d, 0x48, 0x0e, 0x6e, 0x78, 0xf9, 0x16, 0xc7, 0x45, 0x32, 0x20, 0x9c, 0x4c, 0x3c, /* Byte value: 0x2b */ + 0x78, 0xd2, 0x82, 0xaa, 0xf8, 0x9a, 0x9b, 0x71, 0x01, 0xb8, 
0xe4, 0x54, 0x9f, 0x0a, 0x60, 0xac, /* Byte value: 0x2c */ + 0xcb, 0x93, 0xa0, 0x58, 0xb3, 0x49, 0x25, 0x41, 0x6c, 0x8f, 0x29, 0x40, 0x56, 0x3e, 0x22, 0xf3, /* Byte value: 0x2d */ + 0xd5, 0x46, 0x61, 0x93, 0x8d, 0x8e, 0x73, 0xcc, 0xfd, 0xa1, 0x10, 0x55, 0x01, 0xdd, 0x3a, 0xd8, /* Byte value: 0x2e */ + 0x73, 0x52, 0xb1, 0x21, 0x77, 0xc1, 0x7a, 0x59, 0xbb, 0x75, 0xae, 0x4a, 0xd3, 0x24, 0x03, 0x3d, /* Byte value: 0x2f */ + 0x5c, 0x81, 0xe9, 0xdd, 0x62, 0x44, 0xce, 0x0c, 0x8a, 0x7d, 0xa2, 0x05, 0xa3, 0x0d, 0xf1, 0x67, /* Byte value: 0x30 */ + 0xfe, 0x9e, 0x8b, 0x60, 0x08, 0xd2, 0x0d, 0x16, 0xdf, 0x73, 0xab, 0xef, 0xf7, 0x4a, 0xa7, 0xe7, /* Byte value: 0x31 */ + 0x6f, 0x63, 0x29, 0x0c, 0x01, 0x8b, 0x49, 0x72, 0xc2, 0x36, 0x2d, 0xc4, 0xc7, 0x98, 0xcd, 0xc5, /* Byte value: 0x32 */ + 0x1a, 0xde, 0x73, 0xc4, 0xae, 0x1e, 0x9c, 0x02, 0x82, 0xf4, 0x8e, 0xe0, 0xd1, 0x5d, 0x77, 0x4e, /* Byte value: 0x33 */ + 0x07, 0x9d, 0x26, 0x9a, 0xfc, 0xf3, 0x7c, 0x7a, 0x8f, 0x60, 0x50, 0xc2, 0x05, 0x2f, 0xd2, 0x3e, /* Byte value: 0x34 */ + 0xee, 0xb2, 0x06, 0x5c, 0x0d, 0x30, 0xa3, 0x6f, 0x93, 0x9d, 0x32, 0xbd, 0xaa, 0xf7, 0xd8, 0xb0, /* Byte value: 0x35 */ + 0xea, 0xb9, 0xb4, 0x53, 0x9d, 0xe9, 0x69, 0xe0, 0x80, 0x47, 0x85, 0x48, 0x2c, 0x49, 0xb7, 0xd5, /* Byte value: 0x36 */ + 0xc8, 0x05, 0x34, 0xcd, 0xdf, 0x63, 0x93, 0xb4, 0xf0, 0x35, 0xce, 0x77, 0xd5, 0xaf, 0x9f, 0xa8, /* Byte value: 0x37 */ + 0x8c, 0xbe, 0xf7, 0x32, 0x5b, 0xb4, 0xa4, 0x1c, 0x10, 0xd1, 0x58, 0x09, 0xe4, 0xa0, 0xcf, 0x52, /* Byte value: 0x38 */ + 0xb5, 0xae, 0xc9, 0x1b, 0x93, 0x87, 0x11, 0x19, 0x96, 0x80, 0xc0, 0x7a, 0x0c, 0xd5, 0xfb, 0xe9, /* Byte value: 0x39 */ + 0x38, 0x62, 0xf3, 0x5a, 0xec, 0x94, 0x66, 0x56, 0xf2, 0x86, 0xc5, 0xdf, 0x28, 0xbb, 0x5f, 0x33, /* Byte value: 0x3a */ + 0x59, 0xf8, 0x96, 0xa1, 0xd6, 0x3a, 0xd7, 0xd0, 0xed, 0x70, 0x48, 0x5c, 0xe5, 0x7d, 0xf5, 0x8a, /* Byte value: 0x3b */ + 0x69, 0x8c, 0xc2, 0xe5, 0xd9, 0xdf, 0xe6, 0x5b, 0x39, 0x81, 0x20, 0xaa, 0x02, 0x79, 0x74, 0x73, /* Byte value: 0x3c */ + 0x10, 0x2c, 0x8d, 0x3c, 0x05, 0xe2, 0xae, 0x79, 0x4c, 0xee, 0x99, 0x52, 0x5d, 0xbd, 0x7f, 0x57, /* Byte value: 0x3d */ + 0xf0, 0x67, 0xc7, 0x97, 0x33, 0xf7, 0xf5, 0xe2, 0x02, 0xb3, 0x0b, 0xa8, 0xfd, 0x14, 0xc0, 0x9b, /* Byte value: 0x3e */ + 0xa3, 0x6d, 0xaf, 0xce, 0x4e, 0x31, 0x10, 0x49, 0x21, 0xd9, 0x54, 0x46, 0x94, 0x89, 0x3d, 0x08, /* Byte value: 0x3f */ + 0x09, 0x64, 0x6a, 0x6d, 0xc7, 0xd6, 0x84, 0x8e, 0x52, 0xa0, 0xf0, 0x85, 0x0f, 0x71, 0xb5, 0x42, /* Byte value: 0x40 */ + 0x0d, 0x6f, 0xd8, 0x62, 0x57, 0x0f, 0x4e, 0x01, 0x41, 0x7a, 0x47, 0x70, 0x89, 0xcf, 0xda, 0x27, /* Byte value: 0x41 */ + 0xfd, 0x08, 0x1f, 0xf5, 0x64, 0xf8, 0xbb, 0xe3, 0x43, 0xc9, 0x4c, 0xd8, 0x74, 0xdb, 0x1a, 0xbc, /* Byte value: 0x42 */ + 0x1b, 0xac, 0xbe, 0xb7, 0x8a, 0xb9, 0x4f, 0x51, 0xf6, 0x23, 0xd3, 0x4c, 0x11, 0x93, 0x1c, 0xc6, /* Byte value: 0x43 */ + 0x27, 0xc5, 0xff, 0xe2, 0xf6, 0xf4, 0xe3, 0x88, 0x17, 0x7f, 0xa1, 0x66, 0xbf, 0x96, 0x2c, 0x90, /* Byte value: 0x44 */ + 0x8d, 0xcc, 0x3a, 0x41, 0x7f, 0x13, 0x77, 0x4f, 0x64, 0x06, 0x05, 0xa5, 0x24, 0x6e, 0xa4, 0xda, /* Byte value: 0x45 */ + 0xf2, 0x83, 0x9e, 0x71, 0x7b, 0x7a, 0x90, 0x44, 0xea, 0xde, 0xb1, 0x33, 0xbe, 0x4b, 0x16, 0x48, /* Byte value: 0x46 */ + 0x92, 0x6b, 0x36, 0xf9, 0x65, 0x73, 0xf2, 0x91, 0x81, 0xff, 0x61, 0x1c, 0xb3, 0x43, 0xd7, 0x79, /* Byte value: 0x47 */ + 0x6d, 0x87, 0x70, 0xea, 0x49, 0x06, 0x2c, 0xd4, 0x2a, 0x5b, 0x97, 0x5f, 0x84, 0xc7, 0x1b, 0x16, /* Byte value: 0x48 */ + 0x5d, 0xf3, 0x24, 0xae, 0x46, 0xe3, 0x1d, 0x5f, 0xfe, 0xaa, 0xff, 0xa9, 0x63, 0xc3, 0x9a, 0xef, /* Byte 
value: 0x49 */ + 0xf1, 0x15, 0x0a, 0xe4, 0x17, 0x50, 0x26, 0xb1, 0x76, 0x64, 0x56, 0x04, 0x3d, 0xda, 0xab, 0x13, /* Byte value: 0x4a */ + 0x21, 0x2a, 0x14, 0x0b, 0x2e, 0xa0, 0x4c, 0xa1, 0xec, 0xc8, 0xac, 0x08, 0x7a, 0x77, 0x95, 0x26, /* Byte value: 0x4b */ + 0x37, 0xe9, 0x72, 0xde, 0xf3, 0x16, 0x4d, 0xf1, 0x5b, 0x91, 0x38, 0x34, 0xe2, 0x2b, 0x53, 0xc7, /* Byte value: 0x4c */ + 0x40, 0xb0, 0x71, 0xf0, 0x14, 0x0e, 0xfd, 0x27, 0xf3, 0x3e, 0x21, 0x8b, 0xb7, 0xb1, 0x3f, 0x9f, /* Byte value: 0x4d */ + 0x8e, 0x5a, 0xae, 0xd4, 0x13, 0x39, 0xc1, 0xba, 0xf8, 0xbc, 0xe2, 0x92, 0xa7, 0xff, 0x19, 0x81, /* Byte value: 0x4e */ + 0xe8, 0x5d, 0xed, 0xb5, 0xd5, 0x64, 0x0c, 0x46, 0x68, 0x2a, 0x3f, 0xd3, 0x6f, 0x16, 0x61, 0x06, /* Byte value: 0x4f */ + 0xe4, 0x40, 0xf8, 0xa4, 0xa6, 0xcc, 0x91, 0x14, 0x5d, 0x87, 0x25, 0x0f, 0x26, 0x17, 0xd0, 0xa9, /* Byte value: 0x50 */ + 0xec, 0x56, 0x5f, 0xba, 0x45, 0xbd, 0xc6, 0xc9, 0x7b, 0xf0, 0x88, 0x26, 0xe9, 0xa8, 0x0e, 0x63, /* Byte value: 0x51 */ + 0x9e, 0x76, 0x23, 0xe8, 0x16, 0xdb, 0x6f, 0xc3, 0xb4, 0x52, 0x7b, 0xc0, 0xfa, 0x42, 0x66, 0xd6, /* Byte value: 0x52 */ + 0x9c, 0x92, 0x7a, 0x0e, 0x5e, 0x56, 0x0a, 0x65, 0x5c, 0x3f, 0xc1, 0x5b, 0xb9, 0x1d, 0xb0, 0x05, /* Byte value: 0x53 */ + 0xd1, 0x4d, 0xd3, 0x9c, 0x1d, 0x57, 0xb9, 0x43, 0xee, 0x7b, 0xa7, 0xa0, 0x87, 0x63, 0x55, 0xbd, /* Byte value: 0x54 */ + 0xe7, 0xd6, 0x6c, 0x31, 0xca, 0xe6, 0x27, 0xe1, 0xc1, 0x3d, 0xc2, 0x38, 0xa5, 0x86, 0x6d, 0xf2, /* Byte value: 0x55 */ + 0xb8, 0xc1, 0x11, 0x79, 0xc4, 0x88, 0x5f, 0x18, 0xd7, 0xfa, 0x87, 0x0a, 0x85, 0x1a, 0x21, 0xce, /* Byte value: 0x56 */ + 0x2f, 0xd3, 0x58, 0xfc, 0x15, 0x85, 0xb4, 0x55, 0x31, 0x08, 0x0c, 0x4f, 0x70, 0x29, 0xf2, 0x5a, /* Byte value: 0x57 */ + 0xe2, 0xaf, 0x13, 0x4d, 0x7e, 0x98, 0x3e, 0x3d, 0xa6, 0x30, 0x28, 0x61, 0xe3, 0xf6, 0x69, 0x1f, /* Byte value: 0x58 */ + 0x8a, 0x51, 0x1c, 0xdb, 0x83, 0xe0, 0x0b, 0x35, 0xeb, 0x66, 0x55, 0x67, 0x21, 0x41, 0x76, 0xe4, /* Byte value: 0x59 */ + 0xd7, 0xa2, 0x38, 0x75, 0xc5, 0x03, 0x16, 0x6a, 0x15, 0xcc, 0xaa, 0xce, 0x42, 0x82, 0xec, 0x0b, /* Byte value: 0x5a */ + 0xb4, 0xdc, 0x04, 0x68, 0xb7, 0x20, 0xc2, 0x4a, 0xe2, 0x57, 0x9d, 0xd6, 0xcc, 0x1b, 0x90, 0x61, /* Byte value: 0x5b */ + 0x29, 0x3c, 0xb3, 0x15, 0xcd, 0xd1, 0x1b, 0x7c, 0xca, 0xbf, 0x01, 0x21, 0xb5, 0xc8, 0x4b, 0xec, /* Byte value: 0x5c */ + 0x13, 0xba, 0x19, 0xa9, 0x69, 0xc8, 0x18, 0x8c, 0xd0, 0x54, 0x7e, 0x65, 0xde, 0x2c, 0xc2, 0x0c, /* Byte value: 0x5d */ + 0x98, 0x99, 0xc8, 0x01, 0xce, 0x8f, 0xc0, 0xea, 0x4f, 0xe5, 0x76, 0xae, 0x3f, 0xa3, 0xdf, 0x60, /* Byte value: 0x5e */ + 0x80, 0xa3, 0xe2, 0x23, 0x28, 0x1c, 0x39, 0x4e, 0x25, 0x7c, 0x42, 0xd5, 0xad, 0xa1, 0x7e, 0xfd, /* Byte value: 0x5f */ + 0xa6, 0x14, 0xd0, 0xb2, 0xfa, 0x4f, 0x09, 0x95, 0x46, 0xd4, 0xbe, 0x1f, 0xd2, 0xf9, 0x39, 0xe5, /* Byte value: 0x60 */ + 0x62, 0x0c, 0xf1, 0x6e, 0x56, 0x84, 0x07, 0x73, 0x83, 0x4c, 0x6a, 0xb4, 0x4e, 0x57, 0x17, 0xe2, /* Byte value: 0x61 */ + 0xb3, 0x41, 0x22, 0xf2, 0x4b, 0xd3, 0xbe, 0x30, 0x6d, 0x37, 0xcd, 0x14, 0xc9, 0x34, 0x42, 0x5f, /* Byte value: 0x62 */ + 0x5e, 0x65, 0xb0, 0x3b, 0x2a, 0xc9, 0xab, 0xaa, 0x62, 0x10, 0x18, 0x9e, 0xe0, 0x52, 0x27, 0xb4, /* Byte value: 0x63 */ + 0x72, 0x20, 0x7c, 0x52, 0x53, 0x66, 0xa9, 0x0a, 0xcf, 0xa2, 0xf3, 0xe6, 0x13, 0xea, 0x68, 0xb5, /* Byte value: 0x64 */ + 0x43, 0x26, 0xe5, 0x65, 0x78, 0x24, 0x4b, 0xd2, 0x6f, 0x84, 0xc6, 0xbc, 0x34, 0x20, 0x82, 0xc4, /* Byte value: 0x65 */ + 0x34, 0x7f, 0xe6, 0x4b, 0x9f, 0x3c, 0xfb, 0x04, 0xc7, 0x2b, 0xdf, 0x03, 0x61, 0xba, 0xee, 0x9c, /* Byte value: 0x66 */ + 0x35, 0x0d, 0x2b, 0x38, 
0xbb, 0x9b, 0x28, 0x57, 0xb3, 0xfc, 0x82, 0xaf, 0xa1, 0x74, 0x85, 0x14, /* Byte value: 0x67 */ + 0x52, 0x78, 0xa5, 0x2a, 0x59, 0x61, 0x36, 0xf8, 0x57, 0xbd, 0x02, 0x42, 0xa9, 0x53, 0x96, 0x1b, /* Byte value: 0x68 */ + 0x26, 0xb7, 0x32, 0x91, 0xd2, 0x53, 0x30, 0xdb, 0x63, 0xa8, 0xfc, 0xca, 0x7f, 0x58, 0x47, 0x18, /* Byte value: 0x69 */ + 0x7d, 0xab, 0xfd, 0xd6, 0x4c, 0xe4, 0x82, 0xad, 0x66, 0xb5, 0x0e, 0x0d, 0xd9, 0x7a, 0x64, 0x41, /* Byte value: 0x6a */ + 0xb7, 0x4a, 0x90, 0xfd, 0xdb, 0x0a, 0x74, 0xbf, 0x7e, 0xed, 0x7a, 0xe1, 0x4f, 0x8a, 0x2d, 0x3a, /* Byte value: 0x6b */ + 0x0a, 0xf2, 0xfe, 0xf8, 0xab, 0xfc, 0x32, 0x7b, 0xce, 0x1a, 0x17, 0xb2, 0x8c, 0xe0, 0x08, 0x19, /* Byte value: 0x6c */ + 0x45, 0xc9, 0x0e, 0x8c, 0xa0, 0x70, 0xe4, 0xfb, 0x94, 0x33, 0xcb, 0xd2, 0xf1, 0xc1, 0x3b, 0x72, /* Byte value: 0x6d */ + 0x81, 0xd1, 0x2f, 0x50, 0x0c, 0xbb, 0xea, 0x1d, 0x51, 0xab, 0x1f, 0x79, 0x6d, 0x6f, 0x15, 0x75, /* Byte value: 0x6e */ + 0x6e, 0x11, 0xe4, 0x7f, 0x25, 0x2c, 0x9a, 0x21, 0xb6, 0xe1, 0x70, 0x68, 0x07, 0x56, 0xa6, 0x4d, /* Byte value: 0x6f */ + 0x53, 0x0a, 0x68, 0x59, 0x7d, 0xc6, 0xe5, 0xab, 0x23, 0x6a, 0x5f, 0xee, 0x69, 0x9d, 0xfd, 0x93, /* Byte value: 0x70 */ + 0x4a, 0x42, 0x8f, 0x08, 0xbf, 0xf2, 0xcf, 0x5c, 0x3d, 0x24, 0x36, 0x39, 0x3b, 0x51, 0x37, 0x86, /* Byte value: 0x71 */ + 0x79, 0xa0, 0x4f, 0xd9, 0xdc, 0x3d, 0x48, 0x22, 0x75, 0x6f, 0xb9, 0xf8, 0x5f, 0xc4, 0x0b, 0x24, /* Byte value: 0x72 */ + 0x32, 0x90, 0x0d, 0xa2, 0x47, 0x68, 0x54, 0x2d, 0x3c, 0x9c, 0xd2, 0x6d, 0xa4, 0x5b, 0x57, 0x2a, /* Byte value: 0x73 */ + 0x61, 0x9a, 0x65, 0xfb, 0x3a, 0xae, 0xb1, 0x86, 0x1f, 0xf6, 0x8d, 0x83, 0xcd, 0xc6, 0xaa, 0xb9, /* Byte value: 0x74 */ + 0xfb, 0xe7, 0xf4, 0x1c, 0xbc, 0xac, 0x14, 0xca, 0xb8, 0x7e, 0x41, 0xb6, 0xb1, 0x3a, 0xa3, 0x0a, /* Byte value: 0x75 */ + 0x39, 0x10, 0x3e, 0x29, 0xc8, 0x33, 0xb5, 0x05, 0x86, 0x51, 0x98, 0x73, 0xe8, 0x75, 0x34, 0xbb, /* Byte value: 0x76 */ + 0xc0, 0x13, 0x93, 0xd3, 0x3c, 0x12, 0xc4, 0x69, 0xd6, 0x42, 0x63, 0x5e, 0x1a, 0x10, 0x41, 0x62, /* Byte value: 0x77 */ + 0xfa, 0x95, 0x39, 0x6f, 0x98, 0x0b, 0xc7, 0x99, 0xcc, 0xa9, 0x1c, 0x1a, 0x71, 0xf4, 0xc8, 0x82, /* Byte value: 0x78 */ + 0x5b, 0x1c, 0xcf, 0x47, 0x9e, 0xb7, 0xb2, 0x76, 0x05, 0x1d, 0xf2, 0xc7, 0xa6, 0x22, 0x23, 0x59, /* Byte value: 0x79 */ + 0xa4, 0xf0, 0x89, 0x54, 0xb2, 0xc2, 0x6c, 0x33, 0xae, 0xb9, 0x04, 0x84, 0x91, 0xa6, 0xef, 0x36, /* Byte value: 0x7a */ + 0xba, 0x25, 0x48, 0x9f, 0x8c, 0x05, 0x3a, 0xbe, 0x3f, 0x97, 0x3d, 0x91, 0xc6, 0x45, 0xf7, 0x1d, /* Byte value: 0x7b */ + 0xda, 0xcd, 0xe0, 0x17, 0x92, 0x0c, 0x58, 0x6b, 0x54, 0xb6, 0xed, 0xbe, 0xcb, 0x4d, 0x36, 0x2c, /* Byte value: 0x7c */ + 0x4c, 0xad, 0x64, 0xe1, 0x67, 0xa6, 0x60, 0x75, 0xc6, 0x93, 0x3b, 0x57, 0xfe, 0xb0, 0x8e, 0x30, /* Byte value: 0x7d */ + 0x05, 0x79, 0x7f, 0x7c, 0xb4, 0x7e, 0x19, 0xdc, 0x67, 0x0d, 0xea, 0x59, 0x46, 0x70, 0x04, 0xed, /* Byte value: 0x7e */ + 0xc3, 0x85, 0x07, 0x46, 0x50, 0x38, 0x72, 0x9c, 0x4a, 0xf8, 0x84, 0x69, 0x99, 0x81, 0xfc, 0x39, /* Byte value: 0x7f */ + 0x3a, 0x86, 0xaa, 0xbc, 0xa4, 0x19, 0x03, 0xf0, 0x1a, 0xeb, 0x7f, 0x44, 0x6b, 0xe4, 0x89, 0xe0, /* Byte value: 0x80 */ + 0x82, 0x47, 0xbb, 0xc5, 0x60, 0x91, 0x5c, 0xe8, 0xcd, 0x11, 0xf8, 0x4e, 0xee, 0xfe, 0xa8, 0x2e, /* Byte value: 0x81 */ + 0x18, 0x3a, 0x2a, 0x22, 0xe6, 0x93, 0xf9, 0xa4, 0x6a, 0x99, 0x34, 0x7b, 0x92, 0x02, 0xa1, 0x9d, /* Byte value: 0x82 */ + 0xc9, 0x77, 0xf9, 0xbe, 0xfb, 0xc4, 0x40, 0xe7, 0x84, 0xe2, 0x93, 0xdb, 0x15, 0x61, 0xf4, 0x20, /* Byte value: 0x83 */ + 0x68, 0xfe, 0x0f, 0x96, 0xfd, 0x78, 0x35, 0x08, 0x4d, 0x56, 0x7d, 
0x06, 0xc2, 0xb7, 0x1f, 0xfb, /* Byte value: 0x84 */ + 0xf6, 0x88, 0x2c, 0x7e, 0xeb, 0xa3, 0x5a, 0xcb, 0xf9, 0x04, 0x06, 0xc6, 0x38, 0xf5, 0x79, 0x2d, /* Byte value: 0x85 */ + 0x63, 0x7e, 0x3c, 0x1d, 0x72, 0x23, 0xd4, 0x20, 0xf7, 0x9b, 0x37, 0x18, 0x8e, 0x99, 0x7c, 0x6a, /* Byte value: 0x86 */ + 0x87, 0x3e, 0xc4, 0xb9, 0xd4, 0xef, 0x45, 0x34, 0xaa, 0x1c, 0x12, 0x17, 0xa8, 0x8e, 0xac, 0xc3, /* Byte value: 0x87 */ + 0x49, 0xd4, 0x1b, 0x9d, 0xd3, 0xd8, 0x79, 0xa9, 0xa1, 0x9e, 0xd1, 0x0e, 0xb8, 0xc0, 0x8a, 0xdd, /* Byte value: 0x88 */ + 0x36, 0x9b, 0xbf, 0xad, 0xd7, 0xb1, 0x9e, 0xa2, 0x2f, 0x46, 0x65, 0x98, 0x22, 0xe5, 0x38, 0x4f, /* Byte value: 0x89 */ + 0x91, 0xfd, 0xa2, 0x6c, 0x09, 0x59, 0x44, 0x64, 0x1d, 0x45, 0x86, 0x2b, 0x30, 0xd2, 0x6a, 0x22, /* Byte value: 0x8a */ + 0x97, 0x12, 0x49, 0x85, 0xd1, 0x0d, 0xeb, 0x4d, 0xe6, 0xf2, 0x8b, 0x45, 0xf5, 0x33, 0xd3, 0x94, /* Byte value: 0x8b */ + 0x33, 0xe2, 0xc0, 0xd1, 0x63, 0xcf, 0x87, 0x7e, 0x48, 0x4b, 0x8f, 0xc1, 0x64, 0x95, 0x3c, 0xa2, /* Byte value: 0x8c */ + 0x08, 0x16, 0xa7, 0x1e, 0xe3, 0x71, 0x57, 0xdd, 0x26, 0x77, 0xad, 0x29, 0xcf, 0xbf, 0xde, 0xca, /* Byte value: 0x8d */ + 0x7c, 0xd9, 0x30, 0xa5, 0x68, 0x43, 0x51, 0xfe, 0x12, 0x62, 0x53, 0xa1, 0x19, 0xb4, 0x0f, 0xc9, /* Byte value: 0x8e */ + 0x02, 0xe4, 0x59, 0xe6, 0x48, 0x8d, 0x65, 0xa6, 0xe8, 0x6d, 0xba, 0x9b, 0x43, 0x5f, 0xd6, 0xd3, /* Byte value: 0x8f */ + 0x77, 0x59, 0x03, 0x2e, 0xe7, 0x18, 0xb0, 0xd6, 0xa8, 0xaf, 0x19, 0xbf, 0x55, 0x9a, 0x6c, 0x58, /* Byte value: 0x90 */ + 0x85, 0xda, 0x9d, 0x5f, 0x9c, 0x62, 0x20, 0x92, 0x42, 0x71, 0xa8, 0x8c, 0xeb, 0xd1, 0x7a, 0x10, /* Byte value: 0x91 */ + 0x66, 0x07, 0x43, 0x61, 0xc6, 0x5d, 0xcd, 0xfc, 0x90, 0x96, 0xdd, 0x41, 0xc8, 0xe9, 0x78, 0x87, /* Byte value: 0x92 */ + 0x41, 0xc2, 0xbc, 0x83, 0x30, 0xa9, 0x2e, 0x74, 0x87, 0xe9, 0x7c, 0x27, 0x77, 0x7f, 0x54, 0x17, /* Byte value: 0x93 */ + 0x30, 0x74, 0x54, 0x44, 0x0f, 0xe5, 0x31, 0x8b, 0xd4, 0xf1, 0x68, 0xf6, 0xe7, 0x04, 0x81, 0xf9, /* Byte value: 0x94 */ + 0xcd, 0x7c, 0x4b, 0xb1, 0x6b, 0x1d, 0x8a, 0x68, 0x97, 0x38, 0x24, 0x2e, 0x93, 0xdf, 0x9b, 0x45, /* Byte value: 0x95 */ + 0xd0, 0x3f, 0x1e, 0xef, 0x39, 0xf0, 0x6a, 0x10, 0x9a, 0xac, 0xfa, 0x0c, 0x47, 0xad, 0x3e, 0x35, /* Byte value: 0x96 */ + 0xd9, 0x5b, 0x74, 0x82, 0xfe, 0x26, 0xee, 0x9e, 0xc8, 0x0c, 0x0a, 0x89, 0x48, 0xdc, 0x8b, 0x77, /* Byte value: 0x97 */ + 0xa9, 0x9f, 0x51, 0x36, 0xe5, 0xcd, 0x22, 0x32, 0xef, 0xc3, 0x43, 0xf4, 0x18, 0x69, 0x35, 0x11, /* Byte value: 0x98 */ + 0xaa, 0x09, 0xc5, 0xa3, 0x89, 0xe7, 0x94, 0xc7, 0x73, 0x79, 0xa4, 0xc3, 0x9b, 0xf8, 0x88, 0x4a, /* Byte value: 0x99 */ + 0xc5, 0x6a, 0xec, 0xaf, 0x88, 0x6c, 0xdd, 0xb5, 0xb1, 0x4f, 0x89, 0x07, 0x5c, 0x60, 0x45, 0x8f, /* Byte value: 0x9a */ + 0x60, 0xe8, 0xa8, 0x88, 0x1e, 0x09, 0x62, 0xd5, 0x6b, 0x21, 0xd0, 0x2f, 0x0d, 0x08, 0xc1, 0x31, /* Byte value: 0x9b */ + 0xf8, 0x71, 0x60, 0x89, 0xd0, 0x86, 0xa2, 0x3f, 0x24, 0xc4, 0xa6, 0x81, 0x32, 0xab, 0x1e, 0x51, /* Byte value: 0x9c */ + 0x04, 0x0b, 0xb2, 0x0f, 0x90, 0xd9, 0xca, 0x8f, 0x13, 0xda, 0xb7, 0xf5, 0x86, 0xbe, 0x6f, 0x65, /* Byte value: 0x9d */ + 0x90, 0x8f, 0x6f, 0x1f, 0x2d, 0xfe, 0x97, 0x37, 0x69, 0x92, 0xdb, 0x87, 0xf0, 0x1c, 0x01, 0xaa, /* Byte value: 0x9e */ + 0xe6, 0xa4, 0xa1, 0x42, 0xee, 0x41, 0xf4, 0xb2, 0xb5, 0xea, 0x9f, 0x94, 0x65, 0x48, 0x06, 0x7a, /* Byte value: 0x9f */ + 0x96, 0x60, 0x84, 0xf6, 0xf5, 0xaa, 0x38, 0x1e, 0x92, 0x25, 0xd6, 0xe9, 0x35, 0xfd, 0xb8, 0x1c, /* Byte value: 0xa0 */ + 0xaf, 0x70, 0xba, 0xdf, 0x3d, 0x99, 0x8d, 0x1b, 0x14, 0x74, 0x4e, 0x9a, 0xdd, 0x88, 0x8c, 0xa7, /* Byte value: 
0xa1 */ + 0xc6, 0xfc, 0x78, 0x3a, 0xe4, 0x46, 0x6b, 0x40, 0x2d, 0xf5, 0x6e, 0x30, 0xdf, 0xf1, 0xf8, 0xd4, /* Byte value: 0xa2 */ + 0xa7, 0x66, 0x1d, 0xc1, 0xde, 0xe8, 0xda, 0xc6, 0x32, 0x03, 0xe3, 0xb3, 0x12, 0x37, 0x52, 0x6d, /* Byte value: 0xa3 */ + 0xa1, 0x89, 0xf6, 0x28, 0x06, 0xbc, 0x75, 0xef, 0xc9, 0xb4, 0xee, 0xdd, 0xd7, 0xd6, 0xeb, 0xdb, /* Byte value: 0xa4 */ + 0xd8, 0x29, 0xb9, 0xf1, 0xda, 0x81, 0x3d, 0xcd, 0xbc, 0xdb, 0x57, 0x25, 0x88, 0x12, 0xe0, 0xff, /* Byte value: 0xa5 */ + 0x1e, 0xd5, 0xc1, 0xcb, 0x3e, 0xc7, 0x56, 0x8d, 0x91, 0x2e, 0x39, 0x15, 0x57, 0xe3, 0x18, 0x2b, /* Byte value: 0xa6 */ + 0x4d, 0xdf, 0xa9, 0x92, 0x43, 0x01, 0xb3, 0x26, 0xb2, 0x44, 0x66, 0xfb, 0x3e, 0x7e, 0xe5, 0xb8, /* Byte value: 0xa7 */ + 0x64, 0xe3, 0x1a, 0x87, 0x8e, 0xd0, 0xa8, 0x5a, 0x78, 0xfb, 0x67, 0xda, 0x8b, 0xb6, 0xae, 0x54, /* Byte value: 0xa8 */ + 0x9a, 0x7d, 0x91, 0xe7, 0x86, 0x02, 0xa5, 0x4c, 0xa7, 0x88, 0xcc, 0x35, 0x7c, 0xfc, 0x09, 0xb3, /* Byte value: 0xa9 */ + 0xe3, 0xdd, 0xde, 0x3e, 0x5a, 0x3f, 0xed, 0x6e, 0xd2, 0xe7, 0x75, 0xcd, 0x23, 0x38, 0x02, 0x97, /* Byte value: 0xaa */ + 0xf9, 0x03, 0xad, 0xfa, 0xf4, 0x21, 0x71, 0x6c, 0x50, 0x13, 0xfb, 0x2d, 0xf2, 0x65, 0x75, 0xd9, /* Byte value: 0xab */ + 0xf7, 0xfa, 0xe1, 0x0d, 0xcf, 0x04, 0x89, 0x98, 0x8d, 0xd3, 0x5b, 0x6a, 0xf8, 0x3b, 0x12, 0xa5, /* Byte value: 0xac */ + 0x19, 0x48, 0xe7, 0x51, 0xc2, 0x34, 0x2a, 0xf7, 0x1e, 0x4e, 0x69, 0xd7, 0x52, 0xcc, 0xca, 0x15, /* Byte value: 0xad */ + 0x83, 0x35, 0x76, 0xb6, 0x44, 0x36, 0x8f, 0xbb, 0xb9, 0xc6, 0xa5, 0xe2, 0x2e, 0x30, 0xc3, 0xa6, /* Byte value: 0xae */ + 0x4b, 0x30, 0x42, 0x7b, 0x9b, 0x55, 0x1c, 0x0f, 0x49, 0xf3, 0x6b, 0x95, 0xfb, 0x9f, 0x5c, 0x0e, /* Byte value: 0xaf */ + 0x4f, 0x3b, 0xf0, 0x74, 0x0b, 0x8c, 0xd6, 0x80, 0x5a, 0x29, 0xdc, 0x60, 0x7d, 0x21, 0x33, 0x6b, /* Byte value: 0xb0 */ + 0x7b, 0x44, 0x16, 0x3f, 0x94, 0xb0, 0x2d, 0x84, 0x9d, 0x02, 0x03, 0x63, 0x1c, 0x9b, 0xdd, 0xf7, /* Byte value: 0xb1 */ + 0xef, 0xc0, 0xcb, 0x2f, 0x29, 0x97, 0x70, 0x3c, 0xe7, 0x4a, 0x6f, 0x11, 0x6a, 0x39, 0xb3, 0x38, /* Byte value: 0xb2 */ + 0x9d, 0xe0, 0xb7, 0x7d, 0x7a, 0xf1, 0xd9, 0x36, 0x28, 0xe8, 0x9c, 0xf7, 0x79, 0xd3, 0xdb, 0x8d, /* Byte value: 0xb3 */ + 0x74, 0xcf, 0x97, 0xbb, 0x8b, 0x32, 0x06, 0x23, 0x34, 0x15, 0xfe, 0x88, 0xd6, 0x0b, 0xd1, 0x03, /* Byte value: 0xb4 */ + 0xc7, 0x8e, 0xb5, 0x49, 0xc0, 0xe1, 0xb8, 0x13, 0x59, 0x22, 0x33, 0x9c, 0x1f, 0x3f, 0x93, 0x5c, /* Byte value: 0xb5 */ + 0xca, 0xe1, 0x6d, 0x2b, 0x97, 0xee, 0xf6, 0x12, 0x18, 0x58, 0x74, 0xec, 0x96, 0xf0, 0x49, 0x7b, /* Byte value: 0xb6 */ + 0x6c, 0xf5, 0xbd, 0x99, 0x6d, 0xa1, 0xff, 0x87, 0x5e, 0x8c, 0xca, 0xf3, 0x44, 0x09, 0x70, 0x9e, /* Byte value: 0xb7 */ + 0xf3, 0xf1, 0x53, 0x02, 0x5f, 0xdd, 0x43, 0x17, 0x9e, 0x09, 0xec, 0x9f, 0x7e, 0x85, 0x7d, 0xc0, /* Byte value: 0xb8 */ + 0x31, 0x06, 0x99, 0x37, 0x2b, 0x42, 0xe2, 0xd8, 0xa0, 0x26, 0x35, 0x5a, 0x27, 0xca, 0xea, 0x71, /* Byte value: 0xb9 */ + 0x3e, 0x8d, 0x18, 0xb3, 0x34, 0xc0, 0xc9, 0x7f, 0x09, 0x31, 0xc8, 0xb1, 0xed, 0x5a, 0xe6, 0x85, /* Byte value: 0xba */ + 0x01, 0x72, 0xcd, 0x73, 0x24, 0xa7, 0xd3, 0x53, 0x74, 0xd7, 0x5d, 0xac, 0xc0, 0xce, 0x6b, 0x88, /* Byte value: 0xbb */ + 0xdb, 0xbf, 0x2d, 0x64, 0xb6, 0xab, 0x8b, 0x38, 0x20, 0x61, 0xb0, 0x12, 0x0b, 0x83, 0x5d, 0xa4, /* Byte value: 0xbc */ + 0xc4, 0x18, 0x21, 0xdc, 0xac, 0xcb, 0x0e, 0xe6, 0xc5, 0x98, 0xd4, 0xab, 0x9c, 0xae, 0x2e, 0x07, /* Byte value: 0xbd */ + 0xe1, 0x39, 0x87, 0xd8, 0x12, 0xb2, 0x88, 0xc8, 0x3a, 0x8a, 0xcf, 0x56, 0x60, 0x67, 0xd4, 0x44, /* Byte value: 0xbe */ + 0xed, 0x24, 0x92, 0xc9, 0x61, 0x1a, 
0x15, 0x9a, 0x0f, 0x27, 0xd5, 0x8a, 0x29, 0x66, 0x65, 0xeb, /* Byte value: 0xbf */ + 0xe0, 0x4b, 0x4a, 0xab, 0x36, 0x15, 0x5b, 0x9b, 0x4e, 0x5d, 0x92, 0xfa, 0xa0, 0xa9, 0xbf, 0xcc, /* Byte value: 0xc0 */ + 0x14, 0x27, 0x3f, 0x33, 0x95, 0x3b, 0x64, 0xf6, 0x5f, 0x34, 0x2e, 0xa7, 0xdb, 0x03, 0x10, 0x32, /* Byte value: 0xc1 */ + 0xdf, 0xb4, 0x9f, 0x6b, 0x26, 0x72, 0x41, 0xb7, 0x33, 0xbb, 0x07, 0xe7, 0x8d, 0x3d, 0x32, 0xc1, /* Byte value: 0xc2 */ + 0x50, 0x9c, 0xfc, 0xcc, 0x11, 0xec, 0x53, 0x5e, 0xbf, 0xd0, 0xb8, 0xd9, 0xea, 0x0c, 0x40, 0xc8, /* Byte value: 0xc3 */ + 0xbe, 0x2e, 0xfa, 0x90, 0x1c, 0xdc, 0xf0, 0x31, 0x2c, 0x4d, 0x8a, 0x64, 0x40, 0xfb, 0x98, 0x78, /* Byte value: 0xc4 */ + 0xab, 0x7b, 0x08, 0xd0, 0xad, 0x40, 0x47, 0x94, 0x07, 0xae, 0xf9, 0x6f, 0x5b, 0x36, 0xe3, 0xc2, /* Byte value: 0xc5 */ + 0xae, 0x02, 0x77, 0xac, 0x19, 0x3e, 0x5e, 0x48, 0x60, 0xa3, 0x13, 0x36, 0x1d, 0x46, 0xe7, 0x2f, /* Byte value: 0xc6 */ + 0x51, 0xee, 0x31, 0xbf, 0x35, 0x4b, 0x80, 0x0d, 0xcb, 0x07, 0xe5, 0x75, 0x2a, 0xc2, 0x2b, 0x40, /* Byte value: 0xc7 */ + 0x12, 0xc8, 0xd4, 0xda, 0x4d, 0x6f, 0xcb, 0xdf, 0xa4, 0x83, 0x23, 0xc9, 0x1e, 0xe2, 0xa9, 0x84, /* Byte value: 0xc8 */ + 0x8f, 0x28, 0x63, 0xa7, 0x37, 0x9e, 0x12, 0xe9, 0x8c, 0x6b, 0xbf, 0x3e, 0x67, 0x31, 0x72, 0x09, /* Byte value: 0xc9 */ + 0x1d, 0x43, 0x55, 0x5e, 0x52, 0xed, 0xe0, 0x78, 0x0d, 0x94, 0xde, 0x22, 0xd4, 0x72, 0xa5, 0x70, /* Byte value: 0xca */ + 0xf4, 0x6c, 0x75, 0x98, 0xa3, 0x2e, 0x3f, 0x6d, 0x11, 0x69, 0xbc, 0x5d, 0x7b, 0xaa, 0xaf, 0xfe, /* Byte value: 0xcb */ + 0xd3, 0xa9, 0x8a, 0x7a, 0x55, 0xda, 0xdc, 0xe5, 0x06, 0x16, 0x1d, 0x3b, 0xc4, 0x3c, 0x83, 0x6e, /* Byte value: 0xcc */ + 0xff, 0xec, 0x46, 0x13, 0x2c, 0x75, 0xde, 0x45, 0xab, 0xa4, 0xf6, 0x43, 0x37, 0x84, 0xcc, 0x6f, /* Byte value: 0xcd */ + 0x47, 0x2d, 0x57, 0x6a, 0xe8, 0xfd, 0x81, 0x5d, 0x7c, 0x5e, 0x71, 0x49, 0xb2, 0x9e, 0xed, 0xa1, /* Byte value: 0xce */ + 0x15, 0x55, 0xf2, 0x40, 0xb1, 0x9c, 0xb7, 0xa5, 0x2b, 0xe3, 0x73, 0x0b, 0x1b, 0xcd, 0x7b, 0xba, /* Byte value: 0xcf */ + 0x8b, 0x23, 0xd1, 0xa8, 0xa7, 0x47, 0xd8, 0x66, 0x9f, 0xb1, 0x08, 0xcb, 0xe1, 0x8f, 0x1d, 0x6c, /* Byte value: 0xd0 */ + 0xa0, 0xfb, 0x3b, 0x5b, 0x22, 0x1b, 0xa6, 0xbc, 0xbd, 0x63, 0xb3, 0x71, 0x17, 0x18, 0x80, 0x53, /* Byte value: 0xd1 */ + 0xf5, 0x1e, 0xb8, 0xeb, 0x87, 0x89, 0xec, 0x3e, 0x65, 0xbe, 0xe1, 0xf1, 0xbb, 0x64, 0xc4, 0x76, /* Byte value: 0xd2 */ + 0x28, 0x4e, 0x7e, 0x66, 0xe9, 0x76, 0xc8, 0x2f, 0xbe, 0x68, 0x5c, 0x8d, 0x75, 0x06, 0x20, 0x64, /* Byte value: 0xd3 */ + 0x57, 0x01, 0xda, 0x56, 0xed, 0x1f, 0x2f, 0x24, 0x30, 0xb0, 0xe8, 0x1b, 0xef, 0x23, 0x92, 0xf6, /* Byte value: 0xd4 */ + 0xb2, 0x33, 0xef, 0x81, 0x6f, 0x74, 0x6d, 0x63, 0x19, 0xe0, 0x90, 0xb8, 0x09, 0xfa, 0x29, 0xd7, /* Byte value: 0xd5 */ + 0x24, 0x53, 0x6b, 0x77, 0x9a, 0xde, 0x55, 0x7d, 0x8b, 0xc5, 0x46, 0x51, 0x3c, 0x07, 0x91, 0xcb, /* Byte value: 0xd6 */ + 0xcf, 0x98, 0x12, 0x57, 0x23, 0x90, 0xef, 0xce, 0x7f, 0x55, 0x9e, 0xb5, 0xd0, 0x80, 0x4d, 0x96, /* Byte value: 0xd7 */ + 0x89, 0xc7, 0x88, 0x4e, 0xef, 0xca, 0xbd, 0xc0, 0x77, 0xdc, 0xb2, 0x50, 0xa2, 0xd0, 0xcb, 0xbf, /* Byte value: 0xd8 */ + 0xdd, 0x50, 0xc6, 0x8d, 0x6e, 0xff, 0x24, 0x11, 0xdb, 0xd6, 0xbd, 0x7c, 0xce, 0x62, 0xe4, 0x12, /* Byte value: 0xd9 */ + 0x55, 0xe5, 0x83, 0xb0, 0xa5, 0x92, 0x4a, 0x82, 0xd8, 0xdd, 0x52, 0x80, 0xac, 0x7c, 0x44, 0x25, /* Byte value: 0xda */ + 0x2b, 0xd8, 0xea, 0xf3, 0x85, 0x5c, 0x7e, 0xda, 0x22, 0xd2, 0xbb, 0xba, 0xf6, 0x97, 0x9d, 0x3f, /* Byte value: 0xdb */ + 0x9b, 0x0f, 0x5c, 0x94, 0xa2, 0xa5, 0x76, 0x1f, 0xd3, 0x5f, 0x91, 0x99, 0xbc, 
0x32, 0x62, 0x3b, /* Byte value: 0xdc */ + 0xeb, 0xcb, 0x79, 0x20, 0xb9, 0x4e, 0xba, 0xb3, 0xf4, 0x90, 0xd8, 0xe4, 0xec, 0x87, 0xdc, 0x5d, /* Byte value: 0xdd */ + 0xce, 0xea, 0xdf, 0x24, 0x07, 0x37, 0x3c, 0x9d, 0x0b, 0x82, 0xc3, 0x19, 0x10, 0x4e, 0x26, 0x1e, /* Byte value: 0xde */ + 0xe5, 0x32, 0x35, 0xd7, 0x82, 0x6b, 0x42, 0x47, 0x29, 0x50, 0x78, 0xa3, 0xe6, 0xd9, 0xbb, 0x21, /* Byte value: 0xdf */ + 0xb1, 0xa5, 0x7b, 0x14, 0x03, 0x5e, 0xdb, 0x96, 0x85, 0x5a, 0x77, 0x8f, 0x8a, 0x6b, 0x94, 0x8c, /* Byte value: 0xe0 */ + 0x22, 0xbc, 0x80, 0x9e, 0x42, 0x8a, 0xfa, 0x54, 0x70, 0x72, 0x4b, 0x3f, 0xf9, 0xe6, 0x28, 0x7d, /* Byte value: 0xe1 */ + 0xbd, 0xb8, 0x6e, 0x05, 0x70, 0xf6, 0x46, 0xc4, 0xb0, 0xf7, 0x6d, 0x53, 0xc3, 0x6a, 0x25, 0x23, /* Byte value: 0xe2 */ + 0xe9, 0x2f, 0x20, 0xc6, 0xf1, 0xc3, 0xdf, 0x15, 0x1c, 0xfd, 0x62, 0x7f, 0xaf, 0xd8, 0x0a, 0x8e, /* Byte value: 0xe3 */ + 0x23, 0xce, 0x4d, 0xed, 0x66, 0x2d, 0x29, 0x07, 0x04, 0xa5, 0x16, 0x93, 0x39, 0x28, 0x43, 0xf5, /* Byte value: 0xe4 */ + 0x75, 0xbd, 0x5a, 0xc8, 0xaf, 0x95, 0xd5, 0x70, 0x40, 0xc2, 0xa3, 0x24, 0x16, 0xc5, 0xba, 0x8b, /* Byte value: 0xe5 */ + 0x9f, 0x04, 0xee, 0x9b, 0x32, 0x7c, 0xbc, 0x90, 0xc0, 0x85, 0x26, 0x6c, 0x3a, 0x8c, 0x0d, 0x5e, /* Byte value: 0xe6 */ + 0xa2, 0x1f, 0x62, 0xbd, 0x6a, 0x96, 0xc3, 0x1a, 0x55, 0x0e, 0x09, 0xea, 0x54, 0x47, 0x56, 0x80, /* Byte value: 0xe7 */ + 0x0f, 0x8b, 0x81, 0x84, 0x1f, 0x82, 0x2b, 0xa7, 0xa9, 0x17, 0xfd, 0xeb, 0xca, 0x90, 0x0c, 0xf4, /* Byte value: 0xe8 */ + 0x54, 0x97, 0x4e, 0xc3, 0x81, 0x35, 0x99, 0xd1, 0xac, 0x0a, 0x0f, 0x2c, 0x6c, 0xb2, 0x2f, 0xad, /* Byte value: 0xe9 */ + 0x86, 0x4c, 0x09, 0xca, 0xf0, 0x48, 0x96, 0x67, 0xde, 0xcb, 0x4f, 0xbb, 0x68, 0x40, 0xc7, 0x4b, /* Byte value: 0xea */ + 0x93, 0x19, 0xfb, 0x8a, 0x41, 0xd4, 0x21, 0xc2, 0xf5, 0x28, 0x3c, 0xb0, 0x73, 0x8d, 0xbc, 0xf1, /* Byte value: 0xeb */ + 0x5a, 0x6e, 0x02, 0x34, 0xba, 0x10, 0x61, 0x25, 0x71, 0xca, 0xaf, 0x6b, 0x66, 0xec, 0x48, 0xd1, /* Byte value: 0xec */ + 0x3d, 0x1b, 0x8c, 0x26, 0x58, 0xea, 0x7f, 0x8a, 0x95, 0x8b, 0x2f, 0x86, 0x6e, 0xcb, 0x5b, 0xde, /* Byte value: 0xed */ + 0x94, 0x84, 0xdd, 0x10, 0xbd, 0x27, 0x5d, 0xb8, 0x7a, 0x48, 0x6c, 0x72, 0x76, 0xa2, 0x6e, 0xcf, /* Byte value: 0xee */ + 0x56, 0x73, 0x17, 0x25, 0xc9, 0xb8, 0xfc, 0x77, 0x44, 0x67, 0xb5, 0xb7, 0x2f, 0xed, 0xf9, 0x7e, /* Byte value: 0xef */ + 0xbf, 0x5c, 0x37, 0xe3, 0x38, 0x7b, 0x23, 0x62, 0x58, 0x9a, 0xd7, 0xc8, 0x80, 0x35, 0xf3, 0xf0, /* Byte value: 0xf0 */ + 0x95, 0xf6, 0x10, 0x63, 0x99, 0x80, 0x8e, 0xeb, 0x0e, 0x9f, 0x31, 0xde, 0xb6, 0x6c, 0x05, 0x47, /* Byte value: 0xf1 */ + 0xb9, 0xb3, 0xdc, 0x0a, 0xe0, 0x2f, 0x8c, 0x4b, 0xa3, 0x2d, 0xda, 0xa6, 0x45, 0xd4, 0x4a, 0x46, /* Byte value: 0xf2 */ + 0x11, 0x5e, 0x40, 0x4f, 0x21, 0x45, 0x7d, 0x2a, 0x38, 0x39, 0xc4, 0xfe, 0x9d, 0x73, 0x14, 0xdf, /* Byte value: 0xf3 */ + 0x0b, 0x80, 0x33, 0x8b, 0x8f, 0x5b, 0xe1, 0x28, 0xba, 0xcd, 0x4a, 0x1e, 0x4c, 0x2e, 0x63, 0x91, /* Byte value: 0xf4 */ + 0x71, 0xb6, 0xe8, 0xc7, 0x3f, 0x4c, 0x1f, 0xff, 0x53, 0x18, 0x14, 0xd1, 0x90, 0x7b, 0xd5, 0xee, /* Byte value: 0xf5 */ + 0xb0, 0xd7, 0xb6, 0x67, 0x27, 0xf9, 0x08, 0xc5, 0xf1, 0x8d, 0x2a, 0x23, 0x4a, 0xa5, 0xff, 0x04, /* Byte value: 0xf6 */ + 0x1c, 0x31, 0x98, 0x2d, 0x76, 0x4a, 0x33, 0x2b, 0x79, 0x43, 0x83, 0x8e, 0x14, 0xbc, 0xce, 0xf8, /* Byte value: 0xf7 */ + 0xdc, 0x22, 0x0b, 0xfe, 0x4a, 0x58, 0xf7, 0x42, 0xaf, 0x01, 0xe0, 0xd0, 0x0e, 0xac, 0x8f, 0x9a, /* Byte value: 0xf8 */ + 0xa8, 0xed, 0x9c, 0x45, 0xc1, 0x6a, 0xf1, 0x61, 0x9b, 0x14, 0x1e, 0x58, 0xd8, 0xa7, 0x5e, 0x99, /* Byte value: 0xf9 */ + 
0xc1, 0x61, 0x5e, 0xa0, 0x18, 0xb5, 0x17, 0x3a, 0xa2, 0x95, 0x3e, 0xf2, 0xda, 0xde, 0x2a, 0xea, /* Byte value: 0xfa */ + 0x2a, 0xaa, 0x27, 0x80, 0xa1, 0xfb, 0xad, 0x89, 0x56, 0x05, 0xe6, 0x16, 0x36, 0x59, 0xf6, 0xb7, /* Byte value: 0xfb */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xfc */ + 0xac, 0xe6, 0x2e, 0x4a, 0x51, 0xb3, 0x3b, 0xee, 0x88, 0xce, 0xa9, 0xad, 0x5e, 0x19, 0x31, 0xfc, /* Byte value: 0xfd */ + 0x48, 0xa6, 0xd6, 0xee, 0xf7, 0x7f, 0xaa, 0xfa, 0xd5, 0x49, 0x8c, 0xa2, 0x78, 0x0e, 0xe1, 0x55, /* Byte value: 0xfe */ + 0x7a, 0x36, 0xdb, 0x4c, 0xb0, 0x17, 0xfe, 0xd7, 0xe9, 0xd5, 0x5e, 0xcf, 0xdc, 0x55, 0xb6, 0x7f, /* Byte value: 0xff */ +}; + +CRYPTOPP_ALIGN_DATA(16) const uint_8t precomputedLSTable[16 * 256 * 16] CRYPTOPP_SECTION_ALIGN16 = { + /* Matrix row: 0 */ + 0xe9, 0xfb, 0xd5, 0x0c, 0x7a, 0xc0, 0x80, 0x96, 0x19, 0x11, 0x87, 0x93, 0x1b, 0xc9, 0xae, 0xb5, /* Byte value: 0x01 */ + 0x19, 0x08, 0xe0, 0x8c, 0xb2, 0x17, 0x1a, 0xce, 0x7b, 0x32, 0xfc, 0xab, 0xf8, 0xfe, 0xf2, 0x0a, /* Byte value: 0x02 */ + 0xbf, 0xcb, 0xd0, 0xa2, 0x50, 0xb2, 0xdc, 0x77, 0xc0, 0xbd, 0xca, 0xef, 0x4e, 0x88, 0xc7, 0x89, /* Byte value: 0x03 */ + 0x62, 0x41, 0x10, 0x1a, 0x5e, 0x63, 0x42, 0xd6, 0x69, 0xc4, 0x12, 0x3c, 0xd3, 0x93, 0x13, 0xc0, /* Byte value: 0x04 */ + 0x4f, 0x38, 0xe5, 0x22, 0x98, 0x65, 0x46, 0x2f, 0xa2, 0x9e, 0xb1, 0xd7, 0xad, 0xbf, 0x9b, 0x36, /* Byte value: 0x05 */ + 0x38, 0xb3, 0x39, 0x6a, 0x11, 0x6f, 0x4a, 0xe7, 0x3c, 0x70, 0x97, 0x29, 0x2f, 0xcb, 0xc0, 0xef, /* Byte value: 0x06 */ + 0xfb, 0x1f, 0xb7, 0xc2, 0x06, 0x7d, 0x56, 0x4d, 0x08, 0x35, 0x78, 0xfd, 0x96, 0x0f, 0xfe, 0x68, /* Byte value: 0x07 */ + 0x4a, 0x88, 0xf8, 0x9b, 0xc3, 0xa4, 0x79, 0x73, 0xc7, 0x94, 0xe7, 0x89, 0xa3, 0xc5, 0x09, 0xaa, /* Byte value: 0x08 */ + 0xc1, 0x32, 0x3d, 0x8d, 0xe7, 0x07, 0xbb, 0x33, 0xb7, 0x41, 0x72, 0x26, 0x6b, 0x9f, 0xb4, 0xdf, /* Byte value: 0x09 */ + 0x6a, 0x7c, 0x99, 0x8e, 0x18, 0x37, 0x9b, 0xf7, 0x20, 0xd4, 0x23, 0x72, 0x1d, 0x3c, 0x7e, 0x63, /* Byte value: 0x0a */ + 0x0e, 0x5c, 0x9f, 0xfb, 0x95, 0x6b, 0xf3, 0x49, 0x0f, 0x1c, 0x55, 0x9b, 0x7b, 0x42, 0x30, 0x4b, /* Byte value: 0x0b */ + 0x97, 0x02, 0x38, 0x23, 0xcd, 0x75, 0xe7, 0xd2, 0x6e, 0xed, 0x3f, 0x5a, 0x3e, 0xde, 0xdd, 0xe3, /* Byte value: 0x0c */ + 0x0b, 0xec, 0x82, 0x42, 0xce, 0xaa, 0xcc, 0x15, 0x6a, 0x16, 0x03, 0xc5, 0x75, 0x38, 0xa2, 0xd7, /* Byte value: 0x0d */ + 0xa5, 0x12, 0x3b, 0xf8, 0x6a, 0x5b, 0xd3, 0x8d, 0x98, 0x89, 0x04, 0xcf, 0x0d, 0xe1, 0xfa, 0xf7, /* Byte value: 0x0e */ + 0xba, 0x7b, 0xcd, 0x1b, 0x0b, 0x73, 0xe3, 0x2b, 0xa5, 0xb7, 0x9c, 0xb1, 0x40, 0xf2, 0x55, 0x15, /* Byte value: 0x0f */ + 0x33, 0x5f, 0xbb, 0x28, 0xdf, 0xc5, 0x86, 0xf2, 0x56, 0x66, 0x94, 0xec, 0x5a, 0xf3, 0x62, 0x38, /* Byte value: 0x10 */ + 0x31, 0xc1, 0x08, 0x0d, 0x2f, 0xd0, 0x21, 0x6b, 0xd5, 0x62, 0x09, 0x1e, 0x88, 0xa8, 0xe8, 0x60, /* Byte value: 0x11 */ + 0xed, 0x04, 0x70, 0x46, 0x59, 0xea, 0x0d, 0x67, 0xdc, 0x19, 0x7e, 0xb4, 0x7c, 0x7f, 0x79, 0x05, /* Byte value: 0x12 */ + 0xe4, 0x76, 0x41, 0x21, 0x67, 0x55, 0x66, 0xeb, 0x35, 0x0b, 0xe0, 0x83, 0xdb, 0x1c, 0x51, 0x8a, /* Byte value: 0x13 */ + 0x58, 0x6c, 0x9a, 0x55, 0xbf, 0x19, 0xaf, 0xa8, 0xd6, 0xb0, 0x18, 0xe7, 0x2e, 0x03, 0x59, 0x77, /* Byte value: 0x14 */ + 0x1e, 0x26, 0x4e, 0x10, 0x19, 0xc3, 0x82, 0x0b, 0x9d, 0x3c, 0x37, 0x07, 0x24, 0xdf, 0xea, 0xce, /* Byte value: 0x15 */ + 0xc9, 0x0f, 0xb4, 0x19, 0xa1, 0x53, 0x62, 0x12, 0xfe, 0x51, 0x43, 0x68, 0xa5, 0x30, 0xd9, 0x7c, /* Byte value: 0x16 */ + 0xf4, 0x0c, 0x90, 0xca, 0xeb, 0xfd, 
0x17, 0xa9, 0xa7, 0x2b, 0x82, 0x1f, 0x84, 0x81, 0x8b, 0x0f, /* Byte value: 0x17 */ + 0xff, 0xe0, 0x12, 0x88, 0x25, 0x57, 0xdb, 0xbc, 0xcd, 0x3d, 0x81, 0xda, 0xf1, 0xb9, 0x29, 0xd8, /* Byte value: 0x18 */ + 0x85, 0xe6, 0x5a, 0xed, 0xb1, 0xc8, 0x31, 0x09, 0x7f, 0xc9, 0xc0, 0x34, 0xb3, 0x18, 0x8d, 0x3e, /* Byte value: 0x19 */ + 0xd3, 0xd6, 0x5f, 0x43, 0x9b, 0xba, 0x6d, 0xe8, 0xa6, 0x65, 0x8d, 0x48, 0xe6, 0x59, 0xe4, 0x02, /* Byte value: 0x1a */ + 0x2b, 0x18, 0xe3, 0x57, 0x15, 0x39, 0x2e, 0x91, 0x8d, 0x56, 0xc7, 0x3e, 0xcb, 0xc1, 0xd5, 0x1e, /* Byte value: 0x1b */ + 0x30, 0x8e, 0xb0, 0xfe, 0x57, 0x3b, 0x93, 0xc6, 0x75, 0x60, 0xa6, 0x67, 0xe1, 0x64, 0xad, 0x4c, /* Byte value: 0x1c */ + 0x17, 0x54, 0x7f, 0x77, 0x27, 0x7c, 0xe9, 0x87, 0x74, 0x2e, 0xa9, 0x30, 0x83, 0xbc, 0xc2, 0x41, /* Byte value: 0x1d */ + 0x12, 0xe4, 0x62, 0xce, 0x7c, 0xbd, 0xd6, 0xdb, 0x11, 0x24, 0xff, 0x6e, 0x8d, 0xc6, 0x50, 0xdd, /* Byte value: 0x1e */ + 0xc3, 0xac, 0x8e, 0xa8, 0x17, 0x12, 0x1c, 0xaa, 0x34, 0x45, 0xef, 0xd4, 0xb9, 0xc4, 0x3e, 0x87, /* Byte value: 0x1f */ + 0x1f, 0x69, 0xf6, 0xe3, 0x61, 0x28, 0x30, 0xa6, 0x3d, 0x3e, 0x98, 0x7e, 0x4d, 0x13, 0xaf, 0xe2, /* Byte value: 0x20 */ + 0x9c, 0xee, 0xba, 0x61, 0x03, 0xdf, 0x2b, 0xc7, 0x04, 0xfb, 0x3c, 0x9f, 0x4b, 0xe6, 0x7f, 0x34, /* Byte value: 0x21 */ + 0x1a, 0xd9, 0xeb, 0x5a, 0x3a, 0xe9, 0x0f, 0xfa, 0x58, 0x34, 0xce, 0x20, 0x43, 0x69, 0x3d, 0x7e, /* Byte value: 0x22 */ + 0x1d, 0xf7, 0x45, 0xc6, 0x91, 0x3d, 0x97, 0x3f, 0xbe, 0x3a, 0x05, 0x8c, 0x9f, 0x48, 0x25, 0xba, /* Byte value: 0x23 */ + 0xb6, 0xb9, 0xe1, 0xc5, 0x6e, 0x0d, 0xb7, 0xfb, 0x29, 0xaf, 0x54, 0xd8, 0xe9, 0xeb, 0xef, 0x06, /* Byte value: 0x24 */ + 0x14, 0x85, 0x74, 0xa1, 0xaf, 0x82, 0xfc, 0xb3, 0x57, 0x28, 0x9b, 0xbb, 0x38, 0x2b, 0x0d, 0x35, /* Byte value: 0x25 */ + 0x51, 0x1e, 0xab, 0x32, 0x81, 0xa6, 0xc4, 0x24, 0x3f, 0xa2, 0x86, 0xd0, 0x89, 0x60, 0x71, 0xf8, /* Byte value: 0x26 */ + 0xd6, 0x66, 0x42, 0xfa, 0xc0, 0x7b, 0x52, 0xb4, 0xc3, 0x6f, 0xdb, 0x16, 0xe8, 0x23, 0x76, 0x9e, /* Byte value: 0x27 */ + 0x56, 0x30, 0x05, 0xae, 0x2a, 0x72, 0x5c, 0xe1, 0xd9, 0xac, 0x4d, 0x7c, 0x55, 0x41, 0x69, 0x3c, /* Byte value: 0x28 */ + 0xee, 0xd5, 0x7b, 0x90, 0xd1, 0x14, 0x18, 0x53, 0xff, 0x1f, 0x4c, 0x3f, 0xc7, 0xe8, 0xb6, 0x71, /* Byte value: 0x29 */ + 0xa0, 0xa2, 0x26, 0x41, 0x31, 0x9a, 0xec, 0xd1, 0xfd, 0x83, 0x52, 0x91, 0x03, 0x9b, 0x68, 0x6b, /* Byte value: 0x2a */ + 0x39, 0xfc, 0x81, 0x99, 0x69, 0x84, 0xf8, 0x4a, 0x9c, 0x72, 0x38, 0x50, 0x46, 0x07, 0x85, 0xc3, /* Byte value: 0x2b */ + 0xac, 0x60, 0x0a, 0x9f, 0x54, 0xe4, 0xb8, 0x01, 0x71, 0x9b, 0x9a, 0xf8, 0xaa, 0x82, 0xd2, 0x78, /* Byte value: 0x2c */ + 0x04, 0xff, 0xa5, 0x4a, 0x23, 0x2a, 0x8d, 0xf1, 0xc5, 0x08, 0xf9, 0x27, 0x67, 0xb6, 0xd7, 0xb0, /* Byte value: 0x2d */ + 0xcf, 0x6e, 0xa2, 0x76, 0x72, 0x6c, 0x48, 0x7a, 0xb8, 0x5d, 0x27, 0xbd, 0x10, 0xdd, 0x84, 0x94, /* Byte value: 0x2e */ + 0x71, 0xea, 0xca, 0x27, 0x5a, 0x35, 0x26, 0xa0, 0xd8, 0xe2, 0x42, 0x2b, 0x37, 0x99, 0x06, 0x31, /* Byte value: 0x2f */ + 0x6e, 0x83, 0x3c, 0xc4, 0x3b, 0x1d, 0x16, 0x06, 0xe5, 0xdc, 0xda, 0x55, 0x7a, 0x8a, 0xa9, 0xd3, /* Byte value: 0x30 */ + 0x75, 0x15, 0x6f, 0x6d, 0x79, 0x1f, 0xab, 0x51, 0x1d, 0xea, 0xbb, 0x0c, 0x50, 0x2f, 0xd1, 0x81, /* Byte value: 0x31 */ + 0x9b, 0xc0, 0x14, 0xfd, 0xa8, 0x0b, 0xb3, 0x02, 0xe2, 0xf5, 0xf7, 0x33, 0x97, 0xc7, 0x67, 0xf0, /* Byte value: 0x32 */ + 0x5d, 0xdc, 0x87, 0xec, 0xe4, 0xd8, 0x90, 0xf4, 0xb3, 0xba, 0x4e, 0xb9, 0x20, 0x79, 0xcb, 0xeb, /* Byte value: 0x33 */ + 0xe8, 0xb4, 0x6d, 0xff, 0x02, 0x2b, 0x32, 0x3b, 0xb9, 0x13, 0x28, 0xea, 0x72, 
0x05, 0xeb, 0x99, /* Byte value: 0x34 */ + 0xdb, 0xeb, 0xd6, 0xd7, 0xdd, 0xee, 0xb4, 0xc9, 0xef, 0x75, 0xbc, 0x06, 0x28, 0xf6, 0x89, 0xa1, /* Byte value: 0x35 */ + 0x82, 0xc8, 0xf4, 0x71, 0x1a, 0x1c, 0xa9, 0xcc, 0x99, 0xc7, 0x0b, 0x98, 0x6f, 0x39, 0x95, 0xfa, /* Byte value: 0x36 */ + 0xbe, 0x84, 0x68, 0x51, 0x28, 0x59, 0x6e, 0xda, 0x60, 0xbf, 0x65, 0x96, 0x27, 0x44, 0x82, 0xa5, /* Byte value: 0x37 */ + 0xb8, 0xe5, 0x7e, 0x3e, 0xfb, 0x66, 0x44, 0xb2, 0x26, 0xb3, 0x01, 0x43, 0x92, 0xa9, 0xdf, 0x4d, /* Byte value: 0x38 */ + 0xe7, 0xa7, 0x4a, 0xf7, 0xef, 0xab, 0x73, 0xdf, 0x16, 0x0d, 0xd2, 0x08, 0x60, 0x8b, 0x9e, 0xfe, /* Byte value: 0x39 */ + 0x25, 0x44, 0x7c, 0xac, 0x80, 0x52, 0xdd, 0xd8, 0x82, 0x4a, 0x92, 0xa5, 0xb0, 0x83, 0xe5, 0x55, /* Byte value: 0x3a */ + 0x8b, 0xba, 0xc5, 0x16, 0x24, 0xa3, 0xc2, 0x40, 0x70, 0xd5, 0x95, 0xaf, 0xc8, 0x5a, 0xbd, 0x75, /* Byte value: 0x3b */ + 0x3b, 0x62, 0x32, 0xbc, 0x99, 0x91, 0x5f, 0xd3, 0x1f, 0x76, 0xa5, 0xa2, 0x94, 0x5c, 0x0f, 0x9b, /* Byte value: 0x3c */ + 0x5a, 0xf2, 0x29, 0x70, 0x4f, 0x0c, 0x08, 0x31, 0x55, 0xb4, 0x85, 0x15, 0xfc, 0x58, 0xd3, 0x2f, /* Byte value: 0x3d */ + 0xc7, 0x53, 0x2b, 0xe2, 0x34, 0x38, 0x91, 0x5b, 0xf1, 0x4d, 0x16, 0xf3, 0xde, 0x72, 0xe9, 0x37, /* Byte value: 0x3e */ + 0xef, 0x9a, 0xc3, 0x63, 0xa9, 0xff, 0xaa, 0xfe, 0x5f, 0x1d, 0xe3, 0x46, 0xae, 0x24, 0xf3, 0x5d, /* Byte value: 0x3f */ + 0x32, 0x10, 0x03, 0xdb, 0xa7, 0x2e, 0x34, 0x5f, 0xf6, 0x64, 0x3b, 0x95, 0x33, 0x3f, 0x27, 0x14, /* Byte value: 0x40 */ + 0x6c, 0x1d, 0x8f, 0xe1, 0xcb, 0x08, 0xb1, 0x9f, 0x66, 0xd8, 0x47, 0xa7, 0xa8, 0xd1, 0x23, 0x8b, /* Byte value: 0x41 */ + 0x8e, 0x0a, 0xd8, 0xaf, 0x7f, 0x62, 0xfd, 0x1c, 0x15, 0xdf, 0xc3, 0xf1, 0xc6, 0x20, 0x2f, 0xe9, /* Byte value: 0x42 */ + 0x94, 0xd3, 0x33, 0xf5, 0x45, 0x8b, 0xf2, 0xe6, 0x4d, 0xeb, 0x0d, 0xd1, 0x85, 0x49, 0x12, 0x97, /* Byte value: 0x43 */ + 0x93, 0xfd, 0x9d, 0x69, 0xee, 0x5f, 0x6a, 0x23, 0xab, 0xe5, 0xc6, 0x7d, 0x59, 0x68, 0x0a, 0x53, /* Byte value: 0x44 */ + 0xa3, 0x73, 0x2d, 0x97, 0xb9, 0x64, 0xf9, 0xe5, 0xde, 0x85, 0x60, 0x1a, 0xb8, 0x0c, 0xa7, 0x1f, /* Byte value: 0x45 */ + 0x67, 0xf1, 0x0d, 0xa3, 0x05, 0xa2, 0x7d, 0x8a, 0x0c, 0xce, 0x44, 0x62, 0xdd, 0xe9, 0x81, 0x5c, /* Byte value: 0x46 */ + 0x46, 0x4a, 0xd4, 0x45, 0xa6, 0xda, 0x2d, 0xa3, 0x4b, 0x8c, 0x2f, 0xe0, 0x0a, 0xdc, 0xb3, 0xb9, /* Byte value: 0x47 */ + 0x9d, 0xa1, 0x02, 0x92, 0x7b, 0x34, 0x99, 0x6a, 0xa4, 0xf9, 0x93, 0xe6, 0x22, 0x2a, 0x3a, 0x18, /* Byte value: 0x48 */ + 0xb9, 0xaa, 0xc6, 0xcd, 0x83, 0x8d, 0xf6, 0x1f, 0x86, 0xb1, 0xae, 0x3a, 0xfb, 0x65, 0x9a, 0x61, /* Byte value: 0x49 */ + 0x73, 0x74, 0x79, 0x02, 0xaa, 0x20, 0x81, 0x39, 0x5b, 0xe6, 0xdf, 0xd9, 0xe5, 0xc2, 0x8c, 0x69, /* Byte value: 0x4a */ + 0xdf, 0x14, 0x73, 0x9d, 0xfe, 0xc4, 0x39, 0x38, 0x2a, 0x7d, 0x45, 0x21, 0x4f, 0x40, 0x5e, 0x11, /* Byte value: 0x4b */ + 0xe5, 0x39, 0xf9, 0xd2, 0x1f, 0xbe, 0xd4, 0x46, 0x95, 0x09, 0x4f, 0xfa, 0xb2, 0xd0, 0x14, 0xa6, /* Byte value: 0x4c */ + 0x26, 0x95, 0x77, 0x7a, 0x08, 0xac, 0xc8, 0xec, 0xa1, 0x4c, 0xa0, 0x2e, 0x0b, 0x14, 0x2a, 0x21, /* Byte value: 0x4d */ + 0xde, 0x5b, 0xcb, 0x6e, 0x86, 0x2f, 0x8b, 0x95, 0x8a, 0x7f, 0xea, 0x58, 0x26, 0x8c, 0x1b, 0x3d, /* Byte value: 0x4e */ + 0x80, 0x56, 0x47, 0x54, 0xea, 0x09, 0x0e, 0x55, 0x1a, 0xc3, 0x96, 0x6a, 0xbd, 0x62, 0x1f, 0xa2, /* Byte value: 0x4f */ + 0xdd, 0x8a, 0xc0, 0xb8, 0x0e, 0xd1, 0x9e, 0xa1, 0xa9, 0x79, 0xd8, 0xd3, 0x9d, 0x1b, 0xd4, 0x49, /* Byte value: 0x50 */ + 0x60, 0xdf, 0xa3, 0x3f, 0xae, 0x76, 0xe5, 0x4f, 0xea, 0xc0, 0x8f, 0xce, 0x01, 0xc8, 0x99, 0x98, /* Byte value: 0x51 */ + 
0xc5, 0xcd, 0x98, 0xc7, 0xc4, 0x2d, 0x36, 0xc2, 0x72, 0x49, 0x8b, 0x01, 0x0c, 0x29, 0x63, 0x6f, /* Byte value: 0x52 */ + 0x50, 0x51, 0x13, 0xc1, 0xf9, 0x4d, 0x76, 0x89, 0x9f, 0xa0, 0x29, 0xa9, 0xe0, 0xac, 0x34, 0xd4, /* Byte value: 0x53 */ + 0xbb, 0x34, 0x75, 0xe8, 0x73, 0x98, 0x51, 0x86, 0x05, 0xb5, 0x33, 0xc8, 0x29, 0x3e, 0x10, 0x39, /* Byte value: 0x54 */ + 0xb7, 0xf6, 0x59, 0x36, 0x16, 0xe6, 0x05, 0x56, 0x89, 0xad, 0xfb, 0xa1, 0x80, 0x27, 0xaa, 0x2a, /* Byte value: 0x55 */ + 0x0d, 0x8d, 0x94, 0x2d, 0x1d, 0x95, 0xe6, 0x7d, 0x2c, 0x1a, 0x67, 0x10, 0xc0, 0xd5, 0xff, 0x3f, /* Byte value: 0x56 */ + 0x22, 0x6a, 0xd2, 0x30, 0x2b, 0x86, 0x45, 0x1d, 0x64, 0x44, 0x59, 0x09, 0x6c, 0xa2, 0xfd, 0x91, /* Byte value: 0x57 */ + 0xf0, 0xf3, 0x35, 0x80, 0xc8, 0xd7, 0x9a, 0x58, 0x62, 0x23, 0x7b, 0x38, 0xe3, 0x37, 0x5c, 0xbf, /* Byte value: 0x58 */ + 0x8a, 0xf5, 0x7d, 0xe5, 0x5c, 0x48, 0x70, 0xed, 0xd0, 0xd7, 0x3a, 0xd6, 0xa1, 0x96, 0xf8, 0x59, /* Byte value: 0x59 */ + 0x98, 0x11, 0x1f, 0x2b, 0x20, 0xf5, 0xa6, 0x36, 0xc1, 0xf3, 0xc5, 0xb8, 0x2c, 0x50, 0xa8, 0x84, /* Byte value: 0x5a */ + 0x3f, 0x9d, 0x97, 0xf6, 0xba, 0xbb, 0xd2, 0x22, 0xda, 0x7e, 0x5c, 0x85, 0xf3, 0xea, 0xd8, 0x2b, /* Byte value: 0x5b */ + 0xd9, 0x75, 0x65, 0xf2, 0x2d, 0xfb, 0x13, 0x50, 0x6c, 0x71, 0x21, 0xf4, 0xfa, 0xad, 0x03, 0xf9, /* Byte value: 0x5c */ + 0x81, 0x19, 0xff, 0xa7, 0x92, 0xe2, 0xbc, 0xf8, 0xba, 0xc1, 0x39, 0x13, 0xd4, 0xae, 0x5a, 0x8e, /* Byte value: 0x5d */ + 0x3d, 0x03, 0x24, 0xd3, 0x4a, 0xae, 0x75, 0xbb, 0x59, 0x7a, 0xc1, 0x77, 0x21, 0xb1, 0x52, 0x73, /* Byte value: 0x5e */ + 0x9e, 0x70, 0x09, 0x44, 0xf3, 0xca, 0x8c, 0x5e, 0x87, 0xff, 0xa1, 0x6d, 0x99, 0xbd, 0xf5, 0x6c, /* Byte value: 0x5f */ + 0x09, 0x72, 0x31, 0x67, 0x3e, 0xbf, 0x6b, 0x8c, 0xe9, 0x12, 0x9e, 0x37, 0xa7, 0x63, 0x28, 0x8f, /* Byte value: 0x60 */ + 0xd8, 0x3a, 0xdd, 0x01, 0x55, 0x10, 0xa1, 0xfd, 0xcc, 0x73, 0x8e, 0x8d, 0x93, 0x61, 0x46, 0xd5, /* Byte value: 0x61 */ + 0x77, 0x8b, 0xdc, 0x48, 0x89, 0x0a, 0x0c, 0xc8, 0x9e, 0xee, 0x26, 0xfe, 0x82, 0x74, 0x5b, 0xd9, /* Byte value: 0x62 */ + 0x6b, 0x33, 0x21, 0x7d, 0x60, 0xdc, 0x29, 0x5a, 0x80, 0xd6, 0x8c, 0x0b, 0x74, 0xf0, 0x3b, 0x4f, /* Byte value: 0x63 */ + 0xe1, 0xc6, 0x5c, 0x98, 0x3c, 0x94, 0x59, 0xb7, 0x50, 0x01, 0xb6, 0xdd, 0xd5, 0x66, 0xc3, 0x16, /* Byte value: 0x64 */ + 0xad, 0x2f, 0xb2, 0x6c, 0x2c, 0x0f, 0x0a, 0xac, 0xd1, 0x99, 0x35, 0x81, 0xc3, 0x4e, 0x97, 0x54, /* Byte value: 0x65 */ + 0xe0, 0x89, 0xe4, 0x6b, 0x44, 0x7f, 0xeb, 0x1a, 0xf0, 0x03, 0x19, 0xa4, 0xbc, 0xaa, 0x86, 0x3a, /* Byte value: 0x66 */ + 0x66, 0xbe, 0xb5, 0x50, 0x7d, 0x49, 0xcf, 0x27, 0xac, 0xcc, 0xeb, 0x1b, 0xb4, 0x25, 0xc4, 0x70, /* Byte value: 0x67 */ + 0xf8, 0xce, 0xbc, 0x14, 0x8e, 0x83, 0x43, 0x79, 0x2b, 0x33, 0x4a, 0x76, 0x2d, 0x98, 0x31, 0x1c, /* Byte value: 0x68 */ + 0x76, 0xc4, 0x64, 0xbb, 0xf1, 0xe1, 0xbe, 0x65, 0x3e, 0xec, 0x89, 0x87, 0xeb, 0xb8, 0x1e, 0xf5, /* Byte value: 0x69 */ + 0x43, 0xfa, 0xc9, 0xfc, 0xfd, 0x1b, 0x12, 0xff, 0x2e, 0x86, 0x79, 0xbe, 0x04, 0xa6, 0x21, 0x25, /* Byte value: 0x6a */ + 0x72, 0x3b, 0xc1, 0xf1, 0xd2, 0xcb, 0x33, 0x94, 0xfb, 0xe4, 0x70, 0xa0, 0x8c, 0x0e, 0xc9, 0x45, /* Byte value: 0x6b */ + 0xf7, 0xdd, 0x9b, 0x1c, 0x63, 0x03, 0x02, 0x9d, 0x84, 0x2d, 0xb0, 0x94, 0x3f, 0x16, 0x44, 0x7b, /* Byte value: 0x6c */ + 0x4e, 0x77, 0x5d, 0xd1, 0xe0, 0x8e, 0xf4, 0x82, 0x02, 0x9c, 0x1e, 0xae, 0xc4, 0x73, 0xde, 0x1a, /* Byte value: 0x6d */ + 0xdc, 0xc5, 0x78, 0x4b, 0x76, 0x3a, 0x2c, 0x0c, 0x09, 0x7b, 0x77, 0xaa, 0xf4, 0xd7, 0x91, 0x65, /* Byte value: 0x6e */ + 0x48, 0x16, 0x4b, 0xbe, 0x33, 0xb1, 0xde, 
0xea, 0x44, 0x90, 0x7a, 0x7b, 0x71, 0x9e, 0x83, 0xf2, /* Byte value: 0x6f */ + 0xda, 0xa4, 0x6e, 0x24, 0xa5, 0x05, 0x06, 0x64, 0x4f, 0x77, 0x13, 0x7f, 0x41, 0x3a, 0xcc, 0x8d, /* Byte value: 0x70 */ + 0x69, 0xad, 0x92, 0x58, 0x90, 0xc9, 0x8e, 0xc3, 0x03, 0xd2, 0x11, 0xf9, 0xa6, 0xab, 0xb1, 0x17, /* Byte value: 0x71 */ + 0xb0, 0xd8, 0xf7, 0xaa, 0xbd, 0x32, 0x9d, 0x93, 0x6f, 0xa3, 0x30, 0x0d, 0x5c, 0x06, 0xb2, 0xee, /* Byte value: 0x72 */ + 0xd5, 0xb7, 0x49, 0x2c, 0x48, 0x85, 0x47, 0x80, 0xe0, 0x69, 0xe9, 0x9d, 0x53, 0xb4, 0xb9, 0xea, /* Byte value: 0x73 */ + 0xf6, 0x92, 0x23, 0xef, 0x1b, 0xe8, 0xb0, 0x30, 0x24, 0x2f, 0x1f, 0xed, 0x56, 0xda, 0x01, 0x57, /* Byte value: 0x74 */ + 0x7b, 0x49, 0xf0, 0x96, 0xec, 0x74, 0x58, 0x18, 0x12, 0xf6, 0xee, 0x97, 0x2b, 0x6d, 0xe1, 0xca, /* Byte value: 0x75 */ + 0x41, 0x64, 0x7a, 0xd9, 0x0d, 0x0e, 0xb5, 0x66, 0xad, 0x82, 0xe4, 0x4c, 0xd6, 0xfd, 0xab, 0x7d, /* Byte value: 0x76 */ + 0xcb, 0x91, 0x07, 0x3c, 0x51, 0x46, 0xc5, 0x8b, 0x7d, 0x55, 0xde, 0x9a, 0x77, 0x6b, 0x53, 0x24, /* Byte value: 0x77 */ + 0x95, 0x9c, 0x8b, 0x06, 0x3d, 0x60, 0x40, 0x4b, 0xed, 0xe9, 0xa2, 0xa8, 0xec, 0x85, 0x57, 0xbb, /* Byte value: 0x78 */ + 0xaa, 0x01, 0x1c, 0xf0, 0x87, 0xdb, 0x92, 0x69, 0x37, 0x97, 0xfe, 0x2d, 0x1f, 0x6f, 0x8f, 0x90, /* Byte value: 0x79 */ + 0xe6, 0xe8, 0xf2, 0x04, 0x97, 0x40, 0xc1, 0x72, 0xb6, 0x0f, 0x7d, 0x71, 0x09, 0x47, 0xdb, 0xd2, /* Byte value: 0x7a */ + 0x37, 0xa0, 0x1e, 0x62, 0xfc, 0xef, 0x0b, 0x03, 0x93, 0x6e, 0x6d, 0xcb, 0x3d, 0x45, 0xb5, 0x88, /* Byte value: 0x7b */ + 0x21, 0xbb, 0xd9, 0xe6, 0xa3, 0x78, 0x50, 0x29, 0x47, 0x42, 0x6b, 0x82, 0xd7, 0x35, 0x32, 0xe5, /* Byte value: 0x7c */ + 0x42, 0xb5, 0x71, 0x0f, 0x85, 0xf0, 0xa0, 0x52, 0x8e, 0x84, 0xd6, 0xc7, 0x6d, 0x6a, 0x64, 0x09, /* Byte value: 0x7d */ + 0xd7, 0x29, 0xfa, 0x09, 0xb8, 0x90, 0xe0, 0x19, 0x63, 0x6d, 0x74, 0x6f, 0x81, 0xef, 0x33, 0xb2, /* Byte value: 0x7e */ + 0xc2, 0xe3, 0x36, 0x5b, 0x6f, 0xf9, 0xae, 0x07, 0x94, 0x47, 0x40, 0xad, 0xd0, 0x08, 0x7b, 0xab, /* Byte value: 0x7f */ + 0x74, 0x5a, 0xd7, 0x9e, 0x01, 0xf4, 0x19, 0xfc, 0xbd, 0xe8, 0x14, 0x75, 0x39, 0xe3, 0x94, 0xad, /* Byte value: 0x80 */ + 0xe2, 0x17, 0x57, 0x4e, 0xb4, 0x6a, 0x4c, 0x83, 0x73, 0x07, 0x84, 0x56, 0x6e, 0xf1, 0x0c, 0x62, /* Byte value: 0x81 */ + 0x91, 0x63, 0x2e, 0x4c, 0x1e, 0x4a, 0xcd, 0xba, 0x28, 0xe1, 0x5b, 0x8f, 0x8b, 0x33, 0x80, 0x0b, /* Byte value: 0x82 */ + 0x23, 0x25, 0x6a, 0xc3, 0x53, 0x6d, 0xf7, 0xb0, 0xc4, 0x46, 0xf6, 0x70, 0x05, 0x6e, 0xb8, 0xbd, /* Byte value: 0x83 */ + 0xc0, 0x7d, 0x85, 0x7e, 0x9f, 0xec, 0x09, 0x9e, 0x17, 0x43, 0xdd, 0x5f, 0x02, 0x53, 0xf1, 0xf3, /* Byte value: 0x84 */ + 0x64, 0x20, 0x06, 0x75, 0x8d, 0x5c, 0x68, 0xbe, 0x2f, 0xc8, 0x76, 0xe9, 0x66, 0x7e, 0x4e, 0x28, /* Byte value: 0x85 */ + 0x0f, 0x13, 0x27, 0x08, 0xed, 0x80, 0x41, 0xe4, 0xaf, 0x1e, 0xfa, 0xe2, 0x12, 0x8e, 0x75, 0x67, /* Byte value: 0x86 */ + 0x63, 0x0e, 0xa8, 0xe9, 0x26, 0x88, 0xf0, 0x7b, 0xc9, 0xc6, 0xbd, 0x45, 0xba, 0x5f, 0x56, 0xec, /* Byte value: 0x87 */ + 0xa8, 0x9f, 0xaf, 0xd5, 0x77, 0xce, 0x35, 0xf0, 0xb4, 0x93, 0x63, 0xdf, 0xcd, 0x34, 0x05, 0xc8, /* Byte value: 0x88 */ + 0x55, 0xe1, 0x0e, 0x78, 0xa2, 0x8c, 0x49, 0xd5, 0xfa, 0xaa, 0x7f, 0xf7, 0xee, 0xd6, 0xa6, 0x48, /* Byte value: 0x89 */ + 0xbd, 0x55, 0x63, 0x87, 0xa0, 0xa7, 0x7b, 0xee, 0x43, 0xb9, 0x57, 0x1d, 0x9c, 0xd3, 0x4d, 0xd1, /* Byte value: 0x8a */ + 0x9a, 0x8f, 0xac, 0x0e, 0xd0, 0xe0, 0x01, 0xaf, 0x42, 0xf7, 0x58, 0x4a, 0xfe, 0x0b, 0x22, 0xdc, /* Byte value: 0x8b */ + 0x03, 0xd1, 0x0b, 0xd6, 0x88, 0xfe, 0x15, 0x34, 0x23, 0x06, 0x32, 0x8b, 0xbb, 0x97, 
0xcf, 0x74, /* Byte value: 0x8c */ + 0xc8, 0x40, 0x0c, 0xea, 0xd9, 0xb8, 0xd0, 0xbf, 0x5e, 0x53, 0xec, 0x11, 0xcc, 0xfc, 0x9c, 0x50, /* Byte value: 0x8d */ + 0xc4, 0x82, 0x20, 0x34, 0xbc, 0xc6, 0x84, 0x6f, 0xd2, 0x4b, 0x24, 0x78, 0x65, 0xe5, 0x26, 0x43, /* Byte value: 0x8e */ + 0x6d, 0x52, 0x37, 0x12, 0xb3, 0xe3, 0x03, 0x32, 0xc6, 0xda, 0xe8, 0xde, 0xc1, 0x1d, 0x66, 0xa7, /* Byte value: 0x8f */ + 0x92, 0xb2, 0x25, 0x9a, 0x96, 0xb4, 0xd8, 0x8e, 0x0b, 0xe7, 0x69, 0x04, 0x30, 0xa4, 0x4f, 0x7f, /* Byte value: 0x90 */ + 0x49, 0x59, 0xf3, 0x4d, 0x4b, 0x5a, 0x6c, 0x47, 0xe4, 0x92, 0xd5, 0x02, 0x18, 0x52, 0xc6, 0xde, /* Byte value: 0x91 */ + 0x9f, 0x3f, 0xb1, 0xb7, 0x8b, 0x21, 0x3e, 0xf3, 0x27, 0xfd, 0x0e, 0x14, 0xf0, 0x71, 0xb0, 0x40, /* Byte value: 0x92 */ + 0x44, 0xd4, 0x67, 0x60, 0x56, 0xcf, 0x8a, 0x3a, 0xc8, 0x88, 0xb2, 0x12, 0xd8, 0x87, 0x39, 0xe1, /* Byte value: 0x93 */ + 0x2d, 0x79, 0xf5, 0x38, 0xc6, 0x06, 0x04, 0xf9, 0xcb, 0x5a, 0xa3, 0xeb, 0x7e, 0x2c, 0x88, 0xf6, /* Byte value: 0x94 */ + 0x2f, 0xe7, 0x46, 0x1d, 0x36, 0x13, 0xa3, 0x60, 0x48, 0x5e, 0x3e, 0x19, 0xac, 0x77, 0x02, 0xae, /* Byte value: 0x95 */ + 0x36, 0xef, 0xa6, 0x91, 0x84, 0x04, 0xb9, 0xae, 0x33, 0x6c, 0xc2, 0xb2, 0x54, 0x89, 0xf0, 0xa4, /* Byte value: 0x96 */ + 0x15, 0xca, 0xcc, 0x52, 0xd7, 0x69, 0x4e, 0x1e, 0xf7, 0x2a, 0x34, 0xc2, 0x51, 0xe7, 0x48, 0x19, /* Byte value: 0x97 */ + 0x18, 0x47, 0x58, 0x7f, 0xca, 0xfc, 0xa8, 0x63, 0xdb, 0x30, 0x53, 0xd2, 0x91, 0x32, 0xb7, 0x26, /* Byte value: 0x98 */ + 0x70, 0xa5, 0x72, 0xd4, 0x22, 0xde, 0x94, 0x0d, 0x78, 0xe0, 0xed, 0x52, 0x5e, 0x55, 0x43, 0x1d, /* Byte value: 0x99 */ + 0xfe, 0xaf, 0xaa, 0x7b, 0x5d, 0xbc, 0x69, 0x11, 0x6d, 0x3f, 0x2e, 0xa3, 0x98, 0x75, 0x6c, 0xf4, /* Byte value: 0x9a */ + 0x2e, 0xa8, 0xfe, 0xee, 0x4e, 0xf8, 0x11, 0xcd, 0xe8, 0x5c, 0x91, 0x60, 0xc5, 0xbb, 0x47, 0x82, /* Byte value: 0x9b */ + 0x5c, 0x93, 0x3f, 0x1f, 0x9c, 0x33, 0x22, 0x59, 0x13, 0xb8, 0xe1, 0xc0, 0x49, 0xb5, 0x8e, 0xc7, /* Byte value: 0x9c */ + 0xa1, 0xed, 0x9e, 0xb2, 0x49, 0x71, 0x5e, 0x7c, 0x5d, 0x81, 0xfd, 0xe8, 0x6a, 0x57, 0x2d, 0x47, /* Byte value: 0x9d */ + 0xa6, 0xc3, 0x30, 0x2e, 0xe2, 0xa5, 0xc6, 0xb9, 0xbb, 0x8f, 0x36, 0x44, 0xb6, 0x76, 0x35, 0x83, /* Byte value: 0x9e */ + 0xea, 0x2a, 0xde, 0xda, 0xf2, 0x3e, 0x95, 0xa2, 0x3a, 0x17, 0xb5, 0x18, 0xa0, 0x5e, 0x61, 0xc1, /* Byte value: 0x9f */ + 0x1b, 0x96, 0x53, 0xa9, 0x42, 0x02, 0xbd, 0x57, 0xf8, 0x36, 0x61, 0x59, 0x2a, 0xa5, 0x78, 0x52, /* Byte value: 0xa0 */ + 0x90, 0x2c, 0x96, 0xbf, 0x66, 0xa1, 0x7f, 0x17, 0x88, 0xe3, 0xf4, 0xf6, 0xe2, 0xff, 0xc5, 0x27, /* Byte value: 0xa1 */ + 0xa4, 0x5d, 0x83, 0x0b, 0x12, 0xb0, 0x61, 0x20, 0x38, 0x8b, 0xab, 0xb6, 0x64, 0x2d, 0xbf, 0xdb, /* Byte value: 0xa2 */ + 0x68, 0xe2, 0x2a, 0xab, 0xe8, 0x22, 0x3c, 0x6e, 0xa3, 0xd0, 0xbe, 0x80, 0xcf, 0x67, 0xf4, 0x3b, /* Byte value: 0xa3 */ + 0x57, 0x7f, 0xbd, 0x5d, 0x52, 0x99, 0xee, 0x4c, 0x79, 0xae, 0xe2, 0x05, 0x3c, 0x8d, 0x2c, 0x10, /* Byte value: 0xa4 */ + 0xfd, 0x7e, 0xa1, 0xad, 0xd5, 0x42, 0x7c, 0x25, 0x4e, 0x39, 0x1c, 0x28, 0x23, 0xe2, 0xa3, 0x80, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x35, 0x3e, 0xad, 0x47, 0x0c, 0xfa, 0xac, 0x9a, 0x10, 0x6a, 0xf0, 0x39, 0xef, 0x1e, 0x3f, 0xd0, /* Byte value: 0xa7 */ + 0x4b, 0xc7, 0x40, 0x68, 0xbb, 0x4f, 0xcb, 0xde, 0x67, 0x96, 0x48, 0xf0, 0xca, 0x09, 0x4c, 0x86, /* Byte value: 0xa8 */ + 0x47, 0x05, 0x6c, 0xb6, 0xde, 0x31, 0x9f, 0x0e, 0xeb, 0x8e, 0x80, 0x99, 0x63, 0x10, 0xf6, 0x95, /* Byte value: 0xa9 */ + 0xa2, 
0x3c, 0x95, 0x64, 0xc1, 0x8f, 0x4b, 0x48, 0x7e, 0x87, 0xcf, 0x63, 0xd1, 0xc0, 0xe2, 0x33, /* Byte value: 0xaa */ + 0x83, 0x87, 0x4c, 0x82, 0x62, 0xf7, 0x1b, 0x61, 0x39, 0xc5, 0xa4, 0xe1, 0x06, 0xf5, 0xd0, 0xd6, /* Byte value: 0xab */ + 0x7c, 0x67, 0x5e, 0x0a, 0x47, 0xa0, 0xc0, 0xdd, 0xf4, 0xf8, 0x25, 0x3b, 0xf7, 0x4c, 0xf9, 0x0e, /* Byte value: 0xac */ + 0xd2, 0x99, 0xe7, 0xb0, 0xe3, 0x51, 0xdf, 0x45, 0x06, 0x67, 0x22, 0x31, 0x8f, 0x95, 0xa1, 0x2e, /* Byte value: 0xad */ + 0x13, 0xab, 0xda, 0x3d, 0x04, 0x56, 0x64, 0x76, 0xb1, 0x26, 0x50, 0x17, 0xe4, 0x0a, 0x15, 0xf1, /* Byte value: 0xae */ + 0x7e, 0xf9, 0xed, 0x2f, 0xb7, 0xb5, 0x67, 0x44, 0x77, 0xfc, 0xb8, 0xc9, 0x25, 0x17, 0x73, 0x56, /* Byte value: 0xaf */ + 0x3e, 0xd2, 0x2f, 0x05, 0xc2, 0x50, 0x60, 0x8f, 0x7a, 0x7c, 0xf3, 0xfc, 0x9a, 0x26, 0x9d, 0x07, /* Byte value: 0xb0 */ + 0x7a, 0x06, 0x48, 0x65, 0x94, 0x9f, 0xea, 0xb5, 0xb2, 0xf4, 0x41, 0xee, 0x42, 0xa1, 0xa4, 0xe6, /* Byte value: 0xb1 */ + 0x84, 0xa9, 0xe2, 0x1e, 0xc9, 0x23, 0x83, 0xa4, 0xdf, 0xcb, 0x6f, 0x4d, 0xda, 0xd4, 0xc8, 0x12, /* Byte value: 0xb2 */ + 0x16, 0x1b, 0xc7, 0x84, 0x5f, 0x97, 0x5b, 0x2a, 0xd4, 0x2c, 0x06, 0x49, 0xea, 0x70, 0x87, 0x6d, /* Byte value: 0xb3 */ + 0xd1, 0x48, 0xec, 0x66, 0x6b, 0xaf, 0xca, 0x71, 0x25, 0x61, 0x10, 0xba, 0x34, 0x02, 0x6e, 0x5a, /* Byte value: 0xb4 */ + 0xb1, 0x97, 0x4f, 0x59, 0xc5, 0xd9, 0x2f, 0x3e, 0xcf, 0xa1, 0x9f, 0x74, 0x35, 0xca, 0xf7, 0xc2, /* Byte value: 0xb5 */ + 0x0c, 0xc2, 0x2c, 0xde, 0x65, 0x7e, 0x54, 0xd0, 0x8c, 0x18, 0xc8, 0x69, 0xa9, 0x19, 0xba, 0x13, /* Byte value: 0xb6 */ + 0x29, 0x86, 0x50, 0x72, 0xe5, 0x2c, 0x89, 0x08, 0x0e, 0x52, 0x5a, 0xcc, 0x19, 0x9a, 0x5f, 0x46, /* Byte value: 0xb7 */ + 0x06, 0x61, 0x16, 0x6f, 0xd3, 0x3f, 0x2a, 0x68, 0x46, 0x0c, 0x64, 0xd5, 0xb5, 0xed, 0x5d, 0xe8, /* Byte value: 0xb8 */ + 0x2c, 0x36, 0x4d, 0xcb, 0xbe, 0xed, 0xb6, 0x54, 0x6b, 0x58, 0x0c, 0x92, 0x17, 0xe0, 0xcd, 0xda, /* Byte value: 0xb9 */ + 0x2a, 0x57, 0x5b, 0xa4, 0x6d, 0xd2, 0x9c, 0x3c, 0x2d, 0x54, 0x68, 0x47, 0xa2, 0x0d, 0x90, 0x32, /* Byte value: 0xba */ + 0xcd, 0xf0, 0x11, 0x53, 0x82, 0x79, 0xef, 0xe3, 0x3b, 0x59, 0xba, 0x4f, 0xc2, 0x86, 0x0e, 0xcc, /* Byte value: 0xbb */ + 0x07, 0x2e, 0xae, 0x9c, 0xab, 0xd4, 0x98, 0xc5, 0xe6, 0x0e, 0xcb, 0xac, 0xdc, 0x21, 0x18, 0xc4, /* Byte value: 0xbc */ + 0x10, 0x7a, 0xd1, 0xeb, 0x8c, 0xa8, 0x71, 0x42, 0x92, 0x20, 0x62, 0x9c, 0x5f, 0x9d, 0xda, 0x85, /* Byte value: 0xbd */ + 0x08, 0x3d, 0x89, 0x94, 0x46, 0x54, 0xd9, 0x21, 0x49, 0x10, 0x31, 0x4e, 0xce, 0xaf, 0x6d, 0xa3, /* Byte value: 0xbe */ + 0xf9, 0x81, 0x04, 0xe7, 0xf6, 0x68, 0xf1, 0xd4, 0x8b, 0x31, 0xe5, 0x0f, 0x44, 0x54, 0x74, 0x30, /* Byte value: 0xbf */ + 0x11, 0x35, 0x69, 0x18, 0xf4, 0x43, 0xc3, 0xef, 0x32, 0x22, 0xcd, 0xe5, 0x36, 0x51, 0x9f, 0xa9, /* Byte value: 0xc0 */ + 0x28, 0xc9, 0xe8, 0x81, 0x9d, 0xc7, 0x3b, 0xa5, 0xae, 0x50, 0xf5, 0xb5, 0x70, 0x56, 0x1a, 0x6a, /* Byte value: 0xc1 */ + 0xeb, 0x65, 0x66, 0x29, 0x8a, 0xd5, 0x27, 0x0f, 0x9a, 0x15, 0x1a, 0x61, 0xc9, 0x92, 0x24, 0xed, /* Byte value: 0xc2 */ + 0x87, 0x78, 0xe9, 0xc8, 0x41, 0xdd, 0x96, 0x90, 0xfc, 0xcd, 0x5d, 0xc6, 0x61, 0x43, 0x07, 0x66, /* Byte value: 0xc3 */ + 0xf1, 0xbc, 0x8d, 0x73, 0xb0, 0x3c, 0x28, 0xf5, 0xc2, 0x21, 0xd4, 0x41, 0x8a, 0xfb, 0x19, 0x93, /* Byte value: 0xc4 */ + 0xc6, 0x1c, 0x93, 0x11, 0x4c, 0xd3, 0x23, 0xf6, 0x51, 0x4f, 0xb9, 0x8a, 0xb7, 0xbe, 0xac, 0x1b, /* Byte value: 0xc5 */ + 0xb5, 0x68, 0xea, 0x13, 0xe6, 0xf3, 0xa2, 0xcf, 0x0a, 0xa9, 0x66, 0x53, 0x52, 0x7c, 0x20, 0x72, /* Byte value: 0xc6 */ + 0x6f, 0xcc, 0x84, 0x37, 0x43, 0xf6, 0xa4, 0xab, 
0x45, 0xde, 0x75, 0x2c, 0x13, 0x46, 0xec, 0xff, /* Byte value: 0xc7 */ + 0xcc, 0xbf, 0xa9, 0xa0, 0xfa, 0x92, 0x5d, 0x4e, 0x9b, 0x5b, 0x15, 0x36, 0xab, 0x4a, 0x4b, 0xe0, /* Byte value: 0xc8 */ + 0x34, 0x71, 0x15, 0xb4, 0x74, 0x11, 0x1e, 0x37, 0xb0, 0x68, 0x5f, 0x40, 0x86, 0xd2, 0x7a, 0xfc, /* Byte value: 0xc9 */ + 0x1c, 0xb8, 0xfd, 0x35, 0xe9, 0xd6, 0x25, 0x92, 0x1e, 0x38, 0xaa, 0xf5, 0xf6, 0x84, 0x60, 0x96, /* Byte value: 0xca */ + 0x4d, 0xa6, 0x56, 0x07, 0x68, 0x70, 0xe1, 0xb6, 0x21, 0x9a, 0x2c, 0x25, 0x7f, 0xe4, 0x11, 0x6e, /* Byte value: 0xcb */ + 0xf3, 0x22, 0x3e, 0x56, 0x40, 0x29, 0x8f, 0x6c, 0x41, 0x25, 0x49, 0xb3, 0x58, 0xa0, 0x93, 0xcb, /* Byte value: 0xcc */ + 0x96, 0x4d, 0x80, 0xd0, 0xb5, 0x9e, 0x55, 0x7f, 0xce, 0xef, 0x90, 0x23, 0x57, 0x12, 0x98, 0xcf, /* Byte value: 0xcd */ + 0x05, 0xb0, 0x1d, 0xb9, 0x5b, 0xc1, 0x3f, 0x5c, 0x65, 0x0a, 0x56, 0x5e, 0x0e, 0x7a, 0x92, 0x9c, /* Byte value: 0xce */ + 0x61, 0x90, 0x1b, 0xcc, 0xd6, 0x9d, 0x57, 0xe2, 0x4a, 0xc2, 0x20, 0xb7, 0x68, 0x04, 0xdc, 0xb4, /* Byte value: 0xcf */ + 0x59, 0x23, 0x22, 0xa6, 0xc7, 0xf2, 0x1d, 0x05, 0x76, 0xb2, 0xb7, 0x9e, 0x47, 0xcf, 0x1c, 0x5b, /* Byte value: 0xd0 */ + 0x86, 0x37, 0x51, 0x3b, 0x39, 0x36, 0x24, 0x3d, 0x5c, 0xcf, 0xf2, 0xbf, 0x08, 0x8f, 0x42, 0x4a, /* Byte value: 0xd1 */ + 0x88, 0x6b, 0xce, 0xc0, 0xac, 0x5d, 0xd7, 0x74, 0x53, 0xd3, 0xa7, 0x24, 0x73, 0xcd, 0x72, 0x01, /* Byte value: 0xd2 */ + 0xb3, 0x09, 0xfc, 0x7c, 0x35, 0xcc, 0x88, 0xa7, 0x4c, 0xa5, 0x02, 0x86, 0xe7, 0x91, 0x7d, 0x9a, /* Byte value: 0xd3 */ + 0x89, 0x24, 0x76, 0x33, 0xd4, 0xb6, 0x65, 0xd9, 0xf3, 0xd1, 0x08, 0x5d, 0x1a, 0x01, 0x37, 0x2d, /* Byte value: 0xd4 */ + 0xfc, 0x31, 0x19, 0x5e, 0xad, 0xa9, 0xce, 0x88, 0xee, 0x3b, 0xb3, 0x51, 0x4a, 0x2e, 0xe6, 0xac, /* Byte value: 0xd5 */ + 0xab, 0x4e, 0xa4, 0x03, 0xff, 0x30, 0x20, 0xc4, 0x97, 0x95, 0x51, 0x54, 0x76, 0xa3, 0xca, 0xbc, /* Byte value: 0xd6 */ + 0x53, 0x80, 0x18, 0x17, 0x71, 0xb3, 0x63, 0xbd, 0xbc, 0xa6, 0x1b, 0x22, 0x5b, 0x3b, 0xfb, 0xa0, /* Byte value: 0xd7 */ + 0xb4, 0x27, 0x52, 0xe0, 0x9e, 0x18, 0x10, 0x62, 0xaa, 0xab, 0xc9, 0x2a, 0x3b, 0xb0, 0x65, 0x5e, /* Byte value: 0xd8 */ + 0xe3, 0x58, 0xef, 0xbd, 0xcc, 0x81, 0xfe, 0x2e, 0xd3, 0x05, 0x2b, 0x2f, 0x07, 0x3d, 0x49, 0x4e, /* Byte value: 0xd9 */ + 0xce, 0x21, 0x1a, 0x85, 0x0a, 0x87, 0xfa, 0xd7, 0x18, 0x5f, 0x88, 0xc4, 0x79, 0x11, 0xc1, 0xb8, /* Byte value: 0xda */ + 0x52, 0xcf, 0xa0, 0xe4, 0x09, 0x58, 0xd1, 0x10, 0x1c, 0xa4, 0xb4, 0x5b, 0x32, 0xf7, 0xbe, 0x8c, /* Byte value: 0xdb */ + 0x8c, 0x94, 0x6b, 0x8a, 0x8f, 0x77, 0x5a, 0x85, 0x96, 0xdb, 0x5e, 0x03, 0x14, 0x7b, 0xa5, 0xb1, /* Byte value: 0xdc */ + 0x3a, 0x2d, 0x8a, 0x4f, 0xe1, 0x7a, 0xed, 0x7e, 0xbf, 0x74, 0x0a, 0xdb, 0xfd, 0x90, 0x4a, 0xb7, /* Byte value: 0xdd */ + 0xca, 0xde, 0xbf, 0xcf, 0x29, 0xad, 0x77, 0x26, 0xdd, 0x57, 0x71, 0xe3, 0x1e, 0xa7, 0x16, 0x08, /* Byte value: 0xde */ + 0x54, 0xae, 0xb6, 0x8b, 0xda, 0x67, 0xfb, 0x78, 0x5a, 0xa8, 0xd0, 0x8e, 0x87, 0x1a, 0xe3, 0x64, /* Byte value: 0xdf */ + 0xa7, 0x8c, 0x88, 0xdd, 0x9a, 0x4e, 0x74, 0x14, 0x1b, 0x8d, 0x99, 0x3d, 0xdf, 0xba, 0x70, 0xaf, /* Byte value: 0xe0 */ + 0x99, 0x5e, 0xa7, 0xd8, 0x58, 0x1e, 0x14, 0x9b, 0x61, 0xf1, 0x6a, 0xc1, 0x45, 0x9c, 0xed, 0xa8, /* Byte value: 0xe1 */ + 0x0a, 0xa3, 0x3a, 0xb1, 0xb6, 0x41, 0x7e, 0xb8, 0xca, 0x14, 0xac, 0xbc, 0x1c, 0xf4, 0xe7, 0xfb, /* Byte value: 0xe2 */ + 0x40, 0x2b, 0xc2, 0x2a, 0x75, 0xe5, 0x07, 0xcb, 0x0d, 0x80, 0x4b, 0x35, 0xbf, 0x31, 0xee, 0x51, /* Byte value: 0xe3 */ + 0x02, 0x9e, 0xb3, 0x25, 0xf0, 0x15, 0xa7, 0x99, 0x83, 0x04, 0x9d, 0xf2, 0xd2, 0x5b, 0x8a, 
0x58, /* Byte value: 0xe4 */ + 0x5b, 0xbd, 0x91, 0x83, 0x37, 0xe7, 0xba, 0x9c, 0xf5, 0xb6, 0x2a, 0x6c, 0x95, 0x94, 0x96, 0x03, /* Byte value: 0xe5 */ + 0xbc, 0x1a, 0xdb, 0x74, 0xd8, 0x4c, 0xc9, 0x43, 0xe3, 0xbb, 0xf8, 0x64, 0xf5, 0x1f, 0x08, 0xfd, /* Byte value: 0xe6 */ + 0x78, 0x98, 0xfb, 0x40, 0x64, 0x8a, 0x4d, 0x2c, 0x31, 0xf0, 0xdc, 0x1c, 0x90, 0xfa, 0x2e, 0xbe, /* Byte value: 0xe7 */ + 0x79, 0xd7, 0x43, 0xb3, 0x1c, 0x61, 0xff, 0x81, 0x91, 0xf2, 0x73, 0x65, 0xf9, 0x36, 0x6b, 0x92, /* Byte value: 0xe8 */ + 0xf5, 0x43, 0x28, 0x39, 0x93, 0x16, 0xa5, 0x04, 0x07, 0x29, 0x2d, 0x66, 0xed, 0x4d, 0xce, 0x23, /* Byte value: 0xe9 */ + 0xa9, 0xd0, 0x17, 0x26, 0x0f, 0x25, 0x87, 0x5d, 0x14, 0x91, 0xcc, 0xa6, 0xa4, 0xf8, 0x40, 0xe4, /* Byte value: 0xea */ + 0xec, 0x4b, 0xc8, 0xb5, 0x21, 0x01, 0xbf, 0xca, 0x7c, 0x1b, 0xd1, 0xcd, 0x15, 0xb3, 0x3c, 0x29, /* Byte value: 0xeb */ + 0x7d, 0x28, 0xe6, 0xf9, 0x3f, 0x4b, 0x72, 0x70, 0x54, 0xfa, 0x8a, 0x42, 0x9e, 0x80, 0xbc, 0x22, /* Byte value: 0xec */ + 0x45, 0x9b, 0xdf, 0x93, 0x2e, 0x24, 0x38, 0x97, 0x68, 0x8a, 0x1d, 0x6b, 0xb1, 0x4b, 0x7c, 0xcd, /* Byte value: 0xed */ + 0x3c, 0x4c, 0x9c, 0x20, 0x32, 0x45, 0xc7, 0x16, 0xf9, 0x78, 0x6e, 0x0e, 0x48, 0x7d, 0x17, 0x5f, /* Byte value: 0xee */ + 0x65, 0x6f, 0xbe, 0x86, 0xf5, 0xb7, 0xda, 0x13, 0x8f, 0xca, 0xd9, 0x90, 0x0f, 0xb2, 0x0b, 0x04, /* Byte value: 0xef */ + 0x01, 0x4f, 0xb8, 0xf3, 0x78, 0xeb, 0xb2, 0xad, 0xa0, 0x02, 0xaf, 0x79, 0x69, 0xcc, 0x45, 0x2c, /* Byte value: 0xf0 */ + 0x24, 0x0b, 0xc4, 0x5f, 0xf8, 0xb9, 0x6f, 0x75, 0x22, 0x48, 0x3d, 0xdc, 0xd9, 0x4f, 0xa0, 0x79, /* Byte value: 0xf1 */ + 0x5f, 0x42, 0x34, 0xc9, 0x14, 0xcd, 0x37, 0x6d, 0x30, 0xbe, 0xd3, 0x4b, 0xf2, 0x22, 0x41, 0xb3, /* Byte value: 0xf2 */ + 0x7f, 0xb6, 0x55, 0xdc, 0xcf, 0x5e, 0xd5, 0xe9, 0xd7, 0xfe, 0x17, 0xb0, 0x4c, 0xdb, 0x36, 0x7a, /* Byte value: 0xf3 */ + 0x20, 0xf4, 0x61, 0x15, 0xdb, 0x93, 0xe2, 0x84, 0xe7, 0x40, 0xc4, 0xfb, 0xbe, 0xf9, 0x77, 0xc9, /* Byte value: 0xf4 */ + 0xae, 0xfe, 0xb9, 0xba, 0xa4, 0xf1, 0x1f, 0x98, 0xf2, 0x9f, 0x07, 0x0a, 0x78, 0xd9, 0x58, 0x20, /* Byte value: 0xf5 */ + 0x5e, 0x0d, 0x8c, 0x3a, 0x6c, 0x26, 0x85, 0xc0, 0x90, 0xbc, 0x7c, 0x32, 0x9b, 0xee, 0x04, 0x9f, /* Byte value: 0xf6 */ + 0xaf, 0xb1, 0x01, 0x49, 0xdc, 0x1a, 0xad, 0x35, 0x52, 0x9d, 0xa8, 0x73, 0x11, 0x15, 0x1d, 0x0c, /* Byte value: 0xf7 */ + 0xd0, 0x07, 0x54, 0x95, 0x13, 0x44, 0x78, 0xdc, 0x85, 0x63, 0xbf, 0xc3, 0x5d, 0xce, 0x2b, 0x76, /* Byte value: 0xf8 */ + 0xb2, 0x46, 0x44, 0x8f, 0x4d, 0x27, 0x3a, 0x0a, 0xec, 0xa7, 0xad, 0xff, 0x8e, 0x5d, 0x38, 0xb6, /* Byte value: 0xf9 */ + 0x8f, 0x45, 0x60, 0x5c, 0x07, 0x89, 0x4f, 0xb1, 0xb5, 0xdd, 0x6c, 0x88, 0xaf, 0xec, 0x6a, 0xc5, /* Byte value: 0xfa */ + 0x27, 0xda, 0xcf, 0x89, 0x70, 0x47, 0x7a, 0x41, 0x01, 0x4e, 0x0f, 0x57, 0x62, 0xd8, 0x6f, 0x0d, /* Byte value: 0xfb */ + 0x8d, 0xdb, 0xd3, 0x79, 0xf7, 0x9c, 0xe8, 0x28, 0x36, 0xd9, 0xf1, 0x7a, 0x7d, 0xb7, 0xe0, 0x9d, /* Byte value: 0xfc */ + 0x4c, 0xe9, 0xee, 0xf4, 0x10, 0x9b, 0x53, 0x1b, 0x81, 0x98, 0x83, 0x5c, 0x16, 0x28, 0x54, 0x42, /* Byte value: 0xfd */ + 0xd4, 0xf8, 0xf1, 0xdf, 0x30, 0x6e, 0xf5, 0x2d, 0x40, 0x6b, 0x46, 0xe4, 0x3a, 0x78, 0xfc, 0xc6, /* Byte value: 0xfe */ + 0xfa, 0x50, 0x0f, 0x31, 0x7e, 0x96, 0xe4, 0xe0, 0xa8, 0x37, 0xd7, 0x84, 0xff, 0xc3, 0xbb, 0x44, /* Byte value: 0xff */ + 0xf2, 0x6d, 0x86, 0xa5, 0x38, 0xc2, 0x3d, 0xc1, 0xe1, 0x27, 0xe6, 0xca, 0x31, 0x6c, 0xd6, 0xe7, /* Byte value: 0x00 */ + + /* Matrix row: 1 */ + 0x2f, 0x36, 0x58, 0xea, 0xa2, 0x0c, 0x25, 0xcf, 0x7c, 0xd9, 0x9e, 0x49, 0x92, 0xd3, 0x50, 0x36, /* Byte value: 
0x01 */ + 0x48, 0x33, 0xd6, 0x3a, 0x7e, 0x8c, 0x82, 0xb8, 0xf4, 0x6c, 0x87, 0xe4, 0x0b, 0xa6, 0x9f, 0x33, /* Byte value: 0x02 */ + 0x5c, 0x9c, 0xe9, 0x76, 0x65, 0xa2, 0xaf, 0xd9, 0x01, 0x72, 0x0a, 0x54, 0xa8, 0x81, 0x94, 0x9c, /* Byte value: 0x03 */ + 0x0c, 0x65, 0x15, 0x85, 0x09, 0x1a, 0x1b, 0x9e, 0x53, 0x0a, 0x7b, 0xd1, 0x61, 0x1d, 0xb8, 0x65, /* Byte value: 0x04 */ + 0x3b, 0x99, 0x67, 0xa6, 0xb9, 0x22, 0x08, 0xae, 0x89, 0xc7, 0x13, 0xf9, 0x31, 0xf4, 0x5b, 0x99, /* Byte value: 0x05 */ + 0x62, 0x5e, 0xf1, 0xc3, 0xaa, 0x6a, 0x3d, 0xfe, 0x24, 0x53, 0xab, 0x81, 0xc1, 0x0c, 0xbd, 0x5e, /* Byte value: 0x06 */ + 0xe7, 0xef, 0x6c, 0x2a, 0x3c, 0xc2, 0x44, 0x6e, 0x67, 0x75, 0x70, 0x59, 0xb2, 0xd6, 0x51, 0xef, /* Byte value: 0x07 */ + 0x42, 0x85, 0x28, 0x1c, 0x92, 0x9b, 0x75, 0x69, 0x6f, 0x63, 0x20, 0xbc, 0xbb, 0x54, 0x7b, 0x85, /* Byte value: 0x08 */ + 0x61, 0xd6, 0x65, 0x73, 0x39, 0x8d, 0x4b, 0x38, 0x40, 0xb0, 0xc5, 0x24, 0x48, 0x9a, 0x93, 0xd6, /* Byte value: 0x09 */ + 0x1a, 0x3a, 0x73, 0xa8, 0xf0, 0x8e, 0xd3, 0x7b, 0x5f, 0x17, 0x03, 0xa7, 0x8d, 0xde, 0x87, 0x3a, /* Byte value: 0x0a */ + 0xf9, 0xf6, 0xad, 0x40, 0xcb, 0xfb, 0x9e, 0xde, 0x09, 0x64, 0x5a, 0xb1, 0xa1, 0x03, 0xbe, 0xf6, /* Byte value: 0x0b */ + 0x12, 0x7c, 0xd4, 0xef, 0xfe, 0x23, 0xc1, 0x2e, 0x3d, 0x1b, 0x51, 0x39, 0x72, 0xc8, 0x57, 0x7c, /* Byte value: 0x0c */ + 0x80, 0xea, 0xe2, 0xfa, 0xe0, 0x42, 0xe3, 0x19, 0xef, 0xc0, 0x69, 0xf4, 0x2b, 0xa3, 0x9e, 0xea, /* Byte value: 0x0d */ + 0x82, 0x1a, 0xbb, 0x9b, 0x02, 0xf8, 0x06, 0x9d, 0x16, 0xc3, 0x9c, 0x32, 0x64, 0x47, 0xaa, 0x1a, /* Byte value: 0x0e */ + 0x25, 0x80, 0xa6, 0xcc, 0x4e, 0x1b, 0xd2, 0x1e, 0xe7, 0xd6, 0x39, 0x11, 0x22, 0x21, 0xb4, 0x80, /* Byte value: 0x0f */ + 0xe2, 0xb4, 0x13, 0x39, 0x4a, 0x28, 0xde, 0xe7, 0xcb, 0x93, 0xc2, 0x75, 0xea, 0xaf, 0x23, 0xb4, /* Byte value: 0x10 */ + 0x06, 0xd3, 0xeb, 0xa3, 0xe5, 0x0d, 0xec, 0x4f, 0xc8, 0x05, 0xdc, 0x89, 0xd1, 0xef, 0x5c, 0xd3, /* Byte value: 0x11 */ + 0x24, 0xf8, 0x6b, 0x1d, 0x3f, 0x46, 0x41, 0x5c, 0x7a, 0x36, 0xa2, 0x72, 0xe4, 0x53, 0xae, 0xf8, /* Byte value: 0x12 */ + 0x40, 0x75, 0x71, 0x7d, 0x70, 0x21, 0x90, 0xed, 0x96, 0x60, 0xd5, 0x7a, 0xf4, 0xb0, 0x4f, 0x75, /* Byte value: 0x13 */ + 0x8a, 0x5c, 0x1c, 0xdc, 0x0c, 0x55, 0x14, 0xc8, 0x74, 0xcf, 0xce, 0xac, 0x9b, 0x51, 0x7a, 0x5c, /* Byte value: 0x14 */ + 0xd5, 0x48, 0x61, 0x1a, 0xfa, 0x10, 0xcd, 0xd7, 0x11, 0x5e, 0xaa, 0x5d, 0xba, 0x46, 0xc0, 0x48, /* Byte value: 0x15 */ + 0x77, 0x89, 0x03, 0x5e, 0xc0, 0x19, 0x83, 0xdd, 0x4c, 0xad, 0xbd, 0x52, 0xa4, 0x59, 0xac, 0x89, /* Byte value: 0x16 */ + 0x6c, 0xcb, 0xbd, 0x27, 0x41, 0xca, 0xc3, 0xe4, 0x8e, 0x5a, 0x25, 0x96, 0xef, 0xf5, 0x31, 0xcb, /* Byte value: 0x17 */ + 0xec, 0x21, 0x5f, 0xdd, 0xa1, 0x88, 0x20, 0xfd, 0x61, 0x9a, 0x4c, 0x62, 0xc4, 0x56, 0xaf, 0x21, /* Byte value: 0x18 */ + 0xda, 0xa5, 0xe0, 0x2f, 0x60, 0xed, 0xa0, 0x8f, 0x26, 0xb7, 0xbf, 0x29, 0x52, 0xcd, 0x56, 0xa5, /* Byte value: 0x19 */ + 0xa9, 0x0f, 0x51, 0xb3, 0xa7, 0x43, 0x2a, 0x99, 0x5b, 0x1c, 0x2b, 0x34, 0x68, 0x9f, 0x92, 0x0f, /* Byte value: 0x1a */ + 0xd8, 0x55, 0xb9, 0x4e, 0x82, 0x57, 0x45, 0x0b, 0xdf, 0xb4, 0x4a, 0xef, 0x1d, 0x29, 0x62, 0x55, /* Byte value: 0x1b */ + 0x74, 0x01, 0x97, 0xee, 0x53, 0xfe, 0xf5, 0x1b, 0x28, 0x4e, 0xd3, 0xf7, 0x2d, 0xcf, 0x82, 0x01, /* Byte value: 0x1c */ + 0xb1, 0xc5, 0x7b, 0x7a, 0xb5, 0x77, 0x1c, 0x66, 0xfd, 0x08, 0xdd, 0x55, 0xaa, 0xa5, 0x21, 0xc5, /* Byte value: 0x1d */ + 0xc8, 0xd9, 0x34, 0xc0, 0x9e, 0xce, 0x61, 0xa1, 0x1b, 0xac, 0xee, 0x10, 0x20, 0x05, 0x01, 0xd9, /* Byte value: 0x1e */ + 0x85, 0xb1, 0x9d, 0xe9, 0x96, 0xa8, 
0x79, 0x90, 0x43, 0x26, 0xdb, 0xd8, 0x73, 0xda, 0xec, 0xb1, /* Byte value: 0x1f */ + 0xa7, 0x9a, 0x1d, 0x57, 0x4c, 0xe3, 0xd4, 0x83, 0xf1, 0x15, 0xa5, 0x23, 0x46, 0x66, 0x1e, 0x9a, /* Byte value: 0x20 */ + 0x92, 0x96, 0x36, 0x15, 0x1e, 0x61, 0x22, 0x37, 0xd2, 0xdb, 0x38, 0xcd, 0x59, 0x6b, 0xc9, 0x96, /* Byte value: 0x21 */ + 0xde, 0x86, 0x52, 0xed, 0x67, 0x5a, 0xa9, 0x44, 0x17, 0xb1, 0x96, 0x66, 0xcc, 0xc6, 0x3e, 0x86, /* Byte value: 0x22 */ + 0x43, 0xfd, 0xe5, 0xcd, 0xe3, 0xc6, 0xe6, 0x2b, 0xf2, 0x83, 0xbb, 0xdf, 0x7d, 0x26, 0x61, 0xfd, /* Byte value: 0x23 */ + 0x38, 0x11, 0xf3, 0x16, 0x2a, 0xc5, 0x7e, 0x68, 0xed, 0x24, 0x7d, 0x5c, 0xb8, 0x62, 0x75, 0x11, /* Byte value: 0x24 */ + 0x27, 0x70, 0xff, 0xad, 0xac, 0xa1, 0x37, 0x9a, 0x1e, 0xd5, 0xcc, 0xd7, 0x6d, 0xc5, 0x80, 0x70, /* Byte value: 0x25 */ + 0xee, 0xd1, 0x06, 0xbc, 0x43, 0x32, 0xc5, 0x79, 0x98, 0x99, 0xb9, 0xa4, 0x8b, 0xb2, 0x9b, 0xd1, /* Byte value: 0x26 */ + 0xd0, 0x13, 0x1e, 0x09, 0x8c, 0xfa, 0x57, 0x5e, 0xbd, 0xb8, 0x18, 0x71, 0xe2, 0x3f, 0xb2, 0x13, /* Byte value: 0x27 */ + 0x73, 0xaa, 0xb1, 0x9c, 0xc7, 0xae, 0x8a, 0x16, 0x7d, 0xab, 0x94, 0x1d, 0x3a, 0x52, 0xc4, 0xaa, /* Byte value: 0x28 */ + 0xb2, 0x4d, 0xef, 0xca, 0x26, 0x90, 0x6a, 0xa0, 0x99, 0xeb, 0xb3, 0xf0, 0x23, 0x33, 0x0f, 0x4d, /* Byte value: 0x29 */ + 0xfb, 0x06, 0xf4, 0x21, 0x29, 0x41, 0x7b, 0x5a, 0xf0, 0x67, 0xaf, 0x77, 0xee, 0xe7, 0x8a, 0x06, /* Byte value: 0x2a */ + 0x10, 0x8c, 0x8d, 0x8e, 0x1c, 0x99, 0x24, 0xaa, 0xc4, 0x18, 0xa4, 0xff, 0x3d, 0x2c, 0x63, 0x8c, /* Byte value: 0x2b */ + 0xe6, 0x97, 0xa1, 0xfb, 0x4d, 0x9f, 0xd7, 0x2c, 0xfa, 0x95, 0xeb, 0x3a, 0x74, 0xa4, 0x4b, 0x97, /* Byte value: 0x2c */ + 0x0b, 0xce, 0x33, 0xf7, 0x9d, 0x4a, 0x64, 0x93, 0x06, 0xef, 0x3c, 0x3b, 0x76, 0x80, 0xfe, 0xce, /* Byte value: 0x2d */ + 0x98, 0x20, 0xc8, 0x33, 0xf2, 0x76, 0xd5, 0xe6, 0x49, 0xd4, 0x9f, 0x95, 0xe9, 0x99, 0x2d, 0x20, /* Byte value: 0x2e */ + 0xb6, 0x6e, 0x5d, 0x08, 0x21, 0x27, 0x63, 0x6b, 0xa8, 0xed, 0x9a, 0xbf, 0xbd, 0x38, 0x67, 0x6e, /* Byte value: 0x2f */ + 0x11, 0xf4, 0x40, 0x5f, 0x6d, 0xc4, 0xb7, 0xe8, 0x59, 0xf8, 0x3f, 0x9c, 0xfb, 0x5e, 0x79, 0xf4, /* Byte value: 0x30 */ + 0xbd, 0xa0, 0x6e, 0xff, 0xbc, 0x6d, 0x07, 0xf8, 0xae, 0x02, 0xa6, 0x84, 0xcb, 0xb8, 0x99, 0xa0, /* Byte value: 0x31 */ + 0x0f, 0xed, 0x81, 0x35, 0x9a, 0xfd, 0x6d, 0x58, 0x37, 0xe9, 0x15, 0x74, 0xe8, 0x8b, 0x96, 0xed, /* Byte value: 0x32 */ + 0xf3, 0x40, 0x53, 0x66, 0x27, 0xec, 0x69, 0x0f, 0x92, 0x6b, 0xfd, 0xe9, 0x11, 0xf1, 0x5a, 0x40, /* Byte value: 0x33 */ + 0x5d, 0xe4, 0x24, 0xa7, 0x14, 0xff, 0x3c, 0x9b, 0x9c, 0x92, 0x91, 0x37, 0x6e, 0xf3, 0x8e, 0xe4, /* Byte value: 0x34 */ + 0xbf, 0x50, 0x37, 0x9e, 0x5e, 0xd7, 0xe2, 0x7c, 0x57, 0x01, 0x53, 0x42, 0x84, 0x5c, 0xad, 0x50, /* Byte value: 0x35 */ + 0x47, 0xde, 0x57, 0x0f, 0xe4, 0x71, 0xef, 0xe0, 0xc3, 0x85, 0x92, 0x90, 0xe3, 0x2d, 0x09, 0xde, /* Byte value: 0x36 */ + 0x2e, 0x4e, 0x95, 0x3b, 0xd3, 0x51, 0xb6, 0x8d, 0xe1, 0x39, 0x05, 0x2a, 0x54, 0xa1, 0x4a, 0x4e, /* Byte value: 0x37 */ + 0xc1, 0xe7, 0x5e, 0x56, 0xe1, 0x3e, 0xe0, 0xb6, 0xe4, 0x40, 0x27, 0xed, 0x19, 0x61, 0xcb, 0xe7, /* Byte value: 0x38 */ + 0xd6, 0xc0, 0xf5, 0xaa, 0x69, 0xf7, 0xbb, 0x11, 0x75, 0xbd, 0xc4, 0xf8, 0x33, 0xd0, 0xee, 0xc0, /* Byte value: 0x39 */ + 0x21, 0xa3, 0x14, 0x0e, 0x49, 0xac, 0xdb, 0xd5, 0xd6, 0xd0, 0x10, 0x5e, 0xbc, 0x2a, 0xdc, 0xa3, /* Byte value: 0x3a */ + 0x23, 0x53, 0x4d, 0x6f, 0xab, 0x16, 0x3e, 0x51, 0x2f, 0xd3, 0xe5, 0x98, 0xf3, 0xce, 0xe8, 0x53, /* Byte value: 0x3b */ + 0xf4, 0xeb, 0x75, 0x14, 0xb3, 0xbc, 0x16, 0x02, 0xc7, 0x8e, 0xba, 0x03, 0x06, 
0x6c, 0x1c, 0xeb, /* Byte value: 0x3c */ + 0x6e, 0x3b, 0xe4, 0x46, 0xa3, 0x70, 0x26, 0x60, 0x77, 0x59, 0xd0, 0x50, 0xa0, 0x11, 0x05, 0x3b, /* Byte value: 0x3d */ + 0x8e, 0x7f, 0xae, 0x1e, 0x0b, 0xe2, 0x1d, 0x03, 0x45, 0xc9, 0xe7, 0xe3, 0x05, 0x5a, 0x12, 0x7f, /* Byte value: 0x3e */ + 0xc0, 0x9f, 0x93, 0x87, 0x90, 0x63, 0x73, 0xf4, 0x79, 0xa0, 0xbc, 0x8e, 0xdf, 0x13, 0xd1, 0x9f, /* Byte value: 0x3f */ + 0x90, 0x66, 0x6f, 0x74, 0xfc, 0xdb, 0xc7, 0xb3, 0x2b, 0xd8, 0xcd, 0x0b, 0x16, 0x8f, 0xfd, 0x66, /* Byte value: 0x40 */ + 0xf5, 0x93, 0xb8, 0xc5, 0xc2, 0xe1, 0x85, 0x40, 0x5a, 0x6e, 0x21, 0x60, 0xc0, 0x1e, 0x06, 0x93, /* Byte value: 0x41 */ + 0x5a, 0x4f, 0x02, 0xd5, 0x80, 0xaf, 0x43, 0x96, 0xc9, 0x77, 0xd6, 0xdd, 0x79, 0x6e, 0xc8, 0x4f, /* Byte value: 0x42 */ + 0x84, 0xc9, 0x50, 0x38, 0xe7, 0xf5, 0xea, 0xd2, 0xde, 0xc6, 0x40, 0xbb, 0xb5, 0xa8, 0xf6, 0xc9, /* Byte value: 0x43 */ + 0x19, 0xb2, 0xe7, 0x18, 0x63, 0x69, 0xa5, 0xbd, 0x3b, 0xf4, 0x6d, 0x02, 0x04, 0x48, 0xa9, 0xb2, /* Byte value: 0x44 */ + 0x6d, 0xb3, 0x70, 0xf6, 0x30, 0x97, 0x50, 0xa6, 0x13, 0xba, 0xbe, 0xf5, 0x29, 0x87, 0x2b, 0xb3, /* Byte value: 0x45 */ + 0x75, 0x79, 0x5a, 0x3f, 0x22, 0xa3, 0x66, 0x59, 0xb5, 0xae, 0x48, 0x94, 0xeb, 0xbd, 0x98, 0x79, /* Byte value: 0x46 */ + 0x5f, 0x14, 0x7d, 0xc6, 0xf6, 0x45, 0xd9, 0x1f, 0x65, 0x91, 0x64, 0xf1, 0x21, 0x17, 0xba, 0x14, /* Byte value: 0x47 */ + 0xe0, 0x44, 0x4a, 0x58, 0xa8, 0x92, 0x3b, 0x63, 0x32, 0x90, 0x37, 0xb3, 0xa5, 0x4b, 0x17, 0x44, /* Byte value: 0x48 */ + 0xb3, 0x35, 0x22, 0x1b, 0x57, 0xcd, 0xf9, 0xe2, 0x04, 0x0b, 0x28, 0x93, 0xe5, 0x41, 0x15, 0x35, /* Byte value: 0x49 */ + 0x52, 0x09, 0xa5, 0x92, 0x8e, 0x02, 0x51, 0xc3, 0xab, 0x7b, 0x84, 0x43, 0x86, 0x78, 0x18, 0x09, /* Byte value: 0x4a */ + 0xb4, 0x9e, 0x04, 0x69, 0xc3, 0x9d, 0x86, 0xef, 0x51, 0xee, 0x6f, 0x79, 0xf2, 0xdc, 0x53, 0x9e, /* Byte value: 0x4b */ + 0x32, 0xa7, 0x0d, 0x30, 0xc6, 0xd2, 0x89, 0xb9, 0x76, 0x2b, 0xda, 0x04, 0x08, 0x90, 0x91, 0xa7, /* Byte value: 0x4c */ + 0xb7, 0x16, 0x90, 0xd9, 0x50, 0x7a, 0xf0, 0x29, 0x35, 0x0d, 0x01, 0xdc, 0x7b, 0x4a, 0x7d, 0x16, /* Byte value: 0x4d */ + 0xc6, 0x4c, 0x78, 0x24, 0x75, 0x6e, 0x9f, 0xbb, 0xb1, 0xa5, 0x60, 0x07, 0x0e, 0xfc, 0x8d, 0x4c, /* Byte value: 0x4e */ + 0xa3, 0xb9, 0xaf, 0x95, 0x4b, 0x54, 0xdd, 0x48, 0xc0, 0x13, 0x8c, 0x6c, 0xd8, 0x6d, 0x76, 0xb9, /* Byte value: 0x4f */ + 0x50, 0xf9, 0xfc, 0xf3, 0x6c, 0xb8, 0xb4, 0x47, 0x52, 0x78, 0x71, 0x85, 0xc9, 0x9c, 0x2c, 0xf9, /* Byte value: 0x50 */ + 0xe8, 0x02, 0xed, 0x1f, 0xa6, 0x3f, 0x29, 0x36, 0x50, 0x9c, 0x65, 0x2d, 0x5a, 0x5d, 0xc7, 0x02, /* Byte value: 0x51 */ + 0x6a, 0x18, 0x56, 0x84, 0xa4, 0xc7, 0x2f, 0xab, 0x46, 0x5f, 0xf9, 0x1f, 0x3e, 0x1a, 0x6d, 0x18, /* Byte value: 0x52 */ + 0x9c, 0x03, 0x7a, 0xf1, 0xf5, 0xc1, 0xdc, 0x2d, 0x78, 0xd2, 0xb6, 0xda, 0x77, 0x92, 0x45, 0x03, /* Byte value: 0x53 */ + 0x57, 0x52, 0xda, 0x81, 0xf8, 0xe8, 0xcb, 0x4a, 0x07, 0x9d, 0x36, 0x6f, 0xde, 0x01, 0x6a, 0x52, /* Byte value: 0x54 */ + 0x4a, 0xc3, 0x8f, 0x5b, 0x9c, 0x36, 0x67, 0x3c, 0x0d, 0x6f, 0x72, 0x22, 0x44, 0x42, 0xab, 0xc3, /* Byte value: 0x55 */ + 0x6f, 0x43, 0x29, 0x97, 0xd2, 0x2d, 0xb5, 0x22, 0xea, 0xb9, 0x4b, 0x33, 0x66, 0x63, 0x1f, 0x43, /* Byte value: 0x56 */ + 0xbc, 0xd8, 0xa3, 0x2e, 0xcd, 0x30, 0x94, 0xba, 0x33, 0xe2, 0x3d, 0xe7, 0x0d, 0xca, 0x83, 0xd8, /* Byte value: 0x57 */ + 0x67, 0x05, 0x8e, 0xd0, 0xdc, 0x80, 0xa7, 0x77, 0x88, 0xb5, 0x19, 0xad, 0x99, 0x75, 0xcf, 0x05, /* Byte value: 0x58 */ + 0x51, 0x81, 0x31, 0x22, 0x1d, 0xe5, 0x27, 0x05, 0xcf, 0x98, 0xea, 0xe6, 0x0f, 0xee, 0x36, 0x81, /* Byte value: 0x59 */ + 
0x99, 0x58, 0x05, 0xe2, 0x83, 0x2b, 0x46, 0xa4, 0xd4, 0x34, 0x04, 0xf6, 0x2f, 0xeb, 0x37, 0x58, /* Byte value: 0x5a */ + 0xff, 0x25, 0x46, 0xe3, 0x2e, 0xf6, 0x72, 0x91, 0xc1, 0x61, 0x86, 0x38, 0x70, 0xec, 0xe2, 0x25, /* Byte value: 0x5b */ + 0x5b, 0x37, 0xcf, 0x04, 0xf1, 0xf2, 0xd0, 0xd4, 0x54, 0x97, 0x4d, 0xbe, 0xbf, 0x1c, 0xd2, 0x37, /* Byte value: 0x5c */ + 0xd1, 0x6b, 0xd3, 0xd8, 0xfd, 0xa7, 0xc4, 0x1c, 0x20, 0x58, 0x83, 0x12, 0x24, 0x4d, 0xa8, 0x6b, /* Byte value: 0x5d */ + 0x1b, 0x42, 0xbe, 0x79, 0x81, 0xd3, 0x40, 0x39, 0xc2, 0xf7, 0x98, 0xc4, 0x4b, 0xac, 0x9d, 0x42, /* Byte value: 0x5e */ + 0x76, 0xf1, 0xce, 0x8f, 0xb1, 0x44, 0x10, 0x9f, 0xd1, 0x4d, 0x26, 0x31, 0x62, 0x2b, 0xb6, 0xf1, /* Byte value: 0x5f */ + 0x64, 0x8d, 0x1a, 0x60, 0x4f, 0x67, 0xd1, 0xb1, 0xec, 0x56, 0x77, 0x08, 0x10, 0xe3, 0xe1, 0x8d, /* Byte value: 0x60 */ + 0x29, 0xe5, 0xb3, 0x49, 0x47, 0x01, 0xc9, 0x80, 0xb4, 0xdc, 0x42, 0xc0, 0x43, 0x3c, 0x0c, 0xe5, /* Byte value: 0x61 */ + 0x59, 0xc7, 0x96, 0x65, 0x13, 0x48, 0x35, 0x50, 0xad, 0x94, 0xb8, 0x78, 0xf0, 0xf8, 0xe6, 0xc7, /* Byte value: 0x62 */ + 0x68, 0xe8, 0x0f, 0xe5, 0x46, 0x7d, 0xca, 0x2f, 0xbf, 0x5c, 0x0c, 0xd9, 0x71, 0xfe, 0x59, 0xe8, /* Byte value: 0x63 */ + 0x39, 0x69, 0x3e, 0xc7, 0x5b, 0x98, 0xed, 0x2a, 0x70, 0xc4, 0xe6, 0x3f, 0x7e, 0x10, 0x6f, 0x69, /* Byte value: 0x64 */ + 0x94, 0x45, 0xdd, 0xb6, 0xfb, 0x6c, 0xce, 0x78, 0x1a, 0xde, 0xe4, 0x44, 0x88, 0x84, 0x95, 0x45, /* Byte value: 0x65 */ + 0x4b, 0xbb, 0x42, 0x8a, 0xed, 0x6b, 0xf4, 0x7e, 0x90, 0x8f, 0xe9, 0x41, 0x82, 0x30, 0xb1, 0xbb, /* Byte value: 0x66 */ + 0x07, 0xab, 0x26, 0x72, 0x94, 0x50, 0x7f, 0x0d, 0x55, 0xe5, 0x47, 0xea, 0x17, 0x9d, 0x46, 0xab, /* Byte value: 0x67 */ + 0x71, 0x5a, 0xe8, 0xfd, 0x25, 0x14, 0x6f, 0x92, 0x84, 0xa8, 0x61, 0xdb, 0x75, 0xb6, 0xf0, 0x5a, /* Byte value: 0x68 */ + 0x2b, 0x15, 0xea, 0x28, 0xa5, 0xbb, 0x2c, 0x04, 0x4d, 0xdf, 0xb7, 0x06, 0x0c, 0xd8, 0x38, 0x15, /* Byte value: 0x69 */ + 0x26, 0x08, 0x32, 0x7c, 0xdd, 0xfc, 0xa4, 0xd8, 0x83, 0x35, 0x57, 0xb4, 0xab, 0xb7, 0x9a, 0x08, /* Byte value: 0x6a */ + 0x20, 0xdb, 0xd9, 0xdf, 0x38, 0xf1, 0x48, 0x97, 0x4b, 0x30, 0x8b, 0x3d, 0x7a, 0x58, 0xc6, 0xdb, /* Byte value: 0x6b */ + 0xfa, 0x7e, 0x39, 0xf0, 0x58, 0x1c, 0xe8, 0x18, 0x6d, 0x87, 0x34, 0x14, 0x28, 0x95, 0x90, 0x7e, /* Byte value: 0x6c */ + 0x49, 0x4b, 0x1b, 0xeb, 0x0f, 0xd1, 0x11, 0xfa, 0x69, 0x8c, 0x1c, 0x87, 0xcd, 0xd4, 0x85, 0x4b, /* Byte value: 0x6d */ + 0x22, 0x2b, 0x80, 0xbe, 0xda, 0x4b, 0xad, 0x13, 0xb2, 0x33, 0x7e, 0xfb, 0x35, 0xbc, 0xf2, 0x2b, /* Byte value: 0x6e */ + 0xa6, 0xe2, 0xd0, 0x86, 0x3d, 0xbe, 0x47, 0xc1, 0x6c, 0xf5, 0x3e, 0x40, 0x80, 0x14, 0x04, 0xe2, /* Byte value: 0x6f */ + 0xcd, 0x82, 0x4b, 0xd3, 0xe8, 0x24, 0xfb, 0x28, 0xb7, 0x4a, 0x5c, 0x3c, 0x78, 0x7c, 0x73, 0x82, /* Byte value: 0x70 */ + 0x8c, 0x8f, 0xf7, 0x7f, 0xe9, 0x58, 0xf8, 0x87, 0xbc, 0xca, 0x12, 0x25, 0x4a, 0xbe, 0x26, 0x8f, /* Byte value: 0x71 */ + 0xd7, 0xb8, 0x38, 0x7b, 0x18, 0xaa, 0x28, 0x53, 0xe8, 0x5d, 0x5f, 0x9b, 0xf5, 0xa2, 0xf4, 0xb8, /* Byte value: 0x72 */ + 0x46, 0xa6, 0x9a, 0xde, 0x95, 0x2c, 0x7c, 0xa2, 0x5e, 0x65, 0x09, 0xf3, 0x25, 0x5f, 0x13, 0xa6, /* Byte value: 0x73 */ + 0x88, 0xac, 0x45, 0xbd, 0xee, 0xef, 0xf1, 0x4c, 0x8d, 0xcc, 0x3b, 0x6a, 0xd4, 0xb5, 0x4e, 0xac, /* Byte value: 0x74 */ + 0x44, 0x56, 0xc3, 0xbf, 0x77, 0x96, 0x99, 0x26, 0xa7, 0x66, 0xfc, 0x35, 0x6a, 0xbb, 0x27, 0x56, /* Byte value: 0x75 */ + 0xc2, 0x6f, 0xca, 0xe6, 0x72, 0xd9, 0x96, 0x70, 0x80, 0xa3, 0x49, 0x48, 0x90, 0xf7, 0xe5, 0x6f, /* Byte value: 0x76 */ + 0x93, 0xee, 0xfb, 0xc4, 0x6f, 0x3c, 0xb1, 
0x75, 0x4f, 0x3b, 0xa3, 0xae, 0x9f, 0x19, 0xd3, 0xee, /* Byte value: 0x77 */ + 0xf6, 0x1b, 0x2c, 0x75, 0x51, 0x06, 0xf3, 0x86, 0x3e, 0x8d, 0x4f, 0xc5, 0x49, 0x88, 0x28, 0x1b, /* Byte value: 0x78 */ + 0x09, 0x3e, 0x6a, 0x96, 0x7f, 0xf0, 0x81, 0x17, 0xff, 0xec, 0xc9, 0xfd, 0x39, 0x64, 0xca, 0x3e, /* Byte value: 0x79 */ + 0xa4, 0x12, 0x89, 0xe7, 0xdf, 0x04, 0xa2, 0x45, 0x95, 0xf6, 0xcb, 0x86, 0xcf, 0xf0, 0x30, 0x12, /* Byte value: 0x7a */ + 0xe9, 0x7a, 0x20, 0xce, 0xd7, 0x62, 0xba, 0x74, 0xcd, 0x7c, 0xfe, 0x4e, 0x9c, 0x2f, 0xdd, 0x7a, /* Byte value: 0x7b */ + 0x2a, 0x6d, 0x27, 0xf9, 0xd4, 0xe6, 0xbf, 0x46, 0xd0, 0x3f, 0x2c, 0x65, 0xca, 0xaa, 0x22, 0x6d, /* Byte value: 0x7c */ + 0x54, 0xda, 0x4e, 0x31, 0x6b, 0x0f, 0xbd, 0x8c, 0x63, 0x7e, 0x58, 0xca, 0x57, 0x97, 0x44, 0xda, /* Byte value: 0x7d */ + 0xa2, 0xc1, 0x62, 0x44, 0x3a, 0x09, 0x4e, 0x0a, 0x5d, 0xf3, 0x17, 0x0f, 0x1e, 0x1f, 0x6c, 0xc1, /* Byte value: 0x7e */ + 0xf7, 0x63, 0xe1, 0xa4, 0x20, 0x5b, 0x60, 0xc4, 0xa3, 0x6d, 0xd4, 0xa6, 0x8f, 0xfa, 0x32, 0x63, /* Byte value: 0x7f */ + 0xcf, 0x72, 0x12, 0xb2, 0x0a, 0x9e, 0x1e, 0xac, 0x4e, 0x49, 0xa9, 0xfa, 0x37, 0x98, 0x47, 0x72, /* Byte value: 0x80 */ + 0xaf, 0xdc, 0xba, 0x10, 0x42, 0x4e, 0xc6, 0xd6, 0x93, 0x19, 0xf7, 0xbd, 0xb9, 0x70, 0xce, 0xdc, /* Byte value: 0x81 */ + 0xfd, 0xd5, 0x1f, 0x82, 0xcc, 0x4c, 0x97, 0x15, 0x38, 0x62, 0x73, 0xfe, 0x3f, 0x08, 0xd6, 0xd5, /* Byte value: 0x82 */ + 0xce, 0x0a, 0xdf, 0x63, 0x7b, 0xc3, 0x8d, 0xee, 0xd3, 0xa9, 0x32, 0x99, 0xf1, 0xea, 0x5d, 0x0a, /* Byte value: 0x83 */ + 0x13, 0x04, 0x19, 0x3e, 0x8f, 0x7e, 0x52, 0x6c, 0xa0, 0xfb, 0xca, 0x5a, 0xb4, 0xba, 0x4d, 0x04, /* Byte value: 0x84 */ + 0xe3, 0xcc, 0xde, 0xe8, 0x3b, 0x75, 0x4d, 0xa5, 0x56, 0x73, 0x59, 0x16, 0x2c, 0xdd, 0x39, 0xcc, /* Byte value: 0x85 */ + 0x8b, 0x24, 0xd1, 0x0d, 0x7d, 0x08, 0x87, 0x8a, 0xe9, 0x2f, 0x55, 0xcf, 0x5d, 0x23, 0x60, 0x24, /* Byte value: 0x86 */ + 0x7e, 0xb7, 0x69, 0xc8, 0xbf, 0xe9, 0x02, 0xca, 0xb3, 0x41, 0x74, 0xaf, 0x9d, 0x3d, 0x66, 0xb7, /* Byte value: 0x87 */ + 0xed, 0x59, 0x92, 0x0c, 0xd0, 0xd5, 0xb3, 0xbf, 0xfc, 0x7a, 0xd7, 0x01, 0x02, 0x24, 0xb5, 0x59, /* Byte value: 0x88 */ + 0xe5, 0x1f, 0x35, 0x4b, 0xde, 0x78, 0xa1, 0xea, 0x9e, 0x76, 0x85, 0x9f, 0xfd, 0x32, 0x65, 0x1f, /* Byte value: 0x89 */ + 0xb8, 0xfb, 0x11, 0xec, 0xca, 0x87, 0x9d, 0x71, 0x02, 0xe4, 0x14, 0xa8, 0x93, 0xc1, 0xeb, 0xfb, /* Byte value: 0x8a */ + 0x7d, 0x3f, 0xfd, 0x78, 0x2c, 0x0e, 0x74, 0x0c, 0xd7, 0xa2, 0x1a, 0x0a, 0x14, 0xab, 0x48, 0x3f, /* Byte value: 0x8b */ + 0x96, 0xb5, 0x84, 0xd7, 0x19, 0xd6, 0x2b, 0xfc, 0xe3, 0xdd, 0x11, 0x82, 0xc7, 0x60, 0xa1, 0xb5, /* Byte value: 0x8c */ + 0x05, 0x5b, 0x7f, 0x13, 0x76, 0xea, 0x9a, 0x89, 0xac, 0xe6, 0xb2, 0x2c, 0x58, 0x79, 0x72, 0x5b, /* Byte value: 0x8d */ + 0x18, 0xca, 0x2a, 0xc9, 0x12, 0x34, 0x36, 0xff, 0xa6, 0x14, 0xf6, 0x61, 0xc2, 0x3a, 0xb3, 0xca, /* Byte value: 0x8e */ + 0x87, 0x41, 0xc4, 0x88, 0x74, 0x12, 0x9c, 0x14, 0xba, 0x25, 0x2e, 0x1e, 0x3c, 0x3e, 0xd8, 0x41, /* Byte value: 0x8f */ + 0x6b, 0x60, 0x9b, 0x55, 0xd5, 0x9a, 0xbc, 0xe9, 0xdb, 0xbf, 0x62, 0x7c, 0xf8, 0x68, 0x77, 0x60, /* Byte value: 0x90 */ + 0xd4, 0x30, 0xac, 0xcb, 0x8b, 0x4d, 0x5e, 0x95, 0x8c, 0xbe, 0x31, 0x3e, 0x7c, 0x34, 0xda, 0x30, /* Byte value: 0x91 */ + 0x04, 0x23, 0xb2, 0xc2, 0x07, 0xb7, 0x09, 0xcb, 0x31, 0x06, 0x29, 0x4f, 0x9e, 0x0b, 0x68, 0x23, /* Byte value: 0x92 */ + 0xbb, 0x73, 0x85, 0x5c, 0x59, 0x60, 0xeb, 0xb7, 0x66, 0x07, 0x7a, 0x0d, 0x1a, 0x57, 0xc5, 0x73, /* Byte value: 0x93 */ + 0x37, 0xfc, 0x72, 0x23, 0xb0, 0x38, 0x13, 0x30, 0xda, 0xcd, 0x68, 0x28, 0x50, 0xe9, 
0xe3, 0xfc, /* Byte value: 0x94 */ + 0xd3, 0x9b, 0x8a, 0xb9, 0x1f, 0x1d, 0x21, 0x98, 0xd9, 0x5b, 0x76, 0xd4, 0x6b, 0xa9, 0x9c, 0x9b, /* Byte value: 0x95 */ + 0x9b, 0xa8, 0x5c, 0x83, 0x61, 0x91, 0xa3, 0x20, 0x2d, 0x37, 0xf1, 0x30, 0x60, 0x0f, 0x03, 0xa8, /* Byte value: 0x96 */ + 0x55, 0xa2, 0x83, 0xe0, 0x1a, 0x52, 0x2e, 0xce, 0xfe, 0x9e, 0xc3, 0xa9, 0x91, 0xe5, 0x5e, 0xa2, /* Byte value: 0x97 */ + 0x3a, 0xe1, 0xaa, 0x77, 0xc8, 0x7f, 0x9b, 0xec, 0x14, 0x27, 0x88, 0x9a, 0xf7, 0x86, 0x41, 0xe1, /* Byte value: 0x98 */ + 0xc4, 0xbc, 0x21, 0x45, 0x97, 0xd4, 0x7a, 0x3f, 0x48, 0xa6, 0x95, 0xc1, 0x41, 0x18, 0xb9, 0xbc, /* Byte value: 0x99 */ + 0x9e, 0xf3, 0x23, 0x90, 0x17, 0x7b, 0x39, 0xa9, 0x81, 0xd1, 0x43, 0x1c, 0x38, 0x76, 0x71, 0xf3, /* Byte value: 0x9a */ + 0xa1, 0x49, 0xf6, 0xf4, 0xa9, 0xee, 0x38, 0xcc, 0x39, 0x10, 0x79, 0xaa, 0x97, 0x89, 0x42, 0x49, /* Byte value: 0x9b */ + 0x81, 0x92, 0x2f, 0x2b, 0x91, 0x1f, 0x70, 0x5b, 0x72, 0x20, 0xf2, 0x97, 0xed, 0xd1, 0x84, 0x92, /* Byte value: 0x9c */ + 0x89, 0xd4, 0x88, 0x6c, 0x9f, 0xb2, 0x62, 0x0e, 0x10, 0x2c, 0xa0, 0x09, 0x12, 0xc7, 0x54, 0xd4, /* Byte value: 0x9d */ + 0x14, 0xaf, 0x3f, 0x4c, 0x1b, 0x2e, 0x2d, 0x61, 0xf5, 0x1e, 0x8d, 0xb0, 0xa3, 0x27, 0x0b, 0xaf, /* Byte value: 0x9e */ + 0xb9, 0x83, 0xdc, 0x3d, 0xbb, 0xda, 0x0e, 0x33, 0x9f, 0x04, 0x8f, 0xcb, 0x55, 0xb3, 0xf1, 0x83, /* Byte value: 0x9f */ + 0xac, 0x54, 0x2e, 0xa0, 0xd1, 0xa9, 0xb0, 0x10, 0xf7, 0xfa, 0x99, 0x18, 0x30, 0xe6, 0xe0, 0x54, /* Byte value: 0xa0 */ + 0x8f, 0x07, 0x63, 0xcf, 0x7a, 0xbf, 0x8e, 0x41, 0xd8, 0x29, 0x7c, 0x80, 0xc3, 0x28, 0x08, 0x07, /* Byte value: 0xa1 */ + 0xf0, 0xc8, 0xc7, 0xd6, 0xb4, 0x0b, 0x1f, 0xc9, 0xf6, 0x88, 0x93, 0x4c, 0x98, 0x67, 0x74, 0xc8, /* Byte value: 0xa2 */ + 0xfe, 0x5d, 0x8b, 0x32, 0x5f, 0xab, 0xe1, 0xd3, 0x5c, 0x81, 0x1d, 0x5b, 0xb6, 0x9e, 0xf8, 0x5d, /* Byte value: 0xa3 */ + 0x01, 0x78, 0xcd, 0xd1, 0x71, 0x5d, 0x93, 0x42, 0x9d, 0xe0, 0x9b, 0x63, 0xc6, 0x72, 0x1a, 0x78, /* Byte value: 0xa4 */ + 0x08, 0x46, 0xa7, 0x47, 0x0e, 0xad, 0x12, 0x55, 0x62, 0x0c, 0x52, 0x9e, 0xff, 0x16, 0xd0, 0x46, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x0d, 0x1d, 0xd8, 0x54, 0x78, 0x47, 0x88, 0xdc, 0xce, 0xea, 0xe0, 0xb2, 0xa7, 0x6f, 0xa2, 0x1d, /* Byte value: 0xa7 */ + 0x30, 0x57, 0x54, 0x51, 0x24, 0x68, 0x6c, 0x3d, 0x8f, 0x28, 0x2f, 0xc2, 0x47, 0x74, 0xa5, 0x57, /* Byte value: 0xa8 */ + 0x2d, 0xc6, 0x01, 0x8b, 0x40, 0xb6, 0xc0, 0x4b, 0x85, 0xda, 0x6b, 0x8f, 0xdd, 0x37, 0x64, 0xc6, /* Byte value: 0xa9 */ + 0x1f, 0x61, 0x0c, 0xbb, 0x86, 0x64, 0x49, 0xf2, 0xf3, 0xf1, 0xb1, 0x8b, 0xd5, 0xa7, 0xf5, 0x61, /* Byte value: 0xaa */ + 0x35, 0x0c, 0x2b, 0x42, 0x52, 0x82, 0xf6, 0xb4, 0x23, 0xce, 0x9d, 0xee, 0x1f, 0x0d, 0xd7, 0x0c, /* Byte value: 0xab */ + 0xd9, 0x2d, 0x74, 0x9f, 0xf3, 0x0a, 0xd6, 0x49, 0x42, 0x54, 0xd1, 0x8c, 0xdb, 0x5b, 0x78, 0x2d, /* Byte value: 0xac */ + 0xdb, 0xdd, 0x2d, 0xfe, 0x11, 0xb0, 0x33, 0xcd, 0xbb, 0x57, 0x24, 0x4a, 0x94, 0xbf, 0x4c, 0xdd, /* Byte value: 0xad */ + 0xba, 0x0b, 0x48, 0x8d, 0x28, 0x3d, 0x78, 0xf5, 0xfb, 0xe7, 0xe1, 0x6e, 0xdc, 0x25, 0xdf, 0x0b, /* Byte value: 0xae */ + 0x3d, 0x4a, 0x8c, 0x05, 0x5c, 0x2f, 0xe4, 0xe1, 0x41, 0xc2, 0xcf, 0x70, 0xe0, 0x1b, 0x07, 0x4a, /* Byte value: 0xaf */ + 0x8d, 0xf7, 0x3a, 0xae, 0x98, 0x05, 0x6b, 0xc5, 0x21, 0x2a, 0x89, 0x46, 0x8c, 0xcc, 0x3c, 0xf7, /* Byte value: 0xb0 */ + 0x36, 0x84, 0xbf, 0xf2, 0xc1, 0x65, 0x80, 0x72, 0x47, 0x2d, 0xf3, 0x4b, 0x96, 0x9b, 0xf9, 0x84, /* Byte value: 0xb1 */ + 0xa8, 
0x77, 0x9c, 0x62, 0xd6, 0x1e, 0xb9, 0xdb, 0xc6, 0xfc, 0xb0, 0x57, 0xae, 0xed, 0x88, 0x77, /* Byte value: 0xb2 */ + 0xc3, 0x17, 0x07, 0x37, 0x03, 0x84, 0x05, 0x32, 0x1d, 0x43, 0xd2, 0x2b, 0x56, 0x85, 0xff, 0x17, /* Byte value: 0xb3 */ + 0x4d, 0x68, 0xa9, 0x29, 0x08, 0x66, 0x18, 0x31, 0x58, 0x8a, 0x35, 0xc8, 0x53, 0xdf, 0xed, 0x68, /* Byte value: 0xb4 */ + 0xa5, 0x6a, 0x44, 0x36, 0xae, 0x59, 0x31, 0x07, 0x08, 0x16, 0x50, 0xe5, 0x09, 0x82, 0x2a, 0x6a, /* Byte value: 0xb5 */ + 0x1d, 0x91, 0x55, 0xda, 0x64, 0xde, 0xac, 0x76, 0x0a, 0xf2, 0x44, 0x4d, 0x9a, 0x43, 0xc1, 0x91, /* Byte value: 0xb6 */ + 0x3c, 0x32, 0x41, 0xd4, 0x2d, 0x72, 0x77, 0xa3, 0xdc, 0x22, 0x54, 0x13, 0x26, 0x69, 0x1d, 0x32, /* Byte value: 0xb7 */ + 0xef, 0xa9, 0xcb, 0x6d, 0x32, 0x6f, 0x56, 0x3b, 0x05, 0x79, 0x22, 0xc7, 0x4d, 0xc0, 0x81, 0xa9, /* Byte value: 0xb8 */ + 0x45, 0x2e, 0x0e, 0x6e, 0x06, 0xcb, 0x0a, 0x64, 0x3a, 0x86, 0x67, 0x56, 0xac, 0xc9, 0x3d, 0x2e, /* Byte value: 0xb9 */ + 0xaa, 0x87, 0xc5, 0x03, 0x34, 0xa4, 0x5c, 0x5f, 0x3f, 0xff, 0x45, 0x91, 0xe1, 0x09, 0xbc, 0x87, /* Byte value: 0xba */ + 0x7c, 0x47, 0x30, 0xa9, 0x5d, 0x53, 0xe7, 0x4e, 0x4a, 0x42, 0x81, 0x69, 0xd2, 0xd9, 0x52, 0x47, /* Byte value: 0xbb */ + 0x9d, 0x7b, 0xb7, 0x20, 0x84, 0x9c, 0x4f, 0x6f, 0xe5, 0x32, 0x2d, 0xb9, 0xb1, 0xe0, 0x5f, 0x7b, /* Byte value: 0xbc */ + 0x2c, 0xbe, 0xcc, 0x5a, 0x31, 0xeb, 0x53, 0x09, 0x18, 0x3a, 0xf0, 0xec, 0x1b, 0x45, 0x7e, 0xbe, /* Byte value: 0xbd */ + 0x16, 0x5f, 0x66, 0x2d, 0xf9, 0x94, 0xc8, 0xe5, 0x0c, 0x1d, 0x78, 0x76, 0xec, 0xc3, 0x3f, 0x5f, /* Byte value: 0xbe */ + 0x03, 0x88, 0x94, 0xb0, 0x93, 0xe7, 0x76, 0xc6, 0x64, 0xe3, 0x6e, 0xa5, 0x89, 0x96, 0x2e, 0x88, /* Byte value: 0xbf */ + 0x5e, 0x6c, 0xb0, 0x17, 0x87, 0x18, 0x4a, 0x5d, 0xf8, 0x71, 0xff, 0x92, 0xe7, 0x65, 0xa0, 0x6c, /* Byte value: 0xc0 */ + 0x4e, 0xe0, 0x3d, 0x99, 0x9b, 0x81, 0x6e, 0xf7, 0x3c, 0x69, 0x5b, 0x6d, 0xda, 0x49, 0xc3, 0xe0, /* Byte value: 0xc1 */ + 0xcb, 0x51, 0xa0, 0x70, 0x0d, 0x29, 0x17, 0x67, 0x7f, 0x4f, 0x80, 0xb5, 0xa9, 0x93, 0x2f, 0x51, /* Byte value: 0xc2 */ + 0x3e, 0xc2, 0x18, 0xb5, 0xcf, 0xc8, 0x92, 0x27, 0x25, 0x21, 0xa1, 0xd5, 0x69, 0x8d, 0x29, 0xc2, /* Byte value: 0xc3 */ + 0x15, 0xd7, 0xf2, 0x9d, 0x6a, 0x73, 0xbe, 0x23, 0x68, 0xfe, 0x16, 0xd3, 0x65, 0x55, 0x11, 0xd7, /* Byte value: 0xc4 */ + 0xfc, 0xad, 0xd2, 0x53, 0xbd, 0x11, 0x04, 0x57, 0xa5, 0x82, 0xe8, 0x9d, 0xf9, 0x7a, 0xcc, 0xad, /* Byte value: 0xc5 */ + 0xae, 0xa4, 0x77, 0xc1, 0x33, 0x13, 0x55, 0x94, 0x0e, 0xf9, 0x6c, 0xde, 0x7f, 0x02, 0xd4, 0xa4, /* Byte value: 0xc6 */ + 0x63, 0x26, 0x3c, 0x12, 0xdb, 0x37, 0xae, 0xbc, 0xb9, 0xb3, 0x30, 0xe2, 0x07, 0x7e, 0xa7, 0x26, /* Byte value: 0xc7 */ + 0x0e, 0x95, 0x4c, 0xe4, 0xeb, 0xa0, 0xfe, 0x1a, 0xaa, 0x09, 0x8e, 0x17, 0x2e, 0xf9, 0x8c, 0x95, /* Byte value: 0xc8 */ + 0x7f, 0xcf, 0xa4, 0x19, 0xce, 0xb4, 0x91, 0x88, 0x2e, 0xa1, 0xef, 0xcc, 0x5b, 0x4f, 0x7c, 0xcf, /* Byte value: 0xc9 */ + 0x31, 0x2f, 0x99, 0x80, 0x55, 0x35, 0xff, 0x7f, 0x12, 0xc8, 0xb4, 0xa1, 0x81, 0x06, 0xbf, 0x2f, /* Byte value: 0xca */ + 0xdf, 0xfe, 0x9f, 0x3c, 0x16, 0x07, 0x3a, 0x06, 0x8a, 0x51, 0x0d, 0x05, 0x0a, 0xb4, 0x24, 0xfe, /* Byte value: 0xcb */ + 0xf1, 0xb0, 0x0a, 0x07, 0xc5, 0x56, 0x8c, 0x8b, 0x6b, 0x68, 0x08, 0x2f, 0x5e, 0x15, 0x6e, 0xb0, /* Byte value: 0xcc */ + 0x60, 0xae, 0xa8, 0xa2, 0x48, 0xd0, 0xd8, 0x7a, 0xdd, 0x50, 0x5e, 0x47, 0x8e, 0xe8, 0x89, 0xae, /* Byte value: 0xcd */ + 0x79, 0x1c, 0x4f, 0xba, 0x2b, 0xb9, 0x7d, 0xc7, 0xe6, 0xa4, 0x33, 0x45, 0x8a, 0xa0, 0x20, 0x1c, /* Byte value: 0xce */ + 0x9a, 0xd0, 0x91, 0x52, 0x10, 0xcc, 0x30, 0x62, 
0xb0, 0xd7, 0x6a, 0x53, 0xa6, 0x7d, 0x19, 0xd0, /* Byte value: 0xcf */ + 0xf8, 0x8e, 0x60, 0x91, 0xba, 0xa6, 0x0d, 0x9c, 0x94, 0x84, 0xc1, 0xd2, 0x67, 0x71, 0xa4, 0x8e, /* Byte value: 0xd0 */ + 0x4c, 0x10, 0x64, 0xf8, 0x79, 0x3b, 0x8b, 0x73, 0xc5, 0x6a, 0xae, 0xab, 0x95, 0xad, 0xf7, 0x10, /* Byte value: 0xd1 */ + 0xb5, 0xe6, 0xc9, 0xb8, 0xb2, 0xc0, 0x15, 0xad, 0xcc, 0x0e, 0xf4, 0x1a, 0x34, 0xae, 0x49, 0xe6, /* Byte value: 0xd2 */ + 0x41, 0x0d, 0xbc, 0xac, 0x01, 0x7c, 0x03, 0xaf, 0x0b, 0x80, 0x4e, 0x19, 0x32, 0xc2, 0x55, 0x0d, /* Byte value: 0xd3 */ + 0xc7, 0x34, 0xb5, 0xf5, 0x04, 0x33, 0x0c, 0xf9, 0x2c, 0x45, 0xfb, 0x64, 0xc8, 0x8e, 0x97, 0x34, /* Byte value: 0xd4 */ + 0x7a, 0x94, 0xdb, 0x0a, 0xb8, 0x5e, 0x0b, 0x01, 0x82, 0x47, 0x5d, 0xe0, 0x03, 0x36, 0x0e, 0x94, /* Byte value: 0xd5 */ + 0x7b, 0xec, 0x16, 0xdb, 0xc9, 0x03, 0x98, 0x43, 0x1f, 0xa7, 0xc6, 0x83, 0xc5, 0x44, 0x14, 0xec, /* Byte value: 0xd6 */ + 0x0a, 0xb6, 0xfe, 0x26, 0xec, 0x17, 0xf7, 0xd1, 0x9b, 0x0f, 0xa7, 0x58, 0xb0, 0xf2, 0xe4, 0xb6, /* Byte value: 0xd7 */ + 0xdc, 0x76, 0x0b, 0x8c, 0x85, 0xe0, 0x4c, 0xc0, 0xee, 0xb2, 0x63, 0xa0, 0x83, 0x22, 0x0a, 0x76, /* Byte value: 0xd8 */ + 0xdd, 0x0e, 0xc6, 0x5d, 0xf4, 0xbd, 0xdf, 0x82, 0x73, 0x52, 0xf8, 0xc3, 0x45, 0x50, 0x10, 0x0e, /* Byte value: 0xd9 */ + 0xea, 0xf2, 0xb4, 0x7e, 0x44, 0x85, 0xcc, 0xb2, 0xa9, 0x9f, 0x90, 0xeb, 0x15, 0xb9, 0xf3, 0xf2, /* Byte value: 0xda */ + 0x78, 0x64, 0x82, 0x6b, 0x5a, 0xe4, 0xee, 0x85, 0x7b, 0x44, 0xa8, 0x26, 0x4c, 0xd2, 0x3a, 0x64, /* Byte value: 0xdb */ + 0xbe, 0x28, 0xfa, 0x4f, 0x2f, 0x8a, 0x71, 0x3e, 0xca, 0xe1, 0xc8, 0x21, 0x42, 0x2e, 0xb7, 0x28, /* Byte value: 0xdc */ + 0x86, 0x39, 0x09, 0x59, 0x05, 0x4f, 0x0f, 0x56, 0x27, 0xc5, 0xb5, 0x7d, 0xfa, 0x4c, 0xc2, 0x39, /* Byte value: 0xdd */ + 0xe1, 0x3c, 0x87, 0x89, 0xd9, 0xcf, 0xa8, 0x21, 0xaf, 0x70, 0xac, 0xd0, 0x63, 0x39, 0x0d, 0x3c, /* Byte value: 0xde */ + 0x97, 0xcd, 0x49, 0x06, 0x68, 0x8b, 0xb8, 0xbe, 0x7e, 0x3d, 0x8a, 0xe1, 0x01, 0x12, 0xbb, 0xcd, /* Byte value: 0xdf */ + 0x66, 0x7d, 0x43, 0x01, 0xad, 0xdd, 0x34, 0x35, 0x15, 0x55, 0x82, 0xce, 0x5f, 0x07, 0xd5, 0x7d, /* Byte value: 0xe0 */ + 0xeb, 0x8a, 0x79, 0xaf, 0x35, 0xd8, 0x5f, 0xf0, 0x34, 0x7f, 0x0b, 0x88, 0xd3, 0xcb, 0xe9, 0x8a, /* Byte value: 0xe1 */ + 0xf2, 0x38, 0x9e, 0xb7, 0x56, 0xb1, 0xfa, 0x4d, 0x0f, 0x8b, 0x66, 0x8a, 0xd7, 0x83, 0x40, 0x38, /* Byte value: 0xe2 */ + 0xb0, 0xbd, 0xb6, 0xab, 0xc4, 0x2a, 0x8f, 0x24, 0x60, 0xe8, 0x46, 0x36, 0x6c, 0xd7, 0x3b, 0xbd, /* Byte value: 0xe3 */ + 0xe4, 0x67, 0xf8, 0x9a, 0xaf, 0x25, 0x32, 0xa8, 0x03, 0x96, 0x1e, 0xfc, 0x3b, 0x40, 0x7f, 0x67, /* Byte value: 0xe4 */ + 0x1c, 0xe9, 0x98, 0x0b, 0x15, 0x83, 0x3f, 0x34, 0x97, 0x12, 0xdf, 0x2e, 0x5c, 0x31, 0xdb, 0xe9, /* Byte value: 0xe5 */ + 0xca, 0x29, 0x6d, 0xa1, 0x7c, 0x74, 0x84, 0x25, 0xe2, 0xaf, 0x1b, 0xd6, 0x6f, 0xe1, 0x35, 0x29, /* Byte value: 0xe6 */ + 0xd2, 0xe3, 0x47, 0x68, 0x6e, 0x40, 0xb2, 0xda, 0x44, 0xbb, 0xed, 0xb7, 0xad, 0xdb, 0x86, 0xe3, /* Byte value: 0xe7 */ + 0xa0, 0x31, 0x3b, 0x25, 0xd8, 0xb3, 0xab, 0x8e, 0xa4, 0xf0, 0xe2, 0xc9, 0x51, 0xfb, 0x58, 0x31, /* Byte value: 0xe8 */ + 0x1e, 0x19, 0xc1, 0x6a, 0xf7, 0x39, 0xda, 0xb0, 0x6e, 0x11, 0x2a, 0xe8, 0x13, 0xd5, 0xef, 0x19, /* Byte value: 0xe9 */ + 0x9f, 0x8b, 0xee, 0x41, 0x66, 0x26, 0xaa, 0xeb, 0x1c, 0x31, 0xd8, 0x7f, 0xfe, 0x04, 0x6b, 0x8b, /* Byte value: 0xea */ + 0x56, 0x2a, 0x17, 0x50, 0x89, 0xb5, 0x58, 0x08, 0x9a, 0x7d, 0xad, 0x0c, 0x18, 0x73, 0x70, 0x2a, /* Byte value: 0xeb */ + 0xab, 0xff, 0x08, 0xd2, 0x45, 0xf9, 0xcf, 0x1d, 0xa2, 0x1f, 0xde, 0xf2, 0x27, 0x7b, 0xa6, 
0xff, /* Byte value: 0xec */ + 0xc9, 0xa1, 0xf9, 0x11, 0xef, 0x93, 0xf2, 0xe3, 0x86, 0x4c, 0x75, 0x73, 0xe6, 0x77, 0x1b, 0xa1, /* Byte value: 0xed */ + 0x69, 0x90, 0xc2, 0x34, 0x37, 0x20, 0x59, 0x6d, 0x22, 0xbc, 0x97, 0xba, 0xb7, 0x8c, 0x43, 0x90, /* Byte value: 0xee */ + 0x91, 0x1e, 0xa2, 0xa5, 0x8d, 0x86, 0x54, 0xf1, 0xb6, 0x38, 0x56, 0x68, 0xd0, 0xfd, 0xe7, 0x1e, /* Byte value: 0xef */ + 0x72, 0xd2, 0x7c, 0x4d, 0xb6, 0xf3, 0x19, 0x54, 0xe0, 0x4b, 0x0f, 0x7e, 0xfc, 0x20, 0xde, 0xd2, /* Byte value: 0xf0 */ + 0x53, 0x71, 0x68, 0x43, 0xff, 0x5f, 0xc2, 0x81, 0x36, 0x9b, 0x1f, 0x20, 0x40, 0x0a, 0x02, 0x71, /* Byte value: 0xf1 */ + 0x17, 0x27, 0xab, 0xfc, 0x88, 0xc9, 0x5b, 0xa7, 0x91, 0xfd, 0xe3, 0x15, 0x2a, 0xb1, 0x25, 0x27, /* Byte value: 0xf2 */ + 0x4f, 0x98, 0xf0, 0x48, 0xea, 0xdc, 0xfd, 0xb5, 0xa1, 0x89, 0xc0, 0x0e, 0x1c, 0x3b, 0xd9, 0x98, /* Byte value: 0xf3 */ + 0x58, 0xbf, 0x5b, 0xb4, 0x62, 0x15, 0xa6, 0x12, 0x30, 0x74, 0x23, 0x1b, 0x36, 0x8a, 0xfc, 0xbf, /* Byte value: 0xf4 */ + 0x02, 0xf0, 0x59, 0x61, 0xe2, 0xba, 0xe5, 0x84, 0xf9, 0x03, 0xf5, 0xc6, 0x4f, 0xe4, 0x34, 0xf0, /* Byte value: 0xf5 */ + 0x65, 0xf5, 0xd7, 0xb1, 0x3e, 0x3a, 0x42, 0xf3, 0x71, 0xb6, 0xec, 0x6b, 0xd6, 0x91, 0xfb, 0xf5, /* Byte value: 0xf6 */ + 0x70, 0x22, 0x25, 0x2c, 0x54, 0x49, 0xfc, 0xd0, 0x19, 0x48, 0xfa, 0xb8, 0xb3, 0xc4, 0xea, 0x22, /* Byte value: 0xf7 */ + 0x3f, 0xba, 0xd5, 0x64, 0xbe, 0x95, 0x01, 0x65, 0xb8, 0xc1, 0x3a, 0xb6, 0xaf, 0xff, 0x33, 0xba, /* Byte value: 0xf8 */ + 0x33, 0xdf, 0xc0, 0xe1, 0xb7, 0x8f, 0x1a, 0xfb, 0xeb, 0xcb, 0x41, 0x67, 0xce, 0xe2, 0x8b, 0xdf, /* Byte value: 0xf9 */ + 0x28, 0x9d, 0x7e, 0x98, 0x36, 0x5c, 0x5a, 0xc2, 0x29, 0x3c, 0xd9, 0xa3, 0x85, 0x4e, 0x16, 0x9d, /* Byte value: 0xfa */ + 0xc5, 0xc4, 0xec, 0x94, 0xe6, 0x89, 0xe9, 0x7d, 0xd5, 0x46, 0x0e, 0xa2, 0x87, 0x6a, 0xa3, 0xc4, /* Byte value: 0xfb */ + 0xcc, 0xfa, 0x86, 0x02, 0x99, 0x79, 0x68, 0x6a, 0x2a, 0xaa, 0xc7, 0x5f, 0xbe, 0x0e, 0x69, 0xfa, /* Byte value: 0xfc */ + 0xad, 0x2c, 0xe3, 0x71, 0xa0, 0xf4, 0x23, 0x52, 0x6a, 0x1a, 0x02, 0x7b, 0xf6, 0x94, 0xfa, 0x2c, /* Byte value: 0xfd */ + 0x34, 0x74, 0xe6, 0x93, 0x23, 0xdf, 0x65, 0xf6, 0xbe, 0x2e, 0x06, 0x8d, 0xd9, 0x7f, 0xcd, 0x74, /* Byte value: 0xfe */ + 0x95, 0x3d, 0x10, 0x67, 0x8a, 0x31, 0x5d, 0x3a, 0x87, 0x3e, 0x7f, 0x27, 0x4e, 0xf6, 0x8f, 0x3d, /* Byte value: 0xff */ + 0x83, 0x62, 0x76, 0x4a, 0x73, 0xa5, 0x95, 0xdf, 0x8b, 0x23, 0x07, 0x51, 0xa2, 0x35, 0xb0, 0x62, /* Byte value: 0x00 */ + + /* Matrix row: 2 */ + 0x37, 0xf9, 0x69, 0x1b, 0x71, 0x18, 0x61, 0xcd, 0x69, 0x19, 0x54, 0xd6, 0xaf, 0xcb, 0x37, 0x52, /* Byte value: 0x01 */ + 0x93, 0x8b, 0x03, 0xf8, 0xd8, 0xdb, 0x4a, 0x3b, 0x03, 0x7b, 0x5a, 0xc3, 0x52, 0x7d, 0x93, 0x1c, /* Byte value: 0x02 */ + 0xdb, 0x45, 0x63, 0x4e, 0xe4, 0x87, 0x1e, 0x57, 0x63, 0xc0, 0x4b, 0x19, 0x80, 0x06, 0xdb, 0x1a, /* Byte value: 0x03 */ + 0x38, 0xfb, 0x20, 0xd3, 0x14, 0x34, 0x8d, 0x24, 0x20, 0x69, 0x0f, 0xf7, 0x4e, 0x29, 0x38, 0x02, /* Byte value: 0x04 */ + 0x7f, 0x37, 0x09, 0xad, 0x4d, 0x44, 0x35, 0xa1, 0x09, 0xa2, 0x45, 0x0c, 0x7d, 0xb0, 0x7f, 0x54, /* Byte value: 0x05 */ + 0xb6, 0xa0, 0x72, 0x2f, 0xa6, 0xd4, 0x3e, 0xed, 0x72, 0x3c, 0x9b, 0xc2, 0x28, 0xd8, 0xb6, 0xae, /* Byte value: 0x06 */ + 0xa0, 0x81, 0xad, 0x96, 0xea, 0x47, 0x90, 0xf0, 0xad, 0x08, 0xae, 0x66, 0xb1, 0x91, 0xa0, 0xcf, /* Byte value: 0x07 */ + 0xb7, 0xec, 0x33, 0xa3, 0xc6, 0xf5, 0x60, 0x0d, 0x33, 0xc7, 0xb3, 0xae, 0x3b, 0xa1, 0xb7, 0x1f, /* Byte value: 0x08 */ + 0xb8, 0xee, 0x7a, 0x6b, 0xa3, 0xd9, 0x8c, 0xe4, 0x7a, 0xb7, 0xe8, 0x8f, 0xda, 0x43, 0xb8, 0x4f, /* Byte value: 
0x09 */ + 0xc5, 0x41, 0xf1, 0x1d, 0x2e, 0xdf, 0x05, 0x46, 0xf1, 0x20, 0xfd, 0x5b, 0x81, 0x01, 0xc5, 0xba, /* Byte value: 0x0a */ + 0xcc, 0x28, 0xfd, 0x7b, 0xc8, 0x35, 0xee, 0xaa, 0xfd, 0x0f, 0x56, 0xd1, 0x0a, 0x36, 0xcc, 0xca, /* Byte value: 0x0b */ + 0x54, 0x52, 0x70, 0x3e, 0x36, 0x46, 0xf3, 0x7e, 0x70, 0x6e, 0xf7, 0x40, 0xf5, 0x8e, 0x54, 0x07, /* Byte value: 0x0c */ + 0x04, 0xf3, 0xc7, 0x75, 0x43, 0x84, 0xbb, 0x06, 0xc7, 0x6a, 0xa0, 0x73, 0x4c, 0x27, 0x04, 0x81, /* Byte value: 0x0d */ + 0xb1, 0x87, 0x76, 0x0d, 0x45, 0x33, 0x67, 0x08, 0x76, 0x98, 0x43, 0x05, 0x51, 0x74, 0xb1, 0x3f, /* Byte value: 0x0e */ + 0x13, 0x9e, 0x59, 0x40, 0x6f, 0x36, 0x4b, 0xfb, 0x59, 0xa5, 0xbd, 0xbb, 0xc6, 0x17, 0x13, 0x51, /* Byte value: 0x0f */ + 0xb2, 0x53, 0xb5, 0x5a, 0xe5, 0x50, 0x85, 0xeb, 0xb5, 0x56, 0x3b, 0xb1, 0x64, 0xff, 0xb2, 0x2f, /* Byte value: 0x10 */ + 0x1c, 0x9c, 0x10, 0x88, 0x0a, 0x1a, 0xa7, 0x12, 0x10, 0xd5, 0xe6, 0x9a, 0x27, 0xf5, 0x1c, 0x01, /* Byte value: 0x11 */ + 0xa8, 0xa4, 0xe0, 0x7c, 0x6c, 0x8c, 0x25, 0xfc, 0xe0, 0xdc, 0x2d, 0x80, 0x29, 0xdf, 0xa8, 0x0e, /* Byte value: 0x12 */ + 0x02, 0x98, 0x82, 0xdb, 0xc0, 0x42, 0xbc, 0x03, 0x82, 0x35, 0x50, 0xd8, 0x26, 0xf2, 0x02, 0xa1, /* Byte value: 0x13 */ + 0x20, 0x94, 0xf7, 0x2e, 0x5d, 0xaa, 0x91, 0x30, 0xf7, 0xd6, 0x49, 0x1e, 0x25, 0xfb, 0x20, 0x82, /* Byte value: 0x14 */ + 0xf5, 0x9f, 0x9c, 0x24, 0xbc, 0x20, 0x3d, 0x6e, 0x9c, 0x9d, 0x71, 0x4a, 0x57, 0x66, 0xf5, 0x79, /* Byte value: 0x15 */ + 0x45, 0x54, 0xab, 0xa5, 0x99, 0x32, 0x04, 0x86, 0xab, 0xfe, 0x1a, 0x23, 0x15, 0x6b, 0x45, 0xf7, /* Byte value: 0x16 */ + 0x3b, 0x2f, 0xe3, 0x84, 0xb4, 0x57, 0x6f, 0xc7, 0xe3, 0xa7, 0x77, 0x43, 0x7b, 0xa2, 0x3b, 0x12, /* Byte value: 0x17 */ + 0x3f, 0xdc, 0x24, 0xf1, 0xf7, 0xd3, 0xd4, 0xc1, 0x24, 0xcd, 0xd7, 0x30, 0x37, 0x85, 0x3f, 0x93, /* Byte value: 0x18 */ + 0xc3, 0x2a, 0xb4, 0xb3, 0xad, 0x19, 0x02, 0x43, 0xb4, 0x7f, 0x0d, 0xf0, 0xeb, 0xd4, 0xc3, 0x9a, /* Byte value: 0x19 */ + 0x2f, 0x96, 0xbe, 0xe6, 0x38, 0x86, 0x7d, 0xd9, 0xbe, 0xa6, 0x12, 0x3f, 0xc4, 0x19, 0x2f, 0xd2, /* Byte value: 0x1a */ + 0x76, 0x5e, 0x05, 0xcb, 0xab, 0xae, 0xde, 0x4d, 0x05, 0x8d, 0xee, 0x86, 0xf6, 0x87, 0x76, 0x24, /* Byte value: 0x1b */ + 0x4b, 0x1a, 0xa3, 0xe1, 0x9c, 0x3f, 0xb6, 0x8f, 0xa3, 0x75, 0x69, 0x6e, 0xe7, 0xf0, 0x4b, 0x16, /* Byte value: 0x1c */ + 0x5f, 0xa3, 0xfe, 0x83, 0x10, 0xee, 0xa4, 0x91, 0xfe, 0x74, 0x0c, 0x12, 0x58, 0x4b, 0x5f, 0xd6, /* Byte value: 0x1d */ + 0x97, 0x78, 0xc4, 0x8d, 0x9b, 0x5f, 0xf1, 0x3d, 0xc4, 0x11, 0xfa, 0xb0, 0x1e, 0x5a, 0x97, 0x9d, /* Byte value: 0x1e */ + 0x16, 0x21, 0xdf, 0xb9, 0x4c, 0x93, 0xae, 0x1d, 0xdf, 0x34, 0x35, 0xa4, 0x99, 0x49, 0x16, 0x61, /* Byte value: 0x1f */ + 0xa2, 0x19, 0x2f, 0x4d, 0x2a, 0x05, 0x2c, 0xf3, 0x2f, 0x3d, 0xfe, 0xbe, 0x97, 0x63, 0xa2, 0x6e, /* Byte value: 0x20 */ + 0x50, 0xa1, 0xb7, 0x4b, 0x75, 0xc2, 0x48, 0x78, 0xb7, 0x04, 0x57, 0x33, 0xb9, 0xa9, 0x50, 0x86, /* Byte value: 0x21 */ + 0x6a, 0xc2, 0x15, 0x43, 0xa1, 0xb4, 0x79, 0x5f, 0x15, 0x58, 0x08, 0x1c, 0xd1, 0x72, 0x6a, 0x25, /* Byte value: 0x22 */ + 0x0c, 0xd6, 0x8a, 0x9f, 0xc5, 0x4f, 0x0e, 0x0a, 0x8a, 0xbe, 0x23, 0x95, 0xd4, 0x69, 0x0c, 0x40, /* Byte value: 0x23 */ + 0x71, 0x79, 0x01, 0xe9, 0x48, 0x49, 0x87, 0xa8, 0x01, 0x29, 0x36, 0x41, 0x8f, 0x2b, 0x71, 0xb5, /* Byte value: 0x24 */ + 0xa6, 0xea, 0xe8, 0x38, 0x69, 0x81, 0x97, 0xf5, 0xe8, 0x57, 0x5e, 0xcd, 0xdb, 0x44, 0xa6, 0xef, /* Byte value: 0x25 */ + 0x8a, 0xa8, 0x95, 0x89, 0xf1, 0x64, 0x08, 0xcf, 0x95, 0x3f, 0x34, 0x46, 0x2a, 0xd6, 0x8a, 0x2d, /* Byte value: 0x26 */ + 0xe7, 0x4d, 0x84, 0xe8, 0xb3, 0x37, 
0x28, 0x75, 0x84, 0xc3, 0xe4, 0x9d, 0x82, 0x08, 0xe7, 0x99, /* Byte value: 0x27 */ + 0xec, 0xbc, 0x0a, 0x55, 0x95, 0x9f, 0x7f, 0x9a, 0x0a, 0xd9, 0x1f, 0xcf, 0x2f, 0xcd, 0xec, 0x48, /* Byte value: 0x28 */ + 0x51, 0xed, 0xf6, 0xc7, 0x15, 0xe3, 0x16, 0x98, 0xf6, 0xff, 0x7f, 0x5f, 0xaa, 0xd0, 0x51, 0x37, /* Byte value: 0x29 */ + 0x79, 0x5c, 0x4c, 0x03, 0xce, 0x82, 0x32, 0xa4, 0x4c, 0xfd, 0xb5, 0xa7, 0x17, 0x65, 0x79, 0x74, /* Byte value: 0x2a */ + 0xe1, 0x26, 0xc1, 0x46, 0x30, 0xf1, 0x2f, 0x70, 0xc1, 0x9c, 0x14, 0x36, 0xe8, 0xdd, 0xe1, 0xb9, /* Byte value: 0x2b */ + 0x1b, 0xbb, 0x14, 0xaa, 0xe9, 0xfd, 0xfe, 0xf7, 0x14, 0x71, 0x3e, 0x5d, 0x5e, 0x59, 0x1b, 0x90, /* Byte value: 0x2c */ + 0x9f, 0x5d, 0x89, 0x67, 0x1d, 0x94, 0x44, 0x31, 0x89, 0xc5, 0x79, 0x56, 0x86, 0x14, 0x9f, 0x5c, /* Byte value: 0x2d */ + 0x74, 0xc6, 0x87, 0x10, 0x6b, 0xec, 0x62, 0x4e, 0x87, 0xb8, 0xbe, 0x5e, 0xd0, 0x75, 0x74, 0x85, /* Byte value: 0x2e */ + 0xf8, 0x05, 0x57, 0x37, 0x19, 0x4e, 0x6d, 0x84, 0x57, 0xd8, 0x7a, 0xb3, 0x90, 0x76, 0xf8, 0x88, /* Byte value: 0x2f */ + 0x5a, 0x1c, 0x78, 0x7a, 0x33, 0x4b, 0x41, 0x77, 0x78, 0xe5, 0x84, 0x0d, 0x07, 0x15, 0x5a, 0xe6, /* Byte value: 0x30 */ + 0x67, 0x58, 0xde, 0x50, 0x04, 0xda, 0x29, 0xb5, 0xde, 0x1d, 0x03, 0xe5, 0x16, 0x62, 0x67, 0xd4, /* Byte value: 0x31 */ + 0x36, 0xb5, 0x28, 0x97, 0x11, 0x39, 0x3f, 0x2d, 0x28, 0xe2, 0x7c, 0xba, 0xbc, 0xb2, 0x36, 0xe3, /* Byte value: 0x32 */ + 0xe8, 0x4f, 0xcd, 0x20, 0xd6, 0x1b, 0xc4, 0x9c, 0xcd, 0xb3, 0xbf, 0xbc, 0x63, 0xea, 0xe8, 0xc9, /* Byte value: 0x33 */ + 0x60, 0x7f, 0xda, 0x72, 0xe7, 0x3d, 0x70, 0x50, 0xda, 0xb9, 0xdb, 0x22, 0x6f, 0xce, 0x60, 0x45, /* Byte value: 0x34 */ + 0xd2, 0x2c, 0x6f, 0x28, 0x02, 0x6d, 0xf5, 0xbb, 0x6f, 0xef, 0xe0, 0x93, 0x0b, 0x31, 0xd2, 0x6a, /* Byte value: 0x35 */ + 0xa5, 0x3e, 0x2b, 0x6f, 0xc9, 0xe2, 0x75, 0x16, 0x2b, 0x99, 0x26, 0x79, 0xee, 0xcf, 0xa5, 0xff, /* Byte value: 0x36 */ + 0x8c, 0xc3, 0xd0, 0x27, 0x72, 0xa2, 0x0f, 0xca, 0xd0, 0x60, 0xc4, 0xed, 0x40, 0x03, 0x8c, 0x0d, /* Byte value: 0x37 */ + 0xbd, 0x51, 0xfc, 0x92, 0x80, 0x7c, 0x69, 0x02, 0xfc, 0x26, 0x60, 0x90, 0x85, 0x1d, 0xbd, 0x7f, /* Byte value: 0x38 */ + 0xfb, 0xd1, 0x94, 0x60, 0xb9, 0x2d, 0x8f, 0x67, 0x94, 0x16, 0x02, 0x07, 0xa5, 0xfd, 0xfb, 0x98, /* Byte value: 0x39 */ + 0xba, 0x76, 0xf8, 0xb0, 0x63, 0x9b, 0x30, 0xe7, 0xf8, 0x82, 0xb8, 0x57, 0xfc, 0xb1, 0xba, 0xee, /* Byte value: 0x3a */ + 0x0f, 0x02, 0x49, 0xc8, 0x65, 0x2c, 0xec, 0xe9, 0x49, 0x70, 0x5b, 0x21, 0xe1, 0xe2, 0x0f, 0x50, /* Byte value: 0x3b */ + 0x4f, 0xe9, 0x64, 0x94, 0xdf, 0xbb, 0x0d, 0x89, 0x64, 0x1f, 0xc9, 0x1d, 0xab, 0xd7, 0x4f, 0x97, /* Byte value: 0x3c */ + 0x8e, 0x5b, 0x52, 0xfc, 0xb2, 0xe0, 0xb3, 0xc9, 0x52, 0x55, 0x94, 0x35, 0x66, 0xf1, 0x8e, 0xac, /* Byte value: 0x3d */ + 0x89, 0x7c, 0x56, 0xde, 0x51, 0x07, 0xea, 0x2c, 0x56, 0xf1, 0x4c, 0xf2, 0x1f, 0x5d, 0x89, 0x3d, /* Byte value: 0x3e */ + 0x06, 0x6b, 0x45, 0xae, 0x83, 0xc6, 0x07, 0x05, 0x45, 0x5f, 0xf0, 0xab, 0x6a, 0xd5, 0x06, 0x20, /* Byte value: 0x3f */ + 0xe5, 0xd5, 0x06, 0x33, 0x73, 0x75, 0x94, 0x76, 0x06, 0xf6, 0xb4, 0x45, 0xa4, 0xfa, 0xe5, 0x38, /* Byte value: 0x40 */ + 0xf4, 0xd3, 0xdd, 0xa8, 0xdc, 0x01, 0x63, 0x8e, 0xdd, 0x66, 0x59, 0x26, 0x44, 0x1f, 0xf4, 0xc8, /* Byte value: 0x41 */ + 0xc7, 0xd9, 0x73, 0xc6, 0xee, 0x9d, 0xb9, 0x45, 0x73, 0x15, 0xad, 0x83, 0xa7, 0xf3, 0xc7, 0x1b, /* Byte value: 0x42 */ + 0xad, 0x1b, 0x66, 0x85, 0x4f, 0x29, 0xc0, 0x1a, 0x66, 0x4d, 0xa5, 0x9f, 0x76, 0x81, 0xad, 0x3e, /* Byte value: 0x43 */ + 0xcb, 0x0f, 0xf9, 0x59, 0x2b, 0xd2, 0xb7, 0x4f, 0xf9, 0xab, 0x8e, 0x16, 0x73, 
0x9a, 0xcb, 0x5b, /* Byte value: 0x44 */ + 0x80, 0x15, 0x5a, 0xb8, 0xb7, 0xed, 0x01, 0xc0, 0x5a, 0xde, 0xe7, 0x78, 0x94, 0x6a, 0x80, 0x4d, /* Byte value: 0x45 */ + 0xf0, 0x20, 0x1a, 0xdd, 0x9f, 0x85, 0xd8, 0x88, 0x1a, 0x0c, 0xf9, 0x55, 0x08, 0x38, 0xf0, 0x49, /* Byte value: 0x46 */ + 0xd5, 0x0b, 0x6b, 0x0a, 0xe1, 0x8a, 0xac, 0x5e, 0x6b, 0x4b, 0x38, 0x54, 0x72, 0x9d, 0xd5, 0xfb, /* Byte value: 0x47 */ + 0x07, 0x27, 0x04, 0x22, 0xe3, 0xe7, 0x59, 0xe5, 0x04, 0xa4, 0xd8, 0xc7, 0x79, 0xac, 0x07, 0x91, /* Byte value: 0x48 */ + 0xea, 0xd7, 0x4f, 0xfb, 0x16, 0x59, 0x78, 0x9f, 0x4f, 0x86, 0xef, 0x64, 0x45, 0x18, 0xea, 0x68, /* Byte value: 0x49 */ + 0x56, 0xca, 0xf2, 0xe5, 0xf6, 0x04, 0x4f, 0x7d, 0xf2, 0x5b, 0xa7, 0x98, 0xd3, 0x7c, 0x56, 0xa6, /* Byte value: 0x4a */ + 0x4d, 0x71, 0xe6, 0x4f, 0x1f, 0xf9, 0xb1, 0x8a, 0xe6, 0x2a, 0x99, 0xc5, 0x8d, 0x25, 0x4d, 0x36, /* Byte value: 0x4b */ + 0x55, 0x1e, 0x31, 0xb2, 0x56, 0x67, 0xad, 0x9e, 0x31, 0x95, 0xdf, 0x2c, 0xe6, 0xf7, 0x55, 0xb6, /* Byte value: 0x4c */ + 0x43, 0x3f, 0xee, 0x0b, 0x1a, 0xf4, 0x03, 0x83, 0xee, 0xa1, 0xea, 0x88, 0x7f, 0xbe, 0x43, 0xd7, /* Byte value: 0x4d */ + 0x1a, 0xf7, 0x55, 0x26, 0x89, 0xdc, 0xa0, 0x17, 0x55, 0x8a, 0x16, 0x31, 0x4d, 0x20, 0x1a, 0x21, /* Byte value: 0x4e */ + 0x0b, 0xf1, 0x8e, 0xbd, 0x26, 0xa8, 0x57, 0xef, 0x8e, 0x1a, 0xfb, 0x52, 0xad, 0xc5, 0x0b, 0xd1, /* Byte value: 0x4f */ + 0xe3, 0xbe, 0x43, 0x9d, 0xf0, 0xb3, 0x93, 0x73, 0x43, 0xa9, 0x44, 0xee, 0xce, 0x2f, 0xe3, 0x18, /* Byte value: 0x50 */ + 0x96, 0x34, 0x85, 0x01, 0xfb, 0x7e, 0xaf, 0xdd, 0x85, 0xea, 0xd2, 0xdc, 0x0d, 0x23, 0x96, 0x2c, /* Byte value: 0x51 */ + 0x27, 0xb3, 0xf3, 0x0c, 0xbe, 0x4d, 0xc8, 0xd5, 0xf3, 0x72, 0x91, 0xd9, 0x5c, 0x57, 0x27, 0x13, /* Byte value: 0x52 */ + 0xdd, 0x2e, 0x26, 0xe0, 0x67, 0x41, 0x19, 0x52, 0x26, 0x9f, 0xbb, 0xb2, 0xea, 0xd3, 0xdd, 0x3a, /* Byte value: 0x53 */ + 0x44, 0x18, 0xea, 0x29, 0xf9, 0x13, 0x5a, 0x66, 0xea, 0x05, 0x32, 0x4f, 0x06, 0x12, 0x44, 0x46, /* Byte value: 0x54 */ + 0x26, 0xff, 0xb2, 0x80, 0xde, 0x6c, 0x96, 0x35, 0xb2, 0x89, 0xb9, 0xb5, 0x4f, 0x2e, 0x26, 0xa2, /* Byte value: 0x55 */ + 0x35, 0x61, 0xeb, 0xc0, 0xb1, 0x5a, 0xdd, 0xce, 0xeb, 0x2c, 0x04, 0x0e, 0x89, 0x39, 0x35, 0xf3, /* Byte value: 0x56 */ + 0xdc, 0x62, 0x67, 0x6c, 0x07, 0x60, 0x47, 0xb2, 0x67, 0x64, 0x93, 0xde, 0xf9, 0xaa, 0xdc, 0x8b, /* Byte value: 0x57 */ + 0xa4, 0x72, 0x6a, 0xe3, 0xa9, 0xc3, 0x2b, 0xf6, 0x6a, 0x62, 0x0e, 0x15, 0xfd, 0xb6, 0xa4, 0x4e, /* Byte value: 0x58 */ + 0x58, 0x84, 0xfa, 0xa1, 0xf3, 0x09, 0xfd, 0x74, 0xfa, 0xd0, 0xd4, 0xd5, 0x21, 0xe7, 0x58, 0x47, /* Byte value: 0x59 */ + 0xcf, 0xfc, 0x3e, 0x2c, 0x68, 0x56, 0x0c, 0x49, 0x3e, 0xc1, 0x2e, 0x65, 0x3f, 0xbd, 0xcf, 0xda, /* Byte value: 0x5a */ + 0xd0, 0xb4, 0xed, 0xf3, 0xc2, 0x2f, 0x49, 0xb8, 0xed, 0xda, 0xb0, 0x4b, 0x2d, 0xc3, 0xd0, 0xcb, /* Byte value: 0x5b */ + 0x7c, 0xe3, 0xca, 0xfa, 0xed, 0x27, 0xd7, 0x42, 0xca, 0x6c, 0x3d, 0xb8, 0x48, 0x3b, 0x7c, 0x44, /* Byte value: 0x5c */ + 0x5c, 0x77, 0x3d, 0xd4, 0xb0, 0x8d, 0x46, 0x72, 0x3d, 0xba, 0x74, 0xa6, 0x6d, 0xc0, 0x5c, 0xc6, /* Byte value: 0x5d */ + 0x7e, 0x7b, 0x48, 0x21, 0x2d, 0x65, 0x6b, 0x41, 0x48, 0x59, 0x6d, 0x60, 0x6e, 0xc9, 0x7e, 0xe5, /* Byte value: 0x5e */ + 0xfe, 0x6e, 0x12, 0x99, 0x9a, 0x88, 0x6a, 0x81, 0x12, 0x87, 0x8a, 0x18, 0xfa, 0xa3, 0xfe, 0xa8, /* Byte value: 0x5f */ + 0xaa, 0x3c, 0x62, 0xa7, 0xac, 0xce, 0x99, 0xff, 0x62, 0xe9, 0x7d, 0x58, 0x0f, 0x2d, 0xaa, 0xaf, /* Byte value: 0x60 */ + 0x2b, 0x65, 0x79, 0x93, 0x7b, 0x02, 0xc6, 0xdf, 0x79, 0xcc, 0xb2, 0x4c, 0x88, 0x3e, 0x2b, 0x53, /* Byte value: 0x61 */ + 
0xc9, 0x97, 0x7b, 0x82, 0xeb, 0x90, 0x0b, 0x4c, 0x7b, 0x9e, 0xde, 0xce, 0x55, 0x68, 0xc9, 0xfa, /* Byte value: 0x62 */ + 0x92, 0xc7, 0x42, 0x74, 0xb8, 0xfa, 0x14, 0xdb, 0x42, 0x80, 0x72, 0xaf, 0x41, 0x04, 0x92, 0xad, /* Byte value: 0x63 */ + 0xca, 0x43, 0xb8, 0xd5, 0x4b, 0xf3, 0xe9, 0xaf, 0xb8, 0x50, 0xa6, 0x7a, 0x60, 0xe3, 0xca, 0xea, /* Byte value: 0x64 */ + 0x4c, 0x3d, 0xa7, 0xc3, 0x7f, 0xd8, 0xef, 0x6a, 0xa7, 0xd1, 0xb1, 0xa9, 0x9e, 0x5c, 0x4c, 0x87, /* Byte value: 0x65 */ + 0x9d, 0xc5, 0x0b, 0xbc, 0xdd, 0xd6, 0xf8, 0x32, 0x0b, 0xf0, 0x29, 0x8e, 0xa0, 0xe6, 0x9d, 0xfd, /* Byte value: 0x66 */ + 0xa7, 0xa6, 0xa9, 0xb4, 0x09, 0xa0, 0xc9, 0x15, 0xa9, 0xac, 0x76, 0xa1, 0xc8, 0x3d, 0xa7, 0x5e, /* Byte value: 0x67 */ + 0x59, 0xc8, 0xbb, 0x2d, 0x93, 0x28, 0xa3, 0x94, 0xbb, 0x2b, 0xfc, 0xb9, 0x32, 0x9e, 0x59, 0xf6, /* Byte value: 0x68 */ + 0x9e, 0x11, 0xc8, 0xeb, 0x7d, 0xb5, 0x1a, 0xd1, 0xc8, 0x3e, 0x51, 0x3a, 0x95, 0x6d, 0x9e, 0xed, /* Byte value: 0x69 */ + 0x1d, 0xd0, 0x51, 0x04, 0x6a, 0x3b, 0xf9, 0xf2, 0x51, 0x2e, 0xce, 0xf6, 0x34, 0x8c, 0x1d, 0xb0, /* Byte value: 0x6a */ + 0x01, 0x4c, 0x41, 0x8c, 0x60, 0x21, 0x5e, 0xe0, 0x41, 0xfb, 0x28, 0x6c, 0x13, 0x79, 0x01, 0xb1, /* Byte value: 0x6b */ + 0xc2, 0x66, 0xf5, 0x3f, 0xcd, 0x38, 0x5c, 0xa3, 0xf5, 0x84, 0x25, 0x9c, 0xf8, 0xad, 0xc2, 0x2b, /* Byte value: 0x6c */ + 0x28, 0xb1, 0xba, 0xc4, 0xdb, 0x61, 0x24, 0x3c, 0xba, 0x02, 0xca, 0xf8, 0xbd, 0xb5, 0x28, 0x43, /* Byte value: 0x6d */ + 0xb4, 0x38, 0xf0, 0xf4, 0x66, 0x96, 0x82, 0xee, 0xf0, 0x09, 0xcb, 0x1a, 0x0e, 0x2a, 0xb4, 0x0f, /* Byte value: 0x6e */ + 0x19, 0x23, 0x96, 0x71, 0x29, 0xbf, 0x42, 0xf4, 0x96, 0x44, 0x6e, 0x85, 0x78, 0xab, 0x19, 0x31, /* Byte value: 0x6f */ + 0x85, 0xaa, 0xdc, 0x41, 0x94, 0x48, 0xe4, 0x26, 0xdc, 0x4f, 0x6f, 0x67, 0xcb, 0x34, 0x85, 0x7d, /* Byte value: 0x70 */ + 0x3c, 0x08, 0xe7, 0xa6, 0x57, 0xb0, 0x36, 0x22, 0xe7, 0x03, 0xaf, 0x84, 0x02, 0x0e, 0x3c, 0x83, /* Byte value: 0x71 */ + 0x40, 0xeb, 0x2d, 0x5c, 0xba, 0x97, 0xe1, 0x60, 0x2d, 0x6f, 0x92, 0x3c, 0x4a, 0x35, 0x40, 0xc7, /* Byte value: 0x72 */ + 0x1e, 0x04, 0x92, 0x53, 0xca, 0x58, 0x1b, 0x11, 0x92, 0xe0, 0xb6, 0x42, 0x01, 0x07, 0x1e, 0xa0, /* Byte value: 0x73 */ + 0x95, 0xe0, 0x46, 0x56, 0x5b, 0x1d, 0x4d, 0x3e, 0x46, 0x24, 0xaa, 0x68, 0x38, 0xa8, 0x95, 0x3c, /* Byte value: 0x74 */ + 0xab, 0x70, 0x23, 0x2b, 0xcc, 0xef, 0xc7, 0x1f, 0x23, 0x12, 0x55, 0x34, 0x1c, 0x54, 0xab, 0x1e, /* Byte value: 0x75 */ + 0xb3, 0x1f, 0xf4, 0xd6, 0x85, 0x71, 0xdb, 0x0b, 0xf4, 0xad, 0x13, 0xdd, 0x77, 0x86, 0xb3, 0x9e, /* Byte value: 0x76 */ + 0xeb, 0x9b, 0x0e, 0x77, 0x76, 0x78, 0x26, 0x7f, 0x0e, 0x7d, 0xc7, 0x08, 0x56, 0x61, 0xeb, 0xd9, /* Byte value: 0x77 */ + 0xfa, 0x9d, 0xd5, 0xec, 0xd9, 0x0c, 0xd1, 0x87, 0xd5, 0xed, 0x2a, 0x6b, 0xb6, 0x84, 0xfa, 0x29, /* Byte value: 0x78 */ + 0x2a, 0x29, 0x38, 0x1f, 0x1b, 0x23, 0x98, 0x3f, 0x38, 0x37, 0x9a, 0x20, 0x9b, 0x47, 0x2a, 0xe2, /* Byte value: 0x79 */ + 0xac, 0x57, 0x27, 0x09, 0x2f, 0x08, 0x9e, 0xfa, 0x27, 0xb6, 0x8d, 0xf3, 0x65, 0xf8, 0xac, 0x8f, /* Byte value: 0x7a */ + 0x2d, 0x0e, 0x3c, 0x3d, 0xf8, 0xc4, 0xc1, 0xda, 0x3c, 0x93, 0x42, 0xe7, 0xe2, 0xeb, 0x2d, 0x73, /* Byte value: 0x7b */ + 0x25, 0x2b, 0x71, 0xd7, 0x7e, 0x0f, 0x74, 0xd6, 0x71, 0x47, 0xc1, 0x01, 0x7a, 0xa5, 0x25, 0xb2, /* Byte value: 0x7c */ + 0x4a, 0x56, 0xe2, 0x6d, 0xfc, 0x1e, 0xe8, 0x6f, 0xe2, 0x8e, 0x41, 0x02, 0xf4, 0x89, 0x4a, 0xa7, /* Byte value: 0x7d */ + 0xb0, 0xcb, 0x37, 0x81, 0x25, 0x12, 0x39, 0xe8, 0x37, 0x63, 0x6b, 0x69, 0x42, 0x0d, 0xb0, 0x8e, /* Byte value: 0x7e */ + 0x41, 0xa7, 0x6c, 0xd0, 0xda, 0xb6, 0xbf, 
0x80, 0x6c, 0x94, 0xba, 0x50, 0x59, 0x4c, 0x41, 0x76, /* Byte value: 0x7f */ + 0x30, 0xde, 0x6d, 0x39, 0x92, 0xff, 0x38, 0x28, 0x6d, 0xbd, 0x8c, 0x11, 0xd6, 0x67, 0x30, 0xc3, /* Byte value: 0x80 */ + 0x33, 0x0a, 0xae, 0x6e, 0x32, 0x9c, 0xda, 0xcb, 0xae, 0x73, 0xf4, 0xa5, 0xe3, 0xec, 0x33, 0xd3, /* Byte value: 0x81 */ + 0x65, 0xc0, 0x5c, 0x8b, 0xc4, 0x98, 0x95, 0xb6, 0x5c, 0x28, 0x53, 0x3d, 0x30, 0x90, 0x65, 0x75, /* Byte value: 0x82 */ + 0x8b, 0xe4, 0xd4, 0x05, 0x91, 0x45, 0x56, 0x2f, 0xd4, 0xc4, 0x1c, 0x2a, 0x39, 0xaf, 0x8b, 0x9c, /* Byte value: 0x83 */ + 0xef, 0x68, 0xc9, 0x02, 0x35, 0xfc, 0x9d, 0x79, 0xc9, 0x17, 0x67, 0x7b, 0x1a, 0x46, 0xef, 0x58, /* Byte value: 0x84 */ + 0x09, 0x69, 0x0c, 0x66, 0xe6, 0xea, 0xeb, 0xec, 0x0c, 0x2f, 0xab, 0x8a, 0x8b, 0x37, 0x09, 0x70, /* Byte value: 0x85 */ + 0x9b, 0xae, 0x4e, 0x12, 0x5e, 0x10, 0xff, 0x37, 0x4e, 0xaf, 0xd9, 0x25, 0xca, 0x33, 0x9b, 0xdd, /* Byte value: 0x86 */ + 0x6f, 0x7d, 0x93, 0xba, 0x82, 0x11, 0x9c, 0xb9, 0x93, 0xc9, 0x80, 0x03, 0x8e, 0x2c, 0x6f, 0x15, /* Byte value: 0x87 */ + 0x84, 0xe6, 0x9d, 0xcd, 0xf4, 0x69, 0xba, 0xc6, 0x9d, 0xb4, 0x47, 0x0b, 0xd8, 0x4d, 0x84, 0xcc, /* Byte value: 0x88 */ + 0x15, 0xf5, 0x1c, 0xee, 0xec, 0xf0, 0x4c, 0xfe, 0x1c, 0xfa, 0x4d, 0x10, 0xac, 0xc2, 0x15, 0x71, /* Byte value: 0x89 */ + 0x75, 0x8a, 0xc6, 0x9c, 0x0b, 0xcd, 0x3c, 0xae, 0xc6, 0x43, 0x96, 0x32, 0xc3, 0x0c, 0x75, 0x34, /* Byte value: 0x8a */ + 0x61, 0x33, 0x9b, 0xfe, 0x87, 0x1c, 0x2e, 0xb0, 0x9b, 0x42, 0xf3, 0x4e, 0x7c, 0xb7, 0x61, 0xf4, /* Byte value: 0x8b */ + 0xf9, 0x49, 0x16, 0xbb, 0x79, 0x6f, 0x33, 0x64, 0x16, 0x23, 0x52, 0xdf, 0x83, 0x0f, 0xf9, 0x39, /* Byte value: 0x8c */ + 0x12, 0xd2, 0x18, 0xcc, 0x0f, 0x17, 0x15, 0x1b, 0x18, 0x5e, 0x95, 0xd7, 0xd5, 0x6e, 0x12, 0xe0, /* Byte value: 0x8d */ + 0x70, 0x35, 0x40, 0x65, 0x28, 0x68, 0xd9, 0x48, 0x40, 0xd2, 0x1e, 0x2d, 0x9c, 0x52, 0x70, 0x04, /* Byte value: 0x8e */ + 0xa3, 0x55, 0x6e, 0xc1, 0x4a, 0x24, 0x72, 0x13, 0x6e, 0xc6, 0xd6, 0xd2, 0x84, 0x1a, 0xa3, 0xdf, /* Byte value: 0x8f */ + 0x9c, 0x89, 0x4a, 0x30, 0xbd, 0xf7, 0xa6, 0xd2, 0x4a, 0x0b, 0x01, 0xe2, 0xb3, 0x9f, 0x9c, 0x4c, /* Byte value: 0x90 */ + 0x4e, 0xa5, 0x25, 0x18, 0xbf, 0x9a, 0x53, 0x69, 0x25, 0xe4, 0xe1, 0x71, 0xb8, 0xae, 0x4e, 0x26, /* Byte value: 0x91 */ + 0xa9, 0xe8, 0xa1, 0xf0, 0x0c, 0xad, 0x7b, 0x1c, 0xa1, 0x27, 0x05, 0xec, 0x3a, 0xa6, 0xa9, 0xbf, /* Byte value: 0x92 */ + 0x7b, 0xc4, 0xce, 0xd8, 0x0e, 0xc0, 0x8e, 0xa7, 0xce, 0xc8, 0xe5, 0x7f, 0x31, 0x97, 0x7b, 0xd5, /* Byte value: 0x93 */ + 0x47, 0xcc, 0x29, 0x7e, 0x59, 0x70, 0xb8, 0x85, 0x29, 0xcb, 0x4a, 0xfb, 0x33, 0x99, 0x47, 0x56, /* Byte value: 0x94 */ + 0xe9, 0x03, 0x8c, 0xac, 0xb6, 0x3a, 0x9a, 0x7c, 0x8c, 0x48, 0x97, 0xd0, 0x70, 0x93, 0xe9, 0x78, /* Byte value: 0x95 */ + 0x7a, 0x88, 0x8f, 0x54, 0x6e, 0xe1, 0xd0, 0x47, 0x8f, 0x33, 0xcd, 0x13, 0x22, 0xee, 0x7a, 0x64, /* Byte value: 0x96 */ + 0xf1, 0x6c, 0x5b, 0x51, 0xff, 0xa4, 0x86, 0x68, 0x5b, 0xf7, 0xd1, 0x39, 0x1b, 0x41, 0xf1, 0xf8, /* Byte value: 0x97 */ + 0xc4, 0x0d, 0xb0, 0x91, 0x4e, 0xfe, 0x5b, 0xa6, 0xb0, 0xdb, 0xd5, 0x37, 0x92, 0x78, 0xc4, 0x0b, /* Byte value: 0x98 */ + 0xaf, 0x83, 0xe4, 0x5e, 0x8f, 0x6b, 0x7c, 0x19, 0xe4, 0x78, 0xf5, 0x47, 0x50, 0x73, 0xaf, 0x9f, /* Byte value: 0x99 */ + 0x68, 0x5a, 0x97, 0x98, 0x61, 0xf6, 0xc5, 0x5c, 0x97, 0x6d, 0x58, 0xc4, 0xf7, 0x80, 0x68, 0x84, /* Byte value: 0x9a */ + 0xbe, 0x85, 0x3f, 0xc5, 0x20, 0x1f, 0x8b, 0xe1, 0x3f, 0xe8, 0x18, 0x24, 0xb0, 0x96, 0xbe, 0x6f, /* Byte value: 0x9b */ + 0xbf, 0xc9, 0x7e, 0x49, 0x40, 0x3e, 0xd5, 0x01, 0x7e, 0x13, 0x30, 0x48, 0xa3, 0xef, 
0xbf, 0xde, /* Byte value: 0x9c */ + 0x2e, 0xda, 0xff, 0x6a, 0x58, 0xa7, 0x23, 0x39, 0xff, 0x5d, 0x3a, 0x53, 0xd7, 0x60, 0x2e, 0x63, /* Byte value: 0x9d */ + 0x48, 0xce, 0x60, 0xb6, 0x3c, 0x5c, 0x54, 0x6c, 0x60, 0xbb, 0x11, 0xda, 0xd2, 0x7b, 0x48, 0x06, /* Byte value: 0x9e */ + 0xce, 0xb0, 0x7f, 0xa0, 0x08, 0x77, 0x52, 0xa9, 0x7f, 0x3a, 0x06, 0x09, 0x2c, 0xc4, 0xce, 0x6b, /* Byte value: 0x9f */ + 0x3d, 0x44, 0xa6, 0x2a, 0x37, 0x91, 0x68, 0xc2, 0xa6, 0xf8, 0x87, 0xe8, 0x11, 0x77, 0x3d, 0x32, /* Byte value: 0xa0 */ + 0x32, 0x46, 0xef, 0xe2, 0x52, 0xbd, 0x84, 0x2b, 0xef, 0x88, 0xdc, 0xc9, 0xf0, 0x95, 0x32, 0x62, /* Byte value: 0xa1 */ + 0xe6, 0x01, 0xc5, 0x64, 0xd3, 0x16, 0x76, 0x95, 0xc5, 0x38, 0xcc, 0xf1, 0x91, 0x71, 0xe6, 0x28, /* Byte value: 0xa2 */ + 0x6b, 0x8e, 0x54, 0xcf, 0xc1, 0x95, 0x27, 0xbf, 0x54, 0xa3, 0x20, 0x70, 0xc2, 0x0b, 0x6b, 0x94, /* Byte value: 0xa3 */ + 0xbb, 0x3a, 0xb9, 0x3c, 0x03, 0xba, 0x6e, 0x07, 0xb9, 0x79, 0x90, 0x3b, 0xef, 0xc8, 0xbb, 0x5f, /* Byte value: 0xa4 */ + 0x91, 0x13, 0x81, 0x23, 0x18, 0x99, 0xf6, 0x38, 0x81, 0x4e, 0x0a, 0x1b, 0x74, 0x8f, 0x91, 0xbd, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x83, 0xc1, 0x99, 0xef, 0x17, 0x8e, 0xe3, 0x23, 0x99, 0x10, 0x9f, 0xcc, 0xa1, 0xe1, 0x83, 0x5d, /* Byte value: 0xa7 */ + 0xe0, 0x6a, 0x80, 0xca, 0x50, 0xd0, 0x71, 0x90, 0x80, 0x67, 0x3c, 0x5a, 0xfb, 0xa4, 0xe0, 0x08, /* Byte value: 0xa8 */ + 0x82, 0x8d, 0xd8, 0x63, 0x77, 0xaf, 0xbd, 0xc3, 0xd8, 0xeb, 0xb7, 0xa0, 0xb2, 0x98, 0x82, 0xec, /* Byte value: 0xa9 */ + 0xd7, 0x93, 0xe9, 0xd1, 0x21, 0xc8, 0x10, 0x5d, 0xe9, 0x7e, 0x68, 0x8c, 0x54, 0x6f, 0xd7, 0x5a, /* Byte value: 0xaa */ + 0xf2, 0xb8, 0x98, 0x06, 0x5f, 0xc7, 0x64, 0x8b, 0x98, 0x39, 0xa9, 0x8d, 0x2e, 0xca, 0xf2, 0xe8, /* Byte value: 0xab */ + 0xcd, 0x64, 0xbc, 0xf7, 0xa8, 0x14, 0xb0, 0x4a, 0xbc, 0xf4, 0x7e, 0xbd, 0x19, 0x4f, 0xcd, 0x7b, /* Byte value: 0xac */ + 0x78, 0x10, 0x0d, 0x8f, 0xae, 0xa3, 0x6c, 0x44, 0x0d, 0x06, 0x9d, 0xcb, 0x04, 0x1c, 0x78, 0xc5, /* Byte value: 0xad */ + 0xc0, 0xfe, 0x77, 0xe4, 0x0d, 0x7a, 0xe0, 0xa0, 0x77, 0xb1, 0x75, 0x44, 0xde, 0x5f, 0xc0, 0x8a, /* Byte value: 0xae */ + 0x63, 0xab, 0x19, 0x25, 0x47, 0x5e, 0x92, 0xb3, 0x19, 0x77, 0xa3, 0x96, 0x5a, 0x45, 0x63, 0x55, /* Byte value: 0xaf */ + 0x87, 0x32, 0x5e, 0x9a, 0x54, 0x0a, 0x58, 0x25, 0x5e, 0x7a, 0x3f, 0xbf, 0xed, 0xc6, 0x87, 0xdc, /* Byte value: 0xb0 */ + 0xfc, 0xf6, 0x90, 0x42, 0x5a, 0xca, 0xd6, 0x82, 0x90, 0xb2, 0xda, 0xc0, 0xdc, 0x51, 0xfc, 0x09, /* Byte value: 0xb1 */ + 0x94, 0xac, 0x07, 0xda, 0x3b, 0x3c, 0x13, 0xde, 0x07, 0xdf, 0x82, 0x04, 0x2b, 0xd1, 0x94, 0x8d, /* Byte value: 0xb2 */ + 0x08, 0x25, 0x4d, 0xea, 0x86, 0xcb, 0xb5, 0x0c, 0x4d, 0xd4, 0x83, 0xe6, 0x98, 0x4e, 0x08, 0xc1, /* Byte value: 0xb3 */ + 0x81, 0x59, 0x1b, 0x34, 0xd7, 0xcc, 0x5f, 0x20, 0x1b, 0x25, 0xcf, 0x14, 0x87, 0x13, 0x81, 0xfc, /* Byte value: 0xb4 */ + 0x17, 0x6d, 0x9e, 0x35, 0x2c, 0xb2, 0xf0, 0xfd, 0x9e, 0xcf, 0x1d, 0xc8, 0x8a, 0x30, 0x17, 0xd0, /* Byte value: 0xb5 */ + 0x62, 0xe7, 0x58, 0xa9, 0x27, 0x7f, 0xcc, 0x53, 0x58, 0x8c, 0x8b, 0xfa, 0x49, 0x3c, 0x62, 0xe4, /* Byte value: 0xb6 */ + 0xd8, 0x91, 0xa0, 0x19, 0x44, 0xe4, 0xfc, 0xb4, 0xa0, 0x0e, 0x33, 0xad, 0xb5, 0x8d, 0xd8, 0x0a, /* Byte value: 0xb7 */ + 0x31, 0x92, 0x2c, 0xb5, 0xf2, 0xde, 0x66, 0xc8, 0x2c, 0x46, 0xa4, 0x7d, 0xc5, 0x1e, 0x31, 0x72, /* Byte value: 0xb8 */ + 0x10, 0x4a, 0x9a, 0x17, 0xcf, 0x55, 0xa9, 0x18, 0x9a, 0x6b, 0xc5, 0x0f, 0xf3, 0x9c, 0x10, 0x41, /* Byte value: 0xb9 */ + 0x21, 
0xd8, 0xb6, 0xa2, 0x3d, 0x8b, 0xcf, 0xd0, 0xb6, 0x2d, 0x61, 0x72, 0x36, 0x82, 0x21, 0x33, /* Byte value: 0xba */ + 0xda, 0x09, 0x22, 0xc2, 0x84, 0xa6, 0x40, 0xb7, 0x22, 0x3b, 0x63, 0x75, 0x93, 0x7f, 0xda, 0xab, /* Byte value: 0xbb */ + 0x66, 0x14, 0x9f, 0xdc, 0x64, 0xfb, 0x77, 0x55, 0x9f, 0xe6, 0x2b, 0x89, 0x05, 0x1b, 0x66, 0x65, /* Byte value: 0xbc */ + 0x39, 0xb7, 0x61, 0x5f, 0x74, 0x15, 0xd3, 0xc4, 0x61, 0x92, 0x27, 0x9b, 0x5d, 0x50, 0x39, 0xb3, /* Byte value: 0xbd */ + 0xfd, 0xba, 0xd1, 0xce, 0x3a, 0xeb, 0x88, 0x62, 0xd1, 0x49, 0xf2, 0xac, 0xcf, 0x28, 0xfd, 0xb8, /* Byte value: 0xbe */ + 0x0e, 0x4e, 0x08, 0x44, 0x05, 0x0d, 0xb2, 0x09, 0x08, 0x8b, 0x73, 0x4d, 0xf2, 0x9b, 0x0e, 0xe1, /* Byte value: 0xbf */ + 0x6e, 0x31, 0xd2, 0x36, 0xe2, 0x30, 0xc2, 0x59, 0xd2, 0x32, 0xa8, 0x6f, 0x9d, 0x55, 0x6e, 0xa4, /* Byte value: 0xc0 */ + 0x8f, 0x17, 0x13, 0x70, 0xd2, 0xc1, 0xed, 0x29, 0x13, 0xae, 0xbc, 0x59, 0x75, 0x88, 0x8f, 0x1d, /* Byte value: 0xc1 */ + 0x99, 0x36, 0xcc, 0xc9, 0x9e, 0x52, 0x43, 0x34, 0xcc, 0x9a, 0x89, 0xfd, 0xec, 0xc1, 0x99, 0x7c, /* Byte value: 0xc2 */ + 0x6d, 0xe5, 0x11, 0x61, 0x42, 0x53, 0x20, 0xba, 0x11, 0xfc, 0xd0, 0xdb, 0xa8, 0xde, 0x6d, 0xb4, /* Byte value: 0xc3 */ + 0xf3, 0xf4, 0xd9, 0x8a, 0x3f, 0xe6, 0x3a, 0x6b, 0xd9, 0xc2, 0x81, 0xe1, 0x3d, 0xb3, 0xf3, 0x59, /* Byte value: 0xc4 */ + 0xde, 0xfa, 0xe5, 0xb7, 0xc7, 0x22, 0xfb, 0xb1, 0xe5, 0x51, 0xc3, 0x06, 0xdf, 0x58, 0xde, 0x2a, /* Byte value: 0xc5 */ + 0x88, 0x30, 0x17, 0x52, 0x31, 0x26, 0xb4, 0xcc, 0x17, 0x0a, 0x64, 0x9e, 0x0c, 0x24, 0x88, 0x8c, /* Byte value: 0xc6 */ + 0x0d, 0x9a, 0xcb, 0x13, 0xa5, 0x6e, 0x50, 0xea, 0xcb, 0x45, 0x0b, 0xf9, 0xc7, 0x10, 0x0d, 0xf1, /* Byte value: 0xc7 */ + 0x8d, 0x8f, 0x91, 0xab, 0x12, 0x83, 0x51, 0x2a, 0x91, 0x9b, 0xec, 0x81, 0x53, 0x7a, 0x8d, 0xbc, /* Byte value: 0xc8 */ + 0xd4, 0x47, 0x2a, 0x86, 0x81, 0xab, 0xf2, 0xbe, 0x2a, 0xb0, 0x10, 0x38, 0x61, 0xe4, 0xd4, 0x4a, /* Byte value: 0xc9 */ + 0x5b, 0x50, 0x39, 0xf6, 0x53, 0x6a, 0x1f, 0x97, 0x39, 0x1e, 0xac, 0x61, 0x14, 0x6c, 0x5b, 0x57, /* Byte value: 0xca */ + 0xd1, 0xf8, 0xac, 0x7f, 0xa2, 0x0e, 0x17, 0x58, 0xac, 0x21, 0x98, 0x27, 0x3e, 0xba, 0xd1, 0x7a, /* Byte value: 0xcb */ + 0x5d, 0x3b, 0x7c, 0x58, 0xd0, 0xac, 0x18, 0x92, 0x7c, 0x41, 0x5c, 0xca, 0x7e, 0xb9, 0x5d, 0x77, /* Byte value: 0xcc */ + 0x03, 0xd4, 0xc3, 0x57, 0xa0, 0x63, 0xe2, 0xe3, 0xc3, 0xce, 0x78, 0xb4, 0x35, 0x8b, 0x03, 0x10, /* Byte value: 0xcd */ + 0xc8, 0xdb, 0x3a, 0x0e, 0x8b, 0xb1, 0x55, 0xac, 0x3a, 0x65, 0xf6, 0xa2, 0x46, 0x11, 0xc8, 0x4b, /* Byte value: 0xce */ + 0xc1, 0xb2, 0x36, 0x68, 0x6d, 0x5b, 0xbe, 0x40, 0x36, 0x4a, 0x5d, 0x28, 0xcd, 0x26, 0xc1, 0x3b, /* Byte value: 0xcf */ + 0x77, 0x12, 0x44, 0x47, 0xcb, 0x8f, 0x80, 0xad, 0x44, 0x76, 0xc6, 0xea, 0xe5, 0xfe, 0x77, 0x95, /* Byte value: 0xd0 */ + 0x3a, 0x63, 0xa2, 0x08, 0xd4, 0x76, 0x31, 0x27, 0xa2, 0x5c, 0x5f, 0x2f, 0x68, 0xdb, 0x3a, 0xa3, /* Byte value: 0xd1 */ + 0xf6, 0x4b, 0x5f, 0x73, 0x1c, 0x43, 0xdf, 0x8d, 0x5f, 0x53, 0x09, 0xfe, 0x62, 0xed, 0xf6, 0x69, /* Byte value: 0xd2 */ + 0xb9, 0xa2, 0x3b, 0xe7, 0xc3, 0xf8, 0xd2, 0x04, 0x3b, 0x4c, 0xc0, 0xe3, 0xc9, 0x3a, 0xb9, 0xfe, /* Byte value: 0xd3 */ + 0xa1, 0xcd, 0xec, 0x1a, 0x8a, 0x66, 0xce, 0x10, 0xec, 0xf3, 0x86, 0x0a, 0xa2, 0xe8, 0xa1, 0x7e, /* Byte value: 0xd4 */ + 0xc6, 0x95, 0x32, 0x4a, 0x8e, 0xbc, 0xe7, 0xa5, 0x32, 0xee, 0x85, 0xef, 0xb4, 0x8a, 0xc6, 0xaa, /* Byte value: 0xd5 */ + 0x7d, 0xaf, 0x8b, 0x76, 0x8d, 0x06, 0x89, 0xa2, 0x8b, 0x97, 0x15, 0xd4, 0x5b, 0x42, 0x7d, 0xf5, /* Byte value: 0xd6 */ + 0x24, 0x67, 0x30, 0x5b, 0x1e, 0x2e, 0x2a, 0x36, 
0x30, 0xbc, 0xe9, 0x6d, 0x69, 0xdc, 0x24, 0x03, /* Byte value: 0xd7 */ + 0xdf, 0xb6, 0xa4, 0x3b, 0xa7, 0x03, 0xa5, 0x51, 0xa4, 0xaa, 0xeb, 0x6a, 0xcc, 0x21, 0xdf, 0x9b, /* Byte value: 0xd8 */ + 0x64, 0x8c, 0x1d, 0x07, 0xa4, 0xb9, 0xcb, 0x56, 0x1d, 0xd3, 0x7b, 0x51, 0x23, 0xe9, 0x64, 0xc4, /* Byte value: 0xd9 */ + 0x23, 0x40, 0x34, 0x79, 0xfd, 0xc9, 0x73, 0xd3, 0x34, 0x18, 0x31, 0xaa, 0x10, 0x70, 0x23, 0x92, /* Byte value: 0xda */ + 0x73, 0xe1, 0x83, 0x32, 0x88, 0x0b, 0x3b, 0xab, 0x83, 0x1c, 0x66, 0x99, 0xa9, 0xd9, 0x73, 0x14, /* Byte value: 0xdb */ + 0x69, 0x16, 0xd6, 0x14, 0x01, 0xd7, 0x9b, 0xbc, 0xd6, 0x96, 0x70, 0xa8, 0xe4, 0xf9, 0x69, 0x35, /* Byte value: 0xdc */ + 0x18, 0x6f, 0xd7, 0xfd, 0x49, 0x9e, 0x1c, 0x14, 0xd7, 0xbf, 0x46, 0xe9, 0x6b, 0xd2, 0x18, 0x80, /* Byte value: 0xdd */ + 0xbc, 0x1d, 0xbd, 0x1e, 0xe0, 0x5d, 0x37, 0xe2, 0xbd, 0xdd, 0x48, 0xfc, 0x96, 0x64, 0xbc, 0xce, /* Byte value: 0xde */ + 0x42, 0x73, 0xaf, 0x87, 0x7a, 0xd5, 0x5d, 0x63, 0xaf, 0x5a, 0xc2, 0xe4, 0x6c, 0xc7, 0x42, 0x66, /* Byte value: 0xdf */ + 0x1f, 0x48, 0xd3, 0xdf, 0xaa, 0x79, 0x45, 0xf1, 0xd3, 0x1b, 0x9e, 0x2e, 0x12, 0x7e, 0x1f, 0x11, /* Byte value: 0xe0 */ + 0x98, 0x7a, 0x8d, 0x45, 0xfe, 0x73, 0x1d, 0xd4, 0x8d, 0x61, 0xa1, 0x91, 0xff, 0xb8, 0x98, 0xcd, /* Byte value: 0xe1 */ + 0x53, 0x75, 0x74, 0x1c, 0xd5, 0xa1, 0xaa, 0x9b, 0x74, 0xca, 0x2f, 0x87, 0x8c, 0x22, 0x53, 0x96, /* Byte value: 0xe2 */ + 0xe4, 0x99, 0x47, 0xbf, 0x13, 0x54, 0xca, 0x96, 0x47, 0x0d, 0x9c, 0x29, 0xb7, 0x83, 0xe4, 0x89, /* Byte value: 0xe3 */ + 0xae, 0xcf, 0xa5, 0xd2, 0xef, 0x4a, 0x22, 0xf9, 0xa5, 0x83, 0xdd, 0x2b, 0x43, 0x0a, 0xae, 0x2e, /* Byte value: 0xe4 */ + 0xd9, 0xdd, 0xe1, 0x95, 0x24, 0xc5, 0xa2, 0x54, 0xe1, 0xf5, 0x1b, 0xc1, 0xa6, 0xf4, 0xd9, 0xbb, /* Byte value: 0xe5 */ + 0x22, 0x0c, 0x75, 0xf5, 0x9d, 0xe8, 0x2d, 0x33, 0x75, 0xe3, 0x19, 0xc6, 0x03, 0x09, 0x22, 0x23, /* Byte value: 0xe6 */ + 0x52, 0x39, 0x35, 0x90, 0xb5, 0x80, 0xf4, 0x7b, 0x35, 0x31, 0x07, 0xeb, 0x9f, 0x5b, 0x52, 0x27, /* Byte value: 0xe7 */ + 0x05, 0xbf, 0x86, 0xf9, 0x23, 0xa5, 0xe5, 0xe6, 0x86, 0x91, 0x88, 0x1f, 0x5f, 0x5e, 0x05, 0x30, /* Byte value: 0xe8 */ + 0x6c, 0xa9, 0x50, 0xed, 0x22, 0x72, 0x7e, 0x5a, 0x50, 0x07, 0xf8, 0xb7, 0xbb, 0xa7, 0x6c, 0x05, /* Byte value: 0xe9 */ + 0xd3, 0x60, 0x2e, 0xa4, 0x62, 0x4c, 0xab, 0x5b, 0x2e, 0x14, 0xc8, 0xff, 0x18, 0x48, 0xd3, 0xdb, /* Byte value: 0xea */ + 0xff, 0x22, 0x53, 0x15, 0xfa, 0xa9, 0x34, 0x61, 0x53, 0x7c, 0xa2, 0x74, 0xe9, 0xda, 0xff, 0x19, /* Byte value: 0xeb */ + 0x9a, 0xe2, 0x0f, 0x9e, 0x3e, 0x31, 0xa1, 0xd7, 0x0f, 0x54, 0xf1, 0x49, 0xd9, 0x4a, 0x9a, 0x6c, /* Byte value: 0xec */ + 0x2c, 0x42, 0x7d, 0xb1, 0x98, 0xe5, 0x9f, 0x3a, 0x7d, 0x68, 0x6a, 0x8b, 0xf1, 0x92, 0x2c, 0xc2, /* Byte value: 0xed */ + 0x29, 0xfd, 0xfb, 0x48, 0xbb, 0x40, 0x7a, 0xdc, 0xfb, 0xf9, 0xe2, 0x94, 0xae, 0xcc, 0x29, 0xf2, /* Byte value: 0xee */ + 0x5e, 0xef, 0xbf, 0x0f, 0x70, 0xcf, 0xfa, 0x71, 0xbf, 0x8f, 0x24, 0x7e, 0x4b, 0x32, 0x5e, 0x67, /* Byte value: 0xef */ + 0x57, 0x86, 0xb3, 0x69, 0x96, 0x25, 0x11, 0x9d, 0xb3, 0xa0, 0x8f, 0xf4, 0xc0, 0x05, 0x57, 0x17, /* Byte value: 0xf0 */ + 0xed, 0xf0, 0x4b, 0xd9, 0xf5, 0xbe, 0x21, 0x7a, 0x4b, 0x22, 0x37, 0xa3, 0x3c, 0xb4, 0xed, 0xf9, /* Byte value: 0xf1 */ + 0x46, 0x80, 0x68, 0xf2, 0x39, 0x51, 0xe6, 0x65, 0x68, 0x30, 0x62, 0x97, 0x20, 0xe0, 0x46, 0xe7, /* Byte value: 0xf2 */ + 0x34, 0x2d, 0xaa, 0x4c, 0xd1, 0x7b, 0x83, 0x2e, 0xaa, 0xd7, 0x2c, 0x62, 0x9a, 0x40, 0x34, 0x42, /* Byte value: 0xf3 */ + 0x72, 0xad, 0xc2, 0xbe, 0xe8, 0x2a, 0x65, 0x4b, 0xc2, 0xe7, 0x4e, 0xf5, 0xba, 0xa0, 0x72, 
0xa5, /* Byte value: 0xf4 */ + 0xb5, 0x74, 0xb1, 0x78, 0x06, 0xb7, 0xdc, 0x0e, 0xb1, 0xf2, 0xe3, 0x76, 0x1d, 0x53, 0xb5, 0xbe, /* Byte value: 0xf5 */ + 0x11, 0x06, 0xdb, 0x9b, 0xaf, 0x74, 0xf7, 0xf8, 0xdb, 0x90, 0xed, 0x63, 0xe0, 0xe5, 0x11, 0xf0, /* Byte value: 0xf6 */ + 0xe2, 0xf2, 0x02, 0x11, 0x90, 0x92, 0xcd, 0x93, 0x02, 0x52, 0x6c, 0x82, 0xdd, 0x56, 0xe2, 0xa9, /* Byte value: 0xf7 */ + 0xd6, 0xdf, 0xa8, 0x5d, 0x41, 0xe9, 0x4e, 0xbd, 0xa8, 0x85, 0x40, 0xe0, 0x47, 0x16, 0xd6, 0xeb, /* Byte value: 0xf8 */ + 0xee, 0x24, 0x88, 0x8e, 0x55, 0xdd, 0xc3, 0x99, 0x88, 0xec, 0x4f, 0x17, 0x09, 0x3f, 0xee, 0xe9, /* Byte value: 0xf9 */ + 0x90, 0x5f, 0xc0, 0xaf, 0x78, 0xb8, 0xa8, 0xd8, 0xc0, 0xb5, 0x22, 0x77, 0x67, 0xf6, 0x90, 0x0c, /* Byte value: 0xfa */ + 0x14, 0xb9, 0x5d, 0x62, 0x8c, 0xd1, 0x12, 0x1e, 0x5d, 0x01, 0x65, 0x7c, 0xbf, 0xbb, 0x14, 0xc0, /* Byte value: 0xfb */ + 0x3e, 0x90, 0x65, 0x7d, 0x97, 0xf2, 0x8a, 0x21, 0x65, 0x36, 0xff, 0x5c, 0x24, 0xfc, 0x3e, 0x22, /* Byte value: 0xfc */ + 0x86, 0x7e, 0x1f, 0x16, 0x34, 0x2b, 0x06, 0xc5, 0x1f, 0x81, 0x17, 0xd3, 0xfe, 0xbf, 0x86, 0x6d, /* Byte value: 0xfd */ + 0x49, 0x82, 0x21, 0x3a, 0x5c, 0x7d, 0x0a, 0x8c, 0x21, 0x40, 0x39, 0xb6, 0xc1, 0x02, 0x49, 0xb7, /* Byte value: 0xfe */ + 0xf7, 0x07, 0x1e, 0xff, 0x7c, 0x62, 0x81, 0x6d, 0x1e, 0xa8, 0x21, 0x92, 0x71, 0x94, 0xf7, 0xd8, /* Byte value: 0xff */ + 0x0a, 0xbd, 0xcf, 0x31, 0x46, 0x89, 0x09, 0x0f, 0xcf, 0xe1, 0xd3, 0x3e, 0xbe, 0xbc, 0x0a, 0x60, /* Byte value: 0x00 */ + + /* Matrix row: 3 */ + 0xa8, 0x78, 0x41, 0x9a, 0x63, 0x9a, 0xaa, 0xcc, 0x6d, 0x6b, 0xb6, 0x2d, 0xbf, 0x63, 0x8e, 0x1b, /* Byte value: 0x01 */ + 0xb4, 0x31, 0xad, 0x42, 0xc9, 0x42, 0x37, 0x9b, 0xc6, 0x80, 0x29, 0xcb, 0xc0, 0xc9, 0x15, 0xf8, /* Byte value: 0x02 */ + 0x96, 0xde, 0x29, 0xd5, 0x90, 0xd5, 0x18, 0x10, 0xbc, 0xed, 0x40, 0xd2, 0x7a, 0x90, 0xf0, 0x4e, /* Byte value: 0x03 */ + 0x1e, 0xe4, 0x7c, 0xcc, 0x37, 0xcc, 0xa4, 0x79, 0x97, 0x9a, 0x27, 0xb6, 0xd7, 0x37, 0xe2, 0xd3, /* Byte value: 0x04 */ + 0x8a, 0x97, 0xc5, 0x0d, 0x3a, 0x0d, 0x85, 0x47, 0x17, 0x06, 0xdf, 0x34, 0x05, 0x3a, 0x6b, 0xad, /* Byte value: 0x05 */ + 0xf5, 0x02, 0xcd, 0x8d, 0x2d, 0x8d, 0xe6, 0x05, 0x54, 0x61, 0x14, 0x71, 0xb0, 0x2d, 0x33, 0x2f, /* Byte value: 0x06 */ + 0x9f, 0x98, 0x64, 0x8f, 0x0e, 0x8f, 0x01, 0xbf, 0x52, 0x33, 0xb9, 0x79, 0xcb, 0x0e, 0x0a, 0x96, /* Byte value: 0x07 */ + 0xa5, 0xa7, 0xef, 0xe8, 0x04, 0xe8, 0xc1, 0x3f, 0xfb, 0x57, 0xfc, 0x26, 0x9d, 0x04, 0x86, 0xa3, /* Byte value: 0x08 */ + 0x13, 0x3b, 0xd2, 0xbe, 0x50, 0xbe, 0xcf, 0x8a, 0x01, 0xa6, 0x6d, 0xbd, 0xf5, 0x50, 0xea, 0x6b, /* Byte value: 0x09 */ + 0x39, 0x25, 0x53, 0x79, 0x38, 0x79, 0x04, 0xb9, 0x8b, 0xcc, 0xa1, 0x27, 0xaa, 0x38, 0x28, 0x1d, /* Byte value: 0x0a */ + 0xac, 0xe1, 0xa2, 0xb2, 0x9a, 0xb2, 0xd8, 0x90, 0x15, 0x89, 0x05, 0x8d, 0x2c, 0x9a, 0x7c, 0x7b, /* Byte value: 0x0b */ + 0x2d, 0x9d, 0xba, 0xf1, 0xa3, 0xf1, 0x7d, 0x56, 0xd0, 0x20, 0x9b, 0x42, 0x30, 0xa3, 0x94, 0x3e, /* Byte value: 0x0c */ + 0x83, 0xd1, 0x88, 0x57, 0xa4, 0x57, 0x9c, 0xe8, 0xf9, 0xd8, 0x26, 0x9f, 0xb4, 0xa4, 0x91, 0x75, /* Byte value: 0x0d */ + 0x86, 0xff, 0x23, 0x75, 0xf2, 0x75, 0x13, 0xa3, 0x9f, 0xe3, 0xc9, 0x17, 0x73, 0xf2, 0xbe, 0x0d, /* Byte value: 0x0e */ + 0xb9, 0xee, 0x03, 0x30, 0xae, 0x30, 0x5c, 0x68, 0x50, 0xbc, 0x63, 0xc0, 0xe2, 0xae, 0x1d, 0x40, /* Byte value: 0x0f */ + 0x76, 0xd3, 0x45, 0xda, 0x89, 0xda, 0x7a, 0xed, 0xad, 0xb9, 0x32, 0xee, 0x04, 0x89, 0xa2, 0x5a, /* Byte value: 0x10 */ + 0x0f, 0x72, 0x3e, 0x66, 0xfa, 0x66, 0x52, 0xdd, 0xaa, 0x4d, 0xf2, 0x5b, 0x8a, 0xfa, 0x71, 0x88, /* Byte value: 
0x11 */ + 0x5a, 0xf9, 0xb7, 0x21, 0x85, 0x21, 0xfa, 0xac, 0x63, 0x40, 0xf5, 0x84, 0x60, 0x85, 0xeb, 0x7c, /* Byte value: 0x12 */ + 0xa0, 0x89, 0x44, 0xca, 0x52, 0xca, 0x4e, 0x74, 0x9d, 0x6c, 0x13, 0xae, 0x5a, 0x52, 0xa9, 0xdb, /* Byte value: 0x13 */ + 0x92, 0x47, 0xca, 0xfd, 0x69, 0xfd, 0x6a, 0x4c, 0xc4, 0x0f, 0xf3, 0x72, 0xe9, 0x69, 0x02, 0x2e, /* Byte value: 0x14 */ + 0xe2, 0xa0, 0xfc, 0x1b, 0x84, 0x1b, 0x5b, 0xd3, 0x2d, 0x25, 0xca, 0x6c, 0xd6, 0x84, 0x2b, 0x24, /* Byte value: 0x15 */ + 0x34, 0xfa, 0xfd, 0x0b, 0x5f, 0x0b, 0x6f, 0x4a, 0x1d, 0xf0, 0xeb, 0x2c, 0x88, 0x5f, 0x20, 0xa5, /* Byte value: 0x16 */ + 0xee, 0xc8, 0x1a, 0x63, 0x4c, 0x63, 0xcd, 0x37, 0xa5, 0xc0, 0xdc, 0x4f, 0xa0, 0x4c, 0xfe, 0x84, /* Byte value: 0x17 */ + 0x6d, 0x19, 0x92, 0x34, 0xe8, 0x34, 0x51, 0xdf, 0x5c, 0x18, 0xfa, 0xd0, 0x14, 0xe8, 0x6f, 0xf1, /* Byte value: 0x18 */ + 0x1a, 0x7d, 0x9f, 0xe4, 0xce, 0xe4, 0xd6, 0x25, 0xef, 0x78, 0x94, 0x16, 0x44, 0xce, 0x10, 0xb3, /* Byte value: 0x19 */ + 0x24, 0xdb, 0xf7, 0xab, 0x3d, 0xab, 0x64, 0xf9, 0x3e, 0xfe, 0x62, 0xe9, 0x81, 0x3d, 0x6e, 0xe6, /* Byte value: 0x1a */ + 0x1f, 0x53, 0x34, 0xc6, 0x98, 0xc6, 0x59, 0x6e, 0x89, 0x43, 0x7b, 0x9e, 0x83, 0x98, 0x3f, 0xcb, /* Byte value: 0x1b */ + 0xd2, 0xc3, 0xe2, 0x38, 0x22, 0x38, 0x46, 0xc5, 0x48, 0x37, 0x92, 0xe0, 0xcd, 0x22, 0xf9, 0xe1, /* Byte value: 0x1c */ + 0x18, 0xd0, 0x0f, 0xf0, 0x53, 0xf0, 0xef, 0x0b, 0xd3, 0x09, 0x2c, 0x46, 0xec, 0x53, 0x69, 0x83, /* Byte value: 0x1d */ + 0x37, 0xe0, 0x25, 0x15, 0x6d, 0x15, 0xab, 0x73, 0x3f, 0x58, 0x0f, 0x54, 0x74, 0x6d, 0x84, 0x8d, /* Byte value: 0x1e */ + 0x6a, 0x9a, 0xa9, 0x02, 0x23, 0x02, 0xe7, 0xba, 0x06, 0x52, 0xad, 0x08, 0x7b, 0x23, 0x39, 0xb9, /* Byte value: 0x1f */ + 0x3f, 0x11, 0x20, 0x45, 0x5c, 0x45, 0x4f, 0xcb, 0xcf, 0x5f, 0xaa, 0xd7, 0x91, 0x5c, 0xa3, 0x4d, /* Byte value: 0x20 */ + 0xae, 0x4c, 0x32, 0xa6, 0x07, 0xa6, 0xe1, 0xbe, 0x29, 0xf8, 0xbd, 0xdd, 0x84, 0x07, 0x05, 0x4b, /* Byte value: 0x21 */ + 0x10, 0x21, 0x0a, 0xa0, 0x62, 0xa0, 0x0b, 0xb3, 0x23, 0x0e, 0x89, 0xc5, 0x09, 0x62, 0x4e, 0x43, /* Byte value: 0x22 */ + 0x46, 0xb0, 0x5b, 0xf9, 0x2f, 0xf9, 0x67, 0xfb, 0xc8, 0xab, 0x6a, 0x62, 0x1f, 0x2f, 0x70, 0x9f, /* Byte value: 0x23 */ + 0x6c, 0xae, 0xda, 0x3e, 0x47, 0x3e, 0xac, 0xc8, 0x42, 0xc1, 0xa6, 0xf8, 0x40, 0x47, 0xb2, 0xe9, /* Byte value: 0x24 */ + 0xbc, 0xc0, 0xa8, 0x12, 0xf8, 0x12, 0xd3, 0x23, 0x36, 0x87, 0x8c, 0x48, 0x25, 0xf8, 0x32, 0x38, /* Byte value: 0x25 */ + 0x68, 0x37, 0x39, 0x16, 0xbe, 0x16, 0xde, 0x94, 0x3a, 0x23, 0x15, 0x58, 0xd3, 0xbe, 0x40, 0x89, /* Byte value: 0x26 */ + 0x0b, 0xeb, 0xdd, 0x4e, 0x03, 0x4e, 0x20, 0x81, 0xd2, 0xaf, 0x41, 0xfb, 0x19, 0x03, 0x83, 0xe8, /* Byte value: 0x27 */ + 0x3e, 0xa6, 0x68, 0x4f, 0xf3, 0x4f, 0xb2, 0xdc, 0xd1, 0x86, 0xf6, 0xff, 0xc5, 0xf3, 0x7e, 0x55, /* Byte value: 0x28 */ + 0xfe, 0xe9, 0x10, 0xc3, 0x2e, 0xc3, 0xc6, 0x84, 0x86, 0xce, 0x55, 0x8a, 0xa9, 0x2e, 0xb0, 0xc7, /* Byte value: 0x29 */ + 0xa9, 0xcf, 0x09, 0x90, 0xcc, 0x90, 0x57, 0xdb, 0x73, 0xb2, 0xea, 0x05, 0xeb, 0xcc, 0x53, 0x03, /* Byte value: 0x2a */ + 0x28, 0xb3, 0x11, 0xd3, 0xf5, 0xd3, 0xf2, 0x1d, 0xb6, 0x1b, 0x74, 0xca, 0xf7, 0xf5, 0xbb, 0x46, /* Byte value: 0x2b */ + 0x7c, 0x8f, 0xd0, 0x9e, 0x25, 0x9e, 0xa7, 0x7b, 0x61, 0xcf, 0x2f, 0x3d, 0x49, 0x25, 0xfc, 0xaa, /* Byte value: 0x2c */ + 0xf2, 0x81, 0xf6, 0xbb, 0xe6, 0xbb, 0x50, 0x60, 0x0e, 0x2b, 0x43, 0xa9, 0xdf, 0xe6, 0x65, 0x67, /* Byte value: 0x2d */ + 0xbf, 0xda, 0x70, 0x0c, 0xca, 0x0c, 0x17, 0x1a, 0x14, 0x2f, 0x68, 0x30, 0xd9, 0xca, 0x96, 0x10, /* Byte value: 0x2e */ + 0xf4, 0xb5, 0x85, 0x87, 0x82, 0x87, 
0x1b, 0x12, 0x4a, 0xb8, 0x48, 0x59, 0xe4, 0x82, 0xee, 0x37, /* Byte value: 0x2f */ + 0xcb, 0xa4, 0xa5, 0xc2, 0xde, 0xc2, 0x54, 0xd9, 0x85, 0xe7, 0xe2, 0x8e, 0x75, 0xde, 0x4d, 0x7a, /* Byte value: 0x30 */ + 0x06, 0x34, 0x73, 0x3c, 0x64, 0x3c, 0x4b, 0x72, 0x44, 0x93, 0x0b, 0xf0, 0x3b, 0x64, 0x8b, 0x50, /* Byte value: 0x31 */ + 0xf8, 0xdd, 0x63, 0xff, 0x4a, 0xff, 0x8d, 0xf6, 0xc2, 0x5d, 0x5e, 0x7a, 0x92, 0x4a, 0x3b, 0x97, /* Byte value: 0x32 */ + 0xbd, 0x77, 0xe0, 0x18, 0x57, 0x18, 0x2e, 0x34, 0x28, 0x5e, 0xd0, 0x60, 0x71, 0x57, 0xef, 0x20, /* Byte value: 0x33 */ + 0x75, 0xc9, 0x9d, 0xc4, 0xbb, 0xc4, 0xbe, 0xd4, 0x8f, 0x11, 0xd6, 0x96, 0xf8, 0xbb, 0x06, 0x72, /* Byte value: 0x34 */ + 0x03, 0x1a, 0xd8, 0x1e, 0x32, 0x1e, 0xc4, 0x39, 0x22, 0xa8, 0xe4, 0x78, 0xfc, 0x32, 0xa4, 0x28, /* Byte value: 0x35 */ + 0x4c, 0xec, 0xce, 0xbd, 0x83, 0xbd, 0xba, 0x6d, 0x04, 0xdd, 0x77, 0xb1, 0x52, 0x83, 0x2e, 0x6f, /* Byte value: 0x36 */ + 0x4b, 0x6f, 0xf5, 0x8b, 0x48, 0x8b, 0x0c, 0x08, 0x5e, 0x97, 0x20, 0x69, 0x3d, 0x48, 0x78, 0x27, /* Byte value: 0x37 */ + 0xc0, 0x4f, 0x78, 0x8c, 0xdd, 0x8c, 0x74, 0x58, 0x57, 0x48, 0xa3, 0x75, 0x6c, 0xdd, 0xce, 0x92, /* Byte value: 0x38 */ + 0x04, 0x99, 0xe3, 0x28, 0xf9, 0x28, 0x72, 0x5c, 0x78, 0xe2, 0xb3, 0xa0, 0x93, 0xf9, 0xf2, 0x60, /* Byte value: 0x39 */ + 0xb3, 0xb2, 0x96, 0x74, 0x02, 0x74, 0x81, 0xfe, 0x9c, 0xca, 0x7e, 0x13, 0xaf, 0x02, 0x43, 0xb0, /* Byte value: 0x3a */ + 0xb6, 0x9c, 0x3d, 0x56, 0x54, 0x56, 0x0e, 0xb5, 0xfa, 0xf1, 0x91, 0x9b, 0x68, 0x54, 0x6c, 0xc8, /* Byte value: 0x3b */ + 0x51, 0x12, 0x6a, 0x6f, 0x86, 0x6f, 0xda, 0x2d, 0xb1, 0xef, 0xb4, 0x7f, 0x79, 0x86, 0x68, 0x94, /* Byte value: 0x3c */ + 0xeb, 0xe6, 0xb1, 0x41, 0x1a, 0x41, 0x42, 0x7c, 0xc3, 0xfb, 0x33, 0xc7, 0x67, 0x1a, 0xd1, 0xfc, /* Byte value: 0x3d */ + 0x98, 0x1b, 0x5f, 0xb9, 0xc5, 0xb9, 0xb7, 0xda, 0x08, 0x79, 0xee, 0xa1, 0xa4, 0xc5, 0x5c, 0xde, /* Byte value: 0x3e */ + 0x23, 0x58, 0xcc, 0x9d, 0xf6, 0x9d, 0xd2, 0x9c, 0x64, 0xb4, 0x35, 0x31, 0xee, 0xf6, 0x38, 0xae, /* Byte value: 0x3f */ + 0xab, 0x62, 0x99, 0x84, 0x51, 0x84, 0x6e, 0xf5, 0x4f, 0xc3, 0x52, 0x55, 0x43, 0x51, 0x2a, 0x33, /* Byte value: 0x40 */ + 0xb2, 0x05, 0xde, 0x7e, 0xad, 0x7e, 0x7c, 0xe9, 0x82, 0x13, 0x22, 0x3b, 0xfb, 0xad, 0x9e, 0xa8, /* Byte value: 0x41 */ + 0x99, 0xac, 0x17, 0xb3, 0x6a, 0xb3, 0x4a, 0xcd, 0x16, 0xa0, 0xb2, 0x89, 0xf0, 0x6a, 0x81, 0xc6, /* Byte value: 0x42 */ + 0x89, 0x8d, 0x1d, 0x13, 0x08, 0x13, 0x41, 0x7e, 0x35, 0xae, 0x3b, 0x4c, 0xf9, 0x08, 0xcf, 0x85, /* Byte value: 0x43 */ + 0xdf, 0x1c, 0x4c, 0x4a, 0x45, 0x4a, 0x2d, 0x36, 0xde, 0x0b, 0xd8, 0xeb, 0xef, 0x45, 0xf1, 0x59, /* Byte value: 0x44 */ + 0x0d, 0xdf, 0xae, 0x72, 0x67, 0x72, 0x6b, 0xf3, 0x96, 0x3c, 0x4a, 0x0b, 0x22, 0x67, 0x08, 0xb8, /* Byte value: 0x45 */ + 0x31, 0xd4, 0x56, 0x29, 0x09, 0x29, 0xe0, 0x01, 0x7b, 0xcb, 0x04, 0xa4, 0x4f, 0x09, 0x0f, 0xdd, /* Byte value: 0x46 */ + 0x70, 0xe7, 0x36, 0xe6, 0xed, 0xe6, 0x31, 0x9f, 0xe9, 0x2a, 0x39, 0x1e, 0x3f, 0xed, 0x29, 0x0a, /* Byte value: 0x47 */ + 0x73, 0xfd, 0xee, 0xf8, 0xdf, 0xf8, 0xf5, 0xa6, 0xcb, 0x82, 0xdd, 0x66, 0xc3, 0xdf, 0x8d, 0x22, /* Byte value: 0x48 */ + 0x1d, 0xfe, 0xa4, 0xd2, 0x05, 0xd2, 0x60, 0x40, 0xb5, 0x32, 0xc3, 0xce, 0x2b, 0x05, 0x46, 0xfb, /* Byte value: 0x49 */ + 0x8d, 0x14, 0xfe, 0x3b, 0xf1, 0x3b, 0x33, 0x22, 0x4d, 0x4c, 0x88, 0xec, 0x6a, 0xf1, 0x3d, 0xe5, /* Byte value: 0x4a */ + 0xf1, 0x9b, 0x2e, 0xa5, 0xd4, 0xa5, 0x94, 0x59, 0x2c, 0x83, 0xa7, 0xd1, 0x23, 0xd4, 0xc1, 0x4f, /* Byte value: 0x4b */ + 0x7d, 0x38, 0x98, 0x94, 0x8a, 0x94, 0x5a, 0x6c, 0x7f, 0x16, 0x73, 0x15, 0x1d, 
0x8a, 0x21, 0xb2, /* Byte value: 0x4c */ + 0x17, 0xa2, 0x31, 0x96, 0xa9, 0x96, 0xbd, 0xd6, 0x79, 0x44, 0xde, 0x1d, 0x66, 0xa9, 0x18, 0x0b, /* Byte value: 0x4d */ + 0x2c, 0x2a, 0xf2, 0xfb, 0x0c, 0xfb, 0x80, 0x41, 0xce, 0xf9, 0xc7, 0x6a, 0x64, 0x0c, 0x49, 0x26, /* Byte value: 0x4e */ + 0x35, 0x4d, 0xb5, 0x01, 0xf0, 0x01, 0x92, 0x5d, 0x03, 0x29, 0xb7, 0x04, 0xdc, 0xf0, 0xfd, 0xbd, /* Byte value: 0x4f */ + 0x88, 0x3a, 0x55, 0x19, 0xa7, 0x19, 0xbc, 0x69, 0x2b, 0x77, 0x67, 0x64, 0xad, 0xa7, 0x12, 0x9d, /* Byte value: 0x50 */ + 0x67, 0x45, 0x07, 0x70, 0x44, 0x70, 0x8c, 0x49, 0x90, 0x6e, 0xe7, 0x03, 0x59, 0x44, 0x31, 0x01, /* Byte value: 0x51 */ + 0xe1, 0xba, 0x24, 0x05, 0xb6, 0x05, 0x9f, 0xea, 0x0f, 0x8d, 0x2e, 0x14, 0x2a, 0xb6, 0x8f, 0x0c, /* Byte value: 0x52 */ + 0xb5, 0x86, 0xe5, 0x48, 0x66, 0x48, 0xca, 0x8c, 0xd8, 0x59, 0x75, 0xe3, 0x94, 0x66, 0xc8, 0xe0, /* Byte value: 0x53 */ + 0x64, 0x5f, 0xdf, 0x6e, 0x76, 0x6e, 0x48, 0x70, 0xb2, 0xc6, 0x03, 0x7b, 0xa5, 0x76, 0x95, 0x29, /* Byte value: 0x54 */ + 0xb1, 0x1f, 0x06, 0x60, 0x9f, 0x60, 0xb8, 0xd0, 0xa0, 0xbb, 0xc6, 0x43, 0x07, 0x9f, 0x3a, 0x80, /* Byte value: 0x55 */ + 0x08, 0xf1, 0x05, 0x50, 0x31, 0x50, 0xe4, 0xb8, 0xf0, 0x07, 0xa5, 0x83, 0xe5, 0x31, 0x27, 0xc0, /* Byte value: 0x56 */ + 0xe5, 0x23, 0xc7, 0x2d, 0x4f, 0x2d, 0xed, 0xb6, 0x77, 0x6f, 0x9d, 0xb4, 0xb9, 0x4f, 0x7d, 0x6c, /* Byte value: 0x57 */ + 0x1c, 0x49, 0xec, 0xd8, 0xaa, 0xd8, 0x9d, 0x57, 0xab, 0xeb, 0x9f, 0xe6, 0x7f, 0xaa, 0x9b, 0xe3, /* Byte value: 0x58 */ + 0x6b, 0x2d, 0xe1, 0x08, 0x8c, 0x08, 0x1a, 0xad, 0x18, 0x8b, 0xf1, 0x20, 0x2f, 0x8c, 0xe4, 0xa1, /* Byte value: 0x59 */ + 0x5c, 0xcd, 0xc4, 0x1d, 0xe1, 0x1d, 0xb1, 0xde, 0x27, 0xd3, 0xfe, 0x74, 0x5b, 0xe1, 0x60, 0x2c, /* Byte value: 0x5a */ + 0xa3, 0x93, 0x9c, 0xd4, 0x60, 0xd4, 0x8a, 0x4d, 0xbf, 0xc4, 0xf7, 0xd6, 0xa6, 0x60, 0x0d, 0xf3, /* Byte value: 0x5b */ + 0x7a, 0xbb, 0xa3, 0xa2, 0x41, 0xa2, 0xec, 0x09, 0x25, 0x5c, 0x24, 0xcd, 0x72, 0x41, 0x77, 0xfa, /* Byte value: 0x5c */ + 0xe8, 0xfc, 0x69, 0x5f, 0x28, 0x5f, 0x86, 0x45, 0xe1, 0x53, 0xd7, 0xbf, 0x9b, 0x28, 0x75, 0xd4, /* Byte value: 0x5d */ + 0xda, 0x32, 0xe7, 0x68, 0x13, 0x68, 0xa2, 0x7d, 0xb8, 0x30, 0x37, 0x63, 0x28, 0x13, 0xde, 0x21, /* Byte value: 0x5e */ + 0xd7, 0xed, 0x49, 0x1a, 0x74, 0x1a, 0xc9, 0x8e, 0x2e, 0x0c, 0x7d, 0x68, 0x0a, 0x74, 0xd6, 0x99, /* Byte value: 0x5f */ + 0xfa, 0x70, 0xf3, 0xeb, 0xd7, 0xeb, 0xb4, 0xd8, 0xfe, 0x2c, 0xe6, 0x2a, 0x3a, 0xd7, 0x42, 0xa7, /* Byte value: 0x60 */ + 0xa7, 0x0a, 0x7f, 0xfc, 0x99, 0xfc, 0xf8, 0x11, 0xc7, 0x26, 0x44, 0x76, 0x35, 0x99, 0xff, 0x93, /* Byte value: 0x61 */ + 0x7f, 0x95, 0x08, 0x80, 0x17, 0x80, 0x63, 0x42, 0x43, 0x67, 0xcb, 0x45, 0xb5, 0x17, 0x58, 0x82, /* Byte value: 0x62 */ + 0xe4, 0x94, 0x8f, 0x27, 0xe0, 0x27, 0x10, 0xa1, 0x69, 0xb6, 0xc1, 0x9c, 0xed, 0xe0, 0xa0, 0x74, /* Byte value: 0x63 */ + 0x8f, 0xb9, 0x6e, 0x2f, 0x6c, 0x2f, 0x0a, 0x0c, 0x71, 0x3d, 0x30, 0xbc, 0xc2, 0x6c, 0x44, 0xd5, /* Byte value: 0x64 */ + 0xa1, 0x3e, 0x0c, 0xc0, 0xfd, 0xc0, 0xb3, 0x63, 0x83, 0xb5, 0x4f, 0x86, 0x0e, 0xfd, 0x74, 0xc3, /* Byte value: 0x65 */ + 0x52, 0x08, 0xb2, 0x71, 0xb4, 0x71, 0x1e, 0x14, 0x93, 0x47, 0x50, 0x07, 0x85, 0xb4, 0xcc, 0xbc, /* Byte value: 0x66 */ + 0xec, 0x65, 0x8a, 0x77, 0xd1, 0x77, 0xf4, 0x19, 0x99, 0xb1, 0x64, 0x1f, 0x08, 0xd1, 0x87, 0xb4, /* Byte value: 0x67 */ + 0x3b, 0x88, 0xc3, 0x6d, 0xa5, 0x6d, 0x3d, 0x97, 0xb7, 0xbd, 0x19, 0x77, 0x02, 0xa5, 0x51, 0x2d, /* Byte value: 0x68 */ + 0xa2, 0x24, 0xd4, 0xde, 0xcf, 0xde, 0x77, 0x5a, 0xa1, 0x1d, 0xab, 0xfe, 0xf2, 0xcf, 0xd0, 0xeb, /* Byte value: 0x69 */ + 
0x5f, 0xd7, 0x1c, 0x03, 0xd3, 0x03, 0x75, 0xe7, 0x05, 0x7b, 0x1a, 0x0c, 0xa7, 0xd3, 0xc4, 0x04, /* Byte value: 0x6a */ + 0x50, 0xa5, 0x22, 0x65, 0x29, 0x65, 0x27, 0x3a, 0xaf, 0x36, 0xe8, 0x57, 0x2d, 0x29, 0xb5, 0x8c, /* Byte value: 0x6b */ + 0x4a, 0xd8, 0xbd, 0x81, 0xe7, 0x81, 0xf1, 0x1f, 0x40, 0x4e, 0x7c, 0x41, 0x69, 0xe7, 0xa5, 0x3f, /* Byte value: 0x6c */ + 0x57, 0x26, 0x19, 0x53, 0xe2, 0x53, 0x91, 0x5f, 0xf5, 0x7c, 0xbf, 0x8f, 0x42, 0xe2, 0xe3, 0xc4, /* Byte value: 0x6d */ + 0x55, 0x8b, 0x89, 0x47, 0x7f, 0x47, 0xa8, 0x71, 0xc9, 0x0d, 0x07, 0xdf, 0xea, 0x7f, 0x9a, 0xf4, /* Byte value: 0x6e */ + 0xdc, 0x06, 0x94, 0x54, 0x77, 0x54, 0xe9, 0x0f, 0xfc, 0xa3, 0x3c, 0x93, 0x13, 0x77, 0x55, 0x71, /* Byte value: 0x6f */ + 0xde, 0xab, 0x04, 0x40, 0xea, 0x40, 0xd0, 0x21, 0xc0, 0xd2, 0x84, 0xc3, 0xbb, 0xea, 0x2c, 0x41, /* Byte value: 0x70 */ + 0x9d, 0x35, 0xf4, 0x9b, 0x93, 0x9b, 0x38, 0x91, 0x6e, 0x42, 0x01, 0x29, 0x63, 0x93, 0x73, 0xa6, /* Byte value: 0x71 */ + 0xe7, 0x8e, 0x57, 0x39, 0xd2, 0x39, 0xd4, 0x98, 0x4b, 0x1e, 0x25, 0xe4, 0x11, 0xd2, 0x04, 0x5c, /* Byte value: 0x72 */ + 0xaf, 0xfb, 0x7a, 0xac, 0xa8, 0xac, 0x1c, 0xa9, 0x37, 0x21, 0xe1, 0xf5, 0xd0, 0xa8, 0xd8, 0x53, /* Byte value: 0x73 */ + 0x97, 0x69, 0x61, 0xdf, 0x3f, 0xdf, 0xe5, 0x07, 0xa2, 0x34, 0x1c, 0xfa, 0x2e, 0x3f, 0x2d, 0x56, /* Byte value: 0x74 */ + 0xaa, 0xd5, 0xd1, 0x8e, 0xfe, 0x8e, 0x93, 0xe2, 0x51, 0x1a, 0x0e, 0x7d, 0x17, 0xfe, 0xf7, 0x2b, /* Byte value: 0x75 */ + 0x26, 0x76, 0x67, 0xbf, 0xa0, 0xbf, 0x5d, 0xd7, 0x02, 0x8f, 0xda, 0xb9, 0x29, 0xa0, 0x17, 0xd6, /* Byte value: 0x76 */ + 0x4d, 0x5b, 0x86, 0xb7, 0x2c, 0xb7, 0x47, 0x7a, 0x1a, 0x04, 0x2b, 0x99, 0x06, 0x2c, 0xf3, 0x77, /* Byte value: 0x77 */ + 0x54, 0x3c, 0xc1, 0x4d, 0xd0, 0x4d, 0x55, 0x66, 0xd7, 0xd4, 0x5b, 0xf7, 0xbe, 0xd0, 0x47, 0xec, /* Byte value: 0x78 */ + 0xf7, 0xaf, 0x5d, 0x99, 0xb0, 0x99, 0xdf, 0x2b, 0x68, 0x10, 0xac, 0x21, 0x18, 0xb0, 0x4a, 0x1f, /* Byte value: 0x79 */ + 0xd9, 0x28, 0x3f, 0x76, 0x21, 0x76, 0x66, 0x44, 0x9a, 0x98, 0xd3, 0x1b, 0xd4, 0x21, 0x7a, 0x09, /* Byte value: 0x7a */ + 0x84, 0x52, 0xb3, 0x61, 0x6f, 0x61, 0x2a, 0x8d, 0xa3, 0x92, 0x71, 0x47, 0xdb, 0x6f, 0xc7, 0x3d, /* Byte value: 0x7b */ + 0x41, 0x33, 0x60, 0xcf, 0xe4, 0xcf, 0xd1, 0x9e, 0x92, 0xe1, 0x3d, 0xba, 0x70, 0xe4, 0x26, 0xd7, /* Byte value: 0x7c */ + 0x82, 0x66, 0xc0, 0x5d, 0x0b, 0x5d, 0x61, 0xff, 0xe7, 0x01, 0x7a, 0xb7, 0xe0, 0x0b, 0x4c, 0x6d, /* Byte value: 0x7d */ + 0xd6, 0x5a, 0x01, 0x10, 0xdb, 0x10, 0x34, 0x99, 0x30, 0xd5, 0x21, 0x40, 0x5e, 0xdb, 0x0b, 0x81, /* Byte value: 0x7e */ + 0xb7, 0x2b, 0x75, 0x5c, 0xfb, 0x5c, 0xf3, 0xa2, 0xe4, 0x28, 0xcd, 0xb3, 0x3c, 0xfb, 0xb1, 0xd0, /* Byte value: 0x7f */ + 0xdb, 0x85, 0xaf, 0x62, 0xbc, 0x62, 0x5f, 0x6a, 0xa6, 0xe9, 0x6b, 0x4b, 0x7c, 0xbc, 0x03, 0x39, /* Byte value: 0x80 */ + 0x2b, 0xa9, 0xc9, 0xcd, 0xc7, 0xcd, 0x36, 0x24, 0x94, 0xb3, 0x90, 0xb2, 0x0b, 0xc7, 0x1f, 0x6e, /* Byte value: 0x81 */ + 0xa6, 0xbd, 0x37, 0xf6, 0x36, 0xf6, 0x05, 0x06, 0xd9, 0xff, 0x18, 0x5e, 0x61, 0x36, 0x22, 0x8b, /* Byte value: 0x82 */ + 0x38, 0x92, 0x1b, 0x73, 0x97, 0x73, 0xf9, 0xae, 0x95, 0x15, 0xfd, 0x0f, 0xfe, 0x97, 0xf5, 0x05, /* Byte value: 0x83 */ + 0xce, 0x8a, 0x0e, 0xe0, 0x88, 0xe0, 0xdb, 0x92, 0xe3, 0xdc, 0x0d, 0x06, 0xb2, 0x88, 0x62, 0x02, /* Byte value: 0x84 */ + 0x95, 0xc4, 0xf1, 0xcb, 0xa2, 0xcb, 0xdc, 0x29, 0x9e, 0x45, 0xa4, 0xaa, 0x86, 0xa2, 0x54, 0x66, /* Byte value: 0x85 */ + 0x71, 0x50, 0x7e, 0xec, 0x42, 0xec, 0xcc, 0x88, 0xf7, 0xf3, 0x65, 0x36, 0x6b, 0x42, 0xf4, 0x12, /* Byte value: 0x86 */ + 0xc3, 0x55, 0xa0, 0x92, 0xef, 0x92, 0xb0, 
0x61, 0x75, 0xe0, 0x47, 0x0d, 0x90, 0xef, 0x6a, 0xba, /* Byte value: 0x87 */ + 0x8e, 0x0e, 0x26, 0x25, 0xc3, 0x25, 0xf7, 0x1b, 0x6f, 0xe4, 0x6c, 0x94, 0x96, 0xc3, 0x99, 0xcd, /* Byte value: 0x88 */ + 0x9a, 0xb6, 0xcf, 0xad, 0x58, 0xad, 0x8e, 0xf4, 0x34, 0x08, 0x56, 0xf1, 0x0c, 0x58, 0x25, 0xee, /* Byte value: 0x89 */ + 0xef, 0x7f, 0x52, 0x69, 0xe3, 0x69, 0x30, 0x20, 0xbb, 0x19, 0x80, 0x67, 0xf4, 0xe3, 0x23, 0x9c, /* Byte value: 0x8a */ + 0x25, 0x6c, 0xbf, 0xa1, 0x92, 0xa1, 0x99, 0xee, 0x20, 0x27, 0x3e, 0xc1, 0xd5, 0x92, 0xb3, 0xfe, /* Byte value: 0x8b */ + 0xa4, 0x10, 0xa7, 0xe2, 0xab, 0xe2, 0x3c, 0x28, 0xe5, 0x8e, 0xa0, 0x0e, 0xc9, 0xab, 0x5b, 0xbb, /* Byte value: 0x8c */ + 0xe9, 0x4b, 0x21, 0x55, 0x87, 0x55, 0x7b, 0x52, 0xff, 0x8a, 0x8b, 0x97, 0xcf, 0x87, 0xa8, 0xcc, /* Byte value: 0x8d */ + 0x3c, 0x0b, 0xf8, 0x5b, 0x6e, 0x5b, 0x8b, 0xf2, 0xed, 0xf7, 0x4e, 0xaf, 0x6d, 0x6e, 0x07, 0x65, /* Byte value: 0x8e */ + 0x6f, 0xb4, 0x02, 0x20, 0x75, 0x20, 0x68, 0xf1, 0x60, 0x69, 0x42, 0x80, 0xbc, 0x75, 0x16, 0xc1, /* Byte value: 0x8f */ + 0x02, 0xad, 0x90, 0x14, 0x9d, 0x14, 0x39, 0x2e, 0x3c, 0x71, 0xb8, 0x50, 0xa8, 0x9d, 0x79, 0x30, /* Byte value: 0x90 */ + 0x01, 0xb7, 0x48, 0x0a, 0xaf, 0x0a, 0xfd, 0x17, 0x1e, 0xd9, 0x5c, 0x28, 0x54, 0xaf, 0xdd, 0x18, /* Byte value: 0x91 */ + 0x0a, 0x5c, 0x95, 0x44, 0xac, 0x44, 0xdd, 0x96, 0xcc, 0x76, 0x1d, 0xd3, 0x4d, 0xac, 0x5e, 0xf0, /* Byte value: 0x92 */ + 0x09, 0x46, 0x4d, 0x5a, 0x9e, 0x5a, 0x19, 0xaf, 0xee, 0xde, 0xf9, 0xab, 0xb1, 0x9e, 0xfa, 0xd8, /* Byte value: 0x93 */ + 0x94, 0x73, 0xb9, 0xc1, 0x0d, 0xc1, 0x21, 0x3e, 0x80, 0x9c, 0xf8, 0x82, 0xd2, 0x0d, 0x89, 0x7e, /* Byte value: 0x94 */ + 0xed, 0xd2, 0xc2, 0x7d, 0x7e, 0x7d, 0x09, 0x0e, 0x87, 0x68, 0x38, 0x37, 0x5c, 0x7e, 0x5a, 0xac, /* Byte value: 0x95 */ + 0x59, 0xe3, 0x6f, 0x3f, 0xb7, 0x3f, 0x3e, 0x95, 0x41, 0xe8, 0x11, 0xfc, 0x9c, 0xb7, 0x4f, 0x54, /* Byte value: 0x96 */ + 0x61, 0x71, 0x74, 0x4c, 0x20, 0x4c, 0xc7, 0x3b, 0xd4, 0xfd, 0xec, 0xf3, 0x62, 0x20, 0xba, 0x51, /* Byte value: 0x97 */ + 0x69, 0x80, 0x71, 0x1c, 0x11, 0x1c, 0x23, 0x83, 0x24, 0xfa, 0x49, 0x70, 0x87, 0x11, 0x9d, 0x91, /* Byte value: 0x98 */ + 0x29, 0x04, 0x59, 0xd9, 0x5a, 0xd9, 0x0f, 0x0a, 0xa8, 0xc2, 0x28, 0xe2, 0xa3, 0x5a, 0x66, 0x5e, /* Byte value: 0x99 */ + 0xb0, 0xa8, 0x4e, 0x6a, 0x30, 0x6a, 0x45, 0xc7, 0xbe, 0x62, 0x9a, 0x6b, 0x53, 0x30, 0xe7, 0x98, /* Byte value: 0x9a */ + 0x30, 0x63, 0x1e, 0x23, 0xa6, 0x23, 0x1d, 0x16, 0x65, 0x12, 0x58, 0x8c, 0x1b, 0xa6, 0xd2, 0xc5, /* Byte value: 0x9b */ + 0x60, 0xc6, 0x3c, 0x46, 0x8f, 0x46, 0x3a, 0x2c, 0xca, 0x24, 0xb0, 0xdb, 0x36, 0x8f, 0x67, 0x49, /* Byte value: 0x9c */ + 0x74, 0x7e, 0xd5, 0xce, 0x14, 0xce, 0x43, 0xc3, 0x91, 0xc8, 0x8a, 0xbe, 0xac, 0x14, 0xdb, 0x6a, /* Byte value: 0x9d */ + 0x22, 0xef, 0x84, 0x97, 0x59, 0x97, 0x2f, 0x8b, 0x7a, 0x6d, 0x69, 0x19, 0xba, 0x59, 0xe5, 0xb6, /* Byte value: 0x9e */ + 0x0c, 0x68, 0xe6, 0x78, 0xc8, 0x78, 0x96, 0xe4, 0x88, 0xe5, 0x16, 0x23, 0x76, 0xc8, 0xd5, 0xa0, /* Byte value: 0x9f */ + 0xcd, 0x90, 0xd6, 0xfe, 0xba, 0xfe, 0x1f, 0xab, 0xc1, 0x74, 0xe9, 0x7e, 0x4e, 0xba, 0xc6, 0x2a, /* Byte value: 0xa0 */ + 0x7b, 0x0c, 0xeb, 0xa8, 0xee, 0xa8, 0x11, 0x1e, 0x3b, 0x85, 0x78, 0xe5, 0x26, 0xee, 0xaa, 0xe2, /* Byte value: 0xa1 */ + 0x5b, 0x4e, 0xff, 0x2b, 0x2a, 0x2b, 0x07, 0xbb, 0x7d, 0x99, 0xa9, 0xac, 0x34, 0x2a, 0x36, 0x64, /* Byte value: 0xa2 */ + 0x40, 0x84, 0x28, 0xc5, 0x4b, 0xc5, 0x2c, 0x89, 0x8c, 0x38, 0x61, 0x92, 0x24, 0x4b, 0xfb, 0xcf, /* Byte value: 0xa3 */ + 0xe3, 0x17, 0xb4, 0x11, 0x2b, 0x11, 0xa6, 0xc4, 0x33, 0xfc, 0x96, 0x44, 0x82, 0x2b, 
0xf6, 0x3c, /* Byte value: 0xa4 */ + 0x14, 0xb8, 0xe9, 0x88, 0x9b, 0x88, 0x79, 0xef, 0x5b, 0xec, 0x3a, 0x65, 0x9a, 0x9b, 0xbc, 0x23, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0xfd, 0xf3, 0xc8, 0xdd, 0x1c, 0xdd, 0x02, 0xbd, 0xa4, 0x66, 0xb1, 0xf2, 0x55, 0x1c, 0x14, 0xef, /* Byte value: 0xa7 */ + 0x78, 0x16, 0x33, 0xb6, 0xdc, 0xb6, 0xd5, 0x27, 0x19, 0x2d, 0x9c, 0x9d, 0xda, 0xdc, 0x0e, 0xca, /* Byte value: 0xa8 */ + 0xad, 0x56, 0xea, 0xb8, 0x35, 0xb8, 0x25, 0x87, 0x0b, 0x50, 0x59, 0xa5, 0x78, 0x35, 0xa1, 0x63, /* Byte value: 0xa9 */ + 0xd0, 0x6e, 0x72, 0x2c, 0xbf, 0x2c, 0x7f, 0xeb, 0x74, 0x46, 0x2a, 0xb0, 0x65, 0xbf, 0x80, 0xd1, /* Byte value: 0xaa */ + 0x91, 0x5d, 0x12, 0xe3, 0x5b, 0xe3, 0xae, 0x75, 0xe6, 0xa7, 0x17, 0x0a, 0x15, 0x5b, 0xa6, 0x06, /* Byte value: 0xab */ + 0xfc, 0x44, 0x80, 0xd7, 0xb3, 0xd7, 0xff, 0xaa, 0xba, 0xbf, 0xed, 0xda, 0x01, 0xb3, 0xc9, 0xf7, /* Byte value: 0xac */ + 0xf9, 0x6a, 0x2b, 0xf5, 0xe5, 0xf5, 0x70, 0xe1, 0xdc, 0x84, 0x02, 0x52, 0xc6, 0xe5, 0xe6, 0x8f, /* Byte value: 0xad */ + 0xea, 0x51, 0xf9, 0x4b, 0xb5, 0x4b, 0xbf, 0x6b, 0xdd, 0x22, 0x6f, 0xef, 0x33, 0xb5, 0x0c, 0xe4, /* Byte value: 0xae */ + 0x85, 0xe5, 0xfb, 0x6b, 0xc0, 0x6b, 0xd7, 0x9a, 0xbd, 0x4b, 0x2d, 0x6f, 0x8f, 0xc0, 0x1a, 0x25, /* Byte value: 0xaf */ + 0x7e, 0x22, 0x40, 0x8a, 0xb8, 0x8a, 0x9e, 0x55, 0x5d, 0xbe, 0x97, 0x6d, 0xe1, 0xb8, 0x85, 0x9a, /* Byte value: 0xb0 */ + 0x77, 0x64, 0x0d, 0xd0, 0x26, 0xd0, 0x87, 0xfa, 0xb3, 0x60, 0x6e, 0xc6, 0x50, 0x26, 0x7f, 0x42, /* Byte value: 0xb1 */ + 0xc7, 0xcc, 0x43, 0xba, 0x16, 0xba, 0xc2, 0x3d, 0x0d, 0x02, 0xf4, 0xad, 0x03, 0x16, 0x98, 0xda, /* Byte value: 0xb2 */ + 0xc5, 0x61, 0xd3, 0xae, 0x8b, 0xae, 0xfb, 0x13, 0x31, 0x73, 0x4c, 0xfd, 0xab, 0x8b, 0xe1, 0xea, /* Byte value: 0xb3 */ + 0x5d, 0x7a, 0x8c, 0x17, 0x4e, 0x17, 0x4c, 0xc9, 0x39, 0x0a, 0xa2, 0x5c, 0x0f, 0x4e, 0xbd, 0x34, /* Byte value: 0xb4 */ + 0x3a, 0x3f, 0x8b, 0x67, 0x0a, 0x67, 0xc0, 0x80, 0xa9, 0x64, 0x45, 0x5f, 0x56, 0x0a, 0x8c, 0x35, /* Byte value: 0xb5 */ + 0xd5, 0x40, 0xd9, 0x0e, 0xe9, 0x0e, 0xf0, 0xa0, 0x12, 0x7d, 0xc5, 0x38, 0xa2, 0xe9, 0xaf, 0xa9, /* Byte value: 0xb6 */ + 0x66, 0xf2, 0x4f, 0x7a, 0xeb, 0x7a, 0x71, 0x5e, 0x8e, 0xb7, 0xbb, 0x2b, 0x0d, 0xeb, 0xec, 0x19, /* Byte value: 0xb7 */ + 0x8b, 0x20, 0x8d, 0x07, 0x95, 0x07, 0x78, 0x50, 0x09, 0xdf, 0x83, 0x1c, 0x51, 0x95, 0xb6, 0xb5, /* Byte value: 0xb8 */ + 0x49, 0xc2, 0x65, 0x9f, 0xd5, 0x9f, 0x35, 0x26, 0x62, 0xe6, 0x98, 0x39, 0x95, 0xd5, 0x01, 0x17, /* Byte value: 0xb9 */ + 0xc2, 0xe2, 0xe8, 0x98, 0x40, 0x98, 0x4d, 0x76, 0x6b, 0x39, 0x1b, 0x25, 0xc4, 0x40, 0xb7, 0xa2, /* Byte value: 0xba */ + 0xc6, 0x7b, 0x0b, 0xb0, 0xb9, 0xb0, 0x3f, 0x2a, 0x13, 0xdb, 0xa8, 0x85, 0x57, 0xb9, 0x45, 0xc2, /* Byte value: 0xbb */ + 0x56, 0x91, 0x51, 0x59, 0x4d, 0x59, 0x6c, 0x48, 0xeb, 0xa5, 0xe3, 0xa7, 0x16, 0x4d, 0x3e, 0xdc, /* Byte value: 0xbc */ + 0x4e, 0x41, 0x5e, 0xa9, 0x1e, 0xa9, 0x83, 0x43, 0x38, 0xac, 0xcf, 0xe1, 0xfa, 0x1e, 0x57, 0x5f, /* Byte value: 0xbd */ + 0x27, 0xc1, 0x2f, 0xb5, 0x0f, 0xb5, 0xa0, 0xc0, 0x1c, 0x56, 0x86, 0x91, 0x7d, 0x0f, 0xca, 0xce, /* Byte value: 0xbe */ + 0xe6, 0x39, 0x1f, 0x33, 0x7d, 0x33, 0x29, 0x8f, 0x55, 0xc7, 0x79, 0xcc, 0x45, 0x7d, 0xd9, 0x44, /* Byte value: 0xbf */ + 0x93, 0xf0, 0x82, 0xf7, 0xc6, 0xf7, 0x97, 0x5b, 0xda, 0xd6, 0xaf, 0x5a, 0xbd, 0xc6, 0xdf, 0x36, /* Byte value: 0xc0 */ + 0xbb, 0x43, 0x93, 0x24, 0x33, 0x24, 0x65, 0x46, 0x6c, 0xcd, 0xdb, 0x90, 0x4a, 0x33, 0x64, 0x70, /* Byte value: 0xc1 */ + 0xd1, 
0xd9, 0x3a, 0x26, 0x10, 0x26, 0x82, 0xfc, 0x6a, 0x9f, 0x76, 0x98, 0x31, 0x10, 0x5d, 0xc9, /* Byte value: 0xc2 */ + 0x63, 0xdc, 0xe4, 0x58, 0xbd, 0x58, 0xfe, 0x15, 0xe8, 0x8c, 0x54, 0xa3, 0xca, 0xbd, 0xc3, 0x61, /* Byte value: 0xc3 */ + 0xc1, 0xf8, 0x30, 0x86, 0x72, 0x86, 0x89, 0x4f, 0x49, 0x91, 0xff, 0x5d, 0x38, 0x72, 0x13, 0x8a, /* Byte value: 0xc4 */ + 0x45, 0xaa, 0x83, 0xe7, 0x1d, 0xe7, 0xa3, 0xc2, 0xea, 0x03, 0x8e, 0x1a, 0xe3, 0x1d, 0xd4, 0xb7, /* Byte value: 0xc5 */ + 0xc8, 0xbe, 0x7d, 0xdc, 0xec, 0xdc, 0x90, 0xe0, 0xa7, 0x4f, 0x06, 0xf6, 0x89, 0xec, 0xe9, 0x52, /* Byte value: 0xc6 */ + 0x16, 0x15, 0x79, 0x9c, 0x06, 0x9c, 0x40, 0xc1, 0x67, 0x9d, 0x82, 0x35, 0x32, 0x06, 0xc5, 0x13, /* Byte value: 0xc7 */ + 0x1b, 0xca, 0xd7, 0xee, 0x61, 0xee, 0x2b, 0x32, 0xf1, 0xa1, 0xc8, 0x3e, 0x10, 0x61, 0xcd, 0xab, /* Byte value: 0xc8 */ + 0x20, 0x42, 0x14, 0x83, 0xc4, 0x83, 0x16, 0xa5, 0x46, 0x1c, 0xd1, 0x49, 0x12, 0xc4, 0x9c, 0x86, /* Byte value: 0xc9 */ + 0x9b, 0x01, 0x87, 0xa7, 0xf7, 0xa7, 0x73, 0xe3, 0x2a, 0xd1, 0x0a, 0xd9, 0x58, 0xf7, 0xf8, 0xf6, /* Byte value: 0xca */ + 0xf3, 0x36, 0xbe, 0xb1, 0x49, 0xb1, 0xad, 0x77, 0x10, 0xf2, 0x1f, 0x81, 0x8b, 0x49, 0xb8, 0x7f, /* Byte value: 0xcb */ + 0xb8, 0x59, 0x4b, 0x3a, 0x01, 0x3a, 0xa1, 0x7f, 0x4e, 0x65, 0x3f, 0xe8, 0xb6, 0x01, 0xc0, 0x58, /* Byte value: 0xcc */ + 0xf0, 0x2c, 0x66, 0xaf, 0x7b, 0xaf, 0x69, 0x4e, 0x32, 0x5a, 0xfb, 0xf9, 0x77, 0x7b, 0x1c, 0x57, /* Byte value: 0xcd */ + 0x2f, 0x30, 0x2a, 0xe5, 0x3e, 0xe5, 0x44, 0x78, 0xec, 0x51, 0x23, 0x12, 0x98, 0x3e, 0xed, 0x0e, /* Byte value: 0xce */ + 0xba, 0xf4, 0xdb, 0x2e, 0x9c, 0x2e, 0x98, 0x51, 0x72, 0x14, 0x87, 0xb8, 0x1e, 0x9c, 0xb9, 0x68, /* Byte value: 0xcf */ + 0x4f, 0xf6, 0x16, 0xa3, 0xb1, 0xa3, 0x7e, 0x54, 0x26, 0x75, 0x93, 0xc9, 0xae, 0xb1, 0x8a, 0x47, /* Byte value: 0xd0 */ + 0xbe, 0x6d, 0x38, 0x06, 0x65, 0x06, 0xea, 0x0d, 0x0a, 0xf6, 0x34, 0x18, 0x8d, 0x65, 0x4b, 0x08, /* Byte value: 0xd1 */ + 0x12, 0x8c, 0x9a, 0xb4, 0xff, 0xb4, 0x32, 0x9d, 0x1f, 0x7f, 0x31, 0x95, 0xa1, 0xff, 0x37, 0x73, /* Byte value: 0xd2 */ + 0x43, 0x9e, 0xf0, 0xdb, 0x79, 0xdb, 0xe8, 0xb0, 0xae, 0x90, 0x85, 0xea, 0xd8, 0x79, 0x5f, 0xe7, /* Byte value: 0xd3 */ + 0xcf, 0x3d, 0x46, 0xea, 0x27, 0xea, 0x26, 0x85, 0xfd, 0x05, 0x51, 0x2e, 0xe6, 0x27, 0xbf, 0x1a, /* Byte value: 0xd4 */ + 0xc9, 0x09, 0x35, 0xd6, 0x43, 0xd6, 0x6d, 0xf7, 0xb9, 0x96, 0x5a, 0xde, 0xdd, 0x43, 0x34, 0x4a, /* Byte value: 0xd5 */ + 0x2a, 0x1e, 0x81, 0xc7, 0x68, 0xc7, 0xcb, 0x33, 0x8a, 0x6a, 0xcc, 0x9a, 0x5f, 0x68, 0xc2, 0x76, /* Byte value: 0xd6 */ + 0x11, 0x96, 0x42, 0xaa, 0xcd, 0xaa, 0xf6, 0xa4, 0x3d, 0xd7, 0xd5, 0xed, 0x5d, 0xcd, 0x93, 0x5b, /* Byte value: 0xd7 */ + 0x15, 0x0f, 0xa1, 0x82, 0x34, 0x82, 0x84, 0xf8, 0x45, 0x35, 0x66, 0x4d, 0xce, 0x34, 0x61, 0x3b, /* Byte value: 0xd8 */ + 0xf6, 0x18, 0x15, 0x93, 0x1f, 0x93, 0x22, 0x3c, 0x76, 0xc9, 0xf0, 0x09, 0x4c, 0x1f, 0x97, 0x07, /* Byte value: 0xd9 */ + 0x62, 0x6b, 0xac, 0x52, 0x12, 0x52, 0x03, 0x02, 0xf6, 0x55, 0x08, 0x8b, 0x9e, 0x12, 0x1e, 0x79, /* Byte value: 0xda */ + 0xcc, 0x27, 0x9e, 0xf4, 0x15, 0xf4, 0xe2, 0xbc, 0xdf, 0xad, 0xb5, 0x56, 0x1a, 0x15, 0x1b, 0x32, /* Byte value: 0xdb */ + 0xe0, 0x0d, 0x6c, 0x0f, 0x19, 0x0f, 0x62, 0xfd, 0x11, 0x54, 0x72, 0x3c, 0x7e, 0x19, 0x52, 0x14, /* Byte value: 0xdc */ + 0x8c, 0xa3, 0xb6, 0x31, 0x5e, 0x31, 0xce, 0x35, 0x53, 0x95, 0xd4, 0xc4, 0x3e, 0x5e, 0xe0, 0xfd, /* Byte value: 0xdd */ + 0x90, 0xea, 0x5a, 0xe9, 0xf4, 0xe9, 0x53, 0x62, 0xf8, 0x7e, 0x4b, 0x22, 0x41, 0xf4, 0x7b, 0x1e, /* Byte value: 0xde */ + 0x47, 0x07, 0x13, 0xf3, 0x80, 0xf3, 0x9a, 0xec, 
0xd6, 0x72, 0x36, 0x4a, 0x4b, 0x80, 0xad, 0x87, /* Byte value: 0xdf */ + 0xff, 0x5e, 0x58, 0xc9, 0x81, 0xc9, 0x3b, 0x93, 0x98, 0x17, 0x09, 0xa2, 0xfd, 0x81, 0x6d, 0xdf, /* Byte value: 0xe0 */ + 0x81, 0x7c, 0x18, 0x43, 0x39, 0x43, 0xa5, 0xc6, 0xc5, 0xa9, 0x9e, 0xcf, 0x1c, 0x39, 0xe8, 0x45, /* Byte value: 0xe1 */ + 0x5e, 0x60, 0x54, 0x09, 0x7c, 0x09, 0x88, 0xf0, 0x1b, 0xa2, 0x46, 0x24, 0xf3, 0x7c, 0x19, 0x1c, /* Byte value: 0xe2 */ + 0xfb, 0xc7, 0xbb, 0xe1, 0x78, 0xe1, 0x49, 0xcf, 0xe0, 0xf5, 0xba, 0x02, 0x6e, 0x78, 0x9f, 0xbf, /* Byte value: 0xe3 */ + 0x79, 0xa1, 0x7b, 0xbc, 0x73, 0xbc, 0x28, 0x30, 0x07, 0xf4, 0xc0, 0xb5, 0x8e, 0x73, 0xd3, 0xd2, /* Byte value: 0xe4 */ + 0x36, 0x57, 0x6d, 0x1f, 0xc2, 0x1f, 0x56, 0x64, 0x21, 0x81, 0x53, 0x7c, 0x20, 0xc2, 0x59, 0x95, /* Byte value: 0xe5 */ + 0x32, 0xce, 0x8e, 0x37, 0x3b, 0x37, 0x24, 0x38, 0x59, 0x63, 0xe0, 0xdc, 0xb3, 0x3b, 0xab, 0xf5, /* Byte value: 0xe6 */ + 0x0e, 0xc5, 0x76, 0x6c, 0x55, 0x6c, 0xaf, 0xca, 0xb4, 0x94, 0xae, 0x73, 0xde, 0x55, 0xac, 0x90, /* Byte value: 0xe7 */ + 0xd3, 0x74, 0xaa, 0x32, 0x8d, 0x32, 0xbb, 0xd2, 0x56, 0xee, 0xce, 0xc8, 0x99, 0x8d, 0x24, 0xf9, /* Byte value: 0xe8 */ + 0x33, 0x79, 0xc6, 0x3d, 0x94, 0x3d, 0xd9, 0x2f, 0x47, 0xba, 0xbc, 0xf4, 0xe7, 0x94, 0x76, 0xed, /* Byte value: 0xe9 */ + 0x53, 0xbf, 0xfa, 0x7b, 0x1b, 0x7b, 0xe3, 0x03, 0x8d, 0x9e, 0x0c, 0x2f, 0xd1, 0x1b, 0x11, 0xa4, /* Byte value: 0xea */ + 0x87, 0x48, 0x6b, 0x7f, 0x5d, 0x7f, 0xee, 0xb4, 0x81, 0x3a, 0x95, 0x3f, 0x27, 0x5d, 0x63, 0x15, /* Byte value: 0xeb */ + 0x21, 0xf5, 0x5c, 0x89, 0x6b, 0x89, 0xeb, 0xb2, 0x58, 0xc5, 0x8d, 0x61, 0x46, 0x6b, 0x41, 0x9e, /* Byte value: 0xec */ + 0xd4, 0xf7, 0x91, 0x04, 0x46, 0x04, 0x0d, 0xb7, 0x0c, 0xa4, 0x99, 0x10, 0xf6, 0x46, 0x72, 0xb1, /* Byte value: 0xed */ + 0x07, 0x83, 0x3b, 0x36, 0xcb, 0x36, 0xb6, 0x65, 0x5a, 0x4a, 0x57, 0xd8, 0x6f, 0xcb, 0x56, 0x48, /* Byte value: 0xee */ + 0x48, 0x75, 0x2d, 0x95, 0x7a, 0x95, 0xc8, 0x31, 0x7c, 0x3f, 0xc4, 0x11, 0xc1, 0x7a, 0xdc, 0x0f, /* Byte value: 0xef */ + 0xdd, 0xb1, 0xdc, 0x5e, 0xd8, 0x5e, 0x14, 0x18, 0xe2, 0x7a, 0x60, 0xbb, 0x47, 0xd8, 0x88, 0x69, /* Byte value: 0xf0 */ + 0x6e, 0x03, 0x4a, 0x2a, 0xda, 0x2a, 0x95, 0xe6, 0x7e, 0xb0, 0x1e, 0xa8, 0xe8, 0xda, 0xcb, 0xd9, /* Byte value: 0xf1 */ + 0xc4, 0xd6, 0x9b, 0xa4, 0x24, 0xa4, 0x06, 0x04, 0x2f, 0xaa, 0x10, 0xd5, 0xff, 0x24, 0x3c, 0xf2, /* Byte value: 0xf2 */ + 0x58, 0x54, 0x27, 0x35, 0x18, 0x35, 0xc3, 0x82, 0x5f, 0x31, 0x4d, 0xd4, 0xc8, 0x18, 0x92, 0x4c, /* Byte value: 0xf3 */ + 0x9c, 0x82, 0xbc, 0x91, 0x3c, 0x91, 0xc5, 0x86, 0x70, 0x9b, 0x5d, 0x01, 0x37, 0x3c, 0xae, 0xbe, /* Byte value: 0xf4 */ + 0x05, 0x2e, 0xab, 0x22, 0x56, 0x22, 0x8f, 0x4b, 0x66, 0x3b, 0xef, 0x88, 0xc7, 0x56, 0x2f, 0x78, /* Byte value: 0xf5 */ + 0x19, 0x67, 0x47, 0xfa, 0xfc, 0xfa, 0x12, 0x1c, 0xcd, 0xd0, 0x70, 0x6e, 0xb8, 0xfc, 0xb4, 0x9b, /* Byte value: 0xf6 */ + 0xd8, 0x9f, 0x77, 0x7c, 0x8e, 0x7c, 0x9b, 0x53, 0x84, 0x41, 0x8f, 0x33, 0x80, 0x8e, 0xa7, 0x11, /* Byte value: 0xf7 */ + 0x80, 0xcb, 0x50, 0x49, 0x96, 0x49, 0x58, 0xd1, 0xdb, 0x70, 0xc2, 0xe7, 0x48, 0x96, 0x35, 0x5d, /* Byte value: 0xf8 */ + 0x9e, 0x2f, 0x2c, 0x85, 0xa1, 0x85, 0xfc, 0xa8, 0x4c, 0xea, 0xe5, 0x51, 0x9f, 0xa1, 0xd7, 0x8e, /* Byte value: 0xf9 */ + 0x44, 0x1d, 0xcb, 0xed, 0xb2, 0xed, 0x5e, 0xd5, 0xf4, 0xda, 0xd2, 0x32, 0xb7, 0xb2, 0x09, 0xaf, /* Byte value: 0xfa */ + 0xca, 0x13, 0xed, 0xc8, 0x71, 0xc8, 0xa9, 0xce, 0x9b, 0x3e, 0xbe, 0xa6, 0x21, 0x71, 0x90, 0x62, /* Byte value: 0xfb */ + 0x3d, 0xbc, 0xb0, 0x51, 0xc1, 0x51, 0x76, 0xe5, 0xf3, 0x2e, 0x12, 0x87, 0x39, 0xc1, 0xda, 
0x7d, /* Byte value: 0xfc */ + 0x2e, 0x87, 0x62, 0xef, 0x91, 0xef, 0xb9, 0x6f, 0xf2, 0x88, 0x7f, 0x3a, 0xcc, 0x91, 0x30, 0x16, /* Byte value: 0xfd */ + 0x72, 0x4a, 0xa6, 0xf2, 0x70, 0xf2, 0x08, 0xb1, 0xd5, 0x5b, 0x81, 0x4e, 0x97, 0x70, 0x50, 0x3a, /* Byte value: 0xfe */ + 0x42, 0x29, 0xb8, 0xd1, 0xd6, 0xd1, 0x15, 0xa7, 0xb0, 0x49, 0xd9, 0xc2, 0x8c, 0xd6, 0x82, 0xff, /* Byte value: 0xff */ + 0x65, 0xe8, 0x97, 0x64, 0xd9, 0x64, 0xb5, 0x67, 0xac, 0x1f, 0x5f, 0x53, 0xf1, 0xd9, 0x48, 0x31, /* Byte value: 0x00 */ + + /* Matrix row: 4 */ + 0x04, 0xc3, 0xb6, 0x81, 0x36, 0x3e, 0x4d, 0xde, 0x9f, 0xbe, 0xcc, 0x92, 0x5e, 0x72, 0x11, 0x8f, /* Byte value: 0x01 */ + 0xc5, 0x34, 0x29, 0xba, 0x33, 0x7a, 0x21, 0x8a, 0x27, 0x60, 0x9b, 0x0b, 0x90, 0xfb, 0x32, 0xb5, /* Byte value: 0x02 */ + 0xdf, 0x7b, 0x40, 0x9b, 0x9c, 0xe1, 0x8b, 0x64, 0x4d, 0x3d, 0x10, 0xa8, 0xb8, 0x2d, 0xbd, 0xb7, /* Byte value: 0x03 */ + 0xb7, 0x84, 0x27, 0x1f, 0x65, 0xc8, 0x66, 0x5a, 0x26, 0x8a, 0x79, 0x61, 0x18, 0xf3, 0xc4, 0xbf, /* Byte value: 0x04 */ + 0x1e, 0x8c, 0xdf, 0xa0, 0x99, 0xa5, 0xe7, 0x30, 0xf5, 0xe3, 0x47, 0x31, 0x76, 0xa4, 0x9e, 0x8d, /* Byte value: 0x05 */ + 0x26, 0x94, 0x14, 0xa2, 0x5e, 0x12, 0xa7, 0x8e, 0x43, 0x58, 0x05, 0xc1, 0xc4, 0x5d, 0x70, 0xdb, /* Byte value: 0x06 */ + 0xff, 0xac, 0xb9, 0x19, 0xef, 0xd2, 0xa6, 0x5b, 0x3f, 0x84, 0xbf, 0xb2, 0x0d, 0x3b, 0x35, 0x45, /* Byte value: 0x07 */ + 0xc8, 0xf2, 0xfc, 0x4b, 0x85, 0xd6, 0x74, 0xfd, 0x12, 0xaf, 0x3f, 0xbb, 0x84, 0x90, 0x94, 0xb4, /* Byte value: 0x08 */ + 0x7b, 0xb5, 0x6d, 0xd5, 0xd6, 0x20, 0x5f, 0x79, 0xab, 0x9b, 0x8a, 0x48, 0xc2, 0x11, 0x41, 0x84, /* Byte value: 0x09 */ + 0x7a, 0xf5, 0xa1, 0x64, 0x3a, 0xce, 0xdd, 0xaf, 0xfc, 0x55, 0xb9, 0x8d, 0x34, 0xec, 0xd4, 0xd7, /* Byte value: 0x0a */ + 0xe8, 0x25, 0x05, 0xc9, 0xf6, 0xe5, 0x59, 0xc2, 0x60, 0x16, 0x90, 0xa1, 0x31, 0x86, 0x1c, 0x46, /* Byte value: 0x0b */ + 0xa0, 0x0d, 0x9b, 0xcf, 0x7c, 0xff, 0x99, 0xc3, 0x79, 0x18, 0x56, 0x72, 0x24, 0x4e, 0xed, 0xbc, /* Byte value: 0x0c */ + 0x3e, 0x5b, 0x26, 0x22, 0xea, 0x96, 0xca, 0x0f, 0x87, 0x5a, 0xe8, 0x2b, 0xc3, 0xb2, 0x16, 0x7f, /* Byte value: 0x0d */ + 0xe9, 0x65, 0xc9, 0x78, 0x1a, 0x0b, 0xdb, 0x14, 0x37, 0xd8, 0xa3, 0x64, 0xc7, 0x7b, 0x89, 0x15, /* Byte value: 0x0e */ + 0x09, 0x05, 0x63, 0x70, 0x80, 0x92, 0x18, 0xa9, 0xaa, 0x71, 0x68, 0x22, 0x4a, 0x19, 0xb7, 0x8e, /* Byte value: 0x0f */ + 0x18, 0xcf, 0x32, 0x80, 0xb4, 0x84, 0x6d, 0x81, 0xc4, 0x02, 0xed, 0xea, 0x07, 0xef, 0x66, 0xa4, /* Byte value: 0x10 */ + 0xba, 0x42, 0xf2, 0xee, 0xd3, 0x64, 0x33, 0x2d, 0x13, 0x45, 0xdd, 0xd1, 0x0c, 0x98, 0x62, 0xbe, /* Byte value: 0x11 */ + 0x83, 0x1a, 0xf5, 0x5d, 0xf8, 0x3d, 0xf1, 0x45, 0xf2, 0x30, 0xac, 0xe4, 0x48, 0x9c, 0x19, 0xbb, /* Byte value: 0x12 */ + 0x1f, 0xcc, 0x13, 0x11, 0x75, 0x4b, 0x65, 0xe6, 0xa2, 0x2d, 0x74, 0xf4, 0x80, 0x59, 0x0b, 0xde, /* Byte value: 0x13 */ + 0x33, 0x9d, 0xf3, 0xd3, 0x5c, 0x3a, 0x9f, 0x78, 0xb2, 0x95, 0x4c, 0x9b, 0xd7, 0xd9, 0xb0, 0x7e, /* Byte value: 0x14 */ + 0xb1, 0xc7, 0xca, 0x3f, 0x48, 0xe9, 0xec, 0xeb, 0x17, 0x6b, 0xd3, 0xba, 0x69, 0xb8, 0x3c, 0x96, /* Byte value: 0x15 */ + 0xb6, 0xc4, 0xeb, 0xae, 0x89, 0x26, 0xe4, 0x8c, 0x71, 0x44, 0x4a, 0xa4, 0xee, 0x0e, 0x51, 0xec, /* Byte value: 0x16 */ + 0x46, 0x2e, 0xdc, 0xe7, 0xcb, 0x47, 0xd0, 0xcf, 0xd5, 0x50, 0x37, 0xef, 0xd8, 0x67, 0x2b, 0x0e, /* Byte value: 0x17 */ + 0x78, 0x75, 0xfa, 0xc5, 0x21, 0xd1, 0x1a, 0xc0, 0x52, 0x0a, 0xdf, 0xc4, 0x1b, 0xd5, 0x3d, 0x71, /* Byte value: 0x18 */ + 0x5b, 0x62, 0x94, 0x57, 0xa5, 0x13, 0x72, 0x46, 0xd9, 0x22, 0x25, 0x52, 0x77, 0x07, 0xc9, 0x76, /* Byte value: 
0x19 */ + 0x80, 0xda, 0x62, 0x4d, 0x0f, 0xcc, 0xb4, 0xfc, 0x0b, 0xa1, 0xf9, 0x68, 0x91, 0x58, 0x65, 0x4e, /* Byte value: 0x1a */ + 0x8c, 0x5c, 0x7b, 0x0d, 0x55, 0x8e, 0x63, 0x5d, 0x69, 0xa0, 0x6e, 0x1d, 0x73, 0xce, 0x56, 0x1c, /* Byte value: 0x1b */ + 0xeb, 0xe5, 0x92, 0xd9, 0x01, 0x14, 0x1c, 0x7b, 0x99, 0x87, 0xc5, 0x2d, 0xe8, 0x42, 0x60, 0xb3, /* Byte value: 0x1c */ + 0x2d, 0x11, 0x2c, 0x73, 0xc5, 0x9f, 0x78, 0x48, 0x47, 0x76, 0x0b, 0xaa, 0xa1, 0x7d, 0x2e, 0xf3, /* Byte value: 0x1d */ + 0xfb, 0x6f, 0x0f, 0x98, 0xd9, 0xec, 0xeb, 0x85, 0xa0, 0x3a, 0x73, 0x20, 0x53, 0x49, 0x24, 0xca, /* Byte value: 0x1e */ + 0xd9, 0x38, 0xad, 0xbb, 0xb1, 0xc0, 0x01, 0xd5, 0x7c, 0xdc, 0xba, 0x73, 0xc9, 0x66, 0x45, 0x9e, /* Byte value: 0x1f */ + 0xe0, 0x60, 0xaa, 0x08, 0x9a, 0x99, 0xc3, 0xbd, 0x9d, 0xa9, 0xcb, 0x46, 0x8d, 0x62, 0x3e, 0x9b, /* Byte value: 0x20 */ + 0x9e, 0x56, 0xbd, 0xed, 0x96, 0x69, 0x53, 0xcc, 0xfe, 0x42, 0xbe, 0x59, 0xe7, 0xfc, 0xfb, 0xc3, /* Byte value: 0x21 */ + 0x36, 0x1e, 0x89, 0xe3, 0x86, 0xea, 0x50, 0x70, 0x7a, 0xe5, 0xb3, 0xcc, 0x7f, 0x56, 0x34, 0xa2, /* Byte value: 0x22 */ + 0x42, 0xed, 0x6a, 0x66, 0xfd, 0x79, 0x9d, 0x11, 0x4a, 0xee, 0xfb, 0x7d, 0x86, 0x15, 0x3a, 0x81, /* Byte value: 0x23 */ + 0x43, 0xad, 0xa6, 0xd7, 0x11, 0x97, 0x1f, 0xc7, 0x1d, 0x20, 0xc8, 0xb8, 0x70, 0xe8, 0xaf, 0xd2, /* Byte value: 0x24 */ + 0xde, 0x3b, 0x8c, 0x2a, 0x70, 0x0f, 0x09, 0xb2, 0x1a, 0xf3, 0x23, 0x6d, 0x4e, 0xd0, 0x28, 0xe4, /* Byte value: 0x25 */ + 0xaf, 0x4b, 0x15, 0x9f, 0xd1, 0x4c, 0x0b, 0xdb, 0xe2, 0x88, 0x94, 0x8b, 0x1f, 0x1c, 0xa2, 0x1b, /* Byte value: 0x26 */ + 0x56, 0xa4, 0x41, 0xa6, 0x13, 0xbf, 0x27, 0x31, 0xec, 0xed, 0x81, 0xe2, 0x63, 0x6c, 0x6f, 0x77, /* Byte value: 0x27 */ + 0xdb, 0xb8, 0xf6, 0x1a, 0xaa, 0xdf, 0xc6, 0xba, 0xd2, 0x83, 0xdc, 0x3a, 0xe6, 0x5f, 0xac, 0x38, /* Byte value: 0x28 */ + 0x70, 0x30, 0x55, 0x04, 0x4d, 0xad, 0x80, 0xbf, 0xaf, 0xb5, 0x84, 0x23, 0xa7, 0x31, 0x1f, 0xac, /* Byte value: 0x29 */ + 0x3f, 0x1b, 0xea, 0x93, 0x06, 0x78, 0x48, 0xd9, 0xd0, 0x94, 0xdb, 0xee, 0x35, 0x4f, 0x83, 0x2c, /* Byte value: 0x2a */ + 0x77, 0x33, 0x74, 0x95, 0x8c, 0x62, 0x88, 0xd8, 0xc9, 0x9a, 0x1d, 0x3d, 0x20, 0x87, 0x72, 0xd6, /* Byte value: 0x2b */ + 0x75, 0xb3, 0x2f, 0x34, 0x97, 0x7d, 0x4f, 0xb7, 0x67, 0xc5, 0x7b, 0x74, 0x0f, 0xbe, 0x9b, 0x70, /* Byte value: 0x2c */ + 0x87, 0xd9, 0x43, 0xdc, 0xce, 0x03, 0xbc, 0x9b, 0x6d, 0x8e, 0x60, 0x76, 0x16, 0xee, 0x08, 0x34, /* Byte value: 0x2d */ + 0x93, 0x90, 0x68, 0x1c, 0x20, 0xc5, 0x06, 0xbb, 0xcb, 0x8d, 0x1a, 0xe9, 0xf3, 0x97, 0x5d, 0xc2, /* Byte value: 0x2e */ + 0x1d, 0x4c, 0x48, 0xb0, 0x6e, 0x54, 0xa2, 0x89, 0x0c, 0x72, 0x12, 0xbd, 0xaf, 0x60, 0xe2, 0x78, /* Byte value: 0x2f */ + 0xfd, 0x2c, 0xe2, 0xb8, 0xf4, 0xcd, 0x61, 0x34, 0x91, 0xdb, 0xd9, 0xfb, 0x22, 0x02, 0xdc, 0xe3, /* Byte value: 0x30 */ + 0x9a, 0x95, 0x0b, 0x6c, 0xa0, 0x57, 0x1e, 0x12, 0x61, 0xfc, 0x72, 0xcb, 0xb9, 0x8e, 0xea, 0x4c, /* Byte value: 0x31 */ + 0xea, 0xa5, 0x5e, 0x68, 0xed, 0xfa, 0x9e, 0xad, 0xce, 0x49, 0xf6, 0xe8, 0x1e, 0xbf, 0xf5, 0xe0, /* Byte value: 0x32 */ + 0xe5, 0xe3, 0xd0, 0x38, 0x40, 0x49, 0x0c, 0xb5, 0x55, 0xd9, 0x34, 0x11, 0x25, 0xed, 0xba, 0x47, /* Byte value: 0x33 */ + 0x55, 0x64, 0xd6, 0xb6, 0xe4, 0x4e, 0x62, 0x88, 0x15, 0x7c, 0xd4, 0x6e, 0xba, 0xa8, 0x13, 0x82, /* Byte value: 0x34 */ + 0x4d, 0xab, 0xe4, 0x36, 0x50, 0xca, 0x0f, 0x09, 0xd1, 0x7e, 0x39, 0x84, 0xbd, 0x47, 0x75, 0x26, /* Byte value: 0x35 */ + 0x2f, 0x91, 0x77, 0xd2, 0xde, 0x80, 0xbf, 0x27, 0xe9, 0x29, 0x6d, 0xe3, 0x8e, 0x44, 0xc7, 0x55, /* Byte value: 0x36 */ + 0x8e, 0xdc, 0x20, 0xac, 0x4e, 0x91, 
0xa4, 0x32, 0xc7, 0xff, 0x08, 0x54, 0x5c, 0xf7, 0xbf, 0xba, /* Byte value: 0x37 */ + 0xab, 0x88, 0xa3, 0x1e, 0xe7, 0x72, 0x46, 0x05, 0x7d, 0x36, 0x58, 0x19, 0x41, 0x6e, 0xb3, 0x94, /* Byte value: 0x38 */ + 0xec, 0xe6, 0xb3, 0x48, 0xc0, 0xdb, 0x14, 0x1c, 0xff, 0xa8, 0x5c, 0x33, 0x6f, 0xf4, 0x0d, 0xc9, /* Byte value: 0x39 */ + 0x64, 0x79, 0x7e, 0xc4, 0xa3, 0x6b, 0x3a, 0x9f, 0x09, 0xb6, 0xfe, 0xbc, 0x42, 0x48, 0x4a, 0x5a, /* Byte value: 0x3a */ + 0xb3, 0x47, 0x91, 0x9e, 0x53, 0xf6, 0x2b, 0x84, 0xb9, 0x34, 0xb5, 0xf3, 0x46, 0x81, 0xd5, 0x30, /* Byte value: 0x3b */ + 0xd5, 0xbe, 0xb4, 0xfb, 0xeb, 0x82, 0xd6, 0x74, 0x1e, 0xdd, 0x2d, 0x06, 0x2b, 0xf0, 0x76, 0xcc, /* Byte value: 0x3c */ + 0x91, 0x10, 0x33, 0xbd, 0x3b, 0xda, 0xc1, 0xd4, 0x65, 0xd2, 0x7c, 0xa0, 0xdc, 0xae, 0xb4, 0x64, /* Byte value: 0x3d */ + 0x5e, 0xe1, 0xee, 0x67, 0x7f, 0xc3, 0xbd, 0x4e, 0x11, 0x52, 0xda, 0x05, 0xdf, 0x88, 0x4d, 0xaa, /* Byte value: 0x3e */ + 0x21, 0x97, 0x35, 0x33, 0x9f, 0xdd, 0xaf, 0xe9, 0x25, 0x77, 0x9c, 0xdf, 0x43, 0xeb, 0x1d, 0xa1, /* Byte value: 0x3f */ + 0x49, 0x68, 0x52, 0xb7, 0x66, 0xf4, 0x42, 0xd7, 0x4e, 0xc0, 0xf5, 0x16, 0xe3, 0x35, 0x64, 0xa9, /* Byte value: 0x40 */ + 0x5f, 0xa1, 0x22, 0xd6, 0x93, 0x2d, 0x3f, 0x98, 0x46, 0x9c, 0xe9, 0xc0, 0x29, 0x75, 0xd8, 0xf9, /* Byte value: 0x41 */ + 0x65, 0x39, 0xb2, 0x75, 0x4f, 0x85, 0xb8, 0x49, 0x5e, 0x78, 0xcd, 0x79, 0xb4, 0xb5, 0xdf, 0x09, /* Byte value: 0x42 */ + 0x53, 0x27, 0x3b, 0x96, 0xc9, 0x6f, 0xe8, 0x39, 0x24, 0x9d, 0x7e, 0xb5, 0xcb, 0xe3, 0xeb, 0xab, /* Byte value: 0x43 */ + 0x27, 0xd4, 0xd8, 0x13, 0xb2, 0xfc, 0x25, 0x58, 0x14, 0x96, 0x36, 0x04, 0x32, 0xa0, 0xe5, 0x88, /* Byte value: 0x44 */ + 0xcc, 0x31, 0x4a, 0xca, 0xb3, 0xe8, 0x39, 0x23, 0x8d, 0x11, 0xf3, 0x29, 0xda, 0xe2, 0x85, 0x3b, /* Byte value: 0x45 */ + 0x61, 0xfa, 0x04, 0xf4, 0x79, 0xbb, 0xf5, 0x97, 0xc1, 0xc6, 0x01, 0xeb, 0xea, 0xc7, 0xce, 0x86, /* Byte value: 0x46 */ + 0x82, 0x5a, 0x39, 0xec, 0x14, 0xd3, 0x73, 0x93, 0xa5, 0xfe, 0x9f, 0x21, 0xbe, 0x61, 0x8c, 0xe8, /* Byte value: 0x47 */ + 0xcf, 0xf1, 0xdd, 0xda, 0x44, 0x19, 0x7c, 0x9a, 0x74, 0x80, 0xa6, 0xa5, 0x03, 0x26, 0xf9, 0xce, /* Byte value: 0x48 */ + 0xfa, 0x2f, 0xc3, 0x29, 0x35, 0x02, 0x69, 0x53, 0xf7, 0xf4, 0x40, 0xe5, 0xa5, 0xb4, 0xb1, 0x99, /* Byte value: 0x49 */ + 0xbf, 0xc1, 0x88, 0xde, 0x09, 0xb4, 0xfc, 0x25, 0xdb, 0x35, 0x22, 0x86, 0xa4, 0x17, 0xe6, 0x62, /* Byte value: 0x4a */ + 0xca, 0x72, 0xa7, 0xea, 0x9e, 0xc9, 0xb3, 0x92, 0xbc, 0xf0, 0x59, 0xf2, 0xab, 0xa9, 0x7d, 0x12, /* Byte value: 0x4b */ + 0x4e, 0x6b, 0x73, 0x26, 0xa7, 0x3b, 0x4a, 0xb0, 0x28, 0xef, 0x6c, 0x08, 0x64, 0x83, 0x09, 0xd3, /* Byte value: 0x4c */ + 0x97, 0x53, 0xde, 0x9d, 0x16, 0xfb, 0x4b, 0x65, 0x54, 0x33, 0xd6, 0x7b, 0xad, 0xe5, 0x4c, 0x4d, /* Byte value: 0x4d */ + 0x9b, 0xd5, 0xc7, 0xdd, 0x4c, 0xb9, 0x9c, 0xc4, 0x36, 0x32, 0x41, 0x0e, 0x4f, 0x73, 0x7f, 0x1f, /* Byte value: 0x4e */ + 0x8d, 0x1c, 0xb7, 0xbc, 0xb9, 0x60, 0xe1, 0x8b, 0x3e, 0x6e, 0x5d, 0xd8, 0x85, 0x33, 0xc3, 0x4f, /* Byte value: 0x4f */ + 0x68, 0xff, 0x67, 0x84, 0xf9, 0x29, 0xed, 0x3e, 0x6b, 0xb7, 0x69, 0xc9, 0xa0, 0xde, 0x79, 0x08, /* Byte value: 0x50 */ + 0x15, 0x09, 0xe7, 0x71, 0x02, 0x28, 0x38, 0xf6, 0xf1, 0xcd, 0x49, 0x5a, 0x13, 0x84, 0xc0, 0xa5, /* Byte value: 0x51 */ + 0xfc, 0x6c, 0x2e, 0x09, 0x18, 0x23, 0xe3, 0xe2, 0xc6, 0x15, 0xea, 0x3e, 0xd4, 0xff, 0x49, 0xb0, /* Byte value: 0x52 */ + 0xfe, 0xec, 0x75, 0xa8, 0x03, 0x3c, 0x24, 0x8d, 0x68, 0x4a, 0x8c, 0x77, 0xfb, 0xc6, 0xa0, 0x16, /* Byte value: 0x53 */ + 0x58, 0xa2, 0x03, 0x47, 0x52, 0xe2, 0x37, 0xff, 0x20, 0xb3, 0x70, 0xde, 0xae, 
0xc3, 0xb5, 0x83, /* Byte value: 0x54 */ + 0x12, 0x0a, 0xc6, 0xe0, 0xc3, 0xe7, 0x30, 0x91, 0x97, 0xe2, 0xd0, 0x44, 0x94, 0x32, 0xad, 0xdf, /* Byte value: 0x55 */ + 0x1b, 0x0f, 0xa5, 0x90, 0x43, 0x75, 0x28, 0x38, 0x3d, 0x93, 0xb8, 0x66, 0xde, 0x2b, 0x1a, 0x51, /* Byte value: 0x56 */ + 0x10, 0x8a, 0x9d, 0x41, 0xd8, 0xf8, 0xf7, 0xfe, 0x39, 0xbd, 0xb6, 0x0d, 0xbb, 0x0b, 0x44, 0x79, /* Byte value: 0x57 */ + 0xc1, 0xf7, 0x9f, 0x3b, 0x05, 0x44, 0x6c, 0x54, 0xb8, 0xde, 0x57, 0x99, 0xce, 0x89, 0x23, 0x3a, /* Byte value: 0x58 */ + 0xe2, 0xe0, 0xf1, 0xa9, 0x81, 0x86, 0x04, 0xd2, 0x33, 0xf6, 0xad, 0x0f, 0xa2, 0x5b, 0xd7, 0x3d, /* Byte value: 0x59 */ + 0x19, 0x8f, 0xfe, 0x31, 0x58, 0x6a, 0xef, 0x57, 0x93, 0xcc, 0xde, 0x2f, 0xf1, 0x12, 0xf3, 0xf7, /* Byte value: 0x5a */ + 0x52, 0x67, 0xf7, 0x27, 0x25, 0x81, 0x6a, 0xef, 0x73, 0x53, 0x4d, 0x70, 0x3d, 0x1e, 0x7e, 0xf8, /* Byte value: 0x5b */ + 0xef, 0x26, 0x24, 0x58, 0x37, 0x2a, 0x51, 0xa5, 0x06, 0x39, 0x09, 0xbf, 0xb6, 0x30, 0x71, 0x3c, /* Byte value: 0x5c */ + 0xdc, 0xbb, 0xd7, 0x8b, 0x6b, 0x10, 0xce, 0xdd, 0xb4, 0xac, 0x45, 0x24, 0x61, 0xe9, 0xc1, 0x42, /* Byte value: 0x5d */ + 0xf0, 0xea, 0x37, 0x49, 0x42, 0x61, 0x34, 0x43, 0xa4, 0x14, 0x7d, 0x4b, 0x36, 0x69, 0x7a, 0xe2, /* Byte value: 0x5e */ + 0x3c, 0xdb, 0x7d, 0x83, 0xf1, 0x89, 0x0d, 0x60, 0x29, 0x05, 0x8e, 0x62, 0xec, 0x8b, 0xff, 0xd9, /* Byte value: 0x5f */ + 0x9c, 0xd6, 0xe6, 0x4c, 0x8d, 0x76, 0x94, 0xa3, 0x50, 0x1d, 0xd8, 0x10, 0xc8, 0xc5, 0x12, 0x65, /* Byte value: 0x60 */ + 0xbe, 0x81, 0x44, 0x6f, 0xe5, 0x5a, 0x7e, 0xf3, 0x8c, 0xfb, 0x11, 0x43, 0x52, 0xea, 0x73, 0x31, /* Byte value: 0x61 */ + 0x38, 0x18, 0xcb, 0x02, 0xc7, 0xb7, 0x40, 0xbe, 0xb6, 0xbb, 0x42, 0xf0, 0xb2, 0xf9, 0xee, 0x56, /* Byte value: 0x62 */ + 0x2b, 0x52, 0xc1, 0x53, 0xe8, 0xbe, 0xf2, 0xf9, 0x76, 0x97, 0xa1, 0x71, 0xd0, 0x36, 0xd6, 0xda, /* Byte value: 0x63 */ + 0xc9, 0xb2, 0x30, 0xfa, 0x69, 0x38, 0xf6, 0x2b, 0x45, 0x61, 0x0c, 0x7e, 0x72, 0x6d, 0x01, 0xe7, /* Byte value: 0x64 */ + 0x24, 0x14, 0x4f, 0x03, 0x45, 0x0d, 0x60, 0xe1, 0xed, 0x07, 0x63, 0x88, 0xeb, 0x64, 0x99, 0x7d, /* Byte value: 0x65 */ + 0x98, 0x15, 0x50, 0xcd, 0xbb, 0x48, 0xd9, 0x7d, 0xcf, 0xa3, 0x14, 0x82, 0x96, 0xb7, 0x03, 0xea, /* Byte value: 0x66 */ + 0x30, 0x5d, 0x64, 0xc3, 0xab, 0xcb, 0xda, 0xc1, 0x4b, 0x04, 0x19, 0x17, 0x0e, 0x1d, 0xcc, 0x8b, /* Byte value: 0x67 */ + 0x0c, 0x86, 0x19, 0x40, 0x5a, 0x42, 0xd7, 0xa1, 0x62, 0x01, 0x97, 0x75, 0xe2, 0x96, 0x33, 0x52, /* Byte value: 0x68 */ + 0x69, 0xbf, 0xab, 0x35, 0x15, 0xc7, 0x6f, 0xe8, 0x3c, 0x79, 0x5a, 0x0c, 0x56, 0x23, 0xec, 0x5b, /* Byte value: 0x69 */ + 0x54, 0x24, 0x1a, 0x07, 0x08, 0xa0, 0xe0, 0x5e, 0x42, 0xb2, 0xe7, 0xab, 0x4c, 0x55, 0x86, 0xd1, /* Byte value: 0x6a */ + 0xee, 0x66, 0xe8, 0xe9, 0xdb, 0xc4, 0xd3, 0x73, 0x51, 0xf7, 0x3a, 0x7a, 0x40, 0xcd, 0xe4, 0x6f, /* Byte value: 0x6b */ + 0xb5, 0x04, 0x7c, 0xbe, 0x7e, 0xd7, 0xa1, 0x35, 0x88, 0xd5, 0x1f, 0x28, 0x37, 0xca, 0x2d, 0x19, /* Byte value: 0x6c */ + 0x4f, 0x2b, 0xbf, 0x97, 0x4b, 0xd5, 0xc8, 0x66, 0x7f, 0x21, 0x5f, 0xcd, 0x92, 0x7e, 0x9c, 0x80, /* Byte value: 0x6d */ + 0x39, 0x58, 0x07, 0xb3, 0x2b, 0x59, 0xc2, 0x68, 0xe1, 0x75, 0x71, 0x35, 0x44, 0x04, 0x7b, 0x05, /* Byte value: 0x6e */ + 0x6a, 0x7f, 0x3c, 0x25, 0xe2, 0x36, 0x2a, 0x51, 0xc5, 0xe8, 0x0f, 0x80, 0x8f, 0xe7, 0x90, 0xae, /* Byte value: 0x6f */ + 0x1c, 0x0c, 0x84, 0x01, 0x82, 0xba, 0x20, 0x5f, 0x5b, 0xbc, 0x21, 0x78, 0x59, 0x9d, 0x77, 0x2b, /* Byte value: 0x70 */ + 0x89, 0xdf, 0x01, 0x3d, 0x8f, 0x5e, 0xac, 0x55, 0xa1, 0xd0, 0x91, 0x4a, 0xdb, 0x41, 0xd2, 0xc0, /* Byte value: 0x71 */ + 
0x66, 0xf9, 0x25, 0x65, 0xb8, 0x74, 0xfd, 0xf0, 0xa7, 0xe9, 0x98, 0xf5, 0x6d, 0x71, 0xa3, 0xfc, /* Byte value: 0x72 */ + 0xa5, 0x8e, 0xe1, 0xff, 0xa6, 0x2f, 0x56, 0xcb, 0xb1, 0x68, 0xa9, 0x25, 0x8c, 0xc1, 0x69, 0x60, /* Byte value: 0x73 */ + 0xe4, 0xa3, 0x1c, 0x89, 0xac, 0xa7, 0x8e, 0x63, 0x02, 0x17, 0x07, 0xd4, 0xd3, 0x10, 0x2f, 0x14, /* Byte value: 0x74 */ + 0x72, 0xb0, 0x0e, 0xa5, 0x56, 0xb2, 0x47, 0xd0, 0x01, 0xea, 0xe2, 0x6a, 0x88, 0x08, 0xf6, 0x0a, /* Byte value: 0x75 */ + 0xf6, 0xa9, 0xda, 0x69, 0x6f, 0x40, 0xbe, 0xf2, 0x95, 0xf5, 0xd7, 0x90, 0x47, 0x22, 0x82, 0xcb, /* Byte value: 0x76 */ + 0x14, 0x49, 0x2b, 0xc0, 0xee, 0xc6, 0xba, 0x20, 0xa6, 0x03, 0x7a, 0x9f, 0xe5, 0x79, 0x55, 0xf6, /* Byte value: 0x77 */ + 0x02, 0x80, 0x5b, 0xa1, 0x1b, 0x1f, 0xc7, 0x6f, 0xae, 0x5f, 0x66, 0x49, 0x2f, 0x39, 0xe9, 0xa6, /* Byte value: 0x78 */ + 0x50, 0xe7, 0xac, 0x86, 0x3e, 0x9e, 0xad, 0x80, 0xdd, 0x0c, 0x2b, 0x39, 0x12, 0x27, 0x97, 0x5e, /* Byte value: 0x79 */ + 0xbd, 0x41, 0xd3, 0x7f, 0x12, 0xab, 0x3b, 0x4a, 0x75, 0x6a, 0x44, 0xcf, 0x8b, 0x2e, 0x0f, 0xc4, /* Byte value: 0x7a */ + 0x9f, 0x16, 0x71, 0x5c, 0x7a, 0x87, 0xd1, 0x1a, 0xa9, 0x8c, 0x8d, 0x9c, 0x11, 0x01, 0x6e, 0x90, /* Byte value: 0x7b */ + 0xe3, 0xa0, 0x3d, 0x18, 0x6d, 0x68, 0x86, 0x04, 0x64, 0x38, 0x9e, 0xca, 0x54, 0xa6, 0x42, 0x6e, /* Byte value: 0x7c */ + 0x05, 0x83, 0x7a, 0x30, 0xda, 0xd0, 0xcf, 0x08, 0xc8, 0x70, 0xff, 0x57, 0xa8, 0x8f, 0x84, 0xdc, /* Byte value: 0x7d */ + 0x07, 0x03, 0x21, 0x91, 0xc1, 0xcf, 0x08, 0x67, 0x66, 0x2f, 0x99, 0x1e, 0x87, 0xb6, 0x6d, 0x7a, /* Byte value: 0x7e */ + 0x88, 0x9f, 0xcd, 0x8c, 0x63, 0xb0, 0x2e, 0x83, 0xf6, 0x1e, 0xa2, 0x8f, 0x2d, 0xbc, 0x47, 0x93, /* Byte value: 0x7f */ + 0xcb, 0x32, 0x6b, 0x5b, 0x72, 0x27, 0x31, 0x44, 0xeb, 0x3e, 0x6a, 0x37, 0x5d, 0x54, 0xe8, 0x41, /* Byte value: 0x80 */ + 0x3a, 0x98, 0x90, 0xa3, 0xdc, 0xa8, 0x87, 0xd1, 0x18, 0xe4, 0x24, 0xb9, 0x9d, 0xc0, 0x07, 0xf0, /* Byte value: 0x81 */ + 0x85, 0x59, 0x18, 0x7d, 0xd5, 0x1c, 0x7b, 0xf4, 0xc3, 0xd1, 0x06, 0x3f, 0x39, 0xd7, 0xe1, 0x92, /* Byte value: 0x82 */ + 0x41, 0x2d, 0xfd, 0x76, 0x0a, 0x88, 0xd8, 0xa8, 0xb3, 0x7f, 0xae, 0xf1, 0x5f, 0xd1, 0x46, 0x74, /* Byte value: 0x83 */ + 0x2a, 0x12, 0x0d, 0xe2, 0x04, 0x50, 0x70, 0x2f, 0x21, 0x59, 0x92, 0xb4, 0x26, 0xcb, 0x43, 0x89, /* Byte value: 0x84 */ + 0x92, 0xd0, 0xa4, 0xad, 0xcc, 0x2b, 0x84, 0x6d, 0x9c, 0x43, 0x29, 0x2c, 0x05, 0x6a, 0xc8, 0x91, /* Byte value: 0x85 */ + 0xb9, 0x82, 0x65, 0xfe, 0x24, 0x95, 0x76, 0x94, 0xea, 0xd4, 0x88, 0x5d, 0xd5, 0x5c, 0x1e, 0x4b, /* Byte value: 0x86 */ + 0xe6, 0x23, 0x47, 0x28, 0xb7, 0xb8, 0x49, 0x0c, 0xac, 0x48, 0x61, 0x9d, 0xfc, 0x29, 0xc6, 0xb2, /* Byte value: 0x87 */ + 0xf2, 0x6a, 0x6c, 0xe8, 0x59, 0x7e, 0xf3, 0x2c, 0x0a, 0x4b, 0x1b, 0x02, 0x19, 0x50, 0x93, 0x44, /* Byte value: 0x88 */ + 0x28, 0x92, 0x56, 0x43, 0x1f, 0x4f, 0xb7, 0x40, 0x8f, 0x06, 0xf4, 0xfd, 0x09, 0xf2, 0xaa, 0x2f, /* Byte value: 0x89 */ + 0x7d, 0xf6, 0x80, 0xf5, 0xfb, 0x01, 0xd5, 0xc8, 0x9a, 0x7a, 0x20, 0x93, 0xb3, 0x5a, 0xb9, 0xad, /* Byte value: 0x8a */ + 0xbb, 0x02, 0x3e, 0x5f, 0x3f, 0x8a, 0xb1, 0xfb, 0x44, 0x8b, 0xee, 0x14, 0xfa, 0x65, 0xf7, 0xed, /* Byte value: 0x8b */ + 0xf3, 0x2a, 0xa0, 0x59, 0xb5, 0x90, 0x71, 0xfa, 0x5d, 0x85, 0x28, 0xc7, 0xef, 0xad, 0x06, 0x17, /* Byte value: 0x8c */ + 0xe7, 0x63, 0x8b, 0x99, 0x5b, 0x56, 0xcb, 0xda, 0xfb, 0x86, 0x52, 0x58, 0x0a, 0xd4, 0x53, 0xe1, /* Byte value: 0x8d */ + 0xad, 0xcb, 0x4e, 0x3e, 0xca, 0x53, 0xcc, 0xb4, 0x4c, 0xd7, 0xf2, 0xc2, 0x30, 0x25, 0x4b, 0xbd, /* Byte value: 0x8e */ + 0x0e, 0x06, 0x42, 0xe1, 0x41, 0x5d, 0x10, 
0xce, 0xcc, 0x5e, 0xf1, 0x3c, 0xcd, 0xaf, 0xda, 0xf4, /* Byte value: 0x8f */ + 0x76, 0x73, 0xb8, 0x24, 0x60, 0x8c, 0x0a, 0x0e, 0x9e, 0x54, 0x2e, 0xf8, 0xd6, 0x7a, 0xe7, 0x85, /* Byte value: 0x90 */ + 0x3b, 0xd8, 0x5c, 0x12, 0x30, 0x46, 0x05, 0x07, 0x4f, 0x2a, 0x17, 0x7c, 0x6b, 0x3d, 0x92, 0xa3, /* Byte value: 0x91 */ + 0x6d, 0x7c, 0x1d, 0xb4, 0x23, 0xf9, 0x22, 0x36, 0xa3, 0xc7, 0x96, 0x9e, 0x08, 0x51, 0xfd, 0xd4, /* Byte value: 0x92 */ + 0x20, 0xd7, 0xf9, 0x82, 0x73, 0x33, 0x2d, 0x3f, 0x72, 0xb9, 0xaf, 0x1a, 0xb5, 0x16, 0x88, 0xf2, /* Byte value: 0x93 */ + 0xa9, 0x08, 0xf8, 0xbf, 0xfc, 0x6d, 0x81, 0x6a, 0xd3, 0x69, 0x3e, 0x50, 0x6e, 0x57, 0x5a, 0x32, /* Byte value: 0x94 */ + 0x0b, 0x85, 0x38, 0xd1, 0x9b, 0x8d, 0xdf, 0xc6, 0x04, 0x2e, 0x0e, 0x6b, 0x65, 0x20, 0x5e, 0x28, /* Byte value: 0x95 */ + 0xce, 0xb1, 0x11, 0x6b, 0xa8, 0xf7, 0xfe, 0x4c, 0x23, 0x4e, 0x95, 0x60, 0xf5, 0xdb, 0x6c, 0x9d, /* Byte value: 0x96 */ + 0x8f, 0x9c, 0xec, 0x1d, 0xa2, 0x7f, 0x26, 0xe4, 0x90, 0x31, 0x3b, 0x91, 0xaa, 0x0a, 0x2a, 0xe9, /* Byte value: 0x97 */ + 0x94, 0x93, 0x49, 0x8d, 0xe1, 0x0a, 0x0e, 0xdc, 0xad, 0xa2, 0x83, 0xf7, 0x74, 0x21, 0x30, 0xb8, /* Byte value: 0x98 */ + 0x4c, 0xeb, 0x28, 0x87, 0xbc, 0x24, 0x8d, 0xdf, 0x86, 0xb0, 0x0a, 0x41, 0x4b, 0xba, 0xe0, 0x75, /* Byte value: 0x99 */ + 0x29, 0xd2, 0x9a, 0xf2, 0xf3, 0xa1, 0x35, 0x96, 0xd8, 0xc8, 0xc7, 0x38, 0xff, 0x0f, 0x3f, 0x7c, /* Byte value: 0x9a */ + 0x5a, 0x22, 0x58, 0xe6, 0x49, 0xfd, 0xf0, 0x90, 0x8e, 0xec, 0x16, 0x97, 0x81, 0xfa, 0x5c, 0x25, /* Byte value: 0x9b */ + 0xb4, 0x44, 0xb0, 0x0f, 0x92, 0x39, 0x23, 0xe3, 0xdf, 0x1b, 0x2c, 0xed, 0xc1, 0x37, 0xb8, 0x4a, /* Byte value: 0x9c */ + 0x6e, 0xbc, 0x8a, 0xa4, 0xd4, 0x08, 0x67, 0x8f, 0x5a, 0x56, 0xc3, 0x12, 0xd1, 0x95, 0x81, 0x21, /* Byte value: 0x9d */ + 0x1a, 0x4f, 0x69, 0x21, 0xaf, 0x9b, 0xaa, 0xee, 0x6a, 0x5d, 0x8b, 0xa3, 0x28, 0xd6, 0x8f, 0x02, /* Byte value: 0x9e */ + 0xf7, 0xe9, 0x16, 0xd8, 0x83, 0xae, 0x3c, 0x24, 0xc2, 0x3b, 0xe4, 0x55, 0xb1, 0xdf, 0x17, 0x98, /* Byte value: 0x9f */ + 0x67, 0xb9, 0xe9, 0xd4, 0x54, 0x9a, 0x7f, 0x26, 0xf0, 0x27, 0xab, 0x30, 0x9b, 0x8c, 0x36, 0xaf, /* Byte value: 0xa0 */ + 0xd4, 0xfe, 0x78, 0x4a, 0x07, 0x6c, 0x54, 0xa2, 0x49, 0x13, 0x1e, 0xc3, 0xdd, 0x0d, 0xe3, 0x9f, /* Byte value: 0xa1 */ + 0xb8, 0xc2, 0xa9, 0x4f, 0xc8, 0x7b, 0xf4, 0x42, 0xbd, 0x1a, 0xbb, 0x98, 0x23, 0xa1, 0x8b, 0x18, /* Byte value: 0xa2 */ + 0xd8, 0x78, 0x61, 0x0a, 0x5d, 0x2e, 0x83, 0x03, 0x2b, 0x12, 0x89, 0xb6, 0x3f, 0x9b, 0xd0, 0xcd, /* Byte value: 0xa3 */ + 0x8a, 0x1f, 0x96, 0x2d, 0x78, 0xaf, 0xe9, 0xec, 0x58, 0x41, 0xc4, 0xc6, 0x02, 0x85, 0xae, 0x35, /* Byte value: 0xa4 */ + 0xda, 0xf8, 0x3a, 0xab, 0x46, 0x31, 0x44, 0x6c, 0x85, 0x4d, 0xef, 0xff, 0x10, 0xa2, 0x39, 0x6b, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x3d, 0x9b, 0xb1, 0x32, 0x1d, 0x67, 0x8f, 0xb6, 0x7e, 0xcb, 0xbd, 0xa7, 0x1a, 0x76, 0x6a, 0x8a, /* Byte value: 0xa7 */ + 0x99, 0x55, 0x9c, 0x7c, 0x57, 0xa6, 0x5b, 0xab, 0x98, 0x6d, 0x27, 0x47, 0x60, 0x4a, 0x96, 0xb9, /* Byte value: 0xa8 */ + 0xd3, 0xfd, 0x59, 0xdb, 0xc6, 0xa3, 0x5c, 0xc5, 0x2f, 0x3c, 0x87, 0xdd, 0x5a, 0xbb, 0x8e, 0xe5, /* Byte value: 0xa9 */ + 0x9d, 0x96, 0x2a, 0xfd, 0x61, 0x98, 0x16, 0x75, 0x07, 0xd3, 0xeb, 0xd5, 0x3e, 0x38, 0x87, 0x36, /* Byte value: 0xaa */ + 0x7e, 0x36, 0x17, 0xe5, 0x0c, 0xf0, 0x90, 0x71, 0x63, 0xeb, 0x75, 0x1f, 0x6a, 0x9e, 0xc5, 0x58, /* Byte value: 0xab */ + 0x06, 0x43, 0xed, 0x20, 0x2d, 0x21, 0x8a, 0xb1, 0x31, 0xe1, 0xaa, 0xdb, 0x71, 0x4b, 
0xf8, 0x29, /* Byte value: 0xac */ + 0xd1, 0x7d, 0x02, 0x7a, 0xdd, 0xbc, 0x9b, 0xaa, 0x81, 0x63, 0xe1, 0x94, 0x75, 0x82, 0x67, 0x43, /* Byte value: 0xad */ + 0xaa, 0xc8, 0x6f, 0xaf, 0x0b, 0x9c, 0xc4, 0xd3, 0x2a, 0xf8, 0x6b, 0xdc, 0xb7, 0x93, 0x26, 0xc7, /* Byte value: 0xae */ + 0xa4, 0xce, 0x2d, 0x4e, 0x4a, 0xc1, 0xd4, 0x1d, 0xe6, 0xa6, 0x9a, 0xe0, 0x7a, 0x3c, 0xfc, 0x33, /* Byte value: 0xaf */ + 0x03, 0xc0, 0x97, 0x10, 0xf7, 0xf1, 0x45, 0xb9, 0xf9, 0x91, 0x55, 0x8c, 0xd9, 0xc4, 0x7c, 0xf5, /* Byte value: 0xb0 */ + 0x23, 0x17, 0x6e, 0x92, 0x84, 0xc2, 0x68, 0x86, 0x8b, 0x28, 0xfa, 0x96, 0x6c, 0xd2, 0xf4, 0x07, /* Byte value: 0xb1 */ + 0x0a, 0xc5, 0xf4, 0x60, 0x77, 0x63, 0x5d, 0x10, 0x53, 0xe0, 0x3d, 0xae, 0x93, 0xdd, 0xcb, 0x7b, /* Byte value: 0xb2 */ + 0x7c, 0xb6, 0x4c, 0x44, 0x17, 0xef, 0x57, 0x1e, 0xcd, 0xb4, 0x13, 0x56, 0x45, 0xa7, 0x2c, 0xfe, /* Byte value: 0xb3 */ + 0x22, 0x57, 0xa2, 0x23, 0x68, 0x2c, 0xea, 0x50, 0xdc, 0xe6, 0xc9, 0x53, 0x9a, 0x2f, 0x61, 0x54, /* Byte value: 0xb4 */ + 0x37, 0x5e, 0x45, 0x52, 0x6a, 0x04, 0xd2, 0xa6, 0x2d, 0x2b, 0x80, 0x09, 0x89, 0xab, 0xa1, 0xf1, /* Byte value: 0xb5 */ + 0x4a, 0xa8, 0xc5, 0xa7, 0x91, 0x05, 0x07, 0x6e, 0xb7, 0x51, 0xa0, 0x9a, 0x3a, 0xf1, 0x18, 0x5c, /* Byte value: 0xb6 */ + 0x2e, 0xd1, 0xbb, 0x63, 0x32, 0x6e, 0x3d, 0xf1, 0xbe, 0xe7, 0x5e, 0x26, 0x78, 0xb9, 0x52, 0x06, /* Byte value: 0xb7 */ + 0x25, 0x54, 0x83, 0xb2, 0xa9, 0xe3, 0xe2, 0x37, 0xba, 0xc9, 0x50, 0x4d, 0x1d, 0x99, 0x0c, 0x2e, /* Byte value: 0xb8 */ + 0xf8, 0xaf, 0x98, 0x88, 0x2e, 0x1d, 0xae, 0x3c, 0x59, 0xab, 0x26, 0xac, 0x8a, 0x8d, 0x58, 0x3f, /* Byte value: 0xb9 */ + 0xdd, 0xfb, 0x1b, 0x3a, 0x87, 0xfe, 0x4c, 0x0b, 0xe3, 0x62, 0x76, 0xe1, 0x97, 0x14, 0x54, 0x11, /* Byte value: 0xba */ + 0x31, 0x1d, 0xa8, 0x72, 0x47, 0x25, 0x58, 0x17, 0x1c, 0xca, 0x2a, 0xd2, 0xf8, 0xe0, 0x59, 0xd8, /* Byte value: 0xbb */ + 0x74, 0xf3, 0xe3, 0x85, 0x7b, 0x93, 0xcd, 0x61, 0x30, 0x0b, 0x48, 0xb1, 0xf9, 0x43, 0x0e, 0x23, /* Byte value: 0xbc */ + 0x59, 0xe2, 0xcf, 0xf6, 0xbe, 0x0c, 0xb5, 0x29, 0x77, 0x7d, 0x43, 0x1b, 0x58, 0x3e, 0x20, 0xd0, /* Byte value: 0xbd */ + 0xcd, 0x71, 0x86, 0x7b, 0x5f, 0x06, 0xbb, 0xf5, 0xda, 0xdf, 0xc0, 0xec, 0x2c, 0x1f, 0x10, 0x68, /* Byte value: 0xbe */ + 0x5d, 0x21, 0x79, 0x77, 0x88, 0x32, 0xf8, 0xf7, 0xe8, 0xc3, 0x8f, 0x89, 0x06, 0x4c, 0x31, 0x5f, /* Byte value: 0xbf */ + 0x08, 0x45, 0xaf, 0xc1, 0x6c, 0x7c, 0x9a, 0x7f, 0xfd, 0xbf, 0x5b, 0xe7, 0xbc, 0xe4, 0x22, 0xdd, /* Byte value: 0xc0 */ + 0x7f, 0x76, 0xdb, 0x54, 0xe0, 0x1e, 0x12, 0xa7, 0x34, 0x25, 0x46, 0xda, 0x9c, 0x63, 0x50, 0x0b, /* Byte value: 0xc1 */ + 0xa6, 0x4e, 0x76, 0xef, 0x51, 0xde, 0x13, 0x72, 0x48, 0xf9, 0xfc, 0xa9, 0x55, 0x05, 0x15, 0x95, /* Byte value: 0xc2 */ + 0xf9, 0xef, 0x54, 0x39, 0xc2, 0xf3, 0x2c, 0xea, 0x0e, 0x65, 0x15, 0x69, 0x7c, 0x70, 0xcd, 0x6c, /* Byte value: 0xc3 */ + 0x90, 0x50, 0xff, 0x0c, 0xd7, 0x34, 0x43, 0x02, 0x32, 0x1c, 0x4f, 0x65, 0x2a, 0x53, 0x21, 0x37, /* Byte value: 0xc4 */ + 0x0f, 0x46, 0x8e, 0x50, 0xad, 0xb3, 0x92, 0x18, 0x9b, 0x90, 0xc2, 0xf9, 0x3b, 0x52, 0x4f, 0xa7, /* Byte value: 0xc5 */ + 0xb0, 0x87, 0x06, 0x8e, 0xa4, 0x07, 0x6e, 0x3d, 0x40, 0xa5, 0xe0, 0x7f, 0x9f, 0x45, 0xa9, 0xc5, /* Byte value: 0xc6 */ + 0xac, 0x8b, 0x82, 0x8f, 0x26, 0xbd, 0x4e, 0x62, 0x1b, 0x19, 0xc1, 0x07, 0xc6, 0xd8, 0xde, 0xee, /* Byte value: 0xc7 */ + 0x60, 0xba, 0xc8, 0x45, 0x95, 0x55, 0x77, 0x41, 0x96, 0x08, 0x32, 0x2e, 0x1c, 0x3a, 0x5b, 0xd5, /* Byte value: 0xc8 */ + 0x6c, 0x3c, 0xd1, 0x05, 0xcf, 0x17, 0xa0, 0xe0, 0xf4, 0x09, 0xa5, 0x5b, 0xfe, 0xac, 0x68, 0x87, /* Byte value: 0xc9 */ + 0x13, 
0x4a, 0x0a, 0x51, 0x2f, 0x09, 0xb2, 0x47, 0xc0, 0x2c, 0xe3, 0x81, 0x62, 0xcf, 0x38, 0x8c, /* Byte value: 0xca */ + 0xbc, 0x01, 0x1f, 0xce, 0xfe, 0x45, 0xb9, 0x9c, 0x22, 0xa4, 0x77, 0x0a, 0x7d, 0xd3, 0x9a, 0x97, /* Byte value: 0xcb */ + 0x32, 0xdd, 0x3f, 0x62, 0xb0, 0xd4, 0x1d, 0xae, 0xe5, 0x5b, 0x7f, 0x5e, 0x21, 0x24, 0x25, 0x2d, /* Byte value: 0xcc */ + 0xf1, 0xaa, 0xfb, 0xf8, 0xae, 0x8f, 0xb6, 0x95, 0xf3, 0xda, 0x4e, 0x8e, 0xc0, 0x94, 0xef, 0xb1, /* Byte value: 0xcd */ + 0xd6, 0x7e, 0x23, 0xeb, 0x1c, 0x73, 0x93, 0xcd, 0xe7, 0x4c, 0x78, 0x8a, 0xf2, 0x34, 0x0a, 0x39, /* Byte value: 0xce */ + 0x44, 0xae, 0x87, 0x46, 0xd0, 0x58, 0x17, 0xa0, 0x7b, 0x0f, 0x51, 0xa6, 0xf7, 0x5e, 0xc2, 0xa8, /* Byte value: 0xcf */ + 0x62, 0x3a, 0x93, 0xe4, 0x8e, 0x4a, 0xb0, 0x2e, 0x38, 0x57, 0x54, 0x67, 0x33, 0x03, 0xb2, 0x73, /* Byte value: 0xd0 */ + 0xa8, 0x48, 0x34, 0x0e, 0x10, 0x83, 0x03, 0xbc, 0x84, 0xa7, 0x0d, 0x95, 0x98, 0xaa, 0xcf, 0x61, /* Byte value: 0xd1 */ + 0x40, 0x6d, 0x31, 0xc7, 0xe6, 0x66, 0x5a, 0x7e, 0xe4, 0xb1, 0x9d, 0x34, 0xa9, 0x2c, 0xd3, 0x27, /* Byte value: 0xd2 */ + 0x95, 0xd3, 0x85, 0x3c, 0x0d, 0xe4, 0x8c, 0x0a, 0xfa, 0x6c, 0xb0, 0x32, 0x82, 0xdc, 0xa5, 0xeb, /* Byte value: 0xd3 */ + 0x11, 0xca, 0x51, 0xf0, 0x34, 0x16, 0x75, 0x28, 0x6e, 0x73, 0x85, 0xc8, 0x4d, 0xf6, 0xd1, 0x2a, /* Byte value: 0xd4 */ + 0x8b, 0x5f, 0x5a, 0x9c, 0x94, 0x41, 0x6b, 0x3a, 0x0f, 0x8f, 0xf7, 0x03, 0xf4, 0x78, 0x3b, 0x66, /* Byte value: 0xd5 */ + 0x01, 0x40, 0xcc, 0xb1, 0xec, 0xee, 0x82, 0xd6, 0x57, 0xce, 0x33, 0xc5, 0xf6, 0xfd, 0x95, 0x53, /* Byte value: 0xd6 */ + 0x0d, 0xc6, 0xd5, 0xf1, 0xb6, 0xac, 0x55, 0x77, 0x35, 0xcf, 0xa4, 0xb0, 0x14, 0x6b, 0xa6, 0x01, /* Byte value: 0xd7 */ + 0xe1, 0x20, 0x66, 0xb9, 0x76, 0x77, 0x41, 0x6b, 0xca, 0x67, 0xf8, 0x83, 0x7b, 0x9f, 0xab, 0xc8, /* Byte value: 0xd8 */ + 0x6b, 0x3f, 0xf0, 0x94, 0x0e, 0xd8, 0xa8, 0x87, 0x92, 0x26, 0x3c, 0x45, 0x79, 0x1a, 0x05, 0xfd, /* Byte value: 0xd9 */ + 0xc2, 0x37, 0x08, 0x2b, 0xf2, 0xb5, 0x29, 0xed, 0x41, 0x4f, 0x02, 0x15, 0x17, 0x4d, 0x5f, 0xcf, /* Byte value: 0xda */ + 0x5c, 0x61, 0xb5, 0xc6, 0x64, 0xdc, 0x7a, 0x21, 0xbf, 0x0d, 0xbc, 0x4c, 0xf0, 0xb1, 0xa4, 0x0c, /* Byte value: 0xdb */ + 0xc7, 0xb4, 0x72, 0x1b, 0x28, 0x65, 0xe6, 0xe5, 0x89, 0x3f, 0xfd, 0x42, 0xbf, 0xc2, 0xdb, 0x13, /* Byte value: 0xdc */ + 0x84, 0x19, 0xd4, 0xcc, 0x39, 0xf2, 0xf9, 0x22, 0x94, 0x1f, 0x35, 0xfa, 0xcf, 0x2a, 0x74, 0xc1, /* Byte value: 0xdd */ + 0x45, 0xee, 0x4b, 0xf7, 0x3c, 0xb6, 0x95, 0x76, 0x2c, 0xc1, 0x62, 0x63, 0x01, 0xa3, 0x57, 0xfb, /* Byte value: 0xde */ + 0x79, 0x35, 0x36, 0x74, 0xcd, 0x3f, 0x98, 0x16, 0x05, 0xc4, 0xec, 0x01, 0xed, 0x28, 0xa8, 0x22, /* Byte value: 0xdf */ + 0x4b, 0xe8, 0x09, 0x16, 0x7d, 0xeb, 0x85, 0xb8, 0xe0, 0x9f, 0x93, 0x5f, 0xcc, 0x0c, 0x8d, 0x0f, /* Byte value: 0xe0 */ + 0x48, 0x28, 0x9e, 0x06, 0x8a, 0x1a, 0xc0, 0x01, 0x19, 0x0e, 0xc6, 0xd3, 0x15, 0xc8, 0xf1, 0xfa, /* Byte value: 0xe1 */ + 0x6f, 0xfc, 0x46, 0x15, 0x38, 0xe6, 0xe5, 0x59, 0x0d, 0x98, 0xf0, 0xd7, 0x27, 0x68, 0x14, 0x72, /* Byte value: 0xe2 */ + 0xa7, 0x0e, 0xba, 0x5e, 0xbd, 0x30, 0x91, 0xa4, 0x1f, 0x37, 0xcf, 0x6c, 0xa3, 0xf8, 0x80, 0xc6, /* Byte value: 0xe3 */ + 0xa2, 0x8d, 0xc0, 0x6e, 0x67, 0xe0, 0x5e, 0xac, 0xd7, 0x47, 0x30, 0x3b, 0x0b, 0x77, 0x04, 0x1a, /* Byte value: 0xe4 */ + 0xc0, 0xb7, 0x53, 0x8a, 0xe9, 0xaa, 0xee, 0x82, 0xef, 0x10, 0x64, 0x5c, 0x38, 0x74, 0xb6, 0x69, /* Byte value: 0xe5 */ + 0x2c, 0x51, 0xe0, 0xc2, 0x29, 0x71, 0xfa, 0x9e, 0x10, 0xb8, 0x38, 0x6f, 0x57, 0x80, 0xbb, 0xa0, /* Byte value: 0xe6 */ + 0x81, 0x9a, 0xae, 0xfc, 0xe3, 0x22, 0x36, 0x2a, 
0x5c, 0x6f, 0xca, 0xad, 0x67, 0xa5, 0xf0, 0x1d, /* Byte value: 0xe7 */ + 0xd0, 0x3d, 0xce, 0xcb, 0x31, 0x52, 0x19, 0x7c, 0xd6, 0xad, 0xd2, 0x51, 0x83, 0x7f, 0xf2, 0x10, /* Byte value: 0xe8 */ + 0x17, 0x89, 0xbc, 0xd0, 0x19, 0x37, 0xff, 0x99, 0x5f, 0x92, 0x2f, 0x13, 0x3c, 0xbd, 0x29, 0x03, /* Byte value: 0xe9 */ + 0xa3, 0xcd, 0x0c, 0xdf, 0x8b, 0x0e, 0xdc, 0x7a, 0x80, 0x89, 0x03, 0xfe, 0xfd, 0x8a, 0x91, 0x49, /* Byte value: 0xea */ + 0xd2, 0xbd, 0x95, 0x6a, 0x2a, 0x4d, 0xde, 0x13, 0x78, 0xf2, 0xb4, 0x18, 0xac, 0x46, 0x1b, 0xb6, /* Byte value: 0xeb */ + 0x57, 0xe4, 0x8d, 0x17, 0xff, 0x51, 0xa5, 0xe7, 0xbb, 0x23, 0xb2, 0x27, 0x95, 0x91, 0xfa, 0x24, /* Byte value: 0xec */ + 0x71, 0x70, 0x99, 0xb5, 0xa1, 0x43, 0x02, 0x69, 0xf8, 0x7b, 0xb7, 0xe6, 0x51, 0xcc, 0x8a, 0xff, /* Byte value: 0xed */ + 0xa1, 0x4d, 0x57, 0x7e, 0x90, 0x11, 0x1b, 0x15, 0x2e, 0xd6, 0x65, 0xb7, 0xd2, 0xb3, 0x78, 0xef, /* Byte value: 0xee */ + 0xc3, 0x77, 0xc4, 0x9a, 0x1e, 0x5b, 0xab, 0x3b, 0x16, 0x81, 0x31, 0xd0, 0xe1, 0xb0, 0xca, 0x9c, /* Byte value: 0xef */ + 0x51, 0xa7, 0x60, 0x37, 0xd2, 0x70, 0x2f, 0x56, 0x8a, 0xc2, 0x18, 0xfc, 0xe4, 0xda, 0x02, 0x0d, /* Byte value: 0xf0 */ + 0x35, 0xde, 0x1e, 0xf3, 0x71, 0x1b, 0x15, 0xc9, 0x83, 0x74, 0xe6, 0x40, 0xa6, 0x92, 0x48, 0x57, /* Byte value: 0xf1 */ + 0x47, 0x6e, 0x10, 0x56, 0x27, 0xa9, 0x52, 0x19, 0x82, 0x9e, 0x04, 0x2a, 0x2e, 0x9a, 0xbe, 0x5d, /* Byte value: 0xf2 */ + 0xf5, 0x69, 0x4d, 0x79, 0x98, 0xb1, 0xfb, 0x4b, 0x6c, 0x64, 0x82, 0x1c, 0x9e, 0xe6, 0xfe, 0x3e, /* Byte value: 0xf3 */ + 0xb2, 0x07, 0x5d, 0x2f, 0xbf, 0x18, 0xa9, 0x52, 0xee, 0xfa, 0x86, 0x36, 0xb0, 0x7c, 0x40, 0x63, /* Byte value: 0xf4 */ + 0xd7, 0x3e, 0xef, 0x5a, 0xf0, 0x9d, 0x11, 0x1b, 0xb0, 0x82, 0x4b, 0x4f, 0x04, 0xc9, 0x9f, 0x6a, /* Byte value: 0xf5 */ + 0x16, 0xc9, 0x70, 0x61, 0xf5, 0xd9, 0x7d, 0x4f, 0x08, 0x5c, 0x1c, 0xd6, 0xca, 0x40, 0xbc, 0x50, /* Byte value: 0xf6 */ + 0x86, 0x99, 0x8f, 0x6d, 0x22, 0xed, 0x3e, 0x4d, 0x3a, 0x40, 0x53, 0xb3, 0xe0, 0x13, 0x9d, 0x67, /* Byte value: 0xf7 */ + 0x73, 0xf0, 0xc2, 0x14, 0xba, 0x5c, 0xc5, 0x06, 0x56, 0x24, 0xd1, 0xaf, 0x7e, 0xf5, 0x63, 0x59, /* Byte value: 0xf8 */ + 0xc4, 0x74, 0xe5, 0x0b, 0xdf, 0x94, 0xa3, 0x5c, 0x70, 0xae, 0xa8, 0xce, 0x66, 0x06, 0xa7, 0xe6, /* Byte value: 0xf9 */ + 0x34, 0x9e, 0xd2, 0x42, 0x9d, 0xf5, 0x97, 0x1f, 0xd4, 0xba, 0xd5, 0x85, 0x50, 0x6f, 0xdd, 0x04, /* Byte value: 0xfa */ + 0xc6, 0xf4, 0xbe, 0xaa, 0xc4, 0x8b, 0x64, 0x33, 0xde, 0xf1, 0xce, 0x87, 0x49, 0x3f, 0x4e, 0x40, /* Byte value: 0xfb */ + 0x96, 0x13, 0x12, 0x2c, 0xfa, 0x15, 0xc9, 0xb3, 0x03, 0xfd, 0xe5, 0xbe, 0x5b, 0x18, 0xd9, 0x1e, /* Byte value: 0xfc */ + 0xed, 0xa6, 0x7f, 0xf9, 0x2c, 0x35, 0x96, 0xca, 0xa8, 0x66, 0x6f, 0xf6, 0x99, 0x09, 0x98, 0x9a, /* Byte value: 0xfd */ + 0xf4, 0x29, 0x81, 0xc8, 0x74, 0x5f, 0x79, 0x9d, 0x3b, 0xaa, 0xb1, 0xd9, 0x68, 0x1b, 0x6b, 0x6d, /* Byte value: 0xfe */ + 0xae, 0x0b, 0xd9, 0x2e, 0x3d, 0xa2, 0x89, 0x0d, 0xb5, 0x46, 0xa7, 0x4e, 0xe9, 0xe1, 0x37, 0x48, /* Byte value: 0xff */ + 0x63, 0x7a, 0x5f, 0x55, 0x62, 0xa4, 0x32, 0xf8, 0x6f, 0x99, 0x67, 0xa2, 0xc5, 0xfe, 0x27, 0x20, /* Byte value: 0x00 */ + + /* Matrix row: 5 */ + 0x79, 0x80, 0xab, 0xe7, 0xa9, 0x6f, 0x50, 0x97, 0xbe, 0x20, 0xdb, 0x5a, 0xb5, 0x1a, 0xf0, 0xb4, /* Byte value: 0x01 */ + 0x91, 0x1a, 0x97, 0x16, 0x14, 0x45, 0x9f, 0x6e, 0x60, 0xe7, 0xef, 0x55, 0x0a, 0x58, 0x62, 0xaa, /* Byte value: 0x02 */ + 0x99, 0xdc, 0x5f, 0x93, 0xd1, 0x32, 0x94, 0x30, 0x3d, 0x37, 0xfc, 0x67, 0x89, 0x09, 0x7f, 0xce, /* Byte value: 0x03 */ + 0xb9, 0x42, 0xf9, 0xc2, 0x43, 0x2d, 0xb8, 0x8b, 0x8a, 0xf1, 0xb0, 
0xaf, 0xc0, 0x8e, 0x0b, 0x9d, /* Byte value: 0x04 */ + 0x71, 0x46, 0x63, 0x62, 0x6c, 0x18, 0x5b, 0xc9, 0xe3, 0xf0, 0xc8, 0x68, 0x36, 0x4b, 0xed, 0xd0, /* Byte value: 0x05 */ + 0xea, 0x4a, 0x0e, 0x41, 0x1d, 0x47, 0xbd, 0x0f, 0x58, 0xf3, 0x40, 0xe2, 0xef, 0xc7, 0x04, 0x07, /* Byte value: 0x06 */ + 0xd7, 0x56, 0x4e, 0xa7, 0xd0, 0xcc, 0x51, 0x02, 0x84, 0xf4, 0xeb, 0xf2, 0x68, 0xd9, 0xf3, 0x27, /* Byte value: 0x07 */ + 0x95, 0x79, 0xf3, 0xb5, 0x97, 0x9f, 0x7b, 0x41, 0xaf, 0x8f, 0x07, 0x4c, 0xaa, 0x91, 0x8d, 0x98, /* Byte value: 0x08 */ + 0x55, 0xbb, 0xa1, 0x90, 0x7d, 0xdd, 0x93, 0x5d, 0x9b, 0x5e, 0x6c, 0xb9, 0xdf, 0x05, 0x76, 0xb1, /* Byte value: 0x09 */ + 0xda, 0x9b, 0xfb, 0xd9, 0xc6, 0xb6, 0x87, 0x08, 0x55, 0x56, 0x2a, 0x4e, 0x63, 0xe2, 0x4a, 0x9c, /* Byte value: 0x0a */ + 0xdb, 0xf3, 0xe2, 0x81, 0x96, 0x61, 0xbe, 0x73, 0x16, 0x4c, 0x10, 0xd9, 0x4b, 0x41, 0x01, 0x71, /* Byte value: 0x0b */ + 0xb5, 0xe7, 0x55, 0xe4, 0x05, 0x80, 0x57, 0xfa, 0x18, 0x49, 0x4b, 0x84, 0xe3, 0x16, 0xf9, 0xcb, /* Byte value: 0x0c */ + 0x3f, 0xcc, 0x72, 0x56, 0x6d, 0xe6, 0x9e, 0xfb, 0x5a, 0x33, 0xdf, 0xfd, 0xd7, 0x9b, 0x61, 0x39, /* Byte value: 0x0d */ + 0x54, 0xd3, 0xb8, 0xc8, 0x2d, 0x0a, 0xaa, 0x26, 0xd8, 0x44, 0x56, 0x2e, 0xf7, 0xa6, 0x3d, 0x5c, /* Byte value: 0x0e */ + 0x7d, 0xe3, 0xcf, 0x44, 0x2a, 0xb5, 0xb4, 0xb8, 0x71, 0x48, 0x33, 0x43, 0x15, 0xd3, 0x1f, 0x86, /* Byte value: 0x0f */ + 0xd5, 0x86, 0x7c, 0x17, 0x70, 0xa1, 0x23, 0xf4, 0x02, 0xc0, 0x9f, 0x1f, 0x38, 0x5c, 0x65, 0x3e, /* Byte value: 0x10 */ + 0xbd, 0x21, 0x9d, 0x61, 0xc0, 0xf7, 0x5c, 0xa4, 0x45, 0x99, 0x58, 0xb6, 0x60, 0x47, 0xe4, 0xaf, /* Byte value: 0x11 */ + 0xa9, 0x0d, 0xaa, 0x0b, 0x0a, 0xc3, 0xae, 0x37, 0x30, 0x92, 0x96, 0xcb, 0x05, 0x2c, 0x31, 0x55, /* Byte value: 0x12 */ + 0xfe, 0x66, 0x39, 0x2b, 0xd7, 0x73, 0x4f, 0x9c, 0x2d, 0xf8, 0x8e, 0x9f, 0x8a, 0xac, 0xd1, 0xfd, /* Byte value: 0x13 */ + 0x3b, 0xaf, 0x16, 0xf5, 0xee, 0x3c, 0x7a, 0xd4, 0x95, 0x5b, 0x37, 0xe4, 0x77, 0x52, 0x8e, 0x0b, /* Byte value: 0x14 */ + 0x1d, 0x82, 0xe6, 0xb7, 0x5f, 0x94, 0xc0, 0xb6, 0x6b, 0xc1, 0xe7, 0xd8, 0xce, 0x99, 0x83, 0x73, /* Byte value: 0x15 */ + 0x36, 0x62, 0xa3, 0x8b, 0xf8, 0x46, 0xac, 0xde, 0x44, 0xf9, 0xf6, 0x58, 0x7c, 0x69, 0x37, 0xb0, /* Byte value: 0x16 */ + 0x38, 0x17, 0x3d, 0x1d, 0x1e, 0x86, 0x31, 0x59, 0x50, 0x75, 0x79, 0x9e, 0x0f, 0x74, 0x53, 0xff, /* Byte value: 0x17 */ + 0x07, 0xdb, 0x4f, 0x4b, 0x73, 0x60, 0xaf, 0xa2, 0x0a, 0x46, 0xa6, 0x63, 0xd8, 0xef, 0x32, 0xc6, /* Byte value: 0x18 */ + 0x1b, 0x31, 0xb0, 0xa4, 0x7c, 0x23, 0x56, 0x6f, 0x22, 0x9d, 0x7b, 0x2c, 0x3e, 0xd5, 0xfa, 0x58, /* Byte value: 0x19 */ + 0xfb, 0x6d, 0x44, 0xd0, 0x04, 0x7e, 0x92, 0xc8, 0xa1, 0x8a, 0x5c, 0x11, 0x02, 0xc6, 0x75, 0x22, /* Byte value: 0x1a */ + 0x70, 0x2e, 0x7a, 0x3a, 0x3c, 0xcf, 0x62, 0xb2, 0xa0, 0xea, 0xf2, 0xff, 0x1e, 0xe8, 0xa6, 0x3d, /* Byte value: 0x1b */ + 0x89, 0x93, 0x0c, 0x5a, 0x98, 0xdc, 0x82, 0x8c, 0x87, 0x54, 0xda, 0x03, 0x4c, 0xab, 0x45, 0x06, /* Byte value: 0x1c */ + 0x4a, 0xe9, 0x75, 0x97, 0x82, 0x24, 0x21, 0x1d, 0x76, 0xab, 0xff, 0x8c, 0x41, 0x19, 0x63, 0xdb, /* Byte value: 0x1d */ + 0xae, 0xd6, 0xe5, 0x40, 0x79, 0xa3, 0x01, 0x95, 0x3a, 0xd4, 0x30, 0xa8, 0xdd, 0xc3, 0x03, 0x93, /* Byte value: 0x1e */ + 0x3d, 0x1c, 0x40, 0xe6, 0xcd, 0x8b, 0xec, 0x0d, 0xdc, 0x07, 0xab, 0x10, 0x87, 0x1e, 0xf7, 0x20, /* Byte value: 0x1f */ + 0x29, 0x30, 0x77, 0x8c, 0x07, 0xbf, 0x1e, 0x9e, 0xa9, 0x0c, 0x65, 0x6d, 0xe2, 0x75, 0x22, 0xda, /* Byte value: 0x20 */ + 0x8a, 0x2b, 0x27, 0xb2, 0x68, 0x66, 0xc9, 0x01, 0x42, 0x7a, 0x94, 0x79, 0x34, 0x8d, 0x98, 0xf2, /* Byte value: 
0x21 */ + 0xcd, 0x0f, 0xe7, 0x5b, 0xfc, 0x38, 0x3e, 0x16, 0xe5, 0x73, 0xaa, 0x49, 0x7e, 0xaf, 0x42, 0x92, /* Byte value: 0x22 */ + 0x41, 0x97, 0x96, 0xfa, 0xb7, 0xe9, 0x61, 0xce, 0xee, 0x55, 0xa2, 0xc4, 0xba, 0x6e, 0xa3, 0x4b, /* Byte value: 0x23 */ + 0xce, 0xb7, 0xcc, 0xb3, 0x0c, 0x82, 0x75, 0x9b, 0x20, 0x5d, 0xe4, 0x33, 0x06, 0x89, 0x9f, 0x66, /* Byte value: 0x24 */ + 0x16, 0xfc, 0x05, 0xda, 0x6a, 0x59, 0x80, 0x65, 0xf3, 0x3f, 0xba, 0x90, 0x35, 0xee, 0x43, 0xe3, /* Byte value: 0x25 */ + 0x6c, 0xc4, 0x85, 0xd5, 0x33, 0x8c, 0x9b, 0x7f, 0x88, 0x31, 0x2f, 0xb0, 0xf8, 0xd2, 0x6e, 0xa3, /* Byte value: 0x26 */ + 0x1f, 0x52, 0xd4, 0x07, 0xff, 0xf9, 0xb2, 0x40, 0xed, 0xf5, 0x93, 0x35, 0x9e, 0x1c, 0x15, 0x6a, /* Byte value: 0x27 */ + 0xe0, 0x5c, 0xf4, 0x74, 0x78, 0x5d, 0xc4, 0xa7, 0x83, 0x17, 0x27, 0x3d, 0x3c, 0x13, 0x8f, 0x7a, /* Byte value: 0x28 */ + 0xf5, 0x18, 0xda, 0x46, 0xe2, 0xbe, 0x0f, 0x4f, 0xb5, 0x06, 0xd3, 0xd7, 0x71, 0xdb, 0x11, 0x6d, /* Byte value: 0x29 */ + 0xb0, 0xec, 0x28, 0x1f, 0xd6, 0x8d, 0x8a, 0xae, 0x94, 0x3b, 0x99, 0x0a, 0x6b, 0x7c, 0x5d, 0x14, /* Byte value: 0x2a */ + 0xde, 0xf8, 0x9f, 0x7a, 0x45, 0x6c, 0x63, 0x27, 0x9a, 0x3e, 0xc2, 0x57, 0xc3, 0x2b, 0xa5, 0xae, /* Byte value: 0x2b */ + 0x03, 0xb8, 0x2b, 0xe8, 0xf0, 0xba, 0x4b, 0x8d, 0xc5, 0x2e, 0x4e, 0x7a, 0x78, 0x26, 0xdd, 0xf4, /* Byte value: 0x2c */ + 0xd0, 0x8d, 0x01, 0xec, 0xa3, 0xac, 0xfe, 0xa0, 0x8e, 0xb2, 0x4d, 0x91, 0xb0, 0x36, 0xc1, 0xe1, /* Byte value: 0x2d */ + 0x8e, 0x48, 0x43, 0x11, 0xeb, 0xbc, 0x2d, 0x2e, 0x8d, 0x12, 0x7c, 0x60, 0x94, 0x44, 0x77, 0xc0, /* Byte value: 0x2e */ + 0x23, 0x26, 0x8d, 0xb9, 0x62, 0xa5, 0x67, 0x36, 0x72, 0xe8, 0x02, 0xb2, 0x31, 0xa1, 0xa9, 0xa7, /* Byte value: 0x2f */ + 0x0a, 0x16, 0xfa, 0x35, 0x65, 0x1a, 0x79, 0xa8, 0xdb, 0xe4, 0x67, 0xdf, 0xd3, 0xd4, 0x8b, 0x7d, /* Byte value: 0x30 */ + 0xf3, 0xab, 0x8c, 0x55, 0xc1, 0x09, 0x99, 0x96, 0xfc, 0x5a, 0x4f, 0x23, 0x81, 0x97, 0x68, 0x46, /* Byte value: 0x31 */ + 0x06, 0xb3, 0x56, 0x13, 0x23, 0xb7, 0x96, 0xd9, 0x49, 0x5c, 0x9c, 0xf4, 0xf0, 0x4c, 0x79, 0x2b, /* Byte value: 0x32 */ + 0xdf, 0x90, 0x86, 0x22, 0x15, 0xbb, 0x5a, 0x5c, 0xd9, 0x24, 0xf8, 0xc0, 0xeb, 0x88, 0xee, 0x43, /* Byte value: 0x33 */ + 0x4d, 0x32, 0x3a, 0xdc, 0xf1, 0x44, 0x8e, 0xbf, 0x7c, 0xed, 0x59, 0xef, 0x99, 0xf6, 0x51, 0x1d, /* Byte value: 0x34 */ + 0x98, 0xb4, 0x46, 0xcb, 0x81, 0xe5, 0xad, 0x4b, 0x7e, 0x2d, 0xc6, 0xf0, 0xa1, 0xaa, 0x34, 0x23, /* Byte value: 0x35 */ + 0x97, 0xa9, 0xc1, 0x05, 0x37, 0xf2, 0x09, 0xb7, 0x29, 0xbb, 0x73, 0xa1, 0xfa, 0x14, 0x1b, 0x81, /* Byte value: 0x36 */ + 0xad, 0x6e, 0xce, 0xa8, 0x89, 0x19, 0x4a, 0x18, 0xff, 0xfa, 0x7e, 0xd2, 0xa5, 0xe5, 0xde, 0x67, /* Byte value: 0x37 */ + 0x15, 0x44, 0x2e, 0x32, 0x9a, 0xe3, 0xcb, 0xe8, 0x36, 0x11, 0xf4, 0xea, 0x4d, 0xc8, 0x9e, 0x17, /* Byte value: 0x38 */ + 0xa2, 0x73, 0x49, 0x66, 0x3f, 0x0e, 0xee, 0xe4, 0xa8, 0x6c, 0xcb, 0x83, 0xfe, 0x5b, 0xf1, 0xc5, /* Byte value: 0x39 */ + 0xab, 0xdd, 0x98, 0xbb, 0xaa, 0xae, 0xdc, 0xc1, 0xb6, 0xa6, 0xe2, 0x26, 0x55, 0xa9, 0xa7, 0x4c, /* Byte value: 0x3a */ + 0xc0, 0xc2, 0x52, 0x25, 0xea, 0x42, 0xe8, 0x1c, 0x34, 0xd1, 0x6b, 0xf5, 0x75, 0x94, 0xfb, 0x29, /* Byte value: 0x3b */ + 0xb6, 0x5f, 0x7e, 0x0c, 0xf5, 0x3a, 0x1c, 0x77, 0xdd, 0x67, 0x05, 0xfe, 0x9b, 0x30, 0x24, 0x3f, /* Byte value: 0x3c */ + 0x53, 0x08, 0xf7, 0x83, 0x5e, 0x6a, 0x05, 0x84, 0xd2, 0x02, 0xf0, 0x4d, 0x2f, 0x49, 0x0f, 0x9a, /* Byte value: 0x3d */ + 0xed, 0x91, 0x41, 0x0a, 0x6e, 0x27, 0x12, 0xad, 0x52, 0xb5, 0xe6, 0x81, 0x37, 0x28, 0x36, 0xc1, /* Byte value: 0x3e */ + 0xc1, 0xaa, 0x4b, 0x7d, 0xba, 0x95, 
0xd1, 0x67, 0x77, 0xcb, 0x51, 0x62, 0x5d, 0x37, 0xb0, 0xc4, /* Byte value: 0x3f */ + 0xe1, 0x34, 0xed, 0x2c, 0x28, 0x8a, 0xfd, 0xdc, 0xc0, 0x0d, 0x1d, 0xaa, 0x14, 0xb0, 0xc4, 0x97, /* Byte value: 0x40 */ + 0x62, 0xb1, 0x1b, 0x43, 0xd5, 0x4c, 0x06, 0xf8, 0x9c, 0xbd, 0xa0, 0x76, 0x8b, 0xcf, 0x0a, 0xec, /* Byte value: 0x41 */ + 0x24, 0xfd, 0xc2, 0xf2, 0x11, 0xc5, 0xc8, 0x94, 0x78, 0xae, 0xa4, 0xd1, 0xe9, 0x4e, 0x9b, 0x61, /* Byte value: 0x42 */ + 0xe9, 0xf2, 0x25, 0xa9, 0xed, 0xfd, 0xf6, 0x82, 0x9d, 0xdd, 0x0e, 0x98, 0x97, 0xe1, 0xd9, 0xf3, /* Byte value: 0x43 */ + 0x65, 0x6a, 0x54, 0x08, 0xa6, 0x2c, 0xa9, 0x5a, 0x96, 0xfb, 0x06, 0x15, 0x53, 0x20, 0x38, 0x2a, /* Byte value: 0x44 */ + 0xec, 0xf9, 0x58, 0x52, 0x3e, 0xf0, 0x2b, 0xd6, 0x11, 0xaf, 0xdc, 0x16, 0x1f, 0x8b, 0x7d, 0x2c, /* Byte value: 0x45 */ + 0x5d, 0x7d, 0x69, 0x15, 0xb8, 0xaa, 0x98, 0x03, 0xc6, 0x8e, 0x7f, 0x8b, 0x5c, 0x54, 0x6b, 0xd5, /* Byte value: 0x46 */ + 0x26, 0x2d, 0xf0, 0x42, 0xb1, 0xa8, 0xba, 0x62, 0xfe, 0x9a, 0xd0, 0x3c, 0xb9, 0xcb, 0x0d, 0x78, /* Byte value: 0x47 */ + 0xbe, 0x99, 0xb6, 0x89, 0x30, 0x4d, 0x17, 0x29, 0x80, 0xb7, 0x16, 0xcc, 0x18, 0x61, 0x39, 0x5b, /* Byte value: 0x48 */ + 0x21, 0xf6, 0xbf, 0x09, 0xc2, 0xc8, 0x15, 0xc0, 0xf4, 0xdc, 0x76, 0x5f, 0x61, 0x24, 0x3f, 0xbe, /* Byte value: 0x49 */ + 0x4b, 0x81, 0x6c, 0xcf, 0xd2, 0xf3, 0x18, 0x66, 0x35, 0xb1, 0xc5, 0x1b, 0x69, 0xba, 0x28, 0x36, /* Byte value: 0x4a */ + 0x48, 0x39, 0x47, 0x27, 0x22, 0x49, 0x53, 0xeb, 0xf0, 0x9f, 0x8b, 0x61, 0x11, 0x9c, 0xf5, 0xc2, /* Byte value: 0x4b */ + 0xca, 0xd4, 0xa8, 0x10, 0x8f, 0x58, 0x91, 0xb4, 0xef, 0x35, 0x0c, 0x2a, 0xa6, 0x40, 0x70, 0x54, /* Byte value: 0x4c */ + 0xf7, 0xc8, 0xe8, 0xf6, 0x42, 0xd3, 0x7d, 0xb9, 0x33, 0x32, 0xa7, 0x3a, 0x21, 0x5e, 0x87, 0x74, /* Byte value: 0x4d */ + 0x7c, 0x8b, 0xd6, 0x1c, 0x7a, 0x62, 0x8d, 0xc3, 0x32, 0x52, 0x09, 0xd4, 0x3d, 0x70, 0x54, 0x6b, /* Byte value: 0x4e */ + 0xff, 0x0e, 0x20, 0x73, 0x87, 0xa4, 0x76, 0xe7, 0x6e, 0xe2, 0xb4, 0x08, 0xa2, 0x0f, 0x9a, 0x10, /* Byte value: 0x4f */ + 0x20, 0x9e, 0xa6, 0x51, 0x92, 0x1f, 0x2c, 0xbb, 0xb7, 0xc6, 0x4c, 0xc8, 0x49, 0x87, 0x74, 0x53, /* Byte value: 0x50 */ + 0xd1, 0xe5, 0x18, 0xb4, 0xf3, 0x7b, 0xc7, 0xdb, 0xcd, 0xa8, 0x77, 0x06, 0x98, 0x95, 0x8a, 0x0c, /* Byte value: 0x51 */ + 0x85, 0x36, 0xa0, 0x7c, 0xde, 0x71, 0x6d, 0xfd, 0x15, 0xec, 0x21, 0x28, 0x6f, 0x33, 0xb7, 0x50, /* Byte value: 0x52 */ + 0x58, 0x76, 0x14, 0xee, 0x6b, 0xa7, 0x45, 0x57, 0x4a, 0xfc, 0xad, 0x05, 0xd4, 0x3e, 0xcf, 0x0a, /* Byte value: 0x53 */ + 0x49, 0x51, 0x5e, 0x7f, 0x72, 0x9e, 0x6a, 0x90, 0xb3, 0x85, 0xb1, 0xf6, 0x39, 0x3f, 0xbe, 0x2f, /* Byte value: 0x54 */ + 0xfa, 0x05, 0x5d, 0x88, 0x54, 0xa9, 0xab, 0xb3, 0xe2, 0x90, 0x66, 0x86, 0x2a, 0x65, 0x3e, 0xcf, /* Byte value: 0x55 */ + 0x87, 0xe6, 0x92, 0xcc, 0x7e, 0x1c, 0x1f, 0x0b, 0x93, 0xd8, 0x55, 0xc5, 0x3f, 0xb6, 0x21, 0x49, /* Byte value: 0x56 */ + 0x27, 0x45, 0xe9, 0x1a, 0xe1, 0x7f, 0x83, 0x19, 0xbd, 0x80, 0xea, 0xab, 0x91, 0x68, 0x46, 0x95, /* Byte value: 0x57 */ + 0xe8, 0x9a, 0x3c, 0xf1, 0xbd, 0x2a, 0xcf, 0xf9, 0xde, 0xc7, 0x34, 0x0f, 0xbf, 0x42, 0x92, 0x1e, /* Byte value: 0x58 */ + 0xf4, 0x70, 0xc3, 0x1e, 0xb2, 0x69, 0x36, 0x34, 0xf6, 0x1c, 0xe9, 0x40, 0x59, 0x78, 0x5a, 0x80, /* Byte value: 0x59 */ + 0x5a, 0xa6, 0x26, 0x5e, 0xcb, 0xca, 0x37, 0xa1, 0xcc, 0xc8, 0xd9, 0xe8, 0x84, 0xbb, 0x59, 0x13, /* Byte value: 0x5a */ + 0x66, 0xd2, 0x7f, 0xe0, 0x56, 0x96, 0xe2, 0xd7, 0x53, 0xd5, 0x48, 0x6f, 0x2b, 0x06, 0xe5, 0xde, /* Byte value: 0x5b */ + 0xf0, 0x13, 0xa7, 0xbd, 0x31, 0xb3, 0xd2, 0x1b, 0x39, 0x74, 0x01, 0x59, 0xf9, 
0xb1, 0xb5, 0xb2, /* Byte value: 0x5c */ + 0xcb, 0xbc, 0xb1, 0x48, 0xdf, 0x8f, 0xa8, 0xcf, 0xac, 0x2f, 0x36, 0xbd, 0x8e, 0xe3, 0x3b, 0xb9, /* Byte value: 0x5d */ + 0x0e, 0x75, 0x9e, 0x96, 0xe6, 0xc0, 0x9d, 0x87, 0x14, 0x8c, 0x8f, 0xc6, 0x73, 0x1d, 0x64, 0x4f, /* Byte value: 0x5e */ + 0xe2, 0x8c, 0xc6, 0xc4, 0xd8, 0x30, 0xb6, 0x51, 0x05, 0x23, 0x53, 0xd0, 0x6c, 0x96, 0x19, 0x63, /* Byte value: 0x5f */ + 0x57, 0x6b, 0x93, 0x20, 0xdd, 0xb0, 0xe1, 0xab, 0x1d, 0x6a, 0x18, 0x54, 0x8f, 0x80, 0xe0, 0xa8, /* Byte value: 0x60 */ + 0xc4, 0xa1, 0x36, 0x86, 0x69, 0x98, 0x0c, 0x33, 0xfb, 0xb9, 0x83, 0xec, 0xd5, 0x5d, 0x14, 0x1b, /* Byte value: 0x61 */ + 0x9b, 0x0c, 0x6d, 0x23, 0x71, 0x5f, 0xe6, 0xc6, 0xbb, 0x03, 0x88, 0x8a, 0xd9, 0x8c, 0xe9, 0xd7, /* Byte value: 0x62 */ + 0xee, 0x29, 0x6a, 0xe2, 0x9e, 0x9d, 0x59, 0x20, 0x97, 0x9b, 0xa8, 0xfb, 0x4f, 0x0e, 0xeb, 0x35, /* Byte value: 0x63 */ + 0x1a, 0x59, 0xa9, 0xfc, 0x2c, 0xf4, 0x6f, 0x14, 0x61, 0x87, 0x41, 0xbb, 0x16, 0x76, 0xb1, 0xb5, /* Byte value: 0x64 */ + 0x37, 0x0a, 0xba, 0xd3, 0xa8, 0x91, 0x95, 0xa5, 0x07, 0xe3, 0xcc, 0xcf, 0x54, 0xca, 0x7c, 0x5d, /* Byte value: 0x65 */ + 0x2e, 0xeb, 0x38, 0xc7, 0x74, 0xdf, 0xb1, 0x3c, 0xa3, 0x4a, 0xc3, 0x0e, 0x3a, 0x9a, 0x10, 0x1c, /* Byte value: 0x66 */ + 0x69, 0xcf, 0xf8, 0x2e, 0xe0, 0x81, 0x46, 0x2b, 0x04, 0x43, 0xfd, 0x3e, 0x70, 0xb8, 0xca, 0x7c, /* Byte value: 0x67 */ + 0x8b, 0x43, 0x3e, 0xea, 0x38, 0xb1, 0xf0, 0x7a, 0x01, 0x60, 0xae, 0xee, 0x1c, 0x2e, 0xd3, 0x1f, /* Byte value: 0x68 */ + 0xaf, 0xbe, 0xfc, 0x18, 0x29, 0x74, 0x38, 0xee, 0x79, 0xce, 0x0a, 0x3f, 0xf5, 0x60, 0x48, 0x7e, /* Byte value: 0x69 */ + 0xc2, 0x12, 0x60, 0x95, 0x4a, 0x2f, 0x9a, 0xea, 0xb2, 0xe5, 0x1f, 0x18, 0x25, 0x11, 0x6d, 0x30, /* Byte value: 0x6a */ + 0x7f, 0x33, 0xfd, 0xf4, 0x8a, 0xd8, 0xc6, 0x4e, 0xf7, 0x7c, 0x47, 0xae, 0x45, 0x56, 0x89, 0x9f, /* Byte value: 0x6b */ + 0x64, 0x02, 0x4d, 0x50, 0xf6, 0xfb, 0x90, 0x21, 0xd5, 0xe1, 0x3c, 0x82, 0x7b, 0x83, 0x73, 0xc7, /* Byte value: 0x6c */ + 0x45, 0xf4, 0xf2, 0x59, 0x34, 0x33, 0x85, 0xe1, 0x21, 0x3d, 0x4a, 0xdd, 0x1a, 0xa7, 0x4c, 0x79, /* Byte value: 0x6d */ + 0x14, 0x2c, 0x37, 0x6a, 0xca, 0x34, 0xf2, 0x93, 0x75, 0x0b, 0xce, 0x7d, 0x65, 0x6b, 0xd5, 0xfa, /* Byte value: 0x6e */ + 0xfd, 0xde, 0x12, 0xc3, 0x27, 0xc9, 0x04, 0x11, 0xe8, 0xd6, 0xc0, 0xe5, 0xf2, 0x8a, 0x0c, 0x09, /* Byte value: 0x6f */ + 0xac, 0x06, 0xd7, 0xf0, 0xd9, 0xce, 0x73, 0x63, 0xbc, 0xe0, 0x44, 0x45, 0x8d, 0x46, 0x95, 0x8a, /* Byte value: 0x70 */ + 0x86, 0x8e, 0x8b, 0x94, 0x2e, 0xcb, 0x26, 0x70, 0xd0, 0xc2, 0x6f, 0x52, 0x17, 0x15, 0x6a, 0xa4, /* Byte value: 0x71 */ + 0x76, 0x9d, 0x2c, 0x29, 0x1f, 0x78, 0xf4, 0x6b, 0xe9, 0xb6, 0x6e, 0x0b, 0xee, 0xa4, 0xdf, 0x16, /* Byte value: 0x72 */ + 0x43, 0x47, 0xa4, 0x4a, 0x17, 0x84, 0x13, 0x38, 0x68, 0x61, 0xd6, 0x29, 0xea, 0xeb, 0x35, 0x52, /* Byte value: 0x73 */ + 0x50, 0xb0, 0xdc, 0x6b, 0xae, 0xd0, 0x4e, 0x09, 0x17, 0x2c, 0xbe, 0x37, 0x57, 0x6f, 0xd2, 0x6e, /* Byte value: 0x74 */ + 0x28, 0x58, 0x6e, 0xd4, 0x57, 0x68, 0x27, 0xe5, 0xea, 0x16, 0x5f, 0xfa, 0xca, 0xd6, 0x69, 0x37, /* Byte value: 0x75 */ + 0xaa, 0xb5, 0x81, 0xe3, 0xfa, 0x79, 0xe5, 0xba, 0xf5, 0xbc, 0xd8, 0xb1, 0x7d, 0x0a, 0xec, 0xa1, /* Byte value: 0x76 */ + 0x5e, 0xc5, 0x42, 0xfd, 0x48, 0x10, 0xd3, 0x8e, 0x03, 0xa0, 0x31, 0xf1, 0x24, 0x72, 0xb6, 0x21, /* Byte value: 0x77 */ + 0xdd, 0x40, 0xb4, 0x92, 0xb5, 0xd6, 0x28, 0xaa, 0x5f, 0x10, 0x8c, 0x2d, 0xbb, 0x0d, 0x78, 0x5a, /* Byte value: 0x78 */ + 0xbb, 0x92, 0xcb, 0x72, 0xe3, 0x40, 0xca, 0x7d, 0x0c, 0xc5, 0xc4, 0x42, 0x90, 0x0b, 0x9d, 0x84, /* Byte value: 0x79 */ + 
0x96, 0xc1, 0xd8, 0x5d, 0x67, 0x25, 0x30, 0xcc, 0x6a, 0xa1, 0x49, 0x36, 0xd2, 0xb7, 0x50, 0x6c, /* Byte value: 0x7a */ + 0x05, 0x0b, 0x7d, 0xfb, 0xd3, 0x0d, 0xdd, 0x54, 0x8c, 0x72, 0xd2, 0x8e, 0x88, 0x6a, 0xa4, 0xdf, /* Byte value: 0x7b */ + 0x7b, 0x50, 0x99, 0x57, 0x09, 0x02, 0x22, 0x61, 0x38, 0x14, 0xaf, 0xb7, 0xe5, 0x9f, 0x66, 0xad, /* Byte value: 0x7c */ + 0xf6, 0xa0, 0xf1, 0xae, 0x12, 0x04, 0x44, 0xc2, 0x70, 0x28, 0x9d, 0xad, 0x09, 0xfd, 0xcc, 0x99, /* Byte value: 0x7d */ + 0x2b, 0xe0, 0x45, 0x3c, 0xa7, 0xd2, 0x6c, 0x68, 0x2f, 0x38, 0x11, 0x80, 0xb2, 0xf0, 0xb4, 0xc3, /* Byte value: 0x7e */ + 0x09, 0xae, 0xd1, 0xdd, 0x95, 0xa0, 0x32, 0x25, 0x1e, 0xca, 0x29, 0xa5, 0xab, 0xf2, 0x56, 0x89, /* Byte value: 0x7f */ + 0xc7, 0x19, 0x1d, 0x6e, 0x99, 0x22, 0x47, 0xbe, 0x3e, 0x97, 0xcd, 0x96, 0xad, 0x7b, 0xc9, 0xef, /* Byte value: 0x80 */ + 0x46, 0x4c, 0xd9, 0xb1, 0xc4, 0x89, 0xce, 0x6c, 0xe4, 0x13, 0x04, 0xa7, 0x62, 0x81, 0x91, 0x8d, /* Byte value: 0x81 */ + 0x0d, 0xcd, 0xb5, 0x7e, 0x16, 0x7a, 0xd6, 0x0a, 0xd1, 0xa2, 0xc1, 0xbc, 0x0b, 0x3b, 0xb9, 0xbb, /* Byte value: 0x82 */ + 0x13, 0xf7, 0x78, 0x21, 0xb9, 0x54, 0x5d, 0x31, 0x7f, 0x4d, 0x68, 0x1e, 0xbd, 0x84, 0xe7, 0x3c, /* Byte value: 0x83 */ + 0x61, 0x09, 0x30, 0xab, 0x25, 0xf6, 0x4d, 0x75, 0x59, 0x93, 0xee, 0x0c, 0xf3, 0xe9, 0xd7, 0x18, /* Byte value: 0x84 */ + 0x01, 0x68, 0x19, 0x58, 0x50, 0xd7, 0x39, 0x7b, 0x43, 0x1a, 0x3a, 0x97, 0x28, 0xa3, 0x4b, 0xed, /* Byte value: 0x85 */ + 0xef, 0x41, 0x73, 0xba, 0xce, 0x4a, 0x60, 0x5b, 0xd4, 0x81, 0x92, 0x6c, 0x67, 0xad, 0xa0, 0xd8, /* Byte value: 0x86 */ + 0x8d, 0xf0, 0x68, 0xf9, 0x1b, 0x06, 0x66, 0xa3, 0x48, 0x3c, 0x32, 0x1a, 0xec, 0x62, 0xaa, 0x34, /* Byte value: 0x87 */ + 0xd3, 0x35, 0x2a, 0x04, 0x53, 0x16, 0xb5, 0x2d, 0x4b, 0x9c, 0x03, 0xeb, 0xc8, 0x10, 0x1c, 0x15, /* Byte value: 0x88 */ + 0xbc, 0x49, 0x84, 0x39, 0x90, 0x20, 0x65, 0xdf, 0x06, 0x83, 0x62, 0x21, 0x48, 0xe4, 0xaf, 0x42, /* Byte value: 0x89 */ + 0xf1, 0x7b, 0xbe, 0xe5, 0x61, 0x64, 0xeb, 0x60, 0x7a, 0x6e, 0x3b, 0xce, 0xd1, 0x12, 0xfe, 0x5f, /* Byte value: 0x8a */ + 0x32, 0x01, 0xc7, 0x28, 0x7b, 0x9c, 0x48, 0xf1, 0x8b, 0x91, 0x1e, 0x41, 0xdc, 0xa0, 0xd8, 0x82, /* Byte value: 0x8b */ + 0x5c, 0x15, 0x70, 0x4d, 0xe8, 0x7d, 0xa1, 0x78, 0x85, 0x94, 0x45, 0x1c, 0x74, 0xf7, 0x20, 0x38, /* Byte value: 0x8c */ + 0x02, 0xd0, 0x32, 0xb0, 0xa0, 0x6d, 0x72, 0xf6, 0x86, 0x34, 0x74, 0xed, 0x50, 0x85, 0x96, 0x19, /* Byte value: 0x8d */ + 0xb1, 0x84, 0x31, 0x47, 0x86, 0x5a, 0xb3, 0xd5, 0xd7, 0x21, 0xa3, 0x9d, 0x43, 0xdf, 0x16, 0xf9, /* Byte value: 0x8e */ + 0x56, 0x03, 0x8a, 0x78, 0x8d, 0x67, 0xd8, 0xd0, 0x5e, 0x70, 0x22, 0xc3, 0xa7, 0x23, 0xab, 0x45, /* Byte value: 0x8f */ + 0x51, 0xd8, 0xc5, 0x33, 0xfe, 0x07, 0x77, 0x72, 0x54, 0x36, 0x84, 0xa0, 0x7f, 0xcc, 0x99, 0x83, /* Byte value: 0x90 */ + 0xc9, 0x6c, 0x83, 0xf8, 0x7f, 0xe2, 0xda, 0x39, 0x2a, 0x1b, 0x42, 0x50, 0xde, 0x66, 0xad, 0xa0, /* Byte value: 0x91 */ + 0xd6, 0x3e, 0x57, 0xff, 0x80, 0x1b, 0x68, 0x79, 0xc7, 0xee, 0xd1, 0x65, 0x40, 0x7a, 0xb8, 0xca, /* Byte value: 0x92 */ + 0x4e, 0x8a, 0x11, 0x34, 0x01, 0xfe, 0xc5, 0x32, 0xb9, 0xc3, 0x17, 0x95, 0xe1, 0xd0, 0x8c, 0xe9, /* Byte value: 0x93 */ + 0xc8, 0x04, 0x9a, 0xa0, 0x2f, 0x35, 0xe3, 0x42, 0x69, 0x01, 0x78, 0xc7, 0xf6, 0xc5, 0xe6, 0x4d, /* Byte value: 0x94 */ + 0xa0, 0xa3, 0x7b, 0xd6, 0x9f, 0x63, 0x9c, 0x12, 0x2e, 0x58, 0xbf, 0x6e, 0xae, 0xde, 0x67, 0xdc, /* Byte value: 0x95 */ + 0x31, 0xb9, 0xec, 0xc0, 0x8b, 0x26, 0x03, 0x7c, 0x4e, 0xbf, 0x50, 0x3b, 0xa4, 0x86, 0x05, 0x76, /* Byte value: 0x96 */ + 0x22, 0x4e, 0x94, 0xe1, 0x32, 0x72, 0x5e, 
0x4d, 0x31, 0xf2, 0x38, 0x25, 0x19, 0x02, 0xe2, 0x4a, /* Byte value: 0x97 */ + 0xa5, 0xa8, 0x06, 0x2d, 0x4c, 0x6e, 0x41, 0x46, 0xa2, 0x2a, 0x6d, 0xe0, 0x26, 0xb4, 0xc3, 0x03, /* Byte value: 0x98 */ + 0x17, 0x94, 0x1c, 0x82, 0x3a, 0x8e, 0xb9, 0x1e, 0xb0, 0x25, 0x80, 0x07, 0x1d, 0x4d, 0x08, 0x0e, /* Byte value: 0x99 */ + 0x33, 0x69, 0xde, 0x70, 0x2b, 0x4b, 0x71, 0x8a, 0xc8, 0x8b, 0x24, 0xd6, 0xf4, 0x03, 0x93, 0x6f, /* Byte value: 0x9a */ + 0x94, 0x11, 0xea, 0xed, 0xc7, 0x48, 0x42, 0x3a, 0xec, 0x95, 0x3d, 0xdb, 0x82, 0x32, 0xc6, 0x75, /* Byte value: 0x9b */ + 0xeb, 0x22, 0x17, 0x19, 0x4d, 0x90, 0x84, 0x74, 0x1b, 0xe9, 0x7a, 0x75, 0xc7, 0x64, 0x4f, 0xea, /* Byte value: 0x9c */ + 0x84, 0x5e, 0xb9, 0x24, 0x8e, 0xa6, 0x54, 0x86, 0x56, 0xf6, 0x1b, 0xbf, 0x47, 0x90, 0xfc, 0xbd, /* Byte value: 0x9d */ + 0x08, 0xc6, 0xc8, 0x85, 0xc5, 0x77, 0x0b, 0x5e, 0x5d, 0xd0, 0x13, 0x32, 0x83, 0x51, 0x1d, 0x64, /* Byte value: 0x9e */ + 0x25, 0x95, 0xdb, 0xaa, 0x41, 0x12, 0xf1, 0xef, 0x3b, 0xb4, 0x9e, 0x46, 0xc1, 0xed, 0xd0, 0x8c, /* Byte value: 0x9f */ + 0xf9, 0xbd, 0x76, 0x60, 0xa4, 0x13, 0xe0, 0x3e, 0x27, 0xbe, 0x28, 0xfc, 0x52, 0x43, 0xe3, 0x3b, /* Byte value: 0xa0 */ + 0x39, 0x7f, 0x24, 0x45, 0x4e, 0x51, 0x08, 0x22, 0x13, 0x6f, 0x43, 0x09, 0x27, 0xd7, 0x18, 0x12, /* Byte value: 0xa1 */ + 0x60, 0x61, 0x29, 0xf3, 0x75, 0x21, 0x74, 0x0e, 0x1a, 0x89, 0xd4, 0x9b, 0xdb, 0x4a, 0x9c, 0xf5, /* Byte value: 0xa2 */ + 0xb2, 0x3c, 0x1a, 0xaf, 0x76, 0xe0, 0xf8, 0x58, 0x12, 0x0f, 0xed, 0xe7, 0x3b, 0xf9, 0xcb, 0x0d, /* Byte value: 0xa3 */ + 0xd4, 0xee, 0x65, 0x4f, 0x20, 0x76, 0x1a, 0x8f, 0x41, 0xda, 0xa5, 0x88, 0x10, 0xff, 0x2e, 0xd3, /* Byte value: 0xa4 */ + 0x6f, 0x7c, 0xae, 0x3d, 0xc3, 0x36, 0xd0, 0xf2, 0x4d, 0x1f, 0x61, 0xca, 0x80, 0xf4, 0xb3, 0x57, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x6d, 0xac, 0x9c, 0x8d, 0x63, 0x5b, 0xa2, 0x04, 0xcb, 0x2b, 0x15, 0x27, 0xd0, 0x71, 0x25, 0x4e, /* Byte value: 0xa7 */ + 0xa1, 0xcb, 0x62, 0x8e, 0xcf, 0xb4, 0xa5, 0x69, 0x6d, 0x42, 0x85, 0xf9, 0x86, 0x7d, 0x2c, 0x31, /* Byte value: 0xa8 */ + 0x12, 0x9f, 0x61, 0x79, 0xe9, 0x83, 0x64, 0x4a, 0x3c, 0x57, 0x52, 0x89, 0x95, 0x27, 0xac, 0xd1, /* Byte value: 0xa9 */ + 0xd8, 0x4b, 0xc9, 0x69, 0x66, 0xdb, 0xf5, 0xfe, 0xd3, 0x62, 0x5e, 0xa3, 0x33, 0x67, 0xdc, 0x85, /* Byte value: 0xaa */ + 0xa3, 0x1b, 0x50, 0x3e, 0x6f, 0xd9, 0xd7, 0x9f, 0xeb, 0x76, 0xf1, 0x14, 0xd6, 0xf8, 0xba, 0x28, /* Byte value: 0xab */ + 0xa4, 0xc0, 0x1f, 0x75, 0x1c, 0xb9, 0x78, 0x3d, 0xe1, 0x30, 0x57, 0x77, 0x0e, 0x17, 0x88, 0xee, /* Byte value: 0xac */ + 0xcf, 0xdf, 0xd5, 0xeb, 0x5c, 0x55, 0x4c, 0xe0, 0x63, 0x47, 0xde, 0xa4, 0x2e, 0x2a, 0xd4, 0x8b, /* Byte value: 0xad */ + 0x9a, 0x64, 0x74, 0x7b, 0x21, 0x88, 0xdf, 0xbd, 0xf8, 0x19, 0xb2, 0x1d, 0xf1, 0x2f, 0xa2, 0x3a, /* Byte value: 0xae */ + 0xcc, 0x67, 0xfe, 0x03, 0xac, 0xef, 0x07, 0x6d, 0xa6, 0x69, 0x90, 0xde, 0x56, 0x0c, 0x09, 0x7f, /* Byte value: 0xaf */ + 0x52, 0x60, 0xee, 0xdb, 0x0e, 0xbd, 0x3c, 0xff, 0x91, 0x18, 0xca, 0xda, 0x07, 0xea, 0x44, 0x77, /* Byte value: 0xb0 */ + 0x1c, 0xea, 0xff, 0xef, 0x0f, 0x43, 0xf9, 0xcd, 0x28, 0xdb, 0xdd, 0x4f, 0xe6, 0x3a, 0xc8, 0x9e, /* Byte value: 0xb1 */ + 0x2f, 0x83, 0x21, 0x9f, 0x24, 0x08, 0x88, 0x47, 0xe0, 0x50, 0xf9, 0x99, 0x12, 0x39, 0x5b, 0xf1, /* Byte value: 0xb2 */ + 0x7e, 0x5b, 0xe4, 0xac, 0xda, 0x0f, 0xff, 0x35, 0xb4, 0x66, 0x7d, 0x39, 0x6d, 0xf5, 0xc2, 0x72, /* Byte value: 0xb3 */ + 0x93, 0xca, 0xa5, 0xa6, 0xb4, 0x28, 0xed, 0x98, 0xe6, 0xd3, 0x9b, 0xb8, 0x5a, 0xdd, 
0xf4, 0xb3, /* Byte value: 0xb4 */ + 0x42, 0x2f, 0xbd, 0x12, 0x47, 0x53, 0x2a, 0x43, 0x2b, 0x7b, 0xec, 0xbe, 0xc2, 0x48, 0x7e, 0xbf, /* Byte value: 0xb5 */ + 0xb3, 0x54, 0x03, 0xf7, 0x26, 0x37, 0xc1, 0x23, 0x51, 0x15, 0xd7, 0x70, 0x13, 0x5a, 0x80, 0xe0, /* Byte value: 0xb6 */ + 0x18, 0x89, 0x9b, 0x4c, 0x8c, 0x99, 0x1d, 0xe2, 0xe7, 0xb3, 0x35, 0x56, 0x46, 0xf3, 0x27, 0xac, /* Byte value: 0xb7 */ + 0xb8, 0x2a, 0xe0, 0x9a, 0x13, 0xfa, 0x81, 0xf0, 0xc9, 0xeb, 0x8a, 0x38, 0xe8, 0x2d, 0x40, 0x70, /* Byte value: 0xb8 */ + 0xfc, 0xb6, 0x0b, 0x9b, 0x77, 0x1e, 0x3d, 0x6a, 0xab, 0xcc, 0xfa, 0x72, 0xda, 0x29, 0x47, 0xe4, /* Byte value: 0xb9 */ + 0x44, 0x9c, 0xeb, 0x01, 0x64, 0xe4, 0xbc, 0x9a, 0x62, 0x27, 0x70, 0x4a, 0x32, 0x04, 0x07, 0x94, /* Byte value: 0xba */ + 0xe6, 0xef, 0xa2, 0x67, 0x5b, 0xea, 0x52, 0x7e, 0xca, 0x4b, 0xbb, 0xc9, 0xcc, 0x5f, 0xf6, 0x51, /* Byte value: 0xbb */ + 0x8c, 0x98, 0x71, 0xa1, 0x4b, 0xd1, 0x5f, 0xd8, 0x0b, 0x26, 0x08, 0x8d, 0xc4, 0xc1, 0xe1, 0xd9, /* Byte value: 0xbc */ + 0xc6, 0x71, 0x04, 0x36, 0xc9, 0xf5, 0x7e, 0xc5, 0x7d, 0x8d, 0xf7, 0x01, 0x85, 0xd8, 0x82, 0x02, /* Byte value: 0xbd */ + 0x63, 0xd9, 0x02, 0x1b, 0x85, 0x9b, 0x3f, 0x83, 0xdf, 0xa7, 0x9a, 0xe1, 0xa3, 0x6c, 0x41, 0x01, /* Byte value: 0xbe */ + 0xbf, 0xf1, 0xaf, 0xd1, 0x60, 0x9a, 0x2e, 0x52, 0xc3, 0xad, 0x2c, 0x5b, 0x30, 0xc2, 0x72, 0xb6, /* Byte value: 0xbf */ + 0xf2, 0xc3, 0x95, 0x0d, 0x91, 0xde, 0xa0, 0xed, 0xbf, 0x40, 0x75, 0xb4, 0xa9, 0x34, 0x23, 0xab, /* Byte value: 0xc0 */ + 0x2c, 0x3b, 0x0a, 0x77, 0xd4, 0xb2, 0xc3, 0xca, 0x25, 0x7e, 0xb7, 0xe3, 0x6a, 0x1f, 0x86, 0x05, /* Byte value: 0xc1 */ + 0x11, 0x27, 0x4a, 0x91, 0x19, 0x39, 0x2f, 0xc7, 0xf9, 0x79, 0x1c, 0xf3, 0xed, 0x01, 0x71, 0x25, /* Byte value: 0xc2 */ + 0x73, 0x96, 0x51, 0xd2, 0xcc, 0x75, 0x29, 0x3f, 0x65, 0xc4, 0xbc, 0x85, 0x66, 0xce, 0x7b, 0xc9, /* Byte value: 0xc3 */ + 0xdc, 0x28, 0xad, 0xca, 0xe5, 0x01, 0x11, 0xd1, 0x1c, 0x0a, 0xb6, 0xba, 0x93, 0xae, 0x33, 0xb7, /* Byte value: 0xc4 */ + 0xd9, 0x23, 0xd0, 0x31, 0x36, 0x0c, 0xcc, 0x85, 0x90, 0x78, 0x64, 0x34, 0x1b, 0xc4, 0x97, 0x68, /* Byte value: 0xc5 */ + 0x92, 0xa2, 0xbc, 0xfe, 0xe4, 0xff, 0xd4, 0xe3, 0xa5, 0xc9, 0xa1, 0x2f, 0x72, 0x7e, 0xbf, 0x5e, /* Byte value: 0xc6 */ + 0x3e, 0xa4, 0x6b, 0x0e, 0x3d, 0x31, 0xa7, 0x80, 0x19, 0x29, 0xe5, 0x6a, 0xff, 0x38, 0x2a, 0xd4, /* Byte value: 0xc7 */ + 0xd2, 0x5d, 0x33, 0x5c, 0x03, 0xc1, 0x8c, 0x56, 0x08, 0x86, 0x39, 0x7c, 0xe0, 0xb3, 0x57, 0xf8, /* Byte value: 0xc8 */ + 0x59, 0x1e, 0x0d, 0xb6, 0x3b, 0x70, 0x7c, 0x2c, 0x09, 0xe6, 0x97, 0x92, 0xfc, 0x9d, 0x84, 0xe7, /* Byte value: 0xc9 */ + 0x75, 0x25, 0x07, 0xc1, 0xef, 0xc2, 0xbf, 0xe6, 0x2c, 0x98, 0x20, 0x71, 0x96, 0x82, 0x02, 0xe2, /* Byte value: 0xca */ + 0x19, 0xe1, 0x82, 0x14, 0xdc, 0x4e, 0x24, 0x99, 0xa4, 0xa9, 0x0f, 0xc1, 0x6e, 0x50, 0x6c, 0x41, /* Byte value: 0xcb */ + 0xb4, 0x8f, 0x4c, 0xbc, 0x55, 0x57, 0x6e, 0x81, 0x5b, 0x53, 0x71, 0x13, 0xcb, 0xb5, 0xb2, 0x26, /* Byte value: 0xcc */ + 0x81, 0x55, 0xc4, 0xdf, 0x5d, 0xab, 0x89, 0xd2, 0xda, 0x84, 0xc9, 0x31, 0xcf, 0xfa, 0x58, 0x62, /* Byte value: 0xcd */ + 0xe4, 0x3f, 0x90, 0xd7, 0xfb, 0x87, 0x20, 0x88, 0x4c, 0x7f, 0xcf, 0x24, 0x9c, 0xda, 0x60, 0x48, /* Byte value: 0xce */ + 0xe5, 0x57, 0x89, 0x8f, 0xab, 0x50, 0x19, 0xf3, 0x0f, 0x65, 0xf5, 0xb3, 0xb4, 0x79, 0x2b, 0xa5, /* Byte value: 0xcf */ + 0x0f, 0x1d, 0x87, 0xce, 0xb6, 0x17, 0xa4, 0xfc, 0x57, 0x96, 0xb5, 0x51, 0x5b, 0xbe, 0x2f, 0xa2, /* Byte value: 0xd0 */ + 0x47, 0x24, 0xc0, 0xe9, 0x94, 0x5e, 0xf7, 0x17, 0xa7, 0x09, 0x3e, 0x30, 0x4a, 0x22, 0xda, 0x60, /* Byte value: 0xd1 */ + 0x9c, 
0xd7, 0x22, 0x68, 0x02, 0x3f, 0x49, 0x64, 0xb1, 0x45, 0x2e, 0xe9, 0x01, 0x63, 0xdb, 0x11, /* Byte value: 0xd2 */ + 0x2a, 0x88, 0x5c, 0x64, 0xf7, 0x05, 0x55, 0x13, 0x6c, 0x22, 0x2b, 0x17, 0x9a, 0x53, 0xff, 0x2e, /* Byte value: 0xd3 */ + 0xa8, 0x65, 0xb3, 0x53, 0x5a, 0x14, 0x97, 0x4c, 0x73, 0x88, 0xac, 0x5c, 0x2d, 0x8f, 0x7a, 0xb8, /* Byte value: 0xd4 */ + 0x5b, 0xce, 0x3f, 0x06, 0x9b, 0x1d, 0x0e, 0xda, 0x8f, 0xd2, 0xe3, 0x7f, 0xac, 0x18, 0x12, 0xfe, /* Byte value: 0xd5 */ + 0x8f, 0x20, 0x5a, 0x49, 0xbb, 0x6b, 0x14, 0x55, 0xce, 0x08, 0x46, 0xf7, 0xbc, 0xe7, 0x3c, 0x2d, /* Byte value: 0xd6 */ + 0x04, 0x63, 0x64, 0xa3, 0x83, 0xda, 0xe4, 0x2f, 0xcf, 0x68, 0xe8, 0x19, 0xa0, 0xc9, 0xef, 0x32, /* Byte value: 0xd7 */ + 0xa6, 0x10, 0x2d, 0xc5, 0xbc, 0xd4, 0x0a, 0xcb, 0x67, 0x04, 0x23, 0x9a, 0x5e, 0x92, 0x1e, 0xf7, /* Byte value: 0xd8 */ + 0x72, 0xfe, 0x48, 0x8a, 0x9c, 0xa2, 0x10, 0x44, 0x26, 0xde, 0x86, 0x12, 0x4e, 0x6d, 0x30, 0x24, /* Byte value: 0xd9 */ + 0xba, 0xfa, 0xd2, 0x2a, 0xb3, 0x97, 0xf3, 0x06, 0x4f, 0xdf, 0xfe, 0xd5, 0xb8, 0xa8, 0xd6, 0x69, /* Byte value: 0xda */ + 0x30, 0xd1, 0xf5, 0x98, 0xdb, 0xf1, 0x3a, 0x07, 0x0d, 0xa5, 0x6a, 0xac, 0x8c, 0x25, 0x4e, 0x9b, /* Byte value: 0xdb */ + 0x4c, 0x5a, 0x23, 0x84, 0xa1, 0x93, 0xb7, 0xc4, 0x3f, 0xf7, 0x63, 0x78, 0xb1, 0x55, 0x1a, 0xf0, /* Byte value: 0xdc */ + 0x82, 0xed, 0xef, 0x37, 0xad, 0x11, 0xc2, 0x5f, 0x1f, 0xaa, 0x87, 0x4b, 0xb7, 0xdc, 0x85, 0x96, /* Byte value: 0xdd */ + 0x6a, 0x77, 0xd3, 0xc6, 0x10, 0x3b, 0x0d, 0xa6, 0xc1, 0x6d, 0xb3, 0x44, 0x08, 0x9e, 0x17, 0x88, /* Byte value: 0xde */ + 0x88, 0xfb, 0x15, 0x02, 0xc8, 0x0b, 0xbb, 0xf7, 0xc4, 0x4e, 0xe0, 0x94, 0x64, 0x08, 0x0e, 0xeb, /* Byte value: 0xdf */ + 0x3c, 0x74, 0x59, 0xbe, 0x9d, 0x5c, 0xd5, 0x76, 0x9f, 0x1d, 0x91, 0x87, 0xaf, 0xbd, 0xbc, 0xcd, /* Byte value: 0xe0 */ + 0x6e, 0x14, 0xb7, 0x65, 0x93, 0xe1, 0xe9, 0x89, 0x0e, 0x05, 0x5b, 0x5d, 0xa8, 0x57, 0xf8, 0xba, /* Byte value: 0xe1 */ + 0x0b, 0x7e, 0xe3, 0x6d, 0x35, 0xcd, 0x40, 0xd3, 0x98, 0xfe, 0x5d, 0x48, 0xfb, 0x77, 0xc0, 0x90, /* Byte value: 0xe2 */ + 0x9e, 0x07, 0x10, 0xd8, 0xa2, 0x52, 0x3b, 0x92, 0x37, 0x71, 0x5a, 0x04, 0x51, 0xe6, 0x4d, 0x08, /* Byte value: 0xe3 */ + 0x68, 0xa7, 0xe1, 0x76, 0xb0, 0x56, 0x7f, 0x50, 0x47, 0x59, 0xc7, 0xa9, 0x58, 0x1b, 0x81, 0x91, /* Byte value: 0xe4 */ + 0x67, 0xba, 0x66, 0xb8, 0x06, 0x41, 0xdb, 0xac, 0x10, 0xcf, 0x72, 0xf8, 0x03, 0xa5, 0xae, 0x33, /* Byte value: 0xe5 */ + 0xc5, 0xc9, 0x2f, 0xde, 0x39, 0x4f, 0x35, 0x48, 0xb8, 0xa3, 0xb9, 0x7b, 0xfd, 0xfe, 0x5f, 0xf6, /* Byte value: 0xe6 */ + 0x74, 0x4d, 0x1e, 0x99, 0xbf, 0x15, 0x86, 0x9d, 0x6f, 0x82, 0x1a, 0xe6, 0xbe, 0x21, 0x49, 0x0f, /* Byte value: 0xe7 */ + 0x40, 0xff, 0x8f, 0xa2, 0xe7, 0x3e, 0x58, 0xb5, 0xad, 0x4f, 0x98, 0x53, 0x92, 0xcd, 0xe8, 0xa6, /* Byte value: 0xe8 */ + 0x0c, 0xa5, 0xac, 0x26, 0x46, 0xad, 0xef, 0x71, 0x92, 0xb8, 0xfb, 0x2b, 0x23, 0x98, 0xf2, 0x56, /* Byte value: 0xe9 */ + 0xe7, 0x87, 0xbb, 0x3f, 0x0b, 0x3d, 0x6b, 0x05, 0x89, 0x51, 0x81, 0x5e, 0xe4, 0xfc, 0xbd, 0xbc, /* Byte value: 0xea */ + 0x9d, 0xbf, 0x3b, 0x30, 0x52, 0xe8, 0x70, 0x1f, 0xf2, 0x5f, 0x14, 0x7e, 0x29, 0xc0, 0x90, 0xfc, /* Byte value: 0xeb */ + 0x90, 0x72, 0x8e, 0x4e, 0x44, 0x92, 0xa6, 0x15, 0x23, 0xfd, 0xd5, 0xc2, 0x22, 0xfb, 0x29, 0x47, /* Byte value: 0xec */ + 0x7a, 0x38, 0x80, 0x0f, 0x59, 0xd5, 0x1b, 0x1a, 0x7b, 0x0e, 0x95, 0x20, 0xcd, 0x3c, 0x2d, 0x40, /* Byte value: 0xed */ + 0x3a, 0xc7, 0x0f, 0xad, 0xbe, 0xeb, 0x43, 0xaf, 0xd6, 0x41, 0x0d, 0x73, 0x5f, 0xf1, 0xc5, 0xe6, /* Byte value: 0xee */ + 0x35, 0xda, 0x88, 0x63, 0x08, 0xfc, 0xe7, 0x53, 
0x81, 0xd7, 0xb8, 0x22, 0x04, 0x4f, 0xea, 0x44, /* Byte value: 0xef */ + 0x34, 0xb2, 0x91, 0x3b, 0x58, 0x2b, 0xde, 0x28, 0xc2, 0xcd, 0x82, 0xb5, 0x2c, 0xec, 0xa1, 0xa9, /* Byte value: 0xf0 */ + 0x9f, 0x6f, 0x09, 0x80, 0xf2, 0x85, 0x02, 0xe9, 0x74, 0x6b, 0x60, 0x93, 0x79, 0x45, 0x06, 0xe5, /* Byte value: 0xf1 */ + 0xb7, 0x37, 0x67, 0x54, 0xa5, 0xed, 0x25, 0x0c, 0x9e, 0x7d, 0x3f, 0x69, 0xb3, 0x93, 0x6f, 0xd2, /* Byte value: 0xf2 */ + 0xf8, 0xd5, 0x6f, 0x38, 0xf4, 0xc4, 0xd9, 0x45, 0x64, 0xa4, 0x12, 0x6b, 0x7a, 0xe0, 0xa8, 0xd6, /* Byte value: 0xf3 */ + 0x4f, 0xe2, 0x08, 0x6c, 0x51, 0x29, 0xfc, 0x49, 0xfa, 0xd9, 0x2d, 0x02, 0xc9, 0x73, 0xc7, 0x04, /* Byte value: 0xf4 */ + 0x6b, 0x1f, 0xca, 0x9e, 0x40, 0xec, 0x34, 0xdd, 0x82, 0x77, 0x89, 0xd3, 0x20, 0x3d, 0x5c, 0x65, /* Byte value: 0xf5 */ + 0x83, 0x85, 0xf6, 0x6f, 0xfd, 0xc6, 0xfb, 0x24, 0x5c, 0xb0, 0xbd, 0xdc, 0x9f, 0x7f, 0xce, 0x7b, /* Byte value: 0xf6 */ + 0x5f, 0xad, 0x5b, 0xa5, 0x18, 0xc7, 0xea, 0xf5, 0x40, 0xba, 0x0b, 0x66, 0x0c, 0xd1, 0xfd, 0xcc, /* Byte value: 0xf7 */ + 0xa7, 0x78, 0x34, 0x9d, 0xec, 0x03, 0x33, 0xb0, 0x24, 0x1e, 0x19, 0x0d, 0x76, 0x31, 0x55, 0x1a, /* Byte value: 0xf8 */ + 0x1e, 0x3a, 0xcd, 0x5f, 0xaf, 0x2e, 0x8b, 0x3b, 0xae, 0xef, 0xa9, 0xa2, 0xb6, 0xbf, 0x5e, 0x87, /* Byte value: 0xf9 */ + 0x10, 0x4f, 0x53, 0xc9, 0x49, 0xee, 0x16, 0xbc, 0xba, 0x63, 0x26, 0x64, 0xc5, 0xa2, 0x3a, 0xc8, /* Byte value: 0xfa */ + 0xc3, 0x7a, 0x79, 0xcd, 0x1a, 0xf8, 0xa3, 0x91, 0xf1, 0xff, 0x25, 0x8f, 0x0d, 0xb2, 0x26, 0xdd, /* Byte value: 0xfb */ + 0x78, 0xe8, 0xb2, 0xbf, 0xf9, 0xb8, 0x69, 0xec, 0xfd, 0x3a, 0xe1, 0xcd, 0x9d, 0xb9, 0xbb, 0x59, /* Byte value: 0xfc */ + 0x2d, 0x53, 0x13, 0x2f, 0x84, 0x65, 0xfa, 0xb1, 0x66, 0x64, 0x8d, 0x74, 0x42, 0xbc, 0xcd, 0xe8, /* Byte value: 0xfd */ + 0x77, 0xf5, 0x35, 0x71, 0x4f, 0xaf, 0xcd, 0x10, 0xaa, 0xac, 0x54, 0x9c, 0xc6, 0x07, 0x94, 0xfb, /* Byte value: 0xfe */ + 0xe3, 0xe4, 0xdf, 0x9c, 0x88, 0xe7, 0x8f, 0x2a, 0x46, 0x39, 0x69, 0x47, 0x44, 0x35, 0x52, 0x8e, /* Byte value: 0xff */ + 0x80, 0x3d, 0xdd, 0x87, 0x0d, 0x7c, 0xb0, 0xa9, 0x99, 0x9e, 0xf3, 0xa6, 0xe7, 0x59, 0x13, 0x8f, /* Byte value: 0x00 */ + + /* Matrix row: 6 */ + 0xa2, 0xc8, 0x81, 0xe2, 0x3b, 0xb3, 0xc2, 0x65, 0xc5, 0x23, 0x88, 0xa8, 0x4b, 0xa0, 0x07, 0xfc, /* Byte value: 0x01 */ + 0x7e, 0x5e, 0xba, 0x73, 0x1f, 0x4c, 0x94, 0x8f, 0x72, 0xc4, 0x53, 0xb4, 0x67, 0xfd, 0xe6, 0xee, /* Byte value: 0x02 */ + 0x65, 0xcf, 0x9b, 0x0b, 0x79, 0xd8, 0x3c, 0xc1, 0x2a, 0xfe, 0xa1, 0x96, 0xda, 0xeb, 0x16, 0xdd, /* Byte value: 0x03 */ + 0x09, 0xce, 0x1f, 0x28, 0x22, 0xcd, 0xd9, 0x3a, 0x89, 0x16, 0xef, 0x1e, 0x6b, 0xb3, 0x50, 0x11, /* Byte value: 0x04 */ + 0xb9, 0x59, 0xa0, 0x9a, 0x5d, 0x27, 0x6a, 0x2b, 0x9d, 0x19, 0x7a, 0x8a, 0xf6, 0xb6, 0xf7, 0xcf, /* Byte value: 0x05 */ + 0xaa, 0x23, 0xa2, 0x8f, 0xdc, 0xda, 0x7c, 0x18, 0xa1, 0x08, 0x06, 0xf5, 0x92, 0xb9, 0xdd, 0x6e, /* Byte value: 0x06 */ + 0x3c, 0x40, 0x19, 0x17, 0x62, 0x09, 0xe3, 0x6f, 0xcd, 0x25, 0x6b, 0x9f, 0xc7, 0xa2, 0x2e, 0x31, /* Byte value: 0x07 */ + 0x92, 0xf7, 0x4b, 0x4f, 0x2c, 0x06, 0xc0, 0xa8, 0x5e, 0xd9, 0x2a, 0xa5, 0xd8, 0xf6, 0x9e, 0x16, /* Byte value: 0x08 */ + 0x39, 0xf1, 0xd5, 0x85, 0x35, 0x78, 0xdb, 0xf7, 0x12, 0xec, 0x4d, 0x13, 0xf8, 0xe5, 0xc9, 0xfb, /* Byte value: 0x09 */ + 0xf0, 0xc3, 0x64, 0x5c, 0x4b, 0x24, 0x0a, 0x7f, 0xb2, 0x94, 0x6f, 0x39, 0x9a, 0xcd, 0xb8, 0xc4, /* Byte value: 0x0a */ + 0xcb, 0x78, 0xc9, 0x53, 0x37, 0xd7, 0x1f, 0x06, 0xb9, 0x02, 0xe0, 0xac, 0xc5, 0xbf, 0xa6, 0xfa, /* Byte value: 0x0b */ + 0xfe, 0xf6, 0xcf, 0x6c, 0x77, 0x13, 0x25, 0x53, 0xfd, 0x31, 0x64, 
0x2d, 0x69, 0xae, 0xd8, 0xda, /* Byte value: 0x0c */ + 0xe0, 0xd6, 0x22, 0x86, 0x46, 0xf6, 0xb5, 0x85, 0x7a, 0xc2, 0xb0, 0x83, 0xeb, 0xff, 0xcf, 0x23, /* Byte value: 0x0d */ + 0x02, 0x4a, 0x78, 0x8a, 0x49, 0x8b, 0xce, 0x8e, 0x19, 0x7a, 0xc2, 0x86, 0xa7, 0x97, 0xd7, 0xc5, /* Byte value: 0x0e */ + 0x4e, 0x61, 0x70, 0xde, 0x08, 0xf9, 0x96, 0x42, 0xe9, 0x3e, 0xf1, 0xb9, 0xf4, 0xab, 0x7f, 0x04, /* Byte value: 0x0f */ + 0x4a, 0xf5, 0x80, 0x09, 0x9a, 0x2c, 0xc9, 0x9d, 0xdb, 0xca, 0xb6, 0x76, 0x79, 0x46, 0x12, 0x4d, /* Byte value: 0x10 */ + 0xe5, 0x67, 0xee, 0x14, 0x11, 0x87, 0x8d, 0x1d, 0xa5, 0x0b, 0x96, 0x0f, 0xd4, 0xb8, 0x28, 0xe9, /* Byte value: 0x11 */ + 0x3f, 0x2f, 0x5d, 0xd8, 0xee, 0x26, 0x4a, 0xa6, 0x39, 0x62, 0xc8, 0x5a, 0xd2, 0x9f, 0x73, 0x77, /* Byte value: 0x12 */ + 0x70, 0x6b, 0x11, 0x43, 0x23, 0x7b, 0xbb, 0xa3, 0x3d, 0x61, 0x58, 0xa0, 0x94, 0x9e, 0x86, 0xf0, /* Byte value: 0x13 */ + 0x0c, 0x7f, 0xd3, 0xba, 0x75, 0xbc, 0xe1, 0xa2, 0x56, 0xdf, 0xc9, 0x92, 0x54, 0xf4, 0xb7, 0xdb, /* Byte value: 0x14 */ + 0xfa, 0x62, 0x3f, 0xbb, 0xe5, 0xc6, 0x7a, 0x8c, 0xcf, 0xc5, 0x23, 0xe2, 0xe4, 0x43, 0xb5, 0x93, /* Byte value: 0x15 */ + 0xc0, 0xfc, 0xae, 0xf1, 0x5c, 0x91, 0x08, 0xb2, 0x29, 0x6e, 0xcd, 0x34, 0x09, 0x9b, 0x21, 0x2e, /* Byte value: 0x16 */ + 0x41, 0x71, 0xe7, 0xab, 0xf1, 0x6a, 0xde, 0x29, 0x4b, 0xa6, 0x9b, 0xee, 0xb5, 0x62, 0x95, 0x99, /* Byte value: 0x17 */ + 0xa1, 0xa7, 0xc5, 0x2d, 0xb7, 0x9c, 0x6b, 0xac, 0x31, 0x64, 0x2b, 0x6d, 0x5e, 0x9d, 0x5a, 0xba, /* Byte value: 0x18 */ + 0x60, 0x7e, 0x57, 0x99, 0x2e, 0xa9, 0x04, 0x59, 0xf5, 0x37, 0x87, 0x1a, 0xe5, 0xac, 0xf1, 0x17, /* Byte value: 0x19 */ + 0xa7, 0x79, 0x4d, 0x70, 0x6c, 0xc2, 0xfa, 0xfd, 0x1a, 0xea, 0xae, 0x24, 0x74, 0xe7, 0xe0, 0x36, /* Byte value: 0x1a */ + 0x82, 0xe2, 0x0d, 0x95, 0x21, 0xd4, 0x7f, 0x52, 0x96, 0x8f, 0xf5, 0x1f, 0xa9, 0xc4, 0xe9, 0xf1, /* Byte value: 0x1b */ + 0x53, 0x2e, 0xd9, 0xfb, 0xb5, 0x33, 0xaf, 0x5d, 0x9a, 0x8a, 0x86, 0xd2, 0x63, 0xc7, 0x35, 0xbb, /* Byte value: 0x1c */ + 0xb5, 0x26, 0x73, 0x20, 0x28, 0x9b, 0x8b, 0x89, 0xcb, 0xc6, 0xb3, 0x18, 0xa2, 0x42, 0x40, 0x14, /* Byte value: 0x1d */ + 0x9e, 0x88, 0x98, 0xf5, 0x59, 0xba, 0x21, 0x0a, 0x08, 0x06, 0xe3, 0x37, 0x8c, 0x02, 0x29, 0xcd, /* Byte value: 0x1e */ + 0x96, 0x63, 0xbb, 0x98, 0xbe, 0xd3, 0x9f, 0x77, 0x6c, 0x2d, 0x6d, 0x6a, 0x55, 0x1b, 0xf3, 0x5f, /* Byte value: 0x1f */ + 0x4c, 0x2b, 0x08, 0x54, 0x41, 0x72, 0x58, 0xcc, 0xf0, 0x44, 0x33, 0x3f, 0x53, 0x3c, 0xa8, 0xc1, /* Byte value: 0x20 */ + 0x1e, 0x20, 0xed, 0xea, 0x31, 0xe5, 0x90, 0xd6, 0x87, 0xf3, 0xd4, 0xae, 0x82, 0x51, 0x17, 0xf9, /* Byte value: 0x21 */ + 0x67, 0x85, 0xe3, 0x81, 0x30, 0x53, 0xf2, 0x4f, 0x33, 0x84, 0x63, 0x10, 0x7d, 0x7c, 0xc1, 0x18, /* Byte value: 0x22 */ + 0xe3, 0xb9, 0x66, 0x49, 0xca, 0xd9, 0x1c, 0x4c, 0x8e, 0x85, 0x13, 0x46, 0xfe, 0xc2, 0x92, 0x65, /* Byte value: 0x23 */ + 0x2a, 0x8b, 0xd7, 0x90, 0xb4, 0x85, 0xcd, 0xc4, 0x2e, 0xfd, 0x31, 0x6c, 0x9c, 0xea, 0xe3, 0x5a, /* Byte value: 0x24 */ + 0xac, 0xfd, 0x2a, 0xd2, 0x07, 0x84, 0xed, 0x49, 0x8a, 0x86, 0x83, 0xbc, 0xb8, 0xc3, 0x67, 0xe2, /* Byte value: 0x25 */ + 0x43, 0x3b, 0x9f, 0x21, 0xb8, 0xe1, 0x10, 0xa7, 0x52, 0xdc, 0x59, 0x68, 0x12, 0xf5, 0x42, 0x5c, /* Byte value: 0x26 */ + 0x8c, 0xd7, 0xa6, 0xa5, 0x1d, 0xe3, 0x50, 0x7e, 0xd9, 0x2a, 0xfe, 0x0b, 0x5a, 0xa7, 0x89, 0xef, /* Byte value: 0x27 */ + 0xc7, 0x07, 0x1a, 0xe9, 0x42, 0x6b, 0xfe, 0xa4, 0xef, 0xdd, 0x29, 0x3e, 0x91, 0x4b, 0x11, 0x21, /* Byte value: 0x28 */ + 0x26, 0xf4, 0x04, 0x2a, 0xc1, 0x39, 0x2c, 0x66, 0x78, 0x22, 0xf8, 0xfe, 0xc8, 0x1e, 0x54, 0x81, /* Byte value: 
0x29 */ + 0x29, 0xe4, 0x93, 0x5f, 0x38, 0xaa, 0x64, 0x0d, 0xda, 0xba, 0x92, 0xa9, 0x89, 0xd7, 0xbe, 0x1c, /* Byte value: 0x2a */ + 0x1c, 0x6a, 0x95, 0x60, 0x78, 0x6e, 0x5e, 0x58, 0x9e, 0x89, 0x16, 0x28, 0x25, 0xc6, 0xc0, 0x3c, /* Byte value: 0x2b */ + 0x4d, 0x0e, 0x34, 0x11, 0x84, 0xd6, 0x3f, 0x8b, 0x1d, 0x79, 0x52, 0x7c, 0xe1, 0x96, 0x22, 0x42, /* Byte value: 0x2c */ + 0x9d, 0xe7, 0xdc, 0x3a, 0xd5, 0x95, 0x88, 0xc3, 0xfc, 0x41, 0x40, 0xf2, 0x99, 0x3f, 0x74, 0x8b, /* Byte value: 0x2d */ + 0xf2, 0x89, 0x1c, 0xd6, 0x02, 0xaf, 0xc4, 0xf1, 0xab, 0xee, 0xad, 0xbf, 0x3d, 0x5a, 0x6f, 0x01, /* Byte value: 0x2e */ + 0x21, 0x0f, 0xb0, 0x32, 0xdf, 0xc3, 0xda, 0x70, 0xbe, 0x91, 0x1c, 0xf4, 0x50, 0xce, 0x64, 0x8e, /* Byte value: 0x2f */ + 0x6d, 0x24, 0xb8, 0x66, 0x9e, 0xb1, 0x82, 0xbc, 0x4e, 0xd5, 0x2f, 0xcb, 0x03, 0xf2, 0xcc, 0x4f, /* Byte value: 0x30 */ + 0xbc, 0xe8, 0x6c, 0x08, 0x0a, 0x56, 0x52, 0xb3, 0x42, 0xd0, 0x5c, 0x06, 0xc9, 0xf1, 0x10, 0x05, /* Byte value: 0x31 */ + 0x9a, 0x1c, 0x68, 0x22, 0xcb, 0x6f, 0x7e, 0xd5, 0x3a, 0xf2, 0xa4, 0xf8, 0x01, 0xef, 0x44, 0x84, /* Byte value: 0x32 */ + 0x27, 0xd1, 0x38, 0x6f, 0x04, 0x9d, 0x4b, 0x21, 0x95, 0x1f, 0x99, 0xbd, 0x7a, 0xb4, 0xde, 0x02, /* Byte value: 0x33 */ + 0x14, 0x81, 0xb6, 0x0d, 0x9f, 0x07, 0xe0, 0x25, 0xfa, 0xa2, 0x98, 0x75, 0xfc, 0xdf, 0x1a, 0xae, /* Byte value: 0x34 */ + 0x5e, 0x74, 0x36, 0x04, 0x05, 0x2b, 0x29, 0xb8, 0x21, 0x68, 0x2e, 0x03, 0x85, 0x99, 0x08, 0xe3, /* Byte value: 0x35 */ + 0xe4, 0x42, 0xd2, 0x51, 0xd4, 0x23, 0xea, 0x5a, 0x48, 0x36, 0xf7, 0x4c, 0x66, 0x12, 0xa2, 0x6a, /* Byte value: 0x36 */ + 0xd3, 0x86, 0xac, 0xe4, 0xdd, 0x6c, 0x1e, 0x81, 0x15, 0x7f, 0xb1, 0x4b, 0x6d, 0x94, 0x0b, 0x8f, /* Byte value: 0x37 */ + 0xe1, 0xf3, 0x1e, 0xc3, 0x83, 0x52, 0xd2, 0xc2, 0x97, 0xff, 0xd1, 0xc0, 0x59, 0x55, 0x45, 0xa0, /* Byte value: 0x38 */ + 0x69, 0xb0, 0x48, 0xb1, 0x0c, 0x64, 0xdd, 0x63, 0x7c, 0x21, 0x68, 0x04, 0x8e, 0x1f, 0xa1, 0x06, /* Byte value: 0x39 */ + 0x49, 0x9a, 0xc4, 0xc6, 0x16, 0x03, 0x60, 0x54, 0x2f, 0x8d, 0x15, 0xb3, 0x6c, 0x7b, 0x4f, 0x0b, /* Byte value: 0x3a */ + 0xab, 0x06, 0x9e, 0xca, 0x19, 0x7e, 0x1b, 0x5f, 0x4c, 0x35, 0x67, 0xb6, 0x20, 0x13, 0x57, 0xed, /* Byte value: 0x3b */ + 0xb3, 0xf8, 0xfb, 0x7d, 0xf3, 0xc5, 0x1a, 0xd8, 0xe0, 0x48, 0x36, 0x51, 0x88, 0x38, 0xfa, 0x98, /* Byte value: 0x3c */ + 0xa3, 0xed, 0xbd, 0xa7, 0xfe, 0x17, 0xa5, 0x22, 0x28, 0x1e, 0xe9, 0xeb, 0xf9, 0x0a, 0x8d, 0x7f, /* Byte value: 0x3d */ + 0x0b, 0x84, 0x67, 0xa2, 0x6b, 0x46, 0x17, 0xb4, 0x90, 0x6c, 0x2d, 0x98, 0xcc, 0x24, 0x87, 0xd4, /* Byte value: 0x3e */ + 0x90, 0xbd, 0x33, 0xc5, 0x65, 0x8d, 0x0e, 0x26, 0x47, 0xa3, 0xe8, 0x23, 0x7f, 0x61, 0x49, 0xd3, /* Byte value: 0x3f */ + 0xfc, 0xbc, 0xb7, 0xe6, 0x3e, 0x98, 0xeb, 0xdd, 0xe4, 0x4b, 0xa6, 0xab, 0xce, 0x39, 0x0f, 0x1f, /* Byte value: 0x40 */ + 0xc2, 0xb6, 0xd6, 0x7b, 0x15, 0x1a, 0xc6, 0x3c, 0x30, 0x14, 0x0f, 0xb2, 0xae, 0x0c, 0xf6, 0xeb, /* Byte value: 0x41 */ + 0x80, 0xa8, 0x75, 0x1f, 0x68, 0x5f, 0xb1, 0xdc, 0x8f, 0xf5, 0x37, 0x99, 0x0e, 0x53, 0x3e, 0x34, /* Byte value: 0x42 */ + 0xe7, 0x2d, 0x96, 0x9e, 0x58, 0x0c, 0x43, 0x93, 0xbc, 0x71, 0x54, 0x89, 0x73, 0x2f, 0xff, 0x2c, /* Byte value: 0x43 */ + 0x63, 0x11, 0x13, 0x56, 0xa2, 0x86, 0xad, 0x90, 0x01, 0x70, 0x24, 0xdf, 0xf0, 0x91, 0xac, 0x51, /* Byte value: 0x44 */ + 0x30, 0x3f, 0xca, 0xad, 0x17, 0xb5, 0x02, 0xcd, 0x9b, 0xfa, 0xa2, 0x0d, 0x93, 0x56, 0x99, 0xea, /* Byte value: 0x45 */ + 0x22, 0x60, 0xf4, 0xfd, 0x53, 0xec, 0x73, 0xb9, 0x4a, 0xd6, 0xbf, 0x31, 0x45, 0xf3, 0x39, 0xc8, /* Byte value: 0x46 */ + 0xf6, 0x1d, 0xec, 0x01, 0x90, 0x7a, 
0x9b, 0x2e, 0x99, 0x1a, 0xea, 0x70, 0xb0, 0xb7, 0x02, 0x48, /* Byte value: 0x47 */ + 0xa8, 0x69, 0xda, 0x05, 0x95, 0x51, 0xb2, 0x96, 0xb8, 0x72, 0xc4, 0x73, 0x35, 0x2e, 0x0a, 0xab, /* Byte value: 0x48 */ + 0x57, 0xba, 0x29, 0x2c, 0x27, 0xe6, 0xf0, 0x82, 0xa8, 0x7e, 0xc1, 0x1d, 0xee, 0x2a, 0x58, 0xf2, /* Byte value: 0x49 */ + 0x8e, 0x9d, 0xde, 0x2f, 0x54, 0x68, 0x9e, 0xf0, 0xc0, 0x50, 0x3c, 0x8d, 0xfd, 0x30, 0x5e, 0x2a, /* Byte value: 0x4a */ + 0xc3, 0x93, 0xea, 0x3e, 0xd0, 0xbe, 0xa1, 0x7b, 0xdd, 0x29, 0x6e, 0xf1, 0x1c, 0xa6, 0x7c, 0x68, /* Byte value: 0x4b */ + 0xc6, 0x22, 0x26, 0xac, 0x87, 0xcf, 0x99, 0xe3, 0x02, 0xe0, 0x48, 0x7d, 0x23, 0xe1, 0x9b, 0xa2, /* Byte value: 0x4c */ + 0x50, 0x41, 0x9d, 0x34, 0x39, 0x1c, 0x06, 0x94, 0x6e, 0xcd, 0x25, 0x17, 0x76, 0xfa, 0x68, 0xfd, /* Byte value: 0x4d */ + 0x75, 0xda, 0xdd, 0xd1, 0x74, 0x0a, 0x83, 0x3b, 0xe2, 0xa8, 0x7e, 0x2c, 0xab, 0xd9, 0x61, 0x3a, /* Byte value: 0x4e */ + 0x4b, 0xd0, 0xbc, 0x4c, 0x5f, 0x88, 0xae, 0xda, 0x36, 0xf7, 0xd7, 0x35, 0xcb, 0xec, 0x98, 0xce, /* Byte value: 0x4f */ + 0x6c, 0x01, 0x84, 0x23, 0x5b, 0x15, 0xe5, 0xfb, 0xa3, 0xe8, 0x4e, 0x88, 0xb1, 0x58, 0x46, 0xcc, /* Byte value: 0x50 */ + 0xa6, 0x5c, 0x71, 0x35, 0xa9, 0x66, 0x9d, 0xba, 0xf7, 0xd7, 0xcf, 0x67, 0xc6, 0x4d, 0x6a, 0xb5, /* Byte value: 0x51 */ + 0xa4, 0x16, 0x09, 0xbf, 0xe0, 0xed, 0x53, 0x34, 0xee, 0xad, 0x0d, 0xe1, 0x61, 0xda, 0xbd, 0x70, /* Byte value: 0x52 */ + 0xf5, 0x72, 0xa8, 0xce, 0x1c, 0x55, 0x32, 0xe7, 0x6d, 0x5d, 0x49, 0xb5, 0xa5, 0x8a, 0x5f, 0x0e, /* Byte value: 0x53 */ + 0xf8, 0x28, 0x47, 0x31, 0xac, 0x4d, 0xb4, 0x02, 0xd6, 0xbf, 0xe1, 0x64, 0x43, 0xd4, 0x62, 0x56, /* Byte value: 0x54 */ + 0x9c, 0xc2, 0xe0, 0x7f, 0x10, 0x31, 0xef, 0x84, 0x11, 0x7c, 0x21, 0xb1, 0x2b, 0x95, 0xfe, 0x08, /* Byte value: 0x55 */ + 0xd2, 0xa3, 0x90, 0xa1, 0x18, 0xc8, 0x79, 0xc6, 0xf8, 0x42, 0xd0, 0x08, 0xdf, 0x3e, 0x81, 0x0c, /* Byte value: 0x56 */ + 0xcd, 0xa6, 0x41, 0x0e, 0xec, 0x89, 0x8e, 0x57, 0x92, 0x8c, 0x65, 0xe5, 0xef, 0xc5, 0x1c, 0x76, /* Byte value: 0x57 */ + 0xdc, 0x96, 0x3b, 0x91, 0x24, 0xff, 0x56, 0xea, 0xb7, 0xe7, 0xdb, 0x1c, 0x2c, 0x5d, 0xe1, 0x12, /* Byte value: 0x58 */ + 0x1d, 0x4f, 0xa9, 0x25, 0xbd, 0xca, 0x39, 0x1f, 0x73, 0xb4, 0x77, 0x6b, 0x97, 0x6c, 0x4a, 0xbf, /* Byte value: 0x59 */ + 0x83, 0xc7, 0x31, 0xd0, 0xe4, 0x70, 0x18, 0x15, 0x7b, 0xb2, 0x94, 0x5c, 0x1b, 0x6e, 0x63, 0x72, /* Byte value: 0x5a */ + 0x2e, 0x1f, 0x27, 0x47, 0x26, 0x50, 0x92, 0x1b, 0x1c, 0x09, 0x76, 0xa3, 0x11, 0x07, 0x8e, 0x13, /* Byte value: 0x5b */ + 0xf1, 0xe6, 0x58, 0x19, 0x8e, 0x80, 0x6d, 0x38, 0x5f, 0xa9, 0x0e, 0x7a, 0x28, 0x67, 0x32, 0x47, /* Byte value: 0x5c */ + 0xfd, 0x99, 0x8b, 0xa3, 0xfb, 0x3c, 0x8c, 0x9a, 0x09, 0x76, 0xc7, 0xe8, 0x7c, 0x93, 0x85, 0x9c, /* Byte value: 0x5d */ + 0x81, 0x8d, 0x49, 0x5a, 0xad, 0xfb, 0xd6, 0x9b, 0x62, 0xc8, 0x56, 0xda, 0xbc, 0xf9, 0xb4, 0xb7, /* Byte value: 0x5e */ + 0xb1, 0xb2, 0x83, 0xf7, 0xba, 0x4e, 0xd4, 0x56, 0xf9, 0x32, 0xf4, 0xd7, 0x2f, 0xaf, 0x2d, 0x5d, /* Byte value: 0x5f */ + 0x4f, 0x44, 0x4c, 0x9b, 0xcd, 0x5d, 0xf1, 0x05, 0x04, 0x03, 0x90, 0xfa, 0x46, 0x01, 0xf5, 0x87, /* Byte value: 0x60 */ + 0x47, 0xaf, 0x6f, 0xf6, 0x2a, 0x34, 0x4f, 0x78, 0x60, 0x28, 0x1e, 0xa7, 0x9f, 0x18, 0x2f, 0x15, /* Byte value: 0x61 */ + 0x13, 0x7a, 0x02, 0x15, 0x81, 0xfd, 0x16, 0x33, 0x3c, 0x11, 0x7c, 0x7f, 0x64, 0x0f, 0x2a, 0xa1, /* Byte value: 0x62 */ + 0x46, 0x8a, 0x53, 0xb3, 0xef, 0x90, 0x28, 0x3f, 0x8d, 0x15, 0x7f, 0xe4, 0x2d, 0xb2, 0xa5, 0x96, /* Byte value: 0x63 */ + 0x5b, 0xc5, 0xfa, 0x96, 0x52, 0x5a, 0x11, 0x20, 0xfe, 0xa1, 0x08, 0x8f, 0xba, 
0xde, 0xef, 0x29, /* Byte value: 0x64 */ + 0xfb, 0x47, 0x03, 0xfe, 0x20, 0x62, 0x1d, 0xcb, 0x22, 0xf8, 0x42, 0xa1, 0x56, 0xe9, 0x3f, 0x10, /* Byte value: 0x65 */ + 0xed, 0x8c, 0xcd, 0x79, 0xf6, 0xee, 0x33, 0x60, 0xc1, 0x20, 0x18, 0x52, 0x0d, 0xa1, 0xf2, 0x7b, /* Byte value: 0x66 */ + 0x94, 0x29, 0xc3, 0x12, 0xf7, 0x58, 0x51, 0xf9, 0x75, 0x57, 0xaf, 0xec, 0xf2, 0x8c, 0x24, 0x9a, /* Byte value: 0x67 */ + 0x25, 0x9b, 0x40, 0xe5, 0x4d, 0x16, 0x85, 0xaf, 0x8c, 0x65, 0x5b, 0x3b, 0xdd, 0x23, 0x09, 0xc7, /* Byte value: 0x68 */ + 0xa5, 0x33, 0x35, 0xfa, 0x25, 0x49, 0x34, 0x73, 0x03, 0x90, 0x6c, 0xa2, 0xd3, 0x70, 0x37, 0xf3, /* Byte value: 0x69 */ + 0xdd, 0xb3, 0x07, 0xd4, 0xe1, 0x5b, 0x31, 0xad, 0x5a, 0xda, 0xba, 0x5f, 0x9e, 0xf7, 0x6b, 0x91, /* Byte value: 0x6a */ + 0x38, 0xd4, 0xe9, 0xc0, 0xf0, 0xdc, 0xbc, 0xb0, 0xff, 0xd1, 0x2c, 0x50, 0x4a, 0x4f, 0x43, 0x78, /* Byte value: 0x6b */ + 0x58, 0xaa, 0xbe, 0x59, 0xde, 0x75, 0xb8, 0xe9, 0x0a, 0xe6, 0xab, 0x4a, 0xaf, 0xe3, 0xb2, 0x6f, /* Byte value: 0x6c */ + 0x0f, 0x10, 0x97, 0x75, 0xf9, 0x93, 0x48, 0x6b, 0xa2, 0x98, 0x6a, 0x57, 0x41, 0xc9, 0xea, 0x9d, /* Byte value: 0x6d */ + 0xda, 0x48, 0xb3, 0xcc, 0xff, 0xa1, 0xc7, 0xbb, 0x9c, 0x69, 0x5e, 0x55, 0x06, 0x27, 0x5b, 0x9e, /* Byte value: 0x6e */ + 0x3d, 0x65, 0x25, 0x52, 0xa7, 0xad, 0x84, 0x28, 0x20, 0x18, 0x0a, 0xdc, 0x75, 0x08, 0xa4, 0xb2, /* Byte value: 0x6f */ + 0xe8, 0x3d, 0x01, 0xeb, 0xa1, 0x9f, 0x0b, 0xf8, 0x1e, 0xe9, 0x3e, 0xde, 0x32, 0xe6, 0x15, 0xb1, /* Byte value: 0x70 */ + 0xe9, 0x18, 0x3d, 0xae, 0x64, 0x3b, 0x6c, 0xbf, 0xf3, 0xd4, 0x5f, 0x9d, 0x80, 0x4c, 0x9f, 0x32, /* Byte value: 0x71 */ + 0x18, 0xfe, 0x65, 0xb7, 0xea, 0xbb, 0x01, 0x87, 0xac, 0x7d, 0x51, 0xe7, 0xa8, 0x2b, 0xad, 0x75, /* Byte value: 0x72 */ + 0x95, 0x0c, 0xff, 0x57, 0x32, 0xfc, 0x36, 0xbe, 0x98, 0x6a, 0xce, 0xaf, 0x40, 0x26, 0xae, 0x19, /* Byte value: 0x73 */ + 0xee, 0xe3, 0x89, 0xb6, 0x7a, 0xc1, 0x9a, 0xa9, 0x35, 0x67, 0xbb, 0x97, 0x18, 0x9c, 0xaf, 0x3d, /* Byte value: 0x74 */ + 0x77, 0x90, 0xa5, 0x5b, 0x3d, 0x81, 0x4d, 0xb5, 0xfb, 0xd2, 0xbc, 0xaa, 0x0c, 0x4e, 0xb6, 0xff, /* Byte value: 0x75 */ + 0x72, 0x21, 0x69, 0xc9, 0x6a, 0xf0, 0x75, 0x2d, 0x24, 0x1b, 0x9a, 0x26, 0x33, 0x09, 0x51, 0x35, /* Byte value: 0x76 */ + 0x6f, 0x6e, 0xc0, 0xec, 0xd7, 0x3a, 0x4c, 0x32, 0x57, 0xaf, 0xed, 0x4d, 0xa4, 0x65, 0x1b, 0x8a, /* Byte value: 0x77 */ + 0x51, 0x64, 0xa1, 0x71, 0xfc, 0xb8, 0x61, 0xd3, 0x83, 0xf0, 0x44, 0x54, 0xc4, 0x50, 0xe2, 0x7e, /* Byte value: 0x78 */ + 0x7f, 0x7b, 0x86, 0x36, 0xda, 0xe8, 0xf3, 0xc8, 0x9f, 0xf9, 0x32, 0xf7, 0xd5, 0x57, 0x6c, 0x6d, /* Byte value: 0x79 */ + 0xdf, 0xf9, 0x7f, 0x5e, 0xa8, 0xd0, 0xff, 0x23, 0x43, 0xa0, 0x78, 0xd9, 0x39, 0x60, 0xbc, 0x54, /* Byte value: 0x7a */ + 0xd7, 0x12, 0x5c, 0x33, 0x4f, 0xb9, 0x41, 0x5e, 0x27, 0x8b, 0xf6, 0x84, 0xe0, 0x79, 0x66, 0xc6, /* Byte value: 0x7b */ + 0xd4, 0x7d, 0x18, 0xfc, 0xc3, 0x96, 0xe8, 0x97, 0xd3, 0xcc, 0x55, 0x41, 0xf5, 0x44, 0x3b, 0x80, /* Byte value: 0x7c */ + 0x6b, 0xfa, 0x30, 0x3b, 0x45, 0xef, 0x13, 0xed, 0x65, 0x5b, 0xaa, 0x82, 0x29, 0x88, 0x76, 0xc3, /* Byte value: 0x7d */ + 0x3a, 0x9e, 0x91, 0x4a, 0xb9, 0x57, 0x72, 0x3e, 0xe6, 0xab, 0xee, 0xd6, 0xed, 0xd8, 0x94, 0xbd, /* Byte value: 0x7e */ + 0x20, 0x2a, 0x8c, 0x77, 0x1a, 0x67, 0xbd, 0x37, 0x53, 0xac, 0x7d, 0xb7, 0xe2, 0x64, 0xee, 0x0d, /* Byte value: 0x7f */ + 0x0a, 0xa1, 0x5b, 0xe7, 0xae, 0xe2, 0x70, 0xf3, 0x7d, 0x51, 0x4c, 0xdb, 0x7e, 0x8e, 0x0d, 0x57, /* Byte value: 0x80 */ + 0x42, 0x1e, 0xa3, 0x64, 0x7d, 0x45, 0x77, 0xe0, 0xbf, 0xe1, 0x38, 0x2b, 0xa0, 0x5f, 0xc8, 0xdf, /* Byte value: 0x81 */ + 
0xcc, 0x83, 0x7d, 0x4b, 0x29, 0x2d, 0xe9, 0x10, 0x7f, 0xb1, 0x04, 0xa6, 0x5d, 0x6f, 0x96, 0xf5, /* Byte value: 0x82 */ + 0x7b, 0xef, 0x76, 0xe1, 0x48, 0x3d, 0xac, 0x17, 0xad, 0x0d, 0x75, 0x38, 0x58, 0xba, 0x01, 0x24, /* Byte value: 0x83 */ + 0x8f, 0xb8, 0xe2, 0x6a, 0x91, 0xcc, 0xf9, 0xb7, 0x2d, 0x6d, 0x5d, 0xce, 0x4f, 0x9a, 0xd4, 0xa9, /* Byte value: 0x84 */ + 0x3b, 0xbb, 0xad, 0x0f, 0x7c, 0xf3, 0x15, 0x79, 0x0b, 0x96, 0x8f, 0x95, 0x5f, 0x72, 0x1e, 0x3e, /* Byte value: 0x85 */ + 0x7d, 0x31, 0xfe, 0xbc, 0x93, 0x63, 0x3d, 0x46, 0x86, 0x83, 0xf0, 0x71, 0x72, 0xc0, 0xbb, 0xa8, /* Byte value: 0x86 */ + 0xbf, 0x87, 0x28, 0xc7, 0x86, 0x79, 0xfb, 0x7a, 0xb6, 0x97, 0xff, 0xc3, 0xdc, 0xcc, 0x4d, 0x43, /* Byte value: 0x87 */ + 0xd0, 0xe9, 0xe8, 0x2b, 0x51, 0x43, 0xb7, 0x48, 0xe1, 0x38, 0x12, 0x8e, 0x78, 0xa9, 0x56, 0xc9, /* Byte value: 0x88 */ + 0xde, 0xdc, 0x43, 0x1b, 0x6d, 0x74, 0x98, 0x64, 0xae, 0x9d, 0x19, 0x9a, 0x8b, 0xca, 0x36, 0xd7, /* Byte value: 0x89 */ + 0xca, 0x5d, 0xf5, 0x16, 0xf2, 0x73, 0x78, 0x41, 0x54, 0x3f, 0x81, 0xef, 0x77, 0x15, 0x2c, 0x79, /* Byte value: 0x8a */ + 0x2c, 0x55, 0x5f, 0xcd, 0x6f, 0xdb, 0x5c, 0x95, 0x05, 0x73, 0xb4, 0x25, 0xb6, 0x90, 0x59, 0xd6, /* Byte value: 0x8b */ + 0x19, 0xdb, 0x59, 0xf2, 0x2f, 0x1f, 0x66, 0xc0, 0x41, 0x40, 0x30, 0xa4, 0x1a, 0x81, 0x27, 0xf6, /* Byte value: 0x8c */ + 0x76, 0xb5, 0x99, 0x1e, 0xf8, 0x25, 0x2a, 0xf2, 0x16, 0xef, 0xdd, 0xe9, 0xbe, 0xe4, 0x3c, 0x7c, /* Byte value: 0x8d */ + 0x12, 0x5f, 0x3e, 0x50, 0x44, 0x59, 0x71, 0x74, 0xd1, 0x2c, 0x1d, 0x3c, 0xd6, 0xa5, 0xa0, 0x22, /* Byte value: 0x8e */ + 0x74, 0xff, 0xe1, 0x94, 0xb1, 0xae, 0xe4, 0x7c, 0x0f, 0x95, 0x1f, 0x6f, 0x19, 0x73, 0xeb, 0xb9, /* Byte value: 0x8f */ + 0xd5, 0x58, 0x24, 0xb9, 0x06, 0x32, 0x8f, 0xd0, 0x3e, 0xf1, 0x34, 0x02, 0x47, 0xee, 0xb1, 0x03, /* Byte value: 0x90 */ + 0x8b, 0x2c, 0x12, 0xbd, 0x03, 0x19, 0xa6, 0x68, 0x1f, 0x99, 0x1a, 0x01, 0xc2, 0x77, 0xb9, 0xe0, /* Byte value: 0x91 */ + 0x07, 0xfb, 0xb4, 0x18, 0x1e, 0xfa, 0xf6, 0x16, 0xc6, 0xb3, 0xe4, 0x0a, 0x98, 0xd0, 0x30, 0x0f, /* Byte value: 0x92 */ + 0x59, 0x8f, 0x82, 0x1c, 0x1b, 0xd1, 0xdf, 0xae, 0xe7, 0xdb, 0xca, 0x09, 0x1d, 0x49, 0x38, 0xec, /* Byte value: 0x93 */ + 0xb0, 0x97, 0xbf, 0xb2, 0x7f, 0xea, 0xb3, 0x11, 0x14, 0x0f, 0x95, 0x94, 0x9d, 0x05, 0xa7, 0xde, /* Byte value: 0x94 */ + 0x1f, 0x05, 0xd1, 0xaf, 0xf4, 0x41, 0xf7, 0x91, 0x6a, 0xce, 0xb5, 0xed, 0x30, 0xfb, 0x9d, 0x7a, /* Byte value: 0x95 */ + 0x61, 0x5b, 0x6b, 0xdc, 0xeb, 0x0d, 0x63, 0x1e, 0x18, 0x0a, 0xe6, 0x59, 0x57, 0x06, 0x7b, 0x94, /* Byte value: 0x96 */ + 0x1a, 0xb4, 0x1d, 0x3d, 0xa3, 0x30, 0xcf, 0x09, 0xb5, 0x07, 0x93, 0x61, 0x0f, 0xbc, 0x7a, 0xb0, /* Byte value: 0x97 */ + 0xc8, 0x17, 0x8d, 0x9c, 0xbb, 0xf8, 0xb6, 0xcf, 0x4d, 0x45, 0x43, 0x69, 0xd0, 0x82, 0xfb, 0xbc, /* Byte value: 0x98 */ + 0x97, 0x46, 0x87, 0xdd, 0x7b, 0x77, 0xf8, 0x30, 0x81, 0x10, 0x0c, 0x29, 0xe7, 0xb1, 0x79, 0xdc, /* Byte value: 0x99 */ + 0x17, 0xee, 0xf2, 0xc2, 0x13, 0x28, 0x49, 0xec, 0x0e, 0xe5, 0x3b, 0xb0, 0xe9, 0xe2, 0x47, 0xe8, /* Byte value: 0x9a */ + 0xa9, 0x4c, 0xe6, 0x40, 0x50, 0xf5, 0xd5, 0xd1, 0x55, 0x4f, 0xa5, 0x30, 0x87, 0x84, 0x80, 0x28, /* Byte value: 0x9b */ + 0x91, 0x98, 0x0f, 0x80, 0xa0, 0x29, 0x69, 0x61, 0xaa, 0x9e, 0x89, 0x60, 0xcd, 0xcb, 0xc3, 0x50, /* Byte value: 0x9c */ + 0x9f, 0xad, 0xa4, 0xb0, 0x9c, 0x1e, 0x46, 0x4d, 0xe5, 0x3b, 0x82, 0x74, 0x3e, 0xa8, 0xa3, 0x4e, /* Byte value: 0x9d */ + 0x1b, 0x91, 0x21, 0x78, 0x66, 0x94, 0xa8, 0x4e, 0x58, 0x3a, 0xf2, 0x22, 0xbd, 0x16, 0xf0, 0x33, /* Byte value: 0x9e */ + 0xbb, 0x13, 0xd8, 0x10, 0x14, 0xac, 0xa4, 
0xa5, 0x84, 0x63, 0xb8, 0x0c, 0x51, 0x21, 0x20, 0x0a, /* Byte value: 0x9f */ + 0xd1, 0xcc, 0xd4, 0x6e, 0x94, 0xe7, 0xd0, 0x0f, 0x0c, 0x05, 0x73, 0xcd, 0xca, 0x03, 0xdc, 0x4a, /* Byte value: 0xa0 */ + 0x7a, 0xca, 0x4a, 0xa4, 0x8d, 0x99, 0xcb, 0x50, 0x40, 0x30, 0x14, 0x7b, 0xea, 0x10, 0x8b, 0xa7, /* Byte value: 0xa1 */ + 0xb4, 0x03, 0x4f, 0x65, 0xed, 0x3f, 0xec, 0xce, 0x26, 0xfb, 0xd2, 0x5b, 0x10, 0xe8, 0xca, 0x97, /* Byte value: 0xa2 */ + 0x5f, 0x51, 0x0a, 0x41, 0xc0, 0x8f, 0x4e, 0xff, 0xcc, 0x55, 0x4f, 0x40, 0x37, 0x33, 0x82, 0x60, /* Byte value: 0xa3 */ + 0x71, 0x4e, 0x2d, 0x06, 0xe6, 0xdf, 0xdc, 0xe4, 0xd0, 0x5c, 0x39, 0xe3, 0x26, 0x34, 0x0c, 0x73, /* Byte value: 0xa4 */ + 0x0e, 0x35, 0xab, 0x30, 0x3c, 0x37, 0x2f, 0x2c, 0x4f, 0xa5, 0x0b, 0x14, 0xf3, 0x63, 0x60, 0x1e, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x78, 0x80, 0x32, 0x2e, 0xc4, 0x12, 0x05, 0xde, 0x59, 0x4a, 0xd6, 0xfd, 0x4d, 0x87, 0x5c, 0x62, /* Byte value: 0xa7 */ + 0x24, 0xbe, 0x7c, 0xa0, 0x88, 0xb2, 0xe2, 0xe8, 0x61, 0x58, 0x3a, 0x78, 0x6f, 0x89, 0x83, 0x44, /* Byte value: 0xa8 */ + 0x40, 0x54, 0xdb, 0xee, 0x34, 0xce, 0xb9, 0x6e, 0xa6, 0x9b, 0xfa, 0xad, 0x07, 0xc8, 0x1f, 0x1a, /* Byte value: 0xa9 */ + 0x86, 0x76, 0xfd, 0x42, 0xb3, 0x01, 0x20, 0x8d, 0xa4, 0x7b, 0xb2, 0xd0, 0x24, 0x29, 0x84, 0xb8, /* Byte value: 0xaa */ + 0x52, 0x0b, 0xe5, 0xbe, 0x70, 0x97, 0xc8, 0x1a, 0x77, 0xb7, 0xe7, 0x91, 0xd1, 0x6d, 0xbf, 0x38, /* Byte value: 0xab */ + 0xf3, 0xac, 0x20, 0x93, 0xc7, 0x0b, 0xa3, 0xb6, 0x46, 0xd3, 0xcc, 0xfc, 0x8f, 0xf0, 0xe5, 0x82, /* Byte value: 0xac */ + 0x11, 0x30, 0x7a, 0x9f, 0xc8, 0x76, 0xd8, 0xbd, 0x25, 0x6b, 0xbe, 0xf9, 0xc3, 0x98, 0xfd, 0x64, /* Byte value: 0xad */ + 0x28, 0xc1, 0xaf, 0x1a, 0xfd, 0x0e, 0x03, 0x4a, 0x37, 0x87, 0xf3, 0xea, 0x3b, 0x7d, 0x34, 0x9f, /* Byte value: 0xae */ + 0x5c, 0x3e, 0x4e, 0x8e, 0x4c, 0xa0, 0xe7, 0x36, 0x38, 0x12, 0xec, 0x85, 0x22, 0x0e, 0xdf, 0x26, /* Byte value: 0xaf */ + 0x98, 0x56, 0x10, 0xa8, 0x82, 0xe4, 0xb0, 0x5b, 0x23, 0x88, 0x66, 0x7e, 0xa6, 0x78, 0x93, 0x41, /* Byte value: 0xb0 */ + 0xc1, 0xd9, 0x92, 0xb4, 0x99, 0x35, 0x6f, 0xf5, 0xc4, 0x53, 0xac, 0x77, 0xbb, 0x31, 0xab, 0xad, /* Byte value: 0xb1 */ + 0xd6, 0x37, 0x60, 0x76, 0x8a, 0x1d, 0x26, 0x19, 0xca, 0xb6, 0x97, 0xc7, 0x52, 0xd3, 0xec, 0x45, /* Byte value: 0xb2 */ + 0x03, 0x6f, 0x44, 0xcf, 0x8c, 0x2f, 0xa9, 0xc9, 0xf4, 0x47, 0xa3, 0xc5, 0x15, 0x3d, 0x5d, 0x46, /* Byte value: 0xb3 */ + 0x08, 0xeb, 0x23, 0x6d, 0xe7, 0x69, 0xbe, 0x7d, 0x64, 0x2b, 0x8e, 0x5d, 0xd9, 0x19, 0xda, 0x92, /* Byte value: 0xb4 */ + 0xae, 0xb7, 0x52, 0x58, 0x4e, 0x0f, 0x23, 0xc7, 0x93, 0xfc, 0x41, 0x3a, 0x1f, 0x54, 0xb0, 0x27, /* Byte value: 0xb5 */ + 0x64, 0xea, 0xa7, 0x4e, 0xbc, 0x7c, 0x5b, 0x86, 0xc7, 0xc3, 0xc0, 0xd5, 0x68, 0x41, 0x9c, 0x5e, /* Byte value: 0xb6 */ + 0x2d, 0x70, 0x63, 0x88, 0xaa, 0x7f, 0x3b, 0xd2, 0xe8, 0x4e, 0xd5, 0x66, 0x04, 0x3a, 0xd3, 0x55, /* Byte value: 0xb7 */ + 0x32, 0x75, 0xb2, 0x27, 0x5e, 0x3e, 0xcc, 0x43, 0x82, 0x80, 0x60, 0x8b, 0x34, 0xc1, 0x4e, 0x2f, /* Byte value: 0xb8 */ + 0x06, 0xde, 0x88, 0x5d, 0xdb, 0x5e, 0x91, 0x51, 0x2b, 0x8e, 0x85, 0x49, 0x2a, 0x7a, 0xba, 0x8c, /* Byte value: 0xb9 */ + 0x34, 0xab, 0x3a, 0x7a, 0x85, 0x60, 0x5d, 0x12, 0xa9, 0x0e, 0xe5, 0xc2, 0x1e, 0xbb, 0xf4, 0xa3, /* Byte value: 0xba */ + 0x5d, 0x1b, 0x72, 0xcb, 0x89, 0x04, 0x80, 0x71, 0xd5, 0x2f, 0x8d, 0xc6, 0x90, 0xa4, 0x55, 0xa5, /* Byte value: 0xbb */ + 0x84, 0x3c, 0x85, 0xc8, 0xfa, 0x8a, 0xee, 0x03, 0xbd, 0x01, 0x70, 0x56, 0x83, 0xbe, 
0x53, 0x7d, /* Byte value: 0xbc */ + 0x31, 0x1a, 0xf6, 0xe8, 0xd2, 0x11, 0x65, 0x8a, 0x76, 0xc7, 0xc3, 0x4e, 0x21, 0xfc, 0x13, 0x69, /* Byte value: 0xbd */ + 0xf9, 0x0d, 0x7b, 0x74, 0x69, 0xe9, 0xd3, 0x45, 0x3b, 0x82, 0x80, 0x27, 0xf1, 0x7e, 0xe8, 0xd5, /* Byte value: 0xbe */ + 0x93, 0xd2, 0x77, 0x0a, 0xe9, 0xa2, 0xa7, 0xef, 0xb3, 0xe4, 0x4b, 0xe6, 0x6a, 0x5c, 0x14, 0x95, /* Byte value: 0xbf */ + 0x87, 0x53, 0xc1, 0x07, 0x76, 0xa5, 0x47, 0xca, 0x49, 0x46, 0xd3, 0x93, 0x96, 0x83, 0x0e, 0x3b, /* Byte value: 0xc0 */ + 0x9b, 0x39, 0x54, 0x67, 0x0e, 0xcb, 0x19, 0x92, 0xd7, 0xcf, 0xc5, 0xbb, 0xb3, 0x45, 0xce, 0x07, /* Byte value: 0xc1 */ + 0x0d, 0x5a, 0xef, 0xff, 0xb0, 0x18, 0x86, 0xe5, 0xbb, 0xe2, 0xa8, 0xd1, 0xe6, 0x5e, 0x3d, 0x58, /* Byte value: 0xc2 */ + 0xcf, 0xec, 0x39, 0x84, 0xa5, 0x02, 0x40, 0xd9, 0x8b, 0xf6, 0xa7, 0x63, 0x48, 0x52, 0xcb, 0xb3, /* Byte value: 0xc3 */ + 0x6a, 0xdf, 0x0c, 0x7e, 0x80, 0x4b, 0x74, 0xaa, 0x88, 0x66, 0xcb, 0xc1, 0x9b, 0x22, 0xfc, 0x40, /* Byte value: 0xc4 */ + 0xbd, 0xcd, 0x50, 0x4d, 0xcf, 0xf2, 0x35, 0xf4, 0xaf, 0xed, 0x3d, 0x45, 0x7b, 0x5b, 0x9a, 0x86, /* Byte value: 0xc5 */ + 0x33, 0x50, 0x8e, 0x62, 0x9b, 0x9a, 0xab, 0x04, 0x6f, 0xbd, 0x01, 0xc8, 0x86, 0x6b, 0xc4, 0xac, /* Byte value: 0xc6 */ + 0xdb, 0x6d, 0x8f, 0x89, 0x3a, 0x05, 0xa0, 0xfc, 0x71, 0x54, 0x3f, 0x16, 0xb4, 0x8d, 0xd1, 0x1d, /* Byte value: 0xc7 */ + 0xeb, 0x52, 0x45, 0x24, 0x2d, 0xb0, 0xa2, 0x31, 0xea, 0xae, 0x9d, 0x1b, 0x27, 0xdb, 0x48, 0xf7, /* Byte value: 0xc8 */ + 0xce, 0xc9, 0x05, 0xc1, 0x60, 0xa6, 0x27, 0x9e, 0x66, 0xcb, 0xc6, 0x20, 0xfa, 0xf8, 0x41, 0x30, /* Byte value: 0xc9 */ + 0x55, 0xf0, 0x51, 0xa6, 0x6e, 0x6d, 0x3e, 0x0c, 0xb1, 0x04, 0x03, 0x9b, 0x49, 0xbd, 0x8f, 0x37, /* Byte value: 0xca */ + 0x16, 0xcb, 0xce, 0x87, 0xd6, 0x8c, 0x2e, 0xab, 0xe3, 0xd8, 0x5a, 0xf3, 0x5b, 0x48, 0xcd, 0x6b, /* Byte value: 0xcb */ + 0xc5, 0x4d, 0x62, 0x63, 0x0b, 0xe0, 0x30, 0x2a, 0xf6, 0xa7, 0xeb, 0xb8, 0x36, 0xdc, 0xc6, 0xe4, /* Byte value: 0xcc */ + 0x48, 0xbf, 0xf8, 0x83, 0xd3, 0xa7, 0x07, 0x13, 0xc2, 0xb0, 0x74, 0xf0, 0xde, 0xd1, 0xc5, 0x88, /* Byte value: 0xcd */ + 0x2b, 0xae, 0xeb, 0xd5, 0x71, 0x21, 0xaa, 0x83, 0xc3, 0xc0, 0x50, 0x2f, 0x2e, 0x40, 0x69, 0xd9, /* Byte value: 0xce */ + 0x10, 0x15, 0x46, 0xda, 0x0d, 0xd2, 0xbf, 0xfa, 0xc8, 0x56, 0xdf, 0xba, 0x71, 0x32, 0x77, 0xe7, /* Byte value: 0xcf */ + 0xba, 0x36, 0xe4, 0x55, 0xd1, 0x08, 0xc3, 0xe2, 0x69, 0x5e, 0xd9, 0x4f, 0xe3, 0x8b, 0xaa, 0x89, /* Byte value: 0xd0 */ + 0x79, 0xa5, 0x0e, 0x6b, 0x01, 0xb6, 0x62, 0x99, 0xb4, 0x77, 0xb7, 0xbe, 0xff, 0x2d, 0xd6, 0xe1, /* Byte value: 0xd1 */ + 0xb2, 0xdd, 0xc7, 0x38, 0x36, 0x61, 0x7d, 0x9f, 0x0d, 0x75, 0x57, 0x12, 0x3a, 0x92, 0x70, 0x1b, /* Byte value: 0xd2 */ + 0x01, 0x25, 0x3c, 0x45, 0xc5, 0xa4, 0x67, 0x47, 0xed, 0x3d, 0x61, 0x43, 0xb2, 0xaa, 0x8a, 0x83, /* Byte value: 0xd3 */ + 0x04, 0x94, 0xf0, 0xd7, 0x92, 0xd5, 0x5f, 0xdf, 0x32, 0xf4, 0x47, 0xcf, 0x8d, 0xed, 0x6d, 0x49, /* Byte value: 0xd4 */ + 0xb8, 0x7c, 0x9c, 0xdf, 0x98, 0x83, 0x0d, 0x6c, 0x70, 0x24, 0x1b, 0xc9, 0x44, 0x1c, 0x7d, 0x4c, /* Byte value: 0xd5 */ + 0xc9, 0x32, 0xb1, 0xd9, 0x7e, 0x5c, 0xd1, 0x88, 0xa0, 0x78, 0x22, 0x2a, 0x62, 0x28, 0x71, 0x3f, /* Byte value: 0xd6 */ + 0xec, 0xa9, 0xf1, 0x3c, 0x33, 0x4a, 0x54, 0x27, 0x2c, 0x1d, 0x79, 0x11, 0xbf, 0x0b, 0x78, 0xf8, /* Byte value: 0xd7 */ + 0x85, 0x19, 0xb9, 0x8d, 0x3f, 0x2e, 0x89, 0x44, 0x50, 0x3c, 0x11, 0x15, 0x31, 0x14, 0xd9, 0xfe, /* Byte value: 0xd8 */ + 0xf4, 0x57, 0x94, 0x8b, 0xd9, 0xf1, 0x55, 0xa0, 0x80, 0x60, 0x28, 0xf6, 0x17, 0x20, 0xd5, 0x8d, /* Byte value: 0xd9 */ + 0x44, 
0xc0, 0x2b, 0x39, 0xa6, 0x1b, 0xe6, 0xb1, 0x94, 0x6f, 0xbd, 0x62, 0x8a, 0x25, 0x72, 0x53, /* Byte value: 0xda */ + 0x5a, 0xe0, 0xc6, 0xd3, 0x97, 0xfe, 0x76, 0x67, 0x13, 0x9c, 0x69, 0xcc, 0x08, 0x74, 0x65, 0xaa, /* Byte value: 0xdb */ + 0x2f, 0x3a, 0x1b, 0x02, 0xe3, 0xf4, 0xf5, 0x5c, 0xf1, 0x34, 0x17, 0xe0, 0xa3, 0xad, 0x04, 0x90, /* Byte value: 0xdc */ + 0x05, 0xb1, 0xcc, 0x92, 0x57, 0x71, 0x38, 0x98, 0xdf, 0xc9, 0x26, 0x8c, 0x3f, 0x47, 0xe7, 0xca, /* Byte value: 0xdd */ + 0xd9, 0x27, 0xf7, 0x03, 0x73, 0x8e, 0x6e, 0x72, 0x68, 0x2e, 0xfd, 0x90, 0x13, 0x1a, 0x06, 0xd8, /* Byte value: 0xde */ + 0x68, 0x95, 0x74, 0xf4, 0xc9, 0xc0, 0xba, 0x24, 0x91, 0x1c, 0x09, 0x47, 0x3c, 0xb5, 0x2b, 0x85, /* Byte value: 0xdf */ + 0xad, 0xd8, 0x16, 0x97, 0xc2, 0x20, 0x8a, 0x0e, 0x67, 0xbb, 0xe2, 0xff, 0x0a, 0x69, 0xed, 0x61, /* Byte value: 0xe0 */ + 0x35, 0x8e, 0x06, 0x3f, 0x40, 0xc4, 0x3a, 0x55, 0x44, 0x33, 0x84, 0x81, 0xac, 0x11, 0x7e, 0x20, /* Byte value: 0xe1 */ + 0x56, 0x9f, 0x15, 0x69, 0xe2, 0x42, 0x97, 0xc5, 0x45, 0x43, 0xa0, 0x5e, 0x5c, 0x80, 0xd2, 0x71, /* Byte value: 0xe2 */ + 0xc4, 0x68, 0x5e, 0x26, 0xce, 0x44, 0x57, 0x6d, 0x1b, 0x9a, 0x8a, 0xfb, 0x84, 0x76, 0x4c, 0x67, /* Byte value: 0xe3 */ + 0xaf, 0x92, 0x6e, 0x1d, 0x8b, 0xab, 0x44, 0x80, 0x7e, 0xc1, 0x20, 0x79, 0xad, 0xfe, 0x3a, 0xa4, /* Byte value: 0xe4 */ + 0x15, 0xa4, 0x8a, 0x48, 0x5a, 0xa3, 0x87, 0x62, 0x17, 0x9f, 0xf9, 0x36, 0x4e, 0x75, 0x90, 0x2d, /* Byte value: 0xe5 */ + 0x7c, 0x14, 0xc2, 0xf9, 0x56, 0xc7, 0x5a, 0x01, 0x6b, 0xbe, 0x91, 0x32, 0xc0, 0x6a, 0x31, 0x2b, /* Byte value: 0xe6 */ + 0x6e, 0x4b, 0xfc, 0xa9, 0x12, 0x9e, 0x2b, 0x75, 0xba, 0x92, 0x8c, 0x0e, 0x16, 0xcf, 0x91, 0x09, /* Byte value: 0xe7 */ + 0xd8, 0x02, 0xcb, 0x46, 0xb6, 0x2a, 0x09, 0x35, 0x85, 0x13, 0x9c, 0xd3, 0xa1, 0xb0, 0x8c, 0x5b, /* Byte value: 0xe8 */ + 0xf7, 0x38, 0xd0, 0x44, 0x55, 0xde, 0xfc, 0x69, 0x74, 0x27, 0x8b, 0x33, 0x02, 0x1d, 0x88, 0xcb, /* Byte value: 0xe9 */ + 0x66, 0xa0, 0xdf, 0xc4, 0xf5, 0xf7, 0x95, 0x08, 0xde, 0xb9, 0x02, 0x53, 0xcf, 0xd6, 0x4b, 0x9b, /* Byte value: 0xea */ + 0x89, 0x66, 0x6a, 0x37, 0x4a, 0x92, 0x68, 0xe6, 0x06, 0xe3, 0xd8, 0x87, 0x65, 0xe0, 0x6e, 0x25, /* Byte value: 0xeb */ + 0x45, 0xe5, 0x17, 0x7c, 0x63, 0xbf, 0x81, 0xf6, 0x79, 0x52, 0xdc, 0x21, 0x38, 0x8f, 0xf8, 0xd0, /* Byte value: 0xec */ + 0xef, 0xc6, 0xb5, 0xf3, 0xbf, 0x65, 0xfd, 0xee, 0xd8, 0x5a, 0xda, 0xd4, 0xaa, 0x36, 0x25, 0xbe, /* Byte value: 0xed */ + 0x37, 0xc4, 0x7e, 0xb5, 0x09, 0x4f, 0xf4, 0xdb, 0x5d, 0x49, 0x46, 0x07, 0x0b, 0x86, 0xa9, 0xe5, /* Byte value: 0xee */ + 0x8d, 0xf2, 0x9a, 0xe0, 0xd8, 0x47, 0x37, 0x39, 0x34, 0x17, 0x9f, 0x48, 0xe8, 0x0d, 0x03, 0x6c, /* Byte value: 0xef */ + 0xb6, 0x49, 0x37, 0xef, 0xa4, 0xb4, 0x22, 0x40, 0x3f, 0x81, 0x10, 0xdd, 0xb7, 0x7f, 0x1d, 0x52, /* Byte value: 0xf0 */ + 0xff, 0xd3, 0xf3, 0x29, 0xb2, 0xb7, 0x42, 0x14, 0x10, 0x0c, 0x05, 0x6e, 0xdb, 0x04, 0x52, 0x59, /* Byte value: 0xf1 */ + 0x88, 0x43, 0x56, 0x72, 0x8f, 0x36, 0x0f, 0xa1, 0xeb, 0xde, 0xb9, 0xc4, 0xd7, 0x4a, 0xe4, 0xa6, /* Byte value: 0xf2 */ + 0xea, 0x77, 0x79, 0x61, 0xe8, 0x14, 0xc5, 0x76, 0x07, 0x93, 0xfc, 0x58, 0x95, 0x71, 0xc2, 0x74, /* Byte value: 0xf3 */ + 0x62, 0x34, 0x2f, 0x13, 0x67, 0x22, 0xca, 0xd7, 0xec, 0x4d, 0x45, 0x9c, 0x42, 0x3b, 0x26, 0xd2, /* Byte value: 0xf4 */ + 0xe2, 0x9c, 0x5a, 0x0c, 0x0f, 0x7d, 0x7b, 0x0b, 0x63, 0xb8, 0x72, 0x05, 0x4c, 0x68, 0x18, 0xe6, /* Byte value: 0xf5 */ + 0x3e, 0x0a, 0x61, 0x9d, 0x2b, 0x82, 0x2d, 0xe1, 0xd4, 0x5f, 0xa9, 0x19, 0x60, 0x35, 0xf9, 0xf4, /* Byte value: 0xf6 */ + 0x54, 0xd5, 0x6d, 0xe3, 0xab, 0xc9, 0x59, 0x4b, 
0x5c, 0x39, 0x62, 0xd8, 0xfb, 0x17, 0x05, 0xb4, /* Byte value: 0xf7 */ + 0xbe, 0xa2, 0x14, 0x82, 0x43, 0xdd, 0x9c, 0x3d, 0x5b, 0xaa, 0x9e, 0x80, 0x6e, 0x66, 0xc7, 0xc0, /* Byte value: 0xf8 */ + 0xb7, 0x6c, 0x0b, 0xaa, 0x61, 0x10, 0x45, 0x07, 0xd2, 0xbc, 0x71, 0x9e, 0x05, 0xd5, 0x97, 0xd1, /* Byte value: 0xf9 */ + 0x36, 0xe1, 0x42, 0xf0, 0xcc, 0xeb, 0x93, 0x9c, 0xb0, 0x74, 0x27, 0x44, 0xb9, 0x2c, 0x23, 0x66, /* Byte value: 0xfa */ + 0xe6, 0x08, 0xaa, 0xdb, 0x9d, 0xa8, 0x24, 0xd4, 0x51, 0x4c, 0x35, 0xca, 0xc1, 0x85, 0x75, 0xaf, /* Byte value: 0xfb */ + 0x99, 0x73, 0x2c, 0xed, 0x47, 0x40, 0xd7, 0x1c, 0xce, 0xb5, 0x07, 0x3d, 0x14, 0xd2, 0x19, 0xc2, /* Byte value: 0xfc */ + 0xa0, 0x82, 0xf9, 0x68, 0x72, 0x38, 0x0c, 0xeb, 0xdc, 0x59, 0x4a, 0x2e, 0xec, 0x37, 0xd0, 0x39, /* Byte value: 0xfd */ + 0x23, 0x45, 0xc8, 0xb8, 0x96, 0x48, 0x14, 0xfe, 0xa7, 0xeb, 0xde, 0x72, 0xf7, 0x59, 0xb3, 0x4b, /* Byte value: 0xfe */ + 0x8a, 0x09, 0x2e, 0xf8, 0xc6, 0xbd, 0xc1, 0x2f, 0xf2, 0xa4, 0x7b, 0x42, 0x70, 0xdd, 0x33, 0x63, /* Byte value: 0xff */ + 0x73, 0x04, 0x55, 0x8c, 0xaf, 0x54, 0x12, 0x6a, 0xc9, 0x26, 0xfb, 0x65, 0x81, 0xa3, 0xdb, 0xb6, /* Byte value: 0x00 */ + + /* Matrix row: 7 */ + 0x5e, 0x59, 0x1d, 0x8d, 0x98, 0xfb, 0x33, 0x54, 0x7c, 0xd4, 0xa4, 0x1b, 0xb3, 0x82, 0x0e, 0xb2, /* Byte value: 0x01 */ + 0x90, 0x76, 0xbe, 0x36, 0xc1, 0x08, 0x56, 0x5a, 0xf4, 0x40, 0x38, 0xf8, 0x4c, 0x99, 0x0f, 0xec, /* Byte value: 0x02 */ + 0xb8, 0xae, 0x1f, 0x39, 0x5b, 0xcb, 0x04, 0x4b, 0x01, 0x97, 0x34, 0x4e, 0xd8, 0x52, 0x2c, 0x9f, /* Byte value: 0x03 */ + 0x18, 0x48, 0xde, 0x05, 0x76, 0x41, 0x8f, 0x0f, 0x53, 0x4d, 0x04, 0xd3, 0xcd, 0xf8, 0xa0, 0x90, /* Byte value: 0x04 */ + 0x76, 0x81, 0xbc, 0x82, 0x02, 0x38, 0x61, 0x45, 0x89, 0x03, 0xa8, 0xad, 0x27, 0x49, 0x2d, 0xc1, /* Byte value: 0x05 */ + 0xc4, 0x19, 0x1a, 0xc8, 0x9e, 0xb3, 0x90, 0x9b, 0x24, 0xd1, 0x9f, 0x2f, 0xda, 0x59, 0x79, 0x32, /* Byte value: 0x06 */ + 0x0d, 0x23, 0xf7, 0xdb, 0x11, 0x1f, 0x5f, 0xae, 0x67, 0xf8, 0x5d, 0x96, 0x09, 0xc8, 0x5c, 0x46, /* Byte value: 0x07 */ + 0x84, 0x1a, 0x0f, 0xd0, 0x8c, 0x88, 0x7f, 0xb3, 0x6f, 0xca, 0x3e, 0xa3, 0x06, 0x1d, 0xff, 0x34, /* Byte value: 0x08 */ + 0xc2, 0x0b, 0xcc, 0x58, 0x62, 0x32, 0xc3, 0xe8, 0x40, 0x53, 0x9e, 0x6b, 0x78, 0x67, 0x51, 0x16, /* Byte value: 0x09 */ + 0x34, 0x8c, 0x5a, 0xea, 0x44, 0x7c, 0xbf, 0xfd, 0x5f, 0x66, 0xb7, 0x1d, 0x24, 0xa6, 0xb3, 0xdb, /* Byte value: 0x0a */ + 0x31, 0x97, 0xe7, 0x32, 0xc6, 0x5c, 0x24, 0x56, 0x09, 0xa5, 0x57, 0x7b, 0xd7, 0x87, 0x8f, 0xed, /* Byte value: 0x0b */ + 0x24, 0xfc, 0xce, 0xec, 0xa1, 0x02, 0xf4, 0xf7, 0x3d, 0x10, 0x0e, 0x3e, 0x13, 0xb7, 0x73, 0x3b, /* Byte value: 0x0c */ + 0xc3, 0x0c, 0x54, 0x60, 0x48, 0xec, 0x3a, 0xa0, 0xef, 0x6c, 0xc1, 0x75, 0xf6, 0xd3, 0x5d, 0x18, /* Byte value: 0x0d */ + 0xc7, 0x10, 0x71, 0x80, 0xe0, 0x12, 0x58, 0x43, 0x16, 0x90, 0x7e, 0x0d, 0x8b, 0x46, 0x6d, 0x20, /* Byte value: 0x0e */ + 0x4a, 0x35, 0xac, 0x6b, 0xd5, 0x7b, 0x1a, 0xbd, 0xe7, 0x5e, 0xa2, 0x40, 0xf9, 0x06, 0xfe, 0x6a, /* Byte value: 0x0f */ + 0x07, 0x15, 0x4e, 0xa8, 0xd6, 0x5f, 0xaa, 0x3b, 0xcb, 0xbd, 0x5e, 0x5a, 0x2c, 0x8a, 0x24, 0x2a, /* Byte value: 0x10 */ + 0x0c, 0x24, 0x6f, 0xe3, 0x3b, 0xc1, 0xa6, 0xe6, 0xc8, 0xc7, 0x02, 0x88, 0x87, 0x7c, 0x50, 0x48, /* Byte value: 0x11 */ + 0x48, 0x3b, 0x5f, 0x1b, 0x81, 0x04, 0x2b, 0x2d, 0x7a, 0x20, 0x1c, 0x7c, 0x26, 0xad, 0xe6, 0x76, /* Byte value: 0x12 */ + 0x80, 0x06, 0x2a, 0x30, 0x24, 0x76, 0x1d, 0x50, 0x96, 0x36, 0x81, 0xdb, 0x7b, 0x88, 0xcf, 0x0c, /* Byte value: 0x13 */ + 0xd7, 0x60, 0xe5, 0x86, 0x05, 0x6c, 0x13, 0x49, 0x74, 0xe6, 0xc7, 
0x2e, 0xbc, 0x57, 0xad, 0xc0, /* Byte value: 0x14 */ + 0x69, 0xdc, 0x2c, 0x2f, 0xa2, 0x26, 0x44, 0x71, 0x11, 0xf3, 0xf2, 0x24, 0xc6, 0x3b, 0xa9, 0x7b, /* Byte value: 0x15 */ + 0xee, 0xcf, 0x48, 0xb7, 0x50, 0x0f, 0xf3, 0x1a, 0x4c, 0x78, 0x2d, 0xa5, 0x91, 0x39, 0x42, 0x5d, /* Byte value: 0x16 */ + 0xd8, 0x4d, 0xe1, 0x2d, 0x40, 0x0c, 0x7d, 0x77, 0x8e, 0x60, 0x24, 0x84, 0x6a, 0x34, 0xe9, 0x9a, /* Byte value: 0x17 */ + 0x1b, 0x41, 0xb5, 0x4d, 0x08, 0xe0, 0x47, 0xd7, 0x61, 0x0c, 0xe5, 0xf1, 0x9c, 0xe7, 0xb4, 0x82, /* Byte value: 0x18 */ + 0x77, 0x86, 0x24, 0xba, 0x28, 0xe6, 0x98, 0x0d, 0x26, 0x3c, 0xf7, 0xb3, 0xa9, 0xfd, 0x21, 0xcf, /* Byte value: 0x19 */ + 0x91, 0x71, 0x26, 0x0e, 0xeb, 0xd6, 0xaf, 0x12, 0x5b, 0x7f, 0x67, 0xe6, 0xc2, 0x2d, 0x03, 0xe2, /* Byte value: 0x1a */ + 0x73, 0x9a, 0x01, 0x5a, 0x80, 0x18, 0xfa, 0xee, 0xdf, 0xc0, 0x48, 0xcb, 0xd4, 0x68, 0x11, 0xf7, /* Byte value: 0x1b */ + 0xe8, 0xdd, 0x9e, 0x27, 0xac, 0x8e, 0xa0, 0x69, 0x28, 0xfa, 0x2c, 0xe1, 0x33, 0x07, 0x6a, 0x79, /* Byte value: 0x1c */ + 0xa1, 0xe1, 0x59, 0x04, 0x07, 0x54, 0x72, 0x0c, 0xfd, 0xe5, 0x6f, 0x83, 0x9b, 0x1e, 0x80, 0x01, /* Byte value: 0x1d */ + 0x53, 0x7a, 0xea, 0x56, 0x89, 0xe4, 0x6c, 0xfa, 0x1b, 0x2c, 0xf9, 0x8d, 0xba, 0x4a, 0x52, 0xf4, /* Byte value: 0x1e */ + 0xc9, 0x3a, 0xed, 0x13, 0x8f, 0xac, 0xcf, 0x35, 0x43, 0x29, 0xc2, 0xb9, 0xd3, 0x91, 0x25, 0x74, /* Byte value: 0x1f */ + 0x8d, 0x25, 0xdd, 0xeb, 0x35, 0x69, 0x42, 0xfe, 0xf1, 0xce, 0xdc, 0x4d, 0x72, 0x40, 0x93, 0x4a, /* Byte value: 0x20 */ + 0xe7, 0xf0, 0x9a, 0x8c, 0xe9, 0xee, 0xce, 0x57, 0xd2, 0x7c, 0xcf, 0x4b, 0xe5, 0x64, 0x2e, 0x23, /* Byte value: 0x21 */ + 0x7f, 0xbe, 0x6e, 0xb9, 0xbb, 0xd9, 0x5c, 0x08, 0x17, 0x07, 0x4a, 0x43, 0x53, 0x14, 0x41, 0xbf, /* Byte value: 0x22 */ + 0x86, 0x14, 0xfc, 0xa0, 0xd8, 0xf7, 0x4e, 0x23, 0xf2, 0xb4, 0x80, 0x9f, 0xd9, 0xb6, 0xe7, 0x28, /* Byte value: 0x23 */ + 0x70, 0x93, 0x6a, 0x12, 0xfe, 0xb9, 0x32, 0x36, 0xed, 0x81, 0xa9, 0xe9, 0x85, 0x77, 0x05, 0xe5, /* Byte value: 0x24 */ + 0x4e, 0x29, 0x89, 0x8b, 0x7d, 0x85, 0x78, 0x5e, 0x1e, 0xa2, 0x1d, 0x38, 0x84, 0x93, 0xce, 0x52, /* Byte value: 0x25 */ + 0x1f, 0x5d, 0x90, 0xad, 0xa0, 0x1e, 0x25, 0x34, 0x98, 0xf0, 0x5a, 0x89, 0xe1, 0x72, 0x84, 0xba, /* Byte value: 0x26 */ + 0x63, 0xea, 0x95, 0x5c, 0x65, 0x66, 0xb1, 0xe4, 0xbd, 0xb6, 0xf1, 0xe8, 0xe3, 0x79, 0xd1, 0x17, /* Byte value: 0x27 */ + 0xe6, 0xf7, 0x02, 0xb4, 0xc3, 0x30, 0x37, 0x1f, 0x7d, 0x43, 0x90, 0x55, 0x6b, 0xd0, 0x22, 0x2d, /* Byte value: 0x28 */ + 0xa7, 0xf3, 0x8f, 0x94, 0xfb, 0xd5, 0x21, 0x7f, 0x99, 0x67, 0x6e, 0xc7, 0x39, 0x20, 0xa8, 0x25, /* Byte value: 0x29 */ + 0x35, 0x8b, 0xc2, 0xd2, 0x6e, 0xa2, 0x46, 0xb5, 0xf0, 0x59, 0xe8, 0x03, 0xaa, 0x12, 0xbf, 0xd5, /* Byte value: 0x2a */ + 0x20, 0xe0, 0xeb, 0x0c, 0x09, 0xfc, 0x96, 0x14, 0xc4, 0xec, 0xb1, 0x46, 0x6e, 0x22, 0x43, 0x03, /* Byte value: 0x2b */ + 0x0f, 0x2d, 0x04, 0xab, 0x45, 0x60, 0x6e, 0x3e, 0xfa, 0x86, 0xe3, 0xaa, 0xd6, 0x63, 0x44, 0x5a, /* Byte value: 0x2c */ + 0x16, 0x62, 0x42, 0x96, 0x19, 0xff, 0x18, 0x79, 0x06, 0xf4, 0xb8, 0x67, 0x95, 0x2f, 0xe8, 0xc4, /* Byte value: 0x2d */ + 0xf3, 0x9c, 0x2b, 0x6a, 0xa4, 0x6e, 0xe7, 0xbe, 0x49, 0xf6, 0xc9, 0x10, 0xaf, 0xe0, 0xde, 0xfb, /* Byte value: 0x2e */ + 0xaf, 0xcb, 0xc5, 0x97, 0x68, 0xea, 0xe5, 0x7a, 0xa8, 0x5c, 0xd3, 0x37, 0xc3, 0xc9, 0xc8, 0x55, /* Byte value: 0x2f */ + 0x22, 0xee, 0x18, 0x7c, 0x5d, 0x83, 0xa7, 0x84, 0x59, 0x92, 0x0f, 0x7a, 0xb1, 0x89, 0x5b, 0x1f, /* Byte value: 0x30 */ + 0xb9, 0xa9, 0x87, 0x01, 0x71, 0x15, 0xfd, 0x03, 0xae, 0xa8, 0x6b, 0x50, 0x56, 0xe6, 0x20, 0x91, /* Byte value: 
0x31 */ + 0x1e, 0x5a, 0x08, 0x95, 0x8a, 0xc0, 0xdc, 0x7c, 0x37, 0xcf, 0x05, 0x97, 0x6f, 0xc6, 0x88, 0xb4, /* Byte value: 0x32 */ + 0x25, 0xfb, 0x56, 0xd4, 0x8b, 0xdc, 0x0d, 0xbf, 0x92, 0x2f, 0x51, 0x20, 0x9d, 0x03, 0x7f, 0x35, /* Byte value: 0x33 */ + 0xba, 0xa0, 0xec, 0x49, 0x0f, 0xb4, 0x35, 0xdb, 0x9c, 0xe9, 0x8a, 0x72, 0x07, 0xf9, 0x34, 0x83, /* Byte value: 0x34 */ + 0xbd, 0xb5, 0xa2, 0xe1, 0xd9, 0xeb, 0x9f, 0xe0, 0x57, 0x54, 0xd4, 0x28, 0x2b, 0x73, 0x10, 0xa9, /* Byte value: 0x35 */ + 0x8e, 0x2c, 0xb6, 0xa3, 0x4b, 0xc8, 0x8a, 0x26, 0xc3, 0x8f, 0x3d, 0x6f, 0x23, 0x5f, 0x87, 0x58, /* Byte value: 0x36 */ + 0x5c, 0x57, 0xee, 0xfd, 0xcc, 0x84, 0x02, 0xc4, 0xe1, 0xaa, 0x1a, 0x27, 0x6c, 0x29, 0x16, 0xae, /* Byte value: 0x37 */ + 0x41, 0x04, 0x8d, 0x20, 0x38, 0xe5, 0x16, 0x60, 0xe4, 0x24, 0xfe, 0x92, 0x52, 0xf0, 0x8a, 0x08, /* Byte value: 0x38 */ + 0x6f, 0xce, 0xfa, 0xbf, 0x5e, 0xa7, 0x17, 0x02, 0x75, 0x71, 0xf3, 0x60, 0x64, 0x05, 0x81, 0x5f, /* Byte value: 0x39 */ + 0x42, 0x0d, 0xe6, 0x68, 0x46, 0x44, 0xde, 0xb8, 0xd6, 0x65, 0x1f, 0xb0, 0x03, 0xef, 0x9e, 0x1a, /* Byte value: 0x3a */ + 0x46, 0x11, 0xc3, 0x88, 0xee, 0xba, 0xbc, 0x5b, 0x2f, 0x99, 0xa0, 0xc8, 0x7e, 0x7a, 0xae, 0x22, /* Byte value: 0x3b */ + 0x2b, 0xd1, 0xca, 0x47, 0xe4, 0x62, 0x9a, 0xc9, 0xc7, 0x96, 0xed, 0x94, 0xc5, 0xd4, 0x37, 0x61, /* Byte value: 0x3c */ + 0xdc, 0x51, 0xc4, 0xcd, 0xe8, 0xf2, 0x1f, 0x94, 0x77, 0x9c, 0x9b, 0xfc, 0x17, 0xa1, 0xd9, 0xa2, /* Byte value: 0x3d */ + 0xdf, 0x58, 0xaf, 0x85, 0x96, 0x53, 0xd7, 0x4c, 0x45, 0xdd, 0x7a, 0xde, 0x46, 0xbe, 0xcd, 0xb0, /* Byte value: 0x3e */ + 0x43, 0x0a, 0x7e, 0x50, 0x6c, 0x9a, 0x27, 0xf0, 0x79, 0x5a, 0x40, 0xae, 0x8d, 0x5b, 0x92, 0x14, /* Byte value: 0x3f */ + 0xe3, 0xec, 0xbf, 0x6c, 0x41, 0x10, 0xac, 0xb4, 0x2b, 0x80, 0x70, 0x33, 0x98, 0xf1, 0x1e, 0x1b, /* Byte value: 0x40 */ + 0x29, 0xdf, 0x39, 0x37, 0xb0, 0x1d, 0xab, 0x59, 0x5a, 0xe8, 0x53, 0xa8, 0x1a, 0x7f, 0x2f, 0x7d, /* Byte value: 0x41 */ + 0xb4, 0x8a, 0x70, 0xda, 0x60, 0x0a, 0xa2, 0xad, 0xc9, 0x50, 0x36, 0xc6, 0x5f, 0x2e, 0x7c, 0xd7, /* Byte value: 0x42 */ + 0xcb, 0x34, 0x1e, 0x63, 0xdb, 0xd3, 0xfe, 0xa5, 0xde, 0x57, 0x7c, 0x85, 0x0c, 0x3a, 0x3d, 0x68, /* Byte value: 0x43 */ + 0x32, 0x9e, 0x8c, 0x7a, 0xb8, 0xfd, 0xec, 0x8e, 0x3b, 0xe4, 0xb6, 0x59, 0x86, 0x98, 0x9b, 0xff, /* Byte value: 0x44 */ + 0xda, 0x43, 0x12, 0x5d, 0x14, 0x73, 0x4c, 0xe7, 0x13, 0x1e, 0x9a, 0xb8, 0xb5, 0x9f, 0xf1, 0x86, /* Byte value: 0x45 */ + 0xea, 0xd3, 0x6d, 0x57, 0xf8, 0xf1, 0x91, 0xf9, 0xb5, 0x84, 0x92, 0xdd, 0xec, 0xac, 0x72, 0x65, /* Byte value: 0x46 */ + 0xbe, 0xbc, 0xc9, 0xa9, 0xa7, 0x4a, 0x57, 0x38, 0x65, 0x15, 0x35, 0x0a, 0x7a, 0x6c, 0x04, 0xbb, /* Byte value: 0x47 */ + 0x03, 0x09, 0x6b, 0x48, 0x7e, 0xa1, 0xc8, 0xd8, 0x32, 0x41, 0xe1, 0x22, 0x51, 0x1f, 0x14, 0x12, /* Byte value: 0x48 */ + 0xa5, 0xfd, 0x7c, 0xe4, 0xaf, 0xaa, 0x10, 0xef, 0x04, 0x19, 0xd0, 0xfb, 0xe6, 0x8b, 0xb0, 0x39, /* Byte value: 0x49 */ + 0xa4, 0xfa, 0xe4, 0xdc, 0x85, 0x74, 0xe9, 0xa7, 0xab, 0x26, 0x8f, 0xe5, 0x68, 0x3f, 0xbc, 0x37, /* Byte value: 0x4a */ + 0xab, 0xd7, 0xe0, 0x77, 0xc0, 0x14, 0x87, 0x99, 0x51, 0xa0, 0x6c, 0x4f, 0xbe, 0x5c, 0xf8, 0x6d, /* Byte value: 0x4b */ + 0x64, 0xff, 0xdb, 0xf4, 0xb3, 0x39, 0x1b, 0xdf, 0x76, 0x0b, 0xaf, 0xb2, 0xcf, 0xf3, 0xf5, 0x3d, /* Byte value: 0x4c */ + 0xad, 0xc5, 0x36, 0xe7, 0x3c, 0x95, 0xd4, 0xea, 0x35, 0x22, 0x6d, 0x0b, 0x1c, 0x62, 0xd0, 0x49, /* Byte value: 0x4d */ + 0x4f, 0x2e, 0x11, 0xb3, 0x57, 0x5b, 0x81, 0x16, 0xb1, 0x9d, 0x42, 0x26, 0x0a, 0x27, 0xc2, 0x5c, /* Byte value: 0x4e */ + 0x85, 0x1d, 0x97, 0xe8, 0xa6, 0x56, 
0x86, 0xfb, 0xc0, 0xf5, 0x61, 0xbd, 0x88, 0xa9, 0xf3, 0x3a, /* Byte value: 0x4f */ + 0xa0, 0xe6, 0xc1, 0x3c, 0x2d, 0x8a, 0x8b, 0x44, 0x52, 0xda, 0x30, 0x9d, 0x15, 0xaa, 0x8c, 0x0f, /* Byte value: 0x50 */ + 0x13, 0x79, 0xff, 0x4e, 0x9b, 0xdf, 0x83, 0xd2, 0x50, 0x37, 0x58, 0x01, 0x66, 0x0e, 0xd4, 0xf2, /* Byte value: 0x51 */ + 0xd4, 0x69, 0x8e, 0xce, 0x7b, 0xcd, 0xdb, 0x91, 0x46, 0xa7, 0x26, 0x0c, 0xed, 0x48, 0xb9, 0xd2, /* Byte value: 0x52 */ + 0xfb, 0xa4, 0x61, 0x69, 0x37, 0x51, 0x23, 0xbb, 0x78, 0xcd, 0x74, 0xe0, 0x55, 0x09, 0xbe, 0x8b, /* Byte value: 0x53 */ + 0xae, 0xcc, 0x5d, 0xaf, 0x42, 0x34, 0x1c, 0x32, 0x07, 0x63, 0x8c, 0x29, 0x4d, 0x7d, 0xc4, 0x5b, /* Byte value: 0x54 */ + 0x94, 0x6a, 0x9b, 0xd6, 0x69, 0xf6, 0x34, 0xb9, 0x0d, 0xbc, 0x87, 0x80, 0x31, 0x0c, 0x3f, 0xd4, /* Byte value: 0x55 */ + 0xde, 0x5f, 0x37, 0xbd, 0xbc, 0x8d, 0x2e, 0x04, 0xea, 0xe2, 0x25, 0xc0, 0xc8, 0x0a, 0xc1, 0xbe, /* Byte value: 0x56 */ + 0xbb, 0xa7, 0x74, 0x71, 0x25, 0x6a, 0xcc, 0x93, 0x33, 0xd6, 0xd5, 0x6c, 0x89, 0x4d, 0x38, 0x8d, /* Byte value: 0x57 */ + 0xce, 0x2f, 0xa3, 0xbb, 0x59, 0xf3, 0x65, 0x0e, 0x88, 0x94, 0x9c, 0xe3, 0xff, 0x1b, 0x01, 0x5e, /* Byte value: 0x58 */ + 0xa2, 0xe8, 0x32, 0x4c, 0x79, 0xf5, 0xba, 0xd4, 0xcf, 0xa4, 0x8e, 0xa1, 0xca, 0x01, 0x94, 0x13, /* Byte value: 0x59 */ + 0xf1, 0x92, 0xd8, 0x1a, 0xf0, 0x11, 0xd6, 0x2e, 0xd4, 0x88, 0x77, 0x2c, 0x70, 0x4b, 0xc6, 0xe7, /* Byte value: 0x5a */ + 0x3d, 0xb3, 0x88, 0xd1, 0xfd, 0x9d, 0x82, 0xb0, 0xc1, 0x62, 0x55, 0xf3, 0x50, 0xfb, 0xdf, 0xa5, /* Byte value: 0x5b */ + 0xb6, 0x84, 0x83, 0xaa, 0x34, 0x75, 0x93, 0x3d, 0x54, 0x2e, 0x88, 0xfa, 0x80, 0x85, 0x64, 0xcb, /* Byte value: 0x5c */ + 0x61, 0xe4, 0x66, 0x2c, 0x31, 0x19, 0x80, 0x74, 0x20, 0xc8, 0x4f, 0xd4, 0x3c, 0xd2, 0xc9, 0x0b, /* Byte value: 0x5d */ + 0x36, 0x82, 0xa9, 0x9a, 0x10, 0x03, 0x8e, 0x6d, 0xc2, 0x18, 0x09, 0x21, 0xfb, 0x0d, 0xab, 0xc7, /* Byte value: 0x5e */ + 0xec, 0xc1, 0xbb, 0xc7, 0x04, 0x70, 0xc2, 0x8a, 0xd1, 0x06, 0x93, 0x99, 0x4e, 0x92, 0x5a, 0x41, /* Byte value: 0x5f */ + 0xc8, 0x3d, 0x75, 0x2b, 0xa5, 0x72, 0x36, 0x7d, 0xec, 0x16, 0x9d, 0xa7, 0x5d, 0x25, 0x29, 0x7a, /* Byte value: 0x60 */ + 0x52, 0x7d, 0x72, 0x6e, 0xa3, 0x3a, 0x95, 0xb2, 0xb4, 0x13, 0xa6, 0x93, 0x34, 0xfe, 0x5e, 0xfa, /* Byte value: 0x61 */ + 0xb2, 0x98, 0xa6, 0x4a, 0x9c, 0x8b, 0xf1, 0xde, 0xad, 0xd2, 0x37, 0x82, 0xfd, 0x10, 0x54, 0xf3, /* Byte value: 0x62 */ + 0xd0, 0x75, 0xab, 0x2e, 0xd3, 0x33, 0xb9, 0x72, 0xbf, 0x5b, 0x99, 0x74, 0x90, 0xdd, 0x89, 0xea, /* Byte value: 0x63 */ + 0x72, 0x9d, 0x99, 0x62, 0xaa, 0xc6, 0x03, 0xa6, 0x70, 0xff, 0x17, 0xd5, 0x5a, 0xdc, 0x1d, 0xf9, /* Byte value: 0x64 */ + 0xeb, 0xd4, 0xf5, 0x6f, 0xd2, 0x2f, 0x68, 0xb1, 0x1a, 0xbb, 0xcd, 0xc3, 0x62, 0x18, 0x7e, 0x6b, /* Byte value: 0x65 */ + 0x96, 0x64, 0x68, 0xa6, 0x3d, 0x89, 0x05, 0x29, 0x90, 0xc2, 0x39, 0xbc, 0xee, 0xa7, 0x27, 0xc8, /* Byte value: 0x66 */ + 0x0e, 0x2a, 0x9c, 0x93, 0x6f, 0xbe, 0x97, 0x76, 0x55, 0xb9, 0xbc, 0xb4, 0x58, 0xd7, 0x48, 0x54, /* Byte value: 0x67 */ + 0xe2, 0xeb, 0x27, 0x54, 0x6b, 0xce, 0x55, 0xfc, 0x84, 0xbf, 0x2f, 0x2d, 0x16, 0x45, 0x12, 0x15, /* Byte value: 0x68 */ + 0x56, 0x61, 0x57, 0x8e, 0x0b, 0xc4, 0xf7, 0x51, 0x4d, 0xef, 0x19, 0xeb, 0x49, 0x6b, 0x6e, 0xc2, /* Byte value: 0x69 */ + 0x4c, 0x27, 0x7a, 0xfb, 0x29, 0xfa, 0x49, 0xce, 0x83, 0xdc, 0xa3, 0x04, 0x5b, 0x38, 0xd6, 0x4e, /* Byte value: 0x6a */ + 0x40, 0x03, 0x15, 0x18, 0x12, 0x3b, 0xef, 0x28, 0x4b, 0x1b, 0xa1, 0x8c, 0xdc, 0x44, 0x86, 0x06, /* Byte value: 0x6b */ + 0x37, 0x85, 0x31, 0xa2, 0x3a, 0xdd, 0x77, 0x25, 0x6d, 0x27, 0x56, 0x3f, 0x75, 
0xb9, 0xa7, 0xc9, /* Byte value: 0x6c */ + 0x92, 0x78, 0x4d, 0x46, 0x95, 0x77, 0x67, 0xca, 0x69, 0x3e, 0x86, 0xc4, 0x93, 0x32, 0x17, 0xf0, /* Byte value: 0x6d */ + 0x44, 0x1f, 0x30, 0xf8, 0xba, 0xc5, 0x8d, 0xcb, 0xb2, 0xe7, 0x1e, 0xf4, 0xa1, 0xd1, 0xb6, 0x3e, /* Byte value: 0x6e */ + 0x8f, 0x2b, 0x2e, 0x9b, 0x61, 0x16, 0x73, 0x6e, 0x6c, 0xb0, 0x62, 0x71, 0xad, 0xeb, 0x8b, 0x56, /* Byte value: 0x6f */ + 0x59, 0x4c, 0x53, 0x25, 0x4e, 0xa4, 0x99, 0x6f, 0xb7, 0x69, 0xfa, 0x41, 0x9f, 0x08, 0x2a, 0x98, /* Byte value: 0x70 */ + 0xdb, 0x44, 0x8a, 0x65, 0x3e, 0xad, 0xb5, 0xaf, 0xbc, 0x21, 0xc5, 0xa6, 0x3b, 0x2b, 0xfd, 0x88, /* Byte value: 0x71 */ + 0x6d, 0xc0, 0x09, 0xcf, 0x0a, 0xd8, 0x26, 0x92, 0xe8, 0x0f, 0x4d, 0x5c, 0xbb, 0xae, 0x99, 0x43, /* Byte value: 0x72 */ + 0x8c, 0x22, 0x45, 0xd3, 0x1f, 0xb7, 0xbb, 0xb6, 0x5e, 0xf1, 0x83, 0x53, 0xfc, 0xf4, 0x9f, 0x44, /* Byte value: 0x73 */ + 0xd3, 0x7c, 0xc0, 0x66, 0xad, 0x92, 0x71, 0xaa, 0x8d, 0x1a, 0x78, 0x56, 0xc1, 0xc2, 0x9d, 0xf8, /* Byte value: 0x74 */ + 0x88, 0x3e, 0x60, 0x33, 0xb7, 0x49, 0xd9, 0x55, 0xa7, 0x0d, 0x3c, 0x2b, 0x81, 0x61, 0xaf, 0x7c, /* Byte value: 0x75 */ + 0x47, 0x16, 0x5b, 0xb0, 0xc4, 0x64, 0x45, 0x13, 0x80, 0xa6, 0xff, 0xd6, 0xf0, 0xce, 0xa2, 0x2c, /* Byte value: 0x76 */ + 0xe5, 0xfe, 0x69, 0xfc, 0xbd, 0x91, 0xff, 0xc7, 0x4f, 0x02, 0x71, 0x77, 0x3a, 0xcf, 0x36, 0x3f, /* Byte value: 0x77 */ + 0x2f, 0xcd, 0xef, 0xa7, 0x4c, 0x9c, 0xf8, 0x2a, 0x3e, 0x6a, 0x52, 0xec, 0xb8, 0x41, 0x07, 0x59, /* Byte value: 0x78 */ + 0x12, 0x7e, 0x67, 0x76, 0xb1, 0x01, 0x7a, 0x9a, 0xff, 0x08, 0x07, 0x1f, 0xe8, 0xba, 0xd8, 0xfc, /* Byte value: 0x79 */ + 0x8b, 0x37, 0x0b, 0x7b, 0xc9, 0xe8, 0x11, 0x8d, 0x95, 0x4c, 0xdd, 0x09, 0xd0, 0x7e, 0xbb, 0x6e, /* Byte value: 0x7a */ + 0x11, 0x77, 0x0c, 0x3e, 0xcf, 0xa0, 0xb2, 0x42, 0xcd, 0x49, 0xe6, 0x3d, 0xb9, 0xa5, 0xcc, 0xee, /* Byte value: 0x7b */ + 0x54, 0x6f, 0xa4, 0xfe, 0x5f, 0xbb, 0xc6, 0xc1, 0xd0, 0x91, 0xa7, 0xd7, 0x96, 0xc0, 0x76, 0xde, /* Byte value: 0x7c */ + 0xa8, 0xde, 0x8b, 0x3f, 0xbe, 0xb5, 0x4f, 0x41, 0x63, 0xe1, 0x8d, 0x6d, 0xef, 0x43, 0xec, 0x7f, /* Byte value: 0x7d */ + 0x87, 0x13, 0x64, 0x98, 0xf2, 0x29, 0xb7, 0x6b, 0x5d, 0x8b, 0xdf, 0x81, 0x57, 0x02, 0xeb, 0x26, /* Byte value: 0x7e */ + 0x2d, 0xc3, 0x1c, 0xd7, 0x18, 0xe3, 0xc9, 0xba, 0xa3, 0x14, 0xec, 0xd0, 0x67, 0xea, 0x1f, 0x45, /* Byte value: 0x7f */ + 0x5d, 0x50, 0x76, 0xc5, 0xe6, 0x5a, 0xfb, 0x8c, 0x4e, 0x95, 0x45, 0x39, 0xe2, 0x9d, 0x1a, 0xa0, /* Byte value: 0x80 */ + 0x9d, 0x55, 0x49, 0xed, 0xd0, 0x17, 0x09, 0xf4, 0x93, 0xb8, 0x65, 0x6e, 0x45, 0x51, 0x53, 0xaa, /* Byte value: 0x81 */ + 0x39, 0xaf, 0xad, 0x31, 0x55, 0x63, 0xe0, 0x53, 0x38, 0x9e, 0xea, 0x8b, 0x2d, 0x6e, 0xef, 0x9d, /* Byte value: 0x82 */ + 0x5f, 0x5e, 0x85, 0xb5, 0xb2, 0x25, 0xca, 0x1c, 0xd3, 0xeb, 0xfb, 0x05, 0x3d, 0x36, 0x02, 0xbc, /* Byte value: 0x83 */ + 0x26, 0xf2, 0x3d, 0x9c, 0xf5, 0x7d, 0xc5, 0x67, 0xa0, 0x6e, 0xb0, 0x02, 0xcc, 0x1c, 0x6b, 0x27, /* Byte value: 0x84 */ + 0x05, 0x1b, 0xbd, 0xd8, 0x82, 0x20, 0x9b, 0xab, 0x56, 0xc3, 0xe0, 0x66, 0xf3, 0x21, 0x3c, 0x36, /* Byte value: 0x85 */ + 0xd5, 0x6e, 0x16, 0xf6, 0x51, 0x13, 0x22, 0xd9, 0xe9, 0x98, 0x79, 0x12, 0x63, 0xfc, 0xb5, 0xdc, /* Byte value: 0x86 */ + 0xfc, 0xb1, 0x2f, 0xc1, 0xe1, 0x0e, 0x89, 0x80, 0xb3, 0x70, 0x2a, 0xba, 0x79, 0x83, 0x9a, 0xa1, /* Byte value: 0x87 */ + 0x19, 0x4f, 0x46, 0x3d, 0x5c, 0x9f, 0x76, 0x47, 0xfc, 0x72, 0x5b, 0xcd, 0x43, 0x4c, 0xac, 0x9e, /* Byte value: 0x88 */ + 0x09, 0x3f, 0xd2, 0x3b, 0xb9, 0xe1, 0x3d, 0x4d, 0x9e, 0x04, 0xe2, 0xee, 0x74, 0x5d, 0x6c, 0x7e, /* Byte value: 0x89 */ + 
0xb3, 0x9f, 0x3e, 0x72, 0xb6, 0x55, 0x08, 0x96, 0x02, 0xed, 0x68, 0x9c, 0x73, 0xa4, 0x58, 0xfd, /* Byte value: 0x8a */ + 0xfa, 0xa3, 0xf9, 0x51, 0x1d, 0x8f, 0xda, 0xf3, 0xd7, 0xf2, 0x2b, 0xfe, 0xdb, 0xbd, 0xb2, 0x85, /* Byte value: 0x8b */ + 0xef, 0xc8, 0xd0, 0x8f, 0x7a, 0xd1, 0x0a, 0x52, 0xe3, 0x47, 0x72, 0xbb, 0x1f, 0x8d, 0x4e, 0x53, /* Byte value: 0x8c */ + 0x0a, 0x36, 0xb9, 0x73, 0xc7, 0x40, 0xf5, 0x95, 0xac, 0x45, 0x03, 0xcc, 0x25, 0x42, 0x78, 0x6c, /* Byte value: 0x8d */ + 0x30, 0x90, 0x7f, 0x0a, 0xec, 0x82, 0xdd, 0x1e, 0xa6, 0x9a, 0x08, 0x65, 0x59, 0x33, 0x83, 0xe3, /* Byte value: 0x8e */ + 0xcd, 0x26, 0xc8, 0xf3, 0x27, 0x52, 0xad, 0xd6, 0xba, 0xd5, 0x7d, 0xc1, 0xae, 0x04, 0x15, 0x4c, /* Byte value: 0x8f */ + 0xd6, 0x67, 0x7d, 0xbe, 0x2f, 0xb2, 0xea, 0x01, 0xdb, 0xd9, 0x98, 0x30, 0x32, 0xe3, 0xa1, 0xce, /* Byte value: 0x90 */ + 0x6b, 0xd2, 0xdf, 0x5f, 0xf6, 0x59, 0x75, 0xe1, 0x8c, 0x8d, 0x4c, 0x18, 0x19, 0x90, 0xb1, 0x67, /* Byte value: 0x91 */ + 0x08, 0x38, 0x4a, 0x03, 0x93, 0x3f, 0xc4, 0x05, 0x31, 0x3b, 0xbd, 0xf0, 0xfa, 0xe9, 0x60, 0x70, /* Byte value: 0x92 */ + 0xb5, 0x8d, 0xe8, 0xe2, 0x4a, 0xd4, 0x5b, 0xe5, 0x66, 0x6f, 0x69, 0xd8, 0xd1, 0x9a, 0x70, 0xd9, /* Byte value: 0x93 */ + 0x6e, 0xc9, 0x62, 0x87, 0x74, 0x79, 0xee, 0x4a, 0xda, 0x4e, 0xac, 0x7e, 0xea, 0xb1, 0x8d, 0x51, /* Byte value: 0x94 */ + 0x65, 0xf8, 0x43, 0xcc, 0x99, 0xe7, 0xe2, 0x97, 0xd9, 0x34, 0xf0, 0xac, 0x41, 0x47, 0xf9, 0x33, /* Byte value: 0x95 */ + 0xf5, 0x8e, 0xfd, 0xfa, 0x58, 0xef, 0xb4, 0xcd, 0x2d, 0x74, 0xc8, 0x54, 0x0d, 0xde, 0xf6, 0xdf, /* Byte value: 0x96 */ + 0xaa, 0xd0, 0x78, 0x4f, 0xea, 0xca, 0x7e, 0xd1, 0xfe, 0x9f, 0x33, 0x51, 0x30, 0xe8, 0xf4, 0x63, /* Byte value: 0x97 */ + 0x74, 0x8f, 0x4f, 0xf2, 0x56, 0x47, 0x50, 0xd5, 0x14, 0x7d, 0x16, 0x91, 0xf8, 0xe2, 0x35, 0xdd, /* Byte value: 0x98 */ + 0x4b, 0x32, 0x34, 0x53, 0xff, 0xa5, 0xe3, 0xf5, 0x48, 0x61, 0xfd, 0x5e, 0x77, 0xb2, 0xf2, 0x64, /* Byte value: 0x99 */ + 0xff, 0xb8, 0x44, 0x89, 0x9f, 0xaf, 0x41, 0x58, 0x81, 0x31, 0xcb, 0x98, 0x28, 0x9c, 0x8e, 0xb3, /* Byte value: 0x9a */ + 0x81, 0x01, 0xb2, 0x08, 0x0e, 0xa8, 0xe4, 0x18, 0x39, 0x09, 0xde, 0xc5, 0xf5, 0x3c, 0xc3, 0x02, /* Byte value: 0x9b */ + 0xc1, 0x02, 0xa7, 0x10, 0x1c, 0x93, 0x0b, 0x30, 0x72, 0x12, 0x7f, 0x49, 0x29, 0x78, 0x45, 0x04, /* Byte value: 0x9c */ + 0xd1, 0x72, 0x33, 0x16, 0xf9, 0xed, 0x40, 0x3a, 0x10, 0x64, 0xc6, 0x6a, 0x1e, 0x69, 0x85, 0xe4, /* Byte value: 0x9d */ + 0x28, 0xd8, 0xa1, 0x0f, 0x9a, 0xc3, 0x52, 0x11, 0xf5, 0xd7, 0x0c, 0xb6, 0x94, 0xcb, 0x23, 0x73, /* Byte value: 0x9e */ + 0xb1, 0x91, 0xcd, 0x02, 0xe2, 0x2a, 0x39, 0x06, 0x9f, 0x93, 0xd6, 0xa0, 0xac, 0x0f, 0x40, 0xe1, /* Byte value: 0x9f */ + 0x9b, 0x47, 0x9f, 0x7d, 0x2c, 0x96, 0x5a, 0x87, 0xf7, 0x3a, 0x64, 0x2a, 0xe7, 0x6f, 0x7b, 0x8e, /* Byte value: 0xa0 */ + 0xdd, 0x56, 0x5c, 0xf5, 0xc2, 0x2c, 0xe6, 0xdc, 0xd8, 0xa3, 0xc4, 0xe2, 0x99, 0x15, 0xd5, 0xac, /* Byte value: 0xa1 */ + 0x23, 0xe9, 0x80, 0x44, 0x77, 0x5d, 0x5e, 0xcc, 0xf6, 0xad, 0x50, 0x64, 0x3f, 0x3d, 0x57, 0x11, /* Byte value: 0xa2 */ + 0x3f, 0xbd, 0x7b, 0xa1, 0xa9, 0xe2, 0xb3, 0x20, 0x5c, 0x1c, 0xeb, 0xcf, 0x8f, 0x50, 0xc7, 0xb9, /* Byte value: 0xa3 */ + 0x02, 0x0e, 0xf3, 0x70, 0x54, 0x7f, 0x31, 0x90, 0x9d, 0x7e, 0xbe, 0x3c, 0xdf, 0xab, 0x18, 0x1c, /* Byte value: 0xa4 */ + 0x10, 0x70, 0x94, 0x06, 0xe5, 0x7e, 0x4b, 0x0a, 0x62, 0x76, 0xb9, 0x23, 0x37, 0x11, 0xc0, 0xe0, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x1a, 0x46, 0x2d, 0x75, 0x22, 0x3e, 0xbe, 
0x9f, 0xce, 0x33, 0xba, 0xef, 0x12, 0x53, 0xb8, 0x8c, /* Byte value: 0xa7 */ + 0x60, 0xe3, 0xfe, 0x14, 0x1b, 0xc7, 0x79, 0x3c, 0x8f, 0xf7, 0x10, 0xca, 0xb2, 0x66, 0xc5, 0x05, /* Byte value: 0xa8 */ + 0x5a, 0x45, 0x38, 0x6d, 0x30, 0x05, 0x51, 0xb7, 0x85, 0x28, 0x1b, 0x63, 0xce, 0x17, 0x3e, 0x8a, /* Byte value: 0xa9 */ + 0x3e, 0xba, 0xe3, 0x99, 0x83, 0x3c, 0x4a, 0x68, 0xf3, 0x23, 0xb4, 0xd1, 0x01, 0xe4, 0xcb, 0xb7, /* Byte value: 0xaa */ + 0x6a, 0xd5, 0x47, 0x67, 0xdc, 0x87, 0x8c, 0xa9, 0x23, 0xb2, 0x13, 0x06, 0x97, 0x24, 0xbd, 0x69, /* Byte value: 0xab */ + 0x71, 0x94, 0xf2, 0x2a, 0xd4, 0x67, 0xcb, 0x7e, 0x42, 0xbe, 0xf6, 0xf7, 0x0b, 0xc3, 0x09, 0xeb, /* Byte value: 0xac */ + 0x75, 0x88, 0xd7, 0xca, 0x7c, 0x99, 0xa9, 0x9d, 0xbb, 0x42, 0x49, 0x8f, 0x76, 0x56, 0x39, 0xd3, /* Byte value: 0xad */ + 0xb7, 0x83, 0x1b, 0x92, 0x1e, 0xab, 0x6a, 0x75, 0xfb, 0x11, 0xd7, 0xe4, 0x0e, 0x31, 0x68, 0xc5, /* Byte value: 0xae */ + 0x7a, 0xa5, 0xd3, 0x61, 0x39, 0xf9, 0xc7, 0xa3, 0x41, 0xc4, 0xaa, 0x25, 0xa0, 0x35, 0x7d, 0x89, /* Byte value: 0xaf */ + 0xd9, 0x4a, 0x79, 0x15, 0x6a, 0xd2, 0x84, 0x3f, 0x21, 0x5f, 0x7b, 0x9a, 0xe4, 0x80, 0xe5, 0x94, /* Byte value: 0xb0 */ + 0x6c, 0xc7, 0x91, 0xf7, 0x20, 0x06, 0xdf, 0xda, 0x47, 0x30, 0x12, 0x42, 0x35, 0x1a, 0x95, 0x4d, /* Byte value: 0xb1 */ + 0x93, 0x7f, 0xd5, 0x7e, 0xbf, 0xa9, 0x9e, 0x82, 0xc6, 0x01, 0xd9, 0xda, 0x1d, 0x86, 0x1b, 0xfe, /* Byte value: 0xb2 */ + 0x45, 0x18, 0xa8, 0xc0, 0x90, 0x1b, 0x74, 0x83, 0x1d, 0xd8, 0x41, 0xea, 0x2f, 0x65, 0xba, 0x30, /* Byte value: 0xb3 */ + 0x9a, 0x40, 0x07, 0x45, 0x06, 0x48, 0xa3, 0xcf, 0x58, 0x05, 0x3b, 0x34, 0x69, 0xdb, 0x77, 0x80, /* Byte value: 0xb4 */ + 0x89, 0x39, 0xf8, 0x0b, 0x9d, 0x97, 0x20, 0x1d, 0x08, 0x32, 0x63, 0x35, 0x0f, 0xd5, 0xa3, 0x72, /* Byte value: 0xb5 */ + 0x3a, 0xa6, 0xc6, 0x79, 0x2b, 0xc2, 0x28, 0x8b, 0x0a, 0xdf, 0x0b, 0xa9, 0x7c, 0x71, 0xfb, 0x8f, /* Byte value: 0xb6 */ + 0x78, 0xab, 0x20, 0x11, 0x6d, 0x86, 0xf6, 0x33, 0xdc, 0xba, 0x14, 0x19, 0x7f, 0x9e, 0x65, 0x95, /* Byte value: 0xb7 */ + 0x1d, 0x53, 0x63, 0xdd, 0xf4, 0x61, 0x14, 0xa4, 0x05, 0x8e, 0xe4, 0xb5, 0x3e, 0xd9, 0x9c, 0xa6, /* Byte value: 0xb8 */ + 0x8a, 0x30, 0x93, 0x43, 0xe3, 0x36, 0xe8, 0xc5, 0x3a, 0x73, 0x82, 0x17, 0x5e, 0xca, 0xb7, 0x60, /* Byte value: 0xb9 */ + 0x97, 0x63, 0xf0, 0x9e, 0x17, 0x57, 0xfc, 0x61, 0x3f, 0xfd, 0x66, 0xa2, 0x60, 0x13, 0x2b, 0xc6, /* Byte value: 0xba */ + 0xf8, 0xad, 0x0a, 0x21, 0x49, 0xf0, 0xeb, 0x63, 0x4a, 0x8c, 0x95, 0xc2, 0x04, 0x16, 0xaa, 0x99, /* Byte value: 0xbb */ + 0xf9, 0xaa, 0x92, 0x19, 0x63, 0x2e, 0x12, 0x2b, 0xe5, 0xb3, 0xca, 0xdc, 0x8a, 0xa2, 0xa6, 0x97, /* Byte value: 0xbc */ + 0x58, 0x4b, 0xcb, 0x1d, 0x64, 0x7a, 0x60, 0x27, 0x18, 0x56, 0xa5, 0x5f, 0x11, 0xbc, 0x26, 0x96, /* Byte value: 0xbd */ + 0x2c, 0xc4, 0x84, 0xef, 0x32, 0x3d, 0x30, 0xf2, 0x0c, 0x2b, 0xb3, 0xce, 0xe9, 0x5e, 0x13, 0x4b, /* Byte value: 0xbe */ + 0x06, 0x12, 0xd6, 0x90, 0xfc, 0x81, 0x53, 0x73, 0x64, 0x82, 0x01, 0x44, 0xa2, 0x3e, 0x28, 0x24, /* Byte value: 0xbf */ + 0xbc, 0xb2, 0x3a, 0xd9, 0xf3, 0x35, 0x66, 0xa8, 0xf8, 0x6b, 0x8b, 0x36, 0xa5, 0xc7, 0x1c, 0xa7, /* Byte value: 0xc0 */ + 0x9c, 0x52, 0xd1, 0xd5, 0xfa, 0xc9, 0xf0, 0xbc, 0x3c, 0x87, 0x3a, 0x70, 0xcb, 0xe5, 0x5f, 0xa4, /* Byte value: 0xc1 */ + 0x55, 0x68, 0x3c, 0xc6, 0x75, 0x65, 0x3f, 0x89, 0x7f, 0xae, 0xf8, 0xc9, 0x18, 0x74, 0x7a, 0xd0, /* Byte value: 0xc2 */ + 0x7c, 0xb7, 0x05, 0xf1, 0xc5, 0x78, 0x94, 0xd0, 0x25, 0x46, 0xab, 0x61, 0x02, 0x0b, 0x55, 0xad, /* Byte value: 0xc3 */ + 0x2a, 0xd6, 0x52, 0x7f, 0xce, 0xbc, 0x63, 0x81, 0x68, 0xa9, 0xb2, 0x8a, 0x4b, 0x60, 
0x3b, 0x6f, /* Byte value: 0xc4 */ + 0x3b, 0xa1, 0x5e, 0x41, 0x01, 0x1c, 0xd1, 0xc3, 0xa5, 0xe0, 0x54, 0xb7, 0xf2, 0xc5, 0xf7, 0x81, /* Byte value: 0xc5 */ + 0x9f, 0x5b, 0xba, 0x9d, 0x84, 0x68, 0x38, 0x64, 0x0e, 0xc6, 0xdb, 0x52, 0x9a, 0xfa, 0x4b, 0xb6, /* Byte value: 0xc6 */ + 0xc6, 0x17, 0xe9, 0xb8, 0xca, 0xcc, 0xa1, 0x0b, 0xb9, 0xaf, 0x21, 0x13, 0x05, 0xf2, 0x61, 0x2e, /* Byte value: 0xc7 */ + 0x1c, 0x54, 0xfb, 0xe5, 0xde, 0xbf, 0xed, 0xec, 0xaa, 0xb1, 0xbb, 0xab, 0xb0, 0x6d, 0x90, 0xa8, /* Byte value: 0xc8 */ + 0xfe, 0xbf, 0xdc, 0xb1, 0xb5, 0x71, 0xb8, 0x10, 0x2e, 0x0e, 0x94, 0x86, 0xa6, 0x28, 0x82, 0xbd, /* Byte value: 0xc9 */ + 0x62, 0xed, 0x0d, 0x64, 0x4f, 0xb8, 0x48, 0xac, 0x12, 0x89, 0xae, 0xf6, 0x6d, 0xcd, 0xdd, 0x19, /* Byte value: 0xca */ + 0x7d, 0xb0, 0x9d, 0xc9, 0xef, 0xa6, 0x6d, 0x98, 0x8a, 0x79, 0xf4, 0x7f, 0x8c, 0xbf, 0x59, 0xa3, /* Byte value: 0xcb */ + 0x21, 0xe7, 0x73, 0x34, 0x23, 0x22, 0x6f, 0x5c, 0x6b, 0xd3, 0xee, 0x58, 0xe0, 0x96, 0x4f, 0x0d, /* Byte value: 0xcc */ + 0xc0, 0x05, 0x3f, 0x28, 0x36, 0x4d, 0xf2, 0x78, 0xdd, 0x2d, 0x20, 0x57, 0xa7, 0xcc, 0x49, 0x0a, /* Byte value: 0xcd */ + 0xf2, 0x9b, 0xb3, 0x52, 0x8e, 0xb0, 0x1e, 0xf6, 0xe6, 0xc9, 0x96, 0x0e, 0x21, 0x54, 0xd2, 0xf5, /* Byte value: 0xce */ + 0xf7, 0x80, 0x0e, 0x8a, 0x0c, 0x90, 0x85, 0x5d, 0xb0, 0x0a, 0x76, 0x68, 0xd2, 0x75, 0xee, 0xc3, /* Byte value: 0xcf */ + 0x33, 0x99, 0x14, 0x42, 0x92, 0x23, 0x15, 0xc6, 0x94, 0xdb, 0xe9, 0x47, 0x08, 0x2c, 0x97, 0xf1, /* Byte value: 0xd0 */ + 0x98, 0x4e, 0xf4, 0x35, 0x52, 0x37, 0x92, 0x5f, 0xc5, 0x7b, 0x85, 0x08, 0xb6, 0x70, 0x6f, 0x9c, /* Byte value: 0xd1 */ + 0xa9, 0xd9, 0x13, 0x07, 0x94, 0x6b, 0xb6, 0x09, 0xcc, 0xde, 0xd2, 0x73, 0x61, 0xf7, 0xe0, 0x71, /* Byte value: 0xd2 */ + 0x82, 0x08, 0xd9, 0x40, 0x70, 0x09, 0x2c, 0xc0, 0x0b, 0x48, 0x3f, 0xe7, 0xa4, 0x23, 0xd7, 0x10, /* Byte value: 0xd3 */ + 0x4d, 0x20, 0xe2, 0xc3, 0x03, 0x24, 0xb0, 0x86, 0x2c, 0xe3, 0xfc, 0x1a, 0xd5, 0x8c, 0xda, 0x40, /* Byte value: 0xd4 */ + 0xf4, 0x89, 0x65, 0xc2, 0x72, 0x31, 0x4d, 0x85, 0x82, 0x4b, 0x97, 0x4a, 0x83, 0x6a, 0xfa, 0xd1, /* Byte value: 0xd5 */ + 0xf6, 0x87, 0x96, 0xb2, 0x26, 0x4e, 0x7c, 0x15, 0x1f, 0x35, 0x29, 0x76, 0x5c, 0xc1, 0xe2, 0xcd, /* Byte value: 0xd6 */ + 0x14, 0x6c, 0xb1, 0xe6, 0x4d, 0x80, 0x29, 0xe9, 0x9b, 0x8a, 0x06, 0x5b, 0x4a, 0x84, 0xf0, 0xd8, /* Byte value: 0xd7 */ + 0x7b, 0xa2, 0x4b, 0x59, 0x13, 0x27, 0x3e, 0xeb, 0xee, 0xfb, 0xf5, 0x3b, 0x2e, 0x81, 0x71, 0x87, /* Byte value: 0xd8 */ + 0x79, 0xac, 0xb8, 0x29, 0x47, 0x58, 0x0f, 0x7b, 0x73, 0x85, 0x4b, 0x07, 0xf1, 0x2a, 0x69, 0x9b, /* Byte value: 0xd9 */ + 0x17, 0x65, 0xda, 0xae, 0x33, 0x21, 0xe1, 0x31, 0xa9, 0xcb, 0xe7, 0x79, 0x1b, 0x9b, 0xe4, 0xca, /* Byte value: 0xda */ + 0xf0, 0x95, 0x40, 0x22, 0xda, 0xcf, 0x2f, 0x66, 0x7b, 0xb7, 0x28, 0x32, 0xfe, 0xff, 0xca, 0xe9, /* Byte value: 0xdb */ + 0xbf, 0xbb, 0x51, 0x91, 0x8d, 0x94, 0xae, 0x70, 0xca, 0x2a, 0x6a, 0x14, 0xf4, 0xd8, 0x08, 0xb5, /* Byte value: 0xdc */ + 0xcf, 0x28, 0x3b, 0x83, 0x73, 0x2d, 0x9c, 0x46, 0x27, 0xab, 0xc3, 0xfd, 0x71, 0xaf, 0x0d, 0x50, /* Byte value: 0xdd */ + 0x01, 0x07, 0x98, 0x38, 0x2a, 0xde, 0xf9, 0x48, 0xaf, 0x3f, 0x5f, 0x1e, 0x8e, 0xb4, 0x0c, 0x0e, /* Byte value: 0xde */ + 0xed, 0xc6, 0x23, 0xff, 0x2e, 0xae, 0x3b, 0xc2, 0x7e, 0x39, 0xcc, 0x87, 0xc0, 0x26, 0x56, 0x4f, /* Byte value: 0xdf */ + 0xcc, 0x21, 0x50, 0xcb, 0x0d, 0x8c, 0x54, 0x9e, 0x15, 0xea, 0x22, 0xdf, 0x20, 0xb0, 0x19, 0x42, /* Byte value: 0xe0 */ + 0x15, 0x6b, 0x29, 0xde, 0x67, 0x5e, 0xd0, 0xa1, 0x34, 0xb5, 0x59, 0x45, 0xc4, 0x30, 0xfc, 0xd6, /* Byte value: 0xe1 */ + 0x27, 
0xf5, 0xa5, 0xa4, 0xdf, 0xa3, 0x3c, 0x2f, 0x0f, 0x51, 0xef, 0x1c, 0x42, 0xa8, 0x67, 0x29, /* Byte value: 0xe2 */ + 0xa3, 0xef, 0xaa, 0x74, 0x53, 0x2b, 0x43, 0x9c, 0x60, 0x9b, 0xd1, 0xbf, 0x44, 0xb5, 0x98, 0x1d, /* Byte value: 0xe3 */ + 0x0b, 0x31, 0x21, 0x4b, 0xed, 0x9e, 0x0c, 0xdd, 0x03, 0x7a, 0x5c, 0xd2, 0xab, 0xf6, 0x74, 0x62, /* Byte value: 0xe4 */ + 0x38, 0xa8, 0x35, 0x09, 0x7f, 0xbd, 0x19, 0x1b, 0x97, 0xa1, 0xb5, 0x95, 0xa3, 0xda, 0xe3, 0x93, /* Byte value: 0xe5 */ + 0x57, 0x66, 0xcf, 0xb6, 0x21, 0x1a, 0x0e, 0x19, 0xe2, 0xd0, 0x46, 0xf5, 0xc7, 0xdf, 0x62, 0xcc, /* Byte value: 0xe6 */ + 0x67, 0xf6, 0xb0, 0xbc, 0xcd, 0x98, 0xd3, 0x07, 0x44, 0x4a, 0x4e, 0x90, 0x9e, 0xec, 0xe1, 0x2f, /* Byte value: 0xe7 */ + 0x83, 0x0f, 0x41, 0x78, 0x5a, 0xd7, 0xd5, 0x88, 0xa4, 0x77, 0x60, 0xf9, 0x2a, 0x97, 0xdb, 0x1e, /* Byte value: 0xe8 */ + 0x3c, 0xb4, 0x10, 0xe9, 0xd7, 0x43, 0x7b, 0xf8, 0x6e, 0x5d, 0x0a, 0xed, 0xde, 0x4f, 0xd3, 0xab, /* Byte value: 0xe9 */ + 0xfd, 0xb6, 0xb7, 0xf9, 0xcb, 0xd0, 0x70, 0xc8, 0x1c, 0x4f, 0x75, 0xa4, 0xf7, 0x37, 0x96, 0xaf, /* Byte value: 0xea */ + 0xac, 0xc2, 0xae, 0xdf, 0x16, 0x4b, 0x2d, 0xa2, 0x9a, 0x1d, 0x32, 0x15, 0x92, 0xd6, 0xdc, 0x47, /* Byte value: 0xeb */ + 0x95, 0x6d, 0x03, 0xee, 0x43, 0x28, 0xcd, 0xf1, 0xa2, 0x83, 0xd8, 0x9e, 0xbf, 0xb8, 0x33, 0xda, /* Byte value: 0xec */ + 0x51, 0x74, 0x19, 0x26, 0xdd, 0x9b, 0x5d, 0x6a, 0x86, 0x52, 0x47, 0xb1, 0x65, 0xe1, 0x4a, 0xe8, /* Byte value: 0xed */ + 0xd2, 0x7b, 0x58, 0x5e, 0x87, 0x4c, 0x88, 0xe2, 0x22, 0x25, 0x27, 0x48, 0x4f, 0x76, 0x91, 0xf6, /* Byte value: 0xee */ + 0xe1, 0xe2, 0x4c, 0x1c, 0x15, 0x6f, 0x9d, 0x24, 0xb6, 0xfe, 0xce, 0x0f, 0x47, 0x5a, 0x06, 0x07, /* Byte value: 0xef */ + 0xe4, 0xf9, 0xf1, 0xc4, 0x97, 0x4f, 0x06, 0x8f, 0xe0, 0x3d, 0x2e, 0x69, 0xb4, 0x7b, 0x3a, 0x31, /* Byte value: 0xf0 */ + 0xa6, 0xf4, 0x17, 0xac, 0xd1, 0x0b, 0xd8, 0x37, 0x36, 0x58, 0x31, 0xd9, 0xb7, 0x94, 0xa4, 0x2b, /* Byte value: 0xf1 */ + 0x2e, 0xca, 0x77, 0x9f, 0x66, 0x42, 0x01, 0x62, 0x91, 0x55, 0x0d, 0xf2, 0x36, 0xf5, 0x0b, 0x57, /* Byte value: 0xf2 */ + 0x9e, 0x5c, 0x22, 0xa5, 0xae, 0xb6, 0xc1, 0x2c, 0xa1, 0xf9, 0x84, 0x4c, 0x14, 0x4e, 0x47, 0xb8, /* Byte value: 0xf3 */ + 0xb0, 0x96, 0x55, 0x3a, 0xc8, 0xf4, 0xc0, 0x4e, 0x30, 0xac, 0x89, 0xbe, 0x22, 0xbb, 0x4c, 0xef, /* Byte value: 0xf4 */ + 0x04, 0x1c, 0x25, 0xe0, 0xa8, 0xfe, 0x62, 0xe3, 0xf9, 0xfc, 0xbf, 0x78, 0x7d, 0x95, 0x30, 0x38, /* Byte value: 0xf5 */ + 0xca, 0x33, 0x86, 0x5b, 0xf1, 0x0d, 0x07, 0xed, 0x71, 0x68, 0x23, 0x9b, 0x82, 0x8e, 0x31, 0x66, /* Byte value: 0xf6 */ + 0xe0, 0xe5, 0xd4, 0x24, 0x3f, 0xb1, 0x64, 0x6c, 0x19, 0xc1, 0x91, 0x11, 0xc9, 0xee, 0x0a, 0x09, /* Byte value: 0xf7 */ + 0x7e, 0xb9, 0xf6, 0x81, 0x91, 0x07, 0xa5, 0x40, 0xb8, 0x38, 0x15, 0x5d, 0xdd, 0xa0, 0x4d, 0xb1, /* Byte value: 0xf8 */ + 0x66, 0xf1, 0x28, 0x84, 0xe7, 0x46, 0x2a, 0x4f, 0xeb, 0x75, 0x11, 0x8e, 0x10, 0x58, 0xed, 0x21, /* Byte value: 0xf9 */ + 0x50, 0x73, 0x81, 0x1e, 0xf7, 0x45, 0xa4, 0x22, 0x29, 0x6d, 0x18, 0xaf, 0xeb, 0x55, 0x46, 0xe6, /* Byte value: 0xfa */ + 0x49, 0x3c, 0xc7, 0x23, 0xab, 0xda, 0xd2, 0x65, 0xd5, 0x1f, 0x43, 0x62, 0xa8, 0x19, 0xea, 0x78, /* Byte value: 0xfb */ + 0x5b, 0x42, 0xa0, 0x55, 0x1a, 0xdb, 0xa8, 0xff, 0x2a, 0x17, 0x44, 0x7d, 0x40, 0xa3, 0x32, 0x84, /* Byte value: 0xfc */ + 0x99, 0x49, 0x6c, 0x0d, 0x78, 0xe9, 0x6b, 0x17, 0x6a, 0x44, 0xda, 0x16, 0x38, 0xc4, 0x63, 0x92, /* Byte value: 0xfd */ + 0x68, 0xdb, 0xb4, 0x17, 0x88, 0xf8, 0xbd, 0x39, 0xbe, 0xcc, 0xad, 0x3a, 0x48, 0x8f, 0xa5, 0x75, /* Byte value: 0xfe */ + 0xe9, 0xda, 0x06, 0x1f, 0x86, 0x50, 0x59, 0x21, 
0x87, 0xc5, 0x73, 0xff, 0xbd, 0xb3, 0x66, 0x77, /* Byte value: 0xff */ + 0xc5, 0x1e, 0x82, 0xf0, 0xb4, 0x6d, 0x69, 0xd3, 0x8b, 0xee, 0xc0, 0x31, 0x54, 0xed, 0x75, 0x3c, /* Byte value: 0x00 */ + + /* Matrix row: 8 */ + 0xd7, 0x48, 0x12, 0xa3, 0xbe, 0x29, 0x25, 0xd9, 0x10, 0xec, 0xae, 0xdf, 0x6a, 0x2d, 0x01, 0xfc, /* Byte value: 0x01 */ + 0x63, 0x44, 0x11, 0xde, 0x60, 0x0e, 0x82, 0x6c, 0x92, 0x7c, 0xf2, 0x2a, 0x20, 0xcb, 0xa0, 0xee, /* Byte value: 0x02 */ + 0x5e, 0x13, 0x74, 0x22, 0x3d, 0x0d, 0xaf, 0x72, 0xfa, 0x27, 0xc7, 0x23, 0xaa, 0xd2, 0x47, 0xdd, /* Byte value: 0x03 */ + 0xaa, 0x8c, 0x23, 0x54, 0x8a, 0x01, 0x1b, 0x0a, 0x99, 0x88, 0x13, 0x07, 0xc7, 0xb6, 0x5d, 0x11, /* Byte value: 0x04 */ + 0xea, 0x1f, 0x77, 0x5f, 0xe3, 0x2a, 0x08, 0xc7, 0x78, 0xb7, 0x9b, 0xd6, 0xe0, 0x34, 0xe6, 0xcf, /* Byte value: 0x05 */ + 0x2a, 0x69, 0x8b, 0x42, 0x58, 0x57, 0x3d, 0x53, 0x98, 0xf6, 0xc0, 0x66, 0x89, 0x71, 0xe8, 0x6e, /* Byte value: 0x06 */ + 0x29, 0x16, 0xe4, 0x73, 0x84, 0x86, 0x44, 0x75, 0x7a, 0x4b, 0xfe, 0x14, 0x7c, 0x79, 0x4f, 0x31, /* Byte value: 0x07 */ + 0x9c, 0x8e, 0xc2, 0xa0, 0xaf, 0xee, 0x75, 0x63, 0xa6, 0xb0, 0x09, 0xcf, 0x65, 0x26, 0x32, 0x16, /* Byte value: 0x08 */ + 0xe1, 0x4a, 0xf3, 0x57, 0x9b, 0xc6, 0x4b, 0xb0, 0x2f, 0xd4, 0xb4, 0x17, 0xc8, 0xbd, 0x6e, 0xfb, /* Byte value: 0x09 */ + 0xa4, 0x58, 0x16, 0x0f, 0x55, 0x5d, 0xd3, 0x17, 0x2b, 0xef, 0x7e, 0x50, 0x33, 0x27, 0xff, 0xc4, /* Byte value: 0x0a */ + 0xeb, 0x8b, 0x52, 0xf1, 0x16, 0x65, 0x9e, 0x64, 0x26, 0xdc, 0x30, 0xf8, 0xb3, 0x8d, 0x3a, 0xfa, /* Byte value: 0x0b */ + 0x68, 0x11, 0x95, 0xd6, 0x18, 0xe2, 0xc1, 0x1b, 0xc5, 0x1f, 0xdd, 0xeb, 0x08, 0x42, 0x28, 0xda, /* Byte value: 0x0c */ + 0x9d, 0x1a, 0xe7, 0x0e, 0x5a, 0xa1, 0xe3, 0xc0, 0xf8, 0xdb, 0xa2, 0xe1, 0x36, 0x9f, 0xee, 0x23, /* Byte value: 0x0d */ + 0xae, 0x99, 0xb7, 0xa9, 0xd8, 0xfe, 0x06, 0xc3, 0x22, 0xe7, 0xfa, 0xbf, 0x48, 0x17, 0xab, 0xc5, /* Byte value: 0x0e */ + 0x28, 0x82, 0xc1, 0xdd, 0x71, 0xc9, 0xd2, 0xd6, 0x24, 0x20, 0x55, 0x3a, 0x2f, 0xc0, 0x93, 0x04, /* Byte value: 0x0f */ + 0xb7, 0x73, 0x6c, 0x4c, 0x02, 0xf6, 0xde, 0x93, 0x60, 0x2d, 0x62, 0x87, 0xbf, 0xee, 0x06, 0x4d, /* Byte value: 0x10 */ + 0x55, 0x46, 0xf0, 0x2a, 0x45, 0xe1, 0xec, 0x05, 0xad, 0x44, 0xe8, 0xe2, 0x82, 0x5b, 0xcf, 0xe9, /* Byte value: 0x11 */ + 0xd0, 0x22, 0xe9, 0x6f, 0x30, 0x07, 0x41, 0x36, 0x49, 0x3e, 0x79, 0x15, 0x10, 0x84, 0x50, 0x77, /* Byte value: 0x12 */ + 0xaf, 0x0d, 0x92, 0x07, 0x2d, 0xb1, 0x90, 0x60, 0x7c, 0x8c, 0x51, 0x91, 0x1b, 0xae, 0x77, 0xf0, /* Byte value: 0x13 */ + 0x62, 0xd0, 0x34, 0x70, 0x95, 0x41, 0x14, 0xcf, 0xcc, 0x17, 0x59, 0x04, 0x73, 0x72, 0x7c, 0xdb, /* Byte value: 0x14 */ + 0xf7, 0xe0, 0x38, 0x47, 0x6b, 0xdd, 0xcd, 0x5e, 0x81, 0x12, 0xea, 0x56, 0x98, 0x6c, 0xbd, 0x93, /* Byte value: 0x15 */ + 0xef, 0x9e, 0xc6, 0x0c, 0x44, 0x9a, 0x83, 0xad, 0x9d, 0xb3, 0xd9, 0x40, 0x3c, 0x2c, 0xcc, 0x2e, /* Byte value: 0x16 */ + 0xb3, 0x66, 0xf8, 0xb1, 0x50, 0x09, 0xc3, 0x5a, 0xdb, 0x42, 0x8b, 0x3f, 0x30, 0x4f, 0xf0, 0x99, /* Byte value: 0x17 */ + 0x2e, 0x7c, 0x1f, 0xbf, 0x0a, 0xa8, 0x20, 0x9a, 0x23, 0x99, 0x29, 0xde, 0x06, 0xd0, 0x1e, 0xba, /* Byte value: 0x18 */ + 0x96, 0x4f, 0x63, 0x06, 0x22, 0x4d, 0xa0, 0xb7, 0xaf, 0xb8, 0x8d, 0x20, 0x1e, 0x16, 0x66, 0x17, /* Byte value: 0x19 */ + 0x1f, 0x14, 0x05, 0x87, 0xa1, 0x69, 0x2a, 0x1c, 0x45, 0x73, 0xe4, 0xdc, 0xde, 0xe9, 0x20, 0x36, /* Byte value: 0x1a */ + 0xa5, 0xcc, 0x33, 0xa1, 0xa0, 0x12, 0x45, 0xb4, 0x75, 0x84, 0xd5, 0x7e, 0x60, 0x9e, 0x23, 0xf1, /* Byte value: 0x1b */ + 0x24, 0xbd, 0xbe, 0x19, 0x87, 0x0b, 0xf5, 0x4e, 0x2a, 0x91, 0xad, 
0x31, 0x7d, 0xe0, 0x4a, 0xbb, /* Byte value: 0x1c */ + 0x88, 0xcf, 0x43, 0x2f, 0x76, 0x6b, 0x1c, 0x08, 0xb4, 0xa0, 0xc2, 0xd2, 0x93, 0x46, 0x9a, 0x14, /* Byte value: 0x1d */ + 0xfe, 0x5e, 0xf6, 0xd0, 0x3a, 0xaf, 0x61, 0xac, 0x6a, 0xa7, 0x50, 0xcb, 0x16, 0x54, 0x4e, 0xcd, /* Byte value: 0x1e */ + 0x03, 0x7f, 0x6f, 0x31, 0xdc, 0xd1, 0x79, 0x26, 0xe2, 0xbd, 0x3e, 0x72, 0xf5, 0x08, 0xa7, 0x5f, /* Byte value: 0x1f */ + 0x86, 0x1b, 0x76, 0x74, 0xa9, 0x37, 0xd4, 0x15, 0x06, 0xc7, 0xaf, 0x85, 0x67, 0xd7, 0x38, 0xc1, /* Byte value: 0x20 */ + 0xf5, 0x0b, 0x72, 0xd8, 0x42, 0x43, 0x22, 0xdb, 0x3d, 0xc4, 0x7f, 0x0a, 0x3e, 0xdd, 0xc6, 0xf9, /* Byte value: 0x21 */ + 0xf0, 0x8a, 0xc3, 0x8b, 0xe5, 0xf3, 0xa9, 0xb1, 0xd8, 0xc0, 0x3d, 0x9c, 0xe2, 0xc5, 0xec, 0x18, /* Byte value: 0x22 */ + 0x64, 0x2e, 0xea, 0x12, 0xee, 0x20, 0xe6, 0x83, 0xcb, 0xae, 0x25, 0xe0, 0x5a, 0x62, 0xf1, 0x65, /* Byte value: 0x23 */ + 0x21, 0x3c, 0x0f, 0x4a, 0x20, 0xbb, 0x7e, 0x24, 0xcf, 0x95, 0xef, 0xa7, 0xa1, 0xf8, 0x60, 0x5a, /* Byte value: 0x24 */ + 0x1b, 0x01, 0x91, 0x7a, 0xf3, 0x96, 0x37, 0xd5, 0xfe, 0x1c, 0x0d, 0x64, 0x51, 0x48, 0xd6, 0xe2, /* Byte value: 0x25 */ + 0x1d, 0xff, 0x4f, 0x18, 0x88, 0xf7, 0xc5, 0x99, 0xf9, 0xa5, 0x71, 0x80, 0x78, 0x58, 0x5b, 0x5c, /* Byte value: 0x26 */ + 0x69, 0x85, 0xb0, 0x78, 0xed, 0xad, 0x57, 0xb8, 0x9b, 0x74, 0x76, 0xc5, 0x5b, 0xfb, 0xf4, 0xef, /* Byte value: 0x27 */ + 0x89, 0x5b, 0x66, 0x81, 0x83, 0x24, 0x8a, 0xab, 0xea, 0xcb, 0x69, 0xfc, 0xc0, 0xff, 0x46, 0x21, /* Byte value: 0x28 */ + 0x43, 0xec, 0x3b, 0x3a, 0xb5, 0xfa, 0x6a, 0xeb, 0x03, 0x82, 0xb6, 0xa3, 0xd2, 0x8a, 0x1c, 0x81, /* Byte value: 0x29 */ + 0xd8, 0x08, 0x02, 0x56, 0x94, 0x3a, 0x7b, 0x67, 0xfc, 0xe0, 0x68, 0xa6, 0xcd, 0x05, 0x7f, 0x1c, /* Byte value: 0x2a */ + 0x5b, 0x92, 0xc5, 0x71, 0x9a, 0xbd, 0x24, 0x18, 0x1f, 0x23, 0x85, 0xb5, 0x76, 0xca, 0x6d, 0x3c, /* Byte value: 0x2b */ + 0xd1, 0xb6, 0xcc, 0xc1, 0xc5, 0x48, 0xd7, 0x95, 0x17, 0x55, 0xd2, 0x3b, 0x43, 0x3d, 0x8c, 0x42, /* Byte value: 0x2c */ + 0x07, 0x6a, 0xfb, 0xcc, 0x8e, 0x2e, 0x64, 0xef, 0x59, 0xd2, 0xd7, 0xca, 0x7a, 0xa9, 0x51, 0x8b, /* Byte value: 0x2d */ + 0x0a, 0xc1, 0xa1, 0xa6, 0x8d, 0xa3, 0xd5, 0xd4, 0x09, 0x08, 0x84, 0xef, 0x7b, 0x30, 0x54, 0x01, /* Byte value: 0x2e */ + 0x25, 0x29, 0x9b, 0xb7, 0x72, 0x44, 0x63, 0xed, 0x74, 0xfa, 0x06, 0x1f, 0x2e, 0x59, 0x96, 0x8e, /* Byte value: 0x2f */ + 0xa3, 0x32, 0xed, 0xc3, 0xdb, 0x73, 0xb7, 0xf8, 0x72, 0x3d, 0xa9, 0x9a, 0x49, 0x8e, 0xae, 0x4f, /* Byte value: 0x30 */ + 0x22, 0x43, 0x60, 0x7b, 0xfc, 0x6a, 0x07, 0x02, 0x2d, 0x28, 0xd1, 0xd5, 0x54, 0xf0, 0xc7, 0x05, /* Byte value: 0x31 */ + 0x61, 0xaf, 0x5b, 0x41, 0x49, 0x90, 0x6d, 0xe9, 0x2e, 0xaa, 0x67, 0x76, 0x86, 0x7a, 0xdb, 0x84, /* Byte value: 0x32 */ + 0x14, 0x41, 0x81, 0x8f, 0xd9, 0x85, 0x69, 0x6b, 0x12, 0x10, 0xcb, 0x1d, 0xf6, 0x60, 0xa8, 0x02, /* Byte value: 0x33 */ + 0xa6, 0xb3, 0x5c, 0x90, 0x7c, 0xc3, 0x3c, 0x92, 0x97, 0x39, 0xeb, 0x0c, 0x95, 0x96, 0x84, 0xae, /* Byte value: 0x34 */ + 0x11, 0xc0, 0x30, 0xdc, 0x7e, 0x35, 0xe2, 0x01, 0xf7, 0x14, 0x89, 0x8b, 0x2a, 0x78, 0x82, 0xe3, /* Byte value: 0x35 */ + 0x02, 0xeb, 0x4a, 0x9f, 0x29, 0x9e, 0xef, 0x85, 0xbc, 0xd6, 0x95, 0x5c, 0xa6, 0xb1, 0x7b, 0x6a, /* Byte value: 0x36 */ + 0x2f, 0xe8, 0x3a, 0x11, 0xff, 0xe7, 0xb6, 0x39, 0x7d, 0xf2, 0x82, 0xf0, 0x55, 0x69, 0xc2, 0x8f, /* Byte value: 0x37 */ + 0xca, 0xb7, 0x5d, 0xbb, 0x36, 0xde, 0xe0, 0x40, 0xe9, 0x49, 0xdf, 0x5f, 0x12, 0x75, 0x5a, 0xa0, /* Byte value: 0x38 */ + 0x3c, 0xc3, 0x40, 0x52, 0xa8, 0x4c, 0xbb, 0xbd, 0x36, 0x30, 0x9e, 0x27, 0xd9, 0xa0, 0x3b, 0x06, /* Byte value: 
0x39 */ + 0x4e, 0x47, 0x61, 0x50, 0xb6, 0x77, 0xdb, 0xd0, 0x53, 0x58, 0xe5, 0x86, 0xd3, 0x13, 0x19, 0x0b, /* Byte value: 0x3a */ + 0x7d, 0xc4, 0x31, 0xf7, 0x34, 0x28, 0x3e, 0xd3, 0x89, 0x64, 0xbd, 0xd8, 0xad, 0x9b, 0x5c, 0xed, /* Byte value: 0x3b */ + 0xb9, 0xa7, 0x59, 0x17, 0xdd, 0xaa, 0x16, 0x8e, 0xd2, 0x4a, 0x0f, 0xd0, 0x4b, 0x7f, 0xa4, 0x98, /* Byte value: 0x3c */ + 0x80, 0xe5, 0xa8, 0x16, 0xd2, 0x56, 0x26, 0x59, 0x01, 0x7e, 0xd3, 0x61, 0x4e, 0xc7, 0xb5, 0x7f, /* Byte value: 0x3d */ + 0x04, 0x15, 0x94, 0xfd, 0x52, 0xff, 0x1d, 0xc9, 0xbb, 0x6f, 0xe9, 0xb8, 0x8f, 0xa1, 0xf6, 0xd4, /* Byte value: 0x3e */ + 0x32, 0x17, 0x75, 0x09, 0x77, 0x10, 0x73, 0xa0, 0x84, 0x57, 0xf3, 0x70, 0x2d, 0x31, 0x99, 0xd3, /* Byte value: 0x3f */ + 0xc6, 0x88, 0x22, 0x7f, 0xc0, 0x1c, 0xc7, 0xd8, 0xe7, 0xf8, 0x27, 0x54, 0x40, 0x55, 0x83, 0x1f, /* Byte value: 0x40 */ + 0x41, 0x07, 0x71, 0xa5, 0x9c, 0x64, 0x85, 0x6e, 0xbf, 0x54, 0x23, 0xff, 0x74, 0x3b, 0x67, 0xeb, /* Byte value: 0x41 */ + 0x0b, 0x55, 0x84, 0x08, 0x78, 0xec, 0x43, 0x77, 0x57, 0x63, 0x2f, 0xc1, 0x28, 0x89, 0x88, 0x34, /* Byte value: 0x42 */ + 0xfb, 0xdf, 0x47, 0x83, 0x9d, 0x1f, 0xea, 0xc6, 0x8f, 0xa3, 0x12, 0x5d, 0xca, 0x4c, 0x64, 0x2c, /* Byte value: 0x43 */ + 0x6f, 0x7b, 0x6e, 0x1a, 0x96, 0xcc, 0xa5, 0xf4, 0x9c, 0xcd, 0x0a, 0x21, 0x72, 0xeb, 0x79, 0x51, /* Byte value: 0x44 */ + 0x4b, 0xc6, 0xd0, 0x03, 0x11, 0xc7, 0x50, 0xba, 0xb6, 0x5c, 0xa7, 0x10, 0x0f, 0x0b, 0x33, 0xea, /* Byte value: 0x45 */ + 0xdc, 0x1d, 0x96, 0xab, 0xc6, 0xc5, 0x66, 0xae, 0x47, 0x8f, 0x81, 0x1e, 0x42, 0xa4, 0x89, 0xc8, /* Byte value: 0x46 */ + 0x95, 0x30, 0x0c, 0x37, 0xfe, 0x9c, 0xd9, 0x91, 0x4d, 0x05, 0xb3, 0x52, 0xeb, 0x1e, 0xc1, 0x48, /* Byte value: 0x47 */ + 0x84, 0xf0, 0x3c, 0xeb, 0x80, 0xa9, 0x3b, 0x90, 0xba, 0x11, 0x3a, 0xd9, 0xc1, 0x66, 0x43, 0xab, /* Byte value: 0x48 */ + 0xbb, 0x4c, 0x13, 0x88, 0xf4, 0x34, 0xf9, 0x0b, 0x6e, 0x9c, 0x9a, 0x8c, 0xed, 0xce, 0xdf, 0xf2, /* Byte value: 0x49 */ + 0xc7, 0x1c, 0x07, 0xd1, 0x35, 0x53, 0x51, 0x7b, 0xb9, 0x93, 0x8c, 0x7a, 0x13, 0xec, 0x5f, 0x2a, /* Byte value: 0x4a */ + 0x16, 0xaa, 0xcb, 0x10, 0xf0, 0x1b, 0x86, 0xee, 0xae, 0xc6, 0x5e, 0x41, 0x50, 0xd1, 0xd3, 0x68, /* Byte value: 0x4b */ + 0xde, 0xf6, 0xdc, 0x34, 0xef, 0x5b, 0x89, 0x2b, 0xfb, 0x59, 0x14, 0x42, 0xe4, 0x15, 0xf2, 0xa2, /* Byte value: 0x4c */ + 0xdd, 0x89, 0xb3, 0x05, 0x33, 0x8a, 0xf0, 0x0d, 0x19, 0xe4, 0x2a, 0x30, 0x11, 0x1d, 0x55, 0xfd, /* Byte value: 0x4d */ + 0x67, 0x51, 0x85, 0x23, 0x32, 0xf1, 0x9f, 0xa5, 0x29, 0x13, 0x1b, 0x92, 0xaf, 0x6a, 0x56, 0x3a, /* Byte value: 0x4e */ + 0xe0, 0xde, 0xd6, 0xf9, 0x6e, 0x89, 0xdd, 0x13, 0x71, 0xbf, 0x1f, 0x39, 0x9b, 0x04, 0xb2, 0xce, /* Byte value: 0x4f */ + 0xf4, 0x9f, 0x57, 0x76, 0xb7, 0x0c, 0xb4, 0x78, 0x63, 0xaf, 0xd4, 0x24, 0x6d, 0x64, 0x1a, 0xcc, /* Byte value: 0x50 */ + 0x48, 0xb9, 0xbf, 0x32, 0xcd, 0x16, 0x29, 0x9c, 0x54, 0xe1, 0x99, 0x62, 0xfa, 0x03, 0x94, 0xb5, /* Byte value: 0x51 */ + 0xe6, 0x20, 0x08, 0x9b, 0x15, 0xe8, 0x2f, 0x5f, 0x76, 0x06, 0x63, 0xdd, 0xb2, 0x14, 0x3f, 0x70, /* Byte value: 0x52 */ + 0x6c, 0x04, 0x01, 0x2b, 0x4a, 0x1d, 0xdc, 0xd2, 0x7e, 0x70, 0x34, 0x53, 0x87, 0xe3, 0xde, 0x0e, /* Byte value: 0x53 */ + 0x59, 0x79, 0x8f, 0xee, 0xb3, 0x23, 0xcb, 0x9d, 0xa3, 0xf5, 0x10, 0xe9, 0xd0, 0x7b, 0x16, 0x56, /* Byte value: 0x54 */ + 0x50, 0xc7, 0x41, 0x79, 0xe2, 0x51, 0x67, 0x6f, 0x48, 0x40, 0xaa, 0x74, 0x5e, 0x43, 0xe5, 0x08, /* Byte value: 0x55 */ + 0x78, 0x45, 0x80, 0xa4, 0x93, 0x98, 0xb5, 0xb9, 0x6c, 0x60, 0xff, 0x4e, 0x71, 0x83, 0x76, 0x0c, /* Byte value: 0x56 */ + 0xda, 0xe3, 0x48, 0xc9, 0xbd, 0xa4, 
0x94, 0xe2, 0x40, 0x36, 0xfd, 0xfa, 0x6b, 0xb4, 0x04, 0x76, /* Byte value: 0x57 */ + 0xb4, 0x0c, 0x03, 0x7d, 0xde, 0x27, 0xa7, 0xb5, 0x82, 0x90, 0x5c, 0xf5, 0x4a, 0xe6, 0xa1, 0x12, /* Byte value: 0x58 */ + 0x0c, 0x3f, 0x7f, 0xc4, 0xf6, 0xc2, 0x27, 0x98, 0x0e, 0xb1, 0xf8, 0x0b, 0x52, 0x20, 0xd9, 0xbf, /* Byte value: 0x59 */ + 0xf2, 0x61, 0x89, 0x14, 0xcc, 0x6d, 0x46, 0x34, 0x64, 0x16, 0xa8, 0xc0, 0x44, 0x74, 0x97, 0x72, /* Byte value: 0x5a */ + 0xbe, 0xcd, 0xa2, 0xdb, 0x53, 0x84, 0x72, 0x61, 0x8b, 0x98, 0xd8, 0x1a, 0x31, 0xd6, 0xf5, 0x13, /* Byte value: 0x5b */ + 0xf3, 0xf5, 0xac, 0xba, 0x39, 0x22, 0xd0, 0x97, 0x3a, 0x7d, 0x03, 0xee, 0x17, 0xcd, 0x4b, 0x47, /* Byte value: 0x5c */ + 0x91, 0x25, 0x98, 0xca, 0xac, 0x63, 0xc4, 0x58, 0xf6, 0x6a, 0x5a, 0xea, 0x64, 0xbf, 0x37, 0x9c, /* Byte value: 0x5d */ + 0x5c, 0xf8, 0x3e, 0xbd, 0x14, 0x93, 0x40, 0xf7, 0x46, 0xf1, 0x52, 0x7f, 0x0c, 0x63, 0x3c, 0xb7, /* Byte value: 0x5e */ + 0x17, 0x3e, 0xee, 0xbe, 0x05, 0x54, 0x10, 0x4d, 0xf0, 0xad, 0xf5, 0x6f, 0x03, 0x68, 0x0f, 0x5d, /* Byte value: 0x5f */ + 0x7f, 0x2f, 0x7b, 0x68, 0x1d, 0xb6, 0xd1, 0x56, 0x35, 0xb2, 0x28, 0x84, 0x0b, 0x2a, 0x27, 0x87, /* Byte value: 0x60 */ + 0x82, 0x0e, 0xe2, 0x89, 0xfb, 0xc8, 0xc9, 0xdc, 0xbd, 0xa8, 0x46, 0x3d, 0xe8, 0x76, 0xce, 0x15, /* Byte value: 0x61 */ + 0xc0, 0x76, 0xfc, 0x1d, 0xbb, 0x7d, 0x35, 0x94, 0xe0, 0x41, 0x5b, 0xb0, 0x69, 0x45, 0x0e, 0xa1, /* Byte value: 0x62 */ + 0xd5, 0xa3, 0x58, 0x3c, 0x97, 0xb7, 0xca, 0x5c, 0xac, 0x3a, 0x3b, 0x83, 0xcc, 0x9c, 0x7a, 0x96, /* Byte value: 0x63 */ + 0xd9, 0x9c, 0x27, 0xf8, 0x61, 0x75, 0xed, 0xc4, 0xa2, 0x8b, 0xc3, 0x88, 0x9e, 0xbc, 0xa3, 0x29, /* Byte value: 0x64 */ + 0xa0, 0x4d, 0x82, 0xf2, 0x07, 0xa2, 0xce, 0xde, 0x90, 0x80, 0x97, 0xe8, 0xbc, 0x86, 0x09, 0x10, /* Byte value: 0x65 */ + 0xa8, 0x67, 0x69, 0xcb, 0xa3, 0x9f, 0xf4, 0x8f, 0x25, 0x5e, 0x86, 0x5b, 0x61, 0x07, 0x26, 0x7b, /* Byte value: 0x66 */ + 0xad, 0xe6, 0xd8, 0x98, 0x04, 0x2f, 0x7f, 0xe5, 0xc0, 0x5a, 0xc4, 0xcd, 0xbd, 0x1f, 0x0c, 0x9a, /* Byte value: 0x67 */ + 0xba, 0xd8, 0x36, 0x26, 0x01, 0x7b, 0x6f, 0xa8, 0x30, 0xf7, 0x31, 0xa2, 0xbe, 0x77, 0x03, 0xc7, /* Byte value: 0x68 */ + 0xb1, 0x8d, 0xb2, 0x2e, 0x79, 0x97, 0x2c, 0xdf, 0x67, 0x94, 0x1e, 0x63, 0x96, 0xfe, 0x8b, 0xf3, /* Byte value: 0x69 */ + 0xe3, 0xa1, 0xb9, 0xc8, 0xb2, 0x58, 0xa4, 0x35, 0x93, 0x02, 0x21, 0x4b, 0x6e, 0x0c, 0x15, 0x91, /* Byte value: 0x6a */ + 0xb6, 0xe7, 0x49, 0xe2, 0xf7, 0xb9, 0x48, 0x30, 0x3e, 0x46, 0xc9, 0xa9, 0xec, 0x57, 0xda, 0x78, /* Byte value: 0x6b */ + 0x20, 0xa8, 0x2a, 0xe4, 0xd5, 0xf4, 0xe8, 0x87, 0x91, 0xfe, 0x44, 0x89, 0xf2, 0x41, 0xbc, 0x6f, /* Byte value: 0x6c */ + 0x9b, 0xe4, 0x39, 0x6c, 0x21, 0xc0, 0x11, 0x8c, 0xff, 0x62, 0xde, 0x05, 0x1f, 0x8f, 0x63, 0x9d, /* Byte value: 0x6d */ + 0x85, 0x64, 0x19, 0x45, 0x75, 0xe6, 0xad, 0x33, 0xe4, 0x7a, 0x91, 0xf7, 0x92, 0xdf, 0x9f, 0x9e, /* Byte value: 0x6e */ + 0x7e, 0xbb, 0x5e, 0xc6, 0xe8, 0xf9, 0x47, 0xf5, 0x6b, 0xd9, 0x83, 0xaa, 0x58, 0x93, 0xfb, 0xb2, /* Byte value: 0x6f */ + 0x60, 0x3b, 0x7e, 0xef, 0xbc, 0xdf, 0xfb, 0x4a, 0x70, 0xc1, 0xcc, 0x58, 0xd5, 0xc3, 0x07, 0xb1, /* Byte value: 0x70 */ + 0x37, 0x96, 0xc4, 0x5a, 0xd0, 0xa0, 0xf8, 0xca, 0x61, 0x53, 0xb1, 0xe6, 0xf1, 0x29, 0xb3, 0x32, /* Byte value: 0x71 */ + 0xc4, 0x63, 0x68, 0xe0, 0xe9, 0x82, 0x28, 0x5d, 0x5b, 0x2e, 0xb2, 0x08, 0xe6, 0xe4, 0xf8, 0x75, /* Byte value: 0x72 */ + 0xfa, 0x4b, 0x62, 0x2d, 0x68, 0x50, 0x7c, 0x65, 0xd1, 0xc8, 0xb9, 0x73, 0x99, 0xf5, 0xb8, 0x19, /* Byte value: 0x73 */ + 0x51, 0x53, 0x64, 0xd7, 0x17, 0x1e, 0xf1, 0xcc, 0x16, 0x2b, 0x01, 0x5a, 0x0d, 
0xfa, 0x39, 0x3d, /* Byte value: 0x74 */ + 0xc9, 0xc8, 0x32, 0x8a, 0xea, 0x0f, 0x99, 0x66, 0x0b, 0xf4, 0xe1, 0x2d, 0xe7, 0x7d, 0xfd, 0xff, /* Byte value: 0x75 */ + 0x01, 0x94, 0x25, 0xae, 0xf5, 0x4f, 0x96, 0xa3, 0x5e, 0x6b, 0xab, 0x2e, 0x53, 0xb9, 0xdc, 0x35, /* Byte value: 0x76 */ + 0x0d, 0xab, 0x5a, 0x6a, 0x03, 0x8d, 0xb1, 0x3b, 0x50, 0xda, 0x53, 0x25, 0x01, 0x99, 0x05, 0x8a, /* Byte value: 0x77 */ + 0x8a, 0x24, 0x09, 0xb0, 0x5f, 0xf5, 0xf3, 0x8d, 0x08, 0x76, 0x57, 0x8e, 0x35, 0xf7, 0xe1, 0x7e, /* Byte value: 0x78 */ + 0x34, 0xe9, 0xab, 0x6b, 0x0c, 0x71, 0x81, 0xec, 0x83, 0xee, 0x8f, 0x94, 0x04, 0x21, 0x14, 0x6d, /* Byte value: 0x79 */ + 0x4d, 0x38, 0x0e, 0x61, 0x6a, 0xa6, 0xa2, 0xf6, 0xb1, 0xe5, 0xdb, 0xf4, 0x26, 0x1b, 0xbe, 0x54, /* Byte value: 0x7a */ + 0xb0, 0x19, 0x97, 0x80, 0x8c, 0xd8, 0xba, 0x7c, 0x39, 0xff, 0xb5, 0x4d, 0xc5, 0x47, 0x57, 0xc6, /* Byte value: 0x7b */ + 0x49, 0x2d, 0x9a, 0x9c, 0x38, 0x59, 0xbf, 0x3f, 0x0a, 0x8a, 0x32, 0x4c, 0xa9, 0xba, 0x48, 0x80, /* Byte value: 0x7c */ + 0x92, 0x5a, 0xf7, 0xfb, 0x70, 0xb2, 0xbd, 0x7e, 0x14, 0xd7, 0x64, 0x98, 0x91, 0xb7, 0x90, 0xc3, /* Byte value: 0x7d */ + 0x18, 0x7e, 0xfe, 0x4b, 0x2f, 0x47, 0x4e, 0xf3, 0x1c, 0xa1, 0x33, 0x16, 0xa4, 0x40, 0x71, 0xbd, /* Byte value: 0x7e */ + 0x72, 0x84, 0x21, 0x02, 0x1e, 0x3b, 0x60, 0x6d, 0x65, 0x68, 0x7b, 0xa1, 0x0a, 0xb3, 0x22, 0x0d, /* Byte value: 0x7f */ + 0x53, 0xb8, 0x2e, 0x48, 0x3e, 0x80, 0x1e, 0x49, 0xaa, 0xfd, 0x94, 0x06, 0xab, 0x4b, 0x42, 0x57, /* Byte value: 0x80 */ + 0x4a, 0x52, 0xf5, 0xad, 0xe4, 0x88, 0xc6, 0x19, 0xe8, 0x37, 0x0c, 0x3e, 0x5c, 0xb2, 0xef, 0xdf, /* Byte value: 0x81 */ + 0x8d, 0x4e, 0xf2, 0x7c, 0xd1, 0xdb, 0x97, 0x62, 0x51, 0xa4, 0x80, 0x44, 0x4f, 0x5e, 0xb0, 0xf5, /* Byte value: 0x82 */ + 0xab, 0x18, 0x06, 0xfa, 0x7f, 0x4e, 0x8d, 0xa9, 0xc7, 0xe3, 0xb8, 0x29, 0x94, 0x0f, 0x81, 0x24, /* Byte value: 0x83 */ + 0x90, 0xb1, 0xbd, 0x64, 0x59, 0x2c, 0x52, 0xfb, 0xa8, 0x01, 0xf1, 0xc4, 0x37, 0x06, 0xeb, 0xa9, /* Byte value: 0x84 */ + 0x4f, 0xd3, 0x44, 0xfe, 0x43, 0x38, 0x4d, 0x73, 0x0d, 0x33, 0x4e, 0xa8, 0x80, 0xaa, 0xc5, 0x3e, /* Byte value: 0x85 */ + 0x9a, 0x70, 0x1c, 0xc2, 0xd4, 0x8f, 0x87, 0x2f, 0xa1, 0x09, 0x75, 0x2b, 0x4c, 0x36, 0xbf, 0xa8, /* Byte value: 0x86 */ + 0xdb, 0x77, 0x6d, 0x67, 0x48, 0xeb, 0x02, 0x41, 0x1e, 0x5d, 0x56, 0xd4, 0x38, 0x0d, 0xd8, 0x43, /* Byte value: 0x87 */ + 0xd6, 0xdc, 0x37, 0x0d, 0x4b, 0x66, 0xb3, 0x7a, 0x4e, 0x87, 0x05, 0xf1, 0x39, 0x94, 0xdd, 0xc9, /* Byte value: 0x88 */ + 0x1a, 0x95, 0xb4, 0xd4, 0x06, 0xd9, 0xa1, 0x76, 0xa0, 0x77, 0xa6, 0x4a, 0x02, 0xf1, 0x0a, 0xd7, /* Byte value: 0x89 */ + 0xbc, 0x26, 0xe8, 0x44, 0x7a, 0x1a, 0x9d, 0xe4, 0x37, 0x4e, 0x4d, 0x46, 0x97, 0x67, 0x8e, 0x79, /* Byte value: 0x8a */ + 0x10, 0x54, 0x15, 0x72, 0x8b, 0x7a, 0x74, 0xa2, 0xa9, 0x7f, 0x22, 0xa5, 0x79, 0xc1, 0x5e, 0xd6, /* Byte value: 0x8b */ + 0x93, 0xce, 0xd2, 0x55, 0x85, 0xfd, 0x2b, 0xdd, 0x4a, 0xbc, 0xcf, 0xb6, 0xc2, 0x0e, 0x4c, 0xf6, /* Byte value: 0x8c */ + 0x9e, 0x65, 0x88, 0x3f, 0x86, 0x70, 0x9a, 0xe6, 0x1a, 0x66, 0x9c, 0x93, 0xc3, 0x97, 0x49, 0x7c, /* Byte value: 0x8d */ + 0x97, 0xdb, 0x46, 0xa8, 0xd7, 0x02, 0x36, 0x14, 0xf1, 0xd3, 0x26, 0x0e, 0x4d, 0xaf, 0xba, 0x22, /* Byte value: 0x8e */ + 0x30, 0xfc, 0x3f, 0x96, 0x5e, 0x8e, 0x9c, 0x25, 0x38, 0x81, 0x66, 0x2c, 0x8b, 0x80, 0xe2, 0xb9, /* Byte value: 0x8f */ + 0x1e, 0x80, 0x20, 0x29, 0x54, 0x26, 0xbc, 0xbf, 0x1b, 0x18, 0x4f, 0xf2, 0x8d, 0x50, 0xfc, 0x03, /* Byte value: 0x90 */ + 0x0f, 0x40, 0x10, 0xf5, 0x2a, 0x13, 0x5e, 0xbe, 0xec, 0x0c, 0xc6, 0x79, 0xa7, 0x28, 0x7e, 0xe0, /* Byte value: 0x91 */ + 
0x66, 0xc5, 0xa0, 0x8d, 0xc7, 0xbe, 0x09, 0x06, 0x77, 0x78, 0xb0, 0xbc, 0xfc, 0xd3, 0x8a, 0x0f, /* Byte value: 0x92 */ + 0x77, 0x05, 0x90, 0x51, 0xb9, 0x8b, 0xeb, 0x07, 0x80, 0x6c, 0x39, 0x37, 0xd6, 0xab, 0x08, 0xec, /* Byte value: 0x93 */ + 0x40, 0x93, 0x54, 0x0b, 0x69, 0x2b, 0x13, 0xcd, 0xe1, 0x3f, 0x88, 0xd1, 0x27, 0x82, 0xbb, 0xde, /* Byte value: 0x94 */ + 0xa2, 0xa6, 0xc8, 0x6d, 0x2e, 0x3c, 0x21, 0x5b, 0x2c, 0x56, 0x02, 0xb4, 0x1a, 0x37, 0x72, 0x7a, /* Byte value: 0x95 */ + 0xc1, 0xe2, 0xd9, 0xb3, 0x4e, 0x32, 0xa3, 0x37, 0xbe, 0x2a, 0xf0, 0x9e, 0x3a, 0xfc, 0xd2, 0x94, /* Byte value: 0x96 */ + 0x6a, 0xfa, 0xdf, 0x49, 0x31, 0x7c, 0x2e, 0x9e, 0x79, 0xc9, 0x48, 0xb7, 0xae, 0xf3, 0x53, 0xb0, /* Byte value: 0x97 */ + 0x12, 0xbf, 0x5f, 0xed, 0xa2, 0xe4, 0x9b, 0x27, 0x15, 0xa9, 0xb7, 0xf9, 0xdf, 0x70, 0x25, 0xbc, /* Byte value: 0x98 */ + 0x54, 0xd2, 0xd5, 0x84, 0xb0, 0xae, 0x7a, 0xa6, 0xf3, 0x2f, 0x43, 0xcc, 0xd1, 0xe2, 0x13, 0xdc, /* Byte value: 0x99 */ + 0x5f, 0x87, 0x51, 0x8c, 0xc8, 0x42, 0x39, 0xd1, 0xa4, 0x4c, 0x6c, 0x0d, 0xf9, 0x6b, 0x9b, 0xe8, /* Byte value: 0x9a */ + 0xd3, 0x5d, 0x86, 0x5e, 0xec, 0xd6, 0x38, 0x10, 0xab, 0x83, 0x47, 0x67, 0xe5, 0x8c, 0xf7, 0x28, /* Byte value: 0x9b */ + 0x65, 0xba, 0xcf, 0xbc, 0x1b, 0x6f, 0x70, 0x20, 0x95, 0xc5, 0x8e, 0xce, 0x09, 0xdb, 0x2d, 0x50, /* Byte value: 0x9c */ + 0xa9, 0xf3, 0x4c, 0x65, 0x56, 0xd0, 0x62, 0x2c, 0x7b, 0x35, 0x2d, 0x75, 0x32, 0xbe, 0xfa, 0x4e, /* Byte value: 0x9d */ + 0x3d, 0x57, 0x65, 0xfc, 0x5d, 0x03, 0x2d, 0x1e, 0x68, 0x5b, 0x35, 0x09, 0x8a, 0x19, 0xe7, 0x33, /* Byte value: 0x9e */ + 0x44, 0x86, 0xc0, 0xf6, 0x3b, 0xd4, 0x0e, 0x04, 0x5a, 0x50, 0x61, 0x69, 0xa8, 0x23, 0x4d, 0x0a, /* Byte value: 0x9f */ + 0x81, 0x71, 0x8d, 0xb8, 0x27, 0x19, 0xb0, 0xfa, 0x5f, 0x15, 0x78, 0x4f, 0x1d, 0x7e, 0x69, 0x4a, /* Byte value: 0xa0 */ + 0xfc, 0xb5, 0xbc, 0x4f, 0x13, 0x31, 0x8e, 0x29, 0xd6, 0x71, 0xc5, 0x97, 0xb0, 0xe5, 0x35, 0xa7, /* Byte value: 0xa1 */ + 0xdf, 0x62, 0xf9, 0x9a, 0x1a, 0x14, 0x1f, 0x88, 0xa5, 0x32, 0xbf, 0x6c, 0xb7, 0xac, 0x2e, 0x97, /* Byte value: 0xa2 */ + 0x46, 0x6d, 0x8a, 0x69, 0x12, 0x4a, 0xe1, 0x81, 0xe6, 0x86, 0xf4, 0x35, 0x0e, 0x92, 0x36, 0x60, /* Byte value: 0xa3 */ + 0xf8, 0xa0, 0x28, 0xb2, 0x41, 0xce, 0x93, 0xe0, 0x6d, 0x1e, 0x2c, 0x2f, 0x3f, 0x44, 0xc3, 0x73, /* Byte value: 0xa4 */ + 0xcc, 0x49, 0x83, 0xd9, 0x4d, 0xbf, 0x12, 0x0c, 0xee, 0xf0, 0xa3, 0xbb, 0x3b, 0x65, 0xd7, 0x1e, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x52, 0x2c, 0x0b, 0xe6, 0xcb, 0xcf, 0x88, 0xea, 0xf4, 0x96, 0x3f, 0x28, 0xf8, 0xf2, 0x9e, 0x62, /* Byte value: 0xa7 */ + 0xed, 0x75, 0x8c, 0x93, 0x6d, 0x04, 0x6c, 0x28, 0x21, 0x65, 0x4c, 0x1c, 0x9a, 0x9d, 0xb7, 0x44, /* Byte value: 0xa8 */ + 0xe4, 0xcb, 0x42, 0x04, 0x3c, 0x76, 0xc0, 0xda, 0xca, 0xd0, 0xf6, 0x81, 0x14, 0xa5, 0x44, 0x1a, /* Byte value: 0xa9 */ + 0x3a, 0x3d, 0x9e, 0x30, 0xd3, 0x2d, 0x49, 0xf1, 0x31, 0x89, 0xe2, 0xc3, 0xf0, 0xb0, 0xb6, 0xb8, /* Byte value: 0xaa */ + 0x73, 0x10, 0x04, 0xac, 0xeb, 0x74, 0xf6, 0xce, 0x3b, 0x03, 0xd0, 0x8f, 0x59, 0x0a, 0xfe, 0x38, /* Byte value: 0xab */ + 0x5d, 0x6c, 0x1b, 0x13, 0xe1, 0xdc, 0xd6, 0x54, 0x18, 0x9a, 0xf9, 0x51, 0x5f, 0xda, 0xe0, 0x82, /* Byte value: 0xac */ + 0x6e, 0xef, 0x4b, 0xb4, 0x63, 0x83, 0x33, 0x57, 0xc2, 0xa6, 0xa1, 0x0f, 0x21, 0x52, 0xa5, 0x64, /* Byte value: 0xad */ + 0x8f, 0xa5, 0xb8, 0xe3, 0xf8, 0x45, 0x78, 0xe7, 0xed, 0x72, 0x15, 0x18, 0xe9, 0xef, 0xcb, 0x9f, /* Byte value: 0xae */ + 0xbf, 0x59, 0x87, 0x75, 0xa6, 0xcb, 0xe4, 
0xc2, 0xd5, 0xf3, 0x73, 0x34, 0x62, 0x6f, 0x29, 0x26, /* Byte value: 0xaf */ + 0xcf, 0x36, 0xec, 0xe8, 0x91, 0x6e, 0x6b, 0x2a, 0x0c, 0x4d, 0x9d, 0xc9, 0xce, 0x6d, 0x70, 0x41, /* Byte value: 0xb0 */ + 0xb8, 0x33, 0x7c, 0xb9, 0x28, 0xe5, 0x80, 0x2d, 0x8c, 0x21, 0xa4, 0xfe, 0x18, 0xc6, 0x78, 0xad, /* Byte value: 0xb1 */ + 0xe7, 0xb4, 0x2d, 0x35, 0xe0, 0xa7, 0xb9, 0xfc, 0x28, 0x6d, 0xc8, 0xf3, 0xe1, 0xad, 0xe3, 0x45, /* Byte value: 0xb2 */ + 0xf9, 0x34, 0x0d, 0x1c, 0xb4, 0x81, 0x05, 0x43, 0x33, 0x75, 0x87, 0x01, 0x6c, 0xfd, 0x1f, 0x46, /* Byte value: 0xb3 */ + 0xfd, 0x21, 0x99, 0xe1, 0xe6, 0x7e, 0x18, 0x8a, 0x88, 0x1a, 0x6e, 0xb9, 0xe3, 0x5c, 0xe9, 0x92, /* Byte value: 0xb4 */ + 0xb5, 0x98, 0x26, 0xd3, 0x2b, 0x68, 0x31, 0x16, 0xdc, 0xfb, 0xf7, 0xdb, 0x19, 0x5f, 0x7d, 0x27, /* Byte value: 0xb5 */ + 0x09, 0xbe, 0xce, 0x97, 0x51, 0x72, 0xac, 0xf2, 0xeb, 0xb5, 0xba, 0x9d, 0x8e, 0x38, 0xf3, 0x5e, /* Byte value: 0xb6 */ + 0x47, 0xf9, 0xaf, 0xc7, 0xe7, 0x05, 0x77, 0x22, 0xb8, 0xed, 0x5f, 0x1b, 0x5d, 0x2b, 0xea, 0x55, /* Byte value: 0xb7 */ + 0xe5, 0x5f, 0x67, 0xaa, 0xc9, 0x39, 0x56, 0x79, 0x94, 0xbb, 0x5d, 0xaf, 0x47, 0x1c, 0x98, 0x2f, /* Byte value: 0xb8 */ + 0x31, 0x68, 0x1a, 0x38, 0xab, 0xc1, 0x0a, 0x86, 0x66, 0xea, 0xcd, 0x02, 0xd8, 0x39, 0x3e, 0x8c, /* Byte value: 0xb9 */ + 0xd4, 0x37, 0x7d, 0x92, 0x62, 0xf8, 0x5c, 0xff, 0xf2, 0x51, 0x90, 0xad, 0x9f, 0x25, 0xa6, 0xa3, /* Byte value: 0xba */ + 0xe8, 0xf4, 0x3d, 0xc0, 0xca, 0xb4, 0xe7, 0x42, 0xc4, 0x61, 0x0e, 0x8a, 0x46, 0x85, 0x9d, 0xa5, /* Byte value: 0xbb */ + 0x94, 0xa4, 0x29, 0x99, 0x0b, 0xd3, 0x4f, 0x32, 0x13, 0x6e, 0x18, 0x7c, 0xb8, 0xa7, 0x1d, 0x7d, /* Byte value: 0xbc */ + 0x1c, 0x6b, 0x6a, 0xb6, 0x7d, 0xb8, 0x53, 0x3a, 0xa7, 0xce, 0xda, 0xae, 0x2b, 0xe1, 0x87, 0x69, /* Byte value: 0xbd */ + 0x0e, 0xd4, 0x35, 0x5b, 0xdf, 0x5c, 0xc8, 0x1d, 0xb2, 0x67, 0x6d, 0x57, 0xf4, 0x91, 0xa2, 0xd5, /* Byte value: 0xbe */ + 0xcb, 0x23, 0x78, 0x15, 0xc3, 0x91, 0x76, 0xe3, 0xb7, 0x22, 0x74, 0x71, 0x41, 0xcc, 0x86, 0x95, /* Byte value: 0xbf */ + 0x6d, 0x90, 0x24, 0x85, 0xbf, 0x52, 0x4a, 0x71, 0x20, 0x1b, 0x9f, 0x7d, 0xd4, 0x5a, 0x02, 0x3b, /* Byte value: 0xc0 */ + 0x36, 0x02, 0xe1, 0xf4, 0x25, 0xef, 0x6e, 0x69, 0x3f, 0x38, 0x1a, 0xc8, 0xa2, 0x90, 0x6f, 0x07, /* Byte value: 0xc1 */ + 0x35, 0x7d, 0x8e, 0xc5, 0xf9, 0x3e, 0x17, 0x4f, 0xdd, 0x85, 0x24, 0xba, 0x57, 0x98, 0xc8, 0x58, /* Byte value: 0xc2 */ + 0x74, 0x7a, 0xff, 0x60, 0x65, 0x5a, 0x92, 0x21, 0x62, 0xd1, 0x07, 0x45, 0x23, 0xa3, 0xaf, 0xb3, /* Byte value: 0xc3 */ + 0xc5, 0xf7, 0x4d, 0x4e, 0x1c, 0xcd, 0xbe, 0xfe, 0x05, 0x45, 0x19, 0x26, 0xb5, 0x5d, 0x24, 0x40, /* Byte value: 0xc4 */ + 0x75, 0xee, 0xda, 0xce, 0x90, 0x15, 0x04, 0x82, 0x3c, 0xba, 0xac, 0x6b, 0x70, 0x1a, 0x73, 0x86, /* Byte value: 0xc5 */ + 0xb2, 0xf2, 0xdd, 0x1f, 0xa5, 0x46, 0x55, 0xf9, 0x85, 0x29, 0x20, 0x11, 0x63, 0xf6, 0x2c, 0xac, /* Byte value: 0xc6 */ + 0xd2, 0xc9, 0xa3, 0xf0, 0x19, 0x99, 0xae, 0xb3, 0xf5, 0xe8, 0xec, 0x49, 0xb6, 0x35, 0x2b, 0x1d, /* Byte value: 0xc7 */ + 0x99, 0x0f, 0x73, 0xf3, 0x08, 0x5e, 0xfe, 0x09, 0x43, 0xb4, 0x4b, 0x59, 0xb9, 0x3e, 0x18, 0xf7, /* Byte value: 0xc8 */ + 0x23, 0xd7, 0x45, 0xd5, 0x09, 0x25, 0x91, 0xa1, 0x73, 0x43, 0x7a, 0xfb, 0x07, 0x49, 0x1b, 0x30, /* Byte value: 0xc9 */ + 0x15, 0xd5, 0xa4, 0x21, 0x2c, 0xca, 0xff, 0xc8, 0x4c, 0x7b, 0x60, 0x33, 0xa5, 0xd9, 0x74, 0x37, /* Byte value: 0xca */ + 0x08, 0x2a, 0xeb, 0x39, 0xa4, 0x3d, 0x3a, 0x51, 0xb5, 0xde, 0x11, 0xb3, 0xdd, 0x81, 0x2f, 0x6b, /* Byte value: 0xcb */ + 0x27, 0xc2, 0xd1, 0x28, 0x5b, 0xda, 0x8c, 0x68, 0xc8, 0x2c, 0x93, 0x43, 0x88, 0xe8, 
0xed, 0xe4, /* Byte value: 0xcc */ + 0x19, 0xea, 0xdb, 0xe5, 0xda, 0x08, 0xd8, 0x50, 0x42, 0xca, 0x98, 0x38, 0xf7, 0xf9, 0xad, 0x88, /* Byte value: 0xcd */ + 0x76, 0x91, 0xb5, 0xff, 0x4c, 0xc4, 0x7d, 0xa4, 0xde, 0x07, 0x92, 0x19, 0x85, 0x12, 0xd4, 0xd9, /* Byte value: 0xce */ + 0x39, 0x42, 0xf1, 0x01, 0x0f, 0xfc, 0x30, 0xd7, 0xd3, 0x34, 0xdc, 0xb1, 0x05, 0xb8, 0x11, 0xe7, /* Byte value: 0xcf */ + 0x13, 0x2b, 0x7a, 0x43, 0x57, 0xab, 0x0d, 0x84, 0x4b, 0xc2, 0x1c, 0xd7, 0x8c, 0xc9, 0xf9, 0x89, /* Byte value: 0xd0 */ + 0x05, 0x81, 0xb1, 0x53, 0xa7, 0xb0, 0x8b, 0x6a, 0xe5, 0x04, 0x42, 0x96, 0xdc, 0x18, 0x2a, 0xe1, /* Byte value: 0xd1 */ + 0xee, 0x0a, 0xe3, 0xa2, 0xb1, 0xd5, 0x15, 0x0e, 0xc3, 0xd8, 0x72, 0x6e, 0x6f, 0x95, 0x10, 0x1b, /* Byte value: 0xd2 */ + 0x57, 0xad, 0xba, 0xb5, 0x6c, 0x7f, 0x03, 0x80, 0x11, 0x92, 0x7d, 0xbe, 0x24, 0xea, 0xb4, 0x83, /* Byte value: 0xd3 */ + 0x9f, 0xf1, 0xad, 0x91, 0x73, 0x3f, 0x0c, 0x45, 0x44, 0x0d, 0x37, 0xbd, 0x90, 0x2e, 0x95, 0x49, /* Byte value: 0xd4 */ + 0xbd, 0xb2, 0xcd, 0xea, 0x8f, 0x55, 0x0b, 0x47, 0x69, 0x25, 0xe6, 0x68, 0xc4, 0xde, 0x52, 0x4c, /* Byte value: 0xd5 */ + 0x45, 0x12, 0xe5, 0x58, 0xce, 0x9b, 0x98, 0xa7, 0x04, 0x3b, 0xca, 0x47, 0xfb, 0x9a, 0x91, 0x3f, /* Byte value: 0xd6 */ + 0xff, 0xca, 0xd3, 0x7e, 0xcf, 0xe0, 0xf7, 0x0f, 0x34, 0xcc, 0xfb, 0xe5, 0x45, 0xed, 0x92, 0xf8, /* Byte value: 0xd7 */ + 0xc3, 0x09, 0x93, 0x2c, 0x67, 0xac, 0x4c, 0xb2, 0x02, 0xfc, 0x65, 0xc2, 0x9c, 0x4d, 0xa9, 0xfe, /* Byte value: 0xd8 */ + 0x3b, 0xa9, 0xbb, 0x9e, 0x26, 0x62, 0xdf, 0x52, 0x6f, 0xe2, 0x49, 0xed, 0xa3, 0x09, 0x6a, 0x8d, /* Byte value: 0xd9 */ + 0x7b, 0x3a, 0xef, 0x95, 0x4f, 0x49, 0xcc, 0x9f, 0x8e, 0xdd, 0xc1, 0x3c, 0x84, 0x8b, 0xd1, 0x53, /* Byte value: 0xda */ + 0x8e, 0x31, 0x9d, 0x4d, 0x0d, 0x0a, 0xee, 0x44, 0xb3, 0x19, 0xbe, 0x36, 0xba, 0x56, 0x17, 0xaa, /* Byte value: 0xdb */ + 0xe9, 0x60, 0x18, 0x6e, 0x3f, 0xfb, 0x71, 0xe1, 0x9a, 0x0a, 0xa5, 0xa4, 0x15, 0x3c, 0x41, 0x90, /* Byte value: 0xdc */ + 0xc8, 0x5c, 0x17, 0x24, 0x1f, 0x40, 0x0f, 0xc5, 0x55, 0x9f, 0x4a, 0x03, 0xb4, 0xc4, 0x21, 0xca, /* Byte value: 0xdd */ + 0x7c, 0x50, 0x14, 0x59, 0xc1, 0x67, 0xa8, 0x70, 0xd7, 0x0f, 0x16, 0xf6, 0xfe, 0x22, 0x80, 0xd8, /* Byte value: 0xde */ + 0x6b, 0x6e, 0xfa, 0xe7, 0xc4, 0x33, 0xb8, 0x3d, 0x27, 0xa2, 0xe3, 0x99, 0xfd, 0x4a, 0x8f, 0x85, /* Byte value: 0xdf */ + 0x4c, 0xac, 0x2b, 0xcf, 0x9f, 0xe9, 0x34, 0x55, 0xef, 0x8e, 0x70, 0xda, 0x75, 0xa2, 0x62, 0x61, /* Byte value: 0xe0 */ + 0x83, 0x9a, 0xc7, 0x27, 0x0e, 0x87, 0x5f, 0x7f, 0xe3, 0xc3, 0xed, 0x13, 0xbb, 0xcf, 0x12, 0x20, /* Byte value: 0xe1 */ + 0xec, 0xe1, 0xa9, 0x3d, 0x98, 0x4b, 0xfa, 0x8b, 0x7f, 0x0e, 0xe7, 0x32, 0xc9, 0x24, 0x6b, 0x71, /* Byte value: 0xe2 */ + 0x70, 0x6f, 0x6b, 0x9d, 0x37, 0xa5, 0x8f, 0xe8, 0xd9, 0xbe, 0xee, 0xfd, 0xac, 0x02, 0x59, 0x67, /* Byte value: 0xe3 */ + 0xe2, 0x35, 0x9c, 0x66, 0x47, 0x17, 0x32, 0x96, 0xcd, 0x69, 0x8a, 0x65, 0x3d, 0xb5, 0xc9, 0xa4, /* Byte value: 0xe4 */ + 0xf1, 0x1e, 0xe6, 0x25, 0x10, 0xbc, 0x3f, 0x12, 0x86, 0xab, 0x96, 0xb2, 0xb1, 0x7c, 0x30, 0x2d, /* Byte value: 0xe5 */ + 0xcd, 0xdd, 0xa6, 0x77, 0xb8, 0xf0, 0x84, 0xaf, 0xb0, 0x9b, 0x08, 0x95, 0x68, 0xdc, 0x0b, 0x2b, /* Byte value: 0xe6 */ + 0x5a, 0x06, 0xe0, 0xdf, 0x6f, 0xf2, 0xb2, 0xbb, 0x41, 0x48, 0x2e, 0x9b, 0x25, 0x73, 0xb1, 0x09, /* Byte value: 0xe7 */ + 0x2b, 0xfd, 0xae, 0xec, 0xad, 0x18, 0xab, 0xf0, 0xc6, 0x9d, 0x6b, 0x48, 0xda, 0xc8, 0x34, 0x5b, /* Byte value: 0xe8 */ + 0xc2, 0x9d, 0xb6, 0x82, 0x92, 0xe3, 0xda, 0x11, 0x5c, 0x97, 0xce, 0xec, 0xcf, 0xf4, 0x75, 0xcb, /* Byte value: 0xe9 */ + 0xa7, 
0x27, 0x79, 0x3e, 0x89, 0x8c, 0xaa, 0x31, 0xc9, 0x52, 0x40, 0x22, 0xc6, 0x2f, 0x58, 0x9b, /* Byte value: 0xea */ + 0xa1, 0xd9, 0xa7, 0x5c, 0xf2, 0xed, 0x58, 0x7d, 0xce, 0xeb, 0x3c, 0xc6, 0xef, 0x3f, 0xd5, 0x25, /* Byte value: 0xeb */ + 0x2c, 0x97, 0x55, 0x20, 0x23, 0x36, 0xcf, 0x1f, 0x9f, 0x4f, 0xbc, 0x82, 0xa0, 0x61, 0x65, 0xd0, /* Byte value: 0xec */ + 0x06, 0xfe, 0xde, 0x62, 0x7b, 0x61, 0xf2, 0x4c, 0x07, 0xb9, 0x7c, 0xe4, 0x29, 0x10, 0x8d, 0xbe, /* Byte value: 0xed */ + 0x2d, 0x03, 0x70, 0x8e, 0xd6, 0x79, 0x59, 0xbc, 0xc1, 0x24, 0x17, 0xac, 0xf3, 0xd8, 0xb9, 0xe5, /* Byte value: 0xee */ + 0x3e, 0x28, 0x0a, 0xcd, 0x81, 0xd2, 0x54, 0x38, 0x8a, 0xe6, 0x0b, 0x7b, 0x7f, 0x11, 0x40, 0x6c, /* Byte value: 0xef */ + 0x71, 0xfb, 0x4e, 0x33, 0xc2, 0xea, 0x19, 0x4b, 0x87, 0xd5, 0x45, 0xd3, 0xff, 0xbb, 0x85, 0x52, /* Byte value: 0xf0 */ + 0x3f, 0xbc, 0x2f, 0x63, 0x74, 0x9d, 0xc2, 0x9b, 0xd4, 0x8d, 0xa0, 0x55, 0x2c, 0xa8, 0x9c, 0x59, /* Byte value: 0xf1 */ + 0xf6, 0x74, 0x1d, 0xe9, 0x9e, 0x92, 0x5b, 0xfd, 0xdf, 0x79, 0x41, 0x78, 0xcb, 0xd5, 0x61, 0xa6, /* Byte value: 0xf2 */ + 0xce, 0xa2, 0xc9, 0x46, 0x64, 0x21, 0xfd, 0x89, 0x52, 0x26, 0x36, 0xe7, 0x9d, 0xd4, 0xac, 0x74, /* Byte value: 0xf3 */ + 0x38, 0xd6, 0xd4, 0xaf, 0xfa, 0xb3, 0xa6, 0x74, 0x8d, 0x5f, 0x77, 0x9f, 0x56, 0x01, 0xcd, 0xd2, /* Byte value: 0xf4 */ + 0x33, 0x83, 0x50, 0xa7, 0x82, 0x5f, 0xe5, 0x03, 0xda, 0x3c, 0x58, 0x5e, 0x7e, 0x88, 0x45, 0xe6, /* Byte value: 0xf5 */ + 0x87, 0x8f, 0x53, 0xda, 0x5c, 0x78, 0x42, 0xb6, 0x58, 0xac, 0x04, 0xab, 0x34, 0x6e, 0xe4, 0xf4, /* Byte value: 0xf6 */ + 0x42, 0x78, 0x1e, 0x94, 0x40, 0xb5, 0xfc, 0x48, 0x5d, 0xe9, 0x1d, 0x8d, 0x81, 0x33, 0xc0, 0xb4, /* Byte value: 0xf7 */ + 0x8c, 0xda, 0xd7, 0xd2, 0x24, 0x94, 0x01, 0xc1, 0x0f, 0xcf, 0x2b, 0x6a, 0x1c, 0xe7, 0x6c, 0xc0, /* Byte value: 0xf8 */ + 0x26, 0x56, 0xf4, 0x86, 0xae, 0x95, 0x1a, 0xcb, 0x96, 0x47, 0x38, 0x6d, 0xdb, 0x51, 0x31, 0xd1, /* Byte value: 0xf9 */ + 0x7a, 0xae, 0xca, 0x3b, 0xba, 0x06, 0x5a, 0x3c, 0xd0, 0xb6, 0x6a, 0x12, 0xd7, 0x32, 0x0d, 0x66, /* Byte value: 0xfa */ + 0xac, 0x72, 0xfd, 0x36, 0xf1, 0x60, 0xe9, 0x46, 0x9e, 0x31, 0x6f, 0xe3, 0xee, 0xa6, 0xd0, 0xaf, /* Byte value: 0xfb */ + 0x98, 0x9b, 0x56, 0x5d, 0xfd, 0x11, 0x68, 0xaa, 0x1d, 0xdf, 0xe0, 0x77, 0xea, 0x87, 0xc4, 0xc2, /* Byte value: 0xfc */ + 0x79, 0xd1, 0xa5, 0x0a, 0x66, 0xd7, 0x23, 0x1a, 0x32, 0x0b, 0x54, 0x60, 0x22, 0x3a, 0xaa, 0x39, /* Byte value: 0xfd */ + 0x8b, 0xb0, 0x2c, 0x1e, 0xaa, 0xba, 0x65, 0x2e, 0x56, 0x1d, 0xfc, 0xa0, 0x66, 0x4e, 0x3d, 0x4b, /* Byte value: 0xfe */ + 0x58, 0xed, 0xaa, 0x40, 0x46, 0x6c, 0x5d, 0x3e, 0xfd, 0x9e, 0xbb, 0xc7, 0x83, 0xc2, 0xca, 0x63, /* Byte value: 0xff */ + 0x56, 0x39, 0x9f, 0x1b, 0x99, 0x30, 0x95, 0x23, 0x4f, 0xf9, 0xd6, 0x90, 0x77, 0x53, 0x68, 0xb6, /* Byte value: 0x00 */ + + /* Matrix row: 9 */ + 0xa8, 0x2c, 0x9d, 0x1e, 0xd9, 0x7e, 0xa9, 0xb3, 0xc0, 0x01, 0x6b, 0x3d, 0xc4, 0xa3, 0x83, 0xb4, /* Byte value: 0x01 */ + 0xb4, 0x6b, 0xa4, 0x9d, 0x6c, 0x77, 0x14, 0x4c, 0x17, 0xa0, 0x80, 0x59, 0xd2, 0xde, 0x39, 0xaa, /* Byte value: 0x02 */ + 0x96, 0x95, 0xc3, 0xd6, 0x72, 0x8f, 0xd1, 0xd8, 0xb2, 0x47, 0xed, 0x28, 0x6d, 0x22, 0x15, 0xce, /* Byte value: 0x03 */ + 0x1e, 0xeb, 0x9c, 0x39, 0x0a, 0xe9, 0x43, 0xcd, 0x63, 0x5d, 0x9a, 0x2f, 0xd4, 0x54, 0xa5, 0x9d, /* Byte value: 0x04 */ + 0x8a, 0xd2, 0xfa, 0x55, 0xc7, 0x86, 0x6c, 0x27, 0x65, 0xe6, 0x06, 0x4c, 0x7b, 0x5f, 0xaf, 0xd0, /* Byte value: 0x05 */ + 0xf5, 0x99, 0x50, 0xe1, 0x53, 0x37, 0x1d, 0xda, 0x6f, 0xe8, 0x61, 0xe9, 0x49, 0x42, 0xb1, 0x07, /* Byte value: 0x06 */ + 0x9f, 0x36, 0xa1, 0x26, 
0x75, 0xf9, 0xd0, 0x09, 0x7d, 0x4f, 0x33, 0x03, 0x82, 0x73, 0x87, 0x27, /* Byte value: 0x07 */ + 0xa5, 0x14, 0x76, 0x59, 0x63, 0x0b, 0x97, 0x06, 0xa4, 0x32, 0x57, 0x80, 0x6c, 0xa0, 0x2f, 0x98, /* Byte value: 0x08 */ + 0x13, 0xd3, 0x77, 0x7e, 0xb0, 0x9c, 0x7d, 0x78, 0x07, 0x6e, 0xa6, 0x92, 0x7c, 0x57, 0x09, 0xb1, /* Byte value: 0x09 */ + 0x39, 0xd8, 0xc1, 0x98, 0x17, 0x62, 0xc6, 0x24, 0x37, 0xff, 0xcc, 0x0c, 0x4d, 0x0f, 0x59, 0x9c, /* Byte value: 0x0a */ + 0xac, 0xb7, 0x14, 0xa9, 0x64, 0x7d, 0x96, 0xd7, 0x6b, 0x3a, 0x89, 0xab, 0x83, 0xf1, 0xbd, 0x71, /* Byte value: 0x0b */ + 0x2d, 0x6a, 0x29, 0xb6, 0x1b, 0x6d, 0x05, 0x13, 0x75, 0x28, 0x20, 0x87, 0xd5, 0xd6, 0x9f, 0xcb, /* Byte value: 0x0c */ + 0x83, 0x71, 0x98, 0xa5, 0xc0, 0xf0, 0x6d, 0xf6, 0xaa, 0xee, 0xd8, 0x67, 0x94, 0x0e, 0x3d, 0x39, /* Byte value: 0x0d */ + 0x86, 0xbc, 0xa2, 0x4f, 0xc3, 0x83, 0x2d, 0x8b, 0x5b, 0xab, 0xe3, 0x35, 0xb2, 0xa9, 0xed, 0x5c, /* Byte value: 0x0e */ + 0xb9, 0x53, 0x4f, 0xda, 0xd6, 0x02, 0x2a, 0xf9, 0x73, 0x93, 0xbc, 0xe4, 0x7a, 0xdd, 0x95, 0x86, /* Byte value: 0x0f */ + 0x76, 0xe8, 0xc8, 0x44, 0x93, 0xc7, 0x70, 0x2c, 0xc5, 0x06, 0xb9, 0x8e, 0xdd, 0x4c, 0x8c, 0x3e, /* Byte value: 0x10 */ + 0x0f, 0x94, 0x4e, 0xfd, 0x05, 0x95, 0xc0, 0x87, 0xd0, 0xcf, 0x4d, 0xf6, 0x6a, 0x2a, 0xb3, 0xaf, /* Byte value: 0x11 */ + 0x5a, 0xd4, 0x52, 0xaf, 0x36, 0xda, 0x0a, 0x26, 0xea, 0x50, 0x40, 0xcd, 0x69, 0x6f, 0xfd, 0x55, /* Byte value: 0x12 */ + 0xa0, 0xd9, 0x4c, 0xb3, 0x60, 0x78, 0xd7, 0x7b, 0x55, 0x77, 0x6c, 0xd2, 0x4a, 0x07, 0xff, 0xfd, /* Byte value: 0x13 */ + 0x92, 0x0e, 0x4a, 0x61, 0xcf, 0x8c, 0xee, 0xbc, 0x19, 0x7c, 0x0f, 0xbe, 0x2a, 0x70, 0x2b, 0x0b, /* Byte value: 0x14 */ + 0xe2, 0xd1, 0xae, 0x28, 0x5e, 0xa8, 0x5f, 0xc6, 0xc3, 0xbd, 0x25, 0xed, 0x72, 0x47, 0x86, 0x73, /* Byte value: 0x15 */ + 0x34, 0xe0, 0x2a, 0xdf, 0xad, 0x17, 0xf8, 0x91, 0x53, 0xcc, 0xf0, 0xb1, 0xe5, 0x0c, 0xf5, 0xb0, /* Byte value: 0x16 */ + 0xee, 0xbf, 0xf6, 0x32, 0x5a, 0xad, 0x1e, 0x6a, 0xfd, 0xf0, 0xc0, 0x94, 0xbb, 0xb1, 0xc4, 0xff, /* Byte value: 0x17 */ + 0x6d, 0xce, 0x6e, 0x97, 0x9a, 0x5d, 0x73, 0x9c, 0x57, 0x1e, 0x18, 0xf3, 0x2f, 0xbf, 0xf9, 0xc6, /* Byte value: 0x18 */ + 0x1a, 0x70, 0x15, 0x8e, 0xb7, 0xea, 0x7c, 0xa9, 0xc8, 0x66, 0x78, 0xb9, 0x93, 0x06, 0x9b, 0x58, /* Byte value: 0x19 */ + 0x24, 0xc9, 0x4b, 0x46, 0x1c, 0x1b, 0x04, 0xc2, 0xba, 0x20, 0xfe, 0xac, 0x3a, 0x87, 0x0d, 0x22, /* Byte value: 0x1a */ + 0x1f, 0xbd, 0x2f, 0x64, 0xb4, 0x99, 0x3c, 0xd4, 0x39, 0x23, 0x43, 0xeb, 0xb5, 0xa1, 0x4b, 0x3d, /* Byte value: 0x1b */ + 0xd2, 0xaa, 0x0d, 0x40, 0x4e, 0xbc, 0x98, 0x33, 0x3b, 0x4a, 0x37, 0xca, 0xd0, 0x19, 0x4d, 0x06, /* Byte value: 0x1c */ + 0x18, 0xdc, 0xb0, 0x34, 0x08, 0x0a, 0x82, 0x9b, 0x7c, 0x9a, 0x09, 0xf2, 0x51, 0x2f, 0x84, 0xdb, /* Byte value: 0x1d */ + 0x37, 0x1a, 0x3c, 0x38, 0xac, 0x87, 0x79, 0xba, 0xbd, 0x4e, 0x58, 0x3e, 0x46, 0xd0, 0x04, 0x93, /* Byte value: 0x1e */ + 0x6a, 0xaf, 0xf1, 0xc7, 0x26, 0xce, 0xcd, 0xd3, 0x12, 0xa7, 0x52, 0xea, 0xcb, 0x31, 0x36, 0x20, /* Byte value: 0x1f */ + 0x3f, 0xef, 0xed, 0x95, 0x15, 0x81, 0x07, 0x72, 0x28, 0x38, 0x5f, 0xd1, 0xc8, 0x74, 0x78, 0xda, /* Byte value: 0x20 */ + 0xae, 0x1b, 0xb1, 0x13, 0xdb, 0x9d, 0x68, 0xe5, 0xdf, 0xc6, 0xf8, 0xe0, 0x41, 0xd8, 0xa2, 0xf2, /* Byte value: 0x21 */ + 0x10, 0x29, 0x61, 0x99, 0xb1, 0x0c, 0xfc, 0x53, 0xe9, 0xec, 0x0e, 0x1d, 0xdf, 0x8b, 0xf8, 0x92, /* Byte value: 0x22 */ + 0x46, 0x93, 0x6b, 0x2c, 0x83, 0xd3, 0xb7, 0xd9, 0x3d, 0xf1, 0xab, 0xa9, 0x7f, 0x12, 0x47, 0x4b, /* Byte value: 0x23 */ + 0x6c, 0x98, 0xdd, 0xca, 0x24, 0x2d, 0x0c, 0x85, 0x0d, 0x60, 0xc1, 
0x37, 0x4e, 0x4a, 0x17, 0x66, /* Byte value: 0x24 */ + 0xbc, 0x9e, 0x75, 0x30, 0xd5, 0x71, 0x6a, 0x84, 0x82, 0xd6, 0x87, 0xb6, 0x5c, 0x7a, 0x45, 0xe3, /* Byte value: 0x25 */ + 0x68, 0x03, 0x54, 0x7d, 0x99, 0x2e, 0x33, 0xe1, 0xa6, 0x5b, 0x23, 0xa1, 0x09, 0x18, 0x29, 0xa3, /* Byte value: 0x26 */ + 0x0b, 0x0f, 0xc7, 0x4a, 0xb8, 0x96, 0xff, 0xe3, 0x7b, 0xf4, 0xaf, 0x60, 0x2d, 0x78, 0x8d, 0x6a, /* Byte value: 0x27 */ + 0x3e, 0xb9, 0x5e, 0xc8, 0xab, 0xf1, 0x78, 0x6b, 0x72, 0x46, 0x86, 0x15, 0xa9, 0x81, 0x96, 0x7a, /* Byte value: 0x28 */ + 0xfe, 0x96, 0x97, 0xab, 0xeb, 0xa1, 0xe2, 0x39, 0x14, 0x1c, 0xce, 0x89, 0x64, 0x3a, 0x3c, 0x6d, /* Byte value: 0x29 */ + 0xa9, 0x7a, 0x2e, 0x43, 0x67, 0x0e, 0xd6, 0xaa, 0x9a, 0x7f, 0xb2, 0xf9, 0xa5, 0x56, 0x6d, 0x14, /* Byte value: 0x2a */ + 0x28, 0xa7, 0x13, 0x5c, 0x18, 0x1e, 0x45, 0x6e, 0x84, 0x6d, 0x1b, 0xd5, 0xf3, 0x71, 0x4f, 0xae, /* Byte value: 0x2b */ + 0x7c, 0xb1, 0xbc, 0x53, 0x95, 0x21, 0xf0, 0xd6, 0xe4, 0x8c, 0xcf, 0x2a, 0x91, 0xc1, 0xef, 0xf4, /* Byte value: 0x2c */ + 0xf2, 0xf8, 0xcf, 0xb1, 0xef, 0xa4, 0xa3, 0x95, 0x2a, 0x51, 0x2b, 0xf0, 0xad, 0xcc, 0x7e, 0xe1, /* Byte value: 0x2d */ + 0xbf, 0x64, 0x63, 0xd7, 0xd4, 0xe1, 0xeb, 0xaf, 0x6c, 0x54, 0x2f, 0x39, 0xff, 0xa6, 0xb4, 0xc0, /* Byte value: 0x2e */ + 0xf4, 0xcf, 0xe3, 0xbc, 0xed, 0x47, 0x62, 0xc3, 0x35, 0x96, 0xb8, 0x2d, 0x28, 0xb7, 0x5f, 0xa7, /* Byte value: 0x2f */ + 0xcb, 0x20, 0x0e, 0x29, 0xf8, 0xc6, 0x65, 0xb1, 0x1d, 0xae, 0xe7, 0xfc, 0xe0, 0xc3, 0x27, 0x7d, /* Byte value: 0x30 */ + 0x06, 0x37, 0x2c, 0x0d, 0x02, 0xe3, 0xc1, 0x56, 0x1f, 0xc7, 0x93, 0xdd, 0x85, 0x7b, 0x21, 0x46, /* Byte value: 0x31 */ + 0xf8, 0xa1, 0xbb, 0xa6, 0xe9, 0x42, 0x23, 0x6f, 0x0b, 0xdb, 0x5d, 0x54, 0xe1, 0x41, 0x1d, 0x2b, /* Byte value: 0x32 */ + 0xbd, 0xc8, 0xc6, 0x6d, 0x6b, 0x01, 0x15, 0x9d, 0xd8, 0xa8, 0x5e, 0x72, 0x3d, 0x8f, 0xab, 0x43, /* Byte value: 0x33 */ + 0x75, 0x12, 0xde, 0xa3, 0x92, 0x57, 0xf1, 0x07, 0x2b, 0x84, 0x11, 0x01, 0x7e, 0x90, 0x7d, 0x1d, /* Byte value: 0x34 */ + 0x03, 0xfa, 0x16, 0xe7, 0x01, 0x90, 0x81, 0x2b, 0xee, 0x82, 0xa8, 0x8f, 0xa3, 0xdc, 0xf1, 0x23, /* Byte value: 0x35 */ + 0x4c, 0xca, 0x1f, 0x3b, 0x85, 0x35, 0x37, 0x23, 0x1c, 0x7b, 0xdd, 0x0d, 0x33, 0x9f, 0x24, 0x81, /* Byte value: 0x36 */ + 0x4b, 0xab, 0x80, 0x6b, 0x39, 0xa6, 0x89, 0x6c, 0x59, 0xc2, 0x97, 0x14, 0xd7, 0x11, 0xeb, 0x67, /* Byte value: 0x37 */ + 0xc0, 0x2f, 0xc9, 0x63, 0x40, 0x50, 0x9a, 0x52, 0x66, 0x5a, 0x48, 0x9c, 0xcd, 0xbb, 0xaa, 0x17, /* Byte value: 0x38 */ + 0x04, 0x9b, 0x89, 0xb7, 0xbd, 0x03, 0x3f, 0x64, 0xab, 0x3b, 0xe2, 0x96, 0x47, 0x52, 0x3e, 0xc5, /* Byte value: 0x39 */ + 0xb3, 0x0a, 0x3b, 0xcd, 0xd0, 0xe4, 0xaa, 0x03, 0x52, 0x19, 0xca, 0x40, 0x36, 0x50, 0xf6, 0x4c, /* Byte value: 0x3a */ + 0xb6, 0xc7, 0x01, 0x27, 0xd3, 0x97, 0xea, 0x7e, 0xa3, 0x5c, 0xf1, 0x12, 0x10, 0xf7, 0x26, 0x29, /* Byte value: 0x3b */ + 0x51, 0xdb, 0x95, 0xe5, 0x8e, 0x4c, 0xf5, 0xc5, 0x91, 0xa4, 0xef, 0xad, 0x44, 0x17, 0x70, 0x3f, /* Byte value: 0x3c */ + 0xeb, 0x72, 0xcc, 0xd8, 0x59, 0xde, 0x5e, 0x17, 0x0c, 0xb5, 0xfb, 0xc6, 0x9d, 0x16, 0x14, 0x9a, /* Byte value: 0x3d */ + 0x98, 0x57, 0x3e, 0x76, 0xc9, 0x6a, 0x6e, 0x46, 0x38, 0xf6, 0x79, 0x1a, 0x66, 0xfd, 0x48, 0xc1, /* Byte value: 0x3e */ + 0x23, 0xa8, 0xd4, 0x16, 0xa0, 0x88, 0xba, 0x8d, 0xff, 0x99, 0xb4, 0xb5, 0xde, 0x09, 0xc2, 0xc4, /* Byte value: 0x3f */ + 0xab, 0xd6, 0x8b, 0xf9, 0xd8, 0xee, 0x28, 0x98, 0x2e, 0x83, 0xc3, 0xb2, 0x67, 0x7f, 0x72, 0x97, /* Byte value: 0x40 */ + 0xb2, 0x5c, 0x88, 0x90, 0x6e, 0x94, 0xd5, 0x1a, 0x08, 0x67, 0x13, 0x84, 0x57, 0xa5, 0x18, 0xec, /* Byte value: 
0x41 */ + 0x99, 0x01, 0x8d, 0x2b, 0x77, 0x1a, 0x11, 0x5f, 0x62, 0x88, 0xa0, 0xde, 0x07, 0x08, 0xa6, 0x61, /* Byte value: 0x42 */ + 0x89, 0x28, 0xec, 0xb2, 0xc6, 0x16, 0xed, 0x0c, 0x8b, 0x64, 0xae, 0xc3, 0xd8, 0x83, 0x5e, 0xf3, /* Byte value: 0x43 */ + 0xdf, 0x92, 0xe6, 0x07, 0xf4, 0xc9, 0xa6, 0x86, 0x5f, 0x79, 0x0b, 0x77, 0x78, 0x1a, 0xe1, 0x2a, /* Byte value: 0x44 */ + 0x0d, 0x38, 0xeb, 0x47, 0xba, 0x75, 0x3e, 0xb5, 0x64, 0x33, 0x3c, 0xbd, 0xa8, 0x03, 0xac, 0x2c, /* Byte value: 0x45 */ + 0x31, 0x2d, 0x10, 0x35, 0xae, 0x64, 0xb8, 0xec, 0xa2, 0x89, 0xcb, 0xe3, 0xc3, 0xab, 0x25, 0xd5, /* Byte value: 0x46 */ + 0x70, 0xdf, 0xe4, 0x49, 0x91, 0x24, 0xb1, 0x7a, 0xda, 0xc1, 0x2a, 0x53, 0x58, 0x37, 0xad, 0x78, /* Byte value: 0x47 */ + 0x73, 0x25, 0xf2, 0xae, 0x90, 0xb4, 0x30, 0x51, 0x34, 0x43, 0x82, 0xdc, 0xfb, 0xeb, 0x5c, 0x5b, /* Byte value: 0x48 */ + 0x1d, 0x11, 0x8a, 0xde, 0x0b, 0x79, 0xc2, 0xe6, 0x8d, 0xdf, 0x32, 0xa0, 0x77, 0x88, 0x54, 0xbe, /* Byte value: 0x49 */ + 0x8d, 0xb3, 0x65, 0x05, 0x7b, 0x15, 0xd2, 0x68, 0x20, 0x5f, 0x4c, 0x55, 0x9f, 0xd1, 0x60, 0x36, /* Byte value: 0x4a */ + 0xf1, 0x02, 0xd9, 0x56, 0xee, 0x34, 0x22, 0xbe, 0xc4, 0xd3, 0x83, 0x7f, 0x0e, 0x10, 0x8f, 0xc2, /* Byte value: 0x4b */ + 0x7d, 0xe7, 0x0f, 0x0e, 0x2b, 0x51, 0x8f, 0xcf, 0xbe, 0xf2, 0x16, 0xee, 0xf0, 0x34, 0x01, 0x54, /* Byte value: 0x4c */ + 0x17, 0x48, 0xfe, 0xc9, 0x0d, 0x9f, 0x42, 0x1c, 0xac, 0x55, 0x44, 0x04, 0x3b, 0x05, 0x37, 0x74, /* Byte value: 0x4d */ + 0x2c, 0x3c, 0x9a, 0xeb, 0xa5, 0x1d, 0x7a, 0x0a, 0x2f, 0x56, 0xf9, 0x43, 0xb4, 0x23, 0x71, 0x6b, /* Byte value: 0x4e */ + 0x35, 0xb6, 0x99, 0x82, 0x13, 0x67, 0x87, 0x88, 0x09, 0xb2, 0x29, 0x75, 0x84, 0xf9, 0x1b, 0x10, /* Byte value: 0x4f */ + 0x88, 0x7e, 0x5f, 0xef, 0x78, 0x66, 0x92, 0x15, 0xd1, 0x1a, 0x77, 0x07, 0xb9, 0x76, 0xb0, 0x53, /* Byte value: 0x50 */ + 0x67, 0x97, 0x1a, 0x80, 0x9c, 0xbb, 0xf3, 0x66, 0x76, 0x94, 0x6e, 0x57, 0x63, 0x32, 0x9a, 0x0c, /* Byte value: 0x51 */ + 0xe1, 0x2b, 0xb8, 0xcf, 0x5f, 0x38, 0xde, 0xed, 0x2d, 0x3f, 0x8d, 0x62, 0xd1, 0x9b, 0x77, 0x50, /* Byte value: 0x52 */ + 0xb5, 0x3d, 0x17, 0xc0, 0xd2, 0x07, 0x6b, 0x55, 0x4d, 0xde, 0x59, 0x9d, 0xb3, 0x2b, 0xd7, 0x0a, /* Byte value: 0x53 */ + 0x64, 0x6d, 0x0c, 0x67, 0x9d, 0x2b, 0x72, 0x4d, 0x98, 0x16, 0xc6, 0xd8, 0xc0, 0xee, 0x6b, 0x2f, /* Byte value: 0x54 */ + 0xb1, 0xa6, 0x9e, 0x77, 0x6f, 0x04, 0x54, 0x31, 0xe6, 0xe5, 0xbb, 0x0b, 0xf4, 0x79, 0xe9, 0xcf, /* Byte value: 0x55 */ + 0x08, 0xf5, 0xd1, 0xad, 0xb9, 0x06, 0x7e, 0xc8, 0x95, 0x76, 0x07, 0xef, 0x8e, 0xa4, 0x7c, 0x49, /* Byte value: 0x56 */ + 0xe5, 0xb0, 0x31, 0x78, 0xe2, 0x3b, 0xe1, 0x89, 0x86, 0x04, 0x6f, 0xf4, 0x96, 0xc9, 0x49, 0x95, /* Byte value: 0x57 */ + 0x1c, 0x47, 0x39, 0x83, 0xb5, 0x09, 0xbd, 0xff, 0xd7, 0xa1, 0xeb, 0x64, 0x16, 0x7d, 0xba, 0x1e, /* Byte value: 0x58 */ + 0x6b, 0xf9, 0x42, 0x9a, 0x98, 0xbe, 0xb2, 0xca, 0x48, 0xd9, 0x8b, 0x2e, 0xaa, 0xc4, 0xd8, 0x80, /* Byte value: 0x59 */ + 0x5c, 0xe3, 0x7e, 0xa2, 0x34, 0x39, 0xcb, 0x70, 0xf5, 0x97, 0xd3, 0x10, 0xec, 0x14, 0xdc, 0x13, /* Byte value: 0x5a */ + 0xa3, 0x23, 0x5a, 0x54, 0x61, 0xe8, 0x56, 0x50, 0xbb, 0xf5, 0xc4, 0x5d, 0xe9, 0xdb, 0x0e, 0xde, /* Byte value: 0x5b */ + 0x7a, 0x86, 0x90, 0x5e, 0x97, 0xc2, 0x31, 0x80, 0xfb, 0x4b, 0x5c, 0xf7, 0x14, 0xba, 0xce, 0xb2, /* Byte value: 0x5c */ + 0xe8, 0x88, 0xda, 0x3f, 0x58, 0x4e, 0xdf, 0x3c, 0xe2, 0x37, 0x53, 0x49, 0x3e, 0xca, 0xe5, 0xb9, /* Byte value: 0x5d */ + 0xda, 0x5f, 0xdc, 0xed, 0xf7, 0xba, 0xe6, 0xfb, 0xae, 0x3c, 0x30, 0x25, 0x5e, 0xbd, 0x31, 0x4f, /* Byte value: 0x5e */ + 0xd7, 0x67, 0x37, 0xaa, 0x4d, 0xcf, 
0xd8, 0x4e, 0xca, 0x0f, 0x0c, 0x98, 0xf6, 0xbe, 0x9d, 0x63, /* Byte value: 0x5f */ + 0xfa, 0x0d, 0x1e, 0x1c, 0x56, 0xa2, 0xdd, 0x5d, 0xbf, 0x27, 0x2c, 0x1f, 0x23, 0x68, 0x02, 0xa8, /* Byte value: 0x60 */ + 0xa7, 0xb8, 0xd3, 0xe3, 0xdc, 0xeb, 0x69, 0x34, 0x10, 0xce, 0x26, 0xcb, 0xae, 0x89, 0x30, 0x1b, /* Byte value: 0x61 */ + 0x7f, 0x4b, 0xaa, 0xb4, 0x94, 0xb1, 0x71, 0xfd, 0x0a, 0x0e, 0x67, 0xa5, 0x32, 0x1d, 0x1e, 0xd7, /* Byte value: 0x62 */ + 0xe4, 0xe6, 0x82, 0x25, 0x5c, 0x4b, 0x9e, 0x90, 0xdc, 0x7a, 0xb6, 0x30, 0xf7, 0x3c, 0xa7, 0x35, /* Byte value: 0x63 */ + 0x8f, 0x1f, 0xc0, 0xbf, 0xc4, 0xf5, 0x2c, 0x5a, 0x94, 0xa3, 0x3d, 0x1e, 0x5d, 0xf8, 0x7f, 0xb5, /* Byte value: 0x64 */ + 0xa1, 0x8f, 0xff, 0xee, 0xde, 0x08, 0xa8, 0x62, 0x0f, 0x09, 0xb5, 0x16, 0x2b, 0xf2, 0x11, 0x5d, /* Byte value: 0x65 */ + 0x52, 0x21, 0x83, 0x02, 0x8f, 0xdc, 0x74, 0xee, 0x7f, 0x26, 0x47, 0x22, 0xe7, 0xcb, 0x81, 0x1c, /* Byte value: 0x66 */ + 0xec, 0x13, 0x53, 0x88, 0xe5, 0x4d, 0xe0, 0x58, 0x49, 0x0c, 0xb1, 0xdf, 0x79, 0x98, 0xdb, 0x7c, /* Byte value: 0x67 */ + 0x3b, 0x74, 0x64, 0x22, 0xa8, 0x82, 0x38, 0x16, 0x83, 0x03, 0xbd, 0x47, 0x8f, 0x26, 0x46, 0x1f, /* Byte value: 0x68 */ + 0xa2, 0x75, 0xe9, 0x09, 0xdf, 0x98, 0x29, 0x49, 0xe1, 0x8b, 0x1d, 0x99, 0x88, 0x2e, 0xe0, 0x7e, /* Byte value: 0x69 */ + 0x5f, 0x19, 0x68, 0x45, 0x35, 0xa9, 0x4a, 0x5b, 0x1b, 0x15, 0x7b, 0x9f, 0x4f, 0xc8, 0x2d, 0x30, /* Byte value: 0x6a */ + 0x50, 0x8d, 0x26, 0xb8, 0x30, 0x3c, 0x8a, 0xdc, 0xcb, 0xda, 0x36, 0x69, 0x25, 0xe2, 0x9e, 0x9f, /* Byte value: 0x6b */ + 0x4a, 0xfd, 0x33, 0x36, 0x87, 0xd6, 0xf6, 0x75, 0x03, 0xbc, 0x4e, 0xd0, 0xb6, 0xe4, 0x05, 0xc7, /* Byte value: 0x6c */ + 0x57, 0xec, 0xb9, 0xe8, 0x8c, 0xaf, 0x34, 0x93, 0x8e, 0x63, 0x7c, 0x70, 0xc1, 0x6c, 0x51, 0x79, /* Byte value: 0x6d */ + 0x55, 0x40, 0x1c, 0x52, 0x33, 0x4f, 0xca, 0xa1, 0x3a, 0x9f, 0x0d, 0x3b, 0x03, 0x45, 0x4e, 0xfa, /* Byte value: 0x6e */ + 0xdc, 0x68, 0xf0, 0xe0, 0xf5, 0x59, 0x27, 0xad, 0xb1, 0xfb, 0xa3, 0xf8, 0xdb, 0xc6, 0x10, 0x09, /* Byte value: 0x6f */ + 0xde, 0xc4, 0x55, 0x5a, 0x4a, 0xb9, 0xd9, 0x9f, 0x05, 0x07, 0xd2, 0xb3, 0x19, 0xef, 0x0f, 0x8a, /* Byte value: 0x70 */ + 0x9d, 0x9a, 0x04, 0x9c, 0xca, 0x19, 0x2e, 0x3b, 0xc9, 0xb3, 0x42, 0x48, 0x40, 0x5a, 0x98, 0xa4, /* Byte value: 0x71 */ + 0xe7, 0x1c, 0x94, 0xc2, 0x5d, 0xdb, 0x1f, 0xbb, 0x32, 0xf8, 0x1e, 0xbf, 0x54, 0xe0, 0x56, 0x16, /* Byte value: 0x72 */ + 0xaf, 0x4d, 0x02, 0x4e, 0x65, 0xed, 0x17, 0xfc, 0x85, 0xb8, 0x21, 0x24, 0x20, 0x2d, 0x4c, 0x52, /* Byte value: 0x73 */ + 0x97, 0xc3, 0x70, 0x8b, 0xcc, 0xff, 0xae, 0xc1, 0xe8, 0x39, 0x34, 0xec, 0x0c, 0xd7, 0xfb, 0x6e, /* Byte value: 0x74 */ + 0xaa, 0x80, 0x38, 0xa4, 0x66, 0x9e, 0x57, 0x81, 0x74, 0xfd, 0x1a, 0x76, 0x06, 0x8a, 0x9c, 0x37, /* Byte value: 0x75 */ + 0x26, 0x65, 0xee, 0xfc, 0xa3, 0xfb, 0xfa, 0xf0, 0x0e, 0xdc, 0x8f, 0xe7, 0xf8, 0xae, 0x12, 0xa1, /* Byte value: 0x76 */ + 0x4d, 0x9c, 0xac, 0x66, 0x3b, 0x45, 0x48, 0x3a, 0x46, 0x05, 0x04, 0xc9, 0x52, 0x6a, 0xca, 0x21, /* Byte value: 0x77 */ + 0x54, 0x16, 0xaf, 0x0f, 0x8d, 0x3f, 0xb5, 0xb8, 0x60, 0xe1, 0xd4, 0xff, 0x62, 0xb0, 0xa0, 0x5a, /* Byte value: 0x78 */ + 0xf7, 0x35, 0xf5, 0x5b, 0xec, 0xd7, 0xe3, 0xe8, 0xdb, 0x14, 0x10, 0xa2, 0x8b, 0x6b, 0xae, 0x84, /* Byte value: 0x79 */ + 0xd9, 0xa5, 0xca, 0x0a, 0xf6, 0x2a, 0x67, 0xd0, 0x40, 0xbe, 0x98, 0xaa, 0xfd, 0x61, 0xc0, 0x6c, /* Byte value: 0x7a */ + 0x84, 0x10, 0x07, 0xf5, 0x7c, 0x63, 0xd3, 0xb9, 0xef, 0x57, 0x92, 0x7e, 0x70, 0x80, 0xf2, 0xdf, /* Byte value: 0x7b */ + 0x41, 0xf2, 0xf4, 0x7c, 0x3f, 0x40, 0x09, 0x96, 0x78, 0x48, 0xe1, 0xb0, 0x9b, 
0x9c, 0x88, 0xad, /* Byte value: 0x7c */ + 0x82, 0x27, 0x2b, 0xf8, 0x7e, 0x80, 0x12, 0xef, 0xf0, 0x90, 0x01, 0xa3, 0xf5, 0xfb, 0xd3, 0x99, /* Byte value: 0x7d */ + 0xd6, 0x31, 0x84, 0xf7, 0xf3, 0xbf, 0xa7, 0x57, 0x90, 0x71, 0xd5, 0x5c, 0x97, 0x4b, 0x73, 0xc3, /* Byte value: 0x7e */ + 0xb7, 0x91, 0xb2, 0x7a, 0x6d, 0xe7, 0x95, 0x67, 0xf9, 0x22, 0x28, 0xd6, 0x71, 0x02, 0xc8, 0x89, /* Byte value: 0x7f */ + 0xdb, 0x09, 0x6f, 0xb0, 0x49, 0xca, 0x99, 0xe2, 0xf4, 0x42, 0xe9, 0xe1, 0x3f, 0x48, 0xdf, 0xef, /* Byte value: 0x80 */ + 0x2b, 0x5d, 0x05, 0xbb, 0x19, 0x8e, 0xc4, 0x45, 0x6a, 0xef, 0xb3, 0x5a, 0x50, 0xad, 0xbe, 0x8d, /* Byte value: 0x81 */ + 0xa6, 0xee, 0x60, 0xbe, 0x62, 0x9b, 0x16, 0x2d, 0x4a, 0xb0, 0xff, 0x0f, 0xcf, 0x7c, 0xde, 0xbb, /* Byte value: 0x82 */ + 0x38, 0x8e, 0x72, 0xc5, 0xa9, 0x12, 0xb9, 0x3d, 0x6d, 0x81, 0x15, 0xc8, 0x2c, 0xfa, 0xb7, 0x3c, /* Byte value: 0x83 */ + 0xce, 0xed, 0x34, 0xc3, 0xfb, 0xb5, 0x25, 0xcc, 0xec, 0xeb, 0xdc, 0xae, 0xc6, 0x64, 0xf7, 0x18, /* Byte value: 0x84 */ + 0x95, 0x6f, 0xd5, 0x31, 0x73, 0x1f, 0x50, 0xf3, 0x5c, 0xc5, 0x45, 0xa7, 0xce, 0xfe, 0xe4, 0xed, /* Byte value: 0x85 */ + 0x71, 0x89, 0x57, 0x14, 0x2f, 0x54, 0xce, 0x63, 0x80, 0xbf, 0xf3, 0x97, 0x39, 0xc2, 0x43, 0xd8, /* Byte value: 0x86 */ + 0xc3, 0xd5, 0xdf, 0x84, 0x41, 0xc0, 0x1b, 0x79, 0x88, 0xd8, 0xe0, 0x13, 0x6e, 0x67, 0x5b, 0x34, /* Byte value: 0x87 */ + 0x8e, 0x49, 0x73, 0xe2, 0x7a, 0x85, 0x53, 0x43, 0xce, 0xdd, 0xe4, 0xda, 0x3c, 0x0d, 0x91, 0x15, /* Byte value: 0x88 */ + 0x9a, 0xfb, 0x9b, 0xcc, 0x76, 0x8a, 0x90, 0x74, 0x8c, 0x0a, 0x08, 0x51, 0xa4, 0xd4, 0x57, 0x42, /* Byte value: 0x89 */ + 0xef, 0xe9, 0x45, 0x6f, 0xe4, 0xdd, 0x61, 0x73, 0xa7, 0x8e, 0x19, 0x50, 0xda, 0x44, 0x2a, 0x5f, /* Byte value: 0x8a */ + 0x25, 0x9f, 0xf8, 0x1b, 0xa2, 0x6b, 0x7b, 0xdb, 0xe0, 0x5e, 0x27, 0x68, 0x5b, 0x72, 0xe3, 0x82, /* Byte value: 0x8b */ + 0xa4, 0x42, 0xc5, 0x04, 0xdd, 0x7b, 0xe8, 0x1f, 0xfe, 0x4c, 0x8e, 0x44, 0x0d, 0x55, 0xc1, 0x38, /* Byte value: 0x8c */ + 0xe9, 0xde, 0x69, 0x62, 0xe6, 0x3e, 0xa0, 0x25, 0xb8, 0x49, 0x8a, 0x8d, 0x5f, 0x3f, 0x0b, 0x19, /* Byte value: 0x8d */ + 0x3c, 0x15, 0xfb, 0x72, 0x14, 0x11, 0x86, 0x59, 0xc6, 0xba, 0xf7, 0x5e, 0x6b, 0xa8, 0x89, 0xf9, /* Byte value: 0x8e */ + 0x6f, 0x62, 0xcb, 0x2d, 0x25, 0xbd, 0x8d, 0xae, 0xe3, 0xe2, 0x69, 0xb8, 0xed, 0x96, 0xe6, 0x45, /* Byte value: 0x8f */ + 0x02, 0xac, 0xa5, 0xba, 0xbf, 0xe0, 0xfe, 0x32, 0xb4, 0xfc, 0x71, 0x4b, 0xc2, 0x29, 0x1f, 0x83, /* Byte value: 0x90 */ + 0x01, 0x56, 0xb3, 0x5d, 0xbe, 0x70, 0x7f, 0x19, 0x5a, 0x7e, 0xd9, 0xc4, 0x61, 0xf5, 0xee, 0xa0, /* Byte value: 0x91 */ + 0x0a, 0x59, 0x74, 0x17, 0x06, 0xe6, 0x80, 0xfa, 0x21, 0x8a, 0x76, 0xa4, 0x4c, 0x8d, 0x63, 0xca, /* Byte value: 0x92 */ + 0x09, 0xa3, 0x62, 0xf0, 0x07, 0x76, 0x01, 0xd1, 0xcf, 0x08, 0xde, 0x2b, 0xef, 0x51, 0x92, 0xe9, /* Byte value: 0x93 */ + 0x94, 0x39, 0x66, 0x6c, 0xcd, 0x6f, 0x2f, 0xea, 0x06, 0xbb, 0x9c, 0x63, 0xaf, 0x0b, 0x0a, 0x4d, /* Byte value: 0x94 */ + 0xed, 0x45, 0xe0, 0xd5, 0x5b, 0x3d, 0x9f, 0x41, 0x13, 0x72, 0x68, 0x1b, 0x18, 0x6d, 0x35, 0xdc, /* Byte value: 0x95 */ + 0x59, 0x2e, 0x44, 0x48, 0x37, 0x4a, 0x8b, 0x0d, 0x04, 0xd2, 0xe8, 0x42, 0xca, 0xb3, 0x0c, 0x76, /* Byte value: 0x96 */ + 0x61, 0xa0, 0x36, 0x8d, 0x9e, 0x58, 0x32, 0x30, 0x69, 0x53, 0xfd, 0x8a, 0xe6, 0x49, 0xbb, 0x4a, /* Byte value: 0x97 */ + 0x69, 0x55, 0xe7, 0x20, 0x27, 0x5e, 0x4c, 0xf8, 0xfc, 0x25, 0xfa, 0x65, 0x68, 0xed, 0xc7, 0x03, /* Byte value: 0x98 */ + 0x29, 0xf1, 0xa0, 0x01, 0xa6, 0x6e, 0x3a, 0x77, 0xde, 0x13, 0xc2, 0x11, 0x92, 0x84, 0xa1, 0x0e, /* Byte value: 0x99 */ + 
0xb0, 0xf0, 0x2d, 0x2a, 0xd1, 0x74, 0x2b, 0x28, 0xbc, 0x9b, 0x62, 0xcf, 0x95, 0x8c, 0x07, 0x6f, /* Byte value: 0x9a */ + 0x30, 0x7b, 0xa3, 0x68, 0x10, 0x14, 0xc7, 0xf5, 0xf8, 0xf7, 0x12, 0x27, 0xa2, 0x5e, 0xcb, 0x75, /* Byte value: 0x9b */ + 0x60, 0xf6, 0x85, 0xd0, 0x20, 0x28, 0x4d, 0x29, 0x33, 0x2d, 0x24, 0x4e, 0x87, 0xbc, 0x55, 0xea, /* Byte value: 0x9c */ + 0x74, 0x44, 0x6d, 0xfe, 0x2c, 0x27, 0x8e, 0x1e, 0x71, 0xfa, 0xc8, 0xc5, 0x1f, 0x65, 0x93, 0xbd, /* Byte value: 0x9d */ + 0x22, 0xfe, 0x67, 0x4b, 0x1e, 0xf8, 0xc5, 0x94, 0xa5, 0xe7, 0x6d, 0x71, 0xbf, 0xfc, 0x2c, 0x64, /* Byte value: 0x9e */ + 0x0c, 0x6e, 0x58, 0x1a, 0x04, 0x05, 0x41, 0xac, 0x3e, 0x4d, 0xe5, 0x79, 0xc9, 0xf6, 0x42, 0x8c, /* Byte value: 0x9f */ + 0xcd, 0x17, 0x22, 0x24, 0xfa, 0x25, 0xa4, 0xe7, 0x02, 0x69, 0x74, 0x21, 0x65, 0xb8, 0x06, 0x3b, /* Byte value: 0xa0 */ + 0x7b, 0xd0, 0x23, 0x03, 0x29, 0xb2, 0x4e, 0x99, 0xa1, 0x35, 0x85, 0x33, 0x75, 0x4f, 0x20, 0x12, /* Byte value: 0xa1 */ + 0x5b, 0x82, 0xe1, 0xf2, 0x88, 0xaa, 0x75, 0x3f, 0xb0, 0x2e, 0x99, 0x09, 0x08, 0x9a, 0x13, 0xf5, /* Byte value: 0xa2 */ + 0x40, 0xa4, 0x47, 0x21, 0x81, 0x30, 0x76, 0x8f, 0x22, 0x36, 0x38, 0x74, 0xfa, 0x69, 0x66, 0x0d, /* Byte value: 0xa3 */ + 0xe3, 0x87, 0x1d, 0x75, 0xe0, 0xd8, 0x20, 0xdf, 0x99, 0xc3, 0xfc, 0x29, 0x13, 0xb2, 0x68, 0xd3, /* Byte value: 0xa4 */ + 0x14, 0xb2, 0xe8, 0x2e, 0x0c, 0x0f, 0xc3, 0x37, 0x42, 0xd7, 0xec, 0x8b, 0x98, 0xd9, 0xc6, 0x57, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0xfd, 0x6c, 0x81, 0x4c, 0xea, 0x31, 0x63, 0x12, 0xfa, 0x9e, 0x66, 0x06, 0xc7, 0xe6, 0xcd, 0x4e, /* Byte value: 0xa7 */ + 0x78, 0x2a, 0x35, 0xe4, 0x28, 0x22, 0xcf, 0xb2, 0x4f, 0xb7, 0x2d, 0xbc, 0xd6, 0x93, 0xd1, 0x31, /* Byte value: 0xa8 */ + 0xad, 0xe1, 0xa7, 0xf4, 0xda, 0x0d, 0xe9, 0xce, 0x31, 0x44, 0x50, 0x6f, 0xe2, 0x04, 0x53, 0xd1, /* Byte value: 0xa9 */ + 0xd0, 0x06, 0xa8, 0xfa, 0xf1, 0x5c, 0x66, 0x01, 0x8f, 0xb6, 0x46, 0x81, 0x12, 0x30, 0x52, 0x85, /* Byte value: 0xaa */ + 0x91, 0xf4, 0x5c, 0x86, 0xce, 0x1c, 0x6f, 0x97, 0xf7, 0xfe, 0xa7, 0x31, 0x89, 0xac, 0xda, 0x28, /* Byte value: 0xab */ + 0xfc, 0x3a, 0x32, 0x11, 0x54, 0x41, 0x1c, 0x0b, 0xa0, 0xe0, 0xbf, 0xc2, 0xa6, 0x13, 0x23, 0xee, /* Byte value: 0xac */ + 0xf9, 0xf7, 0x08, 0xfb, 0x57, 0x32, 0x5c, 0x76, 0x51, 0xa5, 0x84, 0x90, 0x80, 0xb4, 0xf3, 0x8b, /* Byte value: 0xad */ + 0xea, 0x24, 0x7f, 0x85, 0xe7, 0xae, 0x21, 0x0e, 0x56, 0xcb, 0x22, 0x02, 0xfc, 0xe3, 0xfa, 0x3a, /* Byte value: 0xae */ + 0x85, 0x46, 0xb4, 0xa8, 0xc2, 0x13, 0xac, 0xa0, 0xb5, 0x29, 0x4b, 0xba, 0x11, 0x75, 0x1c, 0x7f, /* Byte value: 0xaf */ + 0x7e, 0x1d, 0x19, 0xe9, 0x2a, 0xc1, 0x0e, 0xe4, 0x50, 0x70, 0xbe, 0x61, 0x53, 0xe8, 0xf0, 0x77, /* Byte value: 0xb0 */ + 0x77, 0xbe, 0x7b, 0x19, 0x2d, 0xb7, 0x0f, 0x35, 0x9f, 0x78, 0x60, 0x4a, 0xbc, 0xb9, 0x62, 0x9e, /* Byte value: 0xb1 */ + 0xc7, 0x4e, 0x56, 0x33, 0xfc, 0xc3, 0x24, 0x1d, 0x23, 0xe3, 0x02, 0x85, 0x29, 0x35, 0x65, 0xf1, /* Byte value: 0xb2 */ + 0xc5, 0xe2, 0xf3, 0x89, 0x43, 0x23, 0xda, 0x2f, 0x97, 0x1f, 0x73, 0xce, 0xeb, 0x1c, 0x7a, 0x72, /* Byte value: 0xb3 */ + 0x5d, 0xb5, 0xcd, 0xff, 0x8a, 0x49, 0xb4, 0x69, 0xaf, 0xe9, 0x0a, 0xd4, 0x8d, 0xe1, 0x32, 0xb3, /* Byte value: 0xb4 */ + 0x3a, 0x22, 0xd7, 0x7f, 0x16, 0xf2, 0x47, 0x0f, 0xd9, 0x7d, 0x64, 0x83, 0xee, 0xd3, 0xa8, 0xbf, /* Byte value: 0xb5 */ + 0xd5, 0xcb, 0x92, 0x10, 0xf2, 0x2f, 0x26, 0x7c, 0x7e, 0xf3, 0x7d, 0xd3, 0x34, 0x97, 0x82, 0xe0, /* Byte value: 0xb6 */ + 0x66, 0xc1, 0xa9, 0xdd, 0x22, 0xcb, 0x8c, 
0x7f, 0x2c, 0xea, 0xb7, 0x93, 0x02, 0xc7, 0x74, 0xac, /* Byte value: 0xb7 */ + 0x8b, 0x84, 0x49, 0x08, 0x79, 0xf6, 0x13, 0x3e, 0x3f, 0x98, 0xdf, 0x88, 0x1a, 0xaa, 0x41, 0x70, /* Byte value: 0xb8 */ + 0x49, 0x07, 0x25, 0xd1, 0x86, 0x46, 0x77, 0x5e, 0xed, 0x3e, 0xe6, 0x5f, 0x15, 0x38, 0xf4, 0xe4, /* Byte value: 0xb9 */ + 0xc2, 0x83, 0x6c, 0xd9, 0xff, 0xb0, 0x64, 0x60, 0xd2, 0xa6, 0x39, 0xd7, 0x0f, 0x92, 0xb5, 0x94, /* Byte value: 0xba */ + 0xc6, 0x18, 0xe5, 0x6e, 0x42, 0xb3, 0x5b, 0x04, 0x79, 0x9d, 0xdb, 0x41, 0x48, 0xc0, 0x8b, 0x51, /* Byte value: 0xbb */ + 0x56, 0xba, 0x0a, 0xb5, 0x32, 0xdf, 0x4b, 0x8a, 0xd4, 0x1d, 0xa5, 0xb4, 0xa0, 0x99, 0xbf, 0xd9, /* Byte value: 0xbc */ + 0x4e, 0x66, 0xba, 0x81, 0x3a, 0xd5, 0xc9, 0x11, 0xa8, 0x87, 0xac, 0x46, 0xf1, 0xb6, 0x3b, 0x02, /* Byte value: 0xbd */ + 0x27, 0x33, 0x5d, 0xa1, 0x1d, 0x8b, 0x85, 0xe9, 0x54, 0xa2, 0x56, 0x23, 0x99, 0x5b, 0xfc, 0x01, /* Byte value: 0xbe */ + 0xe6, 0x4a, 0x27, 0x9f, 0xe3, 0xab, 0x60, 0xa2, 0x68, 0x86, 0xc7, 0x7b, 0x35, 0x15, 0xb8, 0xb6, /* Byte value: 0xbf */ + 0x93, 0x58, 0xf9, 0x3c, 0x71, 0xfc, 0x91, 0xa5, 0x43, 0x02, 0xd6, 0x7a, 0x4b, 0x85, 0xc5, 0xab, /* Byte value: 0xc0 */ + 0xbb, 0xff, 0xea, 0x60, 0x69, 0xe2, 0xd4, 0xcb, 0xc7, 0x6f, 0xcd, 0xaf, 0xb8, 0xf4, 0x8a, 0x05, /* Byte value: 0xc1 */ + 0xd1, 0x50, 0x1b, 0xa7, 0x4f, 0x2c, 0x19, 0x18, 0xd5, 0xc8, 0x9f, 0x45, 0x73, 0xc5, 0xbc, 0x25, /* Byte value: 0xc2 */ + 0x63, 0x0c, 0x93, 0x37, 0x21, 0xb8, 0xcc, 0x02, 0xdd, 0xaf, 0x8c, 0xc1, 0x24, 0x60, 0xa4, 0xc9, /* Byte value: 0xc3 */ + 0xc1, 0x79, 0x7a, 0x3e, 0xfe, 0x20, 0xe5, 0x4b, 0x3c, 0x24, 0x91, 0x58, 0xac, 0x4e, 0x44, 0xb7, /* Byte value: 0xc4 */ + 0x45, 0x69, 0x7d, 0xcb, 0x82, 0x43, 0x36, 0xf2, 0xd3, 0x73, 0x03, 0x26, 0xdc, 0xce, 0xb6, 0x68, /* Byte value: 0xc5 */ + 0xc8, 0xda, 0x18, 0xce, 0xf9, 0x56, 0xe4, 0x9a, 0xf3, 0x2c, 0x4f, 0x73, 0x43, 0x1f, 0xd6, 0x5e, /* Byte value: 0xc6 */ + 0x16, 0x1e, 0x4d, 0x94, 0xb3, 0xef, 0x3d, 0x05, 0xf6, 0x2b, 0x9d, 0xc0, 0x5a, 0xf0, 0xd9, 0xd4, /* Byte value: 0xc7 */ + 0x1b, 0x26, 0xa6, 0xd3, 0x09, 0x9a, 0x03, 0xb0, 0x92, 0x18, 0xa1, 0x7d, 0xf2, 0xf3, 0x75, 0xf8, /* Byte value: 0xc8 */ + 0x20, 0x52, 0xc2, 0xf1, 0xa1, 0x18, 0x3b, 0xa6, 0x11, 0x1b, 0x1c, 0x3a, 0x7d, 0xd5, 0x33, 0xe7, /* Byte value: 0xc9 */ + 0x9b, 0xad, 0x28, 0x91, 0xc8, 0xfa, 0xef, 0x6d, 0xd6, 0x74, 0xd1, 0x95, 0xc5, 0x21, 0xb9, 0xe2, /* Byte value: 0xca */ + 0xf3, 0xae, 0x7c, 0xec, 0x51, 0xd4, 0xdc, 0x8c, 0x70, 0x2f, 0xf2, 0x34, 0xcc, 0x39, 0x90, 0x41, /* Byte value: 0xcb */ + 0xb8, 0x05, 0xfc, 0x87, 0x68, 0x72, 0x55, 0xe0, 0x29, 0xed, 0x65, 0x20, 0x1b, 0x28, 0x7b, 0x26, /* Byte value: 0xcc */ + 0xf0, 0x54, 0x6a, 0x0b, 0x50, 0x44, 0x5d, 0xa7, 0x9e, 0xad, 0x5a, 0xbb, 0x6f, 0xe5, 0x61, 0x62, /* Byte value: 0xcd */ + 0x2f, 0xc6, 0x8c, 0x0c, 0xa4, 0x8d, 0xfb, 0x21, 0xc1, 0xd4, 0x51, 0xcc, 0x17, 0xff, 0x80, 0x48, /* Byte value: 0xce */ + 0xba, 0xa9, 0x59, 0x3d, 0xd7, 0x92, 0xab, 0xd2, 0x9d, 0x11, 0x14, 0x6b, 0xd9, 0x01, 0x64, 0xa5, /* Byte value: 0xcf */ + 0x4f, 0x30, 0x09, 0xdc, 0x84, 0xa5, 0xb6, 0x08, 0xf2, 0xf9, 0x75, 0x82, 0x90, 0x43, 0xd5, 0xa2, /* Byte value: 0xd0 */ + 0xbe, 0x32, 0xd0, 0x8a, 0x6a, 0x91, 0x94, 0xb6, 0x36, 0x2a, 0xf6, 0xfd, 0x9e, 0x53, 0x5a, 0x60, /* Byte value: 0xd1 */ + 0x12, 0x85, 0xc4, 0x23, 0x0e, 0xec, 0x02, 0x61, 0x5d, 0x10, 0x7f, 0x56, 0x1d, 0xa2, 0xe7, 0x11, /* Byte value: 0xd2 */ + 0x43, 0x5e, 0x51, 0xc6, 0x80, 0xa0, 0xf7, 0xa4, 0xcc, 0xb4, 0x90, 0xfb, 0x59, 0xb5, 0x97, 0x2e, /* Byte value: 0xd3 */ + 0xcf, 0xbb, 0x87, 0x9e, 0x45, 0xc5, 0x5a, 0xd5, 0xb6, 0x95, 0x05, 0x6a, 0xa7, 0x91, 
0x19, 0xb8, /* Byte value: 0xd4 */ + 0xc9, 0x8c, 0xab, 0x93, 0x47, 0x26, 0x9b, 0x83, 0xa9, 0x52, 0x96, 0xb7, 0x22, 0xea, 0x38, 0xfe, /* Byte value: 0xd5 */ + 0x2a, 0x0b, 0xb6, 0xe6, 0xa7, 0xfe, 0xbb, 0x5c, 0x30, 0x91, 0x6a, 0x9e, 0x31, 0x58, 0x50, 0x2d, /* Byte value: 0xd6 */ + 0x11, 0x7f, 0xd2, 0xc4, 0x0f, 0x7c, 0x83, 0x4a, 0xb3, 0x92, 0xd7, 0xd9, 0xbe, 0x7e, 0x16, 0x32, /* Byte value: 0xd7 */ + 0x15, 0xe4, 0x5b, 0x73, 0xb2, 0x7f, 0xbc, 0x2e, 0x18, 0xa9, 0x35, 0x4f, 0xf9, 0x2c, 0x28, 0xf7, /* Byte value: 0xd8 */ + 0xf6, 0x63, 0x46, 0x06, 0x52, 0xa7, 0x9c, 0xf1, 0x81, 0x6a, 0xc9, 0x66, 0xea, 0x9e, 0x40, 0x24, /* Byte value: 0xd9 */ + 0x62, 0x5a, 0x20, 0x6a, 0x9f, 0xc8, 0xb3, 0x1b, 0x87, 0xd1, 0x55, 0x05, 0x45, 0x95, 0x4a, 0x69, /* Byte value: 0xda */ + 0xcc, 0x41, 0x91, 0x79, 0x44, 0x55, 0xdb, 0xfe, 0x58, 0x17, 0xad, 0xe5, 0x04, 0x4d, 0xe8, 0x9b, /* Byte value: 0xdb */ + 0xe0, 0x7d, 0x0b, 0x92, 0xe1, 0x48, 0xa1, 0xf4, 0x77, 0x41, 0x54, 0xa6, 0xb0, 0x6e, 0x99, 0xf0, /* Byte value: 0xdc */ + 0x8c, 0xe5, 0xd6, 0x58, 0xc5, 0x65, 0xad, 0x71, 0x7a, 0x21, 0x95, 0x91, 0xfe, 0x24, 0x8e, 0x96, /* Byte value: 0xdd */ + 0x90, 0xa2, 0xef, 0xdb, 0x70, 0x6c, 0x10, 0x8e, 0xad, 0x80, 0x7e, 0xf5, 0xe8, 0x59, 0x34, 0x88, /* Byte value: 0xde */ + 0x47, 0xc5, 0xd8, 0x71, 0x3d, 0xa3, 0xc8, 0xc0, 0x67, 0x8f, 0x72, 0x6d, 0x1e, 0xe7, 0xa9, 0xeb, /* Byte value: 0xdf */ + 0xff, 0xc0, 0x24, 0xf6, 0x55, 0xd1, 0x9d, 0x20, 0x4e, 0x62, 0x17, 0x4d, 0x05, 0xcf, 0xd2, 0xcd, /* Byte value: 0xe0 */ + 0x81, 0xdd, 0x3d, 0x1f, 0x7f, 0x10, 0x93, 0xc4, 0x1e, 0x12, 0xa9, 0x2c, 0x56, 0x27, 0x22, 0xba, /* Byte value: 0xe1 */ + 0x5e, 0x4f, 0xdb, 0x18, 0x8b, 0xd9, 0x35, 0x42, 0x41, 0x6b, 0xa2, 0x5b, 0x2e, 0x3d, 0xc3, 0x90, /* Byte value: 0xe2 */ + 0xfb, 0x5b, 0xad, 0x41, 0xe8, 0xd2, 0xa2, 0x44, 0xe5, 0x59, 0xf5, 0xdb, 0x42, 0x9d, 0xec, 0x08, /* Byte value: 0xe3 */ + 0x79, 0x7c, 0x86, 0xb9, 0x96, 0x52, 0xb0, 0xab, 0x15, 0xc9, 0xf4, 0x78, 0xb7, 0x66, 0x3f, 0x91, /* Byte value: 0xe4 */ + 0x36, 0x4c, 0x8f, 0x65, 0x12, 0xf7, 0x06, 0xa3, 0xe7, 0x30, 0x81, 0xfa, 0x27, 0x25, 0xea, 0x33, /* Byte value: 0xe5 */ + 0x32, 0xd7, 0x06, 0xd2, 0xaf, 0xf4, 0x39, 0xc7, 0x4c, 0x0b, 0x63, 0x6c, 0x60, 0x77, 0xd4, 0xf6, /* Byte value: 0xe6 */ + 0x0e, 0xc2, 0xfd, 0xa0, 0xbb, 0xe5, 0xbf, 0x9e, 0x8a, 0xb1, 0x94, 0x32, 0x0b, 0xdf, 0x5d, 0x0f, /* Byte value: 0xe7 */ + 0xd3, 0xfc, 0xbe, 0x1d, 0xf0, 0xcc, 0xe7, 0x2a, 0x61, 0x34, 0xee, 0x0e, 0xb1, 0xec, 0xa3, 0xa6, /* Byte value: 0xe8 */ + 0x33, 0x81, 0xb5, 0x8f, 0x11, 0x84, 0x46, 0xde, 0x16, 0x75, 0xba, 0xa8, 0x01, 0x82, 0x3a, 0x56, /* Byte value: 0xe9 */ + 0x53, 0x77, 0x30, 0x5f, 0x31, 0xac, 0x0b, 0xf7, 0x25, 0x58, 0x9e, 0xe6, 0x86, 0x3e, 0x6f, 0xbc, /* Byte value: 0xea */ + 0x87, 0xea, 0x11, 0x12, 0x7d, 0xf3, 0x52, 0x92, 0x01, 0xd5, 0x3a, 0xf1, 0xd3, 0x5c, 0x03, 0xfc, /* Byte value: 0xeb */ + 0x21, 0x04, 0x71, 0xac, 0x1f, 0x68, 0x44, 0xbf, 0x4b, 0x65, 0xc5, 0xfe, 0x1c, 0x20, 0xdd, 0x47, /* Byte value: 0xec */ + 0xd4, 0x9d, 0x21, 0x4d, 0x4c, 0x5f, 0x59, 0x65, 0x24, 0x8d, 0xa4, 0x17, 0x55, 0x62, 0x6c, 0x40, /* Byte value: 0xed */ + 0x07, 0x61, 0x9f, 0x50, 0xbc, 0x93, 0xbe, 0x4f, 0x45, 0xb9, 0x4a, 0x19, 0xe4, 0x8e, 0xcf, 0xe6, /* Byte value: 0xee */ + 0x48, 0x51, 0x96, 0x8c, 0x38, 0x36, 0x08, 0x47, 0xb7, 0x40, 0x3f, 0x9b, 0x74, 0xcd, 0x1a, 0x44, /* Byte value: 0xef */ + 0xdd, 0x3e, 0x43, 0xbd, 0x4b, 0x29, 0x58, 0xb4, 0xeb, 0x85, 0x7a, 0x3c, 0xba, 0x33, 0xfe, 0xa9, /* Byte value: 0xf0 */ + 0x6e, 0x34, 0x78, 0x70, 0x9b, 0xcd, 0xf2, 0xb7, 0xb9, 0x9c, 0xb0, 0x7c, 0x8c, 0x63, 0x08, 0xe5, /* Byte value: 0xf1 */ + 0xc4, 
0xb4, 0x40, 0xd4, 0xfd, 0x53, 0xa5, 0x36, 0xcd, 0x61, 0xaa, 0x0a, 0x8a, 0xe9, 0x94, 0xd2, /* Byte value: 0xf2 */ + 0x58, 0x78, 0xf7, 0x15, 0x89, 0x3a, 0xf4, 0x14, 0x5e, 0xac, 0x31, 0x86, 0xab, 0x46, 0xe2, 0xd6, /* Byte value: 0xf3 */ + 0x9c, 0xcc, 0xb7, 0xc1, 0x74, 0x69, 0x51, 0x22, 0x93, 0xcd, 0x9b, 0x8c, 0x21, 0xaf, 0x76, 0x04, /* Byte value: 0xf4 */ + 0x05, 0xcd, 0x3a, 0xea, 0x03, 0x73, 0x40, 0x7d, 0xf1, 0x45, 0x3b, 0x52, 0x26, 0xa7, 0xd0, 0x65, /* Byte value: 0xf5 */ + 0x19, 0x8a, 0x03, 0x69, 0xb6, 0x7a, 0xfd, 0x82, 0x26, 0xe4, 0xd0, 0x36, 0x30, 0xda, 0x6a, 0x7b, /* Byte value: 0xf6 */ + 0xd8, 0xf3, 0x79, 0x57, 0x48, 0x5a, 0x18, 0xc9, 0x1a, 0xc0, 0x41, 0x6e, 0x9c, 0x94, 0x2e, 0xcc, /* Byte value: 0xf7 */ + 0x80, 0x8b, 0x8e, 0x42, 0xc1, 0x60, 0xec, 0xdd, 0x44, 0x6c, 0x70, 0xe8, 0x37, 0xd2, 0xcc, 0x1a, /* Byte value: 0xf8 */ + 0x9e, 0x60, 0x12, 0x7b, 0xcb, 0x89, 0xaf, 0x10, 0x27, 0x31, 0xea, 0xc7, 0xe3, 0x86, 0x69, 0x87, /* Byte value: 0xf9 */ + 0x44, 0x3f, 0xce, 0x96, 0x3c, 0x33, 0x49, 0xeb, 0x89, 0x0d, 0xda, 0xe2, 0xbd, 0x3b, 0x58, 0xc8, /* Byte value: 0xfa */ + 0xca, 0x76, 0xbd, 0x74, 0x46, 0xb6, 0x1a, 0xa8, 0x47, 0xd0, 0x3e, 0x38, 0x81, 0x36, 0xc9, 0xdd, /* Byte value: 0xfb */ + 0x3d, 0x43, 0x48, 0x2f, 0xaa, 0x61, 0xf9, 0x40, 0x9c, 0xc4, 0x2e, 0x9a, 0x0a, 0x5d, 0x67, 0x59, /* Byte value: 0xfc */ + 0x2e, 0x90, 0x3f, 0x51, 0x1a, 0xfd, 0x84, 0x38, 0x9b, 0xaa, 0x88, 0x08, 0x76, 0x0a, 0x6e, 0xe8, /* Byte value: 0xfd */ + 0x72, 0x73, 0x41, 0xf3, 0x2e, 0xc4, 0x4f, 0x48, 0x6e, 0x3d, 0x5b, 0x18, 0x9a, 0x1e, 0xb2, 0xfb, /* Byte value: 0xfe */ + 0x42, 0x08, 0xe2, 0x9b, 0x3e, 0xd0, 0x88, 0xbd, 0x96, 0xca, 0x49, 0x3f, 0x38, 0x40, 0x79, 0x8e, /* Byte value: 0xff */ + 0x65, 0x3b, 0xbf, 0x3a, 0x23, 0x5b, 0x0d, 0x54, 0xc2, 0x68, 0x1f, 0x1c, 0xa1, 0x1b, 0x85, 0x8f, /* Byte value: 0x00 */ + + /* Matrix row: 10 */ + 0xd4, 0x19, 0x2d, 0xd4, 0xc2, 0xc3, 0xd3, 0x9c, 0xe1, 0x5d, 0xa9, 0x18, 0x2c, 0xd1, 0xbe, 0x8f, /* Byte value: 0x01 */ + 0x40, 0x7b, 0xcb, 0x40, 0x94, 0x34, 0xa6, 0x04, 0x50, 0xb3, 0x14, 0xdb, 0x6b, 0x25, 0x60, 0xb5, /* Byte value: 0x02 */ + 0x97, 0xc0, 0xd2, 0x97, 0x3c, 0x7b, 0x81, 0x84, 0xc2, 0x71, 0xd1, 0x87, 0x95, 0x0f, 0x3d, 0xb7, /* Byte value: 0x03 */ + 0x4d, 0x69, 0xb6, 0x4d, 0xd9, 0x84, 0x1d, 0xc1, 0xcf, 0xff, 0x43, 0x34, 0xeb, 0xa7, 0x8a, 0xbf, /* Byte value: 0x04 */ + 0x03, 0xa2, 0x34, 0x03, 0x6a, 0x8c, 0xf4, 0x1c, 0x73, 0x9f, 0x6c, 0x44, 0xd2, 0xfb, 0xe3, 0x8d, /* Byte value: 0x05 */ + 0xd1, 0x3c, 0x71, 0xd1, 0x7c, 0x94, 0x0c, 0xb8, 0x74, 0x3f, 0x1d, 0xd4, 0x99, 0x1f, 0x58, 0xdb, /* Byte value: 0x06 */ + 0xf8, 0x08, 0x79, 0xf8, 0xe3, 0xac, 0xd6, 0xee, 0xc6, 0xdc, 0xd0, 0x47, 0x36, 0x48, 0x84, 0x45, /* Byte value: 0x07 */ + 0xca, 0xc7, 0x26, 0xca, 0xc0, 0xf2, 0x54, 0x44, 0x19, 0xd2, 0x97, 0xf5, 0x14, 0x30, 0xaf, 0xb4, /* Byte value: 0x08 */ + 0x53, 0xb7, 0xbd, 0x53, 0xdb, 0xb5, 0x9a, 0x19, 0x37, 0x70, 0x7d, 0xd9, 0xd3, 0x46, 0x9b, 0x84, /* Byte value: 0x09 */ + 0x66, 0x20, 0x27, 0x66, 0x0a, 0xf5, 0xde, 0x3e, 0x9e, 0xf6, 0xc6, 0xdf, 0xd8, 0xe3, 0x55, 0xd7, /* Byte value: 0x0a */ + 0xa5, 0x0f, 0x8d, 0xa5, 0x1f, 0x25, 0x03, 0x2e, 0x1d, 0x7f, 0x96, 0x35, 0xb7, 0x77, 0x16, 0x46, /* Byte value: 0x0b */ + 0x10, 0x6e, 0x42, 0x10, 0x25, 0x0d, 0xc8, 0x01, 0x14, 0x5c, 0x05, 0x46, 0x6a, 0x98, 0x18, 0xbc, /* Byte value: 0x0c */ + 0x6c, 0x6a, 0x9f, 0x6c, 0xb5, 0x5b, 0xa3, 0x76, 0x77, 0x32, 0x6d, 0x84, 0x71, 0xbc, 0x5a, 0x7f, /* Byte value: 0x0d */ + 0x90, 0x98, 0x17, 0x90, 0xce, 0x65, 0x47, 0x09, 0xb4, 0xf9, 0x2d, 0x33, 0xbc, 0xd2, 0xd8, 0x15, /* Byte value: 0x0e */ + 0x5e, 0xa5, 0xc0, 0x5e, 
0x96, 0x05, 0x21, 0xdc, 0xa8, 0x3c, 0x2a, 0x36, 0x53, 0xc4, 0x71, 0x8e, /* Byte value: 0x0f */ + 0xbd, 0x56, 0xee, 0xbd, 0xc9, 0xcf, 0xaf, 0xce, 0x03, 0x0d, 0x70, 0x50, 0xe8, 0xa3, 0x02, 0xa4, /* Byte value: 0x10 */ + 0xc7, 0xd5, 0x5b, 0xc7, 0x8d, 0x42, 0xef, 0x81, 0x86, 0x9e, 0xc0, 0x1a, 0x94, 0xb2, 0x45, 0xbe, /* Byte value: 0x11 */ + 0x20, 0xdc, 0x84, 0x20, 0x4a, 0x1a, 0x53, 0x02, 0x28, 0xb8, 0x0a, 0x8c, 0xd4, 0xf3, 0x30, 0xbb, /* Byte value: 0x12 */ + 0x36, 0x35, 0xae, 0x36, 0xbb, 0xcc, 0xb0, 0x3b, 0xda, 0x19, 0xd7, 0x42, 0xd9, 0x5e, 0x2d, 0xde, /* Byte value: 0x13 */ + 0xe6, 0xd6, 0x72, 0xe6, 0xe1, 0x9d, 0x51, 0x36, 0x3e, 0x53, 0xee, 0xaa, 0x0e, 0xa9, 0x95, 0x7e, /* Byte value: 0x14 */ + 0xf3, 0x9d, 0x6c, 0xf3, 0x7a, 0xc7, 0x46, 0x13, 0xbf, 0x6d, 0x5f, 0x20, 0xd1, 0xff, 0x6b, 0x96, /* Byte value: 0x15 */ + 0x78, 0xfe, 0x2c, 0x78, 0x08, 0xc4, 0x59, 0xe6, 0x66, 0x79, 0xf8, 0x32, 0xe0, 0x02, 0x44, 0xec, /* Byte value: 0x16 */ + 0x60, 0xa7, 0x4f, 0x60, 0xde, 0x2e, 0xf5, 0x06, 0x78, 0x0b, 0x1e, 0x57, 0xbf, 0xd6, 0x50, 0x0e, /* Byte value: 0x17 */ + 0x0c, 0xcd, 0xd0, 0x0c, 0x6b, 0x75, 0x56, 0x70, 0x0f, 0x39, 0x73, 0xd3, 0xce, 0x6a, 0x0a, 0x71, /* Byte value: 0x18 */ + 0x3c, 0x7f, 0x16, 0x3c, 0x04, 0x62, 0xcd, 0x73, 0x33, 0xdd, 0x7c, 0x19, 0x70, 0x01, 0x22, 0x76, /* Byte value: 0x19 */ + 0x7f, 0xa6, 0xe9, 0x7f, 0xfa, 0xda, 0x9f, 0x6b, 0x10, 0xf1, 0x04, 0x86, 0xc9, 0xdf, 0xa1, 0x4e, /* Byte value: 0x1a */ + 0xc0, 0x8d, 0x9e, 0xc0, 0x7f, 0x5c, 0x29, 0x0c, 0xf0, 0x16, 0x3c, 0xae, 0xbd, 0x6f, 0xa0, 0x1c, /* Byte value: 0x1b */ + 0xfa, 0x75, 0xe0, 0xfa, 0xaf, 0xe5, 0xcf, 0x47, 0x25, 0x36, 0x98, 0x3f, 0xaa, 0x5b, 0x87, 0xb3, /* Byte value: 0x1c */ + 0xe5, 0x74, 0x46, 0xe5, 0x8b, 0x11, 0xa5, 0x2a, 0x4d, 0xcc, 0x82, 0xee, 0xdc, 0x52, 0x76, 0xf3, /* Byte value: 0x1d */ + 0x2c, 0x11, 0x54, 0x2c, 0x21, 0x6f, 0x05, 0x72, 0x27, 0x81, 0x79, 0x5f, 0x1a, 0x99, 0x3a, 0xca, /* Byte value: 0x1e */ + 0x29, 0x34, 0x08, 0x29, 0x9f, 0x38, 0xda, 0x56, 0xb2, 0xe3, 0xcd, 0x93, 0xaf, 0x57, 0xdc, 0x9e, /* Byte value: 0x1f */ + 0xce, 0x3d, 0xd7, 0xce, 0x58, 0x60, 0x66, 0xd5, 0x1c, 0xc5, 0x07, 0x05, 0xef, 0x16, 0xa9, 0x9b, /* Byte value: 0x20 */ + 0x7c, 0x04, 0xdd, 0x7c, 0x90, 0x56, 0x6b, 0x77, 0x63, 0x6e, 0x68, 0xc2, 0x1b, 0x24, 0x42, 0xc3, /* Byte value: 0x21 */ + 0x07, 0x58, 0xc5, 0x07, 0xf2, 0x1e, 0xc6, 0x8d, 0x76, 0x88, 0xfc, 0xb4, 0x29, 0xdd, 0xe5, 0xa2, /* Byte value: 0x22 */ + 0xb4, 0xbe, 0x62, 0xb4, 0x1c, 0xed, 0x26, 0x9a, 0x99, 0x56, 0xb7, 0x4f, 0x93, 0x07, 0xee, 0x81, /* Byte value: 0x23 */ + 0x81, 0x29, 0xf8, 0x81, 0xcd, 0xad, 0x62, 0xbd, 0x30, 0xd0, 0x0c, 0x49, 0x98, 0xa2, 0x20, 0xd2, /* Byte value: 0x24 */ + 0xa2, 0x57, 0x48, 0xa2, 0xed, 0x3b, 0xc5, 0xa3, 0x6b, 0xf7, 0x6a, 0x81, 0x9e, 0xaa, 0xf3, 0xe4, /* Byte value: 0x25 */ + 0xf0, 0x3f, 0x58, 0xf0, 0x10, 0x4b, 0xb2, 0x0f, 0xcc, 0xf2, 0x33, 0x64, 0x03, 0x04, 0x88, 0x1b, /* Byte value: 0x26 */ + 0xb6, 0xc3, 0xfb, 0xb6, 0x50, 0xa4, 0x3f, 0x33, 0x7a, 0xbc, 0xff, 0x37, 0x0f, 0x14, 0xed, 0x77, /* Byte value: 0x27 */ + 0x43, 0xd9, 0xff, 0x43, 0xfe, 0xb8, 0x52, 0x18, 0x23, 0x2c, 0x78, 0x9f, 0xb9, 0xde, 0x83, 0x38, /* Byte value: 0x28 */ + 0x67, 0xff, 0x8a, 0x67, 0x2c, 0x30, 0x33, 0x8b, 0x0e, 0x83, 0xe2, 0xe3, 0x96, 0x0b, 0xb5, 0xac, /* Byte value: 0x29 */ + 0x59, 0xfd, 0x05, 0x59, 0x64, 0x1b, 0xe7, 0x51, 0xde, 0xb4, 0xd6, 0x82, 0x7a, 0x19, 0x94, 0x2c, /* Byte value: 0x2a */ + 0xec, 0x9c, 0xca, 0xec, 0x5e, 0x33, 0x2c, 0x7e, 0xd7, 0x97, 0x45, 0xf1, 0xa7, 0xf6, 0x9a, 0xd6, /* Byte value: 0x2b */ + 0x86, 0x71, 0x3d, 0x86, 0x3f, 0xb3, 0xa4, 0x30, 0x46, 0x58, 0xf0, 
0xfd, 0xb1, 0x7f, 0xc5, 0x70, /* Byte value: 0x2c */ + 0xf4, 0xc5, 0xa9, 0xf4, 0x88, 0xd9, 0x80, 0x9e, 0xc9, 0xe5, 0xa3, 0x94, 0xf8, 0x22, 0x8e, 0x34, /* Byte value: 0x2d */ + 0xf6, 0xb8, 0x30, 0xf6, 0xc4, 0x90, 0x99, 0x37, 0x2a, 0x0f, 0xeb, 0xec, 0x64, 0x31, 0x8d, 0xc2, /* Byte value: 0x2e */ + 0x5c, 0xd8, 0x59, 0x5c, 0xda, 0x4c, 0x38, 0x75, 0x4b, 0xd6, 0x62, 0x4e, 0xcf, 0xd7, 0x72, 0x78, /* Byte value: 0x2f */ + 0x92, 0xe5, 0x8e, 0x92, 0x82, 0x2c, 0x5e, 0xa0, 0x57, 0x13, 0x65, 0x4b, 0x20, 0xc1, 0xdb, 0xe3, /* Byte value: 0x30 */ + 0xa8, 0x1d, 0xf0, 0xa8, 0x52, 0x95, 0xb8, 0xeb, 0x82, 0x33, 0xc1, 0xda, 0x37, 0xf5, 0xfc, 0x4c, /* Byte value: 0x31 */ + 0xcf, 0xe2, 0x7a, 0xcf, 0x7e, 0xa5, 0x8b, 0x60, 0x8c, 0xb0, 0x23, 0x39, 0xa1, 0xfe, 0x49, 0xe0, /* Byte value: 0x32 */ + 0x2f, 0xb3, 0x60, 0x2f, 0x4b, 0xe3, 0xf1, 0x6e, 0x54, 0x1e, 0x15, 0x1b, 0xc8, 0x62, 0xd9, 0x47, /* Byte value: 0x33 */ + 0xe9, 0xb9, 0x96, 0xe9, 0xe0, 0x64, 0xf3, 0x5a, 0x42, 0xf5, 0xf1, 0x3d, 0x12, 0x38, 0x7c, 0x82, /* Byte value: 0x34 */ + 0x54, 0xef, 0x78, 0x54, 0x29, 0xab, 0x5c, 0x94, 0x41, 0xf8, 0x81, 0x6d, 0xfa, 0x9b, 0x7e, 0x26, /* Byte value: 0x35 */ + 0x8f, 0x99, 0xb1, 0x8f, 0xea, 0x91, 0x2d, 0x64, 0xdc, 0x03, 0x37, 0xe2, 0xca, 0xdb, 0x29, 0x55, /* Byte value: 0x36 */ + 0xaa, 0x60, 0x69, 0xaa, 0x1e, 0xdc, 0xa1, 0x42, 0x61, 0xd9, 0x89, 0xa2, 0xab, 0xe6, 0xff, 0xba, /* Byte value: 0x37 */ + 0x24, 0x26, 0x75, 0x24, 0xd2, 0x88, 0x61, 0x93, 0x2d, 0xaf, 0x9a, 0x7c, 0x2f, 0xd5, 0x36, 0x94, /* Byte value: 0x38 */ + 0x71, 0x16, 0xa0, 0x71, 0xdd, 0xe6, 0xd0, 0xb2, 0xfc, 0x22, 0x3f, 0x2d, 0x9b, 0xa6, 0xa8, 0xc9, /* Byte value: 0x39 */ + 0x65, 0x82, 0x13, 0x65, 0x60, 0x79, 0x2a, 0x22, 0xed, 0x69, 0xaa, 0x9b, 0x0a, 0x18, 0xb6, 0x5a, /* Byte value: 0x3a */ + 0x99, 0x70, 0x9b, 0x99, 0x1b, 0x47, 0xce, 0x5d, 0x2e, 0xa2, 0xea, 0x2c, 0xc7, 0x76, 0x34, 0x30, /* Byte value: 0x3b */ + 0x96, 0x1f, 0x7f, 0x96, 0x1a, 0xbe, 0x6c, 0x31, 0x52, 0x04, 0xf5, 0xbb, 0xdb, 0xe7, 0xdd, 0xcc, /* Byte value: 0x3c */ + 0x9c, 0x55, 0xc7, 0x9c, 0xa5, 0x10, 0x11, 0x79, 0xbb, 0xc0, 0x5e, 0xe0, 0x72, 0xb8, 0xd2, 0x64, /* Byte value: 0x3d */ + 0xdd, 0xf1, 0xa1, 0xdd, 0x17, 0xe1, 0x5a, 0xc8, 0x7b, 0x06, 0x6e, 0x07, 0x57, 0x75, 0x52, 0xaa, /* Byte value: 0x3e */ + 0x5a, 0x5f, 0x31, 0x5a, 0x0e, 0x97, 0x13, 0x4d, 0xad, 0x2b, 0xba, 0xc6, 0xa8, 0xe2, 0x77, 0xa1, /* Byte value: 0x3f */ + 0x80, 0xf6, 0x55, 0x80, 0xeb, 0x68, 0x8f, 0x08, 0xa0, 0xa5, 0x28, 0x75, 0xd6, 0x4a, 0xc0, 0xa9, /* Byte value: 0x40 */ + 0xe8, 0x66, 0x3b, 0xe8, 0xc6, 0xa1, 0x1e, 0xef, 0xd2, 0x80, 0xd5, 0x01, 0x5c, 0xd0, 0x9c, 0xf9, /* Byte value: 0x41 */ + 0x50, 0x15, 0x89, 0x50, 0xb1, 0x39, 0x6e, 0x05, 0x44, 0xef, 0x11, 0x9d, 0x01, 0xbd, 0x78, 0x09, /* Byte value: 0x42 */ + 0x57, 0x4d, 0x4c, 0x57, 0x43, 0x27, 0xa8, 0x88, 0x32, 0x67, 0xed, 0x29, 0x28, 0x60, 0x9d, 0xab, /* Byte value: 0x43 */ + 0xe4, 0xab, 0xeb, 0xe4, 0xad, 0xd4, 0x48, 0x9f, 0xdd, 0xb9, 0xa6, 0xd2, 0x92, 0xba, 0x96, 0x88, /* Byte value: 0x44 */ + 0x1e, 0xde, 0x0b, 0x1e, 0x02, 0x31, 0x87, 0xd8, 0xf8, 0x8f, 0x3e, 0xed, 0x38, 0xe1, 0x11, 0x3b, /* Byte value: 0x45 */ + 0x84, 0x0c, 0xa4, 0x84, 0x73, 0xfa, 0xbd, 0x99, 0xa5, 0xb2, 0xb8, 0x85, 0x2d, 0x6c, 0xc6, 0x86, /* Byte value: 0x46 */ + 0x15, 0x4b, 0x1e, 0x15, 0x9b, 0x5a, 0x17, 0x25, 0x81, 0x3e, 0xb1, 0x8a, 0xdf, 0x56, 0xfe, 0xe8, /* Byte value: 0x47 */ + 0x41, 0xa4, 0x66, 0x41, 0xb2, 0xf1, 0x4b, 0xb1, 0xc0, 0xc6, 0x30, 0xe7, 0x25, 0xcd, 0x80, 0xce, /* Byte value: 0x48 */ + 0x19, 0x86, 0xce, 0x19, 0xf0, 0x2f, 0x41, 0x55, 0x8e, 0x07, 0xc2, 0x59, 0x11, 0x3c, 0xf4, 0x99, /* Byte value: 
0x49 */ + 0x26, 0x5b, 0xec, 0x26, 0x9e, 0xc1, 0x78, 0x3a, 0xce, 0x45, 0xd2, 0x04, 0xb3, 0xc6, 0x35, 0x62, /* Byte value: 0x4a */ + 0xa0, 0x2a, 0xd1, 0xa0, 0xa1, 0x72, 0xdc, 0x0a, 0x88, 0x1d, 0x22, 0xf9, 0x02, 0xb9, 0xf0, 0x12, /* Byte value: 0x4b */ + 0x0b, 0x95, 0x15, 0x0b, 0x99, 0x6b, 0x90, 0xfd, 0x79, 0xb1, 0x8f, 0x67, 0xe7, 0xb7, 0xef, 0xd3, /* Byte value: 0x4c */ + 0x22, 0xa1, 0x1d, 0x22, 0x06, 0x53, 0x4a, 0xab, 0xcb, 0x52, 0x42, 0xf4, 0x48, 0xe0, 0x33, 0x4d, /* Byte value: 0x4d */ + 0x9d, 0x8a, 0x6a, 0x9d, 0x83, 0xd5, 0xfc, 0xcc, 0x2b, 0xb5, 0x7a, 0xdc, 0x3c, 0x50, 0x32, 0x1f, /* Byte value: 0x4e */ + 0xf5, 0x1a, 0x04, 0xf5, 0xae, 0x1c, 0x6d, 0x2b, 0x59, 0x90, 0x87, 0xa8, 0xb6, 0xca, 0x6e, 0x4f, /* Byte value: 0x4f */ + 0xda, 0xa9, 0x64, 0xda, 0xe5, 0xff, 0x9c, 0x45, 0x0d, 0x8e, 0x92, 0xb3, 0x7e, 0xa8, 0xb7, 0x08, /* Byte value: 0x50 */ + 0x37, 0xea, 0x03, 0x37, 0x9d, 0x09, 0x5d, 0x8e, 0x4a, 0x6c, 0xf3, 0x7e, 0x97, 0xb6, 0xcd, 0xa5, /* Byte value: 0x51 */ + 0xa7, 0x72, 0x14, 0xa7, 0x53, 0x6c, 0x1a, 0x87, 0xfe, 0x95, 0xde, 0x4d, 0x2b, 0x64, 0x15, 0xb0, /* Byte value: 0x52 */ + 0xcd, 0x9f, 0xe3, 0xcd, 0x32, 0xec, 0x92, 0xc9, 0x6f, 0x5a, 0x6b, 0x41, 0x3d, 0xed, 0x4a, 0x16, /* Byte value: 0x53 */ + 0x63, 0x05, 0x7b, 0x63, 0xb4, 0xa2, 0x01, 0x1a, 0x0b, 0x94, 0x72, 0x13, 0x6d, 0x2d, 0xb3, 0x83, /* Byte value: 0x54 */ + 0xbc, 0x89, 0x43, 0xbc, 0xef, 0x0a, 0x42, 0x7b, 0x93, 0x78, 0x54, 0x6c, 0xa6, 0x4b, 0xe2, 0xdf, /* Byte value: 0x55 */ + 0xe2, 0x2c, 0x83, 0xe2, 0x79, 0x0f, 0x63, 0xa7, 0x3b, 0x44, 0x7e, 0x5a, 0xf5, 0x8f, 0x93, 0x51, /* Byte value: 0x56 */ + 0xd6, 0x64, 0xb4, 0xd6, 0x8e, 0x8a, 0xca, 0x35, 0x02, 0xb7, 0xe1, 0x60, 0xb0, 0xc2, 0xbd, 0x79, /* Byte value: 0x57 */ + 0x94, 0x62, 0xe6, 0x94, 0x56, 0xf7, 0x75, 0x98, 0xb1, 0xee, 0xbd, 0xc3, 0x47, 0xf4, 0xde, 0x3a, /* Byte value: 0x58 */ + 0xa4, 0xd0, 0x20, 0xa4, 0x39, 0xe0, 0xee, 0x9b, 0x8d, 0x0a, 0xb2, 0x09, 0xf9, 0x9f, 0xf6, 0x3d, /* Byte value: 0x59 */ + 0x88, 0xc1, 0x74, 0x88, 0x18, 0x8f, 0xeb, 0xe9, 0xaa, 0x8b, 0xcb, 0x56, 0xe3, 0x06, 0xcc, 0xf7, /* Byte value: 0x5a */ + 0x62, 0xda, 0xd6, 0x62, 0x92, 0x67, 0xec, 0xaf, 0x9b, 0xe1, 0x56, 0x2f, 0x23, 0xc5, 0x53, 0xf8, /* Byte value: 0x5b */ + 0x2e, 0x6c, 0xcd, 0x2e, 0x6d, 0x26, 0x1c, 0xdb, 0xc4, 0x6b, 0x31, 0x27, 0x86, 0x8a, 0x39, 0x3c, /* Byte value: 0x5c */ + 0xc8, 0xba, 0xbf, 0xc8, 0x8c, 0xbb, 0x4d, 0xed, 0xfa, 0x38, 0xdf, 0x8d, 0x88, 0x23, 0xac, 0x42, /* Byte value: 0x5d */ + 0x18, 0x59, 0x63, 0x18, 0xd6, 0xea, 0xac, 0xe0, 0x1e, 0x72, 0xe6, 0x65, 0x5f, 0xd4, 0x14, 0xe2, /* Byte value: 0x5e */ + 0x06, 0x87, 0x68, 0x06, 0xd4, 0xdb, 0x2b, 0x38, 0xe6, 0xfd, 0xd8, 0x88, 0x67, 0x35, 0x05, 0xd9, /* Byte value: 0x5f */ + 0x16, 0xe9, 0x2a, 0x16, 0xf1, 0xd6, 0xe3, 0x39, 0xf2, 0xa1, 0xdd, 0xce, 0x0d, 0xad, 0x1d, 0x65, /* Byte value: 0x60 */ + 0x13, 0xcc, 0x76, 0x13, 0x4f, 0x81, 0x3c, 0x1d, 0x67, 0xc3, 0x69, 0x02, 0xb8, 0x63, 0xfb, 0x31, /* Byte value: 0x61 */ + 0xd2, 0x9e, 0x45, 0xd2, 0x16, 0x18, 0xf8, 0xa4, 0x07, 0xa0, 0x71, 0x90, 0x4b, 0xe4, 0xbb, 0x56, /* Byte value: 0x62 */ + 0x5b, 0x80, 0x9c, 0x5b, 0x28, 0x52, 0xfe, 0xf8, 0x3d, 0x5e, 0x9e, 0xfa, 0xe6, 0x0a, 0x97, 0xda, /* Byte value: 0x63 */ + 0xff, 0x50, 0xbc, 0xff, 0x11, 0xb2, 0x10, 0x63, 0xb0, 0x54, 0x2c, 0xf3, 0x1f, 0x95, 0x61, 0xe7, /* Byte value: 0x64 */ + 0xbb, 0xd1, 0x86, 0xbb, 0x1d, 0x14, 0x84, 0xf6, 0xe5, 0xf0, 0xa8, 0xd8, 0x8f, 0x96, 0x07, 0x7d, /* Byte value: 0x65 */ + 0xc2, 0xf0, 0x07, 0xc2, 0x33, 0x15, 0x30, 0xa5, 0x13, 0xfc, 0x74, 0xd6, 0x21, 0x7c, 0xa3, 0xea, /* Byte value: 0x66 */ + 0xb9, 0xac, 0x1f, 0xb9, 0x51, 0x5d, 
0x9d, 0x5f, 0x06, 0x1a, 0xe0, 0xa0, 0x13, 0x85, 0x04, 0x8b, /* Byte value: 0x67 */ + 0xbf, 0x2b, 0x77, 0xbf, 0x85, 0x86, 0xb6, 0x67, 0xe0, 0xe7, 0x38, 0x28, 0x74, 0xb0, 0x01, 0x52, /* Byte value: 0x68 */ + 0xef, 0x3e, 0xfe, 0xef, 0x34, 0xbf, 0xd8, 0x62, 0xa4, 0x08, 0x29, 0xb5, 0x75, 0x0d, 0x79, 0x5b, /* Byte value: 0x69 */ + 0xdc, 0x2e, 0x0c, 0xdc, 0x31, 0x24, 0xb7, 0x7d, 0xeb, 0x73, 0x4a, 0x3b, 0x19, 0x9d, 0xb2, 0xd1, /* Byte value: 0x6a */ + 0x1b, 0xfb, 0x57, 0x1b, 0xbc, 0x66, 0x58, 0xfc, 0x6d, 0xed, 0x8a, 0x21, 0x8d, 0x2f, 0xf7, 0x6f, /* Byte value: 0x6b */ + 0x27, 0x84, 0x41, 0x27, 0xb8, 0x04, 0x95, 0x8f, 0x5e, 0x30, 0xf6, 0x38, 0xfd, 0x2e, 0xd5, 0x19, /* Byte value: 0x6c */ + 0x3e, 0x02, 0x8f, 0x3e, 0x48, 0x2b, 0xd4, 0xda, 0xd0, 0x37, 0x34, 0x61, 0xec, 0x12, 0x21, 0x80, /* Byte value: 0x6d */ + 0xe7, 0x09, 0xdf, 0xe7, 0xc7, 0x58, 0xbc, 0x83, 0xae, 0x26, 0xca, 0x96, 0x40, 0x41, 0x75, 0x05, /* Byte value: 0x6e */ + 0xb0, 0x44, 0x93, 0xb0, 0x84, 0x7f, 0x14, 0x0b, 0x9c, 0x41, 0x27, 0xbf, 0x68, 0x21, 0xe8, 0xae, /* Byte value: 0x6f */ + 0x69, 0x4f, 0xc3, 0x69, 0x0b, 0x0c, 0x7c, 0x52, 0xe2, 0x50, 0xd9, 0x48, 0xc4, 0x72, 0xbc, 0x2b, /* Byte value: 0x70 */ + 0x21, 0x03, 0x29, 0x21, 0x6c, 0xdf, 0xbe, 0xb7, 0xb8, 0xcd, 0x2e, 0xb0, 0x9a, 0x1b, 0xd0, 0xc0, /* Byte value: 0x71 */ + 0x0f, 0x6f, 0xe4, 0x0f, 0x01, 0xf9, 0xa2, 0x6c, 0x7c, 0xa6, 0x1f, 0x97, 0x1c, 0x91, 0xe9, 0xfc, /* Byte value: 0x72 */ + 0xf1, 0xe0, 0xf5, 0xf1, 0x36, 0x8e, 0x5f, 0xba, 0x5c, 0x87, 0x17, 0x58, 0x4d, 0xec, 0x68, 0x60, /* Byte value: 0x73 */ + 0x1a, 0x24, 0xfa, 0x1a, 0x9a, 0xa3, 0xb5, 0x49, 0xfd, 0x98, 0xae, 0x1d, 0xc3, 0xc7, 0x17, 0x14, /* Byte value: 0x74 */ + 0x0d, 0x12, 0x7d, 0x0d, 0x4d, 0xb0, 0xbb, 0xc5, 0x9f, 0x4c, 0x57, 0xef, 0x80, 0x82, 0xea, 0x0a, /* Byte value: 0x75 */ + 0xa6, 0xad, 0xb9, 0xa6, 0x75, 0xa9, 0xf7, 0x32, 0x6e, 0xe0, 0xfa, 0x71, 0x65, 0x8c, 0xf5, 0xcb, /* Byte value: 0x76 */ + 0x02, 0x7d, 0x99, 0x02, 0x4c, 0x49, 0x19, 0xa9, 0xe3, 0xea, 0x48, 0x78, 0x9c, 0x13, 0x03, 0xf6, /* Byte value: 0x77 */ + 0x6a, 0xed, 0xf7, 0x6a, 0x61, 0x80, 0x88, 0x4e, 0x91, 0xcf, 0xb5, 0x0c, 0x16, 0x89, 0x5f, 0xa6, /* Byte value: 0x78 */ + 0x08, 0x37, 0x21, 0x08, 0xf3, 0xe7, 0x64, 0xe1, 0x0a, 0x2e, 0xe3, 0x23, 0x35, 0x4c, 0x0c, 0x5e, /* Byte value: 0x79 */ + 0x4c, 0xb6, 0x1b, 0x4c, 0xff, 0x41, 0xf0, 0x74, 0x5f, 0x8a, 0x67, 0x08, 0xa5, 0x4f, 0x6a, 0xc4, /* Byte value: 0x7a */ + 0x49, 0x93, 0x47, 0x49, 0x41, 0x16, 0x2f, 0x50, 0xca, 0xe8, 0xd3, 0xc4, 0x10, 0x81, 0x8c, 0x90, /* Byte value: 0x7b */ + 0x91, 0x47, 0xba, 0x91, 0xe8, 0xa0, 0xaa, 0xbc, 0x24, 0x8c, 0x09, 0x0f, 0xf2, 0x3a, 0x38, 0x6e, /* Byte value: 0x7c */ + 0xe1, 0x8e, 0xb7, 0xe1, 0x13, 0x83, 0x97, 0xbb, 0x48, 0xdb, 0x12, 0x1e, 0x27, 0x74, 0x70, 0xdc, /* Byte value: 0x7d */ + 0x8b, 0x63, 0x40, 0x8b, 0x72, 0x03, 0x1f, 0xf5, 0xd9, 0x14, 0xa7, 0x12, 0x31, 0xfd, 0x2f, 0x7a, /* Byte value: 0x7e */ + 0x14, 0x94, 0xb3, 0x14, 0xbd, 0x9f, 0xfa, 0x90, 0x11, 0x4b, 0x95, 0xb6, 0x91, 0xbe, 0x1e, 0x93, /* Byte value: 0x7f */ + 0x95, 0xbd, 0x4b, 0x95, 0x70, 0x32, 0x98, 0x2d, 0x21, 0x9b, 0x99, 0xff, 0x09, 0x1c, 0x3e, 0x41, /* Byte value: 0x80 */ + 0xb8, 0x73, 0xb2, 0xb8, 0x77, 0x98, 0x70, 0xea, 0x96, 0x6f, 0xc4, 0x9c, 0x5d, 0x6d, 0xe4, 0xf0, /* Byte value: 0x81 */ + 0x9e, 0x28, 0x5e, 0x9e, 0xe9, 0x59, 0x08, 0xd0, 0x58, 0x2a, 0x16, 0x98, 0xee, 0xab, 0xd1, 0x92, /* Byte value: 0x82 */ + 0xeb, 0xc4, 0x0f, 0xeb, 0xac, 0x2d, 0xea, 0xf3, 0xa1, 0x1f, 0xb9, 0x45, 0x8e, 0x2b, 0x7f, 0x74, /* Byte value: 0x83 */ + 0x6e, 0x17, 0x06, 0x6e, 0xf9, 0x12, 0xba, 0xdf, 0x94, 0xd8, 0x25, 0xfc, 0xed, 
0xaf, 0x59, 0x89, /* Byte value: 0x84 */ + 0xc3, 0x2f, 0xaa, 0xc3, 0x15, 0xd0, 0xdd, 0x10, 0x83, 0x89, 0x50, 0xea, 0x6f, 0x94, 0x43, 0x91, /* Byte value: 0x85 */ + 0x98, 0xaf, 0x36, 0x98, 0x3d, 0x82, 0x23, 0xe8, 0xbe, 0xd7, 0xce, 0x10, 0x89, 0x9e, 0xd4, 0x4b, /* Byte value: 0x86 */ + 0x70, 0xc9, 0x0d, 0x70, 0xfb, 0x23, 0x3d, 0x07, 0x6c, 0x57, 0x1b, 0x11, 0xd5, 0x4e, 0x48, 0xb2, /* Byte value: 0x87 */ + 0x72, 0xb4, 0x94, 0x72, 0xb7, 0x6a, 0x24, 0xae, 0x8f, 0xbd, 0x53, 0x69, 0x49, 0x5d, 0x4b, 0x44, /* Byte value: 0x88 */ + 0x04, 0xfa, 0xf1, 0x04, 0x98, 0x92, 0x32, 0x91, 0x05, 0x17, 0x90, 0xf0, 0xfb, 0x26, 0x06, 0x2f, /* Byte value: 0x89 */ + 0xed, 0x43, 0x67, 0xed, 0x78, 0xf6, 0xc1, 0xcb, 0x47, 0xe2, 0x61, 0xcd, 0xe9, 0x1e, 0x7a, 0xad, /* Byte value: 0x8a */ + 0xf2, 0x42, 0xc1, 0xf2, 0x5c, 0x02, 0xab, 0xa6, 0x2f, 0x18, 0x7b, 0x1c, 0x9f, 0x17, 0x8b, 0xed, /* Byte value: 0x8b */ + 0x47, 0x23, 0x0e, 0x47, 0x66, 0x2a, 0x60, 0x89, 0x26, 0x3b, 0xe8, 0x6f, 0x42, 0xf8, 0x85, 0x17, /* Byte value: 0x8c */ + 0x45, 0x5e, 0x97, 0x45, 0x2a, 0x63, 0x79, 0x20, 0xc5, 0xd1, 0xa0, 0x17, 0xde, 0xeb, 0x86, 0xe1, /* Byte value: 0x8d */ + 0x9a, 0xd2, 0xaf, 0x9a, 0x71, 0xcb, 0x3a, 0x41, 0x5d, 0x3d, 0x86, 0x68, 0x15, 0x8d, 0xd7, 0xbd, /* Byte value: 0x8e */ + 0xd5, 0xc6, 0x80, 0xd5, 0xe4, 0x06, 0x3e, 0x29, 0x71, 0x28, 0x8d, 0x24, 0x62, 0x39, 0x5e, 0xf4, /* Byte value: 0x8f */ + 0xd9, 0x0b, 0x50, 0xd9, 0x8f, 0x73, 0x68, 0x59, 0x7e, 0x11, 0xfe, 0xf7, 0xac, 0x53, 0x54, 0x85, /* Byte value: 0x90 */ + 0x8d, 0xe4, 0x28, 0x8d, 0xa6, 0xd8, 0x34, 0xcd, 0x3f, 0xe9, 0x7f, 0x9a, 0x56, 0xc8, 0x2a, 0xa3, /* Byte value: 0x91 */ + 0x3b, 0x27, 0xd3, 0x3b, 0xf6, 0x7c, 0x0b, 0xfe, 0x45, 0x55, 0x80, 0xad, 0x59, 0xdc, 0xc7, 0xd4, /* Byte value: 0x92 */ + 0x6f, 0xc8, 0xab, 0x6f, 0xdf, 0xd7, 0x57, 0x6a, 0x04, 0xad, 0x01, 0xc0, 0xa3, 0x47, 0xb9, 0xf2, /* Byte value: 0x93 */ + 0x4e, 0xcb, 0x82, 0x4e, 0xb3, 0x08, 0xe9, 0xdd, 0xbc, 0x60, 0x2f, 0x70, 0x39, 0x5c, 0x69, 0x32, /* Byte value: 0x94 */ + 0x34, 0x48, 0x37, 0x34, 0xf7, 0x85, 0xa9, 0x92, 0x39, 0xf3, 0x9f, 0x3a, 0x45, 0x4d, 0x2e, 0x28, /* Byte value: 0x95 */ + 0x74, 0x33, 0xfc, 0x74, 0x63, 0xb1, 0x0f, 0x96, 0x69, 0x40, 0x8b, 0xe1, 0x2e, 0x68, 0x4e, 0x9d, /* Byte value: 0x96 */ + 0x9f, 0xf7, 0xf3, 0x9f, 0xcf, 0x9c, 0xe5, 0x65, 0xc8, 0x5f, 0x32, 0xa4, 0xa0, 0x43, 0x31, 0xe9, /* Byte value: 0x97 */ + 0x7d, 0xdb, 0x70, 0x7d, 0xb6, 0x93, 0x86, 0xc2, 0xf3, 0x1b, 0x4c, 0xfe, 0x55, 0xcc, 0xa2, 0xb8, /* Byte value: 0x98 */ + 0x61, 0x78, 0xe2, 0x61, 0xf8, 0xeb, 0x18, 0xb3, 0xe8, 0x7e, 0x3a, 0x6b, 0xf1, 0x3e, 0xb0, 0x75, /* Byte value: 0x99 */ + 0x31, 0x6d, 0x6b, 0x31, 0x49, 0xd2, 0x76, 0xb6, 0xac, 0x91, 0x2b, 0xf6, 0xf0, 0x83, 0xc8, 0x7c, /* Byte value: 0x9a */ + 0x09, 0xe8, 0x8c, 0x09, 0xd5, 0x22, 0x89, 0x54, 0x9a, 0x5b, 0xc7, 0x1f, 0x7b, 0xa4, 0xec, 0x25, /* Byte value: 0x9b */ + 0x12, 0x13, 0xdb, 0x12, 0x69, 0x44, 0xd1, 0xa8, 0xf7, 0xb6, 0x4d, 0x3e, 0xf6, 0x8b, 0x1b, 0x4a, /* Byte value: 0x9c */ + 0x64, 0x5d, 0xbe, 0x64, 0x46, 0xbc, 0xc7, 0x97, 0x7d, 0x1c, 0x8e, 0xa7, 0x44, 0xf0, 0x56, 0x21, /* Byte value: 0x9d */ + 0xd7, 0xbb, 0x19, 0xd7, 0xa8, 0x4f, 0x27, 0x80, 0x92, 0xc2, 0xc5, 0x5c, 0xfe, 0x2a, 0x5d, 0x02, /* Byte value: 0x9e */ + 0x93, 0x3a, 0x23, 0x93, 0xa4, 0xe9, 0xb3, 0x15, 0xc7, 0x66, 0x41, 0x77, 0x6e, 0x29, 0x3b, 0x98, /* Byte value: 0x9f */ + 0x3a, 0xf8, 0x7e, 0x3a, 0xd0, 0xb9, 0xe6, 0x4b, 0xd5, 0x20, 0xa4, 0x91, 0x17, 0x34, 0x27, 0xaf, /* Byte value: 0xa0 */ + 0xa3, 0x88, 0xe5, 0xa3, 0xcb, 0xfe, 0x28, 0x16, 0xfb, 0x82, 0x4e, 0xbd, 0xd0, 0x42, 0x13, 0x9f, /* Byte value: 0xa1 */ + 
0xad, 0x38, 0xac, 0xad, 0xec, 0xc2, 0x67, 0xcf, 0x17, 0x51, 0x75, 0x16, 0x82, 0x3b, 0x1a, 0x18, /* Byte value: 0xa2 */ + 0x1c, 0xa3, 0x92, 0x1c, 0x4e, 0x78, 0x9e, 0x71, 0x1b, 0x65, 0x76, 0x95, 0xa4, 0xf2, 0x12, 0xcd, /* Byte value: 0xa3 */ + 0x7e, 0x79, 0x44, 0x7e, 0xdc, 0x1f, 0x72, 0xde, 0x80, 0x84, 0x20, 0xba, 0x87, 0x37, 0x41, 0x35, /* Byte value: 0xa4 */ + 0x76, 0x4e, 0x65, 0x76, 0x2f, 0xf8, 0x16, 0x3f, 0x8a, 0xaa, 0xc3, 0x99, 0xb2, 0x7b, 0x4d, 0x6b, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x33, 0x10, 0xf2, 0x33, 0x05, 0x9b, 0x6f, 0x1f, 0x4f, 0x7b, 0x63, 0x8e, 0x6c, 0x90, 0xcb, 0x8a, /* Byte value: 0xa7 */ + 0xf7, 0x67, 0x9d, 0xf7, 0xe2, 0x55, 0x74, 0x82, 0xba, 0x7a, 0xcf, 0xd0, 0x2a, 0xd9, 0x6d, 0xb9, /* Byte value: 0xa8 */ + 0x28, 0xeb, 0xa5, 0x28, 0xb9, 0xfd, 0x37, 0xe3, 0x22, 0x96, 0xe9, 0xaf, 0xe1, 0xbf, 0x3c, 0xe5, /* Byte value: 0xa9 */ + 0x23, 0x7e, 0xb0, 0x23, 0x20, 0x96, 0xa7, 0x1e, 0x5b, 0x27, 0x66, 0xc8, 0x06, 0x08, 0xd3, 0x36, /* Byte value: 0xaa */ + 0xb2, 0x39, 0x0a, 0xb2, 0xc8, 0x36, 0x0d, 0xa2, 0x7f, 0xab, 0x6f, 0xc7, 0xf4, 0x32, 0xeb, 0x58, /* Byte value: 0xab */ + 0xbe, 0xf4, 0xda, 0xbe, 0xa3, 0x43, 0x5b, 0xd2, 0x70, 0x92, 0x1c, 0x14, 0x3a, 0x58, 0xe1, 0x29, /* Byte value: 0xac */ + 0x42, 0x06, 0x52, 0x42, 0xd8, 0x7d, 0xbf, 0xad, 0xb3, 0x59, 0x5c, 0xa3, 0xf7, 0x36, 0x63, 0x43, /* Byte value: 0xad */ + 0x11, 0xb1, 0xef, 0x11, 0x03, 0xc8, 0x25, 0xb4, 0x84, 0x29, 0x21, 0x7a, 0x24, 0x70, 0xf8, 0xc7, /* Byte value: 0xae */ + 0xc4, 0x77, 0x6f, 0xc4, 0xe7, 0xce, 0x1b, 0x9d, 0xf5, 0x01, 0xac, 0x5e, 0x46, 0x49, 0xa6, 0x33, /* Byte value: 0xaf */ + 0x5f, 0x7a, 0x6d, 0x5f, 0xb0, 0xc0, 0xcc, 0x69, 0x38, 0x49, 0x0e, 0x0a, 0x1d, 0x2c, 0x91, 0xf5, /* Byte value: 0xb0 */ + 0x30, 0xb2, 0xc6, 0x30, 0x6f, 0x17, 0x9b, 0x03, 0x3c, 0xe4, 0x0f, 0xca, 0xbe, 0x6b, 0x28, 0x07, /* Byte value: 0xb1 */ + 0x01, 0xdf, 0xad, 0x01, 0x26, 0xc5, 0xed, 0xb5, 0x90, 0x75, 0x24, 0x3c, 0x4e, 0xe8, 0xe0, 0x7b, /* Byte value: 0xb2 */ + 0xd8, 0xd4, 0xfd, 0xd8, 0xa9, 0xb6, 0x85, 0xec, 0xee, 0x64, 0xda, 0xcb, 0xe2, 0xbb, 0xb4, 0xfe, /* Byte value: 0xb3 */ + 0x05, 0x25, 0x5c, 0x05, 0xbe, 0x57, 0xdf, 0x24, 0x95, 0x62, 0xb4, 0xcc, 0xb5, 0xce, 0xe6, 0x54, /* Byte value: 0xb4 */ + 0x32, 0xcf, 0x5f, 0x32, 0x23, 0x5e, 0x82, 0xaa, 0xdf, 0x0e, 0x47, 0xb2, 0x22, 0x78, 0x2b, 0xf1, /* Byte value: 0xb5 */ + 0xdf, 0x8c, 0x38, 0xdf, 0x5b, 0xa8, 0x43, 0x61, 0x98, 0xec, 0x26, 0x7f, 0xcb, 0x66, 0x51, 0x5c, /* Byte value: 0xb6 */ + 0xba, 0x0e, 0x2b, 0xba, 0x3b, 0xd1, 0x69, 0x43, 0x75, 0x85, 0x8c, 0xe4, 0xc1, 0x7e, 0xe7, 0x06, /* Byte value: 0xb7 */ + 0x8e, 0x46, 0x1c, 0x8e, 0xcc, 0x54, 0xc0, 0xd1, 0x4c, 0x76, 0x13, 0xde, 0x84, 0x33, 0xc9, 0x2e, /* Byte value: 0xb8 */ + 0x73, 0x6b, 0x39, 0x73, 0x91, 0xaf, 0xc9, 0x1b, 0x1f, 0xc8, 0x77, 0x55, 0x07, 0xb5, 0xab, 0x3f, /* Byte value: 0xb9 */ + 0xfd, 0x2d, 0x25, 0xfd, 0x5d, 0xfb, 0x09, 0xca, 0x53, 0xbe, 0x64, 0x8b, 0x83, 0x86, 0x62, 0x11, /* Byte value: 0xba */ + 0x8c, 0x3b, 0x85, 0x8c, 0x80, 0x1d, 0xd9, 0x78, 0xaf, 0x9c, 0x5b, 0xa6, 0x18, 0x20, 0xca, 0xd8, /* Byte value: 0xbb */ + 0xb3, 0xe6, 0xa7, 0xb3, 0xee, 0xf3, 0xe0, 0x17, 0xef, 0xde, 0x4b, 0xfb, 0xba, 0xda, 0x0b, 0x23, /* Byte value: 0xbc */ + 0x56, 0x92, 0xe1, 0x56, 0x65, 0xe2, 0x45, 0x3d, 0xa2, 0x12, 0xc9, 0x15, 0x66, 0x88, 0x7d, 0xd0, /* Byte value: 0xbd */ + 0x2b, 0x49, 0x91, 0x2b, 0xd3, 0x71, 0xc3, 0xff, 0x51, 0x09, 0x85, 0xeb, 0x33, 0x44, 0xdf, 0x68, /* Byte value: 0xbe */ + 0x82, 0x8b, 0xcc, 0x82, 0xa7, 0x21, 0x96, 
0xa1, 0x43, 0x4f, 0x60, 0x0d, 0x4a, 0x59, 0xc3, 0x5f, /* Byte value: 0xbf */ + 0x6b, 0x32, 0x5a, 0x6b, 0x47, 0x45, 0x65, 0xfb, 0x01, 0xba, 0x91, 0x30, 0x58, 0x61, 0xbf, 0xdd, /* Byte value: 0xc0 */ + 0x87, 0xae, 0x90, 0x87, 0x19, 0x76, 0x49, 0x85, 0xd6, 0x2d, 0xd4, 0xc1, 0xff, 0x97, 0x25, 0x0b, /* Byte value: 0xc1 */ + 0xae, 0x9a, 0x98, 0xae, 0x86, 0x4e, 0x93, 0xd3, 0x64, 0xce, 0x19, 0x52, 0x50, 0xc0, 0xf9, 0x95, /* Byte value: 0xc2 */ + 0x46, 0xfc, 0xa3, 0x46, 0x40, 0xef, 0x8d, 0x3c, 0xb6, 0x4e, 0xcc, 0x53, 0x0c, 0x10, 0x65, 0x6c, /* Byte value: 0xc3 */ + 0xa9, 0xc2, 0x5d, 0xa9, 0x74, 0x50, 0x55, 0x5e, 0x12, 0x46, 0xe5, 0xe6, 0x79, 0x1d, 0x1c, 0x37, /* Byte value: 0xc4 */ + 0xe0, 0x51, 0x1a, 0xe0, 0x35, 0x46, 0x7a, 0x0e, 0xd8, 0xae, 0x36, 0x22, 0x69, 0x9c, 0x90, 0xa7, /* Byte value: 0xc5 */ + 0xc6, 0x0a, 0xf6, 0xc6, 0xab, 0x87, 0x02, 0x34, 0x16, 0xeb, 0xe4, 0x26, 0xda, 0x5a, 0xa5, 0xc5, /* Byte value: 0xc6 */ + 0xaf, 0x45, 0x35, 0xaf, 0xa0, 0x8b, 0x7e, 0x66, 0xf4, 0xbb, 0x3d, 0x6e, 0x1e, 0x28, 0x19, 0xee, /* Byte value: 0xc7 */ + 0xb1, 0x9b, 0x3e, 0xb1, 0xa2, 0xba, 0xf9, 0xbe, 0x0c, 0x34, 0x03, 0x83, 0x26, 0xc9, 0x08, 0xd5, /* Byte value: 0xc8 */ + 0x0e, 0xb0, 0x49, 0x0e, 0x27, 0x3c, 0x4f, 0xd9, 0xec, 0xd3, 0x3b, 0xab, 0x52, 0x79, 0x09, 0x87, /* Byte value: 0xc9 */ + 0x89, 0x1e, 0xd9, 0x89, 0x3e, 0x4a, 0x06, 0x5c, 0x3a, 0xfe, 0xef, 0x6a, 0xad, 0xee, 0x2c, 0x8c, /* Byte value: 0xca */ + 0x79, 0x21, 0x81, 0x79, 0x2e, 0x01, 0xb4, 0x53, 0xf6, 0x0c, 0xdc, 0x0e, 0xae, 0xea, 0xa4, 0x97, /* Byte value: 0xcb */ + 0xd3, 0x41, 0xe8, 0xd3, 0x30, 0xdd, 0x15, 0x11, 0x97, 0xd5, 0x55, 0xac, 0x05, 0x0c, 0x5b, 0x2d, /* Byte value: 0xcc */ + 0x2d, 0xce, 0xf9, 0x2d, 0x07, 0xaa, 0xe8, 0xc7, 0xb7, 0xf4, 0x5d, 0x63, 0x54, 0x71, 0xda, 0xb1, /* Byte value: 0xcd */ + 0xc9, 0x65, 0x12, 0xc9, 0xaa, 0x7e, 0xa0, 0x58, 0x6a, 0x4d, 0xfb, 0xb1, 0xc6, 0xcb, 0x4c, 0x39, /* Byte value: 0xce */ + 0x0a, 0x4a, 0xb8, 0x0a, 0xbf, 0xae, 0x7d, 0x48, 0xe9, 0xc4, 0xab, 0x5b, 0xa9, 0x5f, 0x0f, 0xa8, /* Byte value: 0xcf */ + 0xdb, 0x76, 0xc9, 0xdb, 0xc3, 0x3a, 0x71, 0xf0, 0x9d, 0xfb, 0xb6, 0x8f, 0x30, 0x40, 0x57, 0x73, /* Byte value: 0xd0 */ + 0x7b, 0x5c, 0x18, 0x7b, 0x62, 0x48, 0xad, 0xfa, 0x15, 0xe6, 0x94, 0x76, 0x32, 0xf9, 0xa7, 0x61, /* Byte value: 0xd1 */ + 0xde, 0x53, 0x95, 0xde, 0x7d, 0x6d, 0xae, 0xd4, 0x08, 0x99, 0x02, 0x43, 0x85, 0x8e, 0xb1, 0x27, /* Byte value: 0xd2 */ + 0x48, 0x4c, 0xea, 0x48, 0x67, 0xd3, 0xc2, 0xe5, 0x5a, 0x9d, 0xf7, 0xf8, 0x5e, 0x69, 0x6c, 0xeb, /* Byte value: 0xd3 */ + 0xe3, 0xf3, 0x2e, 0xe3, 0x5f, 0xca, 0x8e, 0x12, 0xab, 0x31, 0x5a, 0x66, 0xbb, 0x67, 0x73, 0x2a, /* Byte value: 0xd4 */ + 0x4b, 0xee, 0xde, 0x4b, 0x0d, 0x5f, 0x36, 0xf9, 0x29, 0x02, 0x9b, 0xbc, 0x8c, 0x92, 0x8f, 0x66, /* Byte value: 0xd5 */ + 0x35, 0x97, 0x9a, 0x35, 0xd1, 0x40, 0x44, 0x27, 0xa9, 0x86, 0xbb, 0x06, 0x0b, 0xa5, 0xce, 0x53, /* Byte value: 0xd6 */ + 0x8a, 0xbc, 0xed, 0x8a, 0x54, 0xc6, 0xf2, 0x40, 0x49, 0x61, 0x83, 0x2e, 0x7f, 0x15, 0xcf, 0x01, /* Byte value: 0xd7 */ + 0xfb, 0xaa, 0x4d, 0xfb, 0x89, 0x20, 0x22, 0xf2, 0xb5, 0x43, 0xbc, 0x03, 0xe4, 0xb3, 0x67, 0xc8, /* Byte value: 0xd8 */ + 0x85, 0xd3, 0x09, 0x85, 0x55, 0x3f, 0x50, 0x2c, 0x35, 0xc7, 0x9c, 0xb9, 0x63, 0x84, 0x26, 0xfd, /* Byte value: 0xd9 */ + 0xcb, 0x18, 0x8b, 0xcb, 0xe6, 0x37, 0xb9, 0xf1, 0x89, 0xa7, 0xb3, 0xc9, 0x5a, 0xd8, 0x4f, 0xcf, /* Byte value: 0xda */ + 0xb7, 0x1c, 0x56, 0xb7, 0x76, 0x61, 0xd2, 0x86, 0xea, 0xc9, 0xdb, 0x0b, 0x41, 0xfc, 0x0d, 0x0c, /* Byte value: 0xdb */ + 0x2a, 0x96, 0x3c, 0x2a, 0xf5, 0xb4, 0x2e, 0x4a, 0xc1, 0x7c, 0xa1, 0xd7, 0x7d, 0xac, 
0x3f, 0x13, /* Byte value: 0xdc */ + 0xab, 0xbf, 0xc4, 0xab, 0x38, 0x19, 0x4c, 0xf7, 0xf1, 0xac, 0xad, 0x9e, 0xe5, 0x0e, 0x1f, 0xc1, /* Byte value: 0xdd */ + 0x3f, 0xdd, 0x22, 0x3f, 0x6e, 0xee, 0x39, 0x6f, 0x40, 0x42, 0x10, 0x5d, 0xa2, 0xfa, 0xc1, 0xfb, /* Byte value: 0xde */ + 0x39, 0x5a, 0x4a, 0x39, 0xba, 0x35, 0x12, 0x57, 0xa6, 0xbf, 0xc8, 0xd5, 0xc5, 0xcf, 0xc4, 0x22, /* Byte value: 0xdf */ + 0xea, 0x1b, 0xa2, 0xea, 0x8a, 0xe8, 0x07, 0x46, 0x31, 0x6a, 0x9d, 0x79, 0xc0, 0xc3, 0x9f, 0x0f, /* Byte value: 0xe0 */ + 0xb5, 0x61, 0xcf, 0xb5, 0x3a, 0x28, 0xcb, 0x2f, 0x09, 0x23, 0x93, 0x73, 0xdd, 0xef, 0x0e, 0xfa, /* Byte value: 0xe1 */ + 0x51, 0xca, 0x24, 0x51, 0x97, 0xfc, 0x83, 0xb0, 0xd4, 0x9a, 0x35, 0xa1, 0x4f, 0x55, 0x98, 0x72, /* Byte value: 0xe2 */ + 0x9b, 0x0d, 0x02, 0x9b, 0x57, 0x0e, 0xd7, 0xf4, 0xcd, 0x48, 0xa2, 0x54, 0x5b, 0x65, 0x37, 0xc6, /* Byte value: 0xe3 */ + 0x7a, 0x83, 0xb5, 0x7a, 0x44, 0x8d, 0x40, 0x4f, 0x85, 0x93, 0xb0, 0x4a, 0x7c, 0x11, 0x47, 0x1a, /* Byte value: 0xe4 */ + 0xa1, 0xf5, 0x7c, 0xa1, 0x87, 0xb7, 0x31, 0xbf, 0x18, 0x68, 0x06, 0xc5, 0x4c, 0x51, 0x10, 0x69, /* Byte value: 0xe5 */ + 0xd0, 0xe3, 0xdc, 0xd0, 0x5a, 0x51, 0xe1, 0x0d, 0xe4, 0x4a, 0x39, 0xe8, 0xd7, 0xf7, 0xb8, 0xa0, /* Byte value: 0xe6 */ + 0x4a, 0x31, 0x73, 0x4a, 0x2b, 0x9a, 0xdb, 0x4c, 0xb9, 0x77, 0xbf, 0x80, 0xc2, 0x7a, 0x6f, 0x1d, /* Byte value: 0xe7 */ + 0x77, 0x91, 0xc8, 0x77, 0x09, 0x3d, 0xfb, 0x8a, 0x1a, 0xdf, 0xe7, 0xa5, 0xfc, 0x93, 0xad, 0x10, /* Byte value: 0xe8 */ + 0x5d, 0x07, 0xf4, 0x5d, 0xfc, 0x89, 0xd5, 0xc0, 0xdb, 0xa3, 0x46, 0x72, 0x81, 0x3f, 0x92, 0x03, /* Byte value: 0xe9 */ + 0x4f, 0x14, 0x2f, 0x4f, 0x95, 0xcd, 0x04, 0x68, 0x2c, 0x15, 0x0b, 0x4c, 0x77, 0xb4, 0x89, 0x49, /* Byte value: 0xea */ + 0x1d, 0x7c, 0x3f, 0x1d, 0x68, 0xbd, 0x73, 0xc4, 0x8b, 0x10, 0x52, 0xa9, 0xea, 0x1a, 0xf2, 0xb6, /* Byte value: 0xeb */ + 0x83, 0x54, 0x61, 0x83, 0x81, 0xe4, 0x7b, 0x14, 0xd3, 0x3a, 0x44, 0x31, 0x04, 0xb1, 0x23, 0x24, /* Byte value: 0xec */ + 0x52, 0x68, 0x10, 0x52, 0xfd, 0x70, 0x77, 0xac, 0xa7, 0x05, 0x59, 0xe5, 0x9d, 0xae, 0x7b, 0xff, /* Byte value: 0xed */ + 0x25, 0xf9, 0xd8, 0x25, 0xf4, 0x4d, 0x8c, 0x26, 0xbd, 0xda, 0xbe, 0x40, 0x61, 0x3d, 0xd6, 0xef, /* Byte value: 0xee */ + 0xfe, 0x8f, 0x11, 0xfe, 0x37, 0x77, 0xfd, 0xd6, 0x20, 0x21, 0x08, 0xcf, 0x51, 0x7d, 0x81, 0x9c, /* Byte value: 0xef */ + 0x3d, 0xa0, 0xbb, 0x3d, 0x22, 0xa7, 0x20, 0xc6, 0xa3, 0xa8, 0x58, 0x25, 0x3e, 0xe9, 0xc2, 0x0d, /* Byte value: 0xf0 */ + 0x58, 0x22, 0xa8, 0x58, 0x42, 0xde, 0x0a, 0xe4, 0x4e, 0xc1, 0xf2, 0xbe, 0x34, 0xf1, 0x74, 0x57, /* Byte value: 0xf1 */ + 0x55, 0x30, 0xd5, 0x55, 0x0f, 0x6e, 0xb1, 0x21, 0xd1, 0x8d, 0xa5, 0x51, 0xb4, 0x73, 0x9e, 0x5d, /* Byte value: 0xf2 */ + 0xf9, 0xd7, 0xd4, 0xf9, 0xc5, 0x69, 0x3b, 0x5b, 0x56, 0xa9, 0xf4, 0x7b, 0x78, 0xa0, 0x64, 0x3e, /* Byte value: 0xf3 */ + 0xac, 0xe7, 0x01, 0xac, 0xca, 0x07, 0x8a, 0x7a, 0x87, 0x24, 0x51, 0x2a, 0xcc, 0xd3, 0xfa, 0x63, /* Byte value: 0xf4 */ + 0xfc, 0xf2, 0x88, 0xfc, 0x7b, 0x3e, 0xe4, 0x7f, 0xc3, 0xcb, 0x40, 0xb7, 0xcd, 0x6e, 0x82, 0x6a, /* Byte value: 0xf5 */ + 0x68, 0x90, 0x6e, 0x68, 0x2d, 0xc9, 0x91, 0xe7, 0x72, 0x25, 0xfd, 0x74, 0x8a, 0x9a, 0x5c, 0x50, /* Byte value: 0xf6 */ + 0xc1, 0x52, 0x33, 0xc1, 0x59, 0x99, 0xc4, 0xb9, 0x60, 0x63, 0x18, 0x92, 0xf3, 0x87, 0x40, 0x67, /* Byte value: 0xf7 */ + 0x38, 0x85, 0xe7, 0x38, 0x9c, 0xf0, 0xff, 0xe2, 0x36, 0xca, 0xec, 0xe9, 0x8b, 0x27, 0x24, 0x59, /* Byte value: 0xf8 */ + 0x75, 0xec, 0x51, 0x75, 0x45, 0x74, 0xe2, 0x23, 0xf9, 0x35, 0xaf, 0xdd, 0x60, 0x80, 0xae, 0xe6, /* Byte value: 0xf9 */ + 0x6d, 
0xb5, 0x32, 0x6d, 0x93, 0x9e, 0x4e, 0xc3, 0xe7, 0x47, 0x49, 0xb8, 0x3f, 0x54, 0xba, 0x04, /* Byte value: 0xfa */ + 0x1f, 0x01, 0xa6, 0x1f, 0x24, 0xf4, 0x6a, 0x6d, 0x68, 0xfa, 0x1a, 0xd1, 0x76, 0x09, 0xf1, 0x40, /* Byte value: 0xfb */ + 0x17, 0x36, 0x87, 0x17, 0xd7, 0x13, 0x0e, 0x8c, 0x62, 0xd4, 0xf9, 0xf2, 0x43, 0x45, 0xfd, 0x1e, /* Byte value: 0xfc */ + 0x44, 0x81, 0x3a, 0x44, 0x0c, 0xa6, 0x94, 0x95, 0x55, 0xa4, 0x84, 0x2b, 0x90, 0x03, 0x66, 0x9a, /* Byte value: 0xfd */ + 0xcc, 0x40, 0x4e, 0xcc, 0x14, 0x29, 0x7f, 0x7c, 0xff, 0x2f, 0x4f, 0x7d, 0x73, 0x05, 0xaa, 0x6d, /* Byte value: 0xfe */ + 0xc5, 0xa8, 0xc2, 0xc5, 0xc1, 0x0b, 0xf6, 0x28, 0x65, 0x74, 0x88, 0x62, 0x08, 0xa1, 0x46, 0x48, /* Byte value: 0xff */ + 0xee, 0xe1, 0x53, 0xee, 0x12, 0x7a, 0x35, 0xd7, 0x34, 0x7d, 0x0d, 0x89, 0x3b, 0xe5, 0x99, 0x20, /* Byte value: 0x00 */ + + /* Matrix row: 11 */ + 0xfe, 0x50, 0x71, 0x7c, 0xfc, 0x9b, 0xad, 0x09, 0xfc, 0x5e, 0x38, 0x3f, 0x3f, 0x68, 0x53, 0x1b, /* Byte value: 0x01 */ + 0x6d, 0x9f, 0xd8, 0xf4, 0xee, 0xe2, 0xd1, 0xe9, 0xee, 0x90, 0x3c, 0xda, 0xda, 0xa3, 0xbc, 0xf8, /* Byte value: 0x02 */ + 0x53, 0x94, 0xe4, 0x01, 0xdd, 0x92, 0x0e, 0x3a, 0xdd, 0xb8, 0xb0, 0xa6, 0xa6, 0x24, 0x5d, 0x4e, /* Byte value: 0x03 */ + 0xab, 0xb8, 0x14, 0x53, 0x11, 0x91, 0xf4, 0xf0, 0x11, 0x18, 0xc5, 0x95, 0x95, 0x7d, 0x5f, 0xd3, /* Byte value: 0x04 */ + 0xc0, 0x5b, 0x4d, 0x89, 0xcf, 0xeb, 0x72, 0xda, 0xcf, 0x76, 0xb4, 0x43, 0x43, 0xef, 0xb2, 0xad, /* Byte value: 0x05 */ + 0x7d, 0xbd, 0xa6, 0x24, 0x6e, 0x65, 0x3b, 0xa4, 0x6e, 0xc4, 0x27, 0xfa, 0xfa, 0x9a, 0x46, 0x2f, /* Byte value: 0x06 */ + 0xaf, 0x51, 0xea, 0x67, 0x31, 0xc0, 0x2f, 0x72, 0x31, 0x0d, 0xb3, 0x9d, 0x9d, 0xe2, 0x80, 0x96, /* Byte value: 0x07 */ + 0x72, 0x7b, 0xc6, 0x6f, 0x16, 0xda, 0x5f, 0x61, 0x16, 0x84, 0x7a, 0xe4, 0xe4, 0x01, 0x2d, 0xa3, /* Byte value: 0x08 */ + 0x27, 0x93, 0xa3, 0x40, 0xfb, 0xd0, 0x06, 0x98, 0xfb, 0xc2, 0x87, 0x4e, 0x4e, 0x14, 0x21, 0x6b, /* Byte value: 0x09 */ + 0xf9, 0x87, 0x2e, 0x5f, 0xc4, 0x86, 0xbc, 0x0b, 0xc4, 0x34, 0x89, 0x31, 0x31, 0x0e, 0x45, 0x1d, /* Byte value: 0x0a */ + 0x8e, 0xbe, 0xc8, 0x09, 0xfa, 0x88, 0x7e, 0x29, 0xfa, 0x31, 0x79, 0xdf, 0xdf, 0xc7, 0xf0, 0x7b, /* Byte value: 0x0b */ + 0x8a, 0x57, 0x36, 0x3d, 0xda, 0xd9, 0xa5, 0xab, 0xda, 0x24, 0x0f, 0xd7, 0xd7, 0x58, 0x2f, 0x3e, /* Byte value: 0x0c */ + 0x3c, 0x9e, 0x43, 0xef, 0x23, 0xb9, 0x53, 0x92, 0x23, 0xc3, 0xb7, 0x78, 0x78, 0x29, 0x6f, 0x75, /* Byte value: 0x0d */ + 0x50, 0xaa, 0x45, 0x16, 0xc5, 0xde, 0xc4, 0xba, 0xc5, 0xc7, 0x77, 0xa0, 0xa0, 0xdd, 0x94, 0x0d, /* Byte value: 0x0e */ + 0xe1, 0xb4, 0x6f, 0xe7, 0x04, 0xa3, 0x23, 0x81, 0x04, 0x4a, 0x7e, 0x01, 0x01, 0xca, 0xc2, 0x40, /* Byte value: 0x0f */ + 0x41, 0x23, 0xe5, 0xcb, 0x4d, 0xdc, 0x68, 0x36, 0x4d, 0x07, 0x90, 0x82, 0x82, 0xb3, 0x29, 0x5a, /* Byte value: 0x10 */ + 0xb4, 0x5c, 0x0a, 0xc8, 0xe9, 0xa9, 0x7a, 0x78, 0xe9, 0x0c, 0x83, 0xab, 0xab, 0xdf, 0xce, 0x88, /* Byte value: 0x11 */ + 0xd7, 0xae, 0x6c, 0x7a, 0x77, 0x71, 0x89, 0x95, 0x77, 0x48, 0x1e, 0x6d, 0x6d, 0xb0, 0x5e, 0x7c, /* Byte value: 0x12 */ + 0x1e, 0x4f, 0xc0, 0x96, 0xf0, 0xbd, 0xc8, 0x49, 0xf0, 0x80, 0xba, 0x3c, 0x3c, 0xf5, 0xd6, 0xdb, /* Byte value: 0x13 */ + 0x23, 0x7a, 0x5d, 0x74, 0xdb, 0x81, 0xdd, 0x1a, 0xdb, 0xd7, 0xf1, 0x46, 0x46, 0x8b, 0xfe, 0x2e, /* Byte value: 0x14 */ + 0x2a, 0xc0, 0xbc, 0x11, 0x93, 0xa6, 0xee, 0x1c, 0x93, 0x69, 0xe1, 0x54, 0x54, 0x21, 0xc4, 0x24, /* Byte value: 0x15 */ + 0x75, 0xac, 0x99, 0x4c, 0x2e, 0xc7, 0x4e, 0x63, 0x2e, 0xee, 0xcb, 0xea, 0xea, 0x67, 0x3b, 0xa5, /* Byte value: 0x16 */ + 0xba, 0x31, 0xb4, 0x8e, 
0x99, 0x93, 0x58, 0x7c, 0x99, 0xd8, 0x22, 0xb7, 0xb7, 0x13, 0xe2, 0x84, /* Byte value: 0x17 */ + 0x86, 0xaf, 0xf7, 0x61, 0xba, 0x2a, 0x0b, 0xee, 0xba, 0x1b, 0x95, 0xcf, 0xcf, 0x3a, 0x8d, 0xf1, /* Byte value: 0x18 */ + 0xdb, 0x56, 0xad, 0x26, 0x17, 0x82, 0x27, 0xd0, 0x17, 0x77, 0x84, 0x75, 0x75, 0xd2, 0xfc, 0xb3, /* Byte value: 0x19 */ + 0x76, 0x92, 0x38, 0x5b, 0x36, 0x8b, 0x84, 0xe3, 0x36, 0x91, 0x0c, 0xec, 0xec, 0x9e, 0xf2, 0xe6, /* Byte value: 0x1a */ + 0xb7, 0x62, 0xab, 0xdf, 0xf1, 0xe5, 0xb0, 0xf8, 0xf1, 0x73, 0x44, 0xad, 0xad, 0x26, 0x07, 0xcb, /* Byte value: 0x1b */ + 0x2f, 0x82, 0x9c, 0x28, 0xbb, 0x72, 0x73, 0x5f, 0xbb, 0xe8, 0x6b, 0x5e, 0x5e, 0xe9, 0x5c, 0xe1, /* Byte value: 0x1c */ + 0xe3, 0x21, 0x10, 0xfd, 0x14, 0x6a, 0xaf, 0xc0, 0x14, 0xa1, 0x45, 0x05, 0x05, 0x64, 0x4c, 0x83, /* Byte value: 0x1d */ + 0x51, 0x01, 0x9b, 0x1b, 0xcd, 0x5b, 0x82, 0x7b, 0xcd, 0x53, 0x8b, 0xa2, 0xa2, 0x8a, 0xd3, 0x8d, /* Byte value: 0x1e */ + 0xd2, 0xec, 0x4c, 0x43, 0x5f, 0xa5, 0x14, 0xd6, 0x5f, 0xc9, 0x94, 0x67, 0x67, 0x78, 0xc6, 0xb9, /* Byte value: 0x1f */ + 0xb1, 0x1e, 0x2a, 0xf1, 0xc1, 0x7d, 0xe7, 0x3b, 0xc1, 0x8d, 0x09, 0xa1, 0xa1, 0x17, 0x56, 0x4d, /* Byte value: 0x20 */ + 0xb6, 0xc9, 0x75, 0xd2, 0xf9, 0x60, 0xf6, 0x39, 0xf9, 0xe7, 0xb8, 0xaf, 0xaf, 0x71, 0x40, 0x4b, /* Byte value: 0x21 */ + 0x03, 0x3e, 0xa1, 0x17, 0x18, 0x4c, 0xca, 0x80, 0x18, 0x7f, 0xc7, 0x06, 0x06, 0xf9, 0xc9, 0x43, /* Byte value: 0x22 */ + 0x44, 0x61, 0xc5, 0xf2, 0x65, 0x08, 0xf5, 0x75, 0x65, 0x86, 0x1a, 0x88, 0x88, 0x7b, 0xb1, 0x9f, /* Byte value: 0x23 */ + 0x9a, 0x75, 0x48, 0xed, 0x5a, 0x5e, 0x4f, 0xe6, 0x5a, 0x70, 0x14, 0xf7, 0xf7, 0x61, 0xd5, 0xe9, /* Byte value: 0x24 */ + 0x8d, 0x80, 0x69, 0x1e, 0xe2, 0xc4, 0xb4, 0xa9, 0xe2, 0x4e, 0xbe, 0xd9, 0xd9, 0x3e, 0x39, 0x38, /* Byte value: 0x25 */ + 0xea, 0x9b, 0xf1, 0x98, 0x5c, 0x4d, 0x9c, 0xc6, 0x5c, 0x1f, 0x55, 0x17, 0x17, 0xce, 0x76, 0x89, /* Byte value: 0x26 */ + 0xc4, 0xb2, 0xb3, 0xbd, 0xef, 0xba, 0xa9, 0x58, 0xef, 0x63, 0xc2, 0x4b, 0x4b, 0x70, 0x6d, 0xe8, /* Byte value: 0x27 */ + 0xad, 0xc4, 0x95, 0x7d, 0x21, 0x09, 0xa3, 0x33, 0x21, 0xe6, 0x88, 0x99, 0x99, 0x4c, 0x0e, 0x55, /* Byte value: 0x28 */ + 0xb9, 0x0f, 0x15, 0x99, 0x81, 0xdf, 0x92, 0xfc, 0x81, 0xa7, 0xe5, 0xb1, 0xb1, 0xea, 0x2b, 0xc7, /* Byte value: 0x29 */ + 0xe2, 0x8a, 0xce, 0xf0, 0x1c, 0xef, 0xe9, 0x01, 0x1c, 0x35, 0xb9, 0x07, 0x07, 0x33, 0x0b, 0x03, /* Byte value: 0x2a */ + 0xe6, 0x63, 0x30, 0xc4, 0x3c, 0xbe, 0x32, 0x83, 0x3c, 0x20, 0xcf, 0x0f, 0x0f, 0xac, 0xd4, 0x46, /* Byte value: 0x2b */ + 0x99, 0x4b, 0xe9, 0xfa, 0x42, 0x12, 0x85, 0x66, 0x42, 0x0f, 0xd3, 0xf1, 0xf1, 0x98, 0x1c, 0xaa, /* Byte value: 0x2c */ + 0x29, 0xfe, 0x1d, 0x06, 0x8b, 0xea, 0x24, 0x9c, 0x8b, 0x16, 0x26, 0x52, 0x52, 0xd8, 0x0d, 0x67, /* Byte value: 0x2d */ + 0xa9, 0x2d, 0x6b, 0x49, 0x01, 0x58, 0x78, 0xb1, 0x01, 0xf3, 0xfe, 0x91, 0x91, 0xd3, 0xd1, 0x10, /* Byte value: 0x2e */ + 0x61, 0x67, 0x19, 0xa8, 0x8e, 0x11, 0x7f, 0xac, 0x8e, 0xaf, 0xa6, 0xc2, 0xc2, 0xc1, 0x1e, 0x37, /* Byte value: 0x2f */ + 0xd0, 0x79, 0x33, 0x59, 0x4f, 0x6c, 0x98, 0x97, 0x4f, 0x22, 0xaf, 0x63, 0x63, 0xd6, 0x48, 0x7a, /* Byte value: 0x30 */ + 0x48, 0x99, 0x04, 0xae, 0x05, 0xfb, 0x5b, 0x30, 0x05, 0xb9, 0x80, 0x90, 0x90, 0x19, 0x13, 0x50, /* Byte value: 0x31 */ + 0xf1, 0x96, 0x11, 0x37, 0x84, 0x24, 0xc9, 0xcc, 0x84, 0x1e, 0x65, 0x21, 0x21, 0xf3, 0x38, 0x97, /* Byte value: 0x32 */ + 0x91, 0x5a, 0xd6, 0x92, 0x02, 0xb0, 0xf0, 0xa1, 0x02, 0x25, 0x3f, 0xe1, 0xe1, 0x65, 0x61, 0x20, /* Byte value: 0x33 */ + 0x65, 0x8e, 0xe7, 0x9c, 0xae, 0x40, 0xa4, 0x2e, 0xae, 0xba, 0xd0, 
0xca, 0xca, 0x5e, 0xc1, 0x72, /* Byte value: 0x34 */ + 0x24, 0xad, 0x02, 0x57, 0xe3, 0x9c, 0xcc, 0x18, 0xe3, 0xbd, 0x40, 0x48, 0x48, 0xed, 0xe8, 0x28, /* Byte value: 0x35 */ + 0x9c, 0x09, 0xc9, 0xc3, 0x6a, 0xc6, 0x18, 0x25, 0x6a, 0x8e, 0x59, 0xfb, 0xfb, 0x50, 0x84, 0x6f, /* Byte value: 0x36 */ + 0xc8, 0x4a, 0x72, 0xe1, 0x8f, 0x49, 0x07, 0x1d, 0x8f, 0x5c, 0x58, 0x53, 0x53, 0x12, 0xcf, 0x27, /* Byte value: 0x37 */ + 0x14, 0xcb, 0x80, 0xe4, 0xa0, 0xd6, 0x31, 0xcf, 0xa0, 0x41, 0x6d, 0x28, 0x28, 0xa6, 0x25, 0x92, /* Byte value: 0x38 */ + 0x70, 0xee, 0xb9, 0x75, 0x06, 0x13, 0xd3, 0x20, 0x06, 0x6f, 0x41, 0xe0, 0xe0, 0xaf, 0xa3, 0x60, /* Byte value: 0x39 */ + 0x39, 0xdc, 0x63, 0xd6, 0x0b, 0x6d, 0xce, 0xd1, 0x0b, 0x42, 0x3d, 0x72, 0x72, 0xe1, 0xf7, 0xb0, /* Byte value: 0x3a */ + 0x55, 0xe8, 0x65, 0x2f, 0xed, 0x0a, 0x59, 0xf9, 0xed, 0x46, 0xfd, 0xaa, 0xaa, 0x15, 0x0c, 0xc8, /* Byte value: 0x3b */ + 0x13, 0x1c, 0xdf, 0xc7, 0x98, 0xcb, 0x20, 0xcd, 0x98, 0x2b, 0xdc, 0x26, 0x26, 0xc0, 0x33, 0x94, /* Byte value: 0x3c */ + 0xd6, 0x05, 0xb2, 0x77, 0x7f, 0xf4, 0xcf, 0x54, 0x7f, 0xdc, 0xe2, 0x6f, 0x6f, 0xe7, 0x19, 0xfc, /* Byte value: 0x3d */ + 0xfb, 0x12, 0x51, 0x45, 0xd4, 0x4f, 0x30, 0x4a, 0xd4, 0xdf, 0xb2, 0x35, 0x35, 0xa0, 0xcb, 0xde, /* Byte value: 0x3e */ + 0x22, 0xd1, 0x83, 0x79, 0xd3, 0x04, 0x9b, 0xdb, 0xd3, 0x43, 0x0d, 0x44, 0x44, 0xdc, 0xb9, 0xae, /* Byte value: 0x3f */ + 0xda, 0xfd, 0x73, 0x2b, 0x1f, 0x07, 0x61, 0x11, 0x1f, 0xe3, 0x78, 0x77, 0x77, 0x85, 0xbb, 0x33, /* Byte value: 0x40 */ + 0x25, 0x06, 0xdc, 0x5a, 0xeb, 0x19, 0x8a, 0xd9, 0xeb, 0x29, 0xbc, 0x4a, 0x4a, 0xba, 0xaf, 0xa8, /* Byte value: 0x41 */ + 0xe7, 0xc8, 0xee, 0xc9, 0x34, 0x3b, 0x74, 0x42, 0x34, 0xb4, 0x33, 0x0d, 0x0d, 0xfb, 0x93, 0xc6, /* Byte value: 0x42 */ + 0xe4, 0xf6, 0x4f, 0xde, 0x2c, 0x77, 0xbe, 0xc2, 0x2c, 0xcb, 0xf4, 0x0b, 0x0b, 0x02, 0x5a, 0x85, /* Byte value: 0x43 */ + 0xa3, 0xa9, 0x2b, 0x3b, 0x51, 0x33, 0x81, 0x37, 0x51, 0x32, 0x29, 0x85, 0x85, 0x80, 0x22, 0x59, /* Byte value: 0x44 */ + 0x8c, 0x2b, 0xb7, 0x13, 0xea, 0x41, 0xf2, 0x68, 0xea, 0xda, 0x42, 0xdb, 0xdb, 0x69, 0x7e, 0xb8, /* Byte value: 0x45 */ + 0x19, 0x98, 0x9f, 0xb5, 0xc8, 0xa0, 0xd9, 0x4b, 0xc8, 0xea, 0x0b, 0x32, 0x32, 0x93, 0xc0, 0xdd, /* Byte value: 0x46 */ + 0x09, 0xba, 0xe1, 0x65, 0x48, 0x27, 0x33, 0x06, 0x48, 0xbe, 0x10, 0x12, 0x12, 0xaa, 0x3a, 0x0a, /* Byte value: 0x47 */ + 0x2d, 0x17, 0xe3, 0x32, 0xab, 0xbb, 0xff, 0x1e, 0xab, 0x03, 0x50, 0x5a, 0x5a, 0x47, 0xd2, 0x22, /* Byte value: 0x48 */ + 0x8f, 0x15, 0x16, 0x04, 0xf2, 0x0d, 0x38, 0xe8, 0xf2, 0xa5, 0x85, 0xdd, 0xdd, 0x90, 0xb7, 0xfb, /* Byte value: 0x49 */ + 0x94, 0x18, 0xf6, 0xab, 0x2a, 0x64, 0x6d, 0xe2, 0x2a, 0xa4, 0xb5, 0xeb, 0xeb, 0xad, 0xf9, 0xe5, /* Byte value: 0x4a */ + 0x0d, 0x53, 0x1f, 0x51, 0x68, 0x76, 0xe8, 0x84, 0x68, 0xab, 0x66, 0x1a, 0x1a, 0x35, 0xe5, 0x4f, /* Byte value: 0x4b */ + 0x85, 0x91, 0x56, 0x76, 0xa2, 0x66, 0xc1, 0x6e, 0xa2, 0x64, 0x52, 0xc9, 0xc9, 0xc3, 0x44, 0xb2, /* Byte value: 0x4c */ + 0x57, 0x7d, 0x1a, 0x35, 0xfd, 0xc3, 0xd5, 0xb8, 0xfd, 0xad, 0xc6, 0xae, 0xae, 0xbb, 0x82, 0x0b, /* Byte value: 0x4d */ + 0x96, 0x8d, 0x89, 0xb1, 0x3a, 0xad, 0xe1, 0xa3, 0x3a, 0x4f, 0x8e, 0xef, 0xef, 0x03, 0x77, 0x26, /* Byte value: 0x4e */ + 0x69, 0x76, 0x26, 0xc0, 0xce, 0xb3, 0x0a, 0x6b, 0xce, 0x85, 0x4a, 0xd2, 0xd2, 0x3c, 0x63, 0xbd, /* Byte value: 0x4f */ + 0xf8, 0x2c, 0xf0, 0x52, 0xcc, 0x03, 0xfa, 0xca, 0xcc, 0xa0, 0x75, 0x33, 0x33, 0x59, 0x02, 0x9d, /* Byte value: 0x50 */ + 0x5e, 0xc7, 0xfb, 0x50, 0xb5, 0xe4, 0xe6, 0xbe, 0xb5, 0x13, 0xd6, 0xbc, 0xbc, 0x11, 0xb8, 0x01, /* Byte value: 
0x51 */ + 0x0e, 0x6d, 0xbe, 0x46, 0x70, 0x3a, 0x22, 0x04, 0x70, 0xd4, 0xa1, 0x1c, 0x1c, 0xcc, 0x2c, 0x0c, /* Byte value: 0x52 */ + 0x71, 0x45, 0x67, 0x78, 0x0e, 0x96, 0x95, 0xe1, 0x0e, 0xfb, 0xbd, 0xe2, 0xe2, 0xf8, 0xe4, 0xe0, /* Byte value: 0x53 */ + 0x7a, 0x6a, 0xf9, 0x07, 0x56, 0x78, 0x2a, 0xa6, 0x56, 0xae, 0x96, 0xf4, 0xf4, 0xfc, 0x50, 0x29, /* Byte value: 0x54 */ + 0x01, 0xab, 0xde, 0x0d, 0x08, 0x85, 0x46, 0xc1, 0x08, 0x94, 0xfc, 0x02, 0x02, 0x57, 0x47, 0x80, /* Byte value: 0x55 */ + 0xe0, 0x1f, 0xb1, 0xea, 0x0c, 0x26, 0x65, 0x40, 0x0c, 0xde, 0x82, 0x03, 0x03, 0x9d, 0x85, 0xc0, /* Byte value: 0x56 */ + 0x7e, 0x83, 0x07, 0x33, 0x76, 0x29, 0xf1, 0x24, 0x76, 0xbb, 0xe0, 0xfc, 0xfc, 0x63, 0x8f, 0x6c, /* Byte value: 0x57 */ + 0x93, 0xcf, 0xa9, 0x88, 0x12, 0x79, 0x7c, 0xe0, 0x12, 0xce, 0x04, 0xe5, 0xe5, 0xcb, 0xef, 0xe3, /* Byte value: 0x58 */ + 0xce, 0x36, 0xf3, 0xcf, 0xbf, 0xd1, 0x50, 0xde, 0xbf, 0xa2, 0x15, 0x5f, 0x5f, 0x23, 0x9e, 0xa1, /* Byte value: 0x59 */ + 0x9f, 0x37, 0x68, 0xd4, 0x72, 0x8a, 0xd2, 0xa5, 0x72, 0xf1, 0x9e, 0xfd, 0xfd, 0xa9, 0x4d, 0x2c, /* Byte value: 0x5a */ + 0x3a, 0xe2, 0xc2, 0xc1, 0x13, 0x21, 0x04, 0x51, 0x13, 0x3d, 0xfa, 0x74, 0x74, 0x18, 0x3e, 0xf3, /* Byte value: 0x5b */ + 0xd1, 0xd2, 0xed, 0x54, 0x47, 0xe9, 0xde, 0x56, 0x47, 0xb6, 0x53, 0x61, 0x61, 0x81, 0x0f, 0xfa, /* Byte value: 0x5c */ + 0xf2, 0xa8, 0xb0, 0x20, 0x9c, 0x68, 0x03, 0x4c, 0x9c, 0x61, 0xa2, 0x27, 0x27, 0x0a, 0xf1, 0xd4, /* Byte value: 0x5d */ + 0xcf, 0x9d, 0x2d, 0xc2, 0xb7, 0x54, 0x16, 0x1f, 0xb7, 0x36, 0xe9, 0x5d, 0x5d, 0x74, 0xd9, 0x21, /* Byte value: 0x5e */ + 0x43, 0xb6, 0x9a, 0xd1, 0x5d, 0x15, 0xe4, 0x77, 0x5d, 0xec, 0xab, 0x86, 0x86, 0x1d, 0xa7, 0x99, /* Byte value: 0x5f */ + 0xc9, 0xe1, 0xac, 0xec, 0x87, 0xcc, 0x41, 0xdc, 0x87, 0xc8, 0xa4, 0x51, 0x51, 0x45, 0x88, 0xa7, /* Byte value: 0x60 */ + 0x4a, 0x0c, 0x7b, 0xb4, 0x15, 0x32, 0xd7, 0x71, 0x15, 0x52, 0xbb, 0x94, 0x94, 0xb7, 0x9d, 0x93, /* Byte value: 0x61 */ + 0xbd, 0xe6, 0xeb, 0xad, 0xa1, 0x8e, 0x49, 0x7e, 0xa1, 0xb2, 0x93, 0xb9, 0xb9, 0x75, 0xf4, 0x82, /* Byte value: 0x62 */ + 0x62, 0x59, 0xb8, 0xbf, 0x96, 0x5d, 0xb5, 0x2c, 0x96, 0xd0, 0x61, 0xc4, 0xc4, 0x38, 0xd7, 0x74, /* Byte value: 0x63 */ + 0xac, 0x6f, 0x4b, 0x70, 0x29, 0x8c, 0xe5, 0xf2, 0x29, 0x72, 0x74, 0x9b, 0x9b, 0x1b, 0x49, 0xd5, /* Byte value: 0x64 */ + 0x02, 0x95, 0x7f, 0x1a, 0x10, 0xc9, 0x8c, 0x41, 0x10, 0xeb, 0x3b, 0x04, 0x04, 0xae, 0x8e, 0xc3, /* Byte value: 0x65 */ + 0x37, 0xb1, 0xdd, 0x90, 0x7b, 0x57, 0xec, 0xd5, 0x7b, 0x96, 0x9c, 0x6e, 0x6e, 0x2d, 0xdb, 0xbc, /* Byte value: 0x66 */ + 0x82, 0x46, 0x09, 0x55, 0x9a, 0x7b, 0xd0, 0x6c, 0x9a, 0x0e, 0xe3, 0xc7, 0xc7, 0xa5, 0x52, 0xb4, /* Byte value: 0x67 */ + 0xc1, 0xf0, 0x93, 0x84, 0xc7, 0x6e, 0x34, 0x1b, 0xc7, 0xe2, 0x48, 0x41, 0x41, 0xb8, 0xf5, 0x2d, /* Byte value: 0x68 */ + 0x26, 0x38, 0x7d, 0x4d, 0xf3, 0x55, 0x40, 0x59, 0xf3, 0x56, 0x7b, 0x4c, 0x4c, 0x43, 0x66, 0xeb, /* Byte value: 0x69 */ + 0xbb, 0x9a, 0x6a, 0x83, 0x91, 0x16, 0x1e, 0xbd, 0x91, 0x4c, 0xde, 0xb5, 0xb5, 0x44, 0xa5, 0x04, /* Byte value: 0x6a */ + 0x0f, 0xc6, 0x60, 0x4b, 0x78, 0xbf, 0x64, 0xc5, 0x78, 0x40, 0x5d, 0x1e, 0x1e, 0x9b, 0x6b, 0x8c, /* Byte value: 0x6b */ + 0xd4, 0x90, 0xcd, 0x6d, 0x6f, 0x3d, 0x43, 0x15, 0x6f, 0x37, 0xd9, 0x6b, 0x6b, 0x49, 0x97, 0x3f, /* Byte value: 0x6c */ + 0x5b, 0x85, 0xdb, 0x69, 0x9d, 0x30, 0x7b, 0xfd, 0x9d, 0x92, 0x5c, 0xb6, 0xb6, 0xd9, 0x20, 0xc4, /* Byte value: 0x6d */ + 0x63, 0xf2, 0x66, 0xb2, 0x9e, 0xd8, 0xf3, 0xed, 0x9e, 0x44, 0x9d, 0xc6, 0xc6, 0x6f, 0x90, 0xf4, /* Byte value: 0x6e */ + 0x87, 0x04, 0x29, 0x6c, 0xb2, 0xaf, 
0x4d, 0x2f, 0xb2, 0x8f, 0x69, 0xcd, 0xcd, 0x6d, 0xca, 0x71, /* Byte value: 0x6f */ + 0xbf, 0x73, 0x94, 0xb7, 0xb1, 0x47, 0xc5, 0x3f, 0xb1, 0x59, 0xa8, 0xbd, 0xbd, 0xdb, 0x7a, 0x41, /* Byte value: 0x70 */ + 0x97, 0x26, 0x57, 0xbc, 0x32, 0x28, 0xa7, 0x62, 0x32, 0xdb, 0x72, 0xed, 0xed, 0x54, 0x30, 0xa6, /* Byte value: 0x71 */ + 0x46, 0xf4, 0xba, 0xe8, 0x75, 0xc1, 0x79, 0x34, 0x75, 0x6d, 0x21, 0x8c, 0x8c, 0xd5, 0x3f, 0x5c, /* Byte value: 0x72 */ + 0xaa, 0x13, 0xca, 0x5e, 0x19, 0x14, 0xb2, 0x31, 0x19, 0x8c, 0x39, 0x97, 0x97, 0x2a, 0x18, 0x53, /* Byte value: 0x73 */ + 0x4f, 0x4e, 0x5b, 0x8d, 0x3d, 0xe6, 0x4a, 0x32, 0x3d, 0xd3, 0x31, 0x9e, 0x9e, 0x7f, 0x05, 0x56, /* Byte value: 0x74 */ + 0xc6, 0x27, 0xcc, 0xa7, 0xff, 0x73, 0x25, 0x19, 0xff, 0x88, 0xf9, 0x4f, 0x4f, 0xde, 0xe3, 0x2b, /* Byte value: 0x75 */ + 0x4e, 0xe5, 0x85, 0x80, 0x35, 0x63, 0x0c, 0xf3, 0x35, 0x47, 0xcd, 0x9c, 0x9c, 0x28, 0x42, 0xd6, /* Byte value: 0x76 */ + 0x80, 0xd3, 0x76, 0x4f, 0x8a, 0xb2, 0x5c, 0x2d, 0x8a, 0xe5, 0xd8, 0xc3, 0xc3, 0x0b, 0xdc, 0x77, /* Byte value: 0x77 */ + 0x7f, 0x28, 0xd9, 0x3e, 0x7e, 0xac, 0xb7, 0xe5, 0x7e, 0x2f, 0x1c, 0xfe, 0xfe, 0x34, 0xc8, 0xec, /* Byte value: 0x78 */ + 0x45, 0xca, 0x1b, 0xff, 0x6d, 0x8d, 0xb3, 0xb4, 0x6d, 0x12, 0xe6, 0x8a, 0x8a, 0x2c, 0xf6, 0x1f, /* Byte value: 0x79 */ + 0xeb, 0x30, 0x2f, 0x95, 0x54, 0xc8, 0xda, 0x07, 0x54, 0x8b, 0xa9, 0x15, 0x15, 0x99, 0x31, 0x09, /* Byte value: 0x7a */ + 0x68, 0xdd, 0xf8, 0xcd, 0xc6, 0x36, 0x4c, 0xaa, 0xc6, 0x11, 0xb6, 0xd0, 0xd0, 0x6b, 0x24, 0x3d, /* Byte value: 0x7b */ + 0x10, 0x22, 0x7e, 0xd0, 0x80, 0x87, 0xea, 0x4d, 0x80, 0x54, 0x1b, 0x20, 0x20, 0x39, 0xfa, 0xd7, /* Byte value: 0x7c */ + 0x20, 0x44, 0xfc, 0x63, 0xc3, 0xcd, 0x17, 0x9a, 0xc3, 0xa8, 0x36, 0x40, 0x40, 0x72, 0x37, 0x6d, /* Byte value: 0x7d */ + 0x5f, 0x6c, 0x25, 0x5d, 0xbd, 0x61, 0xa0, 0x7f, 0xbd, 0x87, 0x2a, 0xbe, 0xbe, 0x46, 0xff, 0x81, /* Byte value: 0x7e */ + 0x49, 0x32, 0xda, 0xa3, 0x0d, 0x7e, 0x1d, 0xf1, 0x0d, 0x2d, 0x7c, 0x92, 0x92, 0x4e, 0x54, 0xd0, /* Byte value: 0x7f */ + 0xd3, 0x47, 0x92, 0x4e, 0x57, 0x20, 0x52, 0x17, 0x57, 0x5d, 0x68, 0x65, 0x65, 0x2f, 0x81, 0x39, /* Byte value: 0x80 */ + 0xc2, 0xce, 0x32, 0x93, 0xdf, 0x22, 0xfe, 0x9b, 0xdf, 0x9d, 0x8f, 0x47, 0x47, 0x41, 0x3c, 0x6e, /* Byte value: 0x81 */ + 0x56, 0xd6, 0xc4, 0x38, 0xf5, 0x46, 0x93, 0x79, 0xf5, 0x39, 0x3a, 0xac, 0xac, 0xec, 0xc5, 0x8b, /* Byte value: 0x82 */ + 0xe5, 0x5d, 0x91, 0xd3, 0x24, 0xf2, 0xf8, 0x03, 0x24, 0x5f, 0x08, 0x09, 0x09, 0x55, 0x1d, 0x05, /* Byte value: 0x83 */ + 0xbc, 0x4d, 0x35, 0xa0, 0xa9, 0x0b, 0x0f, 0xbf, 0xa9, 0x26, 0x6f, 0xbb, 0xbb, 0x22, 0xb3, 0x02, /* Byte value: 0x84 */ + 0x77, 0x39, 0xe6, 0x56, 0x3e, 0x0e, 0xc2, 0x22, 0x3e, 0x05, 0xf0, 0xee, 0xee, 0xc9, 0xb5, 0x66, /* Byte value: 0x85 */ + 0x15, 0x60, 0x5e, 0xe9, 0xa8, 0x53, 0x77, 0x0e, 0xa8, 0xd5, 0x91, 0x2a, 0x2a, 0xf1, 0x62, 0x12, /* Byte value: 0x86 */ + 0x30, 0x66, 0x82, 0xb3, 0x43, 0x4a, 0xfd, 0xd7, 0x43, 0xfc, 0x2d, 0x60, 0x60, 0x4b, 0xcd, 0xba, /* Byte value: 0x87 */ + 0xb0, 0xb5, 0xf4, 0xfc, 0xc9, 0xf8, 0xa1, 0xfa, 0xc9, 0x19, 0xf5, 0xa3, 0xa3, 0x40, 0x11, 0xcd, /* Byte value: 0x88 */ + 0xc3, 0x65, 0xec, 0x9e, 0xd7, 0xa7, 0xb8, 0x5a, 0xd7, 0x09, 0x73, 0x45, 0x45, 0x16, 0x7b, 0xee, /* Byte value: 0x89 */ + 0xa6, 0xeb, 0x0b, 0x02, 0x79, 0xe7, 0x1c, 0x74, 0x79, 0xb3, 0xa3, 0x8f, 0x8f, 0x48, 0xba, 0x9c, /* Byte value: 0x8a */ + 0x6a, 0x48, 0x87, 0xd7, 0xd6, 0xff, 0xc0, 0xeb, 0xd6, 0xfa, 0x8d, 0xd4, 0xd4, 0xc5, 0xaa, 0xfe, /* Byte value: 0x8b */ + 0x6e, 0xa1, 0x79, 0xe3, 0xf6, 0xae, 0x1b, 0x69, 0xf6, 0xef, 0xfb, 0xdc, 0xdc, 
0x5a, 0x75, 0xbb, /* Byte value: 0x8c */ + 0xee, 0x72, 0x0f, 0xac, 0x7c, 0x1c, 0x47, 0x44, 0x7c, 0x0a, 0x23, 0x1f, 0x1f, 0x51, 0xa9, 0xcc, /* Byte value: 0x8d */ + 0x95, 0xb3, 0x28, 0xa6, 0x22, 0xe1, 0x2b, 0x23, 0x22, 0x30, 0x49, 0xe9, 0xe9, 0xfa, 0xbe, 0x65, /* Byte value: 0x8e */ + 0xbe, 0xd8, 0x4a, 0xba, 0xb9, 0xc2, 0x83, 0xfe, 0xb9, 0xcd, 0x54, 0xbf, 0xbf, 0x8c, 0x3d, 0xc1, /* Byte value: 0x8f */ + 0x38, 0x77, 0xbd, 0xdb, 0x03, 0xe8, 0x88, 0x10, 0x03, 0xd6, 0xc1, 0x70, 0x70, 0xb6, 0xb0, 0x30, /* Byte value: 0x90 */ + 0x1c, 0xda, 0xbf, 0x8c, 0xe0, 0x74, 0x44, 0x08, 0xe0, 0x6b, 0x81, 0x38, 0x38, 0x5b, 0x58, 0x18, /* Byte value: 0x91 */ + 0xd8, 0x68, 0x0c, 0x31, 0x0f, 0xce, 0xed, 0x50, 0x0f, 0x08, 0x43, 0x73, 0x73, 0x2b, 0x35, 0xf0, /* Byte value: 0x92 */ + 0xfc, 0xc5, 0x0e, 0x66, 0xec, 0x52, 0x21, 0x48, 0xec, 0xb5, 0x03, 0x3b, 0x3b, 0xc6, 0xdd, 0xd8, /* Byte value: 0x93 */ + 0x6b, 0xe3, 0x59, 0xda, 0xde, 0x7a, 0x86, 0x2a, 0xde, 0x6e, 0x71, 0xd6, 0xd6, 0x92, 0xed, 0x7e, /* Byte value: 0x94 */ + 0x9e, 0x9c, 0xb6, 0xd9, 0x7a, 0x0f, 0x94, 0x64, 0x7a, 0x65, 0x62, 0xff, 0xff, 0xfe, 0x0a, 0xac, /* Byte value: 0x95 */ + 0xf3, 0x03, 0x6e, 0x2d, 0x94, 0xed, 0x45, 0x8d, 0x94, 0xf5, 0x5e, 0x25, 0x25, 0x5d, 0xb6, 0x54, /* Byte value: 0x96 */ + 0x16, 0x5e, 0xff, 0xfe, 0xb0, 0x1f, 0xbd, 0x8e, 0xb0, 0xaa, 0x56, 0x2c, 0x2c, 0x08, 0xab, 0x51, /* Byte value: 0x97 */ + 0xf6, 0x41, 0x4e, 0x14, 0xbc, 0x39, 0xd8, 0xce, 0xbc, 0x74, 0xd4, 0x2f, 0x2f, 0x95, 0x2e, 0x91, /* Byte value: 0x98 */ + 0xfa, 0xb9, 0x8f, 0x48, 0xdc, 0xca, 0x76, 0x8b, 0xdc, 0x4b, 0x4e, 0x37, 0x37, 0xf7, 0x8c, 0x5e, /* Byte value: 0x99 */ + 0x1d, 0x71, 0x61, 0x81, 0xe8, 0xf1, 0x02, 0xc9, 0xe8, 0xff, 0x7d, 0x3a, 0x3a, 0x0c, 0x1f, 0x98, /* Byte value: 0x9a */ + 0x05, 0x42, 0x20, 0x39, 0x28, 0xd4, 0x9d, 0x43, 0x28, 0x81, 0x8a, 0x0a, 0x0a, 0xc8, 0x98, 0xc5, /* Byte value: 0x9b */ + 0x0a, 0x84, 0x40, 0x72, 0x50, 0x6b, 0xf9, 0x86, 0x50, 0xc1, 0xd7, 0x14, 0x14, 0x53, 0xf3, 0x49, /* Byte value: 0x9c */ + 0x79, 0x54, 0x58, 0x10, 0x4e, 0x34, 0xe0, 0x26, 0x4e, 0xd1, 0x51, 0xf2, 0xf2, 0x05, 0x99, 0x6a, /* Byte value: 0x9d */ + 0x3e, 0x0b, 0x3c, 0xf5, 0x33, 0x70, 0xdf, 0xd3, 0x33, 0x28, 0x8c, 0x7c, 0x7c, 0x87, 0xe1, 0xb6, /* Byte value: 0x9e */ + 0x90, 0xf1, 0x08, 0x9f, 0x0a, 0x35, 0xb6, 0x60, 0x0a, 0xb1, 0xc3, 0xe3, 0xe3, 0x32, 0x26, 0xa0, /* Byte value: 0x9f */ + 0x98, 0xe0, 0x37, 0xf7, 0x4a, 0x97, 0xc3, 0xa7, 0x4a, 0x9b, 0x2f, 0xf3, 0xf3, 0xcf, 0x5b, 0x2a, /* Byte value: 0xa0 */ + 0xcd, 0x08, 0x52, 0xd8, 0xa7, 0x9d, 0x9a, 0x5e, 0xa7, 0xdd, 0xd2, 0x59, 0x59, 0xda, 0x57, 0xe2, /* Byte value: 0xa1 */ + 0xcb, 0x74, 0xd3, 0xf6, 0x97, 0x05, 0xcd, 0x9d, 0x97, 0x23, 0x9f, 0x55, 0x55, 0xeb, 0x06, 0x64, /* Byte value: 0xa2 */ + 0x0c, 0xf8, 0xc1, 0x5c, 0x60, 0xf3, 0xae, 0x45, 0x60, 0x3f, 0x9a, 0x18, 0x18, 0x62, 0xa2, 0xcf, /* Byte value: 0xa3 */ + 0x36, 0x1a, 0x03, 0x9d, 0x73, 0xd2, 0xaa, 0x14, 0x73, 0x02, 0x60, 0x6c, 0x6c, 0x7a, 0x9c, 0x3c, /* Byte value: 0xa4 */ + 0x73, 0xd0, 0x18, 0x62, 0x1e, 0x5f, 0x19, 0xa0, 0x1e, 0x10, 0x86, 0xe6, 0xe6, 0x56, 0x6a, 0x23, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x9d, 0xa2, 0x17, 0xce, 0x62, 0x43, 0x5e, 0xe4, 0x62, 0x1a, 0xa5, 0xf9, 0xf9, 0x07, 0xc3, 0xef, /* Byte value: 0xa7 */ + 0xe9, 0xa5, 0x50, 0x8f, 0x44, 0x01, 0x56, 0x46, 0x44, 0x60, 0x92, 0x11, 0x11, 0x37, 0xbf, 0xca, /* Byte value: 0xa8 */ + 0x92, 0x64, 0x77, 0x85, 0x1a, 0xfc, 0x3a, 0x21, 0x1a, 0x5a, 0xf8, 0xe7, 0xe7, 0x9c, 0xa8, 0x63, /* Byte value: 0xa9 */ + 
0x17, 0xf5, 0x21, 0xf3, 0xb8, 0x9a, 0xfb, 0x4f, 0xb8, 0x3e, 0xaa, 0x2e, 0x2e, 0x5f, 0xec, 0xd1, /* Byte value: 0xaa */ + 0x07, 0xd7, 0x5f, 0x23, 0x38, 0x1d, 0x11, 0x02, 0x38, 0x6a, 0xb1, 0x0e, 0x0e, 0x66, 0x16, 0x06, /* Byte value: 0xab */ + 0x81, 0x78, 0xa8, 0x42, 0x82, 0x37, 0x1a, 0xec, 0x82, 0x71, 0x24, 0xc1, 0xc1, 0x5c, 0x9b, 0xf7, /* Byte value: 0xac */ + 0xed, 0x4c, 0xae, 0xbb, 0x64, 0x50, 0x8d, 0xc4, 0x64, 0x75, 0xe4, 0x19, 0x19, 0xa8, 0x60, 0x8f, /* Byte value: 0xad */ + 0xca, 0xdf, 0x0d, 0xfb, 0x9f, 0x80, 0x8b, 0x5c, 0x9f, 0xb7, 0x63, 0x57, 0x57, 0xbc, 0x41, 0xe4, /* Byte value: 0xae */ + 0x74, 0x07, 0x47, 0x41, 0x26, 0x42, 0x08, 0xa2, 0x26, 0x7a, 0x37, 0xe8, 0xe8, 0x30, 0x7c, 0x25, /* Byte value: 0xaf */ + 0xa1, 0x3c, 0x54, 0x21, 0x41, 0xfa, 0x0d, 0x76, 0x41, 0xd9, 0x12, 0x81, 0x81, 0x2e, 0xac, 0x9a, /* Byte value: 0xb0 */ + 0x5d, 0xf9, 0x5a, 0x47, 0xad, 0xa8, 0x2c, 0x3e, 0xad, 0x6c, 0x11, 0xba, 0xba, 0xe8, 0x71, 0x42, /* Byte value: 0xb1 */ + 0x40, 0x88, 0x3b, 0xc6, 0x45, 0x59, 0x2e, 0xf7, 0x45, 0x93, 0x6c, 0x80, 0x80, 0xe4, 0x6e, 0xda, /* Byte value: 0xb2 */ + 0x78, 0xff, 0x86, 0x1d, 0x46, 0xb1, 0xa6, 0xe7, 0x46, 0x45, 0xad, 0xf0, 0xf0, 0x52, 0xde, 0xea, /* Byte value: 0xb3 */ + 0x83, 0xed, 0xd7, 0x58, 0x92, 0xfe, 0x96, 0xad, 0x92, 0x9a, 0x1f, 0xc5, 0xc5, 0xf2, 0x15, 0x34, /* Byte value: 0xb4 */ + 0xdd, 0x2a, 0x2c, 0x08, 0x27, 0x1a, 0x70, 0x13, 0x27, 0x89, 0xc9, 0x79, 0x79, 0xe3, 0xad, 0x35, /* Byte value: 0xb5 */ + 0x7b, 0xc1, 0x27, 0x0a, 0x5e, 0xfd, 0x6c, 0x67, 0x5e, 0x3a, 0x6a, 0xf6, 0xf6, 0xab, 0x17, 0xa9, /* Byte value: 0xb6 */ + 0x42, 0x1d, 0x44, 0xdc, 0x55, 0x90, 0xa2, 0xb6, 0x55, 0x78, 0x57, 0x84, 0x84, 0x4a, 0xe0, 0x19, /* Byte value: 0xb7 */ + 0xdc, 0x81, 0xf2, 0x05, 0x2f, 0x9f, 0x36, 0xd2, 0x2f, 0x1d, 0x35, 0x7b, 0x7b, 0xb4, 0xea, 0xb5, /* Byte value: 0xb8 */ + 0xf0, 0x3d, 0xcf, 0x3a, 0x8c, 0xa1, 0x8f, 0x0d, 0x8c, 0x8a, 0x99, 0x23, 0x23, 0xa4, 0x7f, 0x17, /* Byte value: 0xb9 */ + 0x2c, 0xbc, 0x3d, 0x3f, 0xa3, 0x3e, 0xb9, 0xdf, 0xa3, 0x97, 0xac, 0x58, 0x58, 0x10, 0x95, 0xa2, /* Byte value: 0xba */ + 0x5c, 0x52, 0x84, 0x4a, 0xa5, 0x2d, 0x6a, 0xff, 0xa5, 0xf8, 0xed, 0xb8, 0xb8, 0xbf, 0x36, 0xc2, /* Byte value: 0xbb */ + 0x47, 0x5f, 0x64, 0xe5, 0x7d, 0x44, 0x3f, 0xf5, 0x7d, 0xf9, 0xdd, 0x8e, 0x8e, 0x82, 0x78, 0xdc, /* Byte value: 0xbc */ + 0xa4, 0x7e, 0x74, 0x18, 0x69, 0x2e, 0x90, 0x35, 0x69, 0x58, 0x98, 0x8b, 0x8b, 0xe6, 0x34, 0x5f, /* Byte value: 0xbd */ + 0x52, 0x3f, 0x3a, 0x0c, 0xd5, 0x17, 0x48, 0xfb, 0xd5, 0x2c, 0x4c, 0xa4, 0xa4, 0x73, 0x1a, 0xce, /* Byte value: 0xbe */ + 0x5a, 0x2e, 0x05, 0x64, 0x95, 0xb5, 0x3d, 0x3c, 0x95, 0x06, 0xa0, 0xb4, 0xb4, 0x8e, 0x67, 0x44, /* Byte value: 0xbf */ + 0x3f, 0xa0, 0xe2, 0xf8, 0x3b, 0xf5, 0x99, 0x12, 0x3b, 0xbc, 0x70, 0x7e, 0x7e, 0xd0, 0xa6, 0x36, /* Byte value: 0xc0 */ + 0xd9, 0xc3, 0xd2, 0x3c, 0x07, 0x4b, 0xab, 0x91, 0x07, 0x9c, 0xbf, 0x71, 0x71, 0x7c, 0x72, 0x70, /* Byte value: 0xc1 */ + 0x0b, 0x2f, 0x9e, 0x7f, 0x58, 0xee, 0xbf, 0x47, 0x58, 0x55, 0x2b, 0x16, 0x16, 0x04, 0xb4, 0xc9, /* Byte value: 0xc2 */ + 0x2e, 0x29, 0x42, 0x25, 0xb3, 0xf7, 0x35, 0x9e, 0xb3, 0x7c, 0x97, 0x5c, 0x5c, 0xbe, 0x1b, 0x61, /* Byte value: 0xc3 */ + 0x08, 0x11, 0x3f, 0x68, 0x40, 0xa2, 0x75, 0xc7, 0x40, 0x2a, 0xec, 0x10, 0x10, 0xfd, 0x7d, 0x8a, /* Byte value: 0xc4 */ + 0x60, 0xcc, 0xc7, 0xa5, 0x86, 0x94, 0x39, 0x6d, 0x86, 0x3b, 0x5a, 0xc0, 0xc0, 0x96, 0x59, 0xb7, /* Byte value: 0xc5 */ + 0xf4, 0xd4, 0x31, 0x0e, 0xac, 0xf0, 0x54, 0x8f, 0xac, 0x9f, 0xef, 0x2b, 0x2b, 0x3b, 0xa0, 0x52, /* Byte value: 0xc6 */ + 0x4b, 0xa7, 0xa5, 0xb9, 0x1d, 0xb7, 0x91, 
0xb0, 0x1d, 0xc6, 0x47, 0x96, 0x96, 0xe0, 0xda, 0x13, /* Byte value: 0xc7 */ + 0xc7, 0x8c, 0x12, 0xaa, 0xf7, 0xf6, 0x63, 0xd8, 0xf7, 0x1c, 0x05, 0x4d, 0x4d, 0x89, 0xa4, 0xab, /* Byte value: 0xc8 */ + 0x06, 0x7c, 0x81, 0x2e, 0x30, 0x98, 0x57, 0xc3, 0x30, 0xfe, 0x4d, 0x0c, 0x0c, 0x31, 0x51, 0x86, /* Byte value: 0xc9 */ + 0xdf, 0xbf, 0x53, 0x12, 0x37, 0xd3, 0xfc, 0x52, 0x37, 0x62, 0xf2, 0x7d, 0x7d, 0x4d, 0x23, 0xf6, /* Byte value: 0xca */ + 0x35, 0x24, 0xa2, 0x8a, 0x6b, 0x9e, 0x60, 0x94, 0x6b, 0x7d, 0xa7, 0x6a, 0x6a, 0x83, 0x55, 0x7f, /* Byte value: 0xcb */ + 0xfd, 0x6e, 0xd0, 0x6b, 0xe4, 0xd7, 0x67, 0x89, 0xe4, 0x21, 0xff, 0x39, 0x39, 0x91, 0x9a, 0x58, /* Byte value: 0xcc */ + 0x11, 0x89, 0xa0, 0xdd, 0x88, 0x02, 0xac, 0x8c, 0x88, 0xc0, 0xe7, 0x22, 0x22, 0x6e, 0xbd, 0x57, /* Byte value: 0xcd */ + 0xb2, 0x20, 0x8b, 0xe6, 0xd9, 0x31, 0x2d, 0xbb, 0xd9, 0xf2, 0xce, 0xa7, 0xa7, 0xee, 0x9f, 0x0e, /* Byte value: 0xce */ + 0xc5, 0x19, 0x6d, 0xb0, 0xe7, 0x3f, 0xef, 0x99, 0xe7, 0xf7, 0x3e, 0x49, 0x49, 0x27, 0x2a, 0x68, /* Byte value: 0xcf */ + 0xb8, 0xa4, 0xcb, 0x94, 0x89, 0x5a, 0xd4, 0x3d, 0x89, 0x33, 0x19, 0xb3, 0xb3, 0xbd, 0x6c, 0x47, /* Byte value: 0xd0 */ + 0xb5, 0xf7, 0xd4, 0xc5, 0xe1, 0x2c, 0x3c, 0xb9, 0xe1, 0x98, 0x7f, 0xa9, 0xa9, 0x88, 0x89, 0x08, /* Byte value: 0xd1 */ + 0x3b, 0x49, 0x1c, 0xcc, 0x1b, 0xa4, 0x42, 0x90, 0x1b, 0xa9, 0x06, 0x76, 0x76, 0x4f, 0x79, 0x73, /* Byte value: 0xd2 */ + 0x28, 0x55, 0xc3, 0x0b, 0x83, 0x6f, 0x62, 0x5d, 0x83, 0x82, 0xda, 0x50, 0x50, 0x8f, 0x4a, 0xe7, /* Byte value: 0xd3 */ + 0xa0, 0x97, 0x8a, 0x2c, 0x49, 0x7f, 0x4b, 0xb7, 0x49, 0x4d, 0xee, 0x83, 0x83, 0x79, 0xeb, 0x1a, /* Byte value: 0xd4 */ + 0xe8, 0x0e, 0x8e, 0x82, 0x4c, 0x84, 0x10, 0x87, 0x4c, 0xf4, 0x6e, 0x13, 0x13, 0x60, 0xf8, 0x4a, /* Byte value: 0xd5 */ + 0xde, 0x14, 0x8d, 0x1f, 0x3f, 0x56, 0xba, 0x93, 0x3f, 0xf6, 0x0e, 0x7f, 0x7f, 0x1a, 0x64, 0x76, /* Byte value: 0xd6 */ + 0x1f, 0xe4, 0x1e, 0x9b, 0xf8, 0x38, 0x8e, 0x88, 0xf8, 0x14, 0x46, 0x3e, 0x3e, 0xa2, 0x91, 0x5b, /* Byte value: 0xd7 */ + 0x6f, 0x0a, 0xa7, 0xee, 0xfe, 0x2b, 0x5d, 0xa8, 0xfe, 0x7b, 0x07, 0xde, 0xde, 0x0d, 0x32, 0x3b, /* Byte value: 0xd8 */ + 0x59, 0x10, 0xa4, 0x73, 0x8d, 0xf9, 0xf7, 0xbc, 0x8d, 0x79, 0x67, 0xb2, 0xb2, 0x77, 0xae, 0x07, /* Byte value: 0xd9 */ + 0x32, 0xf3, 0xfd, 0xa9, 0x53, 0x83, 0x71, 0x96, 0x53, 0x17, 0x16, 0x64, 0x64, 0xe5, 0x43, 0x79, /* Byte value: 0xda */ + 0x84, 0x3a, 0x88, 0x7b, 0xaa, 0xe3, 0x87, 0xaf, 0xaa, 0xf0, 0xae, 0xcb, 0xcb, 0x94, 0x03, 0x32, /* Byte value: 0xdb */ + 0x12, 0xb7, 0x01, 0xca, 0x90, 0x4e, 0x66, 0x0c, 0x90, 0xbf, 0x20, 0x24, 0x24, 0x97, 0x74, 0x14, /* Byte value: 0xdc */ + 0x88, 0xc2, 0x49, 0x27, 0xca, 0x10, 0x29, 0xea, 0xca, 0xcf, 0x34, 0xd3, 0xd3, 0xf6, 0xa1, 0xfd, /* Byte value: 0xdd */ + 0x1b, 0x0d, 0xe0, 0xaf, 0xd8, 0x69, 0x55, 0x0a, 0xd8, 0x01, 0x30, 0x36, 0x36, 0x3d, 0x4e, 0x1e, /* Byte value: 0xde */ + 0x58, 0xbb, 0x7a, 0x7e, 0x85, 0x7c, 0xb1, 0x7d, 0x85, 0xed, 0x9b, 0xb0, 0xb0, 0x20, 0xe9, 0x87, /* Byte value: 0xdf */ + 0xa5, 0xd5, 0xaa, 0x15, 0x61, 0xab, 0xd6, 0xf4, 0x61, 0xcc, 0x64, 0x89, 0x89, 0xb1, 0x73, 0xdf, /* Byte value: 0xe0 */ + 0x04, 0xe9, 0xfe, 0x34, 0x20, 0x51, 0xdb, 0x82, 0x20, 0x15, 0x76, 0x08, 0x08, 0x9f, 0xdf, 0x45, /* Byte value: 0xe1 */ + 0xa7, 0x40, 0xd5, 0x0f, 0x71, 0x62, 0x5a, 0xb5, 0x71, 0x27, 0x5f, 0x8d, 0x8d, 0x1f, 0xfd, 0x1c, /* Byte value: 0xe2 */ + 0xd5, 0x3b, 0x13, 0x60, 0x67, 0xb8, 0x05, 0xd4, 0x67, 0xa3, 0x25, 0x69, 0x69, 0x1e, 0xd0, 0xbf, /* Byte value: 0xe3 */ + 0xf5, 0x7f, 0xef, 0x03, 0xa4, 0x75, 0x12, 0x4e, 0xa4, 0x0b, 0x13, 0x29, 0x29, 0x6c, 
0xe7, 0xd2, /* Byte value: 0xe4 */ + 0x4d, 0xdb, 0x24, 0x97, 0x2d, 0x2f, 0xc6, 0x73, 0x2d, 0x38, 0x0a, 0x9a, 0x9a, 0xd1, 0x8b, 0x95, /* Byte value: 0xe5 */ + 0x3d, 0x35, 0x9d, 0xe2, 0x2b, 0x3c, 0x15, 0x53, 0x2b, 0x57, 0x4b, 0x7a, 0x7a, 0x7e, 0x28, 0xf5, /* Byte value: 0xe6 */ + 0xa8, 0x86, 0xb5, 0x44, 0x09, 0xdd, 0x3e, 0x70, 0x09, 0x67, 0x02, 0x93, 0x93, 0x84, 0x96, 0x90, /* Byte value: 0xe7 */ + 0x33, 0x58, 0x23, 0xa4, 0x5b, 0x06, 0x37, 0x57, 0x5b, 0x83, 0xea, 0x66, 0x66, 0xb2, 0x04, 0xf9, /* Byte value: 0xe8 */ + 0x21, 0xef, 0x22, 0x6e, 0xcb, 0x48, 0x51, 0x5b, 0xcb, 0x3c, 0xca, 0x42, 0x42, 0x25, 0x70, 0xed, /* Byte value: 0xe9 */ + 0x2b, 0x6b, 0x62, 0x1c, 0x9b, 0x23, 0xa8, 0xdd, 0x9b, 0xfd, 0x1d, 0x56, 0x56, 0x76, 0x83, 0xa4, /* Byte value: 0xea */ + 0x4c, 0x70, 0xfa, 0x9a, 0x25, 0xaa, 0x80, 0xb2, 0x25, 0xac, 0xf6, 0x98, 0x98, 0x86, 0xcc, 0x15, /* Byte value: 0xeb */ + 0x1a, 0xa6, 0x3e, 0xa2, 0xd0, 0xec, 0x13, 0xcb, 0xd0, 0x95, 0xcc, 0x34, 0x34, 0x6a, 0x09, 0x9e, /* Byte value: 0xec */ + 0x67, 0x1b, 0x98, 0x86, 0xbe, 0x89, 0x28, 0x6f, 0xbe, 0x51, 0xeb, 0xce, 0xce, 0xf0, 0x4f, 0xb1, /* Byte value: 0xed */ + 0x54, 0x43, 0xbb, 0x22, 0xe5, 0x8f, 0x1f, 0x38, 0xe5, 0xd2, 0x01, 0xa8, 0xa8, 0x42, 0x4b, 0x48, /* Byte value: 0xee */ + 0xec, 0xe7, 0x70, 0xb6, 0x6c, 0xd5, 0xcb, 0x05, 0x6c, 0xe1, 0x18, 0x1b, 0x1b, 0xff, 0x27, 0x0f, /* Byte value: 0xef */ + 0x9b, 0xde, 0x96, 0xe0, 0x52, 0xdb, 0x09, 0x27, 0x52, 0xe4, 0xe8, 0xf5, 0xf5, 0x36, 0x92, 0x69, /* Byte value: 0xf0 */ + 0xa2, 0x02, 0xf5, 0x36, 0x59, 0xb6, 0xc7, 0xf6, 0x59, 0xa6, 0xd5, 0x87, 0x87, 0xd7, 0x65, 0xd9, /* Byte value: 0xf1 */ + 0x64, 0x25, 0x39, 0x91, 0xa6, 0xc5, 0xe2, 0xef, 0xa6, 0x2e, 0x2c, 0xc8, 0xc8, 0x09, 0x86, 0xf2, /* Byte value: 0xf2 */ + 0xef, 0xd9, 0xd1, 0xa1, 0x74, 0x99, 0x01, 0x85, 0x74, 0x9e, 0xdf, 0x1d, 0x1d, 0x06, 0xee, 0x4c, /* Byte value: 0xf3 */ + 0x8b, 0xfc, 0xe8, 0x30, 0xd2, 0x5c, 0xe3, 0x6a, 0xd2, 0xb0, 0xf3, 0xd5, 0xd5, 0x0f, 0x68, 0xbe, /* Byte value: 0xf4 */ + 0x6c, 0x34, 0x06, 0xf9, 0xe6, 0x67, 0x97, 0x28, 0xe6, 0x04, 0xc0, 0xd8, 0xd8, 0xf4, 0xfb, 0x78, /* Byte value: 0xf5 */ + 0xff, 0xfb, 0xaf, 0x71, 0xf4, 0x1e, 0xeb, 0xc8, 0xf4, 0xca, 0xc4, 0x3d, 0x3d, 0x3f, 0x14, 0x9b, /* Byte value: 0xf6 */ + 0xf7, 0xea, 0x90, 0x19, 0xb4, 0xbc, 0x9e, 0x0f, 0xb4, 0xe0, 0x28, 0x2d, 0x2d, 0xc2, 0x69, 0x11, /* Byte value: 0xf7 */ + 0x18, 0x33, 0x41, 0xb8, 0xc0, 0x25, 0x9f, 0x8a, 0xc0, 0x7e, 0xf7, 0x30, 0x30, 0xc4, 0x87, 0x5d, /* Byte value: 0xf8 */ + 0xb3, 0x8b, 0x55, 0xeb, 0xd1, 0xb4, 0x6b, 0x7a, 0xd1, 0x66, 0x32, 0xa5, 0xa5, 0xb9, 0xd8, 0x8e, /* Byte value: 0xf9 */ + 0x7c, 0x16, 0x78, 0x29, 0x66, 0xe0, 0x7d, 0x65, 0x66, 0x50, 0xdb, 0xf8, 0xf8, 0xcd, 0x01, 0xaf, /* Byte value: 0xfa */ + 0xcc, 0xa3, 0x8c, 0xd5, 0xaf, 0x18, 0xdc, 0x9f, 0xaf, 0x49, 0x2e, 0x5b, 0x5b, 0x8d, 0x10, 0x62, /* Byte value: 0xfb */ + 0x89, 0x69, 0x97, 0x2a, 0xc2, 0x95, 0x6f, 0x2b, 0xc2, 0x5b, 0xc8, 0xd1, 0xd1, 0xa1, 0xe6, 0x7d, /* Byte value: 0xfc */ + 0xae, 0xfa, 0x34, 0x6a, 0x39, 0x45, 0x69, 0xb3, 0x39, 0x99, 0x4f, 0x9f, 0x9f, 0xb5, 0xc7, 0x16, /* Byte value: 0xfd */ + 0x31, 0xcd, 0x5c, 0xbe, 0x4b, 0xcf, 0xbb, 0x16, 0x4b, 0x68, 0xd1, 0x62, 0x62, 0x1c, 0x8a, 0x3a, /* Byte value: 0xfe */ + 0x34, 0x8f, 0x7c, 0x87, 0x63, 0x1b, 0x26, 0x55, 0x63, 0xe9, 0x5b, 0x68, 0x68, 0xd4, 0x12, 0xff, /* Byte value: 0xff */ + 0x66, 0xb0, 0x46, 0x8b, 0xb6, 0x0c, 0x6e, 0xae, 0xb6, 0xc5, 0x17, 0xcc, 0xcc, 0xa7, 0x08, 0x31, /* Byte value: 0x00 */ + + /* Matrix row: 12 */ + 0x55, 0x95, 0x9e, 0xb1, 0xd0, 0xa1, 0x4c, 0xd9, 0x5a, 0x2f, 0xf9, 0x1c, 0x4c, 0xf2, 0x1a, 0x52, /* Byte 
value: 0x01 */ + 0xfa, 0xed, 0x87, 0xcf, 0x85, 0x5d, 0x81, 0x6c, 0x55, 0x48, 0x8b, 0x1e, 0x81, 0xe1, 0x58, 0x1c, /* Byte value: 0x02 */ + 0x0c, 0xbe, 0x0a, 0x56, 0x48, 0xac, 0xcc, 0x72, 0x67, 0x5c, 0x45, 0x58, 0xcc, 0xf1, 0x09, 0x1a, /* Byte value: 0x03 */ + 0x52, 0x31, 0x7b, 0x77, 0xfa, 0xee, 0x3b, 0x0a, 0xaf, 0x0c, 0xfb, 0x83, 0x3b, 0xb1, 0x8e, 0x02, /* Byte value: 0x04 */ + 0xa3, 0xc6, 0x13, 0x28, 0x1d, 0x50, 0x01, 0xc7, 0x68, 0x3b, 0x37, 0x5a, 0x01, 0xe2, 0x4b, 0x54, /* Byte value: 0x05 */ + 0x73, 0x1c, 0xab, 0xc9, 0xf7, 0x51, 0x4f, 0x53, 0xe2, 0x62, 0xa0, 0xf2, 0x4f, 0x17, 0xc7, 0xae, /* Byte value: 0x06 */ + 0xe1, 0x9c, 0x70, 0x97, 0x07, 0xed, 0xe9, 0x75, 0xf2, 0xe7, 0x81, 0xb8, 0xe9, 0x6d, 0xd9, 0xcf, /* Byte value: 0x07 */ + 0x81, 0x25, 0x20, 0x62, 0x02, 0xc4, 0x46, 0x63, 0x4c, 0x42, 0xec, 0x3d, 0x46, 0xe9, 0x91, 0x1f, /* Byte value: 0x08 */ + 0x86, 0x81, 0xc5, 0xa4, 0x28, 0x8b, 0x31, 0xb0, 0xb9, 0x61, 0xee, 0xa2, 0x31, 0xaa, 0x05, 0x4f, /* Byte value: 0x09 */ + 0x02, 0x35, 0x03, 0x19, 0x1c, 0x32, 0x22, 0x17, 0x4e, 0x1a, 0x41, 0xa5, 0x22, 0x77, 0xe2, 0xba, /* Byte value: 0x0a */ + 0x6c, 0x07, 0x5a, 0xa3, 0x4d, 0x85, 0x63, 0x64, 0xd9, 0xf9, 0x28, 0xdd, 0x63, 0x75, 0x41, 0xca, /* Byte value: 0x0b */ + 0xdf, 0xaa, 0x51, 0x43, 0xb0, 0x86, 0xb1, 0x1b, 0x84, 0x12, 0x52, 0xe6, 0xb1, 0xa9, 0x16, 0x07, /* Byte value: 0x0c */ + 0x4e, 0xe4, 0x69, 0xe9, 0x52, 0x11, 0x24, 0xc0, 0xfd, 0x80, 0xf3, 0xba, 0x24, 0x7e, 0x9b, 0x81, /* Byte value: 0x0d */ + 0xe8, 0xb3, 0x9c, 0x1e, 0x79, 0x3c, 0x70, 0xc3, 0x2e, 0x82, 0x87, 0xda, 0x70, 0xa8, 0xa6, 0x3f, /* Byte value: 0x0e */ + 0x2e, 0x5d, 0x39, 0x1c, 0x57, 0x38, 0x8b, 0xd6, 0x43, 0x25, 0x9e, 0x3f, 0x8b, 0xfa, 0xd3, 0x51, /* Byte value: 0x0f */ + 0x3d, 0xf8, 0xc2, 0x20, 0xa5, 0x40, 0x6b, 0x93, 0x1f, 0xe2, 0x53, 0x48, 0x6b, 0x69, 0x5c, 0x2f, /* Byte value: 0x10 */ + 0x29, 0xf9, 0xdc, 0xda, 0x7d, 0x77, 0xfc, 0x05, 0xb6, 0x06, 0x9c, 0xa0, 0xfc, 0xb9, 0x47, 0x01, /* Byte value: 0x11 */ + 0x7d, 0x97, 0xa2, 0x86, 0xa3, 0xcf, 0xa1, 0x36, 0xcb, 0x24, 0xa4, 0x0f, 0xa1, 0x91, 0x2c, 0x0e, /* Byte value: 0x12 */ + 0x27, 0x72, 0xd5, 0x95, 0x29, 0xe9, 0x12, 0x60, 0x9f, 0x40, 0x98, 0x5d, 0x12, 0x3f, 0xac, 0xa1, /* Byte value: 0x13 */ + 0x35, 0x2c, 0xce, 0x44, 0xd5, 0x88, 0xe3, 0xcf, 0xe4, 0x8a, 0x94, 0x99, 0xe3, 0x76, 0x52, 0x82, /* Byte value: 0x14 */ + 0xcc, 0x0f, 0xaa, 0x7f, 0x42, 0xfe, 0x51, 0x5e, 0xd8, 0xd5, 0x9f, 0x91, 0x51, 0x3a, 0x99, 0x79, /* Byte value: 0x15 */ + 0xd6, 0x85, 0xbd, 0xca, 0xce, 0x57, 0x28, 0xad, 0x58, 0x77, 0x54, 0x84, 0x28, 0x6c, 0x69, 0xf7, /* Byte value: 0x16 */ + 0x87, 0x7a, 0x25, 0x49, 0x26, 0x92, 0x20, 0x5a, 0x9e, 0x6c, 0x2f, 0x11, 0x20, 0x70, 0x74, 0x12, /* Byte value: 0x17 */ + 0xc9, 0x9e, 0x4c, 0xa0, 0x74, 0x83, 0x04, 0x9a, 0x63, 0xec, 0xdc, 0xab, 0x04, 0x0e, 0xef, 0x93, /* Byte value: 0x18 */ + 0x6b, 0xa3, 0xbf, 0x65, 0x67, 0xca, 0x14, 0xb7, 0x2c, 0xda, 0x2a, 0x42, 0x14, 0x36, 0xd5, 0x9a, /* Byte value: 0x19 */ + 0x32, 0x88, 0x2b, 0x82, 0xff, 0xc7, 0x94, 0x1c, 0x11, 0xa9, 0x96, 0x06, 0x94, 0x35, 0xc6, 0xd2, /* Byte value: 0x1a */ + 0xcd, 0xf4, 0x4a, 0x92, 0x4c, 0xe7, 0x40, 0xb4, 0xff, 0xd8, 0x5e, 0x22, 0x40, 0xe0, 0xe8, 0x24, /* Byte value: 0x1b */ + 0x23, 0x18, 0xd3, 0xa7, 0x11, 0x8d, 0x56, 0x4e, 0x03, 0x74, 0x1a, 0xd4, 0x56, 0xd1, 0xab, 0x16, /* Byte value: 0x1c */ + 0x96, 0xea, 0xdd, 0x6c, 0xc8, 0xd8, 0xe2, 0x08, 0x8c, 0xb1, 0xa3, 0xc3, 0xe2, 0x94, 0x19, 0xd6, /* Byte value: 0x1d */ + 0xb4, 0x09, 0xee, 0x26, 0xd7, 0x4c, 0xa5, 0xac, 0xa8, 0xc8, 0x78, 0xa4, 0xa5, 0x9f, 0xc3, 0x9d, /* Byte value: 0x1e */ + 0x92, 0x80, 0xdb, 0x5e, 
0xf0, 0xbc, 0xa6, 0x26, 0x10, 0x85, 0x21, 0x4a, 0xa6, 0x7a, 0x1e, 0x61, /* Byte value: 0x1f */ + 0xc6, 0xee, 0xa5, 0x02, 0x2e, 0x04, 0xfb, 0x15, 0x6d, 0xa7, 0x19, 0xe5, 0xfb, 0x52, 0x75, 0x6e, /* Byte value: 0x20 */ + 0x91, 0x4e, 0x38, 0xaa, 0xe2, 0x97, 0x95, 0xdb, 0x79, 0x92, 0xa1, 0x5c, 0x95, 0xd7, 0x8d, 0x86, /* Byte value: 0x21 */ + 0xe4, 0x0d, 0x96, 0x48, 0x31, 0x90, 0xbc, 0xb1, 0x49, 0xde, 0xc2, 0x82, 0xbc, 0x59, 0xaf, 0x25, /* Byte value: 0x22 */ + 0xd2, 0xef, 0xbb, 0xf8, 0xf6, 0x33, 0x6c, 0x83, 0xc4, 0x43, 0xd6, 0x0d, 0x6c, 0x82, 0x6e, 0x40, /* Byte value: 0x23 */ + 0x56, 0x5b, 0x7d, 0x45, 0xc2, 0x8a, 0x7f, 0x24, 0x33, 0x38, 0x79, 0x0a, 0x7f, 0x5f, 0x89, 0xb5, /* Byte value: 0x24 */ + 0x88, 0x0a, 0xcc, 0xeb, 0x7c, 0x15, 0xdf, 0xd5, 0x90, 0x27, 0xea, 0x5f, 0xdf, 0x2c, 0xee, 0xef, /* Byte value: 0x25 */ + 0x6f, 0xc9, 0xb9, 0x57, 0x5f, 0xae, 0x50, 0x99, 0xb0, 0xee, 0xa8, 0xcb, 0x50, 0xd8, 0xd2, 0x2d, /* Byte value: 0x26 */ + 0x10, 0x6b, 0x18, 0xc8, 0xe0, 0x53, 0xd3, 0xb8, 0x35, 0xd0, 0x4d, 0x61, 0xd3, 0x3e, 0x1c, 0x99, /* Byte value: 0x27 */ + 0x59, 0x2b, 0x94, 0xe7, 0x98, 0x0d, 0x80, 0xab, 0x3d, 0x73, 0xbc, 0x44, 0x80, 0x03, 0x13, 0x48, /* Byte value: 0x28 */ + 0x63, 0x77, 0xb3, 0x01, 0x17, 0x02, 0x9c, 0xeb, 0xd7, 0xb2, 0xed, 0x93, 0x9c, 0x29, 0xdb, 0x37, /* Byte value: 0x29 */ + 0xca, 0x50, 0xaf, 0x54, 0x66, 0xa8, 0x37, 0x67, 0x0a, 0xfb, 0x5c, 0xbd, 0x37, 0xa3, 0x7c, 0x74, /* Byte value: 0x2a */ + 0x79, 0xfd, 0xa4, 0xb4, 0x9b, 0xab, 0xe5, 0x18, 0x57, 0x10, 0x26, 0x86, 0xe5, 0x7f, 0x2b, 0xb9, /* Byte value: 0x2b */ + 0xb2, 0x56, 0xeb, 0x0d, 0xf3, 0x1a, 0xc3, 0x95, 0x7a, 0xe6, 0xbb, 0x88, 0xc3, 0x06, 0x26, 0x90, /* Byte value: 0x2c */ + 0x28, 0x02, 0x3c, 0x37, 0x73, 0x6e, 0xed, 0xef, 0x91, 0x0b, 0x5d, 0x13, 0xed, 0x63, 0x36, 0x5c, /* Byte value: 0x2d */ + 0xea, 0x86, 0x9f, 0x07, 0x65, 0x0e, 0x52, 0xd4, 0x60, 0x98, 0xc6, 0x7f, 0x52, 0xdf, 0x44, 0x85, /* Byte value: 0x2e */ + 0xec, 0xd9, 0x9a, 0x2c, 0x41, 0x58, 0x34, 0xed, 0xb2, 0xb6, 0x05, 0x53, 0x34, 0x46, 0xa1, 0x88, /* Byte value: 0x2f */ + 0x2a, 0x37, 0x3f, 0x2e, 0x6f, 0x5c, 0xcf, 0xf8, 0xdf, 0x11, 0x1c, 0xb6, 0xcf, 0x14, 0xd4, 0xe6, /* Byte value: 0x30 */ + 0xc4, 0xdb, 0xa6, 0x1b, 0x32, 0x36, 0xd9, 0x02, 0x23, 0xbd, 0x58, 0x40, 0xd9, 0x25, 0x97, 0xd4, /* Byte value: 0x31 */ + 0xa7, 0xac, 0x15, 0x1a, 0x25, 0x34, 0x45, 0xe9, 0xf4, 0x0f, 0xb5, 0xd3, 0x45, 0x0c, 0x4c, 0xe3, /* Byte value: 0x32 */ + 0x17, 0xcf, 0xfd, 0x0e, 0xca, 0x1c, 0xa4, 0x6b, 0xc0, 0xf3, 0x4f, 0xfe, 0xa4, 0x7d, 0x88, 0xc9, /* Byte value: 0x33 */ + 0x5f, 0x74, 0x91, 0xcc, 0xbc, 0x5b, 0xe6, 0x92, 0xef, 0x5d, 0x7f, 0x68, 0xe6, 0x9a, 0xf6, 0x45, /* Byte value: 0x34 */ + 0x62, 0x8c, 0x53, 0xec, 0x19, 0x1b, 0x8d, 0x01, 0xf0, 0xbf, 0x2c, 0x20, 0x8d, 0xf3, 0xaa, 0x6a, /* Byte value: 0x35 */ + 0x5d, 0x41, 0x92, 0xd5, 0xa0, 0x69, 0xc4, 0x85, 0xa1, 0x47, 0x3e, 0xcd, 0xc4, 0xed, 0x14, 0xff, /* Byte value: 0x36 */ + 0x06, 0x5f, 0x05, 0x2b, 0x24, 0x56, 0x66, 0x39, 0xd2, 0x2e, 0xc3, 0x2c, 0x66, 0x99, 0xe5, 0x0d, /* Byte value: 0x37 */ + 0x3a, 0x5c, 0x27, 0xe6, 0x8f, 0x0f, 0x1c, 0x40, 0xea, 0xc1, 0x51, 0xd7, 0x1c, 0x2a, 0xc8, 0x7f, /* Byte value: 0x38 */ + 0x39, 0x92, 0xc4, 0x12, 0x9d, 0x24, 0x2f, 0xbd, 0x83, 0xd6, 0xd1, 0xc1, 0x2f, 0x87, 0x5b, 0x98, /* Byte value: 0x39 */ + 0xa1, 0xf3, 0x10, 0x31, 0x01, 0x62, 0x23, 0xd0, 0x26, 0x21, 0x76, 0xff, 0x23, 0x95, 0xa9, 0xee, /* Byte value: 0x3a */ + 0x07, 0xa4, 0xe5, 0xc6, 0x2a, 0x4f, 0x77, 0xd3, 0xf5, 0x23, 0x02, 0x9f, 0x77, 0x43, 0x94, 0x50, /* Byte value: 0x3b */ + 0x6d, 0xfc, 0xba, 0x4e, 0x43, 0x9c, 0x72, 0x8e, 0xfe, 0xf4, 0xe9, 
0x6e, 0x72, 0xaf, 0x30, 0x97, /* Byte value: 0x3c */ + 0x21, 0x2d, 0xd0, 0xbe, 0x0d, 0xbf, 0x74, 0x59, 0x4d, 0x6e, 0x5b, 0x71, 0x74, 0xa6, 0x49, 0xac, /* Byte value: 0x3d */ + 0xba, 0x82, 0xe7, 0x69, 0x83, 0xd2, 0x4b, 0xc9, 0x81, 0x8e, 0x7c, 0x59, 0x4b, 0x19, 0x28, 0x3d, /* Byte value: 0x3e */ + 0x69, 0x96, 0xbc, 0x7c, 0x7b, 0xf8, 0x36, 0xa0, 0x62, 0xc0, 0x6b, 0xe7, 0x36, 0x41, 0x37, 0x20, /* Byte value: 0x3f */ + 0x37, 0x19, 0xcd, 0x5d, 0xc9, 0xba, 0xc1, 0xd8, 0xaa, 0x90, 0xd5, 0x3c, 0xc1, 0x01, 0xb0, 0x38, /* Byte value: 0x40 */ + 0x3e, 0x36, 0x21, 0xd4, 0xb7, 0x6b, 0x58, 0x6e, 0x76, 0xf5, 0xd3, 0x5e, 0x58, 0xc4, 0xcf, 0xc8, /* Byte value: 0x41 */ + 0x25, 0x47, 0xd6, 0x8c, 0x35, 0xdb, 0x30, 0x77, 0xd1, 0x5a, 0xd9, 0xf8, 0x30, 0x48, 0x4e, 0x1b, /* Byte value: 0x42 */ + 0xc1, 0x4a, 0x40, 0xc4, 0x04, 0x4b, 0x8c, 0xc6, 0x98, 0x84, 0x1b, 0x7a, 0x8c, 0x11, 0xe1, 0x3e, /* Byte value: 0x43 */ + 0xf7, 0xa8, 0x6d, 0x74, 0xc3, 0xe8, 0x5c, 0xf4, 0x15, 0x19, 0x0f, 0xf5, 0x5c, 0xca, 0x20, 0x5b, /* Byte value: 0x44 */ + 0xd4, 0xb0, 0xbe, 0xd3, 0xd2, 0x65, 0x0a, 0xba, 0x16, 0x6d, 0x15, 0x21, 0x0a, 0x1b, 0x8b, 0x4d, /* Byte value: 0x45 */ + 0x70, 0xd2, 0x48, 0x3d, 0xe5, 0x7a, 0x7c, 0xae, 0x8b, 0x75, 0x20, 0xe4, 0x7c, 0xba, 0x54, 0x49, /* Byte value: 0x46 */ + 0xf9, 0x23, 0x64, 0x3b, 0x97, 0x76, 0xb2, 0x91, 0x3c, 0x5f, 0x0b, 0x08, 0xb2, 0x4c, 0xcb, 0xfb, /* Byte value: 0x47 */ + 0x9b, 0xaf, 0x37, 0xd7, 0x8e, 0x6d, 0x3f, 0x90, 0xcc, 0xe0, 0x27, 0x28, 0x3f, 0xbf, 0x61, 0x91, /* Byte value: 0x48 */ + 0x30, 0xbd, 0x28, 0x9b, 0xe3, 0xf5, 0xb6, 0x0b, 0x5f, 0xb3, 0xd7, 0xa3, 0xb6, 0x42, 0x24, 0x68, /* Byte value: 0x49 */ + 0xf8, 0xd8, 0x84, 0xd6, 0x99, 0x6f, 0xa3, 0x7b, 0x1b, 0x52, 0xca, 0xbb, 0xa3, 0x96, 0xba, 0xa6, /* Byte value: 0x4a */ + 0x4a, 0x8e, 0x6f, 0xdb, 0x6a, 0x75, 0x60, 0xee, 0x61, 0xb4, 0x71, 0x33, 0x60, 0x90, 0x9c, 0x36, /* Byte value: 0x4b */ + 0x2d, 0x93, 0xda, 0xe8, 0x45, 0x13, 0xb8, 0x2b, 0x2a, 0x32, 0x1e, 0x29, 0xb8, 0x57, 0x40, 0xb6, /* Byte value: 0x4c */ + 0xbf, 0x13, 0x01, 0xb6, 0xb5, 0xaf, 0x1e, 0x0d, 0x3a, 0xb7, 0x3f, 0x63, 0x1e, 0x2d, 0x5e, 0xd7, /* Byte value: 0x4d */ + 0x40, 0x6f, 0x60, 0xa6, 0x06, 0x8f, 0xca, 0xa5, 0xd4, 0xc6, 0xf7, 0x47, 0xca, 0xf8, 0x70, 0x21, /* Byte value: 0x4e */ + 0x49, 0x40, 0x8c, 0x2f, 0x78, 0x5e, 0x53, 0x13, 0x08, 0xa3, 0xf1, 0x25, 0x53, 0x3d, 0x0f, 0xd1, /* Byte value: 0x4f */ + 0x5e, 0x8f, 0x71, 0x21, 0xb2, 0x42, 0xf7, 0x78, 0xc8, 0x50, 0xbe, 0xdb, 0xf7, 0x40, 0x87, 0x18, /* Byte value: 0x50 */ + 0x46, 0x30, 0x65, 0x8d, 0x22, 0xd9, 0xac, 0x9c, 0x06, 0xe8, 0x34, 0x6b, 0xac, 0x61, 0x95, 0x2c, /* Byte value: 0x51 */ + 0xae, 0x83, 0xf9, 0x93, 0x5b, 0xe5, 0xdc, 0x5f, 0x28, 0x6a, 0xb3, 0xb1, 0xdc, 0xc9, 0x33, 0x13, /* Byte value: 0x52 */ + 0x65, 0x28, 0xb6, 0x2a, 0x33, 0x54, 0xfa, 0xd2, 0x05, 0x9c, 0x2e, 0xbf, 0xfa, 0xb0, 0x3e, 0x3a, /* Byte value: 0x53 */ + 0x24, 0xbc, 0x36, 0x61, 0x3b, 0xc2, 0x21, 0x9d, 0xf6, 0x57, 0x18, 0x4b, 0x21, 0x92, 0x3f, 0x46, /* Byte value: 0x54 */ + 0x5c, 0xba, 0x72, 0x38, 0xae, 0x70, 0xd5, 0x6f, 0x86, 0x4a, 0xff, 0x7e, 0xd5, 0x37, 0x65, 0xa2, /* Byte value: 0x55 */ + 0x72, 0xe7, 0x4b, 0x24, 0xf9, 0x48, 0x5e, 0xb9, 0xc5, 0x6f, 0x61, 0x41, 0x5e, 0xcd, 0xb6, 0xf3, /* Byte value: 0x56 */ + 0x97, 0x11, 0x3d, 0x81, 0xc6, 0xc1, 0xf3, 0xe2, 0xab, 0xbc, 0x62, 0x70, 0xf3, 0x4e, 0x68, 0x8b, /* Byte value: 0x57 */ + 0xaf, 0x78, 0x19, 0x7e, 0x55, 0xfc, 0xcd, 0xb5, 0x0f, 0x67, 0x72, 0x02, 0xcd, 0x13, 0x42, 0x4e, /* Byte value: 0x58 */ + 0x0d, 0x45, 0xea, 0xbb, 0x46, 0xb5, 0xdd, 0x98, 0x40, 0x51, 0x84, 0xeb, 0xdd, 0x2b, 0x78, 0x47, /* Byte value: 
0x59 */ + 0xb9, 0x4c, 0x04, 0x9d, 0x91, 0xf9, 0x78, 0x34, 0xe8, 0x99, 0xfc, 0x4f, 0x78, 0xb4, 0xbb, 0xda, /* Byte value: 0x5a */ + 0x45, 0xfe, 0x86, 0x79, 0x30, 0xf2, 0x9f, 0x61, 0x6f, 0xff, 0xb4, 0x7d, 0x9f, 0xcc, 0x06, 0xcb, /* Byte value: 0x5b */ + 0x76, 0x8d, 0x4d, 0x16, 0xc1, 0x2c, 0x1a, 0x97, 0x59, 0x5b, 0xe3, 0xc8, 0x1a, 0x23, 0xb1, 0x44, /* Byte value: 0x5c */ + 0x43, 0xa1, 0x83, 0x52, 0x14, 0xa4, 0xf9, 0x58, 0xbd, 0xd1, 0x77, 0x51, 0xf9, 0x55, 0xe3, 0xc6, /* Byte value: 0x5d */ + 0x51, 0xff, 0x98, 0x83, 0xe8, 0xc5, 0x08, 0xf7, 0xc6, 0x1b, 0x7b, 0x95, 0x08, 0x1c, 0x1d, 0xe5, /* Byte value: 0x5e */ + 0x85, 0x4f, 0x26, 0x50, 0x3a, 0xa0, 0x02, 0x4d, 0xd0, 0x76, 0x6e, 0xb4, 0x02, 0x07, 0x96, 0xa8, /* Byte value: 0x5f */ + 0x5a, 0xe5, 0x77, 0x13, 0x8a, 0x26, 0xb3, 0x56, 0x54, 0x64, 0x3c, 0x52, 0xb3, 0xae, 0x80, 0xaf, /* Byte value: 0x60 */ + 0x7c, 0x6c, 0x42, 0x6b, 0xad, 0xd6, 0xb0, 0xdc, 0xec, 0x29, 0x65, 0xbc, 0xb0, 0x4b, 0x5d, 0x53, /* Byte value: 0x61 */ + 0xd0, 0xda, 0xb8, 0xe1, 0xea, 0x01, 0x4e, 0x94, 0x8a, 0x59, 0x97, 0xa8, 0x4e, 0xf5, 0x8c, 0xfa, /* Byte value: 0x62 */ + 0x08, 0xd4, 0x0c, 0x64, 0x70, 0xc8, 0x88, 0x5c, 0xfb, 0x68, 0xc7, 0xd1, 0x88, 0x1f, 0x0e, 0xad, /* Byte value: 0x63 */ + 0x05, 0x91, 0xe6, 0xdf, 0x36, 0x7d, 0x55, 0xc4, 0xbb, 0x39, 0x43, 0x3a, 0x55, 0x34, 0x76, 0xea, /* Byte value: 0x64 */ + 0xb8, 0xb7, 0xe4, 0x70, 0x9f, 0xe0, 0x69, 0xde, 0xcf, 0x94, 0x3d, 0xfc, 0x69, 0x6e, 0xca, 0x87, /* Byte value: 0x65 */ + 0x0f, 0x70, 0xe9, 0xa2, 0x5a, 0x87, 0xff, 0x8f, 0x0e, 0x4b, 0xc5, 0x4e, 0xff, 0x5c, 0x9a, 0xfd, /* Byte value: 0x66 */ + 0x7a, 0x33, 0x47, 0x40, 0x89, 0x80, 0xd6, 0xe5, 0x3e, 0x07, 0xa6, 0x90, 0xd6, 0xd2, 0xb8, 0x5e, /* Byte value: 0x67 */ + 0xff, 0x7c, 0x61, 0x10, 0xb3, 0x20, 0xd4, 0xa8, 0xee, 0x71, 0xc8, 0x24, 0xd4, 0xd5, 0x2e, 0xf6, /* Byte value: 0x68 */ + 0xda, 0x3b, 0xb7, 0x9c, 0x86, 0xfb, 0xe4, 0xdf, 0x3f, 0x2b, 0x11, 0xdc, 0xe4, 0x9d, 0x60, 0xed, /* Byte value: 0x69 */ + 0xdb, 0xc0, 0x57, 0x71, 0x88, 0xe2, 0xf5, 0x35, 0x18, 0x26, 0xd0, 0x6f, 0xf5, 0x47, 0x11, 0xb0, /* Byte value: 0x6a */ + 0xf2, 0x39, 0x8b, 0xab, 0xf5, 0x95, 0x09, 0x30, 0xae, 0x20, 0x4c, 0xcf, 0x09, 0xfe, 0x56, 0xb1, /* Byte value: 0x6b */ + 0x99, 0x9a, 0x34, 0xce, 0x92, 0x5f, 0x1d, 0x87, 0x82, 0xfa, 0x66, 0x8d, 0x1d, 0xc8, 0x83, 0x2b, /* Byte value: 0x6c */ + 0xa9, 0x27, 0x1c, 0x55, 0x71, 0xaa, 0xab, 0x8c, 0xdd, 0x49, 0xb1, 0x2e, 0xab, 0x8a, 0xa7, 0x43, /* Byte value: 0x6d */ + 0x54, 0x6e, 0x7e, 0x5c, 0xde, 0xb8, 0x5d, 0x33, 0x7d, 0x22, 0x38, 0xaf, 0x5d, 0x28, 0x6b, 0x0f, /* Byte value: 0x6e */ + 0x95, 0x24, 0x3e, 0x98, 0xda, 0xf3, 0xd1, 0xf5, 0xe5, 0xa6, 0x23, 0xd5, 0xd1, 0x39, 0x8a, 0x31, /* Byte value: 0x6f */ + 0x68, 0x6d, 0x5c, 0x91, 0x75, 0xe1, 0x27, 0x4a, 0x45, 0xcd, 0xaa, 0x54, 0x27, 0x9b, 0x46, 0x7d, /* Byte value: 0x70 */ + 0x1c, 0xd5, 0x12, 0x9e, 0xa8, 0xff, 0x1f, 0xca, 0x52, 0x8c, 0x08, 0x39, 0x1f, 0xcf, 0x15, 0x83, /* Byte value: 0x71 */ + 0x6a, 0x58, 0x5f, 0x88, 0x69, 0xd3, 0x05, 0x5d, 0x0b, 0xd7, 0xeb, 0xf1, 0x05, 0xec, 0xa4, 0xc7, /* Byte value: 0x72 */ + 0x0e, 0x8b, 0x09, 0x4f, 0x54, 0x9e, 0xee, 0x65, 0x29, 0x46, 0x04, 0xfd, 0xee, 0x86, 0xeb, 0xa0, /* Byte value: 0x73 */ + 0x93, 0x7b, 0x3b, 0xb3, 0xfe, 0xa5, 0xb7, 0xcc, 0x37, 0x88, 0xe0, 0xf9, 0xb7, 0xa0, 0x6f, 0x3c, /* Byte value: 0x74 */ + 0xa8, 0xdc, 0xfc, 0xb8, 0x7f, 0xb3, 0xba, 0x66, 0xfa, 0x44, 0x70, 0x9d, 0xba, 0x50, 0xd6, 0x1e, /* Byte value: 0x75 */ + 0xcf, 0xc1, 0x49, 0x8b, 0x50, 0xd5, 0x62, 0xa3, 0xb1, 0xc2, 0x1f, 0x87, 0x62, 0x97, 0x0a, 0x9e, /* Byte value: 0x76 */ + 0xc2, 0x84, 0xa3, 0x30, 0x16, 0x60, 
0xbf, 0x3b, 0xf1, 0x93, 0x9b, 0x6c, 0xbf, 0xbc, 0x72, 0xd9, /* Byte value: 0x77 */ + 0xcb, 0xab, 0x4f, 0xb9, 0x68, 0xb1, 0x26, 0x8d, 0x2d, 0xf6, 0x9d, 0x0e, 0x26, 0x79, 0x0d, 0x29, /* Byte value: 0x78 */ + 0x8e, 0x55, 0xc9, 0xc0, 0x58, 0x43, 0xb9, 0xec, 0x42, 0x09, 0x29, 0x73, 0xb9, 0xb5, 0x0b, 0xe2, /* Byte value: 0x79 */ + 0x33, 0x73, 0xcb, 0x6f, 0xf1, 0xde, 0x85, 0xf6, 0x36, 0xa4, 0x57, 0xb5, 0x85, 0xef, 0xb7, 0x8f, /* Byte value: 0x7a */ + 0x15, 0xfa, 0xfe, 0x17, 0xd6, 0x2e, 0x86, 0x7c, 0x8e, 0xe9, 0x0e, 0x5b, 0x86, 0x0a, 0x6a, 0x73, /* Byte value: 0x7b */ + 0x89, 0xf1, 0x2c, 0x06, 0x72, 0x0c, 0xce, 0x3f, 0xb7, 0x2a, 0x2b, 0xec, 0xce, 0xf6, 0x9f, 0xb2, /* Byte value: 0x7c */ + 0xd1, 0x21, 0x58, 0x0c, 0xe4, 0x18, 0x5f, 0x7e, 0xad, 0x54, 0x56, 0x1b, 0x5f, 0x2f, 0xfd, 0xa7, /* Byte value: 0x7d */ + 0x1a, 0x8a, 0x17, 0xb5, 0x8c, 0xa9, 0x79, 0xf3, 0x80, 0xa2, 0xcb, 0x15, 0x79, 0x56, 0xf0, 0x8e, /* Byte value: 0x7e */ + 0x98, 0x61, 0xd4, 0x23, 0x9c, 0x46, 0x0c, 0x6d, 0xa5, 0xf7, 0xa7, 0x3e, 0x0c, 0x12, 0xf2, 0x76, /* Byte value: 0x7f */ + 0xce, 0x3a, 0xa9, 0x66, 0x5e, 0xcc, 0x73, 0x49, 0x96, 0xcf, 0xde, 0x34, 0x73, 0x4d, 0x7b, 0xc3, /* Byte value: 0x80 */ + 0x1b, 0x71, 0xf7, 0x58, 0x82, 0xb0, 0x68, 0x19, 0xa7, 0xaf, 0x0a, 0xa6, 0x68, 0x8c, 0x81, 0xd3, /* Byte value: 0x81 */ + 0xe3, 0xa9, 0x73, 0x8e, 0x1b, 0xdf, 0xcb, 0x62, 0xbc, 0xfd, 0xc0, 0x1d, 0xcb, 0x1a, 0x3b, 0x75, /* Byte value: 0x82 */ + 0x9d, 0xf0, 0x32, 0xfc, 0xaa, 0x3b, 0x59, 0xa9, 0x1e, 0xce, 0xe4, 0x04, 0x59, 0x26, 0x84, 0x9c, /* Byte value: 0x83 */ + 0x8c, 0x60, 0xca, 0xd9, 0x44, 0x71, 0x9b, 0xfb, 0x0c, 0x13, 0x68, 0xd6, 0x9b, 0xc2, 0xe9, 0x58, /* Byte value: 0x84 */ + 0x6e, 0x32, 0x59, 0xba, 0x51, 0xb7, 0x41, 0x73, 0x97, 0xe3, 0x69, 0x78, 0x41, 0x02, 0xa3, 0x70, /* Byte value: 0x85 */ + 0x66, 0xe6, 0x55, 0xde, 0x21, 0x7f, 0xc9, 0x2f, 0x6c, 0x8b, 0xae, 0xa9, 0xc9, 0x1d, 0xad, 0xdd, /* Byte value: 0x86 */ + 0x58, 0xd0, 0x74, 0x0a, 0x96, 0x14, 0x91, 0x41, 0x1a, 0x7e, 0x7d, 0xf7, 0x91, 0xd9, 0x62, 0x15, /* Byte value: 0x87 */ + 0x9a, 0x54, 0xd7, 0x3a, 0x80, 0x74, 0x2e, 0x7a, 0xeb, 0xed, 0xe6, 0x9b, 0x2e, 0x65, 0x10, 0xcc, /* Byte value: 0x88 */ + 0x47, 0xcb, 0x85, 0x60, 0x2c, 0xc0, 0xbd, 0x76, 0x21, 0xe5, 0xf5, 0xd8, 0xbd, 0xbb, 0xe4, 0x71, /* Byte value: 0x89 */ + 0x18, 0xbf, 0x14, 0xac, 0x90, 0x9b, 0x5b, 0xe4, 0xce, 0xb8, 0x8a, 0xb0, 0x5b, 0x21, 0x12, 0x34, /* Byte value: 0x8a */ + 0xad, 0x4d, 0x1a, 0x67, 0x49, 0xce, 0xef, 0xa2, 0x41, 0x7d, 0x33, 0xa7, 0xef, 0x64, 0xa0, 0xf4, /* Byte value: 0x8b */ + 0x1e, 0xe0, 0x11, 0x87, 0xb4, 0xcd, 0x3d, 0xdd, 0x1c, 0x96, 0x49, 0x9c, 0x3d, 0xb8, 0xf7, 0x39, /* Byte value: 0x8c */ + 0xdc, 0x64, 0xb2, 0xb7, 0xa2, 0xad, 0x82, 0xe6, 0xed, 0x05, 0xd2, 0xf0, 0x82, 0x04, 0x85, 0xe0, /* Byte value: 0x8d */ + 0xa4, 0x62, 0xf6, 0xee, 0x37, 0x1f, 0x76, 0x14, 0x9d, 0x18, 0x35, 0xc5, 0x76, 0xa1, 0xdf, 0x04, /* Byte value: 0x8e */ + 0x34, 0xd7, 0x2e, 0xa9, 0xdb, 0x91, 0xf2, 0x25, 0xc3, 0x87, 0x55, 0x2a, 0xf2, 0xac, 0x23, 0xdf, /* Byte value: 0x8f */ + 0xfd, 0x49, 0x62, 0x09, 0xaf, 0x12, 0xf6, 0xbf, 0xa0, 0x6b, 0x89, 0x81, 0xf6, 0xa2, 0xcc, 0x4c, /* Byte value: 0x90 */ + 0x9f, 0xc5, 0x31, 0xe5, 0xb6, 0x09, 0x7b, 0xbe, 0x50, 0xd4, 0xa5, 0xa1, 0x7b, 0x51, 0x66, 0x26, /* Byte value: 0x91 */ + 0x8f, 0xae, 0x29, 0x2d, 0x56, 0x5a, 0xa8, 0x06, 0x65, 0x04, 0xe8, 0xc0, 0xa8, 0x6f, 0x7a, 0xbf, /* Byte value: 0x92 */ + 0xed, 0x22, 0x7a, 0xc1, 0x4f, 0x41, 0x25, 0x07, 0x95, 0xbb, 0xc4, 0xe0, 0x25, 0x9c, 0xd0, 0xd5, /* Byte value: 0x93 */ + 0xf1, 0xf7, 0x68, 0x5f, 0xe7, 0xbe, 0x3a, 0xcd, 0xc7, 0x37, 0xcc, 0xd9, 0x3a, 
0x53, 0xc5, 0x56, /* Byte value: 0x94 */ + 0xe5, 0xf6, 0x76, 0xa5, 0x3f, 0x89, 0xad, 0x5b, 0x6e, 0xd3, 0x03, 0x31, 0xad, 0x83, 0xde, 0x78, /* Byte value: 0x95 */ + 0x1f, 0x1b, 0xf1, 0x6a, 0xba, 0xd4, 0x2c, 0x37, 0x3b, 0x9b, 0x88, 0x2f, 0x2c, 0x62, 0x86, 0x64, /* Byte value: 0x96 */ + 0x82, 0xeb, 0xc3, 0x96, 0x10, 0xef, 0x75, 0x9e, 0x25, 0x55, 0x6c, 0x2b, 0x75, 0x44, 0x02, 0xf8, /* Byte value: 0x97 */ + 0xf0, 0x0c, 0x88, 0xb2, 0xe9, 0xa7, 0x2b, 0x27, 0xe0, 0x3a, 0x0d, 0x6a, 0x2b, 0x89, 0xb4, 0x0b, /* Byte value: 0x98 */ + 0xe6, 0x38, 0x95, 0x51, 0x2d, 0xa2, 0x9e, 0xa6, 0x07, 0xc4, 0x83, 0x27, 0x9e, 0x2e, 0x4d, 0x9f, /* Byte value: 0x99 */ + 0xc3, 0x7f, 0x43, 0xdd, 0x18, 0x79, 0xae, 0xd1, 0xd6, 0x9e, 0x5a, 0xdf, 0xae, 0x66, 0x03, 0x84, /* Byte value: 0x9a */ + 0xef, 0x17, 0x79, 0xd8, 0x53, 0x73, 0x07, 0x10, 0xdb, 0xa1, 0x85, 0x45, 0x07, 0xeb, 0x32, 0x6f, /* Byte value: 0x9b */ + 0x1d, 0x2e, 0xf2, 0x73, 0xa6, 0xe6, 0x0e, 0x20, 0x75, 0x81, 0xc9, 0x8a, 0x0e, 0x15, 0x64, 0xde, /* Byte value: 0x9c */ + 0xc0, 0xb1, 0xa0, 0x29, 0x0a, 0x52, 0x9d, 0x2c, 0xbf, 0x89, 0xda, 0xc9, 0x9d, 0xcb, 0x90, 0x63, /* Byte value: 0x9d */ + 0xf6, 0x53, 0x8d, 0x99, 0xcd, 0xf1, 0x4d, 0x1e, 0x32, 0x14, 0xce, 0x46, 0x4d, 0x10, 0x51, 0x06, /* Byte value: 0x9e */ + 0x4b, 0x75, 0x8f, 0x36, 0x64, 0x6c, 0x71, 0x04, 0x46, 0xb9, 0xb0, 0x80, 0x71, 0x4a, 0xed, 0x6b, /* Byte value: 0x9f */ + 0xee, 0xec, 0x99, 0x35, 0x5d, 0x6a, 0x16, 0xfa, 0xfc, 0xac, 0x44, 0xf6, 0x16, 0x31, 0x43, 0x32, /* Byte value: 0xa0 */ + 0xe9, 0x48, 0x7c, 0xf3, 0x77, 0x25, 0x61, 0x29, 0x09, 0x8f, 0x46, 0x69, 0x61, 0x72, 0xd7, 0x62, /* Byte value: 0xa1 */ + 0xe2, 0x52, 0x93, 0x63, 0x15, 0xc6, 0xda, 0x88, 0x9b, 0xf0, 0x01, 0xae, 0xda, 0xc0, 0x4a, 0x28, /* Byte value: 0xa2 */ + 0x16, 0x34, 0x1d, 0xe3, 0xc4, 0x05, 0xb5, 0x81, 0xe7, 0xfe, 0x8e, 0x4d, 0xb5, 0xa7, 0xf9, 0x94, /* Byte value: 0xa3 */ + 0x53, 0xca, 0x9b, 0x9a, 0xf4, 0xf7, 0x2a, 0xe0, 0x88, 0x01, 0x3a, 0x30, 0x2a, 0x6b, 0xff, 0x5f, /* Byte value: 0xa4 */ + 0xdd, 0x9f, 0x52, 0x5a, 0xac, 0xb4, 0x93, 0x0c, 0xca, 0x08, 0x13, 0x43, 0x93, 0xde, 0xf4, 0xbd, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x01, 0xfb, 0xe0, 0xed, 0x0e, 0x19, 0x11, 0xea, 0x27, 0x0d, 0xc1, 0xb3, 0x11, 0xda, 0x71, 0x5d, /* Byte value: 0xa7 */ + 0x8b, 0xc4, 0x2f, 0x1f, 0x6e, 0x3e, 0xec, 0x28, 0xf9, 0x30, 0x6a, 0x49, 0xec, 0x81, 0x7d, 0x08, /* Byte value: 0xa8 */ + 0xf3, 0xc2, 0x6b, 0x46, 0xfb, 0x8c, 0x18, 0xda, 0x89, 0x2d, 0x8d, 0x7c, 0x18, 0x24, 0x27, 0xec, /* Byte value: 0xa9 */ + 0xde, 0x51, 0xb1, 0xae, 0xbe, 0x9f, 0xa0, 0xf1, 0xa3, 0x1f, 0x93, 0x55, 0xa0, 0x73, 0x67, 0x5a, /* Byte value: 0xaa */ + 0x57, 0xa0, 0x9d, 0xa8, 0xcc, 0x93, 0x6e, 0xce, 0x14, 0x35, 0xb8, 0xb9, 0x6e, 0x85, 0xf8, 0xe8, /* Byte value: 0xab */ + 0x9e, 0x3e, 0xd1, 0x08, 0xb8, 0x10, 0x6a, 0x54, 0x77, 0xd9, 0x64, 0x12, 0x6a, 0x8b, 0x17, 0x7b, /* Byte value: 0xac */ + 0x38, 0x69, 0x24, 0xff, 0x93, 0x3d, 0x3e, 0x57, 0xa4, 0xdb, 0x10, 0x72, 0x3e, 0x5d, 0x2a, 0xc5, /* Byte value: 0xad */ + 0xbe, 0xe8, 0xe1, 0x5b, 0xbb, 0xb6, 0x0f, 0xe7, 0x1d, 0xba, 0xfe, 0xd0, 0x0f, 0xf7, 0x2f, 0x8a, /* Byte value: 0xae */ + 0x8a, 0x3f, 0xcf, 0xf2, 0x60, 0x27, 0xfd, 0xc2, 0xde, 0x3d, 0xab, 0xfa, 0xfd, 0x5b, 0x0c, 0x55, /* Byte value: 0xaf */ + 0x4f, 0x1f, 0x89, 0x04, 0x5c, 0x08, 0x35, 0x2a, 0xda, 0x8d, 0x32, 0x09, 0x35, 0xa4, 0xea, 0xdc, /* Byte value: 0xb0 */ + 0xa2, 0x3d, 0xf3, 0xc5, 0x13, 0x49, 0x10, 0x2d, 0x4f, 0x36, 0xf6, 0xe9, 0x10, 0x38, 0x3a, 0x09, /* Byte value: 0xb1 */ + 
0x61, 0x42, 0xb0, 0x18, 0x0b, 0x30, 0xbe, 0xfc, 0x99, 0xa8, 0xac, 0x36, 0xbe, 0x5e, 0x39, 0x8d, /* Byte value: 0xb2 */ + 0x9c, 0x0b, 0xd2, 0x11, 0xa4, 0x22, 0x48, 0x43, 0x39, 0xc3, 0x25, 0xb7, 0x48, 0xfc, 0xf5, 0xc1, /* Byte value: 0xb3 */ + 0x26, 0x89, 0x35, 0x78, 0x27, 0xf0, 0x03, 0x8a, 0xb8, 0x4d, 0x59, 0xee, 0x03, 0xe5, 0xdd, 0xfc, /* Byte value: 0xb4 */ + 0x60, 0xb9, 0x50, 0xf5, 0x05, 0x29, 0xaf, 0x16, 0xbe, 0xa5, 0x6d, 0x85, 0xaf, 0x84, 0x48, 0xd0, /* Byte value: 0xb5 */ + 0x78, 0x06, 0x44, 0x59, 0x95, 0xb2, 0xf4, 0xf2, 0x70, 0x1d, 0xe7, 0x35, 0xf4, 0xa5, 0x5a, 0xe4, /* Byte value: 0xb6 */ + 0xd9, 0xf5, 0x54, 0x68, 0x94, 0xd0, 0xd7, 0x22, 0x56, 0x3c, 0x91, 0xca, 0xd7, 0x30, 0xf3, 0x0a, /* Byte value: 0xb7 */ + 0x3c, 0x03, 0x22, 0xcd, 0xab, 0x59, 0x7a, 0x79, 0x38, 0xef, 0x92, 0xfb, 0x7a, 0xb3, 0x2d, 0x72, /* Byte value: 0xb8 */ + 0xfb, 0x16, 0x67, 0x22, 0x8b, 0x44, 0x90, 0x86, 0x72, 0x45, 0x4a, 0xad, 0x90, 0x3b, 0x29, 0x41, /* Byte value: 0xb9 */ + 0xc7, 0x15, 0x45, 0xef, 0x20, 0x1d, 0xea, 0xff, 0x4a, 0xaa, 0xd8, 0x56, 0xea, 0x88, 0x04, 0x33, /* Byte value: 0xba */ + 0xfe, 0x87, 0x81, 0xfd, 0xbd, 0x39, 0xc5, 0x42, 0xc9, 0x7c, 0x09, 0x97, 0xc5, 0x0f, 0x5f, 0xab, /* Byte value: 0xbb */ + 0x36, 0xe2, 0x2d, 0xb0, 0xc7, 0xa3, 0xd0, 0x32, 0x8d, 0x9d, 0x14, 0x8f, 0xd0, 0xdb, 0xc1, 0x65, /* Byte value: 0xbc */ + 0xa0, 0x08, 0xf0, 0xdc, 0x0f, 0x7b, 0x32, 0x3a, 0x01, 0x2c, 0xb7, 0x4c, 0x32, 0x4f, 0xd8, 0xb3, /* Byte value: 0xbd */ + 0x50, 0x04, 0x78, 0x6e, 0xe6, 0xdc, 0x19, 0x1d, 0xe1, 0x16, 0xba, 0x26, 0x19, 0xc6, 0x6c, 0xb8, /* Byte value: 0xbe */ + 0xf5, 0x9d, 0x6e, 0x6d, 0xdf, 0xda, 0x7e, 0xe3, 0x5b, 0x03, 0x4e, 0x50, 0x7e, 0xbd, 0xc2, 0xe1, /* Byte value: 0xbf */ + 0xaa, 0xe9, 0xff, 0xa1, 0x63, 0x81, 0x98, 0x71, 0xb4, 0x5e, 0x31, 0x38, 0x98, 0x27, 0x34, 0xa4, /* Byte value: 0xc0 */ + 0xd3, 0x14, 0x5b, 0x15, 0xf8, 0x2a, 0x7d, 0x69, 0xe3, 0x4e, 0x17, 0xbe, 0x7d, 0x58, 0x1f, 0x1d, /* Byte value: 0xc1 */ + 0x41, 0x94, 0x80, 0x4b, 0x08, 0x96, 0xdb, 0x4f, 0xf3, 0xcb, 0x36, 0xf4, 0xdb, 0x22, 0x01, 0x7c, /* Byte value: 0xc2 */ + 0x7f, 0xa2, 0xa1, 0x9f, 0xbf, 0xfd, 0x83, 0x21, 0x85, 0x3e, 0xe5, 0xaa, 0x83, 0xe6, 0xce, 0xb4, /* Byte value: 0xc3 */ + 0xa5, 0x99, 0x16, 0x03, 0x39, 0x06, 0x67, 0xfe, 0xba, 0x15, 0xf4, 0x76, 0x67, 0x7b, 0xae, 0x59, /* Byte value: 0xc4 */ + 0xb0, 0x63, 0xe8, 0x14, 0xef, 0x28, 0xe1, 0x82, 0x34, 0xfc, 0xfa, 0x2d, 0xe1, 0x71, 0xc4, 0x2a, /* Byte value: 0xc5 */ + 0x48, 0xbb, 0x6c, 0xc2, 0x76, 0x47, 0x42, 0xf9, 0x2f, 0xae, 0x30, 0x96, 0x42, 0xe7, 0x7e, 0x8c, /* Byte value: 0xc6 */ + 0x20, 0xd6, 0x30, 0x53, 0x03, 0xa6, 0x65, 0xb3, 0x6a, 0x63, 0x9a, 0xc2, 0x65, 0x7c, 0x38, 0xf1, /* Byte value: 0xc7 */ + 0xf4, 0x66, 0x8e, 0x80, 0xd1, 0xc3, 0x6f, 0x09, 0x7c, 0x0e, 0x8f, 0xe3, 0x6f, 0x67, 0xb3, 0xbc, /* Byte value: 0xc8 */ + 0x0b, 0x1a, 0xef, 0x90, 0x62, 0xe3, 0xbb, 0xa1, 0x92, 0x7f, 0x47, 0xc7, 0xbb, 0xb2, 0x9d, 0x4a, /* Byte value: 0xc9 */ + 0xd8, 0x0e, 0xb4, 0x85, 0x9a, 0xc9, 0xc6, 0xc8, 0x71, 0x31, 0x50, 0x79, 0xc6, 0xea, 0x82, 0x57, /* Byte value: 0xca */ + 0xb7, 0xc7, 0x0d, 0xd2, 0xc5, 0x67, 0x96, 0x51, 0xc1, 0xdf, 0xf8, 0xb2, 0x96, 0x32, 0x50, 0x7a, /* Byte value: 0xcb */ + 0xb1, 0x98, 0x08, 0xf9, 0xe1, 0x31, 0xf0, 0x68, 0x13, 0xf1, 0x3b, 0x9e, 0xf0, 0xab, 0xb5, 0x77, /* Byte value: 0xcc */ + 0xd5, 0x4b, 0x5e, 0x3e, 0xdc, 0x7c, 0x1b, 0x50, 0x31, 0x60, 0xd4, 0x92, 0x1b, 0xc1, 0xfa, 0x10, /* Byte value: 0xcd */ + 0x22, 0xe3, 0x33, 0x4a, 0x1f, 0x94, 0x47, 0xa4, 0x24, 0x79, 0xdb, 0x67, 0x47, 0x0b, 0xda, 0x4b, /* Byte value: 0xce */ + 0x4c, 0xd1, 0x6a, 0xf0, 0x4e, 0x23, 0x06, 
0xd7, 0xb3, 0x9a, 0xb2, 0x1f, 0x06, 0x09, 0x79, 0x3b, /* Byte value: 0xcf */ + 0x3f, 0xcd, 0xc1, 0x39, 0xb9, 0x72, 0x49, 0x84, 0x51, 0xf8, 0x12, 0xed, 0x49, 0x1e, 0xbe, 0x95, /* Byte value: 0xd0 */ + 0x75, 0x43, 0xae, 0xe2, 0xd3, 0x07, 0x29, 0x6a, 0x30, 0x4c, 0x63, 0xde, 0x29, 0x8e, 0x22, 0xa3, /* Byte value: 0xd1 */ + 0x19, 0x44, 0xf4, 0x41, 0x9e, 0x82, 0x4a, 0x0e, 0xe9, 0xb5, 0x4b, 0x03, 0x4a, 0xfb, 0x63, 0x69, /* Byte value: 0xd2 */ + 0x74, 0xb8, 0x4e, 0x0f, 0xdd, 0x1e, 0x38, 0x80, 0x17, 0x41, 0xa2, 0x6d, 0x38, 0x54, 0x53, 0xfe, /* Byte value: 0xd3 */ + 0x13, 0xa5, 0xfb, 0x3c, 0xf2, 0x78, 0xe0, 0x45, 0x5c, 0xc7, 0xcd, 0x77, 0xe0, 0x93, 0x8f, 0x7e, /* Byte value: 0xd4 */ + 0xd7, 0x7e, 0x5d, 0x27, 0xc0, 0x4e, 0x39, 0x47, 0x7f, 0x7a, 0x95, 0x37, 0x39, 0xb6, 0x18, 0xaa, /* Byte value: 0xd5 */ + 0x84, 0xb4, 0xc6, 0xbd, 0x34, 0xb9, 0x13, 0xa7, 0xf7, 0x7b, 0xaf, 0x07, 0x13, 0xdd, 0xe7, 0xf5, /* Byte value: 0xd6 */ + 0x7b, 0xc8, 0xa7, 0xad, 0x87, 0x99, 0xc7, 0x0f, 0x19, 0x0a, 0x67, 0x23, 0xc7, 0x08, 0xc9, 0x03, /* Byte value: 0xd7 */ + 0x42, 0x5a, 0x63, 0xbf, 0x1a, 0xbd, 0xe8, 0xb2, 0x9a, 0xdc, 0xb6, 0xe2, 0xe8, 0x8f, 0x92, 0x9b, /* Byte value: 0xd8 */ + 0x11, 0x90, 0xf8, 0x25, 0xee, 0x4a, 0xc2, 0x52, 0x12, 0xdd, 0x8c, 0xd2, 0xc2, 0xe4, 0x6d, 0xc4, /* Byte value: 0xd9 */ + 0xe0, 0x67, 0x90, 0x7a, 0x09, 0xf4, 0xf8, 0x9f, 0xd5, 0xea, 0x40, 0x0b, 0xf8, 0xb7, 0xa8, 0x92, /* Byte value: 0xda */ + 0x71, 0x29, 0xa8, 0xd0, 0xeb, 0x63, 0x6d, 0x44, 0xac, 0x78, 0xe1, 0x57, 0x6d, 0x60, 0x25, 0x14, /* Byte value: 0xdb */ + 0x31, 0x46, 0xc8, 0x76, 0xed, 0xec, 0xa7, 0xe1, 0x78, 0xbe, 0x16, 0x10, 0xa7, 0x98, 0x55, 0x35, /* Byte value: 0xdc */ + 0x67, 0x1d, 0xb5, 0x33, 0x2f, 0x66, 0xd8, 0xc5, 0x4b, 0x86, 0x6f, 0x1a, 0xd8, 0xc7, 0xdc, 0x80, /* Byte value: 0xdd */ + 0xc8, 0x65, 0xac, 0x4d, 0x7a, 0x9a, 0x15, 0x70, 0x44, 0xe1, 0x1d, 0x18, 0x15, 0xd4, 0x9e, 0xce, /* Byte value: 0xde */ + 0x4d, 0x2a, 0x8a, 0x1d, 0x40, 0x3a, 0x17, 0x3d, 0x94, 0x97, 0x73, 0xac, 0x17, 0xd3, 0x08, 0x66, /* Byte value: 0xdf */ + 0xfc, 0xb2, 0x82, 0xe4, 0xa1, 0x0b, 0xe7, 0x55, 0x87, 0x66, 0x48, 0x32, 0xe7, 0x78, 0xbd, 0x11, /* Byte value: 0xe0 */ + 0xb3, 0xad, 0x0b, 0xe0, 0xfd, 0x03, 0xd2, 0x7f, 0x5d, 0xeb, 0x7a, 0x3b, 0xd2, 0xdc, 0x57, 0xcd, /* Byte value: 0xe1 */ + 0x44, 0x05, 0x66, 0x94, 0x3e, 0xeb, 0x8e, 0x8b, 0x48, 0xf2, 0x75, 0xce, 0x8e, 0x16, 0x77, 0x96, /* Byte value: 0xe2 */ + 0xc5, 0x20, 0x46, 0xf6, 0x3c, 0x2f, 0xc8, 0xe8, 0x04, 0xb0, 0x99, 0xf3, 0xc8, 0xff, 0xe6, 0x89, /* Byte value: 0xe3 */ + 0x14, 0x01, 0x1e, 0xfa, 0xd8, 0x37, 0x97, 0x96, 0xa9, 0xe4, 0xcf, 0xe8, 0x97, 0xd0, 0x1b, 0x2e, /* Byte value: 0xe4 */ + 0x2b, 0xcc, 0xdf, 0xc3, 0x61, 0x45, 0xde, 0x12, 0xf8, 0x1c, 0xdd, 0x05, 0xde, 0xce, 0xa5, 0xbb, /* Byte value: 0xe5 */ + 0x12, 0x5e, 0x1b, 0xd1, 0xfc, 0x61, 0xf1, 0xaf, 0x7b, 0xca, 0x0c, 0xc4, 0xf1, 0x49, 0xfe, 0x23, /* Byte value: 0xe6 */ + 0xb6, 0x3c, 0xed, 0x3f, 0xcb, 0x7e, 0x87, 0xbb, 0xe6, 0xd2, 0x39, 0x01, 0x87, 0xe8, 0x21, 0x27, /* Byte value: 0xe7 */ + 0xbc, 0xdd, 0xe2, 0x42, 0xa7, 0x84, 0x2d, 0xf0, 0x53, 0xa0, 0xbf, 0x75, 0x2d, 0x80, 0xcd, 0x30, /* Byte value: 0xe8 */ + 0x8d, 0x9b, 0x2a, 0x34, 0x4a, 0x68, 0x8a, 0x11, 0x2b, 0x1e, 0xa9, 0x65, 0x8a, 0x18, 0x98, 0x05, /* Byte value: 0xe9 */ + 0x90, 0xb5, 0xd8, 0x47, 0xec, 0x8e, 0x84, 0x31, 0x5e, 0x9f, 0x60, 0xef, 0x84, 0x0d, 0xfc, 0xdb, /* Byte value: 0xea */ + 0x77, 0x76, 0xad, 0xfb, 0xcf, 0x35, 0x0b, 0x7d, 0x7e, 0x56, 0x22, 0x7b, 0x0b, 0xf9, 0xc0, 0x19, /* Byte value: 0xeb */ + 0x94, 0xdf, 0xde, 0x75, 0xd4, 0xea, 0xc0, 0x1f, 0xc2, 0xab, 0xe2, 0x66, 0xc0, 0xe3, 
0xfb, 0x6c, /* Byte value: 0xec */ + 0xe7, 0xc3, 0x75, 0xbc, 0x23, 0xbb, 0x8f, 0x4c, 0x20, 0xc9, 0x42, 0x94, 0x8f, 0xf4, 0x3c, 0xc2, /* Byte value: 0xed */ + 0x5b, 0x1e, 0x97, 0xfe, 0x84, 0x3f, 0xa2, 0xbc, 0x73, 0x69, 0xfd, 0xe1, 0xa2, 0x74, 0xf1, 0xf2, /* Byte value: 0xee */ + 0x64, 0xd3, 0x56, 0xc7, 0x3d, 0x4d, 0xeb, 0x38, 0x22, 0x91, 0xef, 0x0c, 0xeb, 0x6a, 0x4f, 0x67, /* Byte value: 0xef */ + 0x0a, 0xe1, 0x0f, 0x7d, 0x6c, 0xfa, 0xaa, 0x4b, 0xb5, 0x72, 0x86, 0x74, 0xaa, 0x68, 0xec, 0x17, /* Byte value: 0xf0 */ + 0xab, 0x12, 0x1f, 0x4c, 0x6d, 0x98, 0x89, 0x9b, 0x93, 0x53, 0xf0, 0x8b, 0x89, 0xfd, 0x45, 0xf9, /* Byte value: 0xf1 */ + 0x03, 0xce, 0xe3, 0xf4, 0x12, 0x2b, 0x33, 0xfd, 0x69, 0x17, 0x80, 0x16, 0x33, 0xad, 0x93, 0xe7, /* Byte value: 0xf2 */ + 0x80, 0xde, 0xc0, 0x8f, 0x0c, 0xdd, 0x57, 0x89, 0x6b, 0x4f, 0x2d, 0x8e, 0x57, 0x33, 0xe0, 0x42, /* Byte value: 0xf3 */ + 0x83, 0x10, 0x23, 0x7b, 0x1e, 0xf6, 0x64, 0x74, 0x02, 0x58, 0xad, 0x98, 0x64, 0x9e, 0x73, 0xa5, /* Byte value: 0xf4 */ + 0xa6, 0x57, 0xf5, 0xf7, 0x2b, 0x2d, 0x54, 0x03, 0xd3, 0x02, 0x74, 0x60, 0x54, 0xd6, 0x3d, 0xbe, /* Byte value: 0xf5 */ + 0x09, 0x2f, 0xec, 0x89, 0x7e, 0xd1, 0x99, 0xb6, 0xdc, 0x65, 0x06, 0x62, 0x99, 0xc5, 0x7f, 0xf0, /* Byte value: 0xf6 */ + 0xac, 0xb6, 0xfa, 0x8a, 0x47, 0xd7, 0xfe, 0x48, 0x66, 0x70, 0xf2, 0x14, 0xfe, 0xbe, 0xd1, 0xa9, /* Byte value: 0xf7 */ + 0x2c, 0x68, 0x3a, 0x05, 0x4b, 0x0a, 0xa9, 0xc1, 0x0d, 0x3f, 0xdf, 0x9a, 0xa9, 0x8d, 0x31, 0xeb, /* Byte value: 0xf8 */ + 0x7e, 0x59, 0x41, 0x72, 0xb1, 0xe4, 0x92, 0xcb, 0xa2, 0x33, 0x24, 0x19, 0x92, 0x3c, 0xbf, 0xe9, /* Byte value: 0xf9 */ + 0x2f, 0xa6, 0xd9, 0xf1, 0x59, 0x21, 0x9a, 0x3c, 0x64, 0x28, 0x5f, 0x8c, 0x9a, 0x20, 0xa2, 0x0c, /* Byte value: 0xfa */ + 0xb5, 0xf2, 0x0e, 0xcb, 0xd9, 0x55, 0xb4, 0x46, 0x8f, 0xc5, 0xb9, 0x17, 0xb4, 0x45, 0xb2, 0xc0, /* Byte value: 0xfb */ + 0x3b, 0xa7, 0xc7, 0x0b, 0x81, 0x16, 0x0d, 0xaa, 0xcd, 0xcc, 0x90, 0x64, 0x0d, 0xf0, 0xb9, 0x22, /* Byte value: 0xfc */ + 0xbd, 0x26, 0x02, 0xaf, 0xa9, 0x9d, 0x3c, 0x1a, 0x74, 0xad, 0x7e, 0xc6, 0x3c, 0x5a, 0xbc, 0x6d, /* Byte value: 0xfd */ + 0x04, 0x6a, 0x06, 0x32, 0x38, 0x64, 0x44, 0x2e, 0x9c, 0x34, 0x82, 0x89, 0x44, 0xee, 0x07, 0xb7, /* Byte value: 0xfe */ + 0xeb, 0x7d, 0x7f, 0xea, 0x6b, 0x17, 0x43, 0x3e, 0x47, 0x95, 0x07, 0xcc, 0x43, 0x05, 0x35, 0xd8, /* Byte value: 0xff */ + 0xbb, 0x79, 0x07, 0x84, 0x8d, 0xcb, 0x5a, 0x23, 0xa6, 0x83, 0xbd, 0xea, 0x5a, 0xc3, 0x59, 0x60, /* Byte value: 0x00 */ + + /* Matrix row: 13 */ + 0x79, 0x1a, 0x2d, 0x6d, 0xc9, 0x3b, 0x13, 0xe1, 0x79, 0x58, 0x80, 0x80, 0x75, 0x80, 0xb7, 0x36, /* Byte value: 0x01 */ + 0x91, 0x58, 0xcb, 0xc6, 0xfe, 0x1f, 0xb1, 0x50, 0x91, 0xd6, 0x1a, 0x1a, 0x1d, 0x1a, 0x89, 0x33, /* Byte value: 0x02 */ + 0x99, 0x09, 0xd2, 0xbc, 0x88, 0x79, 0x33, 0xc2, 0x99, 0xe9, 0xdc, 0xdc, 0x3b, 0xdc, 0x07, 0x9c, /* Byte value: 0x03 */ + 0xb9, 0x8e, 0xb6, 0x97, 0x93, 0x22, 0x7e, 0xcf, 0xb9, 0x15, 0x42, 0x42, 0xa3, 0x42, 0x7a, 0x65, /* Byte value: 0x04 */ + 0x71, 0x4b, 0x34, 0x17, 0xbf, 0x5d, 0x91, 0x73, 0x71, 0x67, 0x46, 0x46, 0x53, 0x46, 0x39, 0x99, /* Byte value: 0x05 */ + 0xea, 0xc7, 0x71, 0x54, 0xcb, 0xdc, 0x63, 0x74, 0xea, 0xf1, 0x4a, 0x4a, 0x80, 0x4a, 0xfc, 0x5e, /* Byte value: 0x06 */ + 0xd7, 0xd9, 0x79, 0x52, 0x0f, 0x62, 0xab, 0xc6, 0xd7, 0x6c, 0x56, 0x56, 0x15, 0x56, 0xf6, 0xef, /* Byte value: 0x07 */ + 0x95, 0x91, 0x26, 0xfb, 0xc5, 0x2c, 0xf0, 0x19, 0x95, 0x28, 0x79, 0x79, 0x0e, 0x79, 0xce, 0x85, /* Byte value: 0x08 */ + 0x55, 0x05, 0xbd, 0x01, 0x9f, 0x35, 0x9d, 0x37, 0x55, 0x65, 0xbb, 0xbb, 0xd8, 0xbb, 0x03, 0xd6, /* Byte 
value: 0x09 */ + 0xda, 0xe2, 0x27, 0x8b, 0x3c, 0x4b, 0xe9, 0x9e, 0xda, 0x73, 0x9b, 0x9b, 0x54, 0x9b, 0x5e, 0x3a, /* Byte value: 0x0a */ + 0xdb, 0x41, 0x8d, 0x15, 0x42, 0x37, 0x68, 0x1d, 0xdb, 0xad, 0xf3, 0xf3, 0x20, 0xf3, 0x3f, 0xf6, /* Byte value: 0x0b */ + 0xb5, 0x16, 0x42, 0xd0, 0xde, 0x77, 0xbd, 0x14, 0xb5, 0xd4, 0xe7, 0xe7, 0x96, 0xe7, 0xb3, 0x7c, /* Byte value: 0x0c */ + 0x3f, 0x9b, 0x9f, 0xf9, 0x38, 0x46, 0x09, 0x77, 0x3f, 0xe2, 0xcc, 0xcc, 0x7d, 0xcc, 0xc8, 0xea, /* Byte value: 0x0d */ + 0x54, 0xa6, 0x17, 0x9f, 0xe1, 0x49, 0x1c, 0xb4, 0x54, 0xbb, 0xd3, 0xd3, 0xac, 0xd3, 0x62, 0x1a, /* Byte value: 0x0e */ + 0x7d, 0xd3, 0xc0, 0x50, 0xf2, 0x08, 0x52, 0xa8, 0x7d, 0xa6, 0xe3, 0xe3, 0x66, 0xe3, 0xf0, 0x80, /* Byte value: 0x0f */ + 0xd5, 0x5c, 0xee, 0xad, 0xf3, 0x9a, 0x6a, 0x03, 0xd5, 0x13, 0x86, 0x86, 0xfd, 0x86, 0x34, 0xb4, /* Byte value: 0x10 */ + 0xbd, 0x47, 0x5b, 0xaa, 0xa8, 0x11, 0x3f, 0x86, 0xbd, 0xeb, 0x21, 0x21, 0xb0, 0x21, 0x3d, 0xd3, /* Byte value: 0x11 */ + 0xa9, 0x2c, 0x84, 0x63, 0x7f, 0xee, 0xb9, 0x28, 0xa9, 0x6b, 0x0d, 0x0d, 0xef, 0x0d, 0xa5, 0xf8, /* Byte value: 0x12 */ + 0xfe, 0xac, 0xae, 0x9d, 0x1c, 0x23, 0xe5, 0xda, 0xfe, 0x71, 0x66, 0x66, 0xdf, 0x66, 0x64, 0x75, /* Byte value: 0x13 */ + 0x3b, 0x52, 0x72, 0xc4, 0x03, 0x75, 0x48, 0x3e, 0x3b, 0x1c, 0xaf, 0xaf, 0x6e, 0xaf, 0x8f, 0x5c, /* Byte value: 0x14 */ + 0x1d, 0x99, 0x6c, 0x2d, 0xdf, 0xe5, 0x85, 0xbf, 0x1d, 0x61, 0x82, 0x82, 0x0d, 0x82, 0x77, 0x48, /* Byte value: 0x15 */ + 0x36, 0x69, 0x2c, 0x1d, 0x30, 0x5c, 0x0a, 0x66, 0x36, 0x03, 0x62, 0x62, 0x2f, 0x62, 0x27, 0x89, /* Byte value: 0x16 */ + 0x38, 0x74, 0x4f, 0xa5, 0x81, 0xf1, 0x08, 0x78, 0x38, 0xbd, 0x17, 0x17, 0xf2, 0x17, 0x2c, 0xcb, /* Byte value: 0x17 */ + 0x07, 0xef, 0xd0, 0x5c, 0xb9, 0xb7, 0x01, 0x0f, 0x07, 0x5f, 0xdb, 0xdb, 0x8f, 0xdb, 0xe4, 0x21, /* Byte value: 0x18 */ + 0x1b, 0xd5, 0x16, 0xef, 0x18, 0x2e, 0x05, 0x33, 0x1b, 0xe0, 0x31, 0x31, 0xf6, 0x31, 0xf2, 0xa5, /* Byte value: 0x19 */ + 0xfb, 0xc6, 0xe9, 0x3e, 0x59, 0x6c, 0x25, 0x10, 0xfb, 0x51, 0x6d, 0x6d, 0xb8, 0x6d, 0x42, 0x0f, /* Byte value: 0x1a */ + 0x70, 0xe8, 0x9e, 0x89, 0xc1, 0x21, 0x10, 0xf0, 0x70, 0xb9, 0x2e, 0x2e, 0x27, 0x2e, 0x58, 0x55, /* Byte value: 0x1b */ + 0x89, 0xab, 0xe0, 0x48, 0x64, 0xb5, 0xf4, 0x25, 0x89, 0x97, 0x93, 0x93, 0x77, 0x93, 0xd8, 0x01, /* Byte value: 0x1c */ + 0x4a, 0x19, 0x46, 0xd3, 0xbc, 0x28, 0xd9, 0x4d, 0x4a, 0x7b, 0xe9, 0xe9, 0x3d, 0xe9, 0xb6, 0xc5, /* Byte value: 0x1d */ + 0xae, 0xc3, 0x54, 0x3f, 0xc6, 0x59, 0xb8, 0x27, 0xae, 0x34, 0xd6, 0xd6, 0x60, 0xd6, 0x41, 0xd9, /* Byte value: 0x1e */ + 0x3d, 0x1e, 0x08, 0x06, 0xc4, 0xbe, 0xc8, 0xb2, 0x3d, 0x9d, 0x1c, 0x1c, 0x95, 0x1c, 0x0a, 0xb1, /* Byte value: 0x1f */ + 0x29, 0x75, 0xd7, 0xcf, 0x13, 0x41, 0x4e, 0x1c, 0x29, 0x1d, 0x30, 0x30, 0xca, 0x30, 0x92, 0x9a, /* Byte value: 0x20 */ + 0x8a, 0x8d, 0xdd, 0x29, 0xe6, 0x31, 0xb4, 0x63, 0x8a, 0x36, 0x2b, 0x2b, 0xeb, 0x2b, 0x7b, 0x96, /* Byte value: 0x21 */ + 0xcd, 0xaf, 0xc5, 0x23, 0x69, 0x30, 0x2f, 0x76, 0xcd, 0x52, 0x0f, 0x0f, 0x97, 0x0f, 0x65, 0x86, /* Byte value: 0x22 */ + 0x41, 0x6e, 0x62, 0xc8, 0x48, 0xca, 0x1b, 0x99, 0x41, 0xe5, 0x97, 0x97, 0x87, 0x97, 0x9b, 0xfd, /* Byte value: 0x23 */ + 0xce, 0x89, 0xf8, 0x42, 0xeb, 0xb4, 0x6f, 0x30, 0xce, 0xf3, 0xb7, 0xb7, 0x0b, 0xb7, 0xc6, 0x11, /* Byte value: 0x24 */ + 0x16, 0xee, 0x48, 0x36, 0x2b, 0x07, 0x47, 0x6b, 0x16, 0xff, 0xfc, 0xfc, 0xb7, 0xfc, 0x5a, 0x70, /* Byte value: 0x25 */ + 0x6c, 0xd2, 0x58, 0x3a, 0x60, 0xb8, 0x14, 0xcc, 0x6c, 0x06, 0xc4, 0xc4, 0x5e, 0xc4, 0x4e, 0xd1, /* Byte value: 0x26 */ + 0x1f, 0x1c, 0xfb, 0xd2, 
0x23, 0x1d, 0x44, 0x7a, 0x1f, 0x1e, 0x52, 0x52, 0xe5, 0x52, 0xb5, 0x13, /* Byte value: 0x27 */ + 0xe0, 0x13, 0xff, 0xd1, 0x41, 0x42, 0x20, 0x23, 0xe0, 0xb1, 0x5c, 0x5c, 0x4e, 0x5c, 0xb0, 0xaa, /* Byte value: 0x28 */ + 0xf5, 0xdb, 0x8a, 0x86, 0xe8, 0xc1, 0x27, 0x0e, 0xf5, 0xef, 0x18, 0x18, 0x65, 0x18, 0x49, 0x4d, /* Byte value: 0x29 */ + 0xb0, 0x7c, 0x05, 0x73, 0x9b, 0x38, 0x7d, 0xde, 0xb0, 0xf4, 0xec, 0xec, 0xf1, 0xec, 0x95, 0x06, /* Byte value: 0x2a */ + 0xde, 0x2b, 0xca, 0xb6, 0x07, 0x78, 0xa8, 0xd7, 0xde, 0x8d, 0xf8, 0xf8, 0x47, 0xf8, 0x19, 0x8c, /* Byte value: 0x2b */ + 0x03, 0x26, 0x3d, 0x61, 0x82, 0x84, 0x40, 0x46, 0x03, 0xa1, 0xb8, 0xb8, 0x9c, 0xb8, 0xa3, 0x97, /* Byte value: 0x2c */ + 0xd0, 0x36, 0xa9, 0x0e, 0xb6, 0xd5, 0xaa, 0xc9, 0xd0, 0x33, 0x8d, 0x8d, 0x9a, 0x8d, 0x12, 0xce, /* Byte value: 0x2d */ + 0x8e, 0x44, 0x30, 0x14, 0xdd, 0x02, 0xf5, 0x2a, 0x8e, 0xc8, 0x48, 0x48, 0xf8, 0x48, 0x3c, 0x20, /* Byte value: 0x2e */ + 0x23, 0xa1, 0x59, 0x4a, 0x99, 0xdf, 0x0d, 0x4b, 0x23, 0x5d, 0x26, 0x26, 0x04, 0x26, 0xde, 0x6e, /* Byte value: 0x2f */ + 0x0a, 0xd4, 0x8e, 0x85, 0x8a, 0x9e, 0x43, 0x57, 0x0a, 0x40, 0x16, 0x16, 0xce, 0x16, 0x4c, 0xf4, /* Byte value: 0x30 */ + 0xf3, 0x97, 0xf0, 0x44, 0x2f, 0x0a, 0xa7, 0x82, 0xf3, 0x6e, 0xab, 0xab, 0x9e, 0xab, 0xcc, 0xa0, /* Byte value: 0x31 */ + 0x06, 0x4c, 0x7a, 0xc2, 0xc7, 0xcb, 0x80, 0x8c, 0x06, 0x81, 0xb3, 0xb3, 0xfb, 0xb3, 0x85, 0xed, /* Byte value: 0x32 */ + 0xdf, 0x88, 0x60, 0x28, 0x79, 0x04, 0x29, 0x54, 0xdf, 0x53, 0x90, 0x90, 0x33, 0x90, 0x78, 0x40, /* Byte value: 0x33 */ + 0x4d, 0xf6, 0x96, 0x8f, 0x05, 0x9f, 0xd8, 0x42, 0x4d, 0x24, 0x32, 0x32, 0xb2, 0x32, 0x52, 0xe4, /* Byte value: 0x34 */ + 0x98, 0xaa, 0x78, 0x22, 0xf6, 0x05, 0xb2, 0x41, 0x98, 0x37, 0xb4, 0xb4, 0x4f, 0xb4, 0x66, 0x50, /* Byte value: 0x35 */ + 0x97, 0x14, 0xb1, 0x04, 0x39, 0xd4, 0x31, 0xdc, 0x97, 0x57, 0xa9, 0xa9, 0xe6, 0xa9, 0x0c, 0xde, /* Byte value: 0x36 */ + 0xad, 0xe5, 0x69, 0x5e, 0x44, 0xdd, 0xf8, 0x61, 0xad, 0x95, 0x6e, 0x6e, 0xfc, 0x6e, 0xe2, 0x4e, /* Byte value: 0x37 */ + 0x15, 0xc8, 0x75, 0x57, 0xa9, 0x83, 0x07, 0x2d, 0x15, 0x5e, 0x44, 0x44, 0x2b, 0x44, 0xf9, 0xe7, /* Byte value: 0x38 */ + 0xa2, 0x5b, 0xa0, 0x78, 0x8b, 0x0c, 0x7b, 0xfc, 0xa2, 0xf5, 0x73, 0x73, 0x55, 0x73, 0x88, 0xc0, /* Byte value: 0x39 */ + 0xab, 0xa9, 0x13, 0x9c, 0x83, 0x16, 0x78, 0xed, 0xab, 0x14, 0xdd, 0xdd, 0x07, 0xdd, 0x67, 0xa3, /* Byte value: 0x3a */ + 0xc0, 0x94, 0x9b, 0xfa, 0x5a, 0x19, 0x6d, 0x2e, 0xc0, 0x4d, 0xc2, 0xc2, 0xd6, 0xc2, 0xcd, 0x53, /* Byte value: 0x3b */ + 0xb6, 0x30, 0x7f, 0xb1, 0x5c, 0xf3, 0xfd, 0x52, 0xb6, 0x75, 0x5f, 0x5f, 0x0a, 0x5f, 0x10, 0xeb, /* Byte value: 0x3c */ + 0x53, 0x49, 0xc7, 0xc3, 0x58, 0xfe, 0x1d, 0xbb, 0x53, 0xe4, 0x08, 0x08, 0x23, 0x08, 0x86, 0x3b, /* Byte value: 0x3d */ + 0xed, 0x28, 0xa1, 0x08, 0x72, 0x6b, 0x62, 0x7b, 0xed, 0xae, 0x91, 0x91, 0x0f, 0x91, 0x18, 0x7f, /* Byte value: 0x3e */ + 0xc1, 0x37, 0x31, 0x64, 0x24, 0x65, 0xec, 0xad, 0xc1, 0x93, 0xaa, 0xaa, 0xa2, 0xaa, 0xac, 0x9f, /* Byte value: 0x3f */ + 0xe1, 0xb0, 0x55, 0x4f, 0x3f, 0x3e, 0xa1, 0xa0, 0xe1, 0x6f, 0x34, 0x34, 0x3a, 0x34, 0xd1, 0x66, /* Byte value: 0x40 */ + 0x62, 0xcf, 0x3b, 0x82, 0xd1, 0x15, 0x16, 0xd2, 0x62, 0xb8, 0xb1, 0xb1, 0x83, 0xb1, 0x45, 0x93, /* Byte value: 0x41 */ + 0x24, 0x4e, 0x89, 0x16, 0x20, 0x68, 0x0c, 0x44, 0x24, 0x02, 0xfd, 0xfd, 0x8b, 0xfd, 0x3a, 0x4f, /* Byte value: 0x42 */ + 0xe9, 0xe1, 0x4c, 0x35, 0x49, 0x58, 0x23, 0x32, 0xe9, 0x50, 0xf2, 0xf2, 0x1c, 0xf2, 0x5f, 0xc9, /* Byte value: 0x43 */ + 0x65, 0x20, 0xeb, 0xde, 0x68, 0xa2, 0x17, 0xdd, 0x65, 0xe7, 0x6a, 
0x6a, 0x0c, 0x6a, 0xa1, 0xb2, /* Byte value: 0x44 */ + 0xec, 0x8b, 0x0b, 0x96, 0x0c, 0x17, 0xe3, 0xf8, 0xec, 0x70, 0xf9, 0xf9, 0x7b, 0xf9, 0x79, 0xb3, /* Byte value: 0x45 */ + 0x5d, 0x54, 0xa4, 0x7b, 0xe9, 0x53, 0x1f, 0xa5, 0x5d, 0x5a, 0x7d, 0x7d, 0xfe, 0x7d, 0x8d, 0x79, /* Byte value: 0x46 */ + 0x26, 0xcb, 0x1e, 0xe9, 0xdc, 0x90, 0xcd, 0x81, 0x26, 0x7d, 0x2d, 0x2d, 0x63, 0x2d, 0xf8, 0x14, /* Byte value: 0x47 */ + 0xbe, 0x61, 0x66, 0xcb, 0x2a, 0x95, 0x7f, 0xc0, 0xbe, 0x4a, 0x99, 0x99, 0x2c, 0x99, 0x9e, 0x44, /* Byte value: 0x48 */ + 0x21, 0x24, 0xce, 0xb5, 0x65, 0x27, 0xcc, 0x8e, 0x21, 0x22, 0xf6, 0xf6, 0xec, 0xf6, 0x1c, 0x35, /* Byte value: 0x49 */ + 0x4b, 0xba, 0xec, 0x4d, 0xc2, 0x54, 0x58, 0xce, 0x4b, 0xa5, 0x81, 0x81, 0x49, 0x81, 0xd7, 0x09, /* Byte value: 0x4a */ + 0x48, 0x9c, 0xd1, 0x2c, 0x40, 0xd0, 0x18, 0x88, 0x48, 0x04, 0x39, 0x39, 0xd5, 0x39, 0x74, 0x9e, /* Byte value: 0x4b */ + 0xca, 0x40, 0x15, 0x7f, 0xd0, 0x87, 0x2e, 0x79, 0xca, 0x0d, 0xd4, 0xd4, 0x18, 0xd4, 0x81, 0xa7, /* Byte value: 0x4c */ + 0xf7, 0x5e, 0x1d, 0x79, 0x14, 0x39, 0xe6, 0xcb, 0xf7, 0x90, 0xc8, 0xc8, 0x8d, 0xc8, 0x8b, 0x16, /* Byte value: 0x4d */ + 0x7c, 0x70, 0x6a, 0xce, 0x8c, 0x74, 0xd3, 0x2b, 0x7c, 0x78, 0x8b, 0x8b, 0x12, 0x8b, 0x91, 0x4c, /* Byte value: 0x4e */ + 0xff, 0x0f, 0x04, 0x03, 0x62, 0x5f, 0x64, 0x59, 0xff, 0xaf, 0x0e, 0x0e, 0xab, 0x0e, 0x05, 0xb9, /* Byte value: 0x4f */ + 0x20, 0x87, 0x64, 0x2b, 0x1b, 0x5b, 0x4d, 0x0d, 0x20, 0xfc, 0x9e, 0x9e, 0x98, 0x9e, 0x7d, 0xf9, /* Byte value: 0x50 */ + 0xd1, 0x95, 0x03, 0x90, 0xc8, 0xa9, 0x2b, 0x4a, 0xd1, 0xed, 0xe5, 0xe5, 0xee, 0xe5, 0x73, 0x02, /* Byte value: 0x51 */ + 0x85, 0x33, 0x14, 0x0f, 0x29, 0xe0, 0x37, 0xfe, 0x85, 0x56, 0x36, 0x36, 0x42, 0x36, 0x11, 0x18, /* Byte value: 0x52 */ + 0x58, 0x3e, 0xe3, 0xd8, 0xac, 0x1c, 0xdf, 0x6f, 0x58, 0x7a, 0x76, 0x76, 0x99, 0x76, 0xab, 0x03, /* Byte value: 0x53 */ + 0x49, 0x3f, 0x7b, 0xb2, 0x3e, 0xac, 0x99, 0x0b, 0x49, 0xda, 0x51, 0x51, 0xa1, 0x51, 0x15, 0x52, /* Byte value: 0x54 */ + 0xfa, 0x65, 0x43, 0xa0, 0x27, 0x10, 0xa4, 0x93, 0xfa, 0x8f, 0x05, 0x05, 0xcc, 0x05, 0x23, 0xc3, /* Byte value: 0x55 */ + 0x87, 0xb6, 0x83, 0xf0, 0xd5, 0x18, 0xf6, 0x3b, 0x87, 0x29, 0xe6, 0xe6, 0xaa, 0xe6, 0xd3, 0x43, /* Byte value: 0x56 */ + 0x27, 0x68, 0xb4, 0x77, 0xa2, 0xec, 0x4c, 0x02, 0x27, 0xa3, 0x45, 0x45, 0x17, 0x45, 0x99, 0xd8, /* Byte value: 0x57 */ + 0xe8, 0x42, 0xe6, 0xab, 0x37, 0x24, 0xa2, 0xb1, 0xe8, 0x8e, 0x9a, 0x9a, 0x68, 0x9a, 0x3e, 0x05, /* Byte value: 0x58 */ + 0xf4, 0x78, 0x20, 0x18, 0x96, 0xbd, 0xa6, 0x8d, 0xf4, 0x31, 0x70, 0x70, 0x11, 0x70, 0x28, 0x81, /* Byte value: 0x59 */ + 0x5a, 0xbb, 0x74, 0x27, 0x50, 0xe4, 0x1e, 0xaa, 0x5a, 0x05, 0xa6, 0xa6, 0x71, 0xa6, 0x69, 0x58, /* Byte value: 0x5a */ + 0x66, 0x06, 0xd6, 0xbf, 0xea, 0x26, 0x57, 0x9b, 0x66, 0x46, 0xd2, 0xd2, 0x90, 0xd2, 0x02, 0x25, /* Byte value: 0x5b */ + 0xf0, 0xb1, 0xcd, 0x25, 0xad, 0x8e, 0xe7, 0xc4, 0xf0, 0xcf, 0x13, 0x13, 0x02, 0x13, 0x6f, 0x37, /* Byte value: 0x5c */ + 0xcb, 0xe3, 0xbf, 0xe1, 0xae, 0xfb, 0xaf, 0xfa, 0xcb, 0xd3, 0xbc, 0xbc, 0x6c, 0xbc, 0xe0, 0x6b, /* Byte value: 0x5d */ + 0x0e, 0x1d, 0x63, 0xb8, 0xb1, 0xad, 0x02, 0x1e, 0x0e, 0xbe, 0x75, 0x75, 0xdd, 0x75, 0x0b, 0x42, /* Byte value: 0x5e */ + 0xe2, 0x96, 0x68, 0x2e, 0xbd, 0xba, 0xe1, 0xe6, 0xe2, 0xce, 0x8c, 0x8c, 0xa6, 0x8c, 0x72, 0xf1, /* Byte value: 0x5f */ + 0x57, 0x80, 0x2a, 0xfe, 0x63, 0xcd, 0x5c, 0xf2, 0x57, 0x1a, 0x6b, 0x6b, 0x30, 0x6b, 0xc1, 0x8d, /* Byte value: 0x60 */ + 0xc4, 0x5d, 0x76, 0xc7, 0x61, 0x2a, 0x2c, 0x67, 0xc4, 0xb3, 0xa1, 0xa1, 0xc5, 0xa1, 0x8a, 0xe5, /* Byte value: 
0x61 */ + 0x9b, 0x8c, 0x45, 0x43, 0x74, 0x81, 0xf2, 0x07, 0x9b, 0x96, 0x0c, 0x0c, 0xd3, 0x0c, 0xc5, 0xc7, /* Byte value: 0x62 */ + 0xee, 0x0e, 0x9c, 0x69, 0xf0, 0xef, 0x22, 0x3d, 0xee, 0x0f, 0x29, 0x29, 0x93, 0x29, 0xbb, 0xe8, /* Byte value: 0x63 */ + 0x1a, 0x76, 0xbc, 0x71, 0x66, 0x52, 0x84, 0xb0, 0x1a, 0x3e, 0x59, 0x59, 0x82, 0x59, 0x93, 0x69, /* Byte value: 0x64 */ + 0x37, 0xca, 0x86, 0x83, 0x4e, 0x20, 0x8b, 0xe5, 0x37, 0xdd, 0x0a, 0x0a, 0x5b, 0x0a, 0x46, 0x45, /* Byte value: 0x65 */ + 0x2e, 0x9a, 0x07, 0x93, 0xaa, 0xf6, 0x4f, 0x13, 0x2e, 0x42, 0xeb, 0xeb, 0x45, 0xeb, 0x76, 0xbb, /* Byte value: 0x66 */ + 0x69, 0xb8, 0x1f, 0x99, 0x25, 0xf7, 0xd4, 0x06, 0x69, 0x26, 0xcf, 0xcf, 0x39, 0xcf, 0x68, 0xab, /* Byte value: 0x67 */ + 0x8b, 0x2e, 0x77, 0xb7, 0x98, 0x4d, 0x35, 0xe0, 0x8b, 0xe8, 0x43, 0x43, 0x9f, 0x43, 0x1a, 0x5a, /* Byte value: 0x68 */ + 0xaf, 0x60, 0xfe, 0xa1, 0xb8, 0x25, 0x39, 0xa4, 0xaf, 0xea, 0xbe, 0xbe, 0x14, 0xbe, 0x20, 0x15, /* Byte value: 0x69 */ + 0xc2, 0x11, 0x0c, 0x05, 0xa6, 0xe1, 0xac, 0xeb, 0xc2, 0x32, 0x12, 0x12, 0x3e, 0x12, 0x0f, 0x08, /* Byte value: 0x6a */ + 0x7f, 0x56, 0x57, 0xaf, 0x0e, 0xf0, 0x93, 0x6d, 0x7f, 0xd9, 0x33, 0x33, 0x8e, 0x33, 0x32, 0xdb, /* Byte value: 0x6b */ + 0x64, 0x83, 0x41, 0x40, 0x16, 0xde, 0x96, 0x5e, 0x64, 0x39, 0x02, 0x02, 0x78, 0x02, 0xc0, 0x7e, /* Byte value: 0x6c */ + 0x45, 0xa7, 0x8f, 0xf5, 0x73, 0xf9, 0x5a, 0xd0, 0x45, 0x1b, 0xf4, 0xf4, 0x94, 0xf4, 0xdc, 0x4b, /* Byte value: 0x6d */ + 0x14, 0x6b, 0xdf, 0xc9, 0xd7, 0xff, 0x86, 0xae, 0x14, 0x80, 0x2c, 0x2c, 0x5f, 0x2c, 0x98, 0x2b, /* Byte value: 0x6e */ + 0xfd, 0x8a, 0x93, 0xfc, 0x9e, 0xa7, 0xa5, 0x9c, 0xfd, 0xd0, 0xde, 0xde, 0x43, 0xde, 0xc7, 0xe2, /* Byte value: 0x6f */ + 0xac, 0x46, 0xc3, 0xc0, 0x3a, 0xa1, 0x79, 0xe2, 0xac, 0x4b, 0x06, 0x06, 0x88, 0x06, 0x83, 0x82, /* Byte value: 0x70 */ + 0x86, 0x15, 0x29, 0x6e, 0xab, 0x64, 0x77, 0xb8, 0x86, 0xf7, 0x8e, 0x8e, 0xde, 0x8e, 0xb2, 0x8f, /* Byte value: 0x71 */ + 0x76, 0xa4, 0xe4, 0x4b, 0x06, 0xea, 0x90, 0x7c, 0x76, 0x38, 0x9d, 0x9d, 0xdc, 0x9d, 0xdd, 0xb8, /* Byte value: 0x72 */ + 0x43, 0xeb, 0xf5, 0x37, 0xb4, 0x32, 0xda, 0x5c, 0x43, 0x9a, 0x47, 0x47, 0x6f, 0x47, 0x59, 0xa6, /* Byte value: 0x73 */ + 0x50, 0x6f, 0xfa, 0xa2, 0xda, 0x7a, 0x5d, 0xfd, 0x50, 0x45, 0xb0, 0xb0, 0xbf, 0xb0, 0x25, 0xac, /* Byte value: 0x74 */ + 0x28, 0xd6, 0x7d, 0x51, 0x6d, 0x3d, 0xcf, 0x9f, 0x28, 0xc3, 0x58, 0x58, 0xbe, 0x58, 0xf3, 0x56, /* Byte value: 0x75 */ + 0xaa, 0x0a, 0xb9, 0x02, 0xfd, 0x6a, 0xf9, 0x6e, 0xaa, 0xca, 0xb5, 0xb5, 0x73, 0xb5, 0x06, 0x6f, /* Byte value: 0x76 */ + 0x5e, 0x72, 0x99, 0x1a, 0x6b, 0xd7, 0x5f, 0xe3, 0x5e, 0xfb, 0xc5, 0xc5, 0x62, 0xc5, 0x2e, 0xee, /* Byte value: 0x77 */ + 0xdd, 0x0d, 0xf7, 0xd7, 0x85, 0xfc, 0xe8, 0x91, 0xdd, 0x2c, 0x40, 0x40, 0xdb, 0x40, 0xba, 0x1b, /* Byte value: 0x78 */ + 0xbb, 0x0b, 0x21, 0x68, 0x6f, 0xda, 0xbf, 0x0a, 0xbb, 0x6a, 0x92, 0x92, 0x4b, 0x92, 0xb8, 0x3e, /* Byte value: 0x79 */ + 0x96, 0xb7, 0x1b, 0x9a, 0x47, 0xa8, 0xb0, 0x5f, 0x96, 0x89, 0xc1, 0xc1, 0x92, 0xc1, 0x6d, 0x12, /* Byte value: 0x7a */ + 0x05, 0x6a, 0x47, 0xa3, 0x45, 0x4f, 0xc0, 0xca, 0x05, 0x20, 0x0b, 0x0b, 0x67, 0x0b, 0x26, 0x7a, /* Byte value: 0x7b */ + 0x7b, 0x9f, 0xba, 0x92, 0x35, 0xc3, 0xd2, 0x24, 0x7b, 0x27, 0x50, 0x50, 0x9d, 0x50, 0x75, 0x6d, /* Byte value: 0x7c */ + 0xf6, 0xfd, 0xb7, 0xe7, 0x6a, 0x45, 0x67, 0x48, 0xf6, 0x4e, 0xa0, 0xa0, 0xf9, 0xa0, 0xea, 0xda, /* Byte value: 0x7d */ + 0x2b, 0xf0, 0x40, 0x30, 0xef, 0xb9, 0x8f, 0xd9, 0x2b, 0x62, 0xe0, 0xe0, 0x22, 0xe0, 0x50, 0xc1, /* Byte value: 0x7e */ + 0x09, 0xf2, 0xb3, 0xe4, 0x08, 0x1a, 
0x03, 0x11, 0x09, 0xe1, 0xae, 0xae, 0x52, 0xae, 0xef, 0x63, /* Byte value: 0x7f */ + 0xc7, 0x7b, 0x4b, 0xa6, 0xe3, 0xae, 0x6c, 0x21, 0xc7, 0x12, 0x19, 0x19, 0x59, 0x19, 0x29, 0x72, /* Byte value: 0x80 */ + 0x46, 0x81, 0xb2, 0x94, 0xf1, 0x7d, 0x1a, 0x96, 0x46, 0xba, 0x4c, 0x4c, 0x08, 0x4c, 0x7f, 0xdc, /* Byte value: 0x81 */ + 0x0d, 0x3b, 0x5e, 0xd9, 0x33, 0x29, 0x42, 0x58, 0x0d, 0x1f, 0xcd, 0xcd, 0x41, 0xcd, 0xa8, 0xd5, /* Byte value: 0x82 */ + 0x13, 0x84, 0x0f, 0x95, 0x6e, 0x48, 0x87, 0xa1, 0x13, 0xdf, 0xf7, 0xf7, 0xd0, 0xf7, 0x7c, 0x0a, /* Byte value: 0x83 */ + 0x61, 0xe9, 0x06, 0xe3, 0x53, 0x91, 0x56, 0x94, 0x61, 0x19, 0x09, 0x09, 0x1f, 0x09, 0xe6, 0x04, /* Byte value: 0x84 */ + 0x01, 0xa3, 0xaa, 0x9e, 0x7e, 0x7c, 0x81, 0x83, 0x01, 0xde, 0x68, 0x68, 0x74, 0x68, 0x61, 0xcc, /* Byte value: 0x85 */ + 0xef, 0xad, 0x36, 0xf7, 0x8e, 0x93, 0xa3, 0xbe, 0xef, 0xd1, 0x41, 0x41, 0xe7, 0x41, 0xda, 0x24, /* Byte value: 0x86 */ + 0x8d, 0x62, 0x0d, 0x75, 0x5f, 0x86, 0xb5, 0x6c, 0x8d, 0x69, 0xf0, 0xf0, 0x64, 0xf0, 0x9f, 0xb7, /* Byte value: 0x87 */ + 0xd3, 0x10, 0x94, 0x6f, 0x34, 0x51, 0xea, 0x8f, 0xd3, 0x92, 0x35, 0x35, 0x06, 0x35, 0xb1, 0x59, /* Byte value: 0x88 */ + 0xbc, 0xe4, 0xf1, 0x34, 0xd6, 0x6d, 0xbe, 0x05, 0xbc, 0x35, 0x49, 0x49, 0xc4, 0x49, 0x5c, 0x1f, /* Byte value: 0x89 */ + 0xf1, 0x12, 0x67, 0xbb, 0xd3, 0xf2, 0x66, 0x47, 0xf1, 0x11, 0x7b, 0x7b, 0x76, 0x7b, 0x0e, 0xfb, /* Byte value: 0x8a */ + 0x32, 0xa0, 0xc1, 0x20, 0x0b, 0x6f, 0x4b, 0x2f, 0x32, 0xfd, 0x01, 0x01, 0x3c, 0x01, 0x60, 0x3f, /* Byte value: 0x8b */ + 0x5c, 0xf7, 0x0e, 0xe5, 0x97, 0x2f, 0x9e, 0x26, 0x5c, 0x84, 0x15, 0x15, 0x8a, 0x15, 0xec, 0xb5, /* Byte value: 0x8c */ + 0x02, 0x85, 0x97, 0xff, 0xfc, 0xf8, 0xc1, 0xc5, 0x02, 0x7f, 0xd0, 0xd0, 0xe8, 0xd0, 0xc2, 0x5b, /* Byte value: 0x8d */ + 0xb1, 0xdf, 0xaf, 0xed, 0xe5, 0x44, 0xfc, 0x5d, 0xb1, 0x2a, 0x84, 0x84, 0x85, 0x84, 0xf4, 0xca, /* Byte value: 0x8e */ + 0x56, 0x23, 0x80, 0x60, 0x1d, 0xb1, 0xdd, 0x71, 0x56, 0xc4, 0x03, 0x03, 0x44, 0x03, 0xa0, 0x41, /* Byte value: 0x8f */ + 0x51, 0xcc, 0x50, 0x3c, 0xa4, 0x06, 0xdc, 0x7e, 0x51, 0x9b, 0xd8, 0xd8, 0xcb, 0xd8, 0x44, 0x60, /* Byte value: 0x90 */ + 0xc9, 0x66, 0x28, 0x1e, 0x52, 0x03, 0x6e, 0x3f, 0xc9, 0xac, 0x6c, 0x6c, 0x84, 0x6c, 0x22, 0x30, /* Byte value: 0x91 */ + 0xd6, 0x7a, 0xd3, 0xcc, 0x71, 0x1e, 0x2a, 0x45, 0xd6, 0xb2, 0x3e, 0x3e, 0x61, 0x3e, 0x97, 0x23, /* Byte value: 0x92 */ + 0x4e, 0xd0, 0xab, 0xee, 0x87, 0x1b, 0x98, 0x04, 0x4e, 0x85, 0x8a, 0x8a, 0x2e, 0x8a, 0xf1, 0x73, /* Byte value: 0x93 */ + 0xc8, 0xc5, 0x82, 0x80, 0x2c, 0x7f, 0xef, 0xbc, 0xc8, 0x72, 0x04, 0x04, 0xf0, 0x04, 0x43, 0xfc, /* Byte value: 0x94 */ + 0xa0, 0xde, 0x37, 0x87, 0x77, 0xf4, 0xba, 0x39, 0xa0, 0x8a, 0xa3, 0xa3, 0xbd, 0xa3, 0x4a, 0x9b, /* Byte value: 0x95 */ + 0x31, 0x86, 0xfc, 0x41, 0x89, 0xeb, 0x0b, 0x69, 0x31, 0x5c, 0xb9, 0xb9, 0xa0, 0xb9, 0xc3, 0xa8, /* Byte value: 0x96 */ + 0x22, 0x02, 0xf3, 0xd4, 0xe7, 0xa3, 0x8c, 0xc8, 0x22, 0x83, 0x4e, 0x4e, 0x70, 0x4e, 0xbf, 0xa2, /* Byte value: 0x97 */ + 0xa5, 0xb4, 0x70, 0x24, 0x32, 0xbb, 0x7a, 0xf3, 0xa5, 0xaa, 0xa8, 0xa8, 0xda, 0xa8, 0x6c, 0xe1, /* Byte value: 0x98 */ + 0x17, 0x4d, 0xe2, 0xa8, 0x55, 0x7b, 0xc6, 0xe8, 0x17, 0x21, 0x94, 0x94, 0xc3, 0x94, 0x3b, 0xbc, /* Byte value: 0x99 */ + 0x33, 0x03, 0x6b, 0xbe, 0x75, 0x13, 0xca, 0xac, 0x33, 0x23, 0x69, 0x69, 0x48, 0x69, 0x01, 0xf3, /* Byte value: 0x9a */ + 0x94, 0x32, 0x8c, 0x65, 0xbb, 0x50, 0x71, 0x9a, 0x94, 0xf6, 0x11, 0x11, 0x7a, 0x11, 0xaf, 0x49, /* Byte value: 0x9b */ + 0xeb, 0x64, 0xdb, 0xca, 0xb5, 0xa0, 0xe2, 0xf7, 0xeb, 0x2f, 0x22, 0x22, 0xf4, 
0x22, 0x9d, 0x92, /* Byte value: 0x9c */ + 0x84, 0x90, 0xbe, 0x91, 0x57, 0x9c, 0xb6, 0x7d, 0x84, 0x88, 0x5e, 0x5e, 0x36, 0x5e, 0x70, 0xd4, /* Byte value: 0x9d */ + 0x08, 0x51, 0x19, 0x7a, 0x76, 0x66, 0x82, 0x92, 0x08, 0x3f, 0xc6, 0xc6, 0x26, 0xc6, 0x8e, 0xaf, /* Byte value: 0x9e */ + 0x25, 0xed, 0x23, 0x88, 0x5e, 0x14, 0x8d, 0xc7, 0x25, 0xdc, 0x95, 0x95, 0xff, 0x95, 0x5b, 0x83, /* Byte value: 0x9f */ + 0xf9, 0x43, 0x7e, 0xc1, 0xa5, 0x94, 0xe4, 0xd5, 0xf9, 0x2e, 0xbd, 0xbd, 0x50, 0xbd, 0x80, 0x54, /* Byte value: 0xa0 */ + 0x39, 0xd7, 0xe5, 0x3b, 0xff, 0x8d, 0x89, 0xfb, 0x39, 0x63, 0x7f, 0x7f, 0x86, 0x7f, 0x4d, 0x07, /* Byte value: 0xa1 */ + 0x60, 0x4a, 0xac, 0x7d, 0x2d, 0xed, 0xd7, 0x17, 0x60, 0xc7, 0x61, 0x61, 0x6b, 0x61, 0x87, 0xc8, /* Byte value: 0xa2 */ + 0xb2, 0xf9, 0x92, 0x8c, 0x67, 0xc0, 0xbc, 0x1b, 0xb2, 0x8b, 0x3c, 0x3c, 0x19, 0x3c, 0x57, 0x5d, /* Byte value: 0xa3 */ + 0xd4, 0xff, 0x44, 0x33, 0x8d, 0xe6, 0xeb, 0x80, 0xd4, 0xcd, 0xee, 0xee, 0x89, 0xee, 0x55, 0x78, /* Byte value: 0xa4 */ + 0x6f, 0xf4, 0x65, 0x5b, 0xe2, 0x3c, 0x54, 0x8a, 0x6f, 0xa7, 0x7c, 0x7c, 0xc2, 0x7c, 0xed, 0x46, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x6d, 0x71, 0xf2, 0xa4, 0x1e, 0xc4, 0x95, 0x4f, 0x6d, 0xd8, 0xac, 0xac, 0x2a, 0xac, 0x2f, 0x1d, /* Byte value: 0xa7 */ + 0xa1, 0x7d, 0x9d, 0x19, 0x09, 0x88, 0x3b, 0xba, 0xa1, 0x54, 0xcb, 0xcb, 0xc9, 0xcb, 0x2b, 0x57, /* Byte value: 0xa8 */ + 0x12, 0x27, 0xa5, 0x0b, 0x10, 0x34, 0x06, 0x22, 0x12, 0x01, 0x9f, 0x9f, 0xa4, 0x9f, 0x1d, 0xc6, /* Byte value: 0xa9 */ + 0xd8, 0x67, 0xb0, 0x74, 0xc0, 0xb3, 0x28, 0x5b, 0xd8, 0x0c, 0x4b, 0x4b, 0xbc, 0x4b, 0x9c, 0x61, /* Byte value: 0xaa */ + 0xa3, 0xf8, 0x0a, 0xe6, 0xf5, 0x70, 0xfa, 0x7f, 0xa3, 0x2b, 0x1b, 0x1b, 0x21, 0x1b, 0xe9, 0x0c, /* Byte value: 0xab */ + 0xa4, 0x17, 0xda, 0xba, 0x4c, 0xc7, 0xfb, 0x70, 0xa4, 0x74, 0xc0, 0xc0, 0xae, 0xc0, 0x0d, 0x2d, /* Byte value: 0xac */ + 0xcf, 0x2a, 0x52, 0xdc, 0x95, 0xc8, 0xee, 0xb3, 0xcf, 0x2d, 0xdf, 0xdf, 0x7f, 0xdf, 0xa7, 0xdd, /* Byte value: 0xad */ + 0x9a, 0x2f, 0xef, 0xdd, 0x0a, 0xfd, 0x73, 0x84, 0x9a, 0x48, 0x64, 0x64, 0xa7, 0x64, 0xa4, 0x0b, /* Byte value: 0xae */ + 0xcc, 0x0c, 0x6f, 0xbd, 0x17, 0x4c, 0xae, 0xf5, 0xcc, 0x8c, 0x67, 0x67, 0xe3, 0x67, 0x04, 0x4a, /* Byte value: 0xaf */ + 0x52, 0xea, 0x6d, 0x5d, 0x26, 0x82, 0x9c, 0x38, 0x52, 0x3a, 0x60, 0x60, 0x57, 0x60, 0xe7, 0xf7, /* Byte value: 0xb0 */ + 0x1c, 0x3a, 0xc6, 0xb3, 0xa1, 0x99, 0x04, 0x3c, 0x1c, 0xbf, 0xea, 0xea, 0x79, 0xea, 0x16, 0x84, /* Byte value: 0xb1 */ + 0x2f, 0x39, 0xad, 0x0d, 0xd4, 0x8a, 0xce, 0x90, 0x2f, 0x9c, 0x83, 0x83, 0x31, 0x83, 0x17, 0x77, /* Byte value: 0xb2 */ + 0x7e, 0xf5, 0xfd, 0x31, 0x70, 0x8c, 0x12, 0xee, 0x7e, 0x07, 0x5b, 0x5b, 0xfa, 0x5b, 0x53, 0x17, /* Byte value: 0xb3 */ + 0x93, 0xdd, 0x5c, 0x39, 0x02, 0xe7, 0x70, 0x95, 0x93, 0xa9, 0xca, 0xca, 0xf5, 0xca, 0x4b, 0x68, /* Byte value: 0xb4 */ + 0x42, 0x48, 0x5f, 0xa9, 0xca, 0x4e, 0x5b, 0xdf, 0x42, 0x44, 0x2f, 0x2f, 0x1b, 0x2f, 0x38, 0x6a, /* Byte value: 0xb5 */ + 0xb3, 0x5a, 0x38, 0x12, 0x19, 0xbc, 0x3d, 0x98, 0xb3, 0x55, 0x54, 0x54, 0x6d, 0x54, 0x36, 0x91, /* Byte value: 0xb6 */ + 0x18, 0xf3, 0x2b, 0x8e, 0x9a, 0xaa, 0x45, 0x75, 0x18, 0x41, 0x89, 0x89, 0x6a, 0x89, 0x51, 0x32, /* Byte value: 0xb7 */ + 0xb8, 0x2d, 0x1c, 0x09, 0xed, 0x5e, 0xff, 0x4c, 0xb8, 0xcb, 0x2a, 0x2a, 0xd7, 0x2a, 0x1b, 0xa9, /* Byte value: 0xb8 */ + 0xfc, 0x29, 0x39, 0x62, 0xe0, 0xdb, 0x24, 0x1f, 0xfc, 0x0e, 0xb6, 0xb6, 0x37, 0xb6, 0xa6, 0x2e, /* Byte value: 0xb9 */ + 
0x44, 0x04, 0x25, 0x6b, 0x0d, 0x85, 0xdb, 0x53, 0x44, 0xc5, 0x9c, 0x9c, 0xe0, 0x9c, 0xbd, 0x87, /* Byte value: 0xba */ + 0xe6, 0x5f, 0x85, 0x13, 0x86, 0x89, 0xa0, 0xaf, 0xe6, 0x30, 0xef, 0xef, 0xb5, 0xef, 0x35, 0x47, /* Byte value: 0xbb */ + 0x8c, 0xc1, 0xa7, 0xeb, 0x21, 0xfa, 0x34, 0xef, 0x8c, 0xb7, 0x98, 0x98, 0x10, 0x98, 0xfe, 0x7b, /* Byte value: 0xbc */ + 0xc6, 0xd8, 0xe1, 0x38, 0x9d, 0xd2, 0xed, 0xa2, 0xc6, 0xcc, 0x71, 0x71, 0x2d, 0x71, 0x48, 0xbe, /* Byte value: 0xbd */ + 0x63, 0x6c, 0x91, 0x1c, 0xaf, 0x69, 0x97, 0x51, 0x63, 0x66, 0xd9, 0xd9, 0xf7, 0xd9, 0x24, 0x5f, /* Byte value: 0xbe */ + 0xbf, 0xc2, 0xcc, 0x55, 0x54, 0xe9, 0xfe, 0x43, 0xbf, 0x94, 0xf1, 0xf1, 0x58, 0xf1, 0xff, 0x88, /* Byte value: 0xbf */ + 0xf2, 0x34, 0x5a, 0xda, 0x51, 0x76, 0x26, 0x01, 0xf2, 0xb0, 0xc3, 0xc3, 0xea, 0xc3, 0xad, 0x6c, /* Byte value: 0xc0 */ + 0x2c, 0x1f, 0x90, 0x6c, 0x56, 0x0e, 0x8e, 0xd6, 0x2c, 0x3d, 0x3b, 0x3b, 0xad, 0x3b, 0xb4, 0xe0, /* Byte value: 0xc1 */ + 0x11, 0x01, 0x98, 0x6a, 0x92, 0xb0, 0x46, 0x64, 0x11, 0xa0, 0x27, 0x27, 0x38, 0x27, 0xbe, 0x51, /* Byte value: 0xc2 */ + 0x73, 0xce, 0xa3, 0xe8, 0x43, 0xa5, 0x50, 0xb6, 0x73, 0x18, 0x96, 0x96, 0xbb, 0x96, 0xfb, 0xc2, /* Byte value: 0xc3 */ + 0xdc, 0xae, 0x5d, 0x49, 0xfb, 0x80, 0x69, 0x12, 0xdc, 0xf2, 0x28, 0x28, 0xaf, 0x28, 0xdb, 0xd7, /* Byte value: 0xc4 */ + 0xd9, 0xc4, 0x1a, 0xea, 0xbe, 0xcf, 0xa9, 0xd8, 0xd9, 0xd2, 0x23, 0x23, 0xc8, 0x23, 0xfd, 0xad, /* Byte value: 0xc5 */ + 0x92, 0x7e, 0xf6, 0xa7, 0x7c, 0x9b, 0xf1, 0x16, 0x92, 0x77, 0xa2, 0xa2, 0x81, 0xa2, 0x2a, 0xa4, /* Byte value: 0xc6 */ + 0x3e, 0x38, 0x35, 0x67, 0x46, 0x3a, 0x88, 0xf4, 0x3e, 0x3c, 0xa4, 0xa4, 0x09, 0xa4, 0xa9, 0x26, /* Byte value: 0xc7 */ + 0xd2, 0xb3, 0x3e, 0xf1, 0x4a, 0x2d, 0x6b, 0x0c, 0xd2, 0x4c, 0x5d, 0x5d, 0x72, 0x5d, 0xd0, 0x95, /* Byte value: 0xc8 */ + 0x59, 0x9d, 0x49, 0x46, 0xd2, 0x60, 0x5e, 0xec, 0x59, 0xa4, 0x1e, 0x1e, 0xed, 0x1e, 0xca, 0xcf, /* Byte value: 0xc9 */ + 0x75, 0x82, 0xd9, 0x2a, 0x84, 0x6e, 0xd0, 0x3a, 0x75, 0x99, 0x25, 0x25, 0x40, 0x25, 0x7e, 0x2f, /* Byte value: 0xca */ + 0x19, 0x50, 0x81, 0x10, 0xe4, 0xd6, 0xc4, 0xf6, 0x19, 0x9f, 0xe1, 0xe1, 0x1e, 0xe1, 0x30, 0xfe, /* Byte value: 0xcb */ + 0xb4, 0xb5, 0xe8, 0x4e, 0xa0, 0x0b, 0x3c, 0x97, 0xb4, 0x0a, 0x8f, 0x8f, 0xe2, 0x8f, 0xd2, 0xb0, /* Byte value: 0xcc */ + 0x81, 0xfa, 0xf9, 0x32, 0x12, 0xd3, 0x76, 0xb7, 0x81, 0xa8, 0x55, 0x55, 0x51, 0x55, 0x56, 0xae, /* Byte value: 0xcd */ + 0xe4, 0xda, 0x12, 0xec, 0x7a, 0x71, 0x61, 0x6a, 0xe4, 0x4f, 0x3f, 0x3f, 0x5d, 0x3f, 0xf7, 0x1c, /* Byte value: 0xce */ + 0xe5, 0x79, 0xb8, 0x72, 0x04, 0x0d, 0xe0, 0xe9, 0xe5, 0x91, 0x57, 0x57, 0x29, 0x57, 0x96, 0xd0, /* Byte value: 0xcf */ + 0x0f, 0xbe, 0xc9, 0x26, 0xcf, 0xd1, 0x83, 0x9d, 0x0f, 0x60, 0x1d, 0x1d, 0xa9, 0x1d, 0x6a, 0x8e, /* Byte value: 0xd0 */ + 0x47, 0x22, 0x18, 0x0a, 0x8f, 0x01, 0x9b, 0x15, 0x47, 0x64, 0x24, 0x24, 0x7c, 0x24, 0x1e, 0x10, /* Byte value: 0xd1 */ + 0x9c, 0x63, 0x95, 0x1f, 0xcd, 0x36, 0xf3, 0x08, 0x9c, 0xc9, 0xd7, 0xd7, 0x5c, 0xd7, 0x21, 0xe6, /* Byte value: 0xd2 */ + 0x2a, 0x53, 0xea, 0xae, 0x91, 0xc5, 0x0e, 0x5a, 0x2a, 0xbc, 0x88, 0x88, 0x56, 0x88, 0x31, 0x0d, /* Byte value: 0xd3 */ + 0xa8, 0x8f, 0x2e, 0xfd, 0x01, 0x92, 0x38, 0xab, 0xa8, 0xb5, 0x65, 0x65, 0x9b, 0x65, 0xc4, 0x34, /* Byte value: 0xd4 */ + 0x5b, 0x18, 0xde, 0xb9, 0x2e, 0x98, 0x9f, 0x29, 0x5b, 0xdb, 0xce, 0xce, 0x05, 0xce, 0x08, 0x94, /* Byte value: 0xd5 */ + 0x8f, 0xe7, 0x9a, 0x8a, 0xa3, 0x7e, 0x74, 0xa9, 0x8f, 0x16, 0x20, 0x20, 0x8c, 0x20, 0x5d, 0xec, /* Byte value: 0xd6 */ + 0x04, 0xc9, 0xed, 0x3d, 0x3b, 0x33, 0x41, 
0x49, 0x04, 0xfe, 0x63, 0x63, 0x13, 0x63, 0x47, 0xb6, /* Byte value: 0xd7 */ + 0xa6, 0x92, 0x4d, 0x45, 0xb0, 0x3f, 0x3a, 0xb5, 0xa6, 0x0b, 0x10, 0x10, 0x46, 0x10, 0xcf, 0x76, /* Byte value: 0xd8 */ + 0x72, 0x6d, 0x09, 0x76, 0x3d, 0xd9, 0xd1, 0x35, 0x72, 0xc6, 0xfe, 0xfe, 0xcf, 0xfe, 0x9a, 0x0e, /* Byte value: 0xd9 */ + 0xba, 0xa8, 0x8b, 0xf6, 0x11, 0xa6, 0x3e, 0x89, 0xba, 0xb4, 0xfa, 0xfa, 0x3f, 0xfa, 0xd9, 0xf2, /* Byte value: 0xda */ + 0x30, 0x25, 0x56, 0xdf, 0xf7, 0x97, 0x8a, 0xea, 0x30, 0x82, 0xd1, 0xd1, 0xd4, 0xd1, 0xa2, 0x64, /* Byte value: 0xdb */ + 0x4c, 0x55, 0x3c, 0x11, 0x7b, 0xe3, 0x59, 0xc1, 0x4c, 0xfa, 0x5a, 0x5a, 0xc6, 0x5a, 0x33, 0x28, /* Byte value: 0xdc */ + 0x82, 0xdc, 0xc4, 0x53, 0x90, 0x57, 0x36, 0xf1, 0x82, 0x09, 0xed, 0xed, 0xcd, 0xed, 0xf5, 0x39, /* Byte value: 0xdd */ + 0x6a, 0x9e, 0x22, 0xf8, 0xa7, 0x73, 0x94, 0x40, 0x6a, 0x87, 0x77, 0x77, 0xa5, 0x77, 0xcb, 0x3c, /* Byte value: 0xde */ + 0x88, 0x08, 0x4a, 0xd6, 0x1a, 0xc9, 0x75, 0xa6, 0x88, 0x49, 0xfb, 0xfb, 0x03, 0xfb, 0xb9, 0xcd, /* Byte value: 0xdf */ + 0x3c, 0xbd, 0xa2, 0x98, 0xba, 0xc2, 0x49, 0x31, 0x3c, 0x43, 0x74, 0x74, 0xe1, 0x74, 0x6b, 0x7d, /* Byte value: 0xe0 */ + 0x6e, 0x57, 0xcf, 0xc5, 0x9c, 0x40, 0xd5, 0x09, 0x6e, 0x79, 0x14, 0x14, 0xb6, 0x14, 0x8c, 0x8a, /* Byte value: 0xe1 */ + 0x0b, 0x77, 0x24, 0x1b, 0xf4, 0xe2, 0xc2, 0xd4, 0x0b, 0x9e, 0x7e, 0x7e, 0xba, 0x7e, 0x2d, 0x38, /* Byte value: 0xe2 */ + 0x9e, 0xe6, 0x02, 0xe0, 0x31, 0xce, 0x32, 0xcd, 0x9e, 0xb6, 0x07, 0x07, 0xb4, 0x07, 0xe3, 0xbd, /* Byte value: 0xe3 */ + 0x68, 0x1b, 0xb5, 0x07, 0x5b, 0x8b, 0x55, 0x85, 0x68, 0xf8, 0xa7, 0xa7, 0x4d, 0xa7, 0x09, 0x67, /* Byte value: 0xe4 */ + 0x67, 0xa5, 0x7c, 0x21, 0x94, 0x5a, 0xd6, 0x18, 0x67, 0x98, 0xba, 0xba, 0xe4, 0xba, 0x63, 0xe9, /* Byte value: 0xe5 */ + 0xc5, 0xfe, 0xdc, 0x59, 0x1f, 0x56, 0xad, 0xe4, 0xc5, 0x6d, 0xc9, 0xc9, 0xb1, 0xc9, 0xeb, 0x29, /* Byte value: 0xe6 */ + 0x74, 0x21, 0x73, 0xb4, 0xfa, 0x12, 0x51, 0xb9, 0x74, 0x47, 0x4d, 0x4d, 0x34, 0x4d, 0x1f, 0xe3, /* Byte value: 0xe7 */ + 0x40, 0xcd, 0xc8, 0x56, 0x36, 0xb6, 0x9a, 0x1a, 0x40, 0x3b, 0xff, 0xff, 0xf3, 0xff, 0xfa, 0x31, /* Byte value: 0xe8 */ + 0x0c, 0x98, 0xf4, 0x47, 0x4d, 0x55, 0xc3, 0xdb, 0x0c, 0xc1, 0xa5, 0xa5, 0x35, 0xa5, 0xc9, 0x19, /* Byte value: 0xe9 */ + 0xe7, 0xfc, 0x2f, 0x8d, 0xf8, 0xf5, 0x21, 0x2c, 0xe7, 0xee, 0x87, 0x87, 0xc1, 0x87, 0x54, 0x8b, /* Byte value: 0xea */ + 0x9d, 0xc0, 0x3f, 0x81, 0xb3, 0x4a, 0x72, 0x8b, 0x9d, 0x17, 0xbf, 0xbf, 0x28, 0xbf, 0x40, 0x2a, /* Byte value: 0xeb */ + 0x90, 0xfb, 0x61, 0x58, 0x80, 0x63, 0x30, 0xd3, 0x90, 0x08, 0x72, 0x72, 0x69, 0x72, 0xe8, 0xff, /* Byte value: 0xec */ + 0x7a, 0x3c, 0x10, 0x0c, 0x4b, 0xbf, 0x53, 0xa7, 0x7a, 0xf9, 0x38, 0x38, 0xe9, 0x38, 0x14, 0xa1, /* Byte value: 0xed */ + 0x3a, 0xf1, 0xd8, 0x5a, 0x7d, 0x09, 0xc9, 0xbd, 0x3a, 0xc2, 0xc7, 0xc7, 0x1a, 0xc7, 0xee, 0x90, /* Byte value: 0xee */ + 0x35, 0x4f, 0x11, 0x7c, 0xb2, 0xd8, 0x4a, 0x20, 0x35, 0xa2, 0xda, 0xda, 0xb3, 0xda, 0x84, 0x1e, /* Byte value: 0xef */ + 0x34, 0xec, 0xbb, 0xe2, 0xcc, 0xa4, 0xcb, 0xa3, 0x34, 0x7c, 0xb2, 0xb2, 0xc7, 0xb2, 0xe5, 0xd2, /* Byte value: 0xf0 */ + 0x9f, 0x45, 0xa8, 0x7e, 0x4f, 0xb2, 0xb3, 0x4e, 0x9f, 0x68, 0x6f, 0x6f, 0xc0, 0x6f, 0x82, 0x71, /* Byte value: 0xf1 */ + 0xb7, 0x93, 0xd5, 0x2f, 0x22, 0x8f, 0x7c, 0xd1, 0xb7, 0xab, 0x37, 0x37, 0x7e, 0x37, 0x71, 0x27, /* Byte value: 0xf2 */ + 0xf8, 0xe0, 0xd4, 0x5f, 0xdb, 0xe8, 0x65, 0x56, 0xf8, 0xf0, 0xd5, 0xd5, 0x24, 0xd5, 0xe1, 0x98, /* Byte value: 0xf3 */ + 0x4f, 0x73, 0x01, 0x70, 0xf9, 0x67, 0x19, 0x87, 0x4f, 0x5b, 0xe2, 0xe2, 0x5a, 0xe2, 
0x90, 0xbf, /* Byte value: 0xf4 */ + 0x6b, 0x3d, 0x88, 0x66, 0xd9, 0x0f, 0x15, 0xc3, 0x6b, 0x59, 0x1f, 0x1f, 0xd1, 0x1f, 0xaa, 0xf0, /* Byte value: 0xf5 */ + 0x83, 0x7f, 0x6e, 0xcd, 0xee, 0x2b, 0xb7, 0x72, 0x83, 0xd7, 0x85, 0x85, 0xb9, 0x85, 0x94, 0xf5, /* Byte value: 0xf6 */ + 0x5f, 0xd1, 0x33, 0x84, 0x15, 0xab, 0xde, 0x60, 0x5f, 0x25, 0xad, 0xad, 0x16, 0xad, 0x4f, 0x22, /* Byte value: 0xf7 */ + 0xa7, 0x31, 0xe7, 0xdb, 0xce, 0x43, 0xbb, 0x36, 0xa7, 0xd5, 0x78, 0x78, 0x32, 0x78, 0xae, 0xba, /* Byte value: 0xf8 */ + 0x1e, 0xbf, 0x51, 0x4c, 0x5d, 0x61, 0xc5, 0xf9, 0x1e, 0xc0, 0x3a, 0x3a, 0x91, 0x3a, 0xd4, 0xdf, /* Byte value: 0xf9 */ + 0x10, 0xa2, 0x32, 0xf4, 0xec, 0xcc, 0xc7, 0xe7, 0x10, 0x7e, 0x4f, 0x4f, 0x4c, 0x4f, 0xdf, 0x9d, /* Byte value: 0xfa */ + 0xc3, 0xb2, 0xa6, 0x9b, 0xd8, 0x9d, 0x2d, 0x68, 0xc3, 0xec, 0x7a, 0x7a, 0x4a, 0x7a, 0x6e, 0xc4, /* Byte value: 0xfb */ + 0x78, 0xb9, 0x87, 0xf3, 0xb7, 0x47, 0x92, 0x62, 0x78, 0x86, 0xe8, 0xe8, 0x01, 0xe8, 0xd6, 0xfa, /* Byte value: 0xfc */ + 0x2d, 0xbc, 0x3a, 0xf2, 0x28, 0x72, 0x0f, 0x55, 0x2d, 0xe3, 0x53, 0x53, 0xd9, 0x53, 0xd5, 0x2c, /* Byte value: 0xfd */ + 0x77, 0x07, 0x4e, 0xd5, 0x78, 0x96, 0x11, 0xff, 0x77, 0xe6, 0xf5, 0xf5, 0xa8, 0xf5, 0xbc, 0x74, /* Byte value: 0xfe */ + 0xe3, 0x35, 0xc2, 0xb0, 0xc3, 0xc6, 0x60, 0x65, 0xe3, 0x10, 0xe4, 0xe4, 0xd2, 0xe4, 0x13, 0x3d, /* Byte value: 0xff */ + 0x80, 0x59, 0x53, 0xac, 0x6c, 0xaf, 0xf7, 0x34, 0x80, 0x76, 0x3d, 0x3d, 0x25, 0x3d, 0x37, 0x62, /* Byte value: 0x00 */ + + /* Matrix row: 14 */ + 0x0a, 0xaf, 0x45, 0x6e, 0xf6, 0x73, 0x56, 0xfb, 0x47, 0x1c, 0xd5, 0xc8, 0x66, 0x2c, 0x64, 0xb5, /* Byte value: 0x01 */ + 0xca, 0x52, 0x68, 0xe5, 0x24, 0x5b, 0xd9, 0x08, 0xeb, 0x1e, 0xe0, 0x5e, 0xac, 0x6b, 0x2f, 0x0a, /* Byte value: 0x02 */ + 0xf3, 0x80, 0xf6, 0x75, 0x2e, 0x6a, 0xc5, 0xcb, 0x78, 0x58, 0xd0, 0xcf, 0x08, 0x95, 0x86, 0x89, /* Byte value: 0x03 */ + 0x17, 0x4e, 0xcb, 0x70, 0x06, 0xae, 0xb5, 0x41, 0x71, 0x83, 0x10, 0xce, 0xdd, 0xeb, 0x67, 0xc0, /* Byte value: 0x04 */ + 0x33, 0x7d, 0xdb, 0xfe, 0xfc, 0x42, 0x4a, 0x38, 0xd4, 0x5a, 0xe5, 0x59, 0xc2, 0xd2, 0xcd, 0x36, /* Byte value: 0x05 */ + 0x5f, 0x28, 0xeb, 0xaf, 0x31, 0xb5, 0x88, 0xb3, 0xf8, 0xf2, 0x39, 0x23, 0xe3, 0x99, 0xf0, 0xef, /* Byte value: 0x06 */ + 0xa3, 0xb1, 0x9b, 0x83, 0x92, 0x74, 0x30, 0x1f, 0x05, 0xb8, 0xb7, 0x40, 0xbe, 0x36, 0x20, 0x68, /* Byte value: 0x07 */ + 0x37, 0x3b, 0x27, 0xad, 0x21, 0xa2, 0xd7, 0x88, 0x43, 0x3d, 0xf8, 0xf7, 0xfe, 0x14, 0x9a, 0xaa, /* Byte value: 0x08 */ + 0x2a, 0xda, 0xa9, 0xb3, 0xd1, 0x7f, 0x34, 0x32, 0x75, 0xa2, 0x3d, 0xf1, 0x45, 0xd3, 0x99, 0xdf, /* Byte value: 0x09 */ + 0xc9, 0x81, 0x29, 0x49, 0x0d, 0x13, 0xc0, 0x7c, 0x14, 0xa5, 0x99, 0xc3, 0xbd, 0xd8, 0x80, 0x63, /* Byte value: 0x0a */ + 0x67, 0x0a, 0x4a, 0x5b, 0x9d, 0xbc, 0x22, 0x5c, 0x3e, 0xdd, 0x9f, 0x78, 0x48, 0xb7, 0x3c, 0x4b, /* Byte value: 0x0b */ + 0xd3, 0xf5, 0x1a, 0xa8, 0x09, 0x66, 0xa7, 0x02, 0x4a, 0xe6, 0x38, 0xf6, 0x2b, 0x6a, 0x7b, 0xe3, /* Byte value: 0x0c */ + 0x63, 0x4c, 0xb6, 0x08, 0x40, 0x5c, 0xbf, 0xec, 0xa9, 0xba, 0x82, 0xd6, 0x74, 0x71, 0x6b, 0xd7, /* Byte value: 0x0d */ + 0x84, 0x51, 0xca, 0xa1, 0x41, 0xd0, 0xd6, 0x12, 0x5f, 0xda, 0x3b, 0x4a, 0xb0, 0xbc, 0x25, 0xf7, /* Byte value: 0x0e */ + 0xf7, 0xc6, 0x0a, 0x26, 0xf3, 0x8a, 0x58, 0x7b, 0xef, 0x3f, 0xcd, 0x61, 0x34, 0x53, 0xd1, 0x15, /* Byte value: 0x0f */ + 0x3c, 0x64, 0x5d, 0xa7, 0x71, 0xe9, 0x37, 0x5f, 0x51, 0x48, 0xbb, 0xf5, 0x97, 0xe8, 0x9b, 0x38, /* Byte value: 0x10 */ + 0xea, 0x27, 0x84, 0x38, 0x03, 0x57, 0xbb, 0xc1, 0xd9, 0xa0, 0x08, 0x67, 0x8f, 0x94, 0xd2, 0x60, /* Byte 
value: 0x11 */ + 0x65, 0x29, 0x34, 0x93, 0x12, 0xcc, 0x8d, 0x04, 0x94, 0x0f, 0x70, 0x2f, 0x56, 0xd4, 0xf6, 0x05, /* Byte value: 0x12 */ + 0xd0, 0x26, 0x5b, 0x04, 0x20, 0x2e, 0xbe, 0x76, 0xb5, 0x5d, 0x41, 0x6b, 0x3a, 0xd9, 0xd4, 0x8a, /* Byte value: 0x13 */ + 0x9e, 0x25, 0xf9, 0x40, 0x45, 0xa5, 0xb1, 0x6c, 0x01, 0x99, 0x9a, 0x7f, 0x26, 0x0e, 0xde, 0x77, /* Byte value: 0x14 */ + 0x18, 0x57, 0x4d, 0x29, 0x8b, 0x05, 0xc8, 0x26, 0xf4, 0x91, 0x4e, 0x62, 0x88, 0xd1, 0x31, 0xce, /* Byte value: 0x15 */ + 0xf4, 0x15, 0x4b, 0x8a, 0xda, 0xc2, 0x41, 0x0f, 0x10, 0x84, 0xb4, 0xfc, 0x25, 0xe0, 0x7e, 0x7c, /* Byte value: 0x16 */ + 0xaf, 0x7b, 0x5c, 0x76, 0x36, 0x97, 0x54, 0x0c, 0x7f, 0x11, 0x90, 0x71, 0xfa, 0xbf, 0xd9, 0x0f, /* Byte value: 0x17 */ + 0xcc, 0x37, 0xea, 0x7e, 0x76, 0xcb, 0xeb, 0xe0, 0xd6, 0xab, 0x12, 0xa7, 0x8e, 0xce, 0xb2, 0xd8, /* Byte value: 0x18 */ + 0x7a, 0xeb, 0xc4, 0x45, 0x6d, 0x61, 0xc1, 0xe6, 0x08, 0x42, 0x5a, 0x7e, 0xf3, 0x70, 0x3f, 0x3e, /* Byte value: 0x19 */ + 0x83, 0xc4, 0x77, 0x5e, 0xb5, 0x78, 0x52, 0xd6, 0x37, 0x06, 0x5f, 0x79, 0x9d, 0xc9, 0xdd, 0x02, /* Byte value: 0x1a */ + 0x9d, 0xf6, 0xb8, 0xec, 0x6c, 0xed, 0xa8, 0x18, 0xfe, 0x22, 0xe3, 0xe2, 0x37, 0xbd, 0x71, 0x1e, /* Byte value: 0x1b */ + 0x81, 0xe7, 0x09, 0x96, 0x3a, 0x08, 0xfd, 0x8e, 0x9d, 0xd4, 0xb0, 0x2e, 0x83, 0xaa, 0x17, 0x4c, /* Byte value: 0x1c */ + 0xad, 0x58, 0x22, 0xbe, 0xb9, 0xe7, 0xfb, 0x54, 0xd5, 0xc3, 0x7f, 0x26, 0xe4, 0xdc, 0x13, 0x41, /* Byte value: 0x1d */ + 0xa9, 0x1e, 0xde, 0xed, 0x64, 0x07, 0x66, 0xe4, 0x42, 0xa4, 0x62, 0x88, 0xd8, 0x1a, 0x44, 0xdd, /* Byte value: 0x1e */ + 0xfc, 0x99, 0x70, 0x2c, 0xa3, 0xc1, 0xb8, 0xac, 0xfd, 0x4a, 0x8e, 0x63, 0x5d, 0xaf, 0xd0, 0x87, /* Byte value: 0x1f */ + 0x73, 0x97, 0xc0, 0x87, 0xb2, 0x5a, 0x8e, 0x69, 0xb0, 0xe5, 0xf6, 0x2b, 0x84, 0xef, 0xf4, 0xe2, /* Byte value: 0x20 */ + 0xb0, 0xb9, 0xac, 0xa0, 0x49, 0x3a, 0x18, 0xee, 0xe3, 0x5c, 0xba, 0x20, 0x5f, 0x1b, 0x10, 0x34, /* Byte value: 0x21 */ + 0x77, 0xd1, 0x3c, 0xd4, 0x6f, 0xba, 0x13, 0xd9, 0x27, 0x82, 0xeb, 0x85, 0xb8, 0x29, 0xa3, 0x7e, /* Byte value: 0x22 */ + 0xa5, 0xd4, 0x19, 0x18, 0xc0, 0xe4, 0x02, 0xf7, 0x38, 0x0d, 0x45, 0xb9, 0x9c, 0x93, 0xbd, 0xba, /* Byte value: 0x23 */ + 0x46, 0x8f, 0x99, 0xe2, 0x1c, 0x88, 0xf6, 0xb9, 0x59, 0x0a, 0xe1, 0x8b, 0x64, 0x98, 0xa4, 0x06, /* Byte value: 0x24 */ + 0x10, 0xdb, 0x76, 0x8f, 0xf2, 0x06, 0x31, 0x85, 0x19, 0x5f, 0x74, 0xfd, 0xf0, 0x9e, 0x9f, 0x35, /* Byte value: 0x25 */ + 0x2b, 0x2a, 0x96, 0xd7, 0x77, 0x47, 0x82, 0x1e, 0x20, 0xcb, 0xab, 0x3b, 0x4a, 0x03, 0xfc, 0xf8, /* Byte value: 0x26 */ + 0x87, 0x82, 0x8b, 0x0d, 0x68, 0x98, 0xcf, 0x66, 0xa0, 0x61, 0x42, 0xd7, 0xa1, 0x0f, 0x8a, 0x9e, /* Byte value: 0x27 */ + 0xf9, 0x2f, 0xb3, 0x1b, 0xd8, 0x19, 0x93, 0x30, 0x3f, 0x44, 0x05, 0x07, 0x6e, 0xb9, 0xe2, 0x3c, /* Byte value: 0x28 */ + 0xd8, 0xaa, 0x60, 0xa2, 0x59, 0x2d, 0x47, 0xd5, 0x58, 0x93, 0x7b, 0xf4, 0x42, 0x96, 0x7a, 0x71, /* Byte value: 0x29 */ + 0x80, 0x17, 0x36, 0xf2, 0x9c, 0x30, 0x4b, 0xa2, 0xc8, 0xbd, 0x26, 0xe4, 0x8c, 0x7a, 0x72, 0x6b, /* Byte value: 0x2a */ + 0x34, 0xe8, 0x66, 0x01, 0x08, 0xea, 0xce, 0xfc, 0xbc, 0x86, 0x81, 0x6a, 0xef, 0xa7, 0x35, 0xc3, /* Byte value: 0x2b */ + 0x31, 0x5e, 0xa5, 0x36, 0x73, 0x32, 0xe5, 0x60, 0x7e, 0x88, 0x0a, 0x0e, 0xdc, 0xb1, 0x07, 0x78, /* Byte value: 0x2c */ + 0x6f, 0x86, 0x71, 0xfd, 0xe4, 0xbf, 0xdb, 0xff, 0xd3, 0x13, 0xa5, 0xe7, 0x30, 0xf8, 0x92, 0xb0, /* Byte value: 0x2d */ + 0x4d, 0xd0, 0xe3, 0xe8, 0x4c, 0xc3, 0x16, 0x6e, 0x4b, 0x7f, 0xa2, 0x89, 0x0d, 0x64, 0xa5, 0x94, /* Byte value: 0x2e */ + 0xd5, 0x90, 0x98, 0x33, 
0x5b, 0xf6, 0x95, 0xea, 0x77, 0x53, 0xca, 0x0f, 0x09, 0xcf, 0xe6, 0x31, /* Byte value: 0x2f */ + 0xa6, 0x07, 0x58, 0xb4, 0xe9, 0xac, 0x1b, 0x83, 0xc7, 0xb6, 0x3c, 0x24, 0x8d, 0x20, 0x12, 0xd3, /* Byte value: 0x30 */ + 0xba, 0x16, 0xe9, 0xce, 0xbf, 0x49, 0x4e, 0x15, 0xa4, 0x40, 0x6f, 0xe8, 0x39, 0x37, 0x74, 0x81, /* Byte value: 0x31 */ + 0x62, 0xbc, 0x89, 0x6c, 0xe6, 0x64, 0x09, 0xc0, 0xfc, 0xd3, 0x14, 0x1c, 0x7b, 0xa1, 0x0e, 0xf0, /* Byte value: 0x32 */ + 0x9a, 0x63, 0x05, 0x13, 0x98, 0x45, 0x2c, 0xdc, 0x96, 0xfe, 0x87, 0xd1, 0x1a, 0xc8, 0x89, 0xeb, /* Byte value: 0x33 */ + 0x61, 0x6f, 0xc8, 0xc0, 0xcf, 0x2c, 0x10, 0xb4, 0x03, 0x68, 0x6d, 0x81, 0x6a, 0x12, 0xa1, 0x99, /* Byte value: 0x34 */ + 0x5d, 0x0b, 0x95, 0x67, 0xbe, 0xc5, 0x27, 0xeb, 0x52, 0x20, 0xd6, 0x74, 0xfd, 0xfa, 0x3a, 0xa1, /* Byte value: 0x35 */ + 0xa8, 0xee, 0xe1, 0x89, 0xc2, 0x3f, 0xd0, 0xc8, 0x17, 0xcd, 0xf4, 0x42, 0xd7, 0xca, 0x21, 0xfa, /* Byte value: 0x36 */ + 0x98, 0x40, 0x7b, 0xdb, 0x17, 0x35, 0x83, 0x84, 0x3c, 0x2c, 0x68, 0x86, 0x04, 0xab, 0x43, 0xa5, /* Byte value: 0x37 */ + 0x21, 0x85, 0xd3, 0xb9, 0x81, 0x34, 0xd4, 0xe5, 0x67, 0xd7, 0x7e, 0xf3, 0x2c, 0x2f, 0x98, 0x4d, /* Byte value: 0x38 */ + 0x6d, 0xa5, 0x0f, 0x35, 0x6b, 0xcf, 0x74, 0xa7, 0x79, 0xc1, 0x4a, 0xb0, 0x2e, 0x9b, 0x58, 0xfe, /* Byte value: 0x39 */ + 0xfa, 0xfc, 0xf2, 0xb7, 0xf1, 0x51, 0x8a, 0x44, 0xc0, 0xff, 0x7c, 0x9a, 0x7f, 0x0a, 0x4d, 0x55, /* Byte value: 0x3a */ + 0x1d, 0xe1, 0x8e, 0x1e, 0xf0, 0xdd, 0xe3, 0xba, 0x36, 0x9f, 0xc5, 0x06, 0xbb, 0xc7, 0x03, 0x75, /* Byte value: 0x3b */ + 0xe2, 0xab, 0xbf, 0x9e, 0x7a, 0x54, 0x42, 0x62, 0x34, 0x6e, 0x32, 0xf8, 0xf7, 0xdb, 0x7c, 0x9b, /* Byte value: 0x3c */ + 0x48, 0x66, 0x20, 0xdf, 0x37, 0x1b, 0x3d, 0xf2, 0x89, 0x71, 0x29, 0xed, 0x3e, 0x72, 0x97, 0x2f, /* Byte value: 0x3d */ + 0x93, 0x1f, 0x01, 0xd1, 0x47, 0x7e, 0x63, 0x53, 0x2e, 0x59, 0x2b, 0x84, 0x6d, 0x57, 0x42, 0x37, /* Byte value: 0x3e */ + 0xb3, 0x6a, 0xed, 0x0c, 0x60, 0x72, 0x01, 0x9a, 0x1c, 0xe7, 0xc3, 0xbd, 0x4e, 0xa8, 0xbf, 0x5d, /* Byte value: 0x3f */ + 0x57, 0xa4, 0xd0, 0x09, 0x48, 0xb6, 0x71, 0x10, 0x15, 0x3c, 0x03, 0xbc, 0x9b, 0xd6, 0x5e, 0x14, /* Byte value: 0x40 */ + 0x70, 0x44, 0x81, 0x2b, 0x9b, 0x12, 0x97, 0x1d, 0x4f, 0x5e, 0x8f, 0xb6, 0x95, 0x5c, 0x5b, 0x8b, /* Byte value: 0x41 */ + 0x19, 0xa7, 0x72, 0x4d, 0x2d, 0x3d, 0x7e, 0x0a, 0xa1, 0xf8, 0xd8, 0xa8, 0x87, 0x01, 0x54, 0xe9, /* Byte value: 0x42 */ + 0x6e, 0x76, 0x4e, 0x99, 0x42, 0x87, 0x6d, 0xd3, 0x86, 0x7a, 0x33, 0x2d, 0x3f, 0x28, 0xf7, 0x97, /* Byte value: 0x43 */ + 0xbc, 0x73, 0x6b, 0x55, 0xed, 0xd9, 0x7c, 0xfd, 0x99, 0xf5, 0x9d, 0x11, 0x1b, 0x92, 0xe9, 0x53, /* Byte value: 0x44 */ + 0x3d, 0x94, 0x62, 0xc3, 0xd7, 0xd1, 0x81, 0x73, 0x04, 0x21, 0x2d, 0x3f, 0x98, 0x38, 0xfe, 0x1f, /* Byte value: 0x45 */ + 0x13, 0x08, 0x37, 0x23, 0xdb, 0x4e, 0x28, 0xf1, 0xe6, 0xe4, 0x0d, 0x60, 0xe1, 0x2d, 0x30, 0x5c, /* Byte value: 0x46 */ + 0x86, 0x72, 0xb4, 0x69, 0xce, 0xa0, 0x79, 0x4a, 0xf5, 0x08, 0xd4, 0x1d, 0xae, 0xdf, 0xef, 0xb9, /* Byte value: 0x47 */ + 0xdb, 0x79, 0x21, 0x0e, 0x70, 0x65, 0x5e, 0xa1, 0xa7, 0x28, 0x02, 0x69, 0x53, 0x25, 0xd5, 0x18, /* Byte value: 0x48 */ + 0x4a, 0x45, 0x5e, 0x17, 0xb8, 0x6b, 0x92, 0xaa, 0x23, 0xa3, 0xc6, 0xba, 0x20, 0x11, 0x5d, 0x61, /* Byte value: 0x49 */ + 0x03, 0xd3, 0x41, 0xac, 0x29, 0x48, 0x19, 0x74, 0xff, 0xbb, 0x79, 0x9d, 0x11, 0xb3, 0xaf, 0x69, /* Byte value: 0x4a */ + 0x32, 0x8d, 0xe4, 0x9a, 0x5a, 0x7a, 0xfc, 0x14, 0x81, 0x33, 0x73, 0x93, 0xcd, 0x02, 0xa8, 0x11, /* Byte value: 0x4b */ + 0xbb, 0xe6, 0xd6, 0xaa, 0x19, 0x71, 0xf8, 0x39, 0xf1, 0x29, 0xf9, 
0x22, 0x36, 0xe7, 0x11, 0xa6, /* Byte value: 0x4c */ + 0x47, 0x7f, 0xa6, 0x86, 0xba, 0xb0, 0x40, 0x95, 0x0c, 0x63, 0x77, 0x41, 0x6b, 0x48, 0xc1, 0x21, /* Byte value: 0x4d */ + 0x59, 0x4d, 0x69, 0x34, 0x63, 0x25, 0xba, 0x5b, 0xc5, 0x47, 0xcb, 0xda, 0xc1, 0x3c, 0x6d, 0x3d, /* Byte value: 0x4e */ + 0x7e, 0xad, 0x38, 0x16, 0xb0, 0x81, 0x5c, 0x56, 0x9f, 0x25, 0x47, 0xd0, 0xcf, 0xb6, 0x68, 0xa2, /* Byte value: 0x4f */ + 0xe4, 0xce, 0x3d, 0x05, 0x28, 0xc4, 0x70, 0x8a, 0x09, 0xdb, 0xc0, 0x01, 0xd5, 0x7e, 0xe1, 0x49, /* Byte value: 0x50 */ + 0xc1, 0x0d, 0x12, 0xef, 0x74, 0x10, 0x39, 0xdf, 0xf9, 0x6b, 0xa3, 0x5c, 0xc5, 0x97, 0x2e, 0x98, /* Byte value: 0x51 */ + 0x45, 0x5c, 0xd8, 0x4e, 0x35, 0xc0, 0xef, 0xcd, 0xa6, 0xb1, 0x98, 0x16, 0x75, 0x2b, 0x0b, 0x6f, /* Byte value: 0x52 */ + 0x40, 0xea, 0x1b, 0x79, 0x4e, 0x18, 0xc4, 0x51, 0x64, 0xbf, 0x13, 0x72, 0x46, 0x3d, 0x39, 0xd4, /* Byte value: 0x53 */ + 0x9c, 0x06, 0x87, 0x88, 0xca, 0xd5, 0x1e, 0x34, 0xab, 0x4b, 0x75, 0x28, 0x38, 0x6d, 0x14, 0x39, /* Byte value: 0x54 */ + 0x2d, 0x4f, 0x14, 0x4c, 0x25, 0xd7, 0xb0, 0xf6, 0x1d, 0x7e, 0x59, 0xc2, 0x68, 0xa6, 0x61, 0x2a, /* Byte value: 0x55 */ + 0xda, 0x89, 0x1e, 0x6a, 0xd6, 0x5d, 0xe8, 0x8d, 0xf2, 0x41, 0x94, 0xa3, 0x5c, 0xf5, 0xb0, 0x3f, /* Byte value: 0x56 */ + 0x28, 0xf9, 0xd7, 0x7b, 0x5e, 0x0f, 0x9b, 0x6a, 0xdf, 0x70, 0xd2, 0xa6, 0x5b, 0xb0, 0x53, 0x91, /* Byte value: 0x57 */ + 0xc0, 0xfd, 0x2d, 0x8b, 0xd2, 0x28, 0x8f, 0xf3, 0xac, 0x02, 0x35, 0x96, 0xca, 0x47, 0x4b, 0xbf, /* Byte value: 0x58 */ + 0x76, 0x21, 0x03, 0xb0, 0xc9, 0x82, 0xa5, 0xf5, 0x72, 0xeb, 0x7d, 0x4f, 0xb7, 0xf9, 0xc6, 0x59, /* Byte value: 0x59 */ + 0xdf, 0x3f, 0xdd, 0x5d, 0xad, 0x85, 0xc3, 0x11, 0x30, 0x4f, 0x1f, 0xc7, 0x6f, 0xe3, 0x82, 0x84, /* Byte value: 0x5a */ + 0x8d, 0x2d, 0xce, 0x63, 0x9e, 0xeb, 0x99, 0x9d, 0xe7, 0x7d, 0x97, 0x1f, 0xc7, 0x23, 0xee, 0x2b, /* Byte value: 0x5b */ + 0x8b, 0x48, 0x4c, 0xf8, 0xcc, 0x7b, 0xab, 0x75, 0xda, 0xc8, 0x65, 0xe6, 0xe5, 0x86, 0x73, 0xf9, /* Byte value: 0x5c */ + 0x15, 0x6d, 0xb5, 0xb8, 0x89, 0xde, 0x1a, 0x19, 0xdb, 0x51, 0xff, 0x99, 0xc3, 0x88, 0xad, 0x8e, /* Byte value: 0x5d */ + 0x5b, 0x6e, 0x17, 0xfc, 0xec, 0x55, 0x15, 0x03, 0x6f, 0x95, 0x24, 0x8d, 0xdf, 0x5f, 0xa7, 0x73, /* Byte value: 0x5e */ + 0x66, 0xfa, 0x75, 0x3f, 0x3b, 0x84, 0x94, 0x70, 0x6b, 0xb4, 0x09, 0xb2, 0x47, 0x67, 0x59, 0x6c, /* Byte value: 0x5f */ + 0xb5, 0x0f, 0x6f, 0x97, 0x32, 0xe2, 0x33, 0x72, 0x21, 0x52, 0x31, 0x44, 0x6c, 0x0d, 0x22, 0x8f, /* Byte value: 0x60 */ + 0xe0, 0x88, 0xc1, 0x56, 0xf5, 0x24, 0xed, 0x3a, 0x9e, 0xbc, 0xdd, 0xaf, 0xe9, 0xb8, 0xb6, 0xd5, /* Byte value: 0x61 */ + 0x6c, 0x55, 0x30, 0x51, 0xcd, 0xf7, 0xc2, 0x8b, 0x2c, 0xa8, 0xdc, 0x7a, 0x21, 0x4b, 0x3d, 0xd9, /* Byte value: 0x62 */ + 0xa2, 0x41, 0xa4, 0xe7, 0x34, 0x4c, 0x86, 0x33, 0x50, 0xd1, 0x21, 0x8a, 0xb1, 0xe6, 0x45, 0x4f, /* Byte value: 0x63 */ + 0xd4, 0x60, 0xa7, 0x57, 0xfd, 0xce, 0x23, 0xc6, 0x22, 0x3a, 0x5c, 0xc5, 0x06, 0x1f, 0x83, 0x16, /* Byte value: 0x64 */ + 0x5a, 0x9e, 0x28, 0x98, 0x4a, 0x6d, 0xa3, 0x2f, 0x3a, 0xfc, 0xb2, 0x47, 0xd0, 0x8f, 0xc2, 0x54, /* Byte value: 0x65 */ + 0xbf, 0xa0, 0x2a, 0xf9, 0xc4, 0x91, 0x65, 0x89, 0x66, 0x4e, 0xe4, 0x8c, 0x0a, 0x21, 0x46, 0x3a, /* Byte value: 0x66 */ + 0x78, 0xc8, 0xba, 0x8d, 0xe2, 0x11, 0x6e, 0xbe, 0xa2, 0x90, 0xb5, 0x29, 0xed, 0x13, 0xf5, 0x70, /* Byte value: 0x67 */ + 0x1e, 0x32, 0xcf, 0xb2, 0xd9, 0x95, 0xfa, 0xce, 0xc9, 0x24, 0xbc, 0x9b, 0xaa, 0x74, 0xac, 0x1c, /* Byte value: 0x68 */ + 0x07, 0x95, 0xbd, 0xff, 0xf4, 0xa8, 0x84, 0xc4, 0x68, 0xdc, 0x64, 0x33, 0x2d, 0x75, 0xf8, 0xf5, /* Byte value: 
0x69 */ + 0x82, 0x34, 0x48, 0x3a, 0x13, 0x40, 0xe4, 0xfa, 0x62, 0x6f, 0xc9, 0xb3, 0x92, 0x19, 0xb8, 0x25, /* Byte value: 0x6a */ + 0x68, 0x13, 0xcc, 0x02, 0x10, 0x17, 0x5f, 0x3b, 0xbb, 0xcf, 0xc1, 0xd4, 0x1d, 0x8d, 0x6a, 0x45, /* Byte value: 0x6b */ + 0x12, 0xf8, 0x08, 0x47, 0x7d, 0x76, 0x9e, 0xdd, 0xb3, 0x8d, 0x9b, 0xaa, 0xee, 0xfd, 0x55, 0x7b, /* Byte value: 0x6c */ + 0x58, 0xbd, 0x56, 0x50, 0xc5, 0x1d, 0x0c, 0x77, 0x90, 0x2e, 0x5d, 0x10, 0xce, 0xec, 0x08, 0x1a, /* Byte value: 0x6d */ + 0x8f, 0x0e, 0xb0, 0xab, 0x11, 0x9b, 0x36, 0xc5, 0x4d, 0xaf, 0x78, 0x48, 0xd9, 0x40, 0x24, 0x65, /* Byte value: 0x6e */ + 0xe1, 0x78, 0xfe, 0x32, 0x53, 0x1c, 0x5b, 0x16, 0xcb, 0xd5, 0x4b, 0x65, 0xe6, 0x68, 0xd3, 0xf2, /* Byte value: 0x6f */ + 0x36, 0xcb, 0x18, 0xc9, 0x87, 0x9a, 0x61, 0xa4, 0x16, 0x54, 0x6e, 0x3d, 0xf1, 0xc4, 0xff, 0x8d, /* Byte value: 0x70 */ + 0x74, 0x02, 0x7d, 0x78, 0x46, 0xf2, 0x0a, 0xad, 0xd8, 0x39, 0x92, 0x18, 0xa9, 0x9a, 0x0c, 0x17, /* Byte value: 0x71 */ + 0xff, 0x4a, 0x31, 0x80, 0x8a, 0x89, 0xa1, 0xd8, 0x02, 0xf1, 0xf7, 0xfe, 0x4c, 0x1c, 0x7f, 0xee, /* Byte value: 0x72 */ + 0x3a, 0x01, 0xdf, 0x3c, 0x23, 0x79, 0x05, 0xb7, 0x6c, 0xfd, 0x49, 0x0c, 0xb5, 0x4d, 0x06, 0xea, /* Byte value: 0x73 */ + 0x79, 0x38, 0x85, 0xe9, 0x44, 0x29, 0xd8, 0x92, 0xf7, 0xf9, 0x23, 0xe3, 0xe2, 0xc3, 0x90, 0x57, /* Byte value: 0x74 */ + 0xdd, 0x1c, 0xa3, 0x95, 0x22, 0xf5, 0x6c, 0x49, 0x9a, 0x9d, 0xf0, 0x90, 0x71, 0x80, 0x48, 0xca, /* Byte value: 0x75 */ + 0x54, 0x77, 0x91, 0xa5, 0x61, 0xfe, 0x68, 0x64, 0xea, 0x87, 0x7a, 0x21, 0x8a, 0x65, 0xf1, 0x7d, /* Byte value: 0x76 */ + 0x22, 0x56, 0x92, 0x15, 0xa8, 0x7c, 0xcd, 0x91, 0x98, 0x6c, 0x07, 0x6e, 0x3d, 0x9c, 0x37, 0x24, /* Byte value: 0x77 */ + 0x05, 0xb6, 0xc3, 0x37, 0x7b, 0xd8, 0x2b, 0x9c, 0xc2, 0x0e, 0x8b, 0x64, 0x33, 0x16, 0x32, 0xbb, /* Byte value: 0x78 */ + 0x88, 0x9b, 0x0d, 0x54, 0xe5, 0x33, 0xb2, 0x01, 0x25, 0x73, 0x1c, 0x7b, 0xf4, 0x35, 0xdc, 0x90, /* Byte value: 0x79 */ + 0x06, 0x65, 0x82, 0x9b, 0x52, 0x90, 0x32, 0xe8, 0x3d, 0xb5, 0xf2, 0xf9, 0x22, 0xa5, 0x9d, 0xd2, /* Byte value: 0x7a */ + 0x53, 0xe2, 0x2c, 0x5a, 0x95, 0x56, 0xec, 0xa0, 0x82, 0x5b, 0x1e, 0x12, 0xa7, 0x10, 0x09, 0x88, /* Byte value: 0x7b */ + 0x95, 0x7a, 0x83, 0x4a, 0x15, 0xee, 0x51, 0xbb, 0x13, 0xec, 0xd9, 0x7d, 0x4f, 0xf2, 0xdf, 0xe5, /* Byte value: 0x7c */ + 0xe9, 0xf4, 0xc5, 0x94, 0x2a, 0x1f, 0xa2, 0xb5, 0x26, 0x1b, 0x71, 0xfa, 0x9e, 0x27, 0x7d, 0x09, /* Byte value: 0x7d */ + 0xec, 0x42, 0x06, 0xa3, 0x51, 0xc7, 0x89, 0x29, 0xe4, 0x15, 0xfa, 0x9e, 0xad, 0x31, 0x4f, 0xb2, /* Byte value: 0x7e */ + 0x97, 0x59, 0xfd, 0x82, 0x9a, 0x9e, 0xfe, 0xe3, 0xb9, 0x3e, 0x36, 0x2a, 0x51, 0x91, 0x15, 0xab, /* Byte value: 0x7f */ + 0xd1, 0xd6, 0x64, 0x60, 0x86, 0x16, 0x08, 0x5a, 0xe0, 0x34, 0xd7, 0xa1, 0x35, 0x09, 0xb1, 0xad, /* Byte value: 0x80 */ + 0x69, 0xe3, 0xf3, 0x66, 0xb6, 0x2f, 0xe9, 0x17, 0xee, 0xa6, 0x57, 0x1e, 0x12, 0x5d, 0x0f, 0x62, /* Byte value: 0x81 */ + 0x6a, 0x30, 0xb2, 0xca, 0x9f, 0x67, 0xf0, 0x63, 0x11, 0x1d, 0x2e, 0x83, 0x03, 0xee, 0xa0, 0x0b, /* Byte value: 0x82 */ + 0x43, 0x39, 0x5a, 0xd5, 0x67, 0x50, 0xdd, 0x25, 0x9b, 0x04, 0x6a, 0xef, 0x57, 0x8e, 0x96, 0xbd, /* Byte value: 0x83 */ + 0x41, 0x1a, 0x24, 0x1d, 0xe8, 0x20, 0x72, 0x7d, 0x31, 0xd6, 0x85, 0xb8, 0x49, 0xed, 0x5c, 0xf3, /* Byte value: 0x84 */ + 0xae, 0x8b, 0x63, 0x12, 0x90, 0xaf, 0xe2, 0x20, 0x2a, 0x78, 0x06, 0xbb, 0xf5, 0x6f, 0xbc, 0x28, /* Byte value: 0x85 */ + 0x0c, 0xca, 0xc7, 0xf5, 0xa4, 0xe3, 0x64, 0x13, 0x7a, 0xa9, 0x27, 0x31, 0x44, 0x89, 0xf9, 0x67, /* Byte value: 0x86 */ + 0x7c, 0x8e, 0x46, 0xde, 0x3f, 0xf1, 
0xf3, 0x0e, 0x35, 0xf7, 0xa8, 0x87, 0xd1, 0xd5, 0xa2, 0xec, /* Byte value: 0x87 */ + 0x5e, 0xd8, 0xd4, 0xcb, 0x97, 0x8d, 0x3e, 0x9f, 0xad, 0x9b, 0xaf, 0xe9, 0xec, 0x49, 0x95, 0xc8, /* Byte value: 0x88 */ + 0x44, 0xac, 0xe7, 0x2a, 0x93, 0xf8, 0x59, 0xe1, 0xf3, 0xd8, 0x0e, 0xdc, 0x7a, 0xfb, 0x6e, 0x48, /* Byte value: 0x89 */ + 0x25, 0xc3, 0x2f, 0xea, 0x5c, 0xd4, 0x49, 0x55, 0xf0, 0xb0, 0x63, 0x5d, 0x10, 0xe9, 0xcf, 0xd1, /* Byte value: 0x8a */ + 0x09, 0x7c, 0x04, 0xc2, 0xdf, 0x3b, 0x4f, 0x8f, 0xb8, 0xa7, 0xac, 0x55, 0x77, 0x9f, 0xcb, 0xdc, /* Byte value: 0x8b */ + 0xbd, 0x83, 0x54, 0x31, 0x4b, 0xe1, 0xca, 0xd1, 0xcc, 0x9c, 0x0b, 0xdb, 0x14, 0x42, 0x8c, 0x74, /* Byte value: 0x8c */ + 0x9f, 0xd5, 0xc6, 0x24, 0xe3, 0x9d, 0x07, 0x40, 0x54, 0xf0, 0x0c, 0xb5, 0x29, 0xde, 0xbb, 0x50, /* Byte value: 0x8d */ + 0x2e, 0x9c, 0x55, 0xe0, 0x0c, 0x9f, 0xa9, 0x82, 0xe2, 0xc5, 0x20, 0x5f, 0x79, 0x15, 0xce, 0x43, /* Byte value: 0x8e */ + 0x1b, 0x84, 0x0c, 0x85, 0xa2, 0x4d, 0xd1, 0x52, 0x0b, 0x2a, 0x37, 0xff, 0x99, 0x62, 0x9e, 0xa7, /* Byte value: 0x8f */ + 0xd7, 0xb3, 0xe6, 0xfb, 0xd4, 0x86, 0x3a, 0xb2, 0xdd, 0x81, 0x25, 0x58, 0x17, 0xac, 0x2c, 0x7f, /* Byte value: 0x90 */ + 0x8a, 0xb8, 0x73, 0x9c, 0x6a, 0x43, 0x1d, 0x59, 0x8f, 0xa1, 0xf3, 0x2c, 0xea, 0x56, 0x16, 0xde, /* Byte value: 0x91 */ + 0x0d, 0x3a, 0xf8, 0x91, 0x02, 0xdb, 0xd2, 0x3f, 0x2f, 0xc0, 0xb1, 0xfb, 0x4b, 0x59, 0x9c, 0x40, /* Byte value: 0x92 */ + 0x50, 0x31, 0x6d, 0xf6, 0xbc, 0x1e, 0xf5, 0xd4, 0x7d, 0xe0, 0x67, 0x8f, 0xb6, 0xa3, 0xa6, 0xe1, /* Byte value: 0x93 */ + 0x24, 0x33, 0x10, 0x8e, 0xfa, 0xec, 0xff, 0x79, 0xa5, 0xd9, 0xf5, 0x97, 0x1f, 0x39, 0xaa, 0xf6, /* Byte value: 0x94 */ + 0xf2, 0x70, 0xc9, 0x11, 0x88, 0x52, 0x73, 0xe7, 0x2d, 0x31, 0x46, 0x05, 0x07, 0x45, 0xe3, 0xae, /* Byte value: 0x95 */ + 0x38, 0x22, 0xa1, 0xf4, 0xac, 0x09, 0xaa, 0xef, 0xc6, 0x2f, 0xa6, 0x5b, 0xab, 0x2e, 0xcc, 0xa4, /* Byte value: 0x96 */ + 0x7b, 0x1b, 0xfb, 0x21, 0xcb, 0x59, 0x77, 0xca, 0x5d, 0x2b, 0xcc, 0xb4, 0xfc, 0xa0, 0x5a, 0x19, /* Byte value: 0x97 */ + 0xa1, 0x92, 0xe5, 0x4b, 0x1d, 0x04, 0x9f, 0x47, 0xaf, 0x6a, 0x58, 0x17, 0xa0, 0x55, 0xea, 0x26, /* Byte value: 0x98 */ + 0xbe, 0x50, 0x15, 0x9d, 0x62, 0xa9, 0xd3, 0xa5, 0x33, 0x27, 0x72, 0x46, 0x05, 0xf1, 0x23, 0x1d, /* Byte value: 0x99 */ + 0xa7, 0xf7, 0x67, 0xd0, 0x4f, 0x94, 0xad, 0xaf, 0x92, 0xdf, 0xaa, 0xee, 0x82, 0xf0, 0x77, 0xf4, /* Byte value: 0x9a */ + 0x99, 0xb0, 0x44, 0xbf, 0xb1, 0x0d, 0x35, 0xa8, 0x69, 0x45, 0xfe, 0x4c, 0x0b, 0x7b, 0x26, 0x82, /* Byte value: 0x9b */ + 0xf1, 0xa3, 0x88, 0xbd, 0xa1, 0x1a, 0x6a, 0x93, 0xd2, 0x8a, 0x3f, 0x98, 0x16, 0xf6, 0x4c, 0xc7, /* Byte value: 0x9c */ + 0xeb, 0xd7, 0xbb, 0x5c, 0xa5, 0x6f, 0x0d, 0xed, 0x8c, 0xc9, 0x9e, 0xad, 0x80, 0x44, 0xb7, 0x47, /* Byte value: 0x9d */ + 0x39, 0xd2, 0x9e, 0x90, 0x0a, 0x31, 0x1c, 0xc3, 0x93, 0x46, 0x30, 0x91, 0xa4, 0xfe, 0xa9, 0x83, /* Byte value: 0x9e */ + 0xb7, 0x2c, 0x11, 0x5f, 0xbd, 0x92, 0x9c, 0x2a, 0x8b, 0x80, 0xde, 0x13, 0x72, 0x6e, 0xe8, 0xc1, /* Byte value: 0x9f */ + 0x1c, 0x11, 0xb1, 0x7a, 0x56, 0xe5, 0x55, 0x96, 0x63, 0xf6, 0x53, 0xcc, 0xb4, 0x17, 0x66, 0x52, /* Byte value: 0xa0 */ + 0x01, 0xf0, 0x3f, 0x64, 0xa6, 0x38, 0xb6, 0x2c, 0x55, 0x69, 0x96, 0xca, 0x0f, 0xd0, 0x65, 0x27, /* Byte value: 0xa1 */ + 0xef, 0x91, 0x47, 0x0f, 0x78, 0x8f, 0x90, 0x5d, 0x1b, 0xae, 0x83, 0x03, 0xbc, 0x82, 0xe0, 0xdb, /* Byte value: 0xa2 */ + 0x1f, 0xc2, 0xf0, 0xd6, 0x7f, 0xad, 0x4c, 0xe2, 0x9c, 0x4d, 0x2a, 0x51, 0xa5, 0xa4, 0xc9, 0x3b, /* Byte value: 0xa3 */ + 0x92, 0xef, 0x3e, 0xb5, 0xe1, 0x46, 0xd5, 0x7f, 0x7b, 0x30, 0xbd, 0x4e, 0x62, 
0x87, 0x27, 0x10, /* Byte value: 0xa4 */ + 0x1a, 0x74, 0x33, 0xe1, 0x04, 0x75, 0x67, 0x7e, 0x5e, 0x43, 0xa1, 0x35, 0x96, 0xb2, 0xfb, 0x80, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x85, 0xa1, 0xf5, 0xc5, 0xe7, 0xe8, 0x60, 0x3e, 0x0a, 0xb3, 0xad, 0x80, 0xbf, 0x6c, 0x40, 0xd0, /* Byte value: 0xa7 */ + 0x5c, 0xfb, 0xaa, 0x03, 0x18, 0xfd, 0x91, 0xc7, 0x07, 0x49, 0x40, 0xbe, 0xf2, 0x2a, 0x5f, 0x86, /* Byte value: 0xa8 */ + 0xed, 0xb2, 0x39, 0xc7, 0xf7, 0xff, 0x3f, 0x05, 0xb1, 0x7c, 0x6c, 0x54, 0xa2, 0xe1, 0x2a, 0x95, /* Byte value: 0xa9 */ + 0x56, 0x54, 0xef, 0x6d, 0xee, 0x8e, 0xc7, 0x3c, 0x40, 0x55, 0x95, 0x76, 0x94, 0x06, 0x3b, 0x33, /* Byte value: 0xaa */ + 0xc3, 0x2e, 0x6c, 0x27, 0xfb, 0x60, 0x96, 0x87, 0x53, 0xb9, 0x4c, 0x0b, 0xdb, 0xf4, 0xe4, 0xd6, /* Byte value: 0xab */ + 0x0f, 0x19, 0x86, 0x59, 0x8d, 0xab, 0x7d, 0x67, 0x85, 0x12, 0x5e, 0xac, 0x55, 0x3a, 0x56, 0x0e, /* Byte value: 0xac */ + 0xe8, 0x04, 0xfa, 0xf0, 0x8c, 0x27, 0x14, 0x99, 0x73, 0x72, 0xe7, 0x30, 0x91, 0xf7, 0x18, 0x2e, /* Byte value: 0xad */ + 0xc2, 0xde, 0x53, 0x43, 0x5d, 0x58, 0x20, 0xab, 0x06, 0xd0, 0xda, 0xc1, 0xd4, 0x24, 0x81, 0xf1, /* Byte value: 0xae */ + 0xd9, 0x5a, 0x5f, 0xc6, 0xff, 0x15, 0xf1, 0xf9, 0x0d, 0xfa, 0xed, 0x3e, 0x4d, 0x46, 0x1f, 0x56, /* Byte value: 0xaf */ + 0xe6, 0xed, 0x43, 0xcd, 0xa7, 0xb4, 0xdf, 0xd2, 0xa3, 0x09, 0x2f, 0x56, 0xcb, 0x1d, 0x2b, 0x07, /* Byte value: 0xb0 */ + 0xb6, 0xdc, 0x2e, 0x3b, 0x1b, 0xaa, 0x2a, 0x06, 0xde, 0xe9, 0x48, 0xd9, 0x7d, 0xbe, 0x8d, 0xe6, /* Byte value: 0xb1 */ + 0x11, 0x2b, 0x49, 0xeb, 0x54, 0x3e, 0x87, 0xa9, 0x4c, 0x36, 0xe2, 0x37, 0xff, 0x4e, 0xfa, 0x12, /* Byte value: 0xb2 */ + 0xc6, 0x98, 0xaf, 0x10, 0x80, 0xb8, 0xbd, 0x1b, 0x91, 0xb7, 0xc7, 0x6f, 0xe8, 0xe2, 0xd6, 0x6d, /* Byte value: 0xb3 */ + 0x55, 0x87, 0xae, 0xc1, 0xc7, 0xc6, 0xde, 0x48, 0xbf, 0xee, 0xec, 0xeb, 0x85, 0xb5, 0x94, 0x5a, /* Byte value: 0xb4 */ + 0x94, 0x8a, 0xbc, 0x2e, 0xb3, 0xd6, 0xe7, 0x97, 0x46, 0x85, 0x4f, 0xb7, 0x40, 0x22, 0xba, 0xc2, /* Byte value: 0xb5 */ + 0xb1, 0x49, 0x93, 0xc4, 0xef, 0x02, 0xae, 0xc2, 0xb6, 0x35, 0x2c, 0xea, 0x50, 0xcb, 0x75, 0x13, /* Byte value: 0xb6 */ + 0x4b, 0xb5, 0x61, 0x73, 0x1e, 0x53, 0x24, 0x86, 0x76, 0xca, 0x50, 0x70, 0x2f, 0xc1, 0x38, 0x46, /* Byte value: 0xb7 */ + 0xb9, 0xc5, 0xa8, 0x62, 0x96, 0x01, 0x57, 0x61, 0x5b, 0xfb, 0x16, 0x75, 0x28, 0x84, 0xdb, 0xe8, /* Byte value: 0xb8 */ + 0x4f, 0xf3, 0x9d, 0x20, 0xc3, 0xb3, 0xb9, 0x36, 0xe1, 0xad, 0x4d, 0xde, 0x13, 0x07, 0x6f, 0xda, /* Byte value: 0xb9 */ + 0xf6, 0x36, 0x35, 0x42, 0x55, 0xb2, 0xee, 0x57, 0xba, 0x56, 0x5b, 0xab, 0x3b, 0x83, 0xb4, 0x32, /* Byte value: 0xba */ + 0x9b, 0x93, 0x3a, 0x77, 0x3e, 0x7d, 0x9a, 0xf0, 0xc3, 0x97, 0x11, 0x1b, 0x15, 0x18, 0xec, 0xcc, /* Byte value: 0xbb */ + 0xd2, 0x05, 0x25, 0xcc, 0xaf, 0x5e, 0x11, 0x2e, 0x1f, 0x8f, 0xae, 0x3c, 0x24, 0xba, 0x1e, 0xc4, /* Byte value: 0xbc */ + 0x7f, 0x5d, 0x07, 0x72, 0x16, 0xb9, 0xea, 0x7a, 0xca, 0x4c, 0xd1, 0x1a, 0xc0, 0x66, 0x0d, 0x85, /* Byte value: 0xbd */ + 0xde, 0xcf, 0xe2, 0x39, 0x0b, 0xbd, 0x75, 0x3d, 0x65, 0x26, 0x89, 0x0d, 0x60, 0x33, 0xe7, 0xa3, /* Byte value: 0xbe */ + 0x75, 0xf2, 0x42, 0x1c, 0xe0, 0xca, 0xbc, 0x81, 0x8d, 0x50, 0x04, 0xd2, 0xa6, 0x4a, 0x69, 0x30, /* Byte value: 0xbf */ + 0x14, 0x9d, 0x8a, 0xdc, 0x2f, 0xe6, 0xac, 0x35, 0x8e, 0x38, 0x69, 0x53, 0xcc, 0x58, 0xc8, 0xa9, /* Byte value: 0xc0 */ + 0x20, 0x75, 0xec, 0xdd, 0x27, 0x0c, 0x62, 0xc9, 0x32, 0xbe, 0xe8, 0x39, 0x23, 0xff, 0xfd, 0x6a, /* Byte value: 0xc1 */ + 
0xdc, 0xec, 0x9c, 0xf1, 0x84, 0xcd, 0xda, 0x65, 0xcf, 0xf4, 0x66, 0x5a, 0x7e, 0x50, 0x2d, 0xed, /* Byte value: 0xc2 */ + 0xac, 0xa8, 0x1d, 0xda, 0x1f, 0xdf, 0x4d, 0x78, 0x80, 0xaa, 0xe9, 0xec, 0xeb, 0x0c, 0x76, 0x66, /* Byte value: 0xc3 */ + 0xab, 0x3d, 0xa0, 0x25, 0xeb, 0x77, 0xc9, 0xbc, 0xe8, 0x76, 0x8d, 0xdf, 0xc6, 0x79, 0x8e, 0x93, /* Byte value: 0xc4 */ + 0xf8, 0xdf, 0x8c, 0x7f, 0x7e, 0x21, 0x25, 0x1c, 0x6a, 0x2d, 0x93, 0xcd, 0x61, 0x69, 0x87, 0x1b, /* Byte value: 0xc5 */ + 0xfb, 0x0c, 0xcd, 0xd3, 0x57, 0x69, 0x3c, 0x68, 0x95, 0x96, 0xea, 0x50, 0x70, 0xda, 0x28, 0x72, /* Byte value: 0xc6 */ + 0xcd, 0xc7, 0xd5, 0x1a, 0xd0, 0xf3, 0x5d, 0xcc, 0x83, 0xc2, 0x84, 0x6d, 0x81, 0x1e, 0xd7, 0xff, /* Byte value: 0xc7 */ + 0xf0, 0x53, 0xb7, 0xd9, 0x07, 0x22, 0xdc, 0xbf, 0x87, 0xe3, 0xa9, 0x52, 0x19, 0x26, 0x29, 0xe0, /* Byte value: 0xc8 */ + 0xee, 0x61, 0x78, 0x6b, 0xde, 0xb7, 0x26, 0x71, 0x4e, 0xc7, 0x15, 0xc9, 0xb3, 0x52, 0x85, 0xfc, /* Byte value: 0xc9 */ + 0xce, 0x14, 0x94, 0xb6, 0xf9, 0xbb, 0x44, 0xb8, 0x7c, 0x79, 0xfd, 0xf0, 0x90, 0xad, 0x78, 0x96, /* Byte value: 0xca */ + 0xe5, 0x3e, 0x02, 0x61, 0x8e, 0xfc, 0xc6, 0xa6, 0x5c, 0xb2, 0x56, 0xcb, 0xda, 0xae, 0x84, 0x6e, /* Byte value: 0xcb */ + 0x7d, 0x7e, 0x79, 0xba, 0x99, 0xc9, 0x45, 0x22, 0x60, 0x9e, 0x3e, 0x4d, 0xde, 0x05, 0xc7, 0xcb, /* Byte value: 0xcc */ + 0xb8, 0x35, 0x97, 0x06, 0x30, 0x39, 0xe1, 0x4d, 0x0e, 0x92, 0x80, 0xbf, 0x27, 0x54, 0xbe, 0xcf, /* Byte value: 0xcd */ + 0x04, 0x46, 0xfc, 0x53, 0xdd, 0xe0, 0x9d, 0xb0, 0x97, 0x67, 0x1d, 0xae, 0x3c, 0xc6, 0x57, 0x9c, /* Byte value: 0xce */ + 0xaa, 0xcd, 0x9f, 0x41, 0x4d, 0x4f, 0x7f, 0x90, 0xbd, 0x1f, 0x1b, 0x15, 0xc9, 0xa9, 0xeb, 0xb4, /* Byte value: 0xcf */ + 0xf5, 0xe5, 0x74, 0xee, 0x7c, 0xfa, 0xf7, 0x23, 0x45, 0xed, 0x22, 0x36, 0x2a, 0x30, 0x1b, 0x5b, /* Byte value: 0xd0 */ + 0xc7, 0x68, 0x90, 0x74, 0x26, 0x80, 0x0b, 0x37, 0xc4, 0xde, 0x51, 0xa5, 0xe7, 0x32, 0xb3, 0x4a, /* Byte value: 0xd1 */ + 0xa0, 0x62, 0xda, 0x2f, 0xbb, 0x3c, 0x29, 0x6b, 0xfa, 0x03, 0xce, 0xdd, 0xaf, 0x85, 0x8f, 0x01, /* Byte value: 0xd2 */ + 0x42, 0xc9, 0x65, 0xb1, 0xc1, 0x68, 0x6b, 0x09, 0xce, 0x6d, 0xfc, 0x25, 0x58, 0x5e, 0xf3, 0x9a, /* Byte value: 0xd3 */ + 0xcb, 0xa2, 0x57, 0x81, 0x82, 0x63, 0x6f, 0x24, 0xbe, 0x77, 0x76, 0x94, 0xa3, 0xbb, 0x4a, 0x2d, /* Byte value: 0xd4 */ + 0x71, 0xb4, 0xbe, 0x4f, 0x3d, 0x2a, 0x21, 0x31, 0x1a, 0x37, 0x19, 0x7c, 0x9a, 0x8c, 0x3e, 0xac, /* Byte value: 0xd5 */ + 0xe3, 0x5b, 0x80, 0xfa, 0xdc, 0x6c, 0xf4, 0x4e, 0x61, 0x07, 0xa4, 0x32, 0xf8, 0x0b, 0x19, 0xbc, /* Byte value: 0xd6 */ + 0xfd, 0x69, 0x4f, 0x48, 0x05, 0xf9, 0x0e, 0x80, 0xa8, 0x23, 0x18, 0xa9, 0x52, 0x7f, 0xb5, 0xa0, /* Byte value: 0xd7 */ + 0x90, 0xcc, 0x40, 0x7d, 0x6e, 0x36, 0x7a, 0x27, 0xd1, 0xe2, 0x52, 0x19, 0x7c, 0xe4, 0xed, 0x5e, /* Byte value: 0xd8 */ + 0x02, 0x23, 0x7e, 0xc8, 0x8f, 0x70, 0xaf, 0x58, 0xaa, 0xd2, 0xef, 0x57, 0x1e, 0x63, 0xca, 0x4e, /* Byte value: 0xd9 */ + 0x26, 0x10, 0x6e, 0x46, 0x75, 0x9c, 0x50, 0x21, 0x0f, 0x0b, 0x1a, 0xc0, 0x01, 0x5a, 0x60, 0xb8, /* Byte value: 0xda */ + 0x96, 0xa9, 0xc2, 0xe6, 0x3c, 0xa6, 0x48, 0xcf, 0xec, 0x57, 0xa0, 0xe0, 0x5e, 0x41, 0x70, 0x8c, /* Byte value: 0xdb */ + 0xcf, 0xe4, 0xab, 0xd2, 0x5f, 0x83, 0xf2, 0x94, 0x29, 0x10, 0x6b, 0x3a, 0x9f, 0x7d, 0x1d, 0xb1, /* Byte value: 0xdc */ + 0x89, 0x6b, 0x32, 0x30, 0x43, 0x0b, 0x04, 0x2d, 0x70, 0x1a, 0x8a, 0xb1, 0xfb, 0xe5, 0xb9, 0xb7, /* Byte value: 0xdd */ + 0x49, 0x96, 0x1f, 0xbb, 0x91, 0x23, 0x8b, 0xde, 0xdc, 0x18, 0xbf, 0x27, 0x31, 0xa2, 0xf2, 0x08, /* Byte value: 0xde */ + 0x2f, 0x6c, 0x6a, 0x84, 0xaa, 0xa7, 0x1f, 
0xae, 0xb7, 0xac, 0xb6, 0x95, 0x76, 0xc5, 0xab, 0x64, /* Byte value: 0xdf */ + 0x52, 0x12, 0x13, 0x3e, 0x33, 0x6e, 0x5a, 0x8c, 0xd7, 0x32, 0x88, 0xd8, 0xa8, 0xc0, 0x6c, 0xaf, /* Byte value: 0xe0 */ + 0xb4, 0xff, 0x50, 0xf3, 0x94, 0xda, 0x85, 0x5e, 0x74, 0x3b, 0xa7, 0x8e, 0x63, 0xdd, 0x47, 0xa8, /* Byte value: 0xe1 */ + 0x08, 0x8c, 0x3b, 0xa6, 0x79, 0x03, 0xf9, 0xa3, 0xed, 0xce, 0x3a, 0x9f, 0x78, 0x4f, 0xae, 0xfb, /* Byte value: 0xe2 */ + 0x3f, 0xb7, 0x1c, 0x0b, 0x58, 0xa1, 0x2e, 0x2b, 0xae, 0xf3, 0xc2, 0x68, 0x86, 0x5b, 0x34, 0x51, /* Byte value: 0xe3 */ + 0xd6, 0x43, 0xd9, 0x9f, 0x72, 0xbe, 0x8c, 0x9e, 0x88, 0xe8, 0xb3, 0x92, 0x18, 0x7c, 0x49, 0x58, /* Byte value: 0xe4 */ + 0x23, 0xa6, 0xad, 0x71, 0x0e, 0x44, 0x7b, 0xbd, 0xcd, 0x05, 0x91, 0xa4, 0x32, 0x4c, 0x52, 0x03, /* Byte value: 0xe5 */ + 0x4e, 0x03, 0xa2, 0x44, 0x65, 0x8b, 0x0f, 0x1a, 0xb4, 0xc4, 0xdb, 0x14, 0x1c, 0xd7, 0x0a, 0xfd, /* Byte value: 0xe6 */ + 0x60, 0x9f, 0xf7, 0xa4, 0x69, 0x14, 0xa6, 0x98, 0x56, 0x01, 0xfb, 0x4b, 0x65, 0xc2, 0xc4, 0xbe, /* Byte value: 0xe7 */ + 0x0b, 0x5f, 0x7a, 0x0a, 0x50, 0x4b, 0xe0, 0xd7, 0x12, 0x75, 0x43, 0x02, 0x69, 0xfc, 0x01, 0x92, /* Byte value: 0xe8 */ + 0xc4, 0xbb, 0xd1, 0xd8, 0x0f, 0xc8, 0x12, 0x43, 0x3b, 0x65, 0x28, 0x38, 0xf6, 0x81, 0x1c, 0x23, /* Byte value: 0xe9 */ + 0x35, 0x18, 0x59, 0x65, 0xae, 0xd2, 0x78, 0xd0, 0xe9, 0xef, 0x17, 0xa0, 0xe0, 0x77, 0x50, 0xe4, /* Byte value: 0xea */ + 0x0e, 0xe9, 0xb9, 0x3d, 0x2b, 0x93, 0xcb, 0x4b, 0xd0, 0x7b, 0xc8, 0x66, 0x5a, 0xea, 0x33, 0x29, /* Byte value: 0xeb */ + 0x64, 0xd9, 0x0b, 0xf7, 0xb4, 0xf4, 0x3b, 0x28, 0xc1, 0x66, 0xe6, 0xe5, 0x59, 0x04, 0x93, 0x22, /* Byte value: 0xec */ + 0x3b, 0xf1, 0xe0, 0x58, 0x85, 0x41, 0xb3, 0x9b, 0x39, 0x94, 0xdf, 0xc6, 0xba, 0x9d, 0x63, 0xcd, /* Byte value: 0xed */ + 0x30, 0xae, 0x9a, 0x52, 0xd5, 0x0a, 0x53, 0x4c, 0x2b, 0xe1, 0x9c, 0xc4, 0xd3, 0x61, 0x62, 0x5f, /* Byte value: 0xee */ + 0xc5, 0x4b, 0xee, 0xbc, 0xa9, 0xf0, 0xa4, 0x6f, 0x6e, 0x0c, 0xbe, 0xf2, 0xf9, 0x51, 0x79, 0x04, /* Byte value: 0xef */ + 0x6b, 0xc0, 0x8d, 0xae, 0x39, 0x5f, 0x46, 0x4f, 0x44, 0x74, 0xb8, 0x49, 0x0c, 0x3e, 0xc5, 0x2c, /* Byte value: 0xf0 */ + 0x91, 0x3c, 0x7f, 0x19, 0xc8, 0x0e, 0xcc, 0x0b, 0x84, 0x8b, 0xc4, 0xd3, 0x73, 0x34, 0x88, 0x79, /* Byte value: 0xf1 */ + 0x4c, 0x20, 0xdc, 0x8c, 0xea, 0xfb, 0xa0, 0x42, 0x1e, 0x16, 0x34, 0x43, 0x02, 0xb4, 0xc0, 0xb3, /* Byte value: 0xf2 */ + 0xb2, 0x9a, 0xd2, 0x68, 0xc6, 0x4a, 0xb7, 0xb6, 0x49, 0x8e, 0x55, 0x77, 0x41, 0x78, 0xda, 0x7a, /* Byte value: 0xf3 */ + 0xfe, 0xba, 0x0e, 0xe4, 0x2c, 0xb1, 0x17, 0xf4, 0x57, 0x98, 0x61, 0x34, 0x43, 0xcc, 0x1a, 0xc9, /* Byte value: 0xf4 */ + 0xe7, 0x1d, 0x7c, 0xa9, 0x01, 0x8c, 0x69, 0xfe, 0xf6, 0x60, 0xb9, 0x9c, 0xc4, 0xcd, 0x4e, 0x20, /* Byte value: 0xf5 */ + 0x27, 0xe0, 0x51, 0x22, 0xd3, 0xa4, 0xe6, 0x0d, 0x5a, 0x62, 0x8c, 0x0a, 0x0e, 0x8a, 0x05, 0x9f, /* Byte value: 0xf6 */ + 0x8c, 0xdd, 0xf1, 0x07, 0x38, 0xd3, 0x2f, 0xb1, 0xb2, 0x14, 0x01, 0xd5, 0xc8, 0xf3, 0x8b, 0x0c, /* Byte value: 0xf7 */ + 0x3e, 0x47, 0x23, 0x6f, 0xfe, 0x99, 0x98, 0x07, 0xfb, 0x9a, 0x54, 0xa2, 0x89, 0x8b, 0x51, 0x76, /* Byte value: 0xf8 */ + 0x29, 0x09, 0xe8, 0x1f, 0xf8, 0x37, 0x2d, 0x46, 0x8a, 0x19, 0x44, 0x6c, 0x54, 0x60, 0x36, 0xb6, /* Byte value: 0xf9 */ + 0x72, 0x67, 0xff, 0xe3, 0x14, 0x62, 0x38, 0x45, 0xe5, 0x8c, 0x60, 0xe1, 0x8b, 0x3f, 0x91, 0xc5, /* Byte value: 0xfa */ + 0x2c, 0xbf, 0x2b, 0x28, 0x83, 0xef, 0x06, 0xda, 0x48, 0x17, 0xcf, 0x08, 0x67, 0x76, 0x04, 0x0d, /* Byte value: 0xfb */ + 0xa4, 0x24, 0x26, 0x7c, 0x66, 0xdc, 0xb4, 0xdb, 0x6d, 0x64, 0xd3, 0x73, 0x93, 0x43, 
0xd8, 0x9d, /* Byte value: 0xfc */ + 0x8e, 0xfe, 0x8f, 0xcf, 0xb7, 0xa3, 0x80, 0xe9, 0x18, 0xc6, 0xee, 0x82, 0xd6, 0x90, 0x41, 0x42, /* Byte value: 0xfd */ + 0x51, 0xc1, 0x52, 0x92, 0x1a, 0x26, 0x43, 0xf8, 0x28, 0x89, 0xf1, 0x45, 0xb9, 0x73, 0xc3, 0xc6, /* Byte value: 0xfe */ + 0xc8, 0x71, 0x16, 0x2d, 0xab, 0x2b, 0x76, 0x50, 0x41, 0xcc, 0x0f, 0x09, 0xb2, 0x08, 0xe5, 0x44, /* Byte value: 0xff */ + 0x16, 0xbe, 0xf4, 0x14, 0xa0, 0x96, 0x03, 0x6d, 0x24, 0xea, 0x86, 0x04, 0xd2, 0x3b, 0x02, 0xe7, /* Byte value: 0x00 */ + + /* Matrix row: 15 */ + 0xfb, 0xd5, 0x0c, 0x7a, 0xc0, 0x80, 0x96, 0x19, 0x11, 0x87, 0x93, 0x1b, 0xc9, 0xae, 0xb5, 0xfc, /* Byte value: 0x01 */ + 0x08, 0xe0, 0x8c, 0xb2, 0x17, 0x1a, 0xce, 0x7b, 0x32, 0xfc, 0xab, 0xf8, 0xfe, 0xf2, 0x0a, 0xee, /* Byte value: 0x02 */ + 0xcb, 0xd0, 0xa2, 0x50, 0xb2, 0xdc, 0x77, 0xc0, 0xbd, 0xca, 0xef, 0x4e, 0x88, 0xc7, 0x89, 0xdd, /* Byte value: 0x03 */ + 0x41, 0x10, 0x1a, 0x5e, 0x63, 0x42, 0xd6, 0x69, 0xc4, 0x12, 0x3c, 0xd3, 0x93, 0x13, 0xc0, 0x11, /* Byte value: 0x04 */ + 0x38, 0xe5, 0x22, 0x98, 0x65, 0x46, 0x2f, 0xa2, 0x9e, 0xb1, 0xd7, 0xad, 0xbf, 0x9b, 0x36, 0xcf, /* Byte value: 0x05 */ + 0xb3, 0x39, 0x6a, 0x11, 0x6f, 0x4a, 0xe7, 0x3c, 0x70, 0x97, 0x29, 0x2f, 0xcb, 0xc0, 0xef, 0x6e, /* Byte value: 0x06 */ + 0x1f, 0xb7, 0xc2, 0x06, 0x7d, 0x56, 0x4d, 0x08, 0x35, 0x78, 0xfd, 0x96, 0x0f, 0xfe, 0x68, 0x31, /* Byte value: 0x07 */ + 0x88, 0xf8, 0x9b, 0xc3, 0xa4, 0x79, 0x73, 0xc7, 0x94, 0xe7, 0x89, 0xa3, 0xc5, 0x09, 0xaa, 0x16, /* Byte value: 0x08 */ + 0x32, 0x3d, 0x8d, 0xe7, 0x07, 0xbb, 0x33, 0xb7, 0x41, 0x72, 0x26, 0x6b, 0x9f, 0xb4, 0xdf, 0xfb, /* Byte value: 0x09 */ + 0x7c, 0x99, 0x8e, 0x18, 0x37, 0x9b, 0xf7, 0x20, 0xd4, 0x23, 0x72, 0x1d, 0x3c, 0x7e, 0x63, 0xc4, /* Byte value: 0x0a */ + 0x5c, 0x9f, 0xfb, 0x95, 0x6b, 0xf3, 0x49, 0x0f, 0x1c, 0x55, 0x9b, 0x7b, 0x42, 0x30, 0x4b, 0xfa, /* Byte value: 0x0b */ + 0x02, 0x38, 0x23, 0xcd, 0x75, 0xe7, 0xd2, 0x6e, 0xed, 0x3f, 0x5a, 0x3e, 0xde, 0xdd, 0xe3, 0xda, /* Byte value: 0x0c */ + 0xec, 0x82, 0x42, 0xce, 0xaa, 0xcc, 0x15, 0x6a, 0x16, 0x03, 0xc5, 0x75, 0x38, 0xa2, 0xd7, 0x23, /* Byte value: 0x0d */ + 0x12, 0x3b, 0xf8, 0x6a, 0x5b, 0xd3, 0x8d, 0x98, 0x89, 0x04, 0xcf, 0x0d, 0xe1, 0xfa, 0xf7, 0xc5, /* Byte value: 0x0e */ + 0x7b, 0xcd, 0x1b, 0x0b, 0x73, 0xe3, 0x2b, 0xa5, 0xb7, 0x9c, 0xb1, 0x40, 0xf2, 0x55, 0x15, 0x04, /* Byte value: 0x0f */ + 0x5f, 0xbb, 0x28, 0xdf, 0xc5, 0x86, 0xf2, 0x56, 0x66, 0x94, 0xec, 0x5a, 0xf3, 0x62, 0x38, 0x4d, /* Byte value: 0x10 */ + 0xc1, 0x08, 0x0d, 0x2f, 0xd0, 0x21, 0x6b, 0xd5, 0x62, 0x09, 0x1e, 0x88, 0xa8, 0xe8, 0x60, 0xe9, /* Byte value: 0x11 */ + 0x04, 0x70, 0x46, 0x59, 0xea, 0x0d, 0x67, 0xdc, 0x19, 0x7e, 0xb4, 0x7c, 0x7f, 0x79, 0x05, 0x77, /* Byte value: 0x12 */ + 0x76, 0x41, 0x21, 0x67, 0x55, 0x66, 0xeb, 0x35, 0x0b, 0xe0, 0x83, 0xdb, 0x1c, 0x51, 0x8a, 0xf0, /* Byte value: 0x13 */ + 0x6c, 0x9a, 0x55, 0xbf, 0x19, 0xaf, 0xa8, 0xd6, 0xb0, 0x18, 0xe7, 0x2e, 0x03, 0x59, 0x77, 0xdb, /* Byte value: 0x14 */ + 0x26, 0x4e, 0x10, 0x19, 0xc3, 0x82, 0x0b, 0x9d, 0x3c, 0x37, 0x07, 0x24, 0xdf, 0xea, 0xce, 0x93, /* Byte value: 0x15 */ + 0x0f, 0xb4, 0x19, 0xa1, 0x53, 0x62, 0x12, 0xfe, 0x51, 0x43, 0x68, 0xa5, 0x30, 0xd9, 0x7c, 0x2e, /* Byte value: 0x16 */ + 0x0c, 0x90, 0xca, 0xeb, 0xfd, 0x17, 0xa9, 0xa7, 0x2b, 0x82, 0x1f, 0x84, 0x81, 0x8b, 0x0f, 0x99, /* Byte value: 0x17 */ + 0xe0, 0x12, 0x88, 0x25, 0x57, 0xdb, 0xbc, 0xcd, 0x3d, 0x81, 0xda, 0xf1, 0xb9, 0x29, 0xd8, 0xba, /* Byte value: 0x18 */ + 0xe6, 0x5a, 0xed, 0xb1, 0xc8, 0x31, 0x09, 0x7f, 0xc9, 0xc0, 0x34, 0xb3, 0x18, 0x8d, 0x3e, 0x17, /* Byte 
value: 0x19 */ + 0xd6, 0x5f, 0x43, 0x9b, 0xba, 0x6d, 0xe8, 0xa6, 0x65, 0x8d, 0x48, 0xe6, 0x59, 0xe4, 0x02, 0x36, /* Byte value: 0x1a */ + 0x18, 0xe3, 0x57, 0x15, 0x39, 0x2e, 0x91, 0x8d, 0x56, 0xc7, 0x3e, 0xcb, 0xc1, 0xd5, 0x1e, 0xf1, /* Byte value: 0x1b */ + 0x8e, 0xb0, 0xfe, 0x57, 0x3b, 0x93, 0xc6, 0x75, 0x60, 0xa6, 0x67, 0xe1, 0x64, 0xad, 0x4c, 0xbb, /* Byte value: 0x1c */ + 0x54, 0x7f, 0x77, 0x27, 0x7c, 0xe9, 0x87, 0x74, 0x2e, 0xa9, 0x30, 0x83, 0xbc, 0xc2, 0x41, 0x14, /* Byte value: 0x1d */ + 0xe4, 0x62, 0xce, 0x7c, 0xbd, 0xd6, 0xdb, 0x11, 0x24, 0xff, 0x6e, 0x8d, 0xc6, 0x50, 0xdd, 0xcd, /* Byte value: 0x1e */ + 0xac, 0x8e, 0xa8, 0x17, 0x12, 0x1c, 0xaa, 0x34, 0x45, 0xef, 0xd4, 0xb9, 0xc4, 0x3e, 0x87, 0x5f, /* Byte value: 0x1f */ + 0x69, 0xf6, 0xe3, 0x61, 0x28, 0x30, 0xa6, 0x3d, 0x3e, 0x98, 0x7e, 0x4d, 0x13, 0xaf, 0xe2, 0xc1, /* Byte value: 0x20 */ + 0xee, 0xba, 0x61, 0x03, 0xdf, 0x2b, 0xc7, 0x04, 0xfb, 0x3c, 0x9f, 0x4b, 0xe6, 0x7f, 0x34, 0xf9, /* Byte value: 0x21 */ + 0xd9, 0xeb, 0x5a, 0x3a, 0xe9, 0x0f, 0xfa, 0x58, 0x34, 0xce, 0x20, 0x43, 0x69, 0x3d, 0x7e, 0x18, /* Byte value: 0x22 */ + 0xf7, 0x45, 0xc6, 0x91, 0x3d, 0x97, 0x3f, 0xbe, 0x3a, 0x05, 0x8c, 0x9f, 0x48, 0x25, 0xba, 0x65, /* Byte value: 0x23 */ + 0xb9, 0xe1, 0xc5, 0x6e, 0x0d, 0xb7, 0xfb, 0x29, 0xaf, 0x54, 0xd8, 0xe9, 0xeb, 0xef, 0x06, 0x5a, /* Byte value: 0x24 */ + 0x85, 0x74, 0xa1, 0xaf, 0x82, 0xfc, 0xb3, 0x57, 0x28, 0x9b, 0xbb, 0x38, 0x2b, 0x0d, 0x35, 0xe2, /* Byte value: 0x25 */ + 0x1e, 0xab, 0x32, 0x81, 0xa6, 0xc4, 0x24, 0x3f, 0xa2, 0x86, 0xd0, 0x89, 0x60, 0x71, 0xf8, 0x5c, /* Byte value: 0x26 */ + 0x66, 0x42, 0xfa, 0xc0, 0x7b, 0x52, 0xb4, 0xc3, 0x6f, 0xdb, 0x16, 0xe8, 0x23, 0x76, 0x9e, 0xef, /* Byte value: 0x27 */ + 0x30, 0x05, 0xae, 0x2a, 0x72, 0x5c, 0xe1, 0xd9, 0xac, 0x4d, 0x7c, 0x55, 0x41, 0x69, 0x3c, 0x21, /* Byte value: 0x28 */ + 0xd5, 0x7b, 0x90, 0xd1, 0x14, 0x18, 0x53, 0xff, 0x1f, 0x4c, 0x3f, 0xc7, 0xe8, 0xb6, 0x71, 0x81, /* Byte value: 0x29 */ + 0xa2, 0x26, 0x41, 0x31, 0x9a, 0xec, 0xd1, 0xfd, 0x83, 0x52, 0x91, 0x03, 0x9b, 0x68, 0x6b, 0x1c, /* Byte value: 0x2a */ + 0xfc, 0x81, 0x99, 0x69, 0x84, 0xf8, 0x4a, 0x9c, 0x72, 0x38, 0x50, 0x46, 0x07, 0x85, 0xc3, 0x3c, /* Byte value: 0x2b */ + 0x60, 0x0a, 0x9f, 0x54, 0xe4, 0xb8, 0x01, 0x71, 0x9b, 0x9a, 0xf8, 0xaa, 0x82, 0xd2, 0x78, 0x42, /* Byte value: 0x2c */ + 0xff, 0xa5, 0x4a, 0x23, 0x2a, 0x8d, 0xf1, 0xc5, 0x08, 0xf9, 0x27, 0x67, 0xb6, 0xd7, 0xb0, 0x8b, /* Byte value: 0x2d */ + 0x6e, 0xa2, 0x76, 0x72, 0x6c, 0x48, 0x7a, 0xb8, 0x5d, 0x27, 0xbd, 0x10, 0xdd, 0x84, 0x94, 0x01, /* Byte value: 0x2e */ + 0xea, 0xca, 0x27, 0x5a, 0x35, 0x26, 0xa0, 0xd8, 0xe2, 0x42, 0x2b, 0x37, 0x99, 0x06, 0x31, 0x8e, /* Byte value: 0x2f */ + 0x83, 0x3c, 0xc4, 0x3b, 0x1d, 0x16, 0x06, 0xe5, 0xdc, 0xda, 0x55, 0x7a, 0x8a, 0xa9, 0xd3, 0x4f, /* Byte value: 0x30 */ + 0x15, 0x6f, 0x6d, 0x79, 0x1f, 0xab, 0x51, 0x1d, 0xea, 0xbb, 0x0c, 0x50, 0x2f, 0xd1, 0x81, 0x05, /* Byte value: 0x31 */ + 0xc0, 0x14, 0xfd, 0xa8, 0x0b, 0xb3, 0x02, 0xe2, 0xf5, 0xf7, 0x33, 0x97, 0xc7, 0x67, 0xf0, 0x84, /* Byte value: 0x32 */ + 0xdc, 0x87, 0xec, 0xe4, 0xd8, 0x90, 0xf4, 0xb3, 0xba, 0x4e, 0xb9, 0x20, 0x79, 0xcb, 0xeb, 0x02, /* Byte value: 0x33 */ + 0xb4, 0x6d, 0xff, 0x02, 0x2b, 0x32, 0x3b, 0xb9, 0x13, 0x28, 0xea, 0x72, 0x05, 0xeb, 0x99, 0xae, /* Byte value: 0x34 */ + 0xeb, 0xd6, 0xd7, 0xdd, 0xee, 0xb4, 0xc9, 0xef, 0x75, 0xbc, 0x06, 0x28, 0xf6, 0x89, 0xa1, 0xe3, /* Byte value: 0x35 */ + 0xc8, 0xf4, 0x71, 0x1a, 0x1c, 0xa9, 0xcc, 0x99, 0xc7, 0x0b, 0x98, 0x6f, 0x39, 0x95, 0xfa, 0x6a, /* Byte value: 0x36 */ + 0x84, 0x68, 0x51, 0x28, 
0x59, 0x6e, 0xda, 0x60, 0xbf, 0x65, 0x96, 0x27, 0x44, 0x82, 0xa5, 0x8f, /* Byte value: 0x37 */ + 0xe5, 0x7e, 0x3e, 0xfb, 0x66, 0x44, 0xb2, 0x26, 0xb3, 0x01, 0x43, 0x92, 0xa9, 0xdf, 0x4d, 0xa0, /* Byte value: 0x38 */ + 0xa7, 0x4a, 0xf7, 0xef, 0xab, 0x73, 0xdf, 0x16, 0x0d, 0xd2, 0x08, 0x60, 0x8b, 0x9e, 0xfe, 0x06, /* Byte value: 0x39 */ + 0x44, 0x7c, 0xac, 0x80, 0x52, 0xdd, 0xd8, 0x82, 0x4a, 0x92, 0xa5, 0xb0, 0x83, 0xe5, 0x55, 0x0b, /* Byte value: 0x3a */ + 0xba, 0xc5, 0x16, 0x24, 0xa3, 0xc2, 0x40, 0x70, 0xd5, 0x95, 0xaf, 0xc8, 0x5a, 0xbd, 0x75, 0xed, /* Byte value: 0x3b */ + 0x62, 0x32, 0xbc, 0x99, 0x91, 0x5f, 0xd3, 0x1f, 0x76, 0xa5, 0xa2, 0x94, 0x5c, 0x0f, 0x9b, 0x98, /* Byte value: 0x3c */ + 0xf2, 0x29, 0x70, 0x4f, 0x0c, 0x08, 0x31, 0x55, 0xb4, 0x85, 0x15, 0xfc, 0x58, 0xd3, 0x2f, 0x7f, /* Byte value: 0x3d */ + 0x53, 0x2b, 0xe2, 0x34, 0x38, 0x91, 0x5b, 0xf1, 0x4d, 0x16, 0xf3, 0xde, 0x72, 0xe9, 0x37, 0xd4, /* Byte value: 0x3e */ + 0x9a, 0xc3, 0x63, 0xa9, 0xff, 0xaa, 0xfe, 0x5f, 0x1d, 0xe3, 0x46, 0xae, 0x24, 0xf3, 0x5d, 0xd3, /* Byte value: 0x3f */ + 0x10, 0x03, 0xdb, 0xa7, 0x2e, 0x34, 0x5f, 0xf6, 0x64, 0x3b, 0x95, 0x33, 0x3f, 0x27, 0x14, 0x1f, /* Byte value: 0x40 */ + 0x1d, 0x8f, 0xe1, 0xcb, 0x08, 0xb1, 0x9f, 0x66, 0xd8, 0x47, 0xa7, 0xa8, 0xd1, 0x23, 0x8b, 0xeb, /* Byte value: 0x41 */ + 0x0a, 0xd8, 0xaf, 0x7f, 0x62, 0xfd, 0x1c, 0x15, 0xdf, 0xc3, 0xf1, 0xc6, 0x20, 0x2f, 0xe9, 0x34, /* Byte value: 0x42 */ + 0xd3, 0x33, 0xf5, 0x45, 0x8b, 0xf2, 0xe6, 0x4d, 0xeb, 0x0d, 0xd1, 0x85, 0x49, 0x12, 0x97, 0x2c, /* Byte value: 0x43 */ + 0xfd, 0x9d, 0x69, 0xee, 0x5f, 0x6a, 0x23, 0xab, 0xe5, 0xc6, 0x7d, 0x59, 0x68, 0x0a, 0x53, 0x51, /* Byte value: 0x44 */ + 0x73, 0x2d, 0x97, 0xb9, 0x64, 0xf9, 0xe5, 0xde, 0x85, 0x60, 0x1a, 0xb8, 0x0c, 0xa7, 0x1f, 0xea, /* Byte value: 0x45 */ + 0xf1, 0x0d, 0xa3, 0x05, 0xa2, 0x7d, 0x8a, 0x0c, 0xce, 0x44, 0x62, 0xdd, 0xe9, 0x81, 0x5c, 0xc8, /* Byte value: 0x46 */ + 0x4a, 0xd4, 0x45, 0xa6, 0xda, 0x2d, 0xa3, 0x4b, 0x8c, 0x2f, 0xe0, 0x0a, 0xdc, 0xb3, 0xb9, 0x48, /* Byte value: 0x47 */ + 0xa1, 0x02, 0x92, 0x7b, 0x34, 0x99, 0x6a, 0xa4, 0xf9, 0x93, 0xe6, 0x22, 0x2a, 0x3a, 0x18, 0xab, /* Byte value: 0x48 */ + 0xaa, 0xc6, 0xcd, 0x83, 0x8d, 0xf6, 0x1f, 0x86, 0xb1, 0xae, 0x3a, 0xfb, 0x65, 0x9a, 0x61, 0xf2, /* Byte value: 0x49 */ + 0x74, 0x79, 0x02, 0xaa, 0x20, 0x81, 0x39, 0x5b, 0xe6, 0xdf, 0xd9, 0xe5, 0xc2, 0x8c, 0x69, 0x2a, /* Byte value: 0x4a */ + 0x14, 0x73, 0x9d, 0xfe, 0xc4, 0x39, 0x38, 0x2a, 0x7d, 0x45, 0x21, 0x4f, 0x40, 0x5e, 0x11, 0x68, /* Byte value: 0x4b */ + 0x39, 0xf9, 0xd2, 0x1f, 0xbe, 0xd4, 0x46, 0x95, 0x09, 0x4f, 0xfa, 0xb2, 0xd0, 0x14, 0xa6, 0xa2, /* Byte value: 0x4c */ + 0x95, 0x77, 0x7a, 0x08, 0xac, 0xc8, 0xec, 0xa1, 0x4c, 0xa0, 0x2e, 0x0b, 0x14, 0x2a, 0x21, 0xfd, /* Byte value: 0x4d */ + 0x5b, 0xcb, 0x6e, 0x86, 0x2f, 0x8b, 0x95, 0x8a, 0x7f, 0xea, 0x58, 0x26, 0x8c, 0x1b, 0x3d, 0x3a, /* Byte value: 0x4e */ + 0x56, 0x47, 0x54, 0xea, 0x09, 0x0e, 0x55, 0x1a, 0xc3, 0x96, 0x6a, 0xbd, 0x62, 0x1f, 0xa2, 0xce, /* Byte value: 0x4f */ + 0x8a, 0xc0, 0xb8, 0x0e, 0xd1, 0x9e, 0xa1, 0xa9, 0x79, 0xd8, 0xd3, 0x9d, 0x1b, 0xd4, 0x49, 0xcc, /* Byte value: 0x50 */ + 0xdf, 0xa3, 0x3f, 0xae, 0x76, 0xe5, 0x4f, 0xea, 0xc0, 0x8f, 0xce, 0x01, 0xc8, 0x99, 0x98, 0xb5, /* Byte value: 0x51 */ + 0xcd, 0x98, 0xc7, 0xc4, 0x2d, 0x36, 0xc2, 0x72, 0x49, 0x8b, 0x01, 0x0c, 0x29, 0x63, 0x6f, 0x70, /* Byte value: 0x52 */ + 0x51, 0x13, 0xc1, 0xf9, 0x4d, 0x76, 0x89, 0x9f, 0xa0, 0x29, 0xa9, 0xe0, 0xac, 0x34, 0xd4, 0x0e, /* Byte value: 0x53 */ + 0x34, 0x75, 0xe8, 0x73, 0x98, 0x51, 0x86, 0x05, 0xb5, 0x33, 0xc8, 
0x29, 0x3e, 0x10, 0x39, 0x56, /* Byte value: 0x54 */ + 0xf6, 0x59, 0x36, 0x16, 0xe6, 0x05, 0x56, 0x89, 0xad, 0xfb, 0xa1, 0x80, 0x27, 0xaa, 0x2a, 0x08, /* Byte value: 0x55 */ + 0x8d, 0x94, 0x2d, 0x1d, 0x95, 0xe6, 0x7d, 0x2c, 0x1a, 0x67, 0x10, 0xc0, 0xd5, 0xff, 0x3f, 0x0c, /* Byte value: 0x56 */ + 0x6a, 0xd2, 0x30, 0x2b, 0x86, 0x45, 0x1d, 0x64, 0x44, 0x59, 0x09, 0x6c, 0xa2, 0xfd, 0x91, 0x76, /* Byte value: 0x57 */ + 0xf3, 0x35, 0x80, 0xc8, 0xd7, 0x9a, 0x58, 0x62, 0x23, 0x7b, 0x38, 0xe3, 0x37, 0x5c, 0xbf, 0x12, /* Byte value: 0x58 */ + 0xf5, 0x7d, 0xe5, 0x5c, 0x48, 0x70, 0xed, 0xd0, 0xd7, 0x3a, 0xd6, 0xa1, 0x96, 0xf8, 0x59, 0xbf, /* Byte value: 0x59 */ + 0x11, 0x1f, 0x2b, 0x20, 0xf5, 0xa6, 0x36, 0xc1, 0xf3, 0xc5, 0xb8, 0x2c, 0x50, 0xa8, 0x84, 0x72, /* Byte value: 0x5a */ + 0x9d, 0x97, 0xf6, 0xba, 0xbb, 0xd2, 0x22, 0xda, 0x7e, 0x5c, 0x85, 0xf3, 0xea, 0xd8, 0x2b, 0x13, /* Byte value: 0x5b */ + 0x75, 0x65, 0xf2, 0x2d, 0xfb, 0x13, 0x50, 0x6c, 0x71, 0x21, 0xf4, 0xfa, 0xad, 0x03, 0xf9, 0x47, /* Byte value: 0x5c */ + 0x19, 0xff, 0xa7, 0x92, 0xe2, 0xbc, 0xf8, 0xba, 0xc1, 0x39, 0x13, 0xd4, 0xae, 0x5a, 0x8e, 0x9c, /* Byte value: 0x5d */ + 0x03, 0x24, 0xd3, 0x4a, 0xae, 0x75, 0xbb, 0x59, 0x7a, 0xc1, 0x77, 0x21, 0xb1, 0x52, 0x73, 0xb7, /* Byte value: 0x5e */ + 0x70, 0x09, 0x44, 0xf3, 0xca, 0x8c, 0x5e, 0x87, 0xff, 0xa1, 0x6d, 0x99, 0xbd, 0xf5, 0x6c, 0x5d, /* Byte value: 0x5f */ + 0x72, 0x31, 0x67, 0x3e, 0xbf, 0x6b, 0x8c, 0xe9, 0x12, 0x9e, 0x37, 0xa7, 0x63, 0x28, 0x8f, 0x87, /* Byte value: 0x60 */ + 0x3a, 0xdd, 0x01, 0x55, 0x10, 0xa1, 0xfd, 0xcc, 0x73, 0x8e, 0x8d, 0x93, 0x61, 0x46, 0xd5, 0x15, /* Byte value: 0x61 */ + 0x8b, 0xdc, 0x48, 0x89, 0x0a, 0x0c, 0xc8, 0x9e, 0xee, 0x26, 0xfe, 0x82, 0x74, 0x5b, 0xd9, 0xa1, /* Byte value: 0x62 */ + 0x33, 0x21, 0x7d, 0x60, 0xdc, 0x29, 0x5a, 0x80, 0xd6, 0x8c, 0x0b, 0x74, 0xf0, 0x3b, 0x4f, 0x96, /* Byte value: 0x63 */ + 0xc6, 0x5c, 0x98, 0x3c, 0x94, 0x59, 0xb7, 0x50, 0x01, 0xb6, 0xdd, 0xd5, 0x66, 0xc3, 0x16, 0x29, /* Byte value: 0x64 */ + 0x2f, 0xb2, 0x6c, 0x2c, 0x0f, 0x0a, 0xac, 0xd1, 0x99, 0x35, 0x81, 0xc3, 0x4e, 0x97, 0x54, 0x10, /* Byte value: 0x65 */ + 0x89, 0xe4, 0x6b, 0x44, 0x7f, 0xeb, 0x1a, 0xf0, 0x03, 0x19, 0xa4, 0xbc, 0xaa, 0x86, 0x3a, 0x7b, /* Byte value: 0x66 */ + 0xbe, 0xb5, 0x50, 0x7d, 0x49, 0xcf, 0x27, 0xac, 0xcc, 0xeb, 0x1b, 0xb4, 0x25, 0xc4, 0x70, 0x9a, /* Byte value: 0x67 */ + 0xce, 0xbc, 0x14, 0x8e, 0x83, 0x43, 0x79, 0x2b, 0x33, 0x4a, 0x76, 0x2d, 0x98, 0x31, 0x1c, 0xc7, /* Byte value: 0x68 */ + 0xc4, 0x64, 0xbb, 0xf1, 0xe1, 0xbe, 0x65, 0x3e, 0xec, 0x89, 0x87, 0xeb, 0xb8, 0x1e, 0xf5, 0xf3, /* Byte value: 0x69 */ + 0xfa, 0xc9, 0xfc, 0xfd, 0x1b, 0x12, 0xff, 0x2e, 0x86, 0x79, 0xbe, 0x04, 0xa6, 0x21, 0x25, 0x91, /* Byte value: 0x6a */ + 0x3b, 0xc1, 0xf1, 0xd2, 0xcb, 0x33, 0x94, 0xfb, 0xe4, 0x70, 0xa0, 0x8c, 0x0e, 0xc9, 0x45, 0x78, /* Byte value: 0x6b */ + 0xdd, 0x9b, 0x1c, 0x63, 0x03, 0x02, 0x9d, 0x84, 0x2d, 0xb0, 0x94, 0x3f, 0x16, 0x44, 0x7b, 0x6f, /* Byte value: 0x6c */ + 0x77, 0x5d, 0xd1, 0xe0, 0x8e, 0xf4, 0x82, 0x02, 0x9c, 0x1e, 0xae, 0xc4, 0x73, 0xde, 0x1a, 0x9d, /* Byte value: 0x6d */ + 0xc5, 0x78, 0x4b, 0x76, 0x3a, 0x2c, 0x0c, 0x09, 0x7b, 0x77, 0xaa, 0xf4, 0xd7, 0x91, 0x65, 0x9e, /* Byte value: 0x6e */ + 0x16, 0x4b, 0xbe, 0x33, 0xb1, 0xde, 0xea, 0x44, 0x90, 0x7a, 0x7b, 0x71, 0x9e, 0x83, 0xf2, 0xb2, /* Byte value: 0x6f */ + 0xa4, 0x6e, 0x24, 0xa5, 0x05, 0x06, 0x64, 0x4f, 0x77, 0x13, 0x7f, 0x41, 0x3a, 0xcc, 0x8d, 0xb1, /* Byte value: 0x70 */ + 0xad, 0x92, 0x58, 0x90, 0xc9, 0x8e, 0xc3, 0x03, 0xd2, 0x11, 0xf9, 0xa6, 0xab, 0xb1, 0x17, 0x32, /* Byte value: 
0x71 */ + 0xd8, 0xf7, 0xaa, 0xbd, 0x32, 0x9d, 0x93, 0x6f, 0xa3, 0x30, 0x0d, 0x5c, 0x06, 0xb2, 0xee, 0x75, /* Byte value: 0x72 */ + 0xb7, 0x49, 0x2c, 0x48, 0x85, 0x47, 0x80, 0xe0, 0x69, 0xe9, 0x9d, 0x53, 0xb4, 0xb9, 0xea, 0x19, /* Byte value: 0x73 */ + 0x92, 0x23, 0xef, 0x1b, 0xe8, 0xb0, 0x30, 0x24, 0x2f, 0x1f, 0xed, 0x56, 0xda, 0x01, 0x57, 0x3d, /* Byte value: 0x74 */ + 0x49, 0xf0, 0x96, 0xec, 0x74, 0x58, 0x18, 0x12, 0xf6, 0xee, 0x97, 0x2b, 0x6d, 0xe1, 0xca, 0xff, /* Byte value: 0x75 */ + 0x64, 0x7a, 0xd9, 0x0d, 0x0e, 0xb5, 0x66, 0xad, 0x82, 0xe4, 0x4c, 0xd6, 0xfd, 0xab, 0x7d, 0x35, /* Byte value: 0x76 */ + 0x91, 0x07, 0x3c, 0x51, 0x46, 0xc5, 0x8b, 0x7d, 0x55, 0xde, 0x9a, 0x77, 0x6b, 0x53, 0x24, 0x8a, /* Byte value: 0x77 */ + 0x9c, 0x8b, 0x06, 0x3d, 0x60, 0x40, 0x4b, 0xed, 0xe9, 0xa2, 0xa8, 0xec, 0x85, 0x57, 0xbb, 0x7e, /* Byte value: 0x78 */ + 0x01, 0x1c, 0xf0, 0x87, 0xdb, 0x92, 0x69, 0x37, 0x97, 0xfe, 0x2d, 0x1f, 0x6f, 0x8f, 0x90, 0x6d, /* Byte value: 0x79 */ + 0xe8, 0xf2, 0x04, 0x97, 0x40, 0xc1, 0x72, 0xb6, 0x0f, 0x7d, 0x71, 0x09, 0x47, 0xdb, 0xd2, 0x54, /* Byte value: 0x7a */ + 0xa0, 0x1e, 0x62, 0xfc, 0xef, 0x0b, 0x03, 0x93, 0x6e, 0x6d, 0xcb, 0x3d, 0x45, 0xb5, 0x88, 0xc6, /* Byte value: 0x7b */ + 0xbb, 0xd9, 0xe6, 0xa3, 0x78, 0x50, 0x29, 0x47, 0x42, 0x6b, 0x82, 0xd7, 0x35, 0x32, 0xe5, 0x80, /* Byte value: 0x7c */ + 0xb5, 0x71, 0x0f, 0x85, 0xf0, 0xa0, 0x52, 0x8e, 0x84, 0xd6, 0xc7, 0x6d, 0x6a, 0x64, 0x09, 0xc3, /* Byte value: 0x7d */ + 0x29, 0xfa, 0x09, 0xb8, 0x90, 0xe0, 0x19, 0x63, 0x6d, 0x74, 0x6f, 0x81, 0xef, 0x33, 0xb2, 0xbd, /* Byte value: 0x7e */ + 0xe3, 0x36, 0x5b, 0x6f, 0xf9, 0xae, 0x07, 0x94, 0x47, 0x40, 0xad, 0xd0, 0x08, 0x7b, 0xab, 0x0d, /* Byte value: 0x7f */ + 0x5a, 0xd7, 0x9e, 0x01, 0xf4, 0x19, 0xfc, 0xbd, 0xe8, 0x14, 0x75, 0x39, 0xe3, 0x94, 0xad, 0x57, /* Byte value: 0x80 */ + 0x17, 0x57, 0x4e, 0xb4, 0x6a, 0x4c, 0x83, 0x73, 0x07, 0x84, 0x56, 0x6e, 0xf1, 0x0c, 0x62, 0xdf, /* Byte value: 0x81 */ + 0x63, 0x2e, 0x4c, 0x1e, 0x4a, 0xcd, 0xba, 0x28, 0xe1, 0x5b, 0x8f, 0x8b, 0x33, 0x80, 0x0b, 0xf5, /* Byte value: 0x82 */ + 0x25, 0x6a, 0xc3, 0x53, 0x6d, 0xf7, 0xb0, 0xc4, 0x46, 0xf6, 0x70, 0x05, 0x6e, 0xb8, 0xbd, 0x24, /* Byte value: 0x83 */ + 0x7d, 0x85, 0x7e, 0x9f, 0xec, 0x09, 0x9e, 0x17, 0x43, 0xdd, 0x5f, 0x02, 0x53, 0xf1, 0xf3, 0xa9, /* Byte value: 0x84 */ + 0x20, 0x06, 0x75, 0x8d, 0x5c, 0x68, 0xbe, 0x2f, 0xc8, 0x76, 0xe9, 0x66, 0x7e, 0x4e, 0x28, 0x3e, /* Byte value: 0x85 */ + 0x13, 0x27, 0x08, 0xed, 0x80, 0x41, 0xe4, 0xaf, 0x1e, 0xfa, 0xe2, 0x12, 0x8e, 0x75, 0x67, 0xa8, /* Byte value: 0x86 */ + 0x0e, 0xa8, 0xe9, 0x26, 0x88, 0xf0, 0x7b, 0xc9, 0xc6, 0xbd, 0x45, 0xba, 0x5f, 0x56, 0xec, 0x43, /* Byte value: 0x87 */ + 0x9f, 0xaf, 0xd5, 0x77, 0xce, 0x35, 0xf0, 0xb4, 0x93, 0x63, 0xdf, 0xcd, 0x34, 0x05, 0xc8, 0xc9, /* Byte value: 0x88 */ + 0xe1, 0x0e, 0x78, 0xa2, 0x8c, 0x49, 0xd5, 0xfa, 0xaa, 0x7f, 0xf7, 0xee, 0xd6, 0xa6, 0x48, 0xd7, /* Byte value: 0x89 */ + 0x55, 0x63, 0x87, 0xa0, 0xa7, 0x7b, 0xee, 0x43, 0xb9, 0x57, 0x1d, 0x9c, 0xd3, 0x4d, 0xd1, 0x79, /* Byte value: 0x8a */ + 0x8f, 0xac, 0x0e, 0xd0, 0xe0, 0x01, 0xaf, 0x42, 0xf7, 0x58, 0x4a, 0xfe, 0x0b, 0x22, 0xdc, 0xd6, /* Byte value: 0x8b */ + 0xd1, 0x0b, 0xd6, 0x88, 0xfe, 0x15, 0x34, 0x23, 0x06, 0x32, 0x8b, 0xbb, 0x97, 0xcf, 0x74, 0xf6, /* Byte value: 0x8c */ + 0x40, 0x0c, 0xea, 0xd9, 0xb8, 0xd0, 0xbf, 0x5e, 0x53, 0xec, 0x11, 0xcc, 0xfc, 0x9c, 0x50, 0x7c, /* Byte value: 0x8d */ + 0x82, 0x20, 0x34, 0xbc, 0xc6, 0x84, 0x6f, 0xd2, 0x4b, 0x24, 0x78, 0x65, 0xe5, 0x26, 0x43, 0x22, /* Byte value: 0x8e */ + 0x52, 0x37, 0x12, 0xb3, 0xe3, 0x03, 
0x32, 0xc6, 0xda, 0xe8, 0xde, 0xc1, 0x1d, 0x66, 0xa7, 0xb9, /* Byte value: 0x8f */ + 0xb2, 0x25, 0x9a, 0x96, 0xb4, 0xd8, 0x8e, 0x0b, 0xe7, 0x69, 0x04, 0x30, 0xa4, 0x4f, 0x7f, 0x03, /* Byte value: 0x90 */ + 0x59, 0xf3, 0x4d, 0x4b, 0x5a, 0x6c, 0x47, 0xe4, 0x92, 0xd5, 0x02, 0x18, 0x52, 0xc6, 0xde, 0xe0, /* Byte value: 0x91 */ + 0x3f, 0xb1, 0xb7, 0x8b, 0x21, 0x3e, 0xf3, 0x27, 0xfd, 0x0e, 0x14, 0xf0, 0x71, 0xb0, 0x40, 0x0f, /* Byte value: 0x92 */ + 0xd4, 0x67, 0x60, 0x56, 0xcf, 0x8a, 0x3a, 0xc8, 0x88, 0xb2, 0x12, 0xd8, 0x87, 0x39, 0xe1, 0xec, /* Byte value: 0x93 */ + 0x79, 0xf5, 0x38, 0xc6, 0x06, 0x04, 0xf9, 0xcb, 0x5a, 0xa3, 0xeb, 0x7e, 0x2c, 0x88, 0xf6, 0xde, /* Byte value: 0x94 */ + 0xe7, 0x46, 0x1d, 0x36, 0x13, 0xa3, 0x60, 0x48, 0x5e, 0x3e, 0x19, 0xac, 0x77, 0x02, 0xae, 0x7a, /* Byte value: 0x95 */ + 0xef, 0xa6, 0x91, 0x84, 0x04, 0xb9, 0xae, 0x33, 0x6c, 0xc2, 0xb2, 0x54, 0x89, 0xf0, 0xa4, 0x94, /* Byte value: 0x96 */ + 0xca, 0xcc, 0x52, 0xd7, 0x69, 0x4e, 0x1e, 0xf7, 0x2a, 0x34, 0xc2, 0x51, 0xe7, 0x48, 0x19, 0xb0, /* Byte value: 0x97 */ + 0x47, 0x58, 0x7f, 0xca, 0xfc, 0xa8, 0x63, 0xdb, 0x30, 0x53, 0xd2, 0x91, 0x32, 0xb7, 0x26, 0xbc, /* Byte value: 0x98 */ + 0xa5, 0x72, 0xd4, 0x22, 0xde, 0x94, 0x0d, 0x78, 0xe0, 0xed, 0x52, 0x5e, 0x55, 0x43, 0x1d, 0xdc, /* Byte value: 0x99 */ + 0xaf, 0xaa, 0x7b, 0x5d, 0xbc, 0x69, 0x11, 0x6d, 0x3f, 0x2e, 0xa3, 0x98, 0x75, 0x6c, 0xf4, 0xe8, /* Byte value: 0x9a */ + 0xa8, 0xfe, 0xee, 0x4e, 0xf8, 0x11, 0xcd, 0xe8, 0x5c, 0x91, 0x60, 0xc5, 0xbb, 0x47, 0x82, 0x28, /* Byte value: 0x9b */ + 0x93, 0x3f, 0x1f, 0x9c, 0x33, 0x22, 0x59, 0x13, 0xb8, 0xe1, 0xc0, 0x49, 0xb5, 0x8e, 0xc7, 0x50, /* Byte value: 0x9c */ + 0xed, 0x9e, 0xb2, 0x49, 0x71, 0x5e, 0x7c, 0x5d, 0x81, 0xfd, 0xe8, 0x6a, 0x57, 0x2d, 0x47, 0x4e, /* Byte value: 0x9d */ + 0xc3, 0x30, 0x2e, 0xe2, 0xa5, 0xc6, 0xb9, 0xbb, 0x8f, 0x36, 0x44, 0xb6, 0x76, 0x35, 0x83, 0x33, /* Byte value: 0x9e */ + 0x2a, 0xde, 0xda, 0xf2, 0x3e, 0x95, 0xa2, 0x3a, 0x17, 0xb5, 0x18, 0xa0, 0x5e, 0x61, 0xc1, 0x0a, /* Byte value: 0x9f */ + 0x96, 0x53, 0xa9, 0x42, 0x02, 0xbd, 0x57, 0xf8, 0x36, 0x61, 0x59, 0x2a, 0xa5, 0x78, 0x52, 0x4a, /* Byte value: 0xa0 */ + 0x2c, 0x96, 0xbf, 0x66, 0xa1, 0x7f, 0x17, 0x88, 0xe3, 0xf4, 0xf6, 0xe2, 0xff, 0xc5, 0x27, 0xa7, /* Byte value: 0xa1 */ + 0x5d, 0x83, 0x0b, 0x12, 0xb0, 0x61, 0x20, 0x38, 0x8b, 0xab, 0xb6, 0x64, 0x2d, 0xbf, 0xdb, 0x97, /* Byte value: 0xa2 */ + 0xe2, 0x2a, 0xab, 0xe8, 0x22, 0x3c, 0x6e, 0xa3, 0xd0, 0xbe, 0x80, 0xcf, 0x67, 0xf4, 0x3b, 0x60, /* Byte value: 0xa3 */ + 0x7f, 0xbd, 0x5d, 0x52, 0x99, 0xee, 0x4c, 0x79, 0xae, 0xe2, 0x05, 0x3c, 0x8d, 0x2c, 0x10, 0x73, /* Byte value: 0xa4 */ + 0x7e, 0xa1, 0xad, 0xd5, 0x42, 0x7c, 0x25, 0x4e, 0x39, 0x1c, 0x28, 0x23, 0xe2, 0xa3, 0x80, 0x1e, /* Byte value: 0xa5 */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* Byte value: 0xa6 */ + 0x3e, 0xad, 0x47, 0x0c, 0xfa, 0xac, 0x9a, 0x10, 0x6a, 0xf0, 0x39, 0xef, 0x1e, 0x3f, 0xd0, 0x62, /* Byte value: 0xa7 */ + 0xc7, 0x40, 0x68, 0xbb, 0x4f, 0xcb, 0xde, 0x67, 0x96, 0x48, 0xf0, 0xca, 0x09, 0x4c, 0x86, 0x44, /* Byte value: 0xa8 */ + 0x05, 0x6c, 0xb6, 0xde, 0x31, 0x9f, 0x0e, 0xeb, 0x8e, 0x80, 0x99, 0x63, 0x10, 0xf6, 0x95, 0x1a, /* Byte value: 0xa9 */ + 0x3c, 0x95, 0x64, 0xc1, 0x8f, 0x4b, 0x48, 0x7e, 0x87, 0xcf, 0x63, 0xd1, 0xc0, 0xe2, 0x33, 0xb8, /* Byte value: 0xaa */ + 0x87, 0x4c, 0x82, 0x62, 0xf7, 0x1b, 0x61, 0x39, 0xc5, 0xa4, 0xe1, 0x06, 0xf5, 0xd0, 0xd6, 0x38, /* Byte value: 0xab */ + 0x67, 0x5e, 0x0a, 0x47, 0xa0, 0xc0, 0xdd, 0xf4, 0xf8, 0x25, 0x3b, 0xf7, 0x4c, 
0xf9, 0x0e, 0x82, /* Byte value: 0xac */ + 0x99, 0xe7, 0xb0, 0xe3, 0x51, 0xdf, 0x45, 0x06, 0x67, 0x22, 0x31, 0x8f, 0x95, 0xa1, 0x2e, 0x64, /* Byte value: 0xad */ + 0xab, 0xda, 0x3d, 0x04, 0x56, 0x64, 0x76, 0xb1, 0x26, 0x50, 0x17, 0xe4, 0x0a, 0x15, 0xf1, 0x9f, /* Byte value: 0xae */ + 0xf9, 0xed, 0x2f, 0xb7, 0xb5, 0x67, 0x44, 0x77, 0xfc, 0xb8, 0xc9, 0x25, 0x17, 0x73, 0x56, 0x26, /* Byte value: 0xaf */ + 0xd2, 0x2f, 0x05, 0xc2, 0x50, 0x60, 0x8f, 0x7a, 0x7c, 0xf3, 0xfc, 0x9a, 0x26, 0x9d, 0x07, 0x41, /* Byte value: 0xb0 */ + 0x06, 0x48, 0x65, 0x94, 0x9f, 0xea, 0xb5, 0xb2, 0xf4, 0x41, 0xee, 0x42, 0xa1, 0xa4, 0xe6, 0xad, /* Byte value: 0xb1 */ + 0xa9, 0xe2, 0x1e, 0xc9, 0x23, 0x83, 0xa4, 0xdf, 0xcb, 0x6f, 0x4d, 0xda, 0xd4, 0xc8, 0x12, 0x45, /* Byte value: 0xb2 */ + 0x1b, 0xc7, 0x84, 0x5f, 0x97, 0x5b, 0x2a, 0xd4, 0x2c, 0x06, 0x49, 0xea, 0x70, 0x87, 0x6d, 0x46, /* Byte value: 0xb3 */ + 0x48, 0xec, 0x66, 0x6b, 0xaf, 0xca, 0x71, 0x25, 0x61, 0x10, 0xba, 0x34, 0x02, 0x6e, 0x5a, 0x92, /* Byte value: 0xb4 */ + 0x97, 0x4f, 0x59, 0xc5, 0xd9, 0x2f, 0x3e, 0xcf, 0xa1, 0x9f, 0x74, 0x35, 0xca, 0xf7, 0xc2, 0x27, /* Byte value: 0xb5 */ + 0xc2, 0x2c, 0xde, 0x65, 0x7e, 0x54, 0xd0, 0x8c, 0x18, 0xc8, 0x69, 0xa9, 0x19, 0xba, 0x13, 0x5e, /* Byte value: 0xb6 */ + 0x86, 0x50, 0x72, 0xe5, 0x2c, 0x89, 0x08, 0x0e, 0x52, 0x5a, 0xcc, 0x19, 0x9a, 0x5f, 0x46, 0x55, /* Byte value: 0xb7 */ + 0x61, 0x16, 0x6f, 0xd3, 0x3f, 0x2a, 0x68, 0x46, 0x0c, 0x64, 0xd5, 0xb5, 0xed, 0x5d, 0xe8, 0x2f, /* Byte value: 0xb8 */ + 0x36, 0x4d, 0xcb, 0xbe, 0xed, 0xb6, 0x54, 0x6b, 0x58, 0x0c, 0x92, 0x17, 0xe0, 0xcd, 0xda, 0x8c, /* Byte value: 0xb9 */ + 0x57, 0x5b, 0xa4, 0x6d, 0xd2, 0x9c, 0x3c, 0x2d, 0x54, 0x68, 0x47, 0xa2, 0x0d, 0x90, 0x32, 0xa3, /* Byte value: 0xba */ + 0xf0, 0x11, 0x53, 0x82, 0x79, 0xef, 0xe3, 0x3b, 0x59, 0xba, 0x4f, 0xc2, 0x86, 0x0e, 0xcc, 0xa5, /* Byte value: 0xbb */ + 0x2e, 0xae, 0x9c, 0xab, 0xd4, 0x98, 0xc5, 0xe6, 0x0e, 0xcb, 0xac, 0xdc, 0x21, 0x18, 0xc4, 0x7d, /* Byte value: 0xbc */ + 0x7a, 0xd1, 0xeb, 0x8c, 0xa8, 0x71, 0x42, 0x92, 0x20, 0x62, 0x9c, 0x5f, 0x9d, 0xda, 0x85, 0x69, /* Byte value: 0xbd */ + 0x3d, 0x89, 0x94, 0x46, 0x54, 0xd9, 0x21, 0x49, 0x10, 0x31, 0x4e, 0xce, 0xaf, 0x6d, 0xa3, 0xd5, /* Byte value: 0xbe */ + 0x81, 0x04, 0xe7, 0xf6, 0x68, 0xf1, 0xd4, 0x8b, 0x31, 0xe5, 0x0f, 0x44, 0x54, 0x74, 0x30, 0x95, /* Byte value: 0xbf */ + 0x35, 0x69, 0x18, 0xf4, 0x43, 0xc3, 0xef, 0x32, 0x22, 0xcd, 0xe5, 0x36, 0x51, 0x9f, 0xa9, 0x3b, /* Byte value: 0xc0 */ + 0xc9, 0xe8, 0x81, 0x9d, 0xc7, 0x3b, 0xa5, 0xae, 0x50, 0xf5, 0xb5, 0x70, 0x56, 0x1a, 0x6a, 0x07, /* Byte value: 0xc1 */ + 0x65, 0x66, 0x29, 0x8a, 0xd5, 0x27, 0x0f, 0x9a, 0x15, 0x1a, 0x61, 0xc9, 0x92, 0x24, 0xed, 0x58, /* Byte value: 0xc2 */ + 0x78, 0xe9, 0xc8, 0x41, 0xdd, 0x96, 0x90, 0xfc, 0xcd, 0x5d, 0xc6, 0x61, 0x43, 0x07, 0x66, 0xb3, /* Byte value: 0xc3 */ + 0xbc, 0x8d, 0x73, 0xb0, 0x3c, 0x28, 0xf5, 0xc2, 0x21, 0xd4, 0x41, 0x8a, 0xfb, 0x19, 0x93, 0x40, /* Byte value: 0xc4 */ + 0x1c, 0x93, 0x11, 0x4c, 0xd3, 0x23, 0xf6, 0x51, 0x4f, 0xb9, 0x8a, 0xb7, 0xbe, 0xac, 0x1b, 0x86, /* Byte value: 0xc5 */ + 0x68, 0xea, 0x13, 0xe6, 0xf3, 0xa2, 0xcf, 0x0a, 0xa9, 0x66, 0x53, 0x52, 0x7c, 0x20, 0x72, 0xac, /* Byte value: 0xc6 */ + 0xcc, 0x84, 0x37, 0x43, 0xf6, 0xa4, 0xab, 0x45, 0xde, 0x75, 0x2c, 0x13, 0x46, 0xec, 0xff, 0x1d, /* Byte value: 0xc7 */ + 0xbf, 0xa9, 0xa0, 0xfa, 0x92, 0x5d, 0x4e, 0x9b, 0x5b, 0x15, 0x36, 0xab, 0x4a, 0x4b, 0xe0, 0xf7, /* Byte value: 0xc8 */ + 0x71, 0x15, 0xb4, 0x74, 0x11, 0x1e, 0x37, 0xb0, 0x68, 0x5f, 0x40, 0x86, 0xd2, 0x7a, 0xfc, 0x30, /* Byte value: 0xc9 */ + 
0xb8, 0xfd, 0x35, 0xe9, 0xd6, 0x25, 0x92, 0x1e, 0x38, 0xaa, 0xf5, 0xf6, 0x84, 0x60, 0x96, 0x37, /* Byte value: 0xca */ + 0xa6, 0x56, 0x07, 0x68, 0x70, 0xe1, 0xb6, 0x21, 0x9a, 0x2c, 0x25, 0x7f, 0xe4, 0x11, 0x6e, 0x6b, /* Byte value: 0xcb */ + 0x22, 0x3e, 0x56, 0x40, 0x29, 0x8f, 0x6c, 0x41, 0x25, 0x49, 0xb3, 0x58, 0xa0, 0x93, 0xcb, 0xe4, /* Byte value: 0xcc */ + 0x4d, 0x80, 0xd0, 0xb5, 0x9e, 0x55, 0x7f, 0xce, 0xef, 0x90, 0x23, 0x57, 0x12, 0x98, 0xcf, 0x88, /* Byte value: 0xcd */ + 0xb0, 0x1d, 0xb9, 0x5b, 0xc1, 0x3f, 0x5c, 0x65, 0x0a, 0x56, 0x5e, 0x0e, 0x7a, 0x92, 0x9c, 0xd9, /* Byte value: 0xce */ + 0x90, 0x1b, 0xcc, 0xd6, 0x9d, 0x57, 0xe2, 0x4a, 0xc2, 0x20, 0xb7, 0x68, 0x04, 0xdc, 0xb4, 0xe7, /* Byte value: 0xcf */ + 0x23, 0x22, 0xa6, 0xc7, 0xf2, 0x1d, 0x05, 0x76, 0xb2, 0xb7, 0x9e, 0x47, 0xcf, 0x1c, 0x5b, 0x89, /* Byte value: 0xd0 */ + 0x37, 0x51, 0x3b, 0x39, 0x36, 0x24, 0x3d, 0x5c, 0xcf, 0xf2, 0xbf, 0x08, 0x8f, 0x42, 0x4a, 0xe1, /* Byte value: 0xd1 */ + 0x6b, 0xce, 0xc0, 0xac, 0x5d, 0xd7, 0x74, 0x53, 0xd3, 0xa7, 0x24, 0x73, 0xcd, 0x72, 0x01, 0x1b, /* Byte value: 0xd2 */ + 0x09, 0xfc, 0x7c, 0x35, 0xcc, 0x88, 0xa7, 0x4c, 0xa5, 0x02, 0x86, 0xe7, 0x91, 0x7d, 0x9a, 0x83, /* Byte value: 0xd3 */ + 0x24, 0x76, 0x33, 0xd4, 0xb6, 0x65, 0xd9, 0xf3, 0xd1, 0x08, 0x5d, 0x1a, 0x01, 0x37, 0x2d, 0x49, /* Byte value: 0xd4 */ + 0x31, 0x19, 0x5e, 0xad, 0xa9, 0xce, 0x88, 0xee, 0x3b, 0xb3, 0x51, 0x4a, 0x2e, 0xe6, 0xac, 0x4c, /* Byte value: 0xd5 */ + 0x4e, 0xa4, 0x03, 0xff, 0x30, 0x20, 0xc4, 0x97, 0x95, 0x51, 0x54, 0x76, 0xa3, 0xca, 0xbc, 0x3f, /* Byte value: 0xd6 */ + 0x80, 0x18, 0x17, 0x71, 0xb3, 0x63, 0xbd, 0xbc, 0xa6, 0x1b, 0x22, 0x5b, 0x3b, 0xfb, 0xa0, 0xf8, /* Byte value: 0xd7 */ + 0x27, 0x52, 0xe0, 0x9e, 0x18, 0x10, 0x62, 0xaa, 0xab, 0xc9, 0x2a, 0x3b, 0xb0, 0x65, 0x5e, 0xfe, /* Byte value: 0xd8 */ + 0x58, 0xef, 0xbd, 0xcc, 0x81, 0xfe, 0x2e, 0xd3, 0x05, 0x2b, 0x2f, 0x07, 0x3d, 0x49, 0x4e, 0x8d, /* Byte value: 0xd9 */ + 0x21, 0x1a, 0x85, 0x0a, 0x87, 0xfa, 0xd7, 0x18, 0x5f, 0x88, 0xc4, 0x79, 0x11, 0xc1, 0xb8, 0x53, /* Byte value: 0xda */ + 0xcf, 0xa0, 0xe4, 0x09, 0x58, 0xd1, 0x10, 0x1c, 0xa4, 0xb4, 0x5b, 0x32, 0xf7, 0xbe, 0x8c, 0xaa, /* Byte value: 0xdb */ + 0x94, 0x6b, 0x8a, 0x8f, 0x77, 0x5a, 0x85, 0x96, 0xdb, 0x5e, 0x03, 0x14, 0x7b, 0xa5, 0xb1, 0x90, /* Byte value: 0xdc */ + 0x2d, 0x8a, 0x4f, 0xe1, 0x7a, 0xed, 0x7e, 0xbf, 0x74, 0x0a, 0xdb, 0xfd, 0x90, 0x4a, 0xb7, 0xca, /* Byte value: 0xdd */ + 0xde, 0xbf, 0xcf, 0x29, 0xad, 0x77, 0x26, 0xdd, 0x57, 0x71, 0xe3, 0x1e, 0xa7, 0x16, 0x08, 0xd8, /* Byte value: 0xde */ + 0xae, 0xb6, 0x8b, 0xda, 0x67, 0xfb, 0x78, 0x5a, 0xa8, 0xd0, 0x8e, 0x87, 0x1a, 0xe3, 0x64, 0x85, /* Byte value: 0xdf */ + 0x8c, 0x88, 0xdd, 0x9a, 0x4e, 0x74, 0x14, 0x1b, 0x8d, 0x99, 0x3d, 0xdf, 0xba, 0x70, 0xaf, 0x61, /* Byte value: 0xe0 */ + 0x5e, 0xa7, 0xd8, 0x58, 0x1e, 0x14, 0x9b, 0x61, 0xf1, 0x6a, 0xc1, 0x45, 0x9c, 0xed, 0xa8, 0x20, /* Byte value: 0xe1 */ + 0xa3, 0x3a, 0xb1, 0xb6, 0x41, 0x7e, 0xb8, 0xca, 0x14, 0xac, 0xbc, 0x1c, 0xf4, 0xe7, 0xfb, 0x71, /* Byte value: 0xe2 */ + 0x2b, 0xc2, 0x2a, 0x75, 0xe5, 0x07, 0xcb, 0x0d, 0x80, 0x4b, 0x35, 0xbf, 0x31, 0xee, 0x51, 0x67, /* Byte value: 0xe3 */ + 0x9e, 0xb3, 0x25, 0xf0, 0x15, 0xa7, 0x99, 0x83, 0x04, 0x9d, 0xf2, 0xd2, 0x5b, 0x8a, 0x58, 0xa4, /* Byte value: 0xe4 */ + 0xbd, 0x91, 0x83, 0x37, 0xe7, 0xba, 0x9c, 0xf5, 0xb6, 0x2a, 0x6c, 0x95, 0x94, 0x96, 0x03, 0x2d, /* Byte value: 0xe5 */ + 0x1a, 0xdb, 0x74, 0xd8, 0x4c, 0xc9, 0x43, 0xe3, 0xbb, 0xf8, 0x64, 0xf5, 0x1f, 0x08, 0xfd, 0x2b, /* Byte value: 0xe6 */ + 0x98, 0xfb, 0x40, 0x64, 0x8a, 0x4d, 0x2c, 
0x31, 0xf0, 0xdc, 0x1c, 0x90, 0xfa, 0x2e, 0xbe, 0x09, /* Byte value: 0xe7 */ + 0xd7, 0x43, 0xb3, 0x1c, 0x61, 0xff, 0x81, 0x91, 0xf2, 0x73, 0x65, 0xf9, 0x36, 0x6b, 0x92, 0x5b, /* Byte value: 0xe8 */ + 0x43, 0x28, 0x39, 0x93, 0x16, 0xa5, 0x04, 0x07, 0x29, 0x2d, 0x66, 0xed, 0x4d, 0xce, 0x23, 0xcb, /* Byte value: 0xe9 */ + 0xd0, 0x17, 0x26, 0x0f, 0x25, 0x87, 0x5d, 0x14, 0x91, 0xcc, 0xa6, 0xa4, 0xf8, 0x40, 0xe4, 0x9b, /* Byte value: 0xea */ + 0x4b, 0xc8, 0xb5, 0x21, 0x01, 0xbf, 0xca, 0x7c, 0x1b, 0xd1, 0xcd, 0x15, 0xb3, 0x3c, 0x29, 0x25, /* Byte value: 0xeb */ + 0x28, 0xe6, 0xf9, 0x3f, 0x4b, 0x72, 0x70, 0x54, 0xfa, 0x8a, 0x42, 0x9e, 0x80, 0xbc, 0x22, 0xd0, /* Byte value: 0xec */ + 0x9b, 0xdf, 0x93, 0x2e, 0x24, 0x38, 0x97, 0x68, 0x8a, 0x1d, 0x6b, 0xb1, 0x4b, 0x7c, 0xcd, 0xbe, /* Byte value: 0xed */ + 0x4c, 0x9c, 0x20, 0x32, 0x45, 0xc7, 0x16, 0xf9, 0x78, 0x6e, 0x0e, 0x48, 0x7d, 0x17, 0x5f, 0xe5, /* Byte value: 0xee */ + 0x6f, 0xbe, 0x86, 0xf5, 0xb7, 0xda, 0x13, 0x8f, 0xca, 0xd9, 0x90, 0x0f, 0xb2, 0x0b, 0x04, 0x6c, /* Byte value: 0xef */ + 0x4f, 0xb8, 0xf3, 0x78, 0xeb, 0xb2, 0xad, 0xa0, 0x02, 0xaf, 0x79, 0x69, 0xcc, 0x45, 0x2c, 0x52, /* Byte value: 0xf0 */ + 0x0b, 0xc4, 0x5f, 0xf8, 0xb9, 0x6f, 0x75, 0x22, 0x48, 0x3d, 0xdc, 0xd9, 0x4f, 0xa0, 0x79, 0x59, /* Byte value: 0xf1 */ + 0x42, 0x34, 0xc9, 0x14, 0xcd, 0x37, 0x6d, 0x30, 0xbe, 0xd3, 0x4b, 0xf2, 0x22, 0x41, 0xb3, 0xa6, /* Byte value: 0xf2 */ + 0xb6, 0x55, 0xdc, 0xcf, 0x5e, 0xd5, 0xe9, 0xd7, 0xfe, 0x17, 0xb0, 0x4c, 0xdb, 0x36, 0x7a, 0x74, /* Byte value: 0xf3 */ + 0xf4, 0x61, 0x15, 0xdb, 0x93, 0xe2, 0x84, 0xe7, 0x40, 0xc4, 0xfb, 0xbe, 0xf9, 0x77, 0xc9, 0xd2, /* Byte value: 0xf4 */ + 0xfe, 0xb9, 0xba, 0xa4, 0xf1, 0x1f, 0x98, 0xf2, 0x9f, 0x07, 0x0a, 0x78, 0xd9, 0x58, 0x20, 0xe6, /* Byte value: 0xf5 */ + 0x0d, 0x8c, 0x3a, 0x6c, 0x26, 0x85, 0xc0, 0x90, 0xbc, 0x7c, 0x32, 0x9b, 0xee, 0x04, 0x9f, 0xf4, /* Byte value: 0xf6 */ + 0xb1, 0x01, 0x49, 0xdc, 0x1a, 0xad, 0x35, 0x52, 0x9d, 0xa8, 0x73, 0x11, 0x15, 0x1d, 0x0c, 0xb4, /* Byte value: 0xf7 */ + 0x07, 0x54, 0x95, 0x13, 0x44, 0x78, 0xdc, 0x85, 0x63, 0xbf, 0xc3, 0x5d, 0xce, 0x2b, 0x76, 0xc0, /* Byte value: 0xf8 */ + 0x46, 0x44, 0x8f, 0x4d, 0x27, 0x3a, 0x0a, 0xec, 0xa7, 0xad, 0xff, 0x8e, 0x5d, 0x38, 0xb6, 0xd1, /* Byte value: 0xf9 */ + 0x45, 0x60, 0x5c, 0x07, 0x89, 0x4f, 0xb1, 0xb5, 0xdd, 0x6c, 0x88, 0xaf, 0xec, 0x6a, 0xc5, 0x66, /* Byte value: 0xfa */ + 0xda, 0xcf, 0x89, 0x70, 0x47, 0x7a, 0x41, 0x01, 0x4e, 0x0f, 0x57, 0x62, 0xd8, 0x6f, 0x0d, 0xaf, /* Byte value: 0xfb */ + 0xdb, 0xd3, 0x79, 0xf7, 0x9c, 0xe8, 0x28, 0x36, 0xd9, 0xf1, 0x7a, 0x7d, 0xb7, 0xe0, 0x9d, 0xc2, /* Byte value: 0xfc */ + 0xe9, 0xee, 0xf4, 0x10, 0x9b, 0x53, 0x1b, 0x81, 0x98, 0x83, 0x5c, 0x16, 0x28, 0x54, 0x42, 0x39, /* Byte value: 0xfd */ + 0xf8, 0xf1, 0xdf, 0x30, 0x6e, 0xf5, 0x2d, 0x40, 0x6b, 0x46, 0xe4, 0x3a, 0x78, 0xfc, 0xc6, 0x4b, /* Byte value: 0xfe */ + 0x50, 0x0f, 0x31, 0x7e, 0x96, 0xe4, 0xe0, 0xa8, 0x37, 0xd7, 0x84, 0xff, 0xc3, 0xbb, 0x44, 0x63, /* Byte value: 0xff */ + 0x6d, 0x86, 0xa5, 0x38, 0xc2, 0x3d, 0xc1, 0xe1, 0x27, 0xe6, 0xca, 0x31, 0x6c, 0xd6, 0xe7, 0xb6, /* Byte value: 0x00 */ +}; + + +#define applySTransformation(block) { \ + _mm_storeu_si128(&(temporary_.asXMMWord), block); \ + for (byteIndex_ = 0; byteIndex_ < BlockLengthInBytes; ++byteIndex_) { \ + temporary_.asBytes[byteIndex_] = Pi[temporary_.asBytes[byteIndex_]]; \ + } \ + block = _mm_loadu_si128(&temporary_.asXMMWord); \ + } + +#define applySTransformation_2(block1,block2) { \ + _mm_storeu_si128(&(temporary1_.asXMMWord), block1); \ + 
_mm_storeu_si128(&(temporary2_.asXMMWord), block2); \ + for (byteIndex_ = 0; byteIndex_ < BlockLengthInBytes; ++byteIndex_) { \ + temporary1_.asBytes[byteIndex_] = Pi[temporary1_.asBytes[byteIndex_]]; \ + temporary2_.asBytes[byteIndex_] = Pi[temporary2_.asBytes[byteIndex_]]; \ + } \ + block1 = _mm_loadu_si128(&temporary1_.asXMMWord); \ + block2 = _mm_loadu_si128(&temporary2_.asXMMWord); \ + } + +#define applySTransformation_3(block1,block2,block3) { \ + _mm_storeu_si128(&(temporary1_.asXMMWord), block1); \ + _mm_storeu_si128(&(temporary2_.asXMMWord), block2); \ + _mm_storeu_si128(&(temporary3_.asXMMWord), block3); \ + for (byteIndex_ = 0; byteIndex_ < BlockLengthInBytes; ++byteIndex_) { \ + temporary1_.asBytes[byteIndex_] = Pi[temporary1_.asBytes[byteIndex_]]; \ + temporary2_.asBytes[byteIndex_] = Pi[temporary2_.asBytes[byteIndex_]]; \ + temporary3_.asBytes[byteIndex_] = Pi[temporary3_.asBytes[byteIndex_]]; \ + } \ + block1 = _mm_loadu_si128(&temporary1_.asXMMWord); \ + block2 = _mm_loadu_si128(&temporary2_.asXMMWord); \ + block3 = _mm_loadu_si128(&temporary3_.asXMMWord); \ + } + +#define applySTransformation_4(block1,block2,block3,block4) { \ + _mm_storeu_si128(&(temporary1_.asXMMWord), block1); \ + _mm_storeu_si128(&(temporary2_.asXMMWord), block2); \ + _mm_storeu_si128(&(temporary3_.asXMMWord), block3); \ + _mm_storeu_si128(&(temporary4_.asXMMWord), block4); \ + for (byteIndex_ = 0; byteIndex_ < BlockLengthInBytes; ++byteIndex_) { \ + temporary1_.asBytes[byteIndex_] = Pi[temporary1_.asBytes[byteIndex_]]; \ + temporary2_.asBytes[byteIndex_] = Pi[temporary2_.asBytes[byteIndex_]]; \ + temporary3_.asBytes[byteIndex_] = Pi[temporary3_.asBytes[byteIndex_]]; \ + temporary4_.asBytes[byteIndex_] = Pi[temporary4_.asBytes[byteIndex_]]; \ + } \ + block1 = _mm_loadu_si128(&temporary1_.asXMMWord); \ + block2 = _mm_loadu_si128(&temporary2_.asXMMWord); \ + block3 = _mm_loadu_si128(&temporary3_.asXMMWord); \ + block4 = _mm_loadu_si128(&temporary4_.asXMMWord); \ + } + +#define applyInversedSTransformation(block) { \ + _mm_storeu_si128(&temporary_.asXMMWord, block); \ + for (byteIndex_ = 0; byteIndex_ < BlockLengthInBytes; ++byteIndex_) { \ + temporary_.asBytes[byteIndex_] = InversedPi[temporary_.asBytes[byteIndex_]]; \ + } \ + block = _mm_loadu_si128(&temporary_.asXMMWord); \ + } + +#define applyInversedSTransformation_2(block1,block2) { \ + _mm_storeu_si128(&temporary1_.asXMMWord, block1); \ + _mm_storeu_si128(&temporary2_.asXMMWord, block2); \ + for (byteIndex_ = 0; byteIndex_ < BlockLengthInBytes; ++byteIndex_) { \ + temporary1_.asBytes[byteIndex_] = InversedPi[temporary1_.asBytes[byteIndex_]]; \ + temporary2_.asBytes[byteIndex_] = InversedPi[temporary2_.asBytes[byteIndex_]]; \ + } \ + block1 = _mm_loadu_si128(&temporary1_.asXMMWord); \ + block2 = _mm_loadu_si128(&temporary2_.asXMMWord); \ + } + +#define applyInversedSTransformation_3(block1,block2,block3) { \ + _mm_storeu_si128(&temporary1_.asXMMWord, block1); \ + _mm_storeu_si128(&temporary2_.asXMMWord, block2); \ + _mm_storeu_si128(&temporary3_.asXMMWord, block3); \ + for (byteIndex_ = 0; byteIndex_ < BlockLengthInBytes; ++byteIndex_) { \ + temporary1_.asBytes[byteIndex_] = InversedPi[temporary1_.asBytes[byteIndex_]]; \ + temporary2_.asBytes[byteIndex_] = InversedPi[temporary2_.asBytes[byteIndex_]]; \ + temporary3_.asBytes[byteIndex_] = InversedPi[temporary3_.asBytes[byteIndex_]]; \ + } \ + block1 = _mm_loadu_si128(&temporary1_.asXMMWord); \ + block2 = _mm_loadu_si128(&temporary2_.asXMMWord); \ + block3 = _mm_loadu_si128(&temporary3_.asXMMWord); \ 
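+		/* Each block is spilled to a byte-addressable union, substituted through the InversedPi lookup table one byte at a time, and reloaded into an XMM register, mirroring the forward S-transformation macros above. */ \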
+ } + +#define applyInversedSTransformation_4(block1,block2,block3,block4) { \ + _mm_storeu_si128(&temporary1_.asXMMWord, block1); \ + _mm_storeu_si128(&temporary2_.asXMMWord, block2); \ + _mm_storeu_si128(&temporary3_.asXMMWord, block3); \ + _mm_storeu_si128(&temporary4_.asXMMWord, block4); \ + for (byteIndex_ = 0; byteIndex_ < BlockLengthInBytes; ++byteIndex_) { \ + temporary1_.asBytes[byteIndex_] = InversedPi[temporary1_.asBytes[byteIndex_]]; \ + temporary2_.asBytes[byteIndex_] = InversedPi[temporary2_.asBytes[byteIndex_]]; \ + temporary3_.asBytes[byteIndex_] = InversedPi[temporary3_.asBytes[byteIndex_]]; \ + temporary4_.asBytes[byteIndex_] = InversedPi[temporary4_.asBytes[byteIndex_]]; \ + } \ + block1 = _mm_loadu_si128(&temporary1_.asXMMWord); \ + block2 = _mm_loadu_si128(&temporary2_.asXMMWord); \ + block3 = _mm_loadu_si128(&temporary3_.asXMMWord); \ + block4 = _mm_loadu_si128(&temporary4_.asXMMWord); \ + } + +#define swapBlocks(left, right) { \ + left = _mm_xor_si128(left, right); \ + right = _mm_xor_si128(left, right); \ + left = _mm_xor_si128(left, right); \ + } + +#define applyLSTransformation(input) { \ + addresses1_ = _mm_and_si128(*(const __m128i *) bitmask, input); \ + addresses2_ = _mm_andnot_si128(*(const __m128i *) bitmask, input); \ + \ + addresses1_ = _mm_srli_epi16(addresses1_, 4); \ + addresses2_ = _mm_slli_epi16(addresses2_, 4); \ + \ + temporary1_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses1_, 0) + 0x1000)); \ + temporary2_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses2_, 0))); \ + \ + temporary1_ = _mm_xor_si128(temporary1_, *(const __m128i *) (precomputedLSTable+ _mm_extract_epi16(addresses1_, 1)+ 0x3000)); \ + temporary2_ = _mm_xor_si128(temporary2_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses2_, 1) + 0x2000)); \ + \ + temporary1_ = _mm_xor_si128(temporary1_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses1_, 2) + 0x5000)); \ + temporary2_ = _mm_xor_si128(temporary2_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses2_, 2) + 0x4000)); \ + \ + temporary1_ = _mm_xor_si128(temporary1_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses1_, 3) + 0x7000)); \ + temporary2_ = _mm_xor_si128(temporary2_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses2_, 3) + 0x6000)); \ + \ + temporary1_ = _mm_xor_si128(temporary1_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses1_, 4) + 0x9000)); \ + temporary2_ = _mm_xor_si128(temporary2_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses2_, 4) + 0x8000)); \ + \ + temporary1_ = _mm_xor_si128(temporary1_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses1_, 5) + 0xB000)); \ + temporary2_ = _mm_xor_si128(temporary2_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses2_, 5) + 0xA000)); \ + \ + temporary1_ = _mm_xor_si128(temporary1_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses1_, 6) + 0xD000)); \ + temporary2_ = _mm_xor_si128(temporary2_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses2_, 6) + 0xC000)); \ + \ + temporary1_ = _mm_xor_si128(temporary1_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses1_, 7) + 0xF000)); \ + temporary2_ = _mm_xor_si128(temporary2_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses2_, 7) + 0xE000)); \ + \ + input = _mm_xor_si128(temporary1_, temporary2_); \ + } + +#define 
applyLSTransformation_2(input1,input2) { \ + addresses11_ = _mm_and_si128(*(const __m128i *) bitmask, input1); \ + addresses21_ = _mm_and_si128(*(const __m128i *) bitmask, input2); \ + addresses12_ = _mm_andnot_si128(*(const __m128i *) bitmask, input1); \ + addresses22_ = _mm_andnot_si128(*(const __m128i *) bitmask, input2); \ + \ + addresses11_ = _mm_srli_epi16(addresses11_, 4); \ + addresses12_ = _mm_slli_epi16(addresses12_, 4); \ + addresses21_ = _mm_srli_epi16(addresses21_, 4); \ + addresses22_ = _mm_slli_epi16(addresses22_, 4); \ + \ + temporary11_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 0) + 0x1000)); \ + temporary21_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 0) + 0x1000)); \ + temporary12_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 0))); \ + temporary22_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 0))); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable+ _mm_extract_epi16(addresses11_, 1)+ 0x3000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 1) + 0x2000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable+ _mm_extract_epi16(addresses21_, 1)+ 0x3000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 1) + 0x2000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 2) + 0x5000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 2) + 0x4000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 2) + 0x5000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 2) + 0x4000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 3) + 0x7000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 3) + 0x6000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 3) + 0x7000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 3) + 0x6000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 4) + 0x9000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 4) + 0x8000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 4) + 0x9000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 4) + 0x8000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 5) + 0xB000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 5) + 0xA000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + 
_mm_extract_epi16(addresses21_, 5) + 0xB000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 5) + 0xA000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 6) + 0xD000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 6) + 0xC000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 6) + 0xD000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 6) + 0xC000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 7) + 0xF000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 7) + 0xE000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 7) + 0xF000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 7) + 0xE000)); \ + \ + input1 = _mm_xor_si128(temporary11_, temporary12_); \ + input2 = _mm_xor_si128(temporary21_, temporary22_); \ + } + +#define applyLSTransformation_3(input1,input2,input3) { \ + addresses11_ = _mm_and_si128(*(const __m128i *) bitmask, input1); \ + addresses21_ = _mm_and_si128(*(const __m128i *) bitmask, input2); \ + addresses31_ = _mm_and_si128(*(const __m128i *) bitmask, input3); \ + addresses12_ = _mm_andnot_si128(*(const __m128i *) bitmask, input1); \ + addresses22_ = _mm_andnot_si128(*(const __m128i *) bitmask, input2); \ + addresses32_ = _mm_andnot_si128(*(const __m128i *) bitmask, input3); \ + \ + addresses11_ = _mm_srli_epi16(addresses11_, 4); \ + addresses12_ = _mm_slli_epi16(addresses12_, 4); \ + addresses21_ = _mm_srli_epi16(addresses21_, 4); \ + addresses22_ = _mm_slli_epi16(addresses22_, 4); \ + addresses31_ = _mm_srli_epi16(addresses31_, 4); \ + addresses32_ = _mm_slli_epi16(addresses32_, 4); \ + \ + temporary11_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 0) + 0x1000)); \ + temporary21_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 0) + 0x1000)); \ + temporary31_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 0) + 0x1000)); \ + temporary12_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 0))); \ + temporary22_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 0))); \ + temporary32_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 0))); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable+ _mm_extract_epi16(addresses11_, 1)+ 0x3000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 1) + 0x2000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable+ _mm_extract_epi16(addresses21_, 1)+ 0x3000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 1) + 0x2000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable+ _mm_extract_epi16(addresses31_, 1)+ 0x3000)); 
\ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 1) + 0x2000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 2) + 0x5000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 2) + 0x4000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 2) + 0x5000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 2) + 0x4000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 2) + 0x5000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 2) + 0x4000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 3) + 0x7000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 3) + 0x6000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 3) + 0x7000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 3) + 0x6000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 3) + 0x7000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 3) + 0x6000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 4) + 0x9000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 4) + 0x8000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 4) + 0x9000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 4) + 0x8000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 4) + 0x9000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 4) + 0x8000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 5) + 0xB000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 5) + 0xA000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 5) + 0xB000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 5) + 0xA000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 5) + 0xB000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 5) + 0xA000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 6) + 0xD000)); \ + temporary12_ = 
_mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 6) + 0xC000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 6) + 0xD000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 6) + 0xC000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 6) + 0xD000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 6) + 0xC000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 7) + 0xF000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 7) + 0xE000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 7) + 0xF000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 7) + 0xE000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 7) + 0xF000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 7) + 0xE000)); \ + \ + input1 = _mm_xor_si128(temporary11_, temporary12_); \ + input2 = _mm_xor_si128(temporary21_, temporary22_); \ + input3 = _mm_xor_si128(temporary31_, temporary32_); \ + } + +#define applyLSTransformation_4(input1,input2,input3,input4) { \ + addresses11_ = _mm_and_si128(*(const __m128i *) bitmask, input1); \ + addresses21_ = _mm_and_si128(*(const __m128i *) bitmask, input2); \ + addresses31_ = _mm_and_si128(*(const __m128i *) bitmask, input3); \ + addresses41_ = _mm_and_si128(*(const __m128i *) bitmask, input4); \ + addresses12_ = _mm_andnot_si128(*(const __m128i *) bitmask, input1); \ + addresses22_ = _mm_andnot_si128(*(const __m128i *) bitmask, input2); \ + addresses32_ = _mm_andnot_si128(*(const __m128i *) bitmask, input3); \ + addresses42_ = _mm_andnot_si128(*(const __m128i *) bitmask, input4); \ + \ + addresses11_ = _mm_srli_epi16(addresses11_, 4); \ + addresses12_ = _mm_slli_epi16(addresses12_, 4); \ + addresses21_ = _mm_srli_epi16(addresses21_, 4); \ + addresses22_ = _mm_slli_epi16(addresses22_, 4); \ + addresses31_ = _mm_srli_epi16(addresses31_, 4); \ + addresses32_ = _mm_slli_epi16(addresses32_, 4); \ + addresses41_ = _mm_srli_epi16(addresses41_, 4); \ + addresses42_ = _mm_slli_epi16(addresses42_, 4); \ + \ + temporary11_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 0) + 0x1000)); \ + temporary21_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 0) + 0x1000)); \ + temporary31_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 0) + 0x1000)); \ + temporary41_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses41_, 0) + 0x1000)); \ + temporary12_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 0))); \ + temporary22_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 0))); \ + temporary32_ = _mm_load_si128((const void *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 0))); \ + temporary42_ = _mm_load_si128((const void 
*) (precomputedLSTable + _mm_extract_epi16(addresses42_, 0))); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable+ _mm_extract_epi16(addresses11_, 1)+ 0x3000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable+ _mm_extract_epi16(addresses21_, 1)+ 0x3000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable+ _mm_extract_epi16(addresses31_, 1)+ 0x3000)); \ + temporary41_ = _mm_xor_si128(temporary41_, *(const __m128i *) (precomputedLSTable+ _mm_extract_epi16(addresses41_, 1)+ 0x3000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 1) + 0x2000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 1) + 0x2000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 1) + 0x2000)); \ + temporary42_ = _mm_xor_si128(temporary42_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses42_, 1) + 0x2000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 2) + 0x5000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 2) + 0x5000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 2) + 0x5000)); \ + temporary41_ = _mm_xor_si128(temporary41_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses41_, 2) + 0x5000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 2) + 0x4000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 2) + 0x4000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 2) + 0x4000)); \ + temporary42_ = _mm_xor_si128(temporary42_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses42_, 2) + 0x4000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 3) + 0x7000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 3) + 0x7000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 3) + 0x7000)); \ + temporary41_ = _mm_xor_si128(temporary41_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses41_, 3) + 0x7000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 3) + 0x6000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 3) + 0x6000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 3) + 0x6000)); \ + temporary42_ = _mm_xor_si128(temporary42_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses42_, 3) + 0x6000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 4) + 0x9000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + 
_mm_extract_epi16(addresses21_, 4) + 0x9000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 4) + 0x9000)); \ + temporary41_ = _mm_xor_si128(temporary41_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses41_, 4) + 0x9000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 4) + 0x8000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 4) + 0x8000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 4) + 0x8000)); \ + temporary42_ = _mm_xor_si128(temporary42_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses42_, 4) + 0x8000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 5) + 0xB000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 5) + 0xB000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 5) + 0xB000)); \ + temporary41_ = _mm_xor_si128(temporary41_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses41_, 5) + 0xB000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 5) + 0xA000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 5) + 0xA000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 5) + 0xA000)); \ + temporary42_ = _mm_xor_si128(temporary42_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses42_, 5) + 0xA000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 6) + 0xD000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 6) + 0xD000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 6) + 0xD000)); \ + temporary41_ = _mm_xor_si128(temporary41_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses41_, 6) + 0xD000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 6) + 0xC000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 6) + 0xC000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 6) + 0xC000)); \ + temporary42_ = _mm_xor_si128(temporary42_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses42_, 6) + 0xC000)); \ + \ + temporary11_ = _mm_xor_si128(temporary11_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses11_, 7) + 0xF000)); \ + temporary21_ = _mm_xor_si128(temporary21_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses21_, 7) + 0xF000)); \ + temporary31_ = _mm_xor_si128(temporary31_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses31_, 7) + 0xF000)); \ + temporary41_ = _mm_xor_si128(temporary41_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses41_, 
7) + 0xF000)); \ + temporary12_ = _mm_xor_si128(temporary12_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses12_, 7) + 0xE000)); \ + temporary22_ = _mm_xor_si128(temporary22_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses22_, 7) + 0xE000)); \ + temporary32_ = _mm_xor_si128(temporary32_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses32_, 7) + 0xE000)); \ + temporary42_ = _mm_xor_si128(temporary42_, *(const __m128i *) (precomputedLSTable + _mm_extract_epi16(addresses42_, 7) + 0xE000)); \ + \ + input1 = _mm_xor_si128(temporary11_, temporary12_); \ + input2 = _mm_xor_si128(temporary21_, temporary22_); \ + input3 = _mm_xor_si128(temporary31_, temporary32_); \ + input4 = _mm_xor_si128(temporary41_, temporary42_); \ + } + + +#define applyInversedLSTransformation(input) { \ + addresses1_ = _mm_and_si128(*(const __m128i *) bitmask, input); \ + addresses2_ = _mm_andnot_si128(*(const __m128i *) bitmask, input); \ + \ + addresses1_ = _mm_srli_epi16(addresses1_, 4); \ + addresses2_ = _mm_slli_epi16(addresses2_, 4); \ + \ + cache1_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses1_, 0) + 0x1000)); \ + cache2_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses2_, 0))); \ + \ + cache1_ = _mm_xor_si128(cache1_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses1_, 1) + 0x3000)); \ + cache2_ = _mm_xor_si128(cache2_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses2_, 1) + 0x2000)); \ + \ + cache1_ = _mm_xor_si128(cache1_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses1_, 2) + 0x5000)); \ + cache2_ = _mm_xor_si128(cache2_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses2_, 2) + 0x4000)); \ + \ + cache1_ = _mm_xor_si128(cache1_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses1_, 3) + 0x7000)); \ + cache2_ = _mm_xor_si128(cache2_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses2_, 3) + 0x6000)); \ + \ + cache1_ = _mm_xor_si128(cache1_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses1_, 4) + 0x9000)); \ + cache2_ = _mm_xor_si128(cache2_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses2_, 4) + 0x8000)); \ + \ + cache1_ = _mm_xor_si128(cache1_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses1_, 5) + 0xB000)); \ + cache2_ = _mm_xor_si128(cache2_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses2_, 5) + 0xA000)); \ + \ + cache1_ = _mm_xor_si128(cache1_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses1_, 6) + 0xD000)); \ + cache2_ = _mm_xor_si128(cache2_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses2_, 6) + 0xC000)); \ + \ + cache1_ = _mm_xor_si128(cache1_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses1_, 7) + 0xF000)); \ + cache2_ = _mm_xor_si128(cache2_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses2_, 7) + 0xE000)); \ + \ + input = _mm_xor_si128(cache1_, cache2_); \ + } + +#define applyInversedLSTransformation_2(input1, input2) { \ + addresses11_ = _mm_and_si128(*(const __m128i *) bitmask, input1); \ + addresses21_ = _mm_and_si128(*(const __m128i *) bitmask, input2); \ + addresses12_ = _mm_andnot_si128(*(const __m128i *) bitmask, input1); \ + addresses22_ = 
_mm_andnot_si128(*(const __m128i *) bitmask, input2); \ + \ + addresses11_ = _mm_srli_epi16(addresses11_, 4); \ + addresses21_ = _mm_srli_epi16(addresses21_, 4); \ + addresses12_ = _mm_slli_epi16(addresses12_, 4); \ + addresses22_ = _mm_slli_epi16(addresses22_, 4); \ + \ + cache11_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 0) + 0x1000)); \ + cache21_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 0) + 0x1000)); \ + cache12_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 0))); \ + cache22_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 0))); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 1) + 0x3000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 1) + 0x2000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 1) + 0x3000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 1) + 0x2000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 2) + 0x5000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 2) + 0x4000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 2) + 0x5000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 2) + 0x4000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 3) + 0x7000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 3) + 0x6000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 3) + 0x7000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 3) + 0x6000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 4) + 0x9000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 4) + 0x8000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 4) + 0x9000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 4) + 0x8000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 5) + 0xB000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 5) + 0xA000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 5) + 0xB000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 5) + 0xA000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + 
_mm_extract_epi16(addresses11_, 6) + 0xD000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 6) + 0xC000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 6) + 0xD000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 6) + 0xC000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 7) + 0xF000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 7) + 0xE000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 7) + 0xF000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 7) + 0xE000)); \ + \ + input1 = _mm_xor_si128(cache11_, cache12_); \ + input2 = _mm_xor_si128(cache21_, cache22_); \ + } + +#define applyInversedLSTransformation_3(input1, input2,input3) { \ + addresses11_ = _mm_and_si128(*(const __m128i *) bitmask, input1); \ + addresses21_ = _mm_and_si128(*(const __m128i *) bitmask, input2); \ + addresses31_ = _mm_and_si128(*(const __m128i *) bitmask, input3); \ + addresses12_ = _mm_andnot_si128(*(const __m128i *) bitmask, input1); \ + addresses22_ = _mm_andnot_si128(*(const __m128i *) bitmask, input2); \ + addresses32_ = _mm_andnot_si128(*(const __m128i *) bitmask, input3); \ + \ + addresses11_ = _mm_srli_epi16(addresses11_, 4); \ + addresses21_ = _mm_srli_epi16(addresses21_, 4); \ + addresses31_ = _mm_srli_epi16(addresses31_, 4); \ + addresses12_ = _mm_slli_epi16(addresses12_, 4); \ + addresses22_ = _mm_slli_epi16(addresses22_, 4); \ + addresses32_ = _mm_slli_epi16(addresses32_, 4); \ + \ + cache11_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 0) + 0x1000)); \ + cache21_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 0) + 0x1000)); \ + cache31_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 0) + 0x1000)); \ + cache12_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 0))); \ + cache22_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 0))); \ + cache32_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 0))); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 1) + 0x3000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 1) + 0x2000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 1) + 0x3000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 1) + 0x2000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 1) + 0x3000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 1) + 0x2000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + 
_mm_extract_epi16(addresses11_, 2) + 0x5000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 2) + 0x4000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 2) + 0x5000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 2) + 0x4000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 2) + 0x5000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 2) + 0x4000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 3) + 0x7000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 3) + 0x6000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 3) + 0x7000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 3) + 0x6000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 3) + 0x7000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 3) + 0x6000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 4) + 0x9000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 4) + 0x8000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 4) + 0x9000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 4) + 0x8000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 4) + 0x9000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 4) + 0x8000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 5) + 0xB000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 5) + 0xA000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 5) + 0xB000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 5) + 0xA000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 5) + 0xB000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 5) + 0xA000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 6) + 0xD000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 6) + 0xC000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + 
_mm_extract_epi16(addresses21_, 6) + 0xD000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 6) + 0xC000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 6) + 0xD000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 6) + 0xC000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 7) + 0xF000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 7) + 0xE000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 7) + 0xF000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 7) + 0xE000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 7) + 0xF000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 7) + 0xE000)); \ + \ + input1 = _mm_xor_si128(cache11_, cache12_); \ + input2 = _mm_xor_si128(cache21_, cache22_); \ + input3 = _mm_xor_si128(cache31_, cache32_); \ + } + +#define applyInversedLSTransformation_4(input1, input2,input3,input4) { \ + addresses11_ = _mm_and_si128(*(const __m128i *) bitmask, input1); \ + addresses21_ = _mm_and_si128(*(const __m128i *) bitmask, input2); \ + addresses31_ = _mm_and_si128(*(const __m128i *) bitmask, input3); \ + addresses41_ = _mm_and_si128(*(const __m128i *) bitmask, input4); \ + addresses12_ = _mm_andnot_si128(*(const __m128i *) bitmask, input1); \ + addresses22_ = _mm_andnot_si128(*(const __m128i *) bitmask, input2); \ + addresses32_ = _mm_andnot_si128(*(const __m128i *) bitmask, input3); \ + addresses42_ = _mm_andnot_si128(*(const __m128i *) bitmask, input4); \ + \ + addresses11_ = _mm_srli_epi16(addresses11_, 4); \ + addresses21_ = _mm_srli_epi16(addresses21_, 4); \ + addresses31_ = _mm_srli_epi16(addresses31_, 4); \ + addresses41_ = _mm_srli_epi16(addresses41_, 4); \ + addresses12_ = _mm_slli_epi16(addresses12_, 4); \ + addresses22_ = _mm_slli_epi16(addresses22_, 4); \ + addresses32_ = _mm_slli_epi16(addresses32_, 4); \ + addresses42_ = _mm_slli_epi16(addresses42_, 4); \ + \ + cache11_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 0) + 0x1000)); \ + cache21_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 0) + 0x1000)); \ + cache31_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 0) + 0x1000)); \ + cache41_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses41_, 0) + 0x1000)); \ + cache12_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 0))); \ + cache22_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 0))); \ + cache32_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 0))); \ + cache42_ = _mm_load_si128((const void *) (precomputedInversedLSTable + _mm_extract_epi16(addresses42_, 0))); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 
1) + 0x3000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 1) + 0x3000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 1) + 0x3000)); \ + cache41_ = _mm_xor_si128(cache41_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses41_, 1) + 0x3000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 1) + 0x2000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 1) + 0x2000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 1) + 0x2000)); \ + cache42_ = _mm_xor_si128(cache42_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses42_, 1) + 0x2000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 2) + 0x5000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 2) + 0x5000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 2) + 0x5000)); \ + cache41_ = _mm_xor_si128(cache41_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses41_, 2) + 0x5000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 2) + 0x4000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 2) + 0x4000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 2) + 0x4000)); \ + cache42_ = _mm_xor_si128(cache42_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses42_, 2) + 0x4000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 3) + 0x7000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 3) + 0x7000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 3) + 0x7000)); \ + cache41_ = _mm_xor_si128(cache41_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses41_, 3) + 0x7000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 3) + 0x6000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 3) + 0x6000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 3) + 0x6000)); \ + cache42_ = _mm_xor_si128(cache42_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses42_, 3) + 0x6000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 4) + 0x9000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 4) + 0x9000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 4) + 0x9000)); \ + cache41_ = 
_mm_xor_si128(cache41_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses41_, 4) + 0x9000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 4) + 0x8000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 4) + 0x8000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 4) + 0x8000)); \ + cache42_ = _mm_xor_si128(cache42_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses42_, 4) + 0x8000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 5) + 0xB000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 5) + 0xB000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 5) + 0xB000)); \ + cache41_ = _mm_xor_si128(cache41_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses41_, 5) + 0xB000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 5) + 0xA000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 5) + 0xA000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 5) + 0xA000)); \ + cache42_ = _mm_xor_si128(cache42_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses42_, 5) + 0xA000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 6) + 0xD000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 6) + 0xD000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 6) + 0xD000)); \ + cache41_ = _mm_xor_si128(cache41_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses41_, 6) + 0xD000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 6) + 0xC000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 6) + 0xC000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 6) + 0xC000)); \ + cache42_ = _mm_xor_si128(cache42_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses42_, 6) + 0xC000)); \ + \ + cache11_ = _mm_xor_si128(cache11_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses11_, 7) + 0xF000)); \ + cache21_ = _mm_xor_si128(cache21_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses21_, 7) + 0xF000)); \ + cache31_ = _mm_xor_si128(cache31_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses31_, 7) + 0xF000)); \ + cache41_ = _mm_xor_si128(cache41_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses41_, 7) + 0xF000)); \ + cache12_ = _mm_xor_si128(cache12_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses12_, 7) + 0xE000)); \ + cache22_ = _mm_xor_si128(cache22_, *(const 
__m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses22_, 7) + 0xE000)); \ + cache32_ = _mm_xor_si128(cache32_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses32_, 7) + 0xE000)); \ + cache42_ = _mm_xor_si128(cache42_, *(const __m128i *) (precomputedInversedLSTable + _mm_extract_epi16(addresses42_, 7) + 0xE000)); \ + \ + input1 = _mm_xor_si128(cache11_, cache12_); \ + input2 = _mm_xor_si128(cache21_, cache22_); \ + input3 = _mm_xor_si128(cache31_, cache32_); \ + input4 = _mm_xor_si128(cache41_, cache42_); \ + } + +#define applyFTransformation(constantIndex, left, right) { \ + temporary1_ = _mm_loadu_si128((const __m128i *) &roundConstants[BlockLengthInBytes * constantIndex]); \ + temporary1_ = _mm_xor_si128(left, temporary1_); \ + applyLSTransformation(temporary1_); \ + right = _mm_xor_si128(right, temporary1_); \ + swapBlocks(left, right); \ + } + + +VC_INLINE void scheduleEncryptionRoundKeysForGost15( + void *roundKeys, + const void *key + ) { + uint_64t *roundKeys_ = (uint_64t *) roundKeys; + int nextKeyIndex_, constantIndex_; + __m128i temporary1_, temporary2_; + __m128i addresses1_, addresses2_; + + memcpy(&roundKeys_[0], key, BlockLengthInBytes * 2); + + for (nextKeyIndex_ = 2, constantIndex_ = 0; nextKeyIndex_ != NumberOfRounds; nextKeyIndex_ += 2) { + __m128i nextKey1_, nextKey2_; + int feistelRoundIndex_; + + nextKey1_ = _mm_loadu_si128((__m128i *) &roundKeys_[2 * (nextKeyIndex_ - 2)]); + nextKey2_ = _mm_loadu_si128((__m128i *) &roundKeys_[2 * (nextKeyIndex_ - 1)]); + + for (feistelRoundIndex_ = 0; feistelRoundIndex_ < NumberOfRoundsInKeySchedule; ++feistelRoundIndex_) { + applyFTransformation(constantIndex_++, nextKey1_, nextKey2_); + } + + _mm_storeu_si128((__m128i *) &roundKeys_[2 * (nextKeyIndex_)], nextKey1_); + _mm_storeu_si128((__m128i *) &roundKeys_[2 * (nextKeyIndex_ + 1)], nextKey2_); + } +} + + +VC_INLINE void scheduleDecryptionRoundKeysForGost15( + void *roundKeys, + const void *key + ) { + uint_64t *roundKeys_ = (uint_64t *) roundKeys; + int keyIndex_; + block_t temporary_ = {{0}}; + int byteIndex_; + __m128i temporary1_; + __m128i cache1_, cache2_; + __m128i addresses1_, addresses2_; + + scheduleEncryptionRoundKeysForGost15(roundKeys, key); + + for (keyIndex_ = 1; keyIndex_ <= 9; ++keyIndex_) { + temporary1_ = _mm_loadu_si128((__m128i *) &roundKeys_[2 * keyIndex_]); + applySTransformation(temporary1_); + applyInversedLSTransformation(temporary1_); + _mm_storeu_si128((__m128i *) &roundKeys_[2 * keyIndex_], temporary1_); + } +} + +#define ROUND_ENC_1x(round_) { \ + temporary1_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); \ + data_ = _mm_xor_si128(data_, temporary1_); \ + applyLSTransformation(data_); \ +} + +#define ROUND_ENC_2x(round_) { \ + temporary11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); \ + data1_ = _mm_xor_si128(data1_, temporary11_); \ + data2_ = _mm_xor_si128(data2_, temporary11_); \ + applyLSTransformation_2(data1_, data2_); \ +} + +#define ROUND_ENC_3x(round_) { \ + temporary11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); \ + data1_ = _mm_xor_si128(data1_, temporary11_); \ + data2_ = _mm_xor_si128(data2_, temporary11_); \ + data3_ = _mm_xor_si128(data3_, temporary11_); \ + applyLSTransformation_3(data1_, data2_, data3_); \ +} + +#define ROUND_ENC_4x(round_) { \ + temporary11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); \ + data1_ = _mm_xor_si128(data1_, temporary11_); \ + data2_ = _mm_xor_si128(data2_, temporary11_); \ + data3_ = 
_mm_xor_si128(data3_, temporary11_); \ + data4_ = _mm_xor_si128(data4_, temporary11_); \ + applyLSTransformation_4(data1_, data2_, data3_,data4_); \ +} + +void kuznyechik_encrypt_block_simd(uint8* out, const uint8* in, kuznyechik_kds* kds) +{ + const uint_64t *roundKeys_ = (const uint_64t *) kds->rke; + __m128i data_; + __m128i temporary1_, temporary2_; + __m128i addresses1_, addresses2_; + int round_; + + data_ = _mm_loadu_si128((const __m128i*) in); + +#ifdef UNROLL_LOOPS + ROUND_ENC_1x (0); + ROUND_ENC_1x (1); + ROUND_ENC_1x (2); + ROUND_ENC_1x (3); + ROUND_ENC_1x (4); + ROUND_ENC_1x (5); + ROUND_ENC_1x (6); + ROUND_ENC_1x (7); + ROUND_ENC_1x (8); + round_ = 9; +#else + for (round_ = 0; round_ < 9; round_++) + ROUND_ENC_1x (round_); +#endif + + temporary1_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); + data_ = _mm_xor_si128(data_, temporary1_); + _mm_storeu_si128((__m128i*) out, data_); +} + +void kuznyechik_encrypt_blocks_simd(uint8* out, const uint8* in, size_t blocks, kuznyechik_kds* kds) +{ + const uint_64t *roundKeys_ = (const uint_64t *) kds->rke; + __m128i data1_, data2_, data3_, data4_; + __m128i temporary11_, temporary12_; + __m128i addresses11_, addresses12_; + __m128i temporary21_, temporary22_; + __m128i addresses21_, addresses22_; + __m128i temporary31_, temporary32_; + __m128i addresses31_, addresses32_; + __m128i temporary41_, temporary42_; + __m128i addresses41_, addresses42_; + int round_; + + while (blocks >= 4) + { + + data1_ = _mm_loadu_si128((const __m128i*) in); + data2_ = _mm_loadu_si128((const __m128i*) (in + 16)); + data3_ = _mm_loadu_si128((const __m128i*) (in + 32)); + data4_ = _mm_loadu_si128((const __m128i*) (in + 48)); + +#ifdef UNROLL_LOOPS + ROUND_ENC_4x (0); + ROUND_ENC_4x (1); + ROUND_ENC_4x (2); + ROUND_ENC_4x (3); + ROUND_ENC_4x (4); + ROUND_ENC_4x (5); + ROUND_ENC_4x (6); + ROUND_ENC_4x (7); + ROUND_ENC_4x (8); + round_ = 9; +#else + for (round_ = 0; round_ < 9; round_++) + ROUND_ENC_4x (round_); +#endif + + temporary11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); + data1_ = _mm_xor_si128(data1_, temporary11_); + data2_ = _mm_xor_si128(data2_, temporary11_); + data3_ = _mm_xor_si128(data3_, temporary11_); + data4_ = _mm_xor_si128(data4_, temporary11_); + _mm_storeu_si128((__m128i*) out, data1_); + _mm_storeu_si128((__m128i*) (out + 16), data2_); + _mm_storeu_si128((__m128i*) (out + 32), data3_); + _mm_storeu_si128((__m128i*) (out + 48), data4_); + in += 64; + out += 64; + blocks -= 4; + } + + if (blocks == 3) + { + data1_ = _mm_loadu_si128((const __m128i*) in); + data2_ = _mm_loadu_si128((const __m128i*) (in + 16)); + data3_ = _mm_loadu_si128((const __m128i*) (in + 32)); + +#ifdef UNROLL_LOOPS + ROUND_ENC_3x (0); + ROUND_ENC_3x (1); + ROUND_ENC_3x (2); + ROUND_ENC_3x (3); + ROUND_ENC_3x (4); + ROUND_ENC_3x (5); + ROUND_ENC_3x (6); + ROUND_ENC_3x (7); + ROUND_ENC_3x (8); + round_ = 9; +#else + for (round_ = 0; round_ < 9; round_++) + ROUND_ENC_3x (round_); +#endif + + temporary11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); + data1_ = _mm_xor_si128(data1_, temporary11_); + data2_ = _mm_xor_si128(data2_, temporary11_); + data3_ = _mm_xor_si128(data3_, temporary11_); + _mm_storeu_si128((__m128i*) out, data1_); + _mm_storeu_si128((__m128i*) (out + 16), data2_); + _mm_storeu_si128((__m128i*) (out + 32), data3_); + } + + else if (blocks == 2) + { + data1_ = _mm_loadu_si128((const __m128i*) in); + data2_ = _mm_loadu_si128((const __m128i*) (in + 16)); + +#ifdef UNROLL_LOOPS + ROUND_ENC_2x (0); + 
ROUND_ENC_2x (1); + ROUND_ENC_2x (2); + ROUND_ENC_2x (3); + ROUND_ENC_2x (4); + ROUND_ENC_2x (5); + ROUND_ENC_2x (6); + ROUND_ENC_2x (7); + ROUND_ENC_2x (8); + round_ = 9; +#else + for (round_ = 0; round_ < 9; round_++) + ROUND_ENC_2x (round_); +#endif + + temporary11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); + data1_ = _mm_xor_si128(data1_, temporary11_); + data2_ = _mm_xor_si128(data2_, temporary11_); + _mm_storeu_si128((__m128i*) out, data1_); + _mm_storeu_si128((__m128i*) (out + 16), data2_); + } + else if (blocks) + { + kuznyechik_encrypt_block_simd (out, in, kds); + } + +} + +#define ROUND_DEC_1X(round_) { \ + applyInversedLSTransformation(data_); \ + cache1_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); \ + data_ = _mm_xor_si128(data_, cache1_); \ +} + +#define ROUND_DEC_2X(round_) { \ + applyInversedLSTransformation_2(data1_, data2_); \ + cache11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); \ + data1_ = _mm_xor_si128(data1_, cache11_); \ + data2_ = _mm_xor_si128(data2_, cache11_); \ +} + +#define ROUND_DEC_3X(round_) { \ + applyInversedLSTransformation_3(data1_, data2_,data3_); \ + cache11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); \ + data1_ = _mm_xor_si128(data1_, cache11_); \ + data2_ = _mm_xor_si128(data2_, cache11_); \ + data3_ = _mm_xor_si128(data3_, cache11_); \ +} + +#define ROUND_DEC_4X(round_) { \ + applyInversedLSTransformation_4(data1_, data2_,data3_,data4_); \ + cache11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[2 * round_]); \ + data1_ = _mm_xor_si128(data1_, cache11_); \ + data2_ = _mm_xor_si128(data2_, cache11_); \ + data3_ = _mm_xor_si128(data3_, cache11_); \ + data4_ = _mm_xor_si128(data4_, cache11_); \ +} + +void kuznyechik_decrypt_block_simd(uint8* out, const uint8* in, kuznyechik_kds* kds) +{ + const uint_64t *roundKeys_ = kds->rkd; + __m128i data_; +#ifndef UNROLL_LOOPS + int round_; +#endif + block_t temporary_; + int byteIndex_; + __m128i cache1_, cache2_; + __m128i addresses1_, addresses2_; + + data_ = _mm_loadu_si128((const __m128i*) in); + applySTransformation(data_); + +#ifdef UNROLL_LOOPS + ROUND_DEC_1X (9); + ROUND_DEC_1X (8); + ROUND_DEC_1X (7); + ROUND_DEC_1X (6); + ROUND_DEC_1X (5); + ROUND_DEC_1X (4); + ROUND_DEC_1X (3); + ROUND_DEC_1X (2); + ROUND_DEC_1X (1); +#else + for (round_ = NumberOfRounds - 1; round_ > 0; --round_) + ROUND_DEC_1X(round_); +#endif + + applyInversedSTransformation(data_); + cache1_ = _mm_loadu_si128((const __m128i *) &roundKeys_[0]); + data_ = _mm_xor_si128(data_, cache1_); + _mm_storeu_si128((__m128i*) out, data_); +} + +void kuznyechik_decrypt_blocks_simd(uint8* out, const uint8* in, size_t blocks, kuznyechik_kds* kds) +{ + const uint_64t *roundKeys_ = kds->rkd; + __m128i data1_, data2_,data3_,data4_; +#ifndef UNROLL_LOOPS + int round_; +#endif + block_t temporary1_; + block_t temporary2_; + block_t temporary3_; + block_t temporary4_; + int byteIndex_; + __m128i cache11_, cache12_; + __m128i cache21_, cache22_; + __m128i cache31_, cache32_; + __m128i cache41_, cache42_; + __m128i addresses11_, addresses12_; + __m128i addresses21_, addresses22_; + __m128i addresses31_, addresses32_; + __m128i addresses41_, addresses42_; + + while (blocks >= 4) + { + data1_ = _mm_loadu_si128((const __m128i*) in); + data2_ = _mm_loadu_si128((const __m128i*) (in + 16)); + data3_ = _mm_loadu_si128((const __m128i*) (in + 32)); + data4_ = _mm_loadu_si128((const __m128i*) (in + 48)); + + applySTransformation_4(data1_, data2_,data3_,data4_); + +#ifdef UNROLL_LOOPS + 
ROUND_DEC_4X (9); + ROUND_DEC_4X (8); + ROUND_DEC_4X (7); + ROUND_DEC_4X (6); + ROUND_DEC_4X (5); + ROUND_DEC_4X (4); + ROUND_DEC_4X (3); + ROUND_DEC_4X (2); + ROUND_DEC_4X (1); +#else + for (round_ = NumberOfRounds - 1; round_ > 0; --round_) + ROUND_DEC_4X(round_); +#endif + + applyInversedSTransformation_4(data1_,data2_,data3_,data4_); + cache11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[0]); + data1_ = _mm_xor_si128(data1_, cache11_); + data2_ = _mm_xor_si128(data2_, cache11_); + data3_ = _mm_xor_si128(data3_, cache11_); + data4_ = _mm_xor_si128(data4_, cache11_); + _mm_storeu_si128((__m128i*) out, data1_); + _mm_storeu_si128((__m128i*) (out + 16), data2_); + _mm_storeu_si128((__m128i*) (out + 32), data3_); + _mm_storeu_si128((__m128i*) (out + 48), data4_); + in += 64; + out += 64; + blocks -= 4; + } + + if (blocks == 3) + { + data1_ = _mm_loadu_si128((const __m128i*) in); + data2_ = _mm_loadu_si128((const __m128i*) (in + 16)); + data3_ = _mm_loadu_si128((const __m128i*) (in + 32)); + + applySTransformation_3(data1_, data2_,data3_); + +#ifdef UNROLL_LOOPS + ROUND_DEC_3X (9); + ROUND_DEC_3X (8); + ROUND_DEC_3X (7); + ROUND_DEC_3X (6); + ROUND_DEC_3X (5); + ROUND_DEC_3X (4); + ROUND_DEC_3X (3); + ROUND_DEC_3X (2); + ROUND_DEC_3X (1); +#else + for (round_ = NumberOfRounds - 1; round_ > 0; --round_) + ROUND_DEC_3X(round_); +#endif + + applyInversedSTransformation_3(data1_,data2_,data3_); + cache11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[0]); + data1_ = _mm_xor_si128(data1_, cache11_); + data2_ = _mm_xor_si128(data2_, cache11_); + data3_ = _mm_xor_si128(data3_, cache11_); + _mm_storeu_si128((__m128i*) out, data1_); + _mm_storeu_si128((__m128i*) (out + 16), data2_); + _mm_storeu_si128((__m128i*) (out + 32), data3_); + } + else if (blocks == 2) + { + data1_ = _mm_loadu_si128((const __m128i*) in); + data2_ = _mm_loadu_si128((const __m128i*) (in + 16)); + + applySTransformation_2(data1_, data2_); + +#ifdef UNROLL_LOOPS + ROUND_DEC_2X (9); + ROUND_DEC_2X (8); + ROUND_DEC_2X (7); + ROUND_DEC_2X (6); + ROUND_DEC_2X (5); + ROUND_DEC_2X (4); + ROUND_DEC_2X (3); + ROUND_DEC_2X (2); + ROUND_DEC_2X (1); +#else + for (round_ = NumberOfRounds - 1; round_ > 0; --round_) + ROUND_DEC_2X(round_); +#endif + + applyInversedSTransformation_2(data1_,data2_); + cache11_ = _mm_loadu_si128((const __m128i *) &roundKeys_[0]); + data1_ = _mm_xor_si128(data1_, cache11_); + data2_ = _mm_xor_si128(data2_, cache11_); + _mm_storeu_si128((__m128i*) out, data1_); + _mm_storeu_si128((__m128i*) (out + 16), data2_); + } + else if (blocks) + kuznyechik_decrypt_block_simd (out, in, kds); +} + +void kuznyechik_set_key_simd(const uint8* key, kuznyechik_kds *kds) +{ + scheduleEncryptionRoundKeysForGost15 (kds->rke, key); + scheduleDecryptionRoundKeysForGost15 (kds->rkd, key); +} + +#endif diff --git a/src/Crypto/misc.h b/src/Crypto/misc.h index 47d0288a..25313d1d 100644 --- a/src/Crypto/misc.h +++ b/src/Crypto/misc.h @@ -150,9 +150,9 @@ VC_INLINE uint64 ByteReverseWord64(uint64 value) return rotl64(value, 32U); #endif } -VC_INLINE void CorrectEndianess(uint64 *out, const uint64 *in, size_t byteCount) +VC_INLINE void CorrectEndianness(uint64 *out, const uint64 *in, size_t byteCount) { size_t i, count = byteCount/sizeof(uint64); for (i=0; i<count; i++) diff --git a/src/Crypto/rdrand.c b/src/Crypto/rdrand.c new file mode 100644 index 00000000..52f7f98e --- /dev/null +++ b/src/Crypto/rdrand.c @@ -0,0 +1,32 @@ +// rdrand.cpp - written and placed in public domain by Jeffrey Walton and Uri Blumenthal. 
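Before moving on to the RDRAND sources below, a short usage sketch for the SIMD Kuznyechik entry points completed above (kuznyechik_set_key_simd, kuznyechik_encrypt_blocks_simd, kuznyechik_decrypt_blocks_simd). This is a hedged, illustrative round-trip only, not part of the patch; the "kuznyechik.h" include name and the self-test wrapper are assumptions, while the 256-bit key, the 16-byte block size and the kuznyechik_kds type follow the code in this hunk.

/* Illustrative sketch: round-trip four 16-byte blocks through the SIMD
 * Kuznyechik routines added in this patch. Assumes a header (here called
 * "kuznyechik.h") declaring kuznyechik_kds and the *_simd prototypes. */
#include <string.h>
#include "kuznyechik.h"

int kuznyechik_simd_roundtrip_ok(void)
{
	kuznyechik_kds kds;
	unsigned char key[32] = { 0 };            /* 256-bit key (demo value only) */
	unsigned char pt[64], ct[64], rt[64];     /* four 16-byte blocks */

	memset(pt, 0xA5, sizeof(pt));

	kuznyechik_set_key_simd(key, &kds);              /* builds kds.rke and kds.rkd */
	kuznyechik_encrypt_blocks_simd(ct, pt, 4, &kds); /* exercises the 4-block fast path */
	kuznyechik_decrypt_blocks_simd(rt, ct, 4, &kds);

	return memcmp(pt, rt, sizeof(pt)) == 0;          /* 1 if decrypt(encrypt(x)) == x */
}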
+ +/* modified for VeraCrypt */ + +#include "chacha256.h" +#include "cpu.h" +#include "misc.h" + +void CRYPTOPP_FASTCALL MASM_RDRAND_GenerateBlock(uint8*, size_t); +void CRYPTOPP_FASTCALL MASM_RDSEED_GenerateBlock(uint8*, size_t); + +int RDRAND_getBytes(unsigned char* buf, size_t bufLen) +{ + if (!buf || !HasRDRAND()) + return 0; + + if (bufLen) + MASM_RDRAND_GenerateBlock(buf, bufLen); + + return 1; +} + +int RDSEED_getBytes(unsigned char* buf, size_t bufLen) +{ + if (!buf || !HasRDSEED()) + return 0; + + if (bufLen) + MASM_RDSEED_GenerateBlock(buf, bufLen); + + return 1; +} diff --git a/src/Crypto/rdrand.h b/src/Crypto/rdrand.h new file mode 100644 index 00000000..ff8cfd29 --- /dev/null +++ b/src/Crypto/rdrand.h @@ -0,0 +1,26 @@ +#ifndef HEADER_Crypto_RDRAND +#define HEADER_Crypto_RDRAND + +#include "Common/Tcdefs.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * generate bufLen random bytes using CPU RDRAND instruction + * return 1 in case of success and 0 in case of failure + */ +int RDRAND_getBytes(unsigned char* buf, size_t bufLen); + +/* + * generate bufLen random bytes using CPU RDSEED instruction + * return 1 in case of success and 0 in case of failure + */ +int RDSEED_getBytes(unsigned char* buf, size_t bufLen); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/Crypto/rdrand_ml.asm b/src/Crypto/rdrand_ml.asm new file mode 100644 index 00000000..4b85f7fc --- /dev/null +++ b/src/Crypto/rdrand_ml.asm @@ -0,0 +1,230 @@ +;; rdrand.asm - written and placed in public domain by Jeffrey Walton and Uri Blumenthal. +;; Copyright assigned to the Crypto++ project. + +;; This ASM file provides RDRAND to downlevel Microsoft tool chains. +;; Everything "just works" under Visual Studio. Other platforms will +;; have to run MASM/MASM-64 and then link to the object files. 
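The RDRAND_getBytes and RDSEED_getBytes wrappers above already gate on HasRDRAND()/HasRDSEED() and return 0 when the corresponding instruction is absent, so a caller only needs to test the return value. A minimal, hypothetical caller sketch (the function name and the 32-byte request size are illustrative, not from the patch):

/* Illustrative sketch: obtain 32 bytes of CPU-generated entropy, preferring
 * RDSEED over RDRAND; returns 0 if the CPU supports neither instruction. */
#include "rdrand.h"

int get_cpu_entropy(unsigned char seed[32])
{
	if (RDSEED_getBytes(seed, 32))   /* RDSEED: seed-grade output, if supported */
		return 1;
	if (RDRAND_getBytes(seed, 32))   /* RDRAND: hardware DRBG output as fallback */
		return 1;
	return 0;                        /* neither instruction available */
}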
+ +;; set ASFLAGS=/nologo /D_M_X86 /W3 /Cx /Zi /safeseh +;; set ASFLAGS64=/nologo /D_M_X64 /W3 /Cx /Zi +;; "C:\Program Files (x86)\Microsoft Visual Studio 11.0\VC\bin\ml.exe" %ASFLAGS% /Fo rdrand-x86.obj /c rdrand.asm +;; "C:\Program Files (x86)\Microsoft Visual Studio 11.0\VC\bin\amd64\ml64.exe" %ASFLAGS64% /Fo rdrand-x64.obj /c rdrand.asm + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +TITLE MASM_RDRAND_GenerateBlock source file +SUBTITLE Microsoft specific ASM code to utilize RDRAND for down level Microsoft toolchains + +PUBLIC MASM_RDRAND_GenerateBlock + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +;; C/C++ Function prototypes (both are fastcall) +;; X86: +;; extern "C" void __fastcall MASM_RDRAND_GenerateBlock(byte* ptr, size_t size); +;; X64: +;; extern "C" void __fastcall MASM_RDRAND_GenerateBlock(byte* ptr, size_t size); + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +IFDEF _M_X86 ;; Set via the command line + +.486 +.MODEL FLAT + +;; Fastcall calling conventions exports +ALIAS <@MASM_RDRAND_GenerateBlock@8> = <MASM_RDRAND_GenerateBlock> + +ENDIF + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +IFDEF _M_X86 ;; Set via the command line + +.CODE +ALIGN 8 +OPTION PROLOGUE:NONE +OPTION EPILOGUE:NONE + +;; No need for Load_Arguments due to fastcall +;; ECX (in): arg1, byte* buffer +;; EDX (in): arg2, size_t bsize + +MASM_RDRAND_GenerateBlock PROC ;; arg1:DWORD, arg2:DWORD + + MWSIZE EQU 04h ;; machine word size + buffer EQU ecx + bsize EQU edx + + ;; Top of While loop +RDRAND_GenerateBlock_Top: + + ;; Check remaining size + cmp bsize, 0 + je RDRAND_GenerateBlock_Return + +RDRAND_Call_EAX: + ;; RDRAND is not available prior to VS2012. Just emit + ;; the byte codes using DB. This is `rdrand eax`. + DB 0Fh, 0C7h, 0F0h + + ;; If CF=1, the number returned by RDRAND is valid. + ;; If CF=0, a random number was not available. 
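The carry-flag convention just noted (CF=1 means the register holds a valid random value, CF=0 means none was available and the caller should retry) is the same contract exposed by the compiler intrinsics on toolchains that do support the instruction. A hedged C restatement of the retry loop, assuming <immintrin.h> and a compiler configured for RDRAND (for example -mrdrnd with GCC/Clang); the patch itself emits the opcode bytes precisely because older MASM versions lack the mnemonic.

/* Illustrative sketch: the `DB 0Fh, 0C7h, 0F0h` / `jnc` loop above, restated
 * with the _rdrand32_step() intrinsic, which returns 1 and fills *value when
 * CF=1, and returns 0 when CF=0 (no random value available yet). */
#include <immintrin.h>

static unsigned int rdrand32_retry(void)
{
	unsigned int value;
	while (!_rdrand32_step(&value))   /* CF=0: retry immediately */
		;                             /* mirrors `jnc RDRAND_Call_EAX` */
	return value;                     /* CF=1: value is valid */
}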
+ + ;; Retry immediately + jnc RDRAND_Call_EAX + +RDRAND_succeeded: + + cmp bsize, MWSIZE + jb RDRAND_Partial_Machine_Word + +RDRAND_Full_Machine_Word: + + mov DWORD PTR [buffer], eax + add buffer, MWSIZE ;; No need for Intel Core 2 slow workarounds, like + sub bsize, MWSIZE ;; `lea buffer,[buffer+MWSIZE]` for faster adds + + ;; Continue + jmp RDRAND_GenerateBlock_Top + + ;; 1,2,3 bytes remain +RDRAND_Partial_Machine_Word: + + ;; Test bit 1 to see if size is at least 2 + test bsize, 2 + jz RDRAND_Bit_1_Not_Set + + mov WORD PTR [buffer], ax + shr eax, 16 + add buffer, 2 + +RDRAND_Bit_1_Not_Set: + + ;; Test bit 0 to see if size is at least 1 + test bsize, 1 + jz RDRAND_Bit_0_Not_Set + + mov BYTE PTR [buffer], al + ;; shr ax, 8 + ;; add buffer, 1 + +RDRAND_Bit_0_Not_Set: + + ;; We've hit all the bits + +RDRAND_GenerateBlock_Return: + + ;; Clear artifacts + xor eax, eax + ret + +MASM_RDRAND_GenerateBlock ENDP + +ENDIF ;; _M_X86 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +IFDEF _M_X64 ;; Set via the command line + +.CODE +ALIGN 16 +OPTION PROLOGUE:NONE +OPTION EPILOGUE:NONE + +;; No need for Load_Arguments due to fastcall +;; RCX (in): arg1, byte* buffer +;; RDX (in): arg2, size_t bsize + +MASM_RDRAND_GenerateBlock PROC ;; arg1:QWORD, arg2:QWORD + + MWSIZE EQU 08h ;; machine word size + buffer EQU rcx + bsize EQU rdx + + ;; Top of While loop +RDRAND_GenerateBlock_Top: + + ;; Check remaining size + cmp bsize, 0 + je RDRAND_GenerateBlock_Return + +RDRAND_Call_RAX: + ;; RDRAND is not available prior to VS2012. Just emit + ;; the byte codes using DB. This is `rdrand rax`. + DB 048h, 0Fh, 0C7h, 0F0h + + ;; If CF=1, the number returned by RDRAND is valid. + ;; If CF=0, a random number was not available. + + ;; Retry immediately + jnc RDRAND_Call_RAX + +RDRAND_succeeded: + + cmp bsize, MWSIZE + jb RDRAND_Partial_Machine_Word + +RDRAND_Full_Machine_Word: + + mov QWORD PTR [buffer], rax + add buffer, MWSIZE + sub bsize, MWSIZE + + ;; Continue + jmp RDRAND_GenerateBlock_Top + + ;; 1,2,3,4,5,6,7 bytes remain +RDRAND_Partial_Machine_Word: + + ;; Test bit 2 to see if size is at least 4 + test bsize, 4 + jz RDRAND_Bit_2_Not_Set + + mov DWORD PTR [buffer], eax + shr rax, 32 + add buffer, 4 + +RDRAND_Bit_2_Not_Set: + + ;; Test bit 1 to see if size is at least 2 + test bsize, 2 + jz RDRAND_Bit_1_Not_Set + + mov WORD PTR [buffer], ax + shr eax, 16 + add buffer, 2 + +RDRAND_Bit_1_Not_Set: + + ;; Test bit 0 to see if size is at least 1 + test bsize, 1 + jz RDRAND_Bit_0_Not_Set + + mov BYTE PTR [buffer], al + ;; shr ax, 8 + ;; add buffer, 1 + +RDRAND_Bit_0_Not_Set: + + ;; We've hit all the bits + +RDRAND_GenerateBlock_Return: + + ;; Clear artifacts + xor rax, rax + ret + +MASM_RDRAND_GenerateBlock ENDP + +ENDIF ;; _M_X64 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +END diff --git a/src/Crypto/rdseed_ml.asm b/src/Crypto/rdseed_ml.asm new file mode 100644 index 00000000..60704230 --- /dev/null +++ b/src/Crypto/rdseed_ml.asm @@ -0,0 +1,230 @@ +;; rdrand.asm - written and placed in public domain by Jeffrey Walton and Uri Blumenthal. +;; Copyright assigned to the Crypto++ project. + +;; This ASM file provides RDSEED to downlevel Microsoft tool chains. +;; Everything "just works" under Visual Studio. Other platforms will +;; have to run MASM/MASM-64 and then link to the object files. 
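Both generator routines (the RDRAND one above and the RDSEED one that follows) share the same tail handling: once whole machine words have been consumed, the Partial_Machine_Word path stores the last 1 to 3 bytes (1 to 7 on x64) by testing the low bits of the remaining size and shifting the random word down after each store. A hedged C restatement of that tail logic for the 64-bit case, included only to make the bit tests easier to follow; the function and variable names are illustrative, not part of the patch.

/* Illustrative sketch of the Partial_Machine_Word branch (x64): write the low
 * 4, 2 and 1 bytes of the final 64-bit random word into the tail of the
 * buffer, shifting the word right after each partial store. */
#include <stdint.h>
#include <string.h>

static void store_random_tail(uint8_t *buffer, size_t remaining, uint64_t word)
{
	if (remaining & 4) {                      /* test bsize, 4 */
		uint32_t w32 = (uint32_t) word;
		memcpy(buffer, &w32, 4);
		word >>= 32;
		buffer += 4;
	}
	if (remaining & 2) {                      /* test bsize, 2 */
		uint16_t w16 = (uint16_t) word;
		memcpy(buffer, &w16, 2);
		word >>= 16;
		buffer += 2;
	}
	if (remaining & 1)                        /* test bsize, 1 */
		*buffer = (uint8_t) word;
}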
+ +;; set ASFLAGS=/nologo /D_M_X86 /W3 /Cx /Zi /safeseh +;; set ASFLAGS64=/nologo /D_M_X64 /W3 /Cx /Zi +;; "C:\Program Files (x86)\Microsoft Visual Studio 11.0\VC\bin\ml.exe" %ASFLAGS% /Fo rdrand-x86.obj /c rdrand.asm +;; "C:\Program Files (x86)\Microsoft Visual Studio 11.0\VC\bin\amd64\ml64.exe" %ASFLAGS64% /Fo rdrand-x64.obj /c rdrand.asm + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +TITLE MASM_RDSEED_GenerateBlock source file +SUBTITLE Microsoft specific ASM code to utilize RDSEED for down level Microsoft toolchains + +PUBLIC MASM_RDSEED_GenerateBlock + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +;; C/C++ Function prototypes (both are fastcall) +;; X86: +;; extern "C" void __fastcall MASM_RDSEED_GenerateBlock(byte* ptr, size_t size); +;; X64: +;; extern "C" void __fastcall MASM_RDSEED_GenerateBlock(byte* ptr, size_t size); + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +IFDEF _M_X86 ;; Set via the command line + +.486 +.MODEL FLAT + +;; Fastcall calling conventions exports +ALIAS <@MASM_RDSEED_GenerateBlock@8> = <MASM_RDSEED_GenerateBlock> + +ENDIF + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +IFDEF _M_X86 ;; Set via the command line + +.CODE +ALIGN 8 +OPTION PROLOGUE:NONE +OPTION EPILOGUE:NONE + +;; No need for Load_Arguments due to fastcall +;; ECX (in): arg1, byte* buffer +;; EDX (in): arg2, size_t bsize + +MASM_RDSEED_GenerateBlock PROC ;; arg1:DWORD, arg2:DWORD + + MWSIZE EQU 04h ;; machine word size + buffer EQU ecx + bsize EQU edx + + ;; Top of While loop +RDSEED_GenerateBlock_Top: + + ;; Check remaining size + cmp bsize, 0 + je RDSEED_GenerateBlock_Return + +RDSEED_Call_EAX: + ;; RDSEED is not available prior to VS2012. Just emit + ;; the byte codes using DB. This is `rdseed eax`. + DB 0Fh, 0C7h, 0F8h + + ;; If CF=1, the number returned by RDSEED is valid. + ;; If CF=0, a random number was not available. 
+ + ;; Retry immediately + jnc RDSEED_Call_EAX + +RDSEED_succeeded: + + cmp bsize, MWSIZE + jb RDSEED_Partial_Machine_Word + +RDSEED_Full_Machine_Word: + + mov DWORD PTR [buffer], eax + add buffer, MWSIZE ;; No need for Intel Core 2 slow workarounds, like + sub bsize, MWSIZE ;; `lea buffer,[buffer+MWSIZE]` for faster adds + + ;; Continue + jmp RDSEED_GenerateBlock_Top + + ;; 1,2,3 bytes remain +RDSEED_Partial_Machine_Word: + + ;; Test bit 1 to see if size is at least 2 + test bsize, 2 + jz RDSEED_Bit_1_Not_Set + + mov WORD PTR [buffer], ax + shr eax, 16 + add buffer, 2 + +RDSEED_Bit_1_Not_Set: + + ;; Test bit 0 to see if size is at least 1 + test bsize, 1 + jz RDSEED_Bit_0_Not_Set + + mov BYTE PTR [buffer], al + ;; shr ax, 8 + ;; add buffer, 1 + +RDSEED_Bit_0_Not_Set: + + ;; We've hit all the bits + +RDSEED_GenerateBlock_Return: + + ;; Clear artifacts + xor eax, eax + ret + +MASM_RDSEED_GenerateBlock ENDP + +ENDIF ;; _M_X86 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +IFDEF _M_X64 ;; Set via the command line + +.CODE +ALIGN 16 +OPTION PROLOGUE:NONE +OPTION EPILOGUE:NONE + +;; No need for Load_Arguments due to fastcall +;; RCX (in): arg1, byte* buffer +;; RDX (in): arg2, size_t bsize + +MASM_RDSEED_GenerateBlock PROC ;; arg1:QWORD, arg2:QWORD + + MWSIZE EQU 08h ;; machine word size + buffer EQU rcx + bsize EQU rdx + + ;; Top of While loop +RDSEED_GenerateBlock_Top: + + ;; Check remaining size + cmp bsize, 0 + je RDSEED_GenerateBlock_Return + +RDSEED_Call_RAX: + ;; RDSEED is not available prior to VS2012. Just emit + ;; the byte codes using DB. This is `rdseed rax`. + DB 048h, 0Fh, 0C7h, 0F8h + + ;; If CF=1, the number returned by RDSEED is valid. + ;; If CF=0, a random number was not available. + + ;; Retry immediately + jnc RDSEED_Call_RAX + +RDSEED_succeeded: + + cmp bsize, MWSIZE + jb RDSEED_Partial_Machine_Word + +RDSEED_Full_Machine_Word: + + mov QWORD PTR [buffer], rax + add buffer, MWSIZE + sub bsize, MWSIZE + + ;; Continue + jmp RDSEED_GenerateBlock_Top + + ;; 1,2,3,4,5,6,7 bytes remain +RDSEED_Partial_Machine_Word: + + ;; Test bit 2 to see if size is at least 4 + test bsize, 4 + jz RDSEED_Bit_2_Not_Set + + mov DWORD PTR [buffer], eax + shr rax, 32 + add buffer, 4 + +RDSEED_Bit_2_Not_Set: + + ;; Test bit 1 to see if size is at least 2 + test bsize, 2 + jz RDSEED_Bit_1_Not_Set + + mov WORD PTR [buffer], ax + shr eax, 16 + add buffer, 2 + +RDSEED_Bit_1_Not_Set: + + ;; Test bit 0 to see if size is at least 1 + test bsize, 1 + jz RDSEED_Bit_0_Not_Set + + mov BYTE PTR [buffer], al + ;; shr ax, 8 + ;; add buffer, 1 + +RDSEED_Bit_0_Not_Set: + + ;; We've hit all the bits + +RDSEED_GenerateBlock_Return: + + ;; Clear artifacts + xor rax, rax + ret + +MASM_RDSEED_GenerateBlock ENDP + +ENDIF ;; _M_X64 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +END diff --git a/src/Crypto/sha256-x64-nayuki.S b/src/Crypto/sha256-x64-nayuki.S new file mode 100644 index 00000000..444b28cd --- /dev/null +++ b/src/Crypto/sha256-x64-nayuki.S @@ -0,0 +1,6 @@ + + .ifndef __YASM__ +#if defined(__linux__) && defined(__ELF__) +.section .note.GNU-stack,"",%progbits +#endif + .endif
\ No newline at end of file diff --git a/src/Crypto/sha256-x86-nayuki.S b/src/Crypto/sha256-x86-nayuki.S new file mode 100644 index 00000000..067496b3 --- /dev/null +++ b/src/Crypto/sha256-x86-nayuki.S @@ -0,0 +1,168 @@ +/* + * SHA-256 hash in x86 assembly + * + * Copyright (c) 2014 Project Nayuki. (MIT License) + * https://www.nayuki.io/page/fast-sha2-hashes-in-x86-assembly + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of + * this software and associated documentation files (the "Software"), to deal in + * the Software without restriction, including without limitation the rights to + * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of + * the Software, and to permit persons to whom the Software is furnished to do so, + * subject to the following conditions: + * - The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * - The Software is provided "as is", without warranty of any kind, express or + * implied, including but not limited to the warranties of merchantability, + * fitness for a particular purpose and noninfringement. In no event shall the + * authors or copyright holders be liable for any claim, damages or other + * liability, whether in an action of contract, tort or otherwise, arising from, + * out of or in connection with the Software or the use or other dealings in the + * Software. + */ + + +/* void sha256_compress_nayuki(uint32_t state[8], const uint8_t block[64]) */ + + .ifdef MS_STDCALL + .globl _sha256_compress_nayuki@8 + _sha256_compress_nayuki@8: + .else + .globl sha256_compress_nayuki + .globl _sha256_compress_nayuki + sha256_compress_nayuki: + _sha256_compress_nayuki: + .endif + + /* + * Storage usage: + * Bytes Location Description + * 4 eax Temporary for calculation per round + * 4 ebx Temporary for calculation per round + * 4 ecx Temporary for calculation per round + * 4 edx Temporary for calculation per round + * 4 ebp Temporary for calculation per round + * 4 esi (During state loading and update) base address of state array argument + * (During hash rounds) temporary for calculation per round + * 4 edi Base address of block array argument (during key schedule loading rounds only) + * 4 esp x86 stack pointer + * 32 [esp+ 0] SHA-256 state variables A,B,C,D,E,F,G,H (4 bytes each) + * 64 [esp+ 32] Key schedule of 16 * 4 bytes + * 4 [esp+ 96] Caller's value of ebx + * 4 [esp+100] Caller's value of esi + * 4 [esp+104] Caller's value of edi + * 4 [esp+108] Caller's value of ebp + */ + + subl $112, %esp + movl %ebx, 96(%esp) + movl %esi, 100(%esp) + movl %edi, 104(%esp) + movl %ebp, 108(%esp) + + + movl 116(%esp), %esi + movl 0(%esi), %eax; movl %eax, 0(%esp) + movl 4(%esi), %eax; movl %eax, 4(%esp) + movl 8(%esi), %eax; movl %eax, 8(%esp) + movl 12(%esi), %eax; movl %eax, 12(%esp) + movl 16(%esi), %eax; movl %eax, 16(%esp) + movl 20(%esi), %eax; movl %eax, 20(%esp) + movl 24(%esi), %eax; movl %eax, 24(%esp) + movl 28(%esi), %eax; movl %eax, 28(%esp) + + + movl 120(%esp), %edi + movl (0*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((0)&0xF)+8)*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (7*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (6*4)(%esp), %ebx; movl (5*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x428A2F98(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (3*4)(%esp); movl 
(0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (2*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (1*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (7*4)(%esp); + movl (1*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((1)&0xF)+8)*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (6*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (5*4)(%esp), %ebx; movl (4*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x71374491(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (2*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (1*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (0*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (6*4)(%esp); + movl (2*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((2)&0xF)+8)*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (5*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (4*4)(%esp), %ebx; movl (3*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xB5C0FBCF(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (1*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (0*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (7*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (5*4)(%esp); + movl (3*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((3)&0xF)+8)*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (4*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (3*4)(%esp), %ebx; movl (2*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xE9B5DBA5(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (0*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (7*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (6*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (4*4)(%esp); + movl (4*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((4)&0xF)+8)*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (3*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (2*4)(%esp), %ebx; movl (1*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x3956C25B(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (7*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (6*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (5*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (3*4)(%esp); + movl (5*4)(%edi), %ebp; bswapl %ebp; movl %ebp, 
((((5)&0xF)+8)*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (2*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (1*4)(%esp), %ebx; movl (0*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x59F111F1(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (6*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (5*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (4*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (2*4)(%esp); + movl (6*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((6)&0xF)+8)*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (1*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (0*4)(%esp), %ebx; movl (7*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x923F82A4(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (5*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (4*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (3*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (1*4)(%esp); + movl (7*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((7)&0xF)+8)*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (0*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (7*4)(%esp), %ebx; movl (6*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xAB1C5ED5(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (4*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (3*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (2*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (0*4)(%esp); + movl (8*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((8)&0xF)+8)*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (7*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (6*4)(%esp), %ebx; movl (5*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xD807AA98(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (3*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (2*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (1*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (7*4)(%esp); + movl (9*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((9)&0xF)+8)*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (6*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (5*4)(%esp), %ebx; movl (4*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x12835B01(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(2*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (1*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (0*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (6*4)(%esp); + movl (10*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((10)&0xF)+8)*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (5*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (4*4)(%esp), %ebx; movl (3*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x243185BE(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (1*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (0*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (7*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (5*4)(%esp); + movl (11*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((11)&0xF)+8)*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (4*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (3*4)(%esp), %ebx; movl (2*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x550C7DC3(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (0*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (7*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (6*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (4*4)(%esp); + movl (12*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((12)&0xF)+8)*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (3*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (2*4)(%esp), %ebx; movl (1*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x72BE5D74(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (7*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (6*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (5*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (3*4)(%esp); + movl (13*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((13)&0xF)+8)*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (2*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (1*4)(%esp), %ebx; movl (0*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x80DEB1FE(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (6*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (5*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (4*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (2*4)(%esp); + movl (14*4)(%edi), 
%ebp; bswapl %ebp; movl %ebp, ((((14)&0xF)+8)*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (1*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (0*4)(%esp), %ebx; movl (7*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x9BDC06A7(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (5*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (4*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (3*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (1*4)(%esp); + movl (15*4)(%edi), %ebp; bswapl %ebp; movl %ebp, ((((15)&0xF)+8)*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (0*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (7*4)(%esp), %ebx; movl (6*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xC19BF174(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (4*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (3*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (2*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (0*4)(%esp); + movl ((((16 -15)&0xF)+8)*4)(%esp), %eax; movl ((((16 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((16 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((16 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((16)&0xF)+8)*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (7*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (6*4)(%esp), %ebx; movl (5*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xE49B69C1(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (3*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (2*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (1*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (7*4)(%esp); + movl ((((17 -15)&0xF)+8)*4)(%esp), %eax; movl ((((17 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((17 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((17 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((17)&0xF)+8)*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (6*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (5*4)(%esp), %ebx; movl (4*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xEFBE4786(%ecx,%eax), %ecx; addl 
%ecx, %esi; addl %esi, (2*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (1*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (0*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (6*4)(%esp); + movl ((((18 -15)&0xF)+8)*4)(%esp), %eax; movl ((((18 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((18 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((18 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((18)&0xF)+8)*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (5*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (4*4)(%esp), %ebx; movl (3*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x0FC19DC6(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (1*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (0*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (7*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (5*4)(%esp); + movl ((((19 -15)&0xF)+8)*4)(%esp), %eax; movl ((((19 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((19 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((19 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((19)&0xF)+8)*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (4*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (3*4)(%esp), %ebx; movl (2*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x240CA1CC(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (0*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (7*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (6*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (4*4)(%esp); + movl ((((20 -15)&0xF)+8)*4)(%esp), %eax; movl ((((20 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((20 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((20 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((20)&0xF)+8)*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (3*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (2*4)(%esp), %ebx; movl (1*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x2DE92C6F(%ecx,%eax), %ecx; addl %ecx, 
%esi; addl %esi, (7*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (6*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (5*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (3*4)(%esp); + movl ((((21 -15)&0xF)+8)*4)(%esp), %eax; movl ((((21 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((21 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((21 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((21)&0xF)+8)*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (2*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (1*4)(%esp), %ebx; movl (0*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x4A7484AA(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (6*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (5*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (4*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (2*4)(%esp); + movl ((((22 -15)&0xF)+8)*4)(%esp), %eax; movl ((((22 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((22 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((22 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((22)&0xF)+8)*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (1*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (0*4)(%esp), %ebx; movl (7*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x5CB0A9DC(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (5*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (4*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (3*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (1*4)(%esp); + movl ((((23 -15)&0xF)+8)*4)(%esp), %eax; movl ((((23 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((23 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((23 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((23)&0xF)+8)*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (0*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (7*4)(%esp), %ebx; movl (6*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x76F988DA(%ecx,%eax), %ecx; addl %ecx, %esi; 
addl %esi, (4*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (3*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (2*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (0*4)(%esp); + movl ((((24 -15)&0xF)+8)*4)(%esp), %eax; movl ((((24 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((24 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((24 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((24)&0xF)+8)*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (7*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (6*4)(%esp), %ebx; movl (5*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x983E5152(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (3*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (2*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (1*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (7*4)(%esp); + movl ((((25 -15)&0xF)+8)*4)(%esp), %eax; movl ((((25 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((25 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((25 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((25)&0xF)+8)*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (6*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (5*4)(%esp), %ebx; movl (4*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xA831C66D(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (2*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (1*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (0*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (6*4)(%esp); + movl ((((26 -15)&0xF)+8)*4)(%esp), %eax; movl ((((26 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((26 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((26 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((26)&0xF)+8)*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (5*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (4*4)(%esp), %ebx; movl (3*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xB00327C8(%ecx,%eax), %ecx; addl %ecx, %esi; addl 
%esi, (1*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (0*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (7*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (5*4)(%esp); + movl ((((27 -15)&0xF)+8)*4)(%esp), %eax; movl ((((27 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((27 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((27 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((27)&0xF)+8)*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (4*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (3*4)(%esp), %ebx; movl (2*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xBF597FC7(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (0*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (7*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (6*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (4*4)(%esp); + movl ((((28 -15)&0xF)+8)*4)(%esp), %eax; movl ((((28 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((28 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((28 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((28)&0xF)+8)*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (3*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (2*4)(%esp), %ebx; movl (1*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xC6E00BF3(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (7*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (6*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (5*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (3*4)(%esp); + movl ((((29 -15)&0xF)+8)*4)(%esp), %eax; movl ((((29 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((29 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((29 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((29)&0xF)+8)*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (2*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (1*4)(%esp), %ebx; movl (0*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xD5A79147(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(6*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (5*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (4*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (2*4)(%esp); + movl ((((30 -15)&0xF)+8)*4)(%esp), %eax; movl ((((30 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((30 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((30 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((30)&0xF)+8)*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (1*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (0*4)(%esp), %ebx; movl (7*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x06CA6351(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (5*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (4*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (3*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (1*4)(%esp); + movl ((((31 -15)&0xF)+8)*4)(%esp), %eax; movl ((((31 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((31 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((31 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((31)&0xF)+8)*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (0*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (7*4)(%esp), %ebx; movl (6*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x14292967(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (4*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (3*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (2*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (0*4)(%esp); + movl ((((32 -15)&0xF)+8)*4)(%esp), %eax; movl ((((32 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((32 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((32 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((32)&0xF)+8)*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (7*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (6*4)(%esp), %ebx; movl (5*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x27B70A85(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(3*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (2*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (1*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (7*4)(%esp); + movl ((((33 -15)&0xF)+8)*4)(%esp), %eax; movl ((((33 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((33 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((33 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((33)&0xF)+8)*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (6*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (5*4)(%esp), %ebx; movl (4*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x2E1B2138(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (2*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (1*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (0*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (6*4)(%esp); + movl ((((34 -15)&0xF)+8)*4)(%esp), %eax; movl ((((34 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((34 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((34 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((34)&0xF)+8)*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (5*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (4*4)(%esp), %ebx; movl (3*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x4D2C6DFC(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (1*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (0*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (7*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (5*4)(%esp); + movl ((((35 -15)&0xF)+8)*4)(%esp), %eax; movl ((((35 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((35 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((35 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((35)&0xF)+8)*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (4*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (3*4)(%esp), %ebx; movl (2*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x53380D13(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(0*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (7*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (6*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (4*4)(%esp); + movl ((((36 -15)&0xF)+8)*4)(%esp), %eax; movl ((((36 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((36 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((36 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((36)&0xF)+8)*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (3*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (2*4)(%esp), %ebx; movl (1*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x650A7354(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (7*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (6*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (5*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (3*4)(%esp); + movl ((((37 -15)&0xF)+8)*4)(%esp), %eax; movl ((((37 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((37 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((37 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((37)&0xF)+8)*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (2*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (1*4)(%esp), %ebx; movl (0*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x766A0ABB(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (6*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (5*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (4*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (2*4)(%esp); + movl ((((38 -15)&0xF)+8)*4)(%esp), %eax; movl ((((38 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((38 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((38 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((38)&0xF)+8)*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (1*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (0*4)(%esp), %ebx; movl (7*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x81C2C92E(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(5*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (4*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (3*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (1*4)(%esp); + movl ((((39 -15)&0xF)+8)*4)(%esp), %eax; movl ((((39 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((39 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((39 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((39)&0xF)+8)*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (0*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (7*4)(%esp), %ebx; movl (6*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x92722C85(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (4*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (3*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (2*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (0*4)(%esp); + movl ((((40 -15)&0xF)+8)*4)(%esp), %eax; movl ((((40 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((40 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((40 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((40)&0xF)+8)*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (7*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (6*4)(%esp), %ebx; movl (5*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xA2BFE8A1(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (3*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (2*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (1*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (7*4)(%esp); + movl ((((41 -15)&0xF)+8)*4)(%esp), %eax; movl ((((41 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((41 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((41 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((41)&0xF)+8)*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (6*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (5*4)(%esp), %ebx; movl (4*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xA81A664B(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(2*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (1*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (0*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (6*4)(%esp); + movl ((((42 -15)&0xF)+8)*4)(%esp), %eax; movl ((((42 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((42 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((42 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((42)&0xF)+8)*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (5*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (4*4)(%esp), %ebx; movl (3*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xC24B8B70(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (1*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (0*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (7*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (5*4)(%esp); + movl ((((43 -15)&0xF)+8)*4)(%esp), %eax; movl ((((43 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((43 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((43 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((43)&0xF)+8)*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (4*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (3*4)(%esp), %ebx; movl (2*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xC76C51A3(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (0*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (7*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (6*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (4*4)(%esp); + movl ((((44 -15)&0xF)+8)*4)(%esp), %eax; movl ((((44 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((44 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((44 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((44)&0xF)+8)*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (3*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (2*4)(%esp), %ebx; movl (1*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xD192E819(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(7*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (6*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (5*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (3*4)(%esp); + movl ((((45 -15)&0xF)+8)*4)(%esp), %eax; movl ((((45 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((45 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((45 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((45)&0xF)+8)*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (2*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (1*4)(%esp), %ebx; movl (0*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xD6990624(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (6*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (5*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (4*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (2*4)(%esp); + movl ((((46 -15)&0xF)+8)*4)(%esp), %eax; movl ((((46 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((46 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((46 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((46)&0xF)+8)*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (1*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (0*4)(%esp), %ebx; movl (7*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xF40E3585(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (5*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (4*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (3*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (1*4)(%esp); + movl ((((47 -15)&0xF)+8)*4)(%esp), %eax; movl ((((47 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((47 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((47 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((47)&0xF)+8)*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (0*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (7*4)(%esp), %ebx; movl (6*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x106AA070(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(4*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (3*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (2*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (0*4)(%esp); + movl ((((48 -15)&0xF)+8)*4)(%esp), %eax; movl ((((48 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((48 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((48 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((48)&0xF)+8)*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (7*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (6*4)(%esp), %ebx; movl (5*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x19A4C116(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (3*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (2*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (1*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (7*4)(%esp); + movl ((((49 -15)&0xF)+8)*4)(%esp), %eax; movl ((((49 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((49 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((49 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((49)&0xF)+8)*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (6*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (5*4)(%esp), %ebx; movl (4*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x1E376C08(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (2*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (1*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (0*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (6*4)(%esp); + movl ((((50 -15)&0xF)+8)*4)(%esp), %eax; movl ((((50 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((50 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((50 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((50)&0xF)+8)*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (5*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (4*4)(%esp), %ebx; movl (3*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x2748774C(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(1*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (0*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (7*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (5*4)(%esp); + movl ((((51 -15)&0xF)+8)*4)(%esp), %eax; movl ((((51 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((51 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((51 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((51)&0xF)+8)*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (4*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (3*4)(%esp), %ebx; movl (2*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x34B0BCB5(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (0*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (7*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (6*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (4*4)(%esp); + movl ((((52 -15)&0xF)+8)*4)(%esp), %eax; movl ((((52 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((52 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((52 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((52)&0xF)+8)*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (3*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (2*4)(%esp), %ebx; movl (1*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x391C0CB3(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (7*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (6*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (5*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (3*4)(%esp); + movl ((((53 -15)&0xF)+8)*4)(%esp), %eax; movl ((((53 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((53 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((53 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((53)&0xF)+8)*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (2*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (1*4)(%esp), %ebx; movl (0*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x4ED8AA4A(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(6*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (5*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (4*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (2*4)(%esp); + movl ((((54 -15)&0xF)+8)*4)(%esp), %eax; movl ((((54 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((54 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((54 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((54)&0xF)+8)*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (1*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (0*4)(%esp), %ebx; movl (7*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x5B9CCA4F(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (5*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (4*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (3*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (1*4)(%esp); + movl ((((55 -15)&0xF)+8)*4)(%esp), %eax; movl ((((55 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((55 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((55 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((55)&0xF)+8)*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (0*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (7*4)(%esp), %ebx; movl (6*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x682E6FF3(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (4*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (3*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (2*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (0*4)(%esp); + movl ((((56 -15)&0xF)+8)*4)(%esp), %eax; movl ((((56 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((56 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((56 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((56)&0xF)+8)*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (7*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (6*4)(%esp), %ebx; movl (5*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x748F82EE(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(3*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (2*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (1*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (7*4)(%esp); + movl ((((57 -15)&0xF)+8)*4)(%esp), %eax; movl ((((57 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((57 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((57 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((57)&0xF)+8)*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (6*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (5*4)(%esp), %ebx; movl (4*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x78A5636F(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (2*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (1*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (0*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (6*4)(%esp); + movl ((((58 -15)&0xF)+8)*4)(%esp), %eax; movl ((((58 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((58 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((58 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((58)&0xF)+8)*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (5*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (4*4)(%esp), %ebx; movl (3*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x84C87814(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (1*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (0*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (7*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (5*4)(%esp); + movl ((((59 -15)&0xF)+8)*4)(%esp), %eax; movl ((((59 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((59 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((59 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((59)&0xF)+8)*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (4*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (3*4)(%esp), %ebx; movl (2*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x8CC70208(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(0*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (7*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (6*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (4*4)(%esp); + movl ((((60 -15)&0xF)+8)*4)(%esp), %eax; movl ((((60 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((60 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((60 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((60)&0xF)+8)*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (3*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (2*4)(%esp), %ebx; movl (1*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x90BEFFFA(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (7*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (6*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (5*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (3*4)(%esp); + movl ((((61 -15)&0xF)+8)*4)(%esp), %eax; movl ((((61 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((61 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((61 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((61)&0xF)+8)*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (2*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (1*4)(%esp), %ebx; movl (0*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xA4506CEB(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (6*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (5*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (4*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (2*4)(%esp); + movl ((((62 -15)&0xF)+8)*4)(%esp), %eax; movl ((((62 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((62 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((62 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((62)&0xF)+8)*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (1*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (0*4)(%esp), %ebx; movl (7*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xBEF9A3F7(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, 
(5*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (4*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (3*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (1*4)(%esp); + movl ((((63 -15)&0xF)+8)*4)(%esp), %eax; movl ((((63 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((63 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((63 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((63)&0xF)+8)*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (0*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (7*4)(%esp), %ebx; movl (6*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xC67178F2(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (4*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (3*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (2*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (0*4)(%esp); + + + movl 116(%esp), %esi + movl 0(%esp), %eax; addl %eax, 0(%esi) + movl 4(%esp), %eax; addl %eax, 4(%esi) + movl 8(%esp), %eax; addl %eax, 8(%esi) + movl 12(%esp), %eax; addl %eax, 12(%esi) + movl 16(%esp), %eax; addl %eax, 16(%esi) + movl 20(%esp), %eax; addl %eax, 20(%esi) + movl 24(%esp), %eax; addl %eax, 24(%esi) + movl 28(%esp), %eax; addl %eax, 28(%esi) + + + movl 96(%esp), %ebx + movl 100(%esp), %esi + movl 104(%esp), %edi + movl 108(%esp), %ebp + addl $112, %esp + .ifdef MS_STDCALL + ret $8 + .else + retl + .endif + + .ifndef __YASM__ +#if defined(__linux__) && defined(__ELF__) +.section .note.GNU-stack,"",%progbits +#endif + .endif
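For readability: each unrolled block above is one SHA-256 round over the eight working dwords kept at 0(%esp)..28(%esp); rounds 16-63 first extend the 16-word schedule ring buffer at 32(%esp)..95(%esp) (the (((t)&0xF)+8)*4 addressing) with W[t] = s1(W[t-2]) + W[t-7] + s0(W[t-15]) + W[t-16], and the trailing addl lines fold the working state back into the digest pointed to by 116(%esp). A minimal C sketch of the same computation in textbook form (helper and parameter names are illustrative only, not part of this patch):

#include <stdint.h>
#include <string.h>

static uint32_t rotr(uint32_t x, unsigned n) { return (x >> n) | (x << (32 - n)); }

/* One compression over a 64-byte block; state[8] is the running digest,
   k[64] are the SHA-256 round constants (0x428A2F98, ..., 0xC67178F2). */
static void sha256_block(uint32_t state[8], const uint8_t block[64], const uint32_t k[64])
{
    uint32_t w[64], v[8];
    int t;
    for (t = 0; t < 16; t++)                    /* big-endian load: the movl+bswapl pairs above */
        w[t] = ((uint32_t)block[4*t] << 24) | ((uint32_t)block[4*t+1] << 16) |
               ((uint32_t)block[4*t+2] << 8) | (uint32_t)block[4*t+3];
    for (t = 16; t < 64; t++) {                 /* schedule extension, rounds 16..63 */
        uint32_t s0 = rotr(w[t-15], 7) ^ rotr(w[t-15], 18) ^ (w[t-15] >> 3);
        uint32_t s1 = rotr(w[t-2], 17) ^ rotr(w[t-2], 19) ^ (w[t-2] >> 10);
        w[t] = w[t-16] + s0 + w[t-7] + s1;
    }
    memcpy(v, state, sizeof v);                 /* a..h = working copy of the digest */
    for (t = 0; t < 64; t++) {
        uint32_t S1  = rotr(v[4], 6) ^ rotr(v[4], 11) ^ rotr(v[4], 25);
        uint32_t ch  = (v[4] & v[5]) ^ (~v[4] & v[6]);
        uint32_t t1  = v[7] + S1 + ch + k[t] + w[t];
        uint32_t S0  = rotr(v[0], 2) ^ rotr(v[0], 13) ^ rotr(v[0], 22);
        uint32_t maj = (v[0] & v[1]) ^ (v[0] & v[2]) ^ (v[1] & v[2]);
        uint32_t t2  = S0 + maj;
        v[7] = v[6]; v[6] = v[5]; v[5] = v[4]; v[4] = v[3] + t1;
        v[3] = v[2]; v[2] = v[1]; v[1] = v[0]; v[0] = t1 + t2;
    }
    for (t = 0; t < 8; t++)                     /* the trailing addl %eax, (n*4)(%esi) lines */
        state[t] += v[t];
}

The hand-unrolled assembly computes Ch as ((f^g)&e)^g and Maj as (b&c)|(a&(b|c)), which are equivalent to the forms above, and it avoids the full w[64] array by recycling a 16-word ring buffer on the stack.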
\ No newline at end of file diff --git a/src/Crypto/sha256_avx1_x64.asm b/src/Crypto/sha256_avx1_x64.asm new file mode 100644 index 00000000..5c4ce559 --- /dev/null +++ b/src/Crypto/sha256_avx1_x64.asm @@ -0,0 +1,596 @@ +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; Copyright (c) 2012, Intel Corporation +; +; All rights reserved. +; +; Redistribution and use in source and binary forms, with or without +; modification, are permitted provided that the following conditions are +; met: +; +; * Redistributions of source code must retain the above copyright +; notice, this list of conditions and the following disclaimer. +; +; * Redistributions in binary form must reproduce the above copyright +; notice, this list of conditions and the following disclaimer in the +; documentation and/or other materials provided with the +; distribution. +; +; * Neither the name of the Intel Corporation nor the names of its +; contributors may be used to endorse or promote products derived from +; this software without specific prior written permission. +; +; +; THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY +; EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +; PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION OR +; CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +; EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; +; Example YASM command lines: +; Windows: yasm -Xvc -f x64 -rnasm -pnasm -o sha256_avx1.obj -g cv8 sha256_avx1.asm +; Linux: yasm -f x64 -f elf64 -X gnu -g dwarf2 -D LINUX -o sha256_avx1.o sha256_avx1.asm +; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; +; This code is described in an Intel White-Paper: +; "Fast SHA-256 Implementations on Intel Architecture Processors" +; +; To find it, surf to http://www.intel.com/p/en_US/embedded +; and search for that title. 
+; The paper is expected to be released roughly at the end of April, 2012 +; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; This code schedules 1 blocks at a time, with 4 lanes per block +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +%define VMOVDQ vmovdqu ;; assume buffers not aligned + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; Define Macros + +; addm [mem], reg +; Add reg to mem using reg-mem add and store +%macro addm 2 + add %2, %1 + mov %1, %2 +%endm + +%macro MY_ROR 2 + shld %1,%1,(32-(%2)) +%endm + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +; COPY_XMM_AND_BSWAP xmm, [mem], byte_flip_mask +; Load xmm with mem and byte swap each dword +%macro COPY_XMM_AND_BSWAP 3 + VMOVDQ %1, %2 + vpshufb %1, %1, %3 +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +%define X0 xmm4 +%define X1 xmm5 +%define X2 xmm6 +%define X3 xmm7 + +%define XTMP0 xmm0 +%define XTMP1 xmm1 +%define XTMP2 xmm2 +%define XTMP3 xmm3 +%define XTMP4 xmm8 +%define XFER xmm9 +%define XTMP5 xmm11 + +%define SHUF_00BA xmm10 ; shuffle xBxA -> 00BA +%define SHUF_DC00 xmm12 ; shuffle xDxC -> DC00 +%define BYTE_FLIP_MASK xmm13 + +%ifndef WINABI +%define NUM_BLKS rdx ; 3rd arg +%define CTX rsi ; 2nd arg +%define INP rdi ; 1st arg + +%define SRND rdi ; clobbers INP +%define c ecx +%define d r8d +%define e edx +%else +%define NUM_BLKS r8 ; 3rd arg +%define CTX rdx ; 2nd arg +%define INP rcx ; 1st arg + +%define SRND rcx ; clobbers INP +%define c edi +%define d esi +%define e r8d + +%endif +%define TBL rbp +%define a eax +%define b ebx + +%define f r9d +%define g r10d +%define h r11d + +%define y0 r13d +%define y1 r14d +%define y2 r15d + + +_INP_END_SIZE equ 8 +_INP_SIZE equ 8 +_XFER_SIZE equ 8 +%ifndef WINABI +_XMM_SAVE_SIZE equ 0 +%else +_XMM_SAVE_SIZE equ 8*16 +%endif +; STACK_SIZE plus pushes must be an odd multiple of 8 +_ALIGN_SIZE equ 8 + +_INP_END equ 0 +_INP equ _INP_END + _INP_END_SIZE +_XFER equ _INP + _INP_SIZE +_XMM_SAVE equ _XFER + _XFER_SIZE + _ALIGN_SIZE +STACK_SIZE equ _XMM_SAVE + _XMM_SAVE_SIZE + +; rotate_Xs +; Rotate values of symbols X0...X3 +%macro rotate_Xs 0 +%xdefine X_ X0 +%xdefine X0 X1 +%xdefine X1 X2 +%xdefine X2 X3 +%xdefine X3 X_ +%endm + +; ROTATE_ARGS +; Rotate values of symbols a...h +%macro ROTATE_ARGS 0 +%xdefine TMP_ h +%xdefine h g +%xdefine g f +%xdefine f e +%xdefine e d +%xdefine d c +%xdefine c b +%xdefine b a +%xdefine a TMP_ +%endm + +%macro FOUR_ROUNDS_AND_SCHED 0 + ;; compute s0 four at a time and s1 two at a time + ;; compute W[-16] + W[-7] 4 at a time + ;vmovdqa XTMP0, X3 + mov y0, e ; y0 = e + MY_ROR y0, (25-11) ; y0 = e >> (25-11) + mov y1, a ; y1 = a + vpalignr XTMP0, X3, X2, 4 ; XTMP0 = W[-7] + MY_ROR y1, (22-13) ; y1 = a >> (22-13) + xor y0, e ; y0 = e ^ (e >> (25-11)) + mov y2, f ; y2 = f + MY_ROR y0, (11-6) ; y0 = (e >> (11-6)) ^ (e >> (25-6)) + ;vmovdqa XTMP1, X1 + xor y1, a ; y1 = a ^ (a >> (22-13) + xor y2, g ; y2 = f^g + vpaddd XTMP0, XTMP0, X0 ; XTMP0 = W[-7] + W[-16] + xor y0, e ; y0 = e ^ (e >> (11-6)) ^ (e >> (25-6)) + and y2, e ; y2 = (f^g)&e + MY_ROR y1, (13-2) ; y1 = (a >> (13-2)) ^ (a >> (22-2)) + ;; compute s0 + vpalignr XTMP1, X1, X0, 4 ; XTMP1 = W[-15] + xor y1, a ; y1 = a ^ (a >> (13-2)) ^ (a >> (22-2)) + MY_ROR y0, 6 ; y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) + xor y2, g ; y2 = CH = ((f^g)&e)^g + + + MY_ROR y1, 2 ; y1 = S0 = (a>>2) ^ (a>>13) ^ (a>>22) + add y2, y0 ; y2 = S1 + CH + add y2, [rsp + _XFER + 0*4] ; y2 = k + w + S1 + CH + + mov y0, a ; y0 = a + add h, y2 ; h = h + S1 + CH + k + w + mov y2, a ; y2 = a + + vpsrld 
XTMP2, XTMP1, 7 + + or y0, c ; y0 = a|c + add d, h ; d = d + h + S1 + CH + k + w + and y2, c ; y2 = a&c + + vpslld XTMP3, XTMP1, (32-7) + + and y0, b ; y0 = (a|c)&b + add h, y1 ; h = h + S1 + CH + k + w + S0 + + vpor XTMP3, XTMP3, XTMP2 ; XTMP1 = W[-15] MY_ROR 7 + + or y0, y2 ; y0 = MAJ = (a|c)&b)|(a&c) + add h, y0 ; h = h + S1 + CH + k + w + S0 + MAJ + +ROTATE_ARGS + + mov y0, e ; y0 = e + mov y1, a ; y1 = a + + + MY_ROR y0, (25-11) ; y0 = e >> (25-11) + xor y0, e ; y0 = e ^ (e >> (25-11)) + mov y2, f ; y2 = f + MY_ROR y1, (22-13) ; y1 = a >> (22-13) + + vpsrld XTMP2, XTMP1,18 + + xor y1, a ; y1 = a ^ (a >> (22-13) + MY_ROR y0, (11-6) ; y0 = (e >> (11-6)) ^ (e >> (25-6)) + xor y2, g ; y2 = f^g + + vpsrld XTMP4, XTMP1, 3 ; XTMP4 = W[-15] >> 3 + + MY_ROR y1, (13-2) ; y1 = (a >> (13-2)) ^ (a >> (22-2)) + xor y0, e ; y0 = e ^ (e >> (11-6)) ^ (e >> (25-6)) + and y2, e ; y2 = (f^g)&e + MY_ROR y0, 6 ; y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) + + vpslld XTMP1, XTMP1, (32-18) + + xor y1, a ; y1 = a ^ (a >> (13-2)) ^ (a >> (22-2)) + xor y2, g ; y2 = CH = ((f^g)&e)^g + + vpxor XTMP3, XTMP3, XTMP1 + + add y2, y0 ; y2 = S1 + CH + add y2, [rsp + _XFER + 1*4] ; y2 = k + w + S1 + CH + MY_ROR y1, 2 ; y1 = S0 = (a>>2) ^ (a>>13) ^ (a>>22) + + vpxor XTMP3, XTMP3, XTMP2 ; XTMP1 = W[-15] MY_ROR 7 ^ W[-15] MY_ROR 18 + + mov y0, a ; y0 = a + add h, y2 ; h = h + S1 + CH + k + w + mov y2, a ; y2 = a + + vpxor XTMP1, XTMP3, XTMP4 ; XTMP1 = s0 + + or y0, c ; y0 = a|c + add d, h ; d = d + h + S1 + CH + k + w + and y2, c ; y2 = a&c + ;; compute low s1 + vpshufd XTMP2, X3, 11111010b ; XTMP2 = W[-2] {BBAA} + and y0, b ; y0 = (a|c)&b + add h, y1 ; h = h + S1 + CH + k + w + S0 + vpaddd XTMP0, XTMP0, XTMP1 ; XTMP0 = W[-16] + W[-7] + s0 + or y0, y2 ; y0 = MAJ = (a|c)&b)|(a&c) + add h, y0 ; h = h + S1 + CH + k + w + S0 + MAJ + +ROTATE_ARGS + ;vmovdqa XTMP3, XTMP2 ; XTMP3 = W[-2] {BBAA} + + mov y0, e ; y0 = e + mov y1, a ; y1 = a + MY_ROR y0, (25-11) ; y0 = e >> (25-11) + + ;vmovdqa XTMP4, XTMP2 ; XTMP4 = W[-2] {BBAA} + + xor y0, e ; y0 = e ^ (e >> (25-11)) + MY_ROR y1, (22-13) ; y1 = a >> (22-13) + mov y2, f ; y2 = f + xor y1, a ; y1 = a ^ (a >> (22-13) + MY_ROR y0, (11-6) ; y0 = (e >> (11-6)) ^ (e >> (25-6)) + + vpsrld XTMP4, XTMP2, 10 ; XTMP4 = W[-2] >> 10 {BBAA} + + xor y2, g ; y2 = f^g + + vpsrlq XTMP3, XTMP2, 19 ; XTMP3 = W[-2] MY_ROR 19 {xBxA} + + xor y0, e ; y0 = e ^ (e >> (11-6)) ^ (e >> (25-6)) + and y2, e ; y2 = (f^g)&e + + vpsrlq XTMP2, XTMP2, 17 ; XTMP2 = W[-2] MY_ROR 17 {xBxA} + + MY_ROR y1, (13-2) ; y1 = (a >> (13-2)) ^ (a >> (22-2)) + xor y1, a ; y1 = a ^ (a >> (13-2)) ^ (a >> (22-2)) + xor y2, g ; y2 = CH = ((f^g)&e)^g + MY_ROR y0, 6 ; y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) + vpxor XTMP2, XTMP2, XTMP3 + add y2, y0 ; y2 = S1 + CH + MY_ROR y1, 2 ; y1 = S0 = (a>>2) ^ (a>>13) ^ (a>>22) + add y2, [rsp + _XFER + 2*4] ; y2 = k + w + S1 + CH + vpxor XTMP4, XTMP4, XTMP2 ; XTMP4 = s1 {xBxA} + mov y0, a ; y0 = a + add h, y2 ; h = h + S1 + CH + k + w + mov y2, a ; y2 = a + vpshufb XTMP4, XTMP4, SHUF_00BA ; XTMP4 = s1 {00BA} + or y0, c ; y0 = a|c + add d, h ; d = d + h + S1 + CH + k + w + and y2, c ; y2 = a&c + vpaddd XTMP0, XTMP0, XTMP4 ; XTMP0 = {..., ..., W[1], W[0]} + and y0, b ; y0 = (a|c)&b + add h, y1 ; h = h + S1 + CH + k + w + S0 + ;; compute high s1 + vpshufd XTMP2, XTMP0, 01010000b ; XTMP2 = W[-2] {DDCC} + or y0, y2 ; y0 = MAJ = (a|c)&b)|(a&c) + add h, y0 ; h = h + S1 + CH + k + w + S0 + MAJ + +ROTATE_ARGS + ;vmovdqa XTMP3, XTMP2 ; XTMP3 = W[-2] {DDCC} + mov y0, e ; y0 = e + MY_ROR y0, (25-11) ; y0 = e >> (25-11) + 
mov y1, a ; y1 = a + ;vmovdqa XTMP5, XTMP2 ; XTMP5 = W[-2] {DDCC} + MY_ROR y1, (22-13) ; y1 = a >> (22-13) + xor y0, e ; y0 = e ^ (e >> (25-11)) + mov y2, f ; y2 = f + MY_ROR y0, (11-6) ; y0 = (e >> (11-6)) ^ (e >> (25-6)) + + vpsrld XTMP5, XTMP2, 10 ; XTMP5 = W[-2] >> 10 {DDCC} + + xor y1, a ; y1 = a ^ (a >> (22-13) + xor y2, g ; y2 = f^g + + vpsrlq XTMP3, XTMP2, 19 ; XTMP3 = W[-2] MY_ROR 19 {xDxC} + + xor y0, e ; y0 = e ^ (e >> (11-6)) ^ (e >> (25-6)) + and y2, e ; y2 = (f^g)&e + MY_ROR y1, (13-2) ; y1 = (a >> (13-2)) ^ (a >> (22-2)) + + vpsrlq XTMP2, XTMP2, 17 ; XTMP2 = W[-2] MY_ROR 17 {xDxC} + + xor y1, a ; y1 = a ^ (a >> (13-2)) ^ (a >> (22-2)) + MY_ROR y0, 6 ; y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) + xor y2, g ; y2 = CH = ((f^g)&e)^g + + vpxor XTMP2, XTMP2, XTMP3 + + MY_ROR y1, 2 ; y1 = S0 = (a>>2) ^ (a>>13) ^ (a>>22) + add y2, y0 ; y2 = S1 + CH + add y2, [rsp + _XFER + 3*4] ; y2 = k + w + S1 + CH + vpxor XTMP5, XTMP5, XTMP2 ; XTMP5 = s1 {xDxC} + mov y0, a ; y0 = a + add h, y2 ; h = h + S1 + CH + k + w + mov y2, a ; y2 = a + vpshufb XTMP5, XTMP5, SHUF_DC00 ; XTMP5 = s1 {DC00} + or y0, c ; y0 = a|c + add d, h ; d = d + h + S1 + CH + k + w + and y2, c ; y2 = a&c + vpaddd X0, XTMP5, XTMP0 ; X0 = {W[3], W[2], W[1], W[0]} + and y0, b ; y0 = (a|c)&b + add h, y1 ; h = h + S1 + CH + k + w + S0 + or y0, y2 ; y0 = MAJ = (a|c)&b)|(a&c) + add h, y0 ; h = h + S1 + CH + k + w + S0 + MAJ + +ROTATE_ARGS +rotate_Xs +%endm + +;; input is [rsp + _XFER + %1 * 4] +%macro DO_ROUND 1 + mov y0, e ; y0 = e + MY_ROR y0, (25-11) ; y0 = e >> (25-11) + mov y1, a ; y1 = a + xor y0, e ; y0 = e ^ (e >> (25-11)) + MY_ROR y1, (22-13) ; y1 = a >> (22-13) + mov y2, f ; y2 = f + xor y1, a ; y1 = a ^ (a >> (22-13) + MY_ROR y0, (11-6) ; y0 = (e >> (11-6)) ^ (e >> (25-6)) + xor y2, g ; y2 = f^g + xor y0, e ; y0 = e ^ (e >> (11-6)) ^ (e >> (25-6)) + MY_ROR y1, (13-2) ; y1 = (a >> (13-2)) ^ (a >> (22-2)) + and y2, e ; y2 = (f^g)&e + xor y1, a ; y1 = a ^ (a >> (13-2)) ^ (a >> (22-2)) + MY_ROR y0, 6 ; y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) + xor y2, g ; y2 = CH = ((f^g)&e)^g + add y2, y0 ; y2 = S1 + CH + MY_ROR y1, 2 ; y1 = S0 = (a>>2) ^ (a>>13) ^ (a>>22) + add y2, [rsp + _XFER + %1 * 4] ; y2 = k + w + S1 + CH + mov y0, a ; y0 = a + add h, y2 ; h = h + S1 + CH + k + w + mov y2, a ; y2 = a + or y0, c ; y0 = a|c + add d, h ; d = d + h + S1 + CH + k + w + and y2, c ; y2 = a&c + and y0, b ; y0 = (a|c)&b + add h, y1 ; h = h + S1 + CH + k + w + S0 + or y0, y2 ; y0 = MAJ = (a|c)&b)|(a&c) + add h, y0 ; h = h + S1 + CH + k + w + S0 + MAJ + ROTATE_ARGS +%endm + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; void sha256_avx(void *input_data, UINT32 digest[8], UINT64 num_blks) +;; arg 1 : pointer to input data +;; arg 2 : pointer to digest +;; arg 3 : Num blocks +section .text +global sha256_avx +align 32 +sha256_avx: + push rbx +%ifdef WINABI + push rsi + push rdi +%endif + push rbp + push r13 + push r14 + push r15 + + sub rsp,STACK_SIZE +%ifdef WINABI + vmovdqa [rsp + _XMM_SAVE + 0*16],xmm6 + vmovdqa [rsp + _XMM_SAVE + 1*16],xmm7 + vmovdqa [rsp + _XMM_SAVE + 2*16],xmm8 + vmovdqa [rsp + _XMM_SAVE + 3*16],xmm9 + vmovdqa [rsp + _XMM_SAVE + 4*16],xmm10 + vmovdqa [rsp + _XMM_SAVE + 5*16],xmm11 + vmovdqa [rsp + _XMM_SAVE + 6*16],xmm12 + vmovdqa [rsp + _XMM_SAVE + 7*16],xmm13 +%endif + + shl NUM_BLKS, 6 ; convert to bytes + jz done_hash + add NUM_BLKS, INP ; pointer to end of data + mov [rsp + _INP_END], NUM_BLKS + + ;; load initial digest + 
mov a,[4*0 + CTX] + mov b,[4*1 + CTX] + mov c,[4*2 + CTX] + mov d,[4*3 + CTX] + mov e,[4*4 + CTX] + mov f,[4*5 + CTX] + mov g,[4*6 + CTX] + mov h,[4*7 + CTX] + + vmovdqa BYTE_FLIP_MASK, [PSHUFFLE_BYTE_FLIP_MASK wrt rip] + vmovdqa SHUF_00BA, [_SHUF_00BA wrt rip] + vmovdqa SHUF_DC00, [_SHUF_DC00 wrt rip] + +loop0: + lea TBL,[K256 wrt rip] + + ;; byte swap first 16 dwords + COPY_XMM_AND_BSWAP X0, [INP + 0*16], BYTE_FLIP_MASK + COPY_XMM_AND_BSWAP X1, [INP + 1*16], BYTE_FLIP_MASK + COPY_XMM_AND_BSWAP X2, [INP + 2*16], BYTE_FLIP_MASK + COPY_XMM_AND_BSWAP X3, [INP + 3*16], BYTE_FLIP_MASK + + mov [rsp + _INP], INP + + ;; schedule 48 input dwords, by doing 3 rounds of 16 each + mov SRND, 3 +align 16 +loop1: + vpaddd XFER, X0, [TBL + 0*16] + vmovdqa [rsp + _XFER], XFER + FOUR_ROUNDS_AND_SCHED + + vpaddd XFER, X0, [TBL + 1*16] + vmovdqa [rsp + _XFER], XFER + FOUR_ROUNDS_AND_SCHED + + vpaddd XFER, X0, [TBL + 2*16] + vmovdqa [rsp + _XFER], XFER + FOUR_ROUNDS_AND_SCHED + + vpaddd XFER, X0, [TBL + 3*16] + vmovdqa [rsp + _XFER], XFER + add TBL, 4*16 + FOUR_ROUNDS_AND_SCHED + + sub SRND, 1 + jne loop1 + + mov SRND, 2 +loop2: + vpaddd XFER, X0, [TBL + 0*16] + vmovdqa [rsp + _XFER], XFER + DO_ROUND 0 + DO_ROUND 1 + DO_ROUND 2 + DO_ROUND 3 + + vpaddd XFER, X1, [TBL + 1*16] + vmovdqa [rsp + _XFER], XFER + add TBL, 2*16 + DO_ROUND 0 + DO_ROUND 1 + DO_ROUND 2 + DO_ROUND 3 + + vmovdqa X0, X2 + vmovdqa X1, X3 + + sub SRND, 1 + jne loop2 + + + addm [4*0 + CTX],a + addm [4*1 + CTX],b + addm [4*2 + CTX],c + addm [4*3 + CTX],d + addm [4*4 + CTX],e + addm [4*5 + CTX],f + addm [4*6 + CTX],g + addm [4*7 + CTX],h + + mov INP, [rsp + _INP] + add INP, 64 + cmp INP, [rsp + _INP_END] + jne loop0 + +done_hash: +%ifdef WINABI + vmovdqa xmm6,[rsp + _XMM_SAVE + 0*16] + vmovdqa xmm7,[rsp + _XMM_SAVE + 1*16] + vmovdqa xmm8,[rsp + _XMM_SAVE + 2*16] + vmovdqa xmm9,[rsp + _XMM_SAVE + 3*16] + vmovdqa xmm10,[rsp + _XMM_SAVE + 4*16] + vmovdqa xmm11,[rsp + _XMM_SAVE + 5*16] + vmovdqa xmm12,[rsp + _XMM_SAVE + 6*16] + vmovdqa xmm13,[rsp + _XMM_SAVE + 7*16] +%endif + + + add rsp, STACK_SIZE + + pop r15 + pop r14 + pop r13 + pop rbp +%ifdef WINABI + pop rdi + pop rsi +%endif + pop rbx + + ret + + +section .data +align 64 +K256: + dd 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 + dd 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 + dd 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 + dd 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 + dd 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc + dd 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da + dd 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 + dd 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 + dd 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 + dd 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 + dd 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 + dd 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 + dd 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 + dd 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 + dd 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 + dd 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 + +PSHUFFLE_BYTE_FLIP_MASK: ddq 0x0c0d0e0f08090a0b0405060700010203 + +; shuffle xBxA -> 00BA +_SHUF_00BA: ddq 0xFFFFFFFFFFFFFFFF0b0a090803020100 + +; shuffle xDxC -> DC00 +_SHUF_DC00: ddq 0x0b0a090803020100FFFFFFFFFFFFFFFF + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git 
a/src/Crypto/sha256_avx1_x86.asm b/src/Crypto/sha256_avx1_x86.asm new file mode 100644 index 00000000..31c8bd0d --- /dev/null +++ b/src/Crypto/sha256_avx1_x86.asm @@ -0,0 +1,10 @@ + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git a/src/Crypto/sha256_avx2_x64.asm b/src/Crypto/sha256_avx2_x64.asm new file mode 100644 index 00000000..458c2945 --- /dev/null +++ b/src/Crypto/sha256_avx2_x64.asm @@ -0,0 +1,840 @@ +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; Copyright (c) 2012, Intel Corporation +; +; All rights reserved. +; +; Redistribution and use in source and binary forms, with or without +; modification, are permitted provided that the following conditions are +; met: +; +; * Redistributions of source code must retain the above copyright +; notice, this list of conditions and the following disclaimer. +; +; * Redistributions in binary form must reproduce the above copyright +; notice, this list of conditions and the following disclaimer in the +; documentation and/or other materials provided with the +; distribution. +; +; * Neither the name of the Intel Corporation nor the names of its +; contributors may be used to endorse or promote products derived from +; this software without specific prior written permission. +; +; +; THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY +; EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +; PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION OR +; CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +; EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; +; Example YASM command lines: +; Windows: yasm -Xvc -f x64 -rnasm -pnasm -o sha256_avx2_rorx2.obj -g cv8 sha256_avx2_rorx2.asm +; Linux: yasm -f x64 -f elf64 -X gnu -g dwarf2 -D LINUX -o sha256_avx2_rorx2.o sha256_avx2_rorx2.asm +; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; +; This code is described in an Intel White-Paper: +; "Fast SHA-256 Implementations on Intel Architecture Processors" +; +; To find it, surf to http://www.intel.com/p/en_US/embedded +; and search for that title. +; The paper is expected to be released roughly at the end of April, 2012 +; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; This code schedules 2 blocks at a time, with 4 lanes per block +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +; Modified by kerukuro for use in cppcrypto. 
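For readers of the FOUR_ROUNDS_AND_SCHED macros in these new files, the vpalignr/vpsrld/vpslld/vpshufd sequences compute the standard SHA-256 message schedule four words at a time (and across two interleaved blocks in this AVX2 version). A scalar reference for the same recurrence is sketched below; the helper names are illustrative and not part of the patch.

#include <stdint.h>

/* Scalar form of the schedule the vector code computes 4 words at a time:
 * vpalignr builds W[-7] and W[-15], the shift/xor chains build s0 and s1,
 * and SHUF_00BA / SHUF_DC00 merge the two-at-a-time s1 halves. */
static uint32_t rotr32(uint32_t x, unsigned n)
{
    return (x >> n) | (x << (32 - n));
}

static void sha256_schedule(const uint32_t block[16], uint32_t w[64])
{
    for (int t = 0; t < 16; t++)
        w[t] = block[t];                 /* already byte-swapped, as after vpshufb */

    for (int t = 16; t < 64; t++) {
        uint32_t s0 = rotr32(w[t - 15], 7) ^ rotr32(w[t - 15], 18) ^ (w[t - 15] >> 3);
        uint32_t s1 = rotr32(w[t - 2], 17) ^ rotr32(w[t - 2], 19) ^ (w[t - 2] >> 10);
        w[t] = w[t - 16] + s0 + w[t - 7] + s1;   /* W[-16] + s0 + W[-7] + s1 */
    }
}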
+ +%define VMOVDQ vmovdqu ;; assume buffers not aligned + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; Define Macros + +; addm [mem], reg +; Add reg to mem using reg-mem add and store +%macro addm 2 + add %2, %1 + mov %1, %2 +%endm + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +%define X0 ymm4 +%define X1 ymm5 +%define X2 ymm6 +%define X3 ymm7 + +; XMM versions of above +%define XWORD0 xmm4 +%define XWORD1 xmm5 +%define XWORD2 xmm6 +%define XWORD3 xmm7 + +%define XTMP0 ymm0 +%define XTMP1 ymm1 +%define XTMP2 ymm2 +%define XTMP3 ymm3 +%define XTMP4 ymm8 +%define XFER ymm9 +%define XTMP5 ymm11 + +%define SHUF_00BA ymm10 ; shuffle xBxA -> 00BA +%define SHUF_DC00 ymm12 ; shuffle xDxC -> DC00 +%define BYTE_FLIP_MASK ymm13 + +%define X_BYTE_FLIP_MASK xmm13 ; XMM version of BYTE_FLIP_MASK + +%ifndef WINABI +%define NUM_BLKS rdx ; 3rd arg +%define CTX rsi ; 2nd arg +%define INP rdi ; 1st arg +%define c ecx +%define d r8d +%define e edx ; clobbers NUM_BLKS +%define y3 edi ; clobbers INP +%else +%define NUM_BLKS r8 ; 3rd arg +%define CTX rdx ; 2nd arg +%define INP rcx ; 1st arg +%define c edi +%define d esi +%define e r8d ; clobbers NUM_BLKS +%define y3 ecx ; clobbers INP + +%endif + + +%define TBL rbp +%define SRND CTX ; SRND is same register as CTX + +%define a eax +%define b ebx +%define f r9d +%define g r10d +%define h r11d +%define old_h r11d + +%define T1 r12d +%define y0 r13d +%define y1 r14d +%define y2 r15d + + +_XFER_SIZE equ 2*64*4 ; 2 blocks, 64 rounds, 4 bytes/round +%ifndef WINABI +_XMM_SAVE_SIZE equ 0 +%else +_XMM_SAVE_SIZE equ 8*16 +%endif +_INP_END_SIZE equ 8 +_INP_SIZE equ 8 +_CTX_SIZE equ 8 +_RSP_SIZE equ 8 + +_XFER equ 0 +_XMM_SAVE equ _XFER + _XFER_SIZE +_INP_END equ _XMM_SAVE + _XMM_SAVE_SIZE +_INP equ _INP_END + _INP_END_SIZE +_CTX equ _INP + _INP_SIZE +_RSP equ _CTX + _CTX_SIZE +STACK_SIZE equ _RSP + _RSP_SIZE + +; rotate_Xs +; Rotate values of symbols X0...X3 +%macro rotate_Xs 0 +%xdefine X_ X0 +%xdefine X0 X1 +%xdefine X1 X2 +%xdefine X2 X3 +%xdefine X3 X_ +%endm + +; ROTATE_ARGS +; Rotate values of symbols a...h +%macro ROTATE_ARGS 0 +%xdefine old_h h +%xdefine TMP_ h +%xdefine h g +%xdefine g f +%xdefine f e +%xdefine e d +%xdefine d c +%xdefine c b +%xdefine b a +%xdefine a TMP_ +%endm + +%macro FOUR_ROUNDS_AND_SCHED 1 +%define %%XFER %1 +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 0 ;;;;;;;;;;;;;;;;;;;;;;;;;;;; + + mov y3, a ; y3 = a ; MAJA + rorx y0, e, 25 ; y0 = e >> 25 ; S1A + rorx y1, e, 11 ; y1 = e >> 11 ; S1B + + add h, dword[%%XFER+0*4] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + vpalignr XTMP0, X3, X2, 4 ; XTMP0 = W[-7] + mov y2, f ; y2 = f ; CH + rorx T1, a, 13 ; T1 = a >> 13 ; S0B + + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ; S1 + xor y2, g ; y2 = f^g ; CH + vpaddd XTMP0, XTMP0, X0 ; XTMP0 = W[-7] + W[-16]; y1 = (e >> 6) ; S1 + rorx y1, e, 6 ; y1 = (e >> 6) ; S1 + + and y2, e ; y2 = (f^g)&e ; CH + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ^ (e>>6) ; S1 + rorx y1, a, 22 ; y1 = a >> 22 ; S0A + add d, h ; d = k + w + h + d ; -- + + and y3, b ; y3 = (a|c)&b ; MAJA + vpalignr XTMP1, X1, X0, 4 ; XTMP1 = W[-15] + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ; S0 + rorx T1, a, 2 ; T1 = (a >> 2) ; S0 + + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + vpsrld XTMP2, XTMP1, 7 + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ^ (a>>2) ; S0 + mov T1, a ; T1 = a ; MAJB + and T1, c ; T1 = a&c ; MAJB + + add y2, y0 ; y2 = S1 + CH ; -- + vpslld XTMP3, XTMP1, (32-7) + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + vpor XTMP3, XTMP3, 
XTMP2 ; XTMP3 = W[-15] ror 7 + + vpsrld XTMP2, XTMP1,18 + add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0; -- + add h, y3 ; h = t1 + S0 + MAJ ; -- + + +ROTATE_ARGS + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 1 ;;;;;;;;;;;;;;;;;;;;;;;;;;;; + + + mov y3, a ; y3 = a ; MAJA + rorx y0, e, 25 ; y0 = e >> 25 ; S1A + rorx y1, e, 11 ; y1 = e >> 11 ; S1B + add h, dword[%%XFER+1*4] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + + vpsrld XTMP4, XTMP1, 3 ; XTMP4 = W[-15] >> 3 + mov y2, f ; y2 = f ; CH + rorx T1, a, 13 ; T1 = a >> 13 ; S0B + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ; S1 + xor y2, g ; y2 = f^g ; CH + + + rorx y1, e, 6 ; y1 = (e >> 6) ; S1 + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ^ (e>>6) ; S1 + rorx y1, a, 22 ; y1 = a >> 22 ; S0A + and y2, e ; y2 = (f^g)&e ; CH + add d, h ; d = k + w + h + d ; -- + + vpslld XTMP1, XTMP1, (32-18) + and y3, b ; y3 = (a|c)&b ; MAJA + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ; S0 + + vpxor XTMP3, XTMP3, XTMP1 + rorx T1, a, 2 ; T1 = (a >> 2) ; S0 + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + + vpxor XTMP3, XTMP3, XTMP2 ; XTMP3 = W[-15] ror 7 ^ W[-15] ror 18 + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ^ (a>>2) ; S0 + mov T1, a ; T1 = a ; MAJB + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + + vpxor XTMP1, XTMP3, XTMP4 ; XTMP1 = s0 + vpshufd XTMP2, X3, 11111010b ; XTMP2 = W[-2] {BBAA} + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + + vpaddd XTMP0, XTMP0, XTMP1 ; XTMP0 = W[-16] + W[-7] + s0 + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0; -- + add h, y3 ; h = t1 + S0 + MAJ ; -- + + vpsrld XTMP4, XTMP2, 10 ; XTMP4 = W[-2] >> 10 {BBAA} + + +ROTATE_ARGS + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 2 ;;;;;;;;;;;;;;;;;;;;;;;;;;;; + + mov y3, a ; y3 = a ; MAJA + rorx y0, e, 25 ; y0 = e >> 25 ; S1A + add h, [%%XFER+2*4] ; h = k + w + h ; -- + + vpsrlq XTMP3, XTMP2, 19 ; XTMP3 = W[-2] ror 19 {xBxA} + rorx y1, e, 11 ; y1 = e >> 11 ; S1B + or y3, c ; y3 = a|c ; MAJA + mov y2, f ; y2 = f ; CH + xor y2, g ; y2 = f^g ; CH + + rorx T1, a, 13 ; T1 = a >> 13 ; S0B + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ; S1 + vpsrlq XTMP2, XTMP2, 17 ; XTMP2 = W[-2] ror 17 {xBxA} + and y2, e ; y2 = (f^g)&e ; CH + + rorx y1, e, 6 ; y1 = (e >> 6) ; S1 + vpxor XTMP2, XTMP2, XTMP3 + add d, h ; d = k + w + h + d ; -- + and y3, b ; y3 = (a|c)&b ; MAJA + + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ^ (e>>6) ; S1 + rorx y1, a, 22 ; y1 = a >> 22 ; S0A + vpxor XTMP4, XTMP4, XTMP2 ; XTMP4 = s1 {xBxA} + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + + vpshufb XTMP4, XTMP4, SHUF_00BA ; XTMP4 = s1 {00BA} + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ; S0 + rorx T1, a, 2 ; T1 = (a >> 2) ; S0 + vpaddd XTMP0, XTMP0, XTMP4 ; XTMP0 = {..., ..., W[1], W[0]} + + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ^ (a>>2) ; S0 + mov T1, a ; T1 = a ; MAJB + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + vpshufd XTMP2, XTMP0, 01010000b ; XTMP2 = W[-2] {DDCC} + + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0; -- + + add h, y3 ; h = t1 + S0 + MAJ ; -- + + +ROTATE_ARGS + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 3 ;;;;;;;;;;;;;;;;;;;;;;;;;;;; + + mov y3, a ; y3 = a ; MAJA + rorx y0, e, 25 ; y0 = e >> 25 ; S1A + rorx y1, e, 11 ; y1 = e >> 11 ; S1B + add h, dword[%%XFER+3*4] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + + vpsrld XTMP5, XTMP2, 10 ; XTMP5 = W[-2] >> 10 {DDCC} + mov y2, f 
; y2 = f ; CH + rorx T1, a, 13 ; T1 = a >> 13 ; S0B + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ; S1 + xor y2, g ; y2 = f^g ; CH + + + vpsrlq XTMP3, XTMP2, 19 ; XTMP3 = W[-2] ror 19 {xDxC} + rorx y1, e, 6 ; y1 = (e >> 6) ; S1 + and y2, e ; y2 = (f^g)&e ; CH + add d, h ; d = k + w + h + d ; -- + and y3, b ; y3 = (a|c)&b ; MAJA + + vpsrlq XTMP2, XTMP2, 17 ; XTMP2 = W[-2] ror 17 {xDxC} + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ^ (e>>6) ; S1 + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + + vpxor XTMP2, XTMP2, XTMP3 + rorx y1, a, 22 ; y1 = a >> 22 ; S0A + add y2, y0 ; y2 = S1 + CH ; -- + + vpxor XTMP5, XTMP5, XTMP2 ; XTMP5 = s1 {xDxC} + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ; S0 + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + + rorx T1, a, 2 ; T1 = (a >> 2) ; S0 + vpshufb XTMP5, XTMP5, SHUF_DC00 ; XTMP5 = s1 {DC00} + + vpaddd X0, XTMP5, XTMP0 ; X0 = {W[3], W[2], W[1], W[0]} + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ^ (a>>2) ; S0 + mov T1, a ; T1 = a ; MAJB + and T1, c ; T1 = a&c ; MAJB + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + + add h, y1 ; h = k + w + h + S0 ; -- + add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0; -- + add h, y3 ; h = t1 + S0 + MAJ ; -- + +ROTATE_ARGS +rotate_Xs +%endm + +%macro DO_4ROUNDS 1 +%define %%XFER %1 +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 0 ;;;;;;;;;;;;;;;;;;;;;;;;;;; + + mov y2, f ; y2 = f ; CH + rorx y0, e, 25 ; y0 = e >> 25 ; S1A + rorx y1, e, 11 ; y1 = e >> 11 ; S1B + xor y2, g ; y2 = f^g ; CH + + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ; S1 + rorx y1, e, 6 ; y1 = (e >> 6) ; S1 + and y2, e ; y2 = (f^g)&e ; CH + + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ^ (e>>6) ; S1 + rorx T1, a, 13 ; T1 = a >> 13 ; S0B + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + rorx y1, a, 22 ; y1 = a >> 22 ; S0A + mov y3, a ; y3 = a ; MAJA + + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ; S0 + rorx T1, a, 2 ; T1 = (a >> 2) ; S0 + add h, dword[%%XFER + 4*0] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ^ (a>>2) ; S0 + mov T1, a ; T1 = a ; MAJB + and y3, b ; y3 = (a|c)&b ; MAJA + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + + + add d, h ; d = k + w + h + d ; -- + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + + + ;add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0; -- + + ;add h, y3 ; h = t1 + S0 + MAJ ; -- + + ROTATE_ARGS + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 1 ;;;;;;;;;;;;;;;;;;;;;;;;;;; + + add old_h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0; -- + mov y2, f ; y2 = f ; CH + rorx y0, e, 25 ; y0 = e >> 25 ; S1A + rorx y1, e, 11 ; y1 = e >> 11 ; S1B + xor y2, g ; y2 = f^g ; CH + + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ; S1 + rorx y1, e, 6 ; y1 = (e >> 6) ; S1 + and y2, e ; y2 = (f^g)&e ; CH + add old_h, y3 ; h = t1 + S0 + MAJ ; -- + + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ^ (e>>6) ; S1 + rorx T1, a, 13 ; T1 = a >> 13 ; S0B + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + rorx y1, a, 22 ; y1 = a >> 22 ; S0A + mov y3, a ; y3 = a ; MAJA + + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ; S0 + rorx T1, a, 2 ; T1 = (a >> 2) ; S0 + add h, dword[%%XFER + 4*1] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ^ (a>>2) ; S0 + mov T1, a ; T1 = a ; MAJB + and y3, b ; y3 = (a|c)&b ; MAJA + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + + + add d, h ; d = k + w + h + d ; -- + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + + + 
;add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0; -- + + ;add h, y3 ; h = t1 + S0 + MAJ ; -- + + ROTATE_ARGS + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 2 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + + add old_h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0; -- + mov y2, f ; y2 = f ; CH + rorx y0, e, 25 ; y0 = e >> 25 ; S1A + rorx y1, e, 11 ; y1 = e >> 11 ; S1B + xor y2, g ; y2 = f^g ; CH + + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ; S1 + rorx y1, e, 6 ; y1 = (e >> 6) ; S1 + and y2, e ; y2 = (f^g)&e ; CH + add old_h, y3 ; h = t1 + S0 + MAJ ; -- + + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ^ (e>>6) ; S1 + rorx T1, a, 13 ; T1 = a >> 13 ; S0B + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + rorx y1, a, 22 ; y1 = a >> 22 ; S0A + mov y3, a ; y3 = a ; MAJA + + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ; S0 + rorx T1, a, 2 ; T1 = (a >> 2) ; S0 + add h, dword[%%XFER + 4*2] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ^ (a>>2) ; S0 + mov T1, a ; T1 = a ; MAJB + and y3, b ; y3 = (a|c)&b ; MAJA + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + + + add d, h ; d = k + w + h + d ; -- + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + + + ;add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0; -- + + ;add h, y3 ; h = t1 + S0 + MAJ ; -- + + ROTATE_ARGS + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 3 ;;;;;;;;;;;;;;;;;;;;;;;;;;; + + add old_h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0; -- + mov y2, f ; y2 = f ; CH + rorx y0, e, 25 ; y0 = e >> 25 ; S1A + rorx y1, e, 11 ; y1 = e >> 11 ; S1B + xor y2, g ; y2 = f^g ; CH + + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ; S1 + rorx y1, e, 6 ; y1 = (e >> 6) ; S1 + and y2, e ; y2 = (f^g)&e ; CH + add old_h, y3 ; h = t1 + S0 + MAJ ; -- + + xor y0, y1 ; y0 = (e>>25) ^ (e>>11) ^ (e>>6) ; S1 + rorx T1, a, 13 ; T1 = a >> 13 ; S0B + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + rorx y1, a, 22 ; y1 = a >> 22 ; S0A + mov y3, a ; y3 = a ; MAJA + + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ; S0 + rorx T1, a, 2 ; T1 = (a >> 2) ; S0 + add h, dword[%%XFER + 4*3] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + xor y1, T1 ; y1 = (a>>22) ^ (a>>13) ^ (a>>2) ; S0 + mov T1, a ; T1 = a ; MAJB + and y3, b ; y3 = (a|c)&b ; MAJA + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + + + add d, h ; d = k + w + h + d ; -- + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + + + add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0; -- + + add h, y3 ; h = t1 + S0 + MAJ ; -- + + ROTATE_ARGS + +%endm + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; void sha256_rorx(void *input_data, UINT32 digest[8], UINT64 num_blks) +;; arg 1 : pointer to input data +;; arg 2 : pointer to digest +;; arg 3 : Num blocks +section .text +global sha256_rorx +global _sha256_rorx +align 32 +sha256_rorx: +_sha256_rorx: + push rbx +%ifdef WINABI + push rsi + push rdi +%endif + push rbp + push r12 + push r13 + push r14 + push r15 + + mov rax, rsp + sub rsp,STACK_SIZE + and rsp, -32 + mov [rsp + _RSP], rax + +%ifdef WINABI + vmovdqa [rsp + _XMM_SAVE + 0*16],xmm6 + vmovdqa [rsp + _XMM_SAVE + 1*16],xmm7 + vmovdqa [rsp + _XMM_SAVE + 2*16],xmm8 + vmovdqa [rsp + _XMM_SAVE + 3*16],xmm9 + vmovdqa [rsp + _XMM_SAVE + 4*16],xmm10 + vmovdqa [rsp + _XMM_SAVE + 5*16],xmm11 + vmovdqa [rsp + _XMM_SAVE + 6*16],xmm12 + vmovdqa 
[rsp + _XMM_SAVE + 7*16],xmm13 +%endif + + shl NUM_BLKS, 6 ; convert to bytes + jz done_hash + lea NUM_BLKS, [NUM_BLKS + INP - 64] ; pointer to last block + mov [rsp + _INP_END], NUM_BLKS + + cmp INP, NUM_BLKS + je only_one_block + + ;; load initial digest + mov a,[4*0 + CTX] + mov b,[4*1 + CTX] + mov c,[4*2 + CTX] + mov d,[4*3 + CTX] + mov e,[4*4 + CTX] + mov f,[4*5 + CTX] + mov g,[4*6 + CTX] + mov h,[4*7 + CTX] + + vmovdqa BYTE_FLIP_MASK, [PSHUFFLE_BYTE_FLIP_MASK wrt rip] + vmovdqa SHUF_00BA, [_SHUF_00BA wrt rip] + vmovdqa SHUF_DC00, [_SHUF_DC00 wrt rip] + + mov [rsp + _CTX], CTX + +loop0: + lea TBL,[K256 wrt rip] + + ;; Load first 16 dwords from two blocks + VMOVDQ XTMP0, [INP + 0*32] + VMOVDQ XTMP1, [INP + 1*32] + VMOVDQ XTMP2, [INP + 2*32] + VMOVDQ XTMP3, [INP + 3*32] + + ;; byte swap data + vpshufb XTMP0, XTMP0, BYTE_FLIP_MASK + vpshufb XTMP1, XTMP1, BYTE_FLIP_MASK + vpshufb XTMP2, XTMP2, BYTE_FLIP_MASK + vpshufb XTMP3, XTMP3, BYTE_FLIP_MASK + + ;; transpose data into high/low halves + vperm2i128 X0, XTMP0, XTMP2, 0x20 + vperm2i128 X1, XTMP0, XTMP2, 0x31 + vperm2i128 X2, XTMP1, XTMP3, 0x20 + vperm2i128 X3, XTMP1, XTMP3, 0x31 + +last_block_enter: + add INP, 64 + mov [rsp + _INP], INP + + ;; schedule 48 input dwords, by doing 3 rounds of 12 each + xor SRND, SRND + +align 16 +loop1: + vpaddd XFER, X0, [TBL + SRND + 0*32] + vmovdqa [rsp + _XFER + SRND + 0*32], XFER + FOUR_ROUNDS_AND_SCHED rsp + _XFER + SRND + 0*32 + + vpaddd XFER, X0, [TBL + SRND + 1*32] + vmovdqa [rsp + _XFER + SRND + 1*32], XFER + FOUR_ROUNDS_AND_SCHED rsp + _XFER + SRND + 1*32 + + vpaddd XFER, X0, [TBL + SRND + 2*32] + vmovdqa [rsp + _XFER + SRND + 2*32], XFER + FOUR_ROUNDS_AND_SCHED rsp + _XFER + SRND + 2*32 + + vpaddd XFER, X0, [TBL + SRND + 3*32] + vmovdqa [rsp + _XFER + SRND + 3*32], XFER + FOUR_ROUNDS_AND_SCHED rsp + _XFER + SRND + 3*32 + + add SRND, 4*32 + cmp SRND, 3 * 4*32 + jb loop1 + +loop2: + ;; Do last 16 rounds with no scheduling + vpaddd XFER, X0, [TBL + SRND + 0*32] + vmovdqa [rsp + _XFER + SRND + 0*32], XFER + DO_4ROUNDS rsp + _XFER + SRND + 0*32 + vpaddd XFER, X1, [TBL + SRND + 1*32] + vmovdqa [rsp + _XFER + SRND + 1*32], XFER + DO_4ROUNDS rsp + _XFER + SRND + 1*32 + add SRND, 2*32 + + vmovdqa X0, X2 + vmovdqa X1, X3 + + cmp SRND, 4 * 4*32 + jb loop2 + + mov CTX, [rsp + _CTX] + mov INP, [rsp + _INP] + + addm [4*0 + CTX],a + addm [4*1 + CTX],b + addm [4*2 + CTX],c + addm [4*3 + CTX],d + addm [4*4 + CTX],e + addm [4*5 + CTX],f + addm [4*6 + CTX],g + addm [4*7 + CTX],h + + cmp INP, [rsp + _INP_END] + ja done_hash + + ;;;; Do second block using previously scheduled results + xor SRND, SRND +align 16 +loop3: + DO_4ROUNDS rsp + _XFER + SRND + 0*32 + 16 + DO_4ROUNDS rsp + _XFER + SRND + 1*32 + 16 + add SRND, 2*32 + cmp SRND, 4 * 4*32 + jb loop3 + + mov CTX, [rsp + _CTX] + mov INP, [rsp + _INP] + add INP, 64 + + addm [4*0 + CTX],a + addm [4*1 + CTX],b + addm [4*2 + CTX],c + addm [4*3 + CTX],d + addm [4*4 + CTX],e + addm [4*5 + CTX],f + addm [4*6 + CTX],g + addm [4*7 + CTX],h + + cmp INP, [rsp + _INP_END] + jb loop0 + ja done_hash + +do_last_block: + ;;;; do last block + lea TBL,[K256 wrt rip] + + VMOVDQ XWORD0, [INP + 0*16] + VMOVDQ XWORD1, [INP + 1*16] + VMOVDQ XWORD2, [INP + 2*16] + VMOVDQ XWORD3, [INP + 3*16] + + vpshufb XWORD0, XWORD0, X_BYTE_FLIP_MASK + vpshufb XWORD1, XWORD1, X_BYTE_FLIP_MASK + vpshufb XWORD2, XWORD2, X_BYTE_FLIP_MASK + vpshufb XWORD3, XWORD3, X_BYTE_FLIP_MASK + + jmp last_block_enter + +only_one_block: + + ;; load initial digest + mov a,[4*0 + CTX] + mov b,[4*1 + CTX] + mov c,[4*2 + 
CTX] + mov d,[4*3 + CTX] + mov e,[4*4 + CTX] + mov f,[4*5 + CTX] + mov g,[4*6 + CTX] + mov h,[4*7 + CTX] + + vmovdqa BYTE_FLIP_MASK, [PSHUFFLE_BYTE_FLIP_MASK wrt rip] + vmovdqa SHUF_00BA, [_SHUF_00BA wrt rip] + vmovdqa SHUF_DC00, [_SHUF_DC00 wrt rip] + + mov [rsp + _CTX], CTX + jmp do_last_block + +done_hash: +%ifdef WINABI + vmovdqa xmm6,[rsp + _XMM_SAVE + 0*16] + vmovdqa xmm7,[rsp + _XMM_SAVE + 1*16] + vmovdqa xmm8,[rsp + _XMM_SAVE + 2*16] + vmovdqa xmm9,[rsp + _XMM_SAVE + 3*16] + vmovdqa xmm10,[rsp + _XMM_SAVE + 4*16] + vmovdqa xmm11,[rsp + _XMM_SAVE + 5*16] + vmovdqa xmm12,[rsp + _XMM_SAVE + 6*16] + vmovdqa xmm13,[rsp + _XMM_SAVE + 7*16] +%endif + + mov rsp, [rsp + _RSP] + + pop r15 + pop r14 + pop r13 + pop r12 + pop rbp +%ifdef WINABI + pop rdi + pop rsi +%endif + pop rbx + + ret + +section .data +align 64 +K256: + dd 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 + dd 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 + dd 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 + dd 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 + dd 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 + dd 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 + dd 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 + dd 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 + dd 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc + dd 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc + dd 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da + dd 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da + dd 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 + dd 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 + dd 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 + dd 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 + dd 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 + dd 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 + dd 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 + dd 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 + dd 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 + dd 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 + dd 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 + dd 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 + dd 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 + dd 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 + dd 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 + dd 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 + dd 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 + dd 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 + dd 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 + dd 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 + +PSHUFFLE_BYTE_FLIP_MASK: + ddq 0x0c0d0e0f08090a0b0405060700010203,0x0c0d0e0f08090a0b0405060700010203 + +; shuffle xBxA -> 00BA +_SHUF_00BA: + ddq 0xFFFFFFFFFFFFFFFF0b0a090803020100,0xFFFFFFFFFFFFFFFF0b0a090803020100 + +; shuffle xDxC -> DC00 +_SHUF_DC00: + ddq 0x0b0a090803020100FFFFFFFFFFFFFFFF,0x0b0a090803020100FFFFFFFFFFFFFFFF + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git a/src/Crypto/sha256_avx2_x86.asm b/src/Crypto/sha256_avx2_x86.asm new file mode 100644 index 00000000..31c8bd0d --- /dev/null +++ b/src/Crypto/sha256_avx2_x86.asm @@ -0,0 +1,10 @@ + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git a/src/Crypto/sha256_sse4_x64.asm 
b/src/Crypto/sha256_sse4_x64.asm new file mode 100644 index 00000000..c11630bc --- /dev/null +++ b/src/Crypto/sha256_sse4_x64.asm @@ -0,0 +1,560 @@ +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; Copyright (c) 2012, Intel Corporation +; +; All rights reserved. +; +; Redistribution and use in source and binary forms, with or without +; modification, are permitted provided that the following conditions are +; met: +; +; * Redistributions of source code must retain the above copyright +; notice, this list of conditions and the following disclaimer. +; +; * Redistributions in binary form must reproduce the above copyright +; notice, this list of conditions and the following disclaimer in the +; documentation and/or other materials provided with the +; distribution. +; +; * Neither the name of the Intel Corporation nor the names of its +; contributors may be used to endorse or promote products derived from +; this software without specific prior written permission. +; +; +; THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY +; EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +; PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION OR +; CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +; EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; +; Example YASM command lines: +; Windows: yasm -Xvc -f x64 -rnasm -pnasm -o sha256_sse4.obj -g cv8 sha256_sse4.asm +; Linux: yasm -f x64 -f elf64 -X gnu -g dwarf2 -D LINUX -o sha256_sse4.o sha256_sse4.asm +; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; +; This code is described in an Intel White-Paper: +; "Fast SHA-256 Implementations on Intel Architecture Processors" +; +; To find it, surf to http://www.intel.com/p/en_US/embedded +; and search for that title. +; The paper is expected to be released roughly at the end of April, 2012 +; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; This code schedules 1 blocks at a time, with 4 lanes per block +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +; Modified by kerukuro for use in cppcrypto. 
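The SSE4 variant that follows exports the same C-visible entry-point shape as the AVX and AVX2 files: void f(void *input_data, UINT32 digest[8], UINT64 num_blks), consuming whole 64-byte blocks. The caller sketch below is a hedged illustration, not taken from the VeraCrypt sources; the init and wrapper helpers and the choice of sha256_sse4 are assumptions, and padding of the trailing partial block remains the caller's responsibility.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Entry point declared in this patch; sha256_avx and sha256_rorx take the
 * same arguments.  Everything else here is an illustrative assumption. */
extern void sha256_sse4(void *input_data, uint32_t digest[8], uint64_t num_blks);

static const uint32_t SHA256_IV[8] = {           /* FIPS 180-4 initial state */
    0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
    0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
};

static void sha256_begin(uint32_t state[8])
{
    memcpy(state, SHA256_IV, sizeof SHA256_IV);
}

/* Compress every complete 64-byte block; returns the number of bytes
 * consumed so the caller can buffer the remaining len % 64 bytes. */
static size_t sha256_compress_blocks(uint32_t state[8], uint8_t *data, size_t len)
{
    size_t full_blocks = len / 64;
    if (full_blocks)
        sha256_sse4(data, state, (uint64_t)full_blocks);
    return full_blocks * 64;
}

A caller would typically call sha256_begin once, push bulk data through sha256_compress_blocks, and handle padding of the final partial block before the last compression call.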
+ +; Modified By Mounir IDRASSI for use in VeraCrypt + +%define MOVDQ movdqu ;; assume buffers not aligned + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; Define Macros + +; addm [mem], reg +; Add reg to mem using reg-mem add and store +%macro addm 2 + add %2, %1 + mov %1, %2 +%endm + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +; COPY_XMM_AND_BSWAP xmm, [mem], byte_flip_mask +; Load xmm with mem and byte swap each dword +%macro COPY_XMM_AND_BSWAP 3 + MOVDQ %1, %2 + pshufb %1, %3 +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +%define X0 xmm4 +%define X1 xmm5 +%define X2 xmm6 +%define X3 xmm7 + +%define XTMP0 xmm0 +%define XTMP1 xmm1 +%define XTMP2 xmm2 +%define XTMP3 xmm3 +%define XTMP4 xmm8 +%define XFER xmm9 + +%define SHUF_00BA xmm10 ; shuffle xBxA -> 00BA +%define SHUF_DC00 xmm11 ; shuffle xDxC -> DC00 +%define BYTE_FLIP_MASK xmm12 + +%ifndef WINABI +%define NUM_BLKS rdx ; 3rd arg +%define CTX rsi ; 2nd arg +%define INP rdi ; 1st arg + +%define SRND rdi ; clobbers INP +%define c ecx +%define d r8d +%define e edx +%else +%define NUM_BLKS r8 ; 3rd arg +%define CTX rdx ; 2nd arg +%define INP rcx ; 1st arg + +%define SRND rcx ; clobbers INP +%define c edi +%define d esi +%define e r8d + +%endif +%define TBL rbp +%define a eax +%define b ebx + +%define f r9d +%define g r10d +%define h r11d + +%define y0 r13d +%define y1 r14d +%define y2 r15d + + + +_INP_END_SIZE equ 8 +_INP_SIZE equ 8 +_XFER_SIZE equ 8 +%ifndef WINABI +_XMM_SAVE_SIZE equ 0 +%else +_XMM_SAVE_SIZE equ 7*16 +%endif +; STACK_SIZE plus pushes must be an odd multiple of 8 +_ALIGN_SIZE equ 8 + +_INP_END equ 0 +_INP equ _INP_END + _INP_END_SIZE +_XFER equ _INP + _INP_SIZE +_XMM_SAVE equ _XFER + _XFER_SIZE + _ALIGN_SIZE +STACK_SIZE equ _XMM_SAVE + _XMM_SAVE_SIZE + +; rotate_Xs +; Rotate values of symbols X0...X3 +%macro rotate_Xs 0 +%xdefine X_ X0 +%xdefine X0 X1 +%xdefine X1 X2 +%xdefine X2 X3 +%xdefine X3 X_ +%endm + +; ROTATE_ARGS +; Rotate values of symbols a...h +%macro ROTATE_ARGS 0 +%xdefine TMP_ h +%xdefine h g +%xdefine g f +%xdefine f e +%xdefine e d +%xdefine d c +%xdefine c b +%xdefine b a +%xdefine a TMP_ +%endm + +%macro FOUR_ROUNDS_AND_SCHED 0 + ;; compute s0 four at a time and s1 two at a time + ;; compute W[-16] + W[-7] 4 at a time + movdqa XTMP0, X3 + mov y0, e ; y0 = e + ror y0, (25-11) ; y0 = e >> (25-11) + mov y1, a ; y1 = a + palignr XTMP0, X2, 4 ; XTMP0 = W[-7] + ror y1, (22-13) ; y1 = a >> (22-13) + xor y0, e ; y0 = e ^ (e >> (25-11)) + mov y2, f ; y2 = f + ror y0, (11-6) ; y0 = (e >> (11-6)) ^ (e >> (25-6)) + movdqa XTMP1, X1 + xor y1, a ; y1 = a ^ (a >> (22-13) + xor y2, g ; y2 = f^g + paddd XTMP0, X0 ; XTMP0 = W[-7] + W[-16] + xor y0, e ; y0 = e ^ (e >> (11-6)) ^ (e >> (25-6)) + and y2, e ; y2 = (f^g)&e + ror y1, (13-2) ; y1 = (a >> (13-2)) ^ (a >> (22-2)) + ;; compute s0 + palignr XTMP1, X0, 4 ; XTMP1 = W[-15] + xor y1, a ; y1 = a ^ (a >> (13-2)) ^ (a >> (22-2)) + ror y0, 6 ; y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) + xor y2, g ; y2 = CH = ((f^g)&e)^g + movdqa XTMP2, XTMP1 ; XTMP2 = W[-15] + ror y1, 2 ; y1 = S0 = (a>>2) ^ (a>>13) ^ (a>>22) + add y2, y0 ; y2 = S1 + CH + add y2, [rsp + _XFER + 0*4] ; y2 = k + w + S1 + CH + movdqa XTMP3, XTMP1 ; XTMP3 = W[-15] + mov y0, a ; y0 = a + add h, y2 ; h = h + S1 + CH + k + w + mov y2, a ; y2 = a + pslld XTMP1, (32-7) + or y0, c ; y0 = a|c + add d, h ; d = d + h + S1 + CH + k + w + and y2, c ; y2 = a&c + psrld XTMP2, 7 + and y0, b ; y0 = (a|c)&b + add h, y1 ; h = h + S1 + CH + k + w + S0 + por XTMP1, XTMP2 ; XTMP1 = W[-15] ror 7 + or y0, y2 ; y0 = MAJ = (a|c)&b)|(a&c) + add h, y0 ; h = 
h + S1 + CH + k + w + S0 + MAJ + +ROTATE_ARGS + movdqa XTMP2, XTMP3 ; XTMP2 = W[-15] + mov y0, e ; y0 = e + mov y1, a ; y1 = a + movdqa XTMP4, XTMP3 ; XTMP4 = W[-15] + ror y0, (25-11) ; y0 = e >> (25-11) + xor y0, e ; y0 = e ^ (e >> (25-11)) + mov y2, f ; y2 = f + ror y1, (22-13) ; y1 = a >> (22-13) + pslld XTMP3, (32-18) + xor y1, a ; y1 = a ^ (a >> (22-13) + ror y0, (11-6) ; y0 = (e >> (11-6)) ^ (e >> (25-6)) + xor y2, g ; y2 = f^g + psrld XTMP2, 18 + ror y1, (13-2) ; y1 = (a >> (13-2)) ^ (a >> (22-2)) + xor y0, e ; y0 = e ^ (e >> (11-6)) ^ (e >> (25-6)) + and y2, e ; y2 = (f^g)&e + ror y0, 6 ; y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) + pxor XTMP1, XTMP3 + xor y1, a ; y1 = a ^ (a >> (13-2)) ^ (a >> (22-2)) + xor y2, g ; y2 = CH = ((f^g)&e)^g + psrld XTMP4, 3 ; XTMP4 = W[-15] >> 3 + add y2, y0 ; y2 = S1 + CH + add y2, [rsp + _XFER + 1*4] ; y2 = k + w + S1 + CH + ror y1, 2 ; y1 = S0 = (a>>2) ^ (a>>13) ^ (a>>22) + pxor XTMP1, XTMP2 ; XTMP1 = W[-15] ror 7 ^ W[-15] ror 18 + mov y0, a ; y0 = a + add h, y2 ; h = h + S1 + CH + k + w + mov y2, a ; y2 = a + pxor XTMP1, XTMP4 ; XTMP1 = s0 + or y0, c ; y0 = a|c + add d, h ; d = d + h + S1 + CH + k + w + and y2, c ; y2 = a&c + ;; compute low s1 + pshufd XTMP2, X3, 11111010b ; XTMP2 = W[-2] {BBAA} + and y0, b ; y0 = (a|c)&b + add h, y1 ; h = h + S1 + CH + k + w + S0 + paddd XTMP0, XTMP1 ; XTMP0 = W[-16] + W[-7] + s0 + or y0, y2 ; y0 = MAJ = (a|c)&b)|(a&c) + add h, y0 ; h = h + S1 + CH + k + w + S0 + MAJ + +ROTATE_ARGS + movdqa XTMP3, XTMP2 ; XTMP3 = W[-2] {BBAA} + mov y0, e ; y0 = e + mov y1, a ; y1 = a + ror y0, (25-11) ; y0 = e >> (25-11) + movdqa XTMP4, XTMP2 ; XTMP4 = W[-2] {BBAA} + xor y0, e ; y0 = e ^ (e >> (25-11)) + ror y1, (22-13) ; y1 = a >> (22-13) + mov y2, f ; y2 = f + xor y1, a ; y1 = a ^ (a >> (22-13) + ror y0, (11-6) ; y0 = (e >> (11-6)) ^ (e >> (25-6)) + psrlq XTMP2, 17 ; XTMP2 = W[-2] ror 17 {xBxA} + xor y2, g ; y2 = f^g + psrlq XTMP3, 19 ; XTMP3 = W[-2] ror 19 {xBxA} + xor y0, e ; y0 = e ^ (e >> (11-6)) ^ (e >> (25-6)) + and y2, e ; y2 = (f^g)&e + psrld XTMP4, 10 ; XTMP4 = W[-2] >> 10 {BBAA} + ror y1, (13-2) ; y1 = (a >> (13-2)) ^ (a >> (22-2)) + xor y1, a ; y1 = a ^ (a >> (13-2)) ^ (a >> (22-2)) + xor y2, g ; y2 = CH = ((f^g)&e)^g + ror y0, 6 ; y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) + pxor XTMP2, XTMP3 + add y2, y0 ; y2 = S1 + CH + ror y1, 2 ; y1 = S0 = (a>>2) ^ (a>>13) ^ (a>>22) + add y2, [rsp + _XFER + 2*4] ; y2 = k + w + S1 + CH + pxor XTMP4, XTMP2 ; XTMP4 = s1 {xBxA} + mov y0, a ; y0 = a + add h, y2 ; h = h + S1 + CH + k + w + mov y2, a ; y2 = a + pshufb XTMP4, SHUF_00BA ; XTMP4 = s1 {00BA} + or y0, c ; y0 = a|c + add d, h ; d = d + h + S1 + CH + k + w + and y2, c ; y2 = a&c + paddd XTMP0, XTMP4 ; XTMP0 = {..., ..., W[1], W[0]} + and y0, b ; y0 = (a|c)&b + add h, y1 ; h = h + S1 + CH + k + w + S0 + ;; compute high s1 + pshufd XTMP2, XTMP0, 01010000b ; XTMP2 = W[-2] {DDCC} + or y0, y2 ; y0 = MAJ = (a|c)&b)|(a&c) + add h, y0 ; h = h + S1 + CH + k + w + S0 + MAJ + +ROTATE_ARGS + movdqa XTMP3, XTMP2 ; XTMP3 = W[-2] {DDCC} + mov y0, e ; y0 = e + ror y0, (25-11) ; y0 = e >> (25-11) + mov y1, a ; y1 = a + movdqa X0, XTMP2 ; X0 = W[-2] {DDCC} + ror y1, (22-13) ; y1 = a >> (22-13) + xor y0, e ; y0 = e ^ (e >> (25-11)) + mov y2, f ; y2 = f + ror y0, (11-6) ; y0 = (e >> (11-6)) ^ (e >> (25-6)) + psrlq XTMP2, 17 ; XTMP2 = W[-2] ror 17 {xDxC} + xor y1, a ; y1 = a ^ (a >> (22-13) + xor y2, g ; y2 = f^g + psrlq XTMP3, 19 ; XTMP3 = W[-2] ror 19 {xDxC} + xor y0, e ; y0 = e ^ (e >> (11-6)) ^ (e >> (25-6)) + and y2, e ; y2 = (f^g)&e + ror y1, (13-2) ; 
y1 = (a >> (13-2)) ^ (a >> (22-2)) + psrld X0, 10 ; X0 = W[-2] >> 10 {DDCC} + xor y1, a ; y1 = a ^ (a >> (13-2)) ^ (a >> (22-2)) + ror y0, 6 ; y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) + xor y2, g ; y2 = CH = ((f^g)&e)^g + pxor XTMP2, XTMP3 + ror y1, 2 ; y1 = S0 = (a>>2) ^ (a>>13) ^ (a>>22) + add y2, y0 ; y2 = S1 + CH + add y2, [rsp + _XFER + 3*4] ; y2 = k + w + S1 + CH + pxor X0, XTMP2 ; X0 = s1 {xDxC} + mov y0, a ; y0 = a + add h, y2 ; h = h + S1 + CH + k + w + mov y2, a ; y2 = a + pshufb X0, SHUF_DC00 ; X0 = s1 {DC00} + or y0, c ; y0 = a|c + add d, h ; d = d + h + S1 + CH + k + w + and y2, c ; y2 = a&c + paddd X0, XTMP0 ; X0 = {W[3], W[2], W[1], W[0]} + and y0, b ; y0 = (a|c)&b + add h, y1 ; h = h + S1 + CH + k + w + S0 + or y0, y2 ; y0 = MAJ = (a|c)&b)|(a&c) + add h, y0 ; h = h + S1 + CH + k + w + S0 + MAJ + +ROTATE_ARGS +rotate_Xs +%endm + +;; input is [rsp + _XFER + %1 * 4] +%macro DO_ROUND 1 + mov y0, e ; y0 = e + ror y0, (25-11) ; y0 = e >> (25-11) + mov y1, a ; y1 = a + xor y0, e ; y0 = e ^ (e >> (25-11)) + ror y1, (22-13) ; y1 = a >> (22-13) + mov y2, f ; y2 = f + xor y1, a ; y1 = a ^ (a >> (22-13) + ror y0, (11-6) ; y0 = (e >> (11-6)) ^ (e >> (25-6)) + xor y2, g ; y2 = f^g + xor y0, e ; y0 = e ^ (e >> (11-6)) ^ (e >> (25-6)) + ror y1, (13-2) ; y1 = (a >> (13-2)) ^ (a >> (22-2)) + and y2, e ; y2 = (f^g)&e + xor y1, a ; y1 = a ^ (a >> (13-2)) ^ (a >> (22-2)) + ror y0, 6 ; y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) + xor y2, g ; y2 = CH = ((f^g)&e)^g + add y2, y0 ; y2 = S1 + CH + ror y1, 2 ; y1 = S0 = (a>>2) ^ (a>>13) ^ (a>>22) + add y2, [rsp + _XFER + %1 * 4] ; y2 = k + w + S1 + CH + mov y0, a ; y0 = a + add h, y2 ; h = h + S1 + CH + k + w + mov y2, a ; y2 = a + or y0, c ; y0 = a|c + add d, h ; d = d + h + S1 + CH + k + w + and y2, c ; y2 = a&c + and y0, b ; y0 = (a|c)&b + add h, y1 ; h = h + S1 + CH + k + w + S0 + or y0, y2 ; y0 = MAJ = (a|c)&b)|(a&c) + add h, y0 ; h = h + S1 + CH + k + w + S0 + MAJ + ROTATE_ARGS +%endm + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; void sha256_sse4(void *input_data, UINT32 digest[8], UINT64 num_blks) +;; arg 1 : pointer to input data +;; arg 2 : pointer to digest +;; arg 3 : Num blocks +section .text +global sha256_sse4 +global _sha256_sse4 +align 32 +sha256_sse4: +_sha256_sse4: + push rbx +%ifdef WINABI + push rsi + push rdi +%endif + push rbp + push r13 + push r14 + push r15 + + sub rsp,STACK_SIZE +%ifdef WINABI + movdqa [rsp + _XMM_SAVE + 0*16],xmm6 + movdqa [rsp + _XMM_SAVE + 1*16],xmm7 + movdqa [rsp + _XMM_SAVE + 2*16],xmm8 + movdqa [rsp + _XMM_SAVE + 3*16],xmm9 + movdqa [rsp + _XMM_SAVE + 4*16],xmm10 + movdqa [rsp + _XMM_SAVE + 5*16],xmm11 + movdqa [rsp + _XMM_SAVE + 6*16],xmm12 +%endif + + shl NUM_BLKS, 6 ; convert to bytes + jz done_hash + add NUM_BLKS, INP ; pointer to end of data + mov [rsp + _INP_END], NUM_BLKS + + ;; load initial digest + mov a,[4*0 + CTX] + mov b,[4*1 + CTX] + mov c,[4*2 + CTX] + mov d,[4*3 + CTX] + mov e,[4*4 + CTX] + mov f,[4*5 + CTX] + mov g,[4*6 + CTX] + mov h,[4*7 + CTX] + + movdqa BYTE_FLIP_MASK, [PSHUFFLE_BYTE_FLIP_MASK wrt rip] + movdqa SHUF_00BA, [_SHUF_00BA wrt rip] + movdqa SHUF_DC00, [_SHUF_DC00 wrt rip] + +loop0: + lea TBL,[K256 wrt rip] + + ;; byte swap first 16 dwords + COPY_XMM_AND_BSWAP X0, [INP + 0*16], BYTE_FLIP_MASK + COPY_XMM_AND_BSWAP X1, [INP + 1*16], BYTE_FLIP_MASK + COPY_XMM_AND_BSWAP X2, [INP + 2*16], BYTE_FLIP_MASK + COPY_XMM_AND_BSWAP X3, [INP + 3*16], BYTE_FLIP_MASK + + mov [rsp + _INP], INP + 
+ ;; schedule 48 input dwords, by doing 3 rounds of 16 each + mov SRND, 3 +align 16 +loop1: + movdqa XFER, [TBL + 0*16] + paddd XFER, X0 + movdqa [rsp + _XFER], XFER + FOUR_ROUNDS_AND_SCHED + + movdqa XFER, [TBL + 1*16] + paddd XFER, X0 + movdqa [rsp + _XFER], XFER + FOUR_ROUNDS_AND_SCHED + + movdqa XFER, [TBL + 2*16] + paddd XFER, X0 + movdqa [rsp + _XFER], XFER + FOUR_ROUNDS_AND_SCHED + + movdqa XFER, [TBL + 3*16] + paddd XFER, X0 + movdqa [rsp + _XFER], XFER + add TBL, 4*16 + FOUR_ROUNDS_AND_SCHED + + sub SRND, 1 + jne loop1 + + mov SRND, 2 +loop2: + paddd X0, [TBL + 0*16] + movdqa [rsp + _XFER], X0 + DO_ROUND 0 + DO_ROUND 1 + DO_ROUND 2 + DO_ROUND 3 + paddd X1, [TBL + 1*16] + movdqa [rsp + _XFER], X1 + add TBL, 2*16 + DO_ROUND 0 + DO_ROUND 1 + DO_ROUND 2 + DO_ROUND 3 + + movdqa X0, X2 + movdqa X1, X3 + + sub SRND, 1 + jne loop2 + + addm [4*0 + CTX],a + addm [4*1 + CTX],b + addm [4*2 + CTX],c + addm [4*3 + CTX],d + addm [4*4 + CTX],e + addm [4*5 + CTX],f + addm [4*6 + CTX],g + addm [4*7 + CTX],h + + mov INP, [rsp + _INP] + add INP, 64 + cmp INP, [rsp + _INP_END] + jne loop0 + +done_hash: +%ifdef WINABI + movdqa xmm6,[rsp + _XMM_SAVE + 0*16] + movdqa xmm7,[rsp + _XMM_SAVE + 1*16] + movdqa xmm8,[rsp + _XMM_SAVE + 2*16] + movdqa xmm9,[rsp + _XMM_SAVE + 3*16] + movdqa xmm10,[rsp + _XMM_SAVE + 4*16] + movdqa xmm11,[rsp + _XMM_SAVE + 5*16] + movdqa xmm12,[rsp + _XMM_SAVE + 6*16] +%endif + + add rsp, STACK_SIZE + + pop r15 + pop r14 + pop r13 + pop rbp +%ifdef WINABI + pop rdi + pop rsi +%endif + pop rbx + + ret + + +section .data +align 64 +K256: + dd 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 + dd 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 + dd 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 + dd 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 + dd 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc + dd 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da + dd 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 + dd 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 + dd 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 + dd 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 + dd 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 + dd 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 + dd 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 + dd 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 + dd 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 + dd 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 + +PSHUFFLE_BYTE_FLIP_MASK: ddq 0x0c0d0e0f08090a0b0405060700010203 + +; shuffle xBxA -> 00BA +_SHUF_00BA: ddq 0xFFFFFFFFFFFFFFFF0b0a090803020100 + +; shuffle xDxC -> DC00 +_SHUF_DC00: ddq 0x0b0a090803020100FFFFFFFFFFFFFFFF + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git a/src/Crypto/sha256_sse4_x86.asm b/src/Crypto/sha256_sse4_x86.asm new file mode 100644 index 00000000..31c8bd0d --- /dev/null +++ b/src/Crypto/sha256_sse4_x86.asm @@ -0,0 +1,10 @@ + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git a/src/Crypto/sha512-x64-nayuki.S b/src/Crypto/sha512-x64-nayuki.S new file mode 100644 index 00000000..96ffb1a3 --- /dev/null +++ b/src/Crypto/sha512-x64-nayuki.S @@ -0,0 +1,207 @@ +/* + * 
SHA-512 hash in x86-64 assembly + * + * Copyright (c) 2017 Project Nayuki. (MIT License) + * https://www.nayuki.io/page/fast-sha2-hashes-in-x86-assembly + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of + * this software and associated documentation files (the "Software"), to deal in + * the Software without restriction, including without limitation the rights to + * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of + * the Software, and to permit persons to whom the Software is furnished to do so, + * subject to the following conditions: + * - The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * - The Software is provided "as is", without warranty of any kind, express or + * implied, including but not limited to the warranties of merchantability, + * fitness for a particular purpose and noninfringement. In no event shall the + * authors or copyright holders be liable for any claim, damages or other + * liability, whether in an action of contract, tort or otherwise, arising from, + * out of or in connection with the Software or the use or other dealings in the + * Software. + */ + +# Adapted for VeraCrypt +# Adapt to Windows build: +# - specific calling convention +# - avoid using xmm6 register since it must be preserved. We use the stack to save RBX, RDI and RSI + + +/* void sha512_compress_nayuki(uint64_t state[8], const uint8_t block[128]) */ +.globl sha512_compress_nayuki +.globl _sha512_compress_nayuki +sha512_compress_nayuki: +_sha512_compress_nayuki: + /* + * Storage usage: + * Bytes Location Description + * 8 rax Temporary for calculation per round + * 8 rbx Temporary for calculation per round + * 8 rcx Temporary for calculation per round + * 8 rdx Temporary for calculation per round + * 8 rsi Base address of block array argument (read-only) + * 8 rdi Base address of state array argument (read-only) + * 8 rsp x86-64 stack pointer + * 8 r8 SHA-512 state variable A + * 8 r9 SHA-512 state variable B + * 8 r10 SHA-512 state variable C + * 8 r11 SHA-512 state variable D + * 8 r12 SHA-512 state variable E + * 8 r13 SHA-512 state variable F + * 8 r14 SHA-512 state variable G + * 8 r15 SHA-512 state variable H + * 128 [rsp+0] Circular buffer of most recent 16 key schedule items, 8 bytes each + * 16 xmm0 Caller's value of r10 (only low 64 bits are used) + * 16 xmm1 Caller's value of r11 (only low 64 bits are used) + * 16 xmm2 Caller's value of r12 (only low 64 bits are used) + * 16 xmm3 Caller's value of r13 (only low 64 bits are used) + * 16 xmm4 Caller's value of r14 (only low 64 bits are used) + * 16 xmm5 Caller's value of r15 (only low 64 bits are used) + * 8 mm0 Caller's value of rbx + */ + movq %r10, %xmm0 + movq %r11, %xmm1 + movq %r12, %xmm2 + movq %r13, %xmm3 + movq %r14, %xmm4 + movq %r15, %xmm5 + +.ifdef WINABI + subq $152, %rsp + movq %rbx, (0*8 + 128)(%rsp) + movq %rdi, (1*8 + 128)(%rsp) + movq %rsi, (2*8 + 128)(%rsp) + movq %rcx, %rdi + movq %rdx, %rsi + .else + movq %rbx, %xmm6 + subq $128, %rsp +.endif + + + movq 0(%rdi), %r8 + movq 8(%rdi), %r9 + movq 16(%rdi), %r10 + movq 24(%rdi), %r11 + movq 32(%rdi), %r12 + movq 40(%rdi), %r13 + movq 48(%rdi), %r14 + movq 56(%rdi), %r15 + + + movq (0*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((0)&0xF)*8)(%rsp); movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r15; movq %r14, %rcx; xorq %r13, 
%rcx; andq %r12, %rcx; xorq %r14, %rcx; addq %rax, %r15; movabs $0x428A2F98D728AE22, %rax; addq %rcx, %r15; addq %rax, %r15; addq %r15, %r11; movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r10, %rcx; addq %rax, %r15; movq %r10, %rax; orq %r9, %rax; andq %r9, %rcx; andq %r8, %rax; orq %rcx, %rax; addq %rax, %r15; + movq (1*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((1)&0xF)*8)(%rsp); movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r14; movq %r13, %rcx; xorq %r12, %rcx; andq %r11, %rcx; xorq %r13, %rcx; addq %rax, %r14; movabs $0x7137449123EF65CD, %rax; addq %rcx, %r14; addq %rax, %r14; addq %r14, %r10; movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r9, %rcx; addq %rax, %r14; movq %r9, %rax; orq %r8, %rax; andq %r8, %rcx; andq %r15, %rax; orq %rcx, %rax; addq %rax, %r14; + movq (2*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((2)&0xF)*8)(%rsp); movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r13; movq %r12, %rcx; xorq %r11, %rcx; andq %r10, %rcx; xorq %r12, %rcx; addq %rax, %r13; movabs $0xB5C0FBCFEC4D3B2F, %rax; addq %rcx, %r13; addq %rax, %r13; addq %r13, %r9; movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r8, %rcx; addq %rax, %r13; movq %r8, %rax; orq %r15, %rax; andq %r15, %rcx; andq %r14, %rax; orq %rcx, %rax; addq %rax, %r13; + movq (3*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((3)&0xF)*8)(%rsp); movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r12; movq %r11, %rcx; xorq %r10, %rcx; andq %r9, %rcx; xorq %r11, %rcx; addq %rax, %r12; movabs $0xE9B5DBA58189DBBC, %rax; addq %rcx, %r12; addq %rax, %r12; addq %r12, %r8; movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r15, %rcx; addq %rax, %r12; movq %r15, %rax; orq %r14, %rax; andq %r14, %rcx; andq %r13, %rax; orq %rcx, %rax; addq %rax, %r12; + movq (4*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((4)&0xF)*8)(%rsp); movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r11; movq %r10, %rcx; xorq %r9, %rcx; andq %r8, %rcx; xorq %r10, %rcx; addq %rax, %r11; movabs $0x3956C25BF348B538, %rax; addq %rcx, %r11; addq %rax, %r11; addq %r11, %r15; movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r14, %rcx; addq %rax, %r11; movq %r14, %rax; orq %r13, %rax; andq %r13, %rcx; andq %r12, %rax; orq %rcx, %rax; addq %rax, %r11; + movq (5*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((5)&0xF)*8)(%rsp); movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r10; movq %r9, %rcx; xorq %r8, %rcx; andq %r15, %rcx; xorq %r9, %rcx; addq %rax, %r10; movabs $0x59F111F1B605D019, %rax; addq %rcx, %r10; addq %rax, %r10; addq %r10, %r14; movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r13, 
%rcx; addq %rax, %r10; movq %r13, %rax; orq %r12, %rax; andq %r12, %rcx; andq %r11, %rax; orq %rcx, %rax; addq %rax, %r10; + movq (6*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((6)&0xF)*8)(%rsp); movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r9; movq %r8, %rcx; xorq %r15, %rcx; andq %r14, %rcx; xorq %r8, %rcx; addq %rax, %r9; movabs $0x923F82A4AF194F9B, %rax; addq %rcx, %r9; addq %rax, %r9; addq %r9, %r13; movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r12, %rcx; addq %rax, %r9; movq %r12, %rax; orq %r11, %rax; andq %r11, %rcx; andq %r10, %rax; orq %rcx, %rax; addq %rax, %r9; + movq (7*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((7)&0xF)*8)(%rsp); movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r8; movq %r15, %rcx; xorq %r14, %rcx; andq %r13, %rcx; xorq %r15, %rcx; addq %rax, %r8; movabs $0xAB1C5ED5DA6D8118, %rax; addq %rcx, %r8; addq %rax, %r8; addq %r8, %r12; movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r11, %rcx; addq %rax, %r8; movq %r11, %rax; orq %r10, %rax; andq %r10, %rcx; andq %r9, %rax; orq %rcx, %rax; addq %rax, %r8; + movq (8*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((8)&0xF)*8)(%rsp); movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r15; movq %r14, %rcx; xorq %r13, %rcx; andq %r12, %rcx; xorq %r14, %rcx; addq %rax, %r15; movabs $0xD807AA98A3030242, %rax; addq %rcx, %r15; addq %rax, %r15; addq %r15, %r11; movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r10, %rcx; addq %rax, %r15; movq %r10, %rax; orq %r9, %rax; andq %r9, %rcx; andq %r8, %rax; orq %rcx, %rax; addq %rax, %r15; + movq (9*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((9)&0xF)*8)(%rsp); movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r14; movq %r13, %rcx; xorq %r12, %rcx; andq %r11, %rcx; xorq %r13, %rcx; addq %rax, %r14; movabs $0x12835B0145706FBE, %rax; addq %rcx, %r14; addq %rax, %r14; addq %r14, %r10; movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r9, %rcx; addq %rax, %r14; movq %r9, %rax; orq %r8, %rax; andq %r8, %rcx; andq %r15, %rax; orq %rcx, %rax; addq %rax, %r14; + movq (10*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((10)&0xF)*8)(%rsp); movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r13; movq %r12, %rcx; xorq %r11, %rcx; andq %r10, %rcx; xorq %r12, %rcx; addq %rax, %r13; movabs $0x243185BE4EE4B28C, %rax; addq %rcx, %r13; addq %rax, %r13; addq %r13, %r9; movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r8, %rcx; addq %rax, %r13; movq %r8, %rax; orq %r15, %rax; andq %r15, %rcx; andq %r14, %rax; orq %rcx, %rax; addq %rax, %r13; + movq (11*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((11)&0xF)*8)(%rsp); movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; 
xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r12; movq %r11, %rcx; xorq %r10, %rcx; andq %r9, %rcx; xorq %r11, %rcx; addq %rax, %r12; movabs $0x550C7DC3D5FFB4E2, %rax; addq %rcx, %r12; addq %rax, %r12; addq %r12, %r8; movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r15, %rcx; addq %rax, %r12; movq %r15, %rax; orq %r14, %rax; andq %r14, %rcx; andq %r13, %rax; orq %rcx, %rax; addq %rax, %r12; + movq (12*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((12)&0xF)*8)(%rsp); movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r11; movq %r10, %rcx; xorq %r9, %rcx; andq %r8, %rcx; xorq %r10, %rcx; addq %rax, %r11; movabs $0x72BE5D74F27B896F, %rax; addq %rcx, %r11; addq %rax, %r11; addq %r11, %r15; movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r14, %rcx; addq %rax, %r11; movq %r14, %rax; orq %r13, %rax; andq %r13, %rcx; andq %r12, %rax; orq %rcx, %rax; addq %rax, %r11; + movq (13*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((13)&0xF)*8)(%rsp); movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r10; movq %r9, %rcx; xorq %r8, %rcx; andq %r15, %rcx; xorq %r9, %rcx; addq %rax, %r10; movabs $0x80DEB1FE3B1696B1, %rax; addq %rcx, %r10; addq %rax, %r10; addq %r10, %r14; movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r13, %rcx; addq %rax, %r10; movq %r13, %rax; orq %r12, %rax; andq %r12, %rcx; andq %r11, %rax; orq %rcx, %rax; addq %rax, %r10; + movq (14*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((14)&0xF)*8)(%rsp); movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r9; movq %r8, %rcx; xorq %r15, %rcx; andq %r14, %rcx; xorq %r8, %rcx; addq %rax, %r9; movabs $0x9BDC06A725C71235, %rax; addq %rcx, %r9; addq %rax, %r9; addq %r9, %r13; movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r12, %rcx; addq %rax, %r9; movq %r12, %rax; orq %r11, %rax; andq %r11, %rcx; andq %r10, %rax; orq %rcx, %rax; addq %rax, %r9; + movq (15*8)(%rsi), %rbx; bswapq %rbx; movq %rbx, (((15)&0xF)*8)(%rsp); movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r8; movq %r15, %rcx; xorq %r14, %rcx; andq %r13, %rcx; xorq %r15, %rcx; addq %rax, %r8; movabs $0xC19BF174CF692694, %rax; addq %rcx, %r8; addq %rax, %r8; addq %r8, %r12; movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r11, %rcx; addq %rax, %r8; movq %r11, %rax; orq %r10, %rax; andq %r10, %rcx; andq %r9, %rax; orq %rcx, %rax; addq %rax, %r8; + movq (((16 -15)&0xF)*8)(%rsp), %rax; movq (((16 -16)&0xF)*8)(%rsp), %rbx; addq (((16 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((16 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((16)&0xF)*8)(%rsp); movq %r12, 
%rcx; movq %r12, %rdx; movq %r12, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r15; movq %r14, %rcx; xorq %r13, %rcx; andq %r12, %rcx; xorq %r14, %rcx; addq %rax, %r15; movabs $0xE49B69C19EF14AD2, %rax; addq %rcx, %r15; addq %rax, %r15; addq %r15, %r11; movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r10, %rcx; addq %rax, %r15; movq %r10, %rax; orq %r9, %rax; andq %r9, %rcx; andq %r8, %rax; orq %rcx, %rax; addq %rax, %r15; + movq (((17 -15)&0xF)*8)(%rsp), %rax; movq (((17 -16)&0xF)*8)(%rsp), %rbx; addq (((17 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((17 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((17)&0xF)*8)(%rsp); movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r14; movq %r13, %rcx; xorq %r12, %rcx; andq %r11, %rcx; xorq %r13, %rcx; addq %rax, %r14; movabs $0xEFBE4786384F25E3, %rax; addq %rcx, %r14; addq %rax, %r14; addq %r14, %r10; movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r9, %rcx; addq %rax, %r14; movq %r9, %rax; orq %r8, %rax; andq %r8, %rcx; andq %r15, %rax; orq %rcx, %rax; addq %rax, %r14; + movq (((18 -15)&0xF)*8)(%rsp), %rax; movq (((18 -16)&0xF)*8)(%rsp), %rbx; addq (((18 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((18 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((18)&0xF)*8)(%rsp); movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r13; movq %r12, %rcx; xorq %r11, %rcx; andq %r10, %rcx; xorq %r12, %rcx; addq %rax, %r13; movabs $0x0FC19DC68B8CD5B5, %rax; addq %rcx, %r13; addq %rax, %r13; addq %r13, %r9; movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r8, %rcx; addq %rax, %r13; movq %r8, %rax; orq %r15, %rax; andq %r15, %rcx; andq %r14, %rax; orq %rcx, %rax; addq %rax, %r13; + movq (((19 -15)&0xF)*8)(%rsp), %rax; movq (((19 -16)&0xF)*8)(%rsp), %rbx; addq (((19 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((19 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((19)&0xF)*8)(%rsp); movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r12; movq %r11, %rcx; xorq %r10, %rcx; andq %r9, %rcx; xorq %r11, %rcx; addq %rax, %r12; movabs $0x240CA1CC77AC9C65, %rax; addq %rcx, %r12; addq %rax, %r12; addq %r12, %r8; movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r15, %rcx; addq %rax, %r12; 
movq %r15, %rax; orq %r14, %rax; andq %r14, %rcx; andq %r13, %rax; orq %rcx, %rax; addq %rax, %r12; + movq (((20 -15)&0xF)*8)(%rsp), %rax; movq (((20 -16)&0xF)*8)(%rsp), %rbx; addq (((20 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((20 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((20)&0xF)*8)(%rsp); movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r11; movq %r10, %rcx; xorq %r9, %rcx; andq %r8, %rcx; xorq %r10, %rcx; addq %rax, %r11; movabs $0x2DE92C6F592B0275, %rax; addq %rcx, %r11; addq %rax, %r11; addq %r11, %r15; movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r14, %rcx; addq %rax, %r11; movq %r14, %rax; orq %r13, %rax; andq %r13, %rcx; andq %r12, %rax; orq %rcx, %rax; addq %rax, %r11; + movq (((21 -15)&0xF)*8)(%rsp), %rax; movq (((21 -16)&0xF)*8)(%rsp), %rbx; addq (((21 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((21 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((21)&0xF)*8)(%rsp); movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r10; movq %r9, %rcx; xorq %r8, %rcx; andq %r15, %rcx; xorq %r9, %rcx; addq %rax, %r10; movabs $0x4A7484AA6EA6E483, %rax; addq %rcx, %r10; addq %rax, %r10; addq %r10, %r14; movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r13, %rcx; addq %rax, %r10; movq %r13, %rax; orq %r12, %rax; andq %r12, %rcx; andq %r11, %rax; orq %rcx, %rax; addq %rax, %r10; + movq (((22 -15)&0xF)*8)(%rsp), %rax; movq (((22 -16)&0xF)*8)(%rsp), %rbx; addq (((22 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((22 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((22)&0xF)*8)(%rsp); movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r9; movq %r8, %rcx; xorq %r15, %rcx; andq %r14, %rcx; xorq %r8, %rcx; addq %rax, %r9; movabs $0x5CB0A9DCBD41FBD4, %rax; addq %rcx, %r9; addq %rax, %r9; addq %r9, %r13; movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r12, %rcx; addq %rax, %r9; movq %r12, %rax; orq %r11, %rax; andq %r11, %rcx; andq %r10, %rax; orq %rcx, %rax; addq %rax, %r9; + movq (((23 -15)&0xF)*8)(%rsp), %rax; movq (((23 -16)&0xF)*8)(%rsp), %rbx; addq (((23 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((23 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, 
%rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((23)&0xF)*8)(%rsp); movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r8; movq %r15, %rcx; xorq %r14, %rcx; andq %r13, %rcx; xorq %r15, %rcx; addq %rax, %r8; movabs $0x76F988DA831153B5, %rax; addq %rcx, %r8; addq %rax, %r8; addq %r8, %r12; movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r11, %rcx; addq %rax, %r8; movq %r11, %rax; orq %r10, %rax; andq %r10, %rcx; andq %r9, %rax; orq %rcx, %rax; addq %rax, %r8; + movq (((24 -15)&0xF)*8)(%rsp), %rax; movq (((24 -16)&0xF)*8)(%rsp), %rbx; addq (((24 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((24 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((24)&0xF)*8)(%rsp); movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r15; movq %r14, %rcx; xorq %r13, %rcx; andq %r12, %rcx; xorq %r14, %rcx; addq %rax, %r15; movabs $0x983E5152EE66DFAB, %rax; addq %rcx, %r15; addq %rax, %r15; addq %r15, %r11; movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r10, %rcx; addq %rax, %r15; movq %r10, %rax; orq %r9, %rax; andq %r9, %rcx; andq %r8, %rax; orq %rcx, %rax; addq %rax, %r15; + movq (((25 -15)&0xF)*8)(%rsp), %rax; movq (((25 -16)&0xF)*8)(%rsp), %rbx; addq (((25 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((25 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((25)&0xF)*8)(%rsp); movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r14; movq %r13, %rcx; xorq %r12, %rcx; andq %r11, %rcx; xorq %r13, %rcx; addq %rax, %r14; movabs $0xA831C66D2DB43210, %rax; addq %rcx, %r14; addq %rax, %r14; addq %r14, %r10; movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r9, %rcx; addq %rax, %r14; movq %r9, %rax; orq %r8, %rax; andq %r8, %rcx; andq %r15, %rax; orq %rcx, %rax; addq %rax, %r14; + movq (((26 -15)&0xF)*8)(%rsp), %rax; movq (((26 -16)&0xF)*8)(%rsp), %rbx; addq (((26 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((26 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((26)&0xF)*8)(%rsp); movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r13; movq %r12, %rcx; xorq %r11, %rcx; andq %r10, %rcx; xorq %r12, %rcx; addq %rax, %r13; movabs $0xB00327C898FB213F, %rax; addq %rcx, %r13; addq %rax, %r13; addq %r13, %r9; movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq 
$28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r8, %rcx; addq %rax, %r13; movq %r8, %rax; orq %r15, %rax; andq %r15, %rcx; andq %r14, %rax; orq %rcx, %rax; addq %rax, %r13; + movq (((27 -15)&0xF)*8)(%rsp), %rax; movq (((27 -16)&0xF)*8)(%rsp), %rbx; addq (((27 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((27 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((27)&0xF)*8)(%rsp); movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r12; movq %r11, %rcx; xorq %r10, %rcx; andq %r9, %rcx; xorq %r11, %rcx; addq %rax, %r12; movabs $0xBF597FC7BEEF0EE4, %rax; addq %rcx, %r12; addq %rax, %r12; addq %r12, %r8; movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r15, %rcx; addq %rax, %r12; movq %r15, %rax; orq %r14, %rax; andq %r14, %rcx; andq %r13, %rax; orq %rcx, %rax; addq %rax, %r12; + movq (((28 -15)&0xF)*8)(%rsp), %rax; movq (((28 -16)&0xF)*8)(%rsp), %rbx; addq (((28 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((28 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((28)&0xF)*8)(%rsp); movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r11; movq %r10, %rcx; xorq %r9, %rcx; andq %r8, %rcx; xorq %r10, %rcx; addq %rax, %r11; movabs $0xC6E00BF33DA88FC2, %rax; addq %rcx, %r11; addq %rax, %r11; addq %r11, %r15; movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r14, %rcx; addq %rax, %r11; movq %r14, %rax; orq %r13, %rax; andq %r13, %rcx; andq %r12, %rax; orq %rcx, %rax; addq %rax, %r11; + movq (((29 -15)&0xF)*8)(%rsp), %rax; movq (((29 -16)&0xF)*8)(%rsp), %rbx; addq (((29 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((29 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((29)&0xF)*8)(%rsp); movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r10; movq %r9, %rcx; xorq %r8, %rcx; andq %r15, %rcx; xorq %r9, %rcx; addq %rax, %r10; movabs $0xD5A79147930AA725, %rax; addq %rcx, %r10; addq %rax, %r10; addq %r10, %r14; movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r13, %rcx; addq %rax, %r10; movq %r13, %rax; orq %r12, %rax; andq %r12, %rcx; andq %r11, %rax; orq %rcx, %rax; addq %rax, %r10; + movq (((30 -15)&0xF)*8)(%rsp), %rax; movq (((30 -16)&0xF)*8)(%rsp), %rbx; addq (((30 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((30 - 2)&0xF)*8)(%rsp), %rax; movq %rax, 
%rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((30)&0xF)*8)(%rsp); movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r9; movq %r8, %rcx; xorq %r15, %rcx; andq %r14, %rcx; xorq %r8, %rcx; addq %rax, %r9; movabs $0x06CA6351E003826F, %rax; addq %rcx, %r9; addq %rax, %r9; addq %r9, %r13; movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r12, %rcx; addq %rax, %r9; movq %r12, %rax; orq %r11, %rax; andq %r11, %rcx; andq %r10, %rax; orq %rcx, %rax; addq %rax, %r9; + movq (((31 -15)&0xF)*8)(%rsp), %rax; movq (((31 -16)&0xF)*8)(%rsp), %rbx; addq (((31 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((31 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((31)&0xF)*8)(%rsp); movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r8; movq %r15, %rcx; xorq %r14, %rcx; andq %r13, %rcx; xorq %r15, %rcx; addq %rax, %r8; movabs $0x142929670A0E6E70, %rax; addq %rcx, %r8; addq %rax, %r8; addq %r8, %r12; movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r11, %rcx; addq %rax, %r8; movq %r11, %rax; orq %r10, %rax; andq %r10, %rcx; andq %r9, %rax; orq %rcx, %rax; addq %rax, %r8; + movq (((32 -15)&0xF)*8)(%rsp), %rax; movq (((32 -16)&0xF)*8)(%rsp), %rbx; addq (((32 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((32 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((32)&0xF)*8)(%rsp); movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r15; movq %r14, %rcx; xorq %r13, %rcx; andq %r12, %rcx; xorq %r14, %rcx; addq %rax, %r15; movabs $0x27B70A8546D22FFC, %rax; addq %rcx, %r15; addq %rax, %r15; addq %r15, %r11; movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r10, %rcx; addq %rax, %r15; movq %r10, %rax; orq %r9, %rax; andq %r9, %rcx; andq %r8, %rax; orq %rcx, %rax; addq %rax, %r15; + movq (((33 -15)&0xF)*8)(%rsp), %rax; movq (((33 -16)&0xF)*8)(%rsp), %rbx; addq (((33 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((33 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((33)&0xF)*8)(%rsp); movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r14; movq %r13, %rcx; xorq %r12, %rcx; andq %r11, %rcx; xorq %r13, %rcx; addq %rax, %r14; movabs $0x2E1B21385C26C926, %rax; addq %rcx, %r14; addq %rax, %r14; addq %r14, %r10; movq %r15, 
%rcx; movq %r15, %rdx; movq %r15, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r9, %rcx; addq %rax, %r14; movq %r9, %rax; orq %r8, %rax; andq %r8, %rcx; andq %r15, %rax; orq %rcx, %rax; addq %rax, %r14; + movq (((34 -15)&0xF)*8)(%rsp), %rax; movq (((34 -16)&0xF)*8)(%rsp), %rbx; addq (((34 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((34 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((34)&0xF)*8)(%rsp); movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r13; movq %r12, %rcx; xorq %r11, %rcx; andq %r10, %rcx; xorq %r12, %rcx; addq %rax, %r13; movabs $0x4D2C6DFC5AC42AED, %rax; addq %rcx, %r13; addq %rax, %r13; addq %r13, %r9; movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r8, %rcx; addq %rax, %r13; movq %r8, %rax; orq %r15, %rax; andq %r15, %rcx; andq %r14, %rax; orq %rcx, %rax; addq %rax, %r13; + movq (((35 -15)&0xF)*8)(%rsp), %rax; movq (((35 -16)&0xF)*8)(%rsp), %rbx; addq (((35 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((35 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((35)&0xF)*8)(%rsp); movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r12; movq %r11, %rcx; xorq %r10, %rcx; andq %r9, %rcx; xorq %r11, %rcx; addq %rax, %r12; movabs $0x53380D139D95B3DF, %rax; addq %rcx, %r12; addq %rax, %r12; addq %r12, %r8; movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r15, %rcx; addq %rax, %r12; movq %r15, %rax; orq %r14, %rax; andq %r14, %rcx; andq %r13, %rax; orq %rcx, %rax; addq %rax, %r12; + movq (((36 -15)&0xF)*8)(%rsp), %rax; movq (((36 -16)&0xF)*8)(%rsp), %rbx; addq (((36 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((36 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((36)&0xF)*8)(%rsp); movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r11; movq %r10, %rcx; xorq %r9, %rcx; andq %r8, %rcx; xorq %r10, %rcx; addq %rax, %r11; movabs $0x650A73548BAF63DE, %rax; addq %rcx, %r11; addq %rax, %r11; addq %r11, %r15; movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r14, %rcx; addq %rax, %r11; movq %r14, %rax; orq %r13, %rax; andq %r13, %rcx; andq %r12, %rax; orq %rcx, %rax; addq %rax, %r11; + movq (((37 -15)&0xF)*8)(%rsp), %rax; movq (((37 -16)&0xF)*8)(%rsp), %rbx; addq (((37 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, 
%rax; addq %rax, %rbx; movq (((37 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((37)&0xF)*8)(%rsp); movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r10; movq %r9, %rcx; xorq %r8, %rcx; andq %r15, %rcx; xorq %r9, %rcx; addq %rax, %r10; movabs $0x766A0ABB3C77B2A8, %rax; addq %rcx, %r10; addq %rax, %r10; addq %r10, %r14; movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r13, %rcx; addq %rax, %r10; movq %r13, %rax; orq %r12, %rax; andq %r12, %rcx; andq %r11, %rax; orq %rcx, %rax; addq %rax, %r10; + movq (((38 -15)&0xF)*8)(%rsp), %rax; movq (((38 -16)&0xF)*8)(%rsp), %rbx; addq (((38 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((38 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((38)&0xF)*8)(%rsp); movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r9; movq %r8, %rcx; xorq %r15, %rcx; andq %r14, %rcx; xorq %r8, %rcx; addq %rax, %r9; movabs $0x81C2C92E47EDAEE6, %rax; addq %rcx, %r9; addq %rax, %r9; addq %r9, %r13; movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r12, %rcx; addq %rax, %r9; movq %r12, %rax; orq %r11, %rax; andq %r11, %rcx; andq %r10, %rax; orq %rcx, %rax; addq %rax, %r9; + movq (((39 -15)&0xF)*8)(%rsp), %rax; movq (((39 -16)&0xF)*8)(%rsp), %rbx; addq (((39 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((39 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((39)&0xF)*8)(%rsp); movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r8; movq %r15, %rcx; xorq %r14, %rcx; andq %r13, %rcx; xorq %r15, %rcx; addq %rax, %r8; movabs $0x92722C851482353B, %rax; addq %rcx, %r8; addq %rax, %r8; addq %r8, %r12; movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r11, %rcx; addq %rax, %r8; movq %r11, %rax; orq %r10, %rax; andq %r10, %rcx; andq %r9, %rax; orq %rcx, %rax; addq %rax, %r8; + movq (((40 -15)&0xF)*8)(%rsp), %rax; movq (((40 -16)&0xF)*8)(%rsp), %rbx; addq (((40 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((40 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((40)&0xF)*8)(%rsp); movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r15; movq %r14, %rcx; xorq %r13, %rcx; andq %r12, %rcx; xorq %r14, %rcx; addq %rax, %r15; movabs 
$0xA2BFE8A14CF10364, %rax; addq %rcx, %r15; addq %rax, %r15; addq %r15, %r11; movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r10, %rcx; addq %rax, %r15; movq %r10, %rax; orq %r9, %rax; andq %r9, %rcx; andq %r8, %rax; orq %rcx, %rax; addq %rax, %r15; + movq (((41 -15)&0xF)*8)(%rsp), %rax; movq (((41 -16)&0xF)*8)(%rsp), %rbx; addq (((41 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((41 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((41)&0xF)*8)(%rsp); movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r14; movq %r13, %rcx; xorq %r12, %rcx; andq %r11, %rcx; xorq %r13, %rcx; addq %rax, %r14; movabs $0xA81A664BBC423001, %rax; addq %rcx, %r14; addq %rax, %r14; addq %r14, %r10; movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r9, %rcx; addq %rax, %r14; movq %r9, %rax; orq %r8, %rax; andq %r8, %rcx; andq %r15, %rax; orq %rcx, %rax; addq %rax, %r14; + movq (((42 -15)&0xF)*8)(%rsp), %rax; movq (((42 -16)&0xF)*8)(%rsp), %rbx; addq (((42 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((42 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((42)&0xF)*8)(%rsp); movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r13; movq %r12, %rcx; xorq %r11, %rcx; andq %r10, %rcx; xorq %r12, %rcx; addq %rax, %r13; movabs $0xC24B8B70D0F89791, %rax; addq %rcx, %r13; addq %rax, %r13; addq %r13, %r9; movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r8, %rcx; addq %rax, %r13; movq %r8, %rax; orq %r15, %rax; andq %r15, %rcx; andq %r14, %rax; orq %rcx, %rax; addq %rax, %r13; + movq (((43 -15)&0xF)*8)(%rsp), %rax; movq (((43 -16)&0xF)*8)(%rsp), %rbx; addq (((43 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((43 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((43)&0xF)*8)(%rsp); movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r12; movq %r11, %rcx; xorq %r10, %rcx; andq %r9, %rcx; xorq %r11, %rcx; addq %rax, %r12; movabs $0xC76C51A30654BE30, %rax; addq %rcx, %r12; addq %rax, %r12; addq %r12, %r8; movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r15, %rcx; addq %rax, %r12; movq %r15, %rax; orq %r14, %rax; andq %r14, %rcx; andq %r13, %rax; orq %rcx, %rax; addq %rax, %r12; + movq (((44 -15)&0xF)*8)(%rsp), %rax; movq (((44 -16)&0xF)*8)(%rsp), %rbx; addq (((44 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; 
movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((44 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((44)&0xF)*8)(%rsp); movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r11; movq %r10, %rcx; xorq %r9, %rcx; andq %r8, %rcx; xorq %r10, %rcx; addq %rax, %r11; movabs $0xD192E819D6EF5218, %rax; addq %rcx, %r11; addq %rax, %r11; addq %r11, %r15; movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r14, %rcx; addq %rax, %r11; movq %r14, %rax; orq %r13, %rax; andq %r13, %rcx; andq %r12, %rax; orq %rcx, %rax; addq %rax, %r11; + movq (((45 -15)&0xF)*8)(%rsp), %rax; movq (((45 -16)&0xF)*8)(%rsp), %rbx; addq (((45 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((45 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((45)&0xF)*8)(%rsp); movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r10; movq %r9, %rcx; xorq %r8, %rcx; andq %r15, %rcx; xorq %r9, %rcx; addq %rax, %r10; movabs $0xD69906245565A910, %rax; addq %rcx, %r10; addq %rax, %r10; addq %r10, %r14; movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r13, %rcx; addq %rax, %r10; movq %r13, %rax; orq %r12, %rax; andq %r12, %rcx; andq %r11, %rax; orq %rcx, %rax; addq %rax, %r10; + movq (((46 -15)&0xF)*8)(%rsp), %rax; movq (((46 -16)&0xF)*8)(%rsp), %rbx; addq (((46 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((46 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((46)&0xF)*8)(%rsp); movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r9; movq %r8, %rcx; xorq %r15, %rcx; andq %r14, %rcx; xorq %r8, %rcx; addq %rax, %r9; movabs $0xF40E35855771202A, %rax; addq %rcx, %r9; addq %rax, %r9; addq %r9, %r13; movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r12, %rcx; addq %rax, %r9; movq %r12, %rax; orq %r11, %rax; andq %r11, %rcx; andq %r10, %rax; orq %rcx, %rax; addq %rax, %r9; + movq (((47 -15)&0xF)*8)(%rsp), %rax; movq (((47 -16)&0xF)*8)(%rsp), %rbx; addq (((47 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((47 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((47)&0xF)*8)(%rsp); movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r8; movq %r15, 
%rcx; xorq %r14, %rcx; andq %r13, %rcx; xorq %r15, %rcx; addq %rax, %r8; movabs $0x106AA07032BBD1B8, %rax; addq %rcx, %r8; addq %rax, %r8; addq %r8, %r12; movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r11, %rcx; addq %rax, %r8; movq %r11, %rax; orq %r10, %rax; andq %r10, %rcx; andq %r9, %rax; orq %rcx, %rax; addq %rax, %r8; + movq (((48 -15)&0xF)*8)(%rsp), %rax; movq (((48 -16)&0xF)*8)(%rsp), %rbx; addq (((48 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((48 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((48)&0xF)*8)(%rsp); movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r15; movq %r14, %rcx; xorq %r13, %rcx; andq %r12, %rcx; xorq %r14, %rcx; addq %rax, %r15; movabs $0x19A4C116B8D2D0C8, %rax; addq %rcx, %r15; addq %rax, %r15; addq %r15, %r11; movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r10, %rcx; addq %rax, %r15; movq %r10, %rax; orq %r9, %rax; andq %r9, %rcx; andq %r8, %rax; orq %rcx, %rax; addq %rax, %r15; + movq (((49 -15)&0xF)*8)(%rsp), %rax; movq (((49 -16)&0xF)*8)(%rsp), %rbx; addq (((49 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((49 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((49)&0xF)*8)(%rsp); movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r14; movq %r13, %rcx; xorq %r12, %rcx; andq %r11, %rcx; xorq %r13, %rcx; addq %rax, %r14; movabs $0x1E376C085141AB53, %rax; addq %rcx, %r14; addq %rax, %r14; addq %r14, %r10; movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r9, %rcx; addq %rax, %r14; movq %r9, %rax; orq %r8, %rax; andq %r8, %rcx; andq %r15, %rax; orq %rcx, %rax; addq %rax, %r14; + movq (((50 -15)&0xF)*8)(%rsp), %rax; movq (((50 -16)&0xF)*8)(%rsp), %rbx; addq (((50 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((50 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((50)&0xF)*8)(%rsp); movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r13; movq %r12, %rcx; xorq %r11, %rcx; andq %r10, %rcx; xorq %r12, %rcx; addq %rax, %r13; movabs $0x2748774CDF8EEB99, %rax; addq %rcx, %r13; addq %rax, %r13; addq %r13, %r9; movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r8, %rcx; addq %rax, %r13; movq %r8, %rax; orq %r15, %rax; andq %r15, %rcx; andq %r14, %rax; orq %rcx, %rax; addq %rax, %r13; + movq (((51 -15)&0xF)*8)(%rsp), %rax; movq (((51 
-16)&0xF)*8)(%rsp), %rbx; addq (((51 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((51 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((51)&0xF)*8)(%rsp); movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r12; movq %r11, %rcx; xorq %r10, %rcx; andq %r9, %rcx; xorq %r11, %rcx; addq %rax, %r12; movabs $0x34B0BCB5E19B48A8, %rax; addq %rcx, %r12; addq %rax, %r12; addq %r12, %r8; movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r15, %rcx; addq %rax, %r12; movq %r15, %rax; orq %r14, %rax; andq %r14, %rcx; andq %r13, %rax; orq %rcx, %rax; addq %rax, %r12; + movq (((52 -15)&0xF)*8)(%rsp), %rax; movq (((52 -16)&0xF)*8)(%rsp), %rbx; addq (((52 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((52 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((52)&0xF)*8)(%rsp); movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r11; movq %r10, %rcx; xorq %r9, %rcx; andq %r8, %rcx; xorq %r10, %rcx; addq %rax, %r11; movabs $0x391C0CB3C5C95A63, %rax; addq %rcx, %r11; addq %rax, %r11; addq %r11, %r15; movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r14, %rcx; addq %rax, %r11; movq %r14, %rax; orq %r13, %rax; andq %r13, %rcx; andq %r12, %rax; orq %rcx, %rax; addq %rax, %r11; + movq (((53 -15)&0xF)*8)(%rsp), %rax; movq (((53 -16)&0xF)*8)(%rsp), %rbx; addq (((53 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((53 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((53)&0xF)*8)(%rsp); movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r10; movq %r9, %rcx; xorq %r8, %rcx; andq %r15, %rcx; xorq %r9, %rcx; addq %rax, %r10; movabs $0x4ED8AA4AE3418ACB, %rax; addq %rcx, %r10; addq %rax, %r10; addq %r10, %r14; movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r13, %rcx; addq %rax, %r10; movq %r13, %rax; orq %r12, %rax; andq %r12, %rcx; andq %r11, %rax; orq %rcx, %rax; addq %rax, %r10; + movq (((54 -15)&0xF)*8)(%rsp), %rax; movq (((54 -16)&0xF)*8)(%rsp), %rbx; addq (((54 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((54 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((54)&0xF)*8)(%rsp); movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $18, %rcx; rorq $41, 
%rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r9; movq %r8, %rcx; xorq %r15, %rcx; andq %r14, %rcx; xorq %r8, %rcx; addq %rax, %r9; movabs $0x5B9CCA4F7763E373, %rax; addq %rcx, %r9; addq %rax, %r9; addq %r9, %r13; movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r12, %rcx; addq %rax, %r9; movq %r12, %rax; orq %r11, %rax; andq %r11, %rcx; andq %r10, %rax; orq %rcx, %rax; addq %rax, %r9; + movq (((55 -15)&0xF)*8)(%rsp), %rax; movq (((55 -16)&0xF)*8)(%rsp), %rbx; addq (((55 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((55 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((55)&0xF)*8)(%rsp); movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r8; movq %r15, %rcx; xorq %r14, %rcx; andq %r13, %rcx; xorq %r15, %rcx; addq %rax, %r8; movabs $0x682E6FF3D6B2B8A3, %rax; addq %rcx, %r8; addq %rax, %r8; addq %r8, %r12; movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r11, %rcx; addq %rax, %r8; movq %r11, %rax; orq %r10, %rax; andq %r10, %rcx; andq %r9, %rax; orq %rcx, %rax; addq %rax, %r8; + movq (((56 -15)&0xF)*8)(%rsp), %rax; movq (((56 -16)&0xF)*8)(%rsp), %rbx; addq (((56 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((56 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((56)&0xF)*8)(%rsp); movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r15; movq %r14, %rcx; xorq %r13, %rcx; andq %r12, %rcx; xorq %r14, %rcx; addq %rax, %r15; movabs $0x748F82EE5DEFB2FC, %rax; addq %rcx, %r15; addq %rax, %r15; addq %r15, %r11; movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r10, %rcx; addq %rax, %r15; movq %r10, %rax; orq %r9, %rax; andq %r9, %rcx; andq %r8, %rax; orq %rcx, %rax; addq %rax, %r15; + movq (((57 -15)&0xF)*8)(%rsp), %rax; movq (((57 -16)&0xF)*8)(%rsp), %rbx; addq (((57 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((57 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((57)&0xF)*8)(%rsp); movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r14; movq %r13, %rcx; xorq %r12, %rcx; andq %r11, %rcx; xorq %r13, %rcx; addq %rax, %r14; movabs $0x78A5636F43172F60, %rax; addq %rcx, %r14; addq %rax, %r14; addq %r14, %r10; movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r9, %rcx; addq %rax, %r14; movq %r9, %rax; orq %r8, %rax; andq %r8, %rcx; andq %r15, %rax; orq %rcx, 
%rax; addq %rax, %r14; + movq (((58 -15)&0xF)*8)(%rsp), %rax; movq (((58 -16)&0xF)*8)(%rsp), %rbx; addq (((58 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((58 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((58)&0xF)*8)(%rsp); movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r13; movq %r12, %rcx; xorq %r11, %rcx; andq %r10, %rcx; xorq %r12, %rcx; addq %rax, %r13; movabs $0x84C87814A1F0AB72, %rax; addq %rcx, %r13; addq %rax, %r13; addq %r13, %r9; movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r8, %rcx; addq %rax, %r13; movq %r8, %rax; orq %r15, %rax; andq %r15, %rcx; andq %r14, %rax; orq %rcx, %rax; addq %rax, %r13; + movq (((59 -15)&0xF)*8)(%rsp), %rax; movq (((59 -16)&0xF)*8)(%rsp), %rbx; addq (((59 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((59 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((59)&0xF)*8)(%rsp); movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r12; movq %r11, %rcx; xorq %r10, %rcx; andq %r9, %rcx; xorq %r11, %rcx; addq %rax, %r12; movabs $0x8CC702081A6439EC, %rax; addq %rcx, %r12; addq %rax, %r12; addq %r12, %r8; movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r15, %rcx; addq %rax, %r12; movq %r15, %rax; orq %r14, %rax; andq %r14, %rcx; andq %r13, %rax; orq %rcx, %rax; addq %rax, %r12; + movq (((60 -15)&0xF)*8)(%rsp), %rax; movq (((60 -16)&0xF)*8)(%rsp), %rbx; addq (((60 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((60 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((60)&0xF)*8)(%rsp); movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r11; movq %r10, %rcx; xorq %r9, %rcx; andq %r8, %rcx; xorq %r10, %rcx; addq %rax, %r11; movabs $0x90BEFFFA23631E28, %rax; addq %rcx, %r11; addq %rax, %r11; addq %r11, %r15; movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r14, %rcx; addq %rax, %r11; movq %r14, %rax; orq %r13, %rax; andq %r13, %rcx; andq %r12, %rax; orq %rcx, %rax; addq %rax, %r11; + movq (((61 -15)&0xF)*8)(%rsp), %rax; movq (((61 -16)&0xF)*8)(%rsp), %rbx; addq (((61 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((61 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((61)&0xF)*8)(%rsp); 
movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r10; movq %r9, %rcx; xorq %r8, %rcx; andq %r15, %rcx; xorq %r9, %rcx; addq %rax, %r10; movabs $0xA4506CEBDE82BDE9, %rax; addq %rcx, %r10; addq %rax, %r10; addq %r10, %r14; movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r13, %rcx; addq %rax, %r10; movq %r13, %rax; orq %r12, %rax; andq %r12, %rcx; andq %r11, %rax; orq %rcx, %rax; addq %rax, %r10; + movq (((62 -15)&0xF)*8)(%rsp), %rax; movq (((62 -16)&0xF)*8)(%rsp), %rbx; addq (((62 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((62 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((62)&0xF)*8)(%rsp); movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r9; movq %r8, %rcx; xorq %r15, %rcx; andq %r14, %rcx; xorq %r8, %rcx; addq %rax, %r9; movabs $0xBEF9A3F7B2C67915, %rax; addq %rcx, %r9; addq %rax, %r9; addq %r9, %r13; movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r12, %rcx; addq %rax, %r9; movq %r12, %rax; orq %r11, %rax; andq %r11, %rcx; andq %r10, %rax; orq %rcx, %rax; addq %rax, %r9; + movq (((63 -15)&0xF)*8)(%rsp), %rax; movq (((63 -16)&0xF)*8)(%rsp), %rbx; addq (((63 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((63 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((63)&0xF)*8)(%rsp); movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r8; movq %r15, %rcx; xorq %r14, %rcx; andq %r13, %rcx; xorq %r15, %rcx; addq %rax, %r8; movabs $0xC67178F2E372532B, %rax; addq %rcx, %r8; addq %rax, %r8; addq %r8, %r12; movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r11, %rcx; addq %rax, %r8; movq %r11, %rax; orq %r10, %rax; andq %r10, %rcx; andq %r9, %rax; orq %rcx, %rax; addq %rax, %r8; + movq (((64 -15)&0xF)*8)(%rsp), %rax; movq (((64 -16)&0xF)*8)(%rsp), %rbx; addq (((64 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((64 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((64)&0xF)*8)(%rsp); movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r15; movq %r14, %rcx; xorq %r13, %rcx; andq %r12, %rcx; xorq %r14, %rcx; addq %rax, %r15; movabs $0xCA273ECEEA26619C, %rax; addq %rcx, %r15; addq %rax, %r15; addq %r15, %r11; movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r10, %rcx; addq %rax, 
%r15; movq %r10, %rax; orq %r9, %rax; andq %r9, %rcx; andq %r8, %rax; orq %rcx, %rax; addq %rax, %r15; + movq (((65 -15)&0xF)*8)(%rsp), %rax; movq (((65 -16)&0xF)*8)(%rsp), %rbx; addq (((65 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((65 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((65)&0xF)*8)(%rsp); movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r14; movq %r13, %rcx; xorq %r12, %rcx; andq %r11, %rcx; xorq %r13, %rcx; addq %rax, %r14; movabs $0xD186B8C721C0C207, %rax; addq %rcx, %r14; addq %rax, %r14; addq %r14, %r10; movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r9, %rcx; addq %rax, %r14; movq %r9, %rax; orq %r8, %rax; andq %r8, %rcx; andq %r15, %rax; orq %rcx, %rax; addq %rax, %r14; + movq (((66 -15)&0xF)*8)(%rsp), %rax; movq (((66 -16)&0xF)*8)(%rsp), %rbx; addq (((66 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((66 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((66)&0xF)*8)(%rsp); movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r13; movq %r12, %rcx; xorq %r11, %rcx; andq %r10, %rcx; xorq %r12, %rcx; addq %rax, %r13; movabs $0xEADA7DD6CDE0EB1E, %rax; addq %rcx, %r13; addq %rax, %r13; addq %r13, %r9; movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r8, %rcx; addq %rax, %r13; movq %r8, %rax; orq %r15, %rax; andq %r15, %rcx; andq %r14, %rax; orq %rcx, %rax; addq %rax, %r13; + movq (((67 -15)&0xF)*8)(%rsp), %rax; movq (((67 -16)&0xF)*8)(%rsp), %rbx; addq (((67 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((67 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((67)&0xF)*8)(%rsp); movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r12; movq %r11, %rcx; xorq %r10, %rcx; andq %r9, %rcx; xorq %r11, %rcx; addq %rax, %r12; movabs $0xF57D4F7FEE6ED178, %rax; addq %rcx, %r12; addq %rax, %r12; addq %r12, %r8; movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r15, %rcx; addq %rax, %r12; movq %r15, %rax; orq %r14, %rax; andq %r14, %rcx; andq %r13, %rax; orq %rcx, %rax; addq %rax, %r12; + movq (((68 -15)&0xF)*8)(%rsp), %rax; movq (((68 -16)&0xF)*8)(%rsp), %rbx; addq (((68 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((68 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq 
%rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((68)&0xF)*8)(%rsp); movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r11; movq %r10, %rcx; xorq %r9, %rcx; andq %r8, %rcx; xorq %r10, %rcx; addq %rax, %r11; movabs $0x06F067AA72176FBA, %rax; addq %rcx, %r11; addq %rax, %r11; addq %r11, %r15; movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r14, %rcx; addq %rax, %r11; movq %r14, %rax; orq %r13, %rax; andq %r13, %rcx; andq %r12, %rax; orq %rcx, %rax; addq %rax, %r11; + movq (((69 -15)&0xF)*8)(%rsp), %rax; movq (((69 -16)&0xF)*8)(%rsp), %rbx; addq (((69 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((69 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((69)&0xF)*8)(%rsp); movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r10; movq %r9, %rcx; xorq %r8, %rcx; andq %r15, %rcx; xorq %r9, %rcx; addq %rax, %r10; movabs $0x0A637DC5A2C898A6, %rax; addq %rcx, %r10; addq %rax, %r10; addq %r10, %r14; movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r13, %rcx; addq %rax, %r10; movq %r13, %rax; orq %r12, %rax; andq %r12, %rcx; andq %r11, %rax; orq %rcx, %rax; addq %rax, %r10; + movq (((70 -15)&0xF)*8)(%rsp), %rax; movq (((70 -16)&0xF)*8)(%rsp), %rbx; addq (((70 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((70 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((70)&0xF)*8)(%rsp); movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r9; movq %r8, %rcx; xorq %r15, %rcx; andq %r14, %rcx; xorq %r8, %rcx; addq %rax, %r9; movabs $0x113F9804BEF90DAE, %rax; addq %rcx, %r9; addq %rax, %r9; addq %r9, %r13; movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r12, %rcx; addq %rax, %r9; movq %r12, %rax; orq %r11, %rax; andq %r11, %rcx; andq %r10, %rax; orq %rcx, %rax; addq %rax, %r9; + movq (((71 -15)&0xF)*8)(%rsp), %rax; movq (((71 -16)&0xF)*8)(%rsp), %rbx; addq (((71 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((71 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((71)&0xF)*8)(%rsp); movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r8; movq %r15, %rcx; xorq %r14, %rcx; andq %r13, %rcx; xorq %r15, %rcx; addq %rax, %r8; movabs $0x1B710B35131C471B, %rax; addq %rcx, %r8; addq %rax, %r8; addq %r8, %r12; movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $39, %rcx; rorq $34, %rdx; 
rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r11, %rcx; addq %rax, %r8; movq %r11, %rax; orq %r10, %rax; andq %r10, %rcx; andq %r9, %rax; orq %rcx, %rax; addq %rax, %r8; + movq (((72 -15)&0xF)*8)(%rsp), %rax; movq (((72 -16)&0xF)*8)(%rsp), %rbx; addq (((72 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((72 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((72)&0xF)*8)(%rsp); movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r15; movq %r14, %rcx; xorq %r13, %rcx; andq %r12, %rcx; xorq %r14, %rcx; addq %rax, %r15; movabs $0x28DB77F523047D84, %rax; addq %rcx, %r15; addq %rax, %r15; addq %r15, %r11; movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r10, %rcx; addq %rax, %r15; movq %r10, %rax; orq %r9, %rax; andq %r9, %rcx; andq %r8, %rax; orq %rcx, %rax; addq %rax, %r15; + movq (((73 -15)&0xF)*8)(%rsp), %rax; movq (((73 -16)&0xF)*8)(%rsp), %rbx; addq (((73 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((73 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((73)&0xF)*8)(%rsp); movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r14; movq %r13, %rcx; xorq %r12, %rcx; andq %r11, %rcx; xorq %r13, %rcx; addq %rax, %r14; movabs $0x32CAAB7B40C72493, %rax; addq %rcx, %r14; addq %rax, %r14; addq %r14, %r10; movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r9, %rcx; addq %rax, %r14; movq %r9, %rax; orq %r8, %rax; andq %r8, %rcx; andq %r15, %rax; orq %rcx, %rax; addq %rax, %r14; + movq (((74 -15)&0xF)*8)(%rsp), %rax; movq (((74 -16)&0xF)*8)(%rsp), %rbx; addq (((74 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((74 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((74)&0xF)*8)(%rsp); movq %r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r13; movq %r12, %rcx; xorq %r11, %rcx; andq %r10, %rcx; xorq %r12, %rcx; addq %rax, %r13; movabs $0x3C9EBE0A15C9BEBC, %rax; addq %rcx, %r13; addq %rax, %r13; addq %r13, %r9; movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r8, %rcx; addq %rax, %r13; movq %r8, %rax; orq %r15, %rax; andq %r15, %rcx; andq %r14, %rax; orq %rcx, %rax; addq %rax, %r13; + movq (((75 -15)&0xF)*8)(%rsp), %rax; movq (((75 -16)&0xF)*8)(%rsp), %rbx; addq (((75 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((75 - 2)&0xF)*8)(%rsp), %rax; movq %rax, 
%rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((75)&0xF)*8)(%rsp); movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r12; movq %r11, %rcx; xorq %r10, %rcx; andq %r9, %rcx; xorq %r11, %rcx; addq %rax, %r12; movabs $0x431D67C49C100D4C, %rax; addq %rcx, %r12; addq %rax, %r12; addq %r12, %r8; movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r15, %rcx; addq %rax, %r12; movq %r15, %rax; orq %r14, %rax; andq %r14, %rcx; andq %r13, %rax; orq %rcx, %rax; addq %rax, %r12; + movq (((76 -15)&0xF)*8)(%rsp), %rax; movq (((76 -16)&0xF)*8)(%rsp), %rbx; addq (((76 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((76 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((76)&0xF)*8)(%rsp); movq %r8, %rcx; movq %r8, %rdx; movq %r8, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r11; movq %r10, %rcx; xorq %r9, %rcx; andq %r8, %rcx; xorq %r10, %rcx; addq %rax, %r11; movabs $0x4CC5D4BECB3E42B6, %rax; addq %rcx, %r11; addq %rax, %r11; addq %r11, %r15; movq %r12, %rcx; movq %r12, %rdx; movq %r12, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r14, %rcx; addq %rax, %r11; movq %r14, %rax; orq %r13, %rax; andq %r13, %rcx; andq %r12, %rax; orq %rcx, %rax; addq %rax, %r11; + movq (((77 -15)&0xF)*8)(%rsp), %rax; movq (((77 -16)&0xF)*8)(%rsp), %rbx; addq (((77 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((77 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((77)&0xF)*8)(%rsp); movq %r15, %rcx; movq %r15, %rdx; movq %r15, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r10; movq %r9, %rcx; xorq %r8, %rcx; andq %r15, %rcx; xorq %r9, %rcx; addq %rax, %r10; movabs $0x597F299CFC657E2A, %rax; addq %rcx, %r10; addq %rax, %r10; addq %r10, %r14; movq %r11, %rcx; movq %r11, %rdx; movq %r11, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r13, %rcx; addq %rax, %r10; movq %r13, %rax; orq %r12, %rax; andq %r12, %rcx; andq %r11, %rax; orq %rcx, %rax; addq %rax, %r10; + movq (((78 -15)&0xF)*8)(%rsp), %rax; movq (((78 -16)&0xF)*8)(%rsp), %rbx; addq (((78 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((78 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((78)&0xF)*8)(%rsp); movq %r14, %rcx; movq %r14, %rdx; movq %r14, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r9; movq %r8, %rcx; xorq %r15, %rcx; andq %r14, %rcx; xorq %r8, %rcx; addq %rax, %r9; movabs $0x5FCB6FAB3AD6FAEC, %rax; addq %rcx, %r9; addq %rax, %r9; addq %r9, %r13; movq 
%r10, %rcx; movq %r10, %rdx; movq %r10, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r12, %rcx; addq %rax, %r9; movq %r12, %rax; orq %r11, %rax; andq %r11, %rcx; andq %r10, %rax; orq %rcx, %rax; addq %rax, %r9; + movq (((79 -15)&0xF)*8)(%rsp), %rax; movq (((79 -16)&0xF)*8)(%rsp), %rbx; addq (((79 - 7)&0xF)*8)(%rsp), %rbx; movq %rax, %rcx; movq %rax, %rdx; rorq $8, %rcx; shrq $7, %rdx; rorq $1, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq (((79 - 2)&0xF)*8)(%rsp), %rax; movq %rax, %rcx; movq %rax, %rdx; rorq $61, %rcx; shrq $6, %rdx; rorq $19, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rax, %rbx; movq %rbx, (((79)&0xF)*8)(%rsp); movq %r13, %rcx; movq %r13, %rdx; movq %r13, %rax; rorq $18, %rcx; rorq $41, %rdx; rorq $14, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; addq %rbx, %r8; movq %r15, %rcx; xorq %r14, %rcx; andq %r13, %rcx; xorq %r15, %rcx; addq %rax, %r8; movabs $0x6C44198C4A475817, %rax; addq %rcx, %r8; addq %rax, %r8; addq %r8, %r12; movq %r9, %rcx; movq %r9, %rdx; movq %r9, %rax; rorq $39, %rcx; rorq $34, %rdx; rorq $28, %rax; xorq %rdx, %rcx; xorq %rcx, %rax; movq %r11, %rcx; addq %rax, %r8; movq %r11, %rax; orq %r10, %rax; andq %r10, %rcx; andq %r9, %rax; orq %rcx, %rax; addq %rax, %r8; + + + addq %r8 , 0(%rdi) + addq %r9 , 8(%rdi) + addq %r10, 16(%rdi) + addq %r11, 24(%rdi) + addq %r12, 32(%rdi) + addq %r13, 40(%rdi) + addq %r14, 48(%rdi) + addq %r15, 56(%rdi) + + + movq %xmm0, %r10 + movq %xmm1, %r11 + movq %xmm2, %r12 + movq %xmm3, %r13 + movq %xmm4, %r14 + movq %xmm5, %r15 + +.ifdef WINABI + movq (0*8 + 128)(%rsp), %rbx + movq (1*8 + 128)(%rsp), %rdi + movq (2*8 + 128)(%rsp), %rsi + addq $152, %rsp + .else + movq %xmm6, %rbx + addq $128, %rsp +.endif + + retq + + .ifndef __YASM__ +#if defined(__linux__) && defined(__ELF__) +.section .note.GNU-stack,"",%progbits +#endif + .endif diff --git a/src/Crypto/sha512-x86-nayuki.S b/src/Crypto/sha512-x86-nayuki.S new file mode 100644 index 00000000..e1fce56e --- /dev/null +++ b/src/Crypto/sha512-x86-nayuki.S @@ -0,0 +1,180 @@ +# +# SHA-512 hash in x86 assembly +# +# Copyright (c) 2014 Project Nayuki +# http://www.nayuki.io/page/fast-sha2-hashes-in-x86-assembly +# +# (MIT License) +# Permission is hereby granted, free of charge, to any person obtaining a copy of +# this software and associated documentation files (the "Software"), to deal in +# the Software without restriction, including without limitation the rights to +# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +# the Software, and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# - The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# - The Software is provided "as is", without warranty of any kind, express or +# implied, including but not limited to the warranties of merchantability, +# fitness for a particular purpose and noninfringement. In no event shall the +# authors or copyright holders be liable for any claim, damages or other +# liability, whether in an action of contract, tort or otherwise, arising from, +# out of or in connection with the Software or the use or other dealings in the +# Software. +# + +# Modified by kerukuro for use in cppcrypto. 
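# Editor's note (hedged sketch, not part of the upstream file): judging from the
# argument loads below (4(%ecx) and 8(%ecx), where %ecx holds the caller's %esp),
# the assumed calling convention is cdecl (stdcall @8 when MS_STDCALL is defined)
# with a prototype roughly like:
#     void sha512_compress_nayuki(uint64_t state[8], const uint8_t block[128]);
# The routine copies the 64-byte state to a 16-byte-aligned stack area, byte-swaps
# the sixteen 64-bit message words via pshufb with the .bswap64 constant, and runs
# the 80 SHA-512 rounds in MMX registers, writing the updated state back at the end.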
+ + .ifdef MS_STDCALL + .globl _sha512_compress_nayuki@8 + _sha512_compress_nayuki@8: + .else + .globl _sha512_compress_nayuki + .globl sha512_compress_nayuki + _sha512_compress_nayuki: + sha512_compress_nayuki: + .endif + + movl %esp, %ecx + subl $192, %esp + andl $~0xF, %esp + + + movl 4(%ecx), %eax + movdqu 0(%eax), %xmm0; movdqu %xmm0, 0(%esp) + movdqu 16(%eax), %xmm1; movdqu %xmm1, 16(%esp) + movdqu 32(%eax), %xmm2; movdqu %xmm2, 32(%esp) + movdqu 48(%eax), %xmm3; movdqu %xmm3, 48(%esp) + + + movl 8(%ecx), %eax + movq .bswap64, %mm7 + movq (0*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((0)&0xF)*8+64)(%esp); paddq (7*8)(%esp), %mm0; movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+0*8, %mm0; movq (6*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (3*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (3*8)(%esp); movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (2*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (1*8)(%esp), %mm3; pand (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (7*8)(%esp); + movq (1*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((1)&0xF)*8+64)(%esp); paddq (6*8)(%esp), %mm0; movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+1*8, %mm0; movq (5*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (2*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (2*8)(%esp); movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (1*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (0*8)(%esp), %mm3; pand (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (6*8)(%esp); + movq (2*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((2)&0xF)*8+64)(%esp); paddq (5*8)(%esp), %mm0; movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+2*8, %mm0; movq (4*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (1*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (1*8)(%esp); movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq 
$(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (0*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (7*8)(%esp), %mm3; pand (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (5*8)(%esp); + movq (3*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((3)&0xF)*8+64)(%esp); paddq (4*8)(%esp), %mm0; movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+3*8, %mm0; movq (3*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (0*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (0*8)(%esp); movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (7*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (6*8)(%esp), %mm3; pand (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (4*8)(%esp); + movq (4*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((4)&0xF)*8+64)(%esp); paddq (3*8)(%esp), %mm0; movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+4*8, %mm0; movq (2*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (7*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (7*8)(%esp); movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (6*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (5*8)(%esp), %mm3; pand (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (3*8)(%esp); + movq (5*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((5)&0xF)*8+64)(%esp); paddq (2*8)(%esp), %mm0; movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+5*8, %mm0; movq (1*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (6*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (6*8)(%esp); movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, 
%mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (5*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (4*8)(%esp), %mm3; pand (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (2*8)(%esp); + movq (6*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((6)&0xF)*8+64)(%esp); paddq (1*8)(%esp), %mm0; movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+6*8, %mm0; movq (0*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (5*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (5*8)(%esp); movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (4*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (3*8)(%esp), %mm3; pand (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (1*8)(%esp); + movq (7*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((7)&0xF)*8+64)(%esp); paddq (0*8)(%esp), %mm0; movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+7*8, %mm0; movq (7*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (4*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (4*8)(%esp); movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (3*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (2*8)(%esp), %mm3; pand (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (0*8)(%esp); + movq (8*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((8)&0xF)*8+64)(%esp); paddq (7*8)(%esp), %mm0; movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+8*8, %mm0; movq (6*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (3*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (3*8)(%esp); movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (2*8)(%esp), %mm2; paddq %mm1, 
%mm0; movq %mm2, %mm3; por (1*8)(%esp), %mm3; pand (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (7*8)(%esp); + movq (9*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((9)&0xF)*8+64)(%esp); paddq (6*8)(%esp), %mm0; movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+9*8, %mm0; movq (5*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (2*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (2*8)(%esp); movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (1*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (0*8)(%esp), %mm3; pand (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (6*8)(%esp); + movq (10*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((10)&0xF)*8+64)(%esp); paddq (5*8)(%esp), %mm0; movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+10*8, %mm0; movq (4*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (1*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (1*8)(%esp); movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (0*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (7*8)(%esp), %mm3; pand (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (5*8)(%esp); + movq (11*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((11)&0xF)*8+64)(%esp); paddq (4*8)(%esp), %mm0; movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+11*8, %mm0; movq (3*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (0*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (0*8)(%esp); movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (7*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (6*8)(%esp), %mm3; pand (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; 
movq %mm0, (4*8)(%esp); + movq (12*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((12)&0xF)*8+64)(%esp); paddq (3*8)(%esp), %mm0; movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+12*8, %mm0; movq (2*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (7*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (7*8)(%esp); movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (6*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (5*8)(%esp), %mm3; pand (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (3*8)(%esp); + movq (13*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((13)&0xF)*8+64)(%esp); paddq (2*8)(%esp), %mm0; movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+13*8, %mm0; movq (1*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (6*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (6*8)(%esp); movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (5*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (4*8)(%esp), %mm3; pand (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (2*8)(%esp); + movq (14*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((14)&0xF)*8+64)(%esp); paddq (1*8)(%esp), %mm0; movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+14*8, %mm0; movq (0*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (5*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (5*8)(%esp); movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (4*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (3*8)(%esp), %mm3; pand (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (1*8)(%esp); + movq (15*8)(%eax), %mm0; pshufb %mm7, %mm0; movq %mm0, (((15)&0xF)*8+64)(%esp); paddq (0*8)(%esp), 
%mm0; movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+15*8, %mm0; movq (7*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (4*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (4*8)(%esp); movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (3*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (2*8)(%esp), %mm3; pand (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (0*8)(%esp); + movq (((16 -16)&0xF)*8+64)(%esp), %mm0; paddq (((16 - 7)&0xF)*8+64)(%esp), %mm0; movq (((16 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((16 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((16)&0xF)*8+64)(%esp); paddq (7*8)(%esp), %mm0; movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+16*8, %mm0; movq (6*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (3*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (3*8)(%esp); movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (2*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (1*8)(%esp), %mm3; pand (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (7*8)(%esp); + movq (((17 -16)&0xF)*8+64)(%esp), %mm0; paddq (((17 - 7)&0xF)*8+64)(%esp), %mm0; movq (((17 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((17 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((17)&0xF)*8+64)(%esp); paddq (6*8)(%esp), %mm0; movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, 
%mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+17*8, %mm0; movq (5*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (2*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (2*8)(%esp); movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (1*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (0*8)(%esp), %mm3; pand (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (6*8)(%esp); + movq (((18 -16)&0xF)*8+64)(%esp), %mm0; paddq (((18 - 7)&0xF)*8+64)(%esp), %mm0; movq (((18 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((18 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((18)&0xF)*8+64)(%esp); paddq (5*8)(%esp), %mm0; movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+18*8, %mm0; movq (4*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (1*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (1*8)(%esp); movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (0*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (7*8)(%esp), %mm3; pand (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (5*8)(%esp); + movq (((19 -16)&0xF)*8+64)(%esp), %mm0; paddq (((19 - 7)&0xF)*8+64)(%esp), %mm0; movq (((19 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((19 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((19)&0xF)*8+64)(%esp); paddq (4*8)(%esp), %mm0; movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, 
%mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+19*8, %mm0; movq (3*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (0*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (0*8)(%esp); movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (7*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (6*8)(%esp), %mm3; pand (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (4*8)(%esp); + movq (((20 -16)&0xF)*8+64)(%esp), %mm0; paddq (((20 - 7)&0xF)*8+64)(%esp), %mm0; movq (((20 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((20 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((20)&0xF)*8+64)(%esp); paddq (3*8)(%esp), %mm0; movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+20*8, %mm0; movq (2*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (7*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (7*8)(%esp); movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (6*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (5*8)(%esp), %mm3; pand (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (3*8)(%esp); + movq (((21 -16)&0xF)*8+64)(%esp), %mm0; paddq (((21 - 7)&0xF)*8+64)(%esp), %mm0; movq (((21 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((21 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((21)&0xF)*8+64)(%esp); paddq (2*8)(%esp), %mm0; movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), 
%mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+21*8, %mm0; movq (1*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (6*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (6*8)(%esp); movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (5*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (4*8)(%esp), %mm3; pand (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (2*8)(%esp); + movq (((22 -16)&0xF)*8+64)(%esp), %mm0; paddq (((22 - 7)&0xF)*8+64)(%esp), %mm0; movq (((22 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((22 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((22)&0xF)*8+64)(%esp); paddq (1*8)(%esp), %mm0; movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+22*8, %mm0; movq (0*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (5*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (5*8)(%esp); movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (4*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (3*8)(%esp), %mm3; pand (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (1*8)(%esp); + movq (((23 -16)&0xF)*8+64)(%esp), %mm0; paddq (((23 - 7)&0xF)*8+64)(%esp), %mm0; movq (((23 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((23 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((23)&0xF)*8+64)(%esp); paddq (0*8)(%esp), %mm0; movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; 
psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+23*8, %mm0; movq (7*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (4*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (4*8)(%esp); movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (3*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (2*8)(%esp), %mm3; pand (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (0*8)(%esp); + movq (((24 -16)&0xF)*8+64)(%esp), %mm0; paddq (((24 - 7)&0xF)*8+64)(%esp), %mm0; movq (((24 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((24 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((24)&0xF)*8+64)(%esp); paddq (7*8)(%esp), %mm0; movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+24*8, %mm0; movq (6*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (3*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (3*8)(%esp); movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (2*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (1*8)(%esp), %mm3; pand (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (7*8)(%esp); + movq (((25 -16)&0xF)*8+64)(%esp), %mm0; paddq (((25 - 7)&0xF)*8+64)(%esp), %mm0; movq (((25 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((25 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((25)&0xF)*8+64)(%esp); paddq (6*8)(%esp), %mm0; movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; 
pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+25*8, %mm0; movq (5*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (2*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (2*8)(%esp); movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (1*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (0*8)(%esp), %mm3; pand (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (6*8)(%esp); + movq (((26 -16)&0xF)*8+64)(%esp), %mm0; paddq (((26 - 7)&0xF)*8+64)(%esp), %mm0; movq (((26 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((26 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((26)&0xF)*8+64)(%esp); paddq (5*8)(%esp), %mm0; movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+26*8, %mm0; movq (4*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (1*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (1*8)(%esp); movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (0*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (7*8)(%esp), %mm3; pand (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (5*8)(%esp); + movq (((27 -16)&0xF)*8+64)(%esp), %mm0; paddq (((27 - 7)&0xF)*8+64)(%esp), %mm0; movq (((27 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((27 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((27)&0xF)*8+64)(%esp); paddq (4*8)(%esp), %mm0; movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq 
.roundconstants+27*8, %mm0; movq (3*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (0*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (0*8)(%esp); movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (7*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (6*8)(%esp), %mm3; pand (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (4*8)(%esp); + movq (((28 -16)&0xF)*8+64)(%esp), %mm0; paddq (((28 - 7)&0xF)*8+64)(%esp), %mm0; movq (((28 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((28 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((28)&0xF)*8+64)(%esp); paddq (3*8)(%esp), %mm0; movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+28*8, %mm0; movq (2*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (7*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (7*8)(%esp); movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (6*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (5*8)(%esp), %mm3; pand (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (3*8)(%esp); + movq (((29 -16)&0xF)*8+64)(%esp), %mm0; paddq (((29 - 7)&0xF)*8+64)(%esp), %mm0; movq (((29 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((29 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((29)&0xF)*8+64)(%esp); paddq (2*8)(%esp), %mm0; movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+29*8, %mm0; movq (1*8)(%esp), %mm2; pxor 
(0*8)(%esp), %mm2; pand (7*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (6*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (6*8)(%esp); movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (5*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (4*8)(%esp), %mm3; pand (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (2*8)(%esp); + movq (((30 -16)&0xF)*8+64)(%esp), %mm0; paddq (((30 - 7)&0xF)*8+64)(%esp), %mm0; movq (((30 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((30 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((30)&0xF)*8+64)(%esp); paddq (1*8)(%esp), %mm0; movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+30*8, %mm0; movq (0*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (5*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (5*8)(%esp); movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (4*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (3*8)(%esp), %mm3; pand (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (1*8)(%esp); + movq (((31 -16)&0xF)*8+64)(%esp), %mm0; paddq (((31 - 7)&0xF)*8+64)(%esp), %mm0; movq (((31 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((31 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((31)&0xF)*8+64)(%esp); paddq (0*8)(%esp), %mm0; movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+31*8, %mm0; movq (7*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm2; pxor 
(7*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (4*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (4*8)(%esp); movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (3*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (2*8)(%esp), %mm3; pand (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (0*8)(%esp); + movq (((32 -16)&0xF)*8+64)(%esp), %mm0; paddq (((32 - 7)&0xF)*8+64)(%esp), %mm0; movq (((32 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((32 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((32)&0xF)*8+64)(%esp); paddq (7*8)(%esp), %mm0; movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+32*8, %mm0; movq (6*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (3*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (3*8)(%esp); movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (2*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (1*8)(%esp), %mm3; pand (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (7*8)(%esp); + movq (((33 -16)&0xF)*8+64)(%esp), %mm0; paddq (((33 - 7)&0xF)*8+64)(%esp), %mm0; movq (((33 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((33 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((33)&0xF)*8+64)(%esp); paddq (6*8)(%esp), %mm0; movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+33*8, %mm0; movq (5*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; 
movq (2*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (2*8)(%esp); movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (1*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (0*8)(%esp), %mm3; pand (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (6*8)(%esp); + movq (((34 -16)&0xF)*8+64)(%esp), %mm0; paddq (((34 - 7)&0xF)*8+64)(%esp), %mm0; movq (((34 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((34 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((34)&0xF)*8+64)(%esp); paddq (5*8)(%esp), %mm0; movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+34*8, %mm0; movq (4*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (1*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (1*8)(%esp); movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (0*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (7*8)(%esp), %mm3; pand (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (5*8)(%esp); + movq (((35 -16)&0xF)*8+64)(%esp), %mm0; paddq (((35 - 7)&0xF)*8+64)(%esp), %mm0; movq (((35 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((35 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((35)&0xF)*8+64)(%esp); paddq (4*8)(%esp), %mm0; movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+35*8, %mm0; movq (3*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (0*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, 
(0*8)(%esp); movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (7*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (6*8)(%esp), %mm3; pand (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (4*8)(%esp); + movq (((36 -16)&0xF)*8+64)(%esp), %mm0; paddq (((36 - 7)&0xF)*8+64)(%esp), %mm0; movq (((36 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((36 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((36)&0xF)*8+64)(%esp); paddq (3*8)(%esp), %mm0; movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+36*8, %mm0; movq (2*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (7*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (7*8)(%esp); movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (6*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (5*8)(%esp), %mm3; pand (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (3*8)(%esp); + movq (((37 -16)&0xF)*8+64)(%esp), %mm0; paddq (((37 - 7)&0xF)*8+64)(%esp), %mm0; movq (((37 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((37 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((37)&0xF)*8+64)(%esp); paddq (2*8)(%esp), %mm0; movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+37*8, %mm0; movq (1*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (6*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (6*8)(%esp); movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq 
%mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (5*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (4*8)(%esp), %mm3; pand (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (2*8)(%esp); + movq (((38 -16)&0xF)*8+64)(%esp), %mm0; paddq (((38 - 7)&0xF)*8+64)(%esp), %mm0; movq (((38 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((38 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((38)&0xF)*8+64)(%esp); paddq (1*8)(%esp), %mm0; movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+38*8, %mm0; movq (0*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (5*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (5*8)(%esp); movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (4*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (3*8)(%esp), %mm3; pand (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (1*8)(%esp); + movq (((39 -16)&0xF)*8+64)(%esp), %mm0; paddq (((39 - 7)&0xF)*8+64)(%esp), %mm0; movq (((39 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((39 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((39)&0xF)*8+64)(%esp); paddq (0*8)(%esp), %mm0; movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+39*8, %mm0; movq (7*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (4*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (4*8)(%esp); movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq 
$39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (3*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (2*8)(%esp), %mm3; pand (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (0*8)(%esp); + movq (((40 -16)&0xF)*8+64)(%esp), %mm0; paddq (((40 - 7)&0xF)*8+64)(%esp), %mm0; movq (((40 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((40 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((40)&0xF)*8+64)(%esp); paddq (7*8)(%esp), %mm0; movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+40*8, %mm0; movq (6*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (3*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (3*8)(%esp); movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (2*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (1*8)(%esp), %mm3; pand (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (7*8)(%esp); + movq (((41 -16)&0xF)*8+64)(%esp), %mm0; paddq (((41 - 7)&0xF)*8+64)(%esp), %mm0; movq (((41 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((41 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((41)&0xF)*8+64)(%esp); paddq (6*8)(%esp), %mm0; movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+41*8, %mm0; movq (5*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (2*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (2*8)(%esp); movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq 
$(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (1*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (0*8)(%esp), %mm3; pand (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (6*8)(%esp); + movq (((42 -16)&0xF)*8+64)(%esp), %mm0; paddq (((42 - 7)&0xF)*8+64)(%esp), %mm0; movq (((42 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((42 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((42)&0xF)*8+64)(%esp); paddq (5*8)(%esp), %mm0; movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+42*8, %mm0; movq (4*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (1*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (1*8)(%esp); movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (0*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (7*8)(%esp), %mm3; pand (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (5*8)(%esp); + movq (((43 -16)&0xF)*8+64)(%esp), %mm0; paddq (((43 - 7)&0xF)*8+64)(%esp), %mm0; movq (((43 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((43 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((43)&0xF)*8+64)(%esp); paddq (4*8)(%esp), %mm0; movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+43*8, %mm0; movq (3*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (0*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (0*8)(%esp); movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq 
%mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (7*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (6*8)(%esp), %mm3; pand (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (4*8)(%esp); + movq (((44 -16)&0xF)*8+64)(%esp), %mm0; paddq (((44 - 7)&0xF)*8+64)(%esp), %mm0; movq (((44 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((44 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((44)&0xF)*8+64)(%esp); paddq (3*8)(%esp), %mm0; movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+44*8, %mm0; movq (2*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (7*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (7*8)(%esp); movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (6*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (5*8)(%esp), %mm3; pand (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (3*8)(%esp); + movq (((45 -16)&0xF)*8+64)(%esp), %mm0; paddq (((45 - 7)&0xF)*8+64)(%esp), %mm0; movq (((45 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((45 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((45)&0xF)*8+64)(%esp); paddq (2*8)(%esp), %mm0; movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+45*8, %mm0; movq (1*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (6*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (6*8)(%esp); movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por 
%mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (5*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (4*8)(%esp), %mm3; pand (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (2*8)(%esp); + movq (((46 -16)&0xF)*8+64)(%esp), %mm0; paddq (((46 - 7)&0xF)*8+64)(%esp), %mm0; movq (((46 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((46 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((46)&0xF)*8+64)(%esp); paddq (1*8)(%esp), %mm0; movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+46*8, %mm0; movq (0*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (5*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (5*8)(%esp); movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (4*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (3*8)(%esp), %mm3; pand (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (1*8)(%esp); + movq (((47 -16)&0xF)*8+64)(%esp), %mm0; paddq (((47 - 7)&0xF)*8+64)(%esp), %mm0; movq (((47 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((47 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((47)&0xF)*8+64)(%esp); paddq (0*8)(%esp), %mm0; movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+47*8, %mm0; movq (7*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (4*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (4*8)(%esp); movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq 
(3*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (2*8)(%esp), %mm3; pand (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (0*8)(%esp); + movq (((48 -16)&0xF)*8+64)(%esp), %mm0; paddq (((48 - 7)&0xF)*8+64)(%esp), %mm0; movq (((48 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((48 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((48)&0xF)*8+64)(%esp); paddq (7*8)(%esp), %mm0; movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+48*8, %mm0; movq (6*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (3*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (3*8)(%esp); movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (2*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (1*8)(%esp), %mm3; pand (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (7*8)(%esp); + movq (((49 -16)&0xF)*8+64)(%esp), %mm0; paddq (((49 - 7)&0xF)*8+64)(%esp), %mm0; movq (((49 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((49 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((49)&0xF)*8+64)(%esp); paddq (6*8)(%esp), %mm0; movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+49*8, %mm0; movq (5*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (2*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (2*8)(%esp); movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (1*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por 
(0*8)(%esp), %mm3; pand (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (6*8)(%esp); + movq (((50 -16)&0xF)*8+64)(%esp), %mm0; paddq (((50 - 7)&0xF)*8+64)(%esp), %mm0; movq (((50 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((50 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((50)&0xF)*8+64)(%esp); paddq (5*8)(%esp), %mm0; movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+50*8, %mm0; movq (4*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (1*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (1*8)(%esp); movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (0*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (7*8)(%esp), %mm3; pand (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (5*8)(%esp); + movq (((51 -16)&0xF)*8+64)(%esp), %mm0; paddq (((51 - 7)&0xF)*8+64)(%esp), %mm0; movq (((51 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((51 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((51)&0xF)*8+64)(%esp); paddq (4*8)(%esp), %mm0; movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+51*8, %mm0; movq (3*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (0*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (0*8)(%esp); movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (7*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (6*8)(%esp), %mm3; pand (6*8)(%esp), %mm2; pand 
(5*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (4*8)(%esp); + movq (((52 -16)&0xF)*8+64)(%esp), %mm0; paddq (((52 - 7)&0xF)*8+64)(%esp), %mm0; movq (((52 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((52 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((52)&0xF)*8+64)(%esp); paddq (3*8)(%esp), %mm0; movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+52*8, %mm0; movq (2*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (7*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (7*8)(%esp); movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (6*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (5*8)(%esp), %mm3; pand (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (3*8)(%esp); + movq (((53 -16)&0xF)*8+64)(%esp), %mm0; paddq (((53 - 7)&0xF)*8+64)(%esp), %mm0; movq (((53 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((53 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((53)&0xF)*8+64)(%esp); paddq (2*8)(%esp), %mm0; movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+53*8, %mm0; movq (1*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (6*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (6*8)(%esp); movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (5*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (4*8)(%esp), %mm3; pand (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq 
%mm0, (2*8)(%esp); + movq (((54 -16)&0xF)*8+64)(%esp), %mm0; paddq (((54 - 7)&0xF)*8+64)(%esp), %mm0; movq (((54 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((54 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((54)&0xF)*8+64)(%esp); paddq (1*8)(%esp), %mm0; movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+54*8, %mm0; movq (0*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (5*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (5*8)(%esp); movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (4*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (3*8)(%esp), %mm3; pand (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (1*8)(%esp); + movq (((55 -16)&0xF)*8+64)(%esp), %mm0; paddq (((55 - 7)&0xF)*8+64)(%esp), %mm0; movq (((55 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((55 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((55)&0xF)*8+64)(%esp); paddq (0*8)(%esp), %mm0; movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+55*8, %mm0; movq (7*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (4*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (4*8)(%esp); movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (3*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (2*8)(%esp), %mm3; pand (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (0*8)(%esp); + movq (((56 -16)&0xF)*8+64)(%esp), 
%mm0; paddq (((56 - 7)&0xF)*8+64)(%esp), %mm0; movq (((56 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((56 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((56)&0xF)*8+64)(%esp); paddq (7*8)(%esp), %mm0; movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+56*8, %mm0; movq (6*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (3*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (3*8)(%esp); movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (2*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (1*8)(%esp), %mm3; pand (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (7*8)(%esp); + movq (((57 -16)&0xF)*8+64)(%esp), %mm0; paddq (((57 - 7)&0xF)*8+64)(%esp), %mm0; movq (((57 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((57 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((57)&0xF)*8+64)(%esp); paddq (6*8)(%esp), %mm0; movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+57*8, %mm0; movq (5*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (2*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (2*8)(%esp); movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (1*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (0*8)(%esp), %mm3; pand (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (6*8)(%esp); + movq (((58 -16)&0xF)*8+64)(%esp), %mm0; paddq (((58 - 7)&0xF)*8+64)(%esp), %mm0; movq (((58 
-15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((58 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((58)&0xF)*8+64)(%esp); paddq (5*8)(%esp), %mm0; movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+58*8, %mm0; movq (4*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (1*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (1*8)(%esp); movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (0*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (7*8)(%esp), %mm3; pand (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (5*8)(%esp); + movq (((59 -16)&0xF)*8+64)(%esp), %mm0; paddq (((59 - 7)&0xF)*8+64)(%esp), %mm0; movq (((59 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((59 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((59)&0xF)*8+64)(%esp); paddq (4*8)(%esp), %mm0; movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+59*8, %mm0; movq (3*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (0*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (0*8)(%esp); movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (7*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (6*8)(%esp), %mm3; pand (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (4*8)(%esp); + movq (((60 -16)&0xF)*8+64)(%esp), %mm0; paddq (((60 - 7)&0xF)*8+64)(%esp), %mm0; movq (((60 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, 
%mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((60 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((60)&0xF)*8+64)(%esp); paddq (3*8)(%esp), %mm0; movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+60*8, %mm0; movq (2*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (7*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (7*8)(%esp); movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (6*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (5*8)(%esp), %mm3; pand (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (3*8)(%esp); + movq (((61 -16)&0xF)*8+64)(%esp), %mm0; paddq (((61 - 7)&0xF)*8+64)(%esp), %mm0; movq (((61 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((61 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((61)&0xF)*8+64)(%esp); paddq (2*8)(%esp), %mm0; movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+61*8, %mm0; movq (1*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (6*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (6*8)(%esp); movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (5*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (4*8)(%esp), %mm3; pand (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (2*8)(%esp); + movq (((62 -16)&0xF)*8+64)(%esp), %mm0; paddq (((62 - 7)&0xF)*8+64)(%esp), %mm0; movq (((62 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, 
%mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((62 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((62)&0xF)*8+64)(%esp); paddq (1*8)(%esp), %mm0; movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+62*8, %mm0; movq (0*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (5*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (5*8)(%esp); movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (4*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (3*8)(%esp), %mm3; pand (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (1*8)(%esp); + movq (((63 -16)&0xF)*8+64)(%esp), %mm0; paddq (((63 - 7)&0xF)*8+64)(%esp), %mm0; movq (((63 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((63 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((63)&0xF)*8+64)(%esp); paddq (0*8)(%esp), %mm0; movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+63*8, %mm0; movq (7*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (4*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (4*8)(%esp); movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (3*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (2*8)(%esp), %mm3; pand (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (0*8)(%esp); + movq (((64 -16)&0xF)*8+64)(%esp), %mm0; paddq (((64 - 7)&0xF)*8+64)(%esp), %mm0; movq (((64 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), 
%mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((64 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((64)&0xF)*8+64)(%esp); paddq (7*8)(%esp), %mm0; movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+64*8, %mm0; movq (6*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (3*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (3*8)(%esp); movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (2*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (1*8)(%esp), %mm3; pand (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (7*8)(%esp); + movq (((65 -16)&0xF)*8+64)(%esp), %mm0; paddq (((65 - 7)&0xF)*8+64)(%esp), %mm0; movq (((65 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((65 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((65)&0xF)*8+64)(%esp); paddq (6*8)(%esp), %mm0; movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+65*8, %mm0; movq (5*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (2*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (2*8)(%esp); movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (1*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (0*8)(%esp), %mm3; pand (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (6*8)(%esp); + movq (((66 -16)&0xF)*8+64)(%esp), %mm0; paddq (((66 - 7)&0xF)*8+64)(%esp), %mm0; movq (((66 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor 
%mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((66 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((66)&0xF)*8+64)(%esp); paddq (5*8)(%esp), %mm0; movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+66*8, %mm0; movq (4*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (1*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (1*8)(%esp); movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (0*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (7*8)(%esp), %mm3; pand (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (5*8)(%esp); + movq (((67 -16)&0xF)*8+64)(%esp), %mm0; paddq (((67 - 7)&0xF)*8+64)(%esp), %mm0; movq (((67 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((67 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((67)&0xF)*8+64)(%esp); paddq (4*8)(%esp), %mm0; movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+67*8, %mm0; movq (3*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (0*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (0*8)(%esp); movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (7*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (6*8)(%esp), %mm3; pand (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (4*8)(%esp); + movq (((68 -16)&0xF)*8+64)(%esp), %mm0; paddq (((68 - 7)&0xF)*8+64)(%esp), %mm0; movq (((68 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((68 
- 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((68)&0xF)*8+64)(%esp); paddq (3*8)(%esp), %mm0; movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+68*8, %mm0; movq (2*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (7*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (7*8)(%esp); movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (6*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (5*8)(%esp), %mm3; pand (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (3*8)(%esp); + movq (((69 -16)&0xF)*8+64)(%esp), %mm0; paddq (((69 - 7)&0xF)*8+64)(%esp), %mm0; movq (((69 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((69 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((69)&0xF)*8+64)(%esp); paddq (2*8)(%esp), %mm0; movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+69*8, %mm0; movq (1*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (6*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (6*8)(%esp); movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (5*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (4*8)(%esp), %mm3; pand (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (2*8)(%esp); + movq (((70 -16)&0xF)*8+64)(%esp), %mm0; paddq (((70 - 7)&0xF)*8+64)(%esp), %mm0; movq (((70 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((70 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, 
%mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((70)&0xF)*8+64)(%esp); paddq (1*8)(%esp), %mm0; movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+70*8, %mm0; movq (0*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (5*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (5*8)(%esp); movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (4*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (3*8)(%esp), %mm3; pand (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (1*8)(%esp); + movq (((71 -16)&0xF)*8+64)(%esp), %mm0; paddq (((71 - 7)&0xF)*8+64)(%esp), %mm0; movq (((71 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((71 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((71)&0xF)*8+64)(%esp); paddq (0*8)(%esp), %mm0; movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+71*8, %mm0; movq (7*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (4*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (4*8)(%esp); movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (3*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (2*8)(%esp), %mm3; pand (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (0*8)(%esp); + movq (((72 -16)&0xF)*8+64)(%esp), %mm0; paddq (((72 - 7)&0xF)*8+64)(%esp), %mm0; movq (((72 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((72 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, 
%mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((72)&0xF)*8+64)(%esp); paddq (7*8)(%esp), %mm0; movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+72*8, %mm0; movq (6*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (3*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (3*8)(%esp); movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (2*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (1*8)(%esp), %mm3; pand (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (7*8)(%esp); + movq (((73 -16)&0xF)*8+64)(%esp), %mm0; paddq (((73 - 7)&0xF)*8+64)(%esp), %mm0; movq (((73 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((73 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((73)&0xF)*8+64)(%esp); paddq (6*8)(%esp), %mm0; movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+73*8, %mm0; movq (5*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm2; pxor (5*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (2*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (2*8)(%esp); movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (1*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (0*8)(%esp), %mm3; pand (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (6*8)(%esp); + movq (((74 -16)&0xF)*8+64)(%esp), %mm0; paddq (((74 - 7)&0xF)*8+64)(%esp), %mm0; movq (((74 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((74 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), 
%mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((74)&0xF)*8+64)(%esp); paddq (5*8)(%esp), %mm0; movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+74*8, %mm0; movq (4*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm2; pxor (4*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (1*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (1*8)(%esp); movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (0*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (7*8)(%esp), %mm3; pand (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (5*8)(%esp); + movq (((75 -16)&0xF)*8+64)(%esp), %mm0; paddq (((75 - 7)&0xF)*8+64)(%esp), %mm0; movq (((75 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((75 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((75)&0xF)*8+64)(%esp); paddq (4*8)(%esp), %mm0; movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+75*8, %mm0; movq (3*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm2; pxor (3*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (0*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (0*8)(%esp); movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (7*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (6*8)(%esp), %mm3; pand (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (4*8)(%esp); + movq (((76 -16)&0xF)*8+64)(%esp), %mm0; paddq (((76 - 7)&0xF)*8+64)(%esp), %mm0; movq (((76 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((76 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; 
pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((76)&0xF)*8+64)(%esp); paddq (3*8)(%esp), %mm0; movq (0*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+76*8, %mm0; movq (2*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; pand (0*8)(%esp), %mm2; pxor (2*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (7*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (7*8)(%esp); movq (4*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (6*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (5*8)(%esp), %mm3; pand (5*8)(%esp), %mm2; pand (4*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (3*8)(%esp); + movq (((77 -16)&0xF)*8+64)(%esp), %mm0; paddq (((77 - 7)&0xF)*8+64)(%esp), %mm0; movq (((77 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((77 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((77)&0xF)*8+64)(%esp); paddq (2*8)(%esp), %mm0; movq (7*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+77*8, %mm0; movq (1*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; pand (7*8)(%esp), %mm2; pxor (1*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (6*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (6*8)(%esp); movq (3*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (5*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (4*8)(%esp), %mm3; pand (4*8)(%esp), %mm2; pand (3*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (2*8)(%esp); + movq (((78 -16)&0xF)*8+64)(%esp), %mm0; paddq (((78 - 7)&0xF)*8+64)(%esp), %mm0; movq (((78 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((78 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq 
%mm0, (((78)&0xF)*8+64)(%esp); paddq (1*8)(%esp), %mm0; movq (6*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+78*8, %mm0; movq (0*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; pand (6*8)(%esp), %mm2; pxor (0*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (5*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (5*8)(%esp); movq (2*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (4*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (3*8)(%esp), %mm3; pand (3*8)(%esp), %mm2; pand (2*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (1*8)(%esp); + movq (((79 -16)&0xF)*8+64)(%esp), %mm0; paddq (((79 - 7)&0xF)*8+64)(%esp), %mm0; movq (((79 -15)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-1), %mm5; psrlq $1, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-8), %mm4; psrlq $8, %mm2; por %mm4, %mm2; psrlq $7, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq (((79 - 2)&0xF)*8+64)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm5; psllq $(64-19), %mm5; psrlq $19, %mm1; por %mm5, %mm1; movq %mm2, %mm4; psllq $(64-61), %mm4; psrlq $61, %mm2; por %mm4, %mm2; psrlq $6, %mm3; pxor %mm3, %mm2; pxor %mm2, %mm1; paddq %mm1, %mm0; movq %mm0, (((79)&0xF)*8+64)(%esp); paddq (0*8)(%esp), %mm0; movq (5*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-18), %mm4; psrlq $18, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-41), %mm5; psrlq $41, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-14), %mm6; psrlq $14, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; paddq .roundconstants+79*8, %mm0; movq (7*8)(%esp), %mm2; pxor (6*8)(%esp), %mm2; pand (5*8)(%esp), %mm2; pxor (7*8)(%esp), %mm2; paddq %mm1, %mm0; paddq %mm2, %mm0; movq (4*8)(%esp), %mm1; paddq %mm0, %mm1; movq %mm1, (4*8)(%esp); movq (1*8)(%esp), %mm1; movq %mm1, %mm2; movq %mm1, %mm3; movq %mm1, %mm4; psllq $(64-39), %mm4; psrlq $39, %mm1; por %mm4, %mm1; movq %mm2, %mm5; psllq $(64-34), %mm5; psrlq $34, %mm2; por %mm5, %mm2; movq %mm3, %mm6; psllq $(64-28), %mm6; psrlq $28, %mm3; por %mm6, %mm3; pxor %mm2, %mm1; pxor %mm3, %mm1; movq (3*8)(%esp), %mm2; paddq %mm1, %mm0; movq %mm2, %mm3; por (2*8)(%esp), %mm3; pand (2*8)(%esp), %mm2; pand (1*8)(%esp), %mm3; por %mm2, %mm3; paddq %mm3, %mm0; movq %mm0, (0*8)(%esp); + + + movl 4(%ecx), %eax + movdqu 0(%eax), %xmm0; paddq 0(%esp), %xmm0; movdqu %xmm0, 0(%eax) + movdqu 16(%eax), %xmm1; paddq 16(%esp), %xmm1; movdqu %xmm1, 16(%eax) + movdqu 32(%eax), %xmm2; paddq 32(%esp), %xmm2; movdqu %xmm2, 32(%eax) + movdqu 48(%eax), %xmm3; paddq 48(%esp), %xmm3; movdqu %xmm3, 48(%eax) + + + emms + movl %ecx, %esp + + .ifdef MS_STDCALL + ret $8 + .else + retl + .endif + + +.balign 8 +.bswap64: + .quad 0x0001020304050607 + +.roundconstants: + .quad 0x428A2F98D728AE22, 0x7137449123EF65CD, 0xB5C0FBCFEC4D3B2F, 0xE9B5DBA58189DBBC + .quad 0x3956C25BF348B538, 0x59F111F1B605D019, 0x923F82A4AF194F9B, 0xAB1C5ED5DA6D8118 + .quad 0xD807AA98A3030242, 0x12835B0145706FBE, 0x243185BE4EE4B28C, 
0x550C7DC3D5FFB4E2 + .quad 0x72BE5D74F27B896F, 0x80DEB1FE3B1696B1, 0x9BDC06A725C71235, 0xC19BF174CF692694 + .quad 0xE49B69C19EF14AD2, 0xEFBE4786384F25E3, 0x0FC19DC68B8CD5B5, 0x240CA1CC77AC9C65 + .quad 0x2DE92C6F592B0275, 0x4A7484AA6EA6E483, 0x5CB0A9DCBD41FBD4, 0x76F988DA831153B5 + .quad 0x983E5152EE66DFAB, 0xA831C66D2DB43210, 0xB00327C898FB213F, 0xBF597FC7BEEF0EE4 + .quad 0xC6E00BF33DA88FC2, 0xD5A79147930AA725, 0x06CA6351E003826F, 0x142929670A0E6E70 + .quad 0x27B70A8546D22FFC, 0x2E1B21385C26C926, 0x4D2C6DFC5AC42AED, 0x53380D139D95B3DF + .quad 0x650A73548BAF63DE, 0x766A0ABB3C77B2A8, 0x81C2C92E47EDAEE6, 0x92722C851482353B + .quad 0xA2BFE8A14CF10364, 0xA81A664BBC423001, 0xC24B8B70D0F89791, 0xC76C51A30654BE30 + .quad 0xD192E819D6EF5218, 0xD69906245565A910, 0xF40E35855771202A, 0x106AA07032BBD1B8 + .quad 0x19A4C116B8D2D0C8, 0x1E376C085141AB53, 0x2748774CDF8EEB99, 0x34B0BCB5E19B48A8 + .quad 0x391C0CB3C5C95A63, 0x4ED8AA4AE3418ACB, 0x5B9CCA4F7763E373, 0x682E6FF3D6B2B8A3 + .quad 0x748F82EE5DEFB2FC, 0x78A5636F43172F60, 0x84C87814A1F0AB72, 0x8CC702081A6439EC + .quad 0x90BEFFFA23631E28, 0xA4506CEBDE82BDE9, 0xBEF9A3F7B2C67915, 0xC67178F2E372532B + .quad 0xCA273ECEEA26619C, 0xD186B8C721C0C207, 0xEADA7DD6CDE0EB1E, 0xF57D4F7FEE6ED178 + .quad 0x06F067AA72176FBA, 0x0A637DC5A2C898A6, 0x113F9804BEF90DAE, 0x1B710B35131C471B + .quad 0x28DB77F523047D84, 0x32CAAB7B40C72493, 0x3C9EBE0A15C9BEBC, 0x431D67C49C100D4C + .quad 0x4CC5D4BECB3E42B6, 0x597F299CFC657E2A, 0x5FCB6FAB3AD6FAEC, 0x6C44198C4A475817 + + .ifndef __YASM__ +#if defined(__linux__) && defined(__ELF__) + .section .note.GNU-stack,"",%progbits +#endif + .endif
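
The unrolled rounds above all perform the same FIPS 180-4 SHA-512 step; the rotation counts are visible in the shift pairs (1/8/7 and 19/61/6 for the message schedule, 14/18/41 and 28/34/39 for the state). As a reference only, a minimal C sketch of one such round follows (the helper names and the rotating-register formulation are illustrative; the assembly above keeps the eight state words in fixed stack slots instead):

    #include <stdint.h>

    static uint64_t ror64(uint64_t x, unsigned r) { return (x >> r) | (x << (64 - r)); }

    /* One SHA-512 round for t >= 16: extend the 16-word schedule ring W,
       then update the state s[8] = {a,b,c,d,e,f,g,h}.  K is the round
       constant for round t (the .roundconstants table above). */
    static void sha512_round(uint64_t s[8], uint64_t W[16], uint64_t K, int t)
    {
        uint64_t w15 = W[(t - 15) & 0xF], w2 = W[(t - 2) & 0xF];
        uint64_t s0  = ror64(w15, 1)  ^ ror64(w15, 8)  ^ (w15 >> 7);
        uint64_t s1  = ror64(w2, 19)  ^ ror64(w2, 61)  ^ (w2 >> 6);
        uint64_t Wt  = W[t & 0xF] = W[(t - 16) & 0xF] + W[(t - 7) & 0xF] + s0 + s1;

        uint64_t a = s[0], b = s[1], c = s[2], d = s[3];
        uint64_t e = s[4], f = s[5], g = s[6], h = s[7];

        uint64_t S1  = ror64(e, 14) ^ ror64(e, 18) ^ ror64(e, 41);
        uint64_t ch  = (e & f) ^ (~e & g);          /* coded above as ((f^g)&e)^g */
        uint64_t T1  = h + S1 + ch + K + Wt;
        uint64_t S0  = ror64(a, 28) ^ ror64(a, 34) ^ ror64(a, 39);
        uint64_t maj = (a & b) ^ (a & c) ^ (b & c); /* coded above as ((a|c)&b)|(a&c) */

        s[7] = g; s[6] = f; s[5] = e; s[4] = d + T1;
        s[3] = c; s[2] = b; s[1] = a; s[0] = T1 + S0 + maj;
    }
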
\ No newline at end of file diff --git a/src/Crypto/sha512_avx1_x64.asm b/src/Crypto/sha512_avx1_x64.asm new file mode 100644 index 00000000..06321b5b --- /dev/null +++ b/src/Crypto/sha512_avx1_x64.asm @@ -0,0 +1,427 @@ +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; Copyright (c) 2012, Intel Corporation +; +; All rights reserved. +; +; Redistribution and use in source and binary forms, with or without +; modification, are permitted provided that the following conditions are +; met: +; +; * Redistributions of source code must retain the above copyright +; notice, this list of conditions and the following disclaimer. +; +; * Redistributions in binary form must reproduce the above copyright +; notice, this list of conditions and the following disclaimer in the +; documentation and/or other materials provided with the +; distribution. +; +; * Neither the name of the Intel Corporation nor the names of its +; contributors may be used to endorse or promote products derived from +; this software without specific prior written permission. +; +; +; THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY +; EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +; PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION OR +; CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +; EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; +; Example YASM command lines: +; Windows: yasm -f x64 -D WINABI sha512_avx.asm +; Linux: yasm -f elf64 sha512_avx.asm +; + +BITS 64 +section .text + +; Virtual Registers +%ifdef WINABI + %define msg rcx ; ARG1 + %define digest rdx ; ARG2 + %define msglen r8 ; ARG3 + %define T1 rsi + %define T2 rdi +%else + %define msg rdi ; ARG1 + %define digest rsi ; ARG2 + %define msglen rdx ; ARG3 + %define T1 rcx + %define T2 r8 +%endif +%define a_64 r9 +%define b_64 r10 +%define c_64 r11 +%define d_64 r12 +%define e_64 r13 +%define f_64 r14 +%define g_64 r15 +%define h_64 rbx +%define tmp0 rax + +; Local variables (stack frame) +; Note: frame_size must be an odd multiple of 8 bytes to XMM align RSP +struc frame + .W: resq 80 ; Message Schedule + .WK: resq 2 ; W[t] + K[t] | W[t+1] + K[t+1] + +%ifdef WINABI + .XMMSAVE: resdq 4 + .GPRSAVE: resq 7 +%else + .GPRSAVE: resq 5 +%endif +endstruc + +; Useful QWORD "arrays" for simpler memory references +%define MSG(i) msg + 8*(i) ; Input message (arg1) +%define DIGEST(i) digest + 8*(i) ; Output Digest (arg2) +%define K_t(i) K512 + 8*(i) wrt rip ; SHA Constants (static mem) +%define W_t(i) rsp + frame.W + 8*(i) ; Message Schedule (stack frame) +%define WK_2(i) rsp + frame.WK + 8*((i) % 2) ; W[t]+K[t] (stack frame) +; MSG, DIGEST, K_t, W_t are arrays +; WK_2(t) points to 1 of 2 qwords at frame.WK depdending on t being odd/even + +%macro RotateState 0 + ; Rotate symbles a..h right + %xdefine %%TMP h_64 + %xdefine h_64 g_64 + %xdefine g_64 f_64 + %xdefine f_64 e_64 + %xdefine e_64 d_64 + %xdefine d_64 c_64 + %xdefine c_64 b_64 + %xdefine b_64 a_64 + %xdefine a_64 %%TMP +%endmacro + +%macro RORQ 2 + ; shld is faster than ror on Sandybridge + shld %1, %1, (64 - %2) +%endmacro + +%macro SHA512_Round 1 +%assign %%t (%1) + + ; Compute Round %%t + mov T1, f_64 ; T1 = f + mov tmp0, e_64 ; tmp = e + xor T1, g_64 ; T1 = f ^ g + RORQ tmp0, 23 ; 41 ; tmp = e ror 23 + and T1, e_64 ; T1 = (f ^ g) & e + xor tmp0, e_64 ; tmp = (e ror 23) ^ e + xor T1, g_64 ; T1 = ((f ^ g) & e) ^ g = CH(e,f,g) + add T1, [WK_2(%%t)] ; W[t] + K[t] from message scheduler + RORQ tmp0, 4 ; 18 ; tmp = ((e ror 23) ^ e) ror 4 + xor tmp0, e_64 ; tmp = (((e ror 23) ^ e) ror 4) ^ e + mov T2, a_64 ; T2 = a + add T1, h_64 ; T1 = CH(e,f,g) + W[t] + K[t] + h + RORQ tmp0, 14 ; 14 ; tmp = ((((e ror23)^e)ror4)^e)ror14 = S1(e) + add T1, tmp0 ; T1 = CH(e,f,g) + W[t] + K[t] + S1(e) + mov tmp0, a_64 ; tmp = a + xor T2, c_64 ; T2 = a ^ c + and tmp0, c_64 ; tmp = a & c + and T2, b_64 ; T2 = (a ^ c) & b + xor T2, tmp0 ; T2 = ((a ^ c) & b) ^ (a & c) = Maj(a,b,c) + mov tmp0, a_64 ; tmp = a + RORQ tmp0, 5 ; 39 ; tmp = a ror 5 + xor tmp0, a_64 ; tmp = (a ror 5) ^ a + add d_64, T1 ; e(next_state) = d + T1 + RORQ tmp0, 6 ; 34 ; tmp = ((a ror 5) ^ a) ror 6 + xor tmp0, a_64 ; tmp = (((a ror 5) ^ a) ror 6) ^ a + lea h_64, [T1 + T2] ; a(next_state) = T1 + Maj(a,b,c) + RORQ tmp0, 28 ; 28 ; tmp = ((((a ror5)^a)ror6)^a)ror28 = S0(a) + add h_64, tmp0 ; a(next_state) = T1 + Maj(a,b,c) S0(a) + RotateState +%endmacro + +%macro SHA512_2Sched_2Round_avx 1 +%assign %%t %1 + ; Compute rounds %%t-2 and %%t-1 + ; Compute message schedule QWORDS %%t and %%t+1 + + ; Two rounds are computed based on the values for K[t-2]+W[t-2] and + ; K[t-1]+W[t-1] which were previously stored at WK_2 by the message + ; scheduler. + ; The two new schedule QWORDS are stored at [W_t(%%t)] and [W_t(%%t+1)]. 
+ ; They are then added to their respective SHA512 constants at + ; [K_t(%%t)] and [K_t(%%t+1)] and stored at dqword [WK_2(%%t)] + ; For brievity, the comments following vectored instructions only refer to + ; the first of a pair of QWORDS. + ; Eg. XMM4=W[t-2] really means XMM4={W[t-2]|W[t-1]} + ; The computation of the message schedule and the rounds are tightly + ; stitched to take advantage of instruction-level parallelism. + ; For clarity, integer instructions (for the rounds calculation) are indented + ; by one tab. Vectored instructions (for the message scheduler) are indented + ; by two tabs. + + vmovdqa xmm4, [W_t(%%t-2)] ; XMM4 = W[t-2] + vmovdqu xmm5, [W_t(%%t-15)] ; XMM5 = W[t-15] + mov T1, f_64 + vpsrlq xmm0, xmm4, 61 ; XMM0 = W[t-2]>>61 + mov tmp0, e_64 + vpsrlq xmm6, xmm5, 1 ; XMM6 = W[t-15]>>1 + xor T1, g_64 + RORQ tmp0, 23 ; 41 + vpsrlq xmm1, xmm4, 19 ; XMM1 = W[t-2]>>19 + and T1, e_64 + xor tmp0, e_64 + vpxor xmm0, xmm1 ; XMM0 = W[t-2]>>61 ^ W[t-2]>>19 + xor T1, g_64 + add T1, [WK_2(%%t)]; + vpsrlq xmm7, xmm5, 8 ; XMM7 = W[t-15]>>8 + RORQ tmp0, 4 ; 18 + vpsrlq xmm2, xmm4, 6 ; XMM2 = W[t-2]>>6 + xor tmp0, e_64 + mov T2, a_64 + add T1, h_64 + vpxor xmm6, xmm7 ; XMM6 = W[t-15]>>1 ^ W[t-15]>>8 + RORQ tmp0, 14 ; 14 + add T1, tmp0 + vpsrlq xmm8, xmm5, 7 ; XMM8 = W[t-15]>>7 + mov tmp0, a_64 + xor T2, c_64 + vpsllq xmm3, xmm4, (64-61) ; XMM3 = W[t-2]<<3 + and tmp0, c_64 + and T2, b_64 + vpxor xmm2, xmm3 ; XMM2 = W[t-2]>>6 ^ W[t-2]<<3 + xor T2, tmp0 + mov tmp0, a_64 + vpsllq xmm9, xmm5, (64-1) ; XMM9 = W[t-15]<<63 + RORQ tmp0, 5 ; 39 + vpxor xmm8, xmm9 ; XMM8 = W[t-15]>>7 ^ W[t-15]<<63 + xor tmp0, a_64 + add d_64, T1 + RORQ tmp0, 6 ; 34 + xor tmp0, a_64 + vpxor xmm6, xmm8 ; XMM6 = W[t-15]>>1 ^ W[t-15]>>8 ^ W[t-15]>>7 ^ W[t-15]<<63 + lea h_64, [T1 + T2] + RORQ tmp0, 28 ; 28 + vpsllq xmm4, (64-19) ; XMM4 = W[t-2]<<25 + add h_64, tmp0 + RotateState + vpxor xmm0, xmm4 ; XMM0 = W[t-2]>>61 ^ W[t-2]>>19 ^ W[t-2]<<25 + mov T1, f_64 + vpxor xmm0, xmm2 ; XMM0 = s1(W[t-2]) + mov tmp0, e_64 + xor T1, g_64 + vpaddq xmm0, [W_t(%%t-16)] ; XMM0 = s1(W[t-2]) + W[t-16] + vmovdqu xmm1, [W_t(%%t- 7)] ; XMM1 = W[t-7] + RORQ tmp0, 23 ; 41 + and T1, e_64 + xor tmp0, e_64 + xor T1, g_64 + vpsllq xmm5, (64-8) ; XMM5 = W[t-15]<<56 + add T1, [WK_2(%%t+1)] + vpxor xmm6, xmm5 ; XMM6 = s0(W[t-15]) + RORQ tmp0, 4 ; 18 + vpaddq xmm0, xmm6 ; XMM0 = s1(W[t-2]) + W[t-16] + s0(W[t-15]) + xor tmp0, e_64 + vpaddq xmm0, xmm1 ; XMM0 = W[t] = s1(W[t-2]) + W[t-7] + s0(W[t-15]) + W[t-16] + mov T2, a_64 + add T1, h_64 + RORQ tmp0, 14 ; 14 + add T1, tmp0 + vmovdqa [W_t(%%t)], xmm0 ; Store W[t] + vpaddq xmm0, [K_t(t)] ; Compute W[t]+K[t] + vmovdqa [WK_2(t)], xmm0 ; Store W[t]+K[t] for next rounds + mov tmp0, a_64 + xor T2, c_64 + and tmp0, c_64 + and T2, b_64 + xor T2, tmp0 + mov tmp0, a_64 + RORQ tmp0, 5 ; 39 + xor tmp0, a_64 + add d_64, T1 + RORQ tmp0, 6 ; 34 + xor tmp0, a_64 + lea h_64, [T1 + T2] + RORQ tmp0, 28 ; 28 + add h_64, tmp0 + RotateState +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; void sha512_avx(const void* M, void* D, uint64_t L); +; Purpose: Updates the SHA512 digest stored at D with the message stored in M. +; The size of the message pointed to by M must be an integer multiple of SHA512 +; message blocks. 
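
An illustrative scalar rendering of what one invocation of the scheduler/round macro above produces may help here (the names sigma0, sigma1 and schedule_pair are mine, not part of the file; the vector code computes each pair in a single XMM register and the scheduler runs one iteration ahead of the rounds, filling the two-qword WK buffer consumed by the next invocation):

    #include <stdint.h>

    static uint64_t ror64(uint64_t x, unsigned r) { return (x >> r) | (x << (64 - r)); }
    static uint64_t sigma0(uint64_t x) { return ror64(x, 1)  ^ ror64(x, 8)  ^ (x >> 7); }
    static uint64_t sigma1(uint64_t x) { return ror64(x, 19) ^ ror64(x, 61) ^ (x >> 6); }

    /* Schedule the qword pair {W[t], W[t+1]} and pre-add the round constants,
       as the macro does per invocation (t even, 16 <= t <= 78).  W models the
       80-entry schedule in the stack frame, WK the two-entry W+K buffer. */
    static void schedule_pair(uint64_t W[80], uint64_t WK[2], const uint64_t K[80], int t)
    {
        W[t]     = sigma1(W[t - 2]) + W[t - 7] + sigma0(W[t - 15]) + W[t - 16];
        W[t + 1] = sigma1(W[t - 1]) + W[t - 6] + sigma0(W[t - 14]) + W[t - 15];
        WK[0] = W[t]     + K[t];
        WK[1] = W[t + 1] + K[t + 1];
    }
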
+; L is the message length in SHA512 blocks +global sha512_avx:function +sha512_avx: + cmp msglen, 0 + je .nowork + + ; Allocate Stack Space + sub rsp, frame_size + + ; Save GPRs + mov [rsp + frame.GPRSAVE + 8 * 0], rbx + mov [rsp + frame.GPRSAVE + 8 * 1], r12 + mov [rsp + frame.GPRSAVE + 8 * 2], r13 + mov [rsp + frame.GPRSAVE + 8 * 3], r14 + mov [rsp + frame.GPRSAVE + 8 * 4], r15 +%ifdef WINABI + mov [rsp + frame.GPRSAVE + 8 * 5], rsi + mov [rsp + frame.GPRSAVE + 8 * 6], rdi +%endif + ; Save XMMs +%ifdef WINABI + vmovdqa [rsp + frame.XMMSAVE + 16 * 0], xmm6 + vmovdqa [rsp + frame.XMMSAVE + 16 * 1], xmm7 + vmovdqa [rsp + frame.XMMSAVE + 16 * 2], xmm8 + vmovdqa [rsp + frame.XMMSAVE + 16 * 3], xmm9 +%endif + +.updateblock: + + ; Load state variables + mov a_64, [DIGEST(0)] + mov b_64, [DIGEST(1)] + mov c_64, [DIGEST(2)] + mov d_64, [DIGEST(3)] + mov e_64, [DIGEST(4)] + mov f_64, [DIGEST(5)] + mov g_64, [DIGEST(6)] + mov h_64, [DIGEST(7)] + + %assign t 0 + %rep 80/2 + 1 + ; (80 rounds) / (2 rounds/iteration) + (1 iteration) + ; +1 iteration because the scheduler leads hashing by 1 iteration + %if t < 2 + ; BSWAP 2 QWORDS + vmovdqa xmm1, [XMM_QWORD_BSWAP wrt rip] + vmovdqu xmm0, [MSG(t)] + vpshufb xmm0, xmm0, xmm1 ; BSWAP + vmovdqa [W_t(t)], xmm0 ; Store Scheduled Pair + vpaddq xmm0, xmm0, [K_t(t)] ; Compute W[t]+K[t] + vmovdqa [WK_2(t)], xmm0 ; Store into WK for rounds + %elif t < 16 + ; BSWAP 2 QWORDS, Compute 2 Rounds + vmovdqu xmm0, [MSG(t)] + vpshufb xmm0, xmm0, xmm1 ; BSWAP + SHA512_Round t - 2 ; Round t-2 + vmovdqa [W_t(t)], xmm0 ; Store Scheduled Pair + vpaddq xmm0, xmm0, [K_t(t)] ; Compute W[t]+K[t] + SHA512_Round t - 1 ; Round t-1 + vmovdqa [WK_2(t)], xmm0 ; W[t]+K[t] into WK + %elif t < 79 + ; Schedule 2 QWORDS; Compute 2 Rounds + SHA512_2Sched_2Round_avx t + %else + ; Compute 2 Rounds + SHA512_Round t - 2 + SHA512_Round t - 1 + %endif + %assign t t+2 + %endrep + + ; Update digest + add [DIGEST(0)], a_64 + add [DIGEST(1)], b_64 + add [DIGEST(2)], c_64 + add [DIGEST(3)], d_64 + add [DIGEST(4)], e_64 + add [DIGEST(5)], f_64 + add [DIGEST(6)], g_64 + add [DIGEST(7)], h_64 + + ; Advance to next message block + add msg, 16*8 + dec msglen + jnz .updateblock + + ; Restore XMMs +%ifdef WINABI + vmovdqa xmm6, [rsp + frame.XMMSAVE + 16 * 0] + vmovdqa xmm7, [rsp + frame.XMMSAVE + 16 * 1] + vmovdqa xmm8, [rsp + frame.XMMSAVE + 16 * 2] + vmovdqa xmm9, [rsp + frame.XMMSAVE + 16 * 3] +%endif + ; Restore GPRs + mov rbx, [rsp + frame.GPRSAVE + 8 * 0] + mov r12, [rsp + frame.GPRSAVE + 8 * 1] + mov r13, [rsp + frame.GPRSAVE + 8 * 2] + mov r14, [rsp + frame.GPRSAVE + 8 * 3] + mov r15, [rsp + frame.GPRSAVE + 8 * 4] +%ifdef WINABI + mov rsi, [rsp + frame.GPRSAVE + 8 * 5] + mov rdi, [rsp + frame.GPRSAVE + 8 * 6] +%endif + ; Restore Stack Pointer + add rsp, frame_size + +.nowork: + ret + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;; Binary Data + +section .data + +ALIGN 16 + +; Mask for byte-swapping a couple of qwords in an XMM register using (v)pshufb. 
+XMM_QWORD_BSWAP: + ddq 0x08090a0b0c0d0e0f0001020304050607 + +; K[t] used in SHA512 hashing +K512: + dq 0x428a2f98d728ae22,0x7137449123ef65cd + dq 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc + dq 0x3956c25bf348b538,0x59f111f1b605d019 + dq 0x923f82a4af194f9b,0xab1c5ed5da6d8118 + dq 0xd807aa98a3030242,0x12835b0145706fbe + dq 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 + dq 0x72be5d74f27b896f,0x80deb1fe3b1696b1 + dq 0x9bdc06a725c71235,0xc19bf174cf692694 + dq 0xe49b69c19ef14ad2,0xefbe4786384f25e3 + dq 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 + dq 0x2de92c6f592b0275,0x4a7484aa6ea6e483 + dq 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 + dq 0x983e5152ee66dfab,0xa831c66d2db43210 + dq 0xb00327c898fb213f,0xbf597fc7beef0ee4 + dq 0xc6e00bf33da88fc2,0xd5a79147930aa725 + dq 0x06ca6351e003826f,0x142929670a0e6e70 + dq 0x27b70a8546d22ffc,0x2e1b21385c26c926 + dq 0x4d2c6dfc5ac42aed,0x53380d139d95b3df + dq 0x650a73548baf63de,0x766a0abb3c77b2a8 + dq 0x81c2c92e47edaee6,0x92722c851482353b + dq 0xa2bfe8a14cf10364,0xa81a664bbc423001 + dq 0xc24b8b70d0f89791,0xc76c51a30654be30 + dq 0xd192e819d6ef5218,0xd69906245565a910 + dq 0xf40e35855771202a,0x106aa07032bbd1b8 + dq 0x19a4c116b8d2d0c8,0x1e376c085141ab53 + dq 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 + dq 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb + dq 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 + dq 0x748f82ee5defb2fc,0x78a5636f43172f60 + dq 0x84c87814a1f0ab72,0x8cc702081a6439ec + dq 0x90befffa23631e28,0xa4506cebde82bde9 + dq 0xbef9a3f7b2c67915,0xc67178f2e372532b + dq 0xca273eceea26619c,0xd186b8c721c0c207 + dq 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 + dq 0x06f067aa72176fba,0x0a637dc5a2c898a6 + dq 0x113f9804bef90dae,0x1b710b35131c471b + dq 0x28db77f523047d84,0x32caab7b40c72493 + dq 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c + dq 0x4cc5d4becb3e42b6,0x597f299cfc657e2a + dq 0x5fcb6fab3ad6faec,0x6c44198c4a475817 + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git a/src/Crypto/sha512_avx1_x86.asm b/src/Crypto/sha512_avx1_x86.asm new file mode 100644 index 00000000..31c8bd0d --- /dev/null +++ b/src/Crypto/sha512_avx1_x86.asm @@ -0,0 +1,10 @@ + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git a/src/Crypto/sha512_avx2_x64.asm b/src/Crypto/sha512_avx2_x64.asm new file mode 100644 index 00000000..1ba08665 --- /dev/null +++ b/src/Crypto/sha512_avx2_x64.asm @@ -0,0 +1,804 @@ +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; Copyright (c) 2012, Intel Corporation +; +; All rights reserved. +; +; Redistribution and use in source and binary forms, with or without +; modification, are permitted provided that the following conditions are +; met: +; +; * Redistributions of source code must retain the above copyright +; notice, this list of conditions and the following disclaimer. +; +; * Redistributions in binary form must reproduce the above copyright +; notice, this list of conditions and the following disclaimer in the +; documentation and/or other materials provided with the +; distribution. 
+; +; * Neither the name of the Intel Corporation nor the names of its +; contributors may be used to endorse or promote products derived from +; this software without specific prior written permission. +; +; +; THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY +; EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +; PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION OR +; CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +; EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; +; Example YASM command lines: +; Windows: yasm -f x64 -D WINABI sha512_rorx.asm +; Linux: yasm -f elf64 sha512_rorx.asm +; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; This code schedules 1 blocks at a time, with 4 lanes per block +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +BITS 64 +section .text + +; Virtual Registers +%define Y_0 ymm4 +%define Y_1 ymm5 +%define Y_2 ymm6 +%define Y_3 ymm7 + +%define YTMP0 ymm0 +%define YTMP1 ymm1 +%define YTMP2 ymm2 +%define YTMP3 ymm3 +%define YTMP4 ymm8 +%define XFER YTMP0 + +%define BYTE_FLIP_MASK ymm9 + +%ifdef WINABI + %define INP rcx ; 1st arg + %define CTX rdx ; 2nd arg + %define NUM_BLKS r8 ; 3rd arg + %define c rdi + %define d rsi + %define e r8 + %define y3 rcx +%else + %define INP rdi ; 1st arg + %define CTX rsi ; 2nd arg + %define NUM_BLKS rdx ; 3rd arg + %define c rcx + %define d r8 + %define e rdx + %define y3 rdi +%endif + +%define TBL rbp + +%define a rax +%define b rbx + +%define f r9 +%define g r10 +%define h r11 +%define old_h r11 + +%define T1 r12 +%define y0 r13 +%define y1 r14 +%define y2 r15 + +%define y4 r12 + +; Local variables (stack frame) +struc frame + .XFER: resq 4 + .SRND: resq 1 + .INP: resq 1 + .INPEND: resq 1 + .RSPSAVE: resq 1 + +%ifdef WINABI + .XMMSAVE: resdq 4 + .GPRSAVE: resq 8 +%else + .GPRSAVE: resq 6 +%endif +endstruc + +%define VMOVDQ vmovdqu ;; assume buffers not aligned + +; addm [mem], reg +; Add reg to mem using reg-mem add and store +%macro addm 2 + add %2, %1 + mov %1, %2 +%endm + + +; COPY_YMM_AND_BSWAP ymm, [mem], byte_flip_mask +; Load ymm with mem and byte swap each dword +%macro COPY_YMM_AND_BSWAP 3 + VMOVDQ %1, %2 + vpshufb %1, %1 ,%3 +%endmacro +; rotate_Ys +; Rotate values of symbols Y0...Y3 +%macro rotate_Ys 0 + %xdefine %%Y_ Y_0 + %xdefine Y_0 Y_1 + %xdefine Y_1 Y_2 + %xdefine Y_2 Y_3 + %xdefine Y_3 %%Y_ +%endm + +; RotateState +%macro RotateState 0 + ; Rotate symbles a..h right + %xdefine old_h h + %xdefine %%TMP_ h + %xdefine h g + %xdefine g f + %xdefine f e + %xdefine e d + %xdefine d c + %xdefine c b + %xdefine b a + %xdefine a %%TMP_ +%endm + +; %macro MY_VPALIGNR YDST, YSRC1, YSRC2, RVAL +; YDST = {YSRC1, YSRC2} >> RVAL*8 +%macro MY_VPALIGNR 4 +%define %%YDST %1 +%define %%YSRC1 %2 +%define %%YSRC2 %3 +%define %%RVAL %4 + vperm2f128 %%YDST, %%YSRC1, %%YSRC2, 0x3 ; YDST = {YS1_LO, YS2_HI} + vpalignr %%YDST, %%YDST, %%YSRC2, %%RVAL ; YDST = {YDS1, YS2} >> RVAL*8 +%endm + +%macro 
FOUR_ROUNDS_AND_SCHED 0 +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 0 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + + ; Extract w[t-7] + MY_VPALIGNR YTMP0, Y_3, Y_2, 8 ; YTMP0 = W[-7] + ; Calculate w[t-16] + w[t-7] + vpaddq YTMP0, YTMP0, Y_0 ; YTMP0 = W[-7] + W[-16] + ; Extract w[t-15] + MY_VPALIGNR YTMP1, Y_1, Y_0, 8 ; YTMP1 = W[-15] + + ; Calculate sigma0 + + ; Calculate w[t-15] ror 1 + vpsrlq YTMP2, YTMP1, 1 + vpsllq YTMP3, YTMP1, (64-1) + vpor YTMP3, YTMP3, YTMP2 ; YTMP3 = W[-15] ror 1 + ; Calculate w[t-15] shr 7 + vpsrlq YTMP4, YTMP1, 7 ; YTMP4 = W[-15] >> 7 + + mov y3, a ; y3 = a ; MAJA + rorx y0, e, 41 ; y0 = e >> 41 ; S1A + rorx y1, e, 18 ; y1 = e >> 18 ; S1B + + add h, [rsp+frame.XFER+0*8] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + mov y2, f ; y2 = f ; CH + rorx T1, a, 34 ; T1 = a >> 34 ; S0B + + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ; S1 + xor y2, g ; y2 = f^g ; CH + rorx y1, e, 14 ; y1 = (e >> 14) ; S1 + + and y2, e ; y2 = (f^g)&e ; CH + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ^ (e>>14) ; S1 + rorx y1, a, 39 ; y1 = a >> 39 ; S0A + add d, h ; d = k + w + h + d ; -- + + and y3, b ; y3 = (a|c)&b ; MAJA + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ; S0 + rorx T1, a, 28 ; T1 = (a >> 28) ; S0 + + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ^ (a>>28) ; S0 + mov T1, a ; T1 = a ; MAJB + and T1, c ; T1 = a&c ; MAJB + + add y2, y0 ; y2 = S1 + CH ; -- + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + + add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0 ; -- + add h, y3 ; h = t1 + S0 + MAJ ; -- + +RotateState + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 1 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +;;;;;;;;;;;;;;;;;;;;;;;;;; + + ; Calculate w[t-15] ror 8 + vpsrlq YTMP2, YTMP1, 8 + vpsllq YTMP1, YTMP1, (64-8) + vpor YTMP1, YTMP1, YTMP2 ; YTMP1 = W[-15] ror 8 + ; XOR the three components + vpxor YTMP3, YTMP3, YTMP4 ; YTMP3 = W[-15] ror 1 ^ W[-15] >> 7 + vpxor YTMP1, YTMP3, YTMP1 ; YTMP1 = s0 + + + ; Add three components, w[t-16], w[t-7] and sigma0 + vpaddq YTMP0, YTMP0, YTMP1 ; YTMP0 = W[-16] + W[-7] + s0 + ; Move to appropriate lanes for calculating w[16] and w[17] + vperm2f128 Y_0, YTMP0, YTMP0, 0x0 ; Y_0 = W[-16] + W[-7] + s0 {BABA} + ; Move to appropriate lanes for calculating w[18] and w[19] + vpand YTMP0, YTMP0, [MASK_YMM_LO wrt rip] ; YTMP0 = W[-16] + W[-7] + s0 {DC00} + + ; Calculate w[16] and w[17] in both 128 bit lanes + + ; Calculate sigma1 for w[16] and w[17] on both 128 bit lanes + vperm2f128 YTMP2, Y_3, Y_3, 0x11 ; YTMP2 = W[-2] {BABA} + vpsrlq YTMP4, YTMP2, 6 ; YTMP4 = W[-2] >> 6 {BABA} + + + mov y3, a ; y3 = a ; MAJA + rorx y0, e, 41 ; y0 = e >> 41 ; S1A + rorx y1, e, 18 ; y1 = e >> 18 ; S1B + add h, [rsp+frame.XFER+1*8] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + + mov y2, f ; y2 = f ; CH + rorx T1, a, 34 ; T1 = a >> 34 ; S0B + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ; S1 + xor y2, g ; y2 = f^g ; CH + + + rorx y1, e, 14 ; y1 = (e >> 14) ; S1 + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ^ (e>>14) ; S1 + rorx y1, a, 39 ; y1 = a >> 39 ; S0A + and y2, e ; y2 = (f^g)&e ; CH + add d, h ; d = k + w + h + d ; -- + + and y3, b ; y3 = (a|c)&b ; MAJA + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ; S0 + + rorx T1, a, 28 ; T1 = (a >> 28) ; S0 + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ^ (a>>28) ; S0 + mov T1, a ; T1 = a ; MAJB + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + 
add h, y1 ; h = k + w + h + S0 ; -- + + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0 ; -- + add h, y3 ; h = t1 + S0 + MAJ ; -- + +RotateState + + + + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 2 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +;;;;;;;;;;;;;;;;;;;;;;;;;; + + + vpsrlq YTMP3, YTMP2, 19 ; YTMP3 = W[-2] >> 19 {BABA} + vpsllq YTMP1, YTMP2, (64-19) ; YTMP1 = W[-2] << 19 {BABA} + vpor YTMP3, YTMP3, YTMP1 ; YTMP3 = W[-2] ror 19 {BABA} + vpxor YTMP4, YTMP4, YTMP3 ; YTMP4 = W[-2] ror 19 ^ W[-2] >> 6 {BABA} + vpsrlq YTMP3, YTMP2, 61 ; YTMP3 = W[-2] >> 61 {BABA} + vpsllq YTMP1, YTMP2, (64-61) ; YTMP1 = W[-2] << 61 {BABA} + vpor YTMP3, YTMP3, YTMP1 ; YTMP3 = W[-2] ror 61 {BABA} + vpxor YTMP4, YTMP4, YTMP3 ; YTMP4 = s1 = (W[-2] ror 19) ^ (W[-2] ror 61) ^ (W[-2] >> 6) {BABA} + + ; Add sigma1 to the other compunents to get w[16] and w[17] + vpaddq Y_0, Y_0, YTMP4 ; Y_0 = {W[1], W[0], W[1], W[0]} + + ; Calculate sigma1 for w[18] and w[19] for upper 128 bit lane + vpsrlq YTMP4, Y_0, 6 ; YTMP4 = W[-2] >> 6 {DC--} + + mov y3, a ; y3 = a ; MAJA + rorx y0, e, 41 ; y0 = e >> 41 ; S1A + add h, [rsp+frame.XFER+2*8] ; h = k + w + h ; -- + + rorx y1, e, 18 ; y1 = e >> 18 ; S1B + or y3, c ; y3 = a|c ; MAJA + mov y2, f ; y2 = f ; CH + xor y2, g ; y2 = f^g ; CH + + rorx T1, a, 34 ; T1 = a >> 34 ; S0B + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ; S1 + and y2, e ; y2 = (f^g)&e ; CH + + rorx y1, e, 14 ; y1 = (e >> 14) ; S1 + add d, h ; d = k + w + h + d ; -- + and y3, b ; y3 = (a|c)&b ; MAJA + + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ^ (e>>14) ; S1 + rorx y1, a, 39 ; y1 = a >> 39 ; S0A + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ; S0 + rorx T1, a, 28 ; T1 = (a >> 28) ; S0 + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ^ (a>>28) ; S0 + mov T1, a ; T1 = a ; MAJB + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0 ; -- + + add h, y3 ; h = t1 + S0 + MAJ ; -- + +RotateState + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 3 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +;;;;;;;;;;;;;;;;;;;;;;;;;; + + vpsrlq YTMP3, Y_0, 19 ; YTMP3 = W[-2] >> 19 {DC--} + vpsllq YTMP1, Y_0, (64-19) ; YTMP1 = W[-2] << 19 {DC--} + vpor YTMP3, YTMP3, YTMP1 ; YTMP3 = W[-2] ror 19 {DC--} + vpxor YTMP4, YTMP4, YTMP3 ; YTMP4 = W[-2] ror 19 ^ W[-2] >> 6 {DC--} + vpsrlq YTMP3, Y_0, 61 ; YTMP3 = W[-2] >> 61 {DC--} + vpsllq YTMP1, Y_0, (64-61) ; YTMP1 = W[-2] << 61 {DC--} + vpor YTMP3, YTMP3, YTMP1 ; YTMP3 = W[-2] ror 61 {DC--} + vpxor YTMP4, YTMP4, YTMP3 ; YTMP4 = s1 = (W[-2] ror 19) ^ (W[-2] ror 61) ^ (W[-2] >> 6) {DC--} + + ; Add the sigma0 + w[t-7] + w[t-16] for w[18] and w[19] to newly calculated sigma1 to get w[18] and w[19] + vpaddq YTMP2, YTMP0, YTMP4 ; YTMP2 = {W[3], W[2], --, --} + + ; Form w[19, w[18], w17], w[16] + vpblendd Y_0, Y_0, YTMP2, 0xF0 ; Y_0 = {W[3], W[2], W[1], W[0]} +; vperm2f128 Y_0, Y_0, YTMP2, 0x30 + + mov y3, a ; y3 = a ; MAJA + rorx y0, e, 41 ; y0 = e >> 41 ; S1A + rorx y1, e, 18 ; y1 = e >> 18 ; S1B + add h, [rsp+frame.XFER+3*8] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + + mov y2, f ; y2 = f ; CH + rorx T1, a, 34 ; T1 = a >> 34 ; S0B + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ; S1 + xor y2, g ; y2 = f^g ; CH + + + rorx y1, e, 14 ; y1 = (e >> 14) ; S1 + and y2, e ; y2 = (f^g)&e ; CH + add d, h ; d = k + w + h + d ; -- + and y3, b 
; y3 = (a|c)&b ; MAJA + + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ^ (e>>14) ; S1 + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + + rorx y1, a, 39 ; y1 = a >> 39 ; S0A + add y2, y0 ; y2 = S1 + CH ; -- + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ; S0 + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + + rorx T1, a, 28 ; T1 = (a >> 28) ; S0 + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ^ (a>>28) ; S0 + mov T1, a ; T1 = a ; MAJB + and T1, c ; T1 = a&c ; MAJB + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + + add h, y1 ; h = k + w + h + S0 ; -- + add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0 ; -- + add h, y3 ; h = t1 + S0 + MAJ ; -- + +RotateState + +rotate_Ys +%endm + +%macro DO_4ROUNDS 0 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 0 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + + mov y2, f ; y2 = f ; CH + rorx y0, e, 41 ; y0 = e >> 41 ; S1A + rorx y1, e, 18 ; y1 = e >> 18 ; S1B + xor y2, g ; y2 = f^g ; CH + + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ; S1 + rorx y1, e, 14 ; y1 = (e >> 14) ; S1 + and y2, e ; y2 = (f^g)&e ; CH + + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ^ (e>>14) ; S1 + rorx T1, a, 34 ; T1 = a >> 34 ; S0B + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + rorx y1, a, 39 ; y1 = a >> 39 ; S0A + mov y3, a ; y3 = a ; MAJA + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ; S0 + rorx T1, a, 28 ; T1 = (a >> 28) ; S0 + add h, [rsp + frame.XFER + 8*0] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ^ (a>>28) ; S0 + mov T1, a ; T1 = a ; MAJB + and y3, b ; y3 = (a|c)&b ; MAJA + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + + + add d, h ; d = k + w + h + d ; -- + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + + + ;add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0 ; -- + + ;add h, y3 ; h = t1 + S0 + MAJ ; -- + + RotateState + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 1 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + + add old_h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0 ; -- + mov y2, f ; y2 = f ; CH + rorx y0, e, 41 ; y0 = e >> 41 ; S1A + rorx y1, e, 18 ; y1 = e >> 18 ; S1B + xor y2, g ; y2 = f^g ; CH + + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ; S1 + rorx y1, e, 14 ; y1 = (e >> 14) ; S1 + and y2, e ; y2 = (f^g)&e ; CH + add old_h, y3 ; h = t1 + S0 + MAJ ; -- + + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ^ (e>>14) ; S1 + rorx T1, a, 34 ; T1 = a >> 34 ; S0B + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + rorx y1, a, 39 ; y1 = a >> 39 ; S0A + mov y3, a ; y3 = a ; MAJA + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ; S0 + rorx T1, a, 28 ; T1 = (a >> 28) ; S0 + add h, [rsp + frame.XFER + 8*1] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ^ (a>>28) ; S0 + mov T1, a ; T1 = a ; MAJB + and y3, b ; y3 = (a|c)&b ; MAJA + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + + + add d, h ; d = k + w + h + d ; -- + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + + + ;add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0 ; -- + + ;add h, y3 ; h = t1 + S0 + MAJ ; -- + + RotateState + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 2 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + + add old_h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0 ; -- + mov y2, f ; y2 = f ; CH + rorx y0, e, 41 ; y0 = e >> 41 ; S1A + rorx y1, e, 18 ; y1 = e >> 18 ; S1B + xor y2, g ; y2 = f^g ; CH + + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ; S1 + rorx y1, e, 14 ; y1 = (e >> 14) ; S1 + and y2, e ; y2 = 
(f^g)&e ; CH + add old_h, y3 ; h = t1 + S0 + MAJ ; -- + + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ^ (e>>14) ; S1 + rorx T1, a, 34 ; T1 = a >> 34 ; S0B + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + rorx y1, a, 39 ; y1 = a >> 39 ; S0A + mov y3, a ; y3 = a ; MAJA + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ; S0 + rorx T1, a, 28 ; T1 = (a >> 28) ; S0 + add h, [rsp + frame.XFER + 8*2] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ^ (a>>28) ; S0 + mov T1, a ; T1 = a ; MAJB + and y3, b ; y3 = (a|c)&b ; MAJA + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + + + add d, h ; d = k + w + h + d ; -- + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + + + ;add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0 ; -- + + ;add h, y3 ; h = t1 + S0 + MAJ ; -- + + RotateState + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; RND N + 3 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + + add old_h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0 ; -- + mov y2, f ; y2 = f ; CH + rorx y0, e, 41 ; y0 = e >> 41 ; S1A + rorx y1, e, 18 ; y1 = e >> 18 ; S1B + xor y2, g ; y2 = f^g ; CH + + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ; S1 + rorx y1, e, 14 ; y1 = (e >> 14) ; S1 + and y2, e ; y2 = (f^g)&e ; CH + add old_h, y3 ; h = t1 + S0 + MAJ ; -- + + xor y0, y1 ; y0 = (e>>41) ^ (e>>18) ^ (e>>14) ; S1 + rorx T1, a, 34 ; T1 = a >> 34 ; S0B + xor y2, g ; y2 = CH = ((f^g)&e)^g ; CH + rorx y1, a, 39 ; y1 = a >> 39 ; S0A + mov y3, a ; y3 = a ; MAJA + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ; S0 + rorx T1, a, 28 ; T1 = (a >> 28) ; S0 + add h, [rsp + frame.XFER + 8*3] ; h = k + w + h ; -- + or y3, c ; y3 = a|c ; MAJA + + xor y1, T1 ; y1 = (a>>39) ^ (a>>34) ^ (a>>28) ; S0 + mov T1, a ; T1 = a ; MAJB + and y3, b ; y3 = (a|c)&b ; MAJA + and T1, c ; T1 = a&c ; MAJB + add y2, y0 ; y2 = S1 + CH ; -- + + + add d, h ; d = k + w + h + d ; -- + or y3, T1 ; y3 = MAJ = (a|c)&b)|(a&c) ; MAJ + add h, y1 ; h = k + w + h + S0 ; -- + + add d, y2 ; d = k + w + h + d + S1 + CH = d + t1 ; -- + + + add h, y2 ; h = k + w + h + S0 + S1 + CH = t1 + S0 ; -- + + add h, y3 ; h = t1 + S0 + MAJ ; -- + + RotateState + +%endm + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; void sha512_rorx(const void* M, void* D, uint64_t L); +; Purpose: Updates the SHA512 digest stored at D with the message stored in M. +; The size of the message pointed to by M must be an integer multiple of SHA512 +; message blocks. 
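For reference, the S0/S1/CH/MAJ quantities tracked in the round macros above are the standard FIPS 180-4 SHA-512 functions, and the schedule portion computes W[t] = s1(W[t-2]) + W[t-7] + s0(W[t-15]) + W[t-16]. A minimal portable C sketch of one round and one schedule step (illustrative only, not part of the patch; the rotate counts match the rorx/vpsrlq amounts used above) might look like:

#include <stdint.h>

static inline uint64_t ror64(uint64_t x, unsigned r) { return (x >> r) | (x << (64 - r)); }

/* One SHA-512 round; mirrors the S1A/S1B, S0A/S0B, CH and MAJ comments above. */
static void sha512_round(uint64_t s[8], uint64_t kw /* K[t] + W[t] */)
{
    uint64_t a = s[0], b = s[1], c = s[2], d = s[3];
    uint64_t e = s[4], f = s[5], g = s[6], h = s[7];
    uint64_t S1  = ror64(e, 14) ^ ror64(e, 18) ^ ror64(e, 41);
    uint64_t S0  = ror64(a, 28) ^ ror64(a, 34) ^ ror64(a, 39);
    uint64_t ch  = (e & f) ^ (~e & g);          /* ((f^g)&e)^g in the asm     */
    uint64_t maj = (a & b) ^ (a & c) ^ (b & c); /* ((a|c)&b)|(a&c) in the asm */
    uint64_t t1  = h + S1 + ch + kw;
    s[7] = g; s[6] = f; s[5] = e; s[4] = d + t1;       /* d += t1             */
    s[3] = c; s[2] = b; s[1] = a; s[0] = t1 + S0 + maj;
}

/* One message-schedule step, for t >= 16. */
static uint64_t sha512_sched(const uint64_t W[80], int t)
{
    uint64_t s0 = ror64(W[t - 15], 1)  ^ ror64(W[t - 15], 8) ^ (W[t - 15] >> 7);
    uint64_t s1 = ror64(W[t - 2], 19)  ^ ror64(W[t - 2], 61) ^ (W[t - 2] >> 6);
    return s1 + W[t - 7] + s0 + W[t - 16];
}

The assembly interleaves four such rounds with the vectorized schedule (FOUR_ROUNDS_AND_SCHED) so that scalar round arithmetic and AVX2 schedule arithmetic run on disjoint registers and exploit instruction-level parallelism.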
+; L is the message length in SHA512 blocks +global sha512_rorx:function +global _sha512_rorx:function +sha512_rorx: +_sha512_rorx: + + ; Allocate Stack Space + mov rax, rsp + sub rsp, frame_size + and rsp, ~(0x20 - 1) + mov [rsp + frame.RSPSAVE], rax + + ; Save GPRs + mov [rsp + frame.GPRSAVE + 8 * 0], rbp + mov [rsp + frame.GPRSAVE + 8 * 1], rbx + mov [rsp + frame.GPRSAVE + 8 * 2], r12 + mov [rsp + frame.GPRSAVE + 8 * 3], r13 + mov [rsp + frame.GPRSAVE + 8 * 4], r14 + mov [rsp + frame.GPRSAVE + 8 * 5], r15 +%ifdef WINABI + mov [rsp + frame.GPRSAVE + 8 * 6], rsi + mov [rsp + frame.GPRSAVE + 8 * 7], rdi +%endif + +%ifdef WINABI + vmovdqa [rsp + frame.XMMSAVE + 0*16], xmm6 + vmovdqa [rsp + frame.XMMSAVE + 1*16], xmm7 + vmovdqa [rsp + frame.XMMSAVE + 2*16], xmm8 + vmovdqa [rsp + frame.XMMSAVE + 3*16], xmm9 +%endif + + vpblendd xmm0, xmm0, xmm1, 0xf0 + vpblendd ymm0, ymm0, ymm1, 0xf0 + + shl NUM_BLKS, 7 ; convert to bytes + jz done_hash + add NUM_BLKS, INP ; pointer to end of data + mov [rsp + frame.INPEND], NUM_BLKS + + ;; load initial digest + mov a,[8*0 + CTX] + mov b,[8*1 + CTX] + mov c,[8*2 + CTX] + mov d,[8*3 + CTX] + mov e,[8*4 + CTX] + mov f,[8*5 + CTX] + mov g,[8*6 + CTX] + mov h,[8*7 + CTX] + + vmovdqa BYTE_FLIP_MASK, [PSHUFFLE_BYTE_FLIP_MASK wrt rip] + +loop0: + lea TBL,[K512 wrt rip] + + ;; byte swap first 16 dwords + COPY_YMM_AND_BSWAP Y_0, [INP + 0*32], BYTE_FLIP_MASK + COPY_YMM_AND_BSWAP Y_1, [INP + 1*32], BYTE_FLIP_MASK + COPY_YMM_AND_BSWAP Y_2, [INP + 2*32], BYTE_FLIP_MASK + COPY_YMM_AND_BSWAP Y_3, [INP + 3*32], BYTE_FLIP_MASK + + mov [rsp + frame.INP], INP + + ;; schedule 64 input dwords, by doing 12 rounds of 4 each + mov qword[rsp + frame.SRND],4 + +align 16 +loop1: + vpaddq XFER, Y_0, [TBL + 0*32] + vmovdqa [rsp + frame.XFER], XFER + FOUR_ROUNDS_AND_SCHED + + vpaddq XFER, Y_0, [TBL + 1*32] + vmovdqa [rsp + frame.XFER], XFER + FOUR_ROUNDS_AND_SCHED + + vpaddq XFER, Y_0, [TBL + 2*32] + vmovdqa [rsp + frame.XFER], XFER + FOUR_ROUNDS_AND_SCHED + + vpaddq XFER, Y_0, [TBL + 3*32] + vmovdqa [rsp + frame.XFER], XFER + add TBL, 4*32 + FOUR_ROUNDS_AND_SCHED + + sub qword[rsp + frame.SRND], 1 + jne loop1 + + mov qword[rsp + frame.SRND], 2 +loop2: + vpaddq XFER, Y_0, [TBL + 0*32] + vmovdqa [rsp + frame.XFER], XFER + DO_4ROUNDS + vpaddq XFER, Y_1, [TBL + 1*32] + vmovdqa [rsp + frame.XFER], XFER + add TBL, 2*32 + DO_4ROUNDS + + vmovdqa Y_0, Y_2 + vmovdqa Y_1, Y_3 + + sub qword[rsp + frame.SRND], 1 + jne loop2 + + addm [8*0 + CTX],a + addm [8*1 + CTX],b + addm [8*2 + CTX],c + addm [8*3 + CTX],d + addm [8*4 + CTX],e + addm [8*5 + CTX],f + addm [8*6 + CTX],g + addm [8*7 + CTX],h + + mov INP, [rsp + frame.INP] + add INP, 128 + cmp INP, [rsp + frame.INPEND] + jne loop0 + + done_hash: +%ifdef WINABI + vmovdqa xmm6, [rsp + frame.XMMSAVE + 0*16] + vmovdqa xmm7, [rsp + frame.XMMSAVE + 1*16] + vmovdqa xmm8, [rsp + frame.XMMSAVE + 2*16] + vmovdqa xmm9, [rsp + frame.XMMSAVE + 3*16] +%endif + +; Restore GPRs + mov rbp, [rsp + frame.GPRSAVE + 8 * 0] + mov rbx, [rsp + frame.GPRSAVE + 8 * 1] + mov r12, [rsp + frame.GPRSAVE + 8 * 2] + mov r13, [rsp + frame.GPRSAVE + 8 * 3] + mov r14, [rsp + frame.GPRSAVE + 8 * 4] + mov r15, [rsp + frame.GPRSAVE + 8 * 5] +%ifdef WINABI + mov rsi, [rsp + frame.GPRSAVE + 8 * 6] + mov rdi, [rsp + frame.GPRSAVE + 8 * 7] +%endif + ; Restore Stack Pointer + mov rsp, [rsp + frame.RSPSAVE] + + ret + + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;; Binary Data + +section .data + +align 64 +; K[t] used in SHA512 hashing +K512: + dq 
0x428a2f98d728ae22,0x7137449123ef65cd + dq 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc + dq 0x3956c25bf348b538,0x59f111f1b605d019 + dq 0x923f82a4af194f9b,0xab1c5ed5da6d8118 + dq 0xd807aa98a3030242,0x12835b0145706fbe + dq 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 + dq 0x72be5d74f27b896f,0x80deb1fe3b1696b1 + dq 0x9bdc06a725c71235,0xc19bf174cf692694 + dq 0xe49b69c19ef14ad2,0xefbe4786384f25e3 + dq 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 + dq 0x2de92c6f592b0275,0x4a7484aa6ea6e483 + dq 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 + dq 0x983e5152ee66dfab,0xa831c66d2db43210 + dq 0xb00327c898fb213f,0xbf597fc7beef0ee4 + dq 0xc6e00bf33da88fc2,0xd5a79147930aa725 + dq 0x06ca6351e003826f,0x142929670a0e6e70 + dq 0x27b70a8546d22ffc,0x2e1b21385c26c926 + dq 0x4d2c6dfc5ac42aed,0x53380d139d95b3df + dq 0x650a73548baf63de,0x766a0abb3c77b2a8 + dq 0x81c2c92e47edaee6,0x92722c851482353b + dq 0xa2bfe8a14cf10364,0xa81a664bbc423001 + dq 0xc24b8b70d0f89791,0xc76c51a30654be30 + dq 0xd192e819d6ef5218,0xd69906245565a910 + dq 0xf40e35855771202a,0x106aa07032bbd1b8 + dq 0x19a4c116b8d2d0c8,0x1e376c085141ab53 + dq 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 + dq 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb + dq 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 + dq 0x748f82ee5defb2fc,0x78a5636f43172f60 + dq 0x84c87814a1f0ab72,0x8cc702081a6439ec + dq 0x90befffa23631e28,0xa4506cebde82bde9 + dq 0xbef9a3f7b2c67915,0xc67178f2e372532b + dq 0xca273eceea26619c,0xd186b8c721c0c207 + dq 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 + dq 0x06f067aa72176fba,0x0a637dc5a2c898a6 + dq 0x113f9804bef90dae,0x1b710b35131c471b + dq 0x28db77f523047d84,0x32caab7b40c72493 + dq 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c + dq 0x4cc5d4becb3e42b6,0x597f299cfc657e2a + dq 0x5fcb6fab3ad6faec,0x6c44198c4a475817 + +align 32 + +; Mask for byte-swapping a couple of qwords in an XMM register using (v)pshufb. +PSHUFFLE_BYTE_FLIP_MASK: ddq 0x08090a0b0c0d0e0f0001020304050607 + ddq 0x18191a1b1c1d1e1f1011121314151617 + +MASK_YMM_LO: ddq 0x00000000000000000000000000000000 + ddq 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git a/src/Crypto/sha512_avx2_x86.asm b/src/Crypto/sha512_avx2_x86.asm new file mode 100644 index 00000000..31c8bd0d --- /dev/null +++ b/src/Crypto/sha512_avx2_x86.asm @@ -0,0 +1,10 @@ + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git a/src/Crypto/sha512_sse4_x64.asm b/src/Crypto/sha512_sse4_x64.asm new file mode 100644 index 00000000..d4a99875 --- /dev/null +++ b/src/Crypto/sha512_sse4_x64.asm @@ -0,0 +1,416 @@ +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; Copyright (c) 2012, Intel Corporation +; +; All rights reserved. +; +; Redistribution and use in source and binary forms, with or without +; modification, are permitted provided that the following conditions are +; met: +; +; * Redistributions of source code must retain the above copyright +; notice, this list of conditions and the following disclaimer. 
+; +; * Redistributions in binary form must reproduce the above copyright +; notice, this list of conditions and the following disclaimer in the +; documentation and/or other materials provided with the +; distribution. +; +; * Neither the name of the Intel Corporation nor the names of its +; contributors may be used to endorse or promote products derived from +; this software without specific prior written permission. +; +; +; THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY +; EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +; PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION OR +; CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +; EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +; PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; +; Example YASM command lines: +; Windows: yasm -f x64 -D WINABI sha512_sse4.asm +; Linux: yasm -f elf64 sha512_sse4.asm +; + +# Modified by kerukuro for use in cppcrypto. + +BITS 64 +section .text + +; Virtual Registers +%ifdef WINABI + %define msg rcx ; ARG1 + %define digest rdx ; ARG2 + %define msglen r8 ; ARG3 + %define T1 rsi + %define T2 rdi +%else + %define msg rdi ; ARG1 + %define digest rsi ; ARG2 + %define msglen rdx ; ARG3 + %define T1 rcx + %define T2 r8 +%endif +%define a_64 r9 +%define b_64 r10 +%define c_64 r11 +%define d_64 r12 +%define e_64 r13 +%define f_64 r14 +%define g_64 r15 +%define h_64 rbx +%define tmp0 rax + +; Local variables (stack frame) +; Note: frame_size must be an odd multiple of 8 bytes to XMM align RSP +struc frame + .W: resq 80 ; Message Schedule + .WK: resq 2 ; W[t] + K[t] | W[t+1] + K[t+1] + +%ifdef WINABI + .GPRSAVE: resq 7 +%else + .GPRSAVE: resq 5 +%endif +endstruc + +; Useful QWORD "arrays" for simpler memory references +%define MSG(i) msg + 8*(i) ; Input message (arg1) +%define DIGEST(i) digest + 8*(i) ; Output Digest (arg2) +%define K_t(i) K512 + 8*(i) wrt rip ; SHA Constants (static mem) +%define W_t(i) rsp + frame.W + 8*(i) ; Message Schedule (stack frame) +%define WK_2(i) rsp + frame.WK + 8*((i) % 2) ; W[t]+K[t] (stack frame) +; MSG, DIGEST, K_t, W_t are arrays +; WK_2(t) points to 1 of 2 qwords at frame.WK depdending on t being odd/even + +%macro RotateState 0 + ; Rotate symbles a..h right + %xdefine %%TMP h_64 + %xdefine h_64 g_64 + %xdefine g_64 f_64 + %xdefine f_64 e_64 + %xdefine e_64 d_64 + %xdefine d_64 c_64 + %xdefine c_64 b_64 + %xdefine b_64 a_64 + %xdefine a_64 %%TMP +%endmacro + +%macro SHA512_Round 1 +%assign %%t (%1) + + ; Compute Round %%t + mov T1, f_64 ; T1 = f + mov tmp0, e_64 ; tmp = e + xor T1, g_64 ; T1 = f ^ g + ror tmp0, 23 ; 41 ; tmp = e ror 23 + and T1, e_64 ; T1 = (f ^ g) & e + xor tmp0, e_64 ; tmp = (e ror 23) ^ e + xor T1, g_64 ; T1 = ((f ^ g) & e) ^ g = CH(e,f,g) + add T1, [WK_2(%%t)] ; W[t] + K[t] from message scheduler + ror tmp0, 4 ; 18 ; tmp = ((e ror 23) ^ e) ror 4 + xor tmp0, e_64 ; tmp = (((e ror 23) ^ e) ror 4) ^ e + mov T2, a_64 ; T2 = a + add T1, h_64 ; T1 = CH(e,f,g) + W[t] + K[t] + h + ror tmp0, 14 ; 14 ; tmp = ((((e ror23)^e)ror4)^e)ror14 = S1(e) + add T1, 
tmp0 ; T1 = CH(e,f,g) + W[t] + K[t] + S1(e) + mov tmp0, a_64 ; tmp = a + xor T2, c_64 ; T2 = a ^ c + and tmp0, c_64 ; tmp = a & c + and T2, b_64 ; T2 = (a ^ c) & b + xor T2, tmp0 ; T2 = ((a ^ c) & b) ^ (a & c) = Maj(a,b,c) + mov tmp0, a_64 ; tmp = a + ror tmp0, 5 ; 39 ; tmp = a ror 5 + xor tmp0, a_64 ; tmp = (a ror 5) ^ a + add d_64, T1 ; e(next_state) = d + T1 + ror tmp0, 6 ; 34 ; tmp = ((a ror 5) ^ a) ror 6 + xor tmp0, a_64 ; tmp = (((a ror 5) ^ a) ror 6) ^ a + lea h_64, [T1 + T2] ; a(next_state) = T1 + Maj(a,b,c) + ror tmp0, 28 ; 28 ; tmp = ((((a ror5)^a)ror6)^a)ror28 = S0(a) + add h_64, tmp0 ; a(next_state) = T1 + Maj(a,b,c) S0(a) + RotateState +%endmacro + +%macro SHA512_2Sched_2Round_sse 1 +%assign %%t (%1) + + ; Compute rounds %%t-2 and %%t-1 + ; Compute message schedule QWORDS %%t and %%t+1 + + ; Two rounds are computed based on the values for K[t-2]+W[t-2] and + ; K[t-1]+W[t-1] which were previously stored at WK_2 by the message + ; scheduler. + ; The two new schedule QWORDS are stored at [W_t(%%t)] and [W_t(%%t+1)]. + ; They are then added to their respective SHA512 constants at + ; [K_t(%%t)] and [K_t(%%t+1)] and stored at dqword [WK_2(%%t)] + ; For brievity, the comments following vectored instructions only refer to + ; the first of a pair of QWORDS. + ; Eg. XMM2=W[t-2] really means XMM2={W[t-2]|W[t-1]} + ; The computation of the message schedule and the rounds are tightly + ; stitched to take advantage of instruction-level parallelism. + ; For clarity, integer instructions (for the rounds calculation) are indented + ; by one tab. Vectored instructions (for the message scheduler) are indented + ; by two tabs. + + mov T1, f_64 + movdqa xmm2, [W_t(%%t-2)] ; XMM2 = W[t-2] + xor T1, g_64 + and T1, e_64 + movdqa xmm0, xmm2 ; XMM0 = W[t-2] + xor T1, g_64 + add T1, [WK_2(%%t)] + movdqu xmm5, [W_t(%%t-15)] ; XMM5 = W[t-15] + mov tmp0, e_64 + ror tmp0, 23 ; 41 + movdqa xmm3, xmm5 ; XMM3 = W[t-15] + xor tmp0, e_64 + ror tmp0, 4 ; 18 + psrlq xmm0, 61 - 19 ; XMM0 = W[t-2] >> 42 + xor tmp0, e_64 + ror tmp0, 14 ; 14 + psrlq xmm3, (8 - 7) ; XMM3 = W[t-15] >> 1 + add T1, tmp0 + add T1, h_64 + pxor xmm0, xmm2 ; XMM0 = (W[t-2] >> 42) ^ W[t-2] + mov T2, a_64 + xor T2, c_64 + pxor xmm3, xmm5 ; XMM3 = (W[t-15] >> 1) ^ W[t-15] + and T2, b_64 + mov tmp0, a_64 + psrlq xmm0, 19 - 6 ; XMM0 = ((W[t-2]>>42)^W[t-2])>>13 + and tmp0, c_64 + xor T2, tmp0 + psrlq xmm3, (7 - 1) ; XMM3 = ((W[t-15]>>1)^W[t-15])>>6 + mov tmp0, a_64 + ror tmp0, 5 ; 39 + pxor xmm0, xmm2 ; XMM0 = (((W[t-2]>>42)^W[t-2])>>13)^W[t-2] + xor tmp0, a_64 + ror tmp0, 6 ; 34 + pxor xmm3, xmm5 ; XMM3 = (((W[t-15]>>1)^W[t-15])>>6)^W[t-15] + xor tmp0, a_64 + ror tmp0, 28 ; 28 + psrlq xmm0, 6 ; XMM0 = ((((W[t-2]>>42)^W[t-2])>>13)^W[t-2])>>6 + add T2, tmp0 + add d_64, T1 + psrlq xmm3, 1 ; XMM3 = (((W[t-15]>>1)^W[t-15])>>6)^W[t-15]>>1 + lea h_64, [T1 + T2] + RotateState + movdqa xmm1, xmm2 ; XMM1 = W[t-2] + mov T1, f_64 + xor T1, g_64 + movdqa xmm4, xmm5 ; XMM4 = W[t-15] + and T1, e_64 + xor T1, g_64 + psllq xmm1, (64 - 19) - (64 - 61) ; XMM1 = W[t-2] << 42 + add T1, [WK_2(%%t+1)] + mov tmp0, e_64 + psllq xmm4, (64 - 1) - (64 - 8) ; XMM4 = W[t-15] << 7 + ror tmp0, 23 ; 41 + xor tmp0, e_64 + pxor xmm1, xmm2 ; XMM1 = (W[t-2] << 42)^W[t-2] + ror tmp0, 4 ; 18 + xor tmp0, e_64 + pxor xmm4, xmm5 ; XMM4 = (W[t-15]<<7)^W[t-15] + ror tmp0, 14 ; 14 + add T1, tmp0 + psllq xmm1, (64 - 61) ; XMM1 = ((W[t-2] << 42)^W[t-2])<<3 + add T1, h_64 + mov T2, a_64 + psllq xmm4, (64 - 8) ; XMM4 = ((W[t-15]<<7)^W[t-15])<<56 + xor T2, c_64 + and T2, b_64 + pxor xmm0, xmm1 ; 
XMM0 = s1(W[t-2]) + mov tmp0, a_64 + and tmp0, c_64 + movdqu xmm1, [W_t(%%t- 7)] ; XMM1 = W[t-7] + xor T2, tmp0 + pxor xmm3, xmm4 ; XMM3 = s0(W[t-15]) + mov tmp0, a_64 + paddq xmm0, xmm3 ; XMM0 = s1(W[t-2]) + s0(W[t-15]) + ror tmp0, 5 ; 39 + paddq xmm0, [W_t(%%t-16)] ; XMM0 = s1(W[t-2]) + s0(W[t-15]) + W[t-16] + xor tmp0, a_64 + paddq xmm0, xmm1 ; XMM0 = s1(W[t-2]) + W[t-7] + s0(W[t-15]) + W[t-16] + ror tmp0, 6 ; 34 + movdqa [W_t(%%t)], xmm0 ; Store scheduled qwords + xor tmp0, a_64 + paddq xmm0, [K_t(t)] ; Compute W[t]+K[t] + ror tmp0, 28 ; 28 + movdqa [WK_2(t)], xmm0 ; Store W[t]+K[t] for next rounds + add T2, tmp0 + add d_64, T1 + lea h_64, [T1 + T2] + RotateState +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; void sha512_sse4(const void* M, void* D, uint64_t L); +; Purpose: Updates the SHA512 digest stored at D with the message stored in M. +; The size of the message pointed to by M must be an integer multiple of SHA512 +; message blocks. +; L is the message length in SHA512 blocks. +global sha512_sse4:function +global _sha512_sse4:function +sha512_sse4: +_sha512_sse4: + cmp msglen, 0 + je .nowork + + ; Allocate Stack Space + sub rsp, frame_size + + ; Save GPRs + mov [rsp + frame.GPRSAVE + 8 * 0], rbx + mov [rsp + frame.GPRSAVE + 8 * 1], r12 + mov [rsp + frame.GPRSAVE + 8 * 2], r13 + mov [rsp + frame.GPRSAVE + 8 * 3], r14 + mov [rsp + frame.GPRSAVE + 8 * 4], r15 +%ifdef WINABI + mov [rsp + frame.GPRSAVE + 8 * 5], rsi + mov [rsp + frame.GPRSAVE + 8 * 6], rdi +%endif + +.updateblock: + + ; Load state variables + mov a_64, [DIGEST(0)] + mov b_64, [DIGEST(1)] + mov c_64, [DIGEST(2)] + mov d_64, [DIGEST(3)] + mov e_64, [DIGEST(4)] + mov f_64, [DIGEST(5)] + mov g_64, [DIGEST(6)] + mov h_64, [DIGEST(7)] + + %assign t 0 + %rep 80/2 + 1 + ; (80 rounds) / (2 rounds/iteration) + (1 iteration) + ; +1 iteration because the scheduler leads hashing by 1 iteration + %if t < 2 + ; BSWAP 2 QWORDS + movdqa xmm1, [XMM_QWORD_BSWAP wrt rip] + movdqu xmm0, [MSG(t)] + pshufb xmm0, xmm1 ; BSWAP + movdqa [W_t(t)], xmm0 ; Store Scheduled Pair + paddq xmm0, [K_t(t)] ; Compute W[t]+K[t] + movdqa [WK_2(t)], xmm0 ; Store into WK for rounds + %elif t < 16 + ; BSWAP 2 QWORDS; Compute 2 Rounds + movdqu xmm0, [MSG(t)] + pshufb xmm0, xmm1 ; BSWAP + SHA512_Round t - 2 ; Round t-2 + movdqa [W_t(t)], xmm0 ; Store Scheduled Pair + paddq xmm0, [K_t(t)] ; Compute W[t]+K[t] + SHA512_Round t - 1 ; Round t-1 + movdqa [WK_2(t)], xmm0 ; Store W[t]+K[t] into WK + %elif t < 79 + ; Schedule 2 QWORDS; Compute 2 Rounds + SHA512_2Sched_2Round_sse t + %else + ; Compute 2 Rounds + SHA512_Round t - 2 + SHA512_Round t - 1 + %endif + %assign t t+2 + %endrep + + ; Update digest + add [DIGEST(0)], a_64 + add [DIGEST(1)], b_64 + add [DIGEST(2)], c_64 + add [DIGEST(3)], d_64 + add [DIGEST(4)], e_64 + add [DIGEST(5)], f_64 + add [DIGEST(6)], g_64 + add [DIGEST(7)], h_64 + + ; Advance to next message block + add msg, 16*8 + dec msglen + jnz .updateblock + + ; Restore GPRs + mov rbx, [rsp + frame.GPRSAVE + 8 * 0] + mov r12, [rsp + frame.GPRSAVE + 8 * 1] + mov r13, [rsp + frame.GPRSAVE + 8 * 2] + mov r14, [rsp + frame.GPRSAVE + 8 * 3] + mov r15, [rsp + frame.GPRSAVE + 8 * 4] +%ifdef WINABI + mov rsi, [rsp + frame.GPRSAVE + 8 * 5] + mov rdi, [rsp + frame.GPRSAVE + 8 * 6] +%endif + ; Restore Stack Pointer + add rsp, frame_size + +.nowork: + ret + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;; Binary Data + +section .data + +ALIGN 16 + +; Mask for byte-swapping a couple of qwords 
in an XMM register using (v)pshufb. +XMM_QWORD_BSWAP: + ddq 0x08090a0b0c0d0e0f0001020304050607 + +; K[t] used in SHA512 hashing +K512: + dq 0x428a2f98d728ae22,0x7137449123ef65cd + dq 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc + dq 0x3956c25bf348b538,0x59f111f1b605d019 + dq 0x923f82a4af194f9b,0xab1c5ed5da6d8118 + dq 0xd807aa98a3030242,0x12835b0145706fbe + dq 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 + dq 0x72be5d74f27b896f,0x80deb1fe3b1696b1 + dq 0x9bdc06a725c71235,0xc19bf174cf692694 + dq 0xe49b69c19ef14ad2,0xefbe4786384f25e3 + dq 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 + dq 0x2de92c6f592b0275,0x4a7484aa6ea6e483 + dq 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 + dq 0x983e5152ee66dfab,0xa831c66d2db43210 + dq 0xb00327c898fb213f,0xbf597fc7beef0ee4 + dq 0xc6e00bf33da88fc2,0xd5a79147930aa725 + dq 0x06ca6351e003826f,0x142929670a0e6e70 + dq 0x27b70a8546d22ffc,0x2e1b21385c26c926 + dq 0x4d2c6dfc5ac42aed,0x53380d139d95b3df + dq 0x650a73548baf63de,0x766a0abb3c77b2a8 + dq 0x81c2c92e47edaee6,0x92722c851482353b + dq 0xa2bfe8a14cf10364,0xa81a664bbc423001 + dq 0xc24b8b70d0f89791,0xc76c51a30654be30 + dq 0xd192e819d6ef5218,0xd69906245565a910 + dq 0xf40e35855771202a,0x106aa07032bbd1b8 + dq 0x19a4c116b8d2d0c8,0x1e376c085141ab53 + dq 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 + dq 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb + dq 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 + dq 0x748f82ee5defb2fc,0x78a5636f43172f60 + dq 0x84c87814a1f0ab72,0x8cc702081a6439ec + dq 0x90befffa23631e28,0xa4506cebde82bde9 + dq 0xbef9a3f7b2c67915,0xc67178f2e372532b + dq 0xca273eceea26619c,0xd186b8c721c0c207 + dq 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 + dq 0x06f067aa72176fba,0x0a637dc5a2c898a6 + dq 0x113f9804bef90dae,0x1b710b35131c471b + dq 0x28db77f523047d84,0x32caab7b40c72493 + dq 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c + dq 0x4cc5d4becb3e42b6,0x597f299cfc657e2a + dq 0x5fcb6fab3ad6faec,0x6c44198c4a475817 + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git a/src/Crypto/sha512_sse4_x86.asm b/src/Crypto/sha512_sse4_x86.asm new file mode 100644 index 00000000..31c8bd0d --- /dev/null +++ b/src/Crypto/sha512_sse4_x86.asm @@ -0,0 +1,10 @@ + +%ifidn __OUTPUT_FORMAT__,elf +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf32 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif +%ifidn __OUTPUT_FORMAT__,elf64 +section .note.GNU-stack noalloc noexec nowrite progbits +%endif diff --git a/src/Crypto/t1ha.h b/src/Crypto/t1ha.h new file mode 100644 index 00000000..c32d07b5 --- /dev/null +++ b/src/Crypto/t1ha.h @@ -0,0 +1,261 @@ +/* + * Copyright (c) 2016-2018 Positive Technologies, https://www.ptsecurity.com, + * Fast Positive Hash. + * + * Portions Copyright (c) 2010-2018 Leonid Yuriev <leo@yuriev.ru>, + * The 1Hippeus project (t1h). + * + * This software is provided 'as-is', without any express or implied + * warranty. In no event will the authors be held liable for any damages + * arising from the use of this software. + * + * Permission is granted to anyone to use this software for any purpose, + * including commercial applications, and to alter it and redistribute it + * freely, subject to the following restrictions: + * + * 1. The origin of this software must not be misrepresented; you must not + * claim that you wrote the original software. 
If you use this software + * in a product, an acknowledgement in the product documentation would be + * appreciated but is not required. + * 2. Altered source versions must be plainly marked as such, and must not be + * misrepresented as being the original software. + * 3. This notice may not be removed or altered from any source distribution. + */ + +/* + * t1ha = { Fast Positive Hash, aka "Позитивный Хэш" } + * by [Positive Technologies](https://www.ptsecurity.ru) + * + * Briefly, it is a 64-bit Hash Function: + * 1. Created for 64-bit little-endian platforms, in predominantly for x86_64, + * but portable and without penalties it can run on any 64-bit CPU. + * 2. In most cases up to 15% faster than City64, xxHash, mum-hash, metro-hash + * and all others portable hash-functions (which do not use specific + * hardware tricks). + * 3. Not suitable for cryptography. + * + * The Future will Positive. Всё будет хорошо. + * + * ACKNOWLEDGEMENT: + * The t1ha was originally developed by Leonid Yuriev (Леонид Юрьев) + * for The 1Hippeus project - zerocopy messaging in the spirit of Sparta! + */ + +#pragma once + +/***************************************************************************** + * + * PLEASE PAY ATTENTION TO THE FOLLOWING NOTES + * about macros definitions which controls t1ha behaviour and/or performance. + * + * + * 1) T1HA_SYS_UNALIGNED_ACCESS = Defines the system/platform/CPU/architecture + * abilities for unaligned data access. + * + * By default, when the T1HA_SYS_UNALIGNED_ACCESS not defined, + * it will defined on the basis hardcoded knowledge about of capabilities + * of most common CPU architectures. But you could override this + * default behavior when build t1ha library itself: + * + * // To disable unaligned access at all. + * #define T1HA_SYS_UNALIGNED_ACCESS 0 + * + * // To enable unaligned access, but indicate that it significally slow. + * #define T1HA_SYS_UNALIGNED_ACCESS 1 + * + * // To enable unaligned access, and indicate that it effecient. + * #define T1HA_SYS_UNALIGNED_ACCESS 2 + * + * + * 2) T1HA_USE_FAST_ONESHOT_READ = Controls the data reads at the end of buffer. + * + * When defined to non-zero, t1ha will use 'one shot' method for reading + * up to 8 bytes at the end of data. In this case just the one 64-bit read + * will be performed even when the available less than 8 bytes. + * + * This is little bit faster that switching by length of data tail. + * Unfortunately this will triggering a false-positive alarms from Valgrind, + * AddressSanitizer and other similar tool. + * + * By default, t1ha defines it to 1, but you could override this + * default behavior when build t1ha library itself: + * + * // For little bit faster and small code. + * #define T1HA_USE_FAST_ONESHOT_READ 1 + * + * // For calmness if doubt. + * #define T1HA_USE_FAST_ONESHOT_READ 0 + * + * + * 3) T1HA0_RUNTIME_SELECT = Controls choice fastest function in runtime. + * + * t1ha library offers the t1ha0() function as the fastest for current CPU. + * But actual CPU's features/capabilities and may be significantly different, + * especially on x86 platform. Therefore, internally, t1ha0() may require + * dynamic dispatching for choice best implementation. + * + * By default, t1ha enables such runtime choice and (may be) corresponding + * indirect calls if it reasonable, but you could override this default + * behavior when build t1ha library itself: + * + * // To enable runtime choice of fastest implementation. 
+ * #define T1HA0_RUNTIME_SELECT 1 + * + * // To disable runtime choice of fastest implementation. + * #define T1HA0_RUNTIME_SELECT 0 + * + * When T1HA0_RUNTIME_SELECT is nonzero the t1ha0_resolve() function could + * be used to get actual t1ha0() implementation address at runtime. This is + * useful for two cases: + * - calling by local pointer-to-function usually is little + * bit faster (less overhead) than via a PLT thru the DSO boundary. + * - GNU Indirect functions (see below) don't supported by environment + * and calling by t1ha0_funcptr is not available and/or expensive. + * + * 4) T1HA_USE_INDIRECT_FUNCTIONS = Controls usage of GNU Indirect functions. + * + * In continue of T1HA0_RUNTIME_SELECT the T1HA_USE_INDIRECT_FUNCTIONS + * controls usage of ELF indirect functions feature. In general, when + * available, this reduces overhead of indirect function's calls though + * a DSO-bundary (https://sourceware.org/glibc/wiki/GNU_IFUNC). + * + * By default, t1ha engage GNU Indirect functions when it available + * and useful, but you could override this default behavior when build + * t1ha library itself: + * + * // To enable use of GNU ELF Indirect functions. + * #define T1HA_USE_INDIRECT_FUNCTIONS 1 + * + * // To disable use of GNU ELF Indirect functions. This may be useful + * // if the actual toolchain or the system's loader don't support ones. + * #define T1HA_USE_INDIRECT_FUNCTIONS 0 + * + * 5) T1HA0_AESNI_AVAILABLE = Controls AES-NI detection and dispatching on x86. + * + * In continue of T1HA0_RUNTIME_SELECT the T1HA0_AESNI_AVAILABLE controls + * detection and usage of AES-NI CPU's feature. On the other hand, this + * requires compiling parts of t1ha library with certain properly options, + * and could be difficult or inconvenient in some cases. + * + * By default, t1ha engade AES-NI for t1ha0() on the x86 platform, but + * you could override this default behavior when build t1ha library itself: + * + * // To disable detection and usage of AES-NI instructions for t1ha0(). + * // This may be useful when you unable to build t1ha library properly + * // or known that AES-NI will be unavailable at the deploy. + * #define T1HA0_AESNI_AVAILABLE 0 + * + * // To force detection and usage of AES-NI instructions for t1ha0(), + * // but I don't known reasons to anybody would need this. + * #define T1HA0_AESNI_AVAILABLE 1 + * + * 6) T1HA0_DISABLED, T1HA1_DISABLED, T1HA2_DISABLED = Controls availability of + * t1ha functions. + * + * In some cases could be useful to import/use only few of t1ha functions + * or just the one. So, this definitions allows disable corresponding parts + * of t1ha library. + * + * // To disable t1ha0(), t1ha0_32le(), t1ha0_32be() and all AES-NI. + * #define T1HA0_DISABLED + * + * // To disable t1ha1_le() and t1ha1_be(). + * #define T1HA1_DISABLED + * + * // To disable t1ha2_atonce(), t1ha2_atonce128() and so on. 
+ * #define T1HA2_DISABLED + * + *****************************************************************************/ + +#define T1HA_VERSION_MAJOR 2 +#define T1HA_VERSION_MINOR 1 +#define T1HA_VERSION_RELEASE 0 + +#include "Common/Tcdefs.h" +#include "config.h" +#include "misc.h" + +#ifdef __cplusplus +extern "C" { +#endif + +#define T1HA_ALIGN_PREFIX CRYPTOPP_ALIGN_DATA(32) +#define T1HA_ALIGN_SUFFIX + +#ifdef _MSC_VER +#define uint8_t uint8 +#define uint16_t uint16 +#define uint32_t uint32 +#define uint64_t uint64 +#endif + +typedef union T1HA_ALIGN_PREFIX t1ha_state256 { + uint8_t bytes[32]; + uint32_t u32[8]; + uint64_t u64[4]; + struct { + uint64_t a, b, c, d; + } n; +} t1ha_state256_t T1HA_ALIGN_SUFFIX; + +typedef struct t1ha_context { + t1ha_state256_t state; + t1ha_state256_t buffer; + size_t partial; + uint64_t total; +} t1ha_context_t; + +/****************************************************************************** + * + * t1ha2 = 64 and 128-bit, SLIGHTLY MORE ATTENTION FOR QUALITY AND STRENGTH. + * + * - The recommended version of "Fast Positive Hash" with good quality + * for checksum, hash tables and fingerprinting. + * - Portable and extremely efficiency on modern 64-bit CPUs. + * Designed for 64-bit little-endian platforms, + * in other cases will runs slowly. + * - Great quality of hashing and still faster than other non-t1ha hashes. + * Provides streaming mode and 128-bit result. + * + * Note: Due performance reason 64- and 128-bit results are completely + * different each other, i.e. 64-bit result is NOT any part of 128-bit. + */ + +/* The at-once variant with 64-bit result */ +uint64_t t1ha2_atonce(const void *data, size_t length, uint64_t seed); + +/* The at-once variant with 128-bit result. + * Argument `extra_result` is NOT optional and MUST be valid. + * The high 64-bit part of 128-bit hash will be always unconditionally + * stored to the address given by `extra_result` argument. */ +uint64_t t1ha2_atonce128(uint64_t *__restrict extra_result, + const void *__restrict data, size_t length, + uint64_t seed); + +/* The init/update/final trinity for streaming. + * Return 64 or 128-bit result depentently from `extra_result` argument. */ +void t1ha2_init(t1ha_context_t *ctx, uint64_t seed_x, uint64_t seed_y); +void t1ha2_update(t1ha_context_t *__restrict ctx, + const void *__restrict data, size_t length); + +/* Argument `extra_result` is optional and MAY be NULL. + * - If `extra_result` is NOT NULL then the 128-bit hash will be calculated, + * and high 64-bit part of it will be stored to the address given + * by `extra_result` argument. + * - Otherwise the 64-bit hash will be calculated + * and returned from function directly. + * + * Note: Due performance reason 64- and 128-bit results are completely + * different each other, i.e. 64-bit result is NOT any part of 128-bit. */ +uint64_t t1ha2_final(t1ha_context_t *__restrict ctx, + uint64_t *__restrict extra_result /* optional */); + + +int t1ha_selfcheck__t1ha2_atonce(void); +int t1ha_selfcheck__t1ha2_atonce128(void); +int t1ha_selfcheck__t1ha2_stream(void); +int t1ha_selfcheck__t1ha2(void); + +#ifdef __cplusplus +} +#endif diff --git a/src/Crypto/t1ha2.c b/src/Crypto/t1ha2.c new file mode 100644 index 00000000..1a67f9c4 --- /dev/null +++ b/src/Crypto/t1ha2.c @@ -0,0 +1,323 @@ +/* + * Copyright (c) 2016-2018 Positive Technologies, https://www.ptsecurity.com, + * Fast Positive Hash. + * + * Portions Copyright (c) 2010-2018 Leonid Yuriev <leo@yuriev.ru>, + * The 1Hippeus project (t1h). 
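As a usage sketch for the API declared in t1ha.h above (illustrative only; the buffer, seed and output names are made up), the one-shot and streaming variants are driven like this:

#include <stddef.h>
#include <stdint.h>
#include "t1ha.h"

/* Hashes buf[0..len) three ways using the t1ha2 API declared above. */
static void t1ha2_usage_example(const void *buf, size_t len, uint64_t out[3])
{
    const uint64_t seed = 42;    /* arbitrary example seed                      */
    uint64_t hi128;              /* receives the high 64 bits from atonce128    */
    t1ha_context_t ctx;

    out[0] = t1ha2_atonce(buf, len, seed);            /* 64-bit one-shot        */
    out[1] = t1ha2_atonce128(&hi128, buf, len, seed); /* low 64 bits of 128-bit */

    /* Streaming: init/update/final; a NULL extra_result selects the 64-bit
     * result. The streaming value differs from the at-once value (compare
     * t1ha_refval_2atonce with t1ha_refval_2stream in t1ha2_selfcheck.c below). */
    t1ha2_init(&ctx, seed, seed);
    t1ha2_update(&ctx, buf, len);
    out[2] = t1ha2_final(&ctx, NULL);
    (void)hi128;
}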
+ * + * This software is provided 'as-is', without any express or implied + * warranty. In no event will the authors be held liable for any damages + * arising from the use of this software. + * + * Permission is granted to anyone to use this software for any purpose, + * including commercial applications, and to alter it and redistribute it + * freely, subject to the following restrictions: + * + * 1. The origin of this software must not be misrepresented; you must not + * claim that you wrote the original software. If you use this software + * in a product, an acknowledgement in the product documentation would be + * appreciated but is not required. + * 2. Altered source versions must be plainly marked as such, and must not be + * misrepresented as being the original software. + * 3. This notice may not be removed or altered from any source distribution. + */ + +/* + * t1ha = { Fast Positive Hash, aka "Позитивный Хэш" } + * by [Positive Technologies](https://www.ptsecurity.ru) + * + * Briefly, it is a 64-bit Hash Function: + * 1. Created for 64-bit little-endian platforms, in predominantly for x86_64, + * but portable and without penalties it can run on any 64-bit CPU. + * 2. In most cases up to 15% faster than City64, xxHash, mum-hash, metro-hash + * and all others portable hash-functions (which do not use specific + * hardware tricks). + * 3. Not suitable for cryptography. + * + * The Future will Positive. Всё будет хорошо. + * + * ACKNOWLEDGEMENT: + * The t1ha was originally developed by Leonid Yuriev (Леонид Юрьев) + * for The 1Hippeus project - zerocopy messaging in the spirit of Sparta! + */ + +#include "t1ha_bits.h" +#include "t1ha_selfcheck.h" + +static __always_inline void init_ab(t1ha_state256_t *s, uint64_t x, + uint64_t y) { + s->n.a = x; + s->n.b = y; +} + +static __always_inline void init_cd(t1ha_state256_t *s, uint64_t x, + uint64_t y) { + s->n.c = rot64(y, 23) + ~x; + s->n.d = ~y + rot64(x, 19); +} + +/* TODO: C++ template in the next version */ +#define T1HA2_UPDATE(ENDIANNES, ALIGNESS, state, v) \ + do { \ + t1ha_state256_t *const s = state; \ + const uint64_t w0 = fetch64_##ENDIANNES##_##ALIGNESS(v + 0); \ + const uint64_t w1 = fetch64_##ENDIANNES##_##ALIGNESS(v + 1); \ + const uint64_t w2 = fetch64_##ENDIANNES##_##ALIGNESS(v + 2); \ + const uint64_t w3 = fetch64_##ENDIANNES##_##ALIGNESS(v + 3); \ + \ + const uint64_t d02 = w0 + rot64(w2 + s->n.d, 56); \ + const uint64_t c13 = w1 + rot64(w3 + s->n.c, 19); \ + s->n.d ^= s->n.b + rot64(w1, 38); \ + s->n.c ^= s->n.a + rot64(w0, 57); \ + s->n.b ^= prime_6 * (c13 + w2); \ + s->n.a ^= prime_5 * (d02 + w3); \ + } while (0) + +static __always_inline void squash(t1ha_state256_t *s) { + s->n.a ^= prime_6 * (s->n.c + rot64(s->n.d, 23)); + s->n.b ^= prime_5 * (rot64(s->n.c, 19) + s->n.d); +} + +/* TODO: C++ template in the next version */ +#define T1HA2_LOOP(ENDIANNES, ALIGNESS, state, data, len) \ + do { \ + const void *detent = (const uint8_t *)data + len - 31; \ + do { \ + const uint64_t *v = (const uint64_t *)data; \ + data = (const uint64_t *)data + 4; \ + prefetch(data); \ + T1HA2_UPDATE(le, ALIGNESS, state, v); \ + } while (likely(data < detent)); \ + } while (0) + +/* TODO: C++ template in the next version */ +#define T1HA2_TAIL_AB(ENDIANNES, ALIGNESS, state, data, len) \ + do { \ + t1ha_state256_t *const s = state; \ + const uint64_t *v = (const uint64_t *)data; \ + switch (len) { \ + default: \ + mixup64(&s->n.a, &s->n.b, fetch64_##ENDIANNES##_##ALIGNESS(v++), \ + prime_4); \ + /* fall through */ \ + case 24: \ + case 
23: \ + case 22: \ + case 21: \ + case 20: \ + case 19: \ + case 18: \ + case 17: \ + mixup64(&s->n.b, &s->n.a, fetch64_##ENDIANNES##_##ALIGNESS(v++), \ + prime_3); \ + /* fall through */ \ + case 16: \ + case 15: \ + case 14: \ + case 13: \ + case 12: \ + case 11: \ + case 10: \ + case 9: \ + mixup64(&s->n.a, &s->n.b, fetch64_##ENDIANNES##_##ALIGNESS(v++), \ + prime_2); \ + /* fall through */ \ + case 8: \ + case 7: \ + case 6: \ + case 5: \ + case 4: \ + case 3: \ + case 2: \ + case 1: \ + mixup64(&s->n.b, &s->n.a, tail64_##ENDIANNES##_##ALIGNESS(v, len), \ + prime_1); \ + /* fall through */ \ + case 0: \ + return final64(s->n.a, s->n.b); \ + } \ + } while (0) + +/* TODO: C++ template in the next version */ +#define T1HA2_TAIL_ABCD(ENDIANNES, ALIGNESS, state, data, len) \ + do { \ + t1ha_state256_t *const s = state; \ + const uint64_t *v = (const uint64_t *)data; \ + switch (len) { \ + default: \ + mixup64(&s->n.a, &s->n.d, fetch64_##ENDIANNES##_##ALIGNESS(v++), \ + prime_4); \ + /* fall through */ \ + case 24: \ + case 23: \ + case 22: \ + case 21: \ + case 20: \ + case 19: \ + case 18: \ + case 17: \ + mixup64(&s->n.b, &s->n.a, fetch64_##ENDIANNES##_##ALIGNESS(v++), \ + prime_3); \ + /* fall through */ \ + case 16: \ + case 15: \ + case 14: \ + case 13: \ + case 12: \ + case 11: \ + case 10: \ + case 9: \ + mixup64(&s->n.c, &s->n.b, fetch64_##ENDIANNES##_##ALIGNESS(v++), \ + prime_2); \ + /* fall through */ \ + case 8: \ + case 7: \ + case 6: \ + case 5: \ + case 4: \ + case 3: \ + case 2: \ + case 1: \ + mixup64(&s->n.d, &s->n.c, tail64_##ENDIANNES##_##ALIGNESS(v, len), \ + prime_1); \ + /* fall through */ \ + case 0: \ + return final128(s->n.a, s->n.b, s->n.c, s->n.d, extra_result); \ + } \ + } while (0) + +static __always_inline uint64_t final128(uint64_t a, uint64_t b, uint64_t c, + uint64_t d, uint64_t *h) { + mixup64(&a, &b, rot64(c, 41) ^ d, prime_0); + mixup64(&b, &c, rot64(d, 23) ^ a, prime_6); + mixup64(&c, &d, rot64(a, 19) ^ b, prime_5); + mixup64(&d, &a, rot64(b, 31) ^ c, prime_4); + *h = c + d; + return a ^ b; +} + +//------------------------------------------------------------------------------ + +uint64_t t1ha2_atonce(const void *data, size_t length, uint64_t seed) { + t1ha_state256_t state; + init_ab(&state, seed, length); + +#if T1HA_SYS_UNALIGNED_ACCESS == T1HA_UNALIGNED_ACCESS__EFFICIENT + if (unlikely(length > 32)) { + init_cd(&state, seed, length); + T1HA2_LOOP(le, unaligned, &state, data, length); + squash(&state); + length &= 31; + } + T1HA2_TAIL_AB(le, unaligned, &state, data, length); +#else + if ((((uintptr_t)data) & (ALIGNMENT_64 - 1)) != 0) { + if (unlikely(length > 32)) { + init_cd(&state, seed, length); + T1HA2_LOOP(le, unaligned, &state, data, length); + squash(&state); + length &= 31; + } + T1HA2_TAIL_AB(le, unaligned, &state, data, length); + } else { + if (unlikely(length > 32)) { + init_cd(&state, seed, length); + T1HA2_LOOP(le, aligned, &state, data, length); + squash(&state); + length &= 31; + } + T1HA2_TAIL_AB(le, aligned, &state, data, length); + } +#endif +} + +uint64_t t1ha2_atonce128(uint64_t *__restrict extra_result, + const void *__restrict data, size_t length, + uint64_t seed) { + t1ha_state256_t state; + init_ab(&state, seed, length); + init_cd(&state, seed, length); + +#if T1HA_SYS_UNALIGNED_ACCESS == T1HA_UNALIGNED_ACCESS__EFFICIENT + if (unlikely(length > 32)) { + T1HA2_LOOP(le, unaligned, &state, data, length); + length &= 31; + } + T1HA2_TAIL_ABCD(le, unaligned, &state, data, length); +#else + if ((((uintptr_t)data) & (ALIGNMENT_64 - 
1)) != 0) { + if (unlikely(length > 32)) { + T1HA2_LOOP(le, unaligned, &state, data, length); + length &= 31; + } + T1HA2_TAIL_ABCD(le, unaligned, &state, data, length); + } else { + if (unlikely(length > 32)) { + T1HA2_LOOP(le, aligned, &state, data, length); + length &= 31; + } + T1HA2_TAIL_ABCD(le, aligned, &state, data, length); + } +#endif +} + +//------------------------------------------------------------------------------ + +void t1ha2_init(t1ha_context_t *ctx, uint64_t seed_x, uint64_t seed_y) { + init_ab(&ctx->state, seed_x, seed_y); + init_cd(&ctx->state, seed_x, seed_y); + ctx->partial = 0; + ctx->total = 0; +} + +void t1ha2_update(t1ha_context_t *__restrict ctx, const void *__restrict data, + size_t length) { + ctx->total += length; + + if (ctx->partial) { + const size_t left = 32 - ctx->partial; + const size_t chunk = (length >= left) ? left : length; + memcpy(ctx->buffer.bytes + ctx->partial, data, chunk); + ctx->partial += chunk; + if (ctx->partial < 32) { + assert(left >= length); + return; + } + ctx->partial = 0; + data = (const uint8_t *)data + chunk; + length -= chunk; + T1HA2_UPDATE(le, aligned, &ctx->state, ctx->buffer.u64); + } + + if (length >= 32) { +#if T1HA_SYS_UNALIGNED_ACCESS == T1HA_UNALIGNED_ACCESS__EFFICIENT + T1HA2_LOOP(le, unaligned, &ctx->state, data, length); +#else + if ((((uintptr_t)data) & (ALIGNMENT_64 - 1)) != 0) { + T1HA2_LOOP(le, unaligned, &ctx->state, data, length); + } else { + T1HA2_LOOP(le, aligned, &ctx->state, data, length); + } +#endif + length &= 31; + } + + if (length) + memcpy(ctx->buffer.bytes, data, ctx->partial = length); +} + +uint64_t t1ha2_final(t1ha_context_t *__restrict ctx, + uint64_t *__restrict extra_result) { + uint64_t bits = (ctx->total << 3) ^ (UINT64_C(1) << 63); +#if __BYTE_ORDER__ != __ORDER_LITTLE_ENDIAN__ + bits = bswap64(bits); +#endif + t1ha2_update(ctx, &bits, 8); + + if (likely(!extra_result)) { + squash(&ctx->state); + T1HA2_TAIL_AB(le, aligned, &ctx->state, ctx->buffer.u64, ctx->partial); + } + + T1HA2_TAIL_ABCD(le, aligned, &ctx->state, ctx->buffer.u64, ctx->partial); +} diff --git a/src/Crypto/t1ha2_selfcheck.c b/src/Crypto/t1ha2_selfcheck.c new file mode 100644 index 00000000..35e21916 --- /dev/null +++ b/src/Crypto/t1ha2_selfcheck.c @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2016-2018 Positive Technologies, https://www.ptsecurity.com, + * Fast Positive Hash. + * + * Portions Copyright (c) 2010-2018 Leonid Yuriev <leo@yuriev.ru>, + * The 1Hippeus project (t1h). + * + * This software is provided 'as-is', without any express or implied + * warranty. In no event will the authors be held liable for any damages + * arising from the use of this software. + * + * Permission is granted to anyone to use this software for any purpose, + * including commercial applications, and to alter it and redistribute it + * freely, subject to the following restrictions: + * + * 1. The origin of this software must not be misrepresented; you must not + * claim that you wrote the original software. If you use this software + * in a product, an acknowledgement in the product documentation would be + * appreciated but is not required. + * 2. Altered source versions must be plainly marked as such, and must not be + * misrepresented as being the original software. + * 3. This notice may not be removed or altered from any source distribution. + */ + +/* + * t1ha = { Fast Positive Hash, aka "Позитивный Хэш" } + * by [Positive Technologies](https://www.ptsecurity.ru) + * + * Briefly, it is a 64-bit Hash Function: + * 1. 
Created for 64-bit little-endian platforms, in predominantly for x86_64, + * but portable and without penalties it can run on any 64-bit CPU. + * 2. In most cases up to 15% faster than City64, xxHash, mum-hash, metro-hash + * and all others portable hash-functions (which do not use specific + * hardware tricks). + * 3. Not suitable for cryptography. + * + * The Future will Positive. Всё будет хорошо. + * + * ACKNOWLEDGEMENT: + * The t1ha was originally developed by Leonid Yuriev (Леонид Юрьев) + * for The 1Hippeus project - zerocopy messaging in the spirit of Sparta! + */ + + +#include "t1ha_bits.h" +#include "t1ha_selfcheck.h" + +/* *INDENT-OFF* */ +/* clang-format off */ + +const uint64_t t1ha_refval_2atonce[81] = { 0, + 0x772C7311BE32FF42, 0x444753D23F207E03, 0x71F6DF5DA3B4F532, 0x555859635365F660, + 0xE98808F1CD39C626, 0x2EB18FAF2163BB09, 0x7B9DD892C8019C87, 0xE2B1431C4DA4D15A, + 0x1984E718A5477F70, 0x08DD17B266484F79, 0x4C83A05D766AD550, 0x92DCEBB131D1907D, + 0xD67BC6FC881B8549, 0xF6A9886555FBF66B, 0x6E31616D7F33E25E, 0x36E31B7426E3049D, + 0x4F8E4FAF46A13F5F, 0x03EB0CB3253F819F, 0x636A7769905770D2, 0x3ADF3781D16D1148, + 0x92D19CB1818BC9C2, 0x283E68F4D459C533, 0xFA83A8A88DECAA04, 0x8C6F00368EAC538C, + 0x7B66B0CF3797B322, 0x5131E122FDABA3FF, 0x6E59FF515C08C7A9, 0xBA2C5269B2C377B0, + 0xA9D24FD368FE8A2B, 0x22DB13D32E33E891, 0x7B97DFC804B876E5, 0xC598BDFCD0E834F9, + 0xB256163D3687F5A7, 0x66D7A73C6AEF50B3, 0x25A7201C85D9E2A3, 0x911573EDA15299AA, + 0x5C0062B669E18E4C, 0x17734ADE08D54E28, 0xFFF036E33883F43B, 0xFE0756E7777DF11E, + 0x37972472D023F129, 0x6CFCE201B55C7F57, 0xE019D1D89F02B3E1, 0xAE5CC580FA1BB7E6, + 0x295695FB7E59FC3A, 0x76B6C820A40DD35E, 0xB1680A1768462B17, 0x2FB6AF279137DADA, + 0x28FB6B4366C78535, 0xEC278E53924541B1, 0x164F8AAB8A2A28B5, 0xB6C330AEAC4578AD, + 0x7F6F371070085084, 0x94DEAD60C0F448D3, 0x99737AC232C559EF, 0x6F54A6F9CA8EDD57, + 0x979B01E926BFCE0C, 0xF7D20BC85439C5B4, 0x64EDB27CD8087C12, 0x11488DE5F79C0BE2, + 0x25541DDD1680B5A4, 0x8B633D33BE9D1973, 0x404A3113ACF7F6C6, 0xC59DBDEF8550CD56, + 0x039D23C68F4F992C, 0x5BBB48E4BDD6FD86, 0x41E312248780DF5A, 0xD34791CE75D4E94F, + 0xED523E5D04DCDCFF, 0x7A6BCE0B6182D879, 0x21FB37483CAC28D8, 0x19A1B66E8DA878AD, + 0x6F804C5295B09ABE, 0x2A4BE5014115BA81, 0xA678ECC5FC924BE0, 0x50F7A54A99A36F59, + 0x0FD7E63A39A66452, 0x5AB1B213DD29C4E4, 0xF3ED80D9DF6534C5, 0xC736B12EF90615FD +}; + +const uint64_t t1ha_refval_2atonce128[81] = { 0x4EC7F6A48E33B00A, + 0xB7B7FAA5BD7D8C1E, 0x3269533F66534A76, 0x6C3EC6B687923BFC, 0xC096F5E7EFA471A9, + 0x79D8AFB550CEA471, 0xCEE0507A20FD5119, 0xFB04CFFC14A9F4BF, 0xBD4406E923807AF2, + 0x375C02FF11010491, 0xA6EA4C2A59E173FF, 0xE0A606F0002CADDF, 0xE13BEAE6EBC07897, + 0xF069C2463E48EA10, 0x75BEE1A97089B5FA, 0x378F22F8DE0B8085, 0x9C726FC4D53D0D8B, + 0x71F6130A2D08F788, 0x7A9B20433FF6CF69, 0xFF49B7CD59BF6D61, 0xCCAAEE0D1CA9C6B3, + 0xC77889D86039D2AD, 0x7B378B5BEA9B0475, 0x6520BFA79D59AD66, 0x2441490CB8A37267, + 0xA715A66B7D5CF473, 0x9AE892C88334FD67, 0xD2FFE9AEC1D2169A, 0x790B993F18B18CBB, + 0xA0D02FBCF6A7B1AD, 0xA90833E6F151D0C1, 0x1AC7AFA37BD79BE0, 0xD5383628B2881A24, + 0xE5526F9D63F9F8F1, 0xC1F165A01A6D1F4D, 0x6CCEF8FF3FCFA3F2, 0x2030F18325E6DF48, + 0x289207230E3FB17A, 0x077B66F713A3C4B9, 0x9F39843CAF871754, 0x512FDA0F808ACCF3, + 0xF4D9801CD0CD1F14, 0x28A0C749ED323638, 0x94844CAFA671F01C, 0xD0E261876B8ACA51, + 0x8FC2A648A4792EA2, 0x8EF87282136AF5FE, 0x5FE6A54A9FBA6B40, 0xA3CC5B8FE6223D54, + 0xA8C3C0DD651BB01C, 0x625E9FDD534716F3, 0x1AB2604083C33AC5, 0xDE098853F8692F12, + 0x4B0813891BD87624, 0x4AB89C4553D182AD, 
0x92C15AA2A3C27ADA, 0xFF2918D68191F5D9, + 0x06363174F641C325, 0x667112ADA74A2059, 0x4BD605D6B5E53D7D, 0xF2512C53663A14C8, + 0x21857BCB1852667C, 0xAFBEBD0369AEE228, 0x7049340E48FBFD6B, 0x50710E1924F46954, + 0x869A75E04A976A3F, 0x5A41ABBDD6373889, 0xA781778389B4B188, 0x21A3AFCED6C925B6, + 0x107226192EC10B42, 0x62A862E84EC2F9B1, 0x2B15E91659606DD7, 0x613934D1F9EC5A42, + 0x4DC3A96DC5361BAF, 0xC80BBA4CB5F12903, 0x3E3EDAE99A7D6987, 0x8F97B2D55941DCB0, + 0x4C9787364C3E4EC1, 0xEF0A2D07BEA90CA7, 0x5FABF32C70AEEAFB, 0x3356A5CFA8F23BF4 +}; + +const uint64_t t1ha_refval_2stream[81] = { 0x3C8426E33CB41606, + 0xFD74BE70EE73E617, 0xF43DE3CDD8A20486, 0x882FBCB37E8EA3BB, 0x1AA2CDD34CAA3D4B, + 0xEE755B2BFAE07ED5, 0xD4E225250D92E213, 0xA09B49083205965B, 0xD47B21724EF9EC9E, + 0xAC888FC3858CEE11, 0x94F820D85736F244, 0x1707951CCA920932, 0x8E0E45603F7877F0, + 0x9FD2592C0E3A7212, 0x9A66370F3AE3D427, 0xD33382D2161DE2B7, 0x9A35BE079DA7115F, + 0x73457C7FF58B4EC3, 0xBE8610BD53D7CE98, 0x65506DFE5CCD5371, 0x286A321AF9D5D9FA, + 0xB81EF9A7EF3C536D, 0x2CFDB5E6825C6E86, 0xB2A58CBFDFDD303A, 0xD26094A42B950635, + 0xA34D666A5F02AD9A, 0x0151E013EBCC72E5, 0x9254A6EA7FCB6BB5, 0x10C9361B3869DC2B, + 0xD7EC55A060606276, 0xA2FF7F8BF8976FFD, 0xB5181BB6852DCC88, 0x0EE394BB6178BAFF, + 0x3A8B4B400D21B89C, 0xEC270461970960FD, 0x615967FAB053877E, 0xFA51BF1CFEB4714C, + 0x29FDA8383070F375, 0xC3B663061BC52EDA, 0x192BBAF1F1A57923, 0x6D193B52F93C53AF, + 0x7F6F5639FE87CA1E, 0x69F7F9140B32EDC8, 0xD0F2416FB24325B6, 0x62C0E37FEDD49FF3, + 0x57866A4B809D373D, 0x9848D24BD935E137, 0xDFC905B66734D50A, 0x9A938DD194A68529, + 0x8276C44DF0625228, 0xA4B35D00AD67C0AB, 0x3D9CB359842DB452, 0x4241BFA8C23B267F, + 0x650FA517BEF15952, 0x782DE2ABD8C7B1E1, 0x4EAE456166CA3E15, 0x40CDF3A02614E337, + 0xAD84092C46102172, 0x0C68479B03F9A167, 0x7E1BA046749E181C, 0x3F3AB41A697382C1, + 0xC5E5DD6586EBFDC4, 0xFF926CD4EB02555C, 0x035CFE67F89E709B, 0x89F06AB6464A1B9D, + 0x8EFF58F3F7DEA758, 0x8B54AC657902089F, 0xC6C4F1F9F8DA4D64, 0xBDB729048AAAC93A, + 0xEA76BA628F5E5CD6, 0x742159B728B8A979, 0x6D151CD3C720E53D, 0xE97FFF9368FCDC42, + 0xCA5B38314914FBDA, 0xDD92C91D8B858EAE, 0x66E5F07CF647CBF2, 0xD4CF9B42F4985AFB, + 0x72AE17AC7D92F6B7, 0xB8206B22AB0472E1, 0x385876B5CFD42479, 0x03294A249EBE6B26 +}; + +const uint64_t t1ha_refval_2stream128[81] = { 0xCD2801D3B92237D6, + 0x10E4D47BD821546D, 0x9100704B9D65CD06, 0xD6951CB4016313EF, 0x24DB636F96F474DA, + 0x3F4AF7DF3C49E422, 0xBFF25B8AF143459B, 0xA157EC13538BE549, 0xD3F5F52C47DBD419, + 0x0EF3D7D735AF1575, 0x46B7B892823F7B1B, 0xEE22EA4655213289, 0x56AD76F02FE929BC, + 0x9CF6CD1AC886546E, 0xAF45CE47AEA0B933, 0x535F9DC09F3996B7, 0x1F0C3C01694AE128, + 0x18495069BE0766F7, 0x37E5FFB3D72A4CB1, 0x6D6C2E9299F30709, 0x4F39E693F50B41E3, + 0xB11FC4EF0658E116, 0x48BFAACB78E5079B, 0xE1B4C89C781B3AD0, 0x81D2F34888D333A1, + 0xF6D02270D2EA449C, 0xC884C3C2C3CE1503, 0x711AE16BA157A9B9, 0x1E6140C642558C9D, + 0x35AB3D238F5DC55B, 0x33F07B6AEF051177, 0xE57336776EEFA71C, 0x6D445F8318BA3752, + 0xD4F5F6631934C988, 0xD5E260085727C4A2, 0x5B54B41EC180B4FA, 0x7F5D75769C15A898, + 0xAE5A6DB850CA33C6, 0x038CCB8044663403, 0xDA16310133DC92B8, 0x6A2FFB7AB2B7CE2B, + 0xDC1832D9229BAE20, 0x8C62C479F5ABC9E4, 0x5EB7B617857C9CCB, 0xB79CF7D749A1E80D, + 0xDE7FAC3798324FD3, 0x8178911813685D06, 0x6A726CBD394D4410, 0x6CBE6B3280DA1113, + 0x6829BA4410CF1148, 0xFA7E417EB26C5BC6, 0x22ED87884D6E3A49, 0x15F1472D5115669D, + 0x2EA0B4C8BF69D318, 0xDFE87070AA545503, 0x6B4C14B5F7144AB9, 0xC1ED49C06126551A, + 0x351919FC425C3899, 0x7B569C0FA6F1BD3E, 0x713AC2350844CFFD, 0xE9367F9A638C2FF3, + 
0x97F17D325AEA0786, 0xBCB907CC6CF75F91, 0x0CB7517DAF247719, 0xBE16093CC45BE8A9, + 0x786EEE97359AD6AB, 0xB7AFA4F326B97E78, 0x2694B67FE23E502E, 0x4CB492826E98E0B4, + 0x838D119F74A416C7, 0x70D6A91E4E5677FD, 0xF3E4027AD30000E6, 0x9BDF692795807F77, + 0x6A371F966E034A54, 0x8789CF41AE4D67EF, 0x02688755484D60AE, 0xD5834B3A4BF5CE42, + 0x9405FC61440DE25D, 0x35EB280A157979B6, 0x48D40D6A525297AC, 0x6A87DC185054BADA +}; + +/* *INDENT-ON* */ +/* clang-format on */ + +__cold int t1ha_selfcheck__t1ha2_atonce(void) { + return t1ha_selfcheck(t1ha2_atonce, t1ha_refval_2atonce); +} + +__cold static uint64_t thunk_atonce128(const void *data, size_t len, + uint64_t seed) { + uint64_t unused; + return t1ha2_atonce128(&unused, data, len, seed); +} + +__cold int t1ha_selfcheck__t1ha2_atonce128(void) { + return t1ha_selfcheck(thunk_atonce128, t1ha_refval_2atonce128); +} + +__cold static uint64_t thunk_stream(const void *data, size_t len, + uint64_t seed) { + t1ha_context_t ctx; + t1ha2_init(&ctx, seed, seed); + t1ha2_update(&ctx, data, len); + return t1ha2_final(&ctx, NULL); +} + +__cold static uint64_t thunk_stream128(const void *data, size_t len, + uint64_t seed) { + uint64_t unused; + t1ha_context_t ctx; + t1ha2_init(&ctx, seed, seed); + t1ha2_update(&ctx, data, len); + return t1ha2_final(&ctx, &unused); +} + +__cold int t1ha_selfcheck__t1ha2_stream(void) { + return t1ha_selfcheck(thunk_stream, t1ha_refval_2stream) | + t1ha_selfcheck(thunk_stream128, t1ha_refval_2stream128); +} + +__cold int t1ha_selfcheck__t1ha2(void) { + return t1ha_selfcheck__t1ha2_atonce() | t1ha_selfcheck__t1ha2_atonce128() | + t1ha_selfcheck__t1ha2_stream(); +} + diff --git a/src/Crypto/t1ha_bits.h b/src/Crypto/t1ha_bits.h new file mode 100644 index 00000000..c9355143 --- /dev/null +++ b/src/Crypto/t1ha_bits.h @@ -0,0 +1,906 @@ +/* + * Copyright (c) 2016-2018 Positive Technologies, https://www.ptsecurity.com, + * Fast Positive Hash. + * + * Portions Copyright (c) 2010-2018 Leonid Yuriev <leo@yuriev.ru>, + * The 1Hippeus project (t1h). + * + * This software is provided 'as-is', without any express or implied + * warranty. In no event will the authors be held liable for any damages + * arising from the use of this software. + * + * Permission is granted to anyone to use this software for any purpose, + * including commercial applications, and to alter it and redistribute it + * freely, subject to the following restrictions: + * + * 1. The origin of this software must not be misrepresented; you must not + * claim that you wrote the original software. If you use this software + * in a product, an acknowledgement in the product documentation would be + * appreciated but is not required. + * 2. Altered source versions must be plainly marked as such, and must not be + * misrepresented as being the original software. + * 3. This notice may not be removed or altered from any source distribution. + */ + +/* + * t1ha = { Fast Positive Hash, aka "Позитивный Хэш" } + * by [Positive Technologies](https://www.ptsecurity.ru) + * + * Briefly, it is a 64-bit Hash Function: + * 1. Created for 64-bit little-endian platforms, in predominantly for x86_64, + * but portable and without penalties it can run on any 64-bit CPU. + * 2. In most cases up to 15% faster than City64, xxHash, mum-hash, metro-hash + * and all others portable hash-functions (which do not use specific + * hardware tricks). + * 3. Not suitable for cryptography. + * + * The Future will Positive. Всё будет хорошо. 
+ * + * ACKNOWLEDGEMENT: + * The t1ha was originally developed by Leonid Yuriev (Леонид Юрьев) + * for The 1Hippeus project - zerocopy messaging in the spirit of Sparta! + */ + +#pragma once + +#if defined(_MSC_VER) +#pragma warning(disable : 4201) /* nameless struct/union */ +#if _MSC_VER > 1800 +#pragma warning(disable : 4464) /* relative include path contains '..' */ +#endif /* 1800 */ +#endif /* MSVC */ +#include "t1ha.h" + +#ifndef T1HA_USE_FAST_ONESHOT_READ +/* Define it to 1 for little bit faster code. + * Unfortunately this may triggering a false-positive alarms from Valgrind, + * AddressSanitizer and other similar tool. + * So, define it to 0 for calmness if doubt. */ +#define T1HA_USE_FAST_ONESHOT_READ 1 +#endif /* T1HA_USE_FAST_ONESHOT_READ */ + +/*****************************************************************************/ + +#include <assert.h> /* for assert() */ +#include <string.h> /* for memcpy() */ + +#if __BYTE_ORDER__ != __ORDER_LITTLE_ENDIAN__ && \ + __BYTE_ORDER__ != __ORDER_BIG_ENDIAN__ +#error Unsupported byte order. +#endif + +#define T1HA_UNALIGNED_ACCESS__UNABLE 0 +#define T1HA_UNALIGNED_ACCESS__SLOW 1 +#define T1HA_UNALIGNED_ACCESS__EFFICIENT 2 + +#ifndef T1HA_SYS_UNALIGNED_ACCESS +#if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) +#define T1HA_SYS_UNALIGNED_ACCESS T1HA_UNALIGNED_ACCESS__EFFICIENT +#elif defined(__ia32__) +#define T1HA_SYS_UNALIGNED_ACCESS T1HA_UNALIGNED_ACCESS__EFFICIENT +#elif defined(__e2k__) +#define T1HA_SYS_UNALIGNED_ACCESS T1HA_UNALIGNED_ACCESS__SLOW +#elif defined(__ARM_FEATURE_UNALIGNED) +#define T1HA_SYS_UNALIGNED_ACCESS T1HA_UNALIGNED_ACCESS__EFFICIENT +#else +#define T1HA_SYS_UNALIGNED_ACCESS T1HA_UNALIGNED_ACCESS__UNABLE +#endif +#endif /* T1HA_SYS_UNALIGNED_ACCESS */ + +#define ALIGNMENT_16 2 +#define ALIGNMENT_32 4 +#if UINTPTR_MAX > 0xffffFFFFul || ULONG_MAX > 0xffffFFFFul +#define ALIGNMENT_64 8 +#else +#define ALIGNMENT_64 4 +#endif + +#ifndef PAGESIZE +#define PAGESIZE 4096 +#endif /* PAGESIZE */ + +/***************************************************************************/ + +#ifndef __has_builtin +#define __has_builtin(x) (0) +#endif + +#ifndef __has_warning +#define __has_warning(x) (0) +#endif + +#ifndef __has_feature +#define __has_feature(x) (0) +#endif + +#ifndef __has_extension +#define __has_extension(x) (0) +#endif + +#ifndef __has_attribute +#define __has_attribute(x) (0) +#endif + +#if __has_feature(address_sanitizer) +#define __SANITIZE_ADDRESS__ 1 +#endif + +#ifndef __optimize +#if defined(__clang__) && !__has_attribute(optimize) +#define __optimize(ops) +#elif defined(__GNUC__) || __has_attribute(optimize) +#define __optimize(ops) __attribute__((optimize(ops))) +#else +#define __optimize(ops) +#endif +#endif /* __optimize */ + +#ifndef __cold +#if defined(__OPTIMIZE__) +#if defined(__e2k__) +#define __cold __optimize(1) __attribute__((cold)) +#elif defined(__clang__) && !__has_attribute(cold) +/* just put infrequently used functions in separate section */ +#define __cold __attribute__((section("text.unlikely"))) __optimize("Os") +#elif defined(__GNUC__) || __has_attribute(cold) +#define __cold __attribute__((cold)) __optimize("Os") +#else +#define __cold __optimize("Os") +#endif +#else +#define __cold +#endif +#endif /* __cold */ + + +#if defined(_MSC_VER) + +#pragma warning(push, 1) + +#include <stdlib.h> +#define likely(cond) (cond) +#define unlikely(cond) (cond) +#define unreachable() __assume(0) +#define bswap64(v) byteswap_64(v) +#define bswap32(v) byteswap_32(v) +#define bswap16(v) byteswap_16(v) 
+#define rot64(v, s) rotr64(v, s) +#define rot32(v, s) rotr32(v, s) +#define __always_inline __forceinline + +#ifdef TC_WINDOWS_DRIVER +#undef assert +#define assert ASSERT +#endif + +#if defined(_M_X64) || defined(_M_IA64) +#pragma intrinsic(_umul128) +#define mul_64x64_128(a, b, ph) _umul128(a, b, ph) +#endif + +#if defined(_M_ARM64) || defined(_M_X64) || defined(_M_IA64) +#pragma intrinsic(__umulh) +#define mul_64x64_high(a, b) __umulh(a, b) +#endif + +#pragma warning(pop) +#pragma warning(disable : 4514) /* 'xyz': unreferenced inline function \ + has been removed */ +#pragma warning(disable : 4710) /* 'xyz': function not inlined */ +#pragma warning(disable : 4711) /* function 'xyz' selected for \ + automatic inline expansion */ +#pragma warning(disable : 4127) /* conditional expression is constant */ +#pragma warning(disable : 4702) /* unreachable code */ + +#define __GNUC_PREREQ(a,b) 0 +#ifndef UINT64_C +#define UINT64_C(value) value ## ULL +#endif + +#endif /* Compiler */ + +#ifndef likely +#define likely(cond) (cond) +#endif +#ifndef unlikely +#define unlikely(cond) (cond) +#endif +#ifndef __maybe_unused +#define __maybe_unused +#endif +#ifndef __always_inline +#define __always_inline __inline +#endif +#ifndef unreachable +#define unreachable() \ + do { \ + } while (1) +#endif + + + +#ifndef read_unaligned +#if defined(__GNUC__) || __has_attribute(packed) +typedef struct { + uint8_t unaligned_8; + uint16_t unaligned_16; + uint32_t unaligned_32; + uint64_t unaligned_64; +} __attribute__((packed)) t1ha_unaligned_proxy; +#define read_unaligned(ptr, bits) \ + (((const t1ha_unaligned_proxy *)((const uint8_t *)(ptr)-offsetof( \ + t1ha_unaligned_proxy, unaligned_##bits))) \ + ->unaligned_##bits) +#elif defined(_MSC_VER) +#pragma warning( \ + disable : 4235) /* nonstandard extension used: '__unaligned' \ + * keyword not supported on this architecture */ +#define read_unaligned(ptr, bits) (*(const __unaligned uint##bits##_t *)(ptr)) +#else +#pragma pack(push, 1) +typedef struct { + uint8_t unaligned_8; + uint16_t unaligned_16; + uint32_t unaligned_32; + uint64_t unaligned_64; +} t1ha_unaligned_proxy; +#pragma pack(pop) +#define read_unaligned(ptr, bits) \ + (((const t1ha_unaligned_proxy *)((const uint8_t *)(ptr)-offsetof( \ + t1ha_unaligned_proxy, unaligned_##bits))) \ + ->unaligned_##bits) +#endif +#endif /* read_unaligned */ + +#ifndef read_aligned +#if __GNUC_PREREQ(4, 8) || __has_builtin(__builtin_assume_aligned) +#define read_aligned(ptr, bits) \ + (*(const uint##bits##_t *)__builtin_assume_aligned(ptr, ALIGNMENT_##bits)) +#elif (__GNUC_PREREQ(3, 3) || __has_attribute(aligned)) && !defined(__clang__) +#define read_aligned(ptr, bits) \ + (*(const uint##bits##_t __attribute__((aligned(ALIGNMENT_##bits))) *)(ptr)) +#elif __has_attribute(assume_aligned) + +static __always_inline const + uint16_t *__attribute__((assume_aligned(ALIGNMENT_16))) + cast_aligned_16(const void *ptr) { + return (const uint16_t *)ptr; +} +static __always_inline const + uint32_t *__attribute__((assume_aligned(ALIGNMENT_32))) + cast_aligned_32(const void *ptr) { + return (const uint32_t *)ptr; +} +static __always_inline const + uint64_t *__attribute__((assume_aligned(ALIGNMENT_64))) + cast_aligned_64(const void *ptr) { + return (const uint64_t *)ptr; +} + +#define read_aligned(ptr, bits) (*cast_aligned_##bits(ptr)) + +#elif defined(_MSC_VER) +#define read_aligned(ptr, bits) \ + (*(const __declspec(align(ALIGNMENT_##bits)) uint##bits##_t *)(ptr)) +#else +#define read_aligned(ptr, bits) (*(const uint##bits##_t *)(ptr)) 
+#endif +#endif /* read_aligned */ + +#ifndef prefetch +#if (__GNUC_PREREQ(4, 0) || __has_builtin(__builtin_prefetch)) && \ + !defined(__ia32__) +#define prefetch(ptr) __builtin_prefetch(ptr) +#elif defined(_M_ARM64) || defined(_M_ARM) +#define prefetch(ptr) __prefetch(ptr) +#else +#define prefetch(ptr) \ + do { \ + (void)(ptr); \ + } while (0) +#endif +#endif /* prefetch */ + +#if __has_warning("-Wconstant-logical-operand") +#if defined(__clang__) +#pragma clang diagnostic ignored "-Wconstant-logical-operand" +#elif defined(__GNUC__) +#pragma GCC diagnostic ignored "-Wconstant-logical-operand" +#else +#pragma warning disable "constant-logical-operand" +#endif +#endif /* -Wconstant-logical-operand */ + +#if __has_warning("-Wtautological-pointer-compare") +#if defined(__clang__) +#pragma clang diagnostic ignored "-Wtautological-pointer-compare" +#elif defined(__GNUC__) +#pragma GCC diagnostic ignored "-Wtautological-pointer-compare" +#else +#pragma warning disable "tautological-pointer-compare" +#endif +#endif /* -Wtautological-pointer-compare */ + +/***************************************************************************/ + +#if __GNUC_PREREQ(4, 0) +#pragma GCC visibility push(hidden) +#endif /* __GNUC_PREREQ(4,0) */ + +/*---------------------------------------------------------- Little Endian */ + +#ifndef fetch16_le_aligned +static __always_inline uint16_t fetch16_le_aligned(const void *v) { + assert(((uintptr_t)v) % ALIGNMENT_16 == 0); +#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ + return read_aligned(v, 16); +#else + return bswap16(read_aligned(v, 16)); +#endif +} +#endif /* fetch16_le_aligned */ + +#ifndef fetch16_le_unaligned +static __always_inline uint16_t fetch16_le_unaligned(const void *v) { +#if T1HA_SYS_UNALIGNED_ACCESS == T1HA_UNALIGNED_ACCESS__UNABLE + const uint8_t *p = (const uint8_t *)v; + return p[0] | (uint16_t)p[1] << 8; +#elif __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ + return read_unaligned(v, 16); +#else + return bswap16(read_unaligned(v, 16)); +#endif +} +#endif /* fetch16_le_unaligned */ + +#ifndef fetch32_le_aligned +static __always_inline uint32_t fetch32_le_aligned(const void *v) { + assert(((uintptr_t)v) % ALIGNMENT_32 == 0); +#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ + return read_aligned(v, 32); +#else + return bswap32(read_aligned(v, 32)); +#endif +} +#endif /* fetch32_le_aligned */ + +#ifndef fetch32_le_unaligned +static __always_inline uint32_t fetch32_le_unaligned(const void *v) { +#if T1HA_SYS_UNALIGNED_ACCESS == T1HA_UNALIGNED_ACCESS__UNABLE + return fetch16_le_unaligned(v) | + (uint32_t)fetch16_le_unaligned((const uint8_t *)v + 2) << 16; +#elif __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ + return read_unaligned(v, 32); +#else + return bswap32(read_unaligned(v, 32)); +#endif +} +#endif /* fetch32_le_unaligned */ + +#ifndef fetch64_le_aligned +static __always_inline uint64_t fetch64_le_aligned(const void *v) { + assert(((uintptr_t)v) % ALIGNMENT_64 == 0); +#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ + return read_aligned(v, 64); +#else + return bswap64(read_aligned(v, 64)); +#endif +} +#endif /* fetch64_le_aligned */ + +#ifndef fetch64_le_unaligned +static __always_inline uint64_t fetch64_le_unaligned(const void *v) { +#if T1HA_SYS_UNALIGNED_ACCESS == T1HA_UNALIGNED_ACCESS__UNABLE + return fetch32_le_unaligned(v) | + (uint64_t)fetch32_le_unaligned((const uint8_t *)v + 4) << 32; +#elif __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ + return read_unaligned(v, 64); +#else + return bswap64(read_unaligned(v, 64)); +#endif +} +#endif /* 
fetch64_le_unaligned */ + +static __always_inline uint64_t tail64_le_aligned(const void *v, size_t tail) { + const uint8_t *const p = (const uint8_t *)v; +#if T1HA_USE_FAST_ONESHOT_READ && !defined(__SANITIZE_ADDRESS__) + /* We can perform a 'oneshot' read, which is little bit faster. */ + const unsigned shift = ((8 - tail) & 7) << 3; + return fetch64_le_aligned(p) & ((~UINT64_C(0)) >> shift); +#else + uint64_t r = 0; + switch (tail & 7) { + default: + unreachable(); +/* fall through */ +#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ + /* For most CPUs this code is better when not needed byte reordering. */ + case 0: + return fetch64_le_aligned(p); + case 7: + r = (uint64_t)p[6] << 8; + /* fall through */ + case 6: + r += p[5]; + r <<= 8; + /* fall through */ + case 5: + r += p[4]; + r <<= 32; + /* fall through */ + case 4: + return r + fetch32_le_aligned(p); + case 3: + r = (uint64_t)p[2] << 16; + /* fall through */ + case 2: + return r + fetch16_le_aligned(p); + case 1: + return p[0]; +#else + case 0: + r = p[7] << 8; + /* fall through */ + case 7: + r += p[6]; + r <<= 8; + /* fall through */ + case 6: + r += p[5]; + r <<= 8; + /* fall through */ + case 5: + r += p[4]; + r <<= 8; + /* fall through */ + case 4: + r += p[3]; + r <<= 8; + /* fall through */ + case 3: + r += p[2]; + r <<= 8; + /* fall through */ + case 2: + r += p[1]; + r <<= 8; + /* fall through */ + case 1: + return r + p[0]; +#endif + } +#endif /* T1HA_USE_FAST_ONESHOT_READ */ +} + +#if T1HA_USE_FAST_ONESHOT_READ && \ + T1HA_SYS_UNALIGNED_ACCESS != T1HA_UNALIGNED_ACCESS__UNABLE && \ + defined(PAGESIZE) && PAGESIZE > 42 && !defined(__SANITIZE_ADDRESS__) +#define can_read_underside(ptr, size) \ + (((PAGESIZE - (size)) & (uintptr_t)(ptr)) != 0) +#endif /* T1HA_USE_FAST_ONESHOT_READ */ + +static __always_inline uint64_t tail64_le_unaligned(const void *v, + size_t tail) { + const uint8_t *p = (const uint8_t *)v; +#if defined(can_read_underside) && \ + (UINTPTR_MAX > 0xffffFFFFul || ULONG_MAX > 0xffffFFFFul) + /* On some systems (e.g. x86_64) we can perform a 'oneshot' read, which + * is little bit faster. Thanks Marcin Żukowski <marcin.zukowski@gmail.com> + * for the reminder. */ + const unsigned offset = (8 - tail) & 7; + const unsigned shift = offset << 3; + if (likely(can_read_underside(p, 8))) { + p -= offset; + return fetch64_le_unaligned(p) >> shift; + } + return fetch64_le_unaligned(p) & ((~UINT64_C(0)) >> shift); +#else + uint64_t r = 0; + switch (tail & 7) { + default: + unreachable(); +/* fall through */ +#if T1HA_SYS_UNALIGNED_ACCESS == T1HA_UNALIGNED_ACCESS__EFFICIENT && \ + __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ + /* For most CPUs this code is better when not needed + * copying for alignment or byte reordering. */ + case 0: + return fetch64_le_unaligned(p); + case 7: + r = (uint64_t)p[6] << 8; + /* fall through */ + case 6: + r += p[5]; + r <<= 8; + /* fall through */ + case 5: + r += p[4]; + r <<= 32; + /* fall through */ + case 4: + return r + fetch32_le_unaligned(p); + case 3: + r = (uint64_t)p[2] << 16; + /* fall through */ + case 2: + return r + fetch16_le_unaligned(p); + case 1: + return p[0]; +#else + /* For most CPUs this code is better than a + * copying for alignment and/or byte reordering. 
*/ + case 0: + r = p[7] << 8; + /* fall through */ + case 7: + r += p[6]; + r <<= 8; + /* fall through */ + case 6: + r += p[5]; + r <<= 8; + /* fall through */ + case 5: + r += p[4]; + r <<= 8; + /* fall through */ + case 4: + r += p[3]; + r <<= 8; + /* fall through */ + case 3: + r += p[2]; + r <<= 8; + /* fall through */ + case 2: + r += p[1]; + r <<= 8; + /* fall through */ + case 1: + return r + p[0]; +#endif + } +#endif /* can_read_underside */ +} + +/*------------------------------------------------------------- Big Endian */ + +#ifndef fetch16_be_aligned +static __maybe_unused __always_inline uint16_t +fetch16_be_aligned(const void *v) { + assert(((uintptr_t)v) % ALIGNMENT_16 == 0); +#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ + return read_aligned(v, 16); +#else + return bswap16(read_aligned(v, 16)); +#endif +} +#endif /* fetch16_be_aligned */ + +#ifndef fetch16_be_unaligned +static __maybe_unused __always_inline uint16_t +fetch16_be_unaligned(const void *v) { +#if T1HA_SYS_UNALIGNED_ACCESS == T1HA_UNALIGNED_ACCESS__UNABLE + const uint8_t *p = (const uint8_t *)v; + return (uint16_t)p[0] << 8 | p[1]; +#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ + return read_unaligned(v, 16); +#else + return bswap16(read_unaligned(v, 16)); +#endif +} +#endif /* fetch16_be_unaligned */ + +#ifndef fetch32_be_aligned +static __maybe_unused __always_inline uint32_t +fetch32_be_aligned(const void *v) { + assert(((uintptr_t)v) % ALIGNMENT_32 == 0); +#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ + return read_aligned(v, 32); +#else + return bswap32(read_aligned(v, 32)); +#endif +} +#endif /* fetch32_be_aligned */ + +#ifndef fetch32_be_unaligned +static __maybe_unused __always_inline uint32_t +fetch32_be_unaligned(const void *v) { +#if T1HA_SYS_UNALIGNED_ACCESS == T1HA_UNALIGNED_ACCESS__UNABLE + return (uint32_t)fetch16_be_unaligned(v) << 16 | + fetch16_be_unaligned((const uint8_t *)v + 2); +#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ + return read_unaligned(v, 32); +#else + return bswap32(read_unaligned(v, 32)); +#endif +} +#endif /* fetch32_be_unaligned */ + +#ifndef fetch64_be_aligned +static __maybe_unused __always_inline uint64_t +fetch64_be_aligned(const void *v) { + assert(((uintptr_t)v) % ALIGNMENT_64 == 0); +#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ + return read_aligned(v, 64); +#else + return bswap64(read_aligned(v, 64)); +#endif +} +#endif /* fetch64_be_aligned */ + +#ifndef fetch64_be_unaligned +static __maybe_unused __always_inline uint64_t +fetch64_be_unaligned(const void *v) { +#if T1HA_SYS_UNALIGNED_ACCESS == T1HA_UNALIGNED_ACCESS__UNABLE + return (uint64_t)fetch32_be_unaligned(v) << 32 | + fetch32_be_unaligned((const uint8_t *)v + 4); +#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ + return read_unaligned(v, 64); +#else + return bswap64(read_unaligned(v, 64)); +#endif +} +#endif /* fetch64_be_unaligned */ + +static __maybe_unused __always_inline uint64_t tail64_be_aligned(const void *v, + size_t tail) { + const uint8_t *const p = (const uint8_t *)v; +#if T1HA_USE_FAST_ONESHOT_READ && !defined(__SANITIZE_ADDRESS__) + /* We can perform a 'oneshot' read, which is little bit faster. */ + const unsigned shift = ((8 - tail) & 7) << 3; + return fetch64_be_aligned(p) >> shift; +#else + switch (tail & 7) { + default: + unreachable(); +/* fall through */ +#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ + /* For most CPUs this code is better when not byte reordering. 
*/ + case 1: + return p[0]; + case 2: + return fetch16_be_aligned(p); + case 3: + return (uint32_t)fetch16_be_aligned(p) << 8 | p[2]; + case 4: + return fetch32_be_aligned(p); + case 5: + return (uint64_t)fetch32_be_aligned(p) << 8 | p[4]; + case 6: + return (uint64_t)fetch32_be_aligned(p) << 16 | fetch16_be_aligned(p + 4); + case 7: + return (uint64_t)fetch32_be_aligned(p) << 24 | + (uint32_t)fetch16_be_aligned(p + 4) << 8 | p[6]; + case 0: + return fetch64_be_aligned(p); +#else + case 1: + return p[0]; + case 2: + return p[1] | (uint32_t)p[0] << 8; + case 3: + return p[2] | (uint32_t)p[1] << 8 | (uint32_t)p[0] << 16; + case 4: + return p[3] | (uint32_t)p[2] << 8 | (uint32_t)p[1] << 16 | + (uint32_t)p[0] << 24; + case 5: + return p[4] | (uint32_t)p[3] << 8 | (uint32_t)p[2] << 16 | + (uint32_t)p[1] << 24 | (uint64_t)p[0] << 32; + case 6: + return p[5] | (uint32_t)p[4] << 8 | (uint32_t)p[3] << 16 | + (uint32_t)p[2] << 24 | (uint64_t)p[1] << 32 | (uint64_t)p[0] << 40; + case 7: + return p[6] | (uint32_t)p[5] << 8 | (uint32_t)p[4] << 16 | + (uint32_t)p[3] << 24 | (uint64_t)p[2] << 32 | (uint64_t)p[1] << 40 | + (uint64_t)p[0] << 48; + case 0: + return p[7] | (uint32_t)p[6] << 8 | (uint32_t)p[5] << 16 | + (uint32_t)p[4] << 24 | (uint64_t)p[3] << 32 | (uint64_t)p[2] << 40 | + (uint64_t)p[1] << 48 | (uint64_t)p[0] << 56; +#endif + } +#endif /* T1HA_USE_FAST_ONESHOT_READ */ +} + +static __maybe_unused __always_inline uint64_t +tail64_be_unaligned(const void *v, size_t tail) { + const uint8_t *p = (const uint8_t *)v; +#if defined(can_read_underside) && \ + (UINTPTR_MAX > 0xffffFFFFul || ULONG_MAX > 0xffffFFFFul) + /* On some systems (e.g. x86_64) we can perform a 'oneshot' read, which + * is little bit faster. Thanks Marcin Żukowski <marcin.zukowski@gmail.com> + * for the reminder. */ + const unsigned offset = (8 - tail) & 7; + const unsigned shift = offset << 3; + if (likely(can_read_underside(p, 8))) { + p -= offset; + return fetch64_be_unaligned(p) & ((~UINT64_C(0)) >> shift); + } + return fetch64_be_unaligned(p) >> shift; +#else + switch (tail & 7) { + default: + unreachable(); +/* fall through */ +#if T1HA_SYS_UNALIGNED_ACCESS == T1HA_UNALIGNED_ACCESS__EFFICIENT && \ + __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ + /* For most CPUs this code is better when not needed + * copying for alignment or byte reordering. */ + case 1: + return p[0]; + case 2: + return fetch16_be_unaligned(p); + case 3: + return (uint32_t)fetch16_be_unaligned(p) << 8 | p[2]; + case 4: + return fetch32_be(p); + case 5: + return (uint64_t)fetch32_be_unaligned(p) << 8 | p[4]; + case 6: + return (uint64_t)fetch32_be_unaligned(p) << 16 | + fetch16_be_unaligned(p + 4); + case 7: + return (uint64_t)fetch32_be_unaligned(p) << 24 | + (uint32_t)fetch16_be_unaligned(p + 4) << 8 | p[6]; + case 0: + return fetch64_be_unaligned(p); +#else + /* For most CPUs this code is better than a + * copying for alignment and/or byte reordering. 
*/ + case 1: + return p[0]; + case 2: + return p[1] | (uint32_t)p[0] << 8; + case 3: + return p[2] | (uint32_t)p[1] << 8 | (uint32_t)p[0] << 16; + case 4: + return p[3] | (uint32_t)p[2] << 8 | (uint32_t)p[1] << 16 | + (uint32_t)p[0] << 24; + case 5: + return p[4] | (uint32_t)p[3] << 8 | (uint32_t)p[2] << 16 | + (uint32_t)p[1] << 24 | (uint64_t)p[0] << 32; + case 6: + return p[5] | (uint32_t)p[4] << 8 | (uint32_t)p[3] << 16 | + (uint32_t)p[2] << 24 | (uint64_t)p[1] << 32 | (uint64_t)p[0] << 40; + case 7: + return p[6] | (uint32_t)p[5] << 8 | (uint32_t)p[4] << 16 | + (uint32_t)p[3] << 24 | (uint64_t)p[2] << 32 | (uint64_t)p[1] << 40 | + (uint64_t)p[0] << 48; + case 0: + return p[7] | (uint32_t)p[6] << 8 | (uint32_t)p[5] << 16 | + (uint32_t)p[4] << 24 | (uint64_t)p[3] << 32 | (uint64_t)p[2] << 40 | + (uint64_t)p[1] << 48 | (uint64_t)p[0] << 56; +#endif + } +#endif /* can_read_underside */ +} + +/***************************************************************************/ + +#ifndef rot64 +static __always_inline uint64_t rot64(uint64_t v, unsigned s) { + return (v >> s) | (v << (64 - s)); +} +#endif /* rot64 */ + +#ifndef mul_32x32_64 +static __always_inline uint64_t mul_32x32_64(uint32_t a, uint32_t b) { + return a * (uint64_t)b; +} +#endif /* mul_32x32_64 */ + +#ifndef add64carry_first +static __maybe_unused __always_inline unsigned +add64carry_first(uint64_t base, uint64_t addend, uint64_t *sum) { +#if __has_builtin(__builtin_addcll) + unsigned long long carryout; + *sum = __builtin_addcll(base, addend, 0, &carryout); + return (unsigned)carryout; +#else + *sum = base + addend; + return *sum < addend; +#endif /* __has_builtin(__builtin_addcll) */ +} +#endif /* add64carry_fist */ + +#ifndef add64carry_next +static __maybe_unused __always_inline unsigned +add64carry_next(unsigned carry, uint64_t base, uint64_t addend, uint64_t *sum) { +#if __has_builtin(__builtin_addcll) + unsigned long long carryout; + *sum = __builtin_addcll(base, addend, carry, &carryout); + return (unsigned)carryout; +#else + *sum = base + addend + carry; + return *sum < addend || (carry && *sum == addend); +#endif /* __has_builtin(__builtin_addcll) */ +} +#endif /* add64carry_next */ + +#ifndef add64carry_last +static __maybe_unused __always_inline void +add64carry_last(unsigned carry, uint64_t base, uint64_t addend, uint64_t *sum) { +#if __has_builtin(__builtin_addcll) + unsigned long long carryout; + *sum = __builtin_addcll(base, addend, carry, &carryout); + (void)carryout; +#else + *sum = base + addend + carry; +#endif /* __has_builtin(__builtin_addcll) */ +} +#endif /* add64carry_last */ + +#ifndef mul_64x64_128 +static __maybe_unused __always_inline uint64_t mul_64x64_128(uint64_t a, + uint64_t b, + uint64_t *h) { +#if defined(__SIZEOF_INT128__) || \ + (defined(_INTEGRAL_MAX_BITS) && _INTEGRAL_MAX_BITS >= 128) + __uint128_t r = (__uint128_t)a * (__uint128_t)b; + /* modern GCC could nicely optimize this */ + *h = (uint64_t)(r >> 64); + return (uint64_t)r; +#elif defined(mul_64x64_high) + *h = mul_64x64_high(a, b); + return a * b; +#else + /* performs 64x64 to 128 bit multiplication */ + const uint64_t ll = mul_32x32_64((uint32_t)a, (uint32_t)b); + const uint64_t lh = mul_32x32_64(a >> 32, (uint32_t)b); + const uint64_t hl = mul_32x32_64((uint32_t)a, b >> 32); + const uint64_t hh = mul_32x32_64(a >> 32, b >> 32); + + /* Few simplification are possible here for 32-bit architectures, + * but thus we would lost compatibility with the original 64-bit + * version. 
Think is very bad idea, because then 32-bit t1ha will + * still (relatively) very slowly and well yet not compatible. */ + uint64_t l; + add64carry_last(add64carry_first(ll, lh << 32, &l), hh, lh >> 32, h); + add64carry_last(add64carry_first(l, hl << 32, &l), *h, hl >> 32, h); + return l; +#endif +} +#endif /* mul_64x64_128() */ + +#ifndef mul_64x64_high +static __maybe_unused __always_inline uint64_t mul_64x64_high(uint64_t a, + uint64_t b) { + uint64_t h; + mul_64x64_128(a, b, &h); + return h; +} +#endif /* mul_64x64_high */ + +/***************************************************************************/ + +/* 'magic' primes */ +static const uint64_t prime_0 = UINT64_C(0xEC99BF0D8372CAAB); +static const uint64_t prime_1 = UINT64_C(0x82434FE90EDCEF39); +static const uint64_t prime_2 = UINT64_C(0xD4F06DB99D67BE4B); +static const uint64_t prime_3 = UINT64_C(0xBD9CACC22C6E9571); +static const uint64_t prime_4 = UINT64_C(0x9C06FAF4D023E3AB); +static const uint64_t prime_5 = UINT64_C(0xC060724A8424F345); +static const uint64_t prime_6 = UINT64_C(0xCB5AF53AE3AAAC31); + +/* xor high and low parts of full 128-bit product */ +static __maybe_unused __always_inline uint64_t mux64(uint64_t v, + uint64_t prime) { + uint64_t l, h; + l = mul_64x64_128(v, prime, &h); + return l ^ h; +} + +static __always_inline uint64_t final64(uint64_t a, uint64_t b) { + uint64_t x = (a + rot64(b, 41)) * prime_0; + uint64_t y = (rot64(a, 23) + b) * prime_6; + return mux64(x ^ y, prime_5); +} + +static __always_inline void mixup64(uint64_t *__restrict a, + uint64_t *__restrict b, uint64_t v, + uint64_t prime) { + uint64_t h; + *a ^= mul_64x64_128(*b + v, prime, &h); + *b += h; +} diff --git a/src/Crypto/t1ha_selfcheck.c b/src/Crypto/t1ha_selfcheck.c new file mode 100644 index 00000000..51d02912 --- /dev/null +++ b/src/Crypto/t1ha_selfcheck.c @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2016-2018 Positive Technologies, https://www.ptsecurity.com, + * Fast Positive Hash. + * + * Portions Copyright (c) 2010-2018 Leonid Yuriev <leo@yuriev.ru>, + * The 1Hippeus project (t1h). + * + * This software is provided 'as-is', without any express or implied + * warranty. In no event will the authors be held liable for any damages + * arising from the use of this software. + * + * Permission is granted to anyone to use this software for any purpose, + * including commercial applications, and to alter it and redistribute it + * freely, subject to the following restrictions: + * + * 1. The origin of this software must not be misrepresented; you must not + * claim that you wrote the original software. If you use this software + * in a product, an acknowledgement in the product documentation would be + * appreciated but is not required. + * 2. Altered source versions must be plainly marked as such, and must not be + * misrepresented as being the original software. + * 3. This notice may not be removed or altered from any source distribution. + */ + +/* + * t1ha = { Fast Positive Hash, aka "Позитивный Хэш" } + * by [Positive Technologies](https://www.ptsecurity.ru) + * + * Briefly, it is a 64-bit Hash Function: + * 1. Created for 64-bit little-endian platforms, in predominantly for x86_64, + * but portable and without penalties it can run on any 64-bit CPU. + * 2. In most cases up to 15% faster than City64, xxHash, mum-hash, metro-hash + * and all others portable hash-functions (which do not use specific + * hardware tricks). + * 3. Not suitable for cryptography. + * + * The Future will Positive. Всё будет хорошо. 
+ * + * ACKNOWLEDGEMENT: + * The t1ha was originally developed by Leonid Yuriev (Леонид Юрьев) + * for The 1Hippeus project - zerocopy messaging in the spirit of Sparta! + */ + +#include "t1ha_selfcheck.h" +#include "t1ha_bits.h" + +const uint8_t t1ha_test_pattern[64] = { + 0, 1, 2, 3, 4, 5, 6, 7, 0xFF, 0x7F, 0x3F, + 0x1F, 0xF, 8, 16, 32, 64, 0x80, 0xFE, 0xFC, 0xF8, 0xF0, + 0xE0, 0xC0, 0xFD, 0xFB, 0xF7, 0xEF, 0xDF, 0xBF, 0x55, 0xAA, 11, + 17, 19, 23, 29, 37, 42, 43, 'a', 'b', 'c', 'd', + 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', + 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x'}; + +static VC_INLINE int probe(uint64_t (*hash)(const void *, size_t, uint64_t), + const uint64_t reference, const void *data, + unsigned len, uint64_t seed) { + const uint64_t actual = hash(data, len, seed); + assert(actual == reference); + return actual != reference; +} + +__cold int t1ha_selfcheck(uint64_t (*hash)(const void *, size_t, uint64_t), + const uint64_t *reference_values) { + int failed = 0; + uint64_t seed = 1; + const uint64_t zero = 0; + uint8_t pattern_long[512]; + int i; + failed |= probe(hash, /* empty-zero */ *reference_values++, NULL, 0, zero); + failed |= probe(hash, /* empty-all1 */ *reference_values++, NULL, 0, ~zero); + failed |= probe(hash, /* bin64-zero */ *reference_values++, t1ha_test_pattern, + 64, zero); + + for (i = 1; i < 64; i++) { + /* bin%i-1p%i */ + failed |= probe(hash, *reference_values++, t1ha_test_pattern, i, seed); + seed <<= 1; + } + + seed = ~zero; + for (i = 1; i <= 7; i++) { + seed <<= 1; + /* align%i_F%i */; + failed |= + probe(hash, *reference_values++, t1ha_test_pattern + i, 64 - i, seed); + } + + + for (i = 0; i < sizeof(pattern_long); ++i) + pattern_long[i] = (uint8_t)i; + for (i = 0; i <= 7; i++) { + /* long-%05i */ + failed |= + probe(hash, *reference_values++, pattern_long + i, 128 + i * 17, seed); + } + + return failed ? -1 : 0; +} diff --git a/src/Crypto/t1ha_selfcheck.h b/src/Crypto/t1ha_selfcheck.h new file mode 100644 index 00000000..943bf2d2 --- /dev/null +++ b/src/Crypto/t1ha_selfcheck.h @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2016-2018 Positive Technologies, https://www.ptsecurity.com, + * Fast Positive Hash. + * + * Portions Copyright (c) 2010-2018 Leonid Yuriev <leo@yuriev.ru>, + * The 1Hippeus project (t1h). + * + * This software is provided 'as-is', without any express or implied + * warranty. In no event will the authors be held liable for any damages + * arising from the use of this software. + * + * Permission is granted to anyone to use this software for any purpose, + * including commercial applications, and to alter it and redistribute it + * freely, subject to the following restrictions: + * + * 1. The origin of this software must not be misrepresented; you must not + * claim that you wrote the original software. If you use this software + * in a product, an acknowledgement in the product documentation would be + * appreciated but is not required. + * 2. Altered source versions must be plainly marked as such, and must not be + * misrepresented as being the original software. + * 3. This notice may not be removed or altered from any source distribution. + */ + +/* + * t1ha = { Fast Positive Hash, aka "Позитивный Хэш" } + * by [Positive Technologies](https://www.ptsecurity.ru) + * + * Briefly, it is a 64-bit Hash Function: + * 1. Created for 64-bit little-endian platforms, in predominantly for x86_64, + * but portable and without penalties it can run on any 64-bit CPU. + * 2. 
In most cases up to 15% faster than City64, xxHash, mum-hash, metro-hash + * and all others portable hash-functions (which do not use specific + * hardware tricks). + * 3. Not suitable for cryptography. + * + * The Future will Positive. Всё будет хорошо. + * + * ACKNOWLEDGEMENT: + * The t1ha was originally developed by Leonid Yuriev (Леонид Юрьев) + * for The 1Hippeus project - zerocopy messaging in the spirit of Sparta! + */ + +#pragma once +#if defined(_MSC_VER) && _MSC_VER > 1800 +#pragma warning(disable : 4464) /* relative include path contains '..' */ +#endif /* MSVC */ +#include "t1ha.h" + +/***************************************************************************/ +/* Self-checking */ + +extern const uint8_t t1ha_test_pattern[64]; +int t1ha_selfcheck(uint64_t (*hash)(const void *, size_t, uint64_t), + const uint64_t *reference_values); + +#ifndef T1HA2_DISABLED +extern const uint64_t t1ha_refval_2atonce[81]; +extern const uint64_t t1ha_refval_2atonce128[81]; +extern const uint64_t t1ha_refval_2stream[81]; +extern const uint64_t t1ha_refval_2stream128[81]; +#endif /* T1HA2_DISABLED */ + +#ifndef T1HA1_DISABLED +extern const uint64_t t1ha_refval_64le[81]; +extern const uint64_t t1ha_refval_64be[81]; +#endif /* T1HA1_DISABLED */ + +#ifndef T1HA0_DISABLED +extern const uint64_t t1ha_refval_32le[81]; +extern const uint64_t t1ha_refval_32be[81]; +#if T1HA0_AESNI_AVAILABLE +extern const uint64_t t1ha_refval_ia32aes_a[81]; +extern const uint64_t t1ha_refval_ia32aes_b[81]; +#endif /* T1HA0_AESNI_AVAILABLE */ +#endif /* T1HA0_DISABLED */ diff --git a/src/Crypto/wolfCrypt.c b/src/Crypto/wolfCrypt.c new file mode 100644 index 00000000..da0fbe2f --- /dev/null +++ b/src/Crypto/wolfCrypt.c @@ -0,0 +1,243 @@ +/* See src/Crypto/wolfCrypt.md */ + +#include "Aes.h" +#include "Sha2.h" +#include "../Common/Crypto.h" +#include <wolfssl/wolfcrypt/hmac.h> + + +AES_RETURN aes_init() +{ +#if defined( AES_ERR_CHK ) + return EXIT_SUCCESS; +#else + return; +#endif +} + +AES_RETURN aes_encrypt_key(const unsigned char *key, int key_len, aes_encrypt_ctx cx[1]) +{ + int ret = 0; + + ret = wc_AesInit(&cx->wc_enc_aes, NULL, INVALID_DEVID); + + if (key_len == 128 || key_len == 192 || key_len == 256) + key_len = key_len/8; + + if (ret == 0) { + ret = wc_AesSetKey(&cx->wc_enc_aes, key, key_len, NULL, AES_ENCRYPTION); + } + +#if defined( AES_ERR_CHK ) + return ret ? EXIT_FAILURE : EXIT_SUCCESS; +#else + return; +#endif +} + +AES_RETURN aes_decrypt_key(const unsigned char *key, int key_len, aes_decrypt_ctx cx[1]) +{ + int ret = 0; + + ret = wc_AesInit(&cx->wc_dec_aes, NULL, INVALID_DEVID); + + if (key_len == 128 || key_len == 192 || key_len == 256) + key_len = key_len/8; + + if (ret == 0) { + ret = wc_AesSetKey(&cx->wc_dec_aes, key, key_len, NULL, AES_DECRYPTION); + } + +#if defined( AES_ERR_CHK ) + return ret ? 
EXIT_FAILURE : EXIT_SUCCESS; +#else + return; +#endif +} + +AES_RETURN aes_encrypt_key128(const unsigned char *key, aes_encrypt_ctx cx[1]) +{ + return aes_encrypt_key(key, 128, cx); +} + +AES_RETURN aes_encrypt_key192(const unsigned char *key, aes_encrypt_ctx cx[1]) +{ + return aes_encrypt_key(key, 192, cx); +} + +AES_RETURN aes_encrypt_key256(const unsigned char *key, aes_encrypt_ctx cx[1]) +{ + return aes_encrypt_key(key, 256, cx); +} + +AES_RETURN aes_decrypt_key128(const unsigned char *key, aes_decrypt_ctx cx[1]) +{ + return aes_decrypt_key(key, 128, cx); +} + +AES_RETURN aes_decrypt_key192(const unsigned char *key, aes_decrypt_ctx cx[1]) +{ + return aes_decrypt_key(key, 192, cx); +} + +AES_RETURN aes_decrypt_key256(const unsigned char *key, aes_decrypt_ctx cx[1]) +{ + return aes_decrypt_key(key, 256, cx); +} + +AES_RETURN aes_encrypt(const unsigned char *in, unsigned char *out, const aes_encrypt_ctx cx[1]) +{ + int ret = wc_AesEncryptDirect(&cx->wc_enc_aes, out, in); +#if defined( AES_ERR_CHK ) + return ret ? EXIT_FAILURE : EXIT_SUCCESS; +#else + return; +#endif + +} + +AES_RETURN aes_decrypt(const unsigned char *in, unsigned char *out, const aes_decrypt_ctx cx[1]) +{ + int ret = wc_AesDecryptDirect(&cx->wc_dec_aes, out, in); +#if defined( AES_ERR_CHK ) + return ret ? EXIT_FAILURE : EXIT_SUCCESS; +#else + return; +#endif + +} + +AES_RETURN xts_encrypt_key(const unsigned char *key, int key_len, aes_encrypt_ctx cx[1]) +{ + int ret = 0; + + cx->wc_enc_xts.aes = cx->wc_enc_aes; + + ret = wc_AesInit(&cx->wc_enc_xts.tweak, NULL, INVALID_DEVID); + + if (key_len == 128 || key_len == 192 || key_len == 256) + key_len = key_len/8; + + if (ret == 0) { + ret = wc_AesSetKey(&cx->wc_enc_xts.tweak, key, key_len, NULL, AES_ENCRYPTION); + } +#if defined( AES_ERR_CHK ) + return ret ? EXIT_FAILURE : EXIT_SUCCESS; +#else + return; +#endif +} + +AES_RETURN xts_decrypt_key(const unsigned char *key, int key_len, aes_decrypt_ctx cx[1]) +{ + int ret = 0; + + cx->wc_dec_xts.aes = cx->wc_dec_aes; + + ret = wc_AesInit(&cx->wc_dec_xts.tweak, NULL, INVALID_DEVID); + + if (key_len == 128 || key_len == 192 || key_len == 256) + key_len = key_len/8; + + if (ret == 0) { + ret = wc_AesSetKey(&cx->wc_dec_xts.tweak, key, key_len, NULL, AES_ENCRYPTION); + } + +#if defined( AES_ERR_CHK ) + return ret ? EXIT_FAILURE : EXIT_SUCCESS; +#else + return; +#endif +} + +AES_RETURN xts_encrypt_key256(const unsigned char *key, aes_encrypt_ctx cx[1]) +{ + return xts_encrypt_key(key, 256, cx); +} + +AES_RETURN xts_decrypt_key256(const unsigned char *key, aes_decrypt_ctx cx[1]) +{ + return xts_decrypt_key(key, 256, cx); +} + +AES_RETURN xts_encrypt(const unsigned char *in, unsigned char *out, word64 length, word64 sector, const aes_encrypt_ctx cx[1]) +{ + int ret = wc_AesXtsEncryptConsecutiveSectors(&cx->wc_enc_xts, out, in, length, sector, ENCRYPTION_DATA_UNIT_SIZE); + +#if defined( AES_ERR_CHK ) + return ret ? EXIT_FAILURE : EXIT_SUCCESS; +#else + return; +#endif + +} + +AES_RETURN xts_decrypt(const unsigned char *in, unsigned char *out, word64 length, word64 sector, const aes_decrypt_ctx cx[1]) +{ + int ret = wc_AesXtsDecryptConsecutiveSectors(&cx->wc_dec_xts, out, in, length, sector, ENCRYPTION_DATA_UNIT_SIZE); + +#if defined( AES_ERR_CHK ) + return ret ? 
EXIT_FAILURE : EXIT_SUCCESS; +#else + return; +#endif +} + + +void sha256_begin(sha256_ctx* ctx) +{ + wc_InitSha256(ctx); +} + +void sha256_hash(const unsigned char * source, uint_32t sourceLen, sha256_ctx *ctx) +{ + wc_Sha256Update(ctx, source, sourceLen); +} + +void sha256_end(unsigned char * result, sha256_ctx* ctx) +{ + wc_Sha256Final(ctx, result); +} + +void sha256(unsigned char * result, const unsigned char* source, uint_32t sourceLen) +{ + wc_Sha256 sha256; + wc_InitSha256(&sha256); + wc_Sha256Update(&sha256, source, sourceLen); + wc_Sha256Final(&sha256, result); + wc_Sha256Free(&sha256); +} + +void sha512_begin(sha512_ctx* ctx) +{ + wc_InitSha512(ctx); +} + +void sha512_hash(const unsigned char * source, uint_64t sourceLen, sha512_ctx *ctx) +{ + wc_Sha512Update(ctx, source, sourceLen); +} + +void sha512_end(unsigned char * result, sha512_ctx* ctx) +{ + wc_Sha512Final(ctx, result); +} + +void sha512(unsigned char * result, const unsigned char* source, uint_64t sourceLen) +{ + wc_Sha512 sha512; + wc_InitSha512(&sha512); + wc_Sha512Update(&sha512, source, sourceLen); + wc_Sha512Final(&sha512, result); + wc_Sha512Free(&sha512); +} + +void derive_key_sha512 (unsigned char *pwd, int pwd_len, unsigned char *salt, int salt_len, uint32 iterations, unsigned char *dk, int dklen) { + (void) iterations; + wc_HKDF(WC_SHA512, (uint8*)pwd, (word32)pwd_len, (uint8*)salt, (word32)salt_len, NULL, 0, (uint8*)dk, (word32)dklen); +} + +void derive_key_sha256 (unsigned char *pwd, int pwd_len, unsigned char *salt, int salt_len, uint32 iterations, unsigned char *dk, int dklen) { + (void) iterations; + wc_HKDF(WC_SHA256, (uint8*)pwd, (word32)pwd_len, (uint8*)salt, (word32)salt_len, NULL, 0, (uint8*)dk, (word32)dklen); +} diff --git a/src/Crypto/wolfCrypt.md b/src/Crypto/wolfCrypt.md new file mode 100644 index 00000000..32ccf242 --- /dev/null +++ b/src/Crypto/wolfCrypt.md @@ -0,0 +1,25 @@ +# wolfSSL as crypto provider for VeraCrypt + +[wolfCrypt](https://www.wolfssl.com/products/wolfcrypt/) is wolfSSL's cutting-edge crypto engine and a +potential FIPS solution for users of VeraCrypt. Follow the steps below to set up VeraCrypt with wolfCrypt. + +## Building wolfSSL + +Clone wolfSSL and build it as shown below. + +``` +git clone https://github.com/wolfssl/wolfssl && cd wolfssl +./autogen.sh +./configure --enable-xts CFLAGS="-DNO_OLD_WC_NAMES" +make +sudo make install +``` + +## Building VeraCrypt with wolfSSL + +Build VeraCrypt with the `WOLFCRYPT=1` command line option. + +``` +make WXSTATIC=1 wxbuild && make WXSTATIC=1 clean && make WXSTATIC=1 WOLFCRYPT=1 && make WXSTATIC=1 WOLFCRYPT=1 package +``` +
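To show how the wolfCrypt-backed wrappers in `wolfCrypt.c` fit together, below is a minimal, hypothetical test harness (not part of the patch) that round-trips one data unit through `xts_encrypt`/`xts_decrypt`. It assumes a build with `WOLFCRYPT_BACKEND` and `AES_ERR_CHK` defined (so the wrappers return `EXIT_SUCCESS`/`EXIT_FAILURE`), assumes `ENCRYPTION_DATA_UNIT_SIZE` is 512 bytes, and uses made-up key material; the file name, include paths, and compile line in the comment are likewise only a sketch. Note that the XTS key-setup functions copy the already-initialized AES context into the XTS context, so the primary key must be installed with `aes_encrypt_key256`/`aes_decrypt_key256` before the secondary (tweak) key is installed with `xts_encrypt_key256`/`xts_decrypt_key256`.

```
/* Hypothetical round-trip check for the wolfCrypt XTS wrappers above.
 * Assumed (not from the patch): file name, include paths, compile line, key values.
 *   cc -DWOLFCRYPT_BACKEND -I src -I src/Crypto xts_roundtrip.c src/Crypto/wolfCrypt.c -lwolfssl
 */
#include <stdio.h>
#include <string.h>
#include "Aes.h"

#define DATA_UNIT 512   /* assumed to match ENCRYPTION_DATA_UNIT_SIZE */

int main(void)
{
    unsigned char primary_key[32]   = { 0x01 }; /* illustrative values only */
    unsigned char secondary_key[32] = { 0x02 };
    unsigned char plain[DATA_UNIT], cipher[DATA_UNIT], check[DATA_UNIT];
    aes_encrypt_ctx enc[1];
    aes_decrypt_ctx dec[1];
    word64 sector = 42;     /* index of the data unit, used as the XTS tweak */

    memset(plain, 0xA5, sizeof(plain));
    aes_init();

    /* Primary key first, then the secondary (tweak) key: xts_*_key256() copies
     * the AES context set up by aes_*_key256() into the XTS context. */
    if (aes_encrypt_key256(primary_key, enc) != EXIT_SUCCESS ||
        xts_encrypt_key256(secondary_key, enc) != EXIT_SUCCESS ||
        aes_decrypt_key256(primary_key, dec) != EXIT_SUCCESS ||
        xts_decrypt_key256(secondary_key, dec) != EXIT_SUCCESS)
        return 1;

    if (xts_encrypt(plain, cipher, DATA_UNIT, sector, enc) != EXIT_SUCCESS ||
        xts_decrypt(cipher, check, DATA_UNIT, sector, dec) != EXIT_SUCCESS)
        return 1;

    printf("XTS round-trip %s\n", memcmp(plain, check, DATA_UNIT) ? "FAILED" : "ok");
    return 0;
}
```

The calling convention mirrors `wc_AesXtsEncryptConsecutiveSectors`, to which the wrappers delegate: `length` is the total number of bytes and `sector` is the index of the first data unit, with `ENCRYPTION_DATA_UNIT_SIZE` as the per-unit tweak granularity.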