VeraCrypt
aboutsummaryrefslogtreecommitdiff
path: root/src/Common/zlib/trees.h
blob: d35639d82a27807e49ea35c334f8bbcf64720f82 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
/* header created automatically with -DGEN_TREES_H */

/* Static (fixed) literal/length tree: L_CODES+2 entries of {{code},{bits}}.
 * The bit lengths visible below follow the fixed DEFLATE code of RFC 1951
 * section 3.2.6: 8 bits for symbols 0-143, 9 for 144-255, 7 for 256-279,
 * 8 for 280-287.  Codes are stored bit-reversed for LSB-first emission
 * (e.g. symbol 0: fixed code 0x30 reversed over 8 bits gives 12). */
local const ct_data static_ltree[L_CODES+2] = {
{{ 12},{  8}}, {{140},{  8}}, {{ 76},{  8}}, {{204},{  8}}, {{ 44},{  8}},
{{172},{  8}}, {{108},{  8}}, {{236},{  8}}, {{ 28},{  8}}, {{156},{  8}},
{{ 92},{  8}}, {{220},{  8}}, {{ 60},{  8}}, {{188},{  8}}, {{124},{  8}},
{{252},{  8}}, {{  2},{  8}}, {{130},{  8}}, {{ 66},{  8}}, {{194},{  8}},
{{ 34},{  8}}, {{162},{  8}}, {{ 98},{  8}}, {{226},{  8}}, {{ 18},{  8}},
{{146},{  8}}, {{ 82},{  8}}, {{210},{  8}}, {{ 50},{  8}}, {{178},{  8}},
{{114},{  8}}, {{242},{  8}}, {{ 10},{  8}}, {{138},{  8}}, {{ 74},{  8}},
{{202},{  8}}, {{ 42},{  8}}, {{170},{  8}}, {{106},{  8}}, {{234},{  8}},
{{ 26},{  8}}, {{154},{  8}}, {{ 90},{  8}}, {{218},{  8}}, {{ 58},{  8}},
{{186},{  8}}, {{122},{  8}}, {{250},{  8}}, {{  6},{  8}}, {{134},{  8}},
{{ 70},{  8}}, {{198},{  8}}, {{ 38},{  8}}, {{166},{  8}}, {{102},{  8}},
{{230},{  8}}, {{ 22},{  8}}, {{150},{  8}}, {{ 86},{  8}}, {{214},{  8}},
{{ 54},{  8}}, {{182},{  8}}, {{118},{  8}}, {{246},{  8}}, {{ 14},{  8}},
{{142},{  8}}, {{ 78},{  8}}, {{206},{  8}}, {{ 46},{  8}}, {{174},{  8}},
{{110},{  8}}, {{238},{  8}}, {{ 30},{  8}}, {{158},{  8}}, {{ 94},{  8}},
{{222},{  8}}, {{ 62},{  8}}, {{190},{  8}}, {{126},{  8}}, {{254},{  8}},
{{  1},{  8}}, {{129},{  8}}, {{ 65},{  8}}, {{193},{  8}}, {{ 33},{  8}},
{{161},{  8}}, {{ 97},{  8}}, {{225},{  8}}, {{ 17},{  8}}, {{145},{  8}},
{{ 81},{  8}}, {{209},{  8}}, {{ 49},{  8}}, {{177},{  8}}, {{113},{  8}},
{{241},{  8}}, {{  9},{  8}}, {{137},{  8}}, {{ 73},{  8}}, {{201},{  8}},
{{ 41},{  8}}, {{169},{  8}}, {{105},{  8}}, {{233},{  8}}, {{ 25},{  8}},
{{153},{  8}}, {{ 89},{  8}}, {{217},{  8}}, {{ 57},{  8}}, {{185},{  8}},
{{121},{  8}}, {{249},{  8}}, {{  5},{  8}}, {{133},{  8}}, {{ 69},{  8}},
{{197},{  8}}, {{ 37},{  8}}, {{165},{  8}}, {{101},{  8}}, {{229},{  8}},
{{ 21},{  8}}, {{149},{  8}}, {{ 85},{  8}}, {{213},{  8}}, {{ 53},{  8}},
{{181},{  8}}, {{117},{  8}}, {{245},{  8}}, {{ 13},{  8}}, {{141},{  8}},
{{ 77},{  8}}, {{205},{  8}}, {{ 45},{  8}}, {{173},{  8}}, {{109},{  8}},
{{237},{  8}}, {{ 29},{  8}}, {{157},{  8}}, {{ 93},{  8}}, {{221},{  8}},
{{ 61},{  8}}, {{189},{  8}}, {{125},{  8}}, {{253},{  8}}, {{ 19},{  9}},
{{275},{  9}}, {{147},{  9}}, {{403},{  9}}, {{ 83},{  9}}, {{339},{  9}},
{{211},{  9}}, {{467},{  9}}, {{ 51},{  9}}, {{307},{  9}}, {{179},{  9}},
{{435},{  9}}, {{115},{  9}}, {{371},{  9}}, {{243},{  9}}, {{499},{  9}},
{{ 11},{  9}}, {{267},{  9}}, {{139},{  9}}, {{395},{  9}}, {{ 75},{  9}},
{{331},{  9}}, {{203},{  9}}, {{459},{  9}}, {{ 43},{  9}}, {{299},{  9}},
{{171},{  9}}, {{427},{  9}}, {{107},{  9}}, {{363},{  9}}, {{235},{  9}},
{{491},{  9}}, {{ 27},{  9}}, {{283},{  9}}, {{155},{  9}}, {{411},{  9}},
{{ 91},{  9}}, {{347},{  9}}, {{219},{  9}}, {{475},{  9}}, {{ 59},{  9}},
{{315},{  9}}, {{187},{  9}}, {{443},{  9}}, {{123},{  9}}, {{379},{  9}},
{{251},{  9}}, {{507},{  9}}, {{  7},{  9}}, {{263},{  9}}, {{135},{  9}},
{{391},{  9}}, {{ 71},{  9}}, {{327},{  9}}, {{199},{  9}}, {{455},{  9}},
{{ 39},{  9}}, {{295},{  9}}, {{167},{  9}}, {{423},{  9}}, {{103},{  9}},
{{359},{  9}}, {{231},{  9}}, {{487},{  9}}, {{ 23},{  9}}, {{279},{  9}},
{{151},{  9}}, {{407},{  9}}, {{ 87},{  9}}, {{343},{  9}}, {{215},{  9}},
{{471},{  9}}, {{ 55},{  9}}, {{311},{  9}}, {{183},{  9}}, {{439},{  9}},
{{119},{  9}}, {{375},{  9}}, {{247},{  9}}, {{503},{  9}}, {{ 15},{  9}},
{{271},{  9}}, {{143},{  9}}, {{399},{  9}}, {{ 79},{  9}}, {{335},{  9}},
{{207},{  9}}, {{463},{  9}}, {{ 47},{  9}}, {{303},{  9}}, {{175},{  9}},
{{431},{  9}}, {{111},{  9}}, {{367},{  9}}, {{239},{  9}}, {{495},{  9}},
{{ 31},{  9}}, {{287},{  9}}, {{159},{  9}}, {{415},{  9}}, {{ 95},{  9}},
{{351},{  9}}, {{223},{  9}}, {{479},{  9}}, {{ 63},{  9}}, {{319},{  9}},
{{191},{  9}}, {{447},{  9}}, {{127},{  9}}, {{383},{  9}}, {{255},{  9}},
{{511},{  9}}, {{  0},{  7}}, {{ 64},{  7}}, {{ 32},{  7}}, {{ 96},{  7}},
{{ 16},{  7}}, {{ 80},{  7}}, {{ 48},{  7}}, {{112},{  7}}, {{  8},{  7}},
{{ 72},{  7}}, {{ 40},{  7}}, {{104},{  7}}, {{ 24},{  7}}, {{ 88},{  7}},
{{ 56},{  7}}, {{120},{  7}}, {{  4},{  7}}, {{ 68},{  7}}, {{ 36},{  7}},
{{100},{  7}}, {{ 20},{  7}}, {{ 84},{  7}}, {{ 52},{  7}}, {{116},{  7}},
{{  3},{  8}}, {{131},{  8}}, {{ 67},{  8}}, {{195},{  8}}, {{ 35},{  8}},
{{163},{  8}}, {{ 99},{  8}}, {{227},{  8}}
};

/* Static (fixed) distance tree: D_CODES entries of {{code},{bits}}.
 * Every fixed distance code is 5 bits (RFC 1951, 3.2.6); codes are stored
 * bit-reversed for LSB-first emission. */
local const ct_data static_dtree[D_CODES] = {
{{ 0},{ 5}}, {{16},{ 5}}, {{ 8},{ 5}}, {{24},{ 5}}, {{ 4},{ 5}},
{{20},{ 5}}, {{12},{ 5}}, {{28},{ 5}}, {{ 2},{ 5}}, {{18},{ 5}},
{{10},{ 5}}, {{26},{ 5}}, {{ 6},{ 5}}, {{22},{ 5}}, {{14},{ 5}},
{{30},{ 5}}, {{ 1},{ 5}}, {{17},{ 5}}, {{ 9},{ 5}}, {{25},{ 5}},
{{ 5},{ 5}}, {{21},{ 5}}, {{13},{ 5}}, {{29},{ 5}}, {{ 3},{ 5}},
{{19},{ 5}}, {{11},{ 5}}, {{27},{ 5}}, {{ 7},{ 5}}, {{23},{ 5}}
};

/* Match-distance to distance-code lookup (DIST_CODE_LEN = 512 entries,
 * codes 0..29).  Per zlib's trees.c: the first 256 entries cover distances
 * 1..256 directly; the last 256 entries are indexed by dist>>7 for
 * distances 257..32768 (hence the second half starting at the 0,0,16,17
 * run below). */
const uch ZLIB_INTERNAL _dist_code[DIST_CODE_LEN] = {
 0,  1,  2,  3,  4,  4,  5,  5,  6,  6,  6,  6,  7,  7,  7,  7,  8,  8,  8,  8,
 8,  8,  8,  8,  9,  9,  9,  9,  9,  9,  9,  9, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,  0,  0, 16, 17,
18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22,
23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29
};

/* Match-length to length-code lookup: indexed by (length - MIN_MATCH),
 * i.e. 0..255, yielding length codes 0..28. */
const uch ZLIB_INTERNAL _length_code[MAX_MATCH-MIN_MATCH+1]= {
 0,  1,  2,  3,  4,  5,  6,  7,  8,  8,  9,  9, 10, 10, 11, 11, 12, 12, 12, 12,
13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16,
17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19,
19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22,
22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28
};

/* First normalized length (length - MIN_MATCH) covered by each length code.
 * The final 0 is for code 28 (the maximum-match special case). */
local const int base_length[LENGTH_CODES] = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
64, 80, 96, 112, 128, 160, 192, 224, 0
};

/* First normalized distance (distance - 1) covered by each distance code. */
local const int base_dist[D_CODES] = {
    0,     1,     2,     3,     4,     6,     8,    12,    16,    24,
   32,    48,    64,    96,   128,   192,   256,   384,   512,   768,
 1024,  1536,  2048,  3072,  4096,  6144,  8192, 12288, 16384, 24576
};
; NOTE(review): extraction artifact -- this region is an x86-64 NASM,
; table-driven AES implementation (Gladman-style: ff_rnd/fl_rnd forward and
; ii_rnd/il_rnd inverse round macros, plus aes_encrypt/aes_decrypt procs for
; both the SysV and Windows ABIs, with optional _SEH_ unwind frames) whose
; statements have been fused onto a handful of physical lines.  It will NOT
; assemble in this form (NASM separates statements by newline, not spaces),
; and the leading data macro is truncated before its first visible token
; ">)".  Preserved verbatim below; restore the original line structure from
; the upstream source before attempting to build -- TODO confirm against the
; pristine Aes_x64.asm.
>),%1(0xbc),%1(0xd3),%1(0x0a) db %1(0xf7),%1(0xe4),%1(0x58),%1(0x05),%1(0xb8),%1(0xb3),%1(0x45),%1(0x06) db %1(0xd0),%1(0x2c),%1(0x1e),%1(0x8f),%1(0xca),%1(0x3f),%1(0x0f),%1(0x02) db %1(0xc1),%1(0xaf),%1(0xbd),%1(0x03),%1(0x01),%1(0x13),%1(0x8a),%1(0x6b) db %1(0x3a),%1(0x91),%1(0x11),%1(0x41),%1(0x4f),%1(0x67),%1(0xdc),%1(0xea) db %1(0x97),%1(0xf2),%1(0xcf),%1(0xce),%1(0xf0),%1(0xb4),%1(0xe6),%1(0x73) db %1(0x96),%1(0xac),%1(0x74),%1(0x22),%1(0xe7),%1(0xad),%1(0x35),%1(0x85) db %1(0xe2),%1(0xf9),%1(0x37),%1(0xe8),%1(0x1c),%1(0x75),%1(0xdf),%1(0x6e) db %1(0x47),%1(0xf1),%1(0x1a),%1(0x71),%1(0x1d),%1(0x29),%1(0xc5),%1(0x89) db %1(0x6f),%1(0xb7),%1(0x62),%1(0x0e),%1(0xaa),%1(0x18),%1(0xbe),%1(0x1b) db %1(0xfc),%1(0x56),%1(0x3e),%1(0x4b),%1(0xc6),%1(0xd2),%1(0x79),%1(0x20) db %1(0x9a),%1(0xdb),%1(0xc0),%1(0xfe),%1(0x78),%1(0xcd),%1(0x5a),%1(0xf4) db %1(0x1f),%1(0xdd),%1(0xa8),%1(0x33),%1(0x88),%1(0x07),%1(0xc7),%1(0x31) db %1(0xb1),%1(0x12),%1(0x10),%1(0x59),%1(0x27),%1(0x80),%1(0xec),%1(0x5f) db %1(0x60),%1(0x51),%1(0x7f),%1(0xa9),%1(0x19),%1(0xb5),%1(0x4a),%1(0x0d) db %1(0x2d),%1(0xe5),%1(0x7a),%1(0x9f),%1(0x93),%1(0xc9),%1(0x9c),%1(0xef) db %1(0xa0),%1(0xe0),%1(0x3b),%1(0x4d),%1(0xae),%1(0x2a),%1(0xf5),%1(0xb0) db %1(0xc8),%1(0xeb),%1(0xbb),%1(0x3c),%1(0x83),%1(0x53),%1(0x99),%1(0x61) db %1(0x17),%1(0x2b),%1(0x04),%1(0x7e),%1(0xba),%1(0x77),%1(0xd6),%1(0x26) db %1(0xe1),%1(0x69),%1(0x14),%1(0x63),%1(0x55),%1(0x21),%1(0x0c),%1(0x7d) %endmacro %define u8(x) f2(x), x, x, f3(x), f2(x), x, x, f3(x) %define v8(x) fe(x), f9(x), fd(x), fb(x), fe(x), f9(x), fd(x), x %define w8(x) x, 0, 0, 0, x, 0, 0, 0 %define tptr rbp ; table pointer %define kptr r8 ; key schedule pointer %define fofs 128 ; adjust offset in key schedule to keep |disp| < 128 %define fk_ref(x,y) [kptr-16*x+fofs+4*y] %ifdef AES_REV_DKS %define rofs 128 %define ik_ref(x,y) [kptr-16*x+rofs+4*y] %else %define rofs -128 %define ik_ref(x,y) [kptr+16*x+rofs+4*y] %endif %define tab_0(x) [tptr+8*x] %define tab_1(x) 
[tptr+8*x+3] %define tab_2(x) [tptr+8*x+2] %define tab_3(x) [tptr+8*x+1] %define tab_f(x) byte [tptr+8*x+1] %define tab_i(x) byte [tptr+8*x+7] %define t_ref(x,r) tab_ %+ x(r) %macro ff_rnd 5 ; normal forward round mov %1d, fk_ref(%5,0) mov %2d, fk_ref(%5,1) mov %3d, fk_ref(%5,2) mov %4d, fk_ref(%5,3) movzx esi, al movzx edi, ah shr eax, 16 xor %1d, t_ref(0,rsi) xor %4d, t_ref(1,rdi) movzx esi, al movzx edi, ah xor %3d, t_ref(2,rsi) xor %2d, t_ref(3,rdi) movzx esi, bl movzx edi, bh shr ebx, 16 xor %2d, t_ref(0,rsi) xor %1d, t_ref(1,rdi) movzx esi, bl movzx edi, bh xor %4d, t_ref(2,rsi) xor %3d, t_ref(3,rdi) movzx esi, cl movzx edi, ch shr ecx, 16 xor %3d, t_ref(0,rsi) xor %2d, t_ref(1,rdi) movzx esi, cl movzx edi, ch xor %1d, t_ref(2,rsi) xor %4d, t_ref(3,rdi) movzx esi, dl movzx edi, dh shr edx, 16 xor %4d, t_ref(0,rsi) xor %3d, t_ref(1,rdi) movzx esi, dl movzx edi, dh xor %2d, t_ref(2,rsi) xor %1d, t_ref(3,rdi) mov eax,%1d mov ebx,%2d mov ecx,%3d mov edx,%4d %endmacro %ifdef LAST_ROUND_TABLES %macro fl_rnd 5 ; last forward round add tptr, 2048 mov %1d, fk_ref(%5,0) mov %2d, fk_ref(%5,1) mov %3d, fk_ref(%5,2) mov %4d, fk_ref(%5,3) movzx esi, al movzx edi, ah shr eax, 16 xor %1d, t_ref(0,rsi) xor %4d, t_ref(1,rdi) movzx esi, al movzx edi, ah xor %3d, t_ref(2,rsi) xor %2d, t_ref(3,rdi) movzx esi, bl movzx edi, bh shr ebx, 16 xor %2d, t_ref(0,rsi) xor %1d, t_ref(1,rdi) movzx esi, bl movzx edi, bh xor %4d, t_ref(2,rsi) xor %3d, t_ref(3,rdi) movzx esi, cl movzx edi, ch shr ecx, 16 xor %3d, t_ref(0,rsi) xor %2d, t_ref(1,rdi) movzx esi, cl movzx edi, ch xor %1d, t_ref(2,rsi) xor %4d, t_ref(3,rdi) movzx esi, dl movzx edi, dh shr edx, 16 xor %4d, t_ref(0,rsi) xor %3d, t_ref(1,rdi) movzx esi, dl movzx edi, dh xor %2d, t_ref(2,rsi) xor %1d, t_ref(3,rdi) %endmacro %else %macro fl_rnd 5 ; last forward round mov %1d, fk_ref(%5,0) mov %2d, fk_ref(%5,1) mov %3d, fk_ref(%5,2) mov %4d, fk_ref(%5,3) movzx esi, al movzx edi, ah shr eax, 16 movzx esi, t_ref(f,rsi) movzx edi, 
t_ref(f,rdi) xor %1d, esi rol edi, 8 xor %4d, edi movzx esi, al movzx edi, ah movzx esi, t_ref(f,rsi) movzx edi, t_ref(f,rdi) rol esi, 16 rol edi, 24 xor %3d, esi xor %2d, edi movzx esi, bl movzx edi, bh shr ebx, 16 movzx esi, t_ref(f,rsi) movzx edi, t_ref(f,rdi) xor %2d, esi rol edi, 8 xor %1d, edi movzx esi, bl movzx edi, bh movzx esi, t_ref(f,rsi) movzx edi, t_ref(f,rdi) rol esi, 16 rol edi, 24 xor %4d, esi xor %3d, edi movzx esi, cl movzx edi, ch movzx esi, t_ref(f,rsi) movzx edi, t_ref(f,rdi) shr ecx, 16 xor %3d, esi rol edi, 8 xor %2d, edi movzx esi, cl movzx edi, ch movzx esi, t_ref(f,rsi) movzx edi, t_ref(f,rdi) rol esi, 16 rol edi, 24 xor %1d, esi xor %4d, edi movzx esi, dl movzx edi, dh movzx esi, t_ref(f,rsi) movzx edi, t_ref(f,rdi) shr edx, 16 xor %4d, esi rol edi, 8 xor %3d, edi movzx esi, dl movzx edi, dh movzx esi, t_ref(f,rsi) movzx edi, t_ref(f,rdi) rol esi, 16 rol edi, 24 xor %2d, esi xor %1d, edi %endmacro %endif %macro ii_rnd 5 ; normal inverse round mov %1d, ik_ref(%5,0) mov %2d, ik_ref(%5,1) mov %3d, ik_ref(%5,2) mov %4d, ik_ref(%5,3) movzx esi, al movzx edi, ah shr eax, 16 xor %1d, t_ref(0,rsi) xor %2d, t_ref(1,rdi) movzx esi, al movzx edi, ah xor %3d, t_ref(2,rsi) xor %4d, t_ref(3,rdi) movzx esi, bl movzx edi, bh shr ebx, 16 xor %2d, t_ref(0,rsi) xor %3d, t_ref(1,rdi) movzx esi, bl movzx edi, bh xor %4d, t_ref(2,rsi) xor %1d, t_ref(3,rdi) movzx esi, cl movzx edi, ch shr ecx, 16 xor %3d, t_ref(0,rsi) xor %4d, t_ref(1,rdi) movzx esi, cl movzx edi, ch xor %1d, t_ref(2,rsi) xor %2d, t_ref(3,rdi) movzx esi, dl movzx edi, dh shr edx, 16 xor %4d, t_ref(0,rsi) xor %1d, t_ref(1,rdi) movzx esi, dl movzx edi, dh xor %2d, t_ref(2,rsi) xor %3d, t_ref(3,rdi) mov eax,%1d mov ebx,%2d mov ecx,%3d mov edx,%4d %endmacro %ifdef LAST_ROUND_TABLES %macro il_rnd 5 ; last inverse round add tptr, 2048 mov %1d, ik_ref(%5,0) mov %2d, ik_ref(%5,1) mov %3d, ik_ref(%5,2) mov %4d, ik_ref(%5,3) movzx esi, al movzx edi, ah shr eax, 16 xor %1d, t_ref(0,rsi) xor %2d, 
t_ref(1,rdi) movzx esi, al movzx edi, ah xor %3d, t_ref(2,rsi) xor %4d, t_ref(3,rdi) movzx esi, bl movzx edi, bh shr ebx, 16 xor %2d, t_ref(0,rsi) xor %3d, t_ref(1,rdi) movzx esi, bl movzx edi, bh xor %4d, t_ref(2,rsi) xor %1d, t_ref(3,rdi) movzx esi, cl movzx edi, ch shr ecx, 16 xor %3d, t_ref(0,rsi) xor %4d, t_ref(1,rdi) movzx esi, cl movzx edi, ch xor %1d, t_ref(2,rsi) xor %2d, t_ref(3,rdi) movzx esi, dl movzx edi, dh shr edx, 16 xor %4d, t_ref(0,rsi) xor %1d, t_ref(1,rdi) movzx esi, dl movzx edi, dh xor %2d, t_ref(2,rsi) xor %3d, t_ref(3,rdi) %endmacro %else %macro il_rnd 5 ; last inverse round mov %1d, ik_ref(%5,0) mov %2d, ik_ref(%5,1) mov %3d, ik_ref(%5,2) mov %4d, ik_ref(%5,3) movzx esi, al movzx edi, ah movzx esi, t_ref(i,rsi) movzx edi, t_ref(i,rdi) shr eax, 16 xor %1d, esi rol edi, 8 xor %2d, edi movzx esi, al movzx edi, ah movzx esi, t_ref(i,rsi) movzx edi, t_ref(i,rdi) rol esi, 16 rol edi, 24 xor %3d, esi xor %4d, edi movzx esi, bl movzx edi, bh movzx esi, t_ref(i,rsi) movzx edi, t_ref(i,rdi) shr ebx, 16 xor %2d, esi rol edi, 8 xor %3d, edi movzx esi, bl movzx edi, bh movzx esi, t_ref(i,rsi) movzx edi, t_ref(i,rdi) rol esi, 16 rol edi, 24 xor %4d, esi xor %1d, edi movzx esi, cl movzx edi, ch movzx esi, t_ref(i,rsi) movzx edi, t_ref(i,rdi) shr ecx, 16 xor %3d, esi rol edi, 8 xor %4d, edi movzx esi, cl movzx edi, ch movzx esi, t_ref(i,rsi) movzx edi, t_ref(i,rdi) rol esi, 16 rol edi, 24 xor %1d, esi xor %2d, edi movzx esi, dl movzx edi, dh movzx esi, t_ref(i,rsi) movzx edi, t_ref(i,rdi) shr edx, 16 xor %4d, esi rol edi, 8 xor %1d, edi movzx esi, dl movzx edi, dh movzx esi, t_ref(i,rsi) movzx edi, t_ref(i,rdi) rol esi, 16 rol edi, 24 xor %2d, esi xor %3d, edi %endmacro %endif %ifdef ENCRYPTION global aes_encrypt %ifdef DLL_EXPORT export aes_encrypt %endif section .data align=64 align 64 enc_tab: enc_vals u8 %ifdef LAST_ROUND_TABLES enc_vals w8 %endif section .text align=16 align 16 %ifdef _SEH_ proc_frame aes_encrypt alloc_stack 7*8 ; 7 to align stack to 
16 bytes save_reg rsi,4*8 save_reg rdi,5*8 save_reg rbx,1*8 save_reg rbp,2*8 save_reg r12,3*8 end_prologue mov rdi, rcx ; input pointer mov [rsp+0*8], rdx ; output pointer %else aes_encrypt: %ifdef __GNUC__ sub rsp, 4*8 ; gnu/linux binary interface mov [rsp+0*8], rsi ; output pointer mov r8, rdx ; context %else sub rsp, 6*8 ; windows binary interface mov [rsp+4*8], rsi mov [rsp+5*8], rdi mov rdi, rcx ; input pointer mov [rsp+0*8], rdx ; output pointer %endif mov [rsp+1*8], rbx ; input pointer in rdi mov [rsp+2*8], rbp ; output pointer in [rsp] mov [rsp+3*8], r12 ; context in r8 %endif movzx esi, byte [kptr+4*KS_LENGTH] lea tptr, [rel enc_tab] sub kptr, fofs mov eax, [rdi+0*4] mov ebx, [rdi+1*4] mov ecx, [rdi+2*4] mov edx, [rdi+3*4] xor eax, [kptr+fofs] xor ebx, [kptr+fofs+4] xor ecx, [kptr+fofs+8] xor edx, [kptr+fofs+12] lea kptr,[kptr+rsi] cmp esi, 10*16 je .3 cmp esi, 12*16 je .2 cmp esi, 14*16 je .1 mov rax, -1 jmp .4 .1: ff_rnd r9, r10, r11, r12, 13 ff_rnd r9, r10, r11, r12, 12 .2: ff_rnd r9, r10, r11, r12, 11 ff_rnd r9, r10, r11, r12, 10 .3: ff_rnd r9, r10, r11, r12, 9 ff_rnd r9, r10, r11, r12, 8 ff_rnd r9, r10, r11, r12, 7 ff_rnd r9, r10, r11, r12, 6 ff_rnd r9, r10, r11, r12, 5 ff_rnd r9, r10, r11, r12, 4 ff_rnd r9, r10, r11, r12, 3 ff_rnd r9, r10, r11, r12, 2 ff_rnd r9, r10, r11, r12, 1 fl_rnd r9, r10, r11, r12, 0 mov rbx, [rsp] mov [rbx], r9d mov [rbx+4], r10d mov [rbx+8], r11d mov [rbx+12], r12d xor rax, rax .4: mov rbx, [rsp+1*8] mov rbp, [rsp+2*8] mov r12, [rsp+3*8] %ifdef __GNUC__ add rsp, 4*8 ret %else mov rsi, [rsp+4*8] mov rdi, [rsp+5*8] %ifdef _SEH_ add rsp, 7*8 ret endproc_frame %else add rsp, 6*8 ret %endif %endif %endif %ifdef DECRYPTION global aes_decrypt %ifdef DLL_EXPORT export aes_decrypt %endif section .data align 64 dec_tab: dec_vals v8 %ifdef LAST_ROUND_TABLES dec_vals w8 %endif section .text align 16 %ifdef _SEH_ proc_frame aes_decrypt alloc_stack 7*8 ; 7 to align stack to 16 bytes save_reg rsi,4*8 save_reg rdi,5*8 save_reg rbx,1*8 
save_reg rbp,2*8 save_reg r12,3*8 end_prologue mov rdi, rcx ; input pointer mov [rsp+0*8], rdx ; output pointer %else aes_decrypt: %ifdef __GNUC__ sub rsp, 4*8 ; gnu/linux binary interface mov [rsp+0*8], rsi ; output pointer mov r8, rdx ; context %else sub rsp, 6*8 ; windows binary interface mov [rsp+4*8], rsi mov [rsp+5*8], rdi mov rdi, rcx ; input pointer mov [rsp+0*8], rdx ; output pointer %endif mov [rsp+1*8], rbx ; input pointer in rdi mov [rsp+2*8], rbp ; output pointer in [rsp] mov [rsp+3*8], r12 ; context in r8 %endif movzx esi,byte[kptr+4*KS_LENGTH] lea tptr, [rel dec_tab] sub kptr, rofs mov eax, [rdi+0*4] mov ebx, [rdi+1*4] mov ecx, [rdi+2*4] mov edx, [rdi+3*4] %ifdef AES_REV_DKS mov rdi, kptr lea kptr,[kptr+rsi] %else lea rdi,[kptr+rsi] %endif xor eax, [rdi+rofs] xor ebx, [rdi+rofs+4] xor ecx, [rdi+rofs+8] xor edx, [rdi+rofs+12] cmp esi, 10*16 je .3 cmp esi, 12*16 je .2 cmp esi, 14*16 je .1 mov rax, -1 jmp .4 .1: ii_rnd r9, r10, r11, r12, 13 ii_rnd r9, r10, r11, r12, 12 .2: ii_rnd r9, r10, r11, r12, 11 ii_rnd r9, r10, r11, r12, 10 .3: ii_rnd r9, r10, r11, r12, 9 ii_rnd r9, r10, r11, r12, 8 ii_rnd r9, r10, r11, r12, 7 ii_rnd r9, r10, r11, r12, 6 ii_rnd r9, r10, r11, r12, 5 ii_rnd r9, r10, r11, r12, 4 ii_rnd r9, r10, r11, r12, 3 ii_rnd r9, r10, r11, r12, 2 ii_rnd r9, r10, r11, r12, 1 il_rnd r9, r10, r11, r12, 0 mov rbx, [rsp] mov [rbx], r9d mov [rbx+4], r10d mov [rbx+8], r11d mov [rbx+12], r12d xor rax, rax .4: mov rbx, [rsp+1*8] mov rbp, [rsp+2*8] mov r12, [rsp+3*8] %ifdef __GNUC__ add rsp, 4*8 ret %else mov rsi, [rsp+4*8] mov rdi, [rsp+5*8] %ifdef _SEH_ add rsp, 7*8 ret endproc_frame %else add rsp, 6*8 ret %endif %endif %endif %ifidn __OUTPUT_FORMAT__,elf section .note.GNU-stack noalloc noexec nowrite progbits %endif %ifidn __OUTPUT_FORMAT__,elf32 section .note.GNU-stack noalloc noexec nowrite progbits %endif %ifidn __OUTPUT_FORMAT__,elf64 section .note.GNU-stack noalloc noexec nowrite progbits %endif