VeraCrypt
path: root/src/Common/zlib/gzguts.h
blob: 990a4d2514933709883a7d949ed52146675fe2c1
/* gzguts.h -- zlib internal header definitions for gz* operations
 * Copyright (C) 2004, 2005, 2010, 2011, 2012, 2013, 2016 Mark Adler
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

#ifdef _LARGEFILE64_SOURCE
#  ifndef _LARGEFILE_SOURCE
#    define _LARGEFILE_SOURCE 1
#  endif
#  ifdef _FILE_OFFSET_BITS
#    undef _FILE_OFFSET_BITS
#  endif
#endif

#ifdef HAVE_HIDDEN
#  define ZLIB_INTERNAL __attribute__((visibility ("hidden")))
#else
#  define ZLIB_INTERNAL
#endif

#include <stdio.h>
#include "zlib.h"
#ifdef STDC
#  include <string.h>
#  include <stdlib.h>
#  include <limits.h>
#endif

#ifndef _POSIX_SOURCE
#  define _POSIX_SOURCE
#endif
#include <fcntl.h>

#ifdef _WIN32
#  include <stddef.h>
#endif

#if defined(__TURBOC__) || defined(_MSC_VER) || defined(_WIN32)
#  include <io.h>
#endif

#if defined(_WIN32) || defined(__CYGWIN__)
#  define WIDECHAR
#endif

#ifdef WINAPI_FAMILY
#  define open _open
#  define read _read
#  define write _write
#  define close _close
#endif

#ifdef NO_DEFLATE       /* for compatibility with old definition */
#  define NO_GZCOMPRESS
#endif

#if defined(STDC99) || (defined(__TURBOC__) && __TURBOC__ >= 0x550)
#  ifndef HAVE_VSNPRINTF
#    define HAVE_VSNPRINTF
#  endif
#endif

#if defined(__CYGWIN__)
#  ifndef HAVE_VSNPRINTF
#    define HAVE_VSNPRINTF
#  endif
#endif

#if defined(MSDOS) && defined(__BORLANDC__) && (BORLANDC > 0x410)
#  ifndef HAVE_VSNPRINTF
#    define HAVE_VSNPRINTF
#  endif
#endif

#ifndef HAVE_VSNPRINTF
#  ifdef MSDOS
/* vsnprintf may exist on some MS-DOS compilers (DJGPP?),
   but for now we just assume it doesn't. */
#    define NO_vsnprintf
#  endif
#  ifdef __TURBOC__
#    define NO_vsnprintf
#  endif
#  ifdef WIN32
/* In Win32, vsnprintf is available as the "non-ANSI" _vsnprintf. */
#    if !defined(vsnprintf) && !defined(NO_vsnprintf)
#      if !defined(_MSC_VER) || ( defined(_MSC_VER) && _MSC_VER < 1500 )
#         define vsnprintf _vsnprintf
#      endif
#    endif
#  endif
#  ifdef __SASC
#    define NO_vsnprintf
#  endif
#  ifdef VMS
#    define NO_vsnprintf
#  endif
#  ifdef __OS400__
#    define NO_vsnprintf
#  endif
#  ifdef __MVS__
#    define NO_vsnprintf
#  endif
#endif

/* unlike snprintf (which is required in C99), _snprintf does not guarantee
   null termination of the result -- however this is only used in gzlib.c where
   the result is assured to fit in the space provided */
#if defined(_MSC_VER) && _MSC_VER < 1900
#  define snprintf _snprintf
#endif
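
/* Illustration (not part of zlib): with char buf[4],
   snprintf(buf, 4, "%s", "abcdef") stores "abc" plus a terminating null,
   while _snprintf stores "abcd" and no terminator at all -- so the mapping
   above is safe only because gzlib.c never lets the result overflow. */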

#ifndef local
#  define local static
#endif
/* since "static" is used to mean two completely different things in C, we
   define "local" for the non-static meaning of "static", for readability
   (compile with -Dlocal if your debugger can't find static symbols) */

/* gz* functions always use library allocation functions */
#ifndef STDC
  extern voidp  malloc OF((uInt size));
  extern void   free   OF((voidpf ptr));
#endif

/* get errno and strerror definition */
#if defined UNDER_CE
#  include <windows.h>
#  define zstrerror() gz_strwinerror((DWORD)GetLastError())
#else
#  ifndef NO_STRERROR
#    include <errno.h>
#    define zstrerror() strerror(errno)
#  else
#    define zstrerror() "stdio error (consult errno)"
#  endif
#endif

/* provide prototypes for these when building zlib without LFS */
#if !defined(_LARGEFILE64_SOURCE) || _LFS64_LARGEFILE-0 == 0
    ZEXTERN gzFile ZEXPORT gzopen64 OF((const char *, const char *));
    ZEXTERN z_off64_t ZEXPORT gzseek64 OF((gzFile, z_off64_t, int));
    ZEXTERN z_off64_t ZEXPORT gztell64 OF((gzFile));
    ZEXTERN z_off64_t ZEXPORT gzoffset64 OF((gzFile));
#endif

/* default memLevel */
#if MAX_MEM_LEVEL >= 8
#  define DEF_MEM_LEVEL 8
#else
#  define DEF_MEM_LEVEL  MAX_MEM_LEVEL
#endif

/* default i/o buffer size -- double this for output when reading (this and
   twice this must be able to fit in an unsigned type) */
#define GZBUFSIZE 8192
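
/* For example, with the default GZBUFSIZE of 8192, reading allocates a
   16384-byte output buffer and writing a 16384-byte input buffer (see the
   in/out members below), so both 8192 and 16384 must fit in an unsigned. */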

/* gzip modes, also provide a little integrity check on the passed structure */
#define GZ_NONE 0
#define GZ_READ 7247
#define GZ_WRITE 31153
#define GZ_APPEND 1     /* mode set to GZ_WRITE after the file is opened */
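
/* A sketch of the integrity-check idea: these deliberately unusual values
   make it unlikely that a stale or garbage pointer passed as a gzFile goes
   unnoticed, since the gz* functions (in gzlib.c, gzread.c and gzwrite.c)
   reject any state whose mode field is not exactly GZ_READ or GZ_WRITE. */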

/* values for gz_state how */
#define LOOK 0      /* look for a gzip header */
#define COPY 1      /* copy input directly */
#define GZIP 2      /* decompress a gzip stream */

/* internal gzip file state data structure */
typedef struct {
        /* exposed contents for gzgetc() macro */
    struct gzFile_s x;      /* "x" for exposed */
                            /* x.have: number of bytes available at x.next */
                            /* x.next: next output data to deliver or write */
                            /* x.pos: current position in uncompressed data */
        /* used for both reading and writing */
    int mode;               /* see gzip modes above */
    int fd;                 /* file descriptor */
    char *path;             /* path or fd for error messages */
    unsigned size;          /* buffer size, zero if not allocated yet */
    unsigned want;          /* requested buffer size, default is GZBUFSIZE */
    unsigned char *in;      /* input buffer (double-sized when writing) */
    unsigned char *out;     /* output buffer (double-sized when reading) */
    int direct;             /* 0 if processing gzip, 1 if transparent */
        /* just for reading */
    int how;                /* 0: get header, 1: copy, 2: decompress */
    z_off64_t start;        /* where the gzip data started, for rewinding */
    int eof;                /* true if end of input file reached */
    int past;               /* true if read requested past end */
        /* just for writing */
    int level;              /* compression level */
    int strategy;           /* compression strategy */
        /* seek request */
    z_off64_t skip;         /* amount to skip (already rewound if backwards) */
    int seek;               /* true if seek request pending */
        /* error information */
    int err;                /* error code */
    char *msg;              /* error message */
        /* zlib inflate or deflate stream */
    z_stream strm;          /* stream structure in-place (not a pointer) */
} gz_state;
typedef gz_state FAR *gz_statep;

/* shared functions */
void ZLIB_INTERNAL gz_error OF((gz_statep, int, const char *));
#if defined UNDER_CE
char ZLIB_INTERNAL *gz_strwinerror OF((DWORD error));
#endif

/* GT_OFF(x), where x is an unsigned value, is true if x > maximum z_off64_t
   value -- needed when comparing unsigned to z_off64_t, which is signed
   (possible z_off64_t types off_t, off64_t, and long are all signed) */
#ifdef INT_MAX
#  define GT_OFF(x) (sizeof(int) == sizeof(z_off64_t) && (x) > INT_MAX)
#else
unsigned ZLIB_INTERNAL gz_intmax OF((void));
#  define GT_OFF(x) (sizeof(int) == sizeof(z_off64_t) && (x) > gz_intmax())
#endif
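
/* Worked example of GT_OFF (illustrative): on a build where z_off64_t is a
   32-bit long, sizeof(int) == sizeof(z_off64_t), so an unsigned value such
   as 0x80000000u is greater than INT_MAX and GT_OFF reports true -- the
   value would not survive conversion to the signed z_off64_t. When
   z_off64_t is 64 bits wide, the size test fails and GT_OFF is always 0. */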
0x4ED8AA4A(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (6*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (5*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (4*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (2*4)(%esp); movl ((((54 -15)&0xF)+8)*4)(%esp), %eax; movl ((((54 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((54 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((54 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((54)&0xF)+8)*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (1*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (0*4)(%esp), %ebx; movl (7*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x5B9CCA4F(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (5*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (4*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (3*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (1*4)(%esp); movl ((((55 -15)&0xF)+8)*4)(%esp), %eax; movl ((((55 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((55 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((55 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((55)&0xF)+8)*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (0*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (7*4)(%esp), %ebx; movl (6*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x682E6FF3(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (4*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (3*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (2*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (0*4)(%esp); movl ((((56 -15)&0xF)+8)*4)(%esp), %eax; movl ((((56 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((56 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((56 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((56)&0xF)+8)*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (7*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (6*4)(%esp), %ebx; movl (5*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 
0x748F82EE(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (3*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (2*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (1*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (7*4)(%esp); movl ((((57 -15)&0xF)+8)*4)(%esp), %eax; movl ((((57 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((57 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((57 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((57)&0xF)+8)*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (6*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (5*4)(%esp), %ebx; movl (4*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x78A5636F(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (2*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (1*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (0*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (6*4)(%esp); movl ((((58 -15)&0xF)+8)*4)(%esp), %eax; movl ((((58 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((58 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((58 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((58)&0xF)+8)*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (5*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (4*4)(%esp), %ebx; movl (3*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x84C87814(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (1*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (0*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (7*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (5*4)(%esp); movl ((((59 -15)&0xF)+8)*4)(%esp), %eax; movl ((((59 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((59 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((59 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((59)&0xF)+8)*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (4*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (3*4)(%esp), %ebx; movl (2*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 
0x8CC70208(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (0*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (7*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (6*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (4*4)(%esp); movl ((((60 -15)&0xF)+8)*4)(%esp), %eax; movl ((((60 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((60 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((60 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((60)&0xF)+8)*4)(%esp); movl (0*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (3*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (2*4)(%esp), %ebx; movl (1*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0x90BEFFFA(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (7*4)(%esp); movl (4*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (6*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (5*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (3*4)(%esp); movl ((((61 -15)&0xF)+8)*4)(%esp), %eax; movl ((((61 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((61 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((61 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((61)&0xF)+8)*4)(%esp); movl (7*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (2*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (1*4)(%esp), %ebx; movl (0*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xA4506CEB(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (6*4)(%esp); movl (3*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (5*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (4*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (2*4)(%esp); movl ((((62 -15)&0xF)+8)*4)(%esp), %eax; movl ((((62 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((62 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((62 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((62)&0xF)+8)*4)(%esp); movl (6*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (1*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (0*4)(%esp), %ebx; movl (7*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 
0xBEF9A3F7(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (5*4)(%esp); movl (2*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (4*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (3*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (1*4)(%esp); movl ((((63 -15)&0xF)+8)*4)(%esp), %eax; movl ((((63 -16)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ebx; addl ((((63 - 7)&0xF)+8)*4)(%esp), %ebp; movl %eax, %ecx; rorl $18, %ebx; shrl $3, %ecx; rorl $7, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl ((((63 - 2)&0xF)+8)*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; rorl $19, %ebx; shrl $10, %ecx; rorl $17, %eax; xorl %ecx, %ebx; xorl %ebx, %eax; addl %eax, %ebp; movl %ebp, ((((63)&0xF)+8)*4)(%esp); movl (5*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $11, %eax; rorl $25, %ebx; rorl $6, %ecx; movl (0*4)(%esp), %esi; xorl %ebx, %eax; xorl %eax, %ecx; addl %ebp, %esi; movl (7*4)(%esp), %ebx; movl (6*4)(%esp), %eax; xorl %ebx, %eax; andl %edx, %eax; xorl %ebx, %eax; leal 0xC67178F2(%ecx,%eax), %ecx; addl %ecx, %esi; addl %esi, (4*4)(%esp); movl (1*4)(%esp), %eax; movl %eax, %ebx; movl %eax, %ecx; movl %eax, %edx; rorl $13, %eax; rorl $22, %ebx; rorl $2, %ecx; xorl %ebx, %eax; xorl %eax, %ecx; movl (3*4)(%esp), %eax; addl %ecx, %esi; movl %eax, %ecx; movl (2*4)(%esp), %ebx; orl %ebx, %ecx; andl %ebx, %eax; andl %edx, %ecx; orl %eax, %ecx; addl %ecx, %esi; movl %esi, (0*4)(%esp);
/* Fold the eight working variables back into the caller's hash state; the
   state pointer (the function's first argument) is reloaded from 116(%esp),
   i.e. 4 bytes above the 112-byte stack frame. */
movl 116(%esp), %esi
movl 0(%esp), %eax; addl %eax, 0(%esi)
movl 4(%esp), %eax; addl %eax, 4(%esi)
movl 8(%esp), %eax; addl %eax, 8(%esi)
movl 12(%esp), %eax; addl %eax, 12(%esi)
movl 16(%esp), %eax; addl %eax, 16(%esi)
movl 20(%esp), %eax; addl %eax, 20(%esi)
movl 24(%esp), %eax; addl %eax, 24(%esi)
movl 28(%esp), %eax; addl %eax, 28(%esi)
/* Restore the callee-saved registers and release the stack frame. */
movl 96(%esp), %ebx
movl 100(%esp), %esi
movl 104(%esp), %edi
movl 108(%esp), %ebp
addl $112, %esp
/* Under stdcall the callee pops its two 4-byte arguments. */
.ifdef MS_STDCALL
ret $8
.else
retl
.endif
.ifndef __YASM__
#if defined(__linux__) && defined(__ELF__)
.section .note.GNU-stack,"",%progbits
#endif
.endif
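/*
 * Reference notes (comment only; not assembled).
 *
 * The unrolled code above is the tail (the end of round 41 and rounds 42-63,
 * 0-indexed) of a fully unrolled SHA-256 compression function.  The eight
 * working variables a..h live in 0*4(%esp)..7*4(%esp), the message schedule W
 * is kept as a 16-word circular buffer at 8*4(%esp)..23*4(%esp) (hence the
 * (((t - n) & 0xF) + 8)*4 addressing), and the 32-bit immediates in the leal
 * instructions are the FIPS 180-4 round constants K[41] = 0xA81A664B through
 * K[63] = 0xC67178F2.
 *
 * The C sketch below is illustrative only and is not part of this file's
 * build; the names rotr, K, w, and sha256_round_sketch are invented for the
 * example.  It shows what one unrolled round computes, using the same Ch and
 * Maj formulations as the assembly.  Where the C version shuffles a..h at the
 * end of each round, the assembly instead rotates which stack slot plays
 * which role, which is why the (N*4)(%esp) indices step down by one from
 * round to round.
 *
 *   #include <stdint.h>
 *
 *   static uint32_t rotr(uint32_t x, int n) { return (x >> n) | (x << (32 - n)); }
 *
 *   // K[0..63]: the SHA-256 round constants (embedded above as leal immediates).
 *   extern const uint32_t K[64];
 *
 *   // One round t (16 <= t <= 63) over the state s[0..7] = {a..h} and the
 *   // 16-word circular schedule buffer w.
 *   static void sha256_round_sketch(uint32_t s[8], uint32_t w[16], int t)
 *   {
 *       uint32_t a = s[0], b = s[1], c = s[2], d = s[3];
 *       uint32_t e = s[4], f = s[5], g = s[6], h = s[7];
 *
 *       // Message-schedule extension, in place: W[t] overwrites W[t-16].
 *       uint32_t x  = w[(t - 15) & 15], y = w[(t - 2) & 15];
 *       uint32_t s0 = rotr(x, 7) ^ rotr(x, 18) ^ (x >> 3);
 *       uint32_t s1 = rotr(y, 17) ^ rotr(y, 19) ^ (y >> 10);
 *       w[t & 15] += w[(t - 7) & 15] + s0 + s1;
 *
 *       // Round function: Ch = ((f ^ g) & e) ^ g, Maj = (a & b) | ((a | b) & c),
 *       // exactly as the xorl/andl and orl/andl sequences above compute them.
 *       uint32_t S1 = rotr(e, 6) ^ rotr(e, 11) ^ rotr(e, 25);
 *       uint32_t t1 = h + S1 + (((f ^ g) & e) ^ g) + K[t] + w[t & 15];
 *       uint32_t S0 = rotr(a, 2) ^ rotr(a, 13) ^ rotr(a, 22);
 *       uint32_t t2 = S0 + ((a & b) | ((a | b) & c));
 *
 *       s[7] = g; s[6] = f; s[5] = e; s[4] = d + t1;
 *       s[3] = c; s[2] = b; s[1] = a; s[0] = t1 + t2;
 *   }
 */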