/* Source: third_party/openssl/crypto/evp/e_aes.c (revision e1051a39) */
1/*
2 * Copyright 2001-2021 The OpenSSL Project Authors. All Rights Reserved.
3 *
4 * Licensed under the Apache License 2.0 (the "License").  You may not use
5 * this file except in compliance with the License.  You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
8 */
9
10/*
11 * This file uses the low level AES functions (which are deprecated for
12 * non-internal use) in order to implement the EVP AES ciphers.
13 */
14#include "internal/deprecated.h"
15
16#include <string.h>
17#include <assert.h>
18#include <openssl/opensslconf.h>
19#include <openssl/crypto.h>
20#include <openssl/evp.h>
21#include <openssl/err.h>
22#include <openssl/aes.h>
23#include <openssl/rand.h>
24#include <openssl/cmac.h>
25#include "crypto/evp.h"
26#include "internal/cryptlib.h"
27#include "crypto/modes.h"
28#include "crypto/siv.h"
29#include "crypto/aes_platform.h"
30#include "evp_local.h"
31
/*
 * Cipher data for the plain (non-AEAD) AES modes: the expanded key
 * schedule, the single-block primitive and, where one is installed by the
 * init function, an accelerated bulk routine for CBC or CTR.
 */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks;                       /* AES key schedule */
    block128_f block;           /* single-block encrypt/decrypt primitive */
    union {
        cbc128_f cbc;           /* accelerated CBC routine, or NULL */
        ctr128_f ctr;           /* accelerated CTR routine (CTR mode only) */
    } stream;
} EVP_AES_KEY;
43
/* Cipher data for AES-GCM. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;         /* low-level GCM state */
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;                 /* expected/produced tag length */
    int iv_gen;                 /* It is OK to generate IVs */
    int iv_gen_rand;            /* No IV was specified, so generate a rand IV */
    int tls_aad_len;            /* TLS AAD length */
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
    ctr128_f ctr;               /* accelerated CTR routine, if any */
} EVP_AES_GCM_CTX;
61
/*
 * Cipher data for AES-XTS: two half-length key schedules (data key and
 * tweak key) plus an optional one-shot assembly routine covering the
 * whole XTS operation.
 */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;
73
/*
 * An XTS key is two half-keys back to back; identical halves void the XTS
 * security guarantees.  Encryption always rejects such keys (see the
 * duplicated-key checks in the XTS init functions below); decryption is
 * additionally rejected only inside the FIPS module.
 */
#ifdef FIPS_MODULE
static const int allow_insecure_decrypt = 0;
#else
static const int allow_insecure_decrypt = 1;
#endif
79
/* Cipher data for AES-CCM. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;         /* low-level CCM state */
    ccm128_f str;               /* accelerated CCM routine, or NULL */
} EVP_AES_CCM_CTX;
94
#ifndef OPENSSL_NO_OCB
/*
 * Cipher data for AES-OCB.  OCB needs both an encrypt and a decrypt key
 * schedule, and buffers partial data/AAD blocks between update calls.
 */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;
    int aad_buf_len;
    int ivlen;                  /* IV length */
    int taglen;
} EVP_AES_OCB_CTX;
#endif
118
/*
 * Largest chunk processed per iteration where a byte count must be
 * converted to a bit count (CFB1): 1/16th of the size_t range, so the
 * multiplication by 8 cannot overflow.
 */
#define MAXBITCHUNK     ((size_t)1<<(sizeof(size_t)*8-4))
120
/*
 * Increment the 64-bit big-endian counter held in the last eight bytes
 * of the buffer by one, wrapping to zero on overflow.
 */
static void ctr64_inc(unsigned char *counter)
{
    int i;

    /* Walk from the least significant byte, propagating the carry. */
    for (i = 7; i >= 0; i--) {
        if (++counter[i] != 0)
            break;
    }
}
136
#if defined(AESNI_CAPABLE)
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
/*
 * AES_GCM_ASM2 would detect the AES-NI + AVX GHASH combination, but it is
 * immediately #undef'd, so it expands nowhere in this file; the original
 * comment records this as a minor size optimisation.
 */
#  define AES_GCM_ASM2(gctx)      (gctx->gcm.block==(block128_f)aesni_encrypt && \
                                 gctx->gcm.ghash==gcm_ghash_avx)
#  undef AES_GCM_ASM2          /* minor size optimization */
# endif
143
/*
 * Set up an EVP_AES_KEY for the AES-NI implementation.  ECB/CBC
 * decryption installs a decrypt key schedule; every other case installs
 * an encrypt schedule.  The accelerated CBC or CTR bulk routine is
 * selected where the mode has one, otherwise stream is NULLed.
 * Returns 1 on success, 0 (raising EVP_R_AES_KEY_SETUP_FAILED) when the
 * low-level key expansion reports an error.
 */
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_get_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key,
                                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        /* aesni_cbc_encrypt handles both directions via its enc flag */
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key,
                                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
179
/* CBC bulk cipher: a single call into the AES-NI CBC assembly. */
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      ctx->iv, EVP_CIPHER_CTX_is_encrypting(ctx));

    return 1;
}
188
/*
 * ECB bulk cipher.  Inputs shorter than one block are ignored (nothing
 * is written); otherwise the buffer is handed to the AES-NI ECB
 * assembly.  Always returns 1.
 */
static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_is_encrypting(ctx));

    return 1;
}
202
/*
 * OFB/CFB/CTR need no AES-NI-specific cipher bodies: the generic aes_*
 * handlers work through the block/stream function pointers that
 * aesni_init_key installed in EVP_AES_KEY, so the aesni_* names are
 * simply aliased to them.
 */
# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
222
/*
 * Initialise an AES-NI GCM context.  key and/or iv may each be NULL to
 * defer that part of the setup; with both NULL the call is a no-op.
 * A key supplied without an IV reuses a previously saved IV, if any.
 * Always returns 1.
 */
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                              &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

/* The GCM bulk cipher is shared with the software implementation. */
# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
259
/*
 * Initialise an AES-NI XTS context.  The supplied key holds both XTS
 * half-keys back to back.  Identical half-keys are rejected with
 * EVP_R_XTS_DUPLICATED_KEYS — always for encryption, and for decryption
 * too unless allow_insecure_decrypt is set.  Returns 1 on success, 0 on
 * rejection.
 */
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if ((!allow_insecure_decrypt || enc)
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        /* The second half is the tweak key; always an encrypt schedule. */
        aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

/* The XTS bulk cipher is shared with the software implementation. */
# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
313
/*
 * Initialise an AES-NI CCM context.  The nonce stored in ctx->iv is
 * 15 - L bytes long, per RFC 3610.  Always returns 1.
 */
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        /* Direction-specific CCM64 assembly for the bulk work. */
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

/* The CCM bulk cipher is shared with the software implementation. */
# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
339
# ifndef OPENSSL_NO_OCB
/*
 * Initialise an AES-NI OCB context.  Both the encrypt and the decrypt
 * key schedule are expanded because OCB decryption needs both.
 * Returns 1 on success, 0 if the low-level OCB setup fails.
 */
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                  &octx->ksenc.ks);
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                  &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aesni_encrypt,
                                    (block128_f) aesni_decrypt,
                                    enc ? aesni_ocb_encrypt
                                        : aesni_ocb_decrypt))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

/* The OCB bulk cipher is shared with the software implementation. */
#  define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */
395
/*
 * Emit the cipher tables for one generic AES mode: an AES-NI-backed
 * EVP_CIPHER, its software twin, and the EVP_aes_<keylen>_<mode>()
 * getter that selects between them at run time via AESNI_CAPABLE.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aesni_init_key,                 \
        aesni_##mode##_cipher,          \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen,                 \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                 \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
418
/*
 * As BLOCK_CIPHER_generic, but for modes with their own init/cleanup/ctrl
 * handlers and context types (GCM, XTS, CCM, OCB, SIV).  XTS and SIV take
 * a double-length key, hence the key-length expression.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aesni_##mode##_init_key,        \
        aesni_##mode##_cipher,          \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
444
#elif defined(SPARC_AES_CAPABLE)

/*
 * Set up an EVP_AES_KEY using the SPARC T4 AES instructions.  ECB/CBC
 * decryption installs a decrypt schedule, every other case an encrypt
 * schedule; the CBC/CTR bulk routine is chosen per key size.  Returns 1
 * on success, 0 (raising EVP_R_AES_KEY_SETUP_FAILED) for key sizes other
 * than 128/192/256 bits.
 */
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_get_mode(ctx);
    bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;
        }
    } else {
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;
        }
    }

    if (ret < 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
517
/*
 * The basic block modes reuse the generic aes_* cipher bodies, which
 * operate through the function pointers installed by aes_t4_init_key.
 */
# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
545
/*
 * Initialise a SPARC T4 GCM context.  key and/or iv may each be NULL to
 * defer that part of the setup; with both NULL the call is a no-op.
 * Returns 1 on success, 0 for key sizes other than 128/192/256 bits.
 */
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        /* Pick the per-key-size CTR32 bulk routine. */
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

/* The GCM bulk cipher is shared with the software implementation. */
# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
595
/*
 * Initialise a SPARC T4 XTS context.  The key holds both half-keys back
 * to back; identical halves are rejected with EVP_R_XTS_DUPLICATED_KEYS
 * (always when encrypting, and when decrypting unless
 * allow_insecure_decrypt).  Only 128- and 256-bit half-keys have T4
 * assembly; other sizes return 0.  Returns 1 on success.
 */
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if ((!allow_insecure_decrypt || enc)
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        xctx->stream = NULL;
        /* key_len is two AES keys */
        if (enc) {
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_encrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_encrypt;
                break;
            default:
                return 0;
            }
        } else {
            aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_decrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_decrypt;
                break;
            default:
                return 0;
            }
        }

        /* The second half is the tweak key; always an encrypt schedule. */
        aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

/* The XTS bulk cipher is shared with the software implementation. */
# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
668
/*
 * Initialise a SPARC T4 CCM context.  The nonce stored in ctx->iv is
 * 15 - L bytes long, per RFC 3610.  No T4 bulk CCM routine exists, so
 * str is NULL and the generic CCM path does the work.  Always returns 1.
 */
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

/* The CCM bulk cipher is shared with the software implementation. */
# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
693
# ifndef OPENSSL_NO_OCB
/*
 * Initialise a SPARC T4 OCB context.  Both encrypt and decrypt key
 * schedules are expanded because OCB decryption needs both; no T4 bulk
 * OCB routine exists (NULL passed to CRYPTO_ocb128_init).  Returns 1 on
 * success, 0 if the low-level OCB setup fails.
 */
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                   &octx->ksenc.ks);
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                   &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

/* The OCB bulk cipher is shared with the software implementation. */
#  define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */
748
# ifndef OPENSSL_NO_SIV
/* SIV has no T4-specific code; the software implementation is reused. */
#  define aes_t4_siv_init_key aes_siv_init_key
#  define aes_t4_siv_cipher aes_siv_cipher
# endif /* OPENSSL_NO_SIV */
753
/*
 * Emit the cipher tables for one generic AES mode: a T4-backed
 * EVP_CIPHER, its software twin, and the EVP_aes_<keylen>_<mode>()
 * getter that selects between them at run time via SPARC_AES_CAPABLE.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_t4_init_key,                \
        aes_t4_##mode##_cipher,         \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

/*
 * As BLOCK_CIPHER_generic, but for modes with their own init/cleanup/ctrl
 * handlers and context types.  XTS and SIV take a double-length key.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_t4_##mode##_init_key,       \
        aes_t4_##mode##_cipher,         \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
802
#elif defined(S390X_aes_128_CAPABLE)
/* IBM S390X support */
/*
 * Cipher data for ECB on s390x: the KM-AES parameter block (just the
 * key material) plus the function code selecting the operation.
 */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
        struct {
            unsigned char k[32];
        } param;
        /* KM-AES parameter block - end */
    } km;
    unsigned int fc;            /* KM function code */
} S390X_AES_ECB_CTX;
819
/*
 * Cipher data for OFB on s390x: the KMO-AES parameter block (chaining
 * value and key) plus the function code.
 */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMO-AES parameter block - end */
    } kmo;
    unsigned int fc;            /* KMO function code */

    /* NOTE(review): appears to track partial-block residue between calls
     * — confirm against the OFB cipher body (not in this chunk). */
    int res;
} S390X_AES_OFB_CTX;
837
/*
 * Cipher data for CFB on s390x: the KMF-AES parameter block (chaining
 * value and key) plus the function code.
 */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMF-AES parameter block - end */
    } kmf;
    unsigned int fc;            /* KMF function code */

    /* NOTE(review): appears to track partial-block residue between calls
     * — confirm against the CFB cipher body (not in this chunk). */
    int res;
} S390X_AES_CFB_CTX;
855
/*
 * Context for AES-GCM backed by the CPACF KMA (cipher message with
 * authentication) instruction.  Partial AAD/text blocks are buffered in
 * software (ares/mres/kres) and fed to KMA once complete or at finalization.
 */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned int w;
                unsigned char b[4];
            } cv;                   /* 32-bit counter value */
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;                    /* computed authentication tag */
            unsigned char h[16];    /* hash subkey slot of the KMA block */
            unsigned long long taadl;   /* total AAD length */
            unsigned long long tpcl;    /* total plain/cipher-text length */
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;                   /* pre-counter block J0 */
            unsigned char k[32];    /* raw AES key */
        } param;
        /* KMA-GCM-AES parameter block - end */
    } kma;
    unsigned int fc;                /* CPACF function code + modifier flags */
    int key_set;                    /* set once a key has been installed */

    unsigned char *iv;              /* IV store; c->iv or malloc'ed buffer */
    int ivlen;
    int iv_set;
    int iv_gen;                     /* it is OK to generate IVs */

    int taglen;                     /* expected/produced tag length, -1 if unset */

    unsigned char ares[16];         /* buffered partial AAD block */
    unsigned char mres[16];         /* buffered partial plain/cipher-text block */
    unsigned char kres[16];         /* keystream block for the trailing partial */
    int areslen;                    /* bytes pending in ares */
    int mreslen;                    /* bytes pending in mres */
    int kreslen;

    int tls_aad_len;                /* TLS AAD length, -1 if not in TLS mode */
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
} S390X_AES_GCM_CTX;
904
/*
 * Context for AES-CCM: CBC-MAC via the CPACF KMAC instruction, CTR
 * en/de-cryption via the generic ctr128 code on top of AES_ctr32_encrypt.
 */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

        struct {
            /*-
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;              /* intermediate CBC-MAC value */
                unsigned char k[32];    /* raw AES key (overlaps key.k above) */
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;                /* CCM B0 / counter block scratch */
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;                  /* AAD length encoding / CTR scratch */

            unsigned long long blocks;  /* cipher invocations, checked vs 2^61 */
            int l;                  /* size of the CCM length field 'L' (octets) */
            int m;                  /* presumably the tag length 'M' - used
                                     * outside this view; confirm at callers */
            int tls_aad_len;
            int iv_set;
            int tag_set;
            int len_set;
            int key_set;

            unsigned char pad[140]; /* aligns fc with key.k.rounds, see above */
            unsigned int fc;        /* CPACF function code */
        } ccm;
    } aes;
} S390X_AES_CCM_CTX;
956
/*
 * CBC has no dedicated CPACF wrapper here: the generic aes_* implementations
 * are used (they pick up hardware acceleration at a lower level), so the
 * s390x_* names are simply aliased onto them.
 */
# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_AES_CBC_CTX              EVP_AES_KEY

# define s390x_aes_cbc_init_key aes_init_key

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
968
969static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
970                                  const unsigned char *key,
971                                  const unsigned char *iv, int enc)
972{
973    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
974    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
975
976    cctx->fc = S390X_AES_FC(keylen);
977    if (!enc)
978        cctx->fc |= S390X_DECRYPT;
979
980    memcpy(cctx->km.param.k, key, keylen);
981    return 1;
982}
983
984static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
985                                const unsigned char *in, size_t len)
986{
987    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
988
989    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
990    return 1;
991}
992
993static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
994                                  const unsigned char *key,
995                                  const unsigned char *ivec, int enc)
996{
997    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
998    const unsigned char *iv = ctx->oiv;
999    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1000    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1001
1002    memcpy(cctx->kmo.param.cv, iv, ivlen);
1003    memcpy(cctx->kmo.param.k, key, keylen);
1004    cctx->fc = S390X_AES_FC(keylen);
1005    cctx->res = 0;
1006    return 1;
1007}
1008
/*
 * OFB en/de-crypt: first consume any keystream bytes left over from the
 * previous call, process whole blocks with KMO, then generate one extra
 * keystream block with KM for a trailing partial block.  The updated
 * chaining value is written back to the EVP context's IV.  Returns 1.
 */
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = cctx->res;          /* offset of next unused keystream byte */
    int rem;

    /* re-seed the parameter block from the current EVP IV */
    memcpy(cctx->kmo.param.cv, iv, ivlen);
    /* drain leftover keystream bytes until block-aligned (n wraps to 0) */
    while (n && len) {
        *out = *in ^ cctx->kmo.param.cv[n];
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;            /* trailing partial-block byte count */

    len &= ~(size_t)0xf;
    if (len) {
        /* bulk path: KMO handles all complete blocks and updates cv */
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        out += len;
        in += len;
    }

    if (rem) {
        /* produce the next keystream block in-place: cv = E_k(cv) */
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);

        /* XOR the partial block; n records how much of cv was consumed */
        while (rem--) {
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
            ++n;
        }
    }

    /* publish the chaining value and the partial-block offset */
    memcpy(iv, cctx->kmo.param.cv, ivlen);
    cctx->res = n;
    return 1;
}
1051
1052static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
1053                                  const unsigned char *key,
1054                                  const unsigned char *ivec, int enc)
1055{
1056    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1057    const unsigned char *iv = ctx->oiv;
1058    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1059    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1060
1061    cctx->fc = S390X_AES_FC(keylen);
1062    cctx->fc |= 16 << 24;   /* 16 bytes cipher feedback */
1063    if (!enc)
1064        cctx->fc |= S390X_DECRYPT;
1065
1066    cctx->res = 0;
1067    memcpy(cctx->kmf.param.cv, iv, ivlen);
1068    memcpy(cctx->kmf.param.k, key, keylen);
1069    return 1;
1070}
1071
/*
 * CFB128 en/de-crypt: drain leftover feedback bytes, process whole blocks
 * with KMF, then handle a trailing partial block in software with one KM
 * call.  In CFB the next chaining value is the CIPHERTEXT, hence the
 * enc-dependent update of cv below.  Returns 1.
 */
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = cctx->res;          /* offset into the pending cv block */
    int rem;
    unsigned char tmp;

    /* re-seed the parameter block from the current EVP IV */
    memcpy(cctx->kmf.param.cv, iv, ivlen);
    /* finish the partial block left over from the previous call */
    while (n && len) {
        tmp = *in;
        *out = cctx->kmf.param.cv[n] ^ tmp;
        /* feedback is the ciphertext: *out when encrypting, input otherwise */
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;            /* trailing partial-block byte count */

    len &= ~(size_t)0xf;
    if (len) {
        /* bulk path: KMF processes all complete blocks and updates cv */
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        out += len;
        in += len;
    }

    if (rem) {
        /*
         * Encrypt the chaining value once (plain KM, no decrypt/feedback
         * flags) to get the keystream for the partial block.
         */
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);

        while (rem--) {
            tmp = in[n];
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
            ++n;
        }
    }

    /* publish chaining value and partial-block offset for the next call */
    memcpy(iv, cctx->kmf.param.cv, ivlen);
    cctx->res = n;
    return 1;
}
1121
1122static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
1123                                   const unsigned char *key,
1124                                   const unsigned char *ivec, int enc)
1125{
1126    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1127    const unsigned char *iv = ctx->oiv;
1128    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1129    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1130
1131    cctx->fc = S390X_AES_FC(keylen);
1132    cctx->fc |= 1 << 24;   /* 1 byte cipher feedback */
1133    if (!enc)
1134        cctx->fc |= S390X_DECRYPT;
1135
1136    memcpy(cctx->kmf.param.cv, iv, ivlen);
1137    memcpy(cctx->kmf.param.k, key, keylen);
1138    return 1;
1139}
1140
1141static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1142                                 const unsigned char *in, size_t len)
1143{
1144    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1145    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1146    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
1147
1148    memcpy(cctx->kmf.param.cv, iv, ivlen);
1149    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1150    memcpy(iv, cctx->kmf.param.cv, ivlen);
1151    return 1;
1152}
1153
/* CFB1 and CTR fall back to the generic implementations. */
# define s390x_aes_cfb1_init_key aes_init_key

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_AES_CTR_CTX              EVP_AES_KEY

# define s390x_aes_ctr_init_key aes_init_key

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

/* iv + padding length for iv lengths != 12 */
/* (round iv length up to a whole block, plus one block for the length) */
# define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
1170
1171/*-
1172 * Process additional authenticated data. Returns 0 on success. Code is
1173 * big-endian.
1174 */
1175static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1176                             size_t len)
1177{
1178    unsigned long long alen;
1179    int n, rem;
1180
1181    if (ctx->kma.param.tpcl)
1182        return -2;
1183
1184    alen = ctx->kma.param.taadl + len;
1185    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1186        return -1;
1187    ctx->kma.param.taadl = alen;
1188
1189    n = ctx->areslen;
1190    if (n) {
1191        while (n && len) {
1192            ctx->ares[n] = *aad;
1193            n = (n + 1) & 0xf;
1194            ++aad;
1195            --len;
1196        }
1197        /* ctx->ares contains a complete block if offset has wrapped around */
1198        if (!n) {
1199            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1200            ctx->fc |= S390X_KMA_HS;
1201        }
1202        ctx->areslen = n;
1203    }
1204
1205    rem = len & 0xf;
1206
1207    len &= ~(size_t)0xf;
1208    if (len) {
1209        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1210        aad += len;
1211        ctx->fc |= S390X_KMA_HS;
1212    }
1213
1214    if (rem) {
1215        ctx->areslen = rem;
1216
1217        do {
1218            --rem;
1219            ctx->ares[rem] = aad[rem];
1220        } while (rem);
1221    }
1222    return 0;
1223}
1224
1225/*-
1226 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1227 * success. Code is big-endian.
1228 */
1229static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1230                         unsigned char *out, size_t len)
1231{
1232    const unsigned char *inptr;
1233    unsigned long long mlen;
1234    union {
1235        unsigned int w[4];
1236        unsigned char b[16];
1237    } buf;
1238    size_t inlen;
1239    int n, rem, i;
1240
1241    mlen = ctx->kma.param.tpcl + len;
1242    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1243        return -1;
1244    ctx->kma.param.tpcl = mlen;
1245
1246    n = ctx->mreslen;
1247    if (n) {
1248        inptr = in;
1249        inlen = len;
1250        while (n && inlen) {
1251            ctx->mres[n] = *inptr;
1252            n = (n + 1) & 0xf;
1253            ++inptr;
1254            --inlen;
1255        }
1256        /* ctx->mres contains a complete block if offset has wrapped around */
1257        if (!n) {
1258            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1259                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1260            ctx->fc |= S390X_KMA_HS;
1261            ctx->areslen = 0;
1262
1263            /* previous call already encrypted/decrypted its remainder,
1264             * see comment below */
1265            n = ctx->mreslen;
1266            while (n) {
1267                *out = buf.b[n];
1268                n = (n + 1) & 0xf;
1269                ++out;
1270                ++in;
1271                --len;
1272            }
1273            ctx->mreslen = 0;
1274        }
1275    }
1276
1277    rem = len & 0xf;
1278
1279    len &= ~(size_t)0xf;
1280    if (len) {
1281        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1282                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1283        in += len;
1284        out += len;
1285        ctx->fc |= S390X_KMA_HS;
1286        ctx->areslen = 0;
1287    }
1288
1289    /*-
1290     * If there is a remainder, it has to be saved such that it can be
1291     * processed by kma later. However, we also have to do the for-now
1292     * unauthenticated encryption/decryption part here and now...
1293     */
1294    if (rem) {
1295        if (!ctx->mreslen) {
1296            buf.w[0] = ctx->kma.param.j0.w[0];
1297            buf.w[1] = ctx->kma.param.j0.w[1];
1298            buf.w[2] = ctx->kma.param.j0.w[2];
1299            buf.w[3] = ctx->kma.param.cv.w + 1;
1300            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1301        }
1302
1303        n = ctx->mreslen;
1304        for (i = 0; i < rem; i++) {
1305            ctx->mres[n + i] = in[i];
1306            out[i] = in[i] ^ ctx->kres[n + i];
1307        }
1308
1309        ctx->mreslen += rem;
1310    }
1311    return 0;
1312}
1313
1314/*-
1315 * Initialize context structure. Code is big-endian.
1316 */
1317static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1318                                const unsigned char *iv)
1319{
1320    ctx->kma.param.t.g[0] = 0;
1321    ctx->kma.param.t.g[1] = 0;
1322    ctx->kma.param.tpcl = 0;
1323    ctx->kma.param.taadl = 0;
1324    ctx->mreslen = 0;
1325    ctx->areslen = 0;
1326    ctx->kreslen = 0;
1327
1328    if (ctx->ivlen == 12) {
1329        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1330        ctx->kma.param.j0.w[3] = 1;
1331        ctx->kma.param.cv.w = 1;
1332    } else {
1333        /* ctx->iv has the right size and is already padded. */
1334        memcpy(ctx->iv, iv, ctx->ivlen);
1335        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1336                  ctx->fc, &ctx->kma.param);
1337        ctx->fc |= S390X_KMA_HS;
1338
1339        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1340        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1341        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1342        ctx->kma.param.t.g[0] = 0;
1343        ctx->kma.param.t.g[1] = 0;
1344    }
1345}
1346
1347/*-
1348 * Performs various operations on the context structure depending on control
1349 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1350 * Code is big-endian.
1351 */
1352static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1353{
1354    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1355    S390X_AES_GCM_CTX *gctx_out;
1356    EVP_CIPHER_CTX *out;
1357    unsigned char *buf;
1358    int ivlen, enc, len;
1359
1360    switch (type) {
1361    case EVP_CTRL_INIT:
1362        ivlen = EVP_CIPHER_get_iv_length(c->cipher);
1363        gctx->key_set = 0;
1364        gctx->iv_set = 0;
1365        gctx->ivlen = ivlen;
1366        gctx->iv = c->iv;
1367        gctx->taglen = -1;
1368        gctx->iv_gen = 0;
1369        gctx->tls_aad_len = -1;
1370        return 1;
1371
1372    case EVP_CTRL_GET_IVLEN:
1373        *(int *)ptr = gctx->ivlen;
1374        return 1;
1375
1376    case EVP_CTRL_AEAD_SET_IVLEN:
1377        if (arg <= 0)
1378            return 0;
1379
1380        if (arg != 12) {
1381            len = S390X_gcm_ivpadlen(arg);
1382
1383            /* Allocate memory for iv if needed. */
1384            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1385                if (gctx->iv != c->iv)
1386                    OPENSSL_free(gctx->iv);
1387
1388                if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
1389                    ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
1390                    return 0;
1391                }
1392            }
1393            /* Add padding. */
1394            memset(gctx->iv + arg, 0, len - arg - 8);
1395            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1396        }
1397        gctx->ivlen = arg;
1398        return 1;
1399
1400    case EVP_CTRL_AEAD_SET_TAG:
1401        buf = EVP_CIPHER_CTX_buf_noconst(c);
1402        enc = EVP_CIPHER_CTX_is_encrypting(c);
1403        if (arg <= 0 || arg > 16 || enc)
1404            return 0;
1405
1406        memcpy(buf, ptr, arg);
1407        gctx->taglen = arg;
1408        return 1;
1409
1410    case EVP_CTRL_AEAD_GET_TAG:
1411        enc = EVP_CIPHER_CTX_is_encrypting(c);
1412        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1413            return 0;
1414
1415        memcpy(ptr, gctx->kma.param.t.b, arg);
1416        return 1;
1417
1418    case EVP_CTRL_GCM_SET_IV_FIXED:
1419        /* Special case: -1 length restores whole iv */
1420        if (arg == -1) {
1421            memcpy(gctx->iv, ptr, gctx->ivlen);
1422            gctx->iv_gen = 1;
1423            return 1;
1424        }
1425        /*
1426         * Fixed field must be at least 4 bytes and invocation field at least
1427         * 8.
1428         */
1429        if ((arg < 4) || (gctx->ivlen - arg) < 8)
1430            return 0;
1431
1432        if (arg)
1433            memcpy(gctx->iv, ptr, arg);
1434
1435        enc = EVP_CIPHER_CTX_is_encrypting(c);
1436        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1437            return 0;
1438
1439        gctx->iv_gen = 1;
1440        return 1;
1441
1442    case EVP_CTRL_GCM_IV_GEN:
1443        if (gctx->iv_gen == 0 || gctx->key_set == 0)
1444            return 0;
1445
1446        s390x_aes_gcm_setiv(gctx, gctx->iv);
1447
1448        if (arg <= 0 || arg > gctx->ivlen)
1449            arg = gctx->ivlen;
1450
1451        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1452        /*
1453         * Invocation field will be at least 8 bytes in size and so no need
1454         * to check wrap around or increment more than last 8 bytes.
1455         */
1456        ctr64_inc(gctx->iv + gctx->ivlen - 8);
1457        gctx->iv_set = 1;
1458        return 1;
1459
1460    case EVP_CTRL_GCM_SET_IV_INV:
1461        enc = EVP_CIPHER_CTX_is_encrypting(c);
1462        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1463            return 0;
1464
1465        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1466        s390x_aes_gcm_setiv(gctx, gctx->iv);
1467        gctx->iv_set = 1;
1468        return 1;
1469
1470    case EVP_CTRL_AEAD_TLS1_AAD:
1471        /* Save the aad for later use. */
1472        if (arg != EVP_AEAD_TLS1_AAD_LEN)
1473            return 0;
1474
1475        buf = EVP_CIPHER_CTX_buf_noconst(c);
1476        memcpy(buf, ptr, arg);
1477        gctx->tls_aad_len = arg;
1478        gctx->tls_enc_records = 0;
1479
1480        len = buf[arg - 2] << 8 | buf[arg - 1];
1481        /* Correct length for explicit iv. */
1482        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1483            return 0;
1484        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1485
1486        /* If decrypting correct for tag too. */
1487        enc = EVP_CIPHER_CTX_is_encrypting(c);
1488        if (!enc) {
1489            if (len < EVP_GCM_TLS_TAG_LEN)
1490                return 0;
1491            len -= EVP_GCM_TLS_TAG_LEN;
1492        }
1493        buf[arg - 2] = len >> 8;
1494        buf[arg - 1] = len & 0xff;
1495        /* Extra padding: tag appended to record. */
1496        return EVP_GCM_TLS_TAG_LEN;
1497
1498    case EVP_CTRL_COPY:
1499        out = ptr;
1500        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
1501
1502        if (gctx->iv == c->iv) {
1503            gctx_out->iv = out->iv;
1504        } else {
1505            len = S390X_gcm_ivpadlen(gctx->ivlen);
1506
1507            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
1508                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
1509                return 0;
1510            }
1511
1512            memcpy(gctx_out->iv, gctx->iv, len);
1513        }
1514        return 1;
1515
1516    default:
1517        return -1;
1518    }
1519}
1520
1521/*-
1522 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1523 */
1524static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1525                                  const unsigned char *key,
1526                                  const unsigned char *iv, int enc)
1527{
1528    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1529    int keylen;
1530
1531    if (iv == NULL && key == NULL)
1532        return 1;
1533
1534    if (key != NULL) {
1535        keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1536        memcpy(&gctx->kma.param.k, key, keylen);
1537
1538        gctx->fc = S390X_AES_FC(keylen);
1539        if (!enc)
1540            gctx->fc |= S390X_DECRYPT;
1541
1542        if (iv == NULL && gctx->iv_set)
1543            iv = gctx->iv;
1544
1545        if (iv != NULL) {
1546            s390x_aes_gcm_setiv(gctx, iv);
1547            gctx->iv_set = 1;
1548        }
1549        gctx->key_set = 1;
1550    } else {
1551        if (gctx->key_set)
1552            s390x_aes_gcm_setiv(gctx, iv);
1553        else
1554            memcpy(gctx->iv, iv, gctx->ivlen);
1555
1556        gctx->iv_set = 1;
1557        gctx->iv_gen = 0;
1558    }
1559    return 1;
1560}
1561
1562/*-
1563 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1564 * if successful. Otherwise -1 is returned. Code is big-endian.
1565 */
1566static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1567                                    const unsigned char *in, size_t len)
1568{
1569    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1570    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1571    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
1572    int rv = -1;
1573
1574    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1575        return -1;
1576
1577    /*
1578     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
1579     * Requirements from SP 800-38D".  The requirements is for one party to the
1580     * communication to fail after 2^64 - 1 keys.  We do this on the encrypting
1581     * side only.
1582     */
1583    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
1584        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
1585        goto err;
1586    }
1587
1588    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1589                                     : EVP_CTRL_GCM_SET_IV_INV,
1590                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1591        goto err;
1592
1593    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1594    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1595    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1596
1597    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1598    gctx->kma.param.tpcl = len << 3;
1599    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1600              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1601
1602    if (enc) {
1603        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1604        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1605    } else {
1606        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1607                          EVP_GCM_TLS_TAG_LEN)) {
1608            OPENSSL_cleanse(out, len);
1609            goto err;
1610        }
1611        rv = len;
1612    }
1613err:
1614    gctx->iv_set = 0;
1615    gctx->tls_aad_len = -1;
1616    return rv;
1617}
1618
1619/*-
1620 * Called from EVP layer to initialize context, process additional
1621 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1622 * ciphertext or process a TLS packet, depending on context. Returns bytes
1623 * written on success. Otherwise -1 is returned. Code is big-endian.
1624 */
1625static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1626                                const unsigned char *in, size_t len)
1627{
1628    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1629    unsigned char *buf, tmp[16];
1630    int enc;
1631
1632    if (!gctx->key_set)
1633        return -1;
1634
1635    if (gctx->tls_aad_len >= 0)
1636        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1637
1638    if (!gctx->iv_set)
1639        return -1;
1640
1641    if (in != NULL) {
1642        if (out == NULL) {
1643            if (s390x_aes_gcm_aad(gctx, in, len))
1644                return -1;
1645        } else {
1646            if (s390x_aes_gcm(gctx, in, out, len))
1647                return -1;
1648        }
1649        return len;
1650    } else {
1651        gctx->kma.param.taadl <<= 3;
1652        gctx->kma.param.tpcl <<= 3;
1653        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1654                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1655        /* recall that we already did en-/decrypt gctx->mres
1656         * and returned it to caller... */
1657        OPENSSL_cleanse(tmp, gctx->mreslen);
1658        gctx->iv_set = 0;
1659
1660        enc = EVP_CIPHER_CTX_is_encrypting(ctx);
1661        if (enc) {
1662            gctx->taglen = 16;
1663        } else {
1664            if (gctx->taglen < 0)
1665                return -1;
1666
1667            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1668            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1669                return -1;
1670        }
1671        return 0;
1672    }
1673}
1674
1675static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1676{
1677    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1678
1679    if (gctx == NULL)
1680        return 0;
1681
1682    if (gctx->iv != c->iv)
1683        OPENSSL_free(gctx->iv);
1684
1685    OPENSSL_cleanse(gctx, sizeof(*gctx));
1686    return 1;
1687}
1688
/* XTS falls back entirely to the generic aes_xts_* implementation. */
# define S390X_AES_XTS_CTX              EVP_AES_XTS_CTX

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup
1701
1702/*-
1703 * Set nonce and length fields. Code is big-endian.
1704 */
1705static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1706                                          const unsigned char *nonce,
1707                                          size_t mlen)
1708{
1709    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1710    ctx->aes.ccm.nonce.g[1] = mlen;
1711    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1712}
1713
1714/*-
1715 * Process additional authenticated data. Code is big-endian.
1716 */
1717static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1718                              size_t alen)
1719{
1720    unsigned char *ptr;
1721    int i, rem;
1722
1723    if (!alen)
1724        return;
1725
1726    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1727
1728    /* Suppress 'type-punned pointer dereference' warning. */
1729    ptr = ctx->aes.ccm.buf.b;
1730
1731    if (alen < ((1 << 16) - (1 << 8))) {
1732        *(uint16_t *)ptr = alen;
1733        i = 2;
1734    } else if (sizeof(alen) == 8
1735               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1736        *(uint16_t *)ptr = 0xffff;
1737        *(uint64_t *)(ptr + 2) = alen;
1738        i = 10;
1739    } else {
1740        *(uint16_t *)ptr = 0xfffe;
1741        *(uint32_t *)(ptr + 2) = alen;
1742        i = 6;
1743    }
1744
1745    while (i < 16 && alen) {
1746        ctx->aes.ccm.buf.b[i] = *aad;
1747        ++aad;
1748        --alen;
1749        ++i;
1750    }
1751    while (i < 16) {
1752        ctx->aes.ccm.buf.b[i] = 0;
1753        ++i;
1754    }
1755
1756    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1757    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1758    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
1759               &ctx->aes.ccm.kmac_param);
1760    ctx->aes.ccm.blocks += 2;
1761
1762    rem = alen & 0xf;
1763    alen &= ~(size_t)0xf;
1764    if (alen) {
1765        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1766        ctx->aes.ccm.blocks += alen >> 4;
1767        aad += alen;
1768    }
1769    if (rem) {
1770        for (i = 0; i < rem; i++)
1771            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
1772
1773        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1774                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1775                 ctx->aes.ccm.kmac_param.k);
1776        ctx->aes.ccm.blocks++;
1777    }
1778}
1779
1780/*-
1781 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1782 * success.
1783 */
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len, int enc)
{
    size_t n, rem;
    unsigned int i, l, num;
    unsigned char flags;

    /*
     * If no AAD was processed, the B0 block has not been MACed yet; run it
     * through KM now to seed the CBC-MAC state (kmac_param.icv) before the
     * payload is processed.
     */
    flags = ctx->aes.ccm.nonce.b[0];
    if (!(flags & S390X_CCM_AAD_FLAG)) {
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
                 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
    /* Low 3 bits of the flags octet encode L' = L - 1 (counter field size). */
    l = flags & 0x7;
    ctx->aes.ccm.nonce.b[0] = l;

    /*-
     * Reconstruct length from encoded length field
     * and initialize it with counter value.
     */
    n = 0;
    for (i = 15 - l; i < 15; i++) {
        n |= ctx->aes.ccm.nonce.b[i];
        ctx->aes.ccm.nonce.b[i] = 0;
        n <<= 8;
    }
    n |= ctx->aes.ccm.nonce.b[15];
    ctx->aes.ccm.nonce.b[15] = 1;   /* payload encryption starts at counter 1 */

    if (n != len)
        return -1;              /* length mismatch */

    if (enc) {
        /* Two operations per block plus one for tag encryption */
        ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
        if (ctx->aes.ccm.blocks > (1ULL << 61))
            return -2;          /* too much data */
    }

    num = 0;
    rem = len & 0xf;            /* bytes in the trailing partial block */
    len &= ~(size_t)0xf;        /* bytes covered by whole blocks */

    if (enc) {
        /* mac-then-encrypt */
        if (len)
            s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            /* MAC the partial block by hand: xor into icv, then one KM. */
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }

        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
    } else {
        /* decrypt-then-mac */
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);

        if (len)
            s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            /* MAC the partial block of the recovered plaintext. */
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }
    }
    /* encrypt tag */
    for (i = 15 - l; i < 16; i++)
        ctx->aes.ccm.nonce.b[i] = 0;    /* counter 0 yields the A0 block */

    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
             ctx->aes.ccm.kmac_param.k);
    /* Tag = CBC-MAC xor E(K, A0); left in kmac_param.icv for the caller. */
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];

    ctx->aes.ccm.nonce.b[0] = flags;    /* restore flags field */
    return 0;
}
1872
1873/*-
1874 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1875 * if successful. Otherwise -1 is returned.
1876 */
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec = ctx->iv;
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);

    /*-
     * Only in-place operation is supported and the record must be large
     * enough to hold at least the explicit iv and the tag.
     */
    if (out != in
            || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
        return -1;

    if (enc) {
        /* Set explicit iv (sequence number). */
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    }

    /* Payload length excludes the explicit iv and the tag. */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    /*-
     * Get explicit iv (sequence number). We already have fixed iv
     * (server/client_write_iv) here.
     */
    memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    s390x_aes_ccm_setiv(cctx, ivec, len);

    /* Process aad (sequence number|type|version|length) */
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);

    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        /* Append the computed tag to the record. */
        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    } else {
        /* Decrypt first, then compare the tag in constant time. */
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
                               cctx->aes.ccm.m))
                return len;
        }

        /* Authentication failed: do not leak the decrypted payload. */
        OPENSSL_cleanse(out, len);
        return -1;
    }
}
1925
1926/*-
1927 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
1928 * returned.
1929 */
1930static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
1931                                  const unsigned char *key,
1932                                  const unsigned char *iv, int enc)
1933{
1934    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1935    int keylen;
1936
1937    if (iv == NULL && key == NULL)
1938        return 1;
1939
1940    if (key != NULL) {
1941        keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1942        cctx->aes.ccm.fc = S390X_AES_FC(keylen);
1943        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
1944
1945        /* Store encoded m and l. */
1946        cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
1947                                 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
1948        memset(cctx->aes.ccm.nonce.b + 1, 0,
1949               sizeof(cctx->aes.ccm.nonce.b));
1950        cctx->aes.ccm.blocks = 0;
1951
1952        cctx->aes.ccm.key_set = 1;
1953    }
1954
1955    if (iv != NULL) {
1956        memcpy(ctx->iv, iv, 15 - cctx->aes.ccm.l);
1957
1958        cctx->aes.ccm.iv_set = 1;
1959    }
1960
1961    return 1;
1962}
1963
1964/*-
1965 * Called from EVP layer to initialize context, process additional
1966 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1967 * plaintext or process a TLS packet, depending on context. Returns bytes
1968 * written on success. Otherwise -1 is returned.
1969 */
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
    int rv;
    unsigned char *buf;

    if (!cctx->aes.ccm.key_set)
        return -1;

    /* TLS path: AAD length was configured via EVP_CTRL_AEAD_TLS1_AAD. */
    if (cctx->aes.ccm.tls_aad_len >= 0)
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);

    /*-
     * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
     * so integrity must be checked already at Update() i.e., before
     * potentially corrupted data is output.
     */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->aes.ccm.iv_set)
        return -1;

    if (out == NULL) {
        /* Update(): Pass message length. */
        if (in == NULL) {
            s390x_aes_ccm_setiv(cctx, ctx->iv, len);

            cctx->aes.ccm.len_set = 1;
            return len;
        }

        /* Update(): Process aad. */
        if (!cctx->aes.ccm.len_set && len)
            return -1;

        s390x_aes_ccm_aad(cctx, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!enc && !cctx->aes.ccm.tag_set)
        return -1;

    /* Update(): Process message. */

    if (!cctx->aes.ccm.len_set) {
        /*-
         * In case message length was not previously set explicitly via
         * Update(), set it now.
         */
        s390x_aes_ccm_setiv(cctx, ctx->iv, len);

        cctx->aes.ccm.len_set = 1;
    }

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        /* Tag is left in kmac_param.icv; read via EVP_CTRL_AEAD_GET_TAG. */
        cctx->aes.ccm.tag_set = 1;
        return len;
    } else {
        rv = -1;

        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            /* Compare the computed tag with the one saved by SET_TAG. */
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
                               cctx->aes.ccm.m))
                rv = len;
        }

        /* On authentication failure, wipe the decrypted output. */
        if (rv == -1)
            OPENSSL_cleanse(out, len);

        /* Decryption finalizes the operation; reset per-message state. */
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        return rv;
    }
}
2053
2054/*-
2055 * Performs various operations on the context structure depending on control
2056 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2057 * Code is big-endian.
2058 */
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
    unsigned char *buf;
    int enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        /* Defaults: L = 8 (7-byte nonce) and a 12-byte tag. */
        cctx->aes.ccm.key_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.l = 8;
        cctx->aes.ccm.m = 12;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        cctx->aes.ccm.tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        /* CCM nonce length is 15 - L by construction. */
        *(int *)ptr = 15 - cctx->aes.ccm.l;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        /* Save the aad for later use. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        cctx->aes.ccm.tls_aad_len = arg;

        /* Record length is the last two AAD bytes, big-endian. */
        len = buf[arg - 2] << 8 | buf[arg - 1];
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
            return 0;

        /* Correct length for explicit iv. */
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;

        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (!enc) {
            if (len < cctx->aes.ccm.m)
                return 0;

            /* Correct length for tag. */
            len -= cctx->aes.ccm.m;
        }

        /* Write the corrected payload length back into the saved AAD. */
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;

        /* Extra padding: tag appended to record. */
        return cctx->aes.ccm.m;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;

        /* Copy to first part of the iv. */
        memcpy(c->iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* An iv length of n implies L = 15 - n; validated below. */
        arg = 15 - arg;
        /* fall-through */

    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;

        cctx->aes.ccm.l = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Tag length must be an even value in [4, 16]. */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;

        /* An actual tag value may only be supplied when decrypting. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (enc && ptr)
            return 0;

        if (ptr) {
            cctx->aes.ccm.tag_set = 1;
            buf = EVP_CIPHER_CTX_buf_noconst(c);
            memcpy(buf, ptr, arg);
        }

        cctx->aes.ccm.m = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* The tag exists only after an encryption has completed. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (!enc || !cctx->aes.ccm.tag_set)
            return 0;

        if(arg < cctx->aes.ccm.m)
            return 0;

        memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        /* Reading the tag finalizes this message; reset per-message state. */
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        return 1;

    default:
        return -1;
    }
}
2168
/* CCM cleanup is shared with the generic software implementation. */
# define s390x_aes_ccm_cleanup aes_ccm_cleanup

# ifndef OPENSSL_NO_OCB
/*-
 * There is no s390x-specific OCB implementation: alias the s390x names
 * expected by BLOCK_CIPHER_custom to the generic software routines
 * (presumably defined later in this file — the prototypes below are
 * rewritten by the #defines to declare those generic functions).
 */
#  define S390X_AES_OCB_CTX             EVP_AES_OCB_CTX

#  define s390x_aes_ocb_init_key aes_ocb_init_key
static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                                  const unsigned char *iv, int enc);
#  define s390x_aes_ocb_cipher aes_ocb_cipher
static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
#  define s390x_aes_ocb_cleanup aes_ocb_cleanup
static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
#  define s390x_aes_ocb_ctrl aes_ocb_ctrl
static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# endif

# ifndef OPENSSL_NO_SIV
/* Likewise, SIV falls back to the generic software implementation. */
#  define S390X_AES_SIV_CTX             EVP_AES_SIV_CTX

#  define s390x_aes_siv_init_key aes_siv_init_key
#  define s390x_aes_siv_cipher aes_siv_cipher
#  define s390x_aes_siv_cleanup aes_siv_cleanup
#  define s390x_aes_siv_ctrl aes_siv_ctrl
# endif
2194
/*-
 * Emit two EVP_CIPHER tables for a basic mode — the s390x-accelerated
 * variant and the generic software variant — plus a getter that selects
 * the s390x table at runtime when the CPU is capable.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,    \
                              MODE,flags)                               \
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                 \
    nid##_##keylen##_##nmode,blocksize,                                 \
    keylen / 8,                                                         \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    EVP_ORIG_GLOBAL,                                                    \
    s390x_aes_##mode##_init_key,                                        \
    s390x_aes_##mode##_cipher,                                          \
    NULL,                                                               \
    sizeof(S390X_AES_##MODE##_CTX),                                     \
    NULL,                                                               \
    NULL,                                                               \
    NULL,                                                               \
    NULL                                                                \
};                                                                      \
static const EVP_CIPHER aes_##keylen##_##mode = {                       \
    nid##_##keylen##_##nmode,                                           \
    blocksize,                                                          \
    keylen / 8,                                                         \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    EVP_ORIG_GLOBAL,                                                    \
    aes_init_key,                                                       \
    aes_##mode##_cipher,                                                \
    NULL,                                                               \
    sizeof(EVP_AES_KEY),                                                \
    NULL,                                                               \
    NULL,                                                               \
    NULL,                                                               \
    NULL                                                                \
};                                                                      \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                       \
{                                                                       \
    return S390X_aes_##keylen##_##mode##_CAPABLE ?                      \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;       \
}

/*-
 * Same as BLOCK_CIPHER_generic but for custom modes (GCM/CCM/XTS/OCB/SIV)
 * that carry a cleanup routine and a ctrl handler; XTS and SIV use a
 * double-length key.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                 \
    nid##_##keylen##_##mode,                                            \
    blocksize,                                                          \
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8,        \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    EVP_ORIG_GLOBAL,                                                    \
    s390x_aes_##mode##_init_key,                                        \
    s390x_aes_##mode##_cipher,                                          \
    s390x_aes_##mode##_cleanup,                                         \
    sizeof(S390X_AES_##MODE##_CTX),                                     \
    NULL,                                                               \
    NULL,                                                               \
    s390x_aes_##mode##_ctrl,                                            \
    NULL                                                                \
};                                                                      \
static const EVP_CIPHER aes_##keylen##_##mode = {                       \
    nid##_##keylen##_##mode,blocksize,                                  \
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8,        \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    EVP_ORIG_GLOBAL,                                                    \
    aes_##mode##_init_key,                                              \
    aes_##mode##_cipher,                                                \
    aes_##mode##_cleanup,                                               \
    sizeof(EVP_AES_##MODE##_CTX),                                       \
    NULL,                                                               \
    NULL,                                                               \
    aes_##mode##_ctrl,                                                  \
    NULL                                                                \
};                                                                      \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                       \
{                                                                       \
    return S390X_aes_##keylen##_##mode##_CAPABLE ?                      \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;       \
}

#else

/* Non-s390x builds: only the generic software EVP_CIPHER table is emitted. */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

#endif

/* Instantiate every basic mode (CBC/ECB/OFB/CFB*/CTR) for one key length. */
#define BLOCK_CIPHER_generic_pack(nid,keylen,flags)             \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)     \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
2312
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_get_mode(ctx);
    /*-
     * Only ECB and CBC decryption need the AES decrypt key schedule and
     * block function; every other mode/direction combination runs the
     * cipher forward and uses the encrypt schedule.  Each branch below
     * picks the best available implementation at runtime.
     */
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
#ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            /* Hardware AES instructions (e.g. AES-NI, ARMv8 AES). */
            ret = HWAES_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
#endif
#ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            /* Bit-sliced implementation: decrypt side handles CBC only. */
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) ossl_bsaes_cbc_encrypt;
        } else
#endif
#ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            /* Vector-permutation implementation. */
            ret = vpaes_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
#endif
        {
            /* Portable software fallback. */
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        }
    } else
#ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key,
                                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
# endif
            (void)0;            /* terminate potentially open 'else' */
    } else
#endif
#ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        /* Bit-sliced implementation: encrypt side handles CTR only. */
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) ossl_bsaes_ctr32_encrypt_blocks;
    } else
#endif
#ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key,
                                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
#endif
    {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
#ifdef AES_CTR_ASM
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
#endif
    }

    /* All set_*_key variants report failure with a negative return. */
    if (ret < 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
2420
2421static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2422                          const unsigned char *in, size_t len)
2423{
2424    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2425
2426    if (dat->stream.cbc)
2427        (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv,
2428                            EVP_CIPHER_CTX_is_encrypting(ctx));
2429    else if (EVP_CIPHER_CTX_is_encrypting(ctx))
2430        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv,
2431                              dat->block);
2432    else
2433        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2434                              ctx->iv, dat->block);
2435
2436    return 1;
2437}
2438
2439static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2440                          const unsigned char *in, size_t len)
2441{
2442    size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);
2443    size_t i;
2444    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2445
2446    if (len < bl)
2447        return 1;
2448
2449    for (i = 0, len -= bl; i <= len; i += bl)
2450        (*dat->block) (in + i, out + i, &dat->ks);
2451
2452    return 1;
2453}
2454
2455static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2456                          const unsigned char *in, size_t len)
2457{
2458    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2459
2460    int num = EVP_CIPHER_CTX_get_num(ctx);
2461    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2462                          ctx->iv, &num, dat->block);
2463    EVP_CIPHER_CTX_set_num(ctx, num);
2464    return 1;
2465}
2466
2467static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2468                          const unsigned char *in, size_t len)
2469{
2470    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2471
2472    int num = EVP_CIPHER_CTX_get_num(ctx);
2473    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2474                          ctx->iv, &num,
2475                          EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2476    EVP_CIPHER_CTX_set_num(ctx, num);
2477    return 1;
2478}
2479
2480static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2481                           const unsigned char *in, size_t len)
2482{
2483    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2484
2485    int num = EVP_CIPHER_CTX_get_num(ctx);
2486    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2487                            ctx->iv, &num,
2488                            EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2489    EVP_CIPHER_CTX_set_num(ctx, num);
2490    return 1;
2491}
2492
2493static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2494                           const unsigned char *in, size_t len)
2495{
2496    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2497
2498    if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2499        int num = EVP_CIPHER_CTX_get_num(ctx);
2500        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2501                                ctx->iv, &num,
2502                                EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2503        EVP_CIPHER_CTX_set_num(ctx, num);
2504        return 1;
2505    }
2506
2507    while (len >= MAXBITCHUNK) {
2508        int num = EVP_CIPHER_CTX_get_num(ctx);
2509        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2510                                ctx->iv, &num,
2511                                EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2512        EVP_CIPHER_CTX_set_num(ctx, num);
2513        len -= MAXBITCHUNK;
2514        out += MAXBITCHUNK;
2515        in  += MAXBITCHUNK;
2516    }
2517    if (len) {
2518        int num = EVP_CIPHER_CTX_get_num(ctx);
2519        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2520                                ctx->iv, &num,
2521                                EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2522        EVP_CIPHER_CTX_set_num(ctx, num);
2523    }
2524
2525    return 1;
2526}
2527
2528static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2529                          const unsigned char *in, size_t len)
2530{
2531    int n = EVP_CIPHER_CTX_get_num(ctx);
2532    unsigned int num;
2533    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2534
2535    if (n < 0)
2536        return 0;
2537    num = (unsigned int)n;
2538
2539    if (dat->stream.ctr)
2540        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2541                                    ctx->iv,
2542                                    EVP_CIPHER_CTX_buf_noconst(ctx),
2543                                    &num, dat->stream.ctr);
2544    else
2545        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2546                              ctx->iv,
2547                              EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2548                              dat->block);
2549    EVP_CIPHER_CTX_set_num(ctx, num);
2550    return 1;
2551}
2552
/* Instantiate EVP_aes_{128,192,256}_{cbc,ecb,ofb,cfb128,cfb1,cfb8,ctr}. */
BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
2556
2557static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2558{
2559    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2560    if (gctx == NULL)
2561        return 0;
2562    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2563    if (gctx->iv != c->iv)
2564        OPENSSL_free(gctx->iv);
2565    return 1;
2566}
2567
/*
 * Control operations for the EVP AES-GCM ciphers: state (re)initialisation,
 * IV length/content management, tag get/set, TLS AAD handling and context
 * copy.  Returns 1 on success (or the tag length for EVP_CTRL_AEAD_TLS1_AAD),
 * 0 on failure and -1 for unrecognised |type| values.
 */
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset all GCM state; the IV initially aliases the embedded c->iv */
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Only a decrypting context may have the expected tag set */
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* The tag is only available after an encrypt finalisation */
        if (arg <= 0 || arg > 16 || !c->encrypt
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        /* On encrypt, randomise the invocation field after the fixed part */
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        /* Out-of-range |arg| means "return the whole IV" */
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        /* Decrypt side: install the peer's invocation field into the IV */
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;
        {
            /* The last two AAD bytes carry the record length (big endian) */
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
            /*
             * Pointers into this context must be re-pointed at the
             * corresponding fields of the destination context.
             */
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                /* Heap-allocated IV: give the copy its own buffer */
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
2709
/*
 * Initialise the AES-GCM key schedule and/or IV.  Either argument may be
 * NULL to leave that half of the state untouched.  Selects the best
 * available AES implementation (HW AES, bit-sliced, vector-permute or the
 * generic C code) at run time.  Always returns 1.
 */
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        /* do/while(0) so the capability branches can 'break' out early */
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# else
                gctx->ctr = NULL;
# endif
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) ossl_bsaes_ctr32_encrypt_blocks;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            /* Generic C fallback */
            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
#ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
#else
            gctx->ctr = NULL;
#endif
        } while (0);

        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
2782
2783/*
2784 * Handle TLS GCM packet format. This consists of the last portion of the IV
2785 * followed by the payload and finally the tag. On encrypt generate IV,
2786 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
2787 * and verify tag.
2788 */
2789
/*
 * One-shot processing of a TLS GCM record: explicit IV || payload || tag.
 * Must be called with out == in (in-place).  Returns the number of bytes
 * written on success or -1 on any failure (including tag mismatch on
 * decrypt, in which case the output is wiped).
 */
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    int rv = -1;
    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D".  The requirements is for one party to the
     * communication to fail after 2^64 - 1 keys.  We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
        goto err;
    }

    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
                                              : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            /* Hand the bulk of the data to the stitched assembly path */
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            /* Process whatever the assembly path left over */
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    /* The IV must not be reused for another record */
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
2919
2920#ifdef FIPS_MODULE
2921/*
2922 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVS and keys"
2923 *
2924 * See also 8.2.2 RBG-based construction.
2925 * Random construction consists of a free field (which can be NULL) and a
2926 * random field which will use a DRBG that can return at least 96 bits of
2927 * entropy strength. (The DRBG must be seeded by the FIPS module).
2928 */
2929static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
2930{
2931    int sz = gctx->ivlen - offset;
2932
2933    /* Must be at least 96 bits */
2934    if (sz <= 0 || gctx->ivlen < 12)
2935        return 0;
2936
2937    /* Use DRBG to generate random iv */
2938    if (RAND_bytes(gctx->iv + offset, sz) <= 0)
2939        return 0;
2940    return 1;
2941}
2942#endif /* FIPS_MODULE */
2943
/*
 * Incremental GCM cipher entry point.  Dispatches TLS records to
 * aes_gcm_tls_cipher(); otherwise handles AAD (out == NULL), payload
 * encrypt/decrypt, and finalisation (in == NULL, where decrypt verifies the
 * tag and encrypt produces it).  Returns bytes processed, 0 at finalisation,
 * or -1 on error.
 */
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

#ifdef FIPS_MODULE
    /*
     * FIPS requires generation of AES-GCM IV's inside the FIPS module.
     * The IV can still be set externally (the security policy will state that
     * this is not FIPS compliant). There are some applications
     * where setting the IV externally is the only option available.
     */
    if (!gctx->iv_set) {
        if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
            return -1;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen_rand = 1;
    }
#else
    if (!gctx->iv_set)
        return -1;
#endif /* FIPS_MODULE */

    if (in) {
        if (out == NULL) {
            /* No output buffer: |in| is additional authenticated data */
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    /* Align to a block boundary before the assembly path */
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                /* Process whatever the assembly path left over */
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        /* Finalisation: verify (decrypt) or emit (encrypt) the tag */
        if (!ctx->encrypt) {
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }

}
3086
/* Flags shared by all the custom AEAD/XTS EVP ciphers defined below */
#define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)

/* AES-128/192/256-GCM EVP_CIPHER definitions (default 12-byte IV) */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3098
3099static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3100{
3101    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3102
3103    if (type == EVP_CTRL_COPY) {
3104        EVP_CIPHER_CTX *out = ptr;
3105        EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3106
3107        if (xctx->xts.key1) {
3108            if (xctx->xts.key1 != &xctx->ks1)
3109                return 0;
3110            xctx_out->xts.key1 = &xctx_out->ks1;
3111        }
3112        if (xctx->xts.key2) {
3113            if (xctx->xts.key2 != &xctx->ks2)
3114                return 0;
3115            xctx_out->xts.key2 = &xctx_out->ks2;
3116        }
3117        return 1;
3118    } else if (type != EVP_CTRL_INIT)
3119        return -1;
3120    /* key1 and key2 are used as an indicator both key and IV are set */
3121    xctx->xts.key1 = NULL;
3122    xctx->xts.key2 = NULL;
3123    return 1;
3124}
3125
/*
 * Initialise the AES-XTS key schedules and/or tweak IV.  |key| holds two
 * half-length AES keys back to back; the two halves must differ (FIPS 140-2
 * IG A.9) unless insecure decryption is explicitly allowed.  Selects the
 * best available AES implementation at run time.  Returns 1 on success,
 * 0 on duplicated keys.
 */
static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* do/while(0) so the capability branches can 'break' out early */
        do {
            /* The key is two half length keys in reality */
            const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
            const int bits = bytes * 8;

            /*
             * Verify that the two keys are different.
             *
             * This addresses the vulnerability described in Rogaway's
             * September 2004 paper:
             *
             *      "Efficient Instantiations of Tweakable Blockciphers and
             *       Refinements to Modes OCB and PMAC".
             *      (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
             *
             * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
             * that:
             *      "The check for Key_1 != Key_2 shall be done at any place
             *       BEFORE using the keys in the XTS-AES algorithm to process
             *       data with them."
             */
            if ((!allow_insecure_decrypt || enc)
                    && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
                ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
                return 0;
            }

#ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
#else
            xctx->stream = NULL;
#endif
            /* key_len is two AES keys */
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                /* First half: data encryption/decryption key */
                if (enc) {
                    HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
# ifdef HWAES_xts_encrypt
                    xctx->stream = HWAES_xts_encrypt;
# endif
                } else {
                    HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
# ifdef HWAES_xts_decrypt
                    xctx->stream = HWAES_xts_decrypt;
#endif
                }

                /* Second half: tweak key, always an encrypt schedule */
                HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? ossl_bsaes_xts_encrypt : ossl_bsaes_xts_decrypt;
            else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            /* Generic C fallback (also used for the BSAES_CAPABLE path) */
            if (enc) {
                AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);
    }

    if (iv) {
        /* key2 set marks the IV as initialised too */
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}
3237
3238static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3239                          const unsigned char *in, size_t len)
3240{
3241    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3242
3243    if (xctx->xts.key1 == NULL
3244            || xctx->xts.key2 == NULL
3245            || out == NULL
3246            || in == NULL
3247            || len < AES_BLOCK_SIZE)
3248        return 0;
3249
3250    /*
3251     * Impose a limit of 2^20 blocks per data unit as specified by
3252     * IEEE Std 1619-2018.  The earlier and obsolete IEEE Std 1619-2007
3253     * indicated that this was a SHOULD NOT rather than a MUST NOT.
3254     * NIST SP 800-38E mandates the same limit.
3255     */
3256    if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
3257        ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
3258        return 0;
3259    }
3260
3261    if (xctx->stream)
3262        (*xctx->stream) (in, out, len,
3263                         xctx->xts.key1, xctx->xts.key2,
3264                         ctx->iv);
3265    else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
3266                                   EVP_CIPHER_CTX_is_encrypting(ctx)))
3267        return 0;
3268    return 1;
3269}
3270
/* XTS keeps no heap state, so no cleanup handler is needed */
#define aes_xts_cleanup NULL

#define XTS_FLAGS       (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY)

/* XTS uses a double-length key, so only 128/256-bit AES variants exist */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
3279
/*
 * Control operations for the EVP AES-CCM ciphers: initialisation, IV length
 * (derived as 15 - L), L/M parameters, tag get/set, TLS AAD handling and
 * context copy.  Returns 1 on success (or M for EVP_CTRL_AEAD_TLS1_AAD),
 * 0 on failure and -1 for unrecognised |type| values.
 */
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* Defaults: L = 8 (so 7-byte nonce), 12-byte tag */
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        /* CCM nonce length is defined as 15 - L */
        *(int *)ptr = 15 - cctx->L;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            /* The last two AAD bytes carry the record length (big endian) */
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_is_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(c->iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV length ivlen maps onto L = 15 - ivlen */
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* CCM tag length must be an even value in [4, 16] */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        /* When encrypting, only the length may be set, not the value */
        if (EVP_CIPHER_CTX_is_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_is_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        /* The tag, IV and length may not be reused for another message */
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
            /* Re-point the key pointer at the destination's own storage */
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;

    }
}
3380
/*
 * Initialise the AES-CCM key schedule and/or nonce.  Either argument may be
 * NULL to leave that half of the state untouched.  Selects the best
 * available AES implementation (HW AES, vector-permute or the generic C
 * code) at run time.  Always returns 1.
 */
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key)
        /* do/while(0) so the capability branches can 'break' out early */
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
#endif
            /* Generic C fallback */
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        /* The nonce is 15 - L bytes long */
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
3427
/*
 * One-shot processing of a TLS CCM record: explicit IV || payload || tag.
 * Must be called with out == in (in-place).  Returns the number of bytes
 * written on success or -1 on any failure (including tag mismatch on
 * decrypt, in which case the output is wiped).
 */
static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_is_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    /* CCM needs the payload length up front to build the initial block */
    if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
                            len))
            return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx),
                      cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
        /* Encrypt, then append the tag to the record */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        /* Decrypt, then compare the computed tag with the received one */
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        /* Tag mismatch or decrypt failure: wipe the plaintext */
        OPENSSL_cleanse(out, len);
        return -1;
    }
}
3476
/*
 * Generic CCM EVP cipher entry point.  The (in, out) combination selects
 * the operation:
 *   in == NULL, out == NULL : set the total message length to len
 *   in != NULL, out == NULL : supply AAD
 *   in == NULL, out != NULL : EVP_*Final() call, nothing to return
 *   both non-NULL           : encrypt/decrypt payload
 * Returns bytes processed, 0, or -1 on error.
 */
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    /* A non-negative AAD length means TLS mode was selected via ctrl */
    if (cctx->tls_aad_len >= 0)
        return aes_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            /* Length-setting call: CCM must know the message length first */
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv,
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_is_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            /* Compare computed tag with the expected tag saved in the
             * context buffer (set via EVP_CTRL_AEAD_SET_TAG) */
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        /* Wipe output on authentication failure */
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        /* Force a fresh IV/length/tag for the next operation */
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}
3548
/* CCM needs no mode-specific cleanup beyond the generic context free */
#define aes_ccm_cleanup NULL

/*
 * Instantiate EVP_aes_{128,192,256}_ccm() via the BLOCK_CIPHER_custom macro
 * (defined earlier in this file); presumably block size 1 and a 12 byte
 * default nonce.
 */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3557
/* Per-context state for the AES key-wrap ciphers */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    /*
     * Pointer to the IV in use (aliases ctx->iv once set), or NULL when no
     * IV has been supplied; the low-level wrap routines receive this
     * directly.
     */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;
3566
3567static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3568                             const unsigned char *iv, int enc)
3569{
3570    int len;
3571    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3572
3573    if (iv == NULL && key == NULL)
3574        return 1;
3575    if (key != NULL) {
3576        if (EVP_CIPHER_CTX_is_encrypting(ctx))
3577            AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3578                                &wctx->ks.ks);
3579        else
3580            AES_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3581                                &wctx->ks.ks);
3582        if (iv == NULL)
3583            wctx->iv = NULL;
3584    }
3585    if (iv != NULL) {
3586        if ((len = EVP_CIPHER_CTX_get_iv_length(ctx)) < 0)
3587            return 0;
3588        memcpy(ctx->iv, iv, len);
3589        wctx->iv = ctx->iv;
3590    }
3591    return 1;
3592}
3593
/*
 * AES key (un)wrap: RFC 3394 semantics without padding, RFC 5649 with
 * padding (selected by a 4 byte IV length).  When out == NULL only the
 * required output length is returned.  Returns bytes written, 0 on a
 * partial-overlap error, or -1 on failure.
 */
static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_get_iv_length(ctx) == 4;
    /* No final operation so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting need at least 16 bytes and multiple of 8 */
    if (!EVP_CIPHER_CTX_is_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding input must be multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (ossl_is_partially_overlapping(out, in, inlen)) {
        ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
        return 0;
    }
    /* Length-query mode: report the output size only */
    if (!out) {
        if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
            /* If padding round up to multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding output will be exactly 8 bytes smaller than
             * input. If padding it will be at least 8 bytes smaller but we
             * don't know how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    /* The low-level routines return 0 on failure */
    return rv ? (int)rv : -1;
}
3652
/*
 * Flags shared by all key-wrap ciphers: wrap mode with a custom IV and a
 * custom cipher routine, init always called (so the IV can be installed),
 * default ASN1 parameter handling.
 */
#define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3656
/*
 * AES-128 key wrap (no padding).  Fields: nid, block size 8 (wrap works on
 * 64-bit semiblocks), key length 16, IV length 8, flags, origin, init and
 * cipher callbacks, no cleanup, ctx size, unused ASN1/ctrl/app slots.
 */
static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the static AES-128 key wrap cipher definition */
const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}
3670
/* AES-192 key wrap (no padding): 24 byte key, 8 byte IV */
static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the static AES-192 key wrap cipher definition */
const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}
3684
/* AES-256 key wrap (no padding): 32 byte key, 8 byte IV */
static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the static AES-256 key wrap cipher definition */
const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}
3698
/*
 * AES-128 key wrap with padding: the 4 byte IV length is what selects the
 * padded (RFC 5649) code path in aes_wrap_cipher().
 */
static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the static AES-128 padded key wrap cipher definition */
const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}
3712
/* AES-192 key wrap with padding: 24 byte key, 4 byte IV selects padding */
static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the static AES-192 padded key wrap cipher definition */
const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}
3726
/* AES-256 key wrap with padding: 32 byte key, 4 byte IV selects padding */
static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

/* Accessor for the static AES-256 padded key wrap cipher definition */
const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}
3740
#ifndef OPENSSL_NO_OCB
/*
 * Control operations for the OCB mode cipher: context init, IV length
 * get/set, tag get/set and context copy.  Returns 1 on success, 0 on
 * error, -1 for unsupported controls.
 */
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset to defaults: no key/IV yet, 16 byte tag, empty buffers */
        octx->key_set = 0;
        octx->iv_set = 0;
        octx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
        octx->iv = c->iv;
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = octx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;

        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (ptr == NULL) {
            /* ptr == NULL: set only the expected tag length */
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;

            octx->taglen = arg;
            return 1;
        }
        /* A tag value may only be supplied when decrypting */
        if (arg != octx->taglen || EVP_CIPHER_CTX_is_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* The tag can only be retrieved when encrypting */
        if (arg != octx->taglen || !EVP_CIPHER_CTX_is_encrypting(c))
            return 0;

        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        /* Duplicate the low-level OCB state into the new context */
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;

    }
}
3804
/*
 * Initialise an OCB context with a key and/or IV.  Key setup selects the
 * best available AES implementation (HW, vector-permute, or the portable
 * C code) via the do/while(0) dispatch below.  Returns 1 on success,
 * 0 on failure.
 */
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            /* Portable C fallback */
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
3886
/*
 * OCB EVP cipher entry point.  out == NULL means "in" holds AAD;
 * in == NULL means finalise (flush buffered data and compute or verify
 * the tag).  Partial blocks are buffered in the context so that only
 * whole AES blocks reach the low-level OCB routines.  Returns the number
 * of bytes written (0 when everything was buffered), or -1 on error.
 */
static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (ossl_is_partially_overlapping(out + *buf_len, in, len)) {
                ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                /* Still not a full block: stash the input and return */
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            /* Buffer the remainder for the next call */
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_is_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            /* CRYPTO_ocb128_finish() compares against the expected tag */
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}
4035
4036static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4037{
4038    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4039    CRYPTO_ocb128_cleanup(&octx->ocb);
4040    return 1;
4041}
4042
/*
 * Instantiate EVP_aes_{128,192,256}_ocb() via the BLOCK_CIPHER_custom macro
 * (defined earlier in this file); presumably 16 byte block size and a
 * 12 byte default IV.
 */
BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                         /* OPENSSL_NO_OCB */
4049#endif                         /* OPENSSL_NO_OCB */
4050