/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 */
#include <openssl/opensslconf.h>
#ifndef OPENSSL_NO_AES
# include <openssl/crypto.h>
# include <openssl/evp.h>
# include <openssl/err.h>
# include <openssl/aes.h>
# include "evp_locl.h"
# include "modes_lcl.h"
# include <openssl/rand.h>

# undef EVP_CIPH_FLAG_FIPS
# define EVP_CIPH_FLAG_FIPS 0
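
/*
 * Outside FIPS builds the flag is stubbed out to 0 here, so the
 * EVP_CIPH_FLAG_FIPS bits OR'ed into the cipher tables below compile
 * away to nothing.
 */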
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */

    } ks1, ks2;                 /* AES key schedules to use */
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);

    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */

# define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
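
/*
 * CFB1 converts lengths to bits (len * 8).  MAXBITCHUNK is
 * 1 << (bits(size_t) - 4), e.g. 2^60 bytes for a 64-bit size_t, so
 * chunking input by MAXBITCHUNK guarantees that the bit count passed to
 * CRYPTO_cfb128_1_encrypt() cannot overflow a size_t.
 */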
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);

void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);

void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
# if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
#  include "ppc_arch.h"
#  define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
#  define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
#  define HWAES_set_encrypt_key aes_p8_set_encrypt_key
#  define HWAES_set_decrypt_key aes_p8_set_decrypt_key
#  define HWAES_encrypt aes_p8_encrypt
#  define HWAES_decrypt aes_p8_decrypt
#  define HWAES_cbc_encrypt aes_p8_cbc_encrypt
#  define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# endif

# if defined(AES_ASM) && !defined(I386_ONLY) && ( \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64))

extern unsigned int OPENSSL_ia32cap_P[];

#  define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
#  define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
#  define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
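
/*
 * OPENSSL_ia32cap_P[1] holds the CPUID.1:ECX feature word: bit 41 - 32 = 9
 * is SSSE3 (needed by the vector-permute and bit-sliced code paths) and
 * bit 57 - 32 = 25 is the AES-NI instruction-set flag.
 */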
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
                         unsigned char *out, size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
                         unsigned char *out, size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
                   size_t len);
#  define AES_GCM_ASM(gctx)  (gctx->ctr==aesni_ctr32_encrypt_blocks && \
                              gctx->gcm.ghash==gcm_ghash_avx)
#  define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
                              gctx->gcm.ghash==gcm_ghash_avx)
#  undef AES_GCM_ASM2          /* minor size optimization */
# endif
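
/*
 * AES_GCM_ASM gates the stitched AES-NI/AVX bulk path: it requires both
 * the AES-NI CTR32 block function and the AVX GHASH to be selected, so
 * the combined aesni_gcm_encrypt/decrypt routines stay consistent with
 * the incremental code path.  The ASM2 variant keys off the block
 * function instead and is disabled above to save code size.
 */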
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    mode = ctx->cipher->flags & EVP_CIPH_MODE;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);

    return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = ctx->cipher->block_size;

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);

    return 1;
}
# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

    if (key) {
        aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    } else {
        /* If the key is set, use the IV; otherwise copy it. */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
    }
    return 1;
}

# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;

    if (key) {
        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + ctx->key_len / 2,
                              ctx->key_len * 4, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
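
/*
 * For XTS, ctx->key_len covers both halves of the key, so each AES key
 * is key_len / 2 bytes, i.e. key_len * 4 bits: EVP_aes_256_xts() has a
 * 64-byte key_len and therefore two 256-bit key schedules.
 */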
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;

    if (key) {
        aesni_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
    }

    if (iv)
        memcpy(ctx->iv, iv, 15 - cctx->L);

    return 1;
}

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_init_key, \
        aesni_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
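
/*
 * Each use of this macro emits two cipher tables, a hardware-accelerated
 * one and a portable fallback, plus the public getter that picks between
 * them at run time: e.g. EVP_aes_128_cbc() returns &aesni_128_cbc when
 * AESNI_CAPABLE and &aes_128_cbc otherwise.
 */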
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
# elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))

#  include "sparc_arch.h"

extern unsigned int OPENSSL_sparcv9cap_P[];

#  define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)

void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
/*
 * Key-length-specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources. Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in inner loops [much needed on T4]. Having
 * non-key-length-specific routines would instead require conditional
 * branches either in inner loops or on subroutine entry. The former is
 * hardly acceptable, while the latter means a code-size increase
 * comparable to the size occupied by multiple key-length-specific
 * subroutines, so why fight?
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    mode = ctx->cipher->flags & EVP_CIPH_MODE;
    bits = ctx->key_len * 8;
    ret = 0;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        aes_t4_set_decrypt_key(key, bits, ctx->cipher_data);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;
        }
    } else {
        aes_t4_set_encrypt_key(key, bits, ctx->cipher_data);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

    if (key) {
        int bits = ctx->key_len * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an iv, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    } else {
        /* If the key is set, use the IV; otherwise copy it. */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
    }
    return 1;
}

# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;

    if (key) {
        int bits = ctx->key_len * 4;
        /* key_len is two AES keys */
        if (enc) {
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_encrypt;
                break;
            case 192:
                xctx->stream = aes192_t4_xts_encrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_encrypt;
                break;
            default:
                return 0;
            }
        } else {
            aes_t4_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_decrypt;
                break;
            case 192:
                xctx->stream = aes192_t4_xts_decrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_decrypt;
                break;
            default:
                return 0;
            }
        }

        aes_t4_set_encrypt_key(key + ctx->key_len / 2,
                               ctx->key_len * 4, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;

    if (key) {
        int bits = ctx->key_len * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            cctx->str = enc ? (ccm128_f) aes128_t4_ccm64_encrypt :
                (ccm128_f) aes128_t4_ccm64_decrypt;
            break;
        case 192:
            cctx->str = enc ? (ccm128_f) aes192_t4_ccm64_encrypt :
                (ccm128_f) aes192_t4_ccm64_decrypt;
            break;
        case 256:
            cctx->str = enc ? (ccm128_f) aes256_t4_ccm64_encrypt :
                (ccm128_f) aes256_t4_ccm64_decrypt;
            break;
        default:
            return 0;
        }
    }

    if (iv)
        memcpy(ctx->iv, iv, 15 - cctx->L);

    return 1;
}

# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_init_key, \
        aes_t4_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
#elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)

# include "s390x_arch.h"

/*
 * KMA-GCM-AES parameter block
 * (see z/Architecture Principles of Operation, SA22-7832-11)
 */
        unsigned char reserved[12];
        unsigned long long g[2];
        unsigned long long taadl;
        unsigned long long tpcl;
        unsigned long long g[2];
        unsigned char ares[16];
        unsigned char mres[16];
        unsigned char kres[16];
# define S390X_aes_128_CAPABLE ((OPENSSL_s390xcap_P.km[0] & \
                                 S390X_CAPBIT(S390X_AES_128)) && \
                                (OPENSSL_s390xcap_P.kmc[0] & \
                                 S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_CAPABLE ((OPENSSL_s390xcap_P.km[0] & \
                                 S390X_CAPBIT(S390X_AES_192)) && \
                                (OPENSSL_s390xcap_P.kmc[0] & \
                                 S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_CAPABLE ((OPENSSL_s390xcap_P.km[0] & \
                                 S390X_CAPBIT(S390X_AES_256)) && \
                                (OPENSSL_s390xcap_P.kmc[0] & \
                                 S390X_CAPBIT(S390X_AES_256)))

# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);
# define S390X_aes_128_cbc_CAPABLE 1    /* checked by callee */
# define S390X_aes_192_cbc_CAPABLE 1
# define S390X_aes_256_cbc_CAPABLE 1

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_ecb_CAPABLE 0
# define S390X_aes_192_ecb_CAPABLE 0
# define S390X_aes_256_ecb_CAPABLE 0

# define s390x_aes_ecb_cipher aes_ecb_cipher
static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_ofb_CAPABLE 0
# define S390X_aes_192_ofb_CAPABLE 0
# define S390X_aes_256_ofb_CAPABLE 0

# define s390x_aes_ofb_cipher aes_ofb_cipher
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_cfb_CAPABLE 0
# define S390X_aes_192_cfb_CAPABLE 0
# define S390X_aes_256_cfb_CAPABLE 0

# define s390x_aes_cfb_cipher aes_cfb_cipher
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_cfb8_CAPABLE 0
# define S390X_aes_192_cfb8_CAPABLE 0
# define S390X_aes_256_cfb8_CAPABLE 0

# define s390x_aes_cfb8_cipher aes_cfb8_cipher
static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_aes_128_cfb1_CAPABLE 0
# define S390X_aes_192_cfb1_CAPABLE 0
# define S390X_aes_256_cfb1_CAPABLE 0

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_aes_128_ctr_CAPABLE 1    /* checked by callee */
# define S390X_aes_192_ctr_CAPABLE 1
# define S390X_aes_256_ctr_CAPABLE 1

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
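
/*
 * The padded length is the IV rounded up to a multiple of 16 plus one
 * 16-byte length block, e.g. S390X_gcm_ivpadlen(12) = 32 and
 * S390X_gcm_ivpadlen(17) = 48.
 */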
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;
    int n, rem;

    if (ctx->kma.param.tpcl)
        return -2;

    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    n = ctx->areslen;
    while (n && len) {
        ctx->ares[n] = *aad;
        n = (n + 1) & 0xf;
        ++aad;
        --len;
    }
    /* ctx->ares contains a complete block if offset has wrapped around */
    if (!n) {
        s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
    }
    ctx->areslen = n;

    rem = len & 0xf;
    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        aad += len;
        ctx->fc |= S390X_KMA_HS;
    }

    if (rem) {
        ctx->areslen = rem;
        do {
            --rem;
            ctx->ares[rem] = aad[rem];
        } while (rem);
    }

    return 0;
}
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    n = ctx->mreslen;
    if (n) {
        inptr = in;
        inlen = len;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
            n = (n + 1) & 0xf;
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            /*
             * previous call already encrypted/decrypted its remainder,
             * see comment below
             */
        }
    }

    rem = len & 0xf;
    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        in += len;
        out += len;
        ctx->fc |= S390X_KMA_HS;
        ctx->areslen = 0;
    }

    /*
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        n = ctx->mreslen;
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }

    return 0;
}
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
{
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;

    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
    } else {
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
    }
}
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf, *iv;
    int ivlen, enc, len;

    buf = c->buf;
    iv = c->iv;
    enc = c->encrypt;

    switch (type) {
    case EVP_CTRL_INIT:
        ivlen = c->cipher->iv_len;
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = ivlen;
        gctx->iv = iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
        if (arg <= 0)
            return 0;
        if (arg != 12) {
            len = S390X_gcm_ivpadlen(arg);

            /* Allocate memory for iv if needed. */
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
                if (gctx->iv != iv)
                    OPENSSL_free(gctx->iv);

                gctx->iv = OPENSSL_malloc(len);
                if (gctx->iv == NULL)
                    return 0;
            }
            /* Add padding. */
            memset(gctx->iv + arg, 0, len - arg - 8);
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_GCM_SET_TAG:
        if (arg <= 0 || arg > 16 || enc)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_GCM_GET_TAG:
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
            return 0;

        memcpy(ptr, gctx->kma.param.t.b, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;

        if (arg)
            memcpy(gctx->iv, ptr, arg);

        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;

        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;

        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;

        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        (*(unsigned long long *)(gctx->iv + gctx->ivlen - 8))++;
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
            return 0;

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);

        if (gctx->iv == iv) {
            gctx_out->iv = out->iv;
        } else {
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            gctx_out->iv = OPENSSL_malloc(len);
            if (gctx_out->iv == NULL)
                return 0;

            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;

    default:
        return -1;
    }
}
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    int keylen;

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_key_length(ctx);
        memcpy(&gctx->kma.param.k, key, keylen);

        /* Convert key size to function code. */
        gctx->fc = S390X_AES_128 + (((keylen << 3) - 128) >> 6);
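        /*
         * 16-, 24- and 32-byte keys map to consecutive function codes:
         * e.g. keylen 24 gives S390X_AES_128 + (192 - 128) / 64 =
         * S390X_AES_128 + 1, i.e. S390X_AES_192.
         */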
        if (!enc)
            gctx->fc |= S390X_DECRYPT;

        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;

        if (iv != NULL) {
            s390x_aes_gcm_setiv(gctx, iv);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        if (gctx->key_set)
            s390x_aes_gcm_setiv(gctx, iv);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);

        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = ctx->buf;
    const int enc = ctx->encrypt;
    int rv = -1;

    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

    if (enc) {
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    int enc;

    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;

    if (in != NULL) {
        if (out == NULL) {
            if (s390x_aes_gcm_aad(gctx, in, len))
                return -1;
        } else {
            if (s390x_aes_gcm(gctx, in, out, len))
                return -1;
        }
        return len;
    } else {
        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /*
         * recall that we already did en-/decrypt gctx->mres
         * and returned it to caller...
         */
        OPENSSL_cleanse(tmp, gctx->mreslen);
        gctx->iv_set = 0;

        enc = ctx->encrypt;
        if (enc) {
            gctx->taglen = 16;
        } else {
            if (gctx->taglen < 0)
                return -1;

            buf = ctx->buf;
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
                return -1;
        }
        return 0;
    }
}
static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    const unsigned char *iv;

    iv = c->iv;
    if (iv != gctx->iv)
        OPENSSL_free(gctx->iv);

    OPENSSL_cleanse(gctx, sizeof(*gctx));
    return 1;
}
# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
# define S390X_aes_128_xts_CAPABLE 1    /* checked by callee */
# define S390X_aes_256_xts_CAPABLE 1

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup

# define S390X_AES_CCM_CTX EVP_AES_CCM_CTX
# define S390X_aes_128_ccm_CAPABLE 0
# define S390X_aes_192_ccm_CAPABLE 0
# define S390X_aes_256_ccm_CAPABLE 0

# define s390x_aes_ccm_init_key aes_ccm_init_key
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_ccm_cipher aes_ccm_cipher
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_ccm_ctrl aes_ccm_ctrl
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_ccm_cleanup aes_ccm_cleanup
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
                              MODE,flags) \
static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
    nid##_##keylen##_##nmode,blocksize, \
    keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    s390x_aes_init_key, \
    s390x_aes_##mode##_cipher, \
    NULL, \
    sizeof(EVP_AES_KEY), \
    NULL,NULL,NULL,NULL \
}; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
    nid##_##keylen##_##nmode, \
    blocksize, \
    keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    aes_init_key, \
    aes_##mode##_cipher, \
    NULL, \
    sizeof(EVP_AES_KEY), \
    NULL,NULL,NULL,NULL \
}; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ \
    return S390X_aes_##keylen##_##mode##_CAPABLE ? \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
}
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
    nid##_##keylen##_##mode, \
    blocksize, \
    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    s390x_aes_##mode##_init_key, \
    s390x_aes_##mode##_cipher, \
    s390x_aes_##mode##_cleanup, \
    sizeof(S390X_AES_##MODE##_CTX), \
    NULL, \
    NULL, \
    s390x_aes_##mode##_ctrl, \
    NULL \
}; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
    nid##_##keylen##_##mode,blocksize, \
    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
    ivlen, \
    flags | EVP_CIPH_##MODE##_MODE, \
    aes_##mode##_init_key, \
    aes_##mode##_cipher, \
    aes_##mode##_cleanup, \
    sizeof(EVP_AES_##MODE##_CTX), \
    NULL, \
    NULL, \
    aes_##mode##_ctrl, \
    NULL \
}; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ \
    return S390X_aes_##keylen##_##mode##_CAPABLE ? \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
}
# else

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

# endif
# if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
#  include "arm_arch.h"
#  if __ARM_MAX_ARCH__>=7
#   if defined(BSAES_ASM)
#    define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#   endif
#   define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
#   define HWAES_set_encrypt_key aes_v8_set_encrypt_key
#   define HWAES_set_decrypt_key aes_v8_set_decrypt_key
#   define HWAES_encrypt aes_v8_encrypt
#   define HWAES_decrypt aes_v8_decrypt
#   define HWAES_cbc_encrypt aes_v8_cbc_encrypt
#   define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
#  endif
# endif
# if defined(HWAES_CAPABLE)
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
void HWAES_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char *ivec, const int enc);
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
# endif
# define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
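
/*
 * One _pack invocation therefore defines the getters for all seven
 * classic modes of a given key length: CBC, ECB, OFB128, CFB128, CFB1,
 * CFB8 and CTR.
 */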
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    mode = ctx->cipher->flags & EVP_CIPH_MODE;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
# ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
# endif
# ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
# endif
# ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
# endif
        {
            ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        }
    } else
# ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
# endif
            (void)0;            /* terminate potentially open 'else' */
    } else
# endif
# ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
    } else
# endif
# ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
# endif
    {
        ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (dat->stream.cbc)
        (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
    else if (ctx->encrypt)
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
    else
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);

    return 1;
}
static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    size_t bl = ctx->cipher->block_size;
    size_t i;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (len < bl)
        return 1;

    /*
     * Biasing the loop bound (len -= bl; i <= len) visits every complete
     * block and silently ignores any trailing partial block.
     */
    for (i = 0, len -= bl; i <= len; i += bl)
        (*dat->block) (in + i, out + i, &dat->ks);

    return 1;
}
static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
                          ctx->iv, &ctx->num, dat->block);
    return 1;
}
static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
                          ctx->iv, &ctx->num, ctx->encrypt, dat->block);
    return 1;
}
static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
                            ctx->iv, &ctx->num, ctx->encrypt, dat->block);
    return 1;
}
static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (ctx->flags & EVP_CIPH_FLAG_LENGTH_BITS) {
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);
        return 1;
    }

    while (len >= MAXBITCHUNK) {
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);
        len -= MAXBITCHUNK;
        in += MAXBITCHUNK;
        out += MAXBITCHUNK;
    }
    if (len)
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);

    return 1;
}
static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned int num = ctx->num;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (dat->stream.ctr)
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
                                    ctx->iv, ctx->buf, &num, dat->stream.ctr);
    else
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                              ctx->iv, ctx->buf, &num, dat->block);
    ctx->num = (size_t)num;
    return 1;
}
BLOCK_CIPHER_generic_pack(NID_aes, 128, EVP_CIPH_FLAG_FIPS)
BLOCK_CIPHER_generic_pack(NID_aes, 192, EVP_CIPH_FLAG_FIPS)
BLOCK_CIPHER_generic_pack(NID_aes, 256, EVP_CIPH_FLAG_FIPS)
static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_GCM_CTX *gctx = c->cipher_data;

    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
    if (gctx->iv != c->iv)
        OPENSSL_free(gctx->iv);
    return 1;
}
/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
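
/*
 * The counter is treated as a 64-bit big-endian integer: bytes are
 * incremented from the end, stopping at the first byte that does not
 * wrap to zero.
 */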
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = c->cipher_data;

    switch (type) {
    case EVP_CTRL_INIT:
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = c->cipher->iv_len;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            gctx->iv = OPENSSL_malloc(arg);
            if (gctx->iv == NULL)
                return 0;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_GCM_SET_TAG:
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_GCM_GET_TAG:
        if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        {
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;
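
        /*
         * The length correction above, by example: a decrypt-side TLS
         * record length of 1024 becomes 1024 - 8 (explicit IV) - 16 (tag)
         * = 1000, the true payload length that is authenticated as AAD.
         */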
    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = out->cipher_data;

            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
                if (gctx_out->iv == NULL)
                    return 0;
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;
    }
}
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

    if (key) {
        do {
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# endif
                break;
            } else
# endif
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                break;
            } else
# endif
                (void)0;        /* terminate potentially open 'else' */

            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
        } while (0);

        /*
         * If we have an iv, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is set, use the IV; otherwise copy it. */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
/*
 * Handle TLS GCM packet format. This consists of the last portion of the IV
 * followed by the payload and finally the tag. On encrypt generate IV,
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
 * and verify tag.
 */
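
/*
 * Record layout handled here:
 *
 *   | explicit IV (8 bytes) | payload | tag (16 bytes) |
 *
 * i.e. EVP_GCM_TLS_EXPLICIT_IV_LEN bytes up front and
 * EVP_GCM_TLS_TAG_LEN bytes at the end.
 */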
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    int rv = -1;

    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
# if defined(AES_GCM_ASM)
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
# endif
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
# if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
# endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
# if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
# endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
# if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
# endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
# if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
# if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
# if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
# if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        if (!ctx->encrypt) {
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }
}
2321
# define CUSTOM_FLAGS  (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)

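/*-
 * Illustrative caller-side sketch (kept out of the build with "#if 0"):
 * one-shot AES-256-GCM encryption through the EVP interface declared
 * above. The function name, buffer layout and tag size are assumptions
 * chosen for the example, not library API.
 */
#if 0
static int example_aes_256_gcm_encrypt(const unsigned char key[32],
                                       const unsigned char iv[12],
                                       const unsigned char *aad, int aadlen,
                                       const unsigned char *msg, int msglen,
                                       unsigned char *out,
                                       unsigned char tag[16])
{
    EVP_CIPHER_CTX ctx;
    int len, ok = 0;

    EVP_CIPHER_CTX_init(&ctx);
    if (!EVP_EncryptInit_ex(&ctx, EVP_aes_256_gcm(), NULL, NULL, NULL))
        goto err;
    /* 12 bytes is already the default IV length set by BLOCK_CIPHER_custom */
    if (!EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL))
        goto err;
    if (!EVP_EncryptInit_ex(&ctx, NULL, NULL, key, iv))
        goto err;
    /* AAD is supplied with a NULL output pointer (see aes_gcm_cipher) */
    if (aad != NULL && !EVP_EncryptUpdate(&ctx, NULL, &len, aad, aadlen))
        goto err;
    if (!EVP_EncryptUpdate(&ctx, out, &len, msg, msglen))
        goto err;
    if (!EVP_EncryptFinal_ex(&ctx, out + len, &len))
        goto err;
    if (!EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_GCM_GET_TAG, 16, tag))
        goto err;
    ok = 1;
 err:
    EVP_CIPHER_CTX_cleanup(&ctx);
    return ok;
}
#endif
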
static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_XTS_CTX *xctx = c->cipher_data;
    if (type == EVP_CTRL_COPY) {
        EVP_CIPHER_CTX *out = ptr;
        EVP_AES_XTS_CTX *xctx_out = out->cipher_data;
        if (xctx->xts.key1) {
            if (xctx->xts.key1 != &xctx->ks1)
                return 0;
            xctx_out->xts.key1 = &xctx_out->ks1;
        }
        if (xctx->xts.key2) {
            if (xctx->xts.key2 != &xctx->ks2)
                return 0;
            xctx_out->xts.key2 = &xctx_out->ks2;
        }
        return 1;
    } else if (type != EVP_CTRL_INIT)
        return -1;
    /* key1 and key2 are used as an indicator both key and IV are set */
    xctx->xts.key1 = NULL;
    xctx->xts.key2 = NULL;
    return 1;
}

static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;

    if (key)
        do {
# ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
# else
            xctx->stream = NULL;
# endif
            /* key_len is two AES keys */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
                } else {
                    HWAES_set_decrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
                }

                HWAES_set_encrypt_key(key + ctx->key_len / 2,
                                      ctx->key_len * 4, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
# endif
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + ctx->key_len / 2,
                                      ctx->key_len * 4, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
# endif
                (void)0;        /* terminate potentially open 'else' */

            if (enc) {
                AES_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + ctx->key_len / 2,
                                ctx->key_len * 4, &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
    if (!xctx->xts.key1 || !xctx->xts.key2)
        return 0;
    if (!out || !in || len < AES_BLOCK_SIZE)
        return 0;
    if (xctx->stream)
        (*xctx->stream) (in, out, len,
                         xctx->xts.key1, xctx->xts.key2, ctx->iv);
    else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
                                   ctx->encrypt))
        return 0;
    return 1;
}

# define aes_xts_cleanup NULL

# define XTS_FLAGS     (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                        | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                        | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS,
                    EVP_CIPH_FLAG_FIPS | XTS_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS,
                    EVP_CIPH_FLAG_FIPS | XTS_FLAGS)

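/*-
 * Illustrative caller-side sketch (kept out of the build with "#if 0"):
 * encrypting one disk sector with AES-256-XTS. EVP_aes_256_xts() expects
 * a 64-byte key (two AES-256 keys, split in aes_xts_init_key) and a
 * 16-byte IV carrying the sector tweak. The 512-byte sector size and the
 * function name are assumptions chosen for the example.
 */
#if 0
static int example_aes_256_xts_encrypt_sector(const unsigned char key[64],
                                              const unsigned char tweak[16],
                                              const unsigned char in[512],
                                              unsigned char out[512])
{
    EVP_CIPHER_CTX ctx;
    int len, ok = 0;

    EVP_CIPHER_CTX_init(&ctx);
    if (!EVP_EncryptInit_ex(&ctx, EVP_aes_256_xts(), NULL, key, tweak))
        goto err;
    /* XTS requires at least AES_BLOCK_SIZE bytes; see aes_xts_cipher */
    if (!EVP_EncryptUpdate(&ctx, out, &len, in, 512))
        goto err;
    ok = 1;
 err:
    EVP_CIPHER_CTX_cleanup(&ctx);
    return ok;
}
#endif
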
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = c->cipher_data;
    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_CCM_SET_IVLEN:
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_CCM_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (c->encrypt && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(c->buf, ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_CCM_GET_TAG:
        if (!c->encrypt || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = out->cipher_data;
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}

static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key)
        do {
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
# endif
            AES_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->iv_set && !cctx->key_set)
        return -1;
    if (!ctx->encrypt && !cctx->tag_set)
        return -1;
    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }
    /* EVP_*Final() doesn't return any data */
    if (!in)
        return 0;
    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (ctx->encrypt) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, ctx->buf, cctx->M))
                    rv = len;
            }
        }
        /* If tag mismatch wipe buffer */
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}

# define aes_ccm_cleanup NULL

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)

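/*-
 * Illustrative caller-side sketch (kept out of the build with "#if 0"):
 * AES-128-CCM encryption. Unlike GCM, CCM must learn the total message
 * length before any AAD is processed, which is done through an
 * EVP_EncryptUpdate() call where both buffers are NULL (the !out/!in
 * path of aes_ccm_cipher). Nonce and tag sizes and the function name
 * are assumptions chosen for the example.
 */
#if 0
static int example_aes_128_ccm_encrypt(const unsigned char key[16],
                                       const unsigned char nonce[12],
                                       const unsigned char *aad, int aadlen,
                                       const unsigned char *msg, int msglen,
                                       unsigned char *out,
                                       unsigned char tag[8])
{
    EVP_CIPHER_CTX ctx;
    int len, ok = 0;

    EVP_CIPHER_CTX_init(&ctx);
    if (!EVP_EncryptInit_ex(&ctx, EVP_aes_128_ccm(), NULL, NULL, NULL))
        goto err;
    /* 12-byte nonce, i.e. L = 15 - 12 = 3 */
    if (!EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_CCM_SET_IVLEN, 12, NULL))
        goto err;
    /* Request an 8-byte tag; aes_ccm_ctrl allows even values 4..16 */
    if (!EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_CCM_SET_TAG, 8, NULL))
        goto err;
    if (!EVP_EncryptInit_ex(&ctx, NULL, NULL, key, nonce))
        goto err;
    /* Declare the plaintext length before AAD */
    if (!EVP_EncryptUpdate(&ctx, NULL, &len, NULL, msglen))
        goto err;
    if (aad != NULL && !EVP_EncryptUpdate(&ctx, NULL, &len, aad, aadlen))
        goto err;
    if (!EVP_EncryptUpdate(&ctx, out, &len, msg, msglen))
        goto err;
    if (!EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_CCM_GET_TAG, 8, tag))
        goto err;
    ok = 1;
 err:
    EVP_CIPHER_CTX_cleanup(&ctx);
    return ok;
}
#endif
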
typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    /* Indicates if IV has been set */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;

static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    EVP_AES_WRAP_CTX *wctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        if (ctx->encrypt)
            AES_set_encrypt_key(key, ctx->key_len * 8, &wctx->ks.ks);
        else
            AES_set_decrypt_key(key, ctx->key_len * 8, &wctx->ks.ks);
        if (!iv)
            wctx->iv = NULL;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 8);
        wctx->iv = ctx->iv;
    }
    return 1;
}

static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = ctx->cipher_data;
    size_t rv;
    if (!in)
        return 0;
    if (inlen % 8)
        return -1;
    if (ctx->encrypt && inlen < 8)
        return -1;
    if (!ctx->encrypt && inlen < 16)
        return -1;
    if (!out) {
        if (ctx->encrypt)
            return inlen + 8;
        else
            return inlen - 8;
    }
    if (ctx->encrypt)
        rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv, out, in, inlen,
                             (block128_f) AES_encrypt);
    else
        rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv, out, in, inlen,
                               (block128_f) AES_decrypt);
    return rv ? (int)rv : -1;
}

#define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}

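/*-
 * Illustrative caller-side sketch (kept out of the build with "#if 0"):
 * wrapping a 16-byte content key under a 16-byte KEK with AES-128-WRAP
 * (RFC 3394). Wrap mode is refused by EVP_CipherInit_ex unless the
 * caller first sets EVP_CIPHER_CTX_FLAG_WRAP_ALLOW, and output is input
 * length plus 8 bytes, as computed in aes_wrap_cipher. Variable and
 * function names are assumptions chosen for the example.
 */
#if 0
static int example_aes_128_key_wrap(const unsigned char kek[16],
                                    const unsigned char cek[16],
                                    unsigned char wrapped[24])
{
    EVP_CIPHER_CTX ctx;
    int len, ok = 0;

    EVP_CIPHER_CTX_init(&ctx);
    EVP_CIPHER_CTX_set_flags(&ctx, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
    /* A NULL IV selects the default RFC 3394 IV (0xA6 repeated) */
    if (!EVP_EncryptInit_ex(&ctx, EVP_aes_128_wrap(), NULL, kek, NULL))
        goto err;
    if (!EVP_EncryptUpdate(&ctx, wrapped, &len, cek, 16))
        goto err;
    ok = 1;
 err:
    EVP_CIPHER_CTX_cleanup(&ctx);
    return ok;
}
#endif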