GCC Code Coverage Report

Directory: ./
File:      lib/libcrypto/evp/e_aes.c
Date:      2017-11-07

             Exec   Total   Coverage
Lines:        111     595     18.7 %
Branches:      30     360      8.3 %

Line  Branch  Exec  Source
1
/* $OpenBSD: e_aes.c,v 1.34 2017/05/02 03:59:44 deraadt Exp $ */
2
/* ====================================================================
3
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
4
 *
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
7
 * are met:
8
 *
9
 * 1. Redistributions of source code must retain the above copyright
10
 *    notice, this list of conditions and the following disclaimer.
11
 *
12
 * 2. Redistributions in binary form must reproduce the above copyright
13
 *    notice, this list of conditions and the following disclaimer in
14
 *    the documentation and/or other materials provided with the
15
 *    distribution.
16
 *
17
 * 3. All advertising materials mentioning features or use of this
18
 *    software must display the following acknowledgment:
19
 *    "This product includes software developed by the OpenSSL Project
20
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
21
 *
22
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
23
 *    endorse or promote products derived from this software without
24
 *    prior written permission. For written permission, please contact
25
 *    openssl-core@openssl.org.
26
 *
27
 * 5. Products derived from this software may not be called "OpenSSL"
28
 *    nor may "OpenSSL" appear in their names without prior written
29
 *    permission of the OpenSSL Project.
30
 *
31
 * 6. Redistributions of any form whatsoever must retain the following
32
 *    acknowledgment:
33
 *    "This product includes software developed by the OpenSSL Project
34
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
35
 *
36
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
37
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
38
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
39
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
40
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
41
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
42
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
43
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
44
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
45
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
46
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
47
 * OF THE POSSIBILITY OF SUCH DAMAGE.
48
 * ====================================================================
49
 *
50
 */
51
52
#include <stdlib.h>
53
#include <string.h>
54
55
#include <openssl/opensslconf.h>
56
57
#ifndef OPENSSL_NO_AES
58
#include <openssl/aes.h>
59
#include <openssl/err.h>
60
#include <openssl/evp.h>
61
62
#include "evp_locl.h"
63
#include "modes_lcl.h"
64
65
typedef struct {
66
	AES_KEY ks;
67
	block128_f block;
68
	union {
69
		cbc128_f cbc;
70
		ctr128_f ctr;
71
	} stream;
72
} EVP_AES_KEY;
73
74
typedef struct {
75
	AES_KEY ks;		/* AES key schedule to use */
76
	int key_set;		/* Set if key initialised */
77
	int iv_set;		/* Set if an iv is set */
78
	GCM128_CONTEXT gcm;
79
	unsigned char *iv;	/* Temporary IV store */
80
	int ivlen;		/* IV length */
81
	int taglen;
82
	int iv_gen;		/* It is OK to generate IVs */
83
	int tls_aad_len;	/* TLS AAD length */
84
	ctr128_f ctr;
85
} EVP_AES_GCM_CTX;
86
87
typedef struct {
88
	AES_KEY ks1, ks2;	/* AES key schedules to use */
89
	XTS128_CONTEXT xts;
90
	void (*stream)(const unsigned char *in, unsigned char *out,
91
	    size_t length, const AES_KEY *key1, const AES_KEY *key2,
92
	    const unsigned char iv[16]);
93
} EVP_AES_XTS_CTX;
94
95
typedef struct {
96
	AES_KEY ks;		/* AES key schedule to use */
97
	int key_set;		/* Set if key initialised */
98
	int iv_set;		/* Set if an iv is set */
99
	int tag_set;		/* Set if tag is valid */
100
	int len_set;		/* Set if message length set */
101
	int L, M;		/* L and M parameters from RFC3610 */
102
	CCM128_CONTEXT ccm;
103
	ccm128_f str;
104
} EVP_AES_CCM_CTX;
105
106
#define MAXBITCHUNK	((size_t)1<<(sizeof(size_t)*8-4))
107
108
#ifdef VPAES_ASM
109
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
110
    AES_KEY *key);
111
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
112
    AES_KEY *key);
113
114
void vpaes_encrypt(const unsigned char *in, unsigned char *out,
115
    const AES_KEY *key);
116
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
117
    const AES_KEY *key);
118
119
void vpaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
120
    size_t length, const AES_KEY *key, unsigned char *ivec, int enc);
121
#endif
122
#ifdef BSAES_ASM
123
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
124
    size_t length, const AES_KEY *key, unsigned char ivec[16], int enc);
125
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
126
    size_t len, const AES_KEY *key, const unsigned char ivec[16]);
127
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
128
    size_t len, const AES_KEY *key1, const AES_KEY *key2,
129
    const unsigned char iv[16]);
130
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
131
    size_t len, const AES_KEY *key1, const AES_KEY *key2,
132
    const unsigned char iv[16]);
133
#endif
134
#ifdef AES_CTR_ASM
135
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
136
    size_t blocks, const AES_KEY *key,
137
    const unsigned char ivec[AES_BLOCK_SIZE]);
138
#endif
139
#ifdef AES_XTS_ASM
140
void AES_xts_encrypt(const char *inp, char *out, size_t len,
141
    const AES_KEY *key1, const AES_KEY *key2, const unsigned char iv[16]);
142
void AES_xts_decrypt(const char *inp, char *out, size_t len,
143
    const AES_KEY *key1, const AES_KEY *key2, const unsigned char iv[16]);
144
#endif
145
146
#if	defined(AES_ASM) &&				(  \
147
	((defined(__i386)	|| defined(__i386__)	|| \
148
	  defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
149
	defined(__x86_64)	|| defined(__x86_64__)	|| \
150
	defined(_M_AMD64)	|| defined(_M_X64)	|| \
151
	defined(__INTEL__)				)
152
153
#include "x86_arch.h"
154
155
#ifdef VPAES_ASM
156
#define VPAES_CAPABLE	(OPENSSL_cpu_caps() & CPUCAP_MASK_SSSE3)
157
#endif
158
#ifdef BSAES_ASM
159
#define BSAES_CAPABLE	VPAES_CAPABLE
160
#endif
161
/*
162
 * AES-NI section
163
 */
164
#define	AESNI_CAPABLE	(OPENSSL_cpu_caps() & CPUCAP_MASK_AESNI)
165
166
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
167
    AES_KEY *key);
168
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
169
    AES_KEY *key);
170
171
void aesni_encrypt(const unsigned char *in, unsigned char *out,
172
    const AES_KEY *key);
173
void aesni_decrypt(const unsigned char *in, unsigned char *out,
174
    const AES_KEY *key);
175
176
void aesni_ecb_encrypt(const unsigned char *in, unsigned char *out,
177
    size_t length, const AES_KEY *key, int enc);
178
void aesni_cbc_encrypt(const unsigned char *in, unsigned char *out,
179
    size_t length, const AES_KEY *key, unsigned char *ivec, int enc);
180
181
void aesni_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
182
    size_t blocks, const void *key, const unsigned char *ivec);
183
184
void aesni_xts_encrypt(const unsigned char *in, unsigned char *out,
185
    size_t length, const AES_KEY *key1, const AES_KEY *key2,
186
    const unsigned char iv[16]);
187
188
void aesni_xts_decrypt(const unsigned char *in, unsigned char *out,
189
    size_t length, const AES_KEY *key1, const AES_KEY *key2,
190
    const unsigned char iv[16]);
191
192
void aesni_ccm64_encrypt_blocks (const unsigned char *in, unsigned char *out,
193
    size_t blocks, const void *key, const unsigned char ivec[16],
194
    unsigned char cmac[16]);
195
196
void aesni_ccm64_decrypt_blocks (const unsigned char *in, unsigned char *out,
197
    size_t blocks, const void *key, const unsigned char ivec[16],
198
    unsigned char cmac[16]);
199
200
static int
201
aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
202
    const unsigned char *iv, int enc)
203
{
204
	int ret, mode;
205
2676
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
206
207
1338
	mode = ctx->cipher->flags & EVP_CIPH_MODE;
208
2676
	if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) &&
209
1338
	    !enc) {
210
318
		ret = aesni_set_decrypt_key(key, ctx->key_len * 8,
211
		    ctx->cipher_data);
212
318
		dat->block = (block128_f)aesni_decrypt;
213
318
		dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
214
		    (cbc128_f)aesni_cbc_encrypt : NULL;
215
318
	} else {
216
1020
		ret = aesni_set_encrypt_key(key, ctx->key_len * 8,
217
		    ctx->cipher_data);
218
1020
		dat->block = (block128_f)aesni_encrypt;
219
1020
		if (mode == EVP_CIPH_CBC_MODE)
220
474
			dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
221
546
		else if (mode == EVP_CIPH_CTR_MODE)
222
54
			dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
223
		else
224
492
			dat->stream.cbc = NULL;
225
	}
226
227
1338
	if (ret < 0) {
228
		EVPerror(EVP_R_AES_KEY_SETUP_FAILED);
229
		return 0;
230
	}
231
232
1338
	return 1;
233
1338
}
234
235
static int
236
aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
237
    const unsigned char *in, size_t len)
238
{
239
4908
	aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv,
240
1636
	    ctx->encrypt);
241
242
1636
	return 1;
243
}
244
245
static int
246
aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
247
    const unsigned char *in, size_t len)
248
{
249
1008
	size_t	bl = ctx->cipher->block_size;
250
251
504
	if (len < bl)
252
		return 1;
253
254
504
	aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
255
256
504
	return 1;
257
504
}
258
259
#define aesni_ofb_cipher aes_ofb_cipher
260
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
261
    const unsigned char *in, size_t len);
262
263
#define aesni_cfb_cipher aes_cfb_cipher
264
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
265
    const unsigned char *in, size_t len);
266
267
#define aesni_cfb8_cipher aes_cfb8_cipher
268
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
269
    const unsigned char *in, size_t len);
270
271
#define aesni_cfb1_cipher aes_cfb1_cipher
272
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
273
    const unsigned char *in, size_t len);
274
275
#define aesni_ctr_cipher aes_ctr_cipher
276
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
277
    const unsigned char *in, size_t len);
278
279
static int
280
aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
281
    const unsigned char *iv, int enc)
282
{
283
	EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
284
285
	if (!iv && !key)
286
		return 1;
287
	if (key) {
288
		aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks);
289
		CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
290
		    (block128_f)aesni_encrypt);
291
		gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
292
		/* If we have an iv can set it directly, otherwise use
293
		 * saved IV.
294
		 */
295
		if (iv == NULL && gctx->iv_set)
296
			iv = gctx->iv;
297
		if (iv) {
298
			CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
299
			gctx->iv_set = 1;
300
		}
301
		gctx->key_set = 1;
302
	} else {
303
		/* If key set use IV, otherwise copy */
304
		if (gctx->key_set)
305
			CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
306
		else
307
			memcpy(gctx->iv, iv, gctx->ivlen);
308
		gctx->iv_set = 1;
309
		gctx->iv_gen = 0;
310
	}
311
	return 1;
312
}
313
314
#define aesni_gcm_cipher aes_gcm_cipher
315
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
316
    const unsigned char *in, size_t len);
317
318
static int
319
aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
320
    const unsigned char *iv, int enc)
321
{
322
	EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
323
324
	if (!iv && !key)
325
		return 1;
326
327
	if (key) {
328
		/* key_len is two AES keys */
329
		if (enc) {
330
			aesni_set_encrypt_key(key, ctx->key_len * 4,
331
			    &xctx->ks1);
332
			xctx->xts.block1 = (block128_f)aesni_encrypt;
333
			xctx->stream = aesni_xts_encrypt;
334
		} else {
335
			aesni_set_decrypt_key(key, ctx->key_len * 4,
336
			    &xctx->ks1);
337
			xctx->xts.block1 = (block128_f)aesni_decrypt;
338
			xctx->stream = aesni_xts_decrypt;
339
		}
340
341
		aesni_set_encrypt_key(key + ctx->key_len / 2,
342
		    ctx->key_len * 4, &xctx->ks2);
343
		xctx->xts.block2 = (block128_f)aesni_encrypt;
344
345
		xctx->xts.key1 = &xctx->ks1;
346
	}
347
348
	if (iv) {
349
		xctx->xts.key2 = &xctx->ks2;
350
		memcpy(ctx->iv, iv, 16);
351
	}
352
353
	return 1;
354
}
355
356
#define aesni_xts_cipher aes_xts_cipher
357
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
358
    const unsigned char *in, size_t len);
359
360
static int
361
aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
362
    const unsigned char *iv, int enc)
363
{
364
	EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
365
366
	if (!iv && !key)
367
		return 1;
368
	if (key) {
369
		aesni_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks);
370
		CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
371
		    &cctx->ks, (block128_f)aesni_encrypt);
372
		cctx->str = enc ? (ccm128_f)aesni_ccm64_encrypt_blocks :
373
		    (ccm128_f)aesni_ccm64_decrypt_blocks;
374
		cctx->key_set = 1;
375
	}
376
	if (iv) {
377
		memcpy(ctx->iv, iv, 15 - cctx->L);
378
		cctx->iv_set = 1;
379
	}
380
	return 1;
381
}
382
383
#define aesni_ccm_cipher aes_ccm_cipher
384
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
385
    const unsigned char *in, size_t len);
386
387
#define BLOCK_CIPHER_generic(n,keylen,blocksize,ivlen,nmode,mode,MODE,fl) \
388
static const EVP_CIPHER aesni_##keylen##_##mode = {			\
389
	.nid = n##_##keylen##_##nmode,					\
390
	.block_size = blocksize,					\
391
	.key_len = keylen / 8,						\
392
	.iv_len = ivlen, 						\
393
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
394
	.init = aesni_init_key,						\
395
	.do_cipher = aesni_##mode##_cipher,				\
396
	.ctx_size = sizeof(EVP_AES_KEY)					\
397
};									\
398
static const EVP_CIPHER aes_##keylen##_##mode = {			\
399
	.nid = n##_##keylen##_##nmode,					\
400
	.block_size = blocksize,					\
401
	.key_len = keylen / 8,						\
402
	.iv_len = ivlen, 						\
403
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
404
	.init = aes_init_key,						\
405
	.do_cipher = aes_##mode##_cipher,				\
406
	.ctx_size = sizeof(EVP_AES_KEY)					\
407
};									\
408
const EVP_CIPHER *							\
409
EVP_aes_##keylen##_##mode(void)						\
410
{									\
411
	return AESNI_CAPABLE ?						\
412
	    &aesni_##keylen##_##mode : &aes_##keylen##_##mode;		\
413
}
414
415
#define BLOCK_CIPHER_custom(n,keylen,blocksize,ivlen,mode,MODE,fl)	\
416
static const EVP_CIPHER aesni_##keylen##_##mode = {			\
417
	.nid = n##_##keylen##_##mode,					\
418
	.block_size = blocksize,					\
419
	.key_len =							\
420
	    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) *	\
421
	    keylen / 8,							\
422
	.iv_len = ivlen,						\
423
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
424
	.init = aesni_##mode##_init_key,				\
425
	.do_cipher = aesni_##mode##_cipher,				\
426
	.cleanup = aes_##mode##_cleanup,				\
427
	.ctx_size = sizeof(EVP_AES_##MODE##_CTX),			\
428
	.ctrl = aes_##mode##_ctrl					\
429
};									\
430
static const EVP_CIPHER aes_##keylen##_##mode = {			\
431
	.nid = n##_##keylen##_##mode,					\
432
	.block_size = blocksize,					\
433
	.key_len =							\
434
	    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) *	\
435
	    keylen / 8,							\
436
	.iv_len = ivlen,						\
437
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
438
	.init = aes_##mode##_init_key,					\
439
	.do_cipher = aes_##mode##_cipher,				\
440
	.cleanup = aes_##mode##_cleanup,				\
441
	.ctx_size = sizeof(EVP_AES_##MODE##_CTX),			\
442
	.ctrl = aes_##mode##_ctrl					\
443
};									\
444
const EVP_CIPHER *							\
445
EVP_aes_##keylen##_##mode(void)						\
446
{									\
447
	return AESNI_CAPABLE ?						\
448
	    &aesni_##keylen##_##mode : &aes_##keylen##_##mode;		\
449
}
450
451
#else
452
453
#define BLOCK_CIPHER_generic(n,keylen,blocksize,ivlen,nmode,mode,MODE,fl) \
454
static const EVP_CIPHER aes_##keylen##_##mode = {			\
455
	.nid = n##_##keylen##_##nmode,					\
456
	.block_size = blocksize,					\
457
	.key_len = keylen / 8,						\
458
	.iv_len = ivlen,						\
459
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
460
	.init = aes_init_key,						\
461
	.do_cipher = aes_##mode##_cipher,				\
462
	.ctx_size = sizeof(EVP_AES_KEY)					\
463
};									\
464
const EVP_CIPHER *							\
465
EVP_aes_##keylen##_##mode(void)						\
466
{									\
467
	return &aes_##keylen##_##mode;					\
468
}
469
470
#define BLOCK_CIPHER_custom(n,keylen,blocksize,ivlen,mode,MODE,fl)	\
471
static const EVP_CIPHER aes_##keylen##_##mode = {			\
472
	.nid = n##_##keylen##_##mode,					\
473
	.block_size = blocksize,					\
474
	.key_len =							\
475
	    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) *	\
476
	    keylen / 8,							\
477
	.iv_len = ivlen,						\
478
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
479
	.init = aes_##mode##_init_key,					\
480
	.do_cipher = aes_##mode##_cipher,				\
481
	.cleanup = aes_##mode##_cleanup,				\
482
	.ctx_size = sizeof(EVP_AES_##MODE##_CTX),			\
483
	.ctrl = aes_##mode##_ctrl					\
484
};									\
485
const EVP_CIPHER *							\
486
EVP_aes_##keylen##_##mode(void)						\
487
{									\
488
	return &aes_##keylen##_##mode;					\
489
}
490
491
#endif
492
493
#define BLOCK_CIPHER_generic_pack(nid,keylen,flags)		\
494
	BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)	\
495
	BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)	\
496
	BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)	\
497
	BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)	\
498
	BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)	\
499
	BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)	\
500
	BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
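/*
 * Editor's note (illustration, not part of e_aes.c): for keylen = 128 the
 * pack above instantiates BLOCK_CIPHER_generic once per mode, emitting the
 * static EVP_CIPHER table(s) plus an accessor.  On an AES-NI capable build
 * the accessor expands to roughly:
 *
 *	const EVP_CIPHER *
 *	EVP_aes_128_cbc(void)
 *	{
 *		return AESNI_CAPABLE ?
 *		    &aesni_128_cbc : &aes_128_cbc;
 *	}
 *
 * and likewise for EVP_aes_128_ecb(), EVP_aes_128_ofb(), EVP_aes_128_cfb(),
 * EVP_aes_128_cfb1(), EVP_aes_128_cfb8() and EVP_aes_128_ctr().
 */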
501
502
static int
503
aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
504
    const unsigned char *iv, int enc)
505
{
506
	int ret, mode;
507
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
508
509
	mode = ctx->cipher->flags & EVP_CIPH_MODE;
510
	if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) &&
511
	    !enc)
512
#ifdef BSAES_CAPABLE
513
		if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
514
			ret = AES_set_decrypt_key(key, ctx->key_len * 8,
515
			    &dat->ks);
516
			dat->block = (block128_f)AES_decrypt;
517
			dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
518
		} else
519
#endif
520
#ifdef VPAES_CAPABLE
521
		if (VPAES_CAPABLE) {
522
			ret = vpaes_set_decrypt_key(key, ctx->key_len * 8,
523
			    &dat->ks);
524
			dat->block = (block128_f)vpaes_decrypt;
525
			dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
526
			    (cbc128_f)vpaes_cbc_encrypt : NULL;
527
		} else
528
#endif
529
		{
530
			ret = AES_set_decrypt_key(key, ctx->key_len * 8,
531
			    &dat->ks);
532
			dat->block = (block128_f)AES_decrypt;
533
			dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
534
			    (cbc128_f)AES_cbc_encrypt : NULL;
535
		} else
536
#ifdef BSAES_CAPABLE
537
		if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
538
			ret = AES_set_encrypt_key(key, ctx->key_len * 8,
539
			    &dat->ks);
540
			dat->block = (block128_f)AES_encrypt;
541
			dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
542
		} else
543
#endif
544
#ifdef VPAES_CAPABLE
545
		if (VPAES_CAPABLE) {
546
			ret = vpaes_set_encrypt_key(key, ctx->key_len * 8,
547
			    &dat->ks);
548
			dat->block = (block128_f)vpaes_encrypt;
549
			dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
550
			    (cbc128_f)vpaes_cbc_encrypt : NULL;
551
		} else
552
#endif
553
		{
554
			ret = AES_set_encrypt_key(key, ctx->key_len * 8,
555
			    &dat->ks);
556
			dat->block = (block128_f)AES_encrypt;
557
			dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
558
			    (cbc128_f)AES_cbc_encrypt : NULL;
559
#ifdef AES_CTR_ASM
560
			if (mode == EVP_CIPH_CTR_MODE)
561
				dat->stream.ctr = (ctr128_f)AES_ctr32_encrypt;
562
#endif
563
		}
564
565
	if (ret < 0) {
566
		EVPerror(EVP_R_AES_KEY_SETUP_FAILED);
567
		return 0;
568
	}
569
570
	return 1;
571
}
572
573
static int
574
aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
575
    const unsigned char *in, size_t len)
576
{
577
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
578
579
	if (dat->stream.cbc)
580
		(*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv,
581
		    ctx->encrypt);
582
	else if (ctx->encrypt)
583
		CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv,
584
		    dat->block);
585
	else
586
		CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv,
587
		    dat->block);
588
589
	return 1;
590
}
591
592
static int
593
aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
594
    const unsigned char *in, size_t len)
595
{
596
	size_t	bl = ctx->cipher->block_size;
597
	size_t	i;
598
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
599
600
	if (len < bl)
601
		return 1;
602
603
	for (i = 0, len -= bl; i <= len; i += bl)
604
		(*dat->block)(in + i, out + i, &dat->ks);
605
606
	return 1;
607
}
608
609
static int
610
aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
611
    const unsigned char *in, size_t len)
612
{
613
624
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
614
615
624
	CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num,
616
312
	    dat->block);
617
312
	return 1;
618
}
619
620
static int
621
aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
622
    const unsigned char *in, size_t len)
623
{
624
624
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
625
626
624
	CRYPTO_cfb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num,
627
312
	    ctx->encrypt, dat->block);
628
312
	return 1;
629
}
630
631
static int
632
aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
633
    const unsigned char *in, size_t len)
634
{
635
336
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
636
637
336
	CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num,
638
168
	    ctx->encrypt, dat->block);
639
168
	return 1;
640
}
641
642
static int
643
aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
644
    const unsigned char *in, size_t len)
645
{
646
336
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
647
648
168
	if (ctx->flags&EVP_CIPH_FLAG_LENGTH_BITS) {
649
		CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks, ctx->iv,
650
		    &ctx->num, ctx->encrypt, dat->block);
651
		return 1;
652
	}
653
654
168
	while (len >= MAXBITCHUNK) {
655
		CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK*8, &dat->ks,
656
		    ctx->iv, &ctx->num, ctx->encrypt, dat->block);
657
		len -= MAXBITCHUNK;
658
	}
659
168
	if (len)
660
336
		CRYPTO_cfb128_1_encrypt(in, out, len*8, &dat->ks,
661
168
		    ctx->iv, &ctx->num, ctx->encrypt, dat->block);
662
663
168
	return 1;
664
168
}
665
666
static int aes_ctr_cipher (EVP_CIPHER_CTX *ctx, unsigned char *out,
667
    const unsigned char *in, size_t len)
668
{
669
108
	unsigned int num = ctx->num;
670
54
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
671
672
54
	if (dat->stream.ctr)
673
54
		CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
674
		    ctx->iv, ctx->buf, &num, dat->stream.ctr);
675
	else
676
		CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
677
		    ctx->iv, ctx->buf, &num, dat->block);
678
54
	ctx->num = (size_t)num;
679
54
	return 1;
680
54
}
681
682
28152
BLOCK_CIPHER_generic_pack(NID_aes, 128, EVP_CIPH_FLAG_FIPS)
683
27672
BLOCK_CIPHER_generic_pack(NID_aes, 192, EVP_CIPH_FLAG_FIPS)
684
27764
BLOCK_CIPHER_generic_pack(NID_aes, 256, EVP_CIPH_FLAG_FIPS)
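/*
 * Editor's sketch (not part of e_aes.c): typical caller-side use of one of
 * the ciphers generated above through the public EVP interface.  The helper
 * name do_aes_256_cbc_encrypt() is illustrative only; "out" is assumed to
 * have room for in_len plus one 16-byte block of padding.
 */
#include <openssl/evp.h>

static int
do_aes_256_cbc_encrypt(const unsigned char key[32], const unsigned char iv[16],
    const unsigned char *in, int in_len, unsigned char *out, int *out_len)
{
	EVP_CIPHER_CTX *c;
	int len;

	if ((c = EVP_CIPHER_CTX_new()) == NULL)
		return 0;

	/* EVP_aes_256_cbc() resolves to the AES-NI table when available. */
	if (!EVP_EncryptInit_ex(c, EVP_aes_256_cbc(), NULL, key, iv))
		goto err;
	if (!EVP_EncryptUpdate(c, out, &len, in, in_len))
		goto err;
	*out_len = len;
	if (!EVP_EncryptFinal_ex(c, out + len, &len))
		goto err;
	*out_len += len;

	EVP_CIPHER_CTX_free(c);
	return 1;

 err:
	EVP_CIPHER_CTX_free(c);
	return 0;
}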
685
686
static int
687
aes_gcm_cleanup(EVP_CIPHER_CTX *c)
688
{
689
	EVP_AES_GCM_CTX *gctx = c->cipher_data;
690
691
	if (gctx->iv != c->iv)
692
		free(gctx->iv);
693
	explicit_bzero(gctx, sizeof(*gctx));
694
	return 1;
695
}
696
697
/* increment counter (64-bit int) by 1 */
698
static void
699
ctr64_inc(unsigned char *counter)
700
{
701
	int n = 8;
702
	unsigned char  c;
703
704
	do {
705
		--n;
706
		c = counter[n];
707
		++c;
708
		counter[n] = c;
709
		if (c)
710
			return;
711
	} while (n);
712
}
713
714
static int
715
aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
716
{
717
	EVP_AES_GCM_CTX *gctx = c->cipher_data;
718
719
	switch (type) {
720
	case EVP_CTRL_INIT:
721
		gctx->key_set = 0;
722
		gctx->iv_set = 0;
723
		gctx->ivlen = c->cipher->iv_len;
724
		gctx->iv = c->iv;
725
		gctx->taglen = -1;
726
		gctx->iv_gen = 0;
727
		gctx->tls_aad_len = -1;
728
		return 1;
729
730
	case EVP_CTRL_GCM_SET_IVLEN:
731
		if (arg <= 0)
732
			return 0;
733
		/* Allocate memory for IV if needed */
734
		if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
735
			if (gctx->iv != c->iv)
736
				free(gctx->iv);
737
			gctx->iv = malloc(arg);
738
			if (!gctx->iv)
739
				return 0;
740
		}
741
		gctx->ivlen = arg;
742
		return 1;
743
744
	case EVP_CTRL_GCM_SET_TAG:
745
		if (arg <= 0 || arg > 16 || c->encrypt)
746
			return 0;
747
		memcpy(c->buf, ptr, arg);
748
		gctx->taglen = arg;
749
		return 1;
750
751
	case EVP_CTRL_GCM_GET_TAG:
752
		if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0)
753
			return 0;
754
		memcpy(ptr, c->buf, arg);
755
		return 1;
756
757
	case EVP_CTRL_GCM_SET_IV_FIXED:
758
		/* Special case: -1 length restores whole IV */
759
		if (arg == -1) {
760
			memcpy(gctx->iv, ptr, gctx->ivlen);
761
			gctx->iv_gen = 1;
762
			return 1;
763
		}
764
		/* Fixed field must be at least 4 bytes and invocation field
765
		 * at least 8.
766
		 */
767
		if ((arg < 4) || (gctx->ivlen - arg) < 8)
768
			return 0;
769
		if (arg)
770
			memcpy(gctx->iv, ptr, arg);
771
		if (c->encrypt)
772
			arc4random_buf(gctx->iv + arg, gctx->ivlen - arg);
773
		gctx->iv_gen = 1;
774
		return 1;
775
776
	case EVP_CTRL_GCM_IV_GEN:
777
		if (gctx->iv_gen == 0 || gctx->key_set == 0)
778
			return 0;
779
		CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
780
		if (arg <= 0 || arg > gctx->ivlen)
781
			arg = gctx->ivlen;
782
		memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
783
		/* Invocation field will be at least 8 bytes in size and
784
		 * so no need to check wrap around or increment more than
785
		 * last 8 bytes.
786
		 */
787
		ctr64_inc(gctx->iv + gctx->ivlen - 8);
788
		gctx->iv_set = 1;
789
		return 1;
790
791
	case EVP_CTRL_GCM_SET_IV_INV:
792
		if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
793
			return 0;
794
		memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
795
		CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
796
		gctx->iv_set = 1;
797
		return 1;
798
799
	case EVP_CTRL_AEAD_TLS1_AAD:
800
		/* Save the AAD for later use */
801
		if (arg != 13)
802
			return 0;
803
		memcpy(c->buf, ptr, arg);
804
		gctx->tls_aad_len = arg;
805
		{
806
			unsigned int len = c->buf[arg - 2] << 8 |
807
			    c->buf[arg - 1];
808
809
			/* Correct length for explicit IV */
810
			if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
811
				return 0;
812
			len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
813
814
			/* If decrypting correct for tag too */
815
			if (!c->encrypt) {
816
				if (len < EVP_GCM_TLS_TAG_LEN)
817
					return 0;
818
				len -= EVP_GCM_TLS_TAG_LEN;
819
			}
820
			c->buf[arg - 2] = len >> 8;
821
			c->buf[arg - 1] = len & 0xff;
822
		}
823
		/* Extra padding: tag appended to record */
824
		return EVP_GCM_TLS_TAG_LEN;
825
826
	case EVP_CTRL_COPY:
827
	    {
828
		EVP_CIPHER_CTX *out = ptr;
829
		EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
830
831
		if (gctx->gcm.key) {
832
			if (gctx->gcm.key != &gctx->ks)
833
				return 0;
834
			gctx_out->gcm.key = &gctx_out->ks;
835
		}
836
		if (gctx->iv == c->iv)
837
			gctx_out->iv = out->iv;
838
		else {
839
			gctx_out->iv = malloc(gctx->ivlen);
840
			if (!gctx_out->iv)
841
				return 0;
842
			memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
843
		}
844
		return 1;
845
	    }
846
847
	default:
848
		return -1;
849
850
	}
851
}
852
853
static ctr128_f
854
aes_gcm_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
855
    const unsigned char *key, size_t key_len)
856
{
857
#ifdef BSAES_CAPABLE
858
	if (BSAES_CAPABLE) {
859
		AES_set_encrypt_key(key, key_len * 8, aes_key);
860
		CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
861
		return (ctr128_f)bsaes_ctr32_encrypt_blocks;
862
	} else
863
#endif
864
#ifdef VPAES_CAPABLE
865
	if (VPAES_CAPABLE) {
866
		vpaes_set_encrypt_key(key, key_len * 8, aes_key);
867
		CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt);
868
		return NULL;
869
	} else
870
#endif
871
		(void)0; /* terminate potentially open 'else' */
872
873
	AES_set_encrypt_key(key, key_len * 8, aes_key);
874
	CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
875
#ifdef AES_CTR_ASM
876
	return (ctr128_f)AES_ctr32_encrypt;
877
#else
878
	return NULL;
879
#endif
880
}
881
882
static int
883
aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
884
    const unsigned char *iv, int enc)
885
{
886
	EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
887
888
	if (!iv && !key)
889
		return 1;
890
	if (key) {
891
		gctx->ctr = aes_gcm_set_key(&gctx->ks, &gctx->gcm,
892
		    key, ctx->key_len);
893
894
		/* If we have an iv can set it directly, otherwise use
895
		 * saved IV.
896
		 */
897
		if (iv == NULL && gctx->iv_set)
898
			iv = gctx->iv;
899
		if (iv) {
900
			CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
901
			gctx->iv_set = 1;
902
		}
903
		gctx->key_set = 1;
904
	} else {
905
		/* If key set use IV, otherwise copy */
906
		if (gctx->key_set)
907
			CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
908
		else
909
			memcpy(gctx->iv, iv, gctx->ivlen);
910
		gctx->iv_set = 1;
911
		gctx->iv_gen = 0;
912
	}
913
	return 1;
914
}
915
916
/* Handle TLS GCM packet format. This consists of the last portion of the IV
917
 * followed by the payload and finally the tag. On encrypt generate IV,
918
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
919
 * and verify tag.
920
 */
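/*
 * Editor's illustration (not part of e_aes.c): the in-place record buffer
 * described above is laid out as
 *
 *	[ explicit IV | payload | tag ]
 *	   8 bytes      len      16 bytes
 *
 * where 8 is EVP_GCM_TLS_EXPLICIT_IV_LEN and 16 is EVP_GCM_TLS_TAG_LEN,
 * matching the pointer/length adjustments in aes_gcm_tls_cipher() below.
 */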
921
922
static int
923
aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
924
    const unsigned char *in, size_t len)
925
{
926
	EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
927
	int rv = -1;
928
929
	/* Encrypt/decrypt must be performed in place */
930
	if (out != in ||
931
	    len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
932
		return -1;
933
934
	/* Set IV from start of buffer or generate IV and write to start
935
	 * of buffer.
936
	 */
937
	if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
938
	    EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
939
	    EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
940
		goto err;
941
942
	/* Use saved AAD */
943
	if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
944
		goto err;
945
946
	/* Fix buffer and length to point to payload */
947
	in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
948
	out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
949
	len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
950
	if (ctx->encrypt) {
951
		/* Encrypt payload */
952
		if (gctx->ctr) {
953
			if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, in, out,
954
			    len, gctx->ctr))
955
				goto err;
956
		} else {
957
			if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
958
				goto err;
959
		}
960
		out += len;
961
962
		/* Finally write tag */
963
		CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
964
		rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
965
	} else {
966
		/* Decrypt */
967
		if (gctx->ctr) {
968
			if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, in, out,
969
			    len, gctx->ctr))
970
				goto err;
971
		} else {
972
			if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
973
				goto err;
974
		}
975
		/* Retrieve tag */
976
		CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
977
978
		/* If tag mismatch wipe buffer */
979
		if (memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
980
			explicit_bzero(out, len);
981
			goto err;
982
		}
983
		rv = len;
984
	}
985
986
err:
987
	gctx->iv_set = 0;
988
	gctx->tls_aad_len = -1;
989
	return rv;
990
}
991
992
static int
993
aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
994
    const unsigned char *in, size_t len)
995
{
996
	EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
997
998
	/* If not set up, return error */
999
	if (!gctx->key_set)
1000
		return -1;
1001
1002
	if (gctx->tls_aad_len >= 0)
1003
		return aes_gcm_tls_cipher(ctx, out, in, len);
1004
1005
	if (!gctx->iv_set)
1006
		return -1;
1007
1008
	if (in) {
1009
		if (out == NULL) {
1010
			if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
1011
				return -1;
1012
		} else if (ctx->encrypt) {
1013
			if (gctx->ctr) {
1014
				if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
1015
				    in, out, len, gctx->ctr))
1016
					return -1;
1017
			} else {
1018
				if (CRYPTO_gcm128_encrypt(&gctx->gcm,
1019
				    in, out, len))
1020
					return -1;
1021
			}
1022
		} else {
1023
			if (gctx->ctr) {
1024
				if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
1025
				    in, out, len, gctx->ctr))
1026
					return -1;
1027
			} else {
1028
				if (CRYPTO_gcm128_decrypt(&gctx->gcm,
1029
				    in, out, len))
1030
					return -1;
1031
			}
1032
		}
1033
		return len;
1034
	} else {
1035
		if (!ctx->encrypt) {
1036
			if (gctx->taglen < 0)
1037
				return -1;
1038
			if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf,
1039
			    gctx->taglen) != 0)
1040
				return -1;
1041
			gctx->iv_set = 0;
1042
			return 0;
1043
		}
1044
		CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
1045
		gctx->taglen = 16;
1046
1047
		/* Don't reuse the IV */
1048
		gctx->iv_set = 0;
1049
		return 0;
1050
	}
1051
1052
}
1053
1054
#define CUSTOM_FLAGS \
1055
    ( EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV | \
1056
      EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT | \
1057
      EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY )
1058
1059
9036
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
1060
    EVP_CIPH_FLAG_FIPS|EVP_CIPH_FLAG_AEAD_CIPHER|CUSTOM_FLAGS)
1061
3106
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
1062
    EVP_CIPH_FLAG_FIPS|EVP_CIPH_FLAG_AEAD_CIPHER|CUSTOM_FLAGS)
1063
9036
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
1064
    EVP_CIPH_FLAG_FIPS|EVP_CIPH_FLAG_AEAD_CIPHER|CUSTOM_FLAGS)
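/*
 * Editor's sketch (not part of e_aes.c): how a caller typically drives the
 * ctrl commands handled by aes_gcm_ctrl() above.  gcm_seal_example() and the
 * fixed 12-byte IV / 16-byte tag sizes are illustrative; passing a NULL
 * output pointer to EVP_EncryptUpdate() feeds AAD, as in aes_gcm_cipher().
 */
#include <openssl/evp.h>

static int
gcm_seal_example(const unsigned char key[32], const unsigned char iv[12],
    const unsigned char *aad, int aad_len,
    const unsigned char *in, int in_len,
    unsigned char *out, unsigned char tag[16])
{
	EVP_CIPHER_CTX *c;
	int len;

	if ((c = EVP_CIPHER_CTX_new()) == NULL)
		return 0;
	if (!EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL))
		goto err;
	/* Set the IV length via ctrl before installing key and IV. */
	if (!EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_GCM_SET_IVLEN, 12, NULL))
		goto err;
	if (!EVP_EncryptInit_ex(c, NULL, NULL, key, iv))
		goto err;
	/* AAD pass: output pointer is NULL. */
	if (aad_len > 0 && !EVP_EncryptUpdate(c, NULL, &len, aad, aad_len))
		goto err;
	/* GCM is a stream mode: ciphertext length equals in_len. */
	if (!EVP_EncryptUpdate(c, out, &len, in, in_len))
		goto err;
	if (!EVP_EncryptFinal_ex(c, out + len, &len))
		goto err;
	/* EVP_CTRL_GCM_GET_TAG copies the 16-byte tag out of the context. */
	if (!EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_GCM_GET_TAG, 16, tag))
		goto err;
	EVP_CIPHER_CTX_free(c);
	return 1;

 err:
	EVP_CIPHER_CTX_free(c);
	return 0;
}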
1065
1066
static int
1067
aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1068
{
1069
	EVP_AES_XTS_CTX *xctx = c->cipher_data;
1070
1071
	switch (type) {
1072
	case EVP_CTRL_INIT:
1073
		/*
1074
		 * key1 and key2 are used as an indicator both key and IV
1075
		 * are set
1076
		 */
1077
		xctx->xts.key1 = NULL;
1078
		xctx->xts.key2 = NULL;
1079
		return 1;
1080
1081
	case EVP_CTRL_COPY:
1082
	    {
1083
		EVP_CIPHER_CTX *out = ptr;
1084
		EVP_AES_XTS_CTX *xctx_out = out->cipher_data;
1085
1086
		if (xctx->xts.key1) {
1087
			if (xctx->xts.key1 != &xctx->ks1)
1088
				return 0;
1089
			xctx_out->xts.key1 = &xctx_out->ks1;
1090
		}
1091
		if (xctx->xts.key2) {
1092
			if (xctx->xts.key2 != &xctx->ks2)
1093
				return 0;
1094
			xctx_out->xts.key2 = &xctx_out->ks2;
1095
		}
1096
		return 1;
1097
	    }
1098
	}
1099
	return -1;
1100
}
1101
1102
static int
1103
aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1104
    const unsigned char *iv, int enc)
1105
{
1106
	EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
1107
1108
	if (!iv && !key)
1109
		return 1;
1110
1111
	if (key) do {
1112
#ifdef AES_XTS_ASM
1113
		xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
1114
#else
1115
		xctx->stream = NULL;
1116
#endif
1117
		/* key_len is two AES keys */
1118
#ifdef BSAES_CAPABLE
1119
		if (BSAES_CAPABLE)
1120
			xctx->stream = enc ? bsaes_xts_encrypt :
1121
			    bsaes_xts_decrypt;
1122
		else
1123
#endif
1124
#ifdef VPAES_CAPABLE
1125
		if (VPAES_CAPABLE) {
1126
			if (enc) {
1127
				vpaes_set_encrypt_key(key, ctx->key_len * 4,
1128
				    &xctx->ks1);
1129
				xctx->xts.block1 = (block128_f)vpaes_encrypt;
1130
			} else {
1131
				vpaes_set_decrypt_key(key, ctx->key_len * 4,
1132
				    &xctx->ks1);
1133
				xctx->xts.block1 = (block128_f)vpaes_decrypt;
1134
			}
1135
1136
			vpaes_set_encrypt_key(key + ctx->key_len / 2,
1137
			    ctx->key_len * 4, &xctx->ks2);
1138
			xctx->xts.block2 = (block128_f)vpaes_encrypt;
1139
1140
			xctx->xts.key1 = &xctx->ks1;
1141
			break;
1142
		} else
1143
#endif
1144
			(void)0;	/* terminate potentially open 'else' */
1145
1146
		if (enc) {
1147
			AES_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1);
1148
			xctx->xts.block1 = (block128_f)AES_encrypt;
1149
		} else {
1150
			AES_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1);
1151
			xctx->xts.block1 = (block128_f)AES_decrypt;
1152
		}
1153
1154
		AES_set_encrypt_key(key + ctx->key_len / 2,
1155
		    ctx->key_len * 4, &xctx->ks2);
1156
		xctx->xts.block2 = (block128_f)AES_encrypt;
1157
1158
		xctx->xts.key1 = &xctx->ks1;
1159
	} while (0);
1160
1161
	if (iv) {
1162
		xctx->xts.key2 = &xctx->ks2;
1163
		memcpy(ctx->iv, iv, 16);
1164
	}
1165
1166
	return 1;
1167
}
1168
1169
static int
1170
aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1171
    const unsigned char *in, size_t len)
1172
{
1173
	EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
1174
1175
	if (!xctx->xts.key1 || !xctx->xts.key2)
1176
		return 0;
1177
	if (!out || !in || len < AES_BLOCK_SIZE)
1178
		return 0;
1179
1180
	if (xctx->stream)
1181
		(*xctx->stream)(in, out, len, xctx->xts.key1, xctx->xts.key2,
1182
		    ctx->iv);
1183
	else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
1184
	    ctx->encrypt))
1185
		return 0;
1186
	return 1;
1187
}
1188
1189
#define aes_xts_cleanup NULL
1190
1191
#define XTS_FLAGS \
1192
    ( EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV | \
1193
      EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY )
1194
1195
3106
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, EVP_CIPH_FLAG_FIPS|XTS_FLAGS)
1196
3106
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, EVP_CIPH_FLAG_FIPS|XTS_FLAGS)
1197
1198
static int
1199
aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1200
{
1201
	EVP_AES_CCM_CTX *cctx = c->cipher_data;
1202
1203
	switch (type) {
1204
	case EVP_CTRL_INIT:
1205
		cctx->key_set = 0;
1206
		cctx->iv_set = 0;
1207
		cctx->L = 8;
1208
		cctx->M = 12;
1209
		cctx->tag_set = 0;
1210
		cctx->len_set = 0;
1211
		return 1;
1212
1213
	case EVP_CTRL_CCM_SET_IVLEN:
1214
		arg = 15 - arg;
1215
1216
	case EVP_CTRL_CCM_SET_L:
1217
		if (arg < 2 || arg > 8)
1218
			return 0;
1219
		cctx->L = arg;
1220
		return 1;
1221
1222
	case EVP_CTRL_CCM_SET_TAG:
1223
		if ((arg & 1) || arg < 4 || arg > 16)
1224
			return 0;
1225
		if ((c->encrypt && ptr) || (!c->encrypt && !ptr))
1226
			return 0;
1227
		if (ptr) {
1228
			cctx->tag_set = 1;
1229
			memcpy(c->buf, ptr, arg);
1230
		}
1231
		cctx->M = arg;
1232
		return 1;
1233
1234
	case EVP_CTRL_CCM_GET_TAG:
1235
		if (!c->encrypt || !cctx->tag_set)
1236
			return 0;
1237
		if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
1238
			return 0;
1239
		cctx->tag_set = 0;
1240
		cctx->iv_set = 0;
1241
		cctx->len_set = 0;
1242
		return 1;
1243
1244
	case EVP_CTRL_COPY:
1245
	    {
1246
		EVP_CIPHER_CTX *out = ptr;
1247
		EVP_AES_CCM_CTX *cctx_out = out->cipher_data;
1248
1249
		if (cctx->ccm.key) {
1250
			if (cctx->ccm.key != &cctx->ks)
1251
				return 0;
1252
			cctx_out->ccm.key = &cctx_out->ks;
1253
		}
1254
		return 1;
1255
	    }
1256
1257
	default:
1258
		return -1;
1259
	}
1260
}
1261
1262
static int
1263
aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1264
    const unsigned char *iv, int enc)
1265
{
1266
	EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
1267
1268
	if (!iv && !key)
1269
		return 1;
1270
	if (key) do {
1271
#ifdef VPAES_CAPABLE
1272
		if (VPAES_CAPABLE) {
1273
			vpaes_set_encrypt_key(key, ctx->key_len*8, &cctx->ks);
1274
			CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
1275
			    &cctx->ks, (block128_f)vpaes_encrypt);
1276
			cctx->str = NULL;
1277
			cctx->key_set = 1;
1278
			break;
1279
		}
1280
#endif
1281
		AES_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks);
1282
		CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
1283
		    &cctx->ks, (block128_f)AES_encrypt);
1284
		cctx->str = NULL;
1285
		cctx->key_set = 1;
1286
	} while (0);
1287
	if (iv) {
1288
		memcpy(ctx->iv, iv, 15 - cctx->L);
1289
		cctx->iv_set = 1;
1290
	}
1291
	return 1;
1292
}
1293
1294
static int
1295
aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1296
    const unsigned char *in, size_t len)
1297
{
1298
	EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
1299
	CCM128_CONTEXT *ccm = &cctx->ccm;
1300
1301
	/* If not set up, return error */
1302
	if (!cctx->iv_set && !cctx->key_set)
1303
		return -1;
1304
	if (!ctx->encrypt && !cctx->tag_set)
1305
		return -1;
1306
1307
	if (!out) {
1308
		if (!in) {
1309
			if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
1310
			    len))
1311
				return -1;
1312
			cctx->len_set = 1;
1313
			return len;
1314
		}
1315
		/* If have AAD need message length */
1316
		if (!cctx->len_set && len)
1317
			return -1;
1318
		CRYPTO_ccm128_aad(ccm, in, len);
1319
		return len;
1320
	}
1321
	/* EVP_*Final() doesn't return any data */
1322
	if (!in)
1323
		return 0;
1324
	/* If not set length yet do it */
1325
	if (!cctx->len_set) {
1326
		if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
1327
			return -1;
1328
		cctx->len_set = 1;
1329
	}
1330
	if (ctx->encrypt) {
1331
		if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
1332
		    cctx->str) : CRYPTO_ccm128_encrypt(ccm, in, out, len))
1333
			return -1;
1334
		cctx->tag_set = 1;
1335
		return len;
1336
	} else {
1337
		int rv = -1;
1338
		if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
1339
		    cctx->str) : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
1340
			unsigned char tag[16];
1341
			if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
1342
				if (!memcmp(tag, ctx->buf, cctx->M))
1343
					rv = len;
1344
			}
1345
		}
1346
		if (rv == -1)
1347
			explicit_bzero(out, len);
1348
		cctx->iv_set = 0;
1349
		cctx->tag_set = 0;
1350
		cctx->len_set = 0;
1351
		return rv;
1352
	}
1353
1354
}
1355
1356
#define aes_ccm_cleanup NULL
1357
1358
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
1359
    EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
1360
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
1361
    EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
1362
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
1363
    EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
1364
1365
#define EVP_AEAD_AES_GCM_TAG_LEN 16
1366
1367
struct aead_aes_gcm_ctx {
1368
	union {
1369
		double align;
1370
		AES_KEY ks;
1371
	} ks;
1372
	GCM128_CONTEXT gcm;
1373
	ctr128_f ctr;
1374
	unsigned char tag_len;
1375
};
1376
1377
static int
1378
aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const unsigned char *key, size_t key_len,
1379
    size_t tag_len)
1380
{
1381
	struct aead_aes_gcm_ctx *gcm_ctx;
1382
32404
	const size_t key_bits = key_len * 8;
1383
1384
	/* EVP_AEAD_CTX_init should catch this. */
1385
16202
	if (key_bits != 128 && key_bits != 256) {
1386
		EVPerror(EVP_R_BAD_KEY_LENGTH);
1387
		return 0;
1388
	}
1389
1390
16202
	if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH)
1391
16178
		tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1392
1393
16202
	if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
1394
		EVPerror(EVP_R_TAG_TOO_LARGE);
1395
		return 0;
1396
	}
1397
1398
16202
	gcm_ctx = malloc(sizeof(struct aead_aes_gcm_ctx));
1399
16202
	if (gcm_ctx == NULL)
1400
		return 0;
1401
1402
#ifdef AESNI_CAPABLE
1403
16202
	if (AESNI_CAPABLE) {
1404
16202
		aesni_set_encrypt_key(key, key_bits, &gcm_ctx->ks.ks);
1405
16202
		CRYPTO_gcm128_init(&gcm_ctx->gcm, &gcm_ctx->ks.ks,
1406
		    (block128_f)aesni_encrypt);
1407
		gcm_ctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
1408
16202
	} else
1409
#endif
1410
	{
1411
		gcm_ctx->ctr = aes_gcm_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm,
1412
		    key, key_len);
1413
	}
1414
16202
	gcm_ctx->tag_len = tag_len;
1415
16202
	ctx->aead_state = gcm_ctx;
1416
1417
16202
	return 1;
1418
16202
}
1419
1420
static void
1421
aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx)
1422
{
1423
32404
	struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1424
1425
16202
	freezero(gcm_ctx, sizeof(*gcm_ctx));
1426
16202
}
1427
1428
static int
1429
aead_aes_gcm_seal(const EVP_AEAD_CTX *ctx, unsigned char *out, size_t *out_len,
1430
    size_t max_out_len, const unsigned char *nonce, size_t nonce_len,
1431
    const unsigned char *in, size_t in_len, const unsigned char *ad,
1432
    size_t ad_len)
1433
{
1434
16838
	const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1435
8419
	GCM128_CONTEXT gcm;
1436
	size_t bulk = 0;
1437
1438
8419
	if (max_out_len < in_len + gcm_ctx->tag_len) {
1439
		EVPerror(EVP_R_BUFFER_TOO_SMALL);
1440
		return 0;
1441
	}
1442
1443
8419
	memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
1444
8419
	CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);
1445
1446

16838
	if (ad_len > 0 && CRYPTO_gcm128_aad(&gcm, ad, ad_len))
1447
		return 0;
1448
1449
8419
	if (gcm_ctx->ctr) {
1450
8419
		if (CRYPTO_gcm128_encrypt_ctr32(&gcm, in + bulk, out + bulk,
1451
		    in_len - bulk, gcm_ctx->ctr))
1452
			return 0;
1453
	} else {
1454
		if (CRYPTO_gcm128_encrypt(&gcm, in + bulk, out + bulk,
1455
		    in_len - bulk))
1456
			return 0;
1457
	}
1458
1459
8419
	CRYPTO_gcm128_tag(&gcm, out + in_len, gcm_ctx->tag_len);
1460
8419
	*out_len = in_len + gcm_ctx->tag_len;
1461
1462
8419
	return 1;
1463
8419
}
1464
1465
static int
1466
aead_aes_gcm_open(const EVP_AEAD_CTX *ctx, unsigned char *out, size_t *out_len,
1467
    size_t max_out_len, const unsigned char *nonce, size_t nonce_len,
1468
    const unsigned char *in, size_t in_len, const unsigned char *ad,
1469
    size_t ad_len)
1470
{
1471
16546
	const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1472
8273
	unsigned char tag[EVP_AEAD_AES_GCM_TAG_LEN];
1473
8273
	GCM128_CONTEXT gcm;
1474
	size_t plaintext_len;
1475
	size_t bulk = 0;
1476
1477
8273
	if (in_len < gcm_ctx->tag_len) {
1478
		EVPerror(EVP_R_BAD_DECRYPT);
1479
		return 0;
1480
	}
1481
1482
8273
	plaintext_len = in_len - gcm_ctx->tag_len;
1483
1484
8273
	if (max_out_len < plaintext_len) {
1485
		EVPerror(EVP_R_BUFFER_TOO_SMALL);
1486
		return 0;
1487
	}
1488
1489
8273
	memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
1490
8273
	CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);
1491
1492
8273
	if (CRYPTO_gcm128_aad(&gcm, ad, ad_len))
1493
		return 0;
1494
1495
8273
	if (gcm_ctx->ctr) {
1496
8273
		if (CRYPTO_gcm128_decrypt_ctr32(&gcm, in + bulk, out + bulk,
1497
		    in_len - bulk - gcm_ctx->tag_len, gcm_ctx->ctr))
1498
			return 0;
1499
	} else {
1500
		if (CRYPTO_gcm128_decrypt(&gcm, in + bulk, out + bulk,
1501
		    in_len - bulk - gcm_ctx->tag_len))
1502
			return 0;
1503
	}
1504
1505
8273
	CRYPTO_gcm128_tag(&gcm, tag, gcm_ctx->tag_len);
1506
8273
	if (timingsafe_memcmp(tag, in + plaintext_len, gcm_ctx->tag_len) != 0) {
1507
24
		EVPerror(EVP_R_BAD_DECRYPT);
1508
24
		return 0;
1509
	}
1510
1511
8249
	*out_len = plaintext_len;
1512
1513
8249
	return 1;
1514
8273
}
1515
1516
static const EVP_AEAD aead_aes_128_gcm = {
1517
	.key_len = 16,
1518
	.nonce_len = 12,
1519
	.overhead = EVP_AEAD_AES_GCM_TAG_LEN,
1520
	.max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN,
1521
1522
	.init = aead_aes_gcm_init,
1523
	.cleanup = aead_aes_gcm_cleanup,
1524
	.seal = aead_aes_gcm_seal,
1525
	.open = aead_aes_gcm_open,
1526
};
1527
1528
static const EVP_AEAD aead_aes_256_gcm = {
1529
	.key_len = 32,
1530
	.nonce_len = 12,
1531
	.overhead = EVP_AEAD_AES_GCM_TAG_LEN,
1532
	.max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN,
1533
1534
	.init = aead_aes_gcm_init,
1535
	.cleanup = aead_aes_gcm_cleanup,
1536
	.seal = aead_aes_gcm_seal,
1537
	.open = aead_aes_gcm_open,
1538
};
1539
1540
const EVP_AEAD *
1541
EVP_aead_aes_128_gcm(void)
1542
{
1543
72
	return &aead_aes_128_gcm;
1544
}
1545
1546
const EVP_AEAD *
1547
EVP_aead_aes_256_gcm(void)
1548
{
1549
16154
	return &aead_aes_256_gcm;
1550
}
1551
1552
#endif
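/*
 * Editor's sketch (not part of e_aes.c): the EVP_AEAD interface defined
 * above, driven from the caller's side.  seal_with_aead() and the fixed
 * 12-byte nonce are illustrative; "out" is assumed to provide in_len plus
 * the 16-byte tag overhead, and a stack-allocated EVP_AEAD_CTX is assumed
 * to be permitted by this LibreSSL version.
 */
#include <openssl/evp.h>

static int
seal_with_aead(const unsigned char key[32], const unsigned char nonce[12],
    const unsigned char *in, size_t in_len,
    const unsigned char *ad, size_t ad_len,
    unsigned char *out, size_t out_max, size_t *out_len)
{
	const EVP_AEAD *aead = EVP_aead_aes_256_gcm();
	EVP_AEAD_CTX ctx;
	int ok;

	if (!EVP_AEAD_CTX_init(&ctx, aead, key, 32,
	    EVP_AEAD_DEFAULT_TAG_LENGTH, NULL))
		return 0;
	/* aead_aes_gcm_seal() appends the 16-byte tag after the ciphertext. */
	ok = EVP_AEAD_CTX_seal(&ctx, out, out_len, out_max,
	    nonce, 12, in, in_len, ad, ad_len);
	EVP_AEAD_CTX_cleanup(&ctx);
	return ok;
}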