GCC Code Coverage Report
Directory: ./
File:      lib/libcrypto/crypto/../../libssl/src/crypto/evp/e_aes.c
Date:      2016-12-06

              Exec   Total   Coverage
Lines:         100     573     17.5 %
Branches:       55     428     12.9 %

Line Branch Exec Source
1
/* $OpenBSD: e_aes.c,v 1.29 2015/09/10 15:56:25 jsing Exp $ */
2
/* ====================================================================
3
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
4
 *
5
 * Redistribution and use in source and binary forms, with or without
6
 * modification, are permitted provided that the following conditions
7
 * are met:
8
 *
9
 * 1. Redistributions of source code must retain the above copyright
10
 *    notice, this list of conditions and the following disclaimer.
11
 *
12
 * 2. Redistributions in binary form must reproduce the above copyright
13
 *    notice, this list of conditions and the following disclaimer in
14
 *    the documentation and/or other materials provided with the
15
 *    distribution.
16
 *
17
 * 3. All advertising materials mentioning features or use of this
18
 *    software must display the following acknowledgment:
19
 *    "This product includes software developed by the OpenSSL Project
20
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
21
 *
22
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
23
 *    endorse or promote products derived from this software without
24
 *    prior written permission. For written permission, please contact
25
 *    openssl-core@openssl.org.
26
 *
27
 * 5. Products derived from this software may not be called "OpenSSL"
28
 *    nor may "OpenSSL" appear in their names without prior written
29
 *    permission of the OpenSSL Project.
30
 *
31
 * 6. Redistributions of any form whatsoever must retain the following
32
 *    acknowledgment:
33
 *    "This product includes software developed by the OpenSSL Project
34
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
35
 *
36
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
37
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
38
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
39
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
40
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
41
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
42
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
43
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
44
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
45
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
46
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
47
 * OF THE POSSIBILITY OF SUCH DAMAGE.
48
 * ====================================================================
49
 *
50
 */
51
52
#include <stdlib.h>
53
#include <string.h>
54
55
#include <openssl/opensslconf.h>
56
57
#ifndef OPENSSL_NO_AES
58
#include <openssl/aes.h>
59
#include <openssl/err.h>
60
#include <openssl/evp.h>
61
62
#include "evp_locl.h"
63
#include "modes_lcl.h"
64
65
typedef struct {
66
	AES_KEY ks;
67
	block128_f block;
68
	union {
69
		cbc128_f cbc;
70
		ctr128_f ctr;
71
	} stream;
72
} EVP_AES_KEY;
73
74
typedef struct {
75
	AES_KEY ks;		/* AES key schedule to use */
76
	int key_set;		/* Set if key initialised */
77
	int iv_set;		/* Set if an iv is set */
78
	GCM128_CONTEXT gcm;
79
	unsigned char *iv;	/* Temporary IV store */
80
	int ivlen;		/* IV length */
81
	int taglen;
82
	int iv_gen;		/* It is OK to generate IVs */
83
	int tls_aad_len;	/* TLS AAD length */
84
	ctr128_f ctr;
85
} EVP_AES_GCM_CTX;
86
87
typedef struct {
88
	AES_KEY ks1, ks2;	/* AES key schedules to use */
89
	XTS128_CONTEXT xts;
90
	void (*stream)(const unsigned char *in, unsigned char *out,
91
	    size_t length, const AES_KEY *key1, const AES_KEY *key2,
92
	    const unsigned char iv[16]);
93
} EVP_AES_XTS_CTX;
94
95
typedef struct {
96
	AES_KEY ks;		/* AES key schedule to use */
97
	int key_set;		/* Set if key initialised */
98
	int iv_set;		/* Set if an iv is set */
99
	int tag_set;		/* Set if tag is valid */
100
	int len_set;		/* Set if message length set */
101
	int L, M;		/* L and M parameters from RFC3610 */
102
	CCM128_CONTEXT ccm;
103
	ccm128_f str;
104
} EVP_AES_CCM_CTX;
105
106
#define MAXBITCHUNK	((size_t)1<<(sizeof(size_t)*8-4))
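/*
 * Editorial note (not part of e_aes.c): MAXBITCHUNK is one sixteenth of
 * the size_t range (1 << 60 on LP64), so the len * 8 bit counts passed to
 * CRYPTO_cfb128_1_encrypt() in aes_cfb1_cipher() below cannot overflow
 * when the input is processed in MAXBITCHUNK-byte chunks.
 */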
107
108
#ifdef VPAES_ASM
109
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
110
    AES_KEY *key);
111
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
112
    AES_KEY *key);
113
114
void vpaes_encrypt(const unsigned char *in, unsigned char *out,
115
    const AES_KEY *key);
116
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
117
    const AES_KEY *key);
118
119
void vpaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
120
    size_t length, const AES_KEY *key, unsigned char *ivec, int enc);
121
#endif
122
#ifdef BSAES_ASM
123
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
124
    size_t length, const AES_KEY *key, unsigned char ivec[16], int enc);
125
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
126
    size_t len, const AES_KEY *key, const unsigned char ivec[16]);
127
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
128
    size_t len, const AES_KEY *key1, const AES_KEY *key2,
129
    const unsigned char iv[16]);
130
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
131
    size_t len, const AES_KEY *key1, const AES_KEY *key2,
132
    const unsigned char iv[16]);
133
#endif
134
#ifdef AES_CTR_ASM
135
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
136
    size_t blocks, const AES_KEY *key,
137
    const unsigned char ivec[AES_BLOCK_SIZE]);
138
#endif
139
#ifdef AES_XTS_ASM
140
void AES_xts_encrypt(const char *inp, char *out, size_t len,
141
    const AES_KEY *key1, const AES_KEY *key2, const unsigned char iv[16]);
142
void AES_xts_decrypt(const char *inp, char *out, size_t len,
143
    const AES_KEY *key1, const AES_KEY *key2, const unsigned char iv[16]);
144
#endif
145
146
#if	defined(AES_ASM) && !defined(I386_ONLY) &&	(  \
147
	((defined(__i386)	|| defined(__i386__)	|| \
148
	  defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
149
	defined(__x86_64)	|| defined(__x86_64__)	|| \
150
	defined(_M_AMD64)	|| defined(_M_X64)	|| \
151
	defined(__INTEL__)				)
152
153
extern unsigned int OPENSSL_ia32cap_P[2];
154
155
#ifdef VPAES_ASM
156
#define VPAES_CAPABLE	(OPENSSL_ia32cap_P[1]&(1<<(41-32)))
157
#endif
158
#ifdef BSAES_ASM
159
#define BSAES_CAPABLE	VPAES_CAPABLE
160
#endif
161
/*
162
 * AES-NI section
163
 */
164
#define	AESNI_CAPABLE	(OPENSSL_ia32cap_P[1]&(1<<(57-32)))
165
166
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
167
    AES_KEY *key);
168
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
169
    AES_KEY *key);
170
171
void aesni_encrypt(const unsigned char *in, unsigned char *out,
172
    const AES_KEY *key);
173
void aesni_decrypt(const unsigned char *in, unsigned char *out,
174
    const AES_KEY *key);
175
176
void aesni_ecb_encrypt(const unsigned char *in, unsigned char *out,
177
    size_t length, const AES_KEY *key, int enc);
178
void aesni_cbc_encrypt(const unsigned char *in, unsigned char *out,
179
    size_t length, const AES_KEY *key, unsigned char *ivec, int enc);
180
181
void aesni_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
182
    size_t blocks, const void *key, const unsigned char *ivec);
183
184
void aesni_xts_encrypt(const unsigned char *in, unsigned char *out,
185
    size_t length, const AES_KEY *key1, const AES_KEY *key2,
186
    const unsigned char iv[16]);
187
188
void aesni_xts_decrypt(const unsigned char *in, unsigned char *out,
189
    size_t length, const AES_KEY *key1, const AES_KEY *key2,
190
    const unsigned char iv[16]);
191
192
void aesni_ccm64_encrypt_blocks (const unsigned char *in, unsigned char *out,
193
    size_t blocks, const void *key, const unsigned char ivec[16],
194
    unsigned char cmac[16]);
195
196
void aesni_ccm64_decrypt_blocks (const unsigned char *in, unsigned char *out,
197
    size_t blocks, const void *key, const unsigned char ivec[16],
198
    unsigned char cmac[16]);
199
200
static int
201
aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
202
    const unsigned char *iv, int enc)
203
110
{
204
	int ret, mode;
205
110
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
206
207
110
	mode = ctx->cipher->flags & EVP_CIPH_MODE;
208
110
	if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) &&
209
	    !enc) {
210
25
		ret = aesni_set_decrypt_key(key, ctx->key_len * 8,
211
		    ctx->cipher_data);
212
25
		dat->block = (block128_f)aesni_decrypt;
213
25
		dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
214
		    (cbc128_f)aesni_cbc_encrypt : NULL;
215
	} else {
216
85
		ret = aesni_set_encrypt_key(key, ctx->key_len * 8,
217
		    ctx->cipher_data);
218
85
		dat->block = (block128_f)aesni_encrypt;
219
85
		if (mode == EVP_CIPH_CBC_MODE)
220
13
			dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
221
72
		else if (mode == EVP_CIPH_CTR_MODE)
222
9
			dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
223
		else
224
63
			dat->stream.cbc = NULL;
225
	}
226
227
110
	if (ret < 0) {
228
		EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
229
		return 0;
230
	}
231
232
110
	return 1;
233
}
234
235
static int
236
aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
237
    const unsigned char *in, size_t len)
238
27
{
239
27
	aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv,
240
	    ctx->encrypt);
241
242
27
	return 1;
243
}
244
245
static int
246
aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
247
    const unsigned char *in, size_t len)
248
27
{
249
27
	size_t	bl = ctx->cipher->block_size;
250
251
27
	if (len < bl)
252
		return 1;
253
254
27
	aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
255
256
27
	return 1;
257
}
258
259
#define aesni_ofb_cipher aes_ofb_cipher
260
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
261
    const unsigned char *in, size_t len);
262
263
#define aesni_cfb_cipher aes_cfb_cipher
264
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
265
    const unsigned char *in, size_t len);
266
267
#define aesni_cfb8_cipher aes_cfb8_cipher
268
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
269
    const unsigned char *in, size_t len);
270
271
#define aesni_cfb1_cipher aes_cfb1_cipher
272
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
273
    const unsigned char *in, size_t len);
274
275
#define aesni_ctr_cipher aes_ctr_cipher
276
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
277
    const unsigned char *in, size_t len);
278
279
static int
280
aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
281
    const unsigned char *iv, int enc)
282
{
283
	EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
284
285
	if (!iv && !key)
286
		return 1;
287
	if (key) {
288
		aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks);
289
		CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
290
		    (block128_f)aesni_encrypt);
291
		gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
292
		/* If we have an IV, we can set it directly; otherwise use
293
		 * the saved IV.
294
		 */
295
		if (iv == NULL && gctx->iv_set)
296
			iv = gctx->iv;
297
		if (iv) {
298
			CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
299
			gctx->iv_set = 1;
300
		}
301
		gctx->key_set = 1;
302
	} else {
303
		/* If the key is set, use the IV; otherwise save a copy */
304
		if (gctx->key_set)
305
			CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
306
		else
307
			memcpy(gctx->iv, iv, gctx->ivlen);
308
		gctx->iv_set = 1;
309
		gctx->iv_gen = 0;
310
	}
311
	return 1;
312
}
313
314
#define aesni_gcm_cipher aes_gcm_cipher
315
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
316
    const unsigned char *in, size_t len);
317
318
static int
319
aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
320
    const unsigned char *iv, int enc)
321
{
322
	EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
323
324
	if (!iv && !key)
325
		return 1;
326
327
	if (key) {
328
		/* key_len is two AES keys */
329
		if (enc) {
330
			aesni_set_encrypt_key(key, ctx->key_len * 4,
331
			    &xctx->ks1);
332
			xctx->xts.block1 = (block128_f)aesni_encrypt;
333
			xctx->stream = aesni_xts_encrypt;
334
		} else {
335
			aesni_set_decrypt_key(key, ctx->key_len * 4,
336
			    &xctx->ks1);
337
			xctx->xts.block1 = (block128_f)aesni_decrypt;
338
			xctx->stream = aesni_xts_decrypt;
339
		}
340
341
		aesni_set_encrypt_key(key + ctx->key_len / 2,
342
		    ctx->key_len * 4, &xctx->ks2);
343
		xctx->xts.block2 = (block128_f)aesni_encrypt;
344
345
		xctx->xts.key1 = &xctx->ks1;
346
	}
347
348
	if (iv) {
349
		xctx->xts.key2 = &xctx->ks2;
350
		memcpy(ctx->iv, iv, 16);
351
	}
352
353
	return 1;
354
}
355
356
#define aesni_xts_cipher aes_xts_cipher
357
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
358
    const unsigned char *in, size_t len);
359
360
static int
361
aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
362
    const unsigned char *iv, int enc)
363
{
364
	EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
365
366
	if (!iv && !key)
367
		return 1;
368
	if (key) {
369
		aesni_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks);
370
		CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
371
		    &cctx->ks, (block128_f)aesni_encrypt);
372
		cctx->str = enc ? (ccm128_f)aesni_ccm64_encrypt_blocks :
373
		    (ccm128_f)aesni_ccm64_decrypt_blocks;
374
		cctx->key_set = 1;
375
	}
376
	if (iv) {
377
		memcpy(ctx->iv, iv, 15 - cctx->L);
378
		cctx->iv_set = 1;
379
	}
380
	return 1;
381
}
382
383
#define aesni_ccm_cipher aes_ccm_cipher
384
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
385
    const unsigned char *in, size_t len);
386
387
#define BLOCK_CIPHER_generic(n,keylen,blocksize,ivlen,nmode,mode,MODE,fl) \
388
static const EVP_CIPHER aesni_##keylen##_##mode = {			\
389
	.nid = n##_##keylen##_##nmode,					\
390
	.block_size = blocksize,					\
391
	.key_len = keylen / 8,						\
392
	.iv_len = ivlen, 						\
393
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
394
	.init = aesni_init_key,						\
395
	.do_cipher = aesni_##mode##_cipher,				\
396
	.ctx_size = sizeof(EVP_AES_KEY)					\
397
};									\
398
static const EVP_CIPHER aes_##keylen##_##mode = {			\
399
	.nid = n##_##keylen##_##nmode,					\
400
	.block_size = blocksize,					\
401
	.key_len = keylen / 8,						\
402
	.iv_len = ivlen, 						\
403
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
404
	.init = aes_init_key,						\
405
	.do_cipher = aes_##mode##_cipher,				\
406
	.ctx_size = sizeof(EVP_AES_KEY)					\
407
};									\
408
const EVP_CIPHER *							\
409
EVP_aes_##keylen##_##mode(void)						\
410
{									\
411
	return AESNI_CAPABLE ?						\
412
	    &aesni_##keylen##_##mode : &aes_##keylen##_##mode;		\
413
}
414
415
#define BLOCK_CIPHER_custom(n,keylen,blocksize,ivlen,mode,MODE,fl)	\
416
static const EVP_CIPHER aesni_##keylen##_##mode = {			\
417
	.nid = n##_##keylen##_##mode,					\
418
	.block_size = blocksize,					\
419
	.key_len =							\
420
	    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) *	\
421
	    keylen / 8,							\
422
	.iv_len = ivlen,						\
423
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
424
	.init = aesni_##mode##_init_key,				\
425
	.do_cipher = aesni_##mode##_cipher,				\
426
	.cleanup = aes_##mode##_cleanup,				\
427
	.ctx_size = sizeof(EVP_AES_##MODE##_CTX),			\
428
	.ctrl = aes_##mode##_ctrl					\
429
};									\
430
static const EVP_CIPHER aes_##keylen##_##mode = {			\
431
	.nid = n##_##keylen##_##mode,					\
432
	.block_size = blocksize,					\
433
	.key_len =							\
434
	    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) *	\
435
	    keylen / 8,							\
436
	.iv_len = ivlen,						\
437
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
438
	.init = aes_##mode##_init_key,					\
439
	.do_cipher = aes_##mode##_cipher,				\
440
	.cleanup = aes_##mode##_cleanup,				\
441
	.ctx_size = sizeof(EVP_AES_##MODE##_CTX),			\
442
	.ctrl = aes_##mode##_ctrl					\
443
};									\
444
const EVP_CIPHER *							\
445
EVP_aes_##keylen##_##mode(void)						\
446
{									\
447
	return AESNI_CAPABLE ?						\
448
	    &aesni_##keylen##_##mode : &aes_##keylen##_##mode;		\
449
}
450
451
#else
452
453
#define BLOCK_CIPHER_generic(n,keylen,blocksize,ivlen,nmode,mode,MODE,fl) \
454
static const EVP_CIPHER aes_##keylen##_##mode = {			\
455
	.nid = n##_##keylen##_##nmode,					\
456
	.block_size = blocksize,					\
457
	.key_len = keylen / 8,						\
458
	.iv_len = ivlen,						\
459
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
460
	.init = aes_init_key,						\
461
	.do_cipher = aes_##mode##_cipher,				\
462
	.ctx_size = sizeof(EVP_AES_KEY)					\
463
};									\
464
const EVP_CIPHER *							\
465
EVP_aes_##keylen##_##mode(void)						\
466
{									\
467
	return &aes_##keylen##_##mode;					\
468
}
469
470
#define BLOCK_CIPHER_custom(n,keylen,blocksize,ivlen,mode,MODE,fl)	\
471
static const EVP_CIPHER aes_##keylen##_##mode = {			\
472
	.nid = n##_##keylen##_##mode,					\
473
	.block_size = blocksize,					\
474
	.key_len =							\
475
	    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) *	\
476
	    keylen / 8,							\
477
	.iv_len = ivlen,						\
478
	.flags = fl | EVP_CIPH_##MODE##_MODE,				\
479
	.init = aes_##mode##_init_key,					\
480
	.do_cipher = aes_##mode##_cipher,				\
481
	.cleanup = aes_##mode##_cleanup,				\
482
	.ctx_size = sizeof(EVP_AES_##MODE##_CTX),			\
483
	.ctrl = aes_##mode##_ctrl					\
484
};									\
485
const EVP_CIPHER *							\
486
EVP_aes_##keylen##_##mode(void)						\
487
{									\
488
	return &aes_##keylen##_##mode;					\
489
}
490
491
#endif
492
493
#define BLOCK_CIPHER_generic_pack(nid,keylen,flags)		\
494
	BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)	\
495
	BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)	\
496
	BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)	\
497
	BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)	\
498
	BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)	\
499
	BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)	\
500
	BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
501
502
static int
503
aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
504
    const unsigned char *iv, int enc)
505
{
506
	int ret, mode;
507
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
508
509
	mode = ctx->cipher->flags & EVP_CIPH_MODE;
510
	if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) &&
511
	    !enc)
512
#ifdef BSAES_CAPABLE
513
		if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
514
			ret = AES_set_decrypt_key(key, ctx->key_len * 8,
515
			    &dat->ks);
516
			dat->block = (block128_f)AES_decrypt;
517
			dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
518
		} else
519
#endif
520
#ifdef VPAES_CAPABLE
521
		if (VPAES_CAPABLE) {
522
			ret = vpaes_set_decrypt_key(key, ctx->key_len * 8,
523
			    &dat->ks);
524
			dat->block = (block128_f)vpaes_decrypt;
525
			dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
526
			    (cbc128_f)vpaes_cbc_encrypt : NULL;
527
		} else
528
#endif
529
		{
530
			ret = AES_set_decrypt_key(key, ctx->key_len * 8,
531
			    &dat->ks);
532
			dat->block = (block128_f)AES_decrypt;
533
			dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
534
			    (cbc128_f)AES_cbc_encrypt : NULL;
535
		} else
536
#ifdef BSAES_CAPABLE
537
		if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
538
			ret = AES_set_encrypt_key(key, ctx->key_len * 8,
539
			    &dat->ks);
540
			dat->block = (block128_f)AES_encrypt;
541
			dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
542
		} else
543
#endif
544
#ifdef VPAES_CAPABLE
545
		if (VPAES_CAPABLE) {
546
			ret = vpaes_set_encrypt_key(key, ctx->key_len * 8,
547
			    &dat->ks);
548
			dat->block = (block128_f)vpaes_encrypt;
549
			dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
550
			    (cbc128_f)vpaes_cbc_encrypt : NULL;
551
		} else
552
#endif
553
		{
554
			ret = AES_set_encrypt_key(key, ctx->key_len * 8,
555
			    &dat->ks);
556
			dat->block = (block128_f)AES_encrypt;
557
			dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
558
			    (cbc128_f)AES_cbc_encrypt : NULL;
559
#ifdef AES_CTR_ASM
560
			if (mode == EVP_CIPH_CTR_MODE)
561
				dat->stream.ctr = (ctr128_f)AES_ctr32_encrypt;
562
#endif
563
		}
564
565
	if (ret < 0) {
566
		EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
567
		return 0;
568
	}
569
570
	return 1;
571
}
572
573
static int
574
aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
575
    const unsigned char *in, size_t len)
576
{
577
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
578
579
	if (dat->stream.cbc)
580
		(*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv,
581
		    ctx->encrypt);
582
	else if (ctx->encrypt)
583
		CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv,
584
		    dat->block);
585
	else
586
		CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv,
587
		    dat->block);
588
589
	return 1;
590
}
591
592
static int
593
aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
594
    const unsigned char *in, size_t len)
595
{
596
	size_t	bl = ctx->cipher->block_size;
597
	size_t	i;
598
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
599
600
	if (len < bl)
601
		return 1;
602
603
	for (i = 0, len -= bl; i <= len; i += bl)
604
		(*dat->block)(in + i, out + i, &dat->ks);
605
606
	return 1;
607
}
608
609
static int
610
aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
611
    const unsigned char *in, size_t len)
612
24
{
613
24
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
614
615
24
	CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num,
616
	    dat->block);
617
24
	return 1;
618
}
619
620
static int
621
aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
622
    const unsigned char *in, size_t len)
623
24
{
624
24
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
625
626
24
	CRYPTO_cfb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num,
627
	    ctx->encrypt, dat->block);
628
24
	return 1;
629
}
630
631
static int
632
aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
633
    const unsigned char *in, size_t len)
634
{
635
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
636
637
	CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num,
638
	    ctx->encrypt, dat->block);
639
	return 1;
640
}
641
642
static int
643
aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
644
    const unsigned char *in, size_t len)
645
{
646
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
647
648
	if (ctx->flags&EVP_CIPH_FLAG_LENGTH_BITS) {
649
		CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks, ctx->iv,
650
		    &ctx->num, ctx->encrypt, dat->block);
651
		return 1;
652
	}
653
654
	while (len >= MAXBITCHUNK) {
655
		CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK*8, &dat->ks,
656
		    ctx->iv, &ctx->num, ctx->encrypt, dat->block);
657
		len -= MAXBITCHUNK;
658
	}
659
	if (len)
660
		CRYPTO_cfb128_1_encrypt(in, out, len*8, &dat->ks,
661
		    ctx->iv, &ctx->num, ctx->encrypt, dat->block);
662
663
	return 1;
664
}
665
666
static int aes_ctr_cipher (EVP_CIPHER_CTX *ctx, unsigned char *out,
667
    const unsigned char *in, size_t len)
668
9
{
669
9
	unsigned int num = ctx->num;
670
9
	EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
671
672
9
	if (dat->stream.ctr)
673
9
		CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
674
		    ctx->iv, ctx->buf, &num, dat->stream.ctr);
675
	else
676
		CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
677
		    ctx->iv, ctx->buf, &num, dat->block);
678
9
	ctx->num = (size_t)num;
679
9
	return 1;
680
}
681
682



94
BLOCK_CIPHER_generic_pack(NID_aes, 128, EVP_CIPH_FLAG_FIPS)
683



94
BLOCK_CIPHER_generic_pack(NID_aes, 192, EVP_CIPH_FLAG_FIPS)
684



95
BLOCK_CIPHER_generic_pack(NID_aes, 256, EVP_CIPH_FLAG_FIPS)
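/*
 * Editorial sketch, not part of e_aes.c: the packs above register the
 * plain (non-AEAD) AES ciphers, e.g. EVP_aes_256_ctr(), and select the
 * AES-NI implementation at run time when AESNI_CAPABLE. A minimal
 * encryption pass through that interface might look like this; error
 * handling is omitted and the buffer names are illustrative only.
 */
#include <openssl/evp.h>

static int
ctr_encrypt_example(const unsigned char key[32], const unsigned char iv[16],
    const unsigned char *in, int in_len, unsigned char *out)
{
	EVP_CIPHER_CTX *ctx;
	int len = 0, out_len = 0;

	if ((ctx = EVP_CIPHER_CTX_new()) == NULL)
		return -1;
	EVP_EncryptInit_ex(ctx, EVP_aes_256_ctr(), NULL, key, iv);
	EVP_EncryptUpdate(ctx, out, &len, in, in_len);
	out_len = len;
	EVP_EncryptFinal_ex(ctx, out + out_len, &len);	/* no-op for CTR */
	out_len += len;
	EVP_CIPHER_CTX_free(ctx);
	return out_len;
}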
685
686
static int
687
aes_gcm_cleanup(EVP_CIPHER_CTX *c)
688
{
689
	EVP_AES_GCM_CTX *gctx = c->cipher_data;
690
691
	if (gctx->iv != c->iv)
692
		free(gctx->iv);
693
	explicit_bzero(gctx, sizeof(*gctx));
694
	return 1;
695
}
696
697
/* increment counter (64-bit int) by 1 */
698
static void
699
ctr64_inc(unsigned char *counter)
700
{
701
	int n = 8;
702
	unsigned char  c;
703
704
	do {
705
		--n;
706
		c = counter[n];
707
		++c;
708
		counter[n] = c;
709
		if (c)
710
			return;
711
	} while (n);
712
}
713
714
static int
715
aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
716
{
717
	EVP_AES_GCM_CTX *gctx = c->cipher_data;
718
719
	switch (type) {
720
	case EVP_CTRL_INIT:
721
		gctx->key_set = 0;
722
		gctx->iv_set = 0;
723
		gctx->ivlen = c->cipher->iv_len;
724
		gctx->iv = c->iv;
725
		gctx->taglen = -1;
726
		gctx->iv_gen = 0;
727
		gctx->tls_aad_len = -1;
728
		return 1;
729
730
	case EVP_CTRL_GCM_SET_IVLEN:
731
		if (arg <= 0)
732
			return 0;
733
		/* Allocate memory for IV if needed */
734
		if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
735
			if (gctx->iv != c->iv)
736
				free(gctx->iv);
737
			gctx->iv = malloc(arg);
738
			if (!gctx->iv)
739
				return 0;
740
		}
741
		gctx->ivlen = arg;
742
		return 1;
743
744
	case EVP_CTRL_GCM_SET_TAG:
745
		if (arg <= 0 || arg > 16 || c->encrypt)
746
			return 0;
747
		memcpy(c->buf, ptr, arg);
748
		gctx->taglen = arg;
749
		return 1;
750
751
	case EVP_CTRL_GCM_GET_TAG:
752
		if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0)
753
			return 0;
754
		memcpy(ptr, c->buf, arg);
755
		return 1;
756
757
	case EVP_CTRL_GCM_SET_IV_FIXED:
758
		/* Special case: -1 length restores whole IV */
759
		if (arg == -1) {
760
			memcpy(gctx->iv, ptr, gctx->ivlen);
761
			gctx->iv_gen = 1;
762
			return 1;
763
		}
764
		/* Fixed field must be at least 4 bytes and invocation field
765
		 * at least 8.
766
		 */
767
		if ((arg < 4) || (gctx->ivlen - arg) < 8)
768
			return 0;
769
		if (arg)
770
			memcpy(gctx->iv, ptr, arg);
771
		if (c->encrypt)
772
			arc4random_buf(gctx->iv + arg, gctx->ivlen - arg);
773
		gctx->iv_gen = 1;
774
		return 1;
775
776
	case EVP_CTRL_GCM_IV_GEN:
777
		if (gctx->iv_gen == 0 || gctx->key_set == 0)
778
			return 0;
779
		CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
780
		if (arg <= 0 || arg > gctx->ivlen)
781
			arg = gctx->ivlen;
782
		memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
783
		/* The invocation field will be at least 8 bytes in size,
784
		 * so there is no need to check for wrap-around or to
785
		 * increment more than the last 8 bytes.
786
		 */
787
		ctr64_inc(gctx->iv + gctx->ivlen - 8);
788
		gctx->iv_set = 1;
789
		return 1;
790
791
	case EVP_CTRL_GCM_SET_IV_INV:
792
		if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
793
			return 0;
794
		memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
795
		CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
796
		gctx->iv_set = 1;
797
		return 1;
798
799
	case EVP_CTRL_AEAD_TLS1_AAD:
800
		/* Save the AAD for later use */
801
		if (arg != 13)
802
			return 0;
803
		memcpy(c->buf, ptr, arg);
804
		gctx->tls_aad_len = arg;
805
		{
806
			unsigned int len = c->buf[arg - 2] << 8 |
807
			    c->buf[arg - 1];
808
809
			/* Correct length for explicit IV */
810
			len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
811
812
			/* If decrypting, correct for the tag too */
813
			if (!c->encrypt)
814
				len -= EVP_GCM_TLS_TAG_LEN;
815
			c->buf[arg - 2] = len >> 8;
816
			c->buf[arg - 1] = len & 0xff;
817
		}
818
		/* Extra padding: tag appended to record */
819
		return EVP_GCM_TLS_TAG_LEN;
820
821
	case EVP_CTRL_COPY:
822
	    {
823
		EVP_CIPHER_CTX *out = ptr;
824
		EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
825
826
		if (gctx->gcm.key) {
827
			if (gctx->gcm.key != &gctx->ks)
828
				return 0;
829
			gctx_out->gcm.key = &gctx_out->ks;
830
		}
831
		if (gctx->iv == c->iv)
832
			gctx_out->iv = out->iv;
833
		else {
834
			gctx_out->iv = malloc(gctx->ivlen);
835
			if (!gctx_out->iv)
836
				return 0;
837
			memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
838
		}
839
		return 1;
840
	    }
841
842
	default:
843
		return -1;
844
845
	}
846
}
847
848
static ctr128_f
849
aes_gcm_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
850
    const unsigned char *key, size_t key_len)
851
{
852
#ifdef BSAES_CAPABLE
853
	if (BSAES_CAPABLE) {
854
		AES_set_encrypt_key(key, key_len * 8, aes_key);
855
		CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
856
		return (ctr128_f)bsaes_ctr32_encrypt_blocks;
857
	} else
858
#endif
859
#ifdef VPAES_CAPABLE
860
	if (VPAES_CAPABLE) {
861
		vpaes_set_encrypt_key(key, key_len * 8, aes_key);
862
		CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt);
863
		return NULL;
864
	} else
865
#endif
866
		(void)0; /* terminate potentially open 'else' */
867
868
	AES_set_encrypt_key(key, key_len * 8, aes_key);
869
	CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
870
#ifdef AES_CTR_ASM
871
	return (ctr128_f)AES_ctr32_encrypt;
872
#else
873
	return NULL;
874
#endif
875
}
876
877
static int
878
aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
879
    const unsigned char *iv, int enc)
880
{
881
	EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
882
883
	if (!iv && !key)
884
		return 1;
885
	if (key) {
886
		gctx->ctr = aes_gcm_set_key(&gctx->ks, &gctx->gcm,
887
		    key, ctx->key_len);
888
889
		/* If we have an IV, we can set it directly; otherwise use
890
		 * the saved IV.
891
		 */
892
		if (iv == NULL && gctx->iv_set)
893
			iv = gctx->iv;
894
		if (iv) {
895
			CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
896
			gctx->iv_set = 1;
897
		}
898
		gctx->key_set = 1;
899
	} else {
900
		/* If the key is set, use the IV; otherwise save a copy */
901
		if (gctx->key_set)
902
			CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
903
		else
904
			memcpy(gctx->iv, iv, gctx->ivlen);
905
		gctx->iv_set = 1;
906
		gctx->iv_gen = 0;
907
	}
908
	return 1;
909
}
910
911
/* Handle the TLS GCM packet format. This consists of the last portion of the IV
912
 * followed by the payload and finally the tag. On encrypt, generate the IV,
913
 * encrypt the payload and write the tag. On verify, retrieve the IV, decrypt the
914
 * payload and verify the tag.
915
 */
916
917
static int
918
aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
919
    const unsigned char *in, size_t len)
920
{
921
	EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
922
	int rv = -1;
923
924
	/* Encrypt/decrypt must be performed in place */
925
	if (out != in ||
926
	    len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
927
		return -1;
928
929
	/* Set IV from start of buffer or generate IV and write to start
930
	 * of buffer.
931
	 */
932
	if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
933
	    EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
934
	    EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
935
		goto err;
936
937
	/* Use saved AAD */
938
	if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
939
		goto err;
940
941
	/* Fix buffer and length to point to payload */
942
	in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
943
	out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
944
	len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
945
	if (ctx->encrypt) {
946
		/* Encrypt payload */
947
		if (gctx->ctr) {
948
			if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, in, out,
949
			    len, gctx->ctr))
950
				goto err;
951
		} else {
952
			if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
953
				goto err;
954
		}
955
		out += len;
956
957
		/* Finally write tag */
958
		CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
959
		rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
960
	} else {
961
		/* Decrypt */
962
		if (gctx->ctr) {
963
			if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, in, out,
964
			    len, gctx->ctr))
965
				goto err;
966
		} else {
967
			if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
968
				goto err;
969
		}
970
		/* Retrieve tag */
971
		CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
972
973
		/* If the tag does not match, wipe the output buffer */
974
		if (memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
975
			explicit_bzero(out, len);
976
			goto err;
977
		}
978
		rv = len;
979
	}
980
981
err:
982
	gctx->iv_set = 0;
983
	gctx->tls_aad_len = -1;
984
	return rv;
985
}
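/*
 * Editorial note (not part of e_aes.c): the TLS record fragment handled
 * above is laid out as
 *
 *	8-byte explicit nonce || encrypted payload || 16-byte tag
 *
 * so on the encrypt path rv equals the payload length plus
 * EVP_GCM_TLS_EXPLICIT_IV_LEN (8) plus EVP_GCM_TLS_TAG_LEN (16), while
 * the decrypt path returns the payload length alone.
 */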
986
987
static int
988
aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
989
    const unsigned char *in, size_t len)
990
{
991
	EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
992
993
	/* If not set up, return error */
994
	if (!gctx->key_set)
995
		return -1;
996
997
	if (gctx->tls_aad_len >= 0)
998
		return aes_gcm_tls_cipher(ctx, out, in, len);
999
1000
	if (!gctx->iv_set)
1001
		return -1;
1002
1003
	if (in) {
1004
		if (out == NULL) {
1005
			if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
1006
				return -1;
1007
		} else if (ctx->encrypt) {
1008
			if (gctx->ctr) {
1009
				if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
1010
				    in, out, len, gctx->ctr))
1011
					return -1;
1012
			} else {
1013
				if (CRYPTO_gcm128_encrypt(&gctx->gcm,
1014
				    in, out, len))
1015
					return -1;
1016
			}
1017
		} else {
1018
			if (gctx->ctr) {
1019
				if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
1020
				    in, out, len, gctx->ctr))
1021
					return -1;
1022
			} else {
1023
				if (CRYPTO_gcm128_decrypt(&gctx->gcm,
1024
				    in, out, len))
1025
					return -1;
1026
			}
1027
		}
1028
		return len;
1029
	} else {
1030
		if (!ctx->encrypt) {
1031
			if (gctx->taglen < 0)
1032
				return -1;
1033
			if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf,
1034
			    gctx->taglen) != 0)
1035
				return -1;
1036
			gctx->iv_set = 0;
1037
			return 0;
1038
		}
1039
		CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
1040
		gctx->taglen = 16;
1041
1042
		/* Don't reuse the IV */
1043
		gctx->iv_set = 0;
1044
		return 0;
1045
	}
1046
1047
}
1048
1049
#define CUSTOM_FLAGS \
1050
    ( EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV | \
1051
      EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT | \
1052
      EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY )
1053
1054
22
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
1055
    EVP_CIPH_FLAG_FIPS|EVP_CIPH_FLAG_AEAD_CIPHER|CUSTOM_FLAGS)
1056
12
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
1057
    EVP_CIPH_FLAG_FIPS|EVP_CIPH_FLAG_AEAD_CIPHER|CUSTOM_FLAGS)
1058
22
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
1059
    EVP_CIPH_FLAG_FIPS|EVP_CIPH_FLAG_AEAD_CIPHER|CUSTOM_FLAGS)
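/*
 * Editorial sketch, not part of e_aes.c: typical AES-128-GCM sealing
 * through the EVP_CIPHER interface registered above. The AAD pass uses a
 * NULL output pointer, which aes_gcm_cipher() routes into
 * CRYPTO_gcm128_aad(); buffer names and sizes are illustrative only.
 */
#include <openssl/evp.h>

static int
gcm_seal_example(const unsigned char key[16], const unsigned char iv[12],
    const unsigned char *aad, int aad_len,
    const unsigned char *pt, int pt_len,
    unsigned char *ct, unsigned char tag[16])
{
	EVP_CIPHER_CTX *ctx;
	int len, ok = 0;

	if ((ctx = EVP_CIPHER_CTX_new()) == NULL)
		return 0;
	if (!EVP_EncryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, NULL, NULL))
		goto done;
	/* 12 bytes is already the default IV length; shown for clarity. */
	if (!EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL))
		goto done;
	if (!EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv))
		goto done;
	if (aad_len > 0 && !EVP_EncryptUpdate(ctx, NULL, &len, aad, aad_len))
		goto done;
	if (!EVP_EncryptUpdate(ctx, ct, &len, pt, pt_len))
		goto done;
	if (!EVP_EncryptFinal_ex(ctx, ct + len, &len))
		goto done;
	if (!EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_GET_TAG, 16, tag))
		goto done;
	ok = 1;
done:
	EVP_CIPHER_CTX_free(ctx);
	return ok;
}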
1060
1061
static int
1062
aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1063
{
1064
	EVP_AES_XTS_CTX *xctx = c->cipher_data;
1065
1066
	switch (type) {
1067
	case EVP_CTRL_INIT:
1068
		/*
1069
		 * key1 and key2 are used as an indicator both key and IV
1070
		 * are set
1071
		 */
1072
		xctx->xts.key1 = NULL;
1073
		xctx->xts.key2 = NULL;
1074
		return 1;
1075
1076
	case EVP_CTRL_COPY:
1077
	    {
1078
		EVP_CIPHER_CTX *out = ptr;
1079
		EVP_AES_XTS_CTX *xctx_out = out->cipher_data;
1080
1081
		if (xctx->xts.key1) {
1082
			if (xctx->xts.key1 != &xctx->ks1)
1083
				return 0;
1084
			xctx_out->xts.key1 = &xctx_out->ks1;
1085
		}
1086
		if (xctx->xts.key2) {
1087
			if (xctx->xts.key2 != &xctx->ks2)
1088
				return 0;
1089
			xctx_out->xts.key2 = &xctx_out->ks2;
1090
		}
1091
		return 1;
1092
	    }
1093
	}
1094
	return -1;
1095
}
1096
1097
static int
1098
aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1099
    const unsigned char *iv, int enc)
1100
{
1101
	EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
1102
1103
	if (!iv && !key)
1104
		return 1;
1105
1106
	if (key) do {
1107
#ifdef AES_XTS_ASM
1108
		xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
1109
#else
1110
		xctx->stream = NULL;
1111
#endif
1112
		/* key_len is two AES keys */
1113
#ifdef BSAES_CAPABLE
1114
		if (BSAES_CAPABLE)
1115
			xctx->stream = enc ? bsaes_xts_encrypt :
1116
			    bsaes_xts_decrypt;
1117
		else
1118
#endif
1119
#ifdef VPAES_CAPABLE
1120
		if (VPAES_CAPABLE) {
1121
			if (enc) {
1122
				vpaes_set_encrypt_key(key, ctx->key_len * 4,
1123
				    &xctx->ks1);
1124
				xctx->xts.block1 = (block128_f)vpaes_encrypt;
1125
			} else {
1126
				vpaes_set_decrypt_key(key, ctx->key_len * 4,
1127
				    &xctx->ks1);
1128
				xctx->xts.block1 = (block128_f)vpaes_decrypt;
1129
			}
1130
1131
			vpaes_set_encrypt_key(key + ctx->key_len / 2,
1132
			    ctx->key_len * 4, &xctx->ks2);
1133
			xctx->xts.block2 = (block128_f)vpaes_encrypt;
1134
1135
			xctx->xts.key1 = &xctx->ks1;
1136
			break;
1137
		} else
1138
#endif
1139
			(void)0;	/* terminate potentially open 'else' */
1140
1141
		if (enc) {
1142
			AES_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1);
1143
			xctx->xts.block1 = (block128_f)AES_encrypt;
1144
		} else {
1145
			AES_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1);
1146
			xctx->xts.block1 = (block128_f)AES_decrypt;
1147
		}
1148
1149
		AES_set_encrypt_key(key + ctx->key_len / 2,
1150
		    ctx->key_len * 4, &xctx->ks2);
1151
		xctx->xts.block2 = (block128_f)AES_encrypt;
1152
1153
		xctx->xts.key1 = &xctx->ks1;
1154
	} while (0);
1155
1156
	if (iv) {
1157
		xctx->xts.key2 = &xctx->ks2;
1158
		memcpy(ctx->iv, iv, 16);
1159
	}
1160
1161
	return 1;
1162
}
1163
1164
static int
1165
aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1166
    const unsigned char *in, size_t len)
1167
{
1168
	EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
1169
1170
	if (!xctx->xts.key1 || !xctx->xts.key2)
1171
		return 0;
1172
	if (!out || !in || len < AES_BLOCK_SIZE)
1173
		return 0;
1174
1175
	if (xctx->stream)
1176
		(*xctx->stream)(in, out, len, xctx->xts.key1, xctx->xts.key2,
1177
		    ctx->iv);
1178
	else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
1179
	    ctx->encrypt))
1180
		return 0;
1181
	return 1;
1182
}
1183
1184
#define aes_xts_cleanup NULL
1185
1186
#define XTS_FLAGS \
1187
    ( EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV | \
1188
      EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY )
1189
1190
12
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, EVP_CIPH_FLAG_FIPS|XTS_FLAGS)
1191
12
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, EVP_CIPH_FLAG_FIPS|XTS_FLAGS)
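/*
 * Editorial sketch, not part of e_aes.c: EVP_aes_256_xts() expects a
 * 64-byte key (two AES-256 keys, since key_len is doubled for XTS in the
 * macro above) and a 16-byte tweak passed as the IV, typically the sector
 * number. Error handling is omitted; names are illustrative only.
 */
#include <openssl/evp.h>

static int
xts_encrypt_sector(const unsigned char key[64], const unsigned char tweak[16],
    const unsigned char *in, int sector_len, unsigned char *out)
{
	EVP_CIPHER_CTX *ctx;
	int len = 0, ok;

	if ((ctx = EVP_CIPHER_CTX_new()) == NULL)
		return 0;
	EVP_EncryptInit_ex(ctx, EVP_aes_256_xts(), NULL, key, tweak);
	ok = EVP_EncryptUpdate(ctx, out, &len, in, sector_len) &&
	    len == sector_len;
	EVP_CIPHER_CTX_free(ctx);
	return ok;
}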
1192
1193
static int
1194
aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1195
{
1196
	EVP_AES_CCM_CTX *cctx = c->cipher_data;
1197
1198
	switch (type) {
1199
	case EVP_CTRL_INIT:
1200
		cctx->key_set = 0;
1201
		cctx->iv_set = 0;
1202
		cctx->L = 8;
1203
		cctx->M = 12;
1204
		cctx->tag_set = 0;
1205
		cctx->len_set = 0;
1206
		return 1;
1207
1208
	case EVP_CTRL_CCM_SET_IVLEN:
1209
		arg = 15 - arg;
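		/* FALLTHROUGH into EVP_CTRL_CCM_SET_L */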
1210
1211
	case EVP_CTRL_CCM_SET_L:
1212
		if (arg < 2 || arg > 8)
1213
			return 0;
1214
		cctx->L = arg;
1215
		return 1;
1216
1217
	case EVP_CTRL_CCM_SET_TAG:
1218
		if ((arg & 1) || arg < 4 || arg > 16)
1219
			return 0;
1220
		if ((c->encrypt && ptr) || (!c->encrypt && !ptr))
1221
			return 0;
1222
		if (ptr) {
1223
			cctx->tag_set = 1;
1224
			memcpy(c->buf, ptr, arg);
1225
		}
1226
		cctx->M = arg;
1227
		return 1;
1228
1229
	case EVP_CTRL_CCM_GET_TAG:
1230
		if (!c->encrypt || !cctx->tag_set)
1231
			return 0;
1232
		if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
1233
			return 0;
1234
		cctx->tag_set = 0;
1235
		cctx->iv_set = 0;
1236
		cctx->len_set = 0;
1237
		return 1;
1238
1239
	case EVP_CTRL_COPY:
1240
	    {
1241
		EVP_CIPHER_CTX *out = ptr;
1242
		EVP_AES_CCM_CTX *cctx_out = out->cipher_data;
1243
1244
		if (cctx->ccm.key) {
1245
			if (cctx->ccm.key != &cctx->ks)
1246
				return 0;
1247
			cctx_out->ccm.key = &cctx_out->ks;
1248
		}
1249
		return 1;
1250
	    }
1251
1252
	default:
1253
		return -1;
1254
	}
1255
}
1256
1257
static int
1258
aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1259
    const unsigned char *iv, int enc)
1260
{
1261
	EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
1262
1263
	if (!iv && !key)
1264
		return 1;
1265
	if (key) do {
1266
#ifdef VPAES_CAPABLE
1267
		if (VPAES_CAPABLE) {
1268
			vpaes_set_encrypt_key(key, ctx->key_len*8, &cctx->ks);
1269
			CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
1270
			    &cctx->ks, (block128_f)vpaes_encrypt);
1271
			cctx->str = NULL;
1272
			cctx->key_set = 1;
1273
			break;
1274
		}
1275
#endif
1276
		AES_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks);
1277
		CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
1278
		    &cctx->ks, (block128_f)AES_encrypt);
1279
		cctx->str = NULL;
1280
		cctx->key_set = 1;
1281
	} while (0);
1282
	if (iv) {
1283
		memcpy(ctx->iv, iv, 15 - cctx->L);
1284
		cctx->iv_set = 1;
1285
	}
1286
	return 1;
1287
}
1288
1289
static int
1290
aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1291
    const unsigned char *in, size_t len)
1292
{
1293
	EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
1294
	CCM128_CONTEXT *ccm = &cctx->ccm;
1295
1296
	/* If not set up, return error */
1297
	if (!cctx->iv_set && !cctx->key_set)
1298
		return -1;
1299
	if (!ctx->encrypt && !cctx->tag_set)
1300
		return -1;
1301
1302
	if (!out) {
1303
		if (!in) {
1304
			if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
1305
			    len))
1306
				return -1;
1307
			cctx->len_set = 1;
1308
			return len;
1309
		}
1310
		/* If we have AAD, we need the message length first */
1311
		if (!cctx->len_set && len)
1312
			return -1;
1313
		CRYPTO_ccm128_aad(ccm, in, len);
1314
		return len;
1315
	}
1316
	/* EVP_*Final() doesn't return any data */
1317
	if (!in)
1318
		return 0;
1319
	/* If the length has not been set yet, set it now */
1320
	if (!cctx->len_set) {
1321
		if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
1322
			return -1;
1323
		cctx->len_set = 1;
1324
	}
1325
	if (ctx->encrypt) {
1326
		if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
1327
		    cctx->str) : CRYPTO_ccm128_encrypt(ccm, in, out, len))
1328
			return -1;
1329
		cctx->tag_set = 1;
1330
		return len;
1331
	} else {
1332
		int rv = -1;
1333
		if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
1334
		    cctx->str) : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
1335
			unsigned char tag[16];
1336
			if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
1337
				if (!memcmp(tag, ctx->buf, cctx->M))
1338
					rv = len;
1339
			}
1340
		}
1341
		if (rv == -1)
1342
			explicit_bzero(out, len);
1343
		cctx->iv_set = 0;
1344
		cctx->tag_set = 0;
1345
		cctx->len_set = 0;
1346
		return rv;
1347
	}
1348
1349
}
1350
1351
#define aes_ccm_cleanup NULL
1352
1353
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
1354
    EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
1355
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
1356
    EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
1357
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
1358
    EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
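/*
 * Editorial sketch, not part of e_aes.c: CCM through the EVP interface
 * needs the nonce length, tag length and total plaintext length up front.
 * The length-only EVP_EncryptUpdate() call (NULL in and out) lands in the
 * "!out && !in" branch of aes_ccm_cipher() above. Error handling is
 * omitted; the 7-byte nonce and 12-byte tag are arbitrary choices.
 */
#include <openssl/evp.h>

static int
ccm_seal_example(const unsigned char key[16], const unsigned char nonce[7],
    const unsigned char *aad, int aad_len,
    const unsigned char *pt, int pt_len,
    unsigned char *ct, unsigned char tag[12])
{
	EVP_CIPHER_CTX *ctx;
	int len = 0;

	if ((ctx = EVP_CIPHER_CTX_new()) == NULL)
		return 0;
	EVP_EncryptInit_ex(ctx, EVP_aes_128_ccm(), NULL, NULL, NULL);
	EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_CCM_SET_IVLEN, 7, NULL);
	EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_CCM_SET_TAG, 12, NULL);
	EVP_EncryptInit_ex(ctx, NULL, NULL, key, nonce);
	EVP_EncryptUpdate(ctx, NULL, &len, NULL, pt_len);	/* total length */
	if (aad_len > 0)
		EVP_EncryptUpdate(ctx, NULL, &len, aad, aad_len);
	EVP_EncryptUpdate(ctx, ct, &len, pt, pt_len);
	EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_CCM_GET_TAG, 12, tag);
	EVP_CIPHER_CTX_free(ctx);
	return 1;
}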
1359
1360
#define EVP_AEAD_AES_GCM_TAG_LEN 16
1361
1362
struct aead_aes_gcm_ctx {
1363
	union {
1364
		double align;
1365
		AES_KEY ks;
1366
	} ks;
1367
	GCM128_CONTEXT gcm;
1368
	ctr128_f ctr;
1369
	unsigned char tag_len;
1370
};
1371
1372
static int
1373
aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const unsigned char *key, size_t key_len,
1374
    size_t tag_len)
1375
8
{
1376
	struct aead_aes_gcm_ctx *gcm_ctx;
1377
8
	const size_t key_bits = key_len * 8;
1378
1379
	/* EVP_AEAD_CTX_init should catch this. */
1380
8
	if (key_bits != 128 && key_bits != 256) {
1381
		EVPerr(EVP_F_AEAD_AES_GCM_INIT, EVP_R_BAD_KEY_LENGTH);
1382
		return 0;
1383
	}
1384
1385
8
	if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH)
1386
4
		tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1387
1388
8
	if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
1389
		EVPerr(EVP_F_AEAD_AES_GCM_INIT, EVP_R_TAG_TOO_LARGE);
1390
		return 0;
1391
	}
1392
1393
8
	gcm_ctx = malloc(sizeof(struct aead_aes_gcm_ctx));
1394
8
	if (gcm_ctx == NULL)
1395
		return 0;
1396
1397
#ifdef AESNI_CAPABLE
1398
8
	if (AESNI_CAPABLE) {
1399
8
		aesni_set_encrypt_key(key, key_bits, &gcm_ctx->ks.ks);
1400
8
		CRYPTO_gcm128_init(&gcm_ctx->gcm, &gcm_ctx->ks.ks,
1401
		    (block128_f)aesni_encrypt);
1402
8
		gcm_ctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
1403
	} else
1404
#endif
1405
	{
1406
		gcm_ctx->ctr = aes_gcm_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm,
1407
		    key, key_len);
1408
	}
1409
8
	gcm_ctx->tag_len = tag_len;
1410
8
	ctx->aead_state = gcm_ctx;
1411
1412
8
	return 1;
1413
}
1414
1415
static void
1416
aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx)
1417
4
{
1418
4
	struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1419
1420
4
	explicit_bzero(gcm_ctx, sizeof(*gcm_ctx));
1421
4
	free(gcm_ctx);
1422
4
}
1423
1424
static int
1425
aead_aes_gcm_seal(const EVP_AEAD_CTX *ctx, unsigned char *out, size_t *out_len,
1426
    size_t max_out_len, const unsigned char *nonce, size_t nonce_len,
1427
    const unsigned char *in, size_t in_len, const unsigned char *ad,
1428
    size_t ad_len)
1429
6
{
1430
6
	const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1431
	GCM128_CONTEXT gcm;
1432
6
	size_t bulk = 0;
1433
1434
6
	if (max_out_len < in_len + gcm_ctx->tag_len) {
1435
		EVPerr(EVP_F_AEAD_AES_GCM_SEAL, EVP_R_BUFFER_TOO_SMALL);
1436
		return 0;
1437
	}
1438
1439
6
	memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
1440
6
	CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);
1441
1442

6
	if (ad_len > 0 && CRYPTO_gcm128_aad(&gcm, ad, ad_len))
1443
		return 0;
1444
1445
6
	if (gcm_ctx->ctr) {
1446
6
		if (CRYPTO_gcm128_encrypt_ctr32(&gcm, in + bulk, out + bulk,
1447
		    in_len - bulk, gcm_ctx->ctr))
1448
			return 0;
1449
	} else {
1450
		if (CRYPTO_gcm128_encrypt(&gcm, in + bulk, out + bulk,
1451
		    in_len - bulk))
1452
			return 0;
1453
	}
1454
1455
6
	CRYPTO_gcm128_tag(&gcm, out + in_len, gcm_ctx->tag_len);
1456
6
	*out_len = in_len + gcm_ctx->tag_len;
1457
1458
6
	return 1;
1459
}
1460
1461
static int
1462
aead_aes_gcm_open(const EVP_AEAD_CTX *ctx, unsigned char *out, size_t *out_len,
1463
    size_t max_out_len, const unsigned char *nonce, size_t nonce_len,
1464
    const unsigned char *in, size_t in_len, const unsigned char *ad,
1465
    size_t ad_len)
1466
10
{
1467
10
	const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1468
	unsigned char tag[EVP_AEAD_AES_GCM_TAG_LEN];
1469
	GCM128_CONTEXT gcm;
1470
	size_t plaintext_len;
1471
10
	size_t bulk = 0;
1472
1473
10
	if (in_len < gcm_ctx->tag_len) {
1474
		EVPerr(EVP_F_AEAD_AES_GCM_OPEN, EVP_R_BAD_DECRYPT);
1475
		return 0;
1476
	}
1477
1478
10
	plaintext_len = in_len - gcm_ctx->tag_len;
1479
1480
10
	if (max_out_len < plaintext_len) {
1481
		EVPerr(EVP_F_AEAD_AES_GCM_OPEN, EVP_R_BUFFER_TOO_SMALL);
1482
		return 0;
1483
	}
1484
1485
10
	memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
1486
10
	CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);
1487
1488
10
	if (CRYPTO_gcm128_aad(&gcm, ad, ad_len))
1489
		return 0;
1490
1491
10
	if (gcm_ctx->ctr) {
1492
10
		if (CRYPTO_gcm128_decrypt_ctr32(&gcm, in + bulk, out + bulk,
1493
		    in_len - bulk - gcm_ctx->tag_len, gcm_ctx->ctr))
1494
			return 0;
1495
	} else {
1496
		if (CRYPTO_gcm128_decrypt(&gcm, in + bulk, out + bulk,
1497
		    in_len - bulk - gcm_ctx->tag_len))
1498
			return 0;
1499
	}
1500
1501
10
	CRYPTO_gcm128_tag(&gcm, tag, gcm_ctx->tag_len);
1502
10
	if (timingsafe_memcmp(tag, in + plaintext_len, gcm_ctx->tag_len) != 0) {
1503
4
		EVPerr(EVP_F_AEAD_AES_GCM_OPEN, EVP_R_BAD_DECRYPT);
1504
4
		return 0;
1505
	}
1506
1507
6
	*out_len = plaintext_len;
1508
1509
6
	return 1;
1510
}
1511
1512
static const EVP_AEAD aead_aes_128_gcm = {
1513
	.key_len = 16,
1514
	.nonce_len = 12,
1515
	.overhead = EVP_AEAD_AES_GCM_TAG_LEN,
1516
	.max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN,
1517
1518
	.init = aead_aes_gcm_init,
1519
	.cleanup = aead_aes_gcm_cleanup,
1520
	.seal = aead_aes_gcm_seal,
1521
	.open = aead_aes_gcm_open,
1522
};
1523
1524
static const EVP_AEAD aead_aes_256_gcm = {
1525
	.key_len = 32,
1526
	.nonce_len = 12,
1527
	.overhead = EVP_AEAD_AES_GCM_TAG_LEN,
1528
	.max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN,
1529
1530
	.init = aead_aes_gcm_init,
1531
	.cleanup = aead_aes_gcm_cleanup,
1532
	.seal = aead_aes_gcm_seal,
1533
	.open = aead_aes_gcm_open,
1534
};
1535
1536
const EVP_AEAD *
1537
EVP_aead_aes_128_gcm(void)
1538
4
{
1539
4
	return &aead_aes_128_gcm;
1540
}
1541
1542
const EVP_AEAD *
1543
EVP_aead_aes_256_gcm(void)
1544
2
{
1545
2
	return &aead_aes_256_gcm;
1546
}
1547
1548
#endif
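/*
 * Editorial sketch, not part of e_aes.c: the EVP_AEAD objects above are
 * used through the one-shot LibreSSL AEAD API. Sealing appends the
 * 16-byte tag; opening verifies it with timingsafe_memcmp() as in
 * aead_aes_gcm_open(). Buffer sizes and names are illustrative only.
 */
#include <string.h>

#include <openssl/evp.h>

static int
aead_roundtrip_example(const unsigned char key[16],
    const unsigned char nonce[12], const unsigned char *msg, size_t msg_len)
{
	EVP_AEAD_CTX ctx;
	unsigned char sealed[1024], opened[1024];
	size_t sealed_len, opened_len;
	int ok = 0;

	if (msg_len + EVP_AEAD_max_overhead(EVP_aead_aes_128_gcm()) >
	    sizeof(sealed))
		return 0;
	if (!EVP_AEAD_CTX_init(&ctx, EVP_aead_aes_128_gcm(), key, 16,
	    EVP_AEAD_DEFAULT_TAG_LENGTH, NULL))
		return 0;
	if (!EVP_AEAD_CTX_seal(&ctx, sealed, &sealed_len, sizeof(sealed),
	    nonce, 12, msg, msg_len, NULL, 0))
		goto done;
	if (!EVP_AEAD_CTX_open(&ctx, opened, &opened_len, sizeof(opened),
	    nonce, 12, sealed, sealed_len, NULL, 0))
		goto done;
	ok = opened_len == msg_len && memcmp(opened, msg, msg_len) == 0;
done:
	EVP_AEAD_CTX_cleanup(&ctx);
	return ok;
}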