GCC Code Coverage Report
Directory: ./
File:      usr.bin/ssh/lib/../chacha.c
Date:      2017-11-07

            Exec   Total   Coverage
Lines:       113     114     99.1 %
Branches:     19      22     86.4 %

Line Branch       Exec  Source
   1                    /*
   2                    chacha-merged.c version 20080118
   3                    D. J. Bernstein
   4                    Public domain.
   5                    */
   6
   7                    #include "chacha.h"
   8
   9                    /* $OpenBSD: chacha.c,v 1.1 2013/11/21 00:45:44 djm Exp $ */
  10
  11                    typedef unsigned char u8;
  12                    typedef unsigned int u32;
  13
  14                    typedef struct chacha_ctx chacha_ctx;
  15
  16                    #define U8C(v) (v##U)
  17                    #define U32C(v) (v##U)
  18
  19                    #define U8V(v) ((u8)(v) & U8C(0xFF))
  20                    #define U32V(v) ((u32)(v) & U32C(0xFFFFFFFF))
  21
  22                    #define ROTL32(v, n) \
  23                      (U32V((v) << (n)) | ((v) >> (32 - (n))))
  24
  25                    #define U8TO32_LITTLE(p) \
  26                      (((u32)((p)[0])      ) | \
  27                       ((u32)((p)[1]) <<  8) | \
  28                       ((u32)((p)[2]) << 16) | \
  29                       ((u32)((p)[3]) << 24))
  30
  31                    #define U32TO8_LITTLE(p, v) \
  32                      do { \
  33                        (p)[0] = U8V((v)      ); \
  34                        (p)[1] = U8V((v) >>  8); \
  35                        (p)[2] = U8V((v) >> 16); \
  36                        (p)[3] = U8V((v) >> 24); \
  37                      } while (0)
  38
  39                    #define ROTATE(v,c) (ROTL32(v,c))
  40                    #define XOR(v,w) ((v) ^ (w))
  41                    #define PLUS(v,w) (U32V((v) + (w)))
  42                    #define PLUSONE(v) (PLUS((v),1))
  43
  44                    #define QUARTERROUND(a,b,c,d) \
  45                      a = PLUS(a,b); d = ROTATE(XOR(d,a),16); \
  46                      c = PLUS(c,d); b = ROTATE(XOR(b,c),12); \
  47                      a = PLUS(a,b); d = ROTATE(XOR(d,a), 8); \
  48                      c = PLUS(c,d); b = ROTATE(XOR(b,c), 7);
  49
  50                    static const char sigma[16] = "expand 32-byte k";
  51                    static const char tau[16] = "expand 16-byte k";
  52
  53                    void
  54                    chacha_keysetup(chacha_ctx *x,const u8 *k,u32 kbits)
  55                    {
  56                      const char *constants;
  57
  58                 8    x->input[4] = U8TO32_LITTLE(k + 0);
  59                 4    x->input[5] = U8TO32_LITTLE(k + 4);
  60                 4    x->input[6] = U8TO32_LITTLE(k + 8);
  61                 4    x->input[7] = U8TO32_LITTLE(k + 12);
  62                 4    if (kbits == 256) { /* recommended */
  63                 4      k += 16;
  64                        constants = sigma;
  65                 4    } else { /* kbits == 128 */
  66                        constants = tau;
  67                      }
  68                 4    x->input[8] = U8TO32_LITTLE(k + 0);
  69                 4    x->input[9] = U8TO32_LITTLE(k + 4);
  70                 4    x->input[10] = U8TO32_LITTLE(k + 8);
  71                 4    x->input[11] = U8TO32_LITTLE(k + 12);
  72                 4    x->input[0] = U8TO32_LITTLE(constants + 0);
  73                 4    x->input[1] = U8TO32_LITTLE(constants + 4);
  74                 4    x->input[2] = U8TO32_LITTLE(constants + 8);
  75                 4    x->input[3] = U8TO32_LITTLE(constants + 12);
  76                 4  }
  77
  78                    void
  79                    chacha_ivsetup(chacha_ctx *x, const u8 *iv, const u8 *counter)
  80                    {
  81            619111    x->input[12] = counter == NULL ? 0 : U8TO32_LITTLE(counter + 0);
  82            431896    x->input[13] = counter == NULL ? 0 : U8TO32_LITTLE(counter + 4);
  83            187215    x->input[14] = U8TO32_LITTLE(iv + 0);
  84            187215    x->input[15] = U8TO32_LITTLE(iv + 4);
  85            187215  }
  86
  87                    void
  88                    chacha_encrypt_bytes(chacha_ctx *x,const u8 *m,u8 *c,u32 bytes)
  89                    {
  90                      u32 x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15;
  91                      u32 j0, j1, j2, j3, j4, j5, j6, j7, j8, j9, j10, j11, j12, j13, j14, j15;
  92                      u8 *ctarget = NULL;
  93            374430    u8 tmp[64];
  94                      u_int i;
  95
  96            187215    if (!bytes) return;
  97
  98            187215    j0 = x->input[0];
  99            187215    j1 = x->input[1];
 100            187215    j2 = x->input[2];
 101            187215    j3 = x->input[3];
 102            187215    j4 = x->input[4];
 103            187215    j5 = x->input[5];
 104            187215    j6 = x->input[6];
 105            187215    j7 = x->input[7];
 106            187215    j8 = x->input[8];
 107            187215    j9 = x->input[9];
 108            187215    j10 = x->input[10];
 109            187215    j11 = x->input[11];
 110            187215    j12 = x->input[12];
 111            187215    j13 = x->input[13];
 112            187215    j14 = x->input[14];
 113            187215    j15 = x->input[15];
 114
 115           9268076    for (;;) {
 116           9268076      if (bytes < 64) {
 117           6499640        for (i = 0;i < bytes;++i) tmp[i] = m[i];
 118            184960        m = tmp;
 119                          ctarget = c;
 120                          c = tmp;
 121            184960      }
 122                        x0 = j0;
 123                        x1 = j1;
 124                        x2 = j2;
 125                        x3 = j3;
 126                        x4 = j4;
 127                        x5 = j5;
 128                        x6 = j6;
 129                        x7 = j7;
 130                        x8 = j8;
 131                        x9 = j9;
 132                        x10 = j10;
 133                        x11 = j11;
 134                        x12 = j12;
 135                        x13 = j13;
 136                        x14 = j14;
 137                        x15 = j15;
 138         203897672      for (i = 20;i > 0;i -= 2) {
 139          92680760        QUARTERROUND( x0, x4, x8,x12)
 140          92680760        QUARTERROUND( x1, x5, x9,x13)
 141          92680760        QUARTERROUND( x2, x6,x10,x14)
 142          92680760        QUARTERROUND( x3, x7,x11,x15)
 143          92680760        QUARTERROUND( x0, x5,x10,x15)
 144          92680760        QUARTERROUND( x1, x6,x11,x12)
 145          92680760        QUARTERROUND( x2, x7, x8,x13)
 146          92680760        QUARTERROUND( x3, x4, x9,x14)
 147                        }
 148           9268076      x0 = PLUS(x0,j0);
 149           9268076      x1 = PLUS(x1,j1);
 150           9268076      x2 = PLUS(x2,j2);
 151           9268076      x3 = PLUS(x3,j3);
 152           9268076      x4 = PLUS(x4,j4);
 153           9268076      x5 = PLUS(x5,j5);
 154           9268076      x6 = PLUS(x6,j6);
 155           9268076      x7 = PLUS(x7,j7);
 156           9268076      x8 = PLUS(x8,j8);
 157           9268076      x9 = PLUS(x9,j9);
 158           9268076      x10 = PLUS(x10,j10);
 159           9268076      x11 = PLUS(x11,j11);
 160           9268076      x12 = PLUS(x12,j12);
 161           9268076      x13 = PLUS(x13,j13);
 162           9268076      x14 = PLUS(x14,j14);
 163           9268076      x15 = PLUS(x15,j15);
 164
 165           9268076      x0 = XOR(x0,U8TO32_LITTLE(m + 0));
 166           9268076      x1 = XOR(x1,U8TO32_LITTLE(m + 4));
 167           9268076      x2 = XOR(x2,U8TO32_LITTLE(m + 8));
 168           9268076      x3 = XOR(x3,U8TO32_LITTLE(m + 12));
 169           9268076      x4 = XOR(x4,U8TO32_LITTLE(m + 16));
 170           9268076      x5 = XOR(x5,U8TO32_LITTLE(m + 20));
 171           9268076      x6 = XOR(x6,U8TO32_LITTLE(m + 24));
 172           9268076      x7 = XOR(x7,U8TO32_LITTLE(m + 28));
 173           9268076      x8 = XOR(x8,U8TO32_LITTLE(m + 32));
 174           9268076      x9 = XOR(x9,U8TO32_LITTLE(m + 36));
 175           9268076      x10 = XOR(x10,U8TO32_LITTLE(m + 40));
 176           9268076      x11 = XOR(x11,U8TO32_LITTLE(m + 44));
 177           9268076      x12 = XOR(x12,U8TO32_LITTLE(m + 48));
 178           9268076      x13 = XOR(x13,U8TO32_LITTLE(m + 52));
 179           9268076      x14 = XOR(x14,U8TO32_LITTLE(m + 56));
 180           9268076      x15 = XOR(x15,U8TO32_LITTLE(m + 60));
 181
 182           9268076      j12 = PLUSONE(j12);
 183           9268076      if (!j12) {
 184                          j13 = PLUSONE(j13);
 185                          /* stopping at 2^70 bytes per nonce is user's responsibility */
 186                        }
 187
 188           9268076      U32TO8_LITTLE(c + 0,x0);
 189           9268076      U32TO8_LITTLE(c + 4,x1);
 190           9268076      U32TO8_LITTLE(c + 8,x2);
 191           9268076      U32TO8_LITTLE(c + 12,x3);
 192           9268076      U32TO8_LITTLE(c + 16,x4);
 193           9268076      U32TO8_LITTLE(c + 20,x5);
 194           9268076      U32TO8_LITTLE(c + 24,x6);
 195           9268076      U32TO8_LITTLE(c + 28,x7);
 196           9268076      U32TO8_LITTLE(c + 32,x8);
 197           9268076      U32TO8_LITTLE(c + 36,x9);
 198           9268076      U32TO8_LITTLE(c + 40,x10);
 199           9268076      U32TO8_LITTLE(c + 44,x11);
 200           9268076      U32TO8_LITTLE(c + 48,x12);
 201           9268076      U32TO8_LITTLE(c + 52,x13);
 202           9268076      U32TO8_LITTLE(c + 56,x14);
 203           9268076      U32TO8_LITTLE(c + 60,x15);
 204
 205           9268076      if (bytes <= 64) {
 206            187215        if (bytes < 64) {
 207           6499640          for (i = 0;i < bytes;++i) ctarget[i] = c[i];
 208                          }
 209            187215        x->input[12] = j12;
 210            187215        x->input[13] = j13;
 211            187215        return;
 212                        }
 213           9080861      bytes -= 64;
 214           9080861      c += 64;
 215           9080861      m += 64;
 216                      }
 217            187215  }
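
The listing above covers the three public entry points of chacha.c: chacha_keysetup, chacha_ivsetup and chacha_encrypt_bytes. The sketch below is a minimal, hypothetical caller added for illustration only; it is not part of the coverage run. It assumes that chacha.h (included at source line 7) declares struct chacha_ctx and the three functions with byte-pointer and unsigned-integer arguments matching the definitions shown; the key, IV, counter and message values are placeholders.

/*
 * Hypothetical usage sketch (not from the report): encrypt a short buffer
 * with a 256-bit key, an 8-byte IV and an 8-byte block counter.
 */
#include <stdio.h>

#include "chacha.h"	/* assumed to declare struct chacha_ctx and the three functions */

int
main(void)
{
	struct chacha_ctx ctx;
	unsigned char key[32] = { 0 };     /* 256-bit key: takes the kbits == 256 path above */
	unsigned char iv[8] = { 0 };       /* loaded into input[14..15] */
	unsigned char counter[8] = { 0 };  /* loaded into input[12..13]; NULL would start at block 0 */
	unsigned char plain[] = "attack at dawn";
	unsigned char cipher[sizeof(plain)];
	unsigned int i;

	chacha_keysetup(&ctx, key, 256);
	chacha_ivsetup(&ctx, iv, counter);
	chacha_encrypt_bytes(&ctx, plain, cipher, sizeof(plain));

	for (i = 0; i < sizeof(plain); i++)
		printf("%02x", cipher[i]);
	printf("\n");
	return 0;
}

Because ChaCha is a stream cipher (the keystream is XORed into the message at source lines 165-180), running chacha_encrypt_bytes again over the ciphertext with the same key, IV and counter recovers the plaintext.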