// SPDX-License-Identifier: BSD-3-Clause
/*
 * Copyright (C) 2005, 2007 Olivier Gay <olivier.gay@a3.epfl.ch>
 * All rights reserved.
 *
 * FIPS 180-2 SHA-224/256/384/512 implementation
 * Last update: 02/02/2007
 * Issue date: 04/30/2005
 */

#include "avb_sha.h"

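/* Basic bitwise building blocks used by SHA-256: logical shift right,
 * rotate right/left, and the "choose" (CH) and "majority" (MAJ)
 * functions from FIPS 180-2. */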
#define SHFR(x, n) (x >> n)
#define ROTR(x, n) ((x >> n) | (x << ((sizeof(x) << 3) - n)))
#define ROTL(x, n) ((x << n) | (x >> ((sizeof(x) << 3) - n)))
#define CH(x, y, z) ((x & y) ^ (~x & z))
#define MAJ(x, y, z) ((x & y) ^ (x & z) ^ (y & z))

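/* The four SHA-256 sigma functions: F1/F2 are the "big sigma" functions
 * used in each compression round, F3/F4 are the "small sigma" functions
 * used to expand the message schedule. */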
#define SHA256_F1(x) (ROTR(x, 2) ^ ROTR(x, 13) ^ ROTR(x, 22))
#define SHA256_F2(x) (ROTR(x, 6) ^ ROTR(x, 11) ^ ROTR(x, 25))
#define SHA256_F3(x) (ROTR(x, 7) ^ ROTR(x, 18) ^ SHFR(x, 3))
#define SHA256_F4(x) (ROTR(x, 17) ^ ROTR(x, 19) ^ SHFR(x, 10))

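/* Big-endian (de)serialization helpers: UNPACK32/UNPACK64 write a 32/64-bit
 * value to a byte string, PACK32 reads a 32-bit value from one. */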
#define UNPACK32(x, str) \
  { \
    *((str) + 3) = (uint8_t)((x)); \
    *((str) + 2) = (uint8_t)((x) >> 8); \
    *((str) + 1) = (uint8_t)((x) >> 16); \
    *((str) + 0) = (uint8_t)((x) >> 24); \
  }

#define UNPACK64(x, str) \
  { \
    *((str) + 7) = (uint8_t)x; \
    *((str) + 6) = (uint8_t)((uint64_t)x >> 8); \
    *((str) + 5) = (uint8_t)((uint64_t)x >> 16); \
    *((str) + 4) = (uint8_t)((uint64_t)x >> 24); \
    *((str) + 3) = (uint8_t)((uint64_t)x >> 32); \
    *((str) + 2) = (uint8_t)((uint64_t)x >> 40); \
    *((str) + 1) = (uint8_t)((uint64_t)x >> 48); \
    *((str) + 0) = (uint8_t)((uint64_t)x >> 56); \
  }

#define PACK32(str, x) \
  { \
    *(x) = ((uint32_t) * ((str) + 3)) | ((uint32_t) * ((str) + 2) << 8) | \
           ((uint32_t) * ((str) + 1) << 16) | \
           ((uint32_t) * ((str) + 0) << 24); \
  }

/* Macros used for loop unrolling: SHA256_SCR expands one word of the
 * message schedule, SHA256_EXP performs one compression round. */

#define SHA256_SCR(i) \
  { w[i] = SHA256_F4(w[i - 2]) + w[i - 7] + SHA256_F3(w[i - 15]) + w[i - 16]; }

#define SHA256_EXP(a, b, c, d, e, f, g, h, j) \
  { \
    t1 = wv[h] + SHA256_F2(wv[e]) + CH(wv[e], wv[f], wv[g]) + sha256_k[j] + \
         w[j]; \
    t2 = SHA256_F1(wv[a]) + MAJ(wv[a], wv[b], wv[c]); \
    wv[d] += t1; \
    wv[h] = t1 + t2; \
  }

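/* Initial hash value H(0): the first 32 bits of the fractional parts of
 * the square roots of the first eight prime numbers. */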
static const uint32_t sha256_h0[8] = {0x6a09e667,
                                      0xbb67ae85,
                                      0x3c6ef372,
                                      0xa54ff53a,
                                      0x510e527f,
                                      0x9b05688c,
                                      0x1f83d9ab,
                                      0x5be0cd19};

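/* Round constants K: the first 32 bits of the fractional parts of the
 * cube roots of the first 64 prime numbers. */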
static const uint32_t sha256_k[64] = {
    0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1,
    0x923f82a4, 0xab1c5ed5, 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
    0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786,
    0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
    0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147,
    0x06ca6351, 0x14292967, 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
    0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 0xa2bfe8a1, 0xa81a664b,
    0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
    0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a,
    0x5b9cca4f, 0x682e6ff3, 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
    0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2};

/* SHA-256 implementation */
void avb_sha256_init(AvbSHA256Ctx* ctx) {
#ifndef UNROLL_LOOPS
  int i;
  for (i = 0; i < 8; i++) {
    ctx->h[i] = sha256_h0[i];
  }
#else
  ctx->h[0] = sha256_h0[0];
  ctx->h[1] = sha256_h0[1];
  ctx->h[2] = sha256_h0[2];
  ctx->h[3] = sha256_h0[3];
  ctx->h[4] = sha256_h0[4];
  ctx->h[5] = sha256_h0[5];
  ctx->h[6] = sha256_h0[6];
  ctx->h[7] = sha256_h0[7];
#endif /* !UNROLL_LOOPS */

  ctx->len = 0;
  ctx->tot_len = 0;
}

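/* Core compression function: absorbs block_nb consecutive 64-byte blocks
 * from message into the intermediate hash state ctx->h. */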
static void SHA256_transform(AvbSHA256Ctx* ctx,
                             const uint8_t* message,
                             size_t block_nb) {
  uint32_t w[64];
  uint32_t wv[8];
  uint32_t t1, t2;
  const unsigned char* sub_block;
  size_t i;

#ifndef UNROLL_LOOPS
  size_t j;
#endif

  for (i = 0; i < block_nb; i++) {
    sub_block = message + (i << 6);

#ifndef UNROLL_LOOPS
    for (j = 0; j < 16; j++) {
      PACK32(&sub_block[j << 2], &w[j]);
    }

    for (j = 16; j < 64; j++) {
      SHA256_SCR(j);
    }

    for (j = 0; j < 8; j++) {
      wv[j] = ctx->h[j];
    }

    for (j = 0; j < 64; j++) {
      t1 = wv[7] + SHA256_F2(wv[4]) + CH(wv[4], wv[5], wv[6]) + sha256_k[j] +
           w[j];
      t2 = SHA256_F1(wv[0]) + MAJ(wv[0], wv[1], wv[2]);
      wv[7] = wv[6];
      wv[6] = wv[5];
      wv[5] = wv[4];
      wv[4] = wv[3] + t1;
      wv[3] = wv[2];
      wv[2] = wv[1];
      wv[1] = wv[0];
      wv[0] = t1 + t2;
    }

    for (j = 0; j < 8; j++) {
      ctx->h[j] += wv[j];
    }
#else
    PACK32(&sub_block[0], &w[0]);
    PACK32(&sub_block[4], &w[1]);
    PACK32(&sub_block[8], &w[2]);
    PACK32(&sub_block[12], &w[3]);
    PACK32(&sub_block[16], &w[4]);
    PACK32(&sub_block[20], &w[5]);
    PACK32(&sub_block[24], &w[6]);
    PACK32(&sub_block[28], &w[7]);
    PACK32(&sub_block[32], &w[8]);
    PACK32(&sub_block[36], &w[9]);
    PACK32(&sub_block[40], &w[10]);
    PACK32(&sub_block[44], &w[11]);
    PACK32(&sub_block[48], &w[12]);
    PACK32(&sub_block[52], &w[13]);
    PACK32(&sub_block[56], &w[14]);
    PACK32(&sub_block[60], &w[15]);

    SHA256_SCR(16);
    SHA256_SCR(17);
    SHA256_SCR(18);
    SHA256_SCR(19);
    SHA256_SCR(20);
    SHA256_SCR(21);
    SHA256_SCR(22);
    SHA256_SCR(23);
    SHA256_SCR(24);
    SHA256_SCR(25);
    SHA256_SCR(26);
    SHA256_SCR(27);
    SHA256_SCR(28);
    SHA256_SCR(29);
    SHA256_SCR(30);
    SHA256_SCR(31);
    SHA256_SCR(32);
    SHA256_SCR(33);
    SHA256_SCR(34);
    SHA256_SCR(35);
    SHA256_SCR(36);
    SHA256_SCR(37);
    SHA256_SCR(38);
    SHA256_SCR(39);
    SHA256_SCR(40);
    SHA256_SCR(41);
    SHA256_SCR(42);
    SHA256_SCR(43);
    SHA256_SCR(44);
    SHA256_SCR(45);
    SHA256_SCR(46);
    SHA256_SCR(47);
    SHA256_SCR(48);
    SHA256_SCR(49);
    SHA256_SCR(50);
    SHA256_SCR(51);
    SHA256_SCR(52);
    SHA256_SCR(53);
    SHA256_SCR(54);
    SHA256_SCR(55);
    SHA256_SCR(56);
    SHA256_SCR(57);
    SHA256_SCR(58);
    SHA256_SCR(59);
    SHA256_SCR(60);
    SHA256_SCR(61);
    SHA256_SCR(62);
    SHA256_SCR(63);

    wv[0] = ctx->h[0];
    wv[1] = ctx->h[1];
    wv[2] = ctx->h[2];
    wv[3] = ctx->h[3];
    wv[4] = ctx->h[4];
    wv[5] = ctx->h[5];
    wv[6] = ctx->h[6];
    wv[7] = ctx->h[7];

    SHA256_EXP(0, 1, 2, 3, 4, 5, 6, 7, 0);
    SHA256_EXP(7, 0, 1, 2, 3, 4, 5, 6, 1);
    SHA256_EXP(6, 7, 0, 1, 2, 3, 4, 5, 2);
    SHA256_EXP(5, 6, 7, 0, 1, 2, 3, 4, 3);
    SHA256_EXP(4, 5, 6, 7, 0, 1, 2, 3, 4);
    SHA256_EXP(3, 4, 5, 6, 7, 0, 1, 2, 5);
    SHA256_EXP(2, 3, 4, 5, 6, 7, 0, 1, 6);
    SHA256_EXP(1, 2, 3, 4, 5, 6, 7, 0, 7);
    SHA256_EXP(0, 1, 2, 3, 4, 5, 6, 7, 8);
    SHA256_EXP(7, 0, 1, 2, 3, 4, 5, 6, 9);
    SHA256_EXP(6, 7, 0, 1, 2, 3, 4, 5, 10);
    SHA256_EXP(5, 6, 7, 0, 1, 2, 3, 4, 11);
    SHA256_EXP(4, 5, 6, 7, 0, 1, 2, 3, 12);
    SHA256_EXP(3, 4, 5, 6, 7, 0, 1, 2, 13);
    SHA256_EXP(2, 3, 4, 5, 6, 7, 0, 1, 14);
    SHA256_EXP(1, 2, 3, 4, 5, 6, 7, 0, 15);
    SHA256_EXP(0, 1, 2, 3, 4, 5, 6, 7, 16);
    SHA256_EXP(7, 0, 1, 2, 3, 4, 5, 6, 17);
    SHA256_EXP(6, 7, 0, 1, 2, 3, 4, 5, 18);
    SHA256_EXP(5, 6, 7, 0, 1, 2, 3, 4, 19);
    SHA256_EXP(4, 5, 6, 7, 0, 1, 2, 3, 20);
    SHA256_EXP(3, 4, 5, 6, 7, 0, 1, 2, 21);
    SHA256_EXP(2, 3, 4, 5, 6, 7, 0, 1, 22);
    SHA256_EXP(1, 2, 3, 4, 5, 6, 7, 0, 23);
    SHA256_EXP(0, 1, 2, 3, 4, 5, 6, 7, 24);
    SHA256_EXP(7, 0, 1, 2, 3, 4, 5, 6, 25);
    SHA256_EXP(6, 7, 0, 1, 2, 3, 4, 5, 26);
    SHA256_EXP(5, 6, 7, 0, 1, 2, 3, 4, 27);
    SHA256_EXP(4, 5, 6, 7, 0, 1, 2, 3, 28);
    SHA256_EXP(3, 4, 5, 6, 7, 0, 1, 2, 29);
    SHA256_EXP(2, 3, 4, 5, 6, 7, 0, 1, 30);
    SHA256_EXP(1, 2, 3, 4, 5, 6, 7, 0, 31);
    SHA256_EXP(0, 1, 2, 3, 4, 5, 6, 7, 32);
    SHA256_EXP(7, 0, 1, 2, 3, 4, 5, 6, 33);
    SHA256_EXP(6, 7, 0, 1, 2, 3, 4, 5, 34);
    SHA256_EXP(5, 6, 7, 0, 1, 2, 3, 4, 35);
    SHA256_EXP(4, 5, 6, 7, 0, 1, 2, 3, 36);
    SHA256_EXP(3, 4, 5, 6, 7, 0, 1, 2, 37);
    SHA256_EXP(2, 3, 4, 5, 6, 7, 0, 1, 38);
    SHA256_EXP(1, 2, 3, 4, 5, 6, 7, 0, 39);
    SHA256_EXP(0, 1, 2, 3, 4, 5, 6, 7, 40);
    SHA256_EXP(7, 0, 1, 2, 3, 4, 5, 6, 41);
    SHA256_EXP(6, 7, 0, 1, 2, 3, 4, 5, 42);
    SHA256_EXP(5, 6, 7, 0, 1, 2, 3, 4, 43);
    SHA256_EXP(4, 5, 6, 7, 0, 1, 2, 3, 44);
    SHA256_EXP(3, 4, 5, 6, 7, 0, 1, 2, 45);
    SHA256_EXP(2, 3, 4, 5, 6, 7, 0, 1, 46);
    SHA256_EXP(1, 2, 3, 4, 5, 6, 7, 0, 47);
    SHA256_EXP(0, 1, 2, 3, 4, 5, 6, 7, 48);
    SHA256_EXP(7, 0, 1, 2, 3, 4, 5, 6, 49);
    SHA256_EXP(6, 7, 0, 1, 2, 3, 4, 5, 50);
    SHA256_EXP(5, 6, 7, 0, 1, 2, 3, 4, 51);
    SHA256_EXP(4, 5, 6, 7, 0, 1, 2, 3, 52);
    SHA256_EXP(3, 4, 5, 6, 7, 0, 1, 2, 53);
    SHA256_EXP(2, 3, 4, 5, 6, 7, 0, 1, 54);
    SHA256_EXP(1, 2, 3, 4, 5, 6, 7, 0, 55);
    SHA256_EXP(0, 1, 2, 3, 4, 5, 6, 7, 56);
    SHA256_EXP(7, 0, 1, 2, 3, 4, 5, 6, 57);
    SHA256_EXP(6, 7, 0, 1, 2, 3, 4, 5, 58);
    SHA256_EXP(5, 6, 7, 0, 1, 2, 3, 4, 59);
    SHA256_EXP(4, 5, 6, 7, 0, 1, 2, 3, 60);
    SHA256_EXP(3, 4, 5, 6, 7, 0, 1, 2, 61);
    SHA256_EXP(2, 3, 4, 5, 6, 7, 0, 1, 62);
    SHA256_EXP(1, 2, 3, 4, 5, 6, 7, 0, 63);

    ctx->h[0] += wv[0];
    ctx->h[1] += wv[1];
    ctx->h[2] += wv[2];
    ctx->h[3] += wv[3];
    ctx->h[4] += wv[4];
    ctx->h[5] += wv[5];
    ctx->h[6] += wv[6];
    ctx->h[7] += wv[7];
#endif /* !UNROLL_LOOPS */
  }
}

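/* Absorb len bytes of input: complete any partially filled block first,
 * transform all full blocks, and buffer the remaining tail in ctx->block
 * for the next call. */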
void avb_sha256_update(AvbSHA256Ctx* ctx, const uint8_t* data, size_t len) {
  size_t block_nb;
  size_t new_len, rem_len, tmp_len;
  const uint8_t* shifted_data;

  tmp_len = AVB_SHA256_BLOCK_SIZE - ctx->len;
  rem_len = len < tmp_len ? len : tmp_len;

  avb_memcpy(&ctx->block[ctx->len], data, rem_len);

  if (ctx->len + len < AVB_SHA256_BLOCK_SIZE) {
    ctx->len += len;
    return;
  }

  new_len = len - rem_len;
  block_nb = new_len / AVB_SHA256_BLOCK_SIZE;

  shifted_data = data + rem_len;

  SHA256_transform(ctx, ctx->block, 1);
  SHA256_transform(ctx, shifted_data, block_nb);

  rem_len = new_len % AVB_SHA256_BLOCK_SIZE;

  avb_memcpy(ctx->block, &shifted_data[block_nb << 6], rem_len);

  ctx->len = rem_len;
  ctx->tot_len += (block_nb + 1) << 6;
}

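/* Produce the digest: append the 0x80 terminator and zero padding, write
 * the total message length in bits as a big-endian 64-bit value, run the
 * final transform and serialize ctx->h into ctx->buf. */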
uint8_t* avb_sha256_final(AvbSHA256Ctx* ctx) {
  size_t block_nb;
  size_t pm_len;
  uint64_t len_b;
#ifndef UNROLL_LOOPS
  size_t i;
#endif

  block_nb =
      (1 + ((AVB_SHA256_BLOCK_SIZE - 9) < (ctx->len % AVB_SHA256_BLOCK_SIZE)));

  len_b = (ctx->tot_len + ctx->len) << 3;
  pm_len = block_nb << 6;

  avb_memset(ctx->block + ctx->len, 0, pm_len - ctx->len);
  ctx->block[ctx->len] = 0x80;
  UNPACK64(len_b, ctx->block + pm_len - 8);

  SHA256_transform(ctx, ctx->block, block_nb);

#ifndef UNROLL_LOOPS
  for (i = 0; i < 8; i++) {
    UNPACK32(ctx->h[i], &ctx->buf[i << 2]);
  }
#else
  UNPACK32(ctx->h[0], &ctx->buf[0]);
  UNPACK32(ctx->h[1], &ctx->buf[4]);
  UNPACK32(ctx->h[2], &ctx->buf[8]);
  UNPACK32(ctx->h[3], &ctx->buf[12]);
  UNPACK32(ctx->h[4], &ctx->buf[16]);
  UNPACK32(ctx->h[5], &ctx->buf[20]);
  UNPACK32(ctx->h[6], &ctx->buf[24]);
  UNPACK32(ctx->h[7], &ctx->buf[28]);
#endif /* !UNROLL_LOOPS */

  return ctx->buf;
}
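
/*
 * Illustrative usage sketch (not part of libavb, guarded out of the build):
 * hashing a buffer with the streaming API above. AVB_SHA256_DIGEST_SIZE (32)
 * is assumed to be available via the headers pulled in by avb_sha.h;
 * avb_memcpy is the same helper already used in this file. The guard macro
 * AVB_SHA256_USAGE_EXAMPLE is purely hypothetical.
 */
#ifdef AVB_SHA256_USAGE_EXAMPLE
static void avb_sha256_example(const uint8_t* data,
                               size_t len,
                               uint8_t out[AVB_SHA256_DIGEST_SIZE]) {
  AvbSHA256Ctx ctx;
  size_t half = len / 2;

  avb_sha256_init(&ctx);
  /* Input may be fed in arbitrary chunks; here it is split in two. */
  avb_sha256_update(&ctx, data, half);
  avb_sha256_update(&ctx, data + half, len - half);
  /* avb_sha256_final() returns a pointer to the digest held in the context. */
  avb_memcpy(out, avb_sha256_final(&ctx), AVB_SHA256_DIGEST_SIZE);
}
#endif /* AVB_SHA256_USAGE_EXAMPLE */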