• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

cryspen / hacl-packages / 5808703668

pending completion
5808703668

Pull #418

github

web-flow
Merge 4abdd0203 into 1575f26e8
Pull Request #418: Add support for Hacl_AES_128_GCM_NI and Hacl_AES_128_GCM_M32

7433 of 7433 new or added lines in 12 files covered. (100.0%)

31975 of 62256 relevant lines covered (51.36%)

1238863.46 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

71.52
/src/Hacl_AES_128_CTR32_BitSlice.c
1
/* MIT License
2
 *
3
 * Copyright (c) 2016-2022 INRIA, CMU and Microsoft Corporation
4
 * Copyright (c) 2022-2023 HACL* Contributors
5
 *
6
 * Permission is hereby granted, free of charge, to any person obtaining a copy
7
 * of this software and associated documentation files (the "Software"), to deal
8
 * in the Software without restriction, including without limitation the rights
9
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10
 * copies of the Software, and to permit persons to whom the Software is
11
 * furnished to do so, subject to the following conditions:
12
 *
13
 * The above copyright notice and this permission notice shall be included in all
14
 * copies or substantial portions of the Software.
15
 *
16
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22
 * SOFTWARE.
23
 */
24

25

26
#include "internal/Hacl_AES_128_CTR32_BitSlice.h"
27

28
#include "internal/Hacl_Lib.h"
29

30
/* Eight-way uint64 tuple: the return type of sub_bytes64x8 (one field per
 * bitsliced state row).  The reserved-looking name is KaRaMeL-generated and
 * is referenced by sibling functions, so it must not be changed. */
typedef struct __uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_s
{
  uint64_t fst;
  uint64_t snd;
  uint64_t thd;
  uint64_t f3;
  uint64_t f4;
  uint64_t f5;
  uint64_t f6;
  uint64_t f7;
}
__uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t;
42

43
/*
 * Apply the AES S-box to all lanes of a bitsliced state at once.
 *
 * st0..st7 are the eight bit-planes of the state.  The substitution is a
 * branch-free, table-free boolean circuit over the planes (it appears to be
 * a Boyar–Peralta-style low-gate-count S-box circuit — constant-time by
 * construction).  tmp[0..120] holds the circuit's intermediate wires.
 */
static __uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t
sub_bytes64x8(
  uint64_t st0,
  uint64_t st1,
  uint64_t st2,
  uint64_t st3,
  uint64_t st4,
  uint64_t st5,
  uint64_t st6,
  uint64_t st7
)
{
  uint64_t input[8U] = { 0U };
  input[0U] = st0;
  input[1U] = st1;
  input[2U] = st2;
  input[3U] = st3;
  input[4U] = st4;
  input[5U] = st5;
  input[6U] = st6;
  input[7U] = st7;
  uint64_t output[8U] = { 0U };
  uint64_t tmp[121U] = { 0U };
  /* Wires 0..7: the input planes in reversed order. */
  tmp[0U] = input[7U];
  tmp[1U] = input[6U];
  tmp[2U] = input[5U];
  tmp[3U] = input[4U];
  tmp[4U] = input[3U];
  tmp[5U] = input[2U];
  tmp[6U] = input[1U];
  tmp[7U] = input[0U];
  /* Top linear layer (XORs only). */
  tmp[8U] = tmp[3U] ^ tmp[5U];
  tmp[9U] = tmp[0U] ^ tmp[6U];
  tmp[10U] = tmp[0U] ^ tmp[3U];
  tmp[11U] = tmp[0U] ^ tmp[5U];
  tmp[12U] = tmp[1U] ^ tmp[2U];
  tmp[13U] = tmp[12U] ^ tmp[7U];
  tmp[14U] = tmp[13U] ^ tmp[3U];
  tmp[15U] = tmp[9U] ^ tmp[8U];
  tmp[16U] = tmp[13U] ^ tmp[0U];
  tmp[17U] = tmp[13U] ^ tmp[6U];
  tmp[18U] = tmp[17U] ^ tmp[11U];
  tmp[19U] = tmp[4U] ^ tmp[15U];
  tmp[20U] = tmp[19U] ^ tmp[5U];
  tmp[21U] = tmp[19U] ^ tmp[1U];
  tmp[22U] = tmp[20U] ^ tmp[7U];
  tmp[23U] = tmp[20U] ^ tmp[12U];
  tmp[24U] = tmp[21U] ^ tmp[10U];
  tmp[25U] = tmp[7U] ^ tmp[24U];
  tmp[26U] = tmp[23U] ^ tmp[24U];
  tmp[27U] = tmp[23U] ^ tmp[11U];
  tmp[28U] = tmp[12U] ^ tmp[24U];
  tmp[29U] = tmp[9U] ^ tmp[28U];
  tmp[30U] = tmp[0U] ^ tmp[28U];
  /* Nonlinear middle section (the only AND gates in the circuit). */
  tmp[31U] = tmp[15U] & tmp[20U];
  tmp[32U] = tmp[18U] & tmp[22U];
  tmp[33U] = tmp[32U] ^ tmp[31U];
  tmp[34U] = tmp[14U] & tmp[7U];
  tmp[35U] = tmp[34U] ^ tmp[31U];
  tmp[36U] = tmp[9U] & tmp[28U];
  tmp[37U] = tmp[17U] & tmp[13U];
  tmp[38U] = tmp[37U] ^ tmp[36U];
  tmp[39U] = tmp[16U] & tmp[25U];
  tmp[40U] = tmp[39U] ^ tmp[36U];
  tmp[41U] = tmp[10U] & tmp[24U];
  tmp[42U] = tmp[8U] & tmp[26U];
  tmp[43U] = tmp[42U] ^ tmp[41U];
  tmp[44U] = tmp[11U] & tmp[23U];
  tmp[45U] = tmp[44U] ^ tmp[41U];
  tmp[46U] = tmp[33U] ^ tmp[21U];
  tmp[47U] = tmp[35U] ^ tmp[45U];
  tmp[48U] = tmp[38U] ^ tmp[43U];
  tmp[49U] = tmp[40U] ^ tmp[45U];
  tmp[50U] = tmp[46U] ^ tmp[43U];
  tmp[51U] = tmp[47U] ^ tmp[27U];
  tmp[52U] = tmp[48U] ^ tmp[29U];
  tmp[53U] = tmp[49U] ^ tmp[30U];
  tmp[54U] = tmp[50U] ^ tmp[51U];
  tmp[55U] = tmp[50U] & tmp[52U];
  tmp[56U] = tmp[53U] ^ tmp[55U];
  tmp[57U] = tmp[54U] & tmp[56U];
  tmp[58U] = tmp[57U] ^ tmp[51U];
  tmp[59U] = tmp[52U] ^ tmp[53U];
  tmp[60U] = tmp[51U] ^ tmp[55U];
  tmp[61U] = tmp[60U] & tmp[59U];
  tmp[62U] = tmp[61U] ^ tmp[53U];
  tmp[63U] = tmp[52U] ^ tmp[62U];
  tmp[64U] = tmp[56U] ^ tmp[62U];
  tmp[65U] = tmp[53U] & tmp[64U];
  tmp[66U] = tmp[65U] ^ tmp[63U];
  tmp[67U] = tmp[56U] ^ tmp[65U];
  tmp[68U] = tmp[58U] & tmp[67U];
  tmp[69U] = tmp[54U] ^ tmp[68U];
  tmp[70U] = tmp[69U] ^ tmp[66U];
  tmp[71U] = tmp[58U] ^ tmp[62U];
  tmp[72U] = tmp[58U] ^ tmp[69U];
  tmp[73U] = tmp[62U] ^ tmp[66U];
  tmp[74U] = tmp[71U] ^ tmp[70U];
  tmp[75U] = tmp[73U] & tmp[20U];
  tmp[76U] = tmp[66U] & tmp[22U];
  tmp[77U] = tmp[62U] & tmp[7U];
  tmp[78U] = tmp[72U] & tmp[28U];
  tmp[79U] = tmp[69U] & tmp[13U];
  tmp[80U] = tmp[58U] & tmp[25U];
  tmp[81U] = tmp[71U] & tmp[24U];
  tmp[82U] = tmp[74U] & tmp[26U];
  tmp[83U] = tmp[70U] & tmp[23U];
  tmp[84U] = tmp[73U] & tmp[15U];
  tmp[85U] = tmp[66U] & tmp[18U];
  tmp[86U] = tmp[62U] & tmp[14U];
  tmp[87U] = tmp[72U] & tmp[9U];
  tmp[88U] = tmp[69U] & tmp[17U];
  tmp[89U] = tmp[58U] & tmp[16U];
  tmp[90U] = tmp[71U] & tmp[10U];
  tmp[91U] = tmp[74U] & tmp[8U];
  tmp[92U] = tmp[70U] & tmp[11U];
  /* Bottom linear layer, including the S-box's affine constant, which shows
   * up as the XNOR gates ((~a & ~b) | (a & b)) below. */
  tmp[93U] = tmp[90U] ^ tmp[91U];
  tmp[94U] = tmp[85U] ^ tmp[93U];
  tmp[95U] = tmp[84U] ^ tmp[94U];
  tmp[96U] = tmp[75U] ^ tmp[77U];
  tmp[97U] = tmp[76U] ^ tmp[75U];
  tmp[98U] = tmp[78U] ^ tmp[79U];
  tmp[99U] = tmp[87U] ^ tmp[96U];
  tmp[100U] = tmp[82U] ^ tmp[98U];
  tmp[101U] = tmp[83U] ^ tmp[99U];
  tmp[102U] = tmp[100U] ^ tmp[101U];
  tmp[103U] = tmp[98U] ^ tmp[97U];
  tmp[104U] = tmp[78U] ^ tmp[80U];
  tmp[105U] = tmp[88U] ^ tmp[93U];
  tmp[106U] = tmp[96U] ^ tmp[104U];
  tmp[107U] = tmp[95U] ^ tmp[103U];
  tmp[108U] = tmp[81U] ^ tmp[100U];
  tmp[109U] = tmp[89U] ^ tmp[102U];
  tmp[110U] = tmp[105U] ^ tmp[106U];
  /* XNOR */
  uint64_t uu____0 = tmp[87U];
  uint64_t uu____1 = tmp[110U];
  tmp[111U] = (~uu____0 & ~uu____1) | (uu____0 & uu____1);
  tmp[112U] = tmp[90U] ^ tmp[108U];
  tmp[113U] = tmp[94U] ^ tmp[86U];
  tmp[114U] = tmp[95U] ^ tmp[108U];
  /* XNOR */
  uint64_t uu____2 = tmp[102U];
  uint64_t uu____3 = tmp[110U];
  tmp[115U] = (~uu____2 & ~uu____3) | (uu____2 & uu____3);
  tmp[116U] = tmp[106U] ^ tmp[107U];
  /* XNOR */
  uint64_t uu____4 = tmp[107U];
  uint64_t uu____5 = tmp[108U];
  tmp[117U] = (~uu____4 & ~uu____5) | (uu____4 & uu____5);
  tmp[118U] = tmp[109U] ^ tmp[112U];
  /* XNOR */
  uint64_t uu____6 = tmp[118U];
  uint64_t uu____7 = tmp[92U];
  tmp[119U] = (~uu____6 & ~uu____7) | (uu____6 & uu____7);
  tmp[120U] = tmp[113U] ^ tmp[109U];
  /* Gather the output wires into the eight result planes. */
  uint64_t o = tmp[111U];
  output[0U] = o;
  uint64_t o0 = tmp[115U];
  output[1U] = o0;
  uint64_t o8 = tmp[120U];
  output[2U] = o8;
  uint64_t o9 = tmp[116U];
  output[3U] = o9;
  uint64_t o10 = tmp[107U];
  output[4U] = o10;
  uint64_t o11 = tmp[119U];
  output[5U] = o11;
  uint64_t o12 = tmp[117U];
  output[6U] = o12;
  uint64_t o13 = tmp[114U];
  output[7U] = o13;
  uint64_t o00 = output[0U];
  uint64_t o1 = output[1U];
  uint64_t o2 = output[2U];
  uint64_t o3 = output[3U];
  uint64_t o4 = output[4U];
  uint64_t o5 = output[5U];
  uint64_t o6 = output[6U];
  uint64_t o7 = output[7U];
  return
    (
      (__uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t){
        .fst = o00,
        .snd = o1,
        .thd = o2,
        .f3 = o3,
        .f4 = o4,
        .f5 = o5,
        .f6 = o6,
        .f7 = o7
      }
    );
}
399,280✔
233

234
/* Load one 16-byte block from inp (little-endian) and bitslice it into
 * out[0..7].  Each output word interleaves one bit-plane of the two 64-bit
 * halves: plane i of the low half in bits [0..7]-ish positions and plane i
 * of the high half shifted left by 8 (see the loop body). */
static void load_block0(uint64_t *out, uint8_t *inp)
{
  uint8_t *b1 = inp;
  uint8_t *b2 = inp + (uint32_t)8U;
  uint64_t u0 = load64_le(b1);
  uint64_t fst = u0;
  uint64_t u1 = load64_le(b2);
  uint64_t snd = u1;
  /* 8x8 bit-matrix transpose of each half (helper from internal/Hacl_Lib.h). */
  uint64_t fst1 = Lib_Transposition64x8_transpose_bits64(fst);
  uint64_t snd1 = Lib_Transposition64x8_transpose_bits64(snd);
  KRML_MAYBE_FOR8(i,
    (uint32_t)0U,
    (uint32_t)8U,
    (uint32_t)1U,
    uint32_t sh = i * (uint32_t)8U;
    /* Byte i of each transposed half is bit-plane i; pack the two planes
     * into the low 16 bits of out[i]. */
    uint64_t u = fst1 >> sh & (uint64_t)0xffU;
    uint64_t u10 = u ^ (snd1 >> sh & (uint64_t)0xffU) << (uint32_t)8U;
    out[i] = u10;);
}
54,622✔
253

254
/* In-place 64x8 bit transposition of the state: converts between the
 * bitsliced representation and the byte-linear one (the transform is an
 * involution, so the same routine maps both ways). */
static void transpose_state(uint64_t *st)
{
  uint64_t i0 = st[0U];
  uint64_t i1 = st[1U];
  uint64_t i2 = st[2U];
  uint64_t i3 = st[3U];
  uint64_t i4 = st[4U];
  uint64_t i5 = st[5U];
  uint64_t i6 = st[6U];
  uint64_t i7 = st[7U];
  /* Pack the eight words into the nested pair type expected by the helper. */
  Lib_Transposition64x8_uint64x8
  scrut =
    Lib_Transposition64x8_transpose_bits64x8((
        (Lib_Transposition64x8_uint64x8){
          .fst = { .fst = { .fst = i0, .snd = i1 }, .snd = { .fst = i2, .snd = i3 } },
          .snd = { .fst = { .fst = i4, .snd = i5 }, .snd = { .fst = i6, .snd = i7 } }
        }
      ));
  uint64_t t7 = scrut.snd.snd.snd;
  uint64_t t6 = scrut.snd.snd.fst;
  uint64_t t5 = scrut.snd.fst.snd;
  uint64_t t4 = scrut.snd.fst.fst;
  uint64_t t3 = scrut.fst.snd.snd;
  uint64_t t2 = scrut.fst.snd.fst;
  uint64_t t1 = scrut.fst.fst.snd;
  uint64_t t0 = scrut.fst.fst.fst;
  st[0U] = t0;
  st[1U] = t1;
  st[2U] = t2;
  st[3U] = t3;
  st[4U] = t4;
  st[5U] = t5;
  st[6U] = t6;
  st[7U] = t7;
}
9,548✔
289

290
/* Un-bitslice the state in inp and store the FIRST 16-byte block to out
 * (little-endian).  Only the first two transposed words are written; the
 * other blocks held by the state are discarded.  inp is not modified. */
void Hacl_Impl_AES_CoreBitSlice_store_block0(uint8_t *out, uint64_t *inp)
{
  uint64_t i0 = inp[0U];
  uint64_t i1 = inp[1U];
  uint64_t i2 = inp[2U];
  uint64_t i3 = inp[3U];
  uint64_t i4 = inp[4U];
  uint64_t i5 = inp[5U];
  uint64_t i6 = inp[6U];
  uint64_t i7 = inp[7U];
  Lib_Transposition64x8_uint64x8
  scrut =
    Lib_Transposition64x8_transpose_bits64x8((
        (Lib_Transposition64x8_uint64x8){
          .fst = { .fst = { .fst = i0, .snd = i1 }, .snd = { .fst = i2, .snd = i3 } },
          .snd = { .fst = { .fst = i4, .snd = i5 }, .snd = { .fst = i6, .snd = i7 } }
        }
      ));
  uint64_t t1 = scrut.fst.fst.snd;
  uint64_t t0 = scrut.fst.fst.fst;
  store64_le(out, t0);
  store64_le(out + (uint32_t)8U, t1);
}
18,042✔
313

314
/* Load one 16-byte key block from k, bitslice it, and replicate each
 * 16-bit lane group across all four 16-bit quarters of every word (the two
 * shift-XOR steps below duplicate bits 0..15 into bits 16..31, then bits
 * 0..31 into bits 32..63), so the key applies to all blocks in parallel. */
void Hacl_Impl_AES_CoreBitSlice_load_key1(uint64_t *out, uint8_t *k)
{
  load_block0(out, k);
  KRML_MAYBE_FOR8(i,
    (uint32_t)0U,
    (uint32_t)8U,
    (uint32_t)1U,
    uint64_t u = out[i];
    uint64_t u1 = u ^ u << (uint32_t)16U;
    uint64_t u2 = u1 ^ u1 << (uint32_t)32U;
    out[i] = u2;);
}
27,032✔
326

327
/* Load a 12-byte CTR nonce: zero-pad it to a full 16-byte block (the last
 * four bytes are reserved for the counter) and bitslice/replicate it like a
 * key block. */
void Hacl_Impl_AES_CoreBitSlice_load_nonce(uint64_t *out, uint8_t *nonce1)
{
  uint8_t nb[16U] = { 0U };
  memcpy(nb, nonce1, (uint32_t)12U * sizeof (uint8_t));
  Hacl_Impl_AES_CoreBitSlice_load_key1(out, nb);
}
18,042✔
333

334
/* Build the bitsliced CTR input state for four consecutive blocks:
 * counter, counter+1, counter+2, counter+3 (big-endian, per CTR32), merged
 * with the pre-bitsliced nonce in nonce1. */
void Hacl_Impl_AES_CoreBitSlice_load_state(uint64_t *out, uint64_t *nonce1, uint32_t counter)
{
  uint8_t ctr[16U] = { 0U };
  store32_be(ctr, counter);
  store32_be(ctr + (uint32_t)4U, counter + (uint32_t)1U);
  store32_be(ctr + (uint32_t)8U, counter + (uint32_t)2U);
  store32_be(ctr + (uint32_t)12U, counter + (uint32_t)3U);
  load_block0(out, ctr);
  KRML_MAYBE_FOR8(i,
    (uint32_t)0U,
    (uint32_t)8U,
    (uint32_t)1U,
    /* Move each counter's bits into the top nibble of its 16-bit quarter
     * (the counter bytes' slot in the padded nonce block), mask everything
     * else off, and XOR in the nonce planes. */
    uint64_t u = out[i];
    uint64_t
    u1 = ((u << (uint32_t)12U | u << (uint32_t)24U) | u << (uint32_t)36U) | u << (uint32_t)48U;
    uint64_t u2 = u1 & (uint64_t)0xf000f000f000f000U;
    out[i] = u2 ^ nonce1[i];);
}
27,590✔
352

353
/* AddRoundKey on the bitsliced state: XOR each of the eight state planes
 * in st with the matching round-key plane in ost, in place. */
void Hacl_Impl_AES_CoreBitSlice_xor_state_key1(uint64_t *st, uint64_t *ost)
{
  for (uint32_t row = 0U; row < 8U; row++)
  {
    st[row] ^= ost[row];
  }
}
357,802✔
357

358
/* XOR the 64-byte keystream held (bitsliced) in st with inp and write the
 * result to out.  NOTE: clobbers st — transpose_state un-bitslices it in
 * place before the XOR, so st is no longer a valid bitsliced state after
 * this call. */
static void xor_block(uint8_t *out, uint64_t *st, uint8_t *inp)
{
  transpose_state(st);
  KRML_MAYBE_FOR8(i,
    (uint32_t)0U,
    (uint32_t)8U,
    (uint32_t)1U,
    uint8_t *ob = out + i * (uint32_t)8U;
    uint8_t *ib = inp + i * (uint32_t)8U;
    uint64_t u = load64_le(ib);
    uint64_t u0 = u;
    store64_le(ob, u0 ^ st[i]););
}
9,548✔
371

372
/* SubBytes on the bitsliced state: unpack the eight planes, run them
 * through the constant-time S-box circuit, and write the results back. */
static void sub_bytes_state(uint64_t *st)
{
  uint64_t st0 = st[0U];
  uint64_t st1 = st[1U];
  uint64_t st2 = st[2U];
  uint64_t st3 = st[3U];
  uint64_t st4 = st[4U];
  uint64_t st5 = st[5U];
  uint64_t st6 = st[6U];
  uint64_t st7 = st[7U];
  __uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t_uint64_t
  scrut = sub_bytes64x8(st0, st1, st2, st3, st4, st5, st6, st7);
  uint64_t st01 = scrut.fst;
  uint64_t st11 = scrut.snd;
  uint64_t st21 = scrut.thd;
  uint64_t st31 = scrut.f3;
  uint64_t st41 = scrut.f4;
  uint64_t st51 = scrut.f5;
  uint64_t st61 = scrut.f6;
  uint64_t st71 = scrut.f7;
  st[0U] = st01;
  st[1U] = st11;
  st[2U] = st21;
  st[3U] = st31;
  st[4U] = st41;
  st[5U] = st51;
  st[6U] = st61;
  st[7U] = st71;
}
399,280✔
401

402
/* ShiftRows on the bitsliced state.  Within each 64-bit plane the four AES
 * rows live in interleaved nibble positions; each masked shift below moves
 * one row's bits by its rotation amount.  Pure bit permutation per plane. */
static void shift_rows_state(uint64_t *st)
{
  for (uint32_t i = 0U; i < 8U; i++)
  {
    uint64_t row = st[i];
    /* Row 0: no rotation. */
    uint64_t shifted = row & (uint64_t)0x1111111111111111U;
    /* Row 1: rotate by one column (wrap handled by the paired mask). */
    shifted |= (row & (uint64_t)0x2220222022202220U) >> (uint32_t)4U;
    shifted |= (row & (uint64_t)0x0002000200020002U) << (uint32_t)12U;
    /* Row 2: rotate by two columns. */
    shifted |= (row & (uint64_t)0x4400440044004400U) >> (uint32_t)8U;
    shifted |= (row & (uint64_t)0x0044004400440044U) << (uint32_t)8U;
    /* Row 3: rotate by three columns. */
    shifted |= (row & (uint64_t)0x8000800080008000U) >> (uint32_t)12U;
    shifted |= (row & (uint64_t)0x0888088808880888U) << (uint32_t)4U;
    st[i] = shifted;
  }
}
330,212✔
418

419
/* MixColumns on the bitsliced state, computed plane by plane over GF(2).
 * col[i] holds plane i of (state XOR state-rotated-by-one-row); the second
 * rotation (by two rows) combined with these gives the 2,3,1,1 MDS matrix.
 * The final four XORs of col[7U] (the top plane's carry) into planes
 * 0, 1, 3, 4 implement the xtime reduction by the AES polynomial
 * x^8 + x^4 + x^3 + x + 1. */
static void mix_columns_state(uint64_t *st)
{
  uint64_t col[8U] = { 0U };
  KRML_MAYBE_FOR8(i,
    (uint32_t)0U,
    (uint32_t)8U,
    (uint32_t)1U,
    uint64_t coli = st[i];
    /* Rotate each nibble's rows by one position and XOR with the original. */
    col[i] =
      coli
      ^
        ((coli & (uint64_t)0xeeeeeeeeeeeeeeeeU)
        >> (uint32_t)1U
        | (coli & (uint64_t)0x1111111111111111U) << (uint32_t)3U););
  uint64_t col0 = col[0U];
  /* Rotate-by-two of the combined plane. */
  uint64_t
  ncol0 =
    col0
    ^
      ((col0 & (uint64_t)0xccccccccccccccccU)
      >> (uint32_t)2U
      | (col0 & (uint64_t)0x3333333333333333U) << (uint32_t)2U);
  st[0U] = st[0U] ^ ncol0;
  /* Planes 1..7: same rotate-by-two, plus the doubled contribution from
   * the plane below (prev), which is the per-plane shift of xtime. */
  KRML_MAYBE_FOR7(i,
    (uint32_t)0U,
    (uint32_t)7U,
    (uint32_t)1U,
    uint64_t prev = col[i];
    uint64_t next = col[i + (uint32_t)1U];
    uint64_t
    ncoli =
      next
      ^
        ((next & (uint64_t)0xccccccccccccccccU)
        >> (uint32_t)2U
        | (next & (uint64_t)0x3333333333333333U) << (uint32_t)2U);
    st[i + (uint32_t)1U] = st[i + (uint32_t)1U] ^ (ncoli ^ prev););
  /* xtime reduction: fold the overflow plane back per the AES polynomial. */
  st[0U] = st[0U] ^ col[7U];
  st[1U] = st[1U] ^ col[7U];
  st[3U] = st[3U] ^ col[7U];
  st[4U] = st[4U] ^ col[7U];
}
302,622✔
461

462
/* One full AES round on the bitsliced state:
 * SubBytes -> ShiftRows -> MixColumns -> AddRoundKey(key). */
void Hacl_Impl_AES_CoreBitSlice_aes_enc(uint64_t *st, uint64_t *key)
{
  sub_bytes_state(st);
  shift_rows_state(st);
  mix_columns_state(st);
  Hacl_Impl_AES_CoreBitSlice_xor_state_key1(st, key);
}
302,622✔
469

470
/* Final AES round: like aes_enc but without MixColumns, per FIPS 197. */
void Hacl_Impl_AES_CoreBitSlice_aes_enc_last(uint64_t *st, uint64_t *key)
{
  sub_bytes_state(st);
  shift_rows_state(st);
  Hacl_Impl_AES_CoreBitSlice_xor_state_key1(st, key);
}
27,590✔
476

477
/* Bitsliced equivalent of the AESKEYGENASSIST step: copy prev into next,
 * apply SubBytes, then for each plane keep only the last key word's nibble,
 * rotate it (RotWord, done bitwise via the n >> 1 | n << 3 rotation), XOR
 * in this round's rcon bit replicated across all quarters, and position the
 * result for the subsequent expansion step. */
void
Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(uint64_t *next, uint64_t *prev, uint8_t rcon1)
{
  memcpy(next, prev, (uint32_t)8U * sizeof (uint64_t));
  sub_bytes_state(next);
  KRML_MAYBE_FOR8(i,
    (uint32_t)0U,
    (uint32_t)8U,
    (uint32_t)1U,
    /* Isolate the substituted last word's nibble in this plane. */
    uint64_t u3 = next[i] & (uint64_t)0xf000f000f000f000U;
    uint64_t n = u3 >> (uint32_t)12U;
    /* RotWord within the nibble. */
    uint64_t n1 = (n >> (uint32_t)1U | n << (uint32_t)3U) & (uint64_t)0x000f000f000f000fU;
    /* Bit i of rcon1 feeds plane i, replicated to all four quarters. */
    uint64_t ri = (uint64_t)(rcon1 >> i & (uint8_t)1U);
    uint64_t ri1 = ri ^ ri << (uint32_t)16U;
    uint64_t ri2 = ri1 ^ ri1 << (uint32_t)32U;
    uint64_t n2 = n1 ^ ri2;
    uint64_t n3 = n2 << (uint32_t)12U;
    next[i] = n3 ^ u3 >> (uint32_t)4U;);
}
69,068✔
496

497
/* Key-expansion combine step: per plane, XOR-prefix-sum the four 16-bit
 * key-word nibbles of prev (each masked shift propagates the lower words
 * into the higher ones, matching w[i] ^= w[i-1] of the AES key schedule)
 * and fold the result into next. */
void Hacl_Impl_AES_CoreBitSlice_key_expansion_step(uint64_t *next, uint64_t *prev)
{
  for (uint32_t i = 0U; i < 8U; i++)
  {
    uint64_t p = prev[i];
    uint64_t acc = p;
    acc ^= (p & (uint64_t)0x0fff0fff0fff0fffU) << (uint32_t)4U;
    acc ^= (p & (uint64_t)0x00ff00ff00ff00ffU) << (uint32_t)8U;
    acc ^= (p & (uint64_t)0x000f000f000f000fU) << (uint32_t)12U;
    next[i] ^= acc;
  }
}
69,068✔
517

518
/* AES-128 CTR32 encryption/decryption over len bytes, four blocks at a
 * time.  ctx layout: ctx[0..7] = bitsliced nonce, ctx[8..] = expanded key
 * (11 round keys of 8 words).  counter advances by 4 per 64-byte batch.
 * A partial final batch is processed through a zero-padded stack buffer. */
void
Hacl_Impl_AES_Generic_aes128_ctr_bitslice(
  uint32_t len,
  uint8_t *out,
  uint8_t *inp,
  uint64_t *ctx,
  uint32_t counter
)
{
  uint32_t blocks64 = len / (uint32_t)64U;
  for (uint32_t i = (uint32_t)0U; i < blocks64; i++)
  {
    uint32_t ctr = counter + i * (uint32_t)4U;
    uint8_t *ib = inp + i * (uint32_t)64U;
    uint8_t *ob = out + i * (uint32_t)64U;
    uint64_t st[8U] = { 0U };
    uint64_t *kex = ctx + (uint32_t)8U;
    uint64_t *n = ctx;
    Hacl_Impl_AES_CoreBitSlice_load_state(st, n, ctr);
    uint32_t klen = (uint32_t)8U;
    uint64_t *k0 = kex;
    uint64_t *kr = kex + klen;
    uint64_t *kn = kex + (uint32_t)10U * klen;
    /* Initial AddRoundKey, 9 middle rounds, final round (10 total). */
    Hacl_Impl_AES_CoreBitSlice_xor_state_key1(st, k0);
    KRML_MAYBE_FOR9(i0,
      (uint32_t)0U,
      (uint32_t)9U,
      (uint32_t)1U,
      uint64_t *sub_key = kr + i0 * (uint32_t)8U;
      Hacl_Impl_AES_CoreBitSlice_aes_enc(st, sub_key););
    Hacl_Impl_AES_CoreBitSlice_aes_enc_last(st, kn);
    xor_block(ob, st, ib);
  }
  uint32_t rem = len % (uint32_t)64U;
  uint8_t last[64U] = { 0U };
  if (rem > (uint32_t)0U)
  {
    /* Partial tail: pad to 64 bytes, encrypt, copy back only rem bytes. */
    uint32_t ctr = counter + blocks64 * (uint32_t)4U;
    uint8_t *ib = inp + blocks64 * (uint32_t)64U;
    uint8_t *ob = out + blocks64 * (uint32_t)64U;
    memcpy(last, ib, rem * sizeof (uint8_t));
    uint64_t st[8U] = { 0U };
    uint64_t *kex = ctx + (uint32_t)8U;
    uint64_t *n = ctx;
    Hacl_Impl_AES_CoreBitSlice_load_state(st, n, ctr);
    uint32_t klen = (uint32_t)8U;
    uint64_t *k0 = kex;
    uint64_t *kr = kex + klen;
    uint64_t *kn = kex + (uint32_t)10U * klen;
    Hacl_Impl_AES_CoreBitSlice_xor_state_key1(st, k0);
    KRML_MAYBE_FOR9(i,
      (uint32_t)0U,
      (uint32_t)9U,
      (uint32_t)1U,
      uint64_t *sub_key = kr + i * (uint32_t)8U;
      Hacl_Impl_AES_CoreBitSlice_aes_enc(st, sub_key););
    Hacl_Impl_AES_CoreBitSlice_aes_enc_last(st, kn);
    xor_block(last, st, last);
    memcpy(ob, last, rem * sizeof (uint8_t));
  }
}
4,402✔
579

580
/* AES-256 CTR32 encryption/decryption; identical structure to the AES-128
 * variant above but with 14 round keys (initial AddRoundKey + 13 middle
 * rounds + final round). */
void
Hacl_Impl_AES_Generic_aes256_ctr_bitslice(
  uint32_t len,
  uint8_t *out,
  uint8_t *inp,
  uint64_t *ctx,
  uint32_t counter
)
{
  uint32_t blocks64 = len / (uint32_t)64U;
  for (uint32_t i = (uint32_t)0U; i < blocks64; i++)
  {
    uint32_t ctr = counter + i * (uint32_t)4U;
    uint8_t *ib = inp + i * (uint32_t)64U;
    uint8_t *ob = out + i * (uint32_t)64U;
    uint64_t st[8U] = { 0U };
    uint64_t *kex = ctx + (uint32_t)8U;
    uint64_t *n = ctx;
    Hacl_Impl_AES_CoreBitSlice_load_state(st, n, ctr);
    uint32_t klen = (uint32_t)8U;
    uint64_t *k0 = kex;
    uint64_t *kr = kex + klen;
    uint64_t *kn = kex + (uint32_t)14U * klen;
    Hacl_Impl_AES_CoreBitSlice_xor_state_key1(st, k0);
    KRML_MAYBE_FOR13(i0,
      (uint32_t)0U,
      (uint32_t)13U,
      (uint32_t)1U,
      uint64_t *sub_key = kr + i0 * (uint32_t)8U;
      Hacl_Impl_AES_CoreBitSlice_aes_enc(st, sub_key););
    Hacl_Impl_AES_CoreBitSlice_aes_enc_last(st, kn);
    xor_block(ob, st, ib);
  }
  uint32_t rem = len % (uint32_t)64U;
  uint8_t last[64U] = { 0U };
  if (rem > (uint32_t)0U)
  {
    /* Partial tail: pad to 64 bytes, encrypt, copy back only rem bytes. */
    uint32_t ctr = counter + blocks64 * (uint32_t)4U;
    uint8_t *ib = inp + blocks64 * (uint32_t)64U;
    uint8_t *ob = out + blocks64 * (uint32_t)64U;
    memcpy(last, ib, rem * sizeof (uint8_t));
    uint64_t st[8U] = { 0U };
    uint64_t *kex = ctx + (uint32_t)8U;
    uint64_t *n = ctx;
    Hacl_Impl_AES_CoreBitSlice_load_state(st, n, ctr);
    uint32_t klen = (uint32_t)8U;
    uint64_t *k0 = kex;
    uint64_t *kr = kex + klen;
    uint64_t *kn = kex + (uint32_t)14U * klen;
    Hacl_Impl_AES_CoreBitSlice_xor_state_key1(st, k0);
    KRML_MAYBE_FOR13(i,
      (uint32_t)0U,
      (uint32_t)13U,
      (uint32_t)1U,
      uint64_t *sub_key = kr + i * (uint32_t)8U;
      Hacl_Impl_AES_CoreBitSlice_aes_enc(st, sub_key););
    Hacl_Impl_AES_CoreBitSlice_aes_enc_last(st, kn);
    xor_block(last, st, last);
    memcpy(ob, last, rem * sizeof (uint8_t));
  }
}
4,278✔
641

642
void Hacl_AES_128_CTR32_BitSlice_aes128_init(uint64_t *ctx, uint8_t *key, uint8_t *nonce)
643
{
3,038✔
644
  uint64_t *kex = ctx + (uint32_t)8U;
3,038✔
645
  uint64_t *n = ctx;
3,038✔
646
  uint32_t klen = (uint32_t)8U;
3,038✔
647
  Hacl_Impl_AES_CoreBitSlice_load_key1(kex, key);
3,038✔
648
  uint64_t *prev = kex;
3,038✔
649
  uint64_t *next = kex + klen;
3,038✔
650
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next, prev, (uint8_t)0x01U);
3,038✔
651
  KRML_MAYBE_FOR8(i,
3,038✔
652
    (uint32_t)0U,
3,038✔
653
    (uint32_t)8U,
3,038✔
654
    (uint32_t)1U,
3,038✔
655
    uint64_t n1 = next[i];
3,038✔
656
    uint64_t n2 = n1 & (uint64_t)0xf000f000f000f000U;
3,038✔
657
    uint64_t n3 = n2 ^ n2 >> (uint32_t)4U;
3,038✔
658
    uint64_t n4 = n3 ^ n3 >> (uint32_t)8U;
3,038✔
659
    next[i] = n4;);
3,038✔
660
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next, prev);
3,038✔
661
  uint64_t *prev1 = kex + klen;
3,038✔
662
  uint64_t *next1 = kex + (uint32_t)2U * klen;
3,038✔
663
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next1, prev1, (uint8_t)0x02U);
3,038✔
664
  KRML_MAYBE_FOR8(i,
3,038✔
665
    (uint32_t)0U,
3,038✔
666
    (uint32_t)8U,
3,038✔
667
    (uint32_t)1U,
3,038✔
668
    uint64_t n1 = next1[i];
3,038✔
669
    uint64_t n2 = n1 & (uint64_t)0xf000f000f000f000U;
3,038✔
670
    uint64_t n3 = n2 ^ n2 >> (uint32_t)4U;
3,038✔
671
    uint64_t n4 = n3 ^ n3 >> (uint32_t)8U;
3,038✔
672
    next1[i] = n4;);
3,038✔
673
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next1, prev1);
3,038✔
674
  uint64_t *prev2 = kex + klen * (uint32_t)2U;
3,038✔
675
  uint64_t *next2 = kex + klen * (uint32_t)3U;
3,038✔
676
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next2, prev2, (uint8_t)0x04U);
3,038✔
677
  KRML_MAYBE_FOR8(i,
3,038✔
678
    (uint32_t)0U,
3,038✔
679
    (uint32_t)8U,
3,038✔
680
    (uint32_t)1U,
3,038✔
681
    uint64_t n1 = next2[i];
3,038✔
682
    uint64_t n2 = n1 & (uint64_t)0xf000f000f000f000U;
3,038✔
683
    uint64_t n3 = n2 ^ n2 >> (uint32_t)4U;
3,038✔
684
    uint64_t n4 = n3 ^ n3 >> (uint32_t)8U;
3,038✔
685
    next2[i] = n4;);
3,038✔
686
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next2, prev2);
3,038✔
687
  uint64_t *prev3 = kex + klen * (uint32_t)3U;
3,038✔
688
  uint64_t *next3 = kex + klen * (uint32_t)4U;
3,038✔
689
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next3, prev3, (uint8_t)0x08U);
3,038✔
690
  KRML_MAYBE_FOR8(i,
3,038✔
691
    (uint32_t)0U,
3,038✔
692
    (uint32_t)8U,
3,038✔
693
    (uint32_t)1U,
3,038✔
694
    uint64_t n1 = next3[i];
3,038✔
695
    uint64_t n2 = n1 & (uint64_t)0xf000f000f000f000U;
3,038✔
696
    uint64_t n3 = n2 ^ n2 >> (uint32_t)4U;
3,038✔
697
    uint64_t n4 = n3 ^ n3 >> (uint32_t)8U;
3,038✔
698
    next3[i] = n4;);
3,038✔
699
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next3, prev3);
3,038✔
700
  uint64_t *prev4 = kex + klen * (uint32_t)4U;
3,038✔
701
  uint64_t *next4 = kex + klen * (uint32_t)5U;
3,038✔
702
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next4, prev4, (uint8_t)0x10U);
3,038✔
703
  KRML_MAYBE_FOR8(i,
3,038✔
704
    (uint32_t)0U,
3,038✔
705
    (uint32_t)8U,
3,038✔
706
    (uint32_t)1U,
3,038✔
707
    uint64_t n1 = next4[i];
3,038✔
708
    uint64_t n2 = n1 & (uint64_t)0xf000f000f000f000U;
3,038✔
709
    uint64_t n3 = n2 ^ n2 >> (uint32_t)4U;
3,038✔
710
    uint64_t n4 = n3 ^ n3 >> (uint32_t)8U;
3,038✔
711
    next4[i] = n4;);
3,038✔
712
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next4, prev4);
3,038✔
713
  uint64_t *prev5 = kex + klen * (uint32_t)5U;
3,038✔
714
  uint64_t *next5 = kex + klen * (uint32_t)6U;
3,038✔
715
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next5, prev5, (uint8_t)0x20U);
3,038✔
716
  KRML_MAYBE_FOR8(i,
3,038✔
717
    (uint32_t)0U,
3,038✔
718
    (uint32_t)8U,
3,038✔
719
    (uint32_t)1U,
3,038✔
720
    uint64_t n1 = next5[i];
3,038✔
721
    uint64_t n2 = n1 & (uint64_t)0xf000f000f000f000U;
3,038✔
722
    uint64_t n3 = n2 ^ n2 >> (uint32_t)4U;
3,038✔
723
    uint64_t n4 = n3 ^ n3 >> (uint32_t)8U;
3,038✔
724
    next5[i] = n4;);
3,038✔
725
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next5, prev5);
3,038✔
726
  uint64_t *prev6 = kex + klen * (uint32_t)6U;
3,038✔
727
  uint64_t *next6 = kex + klen * (uint32_t)7U;
3,038✔
728
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next6, prev6, (uint8_t)0x40U);
3,038✔
729
  KRML_MAYBE_FOR8(i,
3,038✔
730
    (uint32_t)0U,
3,038✔
731
    (uint32_t)8U,
3,038✔
732
    (uint32_t)1U,
3,038✔
733
    uint64_t n1 = next6[i];
3,038✔
734
    uint64_t n2 = n1 & (uint64_t)0xf000f000f000f000U;
3,038✔
735
    uint64_t n3 = n2 ^ n2 >> (uint32_t)4U;
3,038✔
736
    uint64_t n4 = n3 ^ n3 >> (uint32_t)8U;
3,038✔
737
    next6[i] = n4;);
3,038✔
738
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next6, prev6);
3,038✔
739
  uint64_t *prev7 = kex + klen * (uint32_t)7U;
3,038✔
740
  uint64_t *next7 = kex + klen * (uint32_t)8U;
3,038✔
741
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next7, prev7, (uint8_t)0x80U);
3,038✔
742
  KRML_MAYBE_FOR8(i,
3,038✔
743
    (uint32_t)0U,
3,038✔
744
    (uint32_t)8U,
3,038✔
745
    (uint32_t)1U,
3,038✔
746
    uint64_t n1 = next7[i];
3,038✔
747
    uint64_t n2 = n1 & (uint64_t)0xf000f000f000f000U;
3,038✔
748
    uint64_t n3 = n2 ^ n2 >> (uint32_t)4U;
3,038✔
749
    uint64_t n4 = n3 ^ n3 >> (uint32_t)8U;
3,038✔
750
    next7[i] = n4;);
3,038✔
751
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next7, prev7);
3,038✔
752
  uint64_t *prev8 = kex + klen * (uint32_t)8U;
3,038✔
753
  uint64_t *next8 = kex + klen * (uint32_t)9U;
3,038✔
754
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next8, prev8, (uint8_t)0x1bU);
3,038✔
755
  KRML_MAYBE_FOR8(i,
3,038✔
756
    (uint32_t)0U,
3,038✔
757
    (uint32_t)8U,
3,038✔
758
    (uint32_t)1U,
3,038✔
759
    uint64_t n1 = next8[i];
3,038✔
760
    uint64_t n2 = n1 & (uint64_t)0xf000f000f000f000U;
3,038✔
761
    uint64_t n3 = n2 ^ n2 >> (uint32_t)4U;
3,038✔
762
    uint64_t n4 = n3 ^ n3 >> (uint32_t)8U;
3,038✔
763
    next8[i] = n4;);
3,038✔
764
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next8, prev8);
3,038✔
765
  uint64_t *prev9 = kex + klen * (uint32_t)9U;
3,038✔
766
  uint64_t *next9 = kex + klen * (uint32_t)10U;
3,038✔
767
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next9, prev9, (uint8_t)0x36U);
3,038✔
768
  KRML_MAYBE_FOR8(i,
3,038✔
769
    (uint32_t)0U,
3,038✔
770
    (uint32_t)8U,
3,038✔
771
    (uint32_t)1U,
3,038✔
772
    uint64_t n1 = next9[i];
3,038✔
773
    uint64_t n2 = n1 & (uint64_t)0xf000f000f000f000U;
3,038✔
774
    uint64_t n3 = n2 ^ n2 >> (uint32_t)4U;
3,038✔
775
    uint64_t n4 = n3 ^ n3 >> (uint32_t)8U;
3,038✔
776
    next9[i] = n4;);
3,038✔
777
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next9, prev9);
3,038✔
778
  Hacl_Impl_AES_CoreBitSlice_load_nonce(n, nonce);
3,038✔
779
}
3,038✔
780

781
/* Store a fresh nonce into an already-initialized AES-128 CTR32 context.
 * The bitsliced nonce occupies the first 8 words of `ctx`; the expanded
 * round keys that follow are left untouched. */
void Hacl_AES_128_CTR32_BitSlice_aes128_set_nonce(uint64_t *ctx, uint8_t *nonce)
{
  Hacl_Impl_AES_CoreBitSlice_load_nonce(ctx, nonce);
}
786

787
/* Produce one AES-128 keystream block for the given 32-bit counter.
 * `ctx` holds the bitsliced nonce (first 8 words) followed by the 11
 * expanded round keys (8 words each); the 16-byte result goes to `kb`. */
void Hacl_AES_128_CTR32_BitSlice_aes128_key_block(uint8_t *kb, uint64_t *ctx, uint32_t counter)
{
  uint64_t st[8U] = { 0U };
  uint64_t *round_keys = ctx + (uint32_t)8U;
  /* Build the bitsliced state from the nonce and counter. */
  Hacl_Impl_AES_CoreBitSlice_load_state(st, ctx, counter);
  /* AES-128 schedule: initial key xor, nine full rounds, one final round. */
  Hacl_Impl_AES_CoreBitSlice_xor_state_key1(st, round_keys);
  for (uint32_t round = (uint32_t)1U; round < (uint32_t)10U; round++)
  {
    Hacl_Impl_AES_CoreBitSlice_aes_enc(st, round_keys + round * (uint32_t)8U);
  }
  Hacl_Impl_AES_CoreBitSlice_aes_enc_last(st, round_keys + (uint32_t)10U * (uint32_t)8U);
  Hacl_Impl_AES_CoreBitSlice_store_block0(kb, st);
}
807

808
inline void
809
Hacl_AES_128_CTR32_BitSlice_aes128_ctr_encrypt(
810
  uint32_t len,
811
  uint8_t *out,
812
  uint8_t *inp,
813
  uint8_t *k,
814
  uint8_t *n,
815
  uint32_t c
816
)
817
{
×
818
  uint64_t ctx[96U] = { 0U };
×
819
  uint64_t *kex = ctx + (uint32_t)8U;
×
820
  uint64_t *n1 = ctx;
×
821
  uint32_t klen = (uint32_t)8U;
×
822
  Hacl_Impl_AES_CoreBitSlice_load_key1(kex, k);
×
823
  uint64_t *prev = kex;
×
824
  uint64_t *next = kex + klen;
×
825
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next, prev, (uint8_t)0x01U);
×
826
  KRML_MAYBE_FOR8(i,
×
827
    (uint32_t)0U,
×
828
    (uint32_t)8U,
×
829
    (uint32_t)1U,
×
830
    uint64_t n2 = next[i];
×
831
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
832
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
833
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
834
    next[i] = n5;);
×
835
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next, prev);
×
836
  uint64_t *prev1 = kex + klen;
×
837
  uint64_t *next1 = kex + (uint32_t)2U * klen;
×
838
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next1, prev1, (uint8_t)0x02U);
×
839
  KRML_MAYBE_FOR8(i,
×
840
    (uint32_t)0U,
×
841
    (uint32_t)8U,
×
842
    (uint32_t)1U,
×
843
    uint64_t n2 = next1[i];
×
844
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
845
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
846
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
847
    next1[i] = n5;);
×
848
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next1, prev1);
×
849
  uint64_t *prev2 = kex + klen * (uint32_t)2U;
×
850
  uint64_t *next2 = kex + klen * (uint32_t)3U;
×
851
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next2, prev2, (uint8_t)0x04U);
×
852
  KRML_MAYBE_FOR8(i,
×
853
    (uint32_t)0U,
×
854
    (uint32_t)8U,
×
855
    (uint32_t)1U,
×
856
    uint64_t n2 = next2[i];
×
857
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
858
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
859
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
860
    next2[i] = n5;);
×
861
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next2, prev2);
×
862
  uint64_t *prev3 = kex + klen * (uint32_t)3U;
×
863
  uint64_t *next3 = kex + klen * (uint32_t)4U;
×
864
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next3, prev3, (uint8_t)0x08U);
×
865
  KRML_MAYBE_FOR8(i,
×
866
    (uint32_t)0U,
×
867
    (uint32_t)8U,
×
868
    (uint32_t)1U,
×
869
    uint64_t n2 = next3[i];
×
870
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
871
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
872
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
873
    next3[i] = n5;);
×
874
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next3, prev3);
×
875
  uint64_t *prev4 = kex + klen * (uint32_t)4U;
×
876
  uint64_t *next4 = kex + klen * (uint32_t)5U;
×
877
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next4, prev4, (uint8_t)0x10U);
×
878
  KRML_MAYBE_FOR8(i,
×
879
    (uint32_t)0U,
×
880
    (uint32_t)8U,
×
881
    (uint32_t)1U,
×
882
    uint64_t n2 = next4[i];
×
883
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
884
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
885
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
886
    next4[i] = n5;);
×
887
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next4, prev4);
×
888
  uint64_t *prev5 = kex + klen * (uint32_t)5U;
×
889
  uint64_t *next5 = kex + klen * (uint32_t)6U;
×
890
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next5, prev5, (uint8_t)0x20U);
×
891
  KRML_MAYBE_FOR8(i,
×
892
    (uint32_t)0U,
×
893
    (uint32_t)8U,
×
894
    (uint32_t)1U,
×
895
    uint64_t n2 = next5[i];
×
896
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
897
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
898
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
899
    next5[i] = n5;);
×
900
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next5, prev5);
×
901
  uint64_t *prev6 = kex + klen * (uint32_t)6U;
×
902
  uint64_t *next6 = kex + klen * (uint32_t)7U;
×
903
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next6, prev6, (uint8_t)0x40U);
×
904
  KRML_MAYBE_FOR8(i,
×
905
    (uint32_t)0U,
×
906
    (uint32_t)8U,
×
907
    (uint32_t)1U,
×
908
    uint64_t n2 = next6[i];
×
909
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
910
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
911
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
912
    next6[i] = n5;);
×
913
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next6, prev6);
×
914
  uint64_t *prev7 = kex + klen * (uint32_t)7U;
×
915
  uint64_t *next7 = kex + klen * (uint32_t)8U;
×
916
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next7, prev7, (uint8_t)0x80U);
×
917
  KRML_MAYBE_FOR8(i,
×
918
    (uint32_t)0U,
×
919
    (uint32_t)8U,
×
920
    (uint32_t)1U,
×
921
    uint64_t n2 = next7[i];
×
922
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
923
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
924
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
925
    next7[i] = n5;);
×
926
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next7, prev7);
×
927
  uint64_t *prev8 = kex + klen * (uint32_t)8U;
×
928
  uint64_t *next8 = kex + klen * (uint32_t)9U;
×
929
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next8, prev8, (uint8_t)0x1bU);
×
930
  KRML_MAYBE_FOR8(i,
×
931
    (uint32_t)0U,
×
932
    (uint32_t)8U,
×
933
    (uint32_t)1U,
×
934
    uint64_t n2 = next8[i];
×
935
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
936
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
937
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
938
    next8[i] = n5;);
×
939
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next8, prev8);
×
940
  uint64_t *prev9 = kex + klen * (uint32_t)9U;
×
941
  uint64_t *next9 = kex + klen * (uint32_t)10U;
×
942
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next9, prev9, (uint8_t)0x36U);
×
943
  KRML_MAYBE_FOR8(i,
×
944
    (uint32_t)0U,
×
945
    (uint32_t)8U,
×
946
    (uint32_t)1U,
×
947
    uint64_t n2 = next9[i];
×
948
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
949
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
950
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
951
    next9[i] = n5;);
×
952
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next9, prev9);
×
953
  Hacl_Impl_AES_CoreBitSlice_load_nonce(n1, n);
×
954
  Hacl_Impl_AES_Generic_aes128_ctr_bitslice(len, out, inp, ctx, c);
×
955
}
×
956

957
inline void
958
Hacl_AES_128_CTR32_BitSlice_aes128_ctr_decrypt(
959
  uint32_t len,
960
  uint8_t *out,
961
  uint8_t *inp,
962
  uint8_t *k,
963
  uint8_t *n,
964
  uint32_t c
965
)
966
{
×
967
  uint64_t ctx[96U] = { 0U };
×
968
  uint64_t *kex = ctx + (uint32_t)8U;
×
969
  uint64_t *n1 = ctx;
×
970
  uint32_t klen = (uint32_t)8U;
×
971
  Hacl_Impl_AES_CoreBitSlice_load_key1(kex, k);
×
972
  uint64_t *prev = kex;
×
973
  uint64_t *next = kex + klen;
×
974
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next, prev, (uint8_t)0x01U);
×
975
  KRML_MAYBE_FOR8(i,
×
976
    (uint32_t)0U,
×
977
    (uint32_t)8U,
×
978
    (uint32_t)1U,
×
979
    uint64_t n2 = next[i];
×
980
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
981
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
982
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
983
    next[i] = n5;);
×
984
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next, prev);
×
985
  uint64_t *prev1 = kex + klen;
×
986
  uint64_t *next1 = kex + (uint32_t)2U * klen;
×
987
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next1, prev1, (uint8_t)0x02U);
×
988
  KRML_MAYBE_FOR8(i,
×
989
    (uint32_t)0U,
×
990
    (uint32_t)8U,
×
991
    (uint32_t)1U,
×
992
    uint64_t n2 = next1[i];
×
993
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
994
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
995
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
996
    next1[i] = n5;);
×
997
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next1, prev1);
×
998
  uint64_t *prev2 = kex + klen * (uint32_t)2U;
×
999
  uint64_t *next2 = kex + klen * (uint32_t)3U;
×
1000
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next2, prev2, (uint8_t)0x04U);
×
1001
  KRML_MAYBE_FOR8(i,
×
1002
    (uint32_t)0U,
×
1003
    (uint32_t)8U,
×
1004
    (uint32_t)1U,
×
1005
    uint64_t n2 = next2[i];
×
1006
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
1007
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
1008
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
1009
    next2[i] = n5;);
×
1010
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next2, prev2);
×
1011
  uint64_t *prev3 = kex + klen * (uint32_t)3U;
×
1012
  uint64_t *next3 = kex + klen * (uint32_t)4U;
×
1013
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next3, prev3, (uint8_t)0x08U);
×
1014
  KRML_MAYBE_FOR8(i,
×
1015
    (uint32_t)0U,
×
1016
    (uint32_t)8U,
×
1017
    (uint32_t)1U,
×
1018
    uint64_t n2 = next3[i];
×
1019
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
1020
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
1021
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
1022
    next3[i] = n5;);
×
1023
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next3, prev3);
×
1024
  uint64_t *prev4 = kex + klen * (uint32_t)4U;
×
1025
  uint64_t *next4 = kex + klen * (uint32_t)5U;
×
1026
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next4, prev4, (uint8_t)0x10U);
×
1027
  KRML_MAYBE_FOR8(i,
×
1028
    (uint32_t)0U,
×
1029
    (uint32_t)8U,
×
1030
    (uint32_t)1U,
×
1031
    uint64_t n2 = next4[i];
×
1032
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
1033
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
1034
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
1035
    next4[i] = n5;);
×
1036
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next4, prev4);
×
1037
  uint64_t *prev5 = kex + klen * (uint32_t)5U;
×
1038
  uint64_t *next5 = kex + klen * (uint32_t)6U;
×
1039
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next5, prev5, (uint8_t)0x20U);
×
1040
  KRML_MAYBE_FOR8(i,
×
1041
    (uint32_t)0U,
×
1042
    (uint32_t)8U,
×
1043
    (uint32_t)1U,
×
1044
    uint64_t n2 = next5[i];
×
1045
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
1046
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
1047
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
1048
    next5[i] = n5;);
×
1049
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next5, prev5);
×
1050
  uint64_t *prev6 = kex + klen * (uint32_t)6U;
×
1051
  uint64_t *next6 = kex + klen * (uint32_t)7U;
×
1052
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next6, prev6, (uint8_t)0x40U);
×
1053
  KRML_MAYBE_FOR8(i,
×
1054
    (uint32_t)0U,
×
1055
    (uint32_t)8U,
×
1056
    (uint32_t)1U,
×
1057
    uint64_t n2 = next6[i];
×
1058
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
1059
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
1060
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
1061
    next6[i] = n5;);
×
1062
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next6, prev6);
×
1063
  uint64_t *prev7 = kex + klen * (uint32_t)7U;
×
1064
  uint64_t *next7 = kex + klen * (uint32_t)8U;
×
1065
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next7, prev7, (uint8_t)0x80U);
×
1066
  KRML_MAYBE_FOR8(i,
×
1067
    (uint32_t)0U,
×
1068
    (uint32_t)8U,
×
1069
    (uint32_t)1U,
×
1070
    uint64_t n2 = next7[i];
×
1071
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
1072
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
1073
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
1074
    next7[i] = n5;);
×
1075
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next7, prev7);
×
1076
  uint64_t *prev8 = kex + klen * (uint32_t)8U;
×
1077
  uint64_t *next8 = kex + klen * (uint32_t)9U;
×
1078
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next8, prev8, (uint8_t)0x1bU);
×
1079
  KRML_MAYBE_FOR8(i,
×
1080
    (uint32_t)0U,
×
1081
    (uint32_t)8U,
×
1082
    (uint32_t)1U,
×
1083
    uint64_t n2 = next8[i];
×
1084
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
1085
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
1086
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
1087
    next8[i] = n5;);
×
1088
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next8, prev8);
×
1089
  uint64_t *prev9 = kex + klen * (uint32_t)9U;
×
1090
  uint64_t *next9 = kex + klen * (uint32_t)10U;
×
1091
  Hacl_Impl_AES_CoreBitSlice_aes_keygen_assist(next9, prev9, (uint8_t)0x36U);
×
1092
  KRML_MAYBE_FOR8(i,
×
1093
    (uint32_t)0U,
×
1094
    (uint32_t)8U,
×
1095
    (uint32_t)1U,
×
1096
    uint64_t n2 = next9[i];
×
1097
    uint64_t n3 = n2 & (uint64_t)0xf000f000f000f000U;
×
1098
    uint64_t n4 = n3 ^ n3 >> (uint32_t)4U;
×
1099
    uint64_t n5 = n4 ^ n4 >> (uint32_t)8U;
×
1100
    next9[i] = n5;);
×
1101
  Hacl_Impl_AES_CoreBitSlice_key_expansion_step(next9, prev9);
×
1102
  Hacl_Impl_AES_CoreBitSlice_load_nonce(n1, n);
×
1103
  Hacl_Impl_AES_Generic_aes128_ctr_bitslice(len, out, inp, ctx, c);
×
1104
}
×
1105

STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc