
cryspen / hacl-packages, build 5808703668 (pending completion)
Pull Request #418: Add support for Hacl_AES_128_GCM_NI and Hacl_AES_128_GCM_M32
Merge 4abdd0203 into 1575f26e8 (github / web-flow)

7433 of 7433 new or added lines in 12 files covered (100.0%)
31975 of 62256 relevant lines covered (51.36%)
1238863.46 hits per line

Source File
/src/Hacl_AES_128_GCM_CT64.c (68.63% covered)

Coverage note: every executed line in the three functions below records 3,038 hits; the iv_len != 12 branches of encrypt and decrypt are uncovered (0 hits). Within decrypt, the tag check succeeds on 1,364 of the 3,038 calls and fails on the remaining 1,674.

/* MIT License
 *
 * Copyright (c) 2016-2022 INRIA, CMU and Microsoft Corporation
 * Copyright (c) 2022-2023 HACL* Contributors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */


#include "Hacl_AES_128_GCM_CT64.h"

#include "internal/Hacl_AES_128_CTR32_BitSlice.h"

uint32_t Hacl_AES_128_GCM_CT64_aes_gcm_ctx_len = (uint32_t)116U;

void Hacl_AES_128_GCM_CT64_aes128_gcm_init(uint64_t *ctx, uint8_t *key)
{
  uint8_t gcm_key[16U] = { 0U };
  uint8_t nonce0[12U] = { 0U };
  uint64_t *aes_ctx = ctx;
  uint64_t *gcm_ctx = ctx + (uint32_t)96U;
  Hacl_AES_128_CTR32_BitSlice_aes128_init(aes_ctx, key, nonce0);
  Hacl_AES_128_CTR32_BitSlice_aes128_key_block(gcm_key, aes_ctx, (uint32_t)0U);
  Hacl_Gf128_CT64_gcm_init(gcm_ctx, gcm_key);
}
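
For orientation, the pointer arithmetic used above and in the two functions below implies the following layout for the 116-word context. This is a reading of the offsets in this file, not a documented HACL* structure, and the macro names are illustrative only:

/* Illustrative layout of the uint64_t[116] AES-128-GCM CT64 context
 * (Hacl_AES_128_GCM_CT64_aes_gcm_ctx_len == 116; the code indexes up to ctx[115U]).
 * These names are not part of the HACL* API; the library uses raw offsets. */

/* Words 0-95: bitsliced AES-128 key schedule and nonce/counter state (aes_ctx = ctx). */
#define AES_CTX_OFFSET (uint32_t)0U

/* Words 96-113: GF(128)/GHASH state passed to Hacl_Gf128_CT64_* (gcm_ctx = ctx + 96U).
 * Its first two words are zeroed as the accumulator before each update, and words 8-9
 * of it are read back as the hash key in the iv_len != 12 path. */
#define GCM_CTX_OFFSET (uint32_t)96U

/* Words 114-115: the cached AES key block E_K(J0) (tag_mix), XORed into the GHASH
 * output to produce the 16-byte tag. */
#define TAG_MIX_OFFSET (uint32_t)114U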

void
Hacl_AES_128_GCM_CT64_aes128_gcm_encrypt(
  uint64_t *ctx,
  uint32_t len,
  uint8_t *out,
  uint8_t *text,
  uint32_t aad_len,
  uint8_t *aad,
  uint32_t iv_len,
  uint8_t *iv
)
{
  uint8_t tmp[16U] = { 0U };
  uint8_t *cip = out;
  uint64_t *aes_ctx = ctx;
  uint64_t *gcm_ctx = ctx + (uint32_t)96U;
  uint64_t *tag_mix = ctx + (uint32_t)114U;
  uint32_t ctr;
  uint8_t tag_mix10[16U] = { 0U };
  uint8_t gcm_key[16U] = { 0U };
  uint8_t tag_iv[16U] = { 0U };
  uint8_t size_iv[16U] = { 0U };
  uint8_t tag_mix1[16U] = { 0U };
  if (iv_len == (uint32_t)12U)
  {
    uint64_t *aes_ctx1 = ctx;
    Hacl_AES_128_CTR32_BitSlice_aes128_set_nonce(aes_ctx1, iv);
    Hacl_AES_128_CTR32_BitSlice_aes128_key_block(tag_mix10, aes_ctx1, (uint32_t)1U);
    uint64_t u = load64_le(tag_mix10);
    ctx[114U] = u;
    uint64_t u0 = load64_le(tag_mix10 + (uint32_t)8U);
    ctx[115U] = u0;
    ctr = (uint32_t)2U;
  }
  else
  {
    uint64_t *aes_ctx1 = ctx;
    uint64_t *gcm_ctx1 = ctx + (uint32_t)96U;
    store64_be(gcm_key + (uint32_t)8U, gcm_ctx1[8U]);
    store64_be(gcm_key, gcm_ctx1[9U]);
    Hacl_Gf128_CT64_ghash(tag_iv, iv_len, iv, gcm_key);
    store64_be(size_iv + (uint32_t)8U, (uint64_t)(iv_len * (uint32_t)8U));
    KRML_MAYBE_FOR16(i,
      (uint32_t)0U,
      (uint32_t)16U,
      (uint32_t)1U,
      size_iv[i] = tag_iv[i] ^ size_iv[i];);
    Hacl_Gf128_CT64_ghash(tag_iv, (uint32_t)16U, size_iv, gcm_key);
    Hacl_AES_128_CTR32_BitSlice_aes128_set_nonce(aes_ctx1, tag_iv);
    uint32_t u0 = load32_be(tag_iv + (uint32_t)12U);
    uint32_t ctr0 = u0;
    Hacl_AES_128_CTR32_BitSlice_aes128_key_block(tag_mix1, aes_ctx1, ctr0);
    uint64_t u = load64_le(tag_mix1);
    ctx[114U] = u;
    uint64_t u1 = load64_le(tag_mix1 + (uint32_t)8U);
    ctx[115U] = u1;
    ctr = ctr0 + (uint32_t)1U;
  }
  Hacl_Impl_AES_Generic_aes128_ctr_bitslice(len, cip, text, aes_ctx, ctr);
  gcm_ctx[0U] = (uint64_t)0U;
  gcm_ctx[1U] = (uint64_t)0U;
  Hacl_Gf128_CT64_gcm_update_blocks_padded(gcm_ctx, aad_len, aad);
  Hacl_Gf128_CT64_gcm_update_blocks_padded(gcm_ctx, len, cip);
  store64_be(tmp, (uint64_t)(aad_len * (uint32_t)8U));
  store64_be(tmp + (uint32_t)8U, (uint64_t)(len * (uint32_t)8U));
  Hacl_Gf128_CT64_gcm_update_blocks(gcm_ctx, (uint32_t)16U, tmp);
  Hacl_Gf128_CT64_gcm_emit(tmp, gcm_ctx);
  uint64_t u0 = load64_le(tmp);
  uint64_t tmp0 = u0;
  uint64_t u = load64_le(tmp + (uint32_t)8U);
  uint64_t tmp1 = u;
  uint64_t tmp01 = tmp0 ^ tag_mix[0U];
  uint64_t tmp11 = tmp1 ^ tag_mix[1U];
  store64_le(out + len, tmp01);
  store64_le(out + len + (uint32_t)8U, tmp11);
}
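
The branch on iv_len in aes128_gcm_encrypt (mirrored in aes128_gcm_decrypt below) derives the initial counter block in the two ways the GCM specification allows. The following is a condensed sketch of that logic in GCM notation; it is a reading of the code above, not library code:

/*
 * if (iv_len == 12):
 *   J0      = IV || 0x00000001                -- the 96-bit IV is used directly
 *   tag_mix = AES-128_K(J0)                   -- key block at counter value 1
 *   ctr     = 2                               -- bulk CTR encryption starts at 2
 * else:
 *   H       = GHASH key, recovered from gcm_ctx words 8-9
 *   J0      = GHASH_H(pad(IV) || 0^64 || [8 * iv_len]_64)
 *   tag_mix = AES-128_K(J0)                   -- key block at counter be32(J0[12..15])
 *   ctr     = be32(J0[12..15]) + 1
 *
 * In both cases tag_mix is cached in ctx[114U..115U] and XORed with
 * GHASH_H(AAD || ciphertext || lengths) to produce the 16-byte tag.
 */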

bool
Hacl_AES_128_GCM_CT64_aes128_gcm_decrypt(
  uint64_t *ctx,
  uint32_t len,
  uint8_t *out,
  uint8_t *cipher,
  uint32_t aad_len,
  uint8_t *aad,
  uint32_t iv_len,
  uint8_t *iv
)
{
  uint8_t scratch[18U] = { 0U };
  uint8_t *text = scratch;
  uint8_t *result = scratch + (uint32_t)17U;
  uint8_t *ciphertext = cipher;
  uint8_t *tag = cipher + len;
  uint32_t ctr;
  uint8_t tag_mix0[16U] = { 0U };
  uint8_t gcm_key[16U] = { 0U };
  uint8_t tag_iv[16U] = { 0U };
  uint8_t size_iv[16U] = { 0U };
  uint8_t tag_mix1[16U] = { 0U };
  if (iv_len == (uint32_t)12U)
  {
    uint64_t *aes_ctx = ctx;
    Hacl_AES_128_CTR32_BitSlice_aes128_set_nonce(aes_ctx, iv);
    Hacl_AES_128_CTR32_BitSlice_aes128_key_block(tag_mix0, aes_ctx, (uint32_t)1U);
    uint64_t u = load64_le(tag_mix0);
    ctx[114U] = u;
    uint64_t u0 = load64_le(tag_mix0 + (uint32_t)8U);
    ctx[115U] = u0;
    ctr = (uint32_t)2U;
  }
  else
  {
    uint64_t *aes_ctx = ctx;
    uint64_t *gcm_ctx = ctx + (uint32_t)96U;
    store64_be(gcm_key + (uint32_t)8U, gcm_ctx[8U]);
    store64_be(gcm_key, gcm_ctx[9U]);
    Hacl_Gf128_CT64_ghash(tag_iv, iv_len, iv, gcm_key);
    store64_be(size_iv + (uint32_t)8U, (uint64_t)(iv_len * (uint32_t)8U));
    KRML_MAYBE_FOR16(i,
      (uint32_t)0U,
      (uint32_t)16U,
      (uint32_t)1U,
      size_iv[i] = tag_iv[i] ^ size_iv[i];);
    Hacl_Gf128_CT64_ghash(tag_iv, (uint32_t)16U, size_iv, gcm_key);
    Hacl_AES_128_CTR32_BitSlice_aes128_set_nonce(aes_ctx, tag_iv);
    uint32_t u0 = load32_be(tag_iv + (uint32_t)12U);
    uint32_t ctr0 = u0;
    Hacl_AES_128_CTR32_BitSlice_aes128_key_block(tag_mix1, aes_ctx, ctr0);
    uint64_t u = load64_le(tag_mix1);
    ctx[114U] = u;
    uint64_t u1 = load64_le(tag_mix1 + (uint32_t)8U);
    ctx[115U] = u1;
    ctr = ctr0 + (uint32_t)1U;
  }
  uint64_t *aes_ctx = ctx;
  uint64_t *gcm_ctx = ctx + (uint32_t)96U;
  uint64_t *tag_mix = ctx + (uint32_t)114U;
  gcm_ctx[0U] = (uint64_t)0U;
  gcm_ctx[1U] = (uint64_t)0U;
  Hacl_Gf128_CT64_gcm_update_blocks_padded(gcm_ctx, aad_len, aad);
  Hacl_Gf128_CT64_gcm_update_blocks_padded(gcm_ctx, len, ciphertext);
  store64_be(text, (uint64_t)(aad_len * (uint32_t)8U));
  store64_be(text + (uint32_t)8U, (uint64_t)(len * (uint32_t)8U));
  Hacl_Gf128_CT64_gcm_update_blocks(gcm_ctx, (uint32_t)16U, text);
  Hacl_Gf128_CT64_gcm_emit(text, gcm_ctx);
  uint64_t u0 = load64_le(text);
  uint64_t text0 = u0;
  uint64_t u = load64_le(text + (uint32_t)8U);
  uint64_t text1 = u;
  uint64_t text01 = text0 ^ tag_mix[0U];
  uint64_t text11 = text1 ^ tag_mix[1U];
  store64_le(text, text01);
  store64_le(text + (uint32_t)8U, text11);
  KRML_MAYBE_FOR16(i,
    (uint32_t)0U,
    (uint32_t)16U,
    (uint32_t)1U,
    result[0U] = result[0U] | (text[i] ^ tag[i]););
  uint8_t res8 = result[0U];
  if (res8 == (uint8_t)0U)
  {
    Hacl_Impl_AES_Generic_aes128_ctr_bitslice(len, out, ciphertext, aes_ctx, ctr);
    return true;
  }
  return false;
}
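
Taken together, the three functions implement the usual one-shot AEAD flow: init with the key, encrypt producing ciphertext plus a 16-byte tag written at out + len, and decrypt recomputing the tag with a branch-free byte comparison and writing the plaintext only when it matches. The caller below is a minimal sketch, not documented usage: it assumes the context length of 116 is a count of uint64_t words (consistent with the ctx[114U]/ctx[115U] indexing above), and all key, IV, AAD, and message values are placeholders.

#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#include "Hacl_AES_128_GCM_CT64.h"

int main(void)
{
  /* Placeholder inputs; a real caller needs a secret key and a unique IV per message. */
  uint8_t key[16U] = { 0U };
  uint8_t iv[12U] = { 0U };
  uint8_t aad[4U] = { 1U, 2U, 3U, 4U };
  uint8_t msg[32U] = "example plaintext";
  uint8_t ct[32U + 16U];           /* ciphertext followed by the 16-byte tag */
  uint8_t pt[32U] = { 0U };

  /* Assumed: Hacl_AES_128_GCM_CT64_aes_gcm_ctx_len (116) counts uint64_t words. */
  uint64_t ctx[116U] = { 0U };

  Hacl_AES_128_GCM_CT64_aes128_gcm_init(ctx, key);
  Hacl_AES_128_GCM_CT64_aes128_gcm_encrypt(ctx, 32U, ct, msg, 4U, aad, 12U, iv);

  /* Returns true only if the tag over AAD || ciphertext verifies; the plaintext
   * is written to pt only in that case. */
  bool ok = Hacl_AES_128_GCM_CT64_aes128_gcm_decrypt(ctx, 32U, pt, ct, 4U, aad, 12U, iv);
  return (ok && memcmp(pt, msg, 32U) == 0) ? 0 : 1;
}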