tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

Hacl_Bignum_Base.h (18728B)


      1 /* MIT License
      2 *
      3 * Copyright (c) 2016-2022 INRIA, CMU and Microsoft Corporation
      4 * Copyright (c) 2022-2023 HACL* Contributors
      5 *
      6 * Permission is hereby granted, free of charge, to any person obtaining a copy
      7 * of this software and associated documentation files (the "Software"), to deal
      8 * in the Software without restriction, including without limitation the rights
      9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
     10 * copies of the Software, and to permit persons to whom the Software is
     11 * furnished to do so, subject to the following conditions:
     12 *
     13 * The above copyright notice and this permission notice shall be included in all
     14 * copies or substantial portions of the Software.
     15 *
     16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
     17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
     18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
     19 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
     20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
     21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
     22 * SOFTWARE.
     23 */
     24 
     25 #ifndef __internal_Hacl_Bignum_Base_H
     26 #define __internal_Hacl_Bignum_Base_H
     27 
     28 #if defined(__cplusplus)
     29 extern "C" {
     30 #endif
     31 
     32 #include <string.h>
     33 #include "krml/internal/types.h"
     34 #include "krml/internal/builtin.h"
     35 #include "krml/lowstar_endianness.h"
     36 #include "krml/internal/target.h"
     37 
     38 #include "internal/Hacl_Krmllib.h"
     39 #include "Hacl_Krmllib.h"
     40 #include "lib_intrinsics.h"
     41 
     42 static inline uint32_t
     43 Hacl_Bignum_Base_mul_wide_add2_u32(uint32_t a, uint32_t b, uint32_t c_in, uint32_t *out)
     44 {
     45    uint32_t out0 = out[0U];
     46    uint64_t res = (uint64_t)a * (uint64_t)b + (uint64_t)c_in + (uint64_t)out0;
     47    out[0U] = (uint32_t)res;
     48    return (uint32_t)(res >> (uint32_t)32U);
     49 }
     50 
     51 static inline uint64_t
     52 Hacl_Bignum_Base_mul_wide_add2_u64(uint64_t a, uint64_t b, uint64_t c_in, uint64_t *out)
     53 {
     54    uint64_t out0 = out[0U];
     55    FStar_UInt128_uint128
     56        res =
     57            FStar_UInt128_add(FStar_UInt128_add(FStar_UInt128_mul_wide(a, b),
     58                                                FStar_UInt128_uint64_to_uint128(c_in)),
     59                              FStar_UInt128_uint64_to_uint128(out0));
     60    out[0U] = FStar_UInt128_uint128_to_uint64(res);
     61    return FStar_UInt128_uint128_to_uint64(FStar_UInt128_shift_right(res, (uint32_t)64U));
     62 }
     63 
/*
 * Deserialize a big-endian byte string into a bignum.
 *
 * len  number of input bytes; must be > 0 (len == 0 would underflow bnLen).
 * b    input, len bytes, most significant byte first.
 * res  output, ceil(len/8) 64-bit limbs, least significant limb first.
 *
 * The input is right-aligned into a zero-padded scratch buffer so a
 * partial leading limb is implicitly zero-extended.
 */
static inline void
Hacl_Bignum_Convert_bn_from_bytes_be_uint64(uint32_t len, uint8_t *b, uint64_t *res)
{
    /* bnLen = ceil(len / 8); tmpLen rounds len up to whole limbs. */
    uint32_t bnLen = (len - (uint32_t)1U) / (uint32_t)8U + (uint32_t)1U;
    uint32_t tmpLen = (uint32_t)8U * bnLen;
    KRML_CHECK_SIZE(sizeof(uint8_t), tmpLen);
    /* Stack scratch; size is validated by KRML_CHECK_SIZE above. */
    uint8_t *tmp = (uint8_t *)alloca(tmpLen * sizeof(uint8_t));
    memset(tmp, 0U, tmpLen * sizeof(uint8_t));
    /* Right-align the input: the leading (tmpLen - len) bytes stay zero. */
    memcpy(tmp + tmpLen - len, b, len * sizeof(uint8_t));
    for (uint32_t i = (uint32_t)0U; i < bnLen; i++) {
        uint64_t *os = res;
        /* Limb i (little-endian limb order) is the big-endian 64-bit word
           at byte offset (bnLen - i - 1) * 8 of the scratch buffer. */
        uint64_t u = load64_be(tmp + (bnLen - i - (uint32_t)1U) * (uint32_t)8U);
        uint64_t x = u;
        os[i] = x;
    }
}
     80 
/*
 * Serialize a bignum into a big-endian byte string.
 *
 * len  number of output bytes; must be > 0 (len == 0 would underflow bnLen).
 * b    input, ceil(len/8) 64-bit limbs, least significant limb first.
 * res  output, len bytes, most significant byte first.
 *
 * Limbs are written big-endian into a full-limb scratch buffer; the low
 * len bytes of that buffer are then copied out, dropping the padding of
 * the top partial limb.
 */
static inline void
Hacl_Bignum_Convert_bn_to_bytes_be_uint64(uint32_t len, uint64_t *b, uint8_t *res)
{
    /* bnLen = ceil(len / 8); tmpLen rounds len up to whole limbs. */
    uint32_t bnLen = (len - (uint32_t)1U) / (uint32_t)8U + (uint32_t)1U;
    uint32_t tmpLen = (uint32_t)8U * bnLen;
    KRML_CHECK_SIZE(sizeof(uint8_t), tmpLen);
    uint8_t *tmp = (uint8_t *)alloca(tmpLen * sizeof(uint8_t));
    memset(tmp, 0U, tmpLen * sizeof(uint8_t));
    /* Most significant limb first, each stored as 8 big-endian bytes. */
    for (uint32_t i = (uint32_t)0U; i < bnLen; i++) {
        store64_be(tmp + i * (uint32_t)8U, b[bnLen - i - (uint32_t)1U]);
    }
    /* Skip the (tmpLen - len) leading pad bytes of the top limb. */
    memcpy(res, tmp + tmpLen - len, len * sizeof(uint8_t));
}
     94 
     95 static inline uint32_t
     96 Hacl_Bignum_Lib_bn_get_top_index_u32(uint32_t len, uint32_t *b)
     97 {
     98    uint32_t priv = (uint32_t)0U;
     99    for (uint32_t i = (uint32_t)0U; i < len; i++) {
    100        uint32_t mask = FStar_UInt32_eq_mask(b[i], (uint32_t)0U);
    101        priv = (mask & priv) | (~mask & i);
    102    }
    103    return priv;
    104 }
    105 
    106 static inline uint64_t
    107 Hacl_Bignum_Lib_bn_get_top_index_u64(uint32_t len, uint64_t *b)
    108 {
    109    uint64_t priv = (uint64_t)0U;
    110    for (uint32_t i = (uint32_t)0U; i < len; i++) {
    111        uint64_t mask = FStar_UInt64_eq_mask(b[i], (uint64_t)0U);
    112        priv = (mask & priv) | (~mask & (uint64_t)i);
    113    }
    114    return priv;
    115 }
    116 
    117 static inline uint32_t
    118 Hacl_Bignum_Lib_bn_get_bits_u32(uint32_t len, uint32_t *b, uint32_t i, uint32_t l)
    119 {
    120    uint32_t i1 = i / (uint32_t)32U;
    121    uint32_t j = i % (uint32_t)32U;
    122    uint32_t p1 = b[i1] >> j;
    123    uint32_t ite;
    124    if (i1 + (uint32_t)1U < len && (uint32_t)0U < j) {
    125        ite = p1 | b[i1 + (uint32_t)1U] << ((uint32_t)32U - j);
    126    } else {
    127        ite = p1;
    128    }
    129    return ite & (((uint32_t)1U << l) - (uint32_t)1U);
    130 }
    131 
    132 static inline uint64_t
    133 Hacl_Bignum_Lib_bn_get_bits_u64(uint32_t len, uint64_t *b, uint32_t i, uint32_t l)
    134 {
    135    uint32_t i1 = i / (uint32_t)64U;
    136    uint32_t j = i % (uint32_t)64U;
    137    uint64_t p1 = b[i1] >> j;
    138    uint64_t ite;
    139    if (i1 + (uint32_t)1U < len && (uint32_t)0U < j) {
    140        ite = p1 | b[i1 + (uint32_t)1U] << ((uint32_t)64U - j);
    141    } else {
    142        ite = p1;
    143    }
    144    return ite & (((uint64_t)1U << l) - (uint64_t)1U);
    145 }
    146 
    147 static inline uint32_t
    148 Hacl_Bignum_Addition_bn_sub_eq_len_u32(uint32_t aLen, uint32_t *a, uint32_t *b, uint32_t *res)
    149 {
    150    uint32_t c = (uint32_t)0U;
    151    for (uint32_t i = (uint32_t)0U; i < aLen / (uint32_t)4U; i++) {
    152        uint32_t t1 = a[(uint32_t)4U * i];
    153        uint32_t t20 = b[(uint32_t)4U * i];
    154        uint32_t *res_i0 = res + (uint32_t)4U * i;
    155        c = Lib_IntTypes_Intrinsics_sub_borrow_u32(c, t1, t20, res_i0);
    156        uint32_t t10 = a[(uint32_t)4U * i + (uint32_t)1U];
    157        uint32_t t21 = b[(uint32_t)4U * i + (uint32_t)1U];
    158        uint32_t *res_i1 = res + (uint32_t)4U * i + (uint32_t)1U;
    159        c = Lib_IntTypes_Intrinsics_sub_borrow_u32(c, t10, t21, res_i1);
    160        uint32_t t11 = a[(uint32_t)4U * i + (uint32_t)2U];
    161        uint32_t t22 = b[(uint32_t)4U * i + (uint32_t)2U];
    162        uint32_t *res_i2 = res + (uint32_t)4U * i + (uint32_t)2U;
    163        c = Lib_IntTypes_Intrinsics_sub_borrow_u32(c, t11, t22, res_i2);
    164        uint32_t t12 = a[(uint32_t)4U * i + (uint32_t)3U];
    165        uint32_t t2 = b[(uint32_t)4U * i + (uint32_t)3U];
    166        uint32_t *res_i = res + (uint32_t)4U * i + (uint32_t)3U;
    167        c = Lib_IntTypes_Intrinsics_sub_borrow_u32(c, t12, t2, res_i);
    168    }
    169    for (uint32_t i = aLen / (uint32_t)4U * (uint32_t)4U; i < aLen; i++) {
    170        uint32_t t1 = a[i];
    171        uint32_t t2 = b[i];
    172        uint32_t *res_i = res + i;
    173        c = Lib_IntTypes_Intrinsics_sub_borrow_u32(c, t1, t2, res_i);
    174    }
    175    return c;
    176 }
    177 
    178 static inline uint64_t
    179 Hacl_Bignum_Addition_bn_sub_eq_len_u64(uint32_t aLen, uint64_t *a, uint64_t *b, uint64_t *res)
    180 {
    181    uint64_t c = (uint64_t)0U;
    182    for (uint32_t i = (uint32_t)0U; i < aLen / (uint32_t)4U; i++) {
    183        uint64_t t1 = a[(uint32_t)4U * i];
    184        uint64_t t20 = b[(uint32_t)4U * i];
    185        uint64_t *res_i0 = res + (uint32_t)4U * i;
    186        c = Lib_IntTypes_Intrinsics_sub_borrow_u64(c, t1, t20, res_i0);
    187        uint64_t t10 = a[(uint32_t)4U * i + (uint32_t)1U];
    188        uint64_t t21 = b[(uint32_t)4U * i + (uint32_t)1U];
    189        uint64_t *res_i1 = res + (uint32_t)4U * i + (uint32_t)1U;
    190        c = Lib_IntTypes_Intrinsics_sub_borrow_u64(c, t10, t21, res_i1);
    191        uint64_t t11 = a[(uint32_t)4U * i + (uint32_t)2U];
    192        uint64_t t22 = b[(uint32_t)4U * i + (uint32_t)2U];
    193        uint64_t *res_i2 = res + (uint32_t)4U * i + (uint32_t)2U;
    194        c = Lib_IntTypes_Intrinsics_sub_borrow_u64(c, t11, t22, res_i2);
    195        uint64_t t12 = a[(uint32_t)4U * i + (uint32_t)3U];
    196        uint64_t t2 = b[(uint32_t)4U * i + (uint32_t)3U];
    197        uint64_t *res_i = res + (uint32_t)4U * i + (uint32_t)3U;
    198        c = Lib_IntTypes_Intrinsics_sub_borrow_u64(c, t12, t2, res_i);
    199    }
    200    for (uint32_t i = aLen / (uint32_t)4U * (uint32_t)4U; i < aLen; i++) {
    201        uint64_t t1 = a[i];
    202        uint64_t t2 = b[i];
    203        uint64_t *res_i = res + i;
    204        c = Lib_IntTypes_Intrinsics_sub_borrow_u64(c, t1, t2, res_i);
    205    }
    206    return c;
    207 }
    208 
    209 static inline uint32_t
    210 Hacl_Bignum_Addition_bn_add_eq_len_u32(uint32_t aLen, uint32_t *a, uint32_t *b, uint32_t *res)
    211 {
    212    uint32_t c = (uint32_t)0U;
    213    for (uint32_t i = (uint32_t)0U; i < aLen / (uint32_t)4U; i++) {
    214        uint32_t t1 = a[(uint32_t)4U * i];
    215        uint32_t t20 = b[(uint32_t)4U * i];
    216        uint32_t *res_i0 = res + (uint32_t)4U * i;
    217        c = Lib_IntTypes_Intrinsics_add_carry_u32(c, t1, t20, res_i0);
    218        uint32_t t10 = a[(uint32_t)4U * i + (uint32_t)1U];
    219        uint32_t t21 = b[(uint32_t)4U * i + (uint32_t)1U];
    220        uint32_t *res_i1 = res + (uint32_t)4U * i + (uint32_t)1U;
    221        c = Lib_IntTypes_Intrinsics_add_carry_u32(c, t10, t21, res_i1);
    222        uint32_t t11 = a[(uint32_t)4U * i + (uint32_t)2U];
    223        uint32_t t22 = b[(uint32_t)4U * i + (uint32_t)2U];
    224        uint32_t *res_i2 = res + (uint32_t)4U * i + (uint32_t)2U;
    225        c = Lib_IntTypes_Intrinsics_add_carry_u32(c, t11, t22, res_i2);
    226        uint32_t t12 = a[(uint32_t)4U * i + (uint32_t)3U];
    227        uint32_t t2 = b[(uint32_t)4U * i + (uint32_t)3U];
    228        uint32_t *res_i = res + (uint32_t)4U * i + (uint32_t)3U;
    229        c = Lib_IntTypes_Intrinsics_add_carry_u32(c, t12, t2, res_i);
    230    }
    231    for (uint32_t i = aLen / (uint32_t)4U * (uint32_t)4U; i < aLen; i++) {
    232        uint32_t t1 = a[i];
    233        uint32_t t2 = b[i];
    234        uint32_t *res_i = res + i;
    235        c = Lib_IntTypes_Intrinsics_add_carry_u32(c, t1, t2, res_i);
    236    }
    237    return c;
    238 }
    239 
    240 static inline uint64_t
    241 Hacl_Bignum_Addition_bn_add_eq_len_u64(uint32_t aLen, uint64_t *a, uint64_t *b, uint64_t *res)
    242 {
    243    uint64_t c = (uint64_t)0U;
    244    for (uint32_t i = (uint32_t)0U; i < aLen / (uint32_t)4U; i++) {
    245        uint64_t t1 = a[(uint32_t)4U * i];
    246        uint64_t t20 = b[(uint32_t)4U * i];
    247        uint64_t *res_i0 = res + (uint32_t)4U * i;
    248        c = Lib_IntTypes_Intrinsics_add_carry_u64(c, t1, t20, res_i0);
    249        uint64_t t10 = a[(uint32_t)4U * i + (uint32_t)1U];
    250        uint64_t t21 = b[(uint32_t)4U * i + (uint32_t)1U];
    251        uint64_t *res_i1 = res + (uint32_t)4U * i + (uint32_t)1U;
    252        c = Lib_IntTypes_Intrinsics_add_carry_u64(c, t10, t21, res_i1);
    253        uint64_t t11 = a[(uint32_t)4U * i + (uint32_t)2U];
    254        uint64_t t22 = b[(uint32_t)4U * i + (uint32_t)2U];
    255        uint64_t *res_i2 = res + (uint32_t)4U * i + (uint32_t)2U;
    256        c = Lib_IntTypes_Intrinsics_add_carry_u64(c, t11, t22, res_i2);
    257        uint64_t t12 = a[(uint32_t)4U * i + (uint32_t)3U];
    258        uint64_t t2 = b[(uint32_t)4U * i + (uint32_t)3U];
    259        uint64_t *res_i = res + (uint32_t)4U * i + (uint32_t)3U;
    260        c = Lib_IntTypes_Intrinsics_add_carry_u64(c, t12, t2, res_i);
    261    }
    262    for (uint32_t i = aLen / (uint32_t)4U * (uint32_t)4U; i < aLen; i++) {
    263        uint64_t t1 = a[i];
    264        uint64_t t2 = b[i];
    265        uint64_t *res_i = res + i;
    266        c = Lib_IntTypes_Intrinsics_add_carry_u64(c, t1, t2, res_i);
    267    }
    268    return c;
    269 }
    270 
    271 static inline void
    272 Hacl_Bignum_Multiplication_bn_mul_u32(
    273    uint32_t aLen,
    274    uint32_t *a,
    275    uint32_t bLen,
    276    uint32_t *b,
    277    uint32_t *res)
    278 {
    279    memset(res, 0U, (aLen + bLen) * sizeof(uint32_t));
    280    for (uint32_t i0 = (uint32_t)0U; i0 < bLen; i0++) {
    281        uint32_t bj = b[i0];
    282        uint32_t *res_j = res + i0;
    283        uint32_t c = (uint32_t)0U;
    284        for (uint32_t i = (uint32_t)0U; i < aLen / (uint32_t)4U; i++) {
    285            uint32_t a_i = a[(uint32_t)4U * i];
    286            uint32_t *res_i0 = res_j + (uint32_t)4U * i;
    287            c = Hacl_Bignum_Base_mul_wide_add2_u32(a_i, bj, c, res_i0);
    288            uint32_t a_i0 = a[(uint32_t)4U * i + (uint32_t)1U];
    289            uint32_t *res_i1 = res_j + (uint32_t)4U * i + (uint32_t)1U;
    290            c = Hacl_Bignum_Base_mul_wide_add2_u32(a_i0, bj, c, res_i1);
    291            uint32_t a_i1 = a[(uint32_t)4U * i + (uint32_t)2U];
    292            uint32_t *res_i2 = res_j + (uint32_t)4U * i + (uint32_t)2U;
    293            c = Hacl_Bignum_Base_mul_wide_add2_u32(a_i1, bj, c, res_i2);
    294            uint32_t a_i2 = a[(uint32_t)4U * i + (uint32_t)3U];
    295            uint32_t *res_i = res_j + (uint32_t)4U * i + (uint32_t)3U;
    296            c = Hacl_Bignum_Base_mul_wide_add2_u32(a_i2, bj, c, res_i);
    297        }
    298        for (uint32_t i = aLen / (uint32_t)4U * (uint32_t)4U; i < aLen; i++) {
    299            uint32_t a_i = a[i];
    300            uint32_t *res_i = res_j + i;
    301            c = Hacl_Bignum_Base_mul_wide_add2_u32(a_i, bj, c, res_i);
    302        }
    303        uint32_t r = c;
    304        res[aLen + i0] = r;
    305    }
    306 }
    307 
    308 static inline void
    309 Hacl_Bignum_Multiplication_bn_mul_u64(
    310    uint32_t aLen,
    311    uint64_t *a,
    312    uint32_t bLen,
    313    uint64_t *b,
    314    uint64_t *res)
    315 {
    316    memset(res, 0U, (aLen + bLen) * sizeof(uint64_t));
    317    for (uint32_t i0 = (uint32_t)0U; i0 < bLen; i0++) {
    318        uint64_t bj = b[i0];
    319        uint64_t *res_j = res + i0;
    320        uint64_t c = (uint64_t)0U;
    321        for (uint32_t i = (uint32_t)0U; i < aLen / (uint32_t)4U; i++) {
    322            uint64_t a_i = a[(uint32_t)4U * i];
    323            uint64_t *res_i0 = res_j + (uint32_t)4U * i;
    324            c = Hacl_Bignum_Base_mul_wide_add2_u64(a_i, bj, c, res_i0);
    325            uint64_t a_i0 = a[(uint32_t)4U * i + (uint32_t)1U];
    326            uint64_t *res_i1 = res_j + (uint32_t)4U * i + (uint32_t)1U;
    327            c = Hacl_Bignum_Base_mul_wide_add2_u64(a_i0, bj, c, res_i1);
    328            uint64_t a_i1 = a[(uint32_t)4U * i + (uint32_t)2U];
    329            uint64_t *res_i2 = res_j + (uint32_t)4U * i + (uint32_t)2U;
    330            c = Hacl_Bignum_Base_mul_wide_add2_u64(a_i1, bj, c, res_i2);
    331            uint64_t a_i2 = a[(uint32_t)4U * i + (uint32_t)3U];
    332            uint64_t *res_i = res_j + (uint32_t)4U * i + (uint32_t)3U;
    333            c = Hacl_Bignum_Base_mul_wide_add2_u64(a_i2, bj, c, res_i);
    334        }
    335        for (uint32_t i = aLen / (uint32_t)4U * (uint32_t)4U; i < aLen; i++) {
    336            uint64_t a_i = a[i];
    337            uint64_t *res_i = res_j + i;
    338            c = Hacl_Bignum_Base_mul_wide_add2_u64(a_i, bj, c, res_i);
    339        }
    340        uint64_t r = c;
    341        res[aLen + i0] = r;
    342    }
    343 }
    344 
/*
 * Squaring: res <- a * a, where a has aLen limbs and res has 2*aLen limbs.
 * Uses the standard squaring optimization: accumulate the off-diagonal
 * products a[i] * a[i0] for i < i0, double the whole result with one
 * full-width addition, then add the diagonal squares a[i]^2.
 */
static inline void
Hacl_Bignum_Multiplication_bn_sqr_u32(uint32_t aLen, uint32_t *a, uint32_t *res)
{
    memset(res, 0U, (aLen + aLen) * sizeof(uint32_t));
    /* Off-diagonal pass: column i0 adds a[0..i0-1] * a[i0] into res + i0. */
    for (uint32_t i0 = (uint32_t)0U; i0 < aLen; i0++) {
        uint32_t *ab = a;
        uint32_t a_j = a[i0];
        uint32_t *res_j = res + i0;
        uint32_t c = (uint32_t)0U;
        /* Inner loop unrolled 4x over the first i0 / 4 * 4 limbs. */
        for (uint32_t i = (uint32_t)0U; i < i0 / (uint32_t)4U; i++) {
            uint32_t a_i = ab[(uint32_t)4U * i];
            uint32_t *res_i0 = res_j + (uint32_t)4U * i;
            c = Hacl_Bignum_Base_mul_wide_add2_u32(a_i, a_j, c, res_i0);
            uint32_t a_i0 = ab[(uint32_t)4U * i + (uint32_t)1U];
            uint32_t *res_i1 = res_j + (uint32_t)4U * i + (uint32_t)1U;
            c = Hacl_Bignum_Base_mul_wide_add2_u32(a_i0, a_j, c, res_i1);
            uint32_t a_i1 = ab[(uint32_t)4U * i + (uint32_t)2U];
            uint32_t *res_i2 = res_j + (uint32_t)4U * i + (uint32_t)2U;
            c = Hacl_Bignum_Base_mul_wide_add2_u32(a_i1, a_j, c, res_i2);
            uint32_t a_i2 = ab[(uint32_t)4U * i + (uint32_t)3U];
            uint32_t *res_i = res_j + (uint32_t)4U * i + (uint32_t)3U;
            c = Hacl_Bignum_Base_mul_wide_add2_u32(a_i2, a_j, c, res_i);
        }
        /* Tail: remaining i0 % 4 limbs. */
        for (uint32_t i = i0 / (uint32_t)4U * (uint32_t)4U; i < i0; i++) {
            uint32_t a_i = ab[i];
            uint32_t *res_i = res_j + i;
            c = Hacl_Bignum_Base_mul_wide_add2_u32(a_i, a_j, c, res_i);
        }
        uint32_t r = c;
        /* Carry out of column i0 is stored at limb 2*i0. */
        res[i0 + i0] = r;
    }
    /* Double the off-diagonal sum: res <- res + res. The outgoing carry is
       discarded — presumably zero by construction (verified upstream). */
    uint32_t c0 = Hacl_Bignum_Addition_bn_add_eq_len_u32(aLen + aLen, res, res, res);
    KRML_HOST_IGNORE(c0);
    KRML_CHECK_SIZE(sizeof(uint32_t), aLen + aLen);
    uint32_t *tmp = (uint32_t *)alloca((aLen + aLen) * sizeof(uint32_t));
    memset(tmp, 0U, (aLen + aLen) * sizeof(uint32_t));
    /* Diagonal pass: tmp[2i], tmp[2i+1] hold the 64-bit square a[i]^2. */
    for (uint32_t i = (uint32_t)0U; i < aLen; i++) {
        uint64_t res1 = (uint64_t)a[i] * (uint64_t)a[i];
        uint32_t hi = (uint32_t)(res1 >> (uint32_t)32U);
        uint32_t lo = (uint32_t)res1;
        tmp[(uint32_t)2U * i] = lo;
        tmp[(uint32_t)2U * i + (uint32_t)1U] = hi;
    }
    /* res <- res + tmp; the outgoing carry is likewise discarded. */
    uint32_t c1 = Hacl_Bignum_Addition_bn_add_eq_len_u32(aLen + aLen, res, tmp, res);
    KRML_HOST_IGNORE(c1);
}
    391 
/*
 * Squaring: res <- a * a, where a has aLen limbs and res has 2*aLen limbs.
 * Uses the standard squaring optimization: accumulate the off-diagonal
 * products a[i] * a[i0] for i < i0, double the whole result with one
 * full-width addition, then add the diagonal squares a[i]^2.
 */
static inline void
Hacl_Bignum_Multiplication_bn_sqr_u64(uint32_t aLen, uint64_t *a, uint64_t *res)
{
    memset(res, 0U, (aLen + aLen) * sizeof(uint64_t));
    /* Off-diagonal pass: column i0 adds a[0..i0-1] * a[i0] into res + i0. */
    for (uint32_t i0 = (uint32_t)0U; i0 < aLen; i0++) {
        uint64_t *ab = a;
        uint64_t a_j = a[i0];
        uint64_t *res_j = res + i0;
        uint64_t c = (uint64_t)0U;
        /* Inner loop unrolled 4x over the first i0 / 4 * 4 limbs. */
        for (uint32_t i = (uint32_t)0U; i < i0 / (uint32_t)4U; i++) {
            uint64_t a_i = ab[(uint32_t)4U * i];
            uint64_t *res_i0 = res_j + (uint32_t)4U * i;
            c = Hacl_Bignum_Base_mul_wide_add2_u64(a_i, a_j, c, res_i0);
            uint64_t a_i0 = ab[(uint32_t)4U * i + (uint32_t)1U];
            uint64_t *res_i1 = res_j + (uint32_t)4U * i + (uint32_t)1U;
            c = Hacl_Bignum_Base_mul_wide_add2_u64(a_i0, a_j, c, res_i1);
            uint64_t a_i1 = ab[(uint32_t)4U * i + (uint32_t)2U];
            uint64_t *res_i2 = res_j + (uint32_t)4U * i + (uint32_t)2U;
            c = Hacl_Bignum_Base_mul_wide_add2_u64(a_i1, a_j, c, res_i2);
            uint64_t a_i2 = ab[(uint32_t)4U * i + (uint32_t)3U];
            uint64_t *res_i = res_j + (uint32_t)4U * i + (uint32_t)3U;
            c = Hacl_Bignum_Base_mul_wide_add2_u64(a_i2, a_j, c, res_i);
        }
        /* Tail: remaining i0 % 4 limbs. */
        for (uint32_t i = i0 / (uint32_t)4U * (uint32_t)4U; i < i0; i++) {
            uint64_t a_i = ab[i];
            uint64_t *res_i = res_j + i;
            c = Hacl_Bignum_Base_mul_wide_add2_u64(a_i, a_j, c, res_i);
        }
        uint64_t r = c;
        /* Carry out of column i0 is stored at limb 2*i0. */
        res[i0 + i0] = r;
    }
    /* Double the off-diagonal sum: res <- res + res. The outgoing carry is
       discarded — presumably zero by construction (verified upstream). */
    uint64_t c0 = Hacl_Bignum_Addition_bn_add_eq_len_u64(aLen + aLen, res, res, res);
    KRML_HOST_IGNORE(c0);
    KRML_CHECK_SIZE(sizeof(uint64_t), aLen + aLen);
    uint64_t *tmp = (uint64_t *)alloca((aLen + aLen) * sizeof(uint64_t));
    memset(tmp, 0U, (aLen + aLen) * sizeof(uint64_t));
    /* Diagonal pass: tmp[2i], tmp[2i+1] hold the 128-bit square a[i]^2. */
    for (uint32_t i = (uint32_t)0U; i < aLen; i++) {
        FStar_UInt128_uint128 res1 = FStar_UInt128_mul_wide(a[i], a[i]);
        uint64_t hi = FStar_UInt128_uint128_to_uint64(FStar_UInt128_shift_right(res1, (uint32_t)64U));
        uint64_t lo = FStar_UInt128_uint128_to_uint64(res1);
        tmp[(uint32_t)2U * i] = lo;
        tmp[(uint32_t)2U * i + (uint32_t)1U] = hi;
    }
    /* res <- res + tmp; the outgoing carry is likewise discarded. */
    uint64_t c1 = Hacl_Bignum_Addition_bn_add_eq_len_u64(aLen + aLen, res, tmp, res);
    KRML_HOST_IGNORE(c1);
}
    438 
    439 #if defined(__cplusplus)
    440 }
    441 #endif
    442 
    443 #define __internal_Hacl_Bignum_Base_H_DEFINED
    444 #endif