/*
 * Copyright (c) 2022-2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <stddef.h>
#include <hvb_sysdeps.h>
#include "hvb_crypto.h"
#include "hvb_hash_sha256.h"

#ifndef htobe32
#define htobe32(value) \
    ((((value)&0x000000FF) << 24) | (((value)&0x0000FF00) << 8) | (((value)&0x00FF0000) >> 8) | \
     (((value)&0xFF000000) >> 24))
#endif

#define word2byte(w) ((w) * sizeof(uint32_t))
#define PAD_BLK_WORD_SIZE_SHA256 (BLK_WORD_SIZE_SHA256 * 2)
#define PAD_BLK_BYTE_SIZE_SHA256 word2byte(PAD_BLK_WORD_SIZE_SHA256)
#define PAD_INFO_BYTE_LEN_SHA256 8

#define shr(x, n) (((uint32_t)(x)) >> (n))
#define rotr(x, n) (shr(x, n) | (((uint32_t)(x)) << (32 - (n))))

#define sigma_0(x) (rotr(x, 2) ^ rotr(x, 13) ^ rotr(x, 22))
#define sigma_1(x) (rotr(x, 6) ^ rotr(x, 11) ^ rotr(x, 25))
#define sigma_2(x) (rotr(x, 7) ^ rotr(x, 18) ^ shr(x, 3))
#define sigma_3(x) (rotr(x, 17) ^ rotr(x, 19) ^ shr(x, 10))

#define maj(x, y, z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
#define ch(x, y, z) (((x) & (y)) ^ ((~(x)) & (z)))

/* SHA-256 round constants K[0..63] */
static const uint32_t const_key[] = {
    0x428A2F98, 0x71374491, 0xB5C0FBCF, 0xE9B5DBA5, 0x3956C25B, 0x59F111F1, 0x923F82A4, 0xAB1C5ED5,
    0xD807AA98, 0x12835B01, 0x243185BE, 0x550C7DC3, 0x72BE5D74, 0x80DEB1FE, 0x9BDC06A7, 0xC19BF174,
    0xE49B69C1, 0xEFBE4786, 0x0FC19DC6, 0x240CA1CC, 0x2DE92C6F, 0x4A7484AA, 0x5CB0A9DC, 0x76F988DA,
    0x983E5152, 0xA831C66D, 0xB00327C8, 0xBF597FC7, 0xC6E00BF3, 0xD5A79147, 0x06CA6351, 0x14292967,
    0x27B70A85, 0x2E1B2138, 0x4D2C6DFC, 0x53380D13, 0x650A7354, 0x766A0ABB, 0x81C2C92E, 0x92722C85,
    0xA2BFE8A1, 0xA81A664B, 0xC24B8B70, 0xC76C51A3, 0xD192E819, 0xD6990624, 0xF40E3585, 0x106AA070,
    0x19A4C116, 0x1E376C08, 0x2748774C, 0x34B0BCB5, 0x391C0CB3, 0x4ED8AA4A, 0x5B9CCA4F, 0x682E6FF3,
    0x748F82EE, 0x78A5636F, 0x84C87814, 0x8CC70208, 0x90BEFFFA, 0xA4506CEB, 0xBEF9A3F7, 0xC67178F2,
};

/* SHA-256 initial hash value H0..H7 */
static uint32_t sha256_iv_init[IV_WORD_SIZE_SHA256] = {
    0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
};

static inline uint32_t bigend_read_word(const uint8_t *data)
{
    uint32_t res;

    res = data[0];
    res = (res << 8) | data[1];
    res = (res << 8) | data[2];
    res = (res << 8) | data[3];

    return res;
}

static inline uint32_t w_schedule(uint32_t w[64], uint32_t t)
{
    return sigma_3(w[t - 2]) + w[t - 7] + sigma_2(w[t - 15]) + w[t - 16];
}

static inline void rotate_regs(uint32_t regs[8])
{
    uint32_t backup;

    backup = regs[6];
    regs[6] = regs[5];
    regs[5] = regs[4];
    regs[4] = regs[3];
    regs[3] = regs[2];
    regs[2] = regs[1];
    regs[1] = regs[0];
    regs[0] = regs[7];
    regs[7] = backup;
}

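/*
 * One SHA-256 compression round per loop iteration, using a rotating register
 * file: regs[0..7] hold the working variables a..h at the start of every
 * round. Each round writes only two registers (d += t1, and the freshly
 * computed 'a' is parked in regs[7]); rotate_regs() then shifts the values by
 * one slot so the next round sees them in a..h order again. Because 64 rounds
 * are a multiple of 8, the registers finish realigned with a..h and the
 * caller can add them directly onto the chaining value.
 */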
static void sha256_block_calc(uint32_t regs[8], const uint8_t *data)
{
    uint32_t t1;
    uint32_t w[64];
    uint32_t t;

    for (t = 0; t < 64; t++, data += 4) {
        w[t] = t < 16 ? bigend_read_word(data) : w_schedule(w, t);
        t1 = regs[7] + sigma_1(regs[4]) + ch(regs[4], regs[5], regs[6]) + const_key[t] + w[t];
        regs[3] += t1;
        regs[7] = sigma_0(regs[0]) + maj(regs[0], regs[1], regs[2]) + t1;

        rotate_regs(regs);
    }
}

int sha256_data_blk_update(uint32_t *iv, const void *msg, uint64_t len);

__attribute__((weak)) int sha256_data_blk_update(uint32_t *iv, const void *msg, uint64_t len)
{
    uint32_t regs[8];
    const uint8_t *pdata = msg;
    uint64_t i;
    uint32_t j;

    for (i = 0; i < len / 64; i++, pdata += 64) {
        for (j = 0; j < 8; j++) {
            regs[j] = iv[j];
        }

        sha256_block_calc(regs, pdata);

        for (j = 0; j < 8; j++) {
            iv[j] += regs[j];
        }
    }
    return 0;
}

static void hash_sha256_pad_update(uint32_t *iv, const void *left_msg, uint64_t left_len, uint64_t total_bit_len)
{
    uint32_t pad_word_len;
    uint32_t sha256_pad[PAD_BLK_WORD_SIZE_SHA256];
    uint8_t *pad_ptr = NULL;
    uint32_t fill_zero_len;

    if (left_len != 0) {
        if (hvb_memcpy_s(sha256_pad, sizeof(sha256_pad), left_msg, (uint32_t)left_len) != 0) {
            hvb_print("error, memcpy_s fail.\n");
            return;
        }
    }

    pad_ptr = (uint8_t *)sha256_pad;
    pad_ptr[left_len] = 0x80; // padding 0x80
    left_len++;

    if (left_len + PAD_INFO_BYTE_LEN_SHA256 <= BLK_BYTE_SIZE_SHA256) {
        pad_word_len = BLK_WORD_SIZE_SHA256;
    } else {
        pad_word_len = PAD_BLK_WORD_SIZE_SHA256;
    }

    fill_zero_len = word2byte(pad_word_len) - (uint32_t)left_len - PAD_INFO_BYTE_LEN_SHA256;
    if (hvb_memset_s(pad_ptr + left_len, sizeof(sha256_pad) - left_len, 0, fill_zero_len) != 0) {
        hvb_print("error, memset_s fail.\n");
        return;
    }

    sha256_pad[pad_word_len - 1] = htobe32((uint32_t)total_bit_len);
    total_bit_len = total_bit_len >> 32;
    sha256_pad[pad_word_len - 2] = htobe32((uint32_t)total_bit_len);

    sha256_data_blk_update(iv, sha256_pad, word2byte(pad_word_len));
}

static int hash_sha256_output_iv(uint32_t *iv, uint8_t *out, uint32_t out_len)
{
    if (out == NULL) {
        return HASH_ERR_PARAM_NULL;
    }

    if (out_len < IV_BYTE_SIZE_SHA256) {
        return HASH_ERR_OUTBUF_NO_ENOUGH;
    }

    for (int i = 0; i < IV_WORD_SIZE_SHA256; i++) {
        iv[i] = htobe32(iv[i]);
    }

    if (hvb_memcpy_s(out, out_len, iv, IV_BYTE_SIZE_SHA256) != 0) {
        return HASH_ERR_MEMORY;
    }

    return HASH_OK;
}

int hash_sha256_single(const void *msg, uint32_t msg_len, uint8_t *out, uint32_t out_len)
{
    uint64_t data_size;
    uint64_t total_bit_len;
    uint32_t iv[IV_WORD_SIZE_SHA256];

    if (msg == NULL || out == NULL) {
        return HASH_ERR_PARAM_NULL;
    }

    total_bit_len = (uint64_t)msg_len * 8; // 8 bits per byte
    if (total_bit_len < msg_len) {
        return HASH_ERR_TOTAL_LEN;
    }

    if (hvb_memcpy_s(iv, sizeof(iv), sha256_iv_init, sizeof(sha256_iv_init)) != 0) {
        return HASH_ERR_MEMORY;
    }

    data_size = (msg_len / BLK_BYTE_SIZE_SHA256) * BLK_BYTE_SIZE_SHA256;

    if (data_size > 0) {
        sha256_data_blk_update(iv, msg, data_size);
    }

    hash_sha256_pad_update(iv, (const uint8_t *)msg + data_size, msg_len - data_size, total_bit_len);

    return hash_sha256_output_iv(iv, out, out_len);
}

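/*
 * Illustrative one-shot usage (a sketch, not compiled into the library:
 * example_sha256_single() is a hypothetical helper shown only to document the
 * calling convention of hash_sha256_single()).
 */
#if 0
static int example_sha256_single(void)
{
    static const char msg[] = "abc";
    uint8_t digest[IV_BYTE_SIZE_SHA256]; /* 32-byte SHA-256 digest */

    /* Hash the 3-byte message "abc" in a single call. */
    return hash_sha256_single(msg, (uint32_t)(sizeof(msg) - 1), digest, (uint32_t)sizeof(digest));
}
#endif
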
static uint32_t hash_alg_get_blklen(enum hash_alg_type alg_type)
{
    switch (alg_type) {
    case HASH_ALG_SHA256:
        return BLK_BYTE_SIZE_SHA256;
    default:
        return 0;
    }
}

int hash_ctx_init(struct hash_ctx_t *hash_ctx, enum hash_alg_type alg_type)
{
    if (alg_type != HASH_ALG_SHA256) {
        return HASH_ERR_ALG_NO_SUPPORT;
    }

    if (hash_ctx == NULL) {
        return HASH_ERR_PARAM_NULL;
    }

    hash_ctx->alg_type = (uint32_t)alg_type;
    hash_ctx->buf_len = 0;
    hash_ctx->total_len = 0;

    if (hvb_memcpy_s(hash_ctx->iv, IV_BYTE_SIZE_SHA256, sha256_iv_init, sizeof(sha256_iv_init)) != 0) {
        return HASH_ERR_MEMORY;
    }

    return HASH_OK;
}

int hash_calc_update(struct hash_ctx_t *hash_ctx, const void *msg, uint32_t msg_len)
{
    uint32_t left_len;
    uint32_t blk_len;
    uint32_t calc_len;

    if (msg_len == 0) {
        return HASH_OK;
    }

    if (hash_ctx == NULL || msg == NULL) {
        return HASH_ERR_PARAM_NULL;
    }

    blk_len = hash_alg_get_blklen(hash_ctx->alg_type);
    if (blk_len == 0) {
        return HASH_ERR_ALG_NO_SUPPORT;
    }

    if (hash_ctx->buf_len >= blk_len) {
        return HASH_ERR_BUF_LEN;
    }

    hash_ctx->total_len = hash_ctx->total_len + msg_len;
    if (hash_ctx->total_len < msg_len) {
        return HASH_ERR_TOTAL_LEN;
    }

    left_len = blk_len - hash_ctx->buf_len;

    if (hash_ctx->buf_len != 0 && msg_len >= left_len) {
        if (hvb_memcpy_s(hash_ctx->blk_buf + hash_ctx->buf_len, left_len, msg, left_len) != 0) {
            return HASH_ERR_MEMORY;
        }
        (void)sha256_data_blk_update(hash_ctx->iv, hash_ctx->blk_buf, blk_len);

        hash_ctx->buf_len = 0;

        msg_len = msg_len - left_len;
        msg = (const uint8_t *)msg + left_len;
    }

    if (msg_len >= blk_len) {
        calc_len = msg_len / blk_len * blk_len;
        sha256_data_blk_update(hash_ctx->iv, msg, calc_len);

        msg_len = msg_len - calc_len;
        msg = (const uint8_t *)msg + calc_len;
    }

    if (msg_len != 0) {
        if (hvb_memcpy_s(hash_ctx->blk_buf + hash_ctx->buf_len, blk_len - hash_ctx->buf_len, msg, msg_len) != 0) {
            return HASH_ERR_MEMORY;
        }
        hash_ctx->buf_len = hash_ctx->buf_len + msg_len;
    }

    return HASH_OK;
}

int hash_calc_do_final(struct hash_ctx_t *hash_ctx, const void *msg, uint32_t msg_len, uint8_t *out, uint32_t out_len)
{
    uint64_t total_bit_len;
    int ret;

    if (hash_ctx == NULL) {
        return HASH_ERR_PARAM_NULL;
    }

    ret = hash_calc_update(hash_ctx, msg, msg_len);
    if (ret != HASH_OK) {
        return ret;
    }

    total_bit_len = hash_ctx->total_len * 8; // 8 bits per byte
    if (total_bit_len < hash_ctx->total_len) { // reject only on 64-bit overflow of the bit length
        return HASH_ERR_TOTAL_LEN;
    }

    hash_sha256_pad_update(hash_ctx->iv, hash_ctx->blk_buf, hash_ctx->buf_len, total_bit_len);

    return hash_sha256_output_iv(hash_ctx->iv, out, out_len);
}
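
/*
 * Illustrative incremental usage (a sketch, not compiled into the library:
 * example_sha256_stream() is a hypothetical helper shown only to document the
 * init/update/do_final calling sequence; digest_len must be at least
 * IV_BYTE_SIZE_SHA256).
 */
#if 0
static int example_sha256_stream(const uint8_t *part1, uint32_t len1,
                                 const uint8_t *part2, uint32_t len2,
                                 uint8_t *digest, uint32_t digest_len)
{
    struct hash_ctx_t ctx;
    int ret;

    ret = hash_ctx_init(&ctx, HASH_ALG_SHA256);
    if (ret != HASH_OK) {
        return ret;
    }

    /* Feed the message in pieces; partial blocks are buffered in the context. */
    ret = hash_calc_update(&ctx, part1, len1);
    if (ret != HASH_OK) {
        return ret;
    }

    /* The last piece goes to do_final, which also appends the SHA-256 padding. */
    return hash_calc_do_final(&ctx, part2, len2, digest, digest_len);
}
#endif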