drv_crypto.c 17 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657
/*
 * Copyright (C) 2022-2024, Xiaohua Semiconductor Co., Ltd.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2023-02-10     CDT          first version
 * 2024-06-11     CDT          Fix compiler warning
 * 2025-07-29     CDT          Support HC32F334
 */
  12. #include "board.h"
  13. #if defined(BSP_USING_HWCRYPTO)
  14. // #define DRV_DEBUG
  15. #define LOG_TAG "drv_crypto"
  16. #include <drv_log.h>
/*
 * Driver object: the RT-Thread hwcrypto device plus a mutex that
 * serializes all access to the shared CRC/TRNG/HASH/AES peripherals.
 * One static instance is created in rt_hw_crypto_device_init().
 */
struct hc32_hwcrypto_device
{
    struct rt_hwcrypto_device dev;  /* base device registered with the hwcrypto framework */
    struct rt_mutex mutex;          /* guards the crypto hardware across contexts/threads */
};
#if defined(BSP_USING_CRC)
/* The CRC peripheral implements exactly these two fixed polynomials. */
#define DEFAULT_CRC16_CCITT_POLY (0x1021)     /*!< X^16 + X^12 + X^5 + 1 */
#define DEFAULT_CRC32_POLY (0x04C11DB7)       /*!< X^32 + X^26 + X^23 + X^22 + X^16 + X^12 + X^11 + X^10 +X^8 + X^7 + X^5 + X^4 + X^2+ X + 1 */
/* Last CRC configuration written to the hardware; _crc_update() re-runs
 * CRC_Init() only when the caller's config differs from this backup. */
static struct hwcrypto_crc_cfg crc_cfgbk = {0};
/**
 * hwcrypto CRC update callback.
 *
 * (Re)configures the CRC unit when ctx->crc_cfg changed since the last
 * call, then accumulates @in over @length bytes (byte-wide feed).
 * Only poly 0x1021 (CRC-16/CCITT) and 0x04C11DB7 (CRC-32) are accepted
 * because the hardware polynomials are fixed.
 *
 * @param ctx    CRC context (cfg taken from ctx->crc_cfg).
 * @param in     data to accumulate.
 * @param length number of bytes in @in.
 * @return accumulated CRC value; 0 on configuration error (note 0 can
 *         also be a legitimate CRC result).
 */
static rt_uint32_t _crc_update(struct hwcrypto_crc *ctx, const rt_uint8_t *in, rt_size_t length)
{
    rt_uint32_t result = 0;
    /* NOTE(review): stcCrcInit is not pre-filled with CRC_StructInit();
     * on parts other than HC32F460 only Protocol and InitValue are set —
     * presumably the other fields do not exist on those parts; confirm
     * against the DDL header. */
    stc_crc_init_t stcCrcInit;
    struct hc32_hwcrypto_device *hc32_hw_dev = (struct hc32_hwcrypto_device *)ctx->parent.device->user_data;
    rt_mutex_take(&hc32_hw_dev->mutex, RT_WAITING_FOREVER);
    if (ctx->crc_cfg.poly != DEFAULT_CRC16_CCITT_POLY && ctx->crc_cfg.poly != DEFAULT_CRC32_POLY)
    {
        LOG_E("CRC polynomial only support 0x1021/0x04C11DB7U.");
        goto _exit;
    }
    /* if crc_cfg change we need init crc again */
    if (rt_memcmp(&crc_cfgbk, &ctx->crc_cfg, sizeof(struct hwcrypto_crc_cfg)))
    {
#if defined(HC32F460)
        /* Only HC32F460 exposes REFIN/REFOUT/XOROUT controls. */
        switch (ctx->crc_cfg.flags)
        {
        case 0:
            stcCrcInit.u32RefIn = CRC_REFIN_DISABLE;
            stcCrcInit.u32RefOut = CRC_REFOUT_DISABLE;
            break;
        case CRC_FLAG_REFIN:
            stcCrcInit.u32RefIn = CRC_REFIN_ENABLE;
            stcCrcInit.u32RefOut = CRC_REFOUT_DISABLE;
            break;
        case CRC_FLAG_REFOUT:
            stcCrcInit.u32RefIn = CRC_REFIN_DISABLE;
            stcCrcInit.u32RefOut = CRC_REFOUT_ENABLE;
            break;
        case CRC_FLAG_REFIN | CRC_FLAG_REFOUT:
            stcCrcInit.u32RefIn = CRC_REFIN_ENABLE;
            stcCrcInit.u32RefOut = CRC_REFOUT_ENABLE;
            break;
        default :
            LOG_E("crc flag parameter error.");
            goto _exit;
        }
        if (ctx->crc_cfg.xorout)
        {
            stcCrcInit.u32XorOut = CRC_XOROUT_ENABLE;
        }
        else
        {
            stcCrcInit.u32XorOut = CRC_XOROUT_DISABLE;
        }
#endif
        switch (ctx->crc_cfg.width)
        {
        case 16U:
            stcCrcInit.u32Protocol = CRC_CRC16;
            break;
        case 32U:
            stcCrcInit.u32Protocol = CRC_CRC32;
            break;
        default :
            LOG_E("crc width only support 16/32.");
            goto _exit;
        }
        /* Initial value field is 32-bit on most parts, 64-bit on HC32F4A8. */
#if defined(HC32F460) || defined(HC32F4A0) || defined(HC32F448) || defined(HC32F472) || \
    defined(HC32F334)
        stcCrcInit.u32InitValue = ctx->crc_cfg.last_val;
#elif defined(HC32F4A8)
        stcCrcInit.u64InitValue = ctx->crc_cfg.last_val;
#endif
        if (CRC_Init(&stcCrcInit) != LL_OK)
        {
            LOG_E("crc init error.");
            goto _exit;
        }
        LOG_D("CRC_Init.");
        /* Remember the applied config so an identical next call skips re-init. */
        rt_memcpy(&crc_cfgbk, &ctx->crc_cfg, sizeof(struct hwcrypto_crc_cfg));
    }
    /* The accumulate API signature differs per family: out-parameter
     * on F460/F4A0/F4A8, return value on F448/F472/F334. */
#if defined(HC32F460) || defined(HC32F4A0) || defined(HC32F4A8)
    if (16U == ctx->crc_cfg.width)
    {
        (void)CRC_CRC16_AccumulateData(CRC_DATA_WIDTH_8BIT, in, length, (uint16_t *)&result);
    }
    else /* CRC32 */
    {
        (void)CRC_CRC32_AccumulateData(CRC_DATA_WIDTH_8BIT, in, length, &result);
    }
#elif defined(HC32F448) || defined(HC32F472) || defined(HC32F334)
    if (16U == ctx->crc_cfg.width)
    {
        result = CRC_CRC16_AccumulateData(CRC_DATA_WIDTH_8BIT, in, length);
    }
    else /* CRC32 */
    {
        result = CRC_CRC32_AccumulateData(CRC_DATA_WIDTH_8BIT, in, length);
    }
#endif
_exit:
    rt_mutex_release(&hc32_hw_dev->mutex);
    return result;
}
/* CRC ops table handed to the framework by _crypto_create(). */
static const struct hwcrypto_crc_ops crc_ops =
{
    .update = _crc_update,
};
  125. #endif /* BSP_USING_CRC */
  126. #if defined(BSP_USING_RNG)
  127. static rt_uint32_t _rng_rand(struct hwcrypto_rng *ctx)
  128. {
  129. rt_uint32_t gen_random = 0;
  130. if (TRNG_GenerateRandom(&gen_random, 1U) != LL_OK)
  131. {
  132. return 0;
  133. }
  134. return gen_random;
  135. }
/* RNG ops table handed to the framework by _crypto_create(). */
static const struct hwcrypto_rng_ops rng_ops =
{
    .update = _rng_rand,
};
  140. #endif /* BSP_USING_RNG */
  141. #if defined(BSP_USING_HASH)
#define HASH_SHA256_MSG_DIGEST_SIZE (32U)  /* SHA-256 digest length in bytes */
/* Deferred-input bookkeeping: _hash_update() only records the caller's
 * buffer here; _hash_finish() performs the actual HASH_Calculate().
 * NOTE(review): the pointed-to data must remain valid (and unmodified)
 * between update() and finish() — confirm callers guarantee this. */
static const rt_uint8_t *hash_in = RT_NULL;
static rt_size_t hash_length = 0;
  145. static rt_err_t _hash_update(struct hwcrypto_hash *ctx, const rt_uint8_t *in, rt_size_t length)
  146. {
  147. rt_err_t result = RT_EOK;
  148. struct hc32_hwcrypto_device *hc32_hw_dev = (struct hc32_hwcrypto_device *)ctx->parent.device->user_data;
  149. rt_mutex_take(&hc32_hw_dev->mutex, RT_WAITING_FOREVER);
  150. /* Start HASH computation transfer */
  151. switch (ctx->parent.type)
  152. {
  153. case HWCRYPTO_TYPE_SHA256:
  154. hash_in = in;
  155. hash_length = length;
  156. break;
  157. default :
  158. LOG_E("not support hash type: %x", ctx->parent.type);
  159. result = -RT_ERROR;
  160. break;
  161. }
  162. rt_mutex_release(&hc32_hw_dev->mutex);
  163. return result;
  164. }
  165. static rt_err_t _hash_finish(struct hwcrypto_hash *ctx, rt_uint8_t *out, rt_size_t length)
  166. {
  167. rt_err_t result = RT_EOK;
  168. struct hc32_hwcrypto_device *hc32_hw_dev = (struct hc32_hwcrypto_device *)ctx->parent.device->user_data;
  169. rt_mutex_take(&hc32_hw_dev->mutex, RT_WAITING_FOREVER);
  170. if (hash_in == RT_NULL || hash_length == 0)
  171. {
  172. LOG_E("no data input.");
  173. result = -RT_ERROR;
  174. goto _exit;
  175. }
  176. /* Get the hash Subtype */
  177. switch (ctx->parent.type)
  178. {
  179. case HWCRYPTO_TYPE_SHA256:
  180. /* SHA256 = 32*8 Bits */
  181. if (length == HASH_SHA256_MSG_DIGEST_SIZE)
  182. {
  183. result = HASH_Calculate(hash_in, hash_length, out);
  184. }
  185. else
  186. {
  187. LOG_E("The out size must be 32 bytes");
  188. }
  189. break;
  190. default :
  191. LOG_E("not support hash type: %x", ctx->parent.type);
  192. result = -RT_ERROR;
  193. break;
  194. }
  195. _exit:
  196. rt_mutex_release(&hc32_hw_dev->mutex);
  197. return result;
  198. }
/* Hash ops table handed to the framework by _crypto_create(). */
static const struct hwcrypto_hash_ops hash_ops =
{
    .update = _hash_update,
    .finish = _hash_finish
};
  204. #endif /* BSP_USING_HASH */
  205. #if defined(BSP_USING_AES)
  206. #if defined (HC32F4A8)
#define AES_KEY_SIZE_16BYTE (16U)  /* AES-128 key length in bytes */
#define AES_KEY_SIZE_24BYTE (24U)  /* AES-192 key length in bytes */
#define AES_KEY_SIZE_32BYTE (32U)  /* AES-256 key length in bytes */
/* Shared SKE configuration: mode/IV are filled by _cryp_crypt() and the
 * key/algorithm by AES_Encrypt()/AES_Decrypt() before SKE_Init(). Access
 * is serialized by the device mutex held in _cryp_crypt(). */
static stc_ske_init_t stcSkeInit = {0};
  211. static int32_t AES_Encrypt(const uint8_t *pu8Plaintext, uint32_t u32PlaintextSize, \
  212. const uint8_t *pu8Key, uint8_t u8KeySize, uint8_t *pu8Ciphertext)
  213. {
  214. int32_t i32Ret = LL_ERR_INVD_PARAM;
  215. stc_ske_crypto_t stcCrypto;
  216. if ((pu8Plaintext != NULL) && (u32PlaintextSize > 0UL) && (pu8Key != NULL) && (pu8Ciphertext != NULL))
  217. {
  218. if (u8KeySize == AES_KEY_SIZE_16BYTE)
  219. {
  220. stcSkeInit.u32Alg = SKE_ALG_AES_128;
  221. }
  222. else if (u8KeySize == AES_KEY_SIZE_24BYTE)
  223. {
  224. stcSkeInit.u32Alg = SKE_ALG_AES_192;
  225. }
  226. else
  227. {
  228. stcSkeInit.u32Alg = SKE_ALG_AES_256;
  229. }
  230. stcSkeInit.u32Crypto = SKE_CRYPTO_ENCRYPT;
  231. stcSkeInit.pu8Key = pu8Key;
  232. /* Initialize SKE */
  233. i32Ret = SKE_Init(&stcSkeInit);
  234. stcCrypto.u32Alg = stcSkeInit.u32Alg;
  235. stcCrypto.u32Mode = stcSkeInit.u32Mode;
  236. stcCrypto.u32CryptoSize = u32PlaintextSize;
  237. /* Encrypt blocks */
  238. stcCrypto.pu8In = pu8Plaintext;
  239. stcCrypto.pu8Out = pu8Ciphertext;
  240. i32Ret = SKE_CryptoBlocks(&stcCrypto);
  241. }
  242. return i32Ret;
  243. }
  244. static int32_t AES_Decrypt(const uint8_t *pu8Ciphertext, uint32_t u32CiphertextSize, \
  245. const uint8_t *pu8Key, uint8_t u8KeySize, uint8_t *pu8Plaintext)
  246. {
  247. int32_t i32Ret = LL_ERR_INVD_PARAM;
  248. stc_ske_crypto_t stcCrypto;
  249. if ((pu8Plaintext != NULL) && (u32CiphertextSize > 0UL) && (pu8Key != NULL) && (pu8Ciphertext != NULL))
  250. {
  251. if (u8KeySize == AES_KEY_SIZE_16BYTE)
  252. {
  253. stcSkeInit.u32Alg = SKE_ALG_AES_128;
  254. }
  255. else if (u8KeySize == AES_KEY_SIZE_24BYTE)
  256. {
  257. stcSkeInit.u32Alg = SKE_ALG_AES_192;
  258. }
  259. else
  260. {
  261. stcSkeInit.u32Alg = SKE_ALG_AES_256;
  262. }
  263. stcSkeInit.u32Crypto = SKE_CRYPTO_DECRYPT;
  264. stcSkeInit.pu8Key = pu8Key;
  265. /* Initialize SKE */
  266. i32Ret = SKE_Init(&stcSkeInit);
  267. stcCrypto.u32Alg = stcSkeInit.u32Alg;
  268. stcCrypto.u32Mode = stcSkeInit.u32Mode;
  269. stcCrypto.u32CryptoSize = u32CiphertextSize;
  270. /* Decrypt blocks */
  271. stcCrypto.pu8In = pu8Ciphertext;
  272. stcCrypto.pu8Out = pu8Plaintext;
  273. i32Ret = SKE_CryptoBlocks(&stcCrypto);
  274. }
  275. return i32Ret;
  276. }
  277. #endif
  278. static rt_err_t _cryp_crypt(struct hwcrypto_symmetric *ctx, struct hwcrypto_symmetric_info *info)
  279. {
  280. rt_err_t result = RT_EOK;
  281. struct hc32_hwcrypto_device *hc32_hw_dev = (struct hc32_hwcrypto_device *)ctx->parent.device->user_data;
  282. rt_mutex_take(&hc32_hw_dev->mutex, RT_WAITING_FOREVER);
  283. #if defined (HC32F4A8)
  284. SKE_StructInit(&stcSkeInit);
  285. switch (ctx->parent.type)
  286. {
  287. case HWCRYPTO_TYPE_AES_ECB:
  288. LOG_D("AES type is ECB.");
  289. stcSkeInit.u32Mode = SKE_MD_ECB;
  290. break;
  291. case HWCRYPTO_TYPE_AES_CBC:
  292. stcSkeInit.u32Mode = SKE_MD_CBC;
  293. break;
  294. case HWCRYPTO_TYPE_AES_CTR:
  295. stcSkeInit.u32Mode = SKE_MD_CTR;
  296. break;
  297. case HWCRYPTO_TYPE_AES_CFB:
  298. stcSkeInit.u32Mode = SKE_MD_CFB;
  299. break;
  300. case HWCRYPTO_TYPE_AES_OFB:
  301. stcSkeInit.u32Mode = SKE_MD_OFB;
  302. break;
  303. default :
  304. LOG_E("not support cryp type: %x", ctx->parent.type);
  305. break;
  306. }
  307. stcSkeInit.pu8Iv = ctx->iv;
  308. #endif
  309. #if defined (HC32F460)
  310. if (ctx->key_bitlen != (AES_KEY_SIZE_16BYTE * 8U))
  311. {
  312. LOG_E("not support key bitlen: %d", ctx->key_bitlen);
  313. result = -RT_ERROR;
  314. goto _exit;
  315. }
  316. #elif defined (HC32F4A0) || defined (HC32F448) || defined (HC32F472) || defined (HC32F4A8)
  317. if (ctx->key_bitlen != (AES_KEY_SIZE_16BYTE * 8U) && ctx->key_bitlen != (AES_KEY_SIZE_24BYTE * 8U) && \
  318. ctx->key_bitlen != (AES_KEY_SIZE_32BYTE * 8U))
  319. {
  320. LOG_E("not support key bitlen: %d", ctx->key_bitlen);
  321. result = -RT_ERROR;
  322. goto _exit;
  323. }
  324. #endif
  325. if ((info->length % 16U) != 0U)
  326. {
  327. LOG_E("aes supports only an integer multiple of 16 in length");
  328. result = -RT_ERROR;
  329. goto _exit;
  330. }
  331. if (info->mode == HWCRYPTO_MODE_ENCRYPT)
  332. {
  333. /* AES encryption. */
  334. if (LL_OK != AES_Encrypt(info->in, info->length, ctx->key, (ctx->key_bitlen / 8U), info->out))
  335. {
  336. result = -RT_ERROR;
  337. }
  338. }
  339. else if (info->mode == HWCRYPTO_MODE_DECRYPT)
  340. {
  341. /* AES decryption */
  342. if (LL_OK != AES_Decrypt(info->in, info->length, ctx->key, (ctx->key_bitlen / 8U), info->out))
  343. {
  344. result = -RT_ERROR;
  345. }
  346. }
  347. else
  348. {
  349. rt_kprintf("error cryp mode : %02x!\n", info->mode);
  350. result = -RT_ERROR;
  351. goto _exit;
  352. }
  353. _exit:
  354. rt_mutex_release(&hc32_hw_dev->mutex);
  355. return result;
  356. }
/* Symmetric-cipher ops table handed to the framework by _crypto_create(). */
static const struct hwcrypto_symmetric_ops cryp_ops =
{
    .crypt = _cryp_crypt
};
  361. #endif
/**
 * hwcrypto "create" hook: enable the peripheral clock for the requested
 * context's main type, perform any one-time hardware setup, and attach
 * the matching ops table to the context.
 *
 * @param ctx freshly allocated hwcrypto context.
 * @return RT_EOK on success, -RT_ERROR for unsupported types.
 */
static rt_err_t _crypto_create(struct rt_hwcrypto_ctx *ctx)
{
    rt_err_t res = RT_EOK;
    switch (ctx->type & HWCRYPTO_MAIN_TYPE_MASK)
    {
#if defined(BSP_USING_RNG)
    case HWCRYPTO_TYPE_RNG:
    {
        /* Enable TRNG. */
        FCG_Fcg0PeriphClockCmd(FCG0_PERIPH_TRNG, ENABLE);
        /* TRNG initialization configuration. */
        TRNG_Init(TRNG_SHIFT_CNT64, TRNG_RELOAD_INIT_VAL_ENABLE);
        /* TRNG Enable. */
        TRNG_Cmd(ENABLE);
        ((struct hwcrypto_rng *)ctx)->ops = &rng_ops;
        break;
    }
#endif /* BSP_USING_RNG */
#if defined(BSP_USING_CRC)
    case HWCRYPTO_TYPE_CRC:
    {
        /* Enable CRC module clock. */
        FCG_Fcg0PeriphClockCmd(FCG0_PERIPH_CRC, ENABLE);
        /* do not Initialize CRC because crc_update will do it */
        ((struct hwcrypto_crc *)ctx)->ops = &crc_ops;
        break;
    }
#endif /* BSP_USING_CRC */
#if defined(BSP_USING_HASH)
    /* All hash main-types land here; only SHA-256 is actually supported. */
    case HWCRYPTO_TYPE_MD5:
    case HWCRYPTO_TYPE_SHA1:
    case HWCRYPTO_TYPE_SHA2:
    {
        if (ctx->type == HWCRYPTO_TYPE_SHA256)
        {
            /* Enable HASH. */
            FCG_Fcg0PeriphClockCmd(FCG0_PERIPH_HASH, ENABLE);
            ((struct hwcrypto_hash *)ctx)->ops = &hash_ops;
        }
        else
        {
            LOG_E("not support hash type.");
            res = -RT_ERROR;
        }
        break;
    }
#endif /* BSP_USING_HASH */
#if defined(BSP_USING_AES)
    /* All symmetric main-types share the AES engine setup. */
    case HWCRYPTO_TYPE_AES:
    case HWCRYPTO_TYPE_DES:
    case HWCRYPTO_TYPE_3DES:
    case HWCRYPTO_TYPE_RC4:
    case HWCRYPTO_TYPE_GCM:
    {
#if defined(HC32F460) || defined(HC32F4A0) || defined(HC32F448) || defined(HC32F472)
        /* Enable AES peripheral clock. */
        /* NOTE(review): PWC_FCG0_AES looks like the legacy name of the
         * FCG0_PERIPH_AES macro — confirm against the DDL header. */
        FCG_Fcg0PeriphClockCmd(PWC_FCG0_AES, ENABLE);
#elif defined(HC32F4A8)
        /* Enable SKE peripheral clock */
        FCG_Fcg0PeriphClockCmd(FCG0_PERIPH_SKE, ENABLE);
#endif
        ((struct hwcrypto_symmetric *)ctx)->ops = &cryp_ops;
        break;
    }
#endif /* BSP_USING_AES */
    default:
        res = -RT_ERROR;
        break;
    }
    return res;
}
/**
 * hwcrypto "destroy" hook: undo _crypto_create() — de-initialize the
 * peripheral for the context's main type and gate its clock off again.
 * Unknown types are silently ignored.
 */
static void _crypto_destroy(struct rt_hwcrypto_ctx *ctx)
{
    switch (ctx->type & HWCRYPTO_MAIN_TYPE_MASK)
    {
#if defined(BSP_USING_RNG)
    case HWCRYPTO_TYPE_RNG:
        TRNG_Cmd(DISABLE);
        TRNG_DeInit();
        FCG_Fcg0PeriphClockCmd(FCG0_PERIPH_TRNG, DISABLE);
        break;
#endif /* BSP_USING_RNG */
#if defined(BSP_USING_CRC)
    case HWCRYPTO_TYPE_CRC:
        /* Clear the config backup so a future context re-runs CRC_Init(). */
        rt_memset(&crc_cfgbk, 0, sizeof(struct hwcrypto_crc_cfg));
        CRC_DeInit();
        FCG_Fcg0PeriphClockCmd(FCG0_PERIPH_CRC, DISABLE);
        break;
#endif /* BSP_USING_CRC */
#if defined(BSP_USING_HASH)
    case HWCRYPTO_TYPE_MD5:
    case HWCRYPTO_TYPE_SHA1:
    case HWCRYPTO_TYPE_SHA2:
        HASH_DeInit();
        FCG_Fcg0PeriphClockCmd(FCG0_PERIPH_HASH, DISABLE);
        break;
#endif /* BSP_USING_HASH */
#if defined(BSP_USING_AES)
    case HWCRYPTO_TYPE_AES:
    case HWCRYPTO_TYPE_DES:
    case HWCRYPTO_TYPE_3DES:
    case HWCRYPTO_TYPE_RC4:
    case HWCRYPTO_TYPE_GCM:
#if defined(HC32F460) || defined(HC32F4A0) || defined(HC32F448) || defined(HC32F472)
        AES_DeInit();
        FCG_Fcg0PeriphClockCmd(PWC_FCG0_AES, DISABLE);
#elif defined(HC32F4A8)
        SKE_DeInit();
        FCG_Fcg0PeriphClockCmd(FCG0_PERIPH_SKE, DISABLE);
#endif
        break;
#endif /* BSP_USING_AES */
    default:
        break;
    }
}
  478. static rt_err_t _crypto_clone(struct rt_hwcrypto_ctx *des, const struct rt_hwcrypto_ctx *src)
  479. {
  480. rt_err_t res = RT_EOK;
  481. switch (src->type & HWCRYPTO_MAIN_TYPE_MASK)
  482. {
  483. #if defined(BSP_USING_RNG)
  484. case HWCRYPTO_TYPE_RNG:
  485. break;
  486. #endif /* BSP_USING_RNG */
  487. #if defined(BSP_USING_CRC)
  488. case HWCRYPTO_TYPE_CRC:
  489. break;
  490. #endif /* BSP_USING_CRC */
  491. #if defined(BSP_USING_HASH)
  492. case HWCRYPTO_TYPE_MD5:
  493. case HWCRYPTO_TYPE_SHA1:
  494. case HWCRYPTO_TYPE_SHA2:
  495. break;
  496. #endif /* BSP_USING_HASH */
  497. #if defined(BSP_USING_AES)
  498. case HWCRYPTO_TYPE_AES:
  499. case HWCRYPTO_TYPE_DES:
  500. case HWCRYPTO_TYPE_3DES:
  501. case HWCRYPTO_TYPE_RC4:
  502. case HWCRYPTO_TYPE_GCM:
  503. break;
  504. #endif /* BSP_USING_AES */
  505. default:
  506. res = -RT_ERROR;
  507. break;
  508. }
  509. return res;
  510. }
/**
 * hwcrypto "reset" hook. Intentionally a no-op for every supported type:
 * the hardware is fully reconfigured lazily on the next operation
 * (_crc_update re-inits on config change, _cryp_crypt re-inits the SKE,
 * the hash engine is one-shot), so there is no per-context state to clear.
 */
static void _crypto_reset(struct rt_hwcrypto_ctx *ctx)
{
    switch (ctx->type & HWCRYPTO_MAIN_TYPE_MASK)
    {
#if defined(BSP_USING_RNG)
    case HWCRYPTO_TYPE_RNG:
        break;
#endif /* BSP_USING_RNG */
#if defined(BSP_USING_CRC)
    case HWCRYPTO_TYPE_CRC:
        break;
#endif /* BSP_USING_CRC */
#if defined(BSP_USING_HASH)
    case HWCRYPTO_TYPE_MD5:
    case HWCRYPTO_TYPE_SHA1:
    case HWCRYPTO_TYPE_SHA2:
        break;
#endif /* BSP_USING_HASH*/
#if defined(BSP_USING_AES)
    case HWCRYPTO_TYPE_AES:
    case HWCRYPTO_TYPE_DES:
    case HWCRYPTO_TYPE_3DES:
    case HWCRYPTO_TYPE_RC4:
    case HWCRYPTO_TYPE_GCM:
        break;
#endif /* BSP_USING_AES */
    default:
        break;
    }
}
/* Device-level ops table registered with the hwcrypto framework. */
static const struct rt_hwcrypto_ops _ops =
{
    .create = _crypto_create,
    .destroy = _crypto_destroy,
    .copy = _crypto_clone,
    .reset = _crypto_reset,
};
  548. static int rt_hw_crypto_device_init(void)
  549. {
  550. static struct hc32_hwcrypto_device _crypto_dev;
  551. #if defined(BSP_USING_UQID)
  552. stc_efm_unique_id_t pstcUID;
  553. rt_uint32_t cpuid[3] = {0};
  554. EFM_GetUID(&pstcUID);
  555. cpuid[0] = pstcUID.u32UniqueID0;
  556. cpuid[1] = pstcUID.u32UniqueID1;
  557. cpuid[2] = pstcUID.u32UniqueID2;
  558. /* we only used 2 words to as the UQID */
  559. rt_memcpy(&_crypto_dev.dev.id, cpuid, 8);
  560. LOG_D("UQID = %x%x", cpuid[0], cpuid[1]);
  561. #endif /* BSP_USING_UQID */
  562. _crypto_dev.dev.ops = &_ops;
  563. _crypto_dev.dev.user_data = &_crypto_dev;
  564. if (rt_hwcrypto_register(&_crypto_dev.dev, RT_HWCRYPTO_DEFAULT_NAME) != RT_EOK)
  565. {
  566. return -RT_ERROR;
  567. }
  568. rt_mutex_init(&_crypto_dev.mutex, RT_HWCRYPTO_DEFAULT_NAME, RT_IPC_FLAG_PRIO);
  569. return RT_EOK;
  570. }
  571. INIT_DEVICE_EXPORT(rt_hw_crypto_device_init);
  572. #endif