drv_spi.c 39 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193
  1. /*
  2. * Copyright (c) 2006-2025 RT-Thread Development Team
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2018-11-5 SummerGift first version
  9. * 2018-12-11 greedyhao Porting for stm32f7xx
  10. * 2019-01-03 zylx modify DMA initialization and spixfer function
  11. * 2020-01-15 whj4674672 Porting for stm32h7xx
  12. * 2020-06-18 thread-liu Porting for stm32mp1xx
  13. * 2020-10-14 PeakRacing Porting for stm32wbxx
  14. */
  15. #include <rtthread.h>
  16. #include <rtdevice.h>
  17. #include "board.h"
  18. #ifdef BSP_USING_SPI
  19. #if defined(BSP_USING_SPI1) || defined(BSP_USING_SPI2) || defined(BSP_USING_SPI3) || defined(BSP_USING_SPI4) || defined(BSP_USING_SPI5) || defined(BSP_USING_SPI6)
  20. #include "drv_spi.h"
  21. #include "drv_config.h"
  22. #include <string.h>
  23. /*#define DRV_DEBUG*/
  24. #define LOG_TAG "drv.spi"
  25. #include <drv_log.h>
/* Index of each enabled SPI bus inside spi_config[] / spi_bus_obj[].
 * An entry exists only for buses selected in the BSP configuration,
 * so the indices are compact and match the config table order. */
enum
{
#ifdef BSP_USING_SPI1
    SPI1_INDEX,
#endif
#ifdef BSP_USING_SPI2
    SPI2_INDEX,
#endif
#ifdef BSP_USING_SPI3
    SPI3_INDEX,
#endif
#ifdef BSP_USING_SPI4
    SPI4_INDEX,
#endif
#ifdef BSP_USING_SPI5
    SPI5_INDEX,
#endif
#ifdef BSP_USING_SPI6
    SPI6_INDEX,
#endif
};
/* Static per-bus hardware configuration table; the SPIx_BUS_CONFIG
 * initializers come from drv_config.h for each enabled bus. */
static struct stm32_spi_config spi_config[] =
{
#ifdef BSP_USING_SPI1
    SPI1_BUS_CONFIG,
#endif
#ifdef BSP_USING_SPI2
    SPI2_BUS_CONFIG,
#endif
#ifdef BSP_USING_SPI3
    SPI3_BUS_CONFIG,
#endif
#ifdef BSP_USING_SPI4
    SPI4_BUS_CONFIG,
#endif
#ifdef BSP_USING_SPI5
    SPI5_BUS_CONFIG,
#endif
#ifdef BSP_USING_SPI6
    SPI6_BUS_CONFIG,
#endif
};
/* One runtime driver object per configured bus (HAL handle, DMA handles,
 * completion object); zero-initialized, filled in by rt_hw_spi_bus_init(). */
static struct stm32_spi spi_bus_obj[sizeof(spi_config) / sizeof(spi_config[0])] = {0};
/**
 * Apply an RT-Thread SPI configuration to the HAL handle and (re)initialize
 * the SPI peripheral, including optional DMA channels and their interrupts.
 *
 * @param spi_drv  driver object holding the HAL handle and bus config
 * @param cfg      requested RT-Thread SPI configuration (mode, width, max_hz);
 *                 cfg->usage_freq is written back with the actual bus frequency
 *
 * @return RT_EOK on success; -RT_EIO on unsupported data width or HAL failure
 */
static rt_err_t stm32_spi_init(struct stm32_spi *spi_drv, struct rt_spi_configuration *cfg)
{
    RT_ASSERT(spi_drv != RT_NULL);
    RT_ASSERT(cfg != RT_NULL);
    SPI_HandleTypeDef *spi_handle = &spi_drv->handle;
    /* master/slave role */
    if (cfg->mode & RT_SPI_SLAVE)
    {
        spi_handle->Init.Mode = SPI_MODE_SLAVE;
    }
    else
    {
        spi_handle->Init.Mode = SPI_MODE_MASTER;
    }
    /* 3-wire mode uses a single bidirectional data line */
    if (cfg->mode & RT_SPI_3WIRE)
    {
        spi_handle->Init.Direction = SPI_DIRECTION_1LINE;
    }
    else
    {
        spi_handle->Init.Direction = SPI_DIRECTION_2LINES;
    }
    /* only 8-bit and 16-bit frames are supported by this driver */
    if (cfg->data_width == 8)
    {
        spi_handle->Init.DataSize = SPI_DATASIZE_8BIT;
    }
    else if (cfg->data_width == 16)
    {
        spi_handle->Init.DataSize = SPI_DATASIZE_16BIT;
    }
    else
    {
        return -RT_EIO;
    }
    /* clock phase / polarity select SPI modes 0..3 */
    if (cfg->mode & RT_SPI_CPHA)
    {
        spi_handle->Init.CLKPhase = SPI_PHASE_2EDGE;
    }
    else
    {
        spi_handle->Init.CLKPhase = SPI_PHASE_1EDGE;
    }
    if (cfg->mode & RT_SPI_CPOL)
    {
        spi_handle->Init.CLKPolarity = SPI_POLARITY_HIGH;
    }
    else
    {
        spi_handle->Init.CLKPolarity = SPI_POLARITY_LOW;
    }
    /* chip-select is driven in software by spixfer() */
    spi_handle->Init.NSS = SPI_NSS_SOFT;
    uint32_t spi_clock = 0UL;
    /* Some series may only have APBPERIPH_BASE, but don't have HAL_RCC_GetPCLK2Freq */
#if defined(APBPERIPH_BASE)
    spi_clock = HAL_RCC_GetPCLK1Freq();
#elif defined(APB1PERIPH_BASE) || defined(APB2PERIPH_BASE)
    /* The SPI clock for H7 cannot be configured with a peripheral bus clock, so it needs to be written separately */
#if defined(SOC_SERIES_STM32H7)
    /* When the configuration is generated using CUBEMX, the configuration for the SPI clock is placed in the HAL_SPI_Init function.
       Therefore, it is necessary to initialize and configure the SPI clock to automatically configure the frequency division */
    HAL_SPI_Init(spi_handle);
    spi_clock = HAL_RCCEx_GetPeriphCLKFreq(RCC_PERIPHCLK_SPI123);
#else
    /* pick the PCLK that feeds this SPI instance based on its bus address */
    if ((rt_uint32_t)spi_drv->config->Instance >= APB2PERIPH_BASE)
    {
        spi_clock = HAL_RCC_GetPCLK2Freq();
    }
    else
    {
        spi_clock = HAL_RCC_GetPCLK1Freq();
    }
#endif /* SOC_SERIES_STM32H7 */
#endif /* APBPERIPH_BASE */
    /* choose the largest prescaler output that does not exceed max_hz */
    if (cfg->max_hz >= spi_clock / 2)
    {
        spi_handle->Init.BaudRatePrescaler = SPI_BAUDRATEPRESCALER_2;
    }
    else if (cfg->max_hz >= spi_clock / 4)
    {
        spi_handle->Init.BaudRatePrescaler = SPI_BAUDRATEPRESCALER_4;
    }
    else if (cfg->max_hz >= spi_clock / 8)
    {
        spi_handle->Init.BaudRatePrescaler = SPI_BAUDRATEPRESCALER_8;
    }
    else if (cfg->max_hz >= spi_clock / 16)
    {
        spi_handle->Init.BaudRatePrescaler = SPI_BAUDRATEPRESCALER_16;
    }
    else if (cfg->max_hz >= spi_clock / 32)
    {
        spi_handle->Init.BaudRatePrescaler = SPI_BAUDRATEPRESCALER_32;
    }
    else if (cfg->max_hz >= spi_clock / 64)
    {
        spi_handle->Init.BaudRatePrescaler = SPI_BAUDRATEPRESCALER_64;
    }
    else if (cfg->max_hz >= spi_clock / 128)
    {
        spi_handle->Init.BaudRatePrescaler = SPI_BAUDRATEPRESCALER_128;
    }
    else
    {
        /* min prescaler 256 */
        spi_handle->Init.BaudRatePrescaler = SPI_BAUDRATEPRESCALER_256;
    }
    /* report the actually achieved frequency back to the caller;
     * the prescaler field position differs between H7 (CFG1.MBR) and
     * the classic SPI IP (CR1.BR) */
#if defined(SOC_SERIES_STM32H7)
    cfg->usage_freq = spi_clock / (rt_size_t)(1 << ((spi_handle->Init.BaudRatePrescaler >> SPI_CFG1_MBR_Pos) + 1));
#else
    cfg->usage_freq = spi_clock / (rt_size_t)(1 << ((spi_handle->Init.BaudRatePrescaler >> SPI_CR1_BR_Pos) + 1));
#endif /* SOC_SERIES_STM32H7 */
    LOG_D("sys freq: %d, pclk freq: %d, SPI limiting freq: %d, SPI usage freq: %d",
#if defined(SOC_SERIES_STM32MP1)
          HAL_RCC_GetSystemCoreClockFreq(),
#else
          HAL_RCC_GetSysClockFreq(),
#endif
          spi_clock,
          cfg->max_hz,
          cfg->usage_freq);
    if (cfg->mode & RT_SPI_MSB)
    {
        spi_handle->Init.FirstBit = SPI_FIRSTBIT_MSB;
    }
    else
    {
        spi_handle->Init.FirstBit = SPI_FIRSTBIT_LSB;
    }
    spi_handle->Init.TIMode = SPI_TIMODE_DISABLE;
    spi_handle->Init.CRCCalculation = SPI_CRCCALCULATION_DISABLE;
    /* force HAL to treat the handle as fresh so re-configuration works */
    spi_handle->State = HAL_SPI_STATE_RESET;
#if defined(SOC_SERIES_STM32L4) || defined(SOC_SERIES_STM32G0) || defined(SOC_SERIES_STM32F0) || defined(SOC_SERIES_STM32WB)
    spi_handle->Init.NSSPMode = SPI_NSS_PULSE_DISABLE;
#elif defined(SOC_SERIES_STM32H7) || defined(SOC_SERIES_STM32MP1)
    /* NOTE(review): this unconditionally forces master mode + soft NSS on
     * H7/MP1, overriding a RT_SPI_SLAVE request made above — presumably
     * slave mode is unsupported on these series; confirm before relying on it */
    spi_handle->Init.Mode = SPI_MODE_MASTER;
    spi_handle->Init.NSS = SPI_NSS_SOFT;
    spi_handle->Init.NSSPMode = SPI_NSS_PULSE_DISABLE;
    spi_handle->Init.NSSPolarity = SPI_NSS_POLARITY_LOW;
    spi_handle->Init.CRCPolynomial = 7;
    spi_handle->Init.TxCRCInitializationPattern = SPI_CRC_INITIALIZATION_ALL_ZERO_PATTERN;
    spi_handle->Init.RxCRCInitializationPattern = SPI_CRC_INITIALIZATION_ALL_ZERO_PATTERN;
    spi_handle->Init.MasterSSIdleness = SPI_MASTER_SS_IDLENESS_00CYCLE;
    spi_handle->Init.MasterInterDataIdleness = SPI_MASTER_INTERDATA_IDLENESS_00CYCLE;
    spi_handle->Init.MasterReceiverAutoSusp = SPI_MASTER_RX_AUTOSUSP_DISABLE;
    spi_handle->Init.MasterKeepIOState = SPI_MASTER_KEEP_IO_STATE_ENABLE;
    spi_handle->Init.IOSwap = SPI_IO_SWAP_DISABLE;
    spi_handle->Init.FifoThreshold = SPI_FIFO_THRESHOLD_01DATA;
#endif
    if (HAL_SPI_Init(spi_handle) != HAL_OK)
    {
        return -RT_EIO;
    }
    /* on FIFO-based SPI IPs, generate RXNE as soon as one byte is available */
#if defined(SOC_SERIES_STM32L4) || defined(SOC_SERIES_STM32F0) \
    || defined(SOC_SERIES_STM32F7) || defined(SOC_SERIES_STM32G0) || defined(SOC_SERIES_STM32WB)
    SET_BIT(spi_handle->Instance->CR2, SPI_RXFIFO_THRESHOLD_HF);
#endif
    /* DMA configuration */
    if (spi_drv->spi_dma_flag & SPI_USING_RX_DMA_FLAG)
    {
        HAL_DMA_Init(&spi_drv->dma.handle_rx);
        __HAL_LINKDMA(&spi_drv->handle, hdmarx, spi_drv->dma.handle_rx);
        /* NVIC configuration for DMA transfer complete interrupt */
        HAL_NVIC_SetPriority(spi_drv->config->dma_rx->dma_irq, 0, 0);
        HAL_NVIC_EnableIRQ(spi_drv->config->dma_rx->dma_irq);
    }
    if (spi_drv->spi_dma_flag & SPI_USING_TX_DMA_FLAG)
    {
        HAL_DMA_Init(&spi_drv->dma.handle_tx);
        __HAL_LINKDMA(&spi_drv->handle, hdmatx, spi_drv->dma.handle_tx);
        /* NVIC configuration for DMA transfer complete interrupt */
        HAL_NVIC_SetPriority(spi_drv->config->dma_tx->dma_irq, 1, 0);
        HAL_NVIC_EnableIRQ(spi_drv->config->dma_tx->dma_irq);
    }
    /* the SPI global interrupt is needed for the HAL DMA completion path */
    if(spi_drv->spi_dma_flag & SPI_USING_TX_DMA_FLAG || spi_drv->spi_dma_flag & SPI_USING_RX_DMA_FLAG)
    {
        HAL_NVIC_SetPriority(spi_drv->config->irq_type, 2, 0);
        HAL_NVIC_EnableIRQ(spi_drv->config->irq_type);
    }
    LOG_D("%s init done", spi_drv->config->bus_name);
    return RT_EOK;
}
  249. static rt_ssize_t spixfer(struct rt_spi_device *device, struct rt_spi_message *message)
  250. {
  251. #define DMA_TRANS_MIN_LEN 10 /* only buffer length >= DMA_TRANS_MIN_LEN will use DMA mode */
  252. HAL_StatusTypeDef state = HAL_OK;
  253. rt_size_t message_length, already_send_length;
  254. rt_uint16_t send_length;
  255. rt_uint8_t *recv_buf;
  256. const rt_uint8_t *send_buf;
  257. RT_ASSERT(device != RT_NULL);
  258. RT_ASSERT(device->bus != RT_NULL);
  259. RT_ASSERT(message != RT_NULL);
  260. struct stm32_spi *spi_drv = rt_container_of(device->bus, struct stm32_spi, spi_bus);
  261. SPI_HandleTypeDef *spi_handle = &spi_drv->handle;
  262. rt_uint64_t total_byte_ms = (rt_uint64_t)message->length * 1000;
  263. rt_uint32_t speed_bytes_per_sec = spi_drv->cfg->usage_freq / 8;
  264. if (speed_bytes_per_sec == 0)
  265. {
  266. speed_bytes_per_sec = 1;
  267. }
  268. rt_uint32_t timeout_ms = total_byte_ms / speed_bytes_per_sec + 100;
  269. rt_tick_t timeout_tick = rt_tick_from_millisecond(timeout_ms);
  270. if (message->cs_take && !(device->config.mode & RT_SPI_NO_CS) && (device->cs_pin != PIN_NONE))
  271. {
  272. if (device->config.mode & RT_SPI_CS_HIGH)
  273. {
  274. rt_pin_write(device->cs_pin, PIN_HIGH);
  275. }
  276. else
  277. {
  278. rt_pin_write(device->cs_pin, PIN_LOW);
  279. }
  280. }
  281. LOG_D("%s transfer prepare and start", spi_drv->config->bus_name);
  282. LOG_D("%s sendbuf: %X, recvbuf: %X, length: %d",
  283. spi_drv->config->bus_name,
  284. (uint32_t)message->send_buf,
  285. (uint32_t)message->recv_buf, message->length);
  286. message_length = message->length;
  287. recv_buf = message->recv_buf;
  288. send_buf = message->send_buf;
  289. while (message_length)
  290. {
  291. /* the HAL library use uint16 to save the data length */
  292. if (message_length > 65535)
  293. {
  294. send_length = 65535;
  295. message_length = message_length - 65535;
  296. }
  297. else
  298. {
  299. send_length = message_length;
  300. message_length = 0;
  301. }
  302. /* calculate the start address */
  303. already_send_length = message->length - send_length - message_length;
  304. /* avoid null pointer problems */
  305. if (message->send_buf)
  306. {
  307. send_buf = (rt_uint8_t *)message->send_buf + already_send_length;
  308. }
  309. if (message->recv_buf)
  310. {
  311. recv_buf = (rt_uint8_t *)message->recv_buf + already_send_length;
  312. }
  313. rt_uint32_t* dma_aligned_buffer = RT_NULL;
  314. rt_uint32_t* p_txrx_buffer = RT_NULL;
  315. if ((spi_drv->spi_dma_flag & SPI_USING_TX_DMA_FLAG) && (send_length >= DMA_TRANS_MIN_LEN))
  316. {
  317. #if defined(SOC_SERIES_STM32H7) || defined(SOC_SERIES_STM32F7)
  318. if (RT_IS_ALIGN((rt_uint32_t)send_buf, 32) && send_buf != RT_NULL) /* aligned with 32 bytes? */
  319. {
  320. p_txrx_buffer = (rt_uint32_t *)send_buf; /* send_buf aligns with 32 bytes, no more operations */
  321. }
  322. else
  323. {
  324. /* send_buf doesn't align with 32 bytes, so creat a cache buffer with 32 bytes aligned */
  325. dma_aligned_buffer = (rt_uint32_t *)rt_malloc_align(send_length, 32);
  326. rt_memcpy(dma_aligned_buffer, send_buf, send_length);
  327. p_txrx_buffer = dma_aligned_buffer;
  328. }
  329. rt_hw_cpu_dcache_ops(RT_HW_CACHE_FLUSH, dma_aligned_buffer, send_length);
  330. #else
  331. if (RT_IS_ALIGN((rt_uint32_t)send_buf, 4) && send_buf != RT_NULL) /* aligned with 4 bytes? */
  332. {
  333. p_txrx_buffer = (rt_uint32_t *)send_buf; /* send_buf aligns with 4 bytes, no more operations */
  334. }
  335. else
  336. {
  337. /* send_buf doesn't align with 4 bytes, so creat a cache buffer with 4 bytes aligned */
  338. dma_aligned_buffer = (rt_uint32_t *)rt_malloc(send_length); /* aligned with RT_ALIGN_SIZE (8 bytes by default) */
  339. rt_memcpy(dma_aligned_buffer, send_buf, send_length);
  340. p_txrx_buffer = dma_aligned_buffer;
  341. }
  342. #endif /* SOC_SERIES_STM32H7 || SOC_SERIES_STM32F7 */
  343. }
  344. else if ((spi_drv->spi_dma_flag & SPI_USING_RX_DMA_FLAG) && (send_length >= DMA_TRANS_MIN_LEN))
  345. {
  346. #if defined(SOC_SERIES_STM32H7) || defined(SOC_SERIES_STM32F7)
  347. if (RT_IS_ALIGN((rt_uint32_t)recv_buf, 32) && recv_buf != RT_NULL) /* aligned with 32 bytes? */
  348. {
  349. p_txrx_buffer = (rt_uint32_t *)recv_buf; /* recv_buf aligns with 32 bytes, no more operations */
  350. }
  351. else
  352. {
  353. /* recv_buf doesn't align with 32 bytes, so creat a cache buffer with 32 bytes aligned */
  354. dma_aligned_buffer = (rt_uint32_t *)rt_malloc_align(send_length, 32);
  355. rt_memcpy(dma_aligned_buffer, recv_buf, send_length);
  356. p_txrx_buffer = dma_aligned_buffer;
  357. }
  358. rt_hw_cpu_dcache_ops(RT_HW_CACHE_FLUSH, dma_aligned_buffer, send_length);
  359. #else
  360. if (RT_IS_ALIGN((rt_uint32_t)recv_buf, 4) && recv_buf != RT_NULL) /* aligned with 4 bytes? */
  361. {
  362. p_txrx_buffer = (rt_uint32_t *)recv_buf; /* recv_buf aligns with 4 bytes, no more operations */
  363. }
  364. else
  365. {
  366. /* recv_buf doesn't align with 4 bytes, so creat a cache buffer with 4 bytes aligned */
  367. dma_aligned_buffer = (rt_uint32_t *)rt_malloc(send_length); /* aligned with RT_ALIGN_SIZE (8 bytes by default) */
  368. rt_memcpy(dma_aligned_buffer, recv_buf, send_length);
  369. p_txrx_buffer = dma_aligned_buffer;
  370. }
  371. #endif /* SOC_SERIES_STM32H7 || SOC_SERIES_STM32F7 */
  372. }
  373. /* start once data exchange in DMA mode */
  374. if (message->send_buf && message->recv_buf)
  375. {
  376. if ((spi_drv->spi_dma_flag & SPI_USING_TX_DMA_FLAG) && (spi_drv->spi_dma_flag & SPI_USING_RX_DMA_FLAG) && (send_length >= DMA_TRANS_MIN_LEN))
  377. {
  378. state = HAL_SPI_TransmitReceive_DMA(spi_handle, (uint8_t *)p_txrx_buffer, (uint8_t *)p_txrx_buffer, send_length);
  379. }
  380. else if ((spi_drv->spi_dma_flag & SPI_USING_TX_DMA_FLAG) && (send_length >= DMA_TRANS_MIN_LEN))
  381. {
  382. /* same as Tx ONLY. It will not receive SPI data any more. */
  383. state = HAL_SPI_Transmit_DMA(spi_handle, (uint8_t *)p_txrx_buffer, send_length);
  384. }
  385. else if ((spi_drv->spi_dma_flag & SPI_USING_RX_DMA_FLAG) && (send_length >= DMA_TRANS_MIN_LEN))
  386. {
  387. state = HAL_ERROR;
  388. LOG_E("It shoule be enabled both BSP_SPIx_TX_USING_DMA and BSP_SPIx_TX_USING_DMA flag, if wants to use SPI DMA Rx singly.");
  389. break;
  390. }
  391. else
  392. {
  393. state = HAL_SPI_TransmitReceive(spi_handle, (uint8_t *)send_buf, (uint8_t *)recv_buf, send_length, timeout_ms);
  394. }
  395. }
  396. else if (message->send_buf)
  397. {
  398. if ((spi_drv->spi_dma_flag & SPI_USING_TX_DMA_FLAG) && (send_length >= DMA_TRANS_MIN_LEN))
  399. {
  400. state = HAL_SPI_Transmit_DMA(spi_handle, (uint8_t *)p_txrx_buffer, send_length);
  401. }
  402. else
  403. {
  404. state = HAL_SPI_Transmit(spi_handle, (uint8_t *)send_buf, send_length, timeout_ms);
  405. }
  406. if (message->cs_release && (device->config.mode & RT_SPI_3WIRE))
  407. {
  408. /* release the CS by disable SPI when using 3 wires SPI */
  409. __HAL_SPI_DISABLE(spi_handle);
  410. }
  411. }
  412. else if(message->recv_buf)
  413. {
  414. rt_memset((uint8_t *)recv_buf, 0xff, send_length);
  415. if ((spi_drv->spi_dma_flag & SPI_USING_RX_DMA_FLAG) && (send_length >= DMA_TRANS_MIN_LEN))
  416. {
  417. state = HAL_SPI_Receive_DMA(spi_handle, (uint8_t *)p_txrx_buffer, send_length);
  418. }
  419. else
  420. {
  421. /* clear the old error flag */
  422. __HAL_SPI_CLEAR_OVRFLAG(spi_handle);
  423. state = HAL_SPI_Receive(spi_handle, (uint8_t *)recv_buf, send_length, timeout_ms);
  424. }
  425. }
  426. else
  427. {
  428. state = HAL_ERROR;
  429. LOG_E("message->send_buf and message->recv_buf are both NULL!");
  430. }
  431. if (state != HAL_OK)
  432. {
  433. LOG_E("SPI transfer error: %d", state);
  434. message->length = 0;
  435. spi_handle->State = HAL_SPI_STATE_READY;
  436. break;
  437. }
  438. else
  439. {
  440. LOG_D("%s transfer done", spi_drv->config->bus_name);
  441. }
  442. /* For simplicity reasons, this example is just waiting till the end of the
  443. transfer, but application may perform other tasks while transfer operation
  444. is ongoing. */
  445. if ((spi_drv->spi_dma_flag & (SPI_USING_TX_DMA_FLAG | SPI_USING_RX_DMA_FLAG)) && (send_length >= DMA_TRANS_MIN_LEN))
  446. {
  447. /* blocking the thread,and the other tasks can run */
  448. if (rt_completion_wait(&spi_drv->cpt, timeout_tick) != RT_EOK)
  449. {
  450. state = HAL_ERROR;
  451. LOG_E("wait for DMA interrupt overtime!");
  452. break;
  453. }
  454. }
  455. else
  456. {
  457. rt_uint32_t timeout = timeout_ms;
  458. while (HAL_SPI_GetState(spi_handle) != HAL_SPI_STATE_READY)
  459. {
  460. if (timeout-- > 0)
  461. {
  462. rt_thread_mdelay(1);
  463. }
  464. else
  465. {
  466. LOG_E("timeout! SPI state did not become READY.");
  467. state = HAL_TIMEOUT;
  468. break;
  469. }
  470. }
  471. }
  472. if(dma_aligned_buffer != RT_NULL) /* re-aligned, so need to copy the data to recv_buf */
  473. {
  474. if(recv_buf != RT_NULL)
  475. {
  476. #if defined(SOC_SERIES_STM32H7) || defined(SOC_SERIES_STM32F7)
  477. rt_hw_cpu_dcache_ops(RT_HW_CACHE_INVALIDATE, p_txrx_buffer, send_length);
  478. #endif /* SOC_SERIES_STM32H7 || SOC_SERIES_STM32F7 */
  479. rt_memcpy(recv_buf, p_txrx_buffer, send_length);
  480. }
  481. #if defined(SOC_SERIES_STM32H7) || defined(SOC_SERIES_STM32F7)
  482. rt_free_align(dma_aligned_buffer);
  483. #else
  484. rt_free(dma_aligned_buffer);
  485. #endif /* SOC_SERIES_STM32H7 || SOC_SERIES_STM32F7 */
  486. }
  487. }
  488. if (message->cs_release && !(device->config.mode & RT_SPI_NO_CS) && (device->cs_pin != PIN_NONE))
  489. {
  490. if (device->config.mode & RT_SPI_CS_HIGH)
  491. rt_pin_write(device->cs_pin, PIN_LOW);
  492. else
  493. rt_pin_write(device->cs_pin, PIN_HIGH);
  494. }
  495. if(state != HAL_OK)
  496. {
  497. return -RT_ERROR;
  498. }
  499. return message->length;
  500. }
  501. static rt_err_t spi_configure(struct rt_spi_device *device,
  502. struct rt_spi_configuration *configuration)
  503. {
  504. RT_ASSERT(device != RT_NULL);
  505. RT_ASSERT(configuration != RT_NULL);
  506. struct stm32_spi *spi_drv = rt_container_of(device->bus, struct stm32_spi, spi_bus);
  507. spi_drv->cfg = configuration;
  508. rt_kprintf("@spi_configure\n");
  509. return stm32_spi_init(spi_drv, configuration);
  510. }
/* RT-Thread SPI bus operations implemented by this driver */
static const struct rt_spi_ops stm_spi_ops =
{
    .configure = spi_configure,
    .xfer = spixfer,
};
  516. static int rt_hw_spi_bus_init(void)
  517. {
  518. rt_err_t result;
  519. for (rt_size_t i = 0; i < sizeof(spi_config) / sizeof(spi_config[0]); i++)
  520. {
  521. spi_bus_obj[i].config = &spi_config[i];
  522. spi_bus_obj[i].spi_bus.parent.user_data = &spi_config[i];
  523. spi_bus_obj[i].handle.Instance = spi_config[i].Instance;
  524. if (spi_bus_obj[i].spi_dma_flag & SPI_USING_RX_DMA_FLAG)
  525. {
  526. /* Configure the DMA handler for Transmission process */
  527. spi_bus_obj[i].dma.handle_rx.Instance = spi_config[i].dma_rx->Instance;
  528. #if defined(SOC_SERIES_STM32F2) || defined(SOC_SERIES_STM32F4) || defined(SOC_SERIES_STM32F7)
  529. spi_bus_obj[i].dma.handle_rx.Init.Channel = spi_config[i].dma_rx->channel;
  530. #elif defined(SOC_SERIES_STM32L4) || defined(SOC_SERIES_STM32G0) || defined(SOC_SERIES_STM32MP1) || defined(SOC_SERIES_STM32WB) || defined(SOC_SERIES_STM32H7)
  531. spi_bus_obj[i].dma.handle_rx.Init.Request = spi_config[i].dma_rx->request;
  532. #endif
  533. #ifndef SOC_SERIES_STM32U5
  534. spi_bus_obj[i].dma.handle_rx.Init.Direction = DMA_PERIPH_TO_MEMORY;
  535. spi_bus_obj[i].dma.handle_rx.Init.PeriphInc = DMA_PINC_DISABLE;
  536. spi_bus_obj[i].dma.handle_rx.Init.MemInc = DMA_MINC_ENABLE;
  537. spi_bus_obj[i].dma.handle_rx.Init.PeriphDataAlignment = DMA_PDATAALIGN_BYTE;
  538. spi_bus_obj[i].dma.handle_rx.Init.MemDataAlignment = DMA_MDATAALIGN_BYTE;
  539. spi_bus_obj[i].dma.handle_rx.Init.Mode = DMA_NORMAL;
  540. spi_bus_obj[i].dma.handle_rx.Init.Priority = DMA_PRIORITY_HIGH;
  541. #endif
  542. #if defined(SOC_SERIES_STM32F2) || defined(SOC_SERIES_STM32F4) || defined(SOC_SERIES_STM32F7) || defined(SOC_SERIES_STM32MP1) || defined(SOC_SERIES_STM32H7)
  543. spi_bus_obj[i].dma.handle_rx.Init.FIFOMode = DMA_FIFOMODE_DISABLE;
  544. spi_bus_obj[i].dma.handle_rx.Init.FIFOThreshold = DMA_FIFO_THRESHOLD_FULL;
  545. spi_bus_obj[i].dma.handle_rx.Init.MemBurst = DMA_MBURST_INC4;
  546. spi_bus_obj[i].dma.handle_rx.Init.PeriphBurst = DMA_PBURST_INC4;
  547. #endif
  548. {
  549. rt_uint32_t tmpreg = 0x00U;
  550. #if defined(SOC_SERIES_STM32F1) || defined(SOC_SERIES_STM32G0) || defined(SOC_SERIES_STM32F0)
  551. /* enable DMA clock && Delay after an RCC peripheral clock enabling*/
  552. SET_BIT(RCC->AHBENR, spi_config[i].dma_rx->dma_rcc);
  553. tmpreg = READ_BIT(RCC->AHBENR, spi_config[i].dma_rx->dma_rcc);
  554. #elif defined(SOC_SERIES_STM32F2) || defined(SOC_SERIES_STM32F4) || defined(SOC_SERIES_STM32F7) || defined(SOC_SERIES_STM32L4) || defined(SOC_SERIES_STM32WB) || defined(SOC_SERIES_STM32H7)
  555. SET_BIT(RCC->AHB1ENR, spi_config[i].dma_rx->dma_rcc);
  556. /* Delay after an RCC peripheral clock enabling */
  557. tmpreg = READ_BIT(RCC->AHB1ENR, spi_config[i].dma_rx->dma_rcc);
  558. #elif defined(SOC_SERIES_STM32MP1)
  559. __HAL_RCC_DMAMUX_CLK_ENABLE();
  560. SET_BIT(RCC->MP_AHB2ENSETR, spi_config[i].dma_rx->dma_rcc);
  561. tmpreg = READ_BIT(RCC->MP_AHB2ENSETR, spi_config[i].dma_rx->dma_rcc);
  562. #endif
  563. UNUSED(tmpreg); /* To avoid compiler warnings */
  564. }
  565. }
  566. if (spi_bus_obj[i].spi_dma_flag & SPI_USING_TX_DMA_FLAG)
  567. {
  568. /* Configure the DMA handler for Transmission process */
  569. spi_bus_obj[i].dma.handle_tx.Instance = spi_config[i].dma_tx->Instance;
  570. #if defined(SOC_SERIES_STM32F2) || defined(SOC_SERIES_STM32F4) || defined(SOC_SERIES_STM32F7)
  571. spi_bus_obj[i].dma.handle_tx.Init.Channel = spi_config[i].dma_tx->channel;
  572. #elif defined(SOC_SERIES_STM32L4) || defined(SOC_SERIES_STM32G0) || defined(SOC_SERIES_STM32MP1) || defined(SOC_SERIES_STM32WB) || defined(SOC_SERIES_STM32H7)
  573. spi_bus_obj[i].dma.handle_tx.Init.Request = spi_config[i].dma_tx->request;
  574. #endif
  575. #ifndef SOC_SERIES_STM32U5
  576. spi_bus_obj[i].dma.handle_tx.Init.Direction = DMA_MEMORY_TO_PERIPH;
  577. spi_bus_obj[i].dma.handle_tx.Init.PeriphInc = DMA_PINC_DISABLE;
  578. spi_bus_obj[i].dma.handle_tx.Init.MemInc = DMA_MINC_ENABLE;
  579. spi_bus_obj[i].dma.handle_tx.Init.PeriphDataAlignment = DMA_PDATAALIGN_BYTE;
  580. spi_bus_obj[i].dma.handle_tx.Init.MemDataAlignment = DMA_MDATAALIGN_BYTE;
  581. spi_bus_obj[i].dma.handle_tx.Init.Mode = DMA_NORMAL;
  582. spi_bus_obj[i].dma.handle_tx.Init.Priority = DMA_PRIORITY_LOW;
  583. #endif
  584. #if defined(SOC_SERIES_STM32F2) || defined(SOC_SERIES_STM32F4) || defined(SOC_SERIES_STM32F7) || defined(SOC_SERIES_STM32MP1) || defined(SOC_SERIES_STM32H7)
  585. spi_bus_obj[i].dma.handle_tx.Init.FIFOMode = DMA_FIFOMODE_DISABLE;
  586. spi_bus_obj[i].dma.handle_tx.Init.FIFOThreshold = DMA_FIFO_THRESHOLD_FULL;
  587. spi_bus_obj[i].dma.handle_tx.Init.MemBurst = DMA_MBURST_INC4;
  588. spi_bus_obj[i].dma.handle_tx.Init.PeriphBurst = DMA_PBURST_INC4;
  589. #endif
  590. {
  591. rt_uint32_t tmpreg = 0x00U;
  592. #if defined(SOC_SERIES_STM32F1) || defined(SOC_SERIES_STM32G0) || defined(SOC_SERIES_STM32F0)
  593. /* enable DMA clock && Delay after an RCC peripheral clock enabling*/
  594. SET_BIT(RCC->AHBENR, spi_config[i].dma_tx->dma_rcc);
  595. tmpreg = READ_BIT(RCC->AHBENR, spi_config[i].dma_tx->dma_rcc);
  596. #elif defined(SOC_SERIES_STM32F2) || defined(SOC_SERIES_STM32F4) || defined(SOC_SERIES_STM32F7) || defined(SOC_SERIES_STM32L4) || defined(SOC_SERIES_STM32WB) || defined(SOC_SERIES_STM32H7)
  597. SET_BIT(RCC->AHB1ENR, spi_config[i].dma_tx->dma_rcc);
  598. /* Delay after an RCC peripheral clock enabling */
  599. tmpreg = READ_BIT(RCC->AHB1ENR, spi_config[i].dma_tx->dma_rcc);
  600. #elif defined(SOC_SERIES_STM32MP1)
  601. __HAL_RCC_DMAMUX_CLK_ENABLE();
  602. SET_BIT(RCC->MP_AHB2ENSETR, spi_config[i].dma_tx->dma_rcc);
  603. tmpreg = READ_BIT(RCC->MP_AHB2ENSETR, spi_config[i].dma_tx->dma_rcc);
  604. #endif
  605. UNUSED(tmpreg); /* To avoid compiler warnings */
  606. }
  607. }
  608. /* initialize completion object */
  609. rt_completion_init(&spi_bus_obj[i].cpt);
  610. result = rt_spi_bus_register(&spi_bus_obj[i].spi_bus, spi_config[i].bus_name, &stm_spi_ops);
  611. RT_ASSERT(result == RT_EOK);
  612. LOG_D("%s bus init done", spi_config[i].bus_name);
  613. }
  614. return result;
  615. }
  616. /**
  617. * Attach the spi device to SPI bus, this function must be used after initialization.
  618. */
  619. rt_err_t rt_hw_spi_device_attach(const char *bus_name, const char *device_name, rt_base_t cs_pin)
  620. {
  621. RT_ASSERT(bus_name != RT_NULL);
  622. RT_ASSERT(device_name != RT_NULL);
  623. rt_err_t result;
  624. struct rt_spi_device *spi_device;
  625. /* attach the device to spi bus*/
  626. spi_device = (struct rt_spi_device *)rt_malloc(sizeof(struct rt_spi_device));
  627. RT_ASSERT(spi_device != RT_NULL);
  628. result = rt_spi_bus_attach_device_cspin(spi_device, device_name, bus_name, cs_pin, RT_NULL);
  629. if (result != RT_EOK)
  630. {
  631. LOG_E("%s attach to %s faild, %d\n", device_name, bus_name, result);
  632. }
  633. RT_ASSERT(result == RT_EOK);
  634. LOG_D("%s attach to %s done", device_name, bus_name);
  635. return result;
  636. }
  637. /**
  638. * Detach the spi device from SPI bus.
  639. *
  640. * @param device_name the name of the spi device to be detached.
  641. */
  642. rt_err_t rt_hw_spi_device_detach(const char *device_name)
  643. {
  644. RT_ASSERT(device_name != RT_NULL);
  645. rt_err_t result;
  646. struct rt_spi_device *spi_device;
  647. rt_device_t device = rt_device_find(device_name);
  648. if (device == RT_NULL)
  649. {
  650. LOG_E("SPI device %s not found.", device_name);
  651. return -RT_ERROR;
  652. }
  653. if (device->type != RT_Device_Class_SPIDevice)
  654. {
  655. LOG_E("%s is not an SPI device.", device_name);
  656. return -RT_ERROR;
  657. }
  658. spi_device = (struct rt_spi_device *)device;
  659. result = rt_spi_bus_detach_device_cspin(spi_device);
  660. if (result != RT_EOK)
  661. {
  662. LOG_E("Failed to detach %s from its bus, error code: %d", device_name, result);
  663. return result;
  664. }
  665. rt_free(spi_device);
  666. LOG_D("SPI device %s has been detached.", device_name);
  667. return RT_EOK;
  668. }
#if defined(BSP_SPI1_TX_USING_DMA) || defined(BSP_SPI1_RX_USING_DMA)
/* SPI1 global interrupt: forwarded to the HAL (needed for DMA completion) */
void SPI1_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    HAL_SPI_IRQHandler(&spi_bus_obj[SPI1_INDEX].handle);
    /* leave interrupt */
    rt_interrupt_leave();
}
#endif
#if defined(BSP_USING_SPI1) && defined(BSP_SPI1_RX_USING_DMA)
/**
 * @brief This function handles SPI1 DMA Rx interrupt request.
 * @param None
 * @retval None
 */
void SPI1_DMA_RX_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    HAL_DMA_IRQHandler(&spi_bus_obj[SPI1_INDEX].dma.handle_rx);
    /* leave interrupt */
    rt_interrupt_leave();
}
#endif
  694. #if defined(BSP_USING_SPI1) && defined(BSP_SPI1_TX_USING_DMA)
  695. /**
  696. * @brief This function handles DMA Tx interrupt request.
  697. * @param None
  698. * @retval None
  699. */
  700. void SPI1_DMA_TX_IRQHandler(void)
  701. {
  702. /* enter interrupt */
  703. rt_interrupt_enter();
  704. HAL_DMA_IRQHandler(&spi_bus_obj[SPI1_INDEX].dma.handle_tx);
  705. /* leave interrupt */
  706. rt_interrupt_leave();
  707. }
  708. #endif /* defined(BSP_USING_SPI1) && defined(BSP_SPI_USING_DMA) */
#if defined(BSP_SPI2_TX_USING_DMA) || defined(BSP_SPI2_RX_USING_DMA)
/**
 * @brief SPI2 global interrupt handler; delegates to the ST HAL SPI state machine.
 */
void SPI2_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_SPI_IRQHandler(&spi_bus_obj[SPI2_INDEX].handle);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI2_TX_USING_DMA) || defined(BSP_SPI2_RX_USING_DMA) */

#if defined(BSP_USING_SPI2) && defined(BSP_SPI2_RX_USING_DMA)
/**
 * @brief This function handles DMA Rx interrupt request.
 * @param None
 * @retval None
 */
void SPI2_DMA_RX_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_DMA_IRQHandler(&spi_bus_obj[SPI2_INDEX].dma.handle_rx);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_USING_SPI2) && defined(BSP_SPI2_RX_USING_DMA) */

#if defined(BSP_USING_SPI2) && defined(BSP_SPI2_TX_USING_DMA)
/**
 * @brief This function handles DMA Tx interrupt request.
 * @param None
 * @retval None
 */
void SPI2_DMA_TX_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_DMA_IRQHandler(&spi_bus_obj[SPI2_INDEX].dma.handle_tx);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_USING_SPI2) && defined(BSP_SPI2_TX_USING_DMA) */
#if defined(BSP_SPI3_TX_USING_DMA) || defined(BSP_SPI3_RX_USING_DMA)
/**
 * @brief SPI3 global interrupt handler; delegates to the ST HAL SPI state machine.
 */
void SPI3_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_SPI_IRQHandler(&spi_bus_obj[SPI3_INDEX].handle);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI3_TX_USING_DMA) || defined(BSP_SPI3_RX_USING_DMA) */

#if defined(BSP_USING_SPI3) && defined(BSP_SPI3_RX_USING_DMA)
/**
 * @brief This function handles DMA Rx interrupt request.
 * @param None
 * @retval None
 */
void SPI3_DMA_RX_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_DMA_IRQHandler(&spi_bus_obj[SPI3_INDEX].dma.handle_rx);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_USING_SPI3) && defined(BSP_SPI3_RX_USING_DMA) */

#if defined(BSP_USING_SPI3) && defined(BSP_SPI3_TX_USING_DMA)
/**
 * @brief This function handles DMA Tx interrupt request.
 * @param None
 * @retval None
 */
void SPI3_DMA_TX_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_DMA_IRQHandler(&spi_bus_obj[SPI3_INDEX].dma.handle_tx);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_USING_SPI3) && defined(BSP_SPI3_TX_USING_DMA) */
#if defined(BSP_SPI4_TX_USING_DMA) || defined(BSP_SPI4_RX_USING_DMA)
/**
 * @brief SPI4 global interrupt handler; delegates to the ST HAL SPI state machine.
 */
void SPI4_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_SPI_IRQHandler(&spi_bus_obj[SPI4_INDEX].handle);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI4_TX_USING_DMA) || defined(BSP_SPI4_RX_USING_DMA) */

#if defined(BSP_USING_SPI4) && defined(BSP_SPI4_RX_USING_DMA)
/**
 * @brief This function handles DMA Rx interrupt request.
 * @param None
 * @retval None
 */
void SPI4_DMA_RX_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_DMA_IRQHandler(&spi_bus_obj[SPI4_INDEX].dma.handle_rx);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_USING_SPI4) && defined(BSP_SPI4_RX_USING_DMA) */

#if defined(BSP_USING_SPI4) && defined(BSP_SPI4_TX_USING_DMA)
/**
 * @brief This function handles DMA Tx interrupt request.
 * @param None
 * @retval None
 */
void SPI4_DMA_TX_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_DMA_IRQHandler(&spi_bus_obj[SPI4_INDEX].dma.handle_tx);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_USING_SPI4) && defined(BSP_SPI4_TX_USING_DMA) */
#if defined(BSP_SPI5_TX_USING_DMA) || defined(BSP_SPI5_RX_USING_DMA)
/**
 * @brief SPI5 global interrupt handler; delegates to the ST HAL SPI state machine.
 */
void SPI5_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_SPI_IRQHandler(&spi_bus_obj[SPI5_INDEX].handle);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI5_TX_USING_DMA) || defined(BSP_SPI5_RX_USING_DMA) */

#if defined(BSP_USING_SPI5) && defined(BSP_SPI5_RX_USING_DMA)
/**
 * @brief This function handles DMA Rx interrupt request.
 * @param None
 * @retval None
 */
void SPI5_DMA_RX_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_DMA_IRQHandler(&spi_bus_obj[SPI5_INDEX].dma.handle_rx);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_USING_SPI5) && defined(BSP_SPI5_RX_USING_DMA) */

#if defined(BSP_USING_SPI5) && defined(BSP_SPI5_TX_USING_DMA)
/**
 * @brief This function handles DMA Tx interrupt request.
 * @param None
 * @retval None
 */
void SPI5_DMA_TX_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_DMA_IRQHandler(&spi_bus_obj[SPI5_INDEX].dma.handle_tx);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_USING_SPI5) && defined(BSP_SPI5_TX_USING_DMA) */
#if defined(BSP_USING_SPI6) && defined(BSP_SPI6_RX_USING_DMA)
/**
 * @brief This function handles DMA Rx interrupt request.
 * @param None
 * @retval None
 */
void SPI6_DMA_RX_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_DMA_IRQHandler(&spi_bus_obj[SPI6_INDEX].dma.handle_rx);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_USING_SPI6) && defined(BSP_SPI6_RX_USING_DMA) */

#if defined(BSP_USING_SPI6) && defined(BSP_SPI6_TX_USING_DMA)
/**
 * @brief This function handles DMA Tx interrupt request.
 * @param None
 * @retval None
 */
void SPI6_DMA_TX_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();

    HAL_DMA_IRQHandler(&spi_bus_obj[SPI6_INDEX].dma.handle_tx);

    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_USING_SPI6) && defined(BSP_SPI6_TX_USING_DMA) */
/**
 * @brief Record the DMA configuration selected at build time for each SPI bus.
 *
 * For every SPIx direction enabled via BSP_SPIx_RX/TX_USING_DMA, this sets the
 * matching SPI_USING_RX/TX_DMA_FLAG on the bus object and points spi_config[]
 * at a dma_config initialized from the board's SPIx_RX/TX_DMA_CONFIG macro.
 * The dma_config objects are declared `static` so the stored pointers remain
 * valid after this function returns.
 */
static void stm32_get_dma_info(void)
{
#ifdef BSP_SPI1_RX_USING_DMA
    spi_bus_obj[SPI1_INDEX].spi_dma_flag |= SPI_USING_RX_DMA_FLAG;
    static struct dma_config spi1_dma_rx = SPI1_RX_DMA_CONFIG;
    spi_config[SPI1_INDEX].dma_rx = &spi1_dma_rx;
#endif

#ifdef BSP_SPI1_TX_USING_DMA
    spi_bus_obj[SPI1_INDEX].spi_dma_flag |= SPI_USING_TX_DMA_FLAG;
    static struct dma_config spi1_dma_tx = SPI1_TX_DMA_CONFIG;
    spi_config[SPI1_INDEX].dma_tx = &spi1_dma_tx;
#endif

#ifdef BSP_SPI2_RX_USING_DMA
    spi_bus_obj[SPI2_INDEX].spi_dma_flag |= SPI_USING_RX_DMA_FLAG;
    static struct dma_config spi2_dma_rx = SPI2_RX_DMA_CONFIG;
    spi_config[SPI2_INDEX].dma_rx = &spi2_dma_rx;
#endif

#ifdef BSP_SPI2_TX_USING_DMA
    spi_bus_obj[SPI2_INDEX].spi_dma_flag |= SPI_USING_TX_DMA_FLAG;
    static struct dma_config spi2_dma_tx = SPI2_TX_DMA_CONFIG;
    spi_config[SPI2_INDEX].dma_tx = &spi2_dma_tx;
#endif

#ifdef BSP_SPI3_RX_USING_DMA
    spi_bus_obj[SPI3_INDEX].spi_dma_flag |= SPI_USING_RX_DMA_FLAG;
    static struct dma_config spi3_dma_rx = SPI3_RX_DMA_CONFIG;
    spi_config[SPI3_INDEX].dma_rx = &spi3_dma_rx;
#endif

#ifdef BSP_SPI3_TX_USING_DMA
    spi_bus_obj[SPI3_INDEX].spi_dma_flag |= SPI_USING_TX_DMA_FLAG;
    static struct dma_config spi3_dma_tx = SPI3_TX_DMA_CONFIG;
    spi_config[SPI3_INDEX].dma_tx = &spi3_dma_tx;
#endif

#ifdef BSP_SPI4_RX_USING_DMA
    spi_bus_obj[SPI4_INDEX].spi_dma_flag |= SPI_USING_RX_DMA_FLAG;
    static struct dma_config spi4_dma_rx = SPI4_RX_DMA_CONFIG;
    spi_config[SPI4_INDEX].dma_rx = &spi4_dma_rx;
#endif

#ifdef BSP_SPI4_TX_USING_DMA
    spi_bus_obj[SPI4_INDEX].spi_dma_flag |= SPI_USING_TX_DMA_FLAG;
    static struct dma_config spi4_dma_tx = SPI4_TX_DMA_CONFIG;
    spi_config[SPI4_INDEX].dma_tx = &spi4_dma_tx;
#endif

#ifdef BSP_SPI5_RX_USING_DMA
    spi_bus_obj[SPI5_INDEX].spi_dma_flag |= SPI_USING_RX_DMA_FLAG;
    static struct dma_config spi5_dma_rx = SPI5_RX_DMA_CONFIG;
    spi_config[SPI5_INDEX].dma_rx = &spi5_dma_rx;
#endif

#ifdef BSP_SPI5_TX_USING_DMA
    spi_bus_obj[SPI5_INDEX].spi_dma_flag |= SPI_USING_TX_DMA_FLAG;
    static struct dma_config spi5_dma_tx = SPI5_TX_DMA_CONFIG;
    spi_config[SPI5_INDEX].dma_tx = &spi5_dma_tx;
#endif

#ifdef BSP_SPI6_RX_USING_DMA
    spi_bus_obj[SPI6_INDEX].spi_dma_flag |= SPI_USING_RX_DMA_FLAG;
    static struct dma_config spi6_dma_rx = SPI6_RX_DMA_CONFIG;
    spi_config[SPI6_INDEX].dma_rx = &spi6_dma_rx;
#endif

#ifdef BSP_SPI6_TX_USING_DMA
    spi_bus_obj[SPI6_INDEX].spi_dma_flag |= SPI_USING_TX_DMA_FLAG;
    static struct dma_config spi6_dma_tx = SPI6_TX_DMA_CONFIG;
    spi_config[SPI6_INDEX].dma_tx = &spi6_dma_tx;
#endif
}
  962. void HAL_SPI_TxRxCpltCallback(SPI_HandleTypeDef *hspi)
  963. {
  964. struct stm32_spi *spi_drv = rt_container_of(hspi, struct stm32_spi, handle);
  965. rt_completion_done(&spi_drv->cpt);
  966. }
  967. void HAL_SPI_TxCpltCallback(SPI_HandleTypeDef *hspi)
  968. {
  969. struct stm32_spi *spi_drv = rt_container_of(hspi, struct stm32_spi, handle);
  970. rt_completion_done(&spi_drv->cpt);
  971. }
  972. void HAL_SPI_RxCpltCallback(SPI_HandleTypeDef *hspi)
  973. {
  974. struct stm32_spi *spi_drv = rt_container_of(hspi, struct stm32_spi, handle);
  975. rt_completion_done(&spi_drv->cpt);
  976. }
#if defined(SOC_SERIES_STM32F0)
/* On STM32F0 the SPI1 TX and RX DMA channels share a single interrupt
 * vector; fan the combined IRQ out to the per-direction handlers. */
void SPI1_DMA_RX_TX_IRQHandler(void)
{
#if defined(BSP_USING_SPI1) && defined(BSP_SPI1_TX_USING_DMA)
    SPI1_DMA_TX_IRQHandler();
#endif
#if defined(BSP_USING_SPI1) && defined(BSP_SPI1_RX_USING_DMA)
    SPI1_DMA_RX_IRQHandler();
#endif
}

/* Shared SPI2 TX/RX DMA interrupt vector (STM32F0); same fan-out pattern. */
void SPI2_DMA_RX_TX_IRQHandler(void)
{
#if defined(BSP_USING_SPI2) && defined(BSP_SPI2_TX_USING_DMA)
    SPI2_DMA_TX_IRQHandler();
#endif
#if defined(BSP_USING_SPI2) && defined(BSP_SPI2_RX_USING_DMA)
    SPI2_DMA_RX_IRQHandler();
#endif
}
#elif defined(SOC_SERIES_STM32G0)
#if defined(BSP_SPI1_TX_USING_DMA) || defined(BSP_SPI1_RX_USING_DMA)
/* STM32G0: shared SPI1 TX/RX DMA interrupt vector. */
void SPI1_DMA_RX_TX_IRQHandler(void)
{
#if defined(BSP_SPI1_TX_USING_DMA)
    SPI1_DMA_TX_IRQHandler();
#endif
#if defined(BSP_SPI1_RX_USING_DMA)
    SPI1_DMA_RX_IRQHandler();
#endif
}
#endif /* defined(BSP_SPI1_TX_USING_DMA) || defined(BSP_SPI1_RX_USING_DMA) */

#if defined(BSP_SPI2_TX_USING_DMA) || defined(BSP_SPI2_RX_USING_DMA)
/* STM32G0: shared SPI2 TX/RX DMA interrupt vector. */
void SPI2_DMA_RX_TX_IRQHandler(void)
{
#if defined(BSP_SPI2_TX_USING_DMA)
    SPI2_DMA_TX_IRQHandler();
#endif
#if defined(BSP_SPI2_RX_USING_DMA)
    SPI2_DMA_RX_IRQHandler();
#endif
}
#endif /* defined(BSP_SPI2_TX_USING_DMA) || defined(BSP_SPI2_RX_USING_DMA) */

#if defined(STM32G0B0xx) || defined(STM32G0B1xx) || defined(STM32G0C1xx)
#if defined(BSP_USING_SPI2) || defined(BSP_USING_SPI3)
/* These G0 parts route SPI2 and SPI3 onto one combined interrupt vector. */
void SPI2_3_IRQHandler(void)
{
#if defined(BSP_SPI2_TX_USING_DMA) || defined(BSP_SPI2_RX_USING_DMA)
    SPI2_IRQHandler();
#endif
#if defined(BSP_SPI3_TX_USING_DMA) || defined(BSP_SPI3_RX_USING_DMA)
    SPI3_IRQHandler();
#endif
}
#endif /* defined(BSP_USING_SPI2) || defined(BSP_USING_SPI3) */
#endif /* defined(STM32G0B0xx) || defined(STM32G0B1xx) || defined(STM32G0C1xx) */
#endif /* defined(SOC_SERIES_STM32F0) */
/**
 * @brief Board-level SPI initialization: collect the Kconfig-selected DMA
 *        settings, then register the enabled SPI buses with RT-Thread.
 *
 * Registered via INIT_BOARD_EXPORT so it runs during board initialization.
 *
 * @return the result of rt_hw_spi_bus_init()
 */
int rt_hw_spi_init(void)
{
    stm32_get_dma_info();

    return rt_hw_spi_bus_init();
}
INIT_BOARD_EXPORT(rt_hw_spi_init);
  1039. #endif /* BSP_USING_SPI1 || BSP_USING_SPI2 || BSP_USING_SPI3 || BSP_USING_SPI4 || BSP_USING_SPI5 */
  1040. #endif /* BSP_USING_SPI */