dma.h

/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2023-02-25     GuEe-GUI     the first version
 */

#ifndef __DMA_H__
#define __DMA_H__

#include <rtthread.h>

#include <drivers/ofw.h>
#include <drivers/misc.h>
#include <drivers/core/dm.h>

#include <mmu.h>
#include <mm_page.h>
#include <bitmap.h>
struct rt_dma_chan;
struct rt_dma_controller_ops;

enum rt_dma_transfer_direction
{
    RT_DMA_MEM_TO_MEM,
    RT_DMA_MEM_TO_DEV,
    RT_DMA_DEV_TO_MEM,
    RT_DMA_DEV_TO_DEV,

    RT_DMA_DIR_MAX,
};

enum rt_dma_slave_buswidth
{
    RT_DMA_SLAVE_BUSWIDTH_UNDEFINED = 0,
    RT_DMA_SLAVE_BUSWIDTH_1_BYTE    = 1,
    RT_DMA_SLAVE_BUSWIDTH_2_BYTES   = 2,
    RT_DMA_SLAVE_BUSWIDTH_3_BYTES   = 3,
    RT_DMA_SLAVE_BUSWIDTH_4_BYTES   = 4,
    RT_DMA_SLAVE_BUSWIDTH_8_BYTES   = 8,
    RT_DMA_SLAVE_BUSWIDTH_16_BYTES  = 16,
    RT_DMA_SLAVE_BUSWIDTH_32_BYTES  = 32,
    RT_DMA_SLAVE_BUSWIDTH_64_BYTES  = 64,
    RT_DMA_SLAVE_BUSWIDTH_128_BYTES = 128,

    RT_DMA_SLAVE_BUSWIDTH_BYTES_MAX,
};
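
/*
 * Transfer parameters for a slave (peripheral) channel, filled in by the
 * channel user and applied with rt_dma_chan_config(). The addr_width fields
 * give the bus width of each beat; src_addr/dst_addr typically hold the
 * device FIFO bus address for DEV-bound directions; maxburst and
 * port_window_size describe burst length and FIFO window size in bus-width
 * units (the exact interpretation is left to the controller driver).
 */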
struct rt_dma_slave_config
{
    enum rt_dma_transfer_direction direction;
    enum rt_dma_slave_buswidth src_addr_width;
    enum rt_dma_slave_buswidth dst_addr_width;

    rt_ubase_t src_addr;
    rt_ubase_t dst_addr;

    rt_uint32_t src_maxburst;
    rt_uint32_t dst_maxburst;
    rt_uint32_t src_port_window_size;
    rt_uint32_t dst_port_window_size;
};
struct rt_dma_slave_transfer
{
    rt_ubase_t src_addr;
    rt_ubase_t dst_addr;

    void *buffer;
    rt_ubase_t dma_handle;
    rt_size_t buffer_len;
    rt_size_t period_len;
};

struct rt_dma_controller
{
    rt_list_t list;

    struct rt_device *dev;

    RT_BITMAP_DECLARE(dir_cap, RT_DMA_DIR_MAX);
    const struct rt_dma_controller_ops *ops;

    rt_list_t channels_nodes;
    struct rt_mutex mutex;
};
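
/*
 * Callbacks implemented by a DMA controller driver. request_chan/release_chan
 * bind and unbind a channel for a slave device, config applies an
 * rt_dma_slave_config, start/stop control the hardware, and the prep_* hooks
 * queue a memory-to-memory copy, a cyclic (circular-buffer) transfer, or a
 * single-shot slave transfer before start is called.
 */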
struct rt_dma_controller_ops
{
    struct rt_dma_chan *(*request_chan)(struct rt_dma_controller *ctrl,
            struct rt_device *slave, void *fw_data);
    rt_err_t (*release_chan)(struct rt_dma_chan *chan);

    rt_err_t (*start)(struct rt_dma_chan *chan);
    rt_err_t (*stop)(struct rt_dma_chan *chan);
    rt_err_t (*config)(struct rt_dma_chan *chan, struct rt_dma_slave_config *conf);

    rt_err_t (*prep_memcpy)(struct rt_dma_chan *chan,
            rt_ubase_t dma_addr_src, rt_ubase_t dma_addr_dst, rt_size_t len);

    rt_err_t (*prep_cyclic)(struct rt_dma_chan *chan,
            rt_ubase_t dma_buf_addr, rt_size_t buf_len, rt_size_t period_len,
            enum rt_dma_transfer_direction dir);

    rt_err_t (*prep_single)(struct rt_dma_chan *chan,
            rt_ubase_t dma_buf_addr, rt_size_t buf_len,
            enum rt_dma_transfer_direction dir);
};
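
/*
 * A channel handle returned by rt_dma_chan_request(). The controller driver
 * records configuration/prepare errors in conf_err/prep_err and invokes
 * callback (through rt_dma_chan_done()) when a transfer, or one period of a
 * cyclic transfer, has completed; priv is controller-driver private data.
 */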
struct rt_dma_chan
{
    const char *name;

    struct rt_dma_controller *ctrl;
    struct rt_device *slave;

    rt_list_t list;

    rt_err_t conf_err;
    rt_err_t prep_err;
    struct rt_dma_slave_config conf;
    struct rt_dma_slave_transfer transfer;

    void (*callback)(struct rt_dma_chan *chan, rt_size_t size);

    void *priv;
};

struct rt_dma_pool
{
    rt_region_t region;

    rt_list_t list;

    rt_ubase_t flags;

    rt_bitmap_t *map;
    rt_size_t bits;
    rt_ubase_t start;

    struct rt_device *dev;
};
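
/*
 * Per-device mapping hooks, installed with rt_dma_device_set_ops(). They let
 * a bus or platform driver override how DMA memory is allocated and freed and
 * how CPU-owned buffers are synchronized with the device (cache maintenance
 * plus address translation); otherwise the framework falls back to its
 * default mapping.
 */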
struct rt_dma_map_ops
{
    void *(*alloc)(struct rt_device *dev, rt_size_t size,
            rt_ubase_t *dma_handle, rt_ubase_t flags);
    void (*free)(struct rt_device *dev, rt_size_t size,
            void *cpu_addr, rt_ubase_t dma_handle, rt_ubase_t flags);
    rt_err_t (*sync_out_data)(struct rt_device *dev, void *data, rt_size_t size,
            rt_ubase_t *dma_handle, rt_ubase_t flags);
    rt_err_t (*sync_in_data)(struct rt_device *dev, void *out_data, rt_size_t size,
            rt_ubase_t dma_handle, rt_ubase_t flags);
};

rt_inline void rt_dma_controller_add_direction(struct rt_dma_controller *ctrl,
        enum rt_dma_transfer_direction dir)
{
    RT_ASSERT(ctrl != RT_NULL);
    RT_ASSERT(dir < RT_DMA_DIR_MAX);

    rt_bitmap_set_bit(ctrl->dir_cap, dir);
}

rt_err_t rt_dma_controller_register(struct rt_dma_controller *ctrl);
rt_err_t rt_dma_controller_unregister(struct rt_dma_controller *ctrl);
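
/*
 * Minimal provider-side sketch (illustrative only, hence the "#if 0" guard):
 * a controller driver fills an rt_dma_controller with its ops, advertises the
 * directions it supports, and registers it. The demo_* names are
 * hypothetical, the stub callbacks do nothing, and it is assumed that
 * rt_dma_controller_register() initializes the internal list and mutex.
 */
#if 0
static struct rt_dma_chan demo_chan;

static struct rt_dma_chan *demo_request_chan(struct rt_dma_controller *ctrl,
        struct rt_device *slave, void *fw_data)
{
    demo_chan.ctrl = ctrl;
    demo_chan.slave = slave;

    return &demo_chan;
}

static rt_err_t demo_release_chan(struct rt_dma_chan *chan) { return RT_EOK; }
static rt_err_t demo_start(struct rt_dma_chan *chan) { return RT_EOK; }
static rt_err_t demo_stop(struct rt_dma_chan *chan) { return RT_EOK; }

static rt_err_t demo_config(struct rt_dma_chan *chan, struct rt_dma_slave_config *conf)
{
    /* Program the channel registers from *conf here. */
    return RT_EOK;
}

static rt_err_t demo_prep_memcpy(struct rt_dma_chan *chan,
        rt_ubase_t dma_addr_src, rt_ubase_t dma_addr_dst, rt_size_t len)
{
    /* Build the transfer descriptor; the copy runs once ops->start is called. */
    return RT_EOK;
}

static const struct rt_dma_controller_ops demo_ops =
{
    .request_chan = demo_request_chan,
    .release_chan = demo_release_chan,
    .start        = demo_start,
    .stop         = demo_stop,
    .config       = demo_config,
    .prep_memcpy  = demo_prep_memcpy,
};

static struct rt_dma_controller demo_ctrl;

static rt_err_t demo_controller_probe(struct rt_device *dev)
{
    demo_ctrl.dev = dev;
    demo_ctrl.ops = &demo_ops;

    /* Advertise the transfer directions this hardware can do. */
    rt_dma_controller_add_direction(&demo_ctrl, RT_DMA_MEM_TO_MEM);
    rt_dma_controller_add_direction(&demo_ctrl, RT_DMA_MEM_TO_DEV);

    return rt_dma_controller_register(&demo_ctrl);
}
#endif /* provider-side sketch */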
rt_err_t rt_dma_chan_start(struct rt_dma_chan *chan);
rt_err_t rt_dma_chan_stop(struct rt_dma_chan *chan);
rt_err_t rt_dma_chan_config(struct rt_dma_chan *chan,
        struct rt_dma_slave_config *conf);
rt_err_t rt_dma_chan_done(struct rt_dma_chan *chan, rt_size_t size);

rt_err_t rt_dma_prep_memcpy(struct rt_dma_chan *chan,
        struct rt_dma_slave_transfer *transfer);
rt_err_t rt_dma_prep_cyclic(struct rt_dma_chan *chan,
        struct rt_dma_slave_transfer *transfer);
rt_err_t rt_dma_prep_single(struct rt_dma_chan *chan,
        struct rt_dma_slave_transfer *transfer);

struct rt_dma_chan *rt_dma_chan_request(struct rt_device *dev, const char *name);
rt_err_t rt_dma_chan_release(struct rt_dma_chan *chan);
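
/*
 * Minimal consumer-side sketch (illustrative only, hence the "#if 0" guard):
 * request a named channel for a slave device, configure a memory-to-device
 * transfer, queue a buffer, and start it. The slave device, the "tx" channel
 * name, the FIFO address, the error-return convention, and the completion
 * handling are all assumptions; real values come from the slave driver and
 * the device tree.
 */
#if 0
static void demo_tx_done(struct rt_dma_chan *chan, rt_size_t size)
{
    /* Invoked by the controller driver (via rt_dma_chan_done()) once `size`
     * bytes have been moved; signal the waiting thread here. */
}

static rt_err_t demo_dma_tx(struct rt_device *slave, void *buf, rt_size_t len)
{
    rt_err_t err;
    struct rt_dma_chan *chan;
    struct rt_dma_slave_config conf = { 0 };
    struct rt_dma_slave_transfer transfer = { 0 };

    /* The channel name is resolved by the controller driver (e.g. from the
     * slave's "dmas"/"dma-names" OFW properties). */
    if (!(chan = rt_dma_chan_request(slave, "tx")))
    {
        return -RT_ENOSYS;
    }

    chan->callback = demo_tx_done;

    conf.direction      = RT_DMA_MEM_TO_DEV;
    conf.src_addr_width = RT_DMA_SLAVE_BUSWIDTH_1_BYTE;
    conf.dst_addr_width = RT_DMA_SLAVE_BUSWIDTH_1_BYTE;
    conf.dst_addr       = 0x10000000UL; /* hypothetical peripheral FIFO address */
    conf.dst_maxburst   = 1;

    if ((err = rt_dma_chan_config(chan, &conf)))
    {
        goto _release;
    }

    /* Whether the framework maps .buffer itself or expects a ready-made
     * .dma_handle is left to rt_dma_prep_single(); this sketch assumes the
     * former. */
    transfer.buffer     = buf;
    transfer.buffer_len = len;

    if ((err = rt_dma_prep_single(chan, &transfer)) ||
        (err = rt_dma_chan_start(chan)))
    {
        goto _release;
    }

    /* ... wait for demo_tx_done() to fire ... */

    err = rt_dma_chan_stop(chan);

_release:
    rt_dma_chan_release(chan);

    return err;
}
#endif /* consumer-side sketch */

/*
 * Allocation flags for rt_dma_alloc()/rt_dma_free(). Their exact behaviour is
 * defined by the framework and the per-device mapping ops; the names suggest
 * allocation from the linear (direct) mapping, restriction to 32-bit bus
 * addresses, an uncached mapping, allocation from a device-owned pool, and
 * memory that is not in the normal page map.
 */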
#define RT_DMA_F_LINEAR     RT_BIT(0)
#define RT_DMA_F_32BITS     RT_BIT(1)
#define RT_DMA_F_NOCACHE    RT_BIT(2)
#define RT_DMA_F_DEVICE     RT_BIT(3)
#define RT_DMA_F_NOMAP      RT_BIT(4)

#define RT_DMA_PAGE_SIZE    ARCH_PAGE_SIZE

void *rt_dma_alloc(struct rt_device *dev, rt_size_t size,
        rt_ubase_t *dma_handle, rt_ubase_t flags);
void rt_dma_free(struct rt_device *dev, rt_size_t size,
        void *cpu_addr, rt_ubase_t dma_handle, rt_ubase_t flags);

rt_inline void *rt_dma_alloc_coherent(struct rt_device *dev, rt_size_t size,
        rt_ubase_t *dma_handle)
{
    return rt_dma_alloc(dev, size, dma_handle,
            RT_DMA_F_NOCACHE | RT_DMA_F_LINEAR);
}

rt_inline void rt_dma_free_coherent(struct rt_device *dev, rt_size_t size,
        void *cpu_addr, rt_ubase_t dma_handle)
{
    rt_dma_free(dev, size, cpu_addr, dma_handle,
            RT_DMA_F_NOCACHE | RT_DMA_F_LINEAR);
}
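
/*
 * Coherent-buffer sketch (illustrative only): allocate one page of uncached,
 * linearly mapped memory, e.g. for a descriptor ring. The CPU uses the
 * returned virtual address while `demo_ring_dma` receives the bus address to
 * program into the controller. The demo_* names are hypothetical.
 */
#if 0
static void *demo_ring;
static rt_ubase_t demo_ring_dma;

static rt_err_t demo_ring_create(struct rt_device *dev)
{
    demo_ring = rt_dma_alloc_coherent(dev, RT_DMA_PAGE_SIZE, &demo_ring_dma);

    return demo_ring ? RT_EOK : -RT_ENOMEM;
}

static void demo_ring_destroy(struct rt_device *dev)
{
    rt_dma_free_coherent(dev, RT_DMA_PAGE_SIZE, demo_ring, demo_ring_dma);
}
#endif /* coherent-buffer sketch */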
rt_err_t rt_dma_sync_out_data(struct rt_device *dev, void *data, rt_size_t size,
        rt_ubase_t *dma_handle, rt_ubase_t flags);
rt_err_t rt_dma_sync_in_data(struct rt_device *dev, void *out_data, rt_size_t size,
        rt_ubase_t dma_handle, rt_ubase_t flags);
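
/*
 * Streaming sketch (illustrative only): hand a cached CPU buffer to the
 * device and reclaim it afterwards. The assumed semantics are that
 * rt_dma_sync_out_data() cleans the cache and yields the bus address, while
 * rt_dma_sync_in_data() invalidates the cache after the device has written;
 * the flag value 0 is also an assumption.
 */
#if 0
static rt_err_t demo_stream(struct rt_device *dev, void *buf, rt_size_t len)
{
    rt_err_t err;
    rt_ubase_t dma_handle;

    /* CPU -> device: make the buffer visible to the device and obtain the
     * bus address to program into the controller. */
    if ((err = rt_dma_sync_out_data(dev, buf, len, &dma_handle, 0)))
    {
        return err;
    }

    /* ... run the transfer using dma_handle ... */

    /* Device -> CPU: make the device's writes visible to the CPU. */
    return rt_dma_sync_in_data(dev, buf, len, dma_handle, 0);
}
#endif /* streaming sketch */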
rt_inline rt_bool_t rt_dma_device_is_coherent(struct rt_device *dev)
{
    return rt_dm_dev_prop_read_bool(dev, "dma-coherent");
}

rt_inline void rt_dma_device_set_ops(struct rt_device *dev,
        const struct rt_dma_map_ops *ops)
{
    dev->dma_ops = ops;
}

struct rt_dma_pool *rt_dma_pool_install(rt_region_t *region);

rt_err_t rt_dma_pool_extract(rt_region_t *region_list, rt_size_t list_len,
        rt_size_t cma_size, rt_size_t coherent_pool_size);

#endif /* __DMA_H__ */