rtc_time.c

/*
 * SPDX-FileCopyrightText: 2015-2023 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <stdint.h>
#include "esp_rom_sys.h"
#include "soc/rtc.h"
#include "soc/rtc_cntl_reg.h"
#include "hal/clk_tree_ll.h"
#include "hal/rtc_cntl_ll.h"
#include "hal/timer_ll.h"
#include "soc/timer_group_reg.h"
#include "esp_private/periph_ctrl.h"

/* Calibration of RTC_SLOW_CLK is performed using a special feature of TIMG0.
 * This feature counts the number of XTAL clock cycles within a given number of
 * RTC_SLOW_CLK cycles.
 *
 * The slow clock calibration feature has two modes of operation: one-off and cycling.
 * In cycling mode (which is enabled by default on SoC reset), counting of XTAL
 * cycles within a RTC_SLOW_CLK cycle is done continuously. Cycling mode is enabled
 * using the TIMG_RTC_CALI_START_CYCLING bit. In one-off mode counting is performed
 * once, and the TIMG_RTC_CALI_RDY bit is set when counting is done. One-off mode is
 * enabled using the TIMG_RTC_CALI_START bit.
 */

/**
 * @brief One-off clock calibration function used by rtc_clk_cal_internal
 * @param cal_clk which clock to calibrate
 * @param slowclk_cycles number of slow clock cycles to count
 * @return number of XTAL clock cycles within the given number of slow clock cycles
 */
static uint32_t rtc_clk_cal_internal_oneoff(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    /* Another calibration process may already be running when this function is
     * called, so wait until that process is done before starting a new one.
     */
    if (GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING)) {
        /**
         * Set a small timeout threshold to accelerate the generation of the timeout.
         * The internal circuit will be reset when the timeout occurs and will not affect the next calibration.
         */
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, 1);
        while (!GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_RDY)
               && !GET_PERI_REG_MASK(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT));
    }

    /* Prepare calibration */
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL, cal_clk);
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING);
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX, slowclk_cycles);

    /* Figure out how long to wait for calibration to finish:
     * set the timeout register and estimate the expected time delay. */
    uint32_t expected_freq;
    if (cal_clk == RTC_CAL_32K_XTAL) {
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_X32K_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = SOC_CLK_XTAL32K_FREQ_APPROX;
    } else if (cal_clk == RTC_CAL_8MD256) {
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_8MD256_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = SOC_CLK_RC_FAST_D256_FREQ_APPROX;
    } else {
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_90K_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = SOC_CLK_RC_SLOW_FREQ_APPROX;
    }
    uint32_t us_time_estimate = (uint32_t) (((uint64_t) slowclk_cycles) * MHZ / expected_freq);

    /* Start calibration */
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);
    SET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);

    /* Wait for calibration to finish, up to another us_time_estimate */
    esp_rom_delay_us(us_time_estimate);
    uint32_t cal_val;
    while (true) {
        if (GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_RDY)) {
            cal_val = REG_GET_FIELD(TIMG_RTCCALICFG1_REG(0), TIMG_RTC_CALI_VALUE);
            break;
        }
        if (GET_PERI_REG_MASK(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT)) {
            cal_val = 0;
            break;
        }
    }
    return cal_val;
}

/**
 * @brief Cycling clock calibration function used by rtc_clk_cal_internal
 * @param cal_clk which clock to calibrate
 * @param slowclk_cycles number of slow clock cycles to count
 * @return number of XTAL clock cycles within the given number of slow clock cycles
 */
static uint32_t rtc_clk_cal_internal_cycling(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    /* Get which slow clock is currently being calibrated and the configured number of calibration cycles */
    rtc_cal_sel_t in_calibration_clk;
    in_calibration_clk = REG_GET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL);
    uint32_t cali_slowclk_cycles = REG_GET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX);

    /* If no calibration is in progress, the calibration period is 0, or a different clock is being
     * calibrated, restart cycling calibration over slowclk_cycles cycles of the requested clock */
    if (cali_slowclk_cycles == 0 || !GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING) || in_calibration_clk != cal_clk) {
        CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING);
        REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL, cal_clk);
        REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX, slowclk_cycles);
        SET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING);
        cali_slowclk_cycles = slowclk_cycles;
    }

    /* Wait for calibration to finish */
    while (!GET_PERI_REG_MASK(TIMG_RTCCALICFG1_REG(0), TIMG_RTC_CALI_CYCLING_DATA_VLD));
    uint32_t cal_val = REG_GET_FIELD(TIMG_RTCCALICFG1_REG(0), TIMG_RTC_CALI_VALUE);
    return cal_val;
}

/**
 * @brief Slow clock period calculation function used by rtc_clk_cal and rtc_clk_cal_cycling
 * @param xtal_cycles number of XTAL cycles counted
 * @param slowclk_cycles number of slow clock cycles to count
 * @return slow clock period
 */
static uint32_t rtc_clk_xtal_to_slowclk(uint64_t xtal_cycles, uint32_t slowclk_cycles)
{
    rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
    uint64_t divider = ((uint64_t)xtal_freq) * slowclk_cycles;
    uint64_t period_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT) + divider / 2 - 1) / divider;
    uint32_t period = (uint32_t)(period_64 & UINT32_MAX);
    return period;
}
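
/*
 * Worked example (illustrative; it assumes RTC_CLK_CAL_FRACT == 19, so the returned period is a
 * Q13.19 fixed-point value in microseconds): calibrating an ideal 32768 Hz crystal against a
 * 40 MHz XTAL over slowclk_cycles = 1024 gives xtal_cycles = 40e6 / 32768 * 1024 = 1250000, and
 * period = (1250000 << 19) / (40 * 1024) = 16000000, i.e. 16000000 / 2^19 ~= 30.52 us per
 * slow clock cycle, matching 1e6 / 32768.
 */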

/**
 * @brief Clock calibration function used by rtc_clk_cal and rtc_clk_cal_ratio
 * @param cal_clk which clock to calibrate
 * @param slowclk_cycles number of slow clock cycles to count
 * @param cal_mode calibration mode: RTC_TIME_CAL_ONEOFF_MODE or RTC_TIME_CAL_CYCLING_MODE
 * @return number of XTAL clock cycles within the given number of slow clock cycles
 */
uint32_t rtc_clk_cal_internal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles, uint32_t cal_mode)
{
    /* On ESP32-S2, choosing RTC_CAL_RTC_MUX results in calibration of
     * the 90k RTC clock regardless of the currently selected SLOW_CLK.
     * On the ESP32, it used the currently selected SLOW_CLK.
     * The following code emulates ESP32 behavior:
     */
    if (cal_clk == RTC_CAL_RTC_MUX) {
        soc_rtc_slow_clk_src_t slow_clk_src = rtc_clk_slow_src_get();
        if (slow_clk_src == SOC_RTC_SLOW_CLK_SRC_XTAL32K) {
            cal_clk = RTC_CAL_32K_XTAL;
        } else if (slow_clk_src == SOC_RTC_SLOW_CLK_SRC_RC_FAST_D256) {
            cal_clk = RTC_CAL_8MD256;
        }
    } else if (cal_clk == RTC_CAL_INTERNAL_OSC) {
        cal_clk = RTC_CAL_RTC_MUX;
    }

    /* Enable the requested clock (the 90k clock is always on) */
    bool dig_32k_xtal_enabled = clk_ll_xtal32k_digi_is_enabled();
    if (cal_clk == RTC_CAL_32K_XTAL && !dig_32k_xtal_enabled) {
        clk_ll_xtal32k_digi_enable();
    }

    bool rc_fast_enabled = clk_ll_rc_fast_is_enabled();
    bool rc_fast_d256_enabled = clk_ll_rc_fast_d256_is_enabled();
    if (cal_clk == RTC_CAL_8MD256) {
        rtc_clk_8m_enable(true, true);
        clk_ll_rc_fast_d256_digi_enable();
    }

    uint32_t cal_val;
    if (cal_mode == RTC_TIME_CAL_ONEOFF_MODE) {
        cal_val = rtc_clk_cal_internal_oneoff(cal_clk, slowclk_cycles);
    } else {
        cal_val = rtc_clk_cal_internal_cycling(cal_clk, slowclk_cycles);
    }
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);

    /* If dig_32k_xtal was originally off and was only enabled for calibration, turn it back off */
    if (cal_clk == RTC_CAL_32K_XTAL && !dig_32k_xtal_enabled) {
        clk_ll_xtal32k_digi_disable();
    }

    if (cal_clk == RTC_CAL_8MD256) {
        clk_ll_rc_fast_d256_digi_disable();
        rtc_clk_8m_enable(rc_fast_enabled, rc_fast_d256_enabled);
    }

    return cal_val;
}

uint32_t rtc_clk_cal_ratio(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles, RTC_TIME_CAL_ONEOFF_MODE);
    uint64_t ratio_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT)) / slowclk_cycles;
    uint32_t ratio = (uint32_t)(ratio_64 & UINT32_MAX);
    return ratio;
}
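
/*
 * Illustrative note (assuming RTC_CLK_CAL_FRACT == 19): the ratio above is XTAL cycles per
 * slow clock cycle in Q13.19 fixed point. For a 40 MHz XTAL and an ideal 32768 Hz slow clock,
 * 40e6 / 32768 ~= 1220.7, so the returned value would be about 1220.7 * 2^19 = 640000000.
 */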

static inline bool rtc_clk_cal_32k_valid(rtc_xtal_freq_t xtal_freq, uint32_t slowclk_cycles, uint64_t actual_xtal_cycles)
{
    uint64_t expected_xtal_cycles = (xtal_freq * 1000000ULL * slowclk_cycles) >> 15; // xtal_freq (in Hz) * slowclk_cycles / 32768
    uint64_t delta = expected_xtal_cycles / 2000;                                    // tolerance of 5/10000, i.e. +/- 0.05%
    return (actual_xtal_cycles >= (expected_xtal_cycles - delta)) && (actual_xtal_cycles <= (expected_xtal_cycles + delta));
}

uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles, RTC_TIME_CAL_ONEOFF_MODE);

    if ((cal_clk == RTC_CAL_32K_XTAL) && !rtc_clk_cal_32k_valid(rtc_clk_xtal_freq_get(), slowclk_cycles, xtal_cycles)) {
        return 0;
    }

    return rtc_clk_xtal_to_slowclk(xtal_cycles, slowclk_cycles);
}

uint32_t rtc_clk_cal_cycling(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles, RTC_TIME_CAL_CYCLING_MODE);
    uint32_t period = rtc_clk_xtal_to_slowclk(xtal_cycles, slowclk_cycles);
    return period;
}

uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
    /* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
     * TODO: fix overflow.
     */
    return (time_in_us << RTC_CLK_CAL_FRACT) / period;
}

uint64_t rtc_time_slowclk_to_us(uint64_t rtc_cycles, uint32_t period)
{
    return (rtc_cycles * period) >> RTC_CLK_CAL_FRACT;
}
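
/*
 * Usage sketch (illustrative only; the 1024-cycle count and the 10 ms interval are arbitrary
 * values chosen for this example, not requirements of this file):
 *
 *     uint32_t period = rtc_clk_cal(RTC_CAL_RTC_MUX, 1024);           // Q13.19 us per slow clock cycle
 *     if (period != 0) {                                              // 0 indicates a failed calibration
 *         uint64_t ticks = rtc_time_us_to_slowclk(10 * 1000, period); // 10 ms -> slow clock cycles
 *         uint64_t us    = rtc_time_slowclk_to_us(ticks, period);     // ... and back to microseconds
 *     }
 */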

uint64_t rtc_time_get(void)
{
    return rtc_cntl_ll_get_rtc_time();
}

void rtc_clk_wait_for_slow_cycle(void) // This function may not be useful any more
{
    SET_PERI_REG_MASK(RTC_CNTL_SLOW_CLK_CONF_REG, RTC_CNTL_SLOW_CLK_NEXT_EDGE);
    while (GET_PERI_REG_MASK(RTC_CNTL_SLOW_CLK_CONF_REG, RTC_CNTL_SLOW_CLK_NEXT_EDGE)) {
        esp_rom_delay_us(1);
    }
}

uint32_t rtc_clk_freq_cal(uint32_t cal_val)
{
    if (cal_val == 0) {
        return 0; // cal_val is used as the denominator below, so return 0 to signal failure.
    }
    return 1000000ULL * (1 << RTC_CLK_CAL_FRACT) / cal_val;
}
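
/*
 * Illustrative example (again assuming RTC_CLK_CAL_FRACT == 19): with the period value from the
 * worked example above, rtc_clk_freq_cal(16000000) = 1000000 * 2^19 / 16000000 = 32768, i.e. the
 * estimated slow clock frequency in Hz.
 */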

/// @brief If calibration is used, Timer Group 0 has to be enabled first
__attribute__((constructor))
static void enable_timer_group0_for_calibration(void)
{
    PERIPH_RCC_ACQUIRE_ATOMIC(PERIPH_TIMG0_MODULE, ref_count) {
        if (ref_count == 0) {
            timer_ll_enable_bus_clock(0, true);
            timer_ll_reset_register(0);
        }
    }
}