/* rtc_time.c — RTC slow-clock calibration and RTC timer access (ESP32-S2) */
  1. /*
  2. * SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. #include <stdint.h>
  7. #include "esp_rom_sys.h"
  8. #include "soc/rtc.h"
  9. #include "soc/rtc_cntl_reg.h"
  10. #include "soc/timer_group_reg.h"
  11. /* Calibration of RTC_SLOW_CLK is performed using a special feature of TIMG0.
  12. * This feature counts the number of XTAL clock cycles within a given number of
  13. * RTC_SLOW_CLK cycles.
  14. *
  15. * Slow clock calibration feature has two modes of operation: one-off and cycling.
  16. * In cycling mode (which is enabled by default on SoC reset), counting of XTAL
  17. * cycles within RTC_SLOW_CLK cycle is done continuously. Cycling mode is enabled
  18. * using TIMG_RTC_CALI_START_CYCLING bit. In one-off mode counting is performed
  19. * once, and TIMG_RTC_CALI_RDY bit is set when counting is done. One-off mode is
  20. * enabled using TIMG_RTC_CALI_START bit.
  21. */
/**
 * @brief One-off clock calibration function used by rtc_clk_cal_internal
 *
 * Programs TIMG0's slow-clock calibration feature in one-off mode: the
 * hardware counts XTAL cycles during `slowclk_cycles` periods of the selected
 * slow clock, then raises TIMG_RTC_CALI_RDY (or TIMG_RTC_CALI_TIMEOUT if the
 * selected clock does not toggle within the programmed threshold).
 *
 * @param cal_clk which clock to calibrate
 * @param slowclk_cycles number of slow clock cycles to count
 * @return number of XTAL clock cycles within the given number of slow clock
 *         cycles, or 0 if the hardware calibration timed out
 */
static uint32_t rtc_clk_cal_internal_oneoff(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    /* There may be another calibration process already running during we call this function,
     * so we should wait the last process is done.
     */
    if (GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING)) {
        /* A cycling-mode measurement is in flight: spin until it reports
         * ready or times out, so the reprogramming below does not corrupt it. */
        while (!GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_RDY)
               && !GET_PERI_REG_MASK(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT));
    }
    /* Prepare calibration: select the source, leave cycling mode, set cycle count */
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL, cal_clk);
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING);
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX, slowclk_cycles);
    /* Figure out how long to wait for calibration to finish */
    /* Set timeout reg and expect time delay: the timeout threshold scales with
     * the requested cycle count using the nominal frequency of each source. */
    uint32_t expected_freq;
    if (cal_clk == RTC_CAL_32K_XTAL) {
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_X32K_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = RTC_SLOW_CLK_FREQ_32K;
    } else if (cal_clk == RTC_CAL_8MD256) {
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_8MD256_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = RTC_SLOW_CLK_FREQ_8MD256;
    } else {
        /* Any other selector: assume the internal 90k oscillator */
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_90K_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = RTC_SLOW_CLK_FREQ_90K;
    }
    /* Expected measurement duration in microseconds at the nominal frequency */
    uint32_t us_time_estimate = (uint32_t) (((uint64_t) slowclk_cycles) * MHZ / expected_freq);
    /* Start calibration: clear then set START to generate a clean start edge */
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);
    SET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);
    /* Wait for calibration to finish up to another us_time_estimate */
    esp_rom_delay_us(us_time_estimate);
    uint32_t cal_val;
    while (true) {
        if (GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_RDY)) {
            cal_val = REG_GET_FIELD(TIMG_RTCCALICFG1_REG(0), TIMG_RTC_CALI_VALUE);
            break;
        }
        if (GET_PERI_REG_MASK(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT)) {
            /* Timed out waiting for the slow clock to toggle — report failure as 0 */
            cal_val = 0;
            break;
        }
    }
    return cal_val;
}
/**
 * @brief Cycling clock calibration function used by rtc_clk_cal_internal
 *
 * Reuses an already-running cycling-mode measurement when it targets the same
 * clock; otherwise (re)starts cycling mode with the requested cycle count,
 * then waits for the hardware to publish a valid result.
 *
 * @param cal_clk which clock to calibrate
 * @param slowclk_cycles number of slow clock cycles to count
 * @return number of XTAL clock cycles within the given number of slow clock cycles
 */
static uint32_t rtc_clk_cal_internal_cycling(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    /* Get which slowclk is in calibration and max cali cycles */
    rtc_cal_sel_t in_calibration_clk;
    in_calibration_clk = REG_GET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL);
    uint32_t cali_slowclk_cycles = REG_GET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX);
    /* If no calibration in process or calibration period equal to 0, use slowclk_cycles cycles to calibrate slowclk */
    if (cali_slowclk_cycles == 0 || !GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING) || in_calibration_clk != cal_clk) {
        /* (Re)start cycling mode for the requested clock and cycle count */
        CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING);
        REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL, cal_clk);
        REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX, slowclk_cycles);
        SET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING);
        cali_slowclk_cycles = slowclk_cycles;
    }
    /* Wait for calibration finished */
    while (!GET_PERI_REG_MASK(TIMG_RTCCALICFG1_REG(0), TIMG_RTC_CALI_CYCLING_DATA_VLD));
    /* NOTE(review): if a measurement for the same clock was already running
     * with a different CALI_MAX, the value read here corresponds to
     * cali_slowclk_cycles, not slowclk_cycles — callers divide by
     * slowclk_cycles, so confirm this mismatch cannot occur in practice. */
    uint32_t cal_val = REG_GET_FIELD(TIMG_RTCCALICFG1_REG(0), TIMG_RTC_CALI_VALUE);
    return cal_val;
}
  98. /**
  99. * @brief Slowclk period calculating funtion used by rtc_clk_cal and rtc_clk_cal_cycling
  100. * @param xtal_cycles number of xtal cycles count
  101. * @param slowclk_cycles number of slow clock cycles to count
  102. * @return slow clock period
  103. */
  104. static uint32_t rtc_clk_xtal_to_slowclk(uint64_t xtal_cycles, uint32_t slowclk_cycles)
  105. {
  106. rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
  107. uint64_t divider = ((uint64_t)xtal_freq) * slowclk_cycles;
  108. uint64_t period_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT) + divider / 2 - 1) / divider;
  109. uint32_t period = (uint32_t)(period_64 & UINT32_MAX);
  110. return period;
  111. }
/**
 * @brief Clock calibration function used by rtc_clk_cal and rtc_clk_cal_ratio
 *
 * Resolves the requested source to a concrete calibration selector, enables
 * the source clock if necessary, runs either a one-off or cycling
 * measurement, then restores the clock-enable state it changed.
 *
 * @param cal_clk which clock to calibrate
 * @param slowclk_cycles number of slow clock cycles to count
 * @param cal_mode RTC_TIME_CAL_ONEOFF_MODE or cycling mode otherwise
 * @return number of XTAL clock cycles within the given number of slow clock cycles
 */
uint32_t rtc_clk_cal_internal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles, uint32_t cal_mode)
{
    /* On ESP32S2, choosing RTC_CAL_RTC_MUX results in calibration of
     * the 90k RTC clock regardless of the currently selected SLOW_CLK.
     * On the ESP32, it used the currently selected SLOW_CLK.
     * The following code emulates ESP32 behavior:
     */
    if (cal_clk == RTC_CAL_RTC_MUX) {
        rtc_slow_freq_t slow_freq = rtc_clk_slow_freq_get();
        if (slow_freq == RTC_SLOW_FREQ_32K_XTAL) {
            cal_clk = RTC_CAL_32K_XTAL;
        } else if (slow_freq == RTC_SLOW_FREQ_8MD256) {
            cal_clk = RTC_CAL_8MD256;
        }
    } else if (cal_clk == RTC_CAL_INTERNAL_OSC) {
        /* The mux selector on the S2 always measures the internal 90k
         * oscillator (see note above), so it serves as the 90k selector. */
        cal_clk = RTC_CAL_RTC_MUX;
    }
    /* Enable requested clock (90k clock is always on) */
    int dig_32k_xtal_state = REG_GET_FIELD(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_XTAL32K_EN);
    if (cal_clk == RTC_CAL_32K_XTAL && !dig_32k_xtal_state) {
        REG_SET_FIELD(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_XTAL32K_EN, 1);
    }
    if (cal_clk == RTC_CAL_8MD256) {
        SET_PERI_REG_MASK(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_CLK8M_D256_EN);
    }
    uint32_t cal_val;
    if (cal_mode == RTC_TIME_CAL_ONEOFF_MODE) {
        cal_val = rtc_clk_cal_internal_oneoff(cal_clk, slowclk_cycles);
    } else {
        cal_val = rtc_clk_cal_internal_cycling(cal_clk, slowclk_cycles);
    }
    /* Stop the one-off measurement and restore the clock enables touched above */
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);
    REG_SET_FIELD(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_XTAL32K_EN, dig_32k_xtal_state);
    if (cal_clk == RTC_CAL_8MD256) {
        CLEAR_PERI_REG_MASK(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_CLK8M_D256_EN);
    }
    return cal_val;
}
  156. uint32_t rtc_clk_cal_ratio(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
  157. {
  158. uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles, RTC_TIME_CAL_ONEOFF_MODE);
  159. uint64_t ratio_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT)) / slowclk_cycles;
  160. uint32_t ratio = (uint32_t)(ratio_64 & UINT32_MAX);
  161. return ratio;
  162. }
  163. uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
  164. {
  165. uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles, RTC_TIME_CAL_ONEOFF_MODE);
  166. uint32_t period = rtc_clk_xtal_to_slowclk(xtal_cycles, slowclk_cycles);
  167. return period;
  168. }
  169. uint32_t rtc_clk_cal_cycling(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
  170. {
  171. uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles, RTC_TIME_CAL_CYCLING_MODE);
  172. uint32_t period = rtc_clk_xtal_to_slowclk(xtal_cycles, slowclk_cycles);
  173. return period;
  174. }
  175. uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
  176. {
  177. /* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
  178. * TODO: fix overflow.
  179. */
  180. return (time_in_us << RTC_CLK_CAL_FRACT) / period;
  181. }
  182. uint64_t rtc_time_slowclk_to_us(uint64_t rtc_cycles, uint32_t period)
  183. {
  184. return (rtc_cycles * period) >> RTC_CLK_CAL_FRACT;
  185. }
/**
 * @brief Get the current value of the RTC counter, in RTC slow clock cycles.
 *
 * Triggers a counter snapshot via RTC_CNTL_TIME_UPDATE, then assembles the
 * 64-bit value from the low/high latch registers.
 * NOTE(review): there is no wait for an update-done/valid flag between the
 * trigger and the reads — presumably the S2 latches synchronously with the
 * register write; confirm against the TRM.
 */
uint64_t rtc_time_get(void)
{
    SET_PERI_REG_MASK(RTC_CNTL_TIME_UPDATE_REG, RTC_CNTL_TIME_UPDATE);
    uint64_t t = READ_PERI_REG(RTC_CNTL_TIME0_REG);
    t |= ((uint64_t) READ_PERI_REG(RTC_CNTL_TIME1_REG)) << 32;
    return t;
}
/**
 * @brief Get the duration of the last light sleep, in RTC slow clock cycles.
 *
 * Returns the difference of two hardware-latched timestamps; from the
 * variable names, TIME_LOW0/HIGH0 hold the wakeup time and TIME_LOW1/HIGH1
 * the sleep-entry time — confirm against the register reference.
 */
uint64_t rtc_light_slp_time_get(void)
{
    uint64_t t_wake = READ_PERI_REG(RTC_CNTL_TIME_LOW0_REG);
    t_wake |= ((uint64_t) READ_PERI_REG(RTC_CNTL_TIME_HIGH0_REG)) << 32;
    uint64_t t_slp = READ_PERI_REG(RTC_CNTL_TIME_LOW1_REG);
    t_slp |= ((uint64_t) READ_PERI_REG(RTC_CNTL_TIME_HIGH1_REG)) << 32;
    return (t_wake - t_slp);
}
/**
 * @brief Get the duration of the last deep sleep, in RTC slow clock cycles.
 *
 * Uses the latched sleep-entry timestamp (TIME_LOW1/HIGH1) and the current
 * RTC counter as the wakeup time (unlike light sleep, the wake timestamp is
 * read live via rtc_time_get()).
 */
uint64_t rtc_deep_slp_time_get(void)
{
    uint64_t t_slp = READ_PERI_REG(RTC_CNTL_TIME_LOW1_REG);
    t_slp |= ((uint64_t) READ_PERI_REG(RTC_CNTL_TIME_HIGH1_REG)) << 32;
    uint64_t t_wake = rtc_time_get();
    return (t_wake - t_slp);
}
/**
 * @brief Busy-wait until the next RTC_SLOW_CLK cycle boundary.
 *
 * Sets RTC_CNTL_SLOW_CLK_NEXT_EDGE and polls until the bit reads back clear —
 * presumably hardware clears it on the next slow-clock edge (the poll relies
 * on this; confirm against the TRM).
 *
 * NOTE: this function may not be useful any more.
 */
void rtc_clk_wait_for_slow_cycle(void)
{
    SET_PERI_REG_MASK(RTC_CNTL_SLOW_CLK_CONF_REG, RTC_CNTL_SLOW_CLK_NEXT_EDGE);
    while (GET_PERI_REG_MASK(RTC_CNTL_SLOW_CLK_CONF_REG, RTC_CNTL_SLOW_CLK_NEXT_EDGE)) {
        esp_rom_delay_us(1);
    }
}
  215. uint32_t rtc_clk_freq_cal(uint32_t cal_val)
  216. {
  217. if (cal_val == 0) {
  218. return 0; // cal_val will be denominator, return 0 as the symbol of failure.
  219. }
  220. return 1000000ULL * (1 << RTC_CLK_CAL_FRACT) / cal_val;
  221. }