#!/usr/bin/env python
#
# Copyright 2018 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test script for running unit test cases.
"""
import re
import os
import sys
import time
import argparse
import threading

try:
    import TinyFW
except ImportError:
    # if we want to run test cases outside the `tiny-test-fw` folder,
    # we need to insert the tiny-test-fw path into sys.path
    test_fw_path = os.getenv("TEST_FW_PATH")
    if test_fw_path and test_fw_path not in sys.path:
        sys.path.insert(0, test_fw_path)
    else:
        # or try the copy in IDF
        idf_path = os.getenv("IDF_PATH")
        tiny_test_path = idf_path + "/tools/tiny-test-fw"
        if os.path.exists(tiny_test_path):
            sys.path.insert(0, tiny_test_path)
    import TinyFW

import IDF
import Utility
import Env
from DUT import ExpectTimeout
from IDF.IDFApp import UT

UT_APP_BOOT_UP_DONE = "Press ENTER to see the list of tests."

RESET_PATTERN = re.compile(r"(ets [\w]{3}\s+[\d]{1,2} [\d]{4} [\d]{2}:[\d]{2}:[\d]{2}[^()]*\([\w].*?\))")
EXCEPTION_PATTERN = re.compile(r"(Guru Meditation Error: Core\s+\d panic'ed \([\w].*?\))")
ABORT_PATTERN = re.compile(r"(abort\(\) was called at PC 0x[a-fA-F\d]{8} on core \d)")
FINISH_PATTERN = re.compile(r"1 Tests (\d) Failures (\d) Ignored")
END_LIST_STR = r'\r?\nEnter test for running'
TEST_PATTERN = re.compile(r'\((\d+)\)\s+"([^"]+)" ([^\r\n]+)\r?\n(' + END_LIST_STR + r')?')
TEST_SUBMENU_PATTERN = re.compile(r'\s+\((\d+)\)\s+"[^"]+"\r?\n(?=(?=\()|(' + END_LIST_STR + r'))')
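
# A hedged illustration (the test names and tags are hypothetical, not captured from a
# real device) of the menu lines these patterns are written against, based on the regexes
# above and the parsing code in detect_update_unit_test_info():
#
#   (1) "UART sends data" [uart]
#   (2) "SPI Master test" [spi][timeout=120]
#   (3) "I2S build test" [i2s][multi_stage]
#           (1) "stage1"
#           (2) "stage2"
#   Enter test for running
#
# TEST_PATTERN captures the case index, the quoted name and the tag/attribute string;
# TEST_SUBMENU_PATTERN captures the sub-case indices of multi-stage/multi-device cases.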

SIMPLE_TEST_ID = 0
MULTI_STAGE_ID = 1
MULTI_DEVICE_ID = 2

DEFAULT_TIMEOUT = 20

DUT_DELAY_AFTER_RESET = 2
DUT_STARTUP_CHECK_RETRY_COUNT = 5
TEST_HISTORY_CHECK_TIMEOUT = 2


class TestCaseFailed(AssertionError):
    pass


def format_test_case_config(test_case_data):
    """
    Convert the test case data to a unified format.
    We need the following info to run unit test cases:

    1. unit test app config
    2. test case name
    3. test case reset info

    The formatted case config is a dict, with ut app config as keys. The value is a list of test cases.
    Each test case is a dict with "name" and "reset" as keys. For example::

        case_config = {
            "default": [{"name": "restart from PRO CPU", "reset": "SW_CPU_RESET"}, {...}],
            "psram": [{"name": "restart from PRO CPU", "reset": "SW_CPU_RESET"}],
        }

    If config is not specified for a test case, it is assigned to the "default" config.

    :param test_case_data: string, list, or a dictionary list
    :return: formatted data
    """
    case_config = dict()

    def parse_case(one_case_data):
        """ parse and format one case """

        def process_reset_list(reset_list):
            # strip spaces and remove whitespace-only items
            _output = list()
            for _r in reset_list:
                _data = _r.strip(" ")
                if _data:
                    _output.append(_data)
            return _output

        _case = dict()
        if isinstance(one_case_data, str):
            _temp = one_case_data.split(" [reset=")
            _case["name"] = _temp[0]
            try:
                _case["reset"] = process_reset_list(_temp[1][0:-1].split(","))
            except IndexError:
                _case["reset"] = list()
        elif isinstance(one_case_data, dict):
            _case = one_case_data.copy()
            assert "name" in _case
            if "reset" not in _case:
                _case["reset"] = list()
            else:
                if isinstance(_case["reset"], str):
                    _case["reset"] = process_reset_list(_case["reset"].split(","))
        else:
            raise TypeError("Not supported type during parsing unit test case")

        if "config" not in _case:
            _case["config"] = "default"
        return _case

    if not isinstance(test_case_data, list):
        test_case_data = [test_case_data]

    for case_data in test_case_data:
        parsed_case = parse_case(case_data)
        try:
            case_config[parsed_case["config"]].append(parsed_case)
        except KeyError:
            case_config[parsed_case["config"]] = [parsed_case]

    return case_config
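
# A minimal illustration of how format_test_case_config() parses the string form, using
# the case name from the docstring example above:
#
#   format_test_case_config("restart from PRO CPU [reset=SW_CPU_RESET]")
#   # -> {"default": [{"name": "restart from PRO CPU",
#   #                  "reset": ["SW_CPU_RESET"],
#   #                  "config": "default"}]}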


def replace_app_bin(dut, name, new_app_bin):
    if new_app_bin is None:
        return
    search_pattern = '/{}.bin'.format(name)
    for i, config in enumerate(dut.download_config):
        if config.endswith(search_pattern):
            dut.download_config[i] = new_app_bin
            Utility.console_log("The replaced application binary is {}".format(new_app_bin), "O")
            break


def reset_dut(dut):
    dut.reset()
    # esptool's ``run`` command takes quite a long time.
    # The serial port is closed before the reset finishes, so the DUT may already have booted up
    # before the serial port is opened again; checking the boot-up print would then fail.
    # Instead, send the `-` command and check the test history to verify that the DUT has booted up.
    # We retry this step a few times, in case ``dut.reset`` returns while the DUT is still booting
    # (when the DUT can't process any command yet).
    #
    # During boot-up, the DUT might only receive part of the first `-` command.
    # If it only receives `\n`, it prints all test cases, which can take more than 5 seconds and
    # make the reset check fail. To avoid this, we add a delay between the reset and the `-`
    # command, and enlarge the expect timeout.
    time.sleep(DUT_DELAY_AFTER_RESET)
    for _ in range(DUT_STARTUP_CHECK_RETRY_COUNT):
        dut.write("-")
        try:
            dut.expect("0 Tests 0 Failures 0 Ignored", timeout=TEST_HISTORY_CHECK_TIMEOUT)
            break
        except ExpectTimeout:
            pass
    else:
        raise AssertionError("Reset {} ({}) failed!".format(dut.name, dut.port))


def run_one_normal_case(dut, one_case, junit_test_case):
    reset_dut(dut)

    dut.start_capture_raw_data()
    # run test case
    dut.write("\"{}\"".format(one_case["name"]))
    dut.expect("Running " + one_case["name"] + "...")

    exception_reset_list = []

    # we want to set this flag in callbacks (inner functions)
    # use list here so we can use append to set this flag
    test_finish = list()

    # expect callbacks
    def one_case_finish(result):
        """ one test finished, let expect loop break and log result """
        test_finish.append(True)
        output = dut.stop_capture_raw_data()
        if result:
            Utility.console_log("Success: " + one_case["name"], color="green")
        else:
            Utility.console_log("Failed: " + one_case["name"], color="red")
            junit_test_case.add_failure_info(output)
            raise TestCaseFailed()

    def handle_exception_reset(data):
        """
        just append data to exception list.
        exception list will be checked in ``handle_reset_finish``, once reset finished.
        """
        exception_reset_list.append(data[0])

    def handle_test_finish(data):
        """ test finished without reset """
        # in this scenario reset should not happen
        assert not exception_reset_list
        if int(data[1]):
            # case ignored
            Utility.console_log("Ignored: " + one_case["name"], color="orange")
            junit_test_case.add_skipped_info("ignored")
        one_case_finish(not int(data[0]))

    def handle_reset_finish(data):
        """ reset happened and reboot finished """
        assert exception_reset_list  # reboot but no exception/reset logged. should never happen
        result = False
        if len(one_case["reset"]) == len(exception_reset_list):
            for i, exception in enumerate(exception_reset_list):
                if one_case["reset"][i] not in exception:
                    break
            else:
                result = True
        if not result:
            err_msg = "Reset Check Failed: \r\n\tExpected: {}\r\n\tGet: {}".format(one_case["reset"],
                                                                                   exception_reset_list)
            Utility.console_log(err_msg, color="orange")
            junit_test_case.add_error_info(err_msg)
        one_case_finish(result)

    while not test_finish:
        try:
            dut.expect_any((RESET_PATTERN, handle_exception_reset),
                           (EXCEPTION_PATTERN, handle_exception_reset),
                           (ABORT_PATTERN, handle_exception_reset),
                           (FINISH_PATTERN, handle_test_finish),
                           (UT_APP_BOOT_UP_DONE, handle_reset_finish),
                           timeout=one_case["timeout"])
        except ExpectTimeout:
            Utility.console_log("Timeout in expect", color="orange")
            junit_test_case.add_failure_info("timeout")
            one_case_finish(False)
            break


@IDF.idf_unit_test(env_tag="UT_T1_1", junit_report_by_case=True)
def run_unit_test_cases(env, extra_data):
    """
    extra_data can be three types of value

    1. as string:
       1. "case_name"
       2. "case_name [reset=RESET_REASON]"
    2. as dict:
       1. with key like {"name": "Intr_alloc test, shared ints"}
       2. with key like {"name": "restart from PRO CPU", "reset": "SW_CPU_RESET", "config": "psram"}
    3. as list of string or dict:
       [case1, case2, case3, {"name": "restart from PRO CPU", "reset": "SW_CPU_RESET"}, ...]

    :param env: test env instance
    :param extra_data: the case name or case list or case dictionary
    :return: None
    """
    case_config = format_test_case_config(extra_data)

    # we don't want to stop on a failed case (unless it's some special scenario we can't handle).
    # this list collects the cases that failed during execution;
    # before the test function exits it is used to report the failures.
    failed_cases = []

    for ut_config in case_config:
        Utility.console_log("Running unit test for config: " + ut_config, "O")
        dut = env.get_dut("unit-test-app", app_path=ut_config, allow_dut_exception=True)
        if len(case_config[ut_config]) > 0:
            replace_app_bin(dut, "unit-test-app", case_config[ut_config][0].get('app_bin'))
        dut.start_app()
        Utility.console_log("Download finished, start running test cases", "O")

        for one_case in case_config[ut_config]:
            performance_items = []
            # create junit report test case
            junit_test_case = TinyFW.JunitReport.create_test_case("[{}] {}".format(ut_config, one_case["name"]))
            try:
                run_one_normal_case(dut, one_case, junit_test_case)
                performance_items = dut.get_performance_items()
            except TestCaseFailed:
                failed_cases.append(one_case["name"])
            except Exception as e:
                junit_test_case.add_failure_info("Unexpected exception: " + str(e))
                failed_cases.append(one_case["name"])
            finally:
                TinyFW.JunitReport.update_performance(performance_items)
                TinyFW.JunitReport.test_case_finish(junit_test_case)

    # raise exception if any case fails
    if failed_cases:
        Utility.console_log("Failed Cases:", color="red")
        for _case_name in failed_cases:
            Utility.console_log("\t" + _case_name, color="red")
        raise AssertionError("Unit Test Failed")
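

# Multiple-device test cases need one DUT per "child case". Each DUT is driven by a
# Handler thread (below): the thread selects the parent case and its child case index on
# its DUT, then forwards "Send signal"/"Waiting for signal" messages between DUTs through
# the shared sent_signal_list so the devices can synchronize with each other.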


class Handler(threading.Thread):
    WAIT_SIGNAL_PATTERN = re.compile(r'Waiting for signal: \[(.+)]!')
    SEND_SIGNAL_PATTERN = re.compile(r'Send signal: \[([^]]+)](\[([^]]+)])?!')
    FINISH_PATTERN = re.compile(r"1 Tests (\d) Failures (\d) Ignored")

    def __init__(self, dut, sent_signal_list, lock, parent_case_name, child_case_index, timeout):
        self.dut = dut
        self.sent_signal_list = sent_signal_list
        self.lock = lock
        self.parent_case_name = parent_case_name
        self.child_case_name = ""
        self.child_case_index = child_case_index + 1
        self.finish = False
        self.result = False
        self.output = ""
        self.fail_name = None
        self.timeout = timeout
        self.force_stop = threading.Event()  # set to force this handler thread to stop
        reset_dut(self.dut)  # reset the board to make it start from the beginning
        threading.Thread.__init__(self, name="{} Handler".format(dut))

    def run(self):
        self.dut.start_capture_raw_data()

        def get_child_case_name(data):
            self.child_case_name = data[0]
            time.sleep(1)
            self.dut.write(str(self.child_case_index))

        def one_device_case_finish(result):
            """ one test finished, let expect loop break and log result """
            self.finish = True
            self.result = result
            self.output = "[{}]\n\n{}\n".format(self.child_case_name,
                                                self.dut.stop_capture_raw_data())
            if not result:
                self.fail_name = self.child_case_name

        def device_wait_action(data):
            start_time = time.time()
            expected_signal = data[0]
            while 1:
                if time.time() > start_time + self.timeout:
                    Utility.console_log("Timeout in device for function: %s" % self.child_case_name, color="orange")
                    break
                with self.lock:
                    for sent_signal in self.sent_signal_list:
                        if expected_signal == sent_signal["name"]:
                            self.dut.write(sent_signal["parameter"])
                            self.sent_signal_list.remove(sent_signal)
                            break
                    else:
                        time.sleep(0.01)
                        continue
                break

        def device_send_action(data):
            with self.lock:
                self.sent_signal_list.append({
                    "name": data[0].encode('utf-8'),
                    "parameter": "" if data[2] is None else data[2].encode('utf-8')
                    # no parameter means we only write EOL to DUT
                })

        def handle_device_test_finish(data):
            """ test finished without reset """
            # in this scenario reset should not happen
            if int(data[1]):
                # case ignored
                Utility.console_log("Ignored: " + self.child_case_name, color="orange")
            one_device_case_finish(not int(data[0]))

        try:
            time.sleep(1)
            self.dut.write("\"{}\"".format(self.parent_case_name))
            self.dut.expect("Running " + self.parent_case_name + "...")
        except ExpectTimeout:
            Utility.console_log("No case detected!", color="orange")

        while not self.finish and not self.force_stop.isSet():
            try:
                self.dut.expect_any((re.compile('\(' + str(self.child_case_index) + '\)\s"(\w+)"'),  # noqa: W605 - regex
                                     get_child_case_name),
                                    (self.WAIT_SIGNAL_PATTERN, device_wait_action),  # wait signal pattern
                                    (self.SEND_SIGNAL_PATTERN, device_send_action),  # send signal pattern
                                    (self.FINISH_PATTERN, handle_device_test_finish),  # test finish pattern
                                    timeout=self.timeout)
            except ExpectTimeout:
                Utility.console_log("Timeout in expect", color="orange")
                one_device_case_finish(False)
                break

    def stop(self):
        self.force_stop.set()
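

# A hedged example (the signal names are hypothetical) of the console lines the Handler
# reacts to, inferred from WAIT_SIGNAL_PATTERN / SEND_SIGNAL_PATTERN above:
#
#   Send signal: [master ready]!             -> recorded in sent_signal_list
#   Send signal: [slave address][0x28]!      -> recorded together with the parameter "0x28"
#   Waiting for signal: [master ready]!      -> polls sent_signal_list until a matching entry
#                                               appears, then writes the parameter (or just EOL)
#                                               back to this DUT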


def get_case_info(one_case):
    parent_case = one_case["name"]
    child_case_num = one_case["child case num"]
    return parent_case, child_case_num


def get_dut(duts, env, name, ut_config, app_bin=None):
    if name in duts:
        dut = duts[name]
    else:
        dut = env.get_dut(name, app_path=ut_config, allow_dut_exception=True)
        duts[name] = dut
        replace_app_bin(dut, "unit-test-app", app_bin)
        dut.start_app()  # download bin to board
    return dut


def run_one_multiple_devices_case(duts, ut_config, env, one_case, app_bin, junit_test_case):
    lock = threading.RLock()
    threads = []
    send_signal_list = []
    result = True
    parent_case, case_num = get_case_info(one_case)

    for i in range(case_num):
        dut = get_dut(duts, env, "dut%d" % i, ut_config, app_bin)
        threads.append(Handler(dut, send_signal_list, lock,
                               parent_case, i, one_case["timeout"]))
    for thread in threads:
        thread.setDaemon(True)
        thread.start()
    output = "Multiple Device Failed\n"
    for thread in threads:
        thread.join()
        result = result and thread.result
        output += thread.output
        if not thread.result:
            [thd.stop() for thd in threads]

    if not result:
        junit_test_case.add_failure_info(output)

    # collect performance results from the DUTs
    performance_items = []
    for dut_name in duts:
        performance_items.extend(duts[dut_name].get_performance_items())
    TinyFW.JunitReport.update_performance(performance_items)
    return result


@IDF.idf_unit_test(env_tag="UT_T2_1", junit_report_by_case=True)
def run_multiple_devices_cases(env, extra_data):
    """
    extra_data can be two types of value

    1. as dict, e.g.::

        {"name": "gpio master/slave test example",
         "child case num": 2,
         "config": "release",
         "env_tag": "UT_T2_1"}

    2. as list of dict, e.g.::

        [{"name": "gpio master/slave test example1",
          "child case num": 2,
          "config": "release",
          "env_tag": "UT_T2_1"},
         {"name": "gpio master/slave test example2",
          "child case num": 2,
          "config": "release",
          "env_tag": "UT_T2_1"}]
    """
    failed_cases = []
    case_config = format_test_case_config(extra_data)
    duts = {}
    for ut_config in case_config:
        Utility.console_log("Running unit test for config: " + ut_config, "O")
        for one_case in case_config[ut_config]:
            result = False
            junit_test_case = TinyFW.JunitReport.create_test_case("[{}] {}".format(ut_config, one_case["name"]))
            try:
                result = run_one_multiple_devices_case(duts, ut_config, env, one_case,
                                                       one_case.get('app_bin'), junit_test_case)
            except Exception as e:
                junit_test_case.add_failure_info("Unexpected exception: " + str(e))
            finally:
                if result:
                    Utility.console_log("Success: " + one_case["name"], color="green")
                else:
                    failed_cases.append(one_case["name"])
                    Utility.console_log("Failed: " + one_case["name"], color="red")
                TinyFW.JunitReport.test_case_finish(junit_test_case)

    if failed_cases:
        Utility.console_log("Failed Cases:", color="red")
        for _case_name in failed_cases:
            Utility.console_log("\t" + _case_name, color="red")
        raise AssertionError("Unit Test Failed")


def run_one_multiple_stage_case(dut, one_case, junit_test_case):
    reset_dut(dut)

    dut.start_capture_raw_data()

    exception_reset_list = []

    for test_stage in range(one_case["child case num"]):
        # select multi stage test case name
        dut.write("\"{}\"".format(one_case["name"]))
        dut.expect("Running " + one_case["name"] + "...")
        # select test function for current stage
        dut.write(str(test_stage + 1))

        # we want to set this flag in callbacks (inner functions)
        # use list here so we can use append to set this flag
        stage_finish = list()

        def last_stage():
            return test_stage == one_case["child case num"] - 1

        def check_reset():
            if one_case["reset"]:
                assert exception_reset_list  # reboot but no exception/reset logged. should never happen
                result = False
                if len(one_case["reset"]) == len(exception_reset_list):
                    for i, exception in enumerate(exception_reset_list):
                        if one_case["reset"][i] not in exception:
                            break
                    else:
                        result = True
                if not result:
                    err_msg = "Reset Check Failed: \r\n\tExpected: {}\r\n\tGet: {}".format(one_case["reset"],
                                                                                           exception_reset_list)
                    Utility.console_log(err_msg, color="orange")
                    junit_test_case.add_failure_info(err_msg)
            else:
                # we allow omitting the reset list in multi stage cases
                result = True
            return result

        # expect callbacks
        def one_case_finish(result):
            """ one test finished, let expect loop break and log result """
            # handle test finish
            result = result and check_reset()
            output = dut.stop_capture_raw_data()
            if result:
                Utility.console_log("Success: " + one_case["name"], color="green")
            else:
                Utility.console_log("Failed: " + one_case["name"], color="red")
                junit_test_case.add_failure_info(output)
                raise TestCaseFailed()
            stage_finish.append("break")

        def handle_exception_reset(data):
            """
            just append data to exception list.
            exception list will be checked in ``handle_reset_finish``, once reset finished.
            """
            exception_reset_list.append(data[0])

        def handle_test_finish(data):
            """ test finished without reset """
            # in this scenario reset should not happen
            if int(data[1]):
                # case ignored
                Utility.console_log("Ignored: " + one_case["name"], color="orange")
                junit_test_case.add_skipped_info("ignored")
            # only a pass in the last stage is regarded as a real pass
            if last_stage():
                one_case_finish(not int(data[0]))
            else:
                Utility.console_log("test finished before reaching the last stage", color="orange")
                one_case_finish(False)

        def handle_next_stage(data):
            """ reboot finished. we go to the next stage """
            if last_stage():
                # already last stage, should never go to next stage
                Utility.console_log("didn't finish at last stage", color="orange")
                one_case_finish(False)
            else:
                stage_finish.append("continue")

        while not stage_finish:
            try:
                dut.expect_any((RESET_PATTERN, handle_exception_reset),
                               (EXCEPTION_PATTERN, handle_exception_reset),
                               (ABORT_PATTERN, handle_exception_reset),
                               (FINISH_PATTERN, handle_test_finish),
                               (UT_APP_BOOT_UP_DONE, handle_next_stage),
                               timeout=one_case["timeout"])
            except ExpectTimeout:
                Utility.console_log("Timeout in expect", color="orange")
                one_case_finish(False)
                break
        if stage_finish[0] == "break":
            # test breaks on current stage
            break


@IDF.idf_unit_test(env_tag="UT_T1_1", junit_report_by_case=True)
def run_multiple_stage_cases(env, extra_data):
    """
    extra_data can be two types of value

    1. as dict: mandatory keys: "name" and "child case num", optional keys: "reset" and others
    2. as list of string or dict:
       [case1, case2, case3, {"name": "restart from PRO CPU", "child case num": 2}, ...]

    :param env: test env instance
    :param extra_data: the case name or case list or case dictionary
    :return: None
    """
    case_config = format_test_case_config(extra_data)

    # we don't want to stop on a failed case (unless it's some special scenario we can't handle).
    # this list collects the cases that failed during execution;
    # before the test function exits it is used to report the failures.
    failed_cases = []

    for ut_config in case_config:
        Utility.console_log("Running unit test for config: " + ut_config, "O")
        dut = env.get_dut("unit-test-app", app_path=ut_config, allow_dut_exception=True)
        if len(case_config[ut_config]) > 0:
            replace_app_bin(dut, "unit-test-app", case_config[ut_config][0].get('app_bin'))
        dut.start_app()

        for one_case in case_config[ut_config]:
            performance_items = []
            junit_test_case = TinyFW.JunitReport.create_test_case("[{}] {}".format(ut_config, one_case["name"]))
            try:
                run_one_multiple_stage_case(dut, one_case, junit_test_case)
                performance_items = dut.get_performance_items()
            except TestCaseFailed:
                failed_cases.append(one_case["name"])
            except Exception as e:
                junit_test_case.add_failure_info("Unexpected exception: " + str(e))
                failed_cases.append(one_case["name"])
            finally:
                TinyFW.JunitReport.update_performance(performance_items)
                TinyFW.JunitReport.test_case_finish(junit_test_case)

    # raise exception if any case fails
    if failed_cases:
        Utility.console_log("Failed Cases:", color="red")
        for _case_name in failed_cases:
            Utility.console_log("\t" + _case_name, color="red")
        raise AssertionError("Unit Test Failed")
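

# detect_update_unit_test_info() reads the test menu of the flashed unit-test-app and
# fills in the missing per-case attributes in extra_data (case type, timeout and, for
# multi-stage/multi-device cases, "child case num"), so the command line only needs to
# supply the case names.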


def detect_update_unit_test_info(env, extra_data, app_bin):
    case_config = format_test_case_config(extra_data)

    for ut_config in case_config:
        dut = env.get_dut("unit-test-app", app_path=ut_config)
        replace_app_bin(dut, "unit-test-app", app_bin)
        dut.start_app()

        reset_dut(dut)

        # get the list of test cases
        dut.write("")
        dut.expect("Here's the test menu, pick your combo:", timeout=DEFAULT_TIMEOUT)

        def find_update_dic(name, _t, _timeout, child_case_num=None):
            for _case_data in extra_data:
                if _case_data['name'] == name:
                    _case_data['type'] = _t
                    if 'timeout' not in _case_data:
                        _case_data['timeout'] = _timeout
                    if child_case_num:
                        _case_data['child case num'] = child_case_num

        try:
            while True:
                data = dut.expect(TEST_PATTERN, timeout=DEFAULT_TIMEOUT)
                test_case_name = data[1]
                m = re.search(r'\[timeout=(\d+)\]', data[2])
                if m:
                    timeout = int(m.group(1))
                else:
                    timeout = 30
                m = re.search(r'\[multi_stage\]', data[2])
                if m:
                    test_case_type = MULTI_STAGE_ID
                else:
                    m = re.search(r'\[multi_device\]', data[2])
                    if m:
                        test_case_type = MULTI_DEVICE_ID
                    else:
                        test_case_type = SIMPLE_TEST_ID
                        find_update_dic(test_case_name, test_case_type, timeout)
                        if data[3] and re.search(END_LIST_STR, data[3]):
                            break
                        continue
                # find the last submenu item
                data = dut.expect(TEST_SUBMENU_PATTERN, timeout=DEFAULT_TIMEOUT)
                find_update_dic(test_case_name, test_case_type, timeout, child_case_num=int(data[0]))
                if data[1] and re.search(END_LIST_STR, data[1]):
                    break
            # check if the unit test case names are correct, i.e. they could be found in the device
            for _dic in extra_data:
                if 'type' not in _dic:
                    raise ValueError("Unit test \"{}\" doesn't exist in the flashed device!".format(_dic.get('name')))
        except ExpectTimeout:
            Utility.console_log("Timeout during getting the test list", color="red")
        finally:
            dut.close()

        # These options are the same for all configs, therefore there is no need to continue
        break
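

# Command-line usage sketch (the case names below are hypothetical; see --help for details):
#
#   python unit_test.py "UART sends data,config:release,timeout:30" \
#                       "gpio master/slave test example,child case num:2"
#
# Each positional argument is a comma separated list of <option>:<argument> pairs;
# an item without a ':' is taken as the case name.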


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--repeat', '-r',
        help='Number of repetitions for the test(s). Default is 1.',
        type=int,
        default=1
    )
    parser.add_argument("--env_config_file", "-e",
                        help="test env config file",
                        default=None
                        )
    parser.add_argument("--app_bin", "-b",
                        help="application binary file for flashing the chip",
                        default=None
                        )
    parser.add_argument(
        'test',
        help='Comma separated list of <option>:<argument> where option can be "name" (default), '
             '"child case num", "config", "timeout".',
        nargs='+'
    )
    args = parser.parse_args()

    list_of_dicts = []
    for test in args.test:
        test_args = test.split(r',')
        test_dict = dict()
        for test_item in test_args:
            if len(test_item) == 0:
                continue
            pair = test_item.split(r':')
            if len(pair) == 1 or pair[0] == 'name':
                test_dict['name'] = pair[-1]
            elif len(pair) == 2:
                if pair[0] == 'timeout' or pair[0] == 'child case num':
                    test_dict[pair[0]] = int(pair[1])
                else:
                    test_dict[pair[0]] = pair[1]
            else:
                raise ValueError('Error in argument item {} of {}'.format(test_item, test))
        test_dict['app_bin'] = args.app_bin
        list_of_dicts.append(test_dict)

    TinyFW.set_default_config(env_config_file=args.env_config_file)

    env_config = TinyFW.get_default_config()
    env_config['app'] = UT
    env_config['dut'] = IDF.IDFDUT
    env_config['test_suite_name'] = 'unit_test_parsing'
    test_env = Env.Env(**env_config)
    detect_update_unit_test_info(test_env, extra_data=list_of_dicts, app_bin=args.app_bin)

    for index in range(1, args.repeat + 1):
        if args.repeat > 1:
            Utility.console_log("Repetition {}".format(index), color="green")
        for dic in list_of_dicts:
            t = dic.get('type', SIMPLE_TEST_ID)
            if t == SIMPLE_TEST_ID:
                run_unit_test_cases(extra_data=dic)
            elif t == MULTI_STAGE_ID:
                run_multiple_stage_cases(extra_data=dic)
            elif t == MULTI_DEVICE_ID:
                run_multiple_devices_cases(extra_data=dic)
            else:
                raise ValueError('Unknown type {} of {}'.format(t, dic.get('name')))