# SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Unlicense OR CC0-1.0
"""
Test case for the iperf example.

This test case may have problems running on Windows:

- `sudo killall iperf` is used to force-kill iperf; a Windows version of this is not implemented

The test env Example_ShieldBox needs the following config::

    Example_ShieldBox:
      ap_list:
        - ssid: "ssid"
          password: "password"
          outlet: 1
          apc_ip: "192.168.1.88"
      attenuator_port: "/dev/ttyUSB0"
      iperf: "/dev/ttyUSB1"
      apc_ip: "192.168.1.88"
      pc_nic: "eth0"
"""
import logging
import os
import re
import time
from typing import Any, Callable, Dict, Generator, Tuple

import pexpect
import pytest
from common_test_methods import get_env_config_variable, get_host_ip_by_interface
from idf_iperf_test_util import Attenuator, IperfUtility, PowerControl, TestReport
from idf_iperf_test_util.IperfUtility import SCAN_RETRY_COUNT, SCAN_TIMEOUT, TEST_TIME
from pytest_embedded import Dut
from pytest_embedded_idf.dut import IdfDut

# configurations
RETRY_COUNT_FOR_BEST_PERFORMANCE = 2
ATTEN_VALUE_LIST = range(0, 60, 2)
NO_BANDWIDTH_LIMIT = -1  # iperf send bandwidth is not limited

# We need to auto-compare the difference between adjacent configs (01 -> 00, 02 -> 01, ...) and put them into reports.
# Using numbers for the configs makes this easy.
# Use the default value `99` for the config with the best performance.
BEST_PERFORMANCE_CONFIG = '99'
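
# A minimal sketch (hypothetical helper, not used by the tests) of the adjacent-config
# pairing that the numeric naming scheme above enables:
def _adjacent_config_pairs(count: int = 8) -> list:
    """Return [('01', '00'), ('02', '01'), ...] for `count` numeric configs."""
    return [('{:02d}'.format(i), '{:02d}'.format(i - 1)) for i in range(1, count)]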


class IperfTestUtilitySoftap(IperfUtility.IperfTestUtility):
    """ iperf test implementation """

    def __init__(self, dut: IdfDut, softap_dut: IdfDut, config_name: str, test_result: Any = None) -> None:
        IperfUtility.IperfTestUtility.__init__(self, dut, config_name, 'softap', '1234567890', None, None, test_result)
        self.softap_dut = softap_dut
        self.softap_ip = '192.168.4.1'
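        # assumption: 192.168.4.1 is the ESP-IDF softap default gateway address, so the
        # station can always reach the softap DUT at this fixed IP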

    def setup(self) -> Tuple[str, int]:
        """
        setup the iperf test:

        1. kill the current iperf process
        2. reboot the DUT (currently iperf is not very robust, so the DUT needs a reboot)
        3. scan to get the AP RSSI
        4. connect to the AP
        """
        self.softap_dut.write('restart')
        self.softap_dut.expect_exact("Type 'help' to get the list of commands.")
        self.softap_dut.expect('iperf>', timeout=30)
        self.softap_dut.write('ap {} {}'.format(self.ap_ssid, self.ap_password))
        self.dut.write('restart')
        self.dut.expect_exact("Type 'help' to get the list of commands.")
        self.dut.expect('iperf>', timeout=30)
        self.dut.write('scan {}'.format(self.ap_ssid))
        # for/else: the AssertionError is raised only if no scan attempt breaks out of
        # the loop within SCAN_RETRY_COUNT tries
        for _ in range(SCAN_RETRY_COUNT):
            try:
                rssi = int(self.dut.expect(r'\[{}]\[rssi=(-\d+)]'.format(self.ap_ssid),
                                           timeout=SCAN_TIMEOUT).group(1))
                break
            except pexpect.TIMEOUT:
                continue
        else:
            raise AssertionError('Failed to scan AP')
        self.dut.write('sta {} {}'.format(self.ap_ssid, self.ap_password))
        dut_ip = self.dut.expect(r'sta ip: ([\d.]+), mask: ([\d.]+), gw: ([\d.]+)').group(1).decode('utf-8')
        return dut_ip, rssi
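
    # Note: the station IP returned by setup() is used as the iperf server address for
    # the rx direction below, and the scanned RSSI ends up in the throughput-vs-RSSI reports.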

    def _test_once(self, proto: str, direction: str, bw_limit: int) -> Tuple[str, int, int]:
        """ run one measurement for one proto/direction combination """
        # connect and scan to get RSSI
        dut_ip, rssi = self.setup()
        assert direction in ['rx', 'tx']
        assert proto in ['tcp', 'udp']
        # run iperf test
        if direction == 'tx':
            if proto == 'tcp':
                self.softap_dut.write('iperf -s -i 1 -t {}'.format(TEST_TIME))
                # wait until the TCP server is created
                try:
                    self.softap_dut.expect('iperf tcp server create successfully', timeout=1)
                except pexpect.TIMEOUT:
                    # compatible with old iperf example binaries
                    pass
                if bw_limit > 0:
                    self.dut.write('iperf -c {} -i 1 -t {} -b {}'.format(self.softap_ip, TEST_TIME, bw_limit))
                else:
                    self.dut.write('iperf -c {} -i 1 -t {}'.format(self.softap_ip, TEST_TIME))
            else:
                self.softap_dut.write('iperf -s -u -i 1 -t {}'.format(TEST_TIME))
                if bw_limit > 0:
                    self.dut.write('iperf -c {} -u -i 1 -t {} -b {}'.format(self.softap_ip, TEST_TIME, bw_limit))
                else:
                    self.dut.write('iperf -c {} -u -i 1 -t {}'.format(self.softap_ip, TEST_TIME))
        else:
            if proto == 'tcp':
                self.dut.write('iperf -s -i 1 -t {}'.format(TEST_TIME))
                # wait until the TCP server is created
                try:
                    self.dut.expect('iperf tcp server create successfully', timeout=1)
                except pexpect.TIMEOUT:
                    # compatible with old iperf example binaries
                    pass
                if bw_limit > 0:
                    self.softap_dut.write('iperf -c {} -i 1 -t {} -b {}'.format(dut_ip, TEST_TIME, bw_limit))
                else:
                    self.softap_dut.write('iperf -c {} -i 1 -t {}'.format(dut_ip, TEST_TIME))
            else:
                self.dut.write('iperf -s -u -i 1 -t {}'.format(TEST_TIME))
                if bw_limit > 0:
                    self.softap_dut.write('iperf -c {} -u -i 1 -t {} -b {}'.format(dut_ip, TEST_TIME, bw_limit))
                else:
                    self.softap_dut.write('iperf -c {} -u -i 1 -t {}'.format(dut_ip, TEST_TIME))
        time.sleep(TEST_TIME + 5)
        # the tx and rx cases both drain the report data from the station DUT console
        server_raw_data = self.dut.expect(pexpect.TIMEOUT, timeout=0).decode('utf-8')
        self.dut.write('iperf -a')
        self.softap_dut.write('iperf -a')
        self.dut.write('heap')
        heap_size = self.dut.expect(r'min heap size: (\d+)\D').group(1)
        # return the raw report data (for parsing test results), RSSI and minimum heap size
        return server_raw_data, rssi, heap_size
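

# A minimal usage sketch for the class above (hypothetical DUT handles); run_all_cases()
# is inherited from IperfUtility.IperfTestUtility, as exercised in the tests below:
#
#     utility = IperfTestUtilitySoftap(sta_dut, softap_dut, BEST_PERFORMANCE_CONFIG, test_result)
#     utility.run_all_cases(atten_val, NO_BANDWIDTH_LIMIT)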


@pytest.fixture(name='generate_report_different_configs', scope='session')
def fixture_generate_report_different_configs(
    session_tempdir: str
) -> Generator[Callable[[Dict[str, Any], Dict[str, Any], str], None], None, None]:
    _test_result_dict = dict()
    _sdkconfig_files_dict = dict()
    _ap_info = dict()

    def add_config(ap_info: Dict[str, Any], test_result: Dict[str, Any], config_name: str) -> None:
        """
        Collects the results for each config and stores them in a dictionary

        Args:
            ap_info: AP info
            test_result: test results for a specific config
            config_name: config name
        """
        # the SSID needs to be stored to generate the report in the teardown period;
        # note that the info passed along with the last call of the fixture is used in the teardown period
        _ap_info['ssid'] = ap_info['ssid']
        _test_result_dict[config_name] = test_result
        _sdkconfig_files_dict[config_name] = 'sdkconfig.ci.' + config_name

    yield add_config

    # the final report for all config results is generated during the fixture's teardown period
    report = TestReport.ThroughputForConfigsReport(os.path.join(session_tempdir, 'Performance',
                                                                'ThroughputForConfigsReport'), _ap_info['ssid'],
                                                   _test_result_dict, _sdkconfig_files_dict)
    report.generate_report()
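
# Usage sketch for the fixture above: each parametrized config run calls the yielded
# helper once, e.g.
#
#     generate_report_different_configs(ap_info, test_result, config_name)
#
# and the combined ThroughputForConfigsReport is written at session teardown.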


@pytest.mark.esp32
@pytest.mark.esp32s2
@pytest.mark.esp32c3
@pytest.mark.esp32s3
@pytest.mark.temp_skip_ci(targets=['esp32s2', 'esp32c3', 'esp32s3'], reason='lack of runners (run only for ESP32)')
@pytest.mark.timeout(1200)
@pytest.mark.Example_ShieldBox_Basic
@pytest.mark.parametrize('config', [
    BEST_PERFORMANCE_CONFIG
], indirect=True)
def test_wifi_throughput_basic(
    dut: Dut,
    log_performance: Callable[[str, str], None],
    check_performance: Callable[[str, float, str], None],
) -> None:
    """
    steps: |
      1. test TCP tx rx and UDP tx rx throughput
      2. compare with the pre-defined pass standard
    """
    # 1. wait for the DUT
    dut.expect('iperf>')

    # 2. preparation
    env_name = 'Example_ShieldBox_Basic'
    pc_nic = get_env_config_variable(env_name, 'pc_nic')
    pc_nic_ip = get_host_ip_by_interface(pc_nic)
    pc_iperf_log_file = os.path.join(dut.logdir, 'pc_iperf_log.md')
    ap_info = {
        'ssid': get_env_config_variable(env_name, 'ap_ssid'),
        'password': get_env_config_variable(env_name, 'ap_password'),
    }
    test_result = {
        'tcp_tx': IperfUtility.TestResult('tcp', 'tx', BEST_PERFORMANCE_CONFIG),
        'tcp_rx': IperfUtility.TestResult('tcp', 'rx', BEST_PERFORMANCE_CONFIG),
        'udp_tx': IperfUtility.TestResult('udp', 'tx', BEST_PERFORMANCE_CONFIG),
        'udp_rx': IperfUtility.TestResult('udp', 'rx', BEST_PERFORMANCE_CONFIG),
    }
    test_utility = IperfUtility.IperfTestUtility(dut, BEST_PERFORMANCE_CONFIG, ap_info['ssid'], ap_info['password'],
                                                 pc_nic_ip, pc_iperf_log_file, test_result)

    # 3. run test for TCP Tx, Rx and UDP Tx, Rx
    for _ in range(RETRY_COUNT_FOR_BEST_PERFORMANCE):
        test_utility.run_all_cases(0, NO_BANDWIDTH_LIMIT)

    # 4. log performance and compare with the pass standard
    for throughput_type in test_result:
        log_performance('{}_throughput'.format(throughput_type),
                        '{:.02f} Mbps'.format(test_result[throughput_type].get_best_throughput()))

    # check after logging: otherwise the test exits immediately on a failed check and
    # some performance numbers would never be logged
    for throughput_type in test_result:
        check_performance('{}_throughput'.format(throughput_type),
                          test_result[throughput_type].get_best_throughput(), dut.target)


@pytest.mark.esp32
@pytest.mark.esp32s2
@pytest.mark.esp32c3
@pytest.mark.esp32s3
@pytest.mark.temp_skip_ci(targets=['esp32', 'esp32s2', 'esp32c3', 'esp32s3'], reason='local stress test')
@pytest.mark.timeout(1200)
@pytest.mark.Example_ShieldBox_Basic
@pytest.mark.parametrize('config', [
    '00',
    '01',
    '02',
    '03',
    '04',
    '05',
    '06',
    '07',
    '99'
], indirect=True)
def test_wifi_throughput_with_different_configs(
    dut: Dut,
    generate_report_different_configs: Callable[[Dict[str, Any], Dict[str, Any], str], None],
) -> None:
    """
    steps: |
      1. build iperf with the specified configs
      2. test throughput for all routers
    """
    # 1. wait for the DUT
    dut.expect('iperf>')

    # 2. preparation
    env_name = 'Example_ShieldBox_Basic'
    pc_nic = get_env_config_variable(env_name, 'pc_nic')
    pc_nic_ip = get_host_ip_by_interface(pc_nic)
    pc_iperf_log_file = os.path.join(dut.logdir, 'pc_iperf_log.md')
    ap_info = {
        'ssid': get_env_config_variable(env_name, 'ap_ssid'),
        'password': get_env_config_variable(env_name, 'ap_password'),
    }
    # extract the config name from the parametrized test case name, e.g. a case named
    # like 'esp32.01.test_wifi_throughput_with_different_configs' yields '01'
    found_config = re.search(r'esp32.*\.(\w+)\.', dut.test_case_name)
    if found_config is not None:
        config_name = found_config.group(1)
    else:
        raise Exception('config name not found')

    # 3. run the test with the current config
    test_result = {
        'tcp_tx': IperfUtility.TestResult('tcp', 'tx', config_name),
        'tcp_rx': IperfUtility.TestResult('tcp', 'rx', config_name),
        'udp_tx': IperfUtility.TestResult('udp', 'tx', config_name),
        'udp_rx': IperfUtility.TestResult('udp', 'rx', config_name),
    }
    test_utility = IperfUtility.IperfTestUtility(dut, config_name, ap_info['ssid'], ap_info['password'], pc_nic_ip,
                                                 pc_iperf_log_file, test_result)
    for _ in range(RETRY_COUNT_FOR_BEST_PERFORMANCE):
        test_utility.run_all_cases(0, NO_BANDWIDTH_LIMIT)

    for result_type in test_result:
        summary = str(test_result[result_type])
        if summary:
            logging.info(summary)

    generate_report_different_configs(ap_info, test_result, config_name)


@pytest.mark.esp32
@pytest.mark.esp32s2
@pytest.mark.esp32c3
@pytest.mark.esp32s3
@pytest.mark.temp_skip(targets=['esp32', 'esp32s2', 'esp32c3', 'esp32s3'], reason='lack of runners')
@pytest.mark.timeout(3600)
@pytest.mark.Example_ShieldBox
@pytest.mark.parametrize('config', [
    BEST_PERFORMANCE_CONFIG
], indirect=True)
def test_wifi_throughput_vs_rssi(
    dut: Dut,
    session_tempdir: str,
) -> None:
    """
    steps: |
      1. build with the best performance config
      2. switch on one router
      3. set the attenuator value from 0-60 for each router
      4. test TCP tx rx and UDP tx rx throughput
    """
    # 1. wait for the DUT
    dut.expect('iperf>')

    # 2. preparation
    env_name = 'Example_ShieldBox'
    att_port = get_env_config_variable(env_name, 'attenuator_port')
    ap_list = get_env_config_variable(env_name, 'ap_list')
    pc_nic = get_env_config_variable(env_name, 'pc_nic')
    pc_nic_ip = get_host_ip_by_interface(pc_nic)
    apc_ip = get_env_config_variable(env_name, 'apc_ip')
    pc_iperf_log_file = os.path.join(dut.logdir, 'pc_iperf_log.md')
    test_result = {
        'tcp_tx': IperfUtility.TestResult('tcp', 'tx', BEST_PERFORMANCE_CONFIG),
        'tcp_rx': IperfUtility.TestResult('tcp', 'rx', BEST_PERFORMANCE_CONFIG),
        'udp_tx': IperfUtility.TestResult('udp', 'tx', BEST_PERFORMANCE_CONFIG),
        'udp_rx': IperfUtility.TestResult('udp', 'rx', BEST_PERFORMANCE_CONFIG),
    }

    # 3. run test for each required att value
    for ap_info in ap_list:
        test_utility = IperfUtility.IperfTestUtility(dut, BEST_PERFORMANCE_CONFIG, ap_info['ssid'],
                                                     ap_info['password'], pc_nic_ip, pc_iperf_log_file, test_result)
        # switch off the other outlets, then power on the outlet of the AP under test
        PowerControl.Control.control_rest(apc_ip, ap_info['outlet'], 'OFF')
        PowerControl.Control.control(apc_ip, {ap_info['outlet']: 'ON'})
        Attenuator.set_att(att_port, 0)
        if not test_utility.wait_ap_power_on():
            logging.error('[{}] failed to power on, skip testing this AP'.format(ap_info['ssid']))
            continue
        for atten_val in ATTEN_VALUE_LIST:
            assert Attenuator.set_att(att_port, atten_val) is True
            try:
                test_utility.run_all_cases(atten_val, NO_BANDWIDTH_LIMIT)
            except AssertionError:
                break

    # 4. generate report
    report = TestReport.ThroughputVsRssiReport(os.path.join(session_tempdir, 'Performance',
                                                            'STAThroughputVsRssiReport'),
                                               test_result)
    report.generate_report()


@pytest.mark.esp32
@pytest.mark.esp32s2
@pytest.mark.esp32c3
@pytest.mark.esp32s3
@pytest.mark.temp_skip(targets=['esp32', 'esp32s2', 'esp32c3', 'esp32s3'], reason='lack of runners')
@pytest.mark.parametrize('count, config', [
    (2, BEST_PERFORMANCE_CONFIG),
], indirect=True)
def test_softap_throughput_vs_rssi(
    dut: Tuple[IdfDut, IdfDut],
    session_tempdir: str,
) -> None:
    """
    steps: |
      1. build with the best performance config
      2. set the attenuator value from 0-60
      3. test TCP tx rx and UDP tx rx throughput between the softap and station DUTs
    """
    # 1. wait for the DUTs
    softap_dut = dut[0]
    sta_dut = dut[1]
    softap_dut.expect('iperf>')
    sta_dut.expect('iperf>')

    # 2. preparation
    env_name = 'Example_ShieldBox2'
    att_port = get_env_config_variable(env_name, 'attenuator_port')
    test_result = {
        'tcp_tx': IperfUtility.TestResult('tcp', 'tx', BEST_PERFORMANCE_CONFIG),
        'tcp_rx': IperfUtility.TestResult('tcp', 'rx', BEST_PERFORMANCE_CONFIG),
        'udp_tx': IperfUtility.TestResult('udp', 'tx', BEST_PERFORMANCE_CONFIG),
        'udp_rx': IperfUtility.TestResult('udp', 'rx', BEST_PERFORMANCE_CONFIG),
    }

    # 3. run test for each required att value
    test_utility = IperfTestUtilitySoftap(sta_dut, softap_dut, BEST_PERFORMANCE_CONFIG, test_result)
    Attenuator.set_att(att_port, 0)
    for atten_val in ATTEN_VALUE_LIST:
        assert Attenuator.set_att(att_port, atten_val) is True
        try:
            test_utility.run_all_cases(atten_val, NO_BANDWIDTH_LIMIT)
        except AssertionError:
            break

    # 4. generate report
    report = TestReport.ThroughputVsRssiReport(os.path.join(session_tempdir, 'Performance',
                                                            'SoftAPThroughputVsRssiReport'),
                                               test_result)
    report.generate_report()