@@ -26,33 +26,15 @@ from builtins import range
 from builtins import object
 import re
 import os
-import sys
 import time
 import subprocess
 
-try:
-    import IDF
-except ImportError:
-    # this is a test case write with tiny-test-fw.
-    # to run test cases outside tiny-test-fw,
-    # we need to set environment variable `TEST_FW_PATH`,
-    # then get and insert `TEST_FW_PATH` to sys path before import FW module
-    test_fw_path = os.getenv("TEST_FW_PATH")
-    if test_fw_path and test_fw_path not in sys.path:
-        sys.path.insert(0, test_fw_path)
-    import IDF
-
-import DUT
-import TinyFW
-import Utility
-from Utility import (Attenuator, PowerControl, LineChart)
-
-try:
-    from test_report import (ThroughputForConfigsReport, ThroughputVsRssiReport)
-except ImportError:
-    # add current folder to system path for importing test_report
-    sys.path.append(os.path.dirname(__file__))
-    from test_report import (ThroughputForConfigsReport, ThroughputVsRssiReport)
+from tiny_test_fw import TinyFW, DUT, Utility
+import ttfw_idf
+from idf_iperf_test_util import (Attenuator, PowerControl, LineChart, TestReport)
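+# NOTE: tiny_test_fw, ttfw_idf and idf_iperf_test_util are assumed to be installed or
+# on PYTHONPATH, which is why the TEST_FW_PATH / sys.path manipulation above is gone.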
+
 
 # configurations
 TEST_TIME = TEST_TIMEOUT = 60
@@ -165,8 +145,8 @@ class TestResult(object):
             throughput = 0.0
 
         if throughput == 0 and rssi > self.ZERO_THROUGHPUT_THRESHOLD:
-            self.error_list.append("[Error][Fatal][{}][att: {}][rssi: {}]: No throughput data found"
-                                   .format(ap_ssid, att, rssi))
+            self.error_list.append("[Error][Fatal][{}][att: {}][rssi: {}]: No throughput data found"
+                                   .format(ap_ssid, att, rssi))
 
         self._save_result(throughput, ap_ssid, att, rssi, heap_size)
 
@@ -466,32 +446,9 @@ class IperfTestUtility(object):
         return ret
 
 
-def build_iperf_with_config(config_name):
-    """
-    we need to build iperf example with different configurations.
-
-    :param config_name: sdkconfig we want to build
-    """
-
-    # switch to iperf example path before build when we're running test with Runner
-    example_path = os.path.dirname(__file__)
-    cwd = os.getcwd()
-    if cwd != example_path and example_path:
-        os.chdir(example_path)
-    try:
-        subprocess.check_call("make clean > /dev/null", shell=True)
-        subprocess.check_call(["cp", "sdkconfig.defaults.{}".format(config_name), "sdkconfig.defaults"])
-        subprocess.check_call(["rm", "-f", "sdkconfig"])
-        subprocess.check_call("make defconfig > /dev/null", shell=True)
-        # save sdkconfig to generate config comparision report
-        subprocess.check_call(["cp", "sdkconfig", "sdkconfig.{}".format(config_name)])
-        subprocess.check_call("make -j5 > /dev/null", shell=True)
-        subprocess.check_call("make print_flash_cmd | tail -n 1 > build/download.config", shell=True)
-    finally:
-        os.chdir(cwd)
-
-
-@IDF.idf_example_test(env_tag="Example_ShieldBox_Basic", category="stress")
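+# NOTE: per-config binaries are assumed to be prebuilt by the CI build stage (one per
+# sdkconfig.ci.* file), so the in-test build_iperf_with_config() step is dropped.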
+@ttfw_idf.idf_example_test(env_tag="Example_ShieldBox_Basic", category="stress")
 def test_wifi_throughput_with_different_configs(env, extra_data):
     """
     steps: |
@@ -511,13 +466,14 @@ def test_wifi_throughput_with_different_configs(env, extra_data):
     sdkconfig_files = dict()
 
     for config_name in CONFIG_NAME_PATTERN.findall(config_names_raw):
-        # 1. build config
-        build_iperf_with_config(config_name)
+        # 1. get the config
         sdkconfig_files[config_name] = os.path.join(os.path.dirname(__file__),
-                                                    "sdkconfig.{}".format(config_name))
+                                                    "sdkconfig.ci.{}".format(config_name))
 
         # 2. get DUT and download
-        dut = env.get_dut("iperf", "examples/wifi/iperf")
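+        # app_config_name selects the prebuilt app binary that matches
+        # sdkconfig.ci.<config_name>, instead of building inside the test.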
+        dut = env.get_dut("iperf", "examples/wifi/iperf", app_config_name=config_name)
         dut.start_app()
         dut.expect("esp32>")
 
@@ -544,12 +498,12 @@ def test_wifi_throughput_with_different_configs(env, extra_data):
     env.close_dut("iperf")
 
     # 5. generate report
-    report = ThroughputForConfigsReport(os.path.join(env.log_path, "ThroughputForConfigsReport"),
-                                        ap_info["ssid"], test_result, sdkconfig_files)
+    report = TestReport.ThroughputForConfigsReport(os.path.join(env.log_path, "ThroughputForConfigsReport"),
+                                                   ap_info["ssid"], test_result, sdkconfig_files)
     report.generate_report()
 
 
-@IDF.idf_example_test(env_tag="Example_ShieldBox", category="stress")
+@ttfw_idf.idf_example_test(env_tag="Example_ShieldBox", category="stress")
 def test_wifi_throughput_vs_rssi(env, extra_data):
     """
     steps: |
@@ -571,15 +525,12 @@ def test_wifi_throughput_vs_rssi(env, extra_data):
         "udp_rx": TestResult("udp", "rx", BEST_PERFORMANCE_CONFIG),
     }
 
-    # 1. build config
-    build_iperf_with_config(BEST_PERFORMANCE_CONFIG)
-
-    # 2. get DUT and download
-    dut = env.get_dut("iperf", "examples/wifi/iperf")
+    # 1. get DUT and download
+    dut = env.get_dut("iperf", "examples/wifi/iperf", app_config_name=BEST_PERFORMANCE_CONFIG)
     dut.start_app()
     dut.expect("esp32>")
 
-    # 3. run test for each required att value
+    # 2. run test for each required att value
     for ap_info in ap_list:
         test_utility = IperfTestUtility(dut, BEST_PERFORMANCE_CONFIG, ap_info["ssid"], ap_info["password"],
                                         pc_nic_ip, pc_iperf_log_file, test_result)
@@ -597,16 +548,16 @@ def test_wifi_throughput_vs_rssi(env, extra_data):
             assert Attenuator.set_att(att_port, atten_val) is True
             test_utility.run_all_cases(atten_val)
 
-    # 4. check test results
+    # 3. check test results
    env.close_dut("iperf")
 
-    # 5. generate report
-    report = ThroughputVsRssiReport(os.path.join(env.log_path, "ThroughputVsRssiReport"),
-                                    test_result)
+    # 4. generate report
+    report = TestReport.ThroughputVsRssiReport(os.path.join(env.log_path, "ThroughputVsRssiReport"),
+                                               test_result)
     report.generate_report()
 
 
-@IDF.idf_example_test(env_tag="Example_ShieldBox_Basic")
+@ttfw_idf.idf_example_test(env_tag="Example_ShieldBox_Basic")
 def test_wifi_throughput_basic(env, extra_data):
     """
     steps: |
@@ -620,15 +571,12 @@ def test_wifi_throughput_basic(env, extra_data):
         "password": env.get_variable("ap_password"),
     }
 
-    # 1. build iperf with best config
-    build_iperf_with_config(BEST_PERFORMANCE_CONFIG)
-
-    # 2. get DUT
-    dut = env.get_dut("iperf", "examples/wifi/iperf")
+    # 1. get DUT
+    dut = env.get_dut("iperf", "examples/wifi/iperf", app_config_name=BEST_PERFORMANCE_CONFIG)
     dut.start_app()
     dut.expect("esp32>")
 
-    # 3. preparing
+    # 2. preparing
     test_result = {
         "tcp_tx": TestResult("tcp", "tx", BEST_PERFORMANCE_CONFIG),
         "tcp_rx": TestResult("tcp", "rx", BEST_PERFORMANCE_CONFIG),
@@ -639,24 +587,24 @@ def test_wifi_throughput_basic(env, extra_data):
     test_utility = IperfTestUtility(dut, BEST_PERFORMANCE_CONFIG, ap_info["ssid"],
                                     ap_info["password"], pc_nic_ip, pc_iperf_log_file, test_result)
 
-    # 4. run test for TCP Tx, Rx and UDP Tx, Rx
+    # 3. run test for TCP Tx, Rx and UDP Tx, Rx
     for _ in range(RETRY_COUNT_FOR_BEST_PERFORMANCE):
         test_utility.run_all_cases(0)
 
-    # 5. log performance and compare with pass standard
+    # 4. log performance and compare with pass standard
     performance_items = []
     for throughput_type in test_result:
-        IDF.log_performance("{}_throughput".format(throughput_type),
-                            "{:.02f} Mbps".format(test_result[throughput_type].get_best_throughput()))
+        ttfw_idf.log_performance("{}_throughput".format(throughput_type),
+                                 "{:.02f} Mbps".format(test_result[throughput_type].get_best_throughput()))
         performance_items.append(["{}_throughput".format(throughput_type),
                                   "{:.02f} Mbps".format(test_result[throughput_type].get_best_throughput())])
 
-    # save to report
+    # 5. save to report
     TinyFW.JunitReport.update_performance(performance_items)
     # do check after logging, otherwise test will exit immediately if check fail, some performance can't be logged.
     for throughput_type in test_result:
-        IDF.check_performance("{}_throughput".format(throughput_type),
-                              test_result[throughput_type].get_best_throughput())
+        ttfw_idf.check_performance("{}_throughput".format(throughput_type),
+                                   test_result[throughput_type].get_best_throughput())
 
     env.close_dut("iperf")
 