diff --git a/py-scripts/test_l3.py b/py-scripts/test_l3.py
index e822a3fe5..0e720ea39 100755
--- a/py-scripts/test_l3.py
+++ b/py-scripts/test_l3.py
@@ -312,6 +312,43 @@
--debug
--no_cleanup
+ # Example : Command Line Interface to run the Test along with IoT without a device list
+ ./test_l3.py
+ --lfmgr 192.168.207.78
+ --test_duration 1m
+ --polling_interval 5s
+ --upstream_port eth1
+ --endp_type mc_udp
+ --rates_are_totals
+ --side_b_min_bps=10000000
+ --test_tag test_l3
+ --use_existing_station_list
+ --existing_station_list 1.20.en0
+ --cleanup_cx
+ --tos VO
+ --test_name Multicast
+ --iot_test
+ --iot_testname "Multicast_IoT_Test"
+
+ # Example : Command Line Interface to run the Test along with IoT with a device list
+ ./test_l3.py
+ --lfmgr 192.168.207.78
+ --test_duration 1m
+ --polling_interval 5s
+ --upstream_port eth1
+ --endp_type mc_udp
+ --rates_are_totals
+ --side_b_min_bps=10000000
+ --test_tag test_l3
+ --use_existing_station_list
+ --existing_station_list 1.20.en0
+ --cleanup_cx
+ --tos VO
+ --test_name Multicast
+ --iot_test
+ --iot_testname "Multicast_IoT_Test"
+ --iot_device_list "switch.smart_plug_1_socket_1"
+
SCRIPT_CLASSIFICATION: Creation & Runs Traffic
@@ -645,6 +682,8 @@
# import traceback # TODO incorporate traceback if using try except
import json
import shutil
+import threading
+from collections import OrderedDict
import asyncio
import copy
@@ -661,6 +700,7 @@
lf_logger_config = importlib.import_module("py-scripts.lf_logger_config")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
realm = importlib.import_module("py-json.realm")
+LFCliBase = realm.LFCliBase
DeviceConfig = importlib.import_module("py-scripts.DeviceConfig")
lf_attenuator = importlib.import_module("py-scripts.lf_atten_mod_test")
lf_modify_radio = importlib.import_module("py-scripts.lf_modify_radio")
@@ -669,6 +709,11 @@
logger = logging.getLogger(__name__)
+iot_scripts_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../local/interop-webGUI/IoT/scripts/"))
+if os.path.exists(iot_scripts_path):
+ sys.path.insert(0, iot_scripts_path)
+ from test_automation import Automation # noqa: E402
+
class L3VariableTime(Realm):
"""Test class for variable-time Layer-3 traffic tests.
@@ -6007,11 +6052,31 @@ def add_live_view_images_to_report(self):
self.report.set_custom_html(f'
')
self.report.build_custom()
- def generate_report(self, config_devices=None, group_device_map=None):
- self.report.set_obj_html("Objective", "The Layer 3 Traffic Generation Test is designed to test the performance of the "
- "Access Point by running layer 3 Cross-Connect Traffic. Layer-3 Cross-Connects represent a stream "
- "of data flowing through the system under test. A Cross-Connect (CX) is composed of two Endpoints, "
- "each of which is associated with a particular Port (physical or virtual interface).")
+ def generate_report(self, config_devices=None, group_device_map=None, iot_summary=None):
+ if iot_summary:
+ self.report.set_obj_html(
+ "Objective",
+ "The Candela Multicast Test Including IoT Devices is designed to evaluate an Access "
+ "Point’s efficiency, reliability, and scalability in handling multicast communication "
+ "across both Real clients (Android, Windows, Linux, iOS) and IoT devices (controlled "
+ "via Home Assistant). "
+ "For Real clients, the test simulates multicast traffic and measures key metrics such "
+ "as performance, latency, and packet delivery to assess how well the AP sustains "
+ "multicast communication under real-world conditions. "
+ "For IoT clients, the test concurrently executes device-specific actions (e.g., camera "
+ "streaming, switch toggling, lock/unlock) while multicast traffic is active, monitoring "
+ "success rate, latency, and failure rate. The goal is to validate that the AP can "
+ "reliably manage multicast traffic for Real clients while ensuring consistent "
+ "responsiveness and control of IoT devices."
+ )
+ else:
+ self.report.set_obj_html(
+ "Objective",
+ "The Layer 3 Traffic Generation Test is designed to test the performance of the "
+ "Access Point by running layer 3 Cross-Connect Traffic. Layer-3 Cross-Connects represent a stream "
+ "of data flowing through the system under test. A Cross-Connect (CX) is composed of two Endpoints, "
+ "each of which is associated with a particular Port (physical or virtual interface)."
+ )
self.report.build_objective()
test_setup_info = {
@@ -6023,6 +6088,8 @@ def generate_report(self, config_devices=None, group_device_map=None):
self.report.set_table_title("Device Under Test Information")
self.report.build_table_title()
+ if iot_summary:
+ test_setup_info = with_iot_params_in_table(test_setup_info, iot_summary)
self.report.test_setup_table(value="Device Under Test",
test_setup_data=test_setup_info)
# For real devices when groups specified for configuration
@@ -6409,6 +6476,8 @@ def generate_report(self, config_devices=None, group_device_map=None):
self.report.build_table_title()
self.report.set_table_dataframe(last_row)
self.report.build_table()
+ if iot_summary:
+ self.build_iot_report_section(self.report, iot_summary)
def write_report(self):
"""Write out HTML and PDF report as configured."""
@@ -6602,6 +6671,123 @@ def get_pass_fail_list(self, tos, up, down):
pass_fail_list.append('FAIL')
return test_input_list, pass_fail_list
+ def build_iot_report_section(self, report, iot_summary):
+ """
+ Handles all IoT-related charts, tables, and increment-wise reports.
+ """
+ outdir = report.path_date_time
+ os.makedirs(outdir, exist_ok=True)
+
+ def copy_into_report(raw_path, new_name):
+ """Resolve and copy image into report dir."""
+ if not raw_path:
+ return None
+
+ abs_src = os.path.abspath(raw_path)
+ if not os.path.exists(abs_src):
+ # Search recursively under 'results' if absolute path missing
+ for root, _, files in os.walk(os.path.join(os.getcwd(), "results")):
+ if os.path.basename(raw_path) in files:
+ abs_src = os.path.join(root, os.path.basename(raw_path))
+ break
+ else:
+ return None
+
+ dst = os.path.join(outdir, new_name)
+ if os.path.abspath(abs_src) != os.path.abspath(dst):
+ shutil.copy2(abs_src, dst)
+ return new_name
+
+ # section header
+ report.set_custom_html('
')
+ report.build_custom()
+ report.set_custom_html('IoT Results
')
+ report.build_custom()
+
+ # Statistics
+ stats_png = copy_into_report(iot_summary.get("statistics_img"), "iot_statistics.png")
+ if stats_png:
+ report.build_chart_title("Test Statistics")
+ report.set_custom_html(f'
')
+ report.build_custom()
+
+ # Request vs Latency
+ rvl_png = copy_into_report(iot_summary.get("req_vs_latency_img"), "iot_request_vs_latency.png")
+ if rvl_png:
+ report.build_chart_title("Request vs Average Latency")
+ report.set_custom_html(f'
')
+ report.build_custom()
+
+ # Overall results table
+ ort = iot_summary.get("overall_result_table") or {}
+ if ort:
+ rows = [{
+ "Device": dev,
+ "Min Latency (ms)": stats.get("min_latency"),
+ "Avg Latency (ms)": stats.get("avg_latency"),
+ "Max Latency (ms)": stats.get("max_latency"),
+ "Total Iterations": stats.get("total_iterations"),
+ "Success Iters": stats.get("success_iterations"),
+ "Failed Iters": stats.get("failed_iterations"),
+ "No-Response Iters": stats.get("no_response_iterations"),
+ } for dev, stats in ort.items()]
+
+ df_overall = pd.DataFrame(rows).round(2)
+
+ report.set_custom_html('')
+ report.build_custom()
+ report.set_obj_html(_obj_title="Overall IoT Result Table", _obj=" ")
+ report.build_objective()
+ report.set_table_dataframe(df_overall)
+ report.build_table()
+ report.set_custom_html('
')
+ report.build_custom()
+
+ # Increment reports
+ inc = iot_summary.get("increment_reports") or {}
+ if inc:
+ report.set_custom_html('Reports by Increment Steps
')
+ report.build_custom()
+
+ for step_name, rep in inc.items():
+
+ report.set_custom_html(f'{step_name.replace("_", " ")}
')
+ report.build_custom()
+
+ # Latency graph
+ lat_png = copy_into_report(rep.get("latency_graph"), f"iot_{step_name}_latency.png")
+ if lat_png:
+ report.build_chart_title("Average Latency")
+ report.set_custom_html(f'
')
+ report.build_custom()
+
+ # Success count graph
+ res_png = copy_into_report(rep.get("result_graph"), f"iot_{step_name}_results.png")
+ if res_png:
+ report.build_chart_title("Success Count")
+ report.set_custom_html(f'
')
+ report.build_custom()
+
+ # Tabular data for detailed iteration-level results
+ data_rows = rep.get("data") or []
+ if data_rows:
+ df = pd.DataFrame(data_rows).rename(
+ columns={"latency__ms": "Latency_ms", "latency_ms": "Latency_ms"}
+ )
+ if "Latency_ms" in df.columns:
+ df["Latency_ms"] = pd.to_numeric(df["Latency_ms"], errors="coerce").round(3)
+ if "Result" in df.columns:
+ df["Result"] = df["Result"].map(lambda x: "Success" if bool(x) else "Failure")
+
+ desired_cols = ["Iteration", "Device", "Current State", "Latency_ms", "Result"]
+ df = df[[c for c in desired_cols if c in df.columns]]
+
+ report.set_table_dataframe(df)
+ report.build_table()
+
+ report.set_custom_html('
')
+ report.build_custom()
+
# Converting the upstream_port to IP address for configuration purposes
def change_port_to_ip(upstream_port, lfclient_host, lfclient_port):
@@ -7600,6 +7786,8 @@ def parse_args():
test_l3_parser.add_argument('--local_lf_report_dir',
help='--local_lf_report_dir override the report path (lanforge/html-reports), primary used when making another directory lanforge/html-report/',
default="")
+ optional = parser.add_argument_group('Optional arguments to run test_l3.py')
+
test_l3_parser.add_argument(
"--results_dir_name",
default="test_l3",
@@ -7917,6 +8105,39 @@ def parse_args():
default=None,
action="store_true",
help='Show summary of what this script does')
+ # IOT ARGS
+ parser.add_argument('--iot_test', help="If true will execute script for iot", action='store_true')
+ optional.add_argument('--iot_ip',
+ default='127.0.0.1',
+ help='IP of the server')
+
+ optional.add_argument('--iot_port',
+ default='8000',
+ help='Port of the server')
+ optional.add_argument('--iot_iterations',
+ type=int,
+ default=1,
+ help='Iterations to run the test')
+
+ optional.add_argument('--iot_delay',
+ type=int,
+ default=5,
+ help='Delay in seconds between iterations (min. 5 seconds)')
+
+ optional.add_argument('--iot_device_list',
+ type=str,
+ default='',
+ help='Entity IDs of the devices to include in testing (comma separated)')
+
+ optional.add_argument('--iot_testname',
+ type=str,
+ default='',
+ help='Testname for reporting')
+
+ optional.add_argument('--iot_increment',
+ type=str,
+ default='',
+ help='Comma-separated list of device counts to incrementally test (e.g., "1,3,5")')
return parser.parse_args()
@@ -7926,6 +8147,112 @@ def parse_args():
# https://stackoverflow.com/questions/37304799/cross-platform-safe-to-use-command-line-string-separator
#
# Safe to exit in this function, as this should only be called by this script
+def with_iot_params_in_table(base: dict, iot_summary) -> dict:
+ """
+ Append IoT params into the existing Throughput Input Parameters table.
+    Adds: Iot Device List, IoT Iterations, IoT Delay (s), IoT Increment.
+ Accepts dict or JSON string.
+ """
+ try:
+ if not iot_summary:
+ return base
+ if isinstance(iot_summary, str):
+ try:
+ iot_summary = json.loads(iot_summary)
+ except Exception:
+ start = iot_summary.find("{")
+ end = iot_summary.rfind("}")
+ if start == -1 or end == -1 or end <= start:
+ return base
+ try:
+ iot_summary = json.loads(iot_summary[start:end + 1])
+ except Exception:
+ return base
+
+ ti = (iot_summary.get("test_input_table") or {})
+ out = OrderedDict(base)
+ out["Iot Device List"] = ti.get("Device List", "")
+ out["IoT Iterations"] = ti.get("Iterations", "")
+ out["IoT Delay (s)"] = ti.get("Delay (seconds)", "")
+ out["IoT Increment"] = ti.get("Increment Pattern", "")
+ return out
+ except Exception:
+ return base
+
+
+def trigger_iot(ip, port, iterations, delay, device_list, testname, increment):
+ """
+    Thread target that executes the IoT test; the caller runs it in a separate thread.
+    This function is called from the throughput test script when IoT testing
+    is enabled. It wraps the asynchronous `run_iot()` coroutine via asyncio.run().
+ """
+ asyncio.run(run_iot(ip, port, iterations, delay, device_list, testname, increment))
+
+
+async def run_iot(ip: str = '127.0.0.1',
+ port: str = '8000',
+ iterations: int = 1,
+ delay: int = 5,
+ device_list: str = '',
+ testname: str = '',
+ increment: str = ''):
+ try:
+
+ if delay < 5:
+ logger.error('The minimum delay should be 5 seconds.')
+ exit(1)
+
+ if device_list != '':
+ device_list = device_list.split(',')
+ else:
+ device_list = None
+ # Parse and validate increment pattern if provided
+ if increment:
+ print("the increment is : ", increment)
+ try:
+ increment = list(map(int, increment.split(',')))
+ if any(i < 1 for i in increment):
+ logger.error('Increment values must be positive integers')
+ exit(1)
+ except ValueError:
+ logger.error('Invalid increment format. Please provide comma-separated integers (e.g., "1,3,5")')
+ exit(1)
+
+ testname = testname
+
+ # Ensure test name is unique (avoid overwriting previous results)
+ if testname in os.listdir('../../local/interop-webGUI/IoT/scripts/results/'):
+ logger.error('Test with same name already existing. Please give a different testname.')
+ exit(1)
+ automation = Automation(ip=ip,
+ port=port,
+ iterations=iterations,
+ delay=delay,
+ device_list=device_list,
+ testname=testname,
+ increment=increment)
+
+ # fetch the available iot devices
+ automation.devices = await automation.fetch_iot_devices()
+
+ # select the iot devices for testing
+ automation.select_iot_devices()
+
+ # run the iot test on selected devices
+ automation.run_test()
+
+ # generate the iot report
+ automation.generate_report()
+
+ except Exception as e:
+ logger.error(f"Iot Test failed: {str(e)}")
+ raise
+
+ await automation.session.close()
+
+ logger.info('Iot Test Completed.')
+
+
def main():
endp_types = "lf_udp"
@@ -7952,6 +8279,14 @@ def main():
test_name = args.test_name
ip = args.lfmgr
logger.info(" dowebgui %s %s %s", args.dowebgui, test_name, ip)
+ if args.iot_test:
+ iot_ip = args.iot_ip
+ iot_port = args.iot_port
+ iot_iterations = args.iot_iterations
+ iot_delay = args.iot_delay
+ iot_device_list = args.iot_device_list
+ iot_testname = args.iot_testname
+ iot_increment = args.iot_increment
# initialize pass / fail
test_passed = False
@@ -8045,6 +8380,27 @@ def main():
network_auth_type_list = []
anqp_3gpp_cell_net_list = []
ieee80211w_list = []
+ if args.iot_test:
+ if args.iot_iterations > 1:
+ thread = threading.Thread(target=trigger_iot, args=(iot_ip, iot_port, iot_iterations, iot_delay, iot_device_list, iot_testname, iot_increment))
+ thread.start()
+ else:
+ total_secs = int(LFCliBase.parse_time(args.test_duration).total_seconds())
+ iot_iterations = max(1, total_secs // args.iot_delay)
+ iot_thread = threading.Thread(
+ target=trigger_iot,
+ args=(
+ args.iot_ip,
+ args.iot_port,
+ iot_iterations,
+ args.iot_delay,
+ args.iot_device_list,
+ args.iot_testname,
+ args.iot_increment
+ ),
+ daemon=True
+ )
+ iot_thread.start()
logger.debug("Parse radio arguments used for station configuration")
if radios is not None:
@@ -8612,12 +8968,19 @@ def main():
ip_var_test.set_report_obj(report=report)
if args.dowebgui:
ip_var_test.webgui_finalize()
+ iot_summary = None
+ if args.iot_test and args.iot_testname:
+ base = os.path.join("results", args.iot_testname)
+ p = os.path.join(base, "iot_summary.json")
+ if os.path.exists(p):
+ with open(p) as f:
+ iot_summary = json.load(f)
# Generate and write out test report
logger.info("Generating test report")
if args.real:
- ip_var_test.generate_report(config_devices, group_device_map)
+ ip_var_test.generate_report(config_devices, group_device_map, iot_summary=iot_summary)
else:
- ip_var_test.generate_report()
+ ip_var_test.generate_report(iot_summary=iot_summary)
ip_var_test.write_report()
# TODO move to after reporting