Compare commits

...

2 Commits

Author SHA1 Message Date
Pbopbo
0c7de92ae9 Refactoring to sort results in year/month/day folder. 2026-04-08 11:10:12 +02:00
Pbopbo
dd118ddb23 Adds a validity check to the latency test. 2026-04-08 10:27:22 +02:00
6 changed files with 27 additions and 13 deletions

View File

@@ -39,6 +39,10 @@ artifact_detection:
enabled: true
threshold_db: 6.0 # Energy change threshold in dB between consecutive windows (detects level changes)
latency:
max_std_dev_ms: 0.5 # Maximum allowed std deviation; test fails if exceeded
min_avg_ms: 1.0 # Minimum expected average latency; near-zero indicates bad loopback
latency_buildup:
measurement_interval: 10 # seconds between latency measurements
max_duration: null # maximum test duration in seconds (null = run until canceled)

View File

@@ -235,11 +235,22 @@ def run_latency_test(config: Dict, num_measurements: int = 5, save_plots: bool =
last_correlation = correlation
last_lags = lags
avg = float(np.mean(latencies))
std_dev = float(np.std(latencies))
latency_cfg = config.get('latency', {})
max_std_dev_ms = latency_cfg.get('max_std_dev_ms', None)
min_avg_ms = latency_cfg.get('min_avg_ms', None)
valid = True
if max_std_dev_ms is not None and std_dev > max_std_dev_ms:
valid = False
if min_avg_ms is not None and avg < min_avg_ms:
valid = False
latency_stats = {
'avg': float(np.mean(latencies)),
'avg': avg,
'min': float(np.min(latencies)),
'max': float(np.max(latencies)),
'std': float(np.std(latencies))
'std': std_dev,
'valid': valid
}
if save_plots and output_dir and last_recording is not None:

View File

@@ -36,10 +36,9 @@ def main():
test_id = timestamp.strftime('%Y%m%d_%H%M%S')
results_dir = Path(config['output']['results_dir'])
results_dir.mkdir(exist_ok=True)
test_output_dir = results_dir / f"{test_id}_artifact_detection"
test_output_dir.mkdir(exist_ok=True)
test_output_dir = results_dir / timestamp.strftime('%Y') / timestamp.strftime('%m') / timestamp.strftime('%d') / f"{test_id}_artifact_detection"
test_output_dir.mkdir(parents=True, exist_ok=True)
save_plots = config['output'].get('save_plots', False)

View File

@@ -26,10 +26,9 @@ def main():
test_id = timestamp.strftime('%Y%m%d_%H%M%S')
results_dir = Path(config['output']['results_dir'])
results_dir.mkdir(exist_ok=True)
test_output_dir = results_dir / f"{test_id}_latency"
test_output_dir.mkdir(exist_ok=True)
test_output_dir = results_dir / timestamp.strftime('%Y') / timestamp.strftime('%m') / timestamp.strftime('%d') / f"{test_id}_latency"
test_output_dir.mkdir(parents=True, exist_ok=True)
save_plots = config['output'].get('save_plots', False)
@@ -47,7 +46,9 @@ def main():
try:
latency_stats = run_latency_test(config, num_measurements=args.measurements,
save_plots=save_plots, output_dir=test_output_dir)
print(f"✓ Latency: avg={latency_stats['avg']:.3f}ms, "
valid = latency_stats.get('valid', True)
status = "PASS" if valid else "FAIL"
print(f"{'✓' if valid else '✗'} Latency [{status}]: avg={latency_stats['avg']:.3f}ms, "
f"min={latency_stats['min']:.3f}ms, max={latency_stats['max']:.3f}ms, "
f"std={latency_stats['std']:.3f}ms")
except Exception as e:

View File

@@ -263,10 +263,9 @@ def main():
test_id = timestamp.strftime('%Y%m%d_%H%M%S')
results_dir = Path(config['output']['results_dir'])
results_dir.mkdir(exist_ok=True)
test_output_dir = results_dir / f"{test_id}_latency_buildup"
test_output_dir.mkdir(exist_ok=True)
test_output_dir = results_dir / timestamp.strftime('%Y') / timestamp.strftime('%m') / timestamp.strftime('%d') / f"{test_id}_latency_buildup"
test_output_dir.mkdir(parents=True, exist_ok=True)
save_plots = config['output'].get('save_plots', False)

View File

@@ -88,7 +88,7 @@ def display_results(yaml_file: Path):
def list_all_results(results_dir: Path):
yaml_files = sorted(results_dir.glob("*_results.yaml"))
yaml_files = sorted(results_dir.rglob("*_results.yaml"))
if not yaml_files:
print("No test results found.")