Refactoring and minor improvements.
This commit is contained in:
81
test_latency.py
Normal file
81
test_latency.py
Normal file
@@ -0,0 +1,81 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import yaml
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).parent))
|
||||
from src.audio_tests import run_latency_test
|
||||
|
||||
|
||||
def main():
    """Run the chirp-based latency test and save results to a YAML file.

    Parses command-line arguments, loads the YAML config, creates a
    timestamped output directory, runs the latency test via
    ``src.audio_tests.run_latency_test``, and writes metadata plus the
    latency statistics (avg/min/max/std, in ms) to
    ``<results_dir>/<test_id>_latency/<test_id>_latency_results.yaml``.

    Raises:
        FileNotFoundError: if the config file does not exist.
        KeyError: if the config lacks the ``output.results_dir`` key.
    """
    parser = argparse.ArgumentParser(description='Run latency test on audio loopback and radio path')
    parser.add_argument('--serial-number', required=True, help='Serial number (e.g., SN001234)')
    parser.add_argument('--software-version', required=True, help='Software version (git commit hash)')
    parser.add_argument('--comment', default='', help='Comments about this test')
    parser.add_argument('--config', default='config.yaml', help='Path to config file')
    parser.add_argument('--measurements', type=int, default=5, help='Number of latency measurements (default: 5)')

    args = parser.parse_args()

    # Explicit encoding so results are portable across platforms.
    with open(args.config, 'r', encoding='utf-8') as f:
        config = yaml.safe_load(f)

    # Timestamp doubles as a unique, sortable test identifier.
    timestamp = datetime.now()
    test_id = timestamp.strftime('%Y%m%d_%H%M%S')

    # parents=True so a nested results_dir path (e.g. "out/results")
    # from the config does not make mkdir fail.
    results_dir = Path(config['output']['results_dir'])
    results_dir.mkdir(parents=True, exist_ok=True)

    test_output_dir = results_dir / f"{test_id}_latency"
    test_output_dir.mkdir(parents=True, exist_ok=True)

    save_plots = config['output'].get('save_plots', False)

    print(f"Starting latency test: {test_id}")
    print(f"Serial Number: {args.serial_number}")
    print(f"Software: {args.software_version}")
    if args.comment:
        print(f"Comment: {args.comment}")
    print(f"Measurements: {args.measurements}")
    if save_plots:
        print(f"Plots will be saved to: {test_output_dir}")
    print("-" * 60)

    print(f"\nRunning chirp-based latency test ({args.measurements} measurements)...")
    try:
        latency_stats = run_latency_test(config, num_measurements=args.measurements,
                                         save_plots=save_plots, output_dir=test_output_dir)
        print(f"✓ Latency: avg={latency_stats['avg']:.3f}ms, "
              f"min={latency_stats['min']:.3f}ms, max={latency_stats['max']:.3f}ms, "
              f"std={latency_stats['std']:.3f}ms")
    except Exception as e:
        # Best-effort: record the failure in the results file instead of
        # aborting, so the run still produces a traceable artifact.
        print(f"✗ Error: {e}")
        latency_stats = {'avg': 0.0, 'min': 0.0, 'max': 0.0, 'std': 0.0, 'error': str(e)}

    output_data = {
        'metadata': {
            'test_id': test_id,
            'timestamp': timestamp.isoformat(),
            'serial_number': args.serial_number,
            'software_version': args.software_version,
            'comment': args.comment
        },
        'latency_test': latency_stats
    }

    output_file = test_output_dir / f"{test_id}_latency_results.yaml"
    # sort_keys=False keeps the metadata-first ordering defined above.
    with open(output_file, 'w', encoding='utf-8') as f:
        yaml.dump(output_data, f, default_flow_style=False, sort_keys=False)

    print("-" * 60)
    print(f"✓ Test complete! Results saved to: {output_file}")
    if save_plots:
        print(f"✓ Plots saved to: {test_output_dir}/")
    print(f"\nTo view results: python view_results.py {output_file}")
    print(f"To view plots: ls {test_output_dir}/*.png")
|
||||
|
||||
|
||||
# Script entry point: run the latency test only when executed directly,
# not when this module is imported.
if __name__ == '__main__':
    main()
|
||||
Reference in New Issue
Block a user