trax/tests/test_optimization_dashboard.py

"""Unit tests for interactive optimization dashboard."""
import json
import os
import tempfile
import time
from pathlib import Path
from typing import Any, Dict, List
from unittest.mock import MagicMock, Mock, patch

import pytest

from src.services.optimization_dashboard import (
    OptimizationDashboard,
    RealTimeMonitor,
    InteractiveCharts,
    ConfigurationManager,
    AlertSystem,
    DashboardComponent,
)


class TestOptimizationDashboard:
    """Test the main OptimizationDashboard class."""

    def test_dashboard_initialization(self):
        """Test OptimizationDashboard initialization with default settings."""
        dashboard = OptimizationDashboard()
        assert dashboard.port == 8080
        assert dashboard.host == 'localhost'
        assert dashboard.auto_refresh_seconds == 5
        assert dashboard.max_data_points == 1000
        assert dashboard.enable_websockets is True

    def test_dashboard_custom_initialization(self):
        """Test OptimizationDashboard initialization with custom settings."""
        dashboard = OptimizationDashboard(
            port=9000,
            host='0.0.0.0',
            auto_refresh_seconds=10,
            max_data_points=500,
            enable_websockets=False
        )
        assert dashboard.port == 9000
        assert dashboard.host == '0.0.0.0'
        assert dashboard.auto_refresh_seconds == 10
        assert dashboard.max_data_points == 500
        assert dashboard.enable_websockets is False

    def test_start_dashboard(self):
        """Test dashboard startup."""
        dashboard = OptimizationDashboard()
        # Mock the web server thread so no real server is started.
        with patch('src.services.optimization_dashboard.threading.Thread') as mock_thread:
            mock_thread.return_value.start.return_value = None
            result = dashboard.start_dashboard()
            assert result['status'] == 'started'
            assert result['port'] == 8080
            assert result['url'] == 'http://localhost:8080'

    def test_stop_dashboard(self):
        """Test dashboard shutdown."""
        dashboard = OptimizationDashboard()
        # Start the dashboard first, with the server thread mocked out as above.
        with patch('src.services.optimization_dashboard.threading.Thread'):
            dashboard.start_dashboard()
        result = dashboard.stop_dashboard()
        assert result['status'] == 'stopped'
        assert 'shutdown_time' in result

    def test_get_dashboard_status(self):
        """Test dashboard status retrieval."""
        dashboard = OptimizationDashboard()
        result = dashboard.get_dashboard_status()
        assert 'status' in result
        assert 'uptime_seconds' in result
        assert 'active_connections' in result
        assert 'data_points_count' in result

    def test_update_metrics(self):
        """Test metrics update."""
        dashboard = OptimizationDashboard()
        metrics = {
            'cpu_usage': 75.5,
            'memory_usage': 60.2,
            'throughput': 25.0,
            'latency': 0.15
        }
        result = dashboard.update_metrics(metrics)
        assert result['updated'] is True
        assert result['timestamp'] > 0
        assert result['metrics_count'] == 4


class TestRealTimeMonitor:
    """Test real-time monitoring functionality."""

    def test_real_time_monitor_initialization(self):
        """Test RealTimeMonitor initialization."""
        monitor = RealTimeMonitor()
        assert monitor.update_interval_seconds == 1
        assert monitor.max_history_size == 1000
        assert monitor.enable_alerts is True
        assert monitor.metrics_enabled == ['cpu', 'memory', 'throughput', 'latency']

    def test_real_time_monitor_custom_initialization(self):
        """Test RealTimeMonitor initialization with custom settings."""
        monitor = RealTimeMonitor(
            update_interval_seconds=2,
            max_history_size=500,
            enable_alerts=False,
            metrics_enabled=['cpu', 'memory']
        )
        assert monitor.update_interval_seconds == 2
        assert monitor.max_history_size == 500
        assert monitor.enable_alerts is False
        assert monitor.metrics_enabled == ['cpu', 'memory']

    def test_start_monitoring(self):
        """Test monitoring start."""
        monitor = RealTimeMonitor()
        result = monitor.start_monitoring()
        assert result['status'] == 'started'
        assert result['update_interval'] == 1
        assert result['metrics_enabled'] == ['cpu', 'memory', 'throughput', 'latency']

    def test_stop_monitoring(self):
        """Test monitoring stop."""
        monitor = RealTimeMonitor()
        # Start monitoring first
        monitor.start_monitoring()
        result = monitor.stop_monitoring()
        assert result['status'] == 'stopped'
        assert 'stop_time' in result

    def test_collect_metrics(self):
        """Test metrics collection."""
        monitor = RealTimeMonitor()
        # Mock system metrics so the test is independent of the host machine.
        with patch('psutil.cpu_percent') as mock_cpu, \
                patch('psutil.virtual_memory') as mock_memory:
            mock_cpu.return_value = 75.5
            mock_memory.return_value = Mock(percent=60.2)
            metrics = monitor.collect_metrics()
            assert 'cpu_usage' in metrics
            assert 'memory_usage' in metrics
            assert 'timestamp' in metrics
            assert metrics['cpu_usage'] == 75.5
            assert metrics['memory_usage'] == 60.2

    def test_get_metrics_history(self):
        """Test metrics history retrieval."""
        monitor = RealTimeMonitor()
        # Add some test data
        monitor.metrics_history = [
            {'timestamp': 1000, 'cpu_usage': 50.0, 'memory_usage': 40.0},
            {'timestamp': 2000, 'cpu_usage': 60.0, 'memory_usage': 50.0},
            {'timestamp': 3000, 'cpu_usage': 70.0, 'memory_usage': 60.0}
        ]
        history = monitor.get_metrics_history()
        assert len(history) == 3
        assert all('timestamp' in metric for metric in history)
        assert all('cpu_usage' in metric for metric in history)

    def test_get_metrics_summary(self):
        """Test metrics summary generation."""
        monitor = RealTimeMonitor()
        # Add test data; see the _reference_metric_summary sketch after this
        # class for the aggregate semantics these assertions expect.
        monitor.metrics_history = [
            {'timestamp': 1000, 'cpu_usage': 50.0, 'memory_usage': 40.0},
            {'timestamp': 2000, 'cpu_usage': 60.0, 'memory_usage': 50.0},
            {'timestamp': 3000, 'cpu_usage': 70.0, 'memory_usage': 60.0}
        ]
        summary = monitor.get_metrics_summary()
        assert 'cpu_usage' in summary
        assert 'memory_usage' in summary
        assert 'avg_cpu_usage' in summary['cpu_usage']
        assert 'max_cpu_usage' in summary['cpu_usage']
        assert 'min_cpu_usage' in summary['cpu_usage']
        assert summary['cpu_usage']['avg_cpu_usage'] == 60.0
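

# For reference, the per-metric summary asserted in test_get_metrics_summary
# reduces the recorded history with plain aggregates. This is an illustrative
# sketch (the helper name is ours), not the RealTimeMonitor implementation:
def _reference_metric_summary(history, metric):
    values = [entry[metric] for entry in history]
    return {
        f'avg_{metric}': sum(values) / len(values),
        f'max_{metric}': max(values),
        f'min_{metric}': min(values),
    }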


class TestInteractiveCharts:
    """Test interactive chart functionality."""

    def test_interactive_charts_initialization(self):
        """Test InteractiveCharts initialization."""
        charts = InteractiveCharts()
        assert charts.chart_types == ['line', 'bar', 'scatter', 'heatmap']
        assert charts.default_chart_type == 'line'
        assert charts.auto_update is True
        assert charts.max_data_points == 1000

    def test_interactive_charts_custom_initialization(self):
        """Test InteractiveCharts initialization with custom settings."""
        charts = InteractiveCharts(
            chart_types=['line', 'bar'],
            default_chart_type='bar',
            auto_update=False,
            max_data_points=500
        )
        assert charts.chart_types == ['line', 'bar']
        assert charts.default_chart_type == 'bar'
        assert charts.auto_update is False
        assert charts.max_data_points == 500

    def test_create_performance_chart(self):
        """Test performance chart creation."""
        charts = InteractiveCharts()
        data = [
            {'timestamp': 1000, 'cpu_usage': 50.0, 'memory_usage': 40.0},
            {'timestamp': 2000, 'cpu_usage': 60.0, 'memory_usage': 50.0},
            {'timestamp': 3000, 'cpu_usage': 70.0, 'memory_usage': 60.0}
        ]
        chart_config = charts.create_performance_chart(data, 'cpu_usage')
        assert 'chart_type' in chart_config
        assert 'data' in chart_config
        assert 'options' in chart_config
        assert chart_config['chart_type'] == 'line'
        assert len(chart_config['data']) == 3

    def test_create_throughput_chart(self):
        """Test throughput chart creation."""
        charts = InteractiveCharts()
        data = [
            {'timestamp': 1000, 'throughput': 10.0, 'latency': 0.1},
            {'timestamp': 2000, 'throughput': 15.0, 'latency': 0.15},
            {'timestamp': 3000, 'throughput': 20.0, 'latency': 0.2}
        ]
        chart_config = charts.create_throughput_chart(data)
        assert 'chart_type' in chart_config
        assert 'data' in chart_config
        assert 'options' in chart_config
        assert chart_config['chart_type'] == 'line'
        assert len(chart_config['data']) == 3

    def test_create_memory_chart(self):
        """Test memory chart creation."""
        charts = InteractiveCharts()
        data = [
            {'timestamp': 1000, 'memory_usage': 40.0, 'gpu_memory': 30.0},
            {'timestamp': 2000, 'memory_usage': 50.0, 'gpu_memory': 40.0},
            {'timestamp': 3000, 'memory_usage': 60.0, 'gpu_memory': 50.0}
        ]
        chart_config = charts.create_memory_chart(data)
        assert 'chart_type' in chart_config
        assert 'data' in chart_config
        assert 'options' in chart_config
        assert chart_config['chart_type'] == 'line'
        assert len(chart_config['data']) == 3

    def test_create_combined_chart(self):
        """Test combined chart creation."""
        charts = InteractiveCharts()
        data = [
            {'timestamp': 1000, 'cpu_usage': 50.0, 'memory_usage': 40.0, 'throughput': 10.0},
            {'timestamp': 2000, 'cpu_usage': 60.0, 'memory_usage': 50.0, 'throughput': 15.0},
            {'timestamp': 3000, 'cpu_usage': 70.0, 'memory_usage': 60.0, 'throughput': 20.0}
        ]
        chart_config = charts.create_combined_chart(data)
        assert 'chart_type' in chart_config
        assert 'data' in chart_config
        assert 'options' in chart_config
        assert chart_config['chart_type'] == 'line'
        assert len(chart_config['data']) == 3

    def test_export_chart_data(self):
        """Test chart data export."""
        charts = InteractiveCharts()
        data = [
            {'timestamp': 1000, 'cpu_usage': 50.0, 'memory_usage': 40.0},
            {'timestamp': 2000, 'cpu_usage': 60.0, 'memory_usage': 50.0}
        ]
        # Test JSON export
        json_data = charts.export_chart_data(data, 'json')
        assert json_data['format'] == 'json'
        assert len(json_data['data']) == 2
        # Test CSV export
        csv_data = charts.export_chart_data(data, 'csv')
        assert csv_data['format'] == 'csv'
        assert 'data' in csv_data


class TestConfigurationManager:
    """Test configuration management functionality."""

    def test_configuration_manager_initialization(self):
        """Test ConfigurationManager initialization."""
        config_manager = ConfigurationManager()
        assert config_manager.config_file == 'dashboard_config.json'
        assert config_manager.auto_save is True
        assert config_manager.backup_enabled is True
        assert config_manager.max_backups == 5

    def test_configuration_manager_custom_initialization(self):
        """Test ConfigurationManager initialization with custom settings."""
        config_manager = ConfigurationManager(
            config_file='custom_config.json',
            auto_save=False,
            backup_enabled=False,
            max_backups=3
        )
        assert config_manager.config_file == 'custom_config.json'
        assert config_manager.auto_save is False
        assert config_manager.backup_enabled is False
        assert config_manager.max_backups == 3

    def test_load_configuration(self):
        """Test configuration loading."""
        config_manager = ConfigurationManager()
        # Mock file reading so no real config file is required.
        mock_config = {
            'dashboard': {'port': 8080, 'host': 'localhost'},
            'monitoring': {'interval': 1, 'enabled': True},
            'charts': {'auto_update': True, 'max_points': 1000}
        }
        with patch('builtins.open', create=True) as mock_open:
            mock_open.return_value.__enter__.return_value.read.return_value = json.dumps(mock_config)
            config = config_manager.load_configuration()
            assert config['dashboard']['port'] == 8080
            assert config['monitoring']['interval'] == 1
            assert config['charts']['auto_update'] is True

    def test_save_configuration(self):
        """Test configuration saving."""
        config_manager = ConfigurationManager()
        config = {
            'dashboard': {'port': 9000, 'host': '0.0.0.0'},
            'monitoring': {'interval': 2, 'enabled': True},
            'charts': {'auto_update': False, 'max_points': 500}
        }
        with patch('builtins.open', create=True) as mock_open:
            mock_open.return_value.__enter__.return_value.write.return_value = None
            result = config_manager.save_configuration(config)
            assert result['saved'] is True
            assert result['timestamp'] > 0

    def test_update_configuration(self):
        """Test configuration update."""
        config_manager = ConfigurationManager()
        current_config = {
            'dashboard': {'port': 8080, 'host': 'localhost'},
            'monitoring': {'interval': 1, 'enabled': True}
        }
        updates = {
            'dashboard': {'port': 9000},
            'monitoring': {'interval': 2}
        }
        # Updates are merged recursively; see the _reference_deep_merge sketch
        # after this class for the expected semantics.
        new_config = config_manager.update_configuration(current_config, updates)
        assert new_config['dashboard']['port'] == 9000
        assert new_config['dashboard']['host'] == 'localhost'  # Unchanged
        assert new_config['monitoring']['interval'] == 2
        assert new_config['monitoring']['enabled'] is True  # Unchanged

    def test_validate_configuration(self):
        """Test configuration validation."""
        config_manager = ConfigurationManager()
        valid_config = {
            'dashboard': {'port': 8080, 'host': 'localhost'},
            'monitoring': {'interval': 1, 'enabled': True},
            'charts': {'auto_update': True, 'max_points': 1000}
        }
        result = config_manager.validate_configuration(valid_config)
        assert result['valid'] is True
        assert result['errors'] is None
        # Test invalid config
        invalid_config = {
            'dashboard': {'port': -1},  # Invalid port
            'monitoring': {'interval': 0}  # Invalid interval
        }
        result = config_manager.validate_configuration(invalid_config)
        assert result['valid'] is False
        assert 'errors' in result
        assert len(result['errors']) > 0

    def test_create_backup(self):
        """Test configuration backup creation."""
        config_manager = ConfigurationManager()
        config = {'test': 'data'}
        with patch('builtins.open', create=True) as mock_open:
            mock_open.return_value.__enter__.return_value.write.return_value = None
            result = config_manager.create_backup(config)
            assert result['backup_created'] is True
            assert 'backup_file' in result
            assert result['timestamp'] > 0
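

# For reference, the merge behaviour asserted in test_update_configuration
# matches a recursive dict merge along these lines. This is an illustrative
# sketch (the helper name is ours), not the actual
# ConfigurationManager.update_configuration implementation:
def _reference_deep_merge(base, updates):
    merged = dict(base)
    for key, value in updates.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = _reference_deep_merge(merged[key], value)
        else:
            merged[key] = value
    return merged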


class TestAlertSystem:
    """Test alert system functionality."""

    def test_alert_system_initialization(self):
        """Test AlertSystem initialization."""
        alert_system = AlertSystem()
        assert alert_system.enable_alerts is True
        assert alert_system.alert_levels == ['info', 'warning', 'error', 'critical']
        assert alert_system.max_alerts == 100
        assert alert_system.auto_clear is True

    def test_alert_system_custom_initialization(self):
        """Test AlertSystem initialization with custom settings."""
        alert_system = AlertSystem(
            enable_alerts=False,
            alert_levels=['warning', 'error'],
            max_alerts=50,
            auto_clear=False
        )
        assert alert_system.enable_alerts is False
        assert alert_system.alert_levels == ['warning', 'error']
        assert alert_system.max_alerts == 50
        assert alert_system.auto_clear is False

    def test_create_alert(self):
        """Test alert creation."""
        alert_system = AlertSystem()
        alert = alert_system.create_alert(
            level='warning',
            message='High CPU usage detected',
            source='monitoring',
            threshold=80.0,
            current_value=85.0
        )
        assert alert['level'] == 'warning'
        assert alert['message'] == 'High CPU usage detected'
        assert alert['source'] == 'monitoring'
        assert alert['threshold'] == 80.0
        assert alert['current_value'] == 85.0
        assert 'timestamp' in alert
        assert 'id' in alert

    def test_get_alerts(self):
        """Test alert retrieval."""
        alert_system = AlertSystem()
        # Add some test alerts
        alert_system.alerts = [
            {'id': '1', 'level': 'info', 'message': 'Test 1', 'timestamp': 1000},
            {'id': '2', 'level': 'warning', 'message': 'Test 2', 'timestamp': 2000},
            {'id': '3', 'level': 'error', 'message': 'Test 3', 'timestamp': 3000}
        ]
        all_alerts = alert_system.get_alerts()
        assert len(all_alerts) == 3
        warning_alerts = alert_system.get_alerts(level='warning')
        assert len(warning_alerts) == 1
        assert warning_alerts[0]['level'] == 'warning'

    def test_clear_alerts(self):
        """Test alert clearing."""
        alert_system = AlertSystem()
        # Add test alerts
        alert_system.alerts = [
            {'id': '1', 'level': 'info', 'message': 'Test 1', 'timestamp': 1000},
            {'id': '2', 'level': 'warning', 'message': 'Test 2', 'timestamp': 2000}
        ]
        result = alert_system.clear_alerts()
        assert result['cleared'] is True
        assert result['count'] == 2
        assert len(alert_system.alerts) == 0

    def test_check_thresholds(self):
        """Test threshold checking."""
        alert_system = AlertSystem()
        metrics = {
            'cpu_usage': 85.0,
            'memory_usage': 70.0,
            'throughput': 15.0
        }
        # Note the inverted throughput thresholds: low throughput is the
        # problem, so the error bound sits below the warning bound. See the
        # _reference_check_thresholds sketch after this class.
        thresholds = {
            'cpu_usage': {'warning': 80.0, 'error': 90.0},
            'memory_usage': {'warning': 75.0, 'error': 85.0},
            'throughput': {'warning': 10.0, 'error': 5.0}
        }
        alerts = alert_system.check_thresholds(metrics, thresholds)
        assert len(alerts) > 0
        assert any(alert['level'] == 'warning' for alert in alerts)

    def test_get_alert_summary(self):
        """Test alert summary generation."""
        alert_system = AlertSystem()
        # Add test alerts
        alert_system.alerts = [
            {'id': '1', 'level': 'info', 'message': 'Test 1', 'timestamp': 1000},
            {'id': '2', 'level': 'warning', 'message': 'Test 2', 'timestamp': 2000},
            {'id': '3', 'level': 'warning', 'message': 'Test 3', 'timestamp': 3000},
            {'id': '4', 'level': 'error', 'message': 'Test 4', 'timestamp': 4000}
        ]
        summary = alert_system.get_alert_summary()
        assert summary['total_alerts'] == 4
        assert summary['info_count'] == 1
        assert summary['warning_count'] == 2
        assert summary['error_count'] == 1
        assert summary['critical_count'] == 0
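

# For reference, the threshold semantics exercised in test_check_thresholds
# can be sketched as below. This is an illustrative helper, not the
# AlertSystem implementation; it assumes "lower is worse" whenever the error
# bound sits below the warning bound (as with throughput above), which is our
# reading of the test data rather than a documented contract:
def _reference_check_thresholds(metrics, thresholds):
    alerts = []
    for name, value in metrics.items():
        bounds = thresholds.get(name)
        if not bounds:
            continue
        lower_is_worse = bounds['error'] < bounds['warning']
        for level in ('error', 'warning'):  # report only the most severe breach
            breached = value <= bounds[level] if lower_is_worse else value >= bounds[level]
            if breached:
                alerts.append({'metric': name, 'level': level, 'current_value': value})
                break
    return alerts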


class TestDashboardComponent:
    """Test dashboard component functionality."""

    def test_dashboard_component_initialization(self):
        """Test DashboardComponent initialization."""
        component = DashboardComponent('test_component')
        assert component.name == 'test_component'
        assert component.enabled is True
        assert component.auto_refresh is True
        assert component.refresh_interval == 5

    def test_dashboard_component_custom_initialization(self):
        """Test DashboardComponent initialization with custom settings."""
        component = DashboardComponent(
            'test_component',
            enabled=False,
            auto_refresh=False,
            refresh_interval=10
        )
        assert component.name == 'test_component'
        assert component.enabled is False
        assert component.auto_refresh is False
        assert component.refresh_interval == 10

    def test_component_update(self):
        """Test component update."""
        component = DashboardComponent('test_component')
        data = {'value': 42, 'status': 'active'}
        result = component.update(data)
        assert result['updated'] is True
        assert result['timestamp'] > 0
        assert result['data'] == data

    def test_component_get_status(self):
        """Test component status retrieval."""
        component = DashboardComponent('test_component')
        status = component.get_status()
        assert status['name'] == 'test_component'
        assert status['enabled'] is True
        assert status['auto_refresh'] is True
        assert 'last_update' in status

    def test_component_enable_disable(self):
        """Test component enable/disable."""
        component = DashboardComponent('test_component')
        # Test disable
        result = component.disable()
        assert result['enabled'] is False
        assert component.enabled is False
        # Test enable
        result = component.enable()
        assert result['enabled'] is True
        assert component.enabled is True

    def test_component_set_refresh_interval(self):
        """Test component refresh interval setting."""
        component = DashboardComponent('test_component')
        result = component.set_refresh_interval(10)
        assert result['refresh_interval'] == 10
        assert component.refresh_interval == 10


class TestOptimizationDashboardIntegration:
    """Integration tests for optimization dashboard components."""

    def test_dashboard_with_monitoring(self):
        """Test dashboard integration with monitoring."""
        dashboard = OptimizationDashboard()
        monitor = RealTimeMonitor()
        # Start monitoring
        monitor_result = monitor.start_monitoring()
        assert monitor_result['status'] == 'started'
        # Start dashboard with the server thread mocked out, as in the unit tests.
        with patch('src.services.optimization_dashboard.threading.Thread'):
            dashboard_result = dashboard.start_dashboard()
        assert dashboard_result['status'] == 'started'
        # Update metrics
        metrics = {'cpu_usage': 75.0, 'memory_usage': 60.0}
        update_result = dashboard.update_metrics(metrics)
        assert update_result['updated'] is True
        # Stop dashboard and monitoring
        stop_result = dashboard.stop_dashboard()
        assert stop_result['status'] == 'stopped'
        monitor.stop_monitoring()

    def test_dashboard_with_charts(self):
        """Test dashboard integration with charts."""
        dashboard = OptimizationDashboard()
        charts = InteractiveCharts()
        # Create test data
        data = [
            {'timestamp': 1000, 'cpu_usage': 50.0, 'memory_usage': 40.0},
            {'timestamp': 2000, 'cpu_usage': 60.0, 'memory_usage': 50.0}
        ]
        # Create charts
        performance_chart = charts.create_performance_chart(data, 'cpu_usage')
        memory_chart = charts.create_memory_chart(data)
        assert performance_chart['chart_type'] == 'line'
        assert memory_chart['chart_type'] == 'line'
        assert len(performance_chart['data']) == 2
        assert len(memory_chart['data']) == 2

    def test_dashboard_with_alerts(self):
        """Test dashboard integration with alerts."""
        dashboard = OptimizationDashboard()
        alert_system = AlertSystem()
        # Create alerts
        alert1 = alert_system.create_alert('warning', 'High CPU usage', 'monitoring', 80.0, 85.0)
        alert2 = alert_system.create_alert('error', 'Memory limit exceeded', 'monitoring', 90.0, 95.0)
        assert alert1['level'] == 'warning'
        assert alert2['level'] == 'error'
        # Get alerts
        alerts = alert_system.get_alerts()
        assert len(alerts) == 2
        # Get summary
        summary = alert_system.get_alert_summary()
        assert summary['total_alerts'] == 2
        assert summary['warning_count'] == 1
        assert summary['error_count'] == 1

    def test_dashboard_with_configuration(self):
        """Test dashboard integration with configuration."""
        dashboard = OptimizationDashboard()
        config_manager = ConfigurationManager()
        # Create configuration
        config = {
            'dashboard': {'port': 8080, 'host': 'localhost'},
            'monitoring': {'interval': 1, 'enabled': True},
            'charts': {'auto_update': True, 'max_points': 1000}
        }
        # Save configuration
        with patch('builtins.open', create=True) as mock_open:
            mock_open.return_value.__enter__.return_value.write.return_value = None
            save_result = config_manager.save_configuration(config)
            assert save_result['saved'] is True
        # Validate configuration
        validation_result = config_manager.validate_configuration(config)
        assert validation_result['valid'] is True


if __name__ == '__main__':
    pytest.main([__file__])