From 5186be7335657a766ae27d06a5e1b646906bc6c5 Mon Sep 17 00:00:00 2001
From: TaeHyun aivle
Date: Tue, 12 Aug 2025 01:53:52 +0000
Subject: [PATCH 01/12] =?UTF-8?q?=F0=9F=94=A5=20Chore:=20Remove=20unused?=
 =?UTF-8?q?=20files=20and=20tests?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Clean up files and test code that are no longer used by the existing simulator
---
 .../app/routers/connection_test_router.py     |  49 ---
 .../app/services/model_client.py              | 100 -----
 .../app/utils/logger.py                       |  58 ---
 .../tests/test_logger.py                      | 324 ----------------
 .../tests/test_main.py                        | 341 ----------------
 .../tests/test_model_client.py                |  87 -----
 .../tests/test_settings.py                    | 363 ------------------
 7 files changed, 1322 deletions(-)
 delete mode 100644 services/painting-process-data-simulator-service/app/routers/connection_test_router.py
 delete mode 100644 services/painting-process-data-simulator-service/app/services/model_client.py
 delete mode 100644 services/painting-process-data-simulator-service/app/utils/logger.py
 delete mode 100644 services/painting-process-data-simulator-service/tests/test_logger.py
 delete mode 100644 services/painting-process-data-simulator-service/tests/test_main.py
 delete mode 100644 services/painting-process-data-simulator-service/tests/test_model_client.py
 delete mode 100644 services/painting-process-data-simulator-service/tests/test_settings.py

diff --git a/services/painting-process-data-simulator-service/app/routers/connection_test_router.py b/services/painting-process-data-simulator-service/app/routers/connection_test_router.py
deleted file mode 100644
index dda1963..0000000
--- a/services/painting-process-data-simulator-service/app/routers/connection_test_router.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from fastapi import APIRouter
-from app.services.model_client import model_client
-from app.services.azure_storage import azure_storage
-from app.config.settings import settings
-
-router = APIRouter()
-
-@router.get("/azure-storage-connection")
-async def test_azure_connection():
-    """Azure Storage 연결 테스트"""
-    try:
-        files = await azure_storage.list_data_files()
-
-        return {
-            "status": "success",
-            "message": "Azure Storage 연결 성공",
-            "file_count": len(files),
-            "sample_files": files[:5]  # 처음 5개 파일만 표시
-        }
-    except Exception as e:
-        return {
-            "status": "error",
-            "message": f"Azure Storage 연결 실패: {str(e)}"
-        }
-
-
-@router.get("/models-connection")
-async def test_model_services():
-    """모델 서비스 연결 테스트"""
-    try:
-        health_status = await model_client.health_check_all()
-
-        healthy_services = [name for name,
-                            status in health_status.items() if status]
-        unhealthy_services = [name for name,
-                              status in health_status.items() if not status]
-
-        return {
-            "status": "success" if healthy_services else "error",
-            "healthy_services": healthy_services,
-            "unhealthy_services": unhealthy_services,
-            "total_services": len(settings.model_services),
-            "healthy_count": len(healthy_services)
-        }
-    except Exception as e:
-        return {
-            "status": "error",
-            "message": f"모델 서비스 테스트 실패: {str(e)}"
-        }
diff --git a/services/painting-process-data-simulator-service/app/services/model_client.py b/services/painting-process-data-simulator-service/app/services/model_client.py
deleted file mode 100644
index 8205bcc..0000000
--- a/services/painting-process-data-simulator-service/app/services/model_client.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import httpx
-import asyncio
-from typing import Dict, Any, Optional
-from app.config.settings import settings
-
-
-class ModelClient:
-    def __init__(self):
-        self.timeout = 
httpx.Timeout(settings.http_timeout) - self.max_retries = settings.max_retries - - async def predict_painting_issue(self, data: Dict[str, Any], client: httpx.AsyncClient = None) -> Optional[Dict[str, Any]]: - """Painting Process Equipment 모델 서비스에 예측 요청""" - service_name = "painting-process-equipment" - service_url = settings.model_services.get(service_name) - - if not service_url: - print(f"❌ 알 수 없는 서비스: {service_name}") - return None - - predict_url = f"{service_url}/predict/" - - async def _request(client: httpx.AsyncClient): - for attempt in range(self.max_retries): - try: - response = await client.post(predict_url, json=data) - - if response.status_code == 200: - response_json = response.json() - if "predictions" in response_json and response_json["predictions"]: - result = response_json["predictions"][0] - result['machineId'] = data.get('machineId') - result['timeStamp'] = data.get('timeStamp') - print(f"✅ {service_name} 예측 성공 (이슈 감지) (시도 {attempt + 1})") - return result - else: - print(f"⚠️ {service_name} 응답 형식이 올바르지 않음 (시도 {attempt + 1})") - return None - elif response.status_code == 204: - print(f"✅ {service_name} 예측 성공 (정상) (시도 {attempt + 1})") - return None - else: - print(f"⚠️ {service_name} HTTP {response.status_code} (시도 {attempt + 1})") - - except httpx.TimeoutException: - print(f"⏰ {service_name} 타임아웃 (시도 {attempt + 1})") - except httpx.ConnectError: - print(f"🔌 {service_name} 연결 실패 (시도 {attempt + 1})") - except Exception as e: - print(f"❌ {service_name} 예측 오류 (시도 {attempt + 1}): {e}") - - if attempt < self.max_retries - 1: - await asyncio.sleep(min(2 ** attempt, 30)) - - print(f"❌ {service_name} 최대 재시도 횟수 초과") - return None - - if client: - return await _request(client) - else: - async with httpx.AsyncClient(timeout=self.timeout) as new_client: - return await _request(new_client) - - async def health_check(self, service_name: str) -> bool: - """서비스 헬스 체크""" - service_url = settings.model_services.get(service_name) - - if not service_url: - return False - - health_url = f"{service_url}/health" - - try: - async with httpx.AsyncClient(timeout=httpx.Timeout(5.0)) as client: - response = await client.get(health_url) - return response.status_code == 200 - except (httpx.HTTPError, asyncio.TimeoutError): - return False - - async def health_check_all(self) -> Dict[str, bool]: - """모든 서비스 헬스 체크""" - tasks = [] - service_names = list(settings.model_services.keys()) - - for service_name in service_names: - task = self.health_check(service_name) - tasks.append(task) - - results = await asyncio.gather(*tasks, return_exceptions=True) - - health_status = {} - for service_name, result in zip(service_names, results): - health_status[service_name] = result if isinstance( - result, bool) else False - - return health_status - - -# 글로벌 모델 클라이언트 인스턴스 -model_client = ModelClient() diff --git a/services/painting-process-data-simulator-service/app/utils/logger.py b/services/painting-process-data-simulator-service/app/utils/logger.py deleted file mode 100644 index 401e07c..0000000 --- a/services/painting-process-data-simulator-service/app/utils/logger.py +++ /dev/null @@ -1,58 +0,0 @@ -import json -import os -from datetime import datetime -from typing import Dict, Any -from app.config.settings import settings - - -class AnomalyLogger: - def __init__(self): - # 로그 디렉토리 생성 - os.makedirs(settings.log_directory, exist_ok=True) - self.log_file_path = os.path.join( - settings.log_directory, settings.log_filename) - - def log_anomaly(self, service_name: str, prediction_result: Dict[str, Any], original_data: 
Dict[str, Any]): - """이상 감지 결과를 로그 파일에 저장""" - log_entry = { - "timestamp": datetime.now().isoformat(), - "service_name": service_name, - "prediction": prediction_result, - "original_data": original_data - } - - # JSON 파일에 추가 - with open(self.log_file_path, "a", encoding="utf-8") as f: - f.write(json.dumps(log_entry, ensure_ascii=False) + "\n") - - # 콘솔 출력 - print(f"🚨 ANOMALY DETECTED: {service_name}") - print(f" └─ Machine ID: {prediction_result.get('machineId', 'N/A')}") - print(f" └─ Time: {prediction_result.get('timeStamp', 'N/A')}") - print(f" └─ Issue: {prediction_result.get('issue', 'N/A')}") - print("-" * 50) - - def log_normal_processing(self, service_name: str, original_data: Dict[str, Any]): - """정상 처리 결과를 콘솔에만 출력""" - print( - f"✅ NORMAL: {service_name} - Machine ID: {original_data.get('machineId', 'N/A')}, Time: {original_data.get('timeStamp', 'N/A')}") - - def log_error(self, service_name: str, error_message: str, original_data: Dict[str, Any] = None): - """에러 로그""" - log_entry = { - "timestamp": datetime.now().isoformat(), - "service_name": service_name, - "error": error_message, - "original_data": original_data - } - - error_log_path = os.path.join( - settings.log_directory, settings.error_log_filename) - with open(error_log_path, "a", encoding="utf-8") as f: - f.write(json.dumps(log_entry, ensure_ascii=False) + "\n") - - print(f"❌ ERROR: {service_name} - {error_message}") - - -# 글로벌 로거 인스턴스 -anomaly_logger = AnomalyLogger() diff --git a/services/painting-process-data-simulator-service/tests/test_logger.py b/services/painting-process-data-simulator-service/tests/test_logger.py deleted file mode 100644 index 1e5742e..0000000 --- a/services/painting-process-data-simulator-service/tests/test_logger.py +++ /dev/null @@ -1,324 +0,0 @@ -import json -import os -import tempfile -import pytest -from unittest.mock import patch, mock_open, call -import shutil - -# Import the logger module from the correct path -from app.utils.logger import AnomalyLogger, anomaly_logger - - -class TestAnomalyLogger: - """ - Comprehensive unit tests for AnomalyLogger class. 
- Testing framework: pytest (as identified in existing project structure) - """ - - @pytest.fixture(autouse=True) - def setup_method(self): - """Set up test fixtures before each test method.""" - # Mock settings to avoid dependency on actual config - - self.settings_patcher = patch('app.utils.logger.settings') - self.mock_settings = self.settings_patcher.start() - self.mock_settings.log_directory = '/tmp/test_logs' - self.mock_settings.log_filename = 'anomaly_test.log' - self.mock_settings.error_log_filename = 'error_test.log' - - # Create temporary directory for tests - - self.temp_dir = tempfile.mkdtemp() - self.mock_settings.log_directory = self.temp_dir - - yield - - # Clean up after test - self.settings_patcher.stop() - if os.path.exists(self.temp_dir): - shutil.rmtree(self.temp_dir) - - @patch('app.utils.logger.os.makedirs') - def test_init_creates_log_directory(self, mock_makedirs): - """Test that AnomalyLogger initialization creates log directory.""" - with patch('app.utils.logger.settings') as mock_settings: - mock_settings.log_directory = '/test/logs' - mock_settings.log_filename = 'test.log' - AnomalyLogger() - mock_makedirs.assert_called_once_with('/test/logs', exist_ok=True) - - def test_init_sets_correct_log_file_path(self): - """Test that AnomalyLogger sets the correct log file path.""" - logger = AnomalyLogger() - expected_path = os.path.join(self.temp_dir, 'anomaly_test.log') - assert logger.log_file_path == expected_path - - @patch('builtins.print') - def test_log_anomaly_console_output(self, mock_print): - """Test that log_anomaly produces correct console output.""" - service_name = "TestService" - prediction_result = { - 'machineId': 'MACHINE_001', - 'timeStamp': '2023-12-01T10:30:00', - 'issue': 'Temperature anomaly' - } - original_data = {'sensor_data': {'temperature': 85.5}} - - logger = AnomalyLogger() - - with patch('builtins.open', mock_open()): - logger.log_anomaly(service_name, prediction_result, original_data) - - # Verify console output - expected_calls = [ - call("🚨 ANOMALY DETECTED: TestService"), - call(" └─ Machine ID: MACHINE_001"), - call(" └─ Time: 2023-12-01T10:30:00"), - call(" └─ Issue: Temperature anomaly"), - call("-" * 50) - ] - - mock_print.assert_has_calls(expected_calls) - - @patch('builtins.print') - def test_log_anomaly_console_output_with_missing_fields(self, mock_print): - """Test console output when prediction_result has missing fields.""" - service_name = "TestService" - prediction_result = {} # Empty prediction result - original_data = {'sensor_data': {'temperature': 85.5}} - - logger = AnomalyLogger() - - with patch('builtins.open', mock_open()): - logger.log_anomaly(service_name, prediction_result, original_data) - - # Verify console output handles missing fields gracefully - expected_calls = [ - call("🚨 ANOMALY DETECTED: TestService"), - call(" └─ Machine ID: N/A"), - call(" └─ Time: N/A"), - call(" └─ Issue: N/A"), - call("-" * 50) - ] - - mock_print.assert_has_calls(expected_calls) - - @patch('builtins.print') - def test_log_normal_processing(self, mock_print): - """Test log_normal_processing console output.""" - service_name = "TestService" - original_data = { - 'machineId': 'MACHINE_001', - 'timeStamp': '2023-12-01T10:30:00' - } - - logger = AnomalyLogger() - logger.log_normal_processing(service_name, original_data) - - expected_message = "✅ NORMAL: TestService - Machine ID: MACHINE_001, Time: 2023-12-01T10:30:00" - mock_print.assert_called_once_with(expected_message) - - @patch('builtins.print') - def 
test_log_normal_processing_with_missing_fields(self, mock_print): - """Test log_normal_processing handles missing fields in original_data.""" - service_name = "TestService" - original_data = {} # Empty data - - logger = AnomalyLogger() - logger.log_normal_processing(service_name, original_data) - - expected_message = "✅ NORMAL: TestService - Machine ID: N/A, Time: N/A" - mock_print.assert_called_once_with(expected_message) - - @patch('app.utils.logger.datetime') - @patch('builtins.print') - @patch('builtins.open', new_callable=mock_open) - def test_log_error_without_original_data(self, mock_file, mock_print, mock_datetime): - """Test log_error works when original_data is None.""" - mock_datetime.now.return_value.isoformat.return_value = "2023-12-01T10:30:00.123456" - - service_name = "TestService" - error_message = "Invalid configuration" - - logger = AnomalyLogger() - logger.log_error(service_name, error_message) - - # Verify JSON content includes None for original_data - handle = mock_file.return_value.__enter__.return_value - written_content = handle.write.call_args[0][0] - json_part = written_content.rstrip('\n') - parsed_json = json.loads(json_part) - - assert parsed_json['original_data'] is None - - @patch('builtins.open', side_effect=PermissionError("Permission denied")) - @patch('builtins.print') - def test_log_anomaly_file_permission_error(self, mock_print, mock_open_func): - """Test that file permission errors are handled appropriately.""" - service_name = "TestService" - prediction_result = {'machineId': 'MACHINE_001'} - original_data = {'sensor_data': {'temperature': 85.5}} - - logger = AnomalyLogger() - - with pytest.raises(PermissionError): - logger.log_anomaly(service_name, prediction_result, original_data) - - @patch('builtins.open', side_effect=OSError("Disk full")) - @patch('builtins.print') - def test_log_error_disk_full_error(self, mock_print, mock_open_func): - """Test that disk full errors are handled appropriately.""" - service_name = "TestService" - error_message = "Connection timeout" - - logger = AnomalyLogger() - - with pytest.raises(OSError): - logger.log_error(service_name, error_message) - - @patch('builtins.print') - def test_log_methods_with_unicode_characters(self, mock_print): - """Test that all log methods handle Unicode characters correctly.""" - service_name = "서비스테스트" # Korean characters - - # Test log_normal_processing with Unicode - original_data = { - 'machineId': 'MACHINE_001', - 'timeStamp': '2023-12-01T10:30:00', - 'description': '온도 센서' # Korean characters - } - - logger = AnomalyLogger() - logger.log_normal_processing(service_name, original_data) - - # Verify it doesn't raise encoding errors - assert mock_print.called - - @patch('app.utils.logger.datetime') - @patch('builtins.open', new_callable=mock_open) - def test_json_serialization_with_complex_data_types(self, mock_file, mock_datetime): - """Test JSON serialization handles complex data types correctly.""" - mock_datetime.now.return_value.isoformat.return_value = "2023-12-01T10:30:00.123456" - - # Test with nested dictionaries and lists - service_name = "TestService" - prediction_result = { - 'machineId': 'MACHINE_001', - 'nested_data': { - 'sensors': ['temp', 'pressure'], - 'readings': {'temp': 25.5, 'pressure': 1.2} - } - } - original_data = { - 'raw_sensors': [1, 2, 3, 4, 5], - 'metadata': {'source': 'sensor_array_1'} - } - - logger = AnomalyLogger() - - with patch('builtins.print'): - logger.log_anomaly(service_name, prediction_result, original_data) - - # Verify that complex data 
structures are serialized correctly - handle = mock_file.return_value.__enter__.return_value - written_content = handle.write.call_args[0][0] - json_part = written_content.rstrip('\n') - - # Should not raise JSON serialization errors - parsed_json = json.loads(json_part) - assert isinstance(parsed_json, dict) - assert parsed_json['service_name'] == service_name - - def test_global_logger_instance_exists(self): - """Test that global anomaly_logger instance is created.""" - assert isinstance(anomaly_logger, AnomalyLogger) - - def test_log_file_paths_are_constructed_correctly(self): - """Test that log file paths are constructed correctly from settings.""" - logger = AnomalyLogger() - expected_anomaly_path = os.path.join(self.temp_dir, 'anomaly_test.log') - assert logger.log_file_path == expected_anomaly_path - - @patch('builtins.print') - def test_empty_service_name_handling(self, mock_print): - """Test handling of empty service names.""" - service_name = "" - original_data = {'machineId': 'MACHINE_001'} - - logger = AnomalyLogger() - logger.log_normal_processing(service_name, original_data) - - expected_message = "✅ NORMAL: - Machine ID: MACHINE_001, Time: N/A" - mock_print.assert_called_once_with(expected_message) - - @patch('builtins.print') - def test_none_values_handling(self, mock_print): - """Test handling of None values in data structures.""" - service_name = "TestService" - original_data = { - 'machineId': None, - 'timeStamp': None - } - - logger = AnomalyLogger() - logger.log_normal_processing(service_name, original_data) - - expected_message = "✅ NORMAL: TestService - Machine ID: None, Time: None" - mock_print.assert_called_once_with(expected_message) - - @pytest.mark.parametrize("service_name,prediction_result,original_data", [ - ("Service1", {'machineId': 'M1'}, {'data': 'test1'}), - ("Service2", {'machineId': 'M2', 'issue': 'Critical'}, {'data': 'test2'}), - ("Service3", {}, {}), - ]) - @patch('builtins.print') - def test_log_anomaly_parametrized(self, mock_print, service_name, prediction_result, original_data): - """Parametrized test for log_anomaly with different data combinations.""" - logger = AnomalyLogger() - - with patch('builtins.open', mock_open()): - logger.log_anomaly(service_name, prediction_result, original_data) - - # Verify the anomaly detection message is always printed - calls = mock_print.call_args_list - assert any(f"🚨 ANOMALY DETECTED: {service_name}" in str(call) for call in calls) - - @patch('builtins.print') - def test_log_normal_processing_with_special_characters(self, mock_print): - """Test log_normal_processing with special characters in data.""" - service_name = "Test@Service#123" - original_data = { - 'machineId': 'MACHINE-001_TEST', - 'timeStamp': '2023-12-01T10:30:00+00:00' - } - - logger = AnomalyLogger() - logger.log_normal_processing(service_name, original_data) - - expected_message = "✅ NORMAL: Test@Service#123 - Machine ID: MACHINE-001_TEST, Time: 2023-12-01T10:30:00+00:00" - mock_print.assert_called_once_with(expected_message) - - @patch('app.utils.logger.datetime') - @patch('builtins.open', new_callable=mock_open) - def test_timestamp_format_consistency(self, mock_file, mock_datetime): - """Test that timestamps are consistently formatted across all log methods.""" - mock_datetime.now.return_value.isoformat.return_value = '2023-12-01T10:30:00.123456' - - logger = AnomalyLogger() - service_name = "TestService" - - with patch('builtins.print'): - # Test anomaly log timestamp - logger.log_anomaly(service_name, {'machineId': 'M1'}, {}) - - # Test 
error log timestamp - logger.log_error(service_name, "Test error") - - # Verify both calls used the same timestamp format - calls = mock_file.return_value.__enter__.return_value.write.call_args_list - - for call_args in calls: - written_content = call_args[0][0] - json_part = written_content.rstrip('\n') - parsed_json = json.loads(json_part) - assert parsed_json['timestamp'] == '2023-12-01T10:30:00.123456' \ No newline at end of file diff --git a/services/painting-process-data-simulator-service/tests/test_main.py b/services/painting-process-data-simulator-service/tests/test_main.py deleted file mode 100644 index 12437d9..0000000 --- a/services/painting-process-data-simulator-service/tests/test_main.py +++ /dev/null @@ -1,341 +0,0 @@ -import pytest -from unittest.mock import Mock, patch, AsyncMock -from fastapi.testclient import TestClient -from fastapi import FastAPI -import os - -from app.main import app, lifespan - -class TestLifespan: - """Test suite for the lifespan context manager""" - - @pytest.mark.asyncio - async def test_lifespan_startup_with_azure_connection_string(self, capfd): - """Test lifespan startup when Azure connection string is configured""" - mock_app = Mock(spec=FastAPI) - - with patch('app.main.settings') as mock_settings, \ - patch('app.main.simulator_scheduler') as mock_scheduler, \ - patch('app.main.os.makedirs') as mock_makedirs: - - # Configure mock settings - mock_settings.azure_connection_string = "DefaultEndpointsProtocol=https;AccountName=test" - mock_settings.log_directory = "/test/logs" - mock_settings.scheduler_interval_minutes = 5 - mock_settings.model_services = ["service1", "service2", "service3"] - - mock_scheduler.is_running = True - mock_scheduler.stop = AsyncMock() - - # Test the lifespan context manager - async with lifespan(mock_app): - pass - - # Verify directory creation - mock_makedirs.assert_called_once_with("/test/logs", exist_ok=True) - - # Verify scheduler stop was called - mock_scheduler.stop.assert_called_once() - - # Check printed output - captured = capfd.readouterr() - assert "🚀 Data Simulator Service 시작 중..." in captured.out - assert "📁 로그 디렉토리: /test/logs" in captured.out - assert "🔧 스케줄러 간격: 5분" in captured.out - assert "🎯 대상 서비스 수: 3" in captured.out - assert "🛑 Data Simulator Service 종료 중..." in captured.out - - @pytest.mark.asyncio - async def test_lifespan_scheduler_not_running_on_shutdown(self, capfd): - """Test lifespan shutdown when scheduler is not running""" - mock_app = Mock(spec=FastAPI) - - with patch('app.main.settings') as mock_settings, \ - patch('app.main.simulator_scheduler') as mock_scheduler, \ - patch('app.main.os.makedirs'): - - mock_settings.azure_connection_string = "test" - mock_settings.log_directory = "/test/logs" - mock_settings.scheduler_interval_minutes = 1 - mock_settings.model_services = [] - - mock_scheduler.is_running = False - mock_scheduler.stop = AsyncMock() - - # Test the lifespan context manager - async with lifespan(mock_app): - pass - - # Verify scheduler stop was NOT called - mock_scheduler.stop.assert_not_called() - - # Check shutdown message still appears - captured = capfd.readouterr() - assert "🛑 Data Simulator Service 종료 중..." 
in captured.out
-    @pytest.mark.asyncio
-    async def test_lifespan_makedirs_exception_handling(self):
-        """Test that lifespan propagates directory creation errors"""
-        mock_app = Mock(spec=FastAPI)
-
-        with patch('app.main.settings') as mock_settings, \
-             patch('app.main.simulator_scheduler') as mock_scheduler, \
-             patch('app.main.os.makedirs') as mock_makedirs:
-
-            mock_settings.azure_connection_string = "test"
-            mock_settings.log_directory = "/invalid/path"
-            mock_settings.scheduler_interval_minutes = 1
-            mock_settings.model_services = []
-
-            mock_scheduler.is_running = False
-            mock_makedirs.side_effect = OSError("Permission denied")
-
-            # Directory creation failures should propagate to the caller
-            with pytest.raises(OSError):
-                async with lifespan(mock_app):
-                    pass
-
-    @pytest.mark.asyncio
-    async def test_lifespan_scheduler_stop_exception_handling(self):
-        """Test that lifespan propagates scheduler stop errors"""
-        mock_app = Mock(spec=FastAPI)
-
-        with patch('app.main.settings') as mock_settings, \
-             patch('app.main.simulator_scheduler') as mock_scheduler, \
-             patch('app.main.os.makedirs'):
-
-            mock_settings.azure_connection_string = "test"
-            mock_settings.log_directory = "/test/logs"
-            mock_settings.scheduler_interval_minutes = 1
-            mock_settings.model_services = []
-
-            mock_scheduler.is_running = True
-            mock_scheduler.stop = AsyncMock(side_effect=Exception("Scheduler error"))
-
-            # Scheduler stop errors should propagate, not be silently swallowed
-            with pytest.raises(Exception, match="Scheduler error"):
-                async with lifespan(mock_app):
-                    pass
-
-
-class TestFastAPIApp:
-    """Test suite for FastAPI app configuration"""
-
-    def test_app_configuration(self):
-        """Test FastAPI app is configured with correct metadata"""
-        assert app.title == "Painting Process Equipment Data Simulator Service"
-        assert app.description == "도장 공정 설비 결함 탐지 모델을 위한 실시간 데이터 시뮬레이터"
-        assert app.version == "1.0.0"
-
-
-    def test_app_routers_included(self):
-        """Test that required routers are included in the app"""
-        # Check that routers are included by examining routes
-        route_paths = [route.path for route in app.routes]
-
-        # Should have simulator routes
-        simulator_routes = [path for path in route_paths if path.startswith("/simulator")]
-        assert len(simulator_routes) > 0, "Simulator router not properly included"
-
-        # Should have test connection routes
-        test_routes = [path for path in route_paths if path.startswith("/test")]
-        assert len(test_routes) > 0, "Test connection router not properly included"
-
-    def test_app_has_basic_routes(self):
-        """Test that basic routes (root and health) are defined"""
-        route_paths = [route.path for route in app.routes]
-
-        assert "/" in route_paths, "Root route not defined"
-        assert "/health" in route_paths, "Health route not defined"
-
-
-class TestAPIEndpoints:
-    """Test suite for API endpoints"""
-
-    def setup_method(self):
-        """Setup test client for each test"""
-        self.client = TestClient(app)
-
-    @patch('app.main.simulator_scheduler')
-    def test_root_endpoint(self, mock_scheduler):
-        """Test root endpoint returns correct service information"""
-        mock_scheduler.get_status.return_value = {
-            "running": True,
-            "last_execution": "2024-01-01T00:00:00Z"
-        }
-
-        response = self.client.get("/")
-
-        assert response.status_code == 200
-        data = response.json()
-
-        expected_keys = ["service", "version", "status", "target_model", "scheduler_status"]
-        for key in expected_keys:
-            assert key in data, f"Missing key: {key}"
-
-        assert data["service"] == "Painting Process Equipment Data Simulator Service"
-        
assert data["version"] == "1.0.0" - assert data["status"] == "running" - assert data["target_model"] == "painting-process-equipment-defect-detection" - assert data["scheduler_status"]["running"] - - @patch('app.main.simulator_scheduler') - def test_root_endpoint_scheduler_not_running(self, mock_scheduler): - """Test root endpoint when scheduler is not running""" - mock_scheduler.get_status.return_value = { - "running": False, - "last_execution": None - } - - response = self.client.get("/") - - assert response.status_code == 200 - data = response.json() - assert not data["scheduler_status"]["running"] - - def test_health_check_endpoint(self): - """Test health check endpoint returns healthy status""" - response = self.client.get("/health") - - assert response.status_code == 200 - data = response.json() - assert data == {"status": "healthy"} - - def test_nonexistent_endpoint(self): - """Test that nonexistent endpoints return 404""" - response = self.client.get("/nonexistent") - assert response.status_code == 404 - - def test_root_endpoint_method_not_allowed(self): - """Test that non-GET methods on root endpoint return 405""" - response = self.client.post("/") - assert response.status_code == 405 - - response = self.client.put("/") - assert response.status_code == 405 - - response = self.client.delete("/") - assert response.status_code == 405 - - def test_health_endpoint_method_not_allowed(self): - """Test that non-GET methods on health endpoint return 405""" - response = self.client.post("/health") - assert response.status_code == 405 - - -class TestIntegrationScenarios: - """Integration test scenarios for the main application""" - - def setup_method(self): - """Setup test client for each test""" - self.client = TestClient(app) - - @patch('app.main.simulator_scheduler') - @patch('app.main.settings') - def test_app_startup_shutdown_cycle(self, mock_settings, mock_scheduler): - """Test complete startup and shutdown cycle""" - mock_settings.azure_connection_string = "test" - mock_settings.log_directory = "/test/logs" - mock_settings.scheduler_interval_minutes = 5 - mock_settings.model_services = ["service1", "service2"] - - mock_scheduler.is_running = True - mock_scheduler.get_status.return_value = {"running": True} - mock_scheduler.stop = AsyncMock() - - # Create a new test client to trigger lifespan - with TestClient(app) as client: - # Test that the app is working after startup - response = client.get("/health") - assert response.status_code == 200 - - response = client.get("/") - assert response.status_code == 200 - - @patch('app.main.simulator_scheduler') - def test_scheduler_status_consistency(self, mock_scheduler): - """Test that scheduler status is consistently reported""" - # Test with running scheduler - mock_scheduler.get_status.return_value = { - "running": True, - "last_execution": "2024-01-01T10:00:00Z", - "next_execution": "2024-01-01T10:05:00Z" - } - - response1 = self.client.get("/") - response2 = self.client.get("/") - - assert response1.status_code == 200 - assert response2.status_code == 200 - - data1 = response1.json() - data2 = response2.json() - - assert data1["scheduler_status"] == data2["scheduler_status"] - - # Test with stopped scheduler - mock_scheduler.get_status.return_value = { - "running": False, - "last_execution": "2024-01-01T09:55:00Z", - "next_execution": None - } - - response3 = self.client.get("/") - assert response3.status_code == 200 - data3 = response3.json() - assert not data3["scheduler_status"]["running"] - - -class TestErrorHandling: - """Test error 
handling scenarios""" - - def setup_method(self): - """Setup test client for each test""" - self.client = TestClient(app) - - def test_malformed_request_headers(self): - """Test handling of malformed request headers""" - # Test with various problematic headers - response = self.client.get("/", headers={"Content-Type": "invalid/type"}) - assert response.status_code == 200 # Should still work - - response = self.client.get("/health", headers={"Accept": "invalid"}) - assert response.status_code == 200 # Should still work - - def test_large_request_handling(self): - """Test handling of requests with large payloads (where applicable)""" - # Health endpoint should handle any size gracefully since it's GET - response = self.client.get("/health" + "?" + "x" * 1000) - # Should either work or return appropriate error, but not crash - assert response.status_code in [200, 414, 400] - - -class TestEnvironmentVariableHandling: - """Test environment variable and configuration handling""" - - @patch.dict(os.environ, {}, clear=True) - @patch('app.main.settings') - def test_missing_environment_variables(self, mock_settings): - """Test behavior when environment variables are missing""" - mock_settings.azure_connection_string = None - mock_settings.log_directory = "./logs" - mock_settings.scheduler_interval_minutes = 5 - mock_settings.model_services = [] - - # App should still start and work - client = TestClient(app) - response = client.get("/health") - assert response.status_code == 200 - - @patch('app.main.settings') - def test_invalid_configuration_values(self, mock_settings): - """Test handling of invalid configuration values""" - mock_settings.azure_connection_string = "" - mock_settings.log_directory = "" - mock_settings.scheduler_interval_minutes = -1 - mock_settings.model_services = None - - # App should handle gracefully - client = TestClient(app) - response = client.get("/health") - assert response.status_code == 200 diff --git a/services/painting-process-data-simulator-service/tests/test_model_client.py b/services/painting-process-data-simulator-service/tests/test_model_client.py deleted file mode 100644 index 36d48b4..0000000 --- a/services/painting-process-data-simulator-service/tests/test_model_client.py +++ /dev/null @@ -1,87 +0,0 @@ -import pytest -import httpx -import respx -from typing import Dict, Any - -from app.services.model_client import model_client -from app.config.settings import settings - -# 모든 테스트는 비동기로 실행되도록 표시 -pytestmark = pytest.mark.asyncio - -# 테스트에 사용할 모델 서비스 URL -SERVICE_NAME = "painting-process-equipment" -BASE_URL = settings.model_services.get(SERVICE_NAME) -PREDICT_URL = f"{BASE_URL}/predict/" - -# 테스트용 입력 데이터 -@pytest.fixture -def sample_input_data() -> Dict[str, Any]: - return { - "machineId": "TEST-MCH-01", - "timeStamp": "2025-08-06T12:00:00", - "thick": 25.0, - "voltage": 300.0, - "current": 80.0, - "temper": 35.0, - "issue": "", - "isSolved": False - } - -async def test_predict_painting_issue_success_with_issue(sample_input_data: Dict[str, Any], respx_mock): - """모델이 이슈를 감지했을 때 (200 OK) 응답을 올바르게 처리하는지 테스트""" - # given: 모델 서비스가 200 OK와 함께 이슈 데이터를 반환하도록 모킹 - mock_response = { - "predictions": [{ - "issue": "PAINT-EQ-VOL-HIGH" - }] - } - respx_mock.post(PREDICT_URL).mock(return_value=httpx.Response(200, json=mock_response)) - - # when: predict_painting_issue 함수 호출 - async with httpx.AsyncClient() as client: - result = await model_client.predict_painting_issue(sample_input_data, client) - - # then: 반환된 결과에 machineId와 timeStamp가 포함되고, issue가 올바른지 확인 - assert result is 
not None - assert result["issue"] == "PAINT-EQ-VOL-HIGH" - assert result["machineId"] == sample_input_data["machineId"] - assert result["timeStamp"] == sample_input_data["timeStamp"] - -async def test_predict_painting_issue_success_no_issue(sample_input_data: Dict[str, Any], respx_mock): - """모델이 정상으로 판단했을 때 (204 No Content) 응답을 올바르게 처리하는지 테스트""" - # given: 모델 서비스가 204 No Content를 반환하도록 모킹 - respx_mock.post(PREDICT_URL).mock(return_value=httpx.Response(204)) - - # when: predict_painting_issue 함수 호출 - async with httpx.AsyncClient() as client: - result = await model_client.predict_painting_issue(sample_input_data, client) - - # then: 결과가 None인지 확인 - assert result is None - -async def test_predict_painting_issue_http_error(sample_input_data: Dict[str, Any], respx_mock): - """모델 서비스가 HTTP 오류(500)를 반환했을 때 None을 반환하는지 테스트""" - # given: 모델 서비스가 500 Internal Server Error를 반환하도록 모킹 - respx_mock.post(PREDICT_URL).mock(return_value=httpx.Response(500)) - - # when: predict_painting_issue 함수 호출 - async with httpx.AsyncClient() as client: - result = await model_client.predict_painting_issue(sample_input_data, client) - - # then: 결과가 None인지 확인 (최대 재시도 후) - assert result is None - -async def test_predict_painting_issue_timeout_error(sample_input_data: Dict[str, Any], respx_mock): - """모델 서비스 연결 시 타임아웃이 발생했을 때 None을 반환하는지 테스트""" - # given: 모델 서비스 연결 시 Timeout 예외가 발생하도록 모킹 - respx_mock.post(PREDICT_URL).mock(side_effect=httpx.TimeoutException("Timeout error")) - - # when: predict_painting_issue 함수 호출 - async with httpx.AsyncClient() as client: - result = await model_client.predict_painting_issue(sample_input_data, client) - - # then: 결과가 None인지 확인 (최대 재시도 후) - assert result is None - - diff --git a/services/painting-process-data-simulator-service/tests/test_settings.py b/services/painting-process-data-simulator-service/tests/test_settings.py deleted file mode 100644 index 0b93690..0000000 --- a/services/painting-process-data-simulator-service/tests/test_settings.py +++ /dev/null @@ -1,363 +0,0 @@ -import os -import pytest -from unittest.mock import patch -from pydantic import ValidationError -from pydantic_settings import BaseSettings - -from app.config.settings import Settings, settings - - -class TestSettings: - """Test suite for Settings configuration class using pytest framework.""" - - def test_settings_inherits_from_base_settings(self): - """Test that Settings class properly inherits from BaseSettings.""" - assert issubclass(Settings, BaseSettings) - assert isinstance(settings, Settings) - - def test_default_values(self): - """Test that default values are set correctly.""" - test_settings = Settings(azure_connection_string="test_connection") - - assert test_settings.azure_container_name == "simulator-data" - assert test_settings.painting_data_folder == "painting-process-equipment" - assert test_settings.scheduler_interval_minutes == 1 - assert test_settings.batch_size == 10 - assert test_settings.log_directory == "logs" - assert test_settings.log_filename == "painting_issue_logs.json" - assert test_settings.error_log_filename == "painting_errors.json" - assert test_settings.http_timeout == 30 - assert test_settings.max_retries == 3 - - def test_integer_field_validation(self): - """Test integer field validation for various numeric settings.""" - # Valid integers - test_settings = Settings( - azure_connection_string="test_connection", - scheduler_interval_minutes=5, - batch_size=20, - http_timeout=60, - max_retries=5 - ) - assert test_settings.scheduler_interval_minutes == 5 - assert 
test_settings.batch_size == 20
-        assert test_settings.http_timeout == 60
-        assert test_settings.max_retries == 5
-
-        # Invalid types should raise a validation error (a bare Settings()
-        # would fail on the missing connection string, not the bad type)
-        with pytest.raises(ValidationError):
-            Settings(azure_connection_string="test", scheduler_interval_minutes="invalid")
-
-    def test_negative_integer_handling(self):
-        """Test handling of negative values for integer fields."""
-        # Test negative scheduler interval
-        test_settings = Settings(
-            azure_connection_string="test_connection",
-            scheduler_interval_minutes=-1
-        )
-        assert test_settings.scheduler_interval_minutes == -1
-
-        # Test negative batch size
-        test_settings = Settings(
-            azure_connection_string="test_connection",
-            batch_size=-5
-        )
-        assert test_settings.batch_size == -5
-
-    def test_zero_values(self):
-        """Test handling of zero values for integer fields."""
-        test_settings = Settings(
-            azure_connection_string="test_connection",
-            scheduler_interval_minutes=0,
-            batch_size=0,
-            http_timeout=0,
-            max_retries=0
-        )
-        assert test_settings.scheduler_interval_minutes == 0
-        assert test_settings.batch_size == 0
-        assert test_settings.http_timeout == 0
-        assert test_settings.max_retries == 0
-
-    def test_large_integer_values(self):
-        """Test handling of large integer values."""
-        test_settings = Settings(
-            azure_connection_string="test_connection",
-            scheduler_interval_minutes=999999,
-            batch_size=1000000,
-            http_timeout=86400,
-            max_retries=100
-        )
-        assert test_settings.scheduler_interval_minutes == 999999
-        assert test_settings.batch_size == 1000000
-        assert test_settings.http_timeout == 86400
-        assert test_settings.max_retries == 100
-
-    @patch.dict(os.environ, {"PAINTING_SERVICE_URL": "http://custom-host:9000"})
-    def test_painting_service_url_from_environment(self):
-        """Test painting_service_url reads from environment variable."""
-        test_settings = Settings(azure_connection_string="test_connection")
-        assert test_settings.painting_service_url == "http://custom-host:9000"
-
-    @patch.dict(os.environ, {}, clear=True)
-    def test_painting_service_url_default_value(self):
-        """Test painting_service_url uses default value when env var not set."""
-        test_settings = Settings(azure_connection_string="test_connection")
-        assert test_settings.painting_service_url == "http://localhost:8001"
-
-    def test_model_services_property(self):
-        """Test model_services property returns correct dictionary."""
-        test_settings = Settings(
-            azure_connection_string="test_connection",
-            painting_service_url="http://test-service:8080"
-        )
-
-        expected = {"painting-process-equipment": "http://test-service:8080"}
-        assert test_settings.model_services == expected
-
-    def test_model_services_property_with_default_url(self):
-        """Test model_services property with default painting service URL."""
-        with patch.dict(os.environ, {}, clear=True):
-            test_settings = Settings(azure_connection_string="test_connection")
-            expected = {"painting-process-equipment": "http://localhost:8001"}
-            assert test_settings.model_services == expected
-
-    def test_model_services_property_is_dynamic(self):
-        """Test that model_services property reflects changes to painting_service_url."""
-        test_settings = Settings(
-            azure_connection_string="test_connection",
-            painting_service_url="http://initial:8000"
-        )
-
-        # Initial value
-        assert test_settings.model_services["painting-process-equipment"] == "http://initial:8000"
-
-        # Change the URL and verify property updates
-        test_settings.painting_service_url = "http://updated:9000"
-        assert 
test_settings.model_services["painting-process-equipment"] == "http://updated:9000" - - def test_string_field_validation(self): - """Test string field validation and handling.""" - test_settings = Settings( - azure_connection_string="test_connection", - azure_container_name="custom-container", - painting_data_folder="custom-folder", - log_directory="custom-logs", - log_filename="custom.log", - error_log_filename="errors.log" - ) - - assert test_settings.azure_container_name == "custom-container" - assert test_settings.painting_data_folder == "custom-folder" - assert test_settings.log_directory == "custom-logs" - assert test_settings.log_filename == "custom.log" - assert test_settings.error_log_filename == "errors.log" - - def test_empty_string_handling(self): - """Test handling of empty strings for optional string fields.""" - test_settings = Settings( - azure_connection_string="test_connection", - azure_container_name="", - painting_data_folder="", - log_directory="", - log_filename="", - error_log_filename="" - ) - - assert test_settings.azure_container_name == "" - assert test_settings.painting_data_folder == "" - assert test_settings.log_directory == "" - assert test_settings.log_filename == "" - assert test_settings.error_log_filename == "" - - def test_special_characters_in_strings(self): - """Test handling of special characters in string fields.""" - test_settings = Settings( - azure_connection_string="DefaultEndpointsProtocol=https;AccountName=test;AccountKey=abc123==;EndpointSuffix=core.windows.net", - azure_container_name="test-container_123", - painting_data_folder="folder/with/slashes", - log_directory="logs\\windows\\path", - log_filename="file-with-dashes_and_underscores.json", - error_log_filename="файл.json" # Non-ASCII characters - ) - - assert "AccountKey=abc123==" in test_settings.azure_connection_string - assert test_settings.azure_container_name == "test-container_123" - assert test_settings.painting_data_folder == "folder/with/slashes" - assert test_settings.log_directory == "logs\\windows\\path" - assert test_settings.log_filename == "file-with-dashes_and_underscores.json" - assert test_settings.error_log_filename == "файл.json" - - def test_model_config_attributes(self): - """Test that model_config is properly set.""" - assert hasattr(Settings, 'model_config') - assert Settings.model_config['env_file'] == '.env' - assert Settings.model_config['env_file_encoding'] == 'utf-8' - - @patch.dict(os.environ, { - "AZURE_CONNECTION_STRING": "env_connection_string", - "AZURE_CONTAINER_NAME": "env-container", - "PAINTING_DATA_FOLDER": "env-folder", - "SCHEDULER_INTERVAL_MINUTES": "15", - "BATCH_SIZE": "50", - "LOG_DIRECTORY": "env-logs", - "HTTP_TIMEOUT": "120", - "MAX_RETRIES": "10" - }) - def test_environment_variable_override(self): - """Test that environment variables properly override default values.""" - test_settings = Settings() - - assert test_settings.azure_connection_string == "env_connection_string" - assert test_settings.azure_container_name == "env-container" - assert test_settings.painting_data_folder == "env-folder" - assert test_settings.scheduler_interval_minutes == 15 - assert test_settings.batch_size == 50 - assert test_settings.log_directory == "env-logs" - assert test_settings.http_timeout == 120 - assert test_settings.max_retries == 10 - - def test_type_coercion(self): - """Test that string values are properly coerced to correct types.""" - # Test integer coercion from strings - test_settings = Settings( - azure_connection_string="test", - 
scheduler_interval_minutes="30",  # String that should become int
-            batch_size="100",
-            http_timeout="180",
-            max_retries="7"
-        )
-
-        assert isinstance(test_settings.scheduler_interval_minutes, int)
-        assert test_settings.scheduler_interval_minutes == 30
-        assert isinstance(test_settings.batch_size, int)
-        assert test_settings.batch_size == 100
-
-    def test_invalid_type_coercion_raises_error(self):
-        """Test that invalid type conversions raise ValidationError."""
-        # Supply the required connection string so the bad value is what raises
-        with pytest.raises(ValidationError):
-            Settings(
-                azure_connection_string="test",
-                scheduler_interval_minutes="not_a_number"
-            )
-
-    def test_global_settings_instance(self):
-        """Test that the global settings instance is properly initialized."""
-        # Test that settings is an instance of Settings
-        assert isinstance(settings, Settings)
-
-        # Test that it has all required attributes
-        assert hasattr(settings, 'azure_connection_string')
-        assert hasattr(settings, 'azure_container_name')
-        assert hasattr(settings, 'model_services')
-
-    def test_settings_immutability_after_creation(self):
-        """Test settings behavior after instantiation."""
-        test_settings = Settings(azure_connection_string="test")
-
-        # Should be able to modify attributes (pydantic models are mutable by default)
-        test_settings.batch_size = 999
-        assert test_settings.batch_size == 999
-
-        # Should be able to access all properties
-        assert isinstance(test_settings.model_services, dict)
-
-    def test_unicode_handling(self):
-        """Test handling of Unicode characters in configuration values."""
-        test_settings = Settings(
-            azure_connection_string="тест",  # Cyrillic
-            azure_container_name="测试",  # Chinese
-            painting_data_folder="🎨painting",  # Emoji
-        )
-
-        assert test_settings.azure_connection_string == "тест"
-        assert test_settings.azure_container_name == "测试"
-        assert test_settings.painting_data_folder == "🎨painting"
-
-    def test_very_long_strings(self):
-        """Test handling of very long string values."""
-        long_string = "x" * 10000
-        test_settings = Settings(
-            azure_connection_string=long_string,
-            azure_container_name=long_string
-        )
-
-        assert len(test_settings.azure_connection_string) == 10000
-        assert len(test_settings.azure_container_name) == 10000
-
-    def test_boundary_values_for_integers(self):
-        """Test boundary values for integer fields."""
-        # Test with maximum reasonable values
-        test_settings = Settings(
-            azure_connection_string="test",
-            scheduler_interval_minutes=2147483647,  # Max 32-bit signed int
-            batch_size=2147483647,
-            http_timeout=2147483647,
-            max_retries=2147483647
-        )
-
-        assert test_settings.scheduler_interval_minutes == 2147483647
-        assert test_settings.batch_size == 2147483647
-
-    def test_model_services_property_consistency(self):
-        """Test that model_services property maintains consistency."""
-        test_settings = Settings(
-            azure_connection_string="test",
-            painting_service_url="http://consistent:8000"
-        )
-
-        # Multiple calls should return the same value
-        result1 = test_settings.model_services
-        result2 = test_settings.model_services
-
-        assert result1 == result2
-        assert result1 is not result2  # Should be different objects (new dict each time)
-
-    def test_all_fields_have_expected_types(self):
-        """Test that all fields have the expected Python types after initialization."""
-        test_settings = Settings(azure_connection_string="test")
-
-        assert isinstance(test_settings.azure_connection_string, str)
-        assert isinstance(test_settings.azure_container_name, str)
-        assert isinstance(test_settings.painting_data_folder, str)
-        assert isinstance(test_settings.scheduler_interval_minutes, int)
-        assert isinstance(test_settings.batch_size, int)
-        assert isinstance(test_settings.painting_service_url, str)
-        assert isinstance(test_settings.log_directory, str)
-        assert isinstance(test_settings.log_filename, str)
-        assert isinstance(test_settings.error_log_filename, str)
-        assert isinstance(test_settings.http_timeout, int)
-        assert isinstance(test_settings.max_retries, int)
-        assert isinstance(test_settings.model_services, dict)
-
-    def test_configuration_completeness(self):
-        """Test that all expected configuration options are present and accessible."""
-        test_settings = Settings(azure_connection_string="test")
-
-        # All expected attributes should exist
-        expected_attrs = [
-            'azure_connection_string', 'azure_container_name', 'painting_data_folder',
-            'scheduler_interval_minutes', 'batch_size', 'painting_service_url',
-            'log_directory', 'log_filename', 'error_log_filename',
-            'http_timeout', 'max_retries', 'model_services'
-        ]
-
-        for attr in expected_attrs:
-            assert hasattr(test_settings, attr), f"Settings missing expected attribute: {attr}"
-            # Should be able to access the value without errors
-            getattr(test_settings, attr)
-
-    def test_settings_representation(self):
-        """Test string representation and debugging capabilities."""
-        test_settings = Settings(azure_connection_string="test_connection")
-
-        # Should be able to convert to string without errors
-        str_repr = str(test_settings)
-        assert isinstance(str_repr, str)
-        assert len(str_repr) > 0
-
-        # Should be able to get repr without errors
-        repr_str = repr(test_settings)
-        assert isinstance(repr_str, str)
-        assert len(repr_str) > 0
\ No newline at end of file

From 99eebe7f56aaa601e3f07ebbdf3fad856d090563 Mon Sep 17 00:00:00 2001
From: TaeHyun aivle
Date: Tue, 12 Aug 2025 02:01:23 +0000
Subject: [PATCH 02/12] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Refact:=20change=20s?=
 =?UTF-8?q?ettings=20and=20Replace=20logging?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Replace the print statements used previously with proper logging, and
revise the existing simulator's settings.py
---
 .../app/config/logging_config.py              | 15 +++++
 .../app/config/settings.py                    | 24 ++------
 .../app/main.py                               | 19 +++---
 .../app/services/azure_storage.py             | 60 ++++++++++---------
 4 files changed, 64 insertions(+), 54 deletions(-)
 create mode 100644 services/painting-process-data-simulator-service/app/config/logging_config.py

diff --git a/services/painting-process-data-simulator-service/app/config/logging_config.py b/services/painting-process-data-simulator-service/app/config/logging_config.py
new file mode 100644
index 0000000..02ce366
--- /dev/null
+++ b/services/painting-process-data-simulator-service/app/config/logging_config.py
@@ -0,0 +1,15 @@
+import os
+import logging
+import sys
+from app.config.settings import settings
+
+def setup_logging():
+    """Sets up centralized logging."""
+    # Create the log directory up front so the FileHandler cannot fail at import time
+    os.makedirs(settings.log_directory, exist_ok=True)
+    logging.basicConfig(
+        level=logging.INFO,
+        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+        handlers=[logging.StreamHandler(sys.stdout),
+                  logging.FileHandler(f"{settings.log_directory}/service.log")]
+    )
diff --git a/services/painting-process-data-simulator-service/app/config/settings.py b/services/painting-process-data-simulator-service/app/config/settings.py
index 0d59314..ea62c6a 100644
--- a/services/painting-process-data-simulator-service/app/config/settings.py
+++ b/services/painting-process-data-simulator-service/app/config/settings.py
@@ -1,5 +1,3 @@
-import os
-from typing import Dict
 from pydantic_settings import 
BaseSettings @@ -12,27 +10,17 @@ class Settings(BaseSettings): painting_data_folder: str = "painting-process-equipment" # 스케줄러 설정 - scheduler_interval_minutes: int = 1 + scheduler_interval_seconds: int = 30 batch_size: int = 10 - # Painting Process Equipment 모델 서비스 설정 - painting_service_url: str = os.getenv("PAINTING_SERVICE_URL", "http://localhost:8001") - - # 로그 설정 - log_directory: str = "logs" - log_filename: str = "painting_issue_logs.json" - error_log_filename: str = "painting_errors.json" + # Backend 서비스 설정 + backend_service_url: str = "http://localhost:8087/equipment-data" # HTTP 클라이언트 설정 http_timeout: int = 30 - max_retries: int = 3 - - @property - def model_services(self) -> Dict[str, str]: - """Painting Process Equipment 모델 서비스 URL""" - return { - "painting-process-equipment": self.painting_service_url - } + + # 로그 디렉토리 + log_directory: str = "logs" model_config = { "env_file": ".env", diff --git a/services/painting-process-data-simulator-service/app/main.py b/services/painting-process-data-simulator-service/app/main.py index bb1933b..c2c0677 100644 --- a/services/painting-process-data-simulator-service/app/main.py +++ b/services/painting-process-data-simulator-service/app/main.py @@ -1,33 +1,38 @@ +import logging from fastapi import FastAPI from contextlib import asynccontextmanager from app.config.settings import settings from app.services.scheduler_service import simulator_scheduler from app.routers import simulator_router -from app.routers import connection_test_router +from app.config.logging_config import setup_logging + import os +# 로깅 설정 +setup_logging() +logger = logging.getLogger(__name__) @asynccontextmanager async def lifespan(app: FastAPI): """애플리케이션 생명주기 관리""" # 시작 시 - print("🚀 Data Simulator Service 시작 중...") + logger.info("🚀 Data Simulator Service 시작 중...") # 환경 변수 체크 if not settings.azure_connection_string: + logger.error("AZURE_CONNECTION_STRING 환경 변수가 설정되지 않았습니다.") raise ValueError("AZURE_CONNECTION_STRING 환경 변수가 설정되지 않았습니다. 
.env 파일을 생성하거나 환경 변수를 설정해주세요.") # 로그 디렉토리 생성 os.makedirs(settings.log_directory, exist_ok=True) - print(f"📁 로그 디렉토리: {settings.log_directory}") - print(f"🔧 스케줄러 간격: {settings.scheduler_interval_minutes}분") - print(f"🎯 대상 서비스 수: {len(settings.model_services)}") + logger.info(f"📁 로그 디렉토리: {settings.log_directory}") + logger.info(f"🔧 스케줄러 간격: {settings.scheduler_interval_seconds}초") yield # 종료 시 - print("🛑 Data Simulator Service 종료 중...") + logger.info("🛑 Data Simulator Service 종료 중...") if simulator_scheduler.is_running: await simulator_scheduler.stop() @@ -42,8 +47,6 @@ async def lifespan(app: FastAPI): # 라우터 설정 # 시뮬레이터 활성화/비활성화/상태확인 API 모음 app.include_router(simulator_router.router, prefix="/simulator") -# azure storage 연결, model serving 서비스 연결 확인 API 모음 -app.include_router(connection_test_router.router, prefix="/test") # 아래는 서비스 기본 정보 확인과 서비스 헬스 체크 api 정의 diff --git a/services/painting-process-data-simulator-service/app/services/azure_storage.py b/services/painting-process-data-simulator-service/app/services/azure_storage.py index ffab94b..df65011 100644 --- a/services/painting-process-data-simulator-service/app/services/azure_storage.py +++ b/services/painting-process-data-simulator-service/app/services/azure_storage.py @@ -3,12 +3,17 @@ from azure.storage.blob.aio import BlobServiceClient from app.config.settings import settings import io +import logging +logger = logging.getLogger(__name__) class AzureStorageService: def __init__(self): self.connection_string = settings.azure_connection_string self.container_name = settings.azure_container_name + self.blob_service_client = None + if self.connection_string: + self.blob_service_client = BlobServiceClient.from_connection_string(self.connection_string) # 순차 처리를 위한 인덱스 관리 self.current_index = 0 @@ -16,47 +21,45 @@ def __init__(self): async def list_data_files(self) -> List[str]: """데이터 파일 목록 조회""" - if not self.connection_string: + if not self.blob_service_client: raise ValueError("Azure connection string이 설정되지 않았습니다.") try: - async with BlobServiceClient.from_connection_string(self.connection_string) as client: - container_client = client.get_container_client(self.container_name) - blob_list = [] + container_client = self.blob_service_client.get_container_client(self.container_name) + blob_list = [] - # painting-process-equipment 폴더만 검색 - prefix = f"{settings.painting_data_folder}/" + # painting-process-equipment 폴더만 검색 + prefix = f"{settings.painting_data_folder}/" - async for blob in container_client.list_blobs(name_starts_with=prefix): - if blob.name.endswith('.csv'): - blob_list.append(blob.name) + async for blob in container_client.list_blobs(name_starts_with=prefix): + if blob.name.endswith('.csv'): + blob_list.append(blob.name) - return sorted(blob_list) + return sorted(blob_list) except Exception as e: - print(f"❌ Painting 데이터 파일 목록 조회 실패: {e}") + logger.error(f"❌ Painting 데이터 파일 목록 조회 실패: {e}") return [] async def read_csv_data(self, blob_name: str) -> Optional[pd.DataFrame]: """CSV 파일 읽기""" - if not self.connection_string: + if not self.blob_service_client: raise ValueError("Azure connection string이 설정되지 않았습니다.") try: - async with BlobServiceClient.from_connection_string(self.connection_string) as client: - blob_client = client.get_blob_client( - container=self.container_name, - blob=blob_name - ) + blob_client = self.blob_service_client.get_blob_client( + container=self.container_name, + blob=blob_name + ) - blob_data = await blob_client.download_blob() - content = await blob_data.readall() - df = 
pd.read_csv(io.StringIO(content.decode('utf-8')))
+            blob_data = await blob_client.download_blob()
+            content = await blob_data.readall()
+            df = pd.read_csv(io.StringIO(content.decode('utf-8')))
 
-            print(f"📁 파일 읽기 성공: {blob_name} ({len(df)} rows)")
-            return df
+            logger.info(f"📁 파일 읽기 성공: {blob_name} ({len(df)} rows)")
+            return df
 
         except Exception as e:
-            print(f"❌ 파일 읽기 실패 ({blob_name}): {e}")
+            logger.error(f"❌ 파일 읽기 실패 ({blob_name}): {e}")
             return None
 
     async def simulate_real_time_data(self) -> Optional[Dict[str, Any]]:
@@ -90,11 +93,11 @@ async def simulate_real_time_data(self) -> Optional[Dict[str, Any]]:
                 "isSolved": False  # 기본값
             }
 
-            print(f"📊 Painting 데이터: 행 {self.current_index}/{len(self.cached_df)}")
+            logger.info(f"📊 Painting 데이터: 행 {self.current_index}/{len(self.cached_df)}")
 
             return simulated_data
 
         except Exception as e:
-            print(f"❌ Painting 데이터 시뮬레이션 실패: {e}")
+            logger.error(f"❌ Painting 데이터 시뮬레이션 실패: {e}")
             return None
 
     async def _load_dataframe(self):
@@ -102,20 +105,21 @@ async def _load_dataframe(self):
         try:
             files = await self.list_data_files()
             if not files:
-                print("⚠️ Painting 데이터 파일이 없습니다.")
+                logger.warning("⚠️ Painting 데이터 파일이 없습니다.")
                 return
 
             # 첫 번째 CSV 파일을 사용
             target_file = files[0]
+            logger.info(f"✅ 시뮬레이션에 사용할 파일: {target_file}. 발견된 파일들 중 첫번째 파일을 사용합니다.")
 
             self.cached_df = await self.read_csv_data(target_file)
             self.current_index = 0
 
             if self.cached_df is not None:
-                print(f"✅ 데이터 캐싱 완료: {target_file} ({len(self.cached_df)} rows)")
+                logger.info(f"✅ 데이터 캐싱 완료: {target_file} ({len(self.cached_df)} rows)")
 
         except Exception as e:
-            print(f"❌ DataFrame 로드 실패: {e}")
+            logger.error(f"❌ DataFrame 로드 실패: {e}")
 
 
 # 글로벌 Azure Storage 서비스 인스턴스

From b5b5498f970ea44cbf51c9881ebf1ce3ce0552ae Mon Sep 17 00:00:00 2001
From: TaeHyun aivle
Date: Tue, 12 Aug 2025 02:04:25 +0000
Subject: ♻️ Refact: simulator to send data to Spring Boot
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Remove the logic that exchanged data with the model service from the
existing simulator, and change the service to send its data to the
Spring Boot server instead.
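For reference, each simulated record goes out as a single JSON object. A
minimal sketch of the payload as a Python dict (field names follow
azure_storage.simulate_real_time_data(); the concrete values below are
made up):

    # Illustrative payload POSTed to BACKEND_SERVICE_URL
    # (http://localhost:8087/equipment-data by default)
    {
        "machineId": "PAINT-MACHINE001",      # "PAINT-" prefix added by the simulator
        "timeStamp": "2023-01-01T10:00:00Z",
        "thick": 1.5,                         # from the Thick CSV column
        "voltage": 220.5,                     # from PT_jo_V_1
        "current": 15.2,                      # from PT_jo_A_Main_1
        "temper": 25.3,                       # from PT_jo_TP
        "issue": "",
        "isSolved": False,
    }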
---
 .../app/routers/simulator_router.py           | 50 +---------
 .../app/services/backend_client.py            | 19 ++++
 .../app/services/scheduler_service.py         | 99 +++++--------------
 3 files changed, 47 insertions(+), 121 deletions(-)
 create mode 100644 services/painting-process-data-simulator-service/app/services/backend_client.py

diff --git a/services/painting-process-data-simulator-service/app/routers/simulator_router.py b/services/painting-process-data-simulator-service/app/routers/simulator_router.py
index f001d79..5de3e12 100644
--- a/services/painting-process-data-simulator-service/app/routers/simulator_router.py
+++ b/services/painting-process-data-simulator-service/app/routers/simulator_router.py
@@ -1,26 +1,12 @@
 from fastapi import APIRouter, HTTPException
-from app.services.model_client import model_client
 from app.services.scheduler_service import simulator_scheduler
-from app.config.settings import settings
-
-import os
-import json
 
 router = APIRouter()
 
-
 @router.get("/status")
 async def get_simulator_status():
     """시뮬레이터 상태 조회"""
-    status = simulator_scheduler.get_status()
-
-    # 모델 서비스 헬스 체크 추가
-    if simulator_scheduler.is_running:
-        health_status = await model_client.health_check_all()
-        status["model_services_health"] = health_status
-
-    return status
-
+    return simulator_scheduler.get_status()
 
 @router.post("/start")
 async def start_simulator():
@@ -34,7 +20,6 @@ async def start_simulator():
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"시뮬레이터 시작 실패: {str(e)}") from e
 
-
 @router.post("/stop")
 async def stop_simulator():
     """시뮬레이션 중지"""
@@ -45,35 +30,4 @@ async def stop_simulator():
             "status": simulator_scheduler.get_status()
         }
     except Exception as e:
-        raise HTTPException(status_code=500, detail=f"시뮬레이터 중지 실패: {str(e)}") from e
-
-
-@router.get("/logs/recent")
-async def get_recent_logs():
-    """최근 로그 조회"""
-    try:
-        log_file_path = os.path.join(
-            settings.log_directory, settings.log_filename)
-
-        if not os.path.exists(log_file_path):
-            return {"logs": [], "message": "로그 파일이 없습니다."}
-
-        # 최근 10개 로그만 반환
-        logs = []
-        total_lines = 0
-        with open(log_file_path, "r", encoding="utf-8") as f:
-            lines = f.readlines()
-            total_lines = len(lines)
-            for line in lines[-10:]:  # 최근 10개
-                try:
-                    logs.append(json.loads(line.strip()))
-                except json.JSONDecodeError:
-                    continue
-
-        return {
-            "logs": logs,
-            "total_count": total_lines
-        }
-
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"로그 조회 실패: {str(e)}") from e
+        raise HTTPException(status_code=500, detail=f"시뮬레이터 중지 실패: {str(e)}") from e
\ No newline at end of file
diff --git a/services/painting-process-data-simulator-service/app/services/backend_client.py b/services/painting-process-data-simulator-service/app/services/backend_client.py
new file mode 100644
index 0000000..822f3cf
--- /dev/null
+++ b/services/painting-process-data-simulator-service/app/services/backend_client.py
@@ -0,0 +1,19 @@
+import httpx
+import logging
+from typing import Any, Dict
+
+logger = logging.getLogger(__name__)
+
+class BackendClient:
+    async def send_to_backend(self, data: Dict[str, Any], url: str):
+        async with httpx.AsyncClient() as client:
+            try:
+                response = await client.post(url, json=data)
+                response.raise_for_status()
+                logger.info(f"✅ 데이터 전송 성공: {response.status_code}")
+            except httpx.HTTPStatusError as e:
+                logger.error(f"❌ 데이터 전송 실패: {e.response.status_code} - {e.response.text}")
+            except Exception as e:
+                logger.error(f"❌ 데이터 전송 중 예외 발생: {e}")
+
+backend_client = BackendClient()
\ No newline at end of file
diff --git a/services/painting-process-data-simulator-service/app/services/scheduler_service.py b/services/painting-process-data-simulator-service/app/services/scheduler_service.py
index 51cfaa3..3d789bc 100644
--- a/services/painting-process-data-simulator-service/app/services/scheduler_service.py
+++ b/services/painting-process-data-simulator-service/app/services/scheduler_service.py
@@ -3,9 +3,10 @@
 from apscheduler.triggers.interval import IntervalTrigger
 from app.config.settings import settings
 from app.services.azure_storage import azure_storage
-from app.services.model_client import model_client
-from app.utils.logger import anomaly_logger
+from app.services.backend_client import backend_client
 
+import logging
+logger = logging.getLogger(__name__)
 
 class SimulatorScheduler:
     def __init__(self):
@@ -15,114 +16,66 @@ def __init__(self):
     async def start(self):
         """스케줄러 시작"""
         if self.is_running:
-            print("⚠️ 스케줄러가 이미 실행 중입니다.")
+            logger.warning("⚠️ 스케줄러가 이미 실행 중입니다.")
             return
 
         try:
-
-
-            # 헬스 체크
-            await self._initial_health_check()
-
-            # 스케줄 작업 등록
             self.scheduler.add_job(
-                func=self._simulate_data_collection,
-                trigger=IntervalTrigger(
-                    minutes=settings.scheduler_interval_minutes),
+                func=self._simulate_and_send_data,
+                trigger=IntervalTrigger(seconds=settings.scheduler_interval_seconds),
                 id='data_simulation',
-                name='Data Collection Simulation',
+                name='Data Simulation and Sending',
                 replace_existing=True
             )
 
             self.scheduler.start()
            self.is_running = True
 
-            print(f"🚀 시뮬레이터 시작! (간격: {settings.scheduler_interval_minutes}분)")
-            print(f"📊 대상 서비스: {list(settings.model_services.keys())}")
-            print("-" * 60)
+            logger.info(f"🚀 백엔드 데이터 전송 시뮬레이터 시작! (간격: {settings.scheduler_interval_seconds}초)")
+            logger.info(f"🎯 대상 백엔드 서비스: {settings.backend_service_url}")
 
         except Exception as e:
-            print(f"❌ 스케줄러 시작 실패: {e}")
+            logger.error(f"❌ 스케줄러 시작 실패: {e}")
             raise
 
     async def stop(self):
         """스케줄러 중지"""
         if not self.is_running:
-            print("⚠️ 스케줄러가 실행 중이 아닙니다.")
+            logger.warning("⚠️ 스케줄러가 실행 중이 아닙니다.")
             return
 
         self.scheduler.shutdown()
-
         self.is_running = False
-        print("🛑 시뮬레이터 중지됨")
-
-    async def _initial_health_check(self):
-        """초기 헬스 체크"""
-        print("🔍 모델 서비스 헬스 체크 중...")
-        health_status = await model_client.health_check_all()
-
-        for service_name, is_healthy in health_status.items():
-            status = "✅" if is_healthy else "❌"
-            print(f"  {status} {service_name}")
+        logger.info("🛑 시뮬레이터 중지됨")
 
-        healthy_count = sum(health_status.values())
-        total_count = len(health_status)
-
-        if healthy_count == 0:
-            raise Exception("모든 모델 서비스가 비활성 상태입니다.")
-
-        print(f"📈 활성 서비스: {healthy_count}/{total_count}")
-        print("-" * 60)
-
-    async def _simulate_data_collection(self):
-        """주기적 Painting Process Equipment 데이터 수집 및 예측 작업"""
+    async def _simulate_and_send_data(self):
+        """주기적 데이터 시뮬레이션 및 백엔드 전송"""
         try:
-            print(
-                f"🔄 Painting 데이터 수집 시작 - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
+            logger.info(f"🔄 데이터 시뮬레이션 및 전송 시작 - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
 
-            # Azure Blob에서 데이터 시뮬레이션
             simulated_data = await azure_storage.simulate_real_time_data()
 
             if not simulated_data:
-                print("⚠️ 수집할 데이터가 없습니다.")
+                logger.warning("⚠️ 전송할 데이터가 없습니다.")
                 return
 
-            # 모델 서비스에 예측 요청
-            prediction_result = await model_client.predict_painting_issue(simulated_data)
-
-            # 결과 처리
-            if prediction_result:
-                # 이상 감지 시 로그 기록
-                anomaly_logger.log_anomaly(
-                    "painting-process-equipment",
-                    prediction_result,  # 모델이 반환한 로그 데이터
-                    simulated_data  # 시뮬레이터가 생성한 원본 데이터
-                )
-                print(f"🚨 이상 감지! - 이슈: {prediction_result.get('issue')}")
-            else:
-                # 정상 상태
-                anomaly_logger.log_normal_processing(
-                    "painting-process-equipment",
-                    simulated_data
-                )
-                print("✅ 정상 상태")
-
-            print("-" * 60)
+            await backend_client.send_to_backend(simulated_data, settings.backend_service_url)
 
         except Exception as e:
-            print(f"❌ 데이터 수집 중 오류 발생: {e}")
-            anomaly_logger.log_error("painting-simulator-scheduler", str(e))
+            logger.error(f"❌ 데이터 전송 중 오류 발생: {e}")
 
     def get_status(self) -> dict:
         """스케줄러 상태 정보"""
         jobs = self.scheduler.get_jobs()
 
+        next_run = None
+        if jobs:
+            next_run = str(jobs[0].next_run_time)
+
         return {
             "is_running": self.is_running,
-            "interval_minutes": settings.scheduler_interval_minutes,
-            "next_run": str(jobs[0].next_run_time) if jobs else None,
-            "total_services": len(settings.model_services)
+            "interval_seconds": settings.scheduler_interval_seconds,
+            "next_run": next_run,
+            "backend_service_url": settings.backend_service_url
         }
 
-
-# 글로벌 스케줄러 인스턴스
-simulator_scheduler = SimulatorScheduler()
+simulator_scheduler = SimulatorScheduler()
\ No newline at end of file

From 43c1f1625e49394ffbd95448463c5c415daa8746 Mon Sep 17 00:00:00 2001
From: TaeHyun aivle
Date: Tue, 12 Aug 2025 02:07:38 +0000
Subject: 📝 Docs: update README and env example
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Update the README and requirements to match the refactoring. Copying
.env.example makes it easy to set up a working .env file (only the
Azure access key has to be filled in by hand).
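A note on HTTP_TIMEOUT: the value is read into settings.http_timeout,
but the BackendClient added in the previous commit creates its
httpx.AsyncClient with the library's default timeout and never passes
the setting through. If the setting is meant to take effect, a variant
along these lines would wire it up (a sketch under that assumption, not
part of this patch):

    # app/services/backend_client.py -- illustrative variant only
    import httpx
    from app.config.settings import settings

    class BackendClient:
        async def send_to_backend(self, data, url):
            # Honor HTTP_TIMEOUT from .env via pydantic-settings
            timeout = httpx.Timeout(settings.http_timeout)
            async with httpx.AsyncClient(timeout=timeout) as client:
                response = await client.post(url, json=data)
                response.raise_for_status()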
---
 .../.env.example                              |  43 ++++++++
 .../README.md                                 | 101 ++++++------------
 .../requirements.txt                          |  18 +---
 3 files changed, 76 insertions(+), 86 deletions(-)
 create mode 100644 services/painting-process-data-simulator-service/.env.example

diff --git a/services/painting-process-data-simulator-service/.env.example b/services/painting-process-data-simulator-service/.env.example
new file mode 100644
index 0000000..7a28b5d
--- /dev/null
+++ b/services/painting-process-data-simulator-service/.env.example
@@ -0,0 +1,43 @@
+# .env.example
+# 이 파일을 .env 로 복사한 후, 로컬 개발 환경에 맞게 값을 수정하여 사용하세요.
+
+#==============================
+# Azure Storage 설정
+#==============================
+
+# Azure Storage 계정의 연결 문자열 (필수)
+AZURE_CONNECTION_STRING=""
+
+# 데이터를 읽어올 컨테이너 이름
+AZURE_CONTAINER_NAME="simulator-data"
+
+# 컨테이너 내에서 데이터가 저장된 폴더 경로
+PAINTING_DATA_FOLDER="painting-process-equipment"
+
+
+#==============================
+# 스케줄러 설정
+#==============================
+
+# 데이터 전송 작업의 실행 주기 (초 단위)
+SCHEDULER_INTERVAL_SECONDS=30
+
+# 한 번에 전송할 데이터의 묶음 크기
+BATCH_SIZE=10
+
+
+#==============================
+# 백엔드 서비스 설정
+#==============================
+
+# 시뮬레이터가 데이터를 전송할 Spring Boot 백엔드 서비스의 전체 URL
+# (주의: paintingprocessmonitoring 서비스에 새로 추가한 POST /equipment-data 경로로 설정)
+BACKEND_SERVICE_URL="http://localhost:8087/equipment-data"
+
+
+#==============================
+# HTTP 클라이언트 설정
+#==============================
+
+# 백엔드 API 호출 시 타임아웃 시간 (초 단위)
+HTTP_TIMEOUT=30
diff --git a/services/painting-process-data-simulator-service/README.md b/services/painting-process-data-simulator-service/README.md
index 7a07c68..946f5ce 100644
--- a/services/painting-process-data-simulator-service/README.md
+++ b/services/painting-process-data-simulator-service/README.md
@@ -1,35 +1,30 @@
-# Painting Process Equipment Simulator Service
+# Painting Process Backend Simulator Service
 
 ## 1. 서비스 소개
 
-이 서비스는 **Painting Process Equipment Defect Detection Model Service**를 테스트하고 모니터링하기 위한 시뮬레이터입니다.
+이 서비스는 Spring Boot 백엔드 애플리케이션으로 공정 데이터를 전송하는 시뮬레이터입니다.
 
-주기적으로 Azure Blob Storage에 저장된 CSV 데이터를 읽어와 실시간 공정 데이터처럼 모델 서비스의 예측 API를 호출합니다. 그 후, 모델 서비스로부터 받은 결과(정상 또는 이상 감지)를 콘솔과 로그 파일에 기록하여 시스템의 동작을 검증하는 역할을 합니다.
+주기적으로 Azure Blob Storage에 저장된 CSV 데이터를 읽어와, Spring Boot 백엔드의 API를 호출하여 데이터를 전송하는 역할을 합니다.
 
 ## 2. 주요 기능
 
-- **주기적 데이터 시뮬레이션**: `APScheduler`를 사용하여 설정된 시간 간격마다 자동으로 데이터를 생성하고 예측을 요청합니다.
-- **Azure Blob Storage 연동**: Azure Blob Storage에 저장된 실제 공정 데이터 기반의 CSV 파일을 읽어 시뮬레이션에 사용합니다.
-- **모델 서비스 연동**: `HTTPX` 클라이언트를 사용하여 `painting-process-equipment-defect-detection-model-service`의 API를 비동기적으로 호출합니다.
-- **상태 로깅**: 모델의 예측 결과를 `anomaly_logger`를 통해 이상(anomaly) 또는 정상(normal) 상태로 구분하여 로그를 기록합니다.
-- **Docker 지원**: Dockerfile을 통해 컨테이너 환경에서 쉽게 서비스를 빌드하고 실행할 수 있으며, Docker Compose를 통한 통합 관리에도 용이합니다.
+- **주기적 데이터 전송**: `APScheduler`를 사용하여 설정된 시간 간격마다 Azure의 데이터를 백엔드로 전송합니다.
+- **Azure Blob Storage 연동**: Azure Blob Storage에 저장된 CSV 파일을 읽어 시뮬레이션에 사용합니다.
+- **Spring Boot 백엔드 연동**: `HTTPX` 클라이언트를 사용하여 Spring Boot 백엔드의 API를 비동기적으로 호출합니다.
 
 ## 3.
프로젝트 구조 ```text -painting-process-equipment-simulator-service/ +painting-process-backend-simulator-service/ ├── app/ │ ├── main.py # FastAPI 애플리케이션의 메인 진입점 │ ├── config/ -│ │ └── settings.py # Pydantic-settings를 이용한 환경 변수 및 설정 관리 +│ │ ├── settings.py # Pydantic-settings를 이용한 환경 변수 및 설정 관리 +│ │ └── logging_config.py # 서비스 로깅 설정 │ ├── routers/ -│ │ ├── simulator_router.py # 시뮬레이터 시작/중지/상태 확인 API -│ │ └── test_connection_router.py # 외부 서비스(Azure, 모델) 연결 테스트 API -│ ├── services/ -│ │ ├── scheduler_service.py # APScheduler를 사용한 핵심 스케줄링 로직 -│ │ ├── model_client.py # 모델 예측 서비스 API 호출 클라이언트 -│ │ └── azure_storage.py # Azure Blob Storage 데이터 처리 서비스 -│ └── utils/ -│ └── logger.py # 이상 및 정상 로그 기록 유틸리티 -├── logs/ # 시뮬레이션 결과 로그가 저장되는 디렉토리 +│ │ └── simulator_router.py # 시뮬레이터 시작/중지/상태 확인 API +│ └── services/ +│ ├── scheduler_service.py # APScheduler를 사용한 핵심 스케줄링 로직 +│ ├── backend_client.py # Spring Boot 백엔드 API 호출 클라이언트 +│ └── azure_storage.py # Azure Blob Storage 데이터 처리 서비스 ├── .env # Azure 연결 문자열 등 민감한 환경 변수 파일 ├── Dockerfile # Docker 이미지 빌드 설정 ├── requirements.txt # Python 라이브러리 의존성 목록 @@ -38,9 +33,7 @@ painting-process-equipment-simulator-service/ ## 4. 설치 및 실행 방법 -### 4.1. 로컬 환경에서 실행 - -**사전 준비:** `painting-process-equipment-defect-detection-model-service`가 로컬 환경(`http://localhost:8001`)에서 먼저 실행 중이어야 합니다. +### 로컬 환경에서 실행 1. **Python 가상 환경 설정**: ```bash @@ -57,66 +50,32 @@ painting-process-equipment-simulator-service/ ``` 3. **.env 파일 설정**: - 프로젝트 루트에 `.env` 파일을 생성하고, Azure Storage 연결 문자열을 추가합니다. + 프로젝트 루트에 `.env` 파일을 생성하고, Azure Storage 연결 문자열과 백엔드 서비스 URL을 추가합니다. ```env AZURE_CONNECTION_STRING="" + BACKEND_SERVICE_URL="" ``` 4. **애플리케이션 실행**: ```bash - # 포트 8011에서 실행 uvicorn app.main:app --reload --port 8011 ``` - 실행 후 `http://localhost:8011/docs`에서 API 문서를 확인할 수 있습니다. - -### 4.2. Docker를 이용한 실행 - -**사전 준비:** `painting-process-equipment-defect-detection-model-service`가 `model-service`라는 컨테이너 이름으로 동일한 Docker 네트워크(`smart-fast-net`)에서 실행 중이어야 합니다. - -1. **Docker 네트워크 생성** (이미 생성했다면 생략): - ```bash - docker network create smart-fast-net - ``` - -2. **모델 서비스 실행** (이미 실행 중이라면 생략): - ```bash - # 모델 서비스 디렉토리에서 실행 - docker build -t model-service . - docker run --name model-service --network smart-fast-net -p 8001:8001 model-service - ``` -3. **시뮬레이터 서비스 Docker 이미지 빌드**: - ```bash - docker build -t simulator-service . - ``` +## 5. API 엔드포인트 -4. **시뮬레이터 서비스 Docker 컨테이너 실행**: - `--env-file` 옵션을 사용하여 호스트의 `.env` 파일을 컨테이너의 환경 변수로 안전하게 주입합니다. - 혹시 Azure storage가 잘 불러와지지 않는다면 -e AZURE_CONNECTION_STRING="Access-Key"를 직접 추가해주세요. - ```bash - docker run --name simulator-service --network smart-fast-net \ - -p 8011:8011 \ - --env-file ./.env \ - -e PAINTING_SERVICE_URL="http://model-service:8001" \ - simulator-service - ``` +서비스가 시작되면 다음 URL로 API 문서(Swagger UI)에 접근할 수 있습니다: `http://localhost:8011/docs` -5. **로그 확인**: - ```bash - # 시뮬레이터 로그 확인 - docker logs -f simulator-service - ``` +| HTTP Method | Endpoint | Description | +| :---------- | :----------------- | :--------------------------- | +| `POST` | `/simulator/start` | 데이터 전송 시뮬레이션을 시작합니다. | +| `POST` | `/simulator/stop` | 실행 중인 시뮬레이션을 중지합니다. | +| `GET` | `/simulator/status`| 현재 스케줄러의 상태를 확인합니다. | -## 5. API 엔드포인트 +## 6. 로깅 (Logging) -서비스가 시작되면 `http://localhost:8011/docs` (또는 Docker IP)에서 API 문서를 통해 아래 엔드포인트를 테스트할 수 있습니다. +이 서비스는 `app/config/logging_config.py` 파일을 통해 중앙에서 로깅 설정을 관리합니다. 
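+
+아래는 참고용 예시입니다. A minimal sketch of what a `setup_logging()` with this behavior could look like (the actual implementation in `logging_config.py` may differ; the handler setup and log path below are assumptions based on this section):
+
+```python
+# Illustrative sketch of app/config/logging_config.py
+import logging
+import os
+
+def setup_logging():
+    os.makedirs("logs", exist_ok=True)  # logs/service.log lives here
+    logging.basicConfig(
+        level=logging.INFO,
+        # 시간 - 모듈 - 로그 레벨 - 메시지
+        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+        handlers=[
+            logging.StreamHandler(),  # 콘솔 출력
+            logging.FileHandler("logs/service.log", encoding="utf-8"),  # 파일 출력
+        ],
+    )
+```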
-| HTTP Method | Endpoint                          | Description                                 |
-| :---------- | :-------------------------------- | :------------------------------------------ |
-| `POST`      | `/simulator/start`                | 데이터 시뮬레이션을 시작합니다.              |
-| `POST`      | `/simulator/stop`                 | 실행 중인 시뮬레이션을 중지합니다.           |
-| `GET`       | `/simulator/status`               | 현재 스케줄러의 상태를 확인합니다.           |
-| `POST`      | `/test/azure-storage-connection`  | Azure Blob Storage 연결을 테스트합니다.      |
-| `POST`      | `/test/models-connection`         | 모델 서비스와의 연결을 테스트합니다.         |
-```text
-```
\ No newline at end of file
+- **로그 형식**: 모든 로그는 `시간 - 모듈 - 로그 레벨 - 메시지` 형식으로 기록됩니다.
+- **로그 출력**:
+  - **콘솔**: 실시간으로 로그가 콘솔에 출력됩니다.
+  - **파일**: 로그는 `logs/service.log` 파일에도 저장되어, 서비스 실행 이력을 확인할 수 있습니다.
diff --git a/services/painting-process-data-simulator-service/requirements.txt b/services/painting-process-data-simulator-service/requirements.txt
index 0fb72ff..34f5034 100644
--- a/services/painting-process-data-simulator-service/requirements.txt
+++ b/services/painting-process-data-simulator-service/requirements.txt
@@ -4,52 +4,40 @@ aiosignal==1.4.0
 annotated-types==0.7.0
 anyio==4.10.0
 APScheduler==3.11.0
-async-timeout==5.0.1
 attrs==25.3.0
 azure-core==1.35.0
 azure-storage-blob==12.26.0
-backports.asyncio.runner==1.2.0
 certifi==2025.8.3
 cffi==1.17.1
-charset-normalizer==3.4.2
+charset-normalizer==3.4.3
 click==8.2.1
-colorama==0.4.6
 cryptography==45.0.6
-exceptiongroup==1.3.0
 fastapi==0.116.1
 frozenlist==1.7.0
 h11==0.16.0
 httpcore==1.0.9
 httpx==0.28.1
 idna==3.10
-iniconfig==2.1.0
 isodate==0.7.2
 multidict==6.6.3
-numpy==2.2.6
-packaging==25.0
+numpy==2.3.2
 pandas==2.3.1
-pluggy==1.6.0
 propcache==0.3.2
 pycparser==2.22
 pydantic==2.11.7
 pydantic-settings==2.10.1
 pydantic_core==2.33.2
-Pygments==2.19.2
-pytest==8.4.1
-pytest-asyncio==1.1.0
 python-dateutil==2.9.0.post0
 python-dotenv==1.1.1
 pytz==2025.2
 requests==2.32.4
-respx==0.22.0
 six==1.17.0
 sniffio==1.3.1
 starlette==0.47.2
-tomli==2.2.1
 typing-inspection==0.4.1
 typing_extensions==4.14.1
 tzdata==2025.2
 tzlocal==5.3.1
 urllib3==2.5.0
 uvicorn==0.35.0
-yarl==1.20.1
+yarl==1.20.1
\ No newline at end of file

From 8dcfe9c98a8a64a6605a678b8c3292342b89ab15 Mon Sep 17 00:00:00 2001
From: TaeHyun aivle
Date: Tue, 12 Aug 2025 02:26:04 +0000
Subject: ✅ Test: Add unit tests for core services
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Add unit tests for the core logic of the API router, scheduler, Azure
Storage service, and backend client, and add the libraries needed to
run them.
---
 .../pytest.ini                                |   1 -
 .../requirements.txt                          |   6 +
 .../tests/test_azure_storage.py               | 598 ++----------------
 .../tests/test_backend_client.py              |  18 +
 .../tests/test_scheduler_service.py           | 454 ++-----------
 .../tests/test_simulator_router.py            | 115 +---
 6 files changed, 124 insertions(+), 1068 deletions(-)
 create mode 100644 services/painting-process-data-simulator-service/tests/test_backend_client.py

diff --git a/services/painting-process-data-simulator-service/pytest.ini b/services/painting-process-data-simulator-service/pytest.ini
index decb2e8..a635c5c 100644
--- a/services/painting-process-data-simulator-service/pytest.ini
+++ b/services/painting-process-data-simulator-service/pytest.ini
@@ -1,3 +1,2 @@
 [pytest]
 pythonpath = .
-testpaths = tests \ No newline at end of file diff --git a/services/painting-process-data-simulator-service/requirements.txt b/services/painting-process-data-simulator-service/requirements.txt index 34f5034..7be3649 100644 --- a/services/painting-process-data-simulator-service/requirements.txt +++ b/services/painting-process-data-simulator-service/requirements.txt @@ -18,15 +18,21 @@ h11==0.16.0 httpcore==1.0.9 httpx==0.28.1 idna==3.10 +iniconfig==2.1.0 isodate==0.7.2 multidict==6.6.3 numpy==2.3.2 +packaging==25.0 pandas==2.3.1 +pluggy==1.6.0 propcache==0.3.2 pycparser==2.22 pydantic==2.11.7 pydantic-settings==2.10.1 pydantic_core==2.33.2 +Pygments==2.19.2 +pytest==8.4.1 +pytest-asyncio==1.1.0 python-dateutil==2.9.0.post0 python-dotenv==1.1.1 pytz==2025.2 diff --git a/services/painting-process-data-simulator-service/tests/test_azure_storage.py b/services/painting-process-data-simulator-service/tests/test_azure_storage.py index 6664543..906228c 100644 --- a/services/painting-process-data-simulator-service/tests/test_azure_storage.py +++ b/services/painting-process-data-simulator-service/tests/test_azure_storage.py @@ -1,560 +1,50 @@ import pytest import pandas as pd -from unittest.mock import AsyncMock, MagicMock, patch - +from unittest.mock import patch, AsyncMock from app.services.azure_storage import AzureStorageService - -class TestAzureStorageService: - """Test suite for AzureStorageService class using pytest framework.""" - - @pytest.fixture - def mock_settings(self): - """Mock settings fixture for testing.""" - with patch('app.services.azure_storage.settings') as mock_settings: - mock_settings.azure_connection_string = "DefaultEndpointsProtocol=https;AccountName=test;AccountKey=test123;EndpointSuffix=core.windows.net" - mock_settings.azure_container_name = "test-container" - mock_settings.painting_data_folder = "painting-process-equipment" - yield mock_settings - - @pytest.fixture - def azure_service(self, mock_settings): - """Create AzureStorageService instance for testing.""" - return AzureStorageService() - - @pytest.fixture - def sample_csv_data(self): - """Sample CSV data for testing.""" - data = { - 'machineId': ['MACHINE001', 'MACHINE002', 'MACHINE003'], - 'timeStamp': ['2023-01-01T10:00:00Z', '2023-01-01T10:01:00Z', '2023-01-01T10:02:00Z'], - 'Thick': [1.5, 2.0, 1.8], - 'PT_jo_V_1': [220.5, 221.0, 219.8], - 'PT_jo_A_Main_1': [15.2, 16.1, 14.9], - 'PT_jo_TP': [25.3, 26.1, 24.8] - } - return pd.DataFrame(data) - - class TestInitialization: - """Test AzureStorageService initialization.""" - - def test_initialization_with_settings(self, mock_settings): - """Test successful initialization with proper settings.""" - service = AzureStorageService() - - assert service.connection_string == mock_settings.azure_connection_string - assert service.container_name == mock_settings.azure_container_name - assert service.current_index == 0 - assert service.cached_df is None - - def test_initialization_attributes(self, azure_service, mock_settings): - """Test all initialization attributes are set correctly.""" - assert hasattr(azure_service, 'connection_string') - assert hasattr(azure_service, 'container_name') - assert hasattr(azure_service, 'current_index') - assert hasattr(azure_service, 'cached_df') - assert azure_service.current_index == 0 - assert azure_service.cached_df is None - - class TestListDataFiles: - """Test list_data_files method.""" - - - @pytest.mark.asyncio - async def test_list_data_files_no_connection_string(self, mock_settings): - """Test list_data_files with no 
connection string.""" - mock_settings.azure_connection_string = None - service = AzureStorageService() - - with pytest.raises(ValueError, match="Azure connection string이 설정되지 않았습니다."): - await service.list_data_files() - - @pytest.mark.asyncio - async def test_list_data_files_empty_connection_string(self, mock_settings): - """Test list_data_files with empty connection string.""" - mock_settings.azure_connection_string = "" - service = AzureStorageService() - - with pytest.raises(ValueError, match="Azure connection string이 설정되지 않았습니다."): - await service.list_data_files() - - @pytest.mark.asyncio - async def test_list_data_files_exception_handling(self, azure_service, capsys): - """Test exception handling in list_data_files.""" - with patch('app.services.azure_storage.BlobServiceClient') as mock_client_class: - mock_client_class.from_connection_string.side_effect = Exception("Connection failed") - - result = await azure_service.list_data_files() - - assert result == [] - captured = capsys.readouterr() - assert "❌ Painting 데이터 파일 목록 조회 실패: Connection failed" in captured.out - - @pytest.mark.asyncio - async def test_list_data_files_no_csv_files(self, azure_service, mock_settings): - """Test list_data_files when no CSV files are found.""" - mock_blob1 = MagicMock() - mock_blob1.name = "painting-process-equipment/data1.txt" - mock_blob2 = MagicMock() - mock_blob2.name = "painting-process-equipment/data2.json" - - mock_client = MagicMock() - mock_container_client = AsyncMock() - mock_client.get_container_client.return_value = mock_container_client - - async def async_iterator(items): - for item in items: - yield item - - mock_container_client.list_blobs.return_value = async_iterator([ - mock_blob1, mock_blob2 - ]) - - with patch('app.services.azure_storage.BlobServiceClient.from_connection_string') as mock_from_conn_str: - mock_from_conn_str.return_value.__aenter__.return_value = mock_client - - result = await azure_service.list_data_files() - - assert result == [] - - @pytest.mark.asyncio - async def test_read_csv_data_success(self, azure_service, sample_csv_data, capsys): - """Test successful CSV data reading.""" - csv_content = sample_csv_data.to_csv(index=False) - - mock_client = MagicMock() - mock_blob_client = AsyncMock() - mock_client.get_blob_client.return_value = mock_blob_client - - mock_blob_data = AsyncMock() - mock_blob_client.download_blob.return_value = mock_blob_data - mock_blob_data.readall.return_value = csv_content.encode('utf-8') - - with patch('app.services.azure_storage.BlobServiceClient.from_connection_string') as mock_from_conn_str: - mock_from_conn_str.return_value.__aenter__.return_value = mock_client - - result = await azure_service.read_csv_data("test.csv") - - assert result is not None - assert isinstance(result, pd.DataFrame) - assert len(result) == 3 - assert list(result.columns) == list(sample_csv_data.columns) - - captured = capsys.readouterr() - assert "📁 파일 읽기 성공: test.csv (3 rows)" in captured.out - - @pytest.mark.asyncio - async def test_read_csv_data_no_connection_string(self, mock_settings): - """Test read_csv_data with no connection string.""" - mock_settings.azure_connection_string = None - service = AzureStorageService() - - with pytest.raises(ValueError, match="Azure connection string이 설정되지 않았습니다."): - await service.read_csv_data("test.csv") - - @pytest.mark.asyncio - async def test_read_csv_data_empty_connection_string(self, mock_settings): - """Test read_csv_data with empty connection string.""" - mock_settings.azure_connection_string = "" - service = 
AzureStorageService() - - with pytest.raises(ValueError, match="Azure connection string이 설정되지 않았습니다."): - await service.read_csv_data("test.csv") - - @pytest.mark.asyncio - async def test_read_csv_data_exception_handling(self, azure_service, capsys): - """Test exception handling in read_csv_data.""" - with patch('app.services.azure_storage.BlobServiceClient') as mock_client_class: - mock_client_class.from_connection_string.side_effect = Exception("Download failed") - - result = await azure_service.read_csv_data("test.csv") - - assert result is None - captured = capsys.readouterr() - assert "❌ 파일 읽기 실패 (test.csv): Download failed" in captured.out - - @pytest.mark.asyncio - async def test_read_csv_data_invalid_csv(self, azure_service, capsys): - """Test reading invalid CSV data.""" - invalid_csv = "invalid,csv,content\nwith,broken\nlines" - - with patch('app.services.azure_storage.BlobServiceClient') as mock_client_class: - mock_client = AsyncMock() - mock_client_class.from_connection_string.return_value = mock_client - mock_client.__aenter__.return_value = mock_client - mock_client.__aexit__.return_value = None - - mock_blob_client = AsyncMock() - mock_client.get_blob_client.return_value = mock_blob_client - - mock_blob_data = AsyncMock() - mock_blob_client.download_blob.return_value = mock_blob_data - mock_blob_data.readall.return_value = invalid_csv.encode('utf-8') - - result = await azure_service.read_csv_data("invalid.csv") - - assert result is None - captured = capsys.readouterr() - assert "❌ 파일 읽기 실패 (invalid.csv):" in captured.out - - @pytest.mark.asyncio - async def test_read_csv_data_blob_client_calls(self, azure_service, sample_csv_data): - """Test that blob client is called with correct parameters.""" - csv_content = sample_csv_data.to_csv(index=False) - - mock_client = MagicMock() - mock_blob_client = AsyncMock() - mock_client.get_blob_client.return_value = mock_blob_client - - mock_blob_data = AsyncMock() - mock_blob_client.download_blob.return_value = mock_blob_data - mock_blob_data.readall.return_value = csv_content.encode('utf-8') - - with patch('app.services.azure_storage.BlobServiceClient.from_connection_string') as mock_from_conn_str: - mock_from_conn_str.return_value.__aenter__.return_value = mock_client - - await azure_service.read_csv_data("test.csv") - - mock_client.get_blob_client.assert_called_once_with( - container=azure_service.container_name, - blob="test.csv" - ) - - class TestSimulateRealTimeData: - """Test simulate_real_time_data method.""" - - @pytest.mark.asyncio - async def test_simulate_real_time_data_subsequent_calls(self, azure_service, sample_csv_data, capsys): - """Test subsequent calls use cached DataFrame.""" - azure_service.cached_df = sample_csv_data.copy() - azure_service.current_index = 1 - - with patch.object(azure_service, '_load_dataframe', new_callable=AsyncMock) as mock_load: - result = await azure_service.simulate_real_time_data() - - mock_load.assert_not_called() - assert result is not None - assert result['machineId'] == 'PAINT-MACHINE002' - assert result['thick'] == 2.0 - assert azure_service.current_index == 2 - - @pytest.mark.asyncio - async def test_simulate_real_time_data_index_wraparound(self, azure_service, sample_csv_data): - """Test index wraps around to 0 when reaching end of DataFrame.""" - azure_service.cached_df = sample_csv_data.copy() - azure_service.current_index = 2 # Last index - - result = await azure_service.simulate_real_time_data() - - assert result is not None - assert result['machineId'] == 'PAINT-MACHINE003' - 
assert azure_service.current_index == 0 # Wrapped around - - @pytest.mark.asyncio - async def test_simulate_real_time_data_no_cached_df(self, azure_service, capsys): - """Test behavior when cached_df remains None after loading.""" - with patch.object(azure_service, '_load_dataframe', new_callable=AsyncMock): - azure_service.cached_df = None - - result = await azure_service.simulate_real_time_data() - - assert result is None - - @pytest.mark.asyncio - async def test_simulate_real_time_data_missing_columns(self, azure_service, capsys): - """Test behavior with missing required columns.""" - incomplete_data = pd.DataFrame({ - 'machineId': ['MACHINE001'], - 'timeStamp': ['2023-01-01T10:00:00Z'], - 'Thick': [1.5], - # Missing PT_jo_V_1, PT_jo_A_Main_1, PT_jo_TP - }) - azure_service.cached_df = incomplete_data - - result = await azure_service.simulate_real_time_data() - - assert result is None - captured = capsys.readouterr() - assert "❌ Painting 데이터 시뮬레이션 실패:" in captured.out - - @pytest.mark.asyncio - async def test_simulate_real_time_data_data_structure(self, azure_service, sample_csv_data): - """Test the structure of returned simulated data.""" - azure_service.cached_df = sample_csv_data.copy() - azure_service.current_index = 0 - - result = await azure_service.simulate_real_time_data() - - assert isinstance(result, dict) - expected_keys = ['machineId', 'timeStamp', 'thick', 'voltage', 'current', 'temper', 'issue', 'isSolved'] - assert all(key in result for key in expected_keys) - - assert result['machineId'] == 'PAINT-MACHINE001' - assert result['timeStamp'] == '2023-01-01T10:00:00Z' - assert result['thick'] == 1.5 - assert result['voltage'] == 220.5 - assert result['current'] == 15.2 - assert result['temper'] == 25.3 - assert result['issue'] == "" - assert result['isSolved'] is False - - @pytest.mark.asyncio - async def test_simulate_real_time_data_type_conversion(self, azure_service): - """Test proper type conversion of numeric values.""" - test_data = pd.DataFrame({ - 'machineId': ['TEST001'], - 'timeStamp': ['2023-01-01T10:00:00Z'], - 'Thick': ['1.5'], # String that should convert to float - 'PT_jo_V_1': ['220.5'], - 'PT_jo_A_Main_1': ['15.2'], - 'PT_jo_TP': ['25.3'] - }) - azure_service.cached_df = test_data - - result = await azure_service.simulate_real_time_data() - - assert isinstance(result['thick'], float) - assert isinstance(result['voltage'], float) - assert isinstance(result['current'], float) - assert isinstance(result['temper'], float) - - @pytest.mark.asyncio - async def test_simulate_real_time_data_exception_handling(self, azure_service, capsys): - """Test exception handling in simulate_real_time_data.""" - with patch.object(azure_service, '_load_dataframe', side_effect=Exception("Load failed")): - result = await azure_service.simulate_real_time_data() - - assert result is None - captured = capsys.readouterr() - assert "❌ Painting 데이터 시뮬레이션 실패: Load failed" in captured.out - - @pytest.mark.asyncio - async def test_simulate_real_time_data_empty_dataframe(self, azure_service): - """Test behavior with empty DataFrame.""" - azure_service.cached_df = pd.DataFrame() - - result = await azure_service.simulate_real_time_data() - - assert result is None - - class TestLoadDataframe: - """Test _load_dataframe method.""" - - @pytest.mark.asyncio - async def test_load_dataframe_success(self, azure_service, sample_csv_data, capsys): - """Test successful DataFrame loading.""" - mock_files = ["painting-process-equipment/data1.csv", "painting-process-equipment/data2.csv"] - - with 
patch.object(azure_service, 'list_data_files', new=AsyncMock(return_value=mock_files)), \ - patch.object(azure_service, 'read_csv_data', new=AsyncMock(return_value=sample_csv_data)): - - await azure_service._load_dataframe() - - assert azure_service.cached_df is not None - assert len(azure_service.cached_df) == 3 - assert azure_service.current_index == 0 - - captured = capsys.readouterr() - assert "✅ 데이터 캐싱 완료:" in captured.out - - @pytest.mark.asyncio - async def test_load_dataframe_no_files(self, azure_service, capsys): - """Test behavior when no files are found.""" - with patch.object(azure_service, 'list_data_files', new=AsyncMock(return_value=[])): - await azure_service._load_dataframe() - - assert azure_service.cached_df is None - captured = capsys.readouterr() - assert "⚠️ Painting 데이터 파일이 없습니다." in captured.out - - @pytest.mark.asyncio - async def test_load_dataframe_read_failure(self, azure_service, capsys): - """Test behavior when CSV reading fails.""" - mock_files = ["painting-process-equipment/data1.csv"] - - with patch.object(azure_service, 'list_data_files', new=AsyncMock(return_value=mock_files)), \ - patch.object(azure_service, 'read_csv_data', new=AsyncMock(return_value=None)): - - await azure_service._load_dataframe() - - assert azure_service.cached_df is None - assert azure_service.current_index == 0 - - @pytest.mark.asyncio - async def test_load_dataframe_uses_first_file(self, azure_service, sample_csv_data): - """Test that _load_dataframe uses the first available file.""" - mock_files = ["painting-process-equipment/data1.csv", "painting-process-equipment/data2.csv"] - - with patch.object(azure_service, 'list_data_files', new=AsyncMock(return_value=mock_files)), \ - patch.object(azure_service, 'read_csv_data', new=AsyncMock(return_value=sample_csv_data)) as mock_read: - - await azure_service._load_dataframe() - - mock_read.assert_called_once_with("painting-process-equipment/data1.csv") - - @pytest.mark.asyncio - async def test_load_dataframe_exception_handling(self, azure_service, capsys): - """Test exception handling in _load_dataframe.""" - with patch.object(azure_service, 'list_data_files', new=AsyncMock(side_effect=Exception("List failed"))): - await azure_service._load_dataframe() - - captured = capsys.readouterr() - assert "❌ DataFrame 로드 실패: List failed" in captured.out - - class TestEdgeCasesAndBoundaryConditions: - """Test edge cases and boundary conditions.""" - - @pytest.mark.asyncio - async def test_single_row_dataframe(self, azure_service): - """Test behavior with single row DataFrame.""" - single_row_data = pd.DataFrame({ - 'machineId': ['MACHINE001'], - 'timeStamp': ['2023-01-01T10:00:00Z'], - 'Thick': [1.5], - 'PT_jo_V_1': [220.5], - 'PT_jo_A_Main_1': [15.2], - 'PT_jo_TP': [25.3] - }) - azure_service.cached_df = single_row_data - - # First call - result1 = await azure_service.simulate_real_time_data() - assert result1 is not None - assert azure_service.current_index == 0 # Should wrap back to 0 - - # Second call should return same data - result2 = await azure_service.simulate_real_time_data() - assert result2 is not None - assert result2['machineId'] == result1['machineId'] - - @pytest.mark.asyncio - async def test_very_large_dataframe_index_management(self, azure_service): - """Test index management with large DataFrame.""" - large_data = pd.DataFrame({ - 'machineId': [f'MACHINE{i:03d}' for i in range(1000)], - 'timeStamp': ['2023-01-01T10:00:00Z'] * 1000, - 'Thick': [1.5] * 1000, - 'PT_jo_V_1': [220.5] * 1000, - 'PT_jo_A_Main_1': [15.2] * 1000, - 
'PT_jo_TP': [25.3] * 1000 - }) - azure_service.cached_df = large_data - azure_service.current_index = 999 - - result = await azure_service.simulate_real_time_data() - - assert result is not None - assert azure_service.current_index == 0 # Should wrap around - - @pytest.mark.asyncio - async def test_nan_values_in_dataframe(self, azure_service): - """Test handling of NaN values in DataFrame.""" - data_with_nan = pd.DataFrame({ - 'machineId': ['MACHINE001'], - 'timeStamp': ['2023-01-01T10:00:00Z'], - 'Thick': [float('nan')], - 'PT_jo_V_1': [220.5], - 'PT_jo_A_Main_1': [15.2], - 'PT_jo_TP': [25.3] - }) - azure_service.cached_df = data_with_nan - - result = await azure_service.simulate_real_time_data() - - assert result is not None - assert pd.isna(result['thick']) - - @pytest.mark.asyncio - async def test_unicode_and_special_characters(self, azure_service): - """Test handling of unicode and special characters.""" - special_data = pd.DataFrame({ - 'machineId': ['MACHINE_특수문자'], - 'timeStamp': ['2023-01-01T10:00:00Z'], - 'Thick': [1.5], - 'PT_jo_V_1': [220.5], - 'PT_jo_A_Main_1': [15.2], - 'PT_jo_TP': [25.3] - }) - azure_service.cached_df = special_data - - result = await azure_service.simulate_real_time_data() - - assert result is not None - assert result['machineId'] == 'PAINT-MACHINE_특수문자' - - class TestIntegrationScenarios: - """Test integration-like scenarios that combine multiple methods.""" - - @pytest.mark.asyncio - async def test_full_workflow_simulation(self, azure_service, sample_csv_data): - """Test complete workflow from listing files to data simulation.""" - mock_files = ["painting-process-equipment/data1.csv"] - - with patch.object(azure_service, 'list_data_files', new=AsyncMock(return_value=mock_files)), \ - patch.object(azure_service, 'read_csv_data', new=AsyncMock(return_value=sample_csv_data)): - - # First simulate call should trigger loading - result1 = await azure_service.simulate_real_time_data() - assert result1 is not None - assert result1['machineId'] == 'PAINT-MACHINE001' - - # Second call should use cached data - result2 = await azure_service.simulate_real_time_data() - assert result2 is not None - assert result2['machineId'] == 'PAINT-MACHINE002' - - # Third call should use cached data - result3 = await azure_service.simulate_real_time_data() - assert result3 is not None - assert result3['machineId'] == 'PAINT-MACHINE003' - - # Fourth call should wrap around - result4 = await azure_service.simulate_real_time_data() - assert result4 is not None - assert result4['machineId'] == 'PAINT-MACHINE001' - - @pytest.mark.asyncio - async def test_error_recovery_workflow(self, azure_service, sample_csv_data): - """Test error recovery in complete workflow.""" - # First attempt fails - with patch.object(azure_service, 'list_data_files', new=AsyncMock(side_effect=Exception("Network error"))): - result1 = await azure_service.simulate_real_time_data() - assert result1 is None - - # Second attempt succeeds - mock_files = ["painting-process-equipment/data1.csv"] - with patch.object(azure_service, 'list_data_files', new=AsyncMock(return_value=mock_files)), \ - patch.object(azure_service, 'read_csv_data', new=AsyncMock(return_value=sample_csv_data)): - - result2 = await azure_service.simulate_real_time_data() - assert result2 is not None - assert result2['machineId'] == 'PAINT-MACHINE001' - - -class TestGlobalInstance: - """Test the global azure_storage instance.""" - - def test_global_instance_creation(self): - """Test that global instance is created properly.""" - from 
app.services.azure_storage import azure_storage +@pytest.fixture +def sample_csv_data(): + """Provides a sample DataFrame for testing.""" + data = { + 'machineId': ['MACHINE001'], + 'timeStamp': ['2023-01-01T10:00:00Z'], + 'Thick': [1.5], + 'PT_jo_V_1': [220.5], + 'PT_jo_A_Main_1': [15.2], + 'PT_jo_TP': [25.3] + } + return pd.DataFrame(data) + +@pytest.mark.asyncio +async def test_simulate_real_time_data(sample_csv_data): + """Test the data simulation logic.""" + service = AzureStorageService() + service.cached_df = sample_csv_data + + with patch.object(service, '_load_dataframe', new_callable=AsyncMock) as mock_load: + result = await service.simulate_real_time_data() + + mock_load.assert_not_called() # Should use the cached dataframe + + assert result is not None + assert result["machineId"] == "PAINT-MACHINE001" + assert result["thick"] == 1.5 + assert isinstance(result["voltage"], float) + +@pytest.mark.asyncio +async def test_load_dataframe(): + """Test the dataframe loading logic.""" + service = AzureStorageService() + + with patch.object(service, 'list_data_files', new_callable=AsyncMock) as mock_list, \ + patch.object(service, 'read_csv_data', new_callable=AsyncMock) as mock_read: - assert azure_storage is not None - assert isinstance(azure_storage, AzureStorageService) - assert hasattr(azure_storage, 'current_index') - assert hasattr(azure_storage, 'cached_df') + mock_list.return_value = ['test.csv'] + mock_read.return_value = pd.DataFrame({'machineId': ['test']}) - def test_global_instance_singleton_behavior(self): - """Test singleton-like behavior of global instance.""" - from app.services.azure_storage import azure_storage - - # Modify the global instance - original_index = azure_storage.current_index - azure_storage.current_index = 999 - - # Import again and verify it's the same instance - from app.services.azure_storage import azure_storage as azure_storage2 - - assert azure_storage2.current_index == 999 - - # Reset for other tests - azure_storage.current_index = original_index + await service._load_dataframe() + + mock_list.assert_awaited_once() + mock_read.assert_awaited_once_with('test.csv') + assert service.cached_df is not None diff --git a/services/painting-process-data-simulator-service/tests/test_backend_client.py b/services/painting-process-data-simulator-service/tests/test_backend_client.py new file mode 100644 index 0000000..6db8af6 --- /dev/null +++ b/services/painting-process-data-simulator-service/tests/test_backend_client.py @@ -0,0 +1,18 @@ +import pytest +from unittest.mock import patch, AsyncMock +from app.services.backend_client import BackendClient + +@pytest.mark.asyncio +async def test_send_to_backend(): + """Test that the backend client sends data correctly.""" + client = BackendClient() + test_data = {"test_key": "test_value"} + test_url = "http://test-backend.com/api" + + with patch('httpx.AsyncClient') as mock_async_client: + mock_client_instance = AsyncMock() + mock_async_client.return_value.__aenter__.return_value = mock_client_instance + + await client.send_to_backend(test_data, test_url) + + mock_client_instance.post.assert_awaited_once_with(test_url, json=test_data) diff --git a/services/painting-process-data-simulator-service/tests/test_scheduler_service.py b/services/painting-process-data-simulator-service/tests/test_scheduler_service.py index 1933904..f697e33 100644 --- a/services/painting-process-data-simulator-service/tests/test_scheduler_service.py +++ b/services/painting-process-data-simulator-service/tests/test_scheduler_service.py @@ 
-1,439 +1,59 @@ import pytest -from unittest.mock import Mock, AsyncMock, patch -from datetime import datetime -from apscheduler.schedulers.asyncio import AsyncIOScheduler -from apscheduler.triggers.interval import IntervalTrigger -from apscheduler.job import Job - -from app.services.scheduler_service import SimulatorScheduler, simulator_scheduler - +from unittest.mock import patch, AsyncMock +from app.services.scheduler_service import SimulatorScheduler +from app.config.settings import settings @pytest.fixture def scheduler_instance(): - """Create a fresh scheduler instance for each test""" + """Provides a clean instance of SimulatorScheduler for each test.""" return SimulatorScheduler() -@pytest.fixture -def mock_settings(): - """Mock settings configuration""" - with patch('app.services.scheduler_service.settings') as mock_settings: - mock_settings.scheduler_interval_minutes = 5 - mock_settings.model_services = { - 'service1': {'url': 'http://service1'}, - 'service2': {'url': 'http://service2'} - } - yield mock_settings - -@pytest.fixture -def mock_azure_storage(): - """Mock Azure storage service""" - with patch('app.services.scheduler_service.azure_storage') as mock_storage: - yield mock_storage - -@pytest.fixture -def mock_model_client(): - """Mock model client service""" - with patch('app.services.scheduler_service.model_client') as mock_client: - yield mock_client - -@pytest.fixture -def mock_anomaly_logger(): - """Mock anomaly logger""" - with patch('app.services.scheduler_service.anomaly_logger') as mock_logger: - yield mock_logger - -class TestSimulatorScheduler: - - def test_init_creates_scheduler_instance(self): - """Test scheduler initialization creates AsyncIOScheduler instance""" - scheduler = SimulatorScheduler() - assert isinstance(scheduler.scheduler, AsyncIOScheduler) - assert scheduler.is_running is False - - def test_init_default_state(self): - """Test scheduler starts in correct initial state""" - scheduler = SimulatorScheduler() - assert scheduler.is_running is False - assert hasattr(scheduler, 'scheduler') - - @pytest.mark.asyncio - async def test_start_successful(self, scheduler_instance, mock_settings, mock_model_client, capsys): - """Test successful scheduler start""" - mock_model_client.health_check_all = AsyncMock(return_value={ - 'service1': True, - 'service2': True - }) - - with patch.object(scheduler_instance.scheduler, 'add_job') as mock_add_job, \ - patch.object(scheduler_instance.scheduler, 'start') as mock_start: - - await scheduler_instance.start() - - assert scheduler_instance.is_running is True - mock_add_job.assert_called_once() - mock_start.assert_called_once() - - # Verify job configuration - call_args = mock_add_job.call_args - assert call_args[1]['id'] == 'data_simulation' - assert call_args[1]['name'] == 'Data Collection Simulation' - assert call_args[1]['replace_existing'] is True - assert isinstance(call_args[1]['trigger'], IntervalTrigger) - - @pytest.mark.asyncio - async def test_start_already_running(self, scheduler_instance, capsys): - """Test start when scheduler is already running""" - scheduler_instance.is_running = True +@pytest.mark.asyncio +async def test_scheduler_start_stop(scheduler_instance): + """Test the start and stop methods of the scheduler.""" + with patch.object(scheduler_instance.scheduler, 'add_job') as mock_add_job, \ + patch.object(scheduler_instance.scheduler, 'start') as mock_start, \ + patch.object(scheduler_instance.scheduler, 'shutdown') as mock_shutdown: await scheduler_instance.start() - - captured = 
capsys.readouterr() - assert "⚠️ 스케줄러가 이미 실행 중입니다." in captured.out - - @pytest.mark.asyncio - async def test_start_health_check_failure_all_unhealthy(self, scheduler_instance, mock_model_client): - """Test start fails when all services are unhealthy""" - mock_model_client.health_check_all = AsyncMock(return_value={ - 'service1': False, - 'service2': False - }) - - with pytest.raises(Exception, match="모든 모델 서비스가 비활성 상태입니다."): - await scheduler_instance.start() - - assert scheduler_instance.is_running is False - - @pytest.mark.asyncio - async def test_start_health_check_partial_healthy(self, scheduler_instance, mock_settings, mock_model_client, capsys): - """Test start succeeds when some services are healthy""" - mock_model_client.health_check_all = AsyncMock(return_value={ - 'service1': True, - 'service2': False, - 'service3': True - }) - - with patch.object(scheduler_instance.scheduler, 'add_job'), \ - patch.object(scheduler_instance.scheduler, 'start'): - - await scheduler_instance.start() - - assert scheduler_instance.is_running is True - captured = capsys.readouterr() - assert "📈 활성 서비스: 2/3" in captured.out + assert scheduler_instance.is_running is True + mock_add_job.assert_called_once() + mock_start.assert_called_once() - @pytest.mark.asyncio - async def test_start_exception_handling(self, scheduler_instance, mock_model_client): - """Test start handles exceptions properly""" - mock_model_client.health_check_all = AsyncMock(side_effect=Exception("Network error")) - - with pytest.raises(Exception, match="Network error"): - await scheduler_instance.start() - - assert scheduler_instance.is_running is False - - @pytest.mark.asyncio - async def test_stop_successful(self, scheduler_instance, capsys): - """Test successful scheduler stop""" - scheduler_instance.is_running = True - scheduler_instance.scheduler.shutdown = Mock() - await scheduler_instance.stop() - assert scheduler_instance.is_running is False - scheduler_instance.scheduler.shutdown.assert_called_once() - - captured = capsys.readouterr() - assert "🛑 시뮬레이터 중지됨" in captured.out + mock_shutdown.assert_called_once() - @pytest.mark.asyncio - async def test_stop_not_running(self, scheduler_instance, capsys): - """Test stop when scheduler is not running""" - scheduler_instance.is_running = False - - await scheduler_instance.stop() - - captured = capsys.readouterr() - assert "⚠️ 스케줄러가 실행 중이 아닙니다." in captured.out +@pytest.mark.asyncio +async def test_simulate_and_send_data(): + """Test the _simulate_and_send_data method.""" + scheduler = SimulatorScheduler() + simulated_data = {"key": "value"} - @pytest.mark.asyncio - async def test_initial_health_check_all_healthy(self, scheduler_instance, mock_model_client, capsys): - """Test initial health check with all services healthy""" - mock_model_client.health_check_all = AsyncMock(return_value={ - 'service1': True, - 'service2': True - }) - - await scheduler_instance._initial_health_check() + with patch('app.services.scheduler_service.azure_storage', new_callable=AsyncMock) as mock_azure, \ + patch('app.services.scheduler_service.backend_client', new_callable=AsyncMock) as mock_backend: - captured = capsys.readouterr() - assert "🔍 모델 서비스 헬스 체크 중..." 
in captured.out - assert "✅ service1" in captured.out - assert "✅ service2" in captured.out - assert "📈 활성 서비스: 2/2" in captured.out + mock_azure.simulate_real_time_data.return_value = simulated_data + mock_backend.send_to_backend = AsyncMock() - @pytest.mark.asyncio - async def test_initial_health_check_mixed_status(self, scheduler_instance, mock_model_client, capsys): - """Test initial health check with mixed service status""" - mock_model_client.health_check_all = AsyncMock(return_value={ - 'healthy_service': True, - 'unhealthy_service': False - }) - - await scheduler_instance._initial_health_check() - - captured = capsys.readouterr() - assert "✅ healthy_service" in captured.out - assert "❌ unhealthy_service" in captured.out - assert "📈 활성 서비스: 1/2" in captured.out + await scheduler._simulate_and_send_data() - @pytest.mark.asyncio - async def test_simulate_data_collection_successful_with_anomaly(self, scheduler_instance, mock_azure_storage, mock_model_client, mock_anomaly_logger, capsys): - """Test data collection simulation with anomaly detection""" - mock_data = {"temperature": 85, "pressure": 120} - mock_prediction = {"issue": "high_temperature", "confidence": 0.95} - - mock_azure_storage.simulate_real_time_data = AsyncMock(return_value=mock_data) - mock_model_client.predict_painting_issue = AsyncMock(return_value=mock_prediction) - - await scheduler_instance._simulate_data_collection() - - mock_azure_storage.simulate_real_time_data.assert_called_once() - mock_model_client.predict_painting_issue.assert_called_once_with(mock_data) - mock_anomaly_logger.log_anomaly.assert_called_once_with( - "painting-process-equipment", - mock_prediction, - mock_data - ) - - captured = capsys.readouterr() - assert "🔄 Painting 데이터 수집 시작" in captured.out - assert "🚨 이상 감지! - 이슈: high_temperature" in captured.out + mock_azure.simulate_real_time_data.assert_awaited_once() + mock_backend.send_to_backend.assert_awaited_once_with(simulated_data, settings.backend_service_url) - @pytest.mark.asyncio - async def test_simulate_data_collection_successful_normal(self, scheduler_instance, mock_azure_storage, mock_model_client, mock_anomaly_logger, capsys): - """Test data collection simulation with normal status""" - mock_data = {"temperature": 65, "pressure": 100} - - mock_azure_storage.simulate_real_time_data = AsyncMock(return_value=mock_data) - mock_model_client.predict_painting_issue = AsyncMock(return_value=None) - - await scheduler_instance._simulate_data_collection() - - mock_anomaly_logger.log_normal_processing.assert_called_once_with( - "painting-process-equipment", - mock_data - ) - - captured = capsys.readouterr() - assert "✅ 정상 상태" in captured.out +@pytest.mark.asyncio +async def test_simulate_and_send_data_no_data(): + """Test the _simulate_and_send_data method when no data is available.""" + scheduler = SimulatorScheduler() - @pytest.mark.asyncio - async def test_simulate_data_collection_no_data(self, scheduler_instance, mock_azure_storage, mock_model_client, capsys): - """Test data collection when no data is available""" - mock_azure_storage.simulate_real_time_data = AsyncMock(return_value=None) + with patch('app.services.scheduler_service.azure_storage', new_callable=AsyncMock) as mock_azure, \ + patch('app.services.scheduler_service.backend_client', new_callable=AsyncMock) as mock_backend: - await scheduler_instance._simulate_data_collection() - - mock_model_client.predict_painting_issue.assert_not_called() - - captured = capsys.readouterr() - assert "⚠️ 수집할 데이터가 없습니다." 
in captured.out - - @pytest.mark.asyncio - async def test_simulate_data_collection_empty_data(self, scheduler_instance, mock_azure_storage, mock_model_client, capsys): - """Test data collection with empty data""" - mock_azure_storage.simulate_real_time_data = AsyncMock(return_value={}) - - await scheduler_instance._simulate_data_collection() - - captured = capsys.readouterr() - assert "⚠️ 수집할 데이터가 없습니다." in captured.out - - @pytest.mark.asyncio - async def test_simulate_data_collection_azure_storage_exception(self, scheduler_instance, mock_azure_storage, mock_anomaly_logger, capsys): - """Test data collection handles Azure storage exceptions""" - mock_azure_storage.simulate_real_time_data = AsyncMock(side_effect=Exception("Storage error")) - - await scheduler_instance._simulate_data_collection() - - mock_anomaly_logger.log_error.assert_called_once_with( - "painting-simulator-scheduler", - "Storage error" - ) - - captured = capsys.readouterr() - assert "❌ 데이터 수집 중 오류 발생: Storage error" in captured.out - - @pytest.mark.asyncio - async def test_simulate_data_collection_model_client_exception(self, scheduler_instance, mock_azure_storage, mock_model_client, mock_anomaly_logger, capsys): - """Test data collection handles model client exceptions""" - mock_data = {"temperature": 75} - mock_azure_storage.simulate_real_time_data = AsyncMock(return_value=mock_data) - mock_model_client.predict_painting_issue = AsyncMock(side_effect=Exception("Model error")) - - await scheduler_instance._simulate_data_collection() - - mock_anomaly_logger.log_error.assert_called_once_with( - "painting-simulator-scheduler", - "Model error" - ) - - def test_get_status_with_jobs(self, scheduler_instance, mock_settings): - """Test get_status returns correct status with active jobs""" - scheduler_instance.is_running = True - - mock_job = Mock(spec=Job) - mock_job.next_run_time = datetime(2024, 1, 1, 12, 0, 0) - scheduler_instance.scheduler.get_jobs = Mock(return_value=[mock_job]) - - status = scheduler_instance.get_status() - - assert status["is_running"] is True - assert status["interval_minutes"] == 5 - assert status["next_run"] == "2024-01-01 12:00:00" - assert status["total_services"] == 2 - - def test_get_status_no_jobs(self, scheduler_instance, mock_settings): - """Test get_status returns correct status with no jobs""" - scheduler_instance.is_running = False - scheduler_instance.scheduler.get_jobs = Mock(return_value=[]) - - status = scheduler_instance.get_status() - - assert status["is_running"] is False - assert status["interval_minutes"] == 5 - assert status["next_run"] is None - assert status["total_services"] == 2 - - def test_get_status_multiple_jobs(self, scheduler_instance, mock_settings): - """Test get_status with multiple jobs returns first job's next run time""" - scheduler_instance.is_running = True - - mock_job1 = Mock(spec=Job) - mock_job1.next_run_time = datetime(2024, 1, 1, 12, 0, 0) - mock_job2 = Mock(spec=Job) - mock_job2.next_run_time = datetime(2024, 1, 1, 13, 0, 0) - - scheduler_instance.scheduler.get_jobs = Mock(return_value=[mock_job1, mock_job2]) - - status = scheduler_instance.get_status() - - assert status["next_run"] == "2024-01-01 12:00:00" + mock_azure.simulate_real_time_data.return_value = None + mock_backend.send_to_backend = AsyncMock() + await scheduler._simulate_and_send_data() -class TestGlobalSchedulerInstance: - """Test suite for global scheduler instance""" - - def test_global_instance_exists(self): - """Test that global scheduler instance exists""" - assert simulator_scheduler is 
-        assert isinstance(simulator_scheduler, SimulatorScheduler)
+        mock_azure.simulate_real_time_data.assert_awaited_once()
+        mock_backend.send_to_backend.assert_not_awaited()
 
-    def test_global_instance_initial_state(self):
-        """Test global scheduler instance initial state"""
-        assert simulator_scheduler.is_running is False
-        assert hasattr(simulator_scheduler, 'scheduler')
-
-
-@pytest.mark.usefixtures("mock_settings", "mock_model_client")
-class TestSchedulerIntegration:
-    """Integration tests for scheduler service"""
-
-    @pytest.mark.asyncio
-    async def test_start_stop_cycle(self, mock_settings, mock_model_client):
-        """Test complete start-stop cycle"""
-        scheduler = SimulatorScheduler()
-        mock_model_client.health_check_all = AsyncMock(return_value={'service1': True})
-
-        with patch.object(scheduler.scheduler, 'add_job'), \
-             patch.object(scheduler.scheduler, 'start'), \
-             patch.object(scheduler.scheduler, 'shutdown'):
-
-            # Start scheduler
-            await scheduler.start()
-            assert scheduler.is_running is True
-
-            # Stop scheduler
-            await scheduler.stop()
-            assert scheduler.is_running is False
-
-    @pytest.mark.asyncio
-    async def test_multiple_start_calls_idempotent(self, mock_settings, mock_model_client):
-        """Test multiple start calls are handled gracefully"""
-        scheduler = SimulatorScheduler()
-        mock_model_client.health_check_all = AsyncMock(return_value={'service1': True})
-
-        with patch.object(scheduler.scheduler, 'add_job') as mock_add_job, \
-             patch.object(scheduler.scheduler, 'start') as mock_start:
-
-            await scheduler.start()
-            await scheduler.start()  # Second call should be ignored
-
-            # Should only be called once
-            assert mock_add_job.call_count == 1
-            assert mock_start.call_count == 1
-
-    @pytest.mark.asyncio
-    async def test_multiple_stop_calls_idempotent(self):
-        """Test multiple stop calls are handled gracefully"""
-        scheduler = SimulatorScheduler()
-        scheduler.scheduler.shutdown = Mock()
-
-        await scheduler.stop()  # First call when not running
-        await scheduler.stop()  # Second call when not running
-
-        # Shutdown should not be called when not running
-        scheduler.scheduler.shutdown.assert_not_called()
-
-
-@pytest.mark.usefixtures("scheduler_instance", "mock_azure_storage", "mock_model_client", "mock_anomaly_logger")
-class TestSchedulerEdgeCases:
-    """Test edge cases and error conditions"""
-
-    @pytest.mark.asyncio
-    async def test_health_check_empty_services(self, scheduler_instance, mock_model_client):
-        """Test health check with no services configured"""
-        mock_model_client.health_check_all = AsyncMock(return_value={})
-
-        with pytest.raises(Exception, match="모든 모델 서비스가 비활성 상태입니다."):
-            await scheduler_instance._initial_health_check()
-
-    @pytest.mark.asyncio
-    async def test_simulate_data_collection_prediction_result_falsy_values(self, scheduler_instance, mock_azure_storage, mock_model_client, mock_anomaly_logger):
-        """Test data collection with various falsy prediction results"""
-        mock_data = {"temperature": 70}
-        mock_azure_storage.simulate_real_time_data = AsyncMock(return_value=mock_data)
-
-        # Test with False
-        mock_model_client.predict_painting_issue = AsyncMock(return_value=False)
-        await scheduler_instance._simulate_data_collection()
-        mock_anomaly_logger.log_normal_processing.assert_called()
-
-        # Test with empty dict
-        mock_anomaly_logger.reset_mock()
-        mock_model_client.predict_painting_issue = AsyncMock(return_value={})
-        await scheduler_instance._simulate_data_collection()
-        mock_anomaly_logger.log_normal_processing.assert_called()
-
-    def test_get_status_scheduler_exception(self, scheduler_instance, mock_settings):
-        """Test get_status handles scheduler exceptions gracefully"""
-        scheduler_instance.scheduler.get_jobs = Mock(side_effect=Exception("Scheduler error"))
-
-        with pytest.raises(Exception, match="Scheduler error"):
-            scheduler_instance.get_status()
-
-    @pytest.mark.asyncio
-    async def test_datetime_formatting_in_simulation(self, scheduler_instance, mock_azure_storage, mock_model_client, capsys):
-        """Test datetime formatting in simulation output"""
-        mock_data = {"temperature": 70}
-        mock_azure_storage.simulate_real_time_data = AsyncMock(return_value=mock_data)
-        mock_model_client.predict_painting_issue = AsyncMock(return_value=None)
-
-        with patch('app.services.scheduler_service.datetime') as mock_datetime:
-            mock_datetime.now.return_value = datetime(2024, 1, 1, 15, 30, 45)
-            mock_datetime.strftime = datetime.strftime
-
-            await scheduler_instance._simulate_data_collection()
-
-            captured = capsys.readouterr()
-            assert "2024-01-01 15:30:45" in captured.out
\ No newline at end of file
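The replacement suite shown in the hunk above trades the print-capturing tests for a single hand-off check: both I/O collaborators are stubbed, and the only assertion is that the simulated record reaches the backend client. A self-contained sketch of that pattern (assuming pytest-asyncio, which the old suite already used; the payload and test name are illustrative, not the project's real fixtures):

import pytest
from unittest.mock import AsyncMock, patch

from app.services.scheduler_service import SimulatorScheduler

@pytest.mark.asyncio
async def test_simulate_and_send_data_forwards_payload():
    scheduler = SimulatorScheduler()
    payload = {"machineId": "M-01", "temperature": 70}  # illustrative record

    # Patch at the use site so the scheduler's imported references are replaced.
    with patch("app.services.scheduler_service.azure_storage") as mock_azure, \
         patch("app.services.scheduler_service.backend_client") as mock_backend:
        mock_azure.simulate_real_time_data = AsyncMock(return_value=payload)
        mock_backend.send_to_backend = AsyncMock(return_value=True)

        await scheduler._simulate_and_send_data()

        mock_backend.send_to_backend.assert_awaited_once()

Patching "app.services.scheduler_service.azure_storage" rather than the defining module matters: scheduler_service imports the objects into its own namespace, so patching the source module would leave the scheduler's references untouched.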
- """ - # When +def test_get_simulator_status(mock_scheduler): + """Test GET /simulator/status endpoint.""" response = client.get("/simulator/status") - - # Then assert response.status_code == 200 - data = response.json() - assert data["is_running"] is False - assert "model_services_health" not in data + assert response.json() == {"is_running": False, "next_run": None} + mock_scheduler.get_status.assert_called_once() def test_start_simulator(mock_scheduler): - """ - Given: 시뮬레이터가 중지된 상태일 때 - When: POST /simulator/start API를 호출하면 - Then: 200 OK와 함께 성공 메시지를 반환하고, 스케줄러의 start 메서드가 호출되어야 합니다. - """ - # When + """Test POST /simulator/start endpoint.""" response = client.post("/simulator/start") - - # Then assert response.status_code == 200 - assert response.json()["message"] == "시뮬레이터가 시작되었습니다." - - # 스케줄러의 start 메서드가 1번 호출되었는지 검증 + assert "시뮬레이터가 시작되었습니다." in response.json()["message"] mock_scheduler.start.assert_awaited_once() def test_stop_simulator(mock_scheduler): - """ - Given: 시뮬레이터가 실행 중인 상태일 때 - When: POST /simulator/stop API를 호출하면 - Then: 200 OK와 함께 성공 메시지를 반환하고, 스케줄러의 stop 메서드가 호출되어야 합니다. - """ - # Given: 시뮬레이터를 실행 중인 상태로 설정 - mock_scheduler.is_running = True - - # When + """Test POST /simulator/stop endpoint.""" response = client.post("/simulator/stop") - - # Then assert response.status_code == 200 - assert response.json()["message"] == "시뮬레이터가 중지되었습니다." - - # 스케줄러의 stop 메서드가 1번 호출되었는지 검증 + assert "시뮬레이터가 중지되었습니다." in response.json()["message"] mock_scheduler.stop.assert_awaited_once() -def test_get_simulator_status_when_running(mock_scheduler, mock_model_client): - """ - Given: 시뮬레이터가 실행 중인 상태일 때 - When: GET /simulator/status API를 호출하면 - Then: 200 OK와 함께 is_running이 True이고, 모델 서비스 헬스 체크 결과가 포함된 상태를 반환해야 합니다. - """ - # Given: 시뮬레이터를 실행 중인 상태로 설정 - mock_scheduler.is_running = True - mock_scheduler.get_status.return_value = { - "is_running": True, - "interval_minutes": 1, - "next_run": "2025-08-06T13:00:00", - "total_services": 1 - } - - # When - response = client.get("/simulator/status") - - # Then - assert response.status_code == 200 - data = response.json() - assert data["is_running"] is True - assert "model_services_health" in data - assert data["model_services_health"]["painting-process-equipment"] is True - - # model_client의 health_check_all 메서드가 1번 호출되었는지 검증 - mock_model_client.health_check_all.assert_awaited_once() \ No newline at end of file +def test_start_simulator_exception(mock_scheduler): + """Test exception handling when starting the simulator.""" + mock_scheduler.start.side_effect = Exception("Test Exception") + response = client.post("/simulator/start") + assert response.status_code == 500 + assert "시뮬레이터 시작 실패" in response.json()["detail"] From 93744bdeebd0a8d5d768d1c9d2e72510ede71b2f Mon Sep 17 00:00:00 2001 From: TaeHyun aivle Date: Wed, 13 Aug 2025 01:52:14 +0000 Subject: [PATCH 06/12] =?UTF-8?q?=F0=9F=93=A6=EF=B8=8F=20Fix:=20Downgrade?= =?UTF-8?q?=20dependencies=20and=20etc?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 파이썬 3.10버전에 맞는 numpy버전을 2.3.2에서 2.2.6으로 수정하고 .env파일에 따옴표를 빼서 오류를 방지 --- .../.env.example | 13 +++++++------ .../requirements.txt | 2 +- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/services/painting-process-data-simulator-service/.env.example b/services/painting-process-data-simulator-service/.env.example index 7a28b5d..f9fed58 100644 --- a/services/painting-process-data-simulator-service/.env.example +++ b/services/painting-process-data-simulator-service/.env.example @@ -6,13 +6,13 @@ 
From 9bdb9e90e1e275cbb6d48b51eefe39ea49f25056 Mon Sep 17 00:00:00 2001
From: TaeHyun aivle
Date: Wed, 13 Aug 2025 04:44:31 +0000
Subject: [PATCH 07/12] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Refact:=20Take=20the?=
 =?UTF-8?q?=20rabbit's=20advice?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Refactor the code following CodeRabbit's advice and address the latent problems and errors it detected.
---
 .../README.md                                 |  2 +-
 .../app/config/logging_config.py              |  4 +++-
 .../app/config/settings.py                    | 12 ++++++++++--
 .../app/main.py                               |  7 +++----
 .../app/routers/simulator_router.py           | 10 +++++++---
 .../app/services/azure_storage.py             |  6 ++++++
 .../app/services/backend_client.py            | 19 +++++++++++++------
 .../app/services/scheduler_service.py         |  6 +++++-
 8 files changed, 48 insertions(+), 18 deletions(-)

diff --git a/services/painting-process-data-simulator-service/README.md b/services/painting-process-data-simulator-service/README.md
index 946f5ce..05519ab 100644
--- a/services/painting-process-data-simulator-service/README.md
+++ b/services/painting-process-data-simulator-service/README.md
@@ -13,7 +13,7 @@
 ## 3. 프로젝트 구조
 
 ```text
-painting-process-backend-simulator-service/
+painting-process-data-simulator-service/
 ├── app/
 │   ├── main.py          # FastAPI 애플리케이션의 메인 진입점
 │   ├── config/
diff --git a/services/painting-process-data-simulator-service/app/config/logging_config.py b/services/painting-process-data-simulator-service/app/config/logging_config.py
index 02ce366..8634647 100644
--- a/services/painting-process-data-simulator-service/app/config/logging_config.py
+++ b/services/painting-process-data-simulator-service/app/config/logging_config.py
@@ -1,10 +1,12 @@
-
+import os
 import logging
 import sys
 from app.config.settings import settings
 
 def setup_logging():
     """Sets up centralized logging."""
+    # Ensure log directory exists
+    os.makedirs(settings.log_directory, exist_ok=True)
     logging.basicConfig(
         level=logging.INFO,
         format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
diff --git a/services/painting-process-data-simulator-service/app/config/settings.py b/services/painting-process-data-simulator-service/app/config/settings.py
index ea62c6a..13765f5 100644
--- a/services/painting-process-data-simulator-service/app/config/settings.py
+++ b/services/painting-process-data-simulator-service/app/config/settings.py
@@ -1,5 +1,5 @@
 from pydantic_settings import BaseSettings
-
+from pydantic import AnyHttpUrl, field_validator
 class Settings(BaseSettings):
     # Azure Storage 설정
@@ -14,7 +14,7 @@ class Settings(BaseSettings):
     batch_size: int = 10
 
     # Backend 서비스 설정
-    backend_service_url: str = "http://localhost:8087/equipment-data"
+    backend_service_url: AnyHttpUrl = "http://localhost:8087/equipment-data"
 
     # HTTP 클라이언트 설정
     http_timeout: int = 30
@@ -22,6 +22,14 @@ class Settings(BaseSettings):
     # 로그 디렉토리
     log_directory: str = "logs"
 
+    # Validators
+    @field_validator("scheduler_interval_seconds")
+    @classmethod
+    def _positive_interval(cls, v: int) -> int:
+        if v <= 0:
+            raise ValueError("scheduler_interval_seconds must be > 0")
+        return v
+
     model_config = {
         "env_file": ".env",
         "env_file_encoding": "utf-8"
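Together with the AnyHttpUrl annotation, the validator moves bad configuration from a runtime failure deep inside APScheduler or httpx to a ValidationError at startup. A rough usage sketch (field values are illustrative; with no .env present, any required Azure fields would report their own errors alongside these):

from pydantic import ValidationError

from app.config.settings import Settings

for bad in ({"scheduler_interval_seconds": 0},      # rejected by _positive_interval
            {"backend_service_url": "not-a-url"}):  # rejected by AnyHttpUrl parsing
    try:
        Settings(_env_file=None, **bad)
    except ValidationError as err:
        print([e["loc"] for e in err.errors()])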
diff --git a/services/painting-process-data-simulator-service/app/main.py b/services/painting-process-data-simulator-service/app/main.py
index c2c0677..7001d46 100644
--- a/services/painting-process-data-simulator-service/app/main.py
+++ b/services/painting-process-data-simulator-service/app/main.py
@@ -9,7 +9,9 @@
 import os
 
 # 로깅 설정
-setup_logging()
+# Ensure log directory exists before creating FileHandler(s)
+os.makedirs(settings.log_directory, exist_ok=True)
+setup_logging()
 logger = logging.getLogger(__name__)
 
 @asynccontextmanager
@@ -23,9 +25,6 @@ async def lifespan(app: FastAPI):
         logger.error("AZURE_CONNECTION_STRING 환경 변수가 설정되지 않았습니다.")
         raise ValueError("AZURE_CONNECTION_STRING 환경 변수가 설정되지 않았습니다. .env 파일을 생성하거나 환경 변수를 설정해주세요.")
 
-    # 로그 디렉토리 생성
-    os.makedirs(settings.log_directory, exist_ok=True)
-
     logger.info(f"📁 로그 디렉토리: {settings.log_directory}")
     logger.info(f"🔧 스케줄러 간격: {settings.scheduler_interval_seconds}초")
 
diff --git a/services/painting-process-data-simulator-service/app/routers/simulator_router.py b/services/painting-process-data-simulator-service/app/routers/simulator_router.py
index 5de3e12..12f03fc 100644
--- a/services/painting-process-data-simulator-service/app/routers/simulator_router.py
+++ b/services/painting-process-data-simulator-service/app/routers/simulator_router.py
@@ -6,7 +6,10 @@
 @router.get("/status")
 async def get_simulator_status():
     """시뮬레이터 상태 조회"""
-    return simulator_scheduler.get_status()
+    status = simulator_scheduler.get_status()
+    # 내부 URL 노출 방지
+    status.pop("backend_service_url", None)
+    return status
 
 @router.post("/start")
 async def start_simulator():
@@ -29,5 +32,6 @@ async def stop_simulator():
             "message": "시뮬레이터가 중지되었습니다.",
             "status": simulator_scheduler.get_status()
         }
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=f"시뮬레이터 중지 실패: {str(e)}") from e
\ No newline at end of file
+    except Exception as e:
+        logger.exception("시뮬레이터 중지 실패")
+        raise HTTPException(status_code=500, detail="시뮬레이터 중지 실패") from e
\ No newline at end of file
diff --git a/services/painting-process-data-simulator-service/app/services/azure_storage.py b/services/painting-process-data-simulator-service/app/services/azure_storage.py
index df65011..249d447 100644
--- a/services/painting-process-data-simulator-service/app/services/azure_storage.py
+++ b/services/painting-process-data-simulator-service/app/services/azure_storage.py
@@ -35,9 +35,12 @@ async def list_data_files(self) -> List[str]:
                 if blob.name.endswith('.csv'):
                     blob_list.append(blob.name)
 
+            await container_client.close()
             return sorted(blob_list)
 
         except Exception as e:
             logger.error(f"❌ Painting 데이터 파일 목록 조회 실패: {e}")
+            if container_client:
+                await container_client.close()
             return []
 
     async def read_csv_data(self, blob_name: str) -> Optional[pd.DataFrame]:
@@ -55,11 +58,14 @@ async def read_csv_data(self, blob_name: str) -> Optional[pd.DataFrame]:
             content = await blob_data.readall()
             df = pd.read_csv(io.StringIO(content.decode('utf-8')))
 
+            await blob_client.close()
             logger.info(f"📁 파일 읽기 성공: {blob_name} ({len(df)} rows)")
             return df
 
         except Exception as e:
             logger.error(f"❌ 파일 읽기 실패 ({blob_name}): {e}")
+            if blob_client:
+                await blob_client.close()
             return None
 
     async def simulate_real_time_data(self) -> Optional[Dict[str, Any]]:
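The explicit close() calls plug the connection leak, but they must be repeated in every success and error path, and an exception raised before the client variable is bound would still skip them. Since the azure-storage-blob aio clients are async context managers, an async-with form closes on any exit; a sketch against that same SDK (function name and parameters are illustrative):

from azure.storage.blob.aio import BlobServiceClient

async def read_blob_bytes(conn_str: str, container: str, blob_name: str) -> bytes:
    # The service client is an async context manager, so every path out of
    # the block, including exceptions, closes the underlying connection.
    async with BlobServiceClient.from_connection_string(conn_str) as service:
        blob_client = service.get_blob_client(container=container, blob=blob_name)
        downloader = await blob_client.download_blob()
        return await downloader.readall()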
diff --git a/services/painting-process-data-simulator-service/app/services/backend_client.py b/services/painting-process-data-simulator-service/app/services/backend_client.py
index 822f3cf..d8864d1 100644
--- a/services/painting-process-data-simulator-service/app/services/backend_client.py
+++ b/services/painting-process-data-simulator-service/app/services/backend_client.py
@@ -5,15 +5,22 @@
 logger = logging.getLogger(__name__)
 
 class BackendClient:
-    async def send_to_backend(self, data: Dict[str, Any], url: str):
-        async with httpx.AsyncClient() as client:
+    async def send_to_backend(self, data: Dict[str, Any], url: str) -> bool:
+        async with httpx.AsyncClient(timeout=httpx.Timeout(10.0)) as client:
             try:
                 response = await client.post(url, json=data)
                 response.raise_for_status()
-                logger.info(f"✅ 데이터 전송 성공: {response.status_code}")
+                logger.info("✅ 데이터 전송 성공: %s", response.status_code)
+                return True
             except httpx.HTTPStatusError as e:
-                logger.error(f"❌ 데이터 전송 실패: {e.response.status_code} - {e.response.text}")
-            except Exception as e:
-                logger.error(f"❌ 데이터 전송 중 예외 발생: {e}")
+                # Avoid logging full response body to prevent PII leakage
+                logger.error("❌ 데이터 전송 실패(HTTP %s)", getattr(e.response, "status_code", "unknown"))
+                return False
+            except httpx.RequestError as e:
+                logger.error("❌ 네트워크 오류로 데이터 전송 실패: %r", e)
+                return False
+            except Exception:
+                logger.exception("❌ 데이터 전송 중 예외 발생")
+                return False
 
 backend_client = BackendClient()
\ No newline at end of file
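With send_to_backend now reporting success as a bool, callers can react to delivery failures instead of scraping logs. One possible caller-side sketch (the retry policy is illustrative, not part of the service; the str() cast on the settings URL anticipates the AnyHttpUrl fix made later in this series):

import asyncio

from app.config.settings import settings
from app.services.backend_client import backend_client

async def send_with_retry(record: dict, attempts: int = 3) -> bool:
    for n in range(attempts):
        if await backend_client.send_to_backend(record, str(settings.backend_service_url)):
            return True
        await asyncio.sleep(2 ** n)  # simple exponential backoff between attempts
    return False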
diff --git a/services/painting-process-data-simulator-service/app/services/scheduler_service.py b/services/painting-process-data-simulator-service/app/services/scheduler_service.py
index 3d789bc..8618b66 100644
--- a/services/painting-process-data-simulator-service/app/services/scheduler_service.py
+++ b/services/painting-process-data-simulator-service/app/services/scheduler_service.py
@@ -5,6 +5,7 @@
 from app.services.azure_storage import azure_storage
 from app.services.backend_client import backend_client
 import logging
+import asyncio
 
 logger = logging.getLogger(__name__)
 
@@ -25,6 +26,9 @@
             trigger=IntervalTrigger(seconds=settings.scheduler_interval_seconds),
             id='data_simulation',
             name='Data Simulation and Sending',
+            max_instances=1,        # prevent overlapping runs
+            coalesce=True,          # collapse missed runs to one run
+            misfire_grace_time=30,  # seconds to allow a delayed run
             replace_existing=True
         )
 
@@ -44,7 +48,7 @@
             logger.warning("⚠️ 스케줄러가 실행 중이 아닙니다.")
             return
 
-        self.scheduler.shutdown()
+        await asyncio.to_thread(self.scheduler.shutdown)
         self.is_running = False
         logger.info("🛑 시뮬레이터 중지됨")
 
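The three new add_job arguments control what happens when a run is slow or missed, and the asyncio.to_thread call keeps the blocking shutdown(wait=True) off the event loop. A standalone sketch of the same knobs (interval and job body are illustrative):

import asyncio
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.interval import IntervalTrigger

async def tick():
    await asyncio.sleep(2)  # pretend one simulation run is slow

async def main():
    scheduler = AsyncIOScheduler()
    scheduler.add_job(
        tick,
        trigger=IntervalTrigger(seconds=1),
        max_instances=1,        # a run that would overlap a live one is skipped with a warning
        coalesce=True,          # a backlog of missed runs collapses into a single run
        misfire_grace_time=30,  # a late run may still fire up to 30s after its slot
    )
    scheduler.start()
    await asyncio.sleep(5)
    # shutdown() waits for running jobs by default, so hand it to a worker
    # thread rather than stalling the event loop, as the service now does.
    await asyncio.to_thread(scheduler.shutdown)

asyncio.run(main())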
(%s)", e) + logging.basicConfig( level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", - handlers=[ - logging.StreamHandler(sys.stdout), - logging.FileHandler(f"{settings.log_directory}/service.log") - ] + handlers=handlers, ) diff --git a/services/painting-process-data-simulator-service/app/config/settings.py b/services/painting-process-data-simulator-service/app/config/settings.py index 13765f5..8717451 100644 --- a/services/painting-process-data-simulator-service/app/config/settings.py +++ b/services/painting-process-data-simulator-service/app/config/settings.py @@ -23,12 +23,19 @@ class Settings(BaseSettings): log_directory: str = "logs" # Validators - @field_validator("scheduler_interval_seconds") - @classmethod - def _positive_interval(cls, v: int) -> int: - if v <= 0: - raise ValueError("scheduler_interval_seconds must be > 0") - return v + @field_validator("scheduler_interval_seconds") + @classmethod + def _positive_interval(cls, v: int) -> int: + if v <= 0: + raise ValueError("scheduler_interval_seconds must be > 0") + return v + + @field_validator("batch_size", "http_timeout") + @classmethod + def _positive_ints(cls, v: int) -> int: + if v <= 0: + raise ValueError("batch_size and http_timeout must be > 0") + return v model_config = { "env_file": ".env", diff --git a/services/painting-process-data-simulator-service/app/services/backend_client.py b/services/painting-process-data-simulator-service/app/services/backend_client.py index d8864d1..23cdf99 100644 --- a/services/painting-process-data-simulator-service/app/services/backend_client.py +++ b/services/painting-process-data-simulator-service/app/services/backend_client.py @@ -1,12 +1,13 @@ import httpx import logging from typing import Any, Dict +from app.config.settings import settings logger = logging.getLogger(__name__) class BackendClient: async def send_to_backend(self, data: Dict[str, Any], url: str) -> bool: - async with httpx.AsyncClient(timeout=httpx.Timeout(10.0)) as client: + async with httpx.AsyncClient(timeout=httpx.Timeout(settings.http_timeout)) as client: try: response = await client.post(url, json=data) response.raise_for_status() From 7605f8e4ea3af33b8a25db9716062a57dfc49dfe Mon Sep 17 00:00:00 2001 From: TaeHyun aivle Date: Wed, 13 Aug 2025 05:44:43 +0000 Subject: [PATCH 09/12] =?UTF-8?q?=F0=9F=9A=91=EF=B8=8F=20Fix:=20TypeError?= =?UTF-8?q?=20caused=20by=20passing=20URL?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 래빗코드가 리팩토링한 코드 중 시뮬레이터의 backend_client.py 파일에서 httpx 라이브러리를 호출할 때, --- .../app/services/backend_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/painting-process-data-simulator-service/app/services/backend_client.py b/services/painting-process-data-simulator-service/app/services/backend_client.py index 23cdf99..0d2e47d 100644 --- a/services/painting-process-data-simulator-service/app/services/backend_client.py +++ b/services/painting-process-data-simulator-service/app/services/backend_client.py @@ -9,7 +9,7 @@ class BackendClient: async def send_to_backend(self, data: Dict[str, Any], url: str) -> bool: async with httpx.AsyncClient(timeout=httpx.Timeout(settings.http_timeout)) as client: try: - response = await client.post(url, json=data) + response = await client.post(str(url), json=data) response.raise_for_status() logger.info("✅ 데이터 전송 성공: %s", response.status_code) return True From 1f76b964d62b2fbe05a99c0afc518c3611669e1f Mon Sep 17 00:00:00 2001 From: TaeHyun aivle Date: Wed, 20 Aug 
From 1f76b964d62b2fbe05a99c0afc518c3611669e1f Mon Sep 17 00:00:00 2001
From: TaeHyun aivle
Date: Wed, 20 Aug 2025 14:03:37 +0000
Subject: [PATCH 10/12] =?UTF-8?q?=F0=9F=94=A7=20Fix:=20change=20return=20t?=
 =?UTF-8?q?ype=20and=20output=20format?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Output a plain JSON object, as the simulator does, instead of wrapping the result as {predictions: [result]}.
---
 .../app/routers/predict.py                    | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/services/painting-process-equipment-defect-detection-model-service/app/routers/predict.py b/services/painting-process-equipment-defect-detection-model-service/app/routers/predict.py
index 31ca3ef..cbf0330 100644
--- a/services/painting-process-equipment-defect-detection-model-service/app/routers/predict.py
+++ b/services/painting-process-equipment-defect-detection-model-service/app/routers/predict.py
@@ -45,10 +45,9 @@ async def predict_issue(
             save_issue_log(result, config)
         except Exception as e:
             print(f"이슈 로그를 저장하는 중에 오류가 발생했습니다: {e}")
-            # 로그 저장 실패는 Critical 오류가 아니므로 200 OK를 반환하되, 경고를 포함합니다.
-            return {"predictions": [result], "warning": f"로그 파일 저장에 실패했습니다: {str(e)}"}
+            # 로그 저장 실패는 Critical 오류가 아니므로 API 호출 자체는 성공으로 간주하고 result를 반환한다.
 
-    return {"predictions": [result]}
+    return result
 
 @router.post("/file")
 async def predict_issue_from_file(

From ef1b17b5fe375eb5bd58eed3a0eee1a8e3ed18e1 Mon Sep 17 00:00:00 2001
From: TaeHyun aivle
Date: Thu, 21 Aug 2025 02:05:28 +0000
Subject: [PATCH 11/12] =?UTF-8?q?=F0=9F=94=A7=20Fix:=20Update=20server=20p?=
 =?UTF-8?q?ort=20configuration?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Reflect the Spring Boot port change to 8091.
---
 services/painting-process-data-simulator-service/.env.example | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/services/painting-process-data-simulator-service/.env.example b/services/painting-process-data-simulator-service/.env.example
index f9fed58..8469b87 100644
--- a/services/painting-process-data-simulator-service/.env.example
+++ b/services/painting-process-data-simulator-service/.env.example
@@ -32,8 +32,7 @@ BATCH_SIZE=10
 
 # 도커 사용시 시뮬레이터가 데이터를 전송할 Spring Boot 백엔드 서비스의 전체 URL
-BACKEND_SERVICE_URL=http://paintingprocessmonitoring:8087/equipment-data
-# BACKEND_SERVICE_URL=http://localhost:8087/equipment-data # uvicorn으로 실행할때 사용
+BACKEND_SERVICE_URL=http://localhost:8091/equipment-data
 
 
 #==============================

From eb0a50aa66d63bde666abd270c66542ad867936c Mon Sep 17 00:00:00 2001
From: TaeHyun aivle
Date: Sat, 23 Aug 2025 13:29:52 +0000
Subject: [PATCH 12/12] =?UTF-8?q?=F0=9F=94=A7=20Fix:=20Update=20gateway=20?=
 =?UTF-8?q?connection=20port?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Connect to the gateway rather than directly to the server.
---
 .../app/config/settings.py                    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/services/painting-process-data-simulator-service/app/config/settings.py b/services/painting-process-data-simulator-service/app/config/settings.py
index 8717451..ae72b4d 100644
--- a/services/painting-process-data-simulator-service/app/config/settings.py
+++ b/services/painting-process-data-simulator-service/app/config/settings.py
@@ -14,7 +14,7 @@ class Settings(BaseSettings):
     batch_size: int = 10
 
     # Backend 서비스 설정
-    backend_service_url: AnyHttpUrl = "http://localhost:8087/equipment-data"
+    backend_service_url: AnyHttpUrl = "http://localhost:8088/equipment-data"
 
     # HTTP 클라이언트 설정
     http_timeout: int = 30
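For consumers of the predict endpoint, the return-shape change in PATCH 10 above removes one level of unwrapping; a minimal before-and-after sketch with illustrative field values:

# Shape returned before PATCH 10:
old_body = {"predictions": [{"issue": "painting-equipment issue", "machineId": 3}]}
# Shape returned now: the prediction object itself.
new_body = {"issue": "painting-equipment issue", "machineId": 3}

assert old_body["predictions"][0] == new_body  # same data, no wrapper to index into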