From 14f26c34686d70e6e34450ffc730cc57465fd55d Mon Sep 17 00:00:00 2001
From: skytin1004
Date: Sun, 19 Jan 2025 07:22:00 +0900
Subject: [PATCH] Update base config test to expect ValueError for LLM
 configuration checks

---
 src/co_op_translator/config/base_config.py        | 8 +++-----
 tests/co_op_translator/config/test_base_config.py | 6 +++---
 2 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/src/co_op_translator/config/base_config.py b/src/co_op_translator/config/base_config.py
index 1b2f07d..fdc7b93 100644
--- a/src/co_op_translator/config/base_config.py
+++ b/src/co_op_translator/config/base_config.py
@@ -34,10 +34,8 @@ def check_configuration():
     Raises:
-        OSError: If no LLM service is properly configured
+        ValueError: If no LLM service is properly configured
     """
-    try:
-        LLMConfig.get_available_provider()
-    except ValueError as e:
-        raise OSError("No LLM service is properly configured") from e
+
+    LLMConfig.check_configuration()
 
     # Vision configuration is optional
     VisionConfig.check_configuration()
diff --git a/tests/co_op_translator/config/test_base_config.py b/tests/co_op_translator/config/test_base_config.py
index b827c51..5f05f2f 100644
--- a/tests/co_op_translator/config/test_base_config.py
+++ b/tests/co_op_translator/config/test_base_config.py
@@ -11,7 +11,7 @@ def azure_openai_env_vars():
     return {
         'AZURE_OPENAI_API_KEY': 'fake_openai_key',
         'AZURE_OPENAI_ENDPOINT': 'https://fake-openai-endpoint.com',
-        'AZURE_OPENAI_MODEL_NAME': 'gpt-3.5',
+        'AZURE_OPENAI_MODEL_NAME': 'gpt',
         'AZURE_OPENAI_CHAT_DEPLOYMENT_NAME': 'chat-deployment',
         'AZURE_OPENAI_API_VERSION': 'v1'
     }
@@ -60,7 +60,7 @@ def test_config_with_openai_only(openai_env_vars):
 def test_config_with_no_llm_service():
     """Test configuration with no LLM service available"""
     with patch.dict(os.environ, {}, clear=True):
-        with pytest.raises(OSError) as excinfo:
+        with pytest.raises(ValueError) as excinfo:
             Config.check_configuration()
         assert "No LLM service is properly configured" in str(excinfo.value)
 
@@ -71,6 +71,6 @@ def test_config_with_partial_azure_openai():
         'AZURE_OPENAI_ENDPOINT': 'https://fake-endpoint.com'
     }
     with patch.dict(os.environ, partial_vars, clear=True):
-        with pytest.raises(OSError) as excinfo:
+        with pytest.raises(ValueError) as excinfo:
             Config.check_configuration()
         assert "No LLM service is properly configured" in str(excinfo.value)
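
For context on what this change means for callers: a minimal sketch of a pre-flight configuration check after the patch, assuming `Config` is importable from `co_op_translator.config.base_config` as the updated tests imply; the `ensure_configured` helper and the entry point are hypothetical illustrations, not part of the repository:

```python
# Hypothetical caller sketch (not part of the patch): after this change,
# a missing LLM configuration surfaces as ValueError from
# LLMConfig.check_configuration() instead of the old OSError wrapper.
from co_op_translator.config.base_config import Config  # import path assumed from the tests


def ensure_configured() -> None:
    """Abort with a readable message when no LLM service is configured."""
    try:
        Config.check_configuration()
    except ValueError as e:
        # Message asserted by the updated tests:
        # "No LLM service is properly configured"
        raise SystemExit(f"Configuration error: {e}")


if __name__ == "__main__":
    ensure_configured()
    print("LLM configuration OK")
```

Delegating to `LLMConfig.check_configuration()` and letting its `ValueError` propagate keeps one exception type and one message for callers and tests to match, instead of re-wrapping it as `OSError` at the `Config` layer.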