Skip to content

Commit

Permalink
Update base config test to expect ValueError for LLM configuration checks
Browse files Browse the repository at this point in the history
  • Loading branch information
skytin1004 committed Jan 18, 2025
1 parent 339008e commit 14f26c3
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 7 deletions.
6 changes: 2 additions & 4 deletions src/co_op_translator/config/base_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,8 @@ def check_configuration():
Raises:
OSError: If no LLM service is properly configured
"""
try:
LLMConfig.get_available_provider()
except ValueError as e:
raise OSError("No LLM service is properly configured") from e

LLMConfig.check_configuration()

# Vision configuration is optional
VisionConfig.check_configuration()
6 changes: 3 additions & 3 deletions tests/co_op_translator/config/test_base_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ def azure_openai_env_vars():
return {
'AZURE_OPENAI_API_KEY': 'fake_openai_key',
'AZURE_OPENAI_ENDPOINT': 'https://fake-openai-endpoint.com',
'AZURE_OPENAI_MODEL_NAME': 'gpt-3.5',
'AZURE_OPENAI_MODEL_NAME': 'gpt',
'AZURE_OPENAI_CHAT_DEPLOYMENT_NAME': 'chat-deployment',
'AZURE_OPENAI_API_VERSION': 'v1'
}
Expand Down Expand Up @@ -60,7 +60,7 @@ def test_config_with_openai_only(openai_env_vars):
def test_config_with_no_llm_service():
"""Test configuration with no LLM service available"""
with patch.dict(os.environ, {}, clear=True):
with pytest.raises(OSError) as excinfo:
with pytest.raises(ValueError) as excinfo:
Config.check_configuration()
assert "No LLM service is properly configured" in str(excinfo.value)

Expand All @@ -71,6 +71,6 @@ def test_config_with_partial_azure_openai():
'AZURE_OPENAI_ENDPOINT': 'https://fake-endpoint.com'
}
with patch.dict(os.environ, partial_vars, clear=True):
with pytest.raises(OSError) as excinfo:
with pytest.raises(ValueError) as excinfo: # Changed to match ValueError
Config.check_configuration()
assert "No LLM service is properly configured" in str(excinfo.value)

0 comments on commit 14f26c3

Please sign in to comment.