From 2f486e5b21bb2cae5caf15833e1caac126f7e073 Mon Sep 17 00:00:00 2001
From: John Fitzpatrick
Date: Sat, 20 Sep 2025 13:44:23 -0700
Subject: [PATCH] test: Update test expectations for new Ollama default URL

Updated test_async_llm_provider_service.py to expect host.docker.internal
instead of localhost for Ollama URLs to match the new default configuration.
---
 python/tests/test_async_llm_provider_service.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/python/tests/test_async_llm_provider_service.py b/python/tests/test_async_llm_provider_service.py
index 6c012897..e52c2242 100644
--- a/python/tests/test_async_llm_provider_service.py
+++ b/python/tests/test_async_llm_provider_service.py
@@ -69,7 +69,7 @@ class TestAsyncLLMProviderService:
         return {
             "provider": "ollama",
             "api_key": "ollama",
-            "base_url": "http://localhost:11434/v1",
+            "base_url": "http://host.docker.internal:11434/v1",
             "chat_model": "llama2",
             "embedding_model": "nomic-embed-text",
         }
@@ -127,7 +127,7 @@ class TestAsyncLLMProviderService:
         async with get_llm_client() as client:
             assert client == mock_client
             mock_openai.assert_called_once_with(
-                api_key="ollama", base_url="http://localhost:11434/v1"
+                api_key="ollama", base_url="http://host.docker.internal:11434/v1"
             )
 
     @pytest.mark.asyncio
@@ -216,7 +216,7 @@ class TestAsyncLLMProviderService:
         }
         mock_credential_service.get_active_provider.return_value = config_without_key
         mock_credential_service.get_credentials_by_category = AsyncMock(return_value={
-            "LLM_BASE_URL": "http://localhost:11434"
+            "LLM_BASE_URL": "http://host.docker.internal:11434"
         })
 
         with patch(
@@ -234,7 +234,7 @@ class TestAsyncLLMProviderService:
         # Verify it created an Ollama client with correct params
         mock_openai.assert_called_once_with(
             api_key="ollama",
-            base_url="http://localhost:11434/v1"
+            base_url="http://host.docker.internal:11434/v1"
         )
 
     @pytest.mark.asyncio
@@ -480,7 +480,7 @@ class TestAsyncLLMProviderService:
         """Test creating clients for different providers in sequence"""
         configs = [
             {"provider": "openai", "api_key": "openai-key", "base_url": None},
-            {"provider": "ollama", "api_key": "ollama", "base_url": "http://localhost:11434/v1"},
+            {"provider": "ollama", "api_key": "ollama", "base_url": "http://host.docker.internal:11434/v1"},
             {
                 "provider": "google",
                 "api_key": "google-key",
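
Note (not part of the patch): the diff only touches test expectations, but together the hunks imply how the service is assumed to build its Ollama client: a bare LLM_BASE_URL credential such as http://host.docker.internal:11434 ends up as an OpenAI-compatible endpoint with a /v1 suffix, and host.docker.internal replaces localhost because, inside a Docker container, localhost is the container itself while host.docker.internal is Docker's alias for the host machine where Ollama typically runs. The sketch below is a minimal illustration under those assumptions; the names make_ollama_client and DEFAULT_OLLAMA_BASE_URL are hypothetical and do not come from the repository.

import os

from openai import AsyncOpenAI

# Assumed default, inferred from the updated test expectations above;
# inside a container, "localhost" points at the container itself, while
# host.docker.internal resolves to the Docker host running Ollama.
DEFAULT_OLLAMA_BASE_URL = "http://host.docker.internal:11434"


def make_ollama_client(base_url: str | None = None) -> AsyncOpenAI:
    # Fall back to the stored credential or the assumed default.
    url = base_url or os.environ.get("LLM_BASE_URL", DEFAULT_OLLAMA_BASE_URL)
    url = url.rstrip("/")
    # The third and fourth hunks above expect the client to receive a /v1
    # suffix even when the stored credential omits it.
    if not url.endswith("/v1"):
        url = f"{url}/v1"
    # Ollama ignores the API key, but the OpenAI client requires one;
    # the tests use the placeholder "ollama".
    return AsyncOpenAI(api_key="ollama", base_url=url)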