Coverage for src/qdrant_loader_core/llm/factory.py: 92%

50 statements  

coverage.py v7.10.6, created at 2025-09-08 06:01 +0000

from __future__ import annotations

from urllib.parse import urlparse

from .providers.ollama import OllamaProvider
from .providers.openai import OpenAIProvider

try:
    from .providers.azure_openai import AzureOpenAIProvider  # type: ignore
except Exception:  # pragma: no cover - optional dependency surface
    AzureOpenAIProvider = None  # type: ignore
from .settings import LLMSettings
from .types import ChatClient, EmbeddingsClient, LLMProvider, TokenCounter


class _NoopEmbeddings(EmbeddingsClient):
    async def embed(self, inputs: list[str]) -> list[list[float]]:
        raise NotImplementedError("Embeddings provider not implemented")


class _NoopChat(ChatClient):
    async def chat(self, messages, **kwargs):  # type: ignore[no-untyped-def]
        raise NotImplementedError("Chat provider not implemented")


class _NoopTokenizer(TokenCounter):
    def count(self, text: str) -> int:  # naive char-count fallback
        return len(text)


class _NoopProvider(LLMProvider):
    def embeddings(self) -> EmbeddingsClient:
        return _NoopEmbeddings()

    def chat(self) -> ChatClient:
        return _NoopChat()

    def tokenizer(self) -> TokenCounter:
        return _NoopTokenizer()


def _safe_hostname(url: str | None) -> str | None:
    if not url:
        return None
    try:
        host = urlparse(url).hostname
        return host.lower() if host else None
    except Exception:
        return None


def create_provider(settings: LLMSettings) -> LLMProvider:
    """Create a provider from settings.

    Phase 0: route OpenAI/OpenAI-compatible settings to OpenAIProvider when
    available; otherwise return a no-op provider. Ollama returns a stub
    provider for now.
    """
    provider_name = (settings.provider or "").lower()
    base_url = settings.base_url or ""
    base_host = _safe_hostname(base_url)

    # Route Azure before generic OpenAI routing
    is_azure = "azure" in provider_name or (
        base_host is not None
        and (
            base_host == "openai.azure.com"
            or base_host.endswith(".openai.azure.com")
            or base_host == "cognitiveservices.azure.com"
            or base_host.endswith(".cognitiveservices.azure.com")
        )
    )
    if is_azure and AzureOpenAIProvider is not None:  # type: ignore[truthy-bool]
        try:
            return AzureOpenAIProvider(settings)  # type: ignore[misc]
        except Exception:
            return _NoopProvider()

    if "openai" in provider_name or "openai" in base_url.lower():
        try:
            return OpenAIProvider(settings)
        except Exception:
            return _NoopProvider()

    if provider_name == "ollama" or (base_host in ("localhost", "127.0.0.1")):
        return OllamaProvider(settings)

    return _NoopProvider()
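
For orientation, a minimal usage sketch of the routing above (not part of the covered file). It assumes the package is importable as qdrant_loader_core and that LLMSettings accepts provider and base_url as keyword arguments, since those are the only fields create_provider reads; the real constructor may require more.

# Minimal sketch; the LLMSettings keyword arguments below are hypothetical.
from qdrant_loader_core.llm.factory import create_provider
from qdrant_loader_core.llm.settings import LLMSettings

# "openai" in the provider name routes toward OpenAIProvider; if its
# construction raises, create_provider falls back to the no-op provider.
settings = LLMSettings(provider="openai", base_url="https://api.openai.com/v1")
provider = create_provider(settings)

# An unrecognized provider and host fall through to the no-op provider,
# whose tokenizer counts characters rather than tokens.
fallback = create_provider(LLMSettings(provider="custom", base_url="https://example.invalid"))
print(fallback.tokenizer().count("hello"))  # 5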