Coverage for src/qdrant_loader_core/llm/factory.py: 93%

58 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-03-18 04:44 +0000

1from __future__ import annotations 

2 

3from urllib.parse import urlparse 

4 

5from .settings import LLMSettings 

6from .types import ChatClient, EmbeddingsClient, LLMProvider, TokenCounter 

7 

8 

class _NoopEmbeddings(EmbeddingsClient):
    """Placeholder embeddings client used when no real provider is wired up."""

    async def embed(self, inputs: list[str]) -> list[list[float]]:
        # Fail loudly rather than fabricating vectors for the caller.
        raise NotImplementedError("Embeddings provider not implemented")

12 

13 

class _NoopChat(ChatClient):
    """Placeholder chat client used when no real provider is wired up."""

    async def chat(self, messages, **kwargs):  # type: ignore[no-untyped-def]
        # Fail loudly rather than fabricating a chat response.
        raise NotImplementedError("Chat provider not implemented")

17 

18 

class _NoopTokenizer(TokenCounter):
    """Fallback token counter that approximates usage by character count."""

    def count(self, text: str) -> int:
        # Without a real tokenizer, the character count is the cheapest
        # deterministic estimate available.
        return len(text)

22 

23 

class _NoopProvider(LLMProvider):
    """Inert provider returned when no concrete LLM backend can be resolved.

    Each accessor hands back a fresh no-op client so callers always receive
    a usable object; the clients themselves raise on actual use.
    """

    def embeddings(self) -> EmbeddingsClient:
        return _NoopEmbeddings()

    def chat(self) -> ChatClient:
        return _NoopChat()

    def tokenizer(self) -> TokenCounter:
        return _NoopTokenizer()

33 

34 

# Sentinel distinguishing "not yet resolved" from "resolved to None (unavailable)".
_SENTINEL = object()
# Cache for the lazily imported AzureOpenAIProvider class; holds _SENTINEL until
# the first lookup, then either the class or None when the import failed.
_azure_provider_class: type | None | object = _SENTINEL

37 

38 

def _get_azure_provider_class():  # type: ignore[return]
    """Lazily resolve the optional AzureOpenAIProvider (cached after first call).

    Returns the provider class, or None when the optional Azure dependency
    cannot be imported. The result is memoized in ``_azure_provider_class``.
    """
    global _azure_provider_class
    # Fast path: a previous call already resolved the class (or its absence).
    if _azure_provider_class is not _SENTINEL:
        return _azure_provider_class
    try:
        from .providers.azure_openai import AzureOpenAIProvider  # type: ignore
    except Exception:  # pragma: no cover - optional dependency surface
        _azure_provider_class = None
    else:
        _azure_provider_class = AzureOpenAIProvider
    return _azure_provider_class

50 

51 

52def _safe_hostname(url: str | None) -> str | None: 

53 if not url: 

54 return None 

55 try: 

56 host = urlparse(url).hostname 

57 return host.lower() if host else None 

58 except Exception: 

59 return None 

60 

61 

def create_provider(settings: LLMSettings) -> LLMProvider:
    """Create an LLM provider from *settings*.

    Routing order (first match wins):
      1. Azure OpenAI -- provider name contains "azure", or the base URL host
         is an Azure OpenAI / Cognitive Services endpoint.
      2. OpenAI / OpenAI-compatible -- provider name or base URL mentions
         "openai".
      3. Ollama -- explicit "ollama" provider name, or a localhost base URL.
      4. Fallback -- an inert no-op provider.

    Phase 0 contract: this function always returns a usable LLMProvider;
    construction failures degrade to the no-op provider instead of raising.
    """
    provider_name = (settings.provider or "").lower()
    base_url = settings.base_url or ""
    base_host = _safe_hostname(base_url)

    # Route Azure before generic OpenAI routing: Azure hosts also contain
    # "openai", so the generic check below would otherwise shadow this branch.
    is_azure = "azure" in provider_name or (
        base_host is not None
        and (
            base_host == "openai.azure.com"
            or base_host.endswith(".openai.azure.com")
            or base_host == "cognitiveservices.azure.com"
            or base_host.endswith(".cognitiveservices.azure.com")
        )
    )
    if is_azure:
        azure_cls = _get_azure_provider_class()
        if azure_cls is not None:
            try:
                return azure_cls(settings)  # type: ignore[misc]
            except Exception:
                return _NoopProvider()
        # Azure dependency unavailable: fall through to generic routing below,
        # where an Azure URL (containing "openai") may still match OpenAI.

    if "openai" in provider_name or "openai" in base_url.lower():
        from .providers.openai import OpenAIProvider

        try:
            return OpenAIProvider(settings)
        except Exception:
            return _NoopProvider()

    if provider_name == "ollama" or (base_host in ("localhost", "127.0.0.1")):
        from .providers.ollama import OllamaProvider

        # Consistency/robustness fix: degrade construction failure to the
        # no-op provider, matching the OpenAI and Azure branches above so the
        # "always returns a provider" contract holds on every path.
        try:
            return OllamaProvider(settings)
        except Exception:
            return _NoopProvider()

    return _NoopProvider()