Coverage for src / local_deep_research / llm / providers / base.py: 100%
11 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-14 23:55 +0000
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-14 23:55 +0000
1"""Base class for LLM providers."""
def normalize_provider(provider):
    """Return the canonical lowercase form of a provider name.

    Route/service code should funnel every provider comparison
    through this helper so casing stays consistent. Falsy input
    (None, empty string) normalizes to None.
    """
    if not provider:
        return None
    return provider.lower()
class BaseLLMProvider:
    """Base class for all LLM providers.

    Declares the minimal interface every provider must satisfy;
    concrete providers subclass this and override the methods
    below as appropriate.
    """

    @classmethod
    def create_llm(cls, model_name=None, temperature=0.7, **kwargs):
        """Build and return a configured LangChain chat model.

        Subclasses MUST override this method; the base implementation
        always raises.

        Args:
            model_name: Name of the model to use.
            temperature: Model temperature (0.0-1.0).
            **kwargs: Additional arguments including settings_snapshot.

        Returns:
            A configured BaseChatModel instance.

        Raises:
            NotImplementedError: Unless overridden by a subclass.
        """
        raise NotImplementedError(f"{cls.__name__} must implement create_llm()")

    @classmethod
    def is_available(cls, settings_snapshot=None):
        """Report whether this provider can currently be used.

        Fail-closed default: always False. Subclasses MUST override
        this with their own availability check.
        """
        return False

    @classmethod
    def requires_auth_for_models(cls):
        """Report whether listing models requires authentication.

        Defaults to True; subclasses that permit unauthenticated
        model listing (e.g. local providers) override this to
        return False.
        """
        return True