Coverage for src / local_deep_research / llm / providers / openai_base.py: 87%
119 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-02-25 01:07 +0000
« prev ^ index » next coverage.py v7.13.4, created at 2026-02-25 01:07 +0000
1"""Base OpenAI-compatible endpoint provider for Local Deep Research."""
3from langchain_openai import ChatOpenAI
4from loguru import logger
6from ...config.thread_settings import (
7 get_llm_setting_from_snapshot as get_setting_from_snapshot,
8 NoSettingsContextError,
9)
10from ...utilities.url_utils import normalize_url
class OpenAICompatibleProvider:
    """Base class for OpenAI-compatible API providers.

    This class provides a common implementation for any service that offers
    an OpenAI-compatible API endpoint (Google, OpenRouter, Groq, Together, etc.)
    """

    # Override these in subclasses
    provider_name = "openai_endpoint"  # Name used in logs
    api_key_setting = "llm.openai_endpoint.api_key"  # Settings key for API key
    url_setting = None  # Settings key for URL (e.g., "llm.lmstudio.url")
    default_base_url = "https://api.openai.com/v1"  # Default endpoint URL
    default_model = "gpt-3.5-turbo"  # Default model if none specified

    @classmethod
    def _apply_optional_settings(cls, llm_params, settings_snapshot):
        """Copy optional LLM tuning settings from settings into llm_params.

        Shared by create_llm and _create_llm_instance so both apply the
        same set of optional parameters.

        Each setting is independent and optional: a missing settings
        context (NoSettingsContextError) leaves the parameter unset, as
        does an unset (None) value.

        Args:
            llm_params: Parameter dict being built for ChatOpenAI (mutated
                in place)
            settings_snapshot: Optional settings snapshot dict
        """
        # (settings key, ChatOpenAI param name, converter or None).
        # A converter implies the original truthy-value check: falsy
        # values (e.g. 0 or "") are treated as "unset" for max_tokens,
        # while streaming=False etc. are still passed through.
        optional_settings = (
            ("llm.max_tokens", "max_tokens", int),
            ("llm.streaming", "streaming", None),
            ("llm.max_retries", "max_retries", None),
            ("llm.request_timeout", "request_timeout", None),
        )
        for setting_key, param_key, converter in optional_settings:
            try:
                value = get_setting_from_snapshot(
                    setting_key,
                    default=None,
                    settings_snapshot=settings_snapshot,
                )
            except NoSettingsContextError:
                continue  # Optional parameter; no settings context
            if value is None:
                continue
            if converter is not None:
                if not value:
                    continue  # Falsy (e.g. 0) means "not configured"
                value = converter(value)
            llm_params[param_key] = value

    @classmethod
    def create_llm(cls, model_name=None, temperature=0.7, **kwargs):
        """Factory function for OpenAI-compatible LLMs.

        Args:
            model_name: Name of the model to use
            temperature: Model temperature (0.0-1.0)
            **kwargs: Additional arguments including settings_snapshot

        Returns:
            A configured ChatOpenAI instance

        Raises:
            ValueError: If API key is not configured
        """
        settings_snapshot = kwargs.get("settings_snapshot")

        # Get API key from settings (if provider requires one)
        if cls.api_key_setting:
            api_key = get_setting_from_snapshot(
                cls.api_key_setting,
                default=None,
                settings_snapshot=settings_snapshot,
            )

            if not api_key:
                logger.error(
                    f"{cls.provider_name} API key not found in settings"
                )
                raise ValueError(
                    f"{cls.provider_name} API key not configured. "
                    f"Please set {cls.api_key_setting} in settings."
                )
        else:
            # Provider doesn't require API key (e.g., LM Studio)
            api_key = kwargs.get("api_key", "dummy-key")

        # Use default model if none specified
        if not model_name:
            model_name = cls.default_model

        # Get endpoint URL (can be overridden in kwargs for flexibility)
        base_url = kwargs.get("base_url", cls.default_base_url)
        base_url = normalize_url(base_url) if base_url else cls.default_base_url

        # Build parameters for OpenAI client
        llm_params = {
            "model": model_name,
            "api_key": api_key,
            "base_url": base_url,
            "temperature": temperature,
        }

        # Add optional parameters (max_tokens, streaming, max_retries,
        # request_timeout) when configured in settings
        cls._apply_optional_settings(llm_params, settings_snapshot)

        logger.info(
            f"Creating {cls.provider_name} LLM with model: {model_name}, "
            f"temperature: {temperature}, endpoint: {base_url}"
        )

        return ChatOpenAI(**llm_params)

    @classmethod
    def _create_llm_instance(cls, model_name=None, temperature=0.7, **kwargs):
        """Internal method to create LLM instance with provided parameters.

        This bypasses API key checking for providers that handle auth
        differently; the caller is responsible for providing api_key in
        kwargs when one is needed.
        """
        settings_snapshot = kwargs.get("settings_snapshot")

        # Use default model if none specified
        if not model_name:
            model_name = cls.default_model

        # Get endpoint URL (can be overridden in kwargs for flexibility)
        base_url = kwargs.get("base_url", cls.default_base_url)
        base_url = normalize_url(base_url) if base_url else cls.default_base_url

        # Get API key from kwargs (caller is responsible for providing it)
        api_key = kwargs.get("api_key", "dummy-key")

        # Build parameters for OpenAI client
        llm_params = {
            "model": model_name,
            "api_key": api_key,
            "base_url": base_url,
            "temperature": temperature,
        }

        # Apply the same optional parameters as create_llm. (The previous
        # implementation only honored max_tokens here despite its comment
        # claiming parity with create_llm.)
        cls._apply_optional_settings(llm_params, settings_snapshot)

        return ChatOpenAI(**llm_params)

    @classmethod
    def is_available(cls, settings_snapshot=None):
        """Check if this provider is available.

        Args:
            settings_snapshot: Optional settings snapshot to use

        Returns:
            True if API key is configured (or not needed), False otherwise
        """
        try:
            # If provider doesn't require API key, it's available
            if not cls.api_key_setting:
                return True

            # Check if API key is configured
            api_key = get_setting_from_snapshot(
                cls.api_key_setting,
                default=None,
                settings_snapshot=settings_snapshot,
            )
            return bool(api_key and str(api_key).strip())
        except Exception:
            # Any failure to read settings means "not available"
            return False

    @classmethod
    def requires_auth_for_models(cls):
        """Check if this provider requires authentication for listing models.

        Override in subclasses that don't require auth.

        Returns:
            True if authentication is required, False otherwise
        """
        return True

    @classmethod
    def _get_base_url_for_models(cls, settings_snapshot=None):
        """Get the base URL to use for listing models.

        Reads from url_setting if defined, otherwise uses default_base_url.

        Args:
            settings_snapshot: Optional settings snapshot dict

        Returns:
            The base URL string to use for model listing
        """
        if cls.url_setting:
            # Use get_setting_from_snapshot which handles both settings_snapshot
            # and thread-local context, with proper fallback
            url = get_setting_from_snapshot(
                cls.url_setting,
                default=None,
                settings_snapshot=settings_snapshot,
            )
            if url:
                return url.rstrip("/")

        return cls.default_base_url

    @classmethod
    def list_models_for_api(cls, api_key=None, base_url=None):
        """List available models for API endpoint use.

        This method is designed to be called from Flask routes.

        Args:
            api_key: Optional API key (if None and required, returns empty list)
            base_url: Optional base URL to use (if None, uses cls.default_base_url)

        Returns:
            List of model dictionaries with 'value' and 'label' keys
        """
        try:
            # Check if auth is required
            if cls.requires_auth_for_models():
                if not api_key:
                    logger.debug(
                        f"{cls.provider_name} requires API key for model listing"
                    )
                    return []
            else:
                # Use a dummy key for providers that don't require auth
                api_key = api_key or "dummy-key-for-models-list"

            from openai import OpenAI

            # Use provided base_url or fall back to class default
            if not base_url:
                base_url = cls.default_base_url

            # Create OpenAI client (uses library defaults for timeout)
            client = OpenAI(api_key=api_key, base_url=base_url)

            # Fetch models
            logger.debug(
                f"Fetching models from {cls.provider_name} at {base_url}"
            )
            models_response = client.models.list()

            models = []
            for model in models_response.data:
                if model.id:
                    models.append(
                        {
                            "value": model.id,
                            "label": model.id,
                        }
                    )

            logger.info(f"Found {len(models)} models from {cls.provider_name}")
            return models

        except Exception as e:
            # Use warning level since connection failures are expected
            # when the provider is not running (e.g., LM Studio not started)
            logger.warning(
                f"Could not list models from {cls.provider_name}: {e}"
            )
            return []

    @classmethod
    def list_models(cls, settings_snapshot=None):
        """List available models from this provider.

        Args:
            settings_snapshot: Optional settings snapshot to use

        Returns:
            List of model dictionaries with 'value' and 'label' keys
        """
        try:
            # Get API key from settings if auth is required
            api_key = None
            if cls.requires_auth_for_models():
                api_key = get_setting_from_snapshot(
                    cls.api_key_setting,
                    default=None,
                    settings_snapshot=settings_snapshot,
                )

            # Get base URL from settings if provider has configurable URL
            base_url = cls._get_base_url_for_models(settings_snapshot)

            return cls.list_models_for_api(api_key, base_url)

        except Exception:
            logger.exception(f"Error listing models from {cls.provider_name}")
            return []