Coverage for src / local_deep_research / llm / providers / implementations / openai.py: 27%
80 statements
« prev ^ index » next coverage.py v7.12.0, created at 2026-01-11 07:37 +0000
1"""OpenAI LLM provider for Local Deep Research."""
3from langchain_openai import ChatOpenAI
4from loguru import logger
6from ....config.thread_settings import (
7 get_setting_from_snapshot as _get_setting_from_snapshot,
8 NoSettingsContextError,
9)
10from ...llm_registry import register_llm
11from ..openai_base import OpenAICompatibleProvider
def get_setting_from_snapshot(
    key, default=None, username=None, settings_snapshot=None
):
    """Read a setting from the context snapshot only — never the database.

    Worker threads must not open DB sessions, so this module always goes
    through the snapshot. Delegates to the shared implementation with the
    fallback-LLM check enabled.
    """
    return _get_setting_from_snapshot(
        key,
        default=default,
        username=username,
        settings_snapshot=settings_snapshot,
        check_fallback_llm=True,
    )
class OpenAIProvider(OpenAICompatibleProvider):
    """OpenAI provider for Local Deep Research.

    This is the official OpenAI API provider.
    """

    provider_name = "OpenAI"
    api_key_setting = "llm.openai.api_key"
    default_model = "gpt-3.5-turbo"
    default_base_url = "https://api.openai.com/v1"

    # Metadata for auto-discovery
    provider_key = "OPENAI"
    company_name = "OpenAI"
    region = "US"
    country = "United States"
    data_location = "United States"
    is_cloud = True

    # Optional settings forwarded to ChatOpenAI when present in the snapshot.
    # Each entry: (setting key, ChatOpenAI kwarg, require_truthy, converter).
    # require_truthy=True reproduces the original `if value:` guard (skips
    # "", 0, False); require_truthy=False reproduces `if value is not None:`
    # (so e.g. streaming=False is still forwarded).
    _OPTIONAL_SETTINGS = (
        ("llm.openai.api_base", "openai_api_base", True, None),
        ("llm.openai.organization", "openai_organization", True, None),
        ("llm.streaming", "streaming", False, None),
        ("llm.max_retries", "max_retries", False, None),
        ("llm.request_timeout", "request_timeout", False, None),
        ("llm.max_tokens", "max_tokens", True, int),
    )

    @classmethod
    def _collect_optional_params(cls, settings_snapshot):
        """Return a dict of optional ChatOpenAI kwargs found in settings.

        Settings that raise NoSettingsContextError, are None, or fail the
        per-entry truthiness rule are silently skipped — all of these
        parameters are optional.
        """
        params = {}
        for setting_key, param_name, require_truthy, converter in (
            cls._OPTIONAL_SETTINGS
        ):
            try:
                value = get_setting_from_snapshot(
                    setting_key,
                    default=None,
                    settings_snapshot=settings_snapshot,
                )
            except NoSettingsContextError:
                continue  # Optional parameter; no settings context available
            if value is None:
                continue
            if require_truthy and not value:
                continue
            params[param_name] = converter(value) if converter else value
        return params

    @classmethod
    def create_llm(cls, model_name=None, temperature=0.7, **kwargs):
        """Factory function for OpenAI LLMs.

        Args:
            model_name: Name of the model to use
            temperature: Model temperature (0.0-1.0)
            **kwargs: Additional arguments including settings_snapshot

        Returns:
            A configured ChatOpenAI instance

        Raises:
            ValueError: If API key is not configured
        """
        settings_snapshot = kwargs.get("settings_snapshot")

        # Get API key from settings
        api_key = get_setting_from_snapshot(
            cls.api_key_setting,
            default=None,
            settings_snapshot=settings_snapshot,
        )

        if not api_key:
            logger.error(f"{cls.provider_name} API key not found in settings")
            raise ValueError(
                f"{cls.provider_name} API key not configured. "
                f"Please set {cls.api_key_setting} in settings."
            )

        # Use default model if none specified
        if not model_name:
            model_name = cls.default_model

        # Build OpenAI-specific parameters
        openai_params = {
            "model": model_name,
            "api_key": api_key,
            "temperature": temperature,
        }
        openai_params.update(cls._collect_optional_params(settings_snapshot))

        logger.info(
            f"Creating {cls.provider_name} LLM with model: {model_name}, "
            f"temperature: {temperature}"
        )

        return ChatOpenAI(**openai_params)

    @classmethod
    def is_available(cls, settings_snapshot=None):
        """Check if this provider is available.

        Args:
            settings_snapshot: Optional settings snapshot to use

        Returns:
            True if API key is configured, False otherwise
        """
        try:
            # Check if API key is configured
            api_key = get_setting_from_snapshot(
                cls.api_key_setting,
                default=None,
                settings_snapshot=settings_snapshot,
            )
            return bool(api_key)
        except Exception:
            # Any failure (missing context, bad snapshot) means "not available"
            return False
184# Keep the standalone functions for backward compatibility and registration
def create_openai_llm(model_name=None, temperature=0.7, **kwargs):
    """Factory function for OpenAI LLMs.

    Backward-compatible module-level wrapper; all logic lives in
    OpenAIProvider.create_llm.

    Args:
        model_name: Name of the model to use (e.g., "gpt-4", "gpt-3.5-turbo", etc.)
        temperature: Model temperature (0.0-1.0)
        **kwargs: Additional arguments including settings_snapshot

    Returns:
        A configured ChatOpenAI instance

    Raises:
        ValueError: If OpenAI API key is not configured
    """
    return OpenAIProvider.create_llm(
        model_name=model_name, temperature=temperature, **kwargs
    )
def is_openai_available(settings_snapshot=None):
    """Check if OpenAI is available.

    Backward-compatible module-level wrapper around
    OpenAIProvider.is_available.

    Args:
        settings_snapshot: Optional settings snapshot to use

    Returns:
        True if OpenAI API key is configured, False otherwise
    """
    return OpenAIProvider.is_available(settings_snapshot=settings_snapshot)
def register_openai_provider():
    """Register the OpenAI provider with the LLM registry.

    Registers the backward-compatible factory wrapper under the "openai"
    key so callers resolve this provider by name.
    """
    register_llm("openai", create_openai_llm)
    logger.info("Registered OpenAI LLM provider")