Coverage for src/local_deep_research/llm/providers/implementations/openai.py: 100%

67 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-04-14 23:55 +0000

1"""OpenAI LLM provider for Local Deep Research.""" 

2 

3from langchain_openai import ChatOpenAI 

4from loguru import logger 

5 

6from ....config.thread_settings import ( 

7 get_setting_from_snapshot, 

8 NoSettingsContextError, 

9) 

10from ..openai_base import OpenAICompatibleProvider 

11 

12 

class OpenAIProvider(OpenAICompatibleProvider):
    """OpenAI provider for Local Deep Research.

    This is the official OpenAI API provider.
    """

    provider_name = "OpenAI"
    api_key_setting = "llm.openai.api_key"
    default_model = "gpt-3.5-turbo"
    default_base_url = "https://api.openai.com/v1"

    # Metadata for auto-discovery
    provider_key = "OPENAI"
    company_name = "OpenAI"
    is_cloud = True

    @classmethod
    def _get_optional_setting(cls, key, settings_snapshot):
        """Fetch an optional setting, returning None when unavailable.

        Wraps ``get_setting_from_snapshot`` so callers don't need to
        repeat the ``NoSettingsContextError`` handling for every
        optional parameter.

        Args:
            key: Dotted settings key to look up.
            settings_snapshot: Optional settings snapshot to read from.

        Returns:
            The setting value, or None if unset or no settings context
            exists.
        """
        try:
            return get_setting_from_snapshot(
                key,
                default=None,
                settings_snapshot=settings_snapshot,
            )
        except NoSettingsContextError:
            return None  # Optional parameter; absence is fine

    @classmethod
    def create_llm(cls, model_name=None, temperature=0.7, **kwargs):
        """Factory function for OpenAI LLMs.

        Args:
            model_name: Name of the model to use; falls back to
                ``cls.default_model`` when not given.
            temperature: Model temperature (0.0-1.0)
            **kwargs: Additional arguments including settings_snapshot

        Returns:
            A configured ChatOpenAI instance

        Raises:
            ValueError: If API key is not configured
        """
        settings_snapshot = kwargs.get("settings_snapshot")

        # The API key is required; fail fast with a clear message if
        # it is missing from settings.
        api_key = get_setting_from_snapshot(
            cls.api_key_setting,
            default=None,
            settings_snapshot=settings_snapshot,
        )

        if not api_key:
            logger.error(f"{cls.provider_name} API key not found in settings")
            raise ValueError(
                f"{cls.provider_name} API key not configured. "
                f"Please set {cls.api_key_setting} in settings."
            )

        # Use default model if none specified
        if not model_name:
            model_name = cls.default_model

        # Build OpenAI-specific parameters
        openai_params = {
            "model": model_name,
            "api_key": api_key,
            "temperature": temperature,
        }

        # Truthiness-gated optional parameters: empty strings are
        # treated the same as "not configured".
        api_base = cls._get_optional_setting(
            "llm.openai.api_base", settings_snapshot
        )
        if api_base:
            openai_params["openai_api_base"] = api_base

        organization = cls._get_optional_setting(
            "llm.openai.organization", settings_snapshot
        )
        if organization:
            openai_params["openai_organization"] = organization

        # None-gated optional parameters: False / 0 are meaningful
        # configured values and must still be passed through.
        streaming = cls._get_optional_setting("llm.streaming", settings_snapshot)
        if streaming is not None:
            openai_params["streaming"] = streaming

        max_retries = cls._get_optional_setting(
            "llm.max_retries", settings_snapshot
        )
        if max_retries is not None:
            openai_params["max_retries"] = max_retries

        request_timeout = cls._get_optional_setting(
            "llm.request_timeout", settings_snapshot
        )
        if request_timeout is not None:
            openai_params["request_timeout"] = request_timeout

        # max_tokens is truthiness-gated and coerced to int (settings
        # may store it as a string).
        max_tokens = cls._get_optional_setting("llm.max_tokens", settings_snapshot)
        if max_tokens:
            openai_params["max_tokens"] = int(max_tokens)

        logger.info(
            f"Creating {cls.provider_name} LLM with model: {model_name}, "
            f"temperature: {temperature}"
        )

        return ChatOpenAI(**openai_params)

    @classmethod
    def is_available(cls, settings_snapshot=None):
        """Check if this provider is available.

        Args:
            settings_snapshot: Optional settings snapshot to use

        Returns:
            True if API key is configured, False otherwise
        """
        try:
            # Check if API key is configured and non-blank.
            api_key = get_setting_from_snapshot(
                cls.api_key_setting,
                default=None,
                settings_snapshot=settings_snapshot,
            )
            return bool(api_key and str(api_key).strip())
        except Exception:
            # Any settings failure means "not available", never an error.
            return False