Coverage for src / local_deep_research / llm / providers / implementations / custom_openai_endpoint.py: 91%

35 statements  

« prev     ^ index     » next       coverage.py v7.12.0, created at 2026-01-11 00:51 +0000

1"""Custom OpenAI-compatible endpoint provider for Local Deep Research.""" 

2 

3from loguru import logger 

4 

5from ....config.thread_settings import ( 

6 get_setting_from_snapshot as _get_setting_from_snapshot, 

7) 

8from ....utilities.url_utils import normalize_url 

9from ...llm_registry import register_llm 

10from ..openai_base import OpenAICompatibleProvider 

11 

12 

def get_setting_from_snapshot(
    key, default=None, username=None, settings_snapshot=None
):
    """Get setting from context only - no database access from threads.

    Thin module-local wrapper around the shared snapshot lookup that
    always enables the fallback-LLM check (``check_fallback_llm=True``).

    Args:
        key: Dotted settings key to look up.
        default: Value returned when the key is absent.
        username: Optional user scoping for the lookup.
        settings_snapshot: Optional pre-captured settings mapping.

    Returns:
        The resolved setting value, or ``default`` if not found.
    """
    return _get_setting_from_snapshot(
        key,
        default,
        username,
        settings_snapshot,
        check_fallback_llm=True,
    )

23 

24 

class CustomOpenAIEndpointProvider(OpenAICompatibleProvider):
    """Custom OpenAI-compatible endpoint provider.

    This provider allows users to connect to any OpenAI-compatible API endpoint
    by specifying a custom URL in the settings.
    """

    provider_name = "Custom OpenAI Endpoint"
    api_key_setting = "llm.openai_endpoint.api_key"
    url_setting = "llm.openai_endpoint.url"  # Settings key for URL
    default_base_url = "https://api.openai.com/v1"
    default_model = "gpt-3.5-turbo"

    # Metadata for auto-discovery
    provider_key = "OPENAI_ENDPOINT"
    company_name = "Custom"
    region = "Custom"
    country = "User-defined"
    data_location = "User-defined"
    is_cloud = True  # Assume cloud by default

    @classmethod
    def requires_auth_for_models(cls):
        """Custom endpoints may or may not require authentication for listing models.

        Many OpenAI-compatible servers (vLLM, local LLMs, etc.) don't require
        authentication. Return False to allow model listing without an API key.
        If the endpoint requires auth, the OpenAI client will raise an error.
        """
        return False

    @classmethod
    def create_llm(cls, model_name=None, temperature=0.7, **kwargs):
        """Override to get URL from settings.

        Resolves the endpoint URL from the settings snapshot, normalizes
        it, and delegates to the parent implementation with ``base_url``
        set in ``kwargs``.

        Args:
            model_name: Name of the model to use.
            temperature: Model temperature (0.0-1.0).
            **kwargs: Additional arguments; ``settings_snapshot`` is read
                here, and ``base_url`` is overwritten before delegating.

        Returns:
            Whatever the parent ``create_llm`` returns (a configured
            chat model pointed at the custom endpoint).
        """
        settings_snapshot = kwargs.get("settings_snapshot")

        # Get custom endpoint URL from settings. Use cls.url_setting
        # (previously this key was hard-coded, which could drift out of
        # sync with the class attribute and ignored subclass overrides).
        custom_url = get_setting_from_snapshot(
            cls.url_setting,
            default=cls.default_base_url,
            settings_snapshot=settings_snapshot,
        )

        # Normalize and pass the custom URL to parent implementation;
        # fall back to the default when the setting is empty/None.
        kwargs["base_url"] = (
            normalize_url(custom_url) if custom_url else cls.default_base_url
        )

        return super().create_llm(model_name, temperature, **kwargs)

74 

75 

76# Keep the standalone functions for backward compatibility 

def create_openai_endpoint_llm(model_name=None, temperature=0.7, **kwargs):
    """Factory function for custom OpenAI-compatible endpoint LLMs.

    Backward-compatible alias that forwards directly to
    ``CustomOpenAIEndpointProvider.create_llm``.

    Args:
        model_name: Name of the model to use
        temperature: Model temperature (0.0-1.0)
        **kwargs: Additional arguments including settings_snapshot

    Returns:
        A configured ChatOpenAI instance pointing to custom endpoint

    Raises:
        ValueError: If API key is not configured
    """
    provider = CustomOpenAIEndpointProvider
    return provider.create_llm(model_name, temperature, **kwargs)

94 

95 

def is_openai_endpoint_available(settings_snapshot=None):
    """Check if custom OpenAI endpoint is available.

    Backward-compatible alias for
    ``CustomOpenAIEndpointProvider.is_available``.

    Args:
        settings_snapshot: Optional settings snapshot to use

    Returns:
        True if API key is configured, False otherwise
    """
    available = CustomOpenAIEndpointProvider.is_available(settings_snapshot)
    return available

106 

107 

def register_custom_openai_endpoint_provider():
    """Register the custom OpenAI endpoint provider with the LLM registry."""
    # Registry key must remain "openai_endpoint" — existing configs
    # reference the provider by this name.
    register_llm("openai_endpoint", create_openai_endpoint_llm)
    logger.info("Registered Custom OpenAI Endpoint LLM provider")