Coverage for src/local_deep_research/llm/providers/implementations/custom_openai_endpoint.py: 100%

39 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-04-14 23:55 +0000

1"""Custom OpenAI-compatible endpoint provider for Local Deep Research.""" 

2 

3from loguru import logger 

4 

5from ....config.thread_settings import get_setting_from_snapshot 

6from ....utilities.url_utils import normalize_url 

7from ..openai_base import OpenAICompatibleProvider 

8 

9 

class CustomOpenAIEndpointProvider(OpenAICompatibleProvider):
    """Custom OpenAI-compatible endpoint provider.

    This provider allows users to connect to any OpenAI-compatible API endpoint
    (vLLM, text-generation-webui, LM Studio, etc.) by specifying a custom URL
    in the settings.
    """

    provider_name = "OpenAI-Compatible Endpoint"
    api_key_setting = "llm.openai_endpoint.api_key"
    url_setting = "llm.openai_endpoint.url"  # type: ignore[assignment] # Settings key for URL
    default_base_url = "https://api.openai.com/v1"
    default_model = "gpt-3.5-turbo"

    # Metadata for auto-discovery
    provider_key = "OPENAI_ENDPOINT"
    company_name = "OpenAI-Compatible"
    is_cloud = None  # Unknown — could be local or cloud

    @classmethod
    def requires_auth_for_models(cls):
        """Custom endpoints may or may not require authentication for listing models.

        Many OpenAI-compatible servers (vLLM, local LLMs, etc.) don't require
        authentication. Return False to allow model listing without an API key.
        If the endpoint requires auth, the OpenAI client will raise an error.
        """
        return False

    @classmethod
    def is_available(cls, settings_snapshot=None):
        """Custom endpoints are available with either an API key or a custom URL.

        Unlike cloud-only providers, custom endpoints support keyless local
        servers (vLLM, text-generation-webui, etc.). The provider is
        considered configured when the user has set either an API key or a
        URL that differs from the default OpenAI endpoint.

        Args:
            settings_snapshot: Optional settings snapshot to read from; when
                None, settings are resolved by get_setting_from_snapshot's
                own fallback behavior.

        Returns:
            bool: True if an API key is set, or a non-default URL is set.
        """
        # First check: any non-empty API key means the user configured this
        # provider explicitly.
        try:
            api_key = get_setting_from_snapshot(
                cls.api_key_setting,
                default=None,
                settings_snapshot=settings_snapshot,
            )
            if api_key and str(api_key).strip():
                return True
        except Exception:
            # Best-effort availability probe: log and fall through to the
            # URL check rather than failing hard.
            logger.debug(
                "Error checking provider availability",
                exc_info=True,
            )

        # Second check: a custom URL that differs from the default OpenAI
        # endpoint implies a keyless local/self-hosted server.
        try:
            custom_url = get_setting_from_snapshot(
                cls.url_setting,
                default=None,
                settings_snapshot=settings_snapshot,
            )
            if custom_url and str(custom_url).strip():
                normalized = normalize_url(str(custom_url).strip())
                # Compare ignoring trailing slashes so "…/v1/" == "…/v1".
                if normalized.rstrip("/") != cls.default_base_url.rstrip("/"):
                    return True
        except Exception:
            logger.debug(
                f"Error reading URL setting '{cls.url_setting}'",
                exc_info=True,
            )

        return False

    @classmethod
    def create_llm(cls, model_name=None, temperature=0.7, **kwargs):
        """Override to get the base URL from settings.

        Resolves the endpoint URL via ``cls.url_setting`` (fix: previously a
        hard-coded duplicate of the settings key was used here, which would
        silently break subclasses that override ``url_setting``), normalizes
        it, and delegates LLM construction to the parent implementation.

        Args:
            model_name: Optional model identifier; parent applies defaults.
            temperature: Sampling temperature passed through to the parent.
            **kwargs: Extra options; may include ``settings_snapshot``. The
                resolved URL is injected as ``base_url``.

        Returns:
            The LLM instance created by the parent class.
        """
        settings_snapshot = kwargs.get("settings_snapshot")

        # Get custom endpoint URL from settings (use the class attribute so
        # subclasses overriding url_setting are honored).
        custom_url = get_setting_from_snapshot(
            cls.url_setting,
            default=cls.default_base_url,
            settings_snapshot=settings_snapshot,
        )

        # Normalize and pass the custom URL to parent implementation
        kwargs["base_url"] = (
            normalize_url(custom_url) if custom_url else cls.default_base_url
        )

        return super().create_llm(model_name, temperature, **kwargs)