Coverage for src / local_deep_research / llm / providers / implementations / custom_openai_endpoint.py: 93%
30 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-02-25 01:07 +0000
1"""Custom OpenAI-compatible endpoint provider for Local Deep Research."""
3from loguru import logger
5from ....config.thread_settings import (
6 get_llm_setting_from_snapshot as get_setting_from_snapshot,
7)
8from ....utilities.url_utils import normalize_url
9from ...llm_registry import register_llm
10from ..openai_base import OpenAICompatibleProvider
class CustomOpenAIEndpointProvider(OpenAICompatibleProvider):
    """Custom OpenAI-compatible endpoint provider.

    This provider allows users to connect to any OpenAI-compatible API endpoint
    by specifying a custom URL in the settings.
    """

    provider_name = "Custom OpenAI Endpoint"
    api_key_setting = "llm.openai_endpoint.api_key"
    url_setting = "llm.openai_endpoint.url"  # Settings key for URL
    default_base_url = "https://api.openai.com/v1"
    default_model = "gpt-3.5-turbo"

    # Metadata for auto-discovery
    provider_key = "OPENAI_ENDPOINT"
    company_name = "Custom"
    is_cloud = None  # Unknown — could be local or cloud

    @classmethod
    def requires_auth_for_models(cls):
        """Custom endpoints may or may not require authentication for listing models.

        Many OpenAI-compatible servers (vLLM, local LLMs, etc.) don't require
        authentication. Return False to allow model listing without an API key.
        If the endpoint requires auth, the OpenAI client will raise an error.

        Returns:
            bool: Always False for custom endpoints.
        """
        return False

    @classmethod
    def create_llm(cls, model_name=None, temperature=0.7, **kwargs):
        """Create an LLM instance, resolving the endpoint URL from settings.

        Overrides the base implementation to look up the custom endpoint URL
        (falling back to ``default_base_url``), normalize it, and forward it
        to the parent via ``kwargs["base_url"]``.

        Args:
            model_name: Name of the model to use (provider default if None).
            temperature: Model temperature (0.0-1.0).
            **kwargs: Additional arguments; ``settings_snapshot`` is read here
                and the rest are passed through to the parent implementation.

        Returns:
            A configured chat model pointing at the custom endpoint.
        """
        settings_snapshot = kwargs.get("settings_snapshot")

        # Get custom endpoint URL from settings. Use cls.url_setting rather
        # than repeating the literal key, so the class attribute (and any
        # subclass override of it) stays the single source of truth.
        custom_url = get_setting_from_snapshot(
            cls.url_setting,
            default=cls.default_base_url,
            settings_snapshot=settings_snapshot,
        )

        # Normalize and pass the custom URL to parent implementation;
        # fall back to the default when the setting is empty/None.
        kwargs["base_url"] = (
            normalize_url(custom_url) if custom_url else cls.default_base_url
        )

        return super().create_llm(model_name, temperature, **kwargs)
61# Keep the standalone functions for backward compatibility
def create_openai_endpoint_llm(model_name=None, temperature=0.7, **kwargs):
    """Factory function for custom OpenAI-compatible endpoint LLMs.

    Thin backward-compatibility wrapper that delegates to
    :meth:`CustomOpenAIEndpointProvider.create_llm`.

    Args:
        model_name: Name of the model to use
        temperature: Model temperature (0.0-1.0)
        **kwargs: Additional arguments including settings_snapshot

    Returns:
        A configured ChatOpenAI instance pointing to custom endpoint

    Raises:
        ValueError: If API key is not configured
    """
    provider = CustomOpenAIEndpointProvider
    return provider.create_llm(model_name, temperature, **kwargs)
def is_openai_endpoint_available(settings_snapshot=None):
    """Check if custom OpenAI endpoint is available.

    Delegates to :meth:`CustomOpenAIEndpointProvider.is_available`.

    Args:
        settings_snapshot: Optional settings snapshot to use

    Returns:
        True if API key is configured, False otherwise
    """
    available = CustomOpenAIEndpointProvider.is_available(settings_snapshot)
    return available
def register_custom_openai_endpoint_provider():
    """Register the custom OpenAI endpoint provider with the LLM registry.

    Registers the module-level factory under the "openai_endpoint" key and
    logs the registration.
    """
    # Register the factory function (not the class) for backward compatibility.
    register_llm("openai_endpoint", create_openai_endpoint_llm)
    logger.info("Registered Custom OpenAI Endpoint LLM provider")