Coverage for src/local_deep_research/llm/providers/implementations/lmstudio.py: 62%
45 statements
« prev ^ index » next — coverage.py v7.12.0, created at 2026-01-11 07:37 +0000
1"""LM Studio LLM provider for Local Deep Research."""
3from loguru import logger
5from ....utilities.url_utils import normalize_url
6from ...llm_registry import register_llm
7from ..openai_base import OpenAICompatibleProvider
class LMStudioProvider(OpenAICompatibleProvider):
    """LM Studio provider using OpenAI-compatible endpoint.

    LM Studio provides a local OpenAI-compatible API for running models.
    """

    provider_name = "LM Studio"
    api_key_setting = None  # LM Studio doesn't need a real API key
    url_setting = "llm.lmstudio.url"  # Settings key for URL
    default_base_url = "http://localhost:1234/v1"
    default_model = "local-model"  # User should specify their loaded model

    # Metadata for auto-discovery
    provider_key = "LMSTUDIO"
    company_name = "LM Studio"
    region = "Local"
    country = "Local"
    data_location = "Local"
    is_cloud = False  # Local provider

    @classmethod
    def create_llm(cls, model_name=None, temperature=0.7, **kwargs):
        """Create a chat model pointed at the local LM Studio server.

        Overrides the base implementation to inject the LM Studio base URL
        from settings and a placeholder API key, then calls the parent's
        ``_create_llm_instance`` directly to bypass the API-key check.

        Args:
            model_name: Name of the model loaded in LM Studio.
            temperature: Model temperature (0.0-1.0).
            **kwargs: Additional arguments; may include ``settings_snapshot``.

        Returns:
            A configured OpenAI-compatible chat model pointing at LM Studio.
        """
        from ....config.thread_settings import get_setting_from_snapshot

        settings_snapshot = kwargs.get("settings_snapshot")

        # Get LM Studio URL from settings (default includes /v1 for backward
        # compatibility). Read the key from cls.url_setting so the settings
        # key is declared in exactly one place on the class.
        lmstudio_url = get_setting_from_snapshot(
            cls.url_setting,
            cls.default_base_url,
            settings_snapshot=settings_snapshot,
        )

        # Use URL as-is (user should provide complete URL including /v1 if needed)
        kwargs["base_url"] = normalize_url(lmstudio_url)

        # LM Studio doesn't require a real API key, just use a clearly fake placeholder
        kwargs["api_key"] = "not-required"  # pragma: allowlist secret

        # Use parent's create_llm but bypass API key check
        return super()._create_llm_instance(model_name, temperature, **kwargs)

    @classmethod
    def is_available(cls, settings_snapshot=None):
        """Check if LM Studio is available.

        Probes ``GET <base_url>/models`` with a short timeout.

        Args:
            settings_snapshot: Optional settings snapshot to read the URL from.

        Returns:
            True if the endpoint answers with HTTP 200, False on any error.
        """
        try:
            from ....config.thread_settings import get_setting_from_snapshot
            from ....security import safe_get

            lmstudio_url = get_setting_from_snapshot(
                cls.url_setting,
                cls.default_base_url,
                settings_snapshot=settings_snapshot,
            )
            # Use URL as-is (default already includes /v1)
            base_url = normalize_url(lmstudio_url)
            # LM Studio typically uses OpenAI-compatible endpoints
            response = safe_get(
                f"{base_url}/models",
                timeout=1.0,
                allow_localhost=True,
                allow_private_ips=True,
            )
            return response.status_code == 200
        except Exception:
            # Any failure (import error, bad URL, connection refused) means
            # the local server is simply treated as unavailable.
            return False

    @classmethod
    def requires_auth_for_models(cls):
        """LM Studio doesn't require authentication for listing models."""
        return False
84# Keep the standalone functions for backward compatibility and registration
def create_lmstudio_llm(model_name=None, temperature=0.7, **kwargs):
    """Module-level factory for LM Studio LLMs.

    Kept as a standalone function for backward compatibility and for
    registration with the LLM registry; it simply delegates to
    :meth:`LMStudioProvider.create_llm`.

    Args:
        model_name: Name of the model to use
        temperature: Model temperature (0.0-1.0)
        **kwargs: Additional arguments including settings_snapshot

    Returns:
        A configured ChatOpenAI instance pointing to LM Studio

    Raises:
        ValueError: If LM Studio is not available
    """
    return LMStudioProvider.create_llm(
        model_name=model_name, temperature=temperature, **kwargs
    )
def is_lmstudio_available(settings_snapshot=None):
    """Backward-compatible wrapper around ``LMStudioProvider.is_available``.

    Args:
        settings_snapshot: Optional settings snapshot to use

    Returns:
        True if LM Studio is running, False otherwise
    """
    return LMStudioProvider.is_available(settings_snapshot=settings_snapshot)
def register_lmstudio_provider():
    """Hook the LM Studio factory into the global LLM registry."""
    factory = create_lmstudio_llm
    register_llm("lmstudio", factory)
    logger.info("Registered LM Studio LLM provider")