Coverage for src / local_deep_research / llm / providers / implementations / lmstudio.py: 100%
34 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-14 23:55 +0000
1"""LM Studio LLM provider for Local Deep Research."""
3from ....config.constants import DEFAULT_LMSTUDIO_URL
4from ....utilities.url_utils import normalize_url
5from ..openai_base import OpenAICompatibleProvider
class LMStudioProvider(OpenAICompatibleProvider):
    """Provider for LM Studio's local OpenAI-compatible API.

    LM Studio exposes locally hosted models behind an OpenAI-style HTTP
    endpoint, so most behavior is inherited from OpenAICompatibleProvider;
    this subclass only wires up the URL setting and skips API-key checks.
    """

    provider_name = "LM Studio"
    api_key_setting = None  # type: ignore[assignment] # LM Studio doesn't need a real API key
    url_setting = "llm.lmstudio.url"  # type: ignore[assignment] # Settings key for URL
    default_base_url = DEFAULT_LMSTUDIO_URL
    default_model = "local-model"  # User should specify their loaded model

    # Metadata for auto-discovery
    provider_key = "LMSTUDIO"
    company_name = "LM Studio"
    is_cloud = False  # Local provider

    @classmethod
    def create_llm(cls, model_name=None, temperature=0.7, **kwargs):
        """Build an LLM instance pointed at the configured LM Studio server."""
        from ....config.thread_settings import get_setting_from_snapshot

        snapshot = kwargs.get("settings_snapshot")

        # Resolve the server URL from settings; the shipped default already
        # carries the /v1 suffix for backward compatibility.
        configured_url = get_setting_from_snapshot(
            "llm.lmstudio.url",
            cls.default_base_url,
            settings_snapshot=snapshot,
        )
        # The URL is used as provided (aside from normalization); users must
        # include /v1 themselves when their setup needs it.
        kwargs["base_url"] = normalize_url(configured_url)

        # A clearly fake placeholder satisfies the OpenAI client machinery —
        # LM Studio does not validate the key.
        kwargs["api_key"] = "not-required"  # pragma: allowlist secret

        # Call the parent's internal factory directly so the usual API-key
        # presence check is bypassed.
        return super()._create_llm_instance(model_name, temperature, **kwargs)

    @classmethod
    def is_available(cls, settings_snapshot=None):
        """Return True when an LM Studio server responds at the configured URL."""
        try:
            from ....config.thread_settings import get_setting_from_snapshot
            from ....security import safe_get

            configured_url = get_setting_from_snapshot(
                "llm.lmstudio.url",
                cls.default_base_url,
                settings_snapshot=settings_snapshot,
            )
            # Default already includes /v1, so the URL is taken as-is.
            endpoint = normalize_url(configured_url)
            # Probe the OpenAI-compatible model-listing route with a short
            # timeout; localhost and private addresses are expected here.
            probe = safe_get(
                f"{endpoint}/models",
                timeout=1,
                allow_localhost=True,
                allow_private_ips=True,
            )
            return probe.status_code == 200
        except Exception:
            # Any failure (unreachable host, bad URL, import issue) simply
            # means the provider is not available.
            return False

    @classmethod
    def requires_auth_for_models(cls):
        """LM Studio doesn't require authentication for listing models."""
        return False