Coverage for src / local_deep_research / llm / providers / implementations / anthropic.py: 95%
37 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-14 23:55 +0000
1"""Anthropic LLM provider for Local Deep Research."""
3from langchain_anthropic import ChatAnthropic
4from loguru import logger
6from ....config.thread_settings import (
7 get_setting_from_snapshot,
8 NoSettingsContextError,
9)
10from ..openai_base import OpenAICompatibleProvider
class AnthropicProvider(OpenAICompatibleProvider):
    """Anthropic provider for Local Deep Research.

    This is the official Anthropic API provider.
    """

    # Identity / configuration used by the base class and the factory below.
    provider_name = "Anthropic"
    api_key_setting = "llm.anthropic.api_key"
    default_model = "claude-3-sonnet-20240229"
    default_base_url = "https://api.anthropic.com/v1"

    # Metadata for auto-discovery
    provider_key = "ANTHROPIC"
    company_name = "Anthropic"
    is_cloud = True

    @classmethod
    def create_llm(cls, model_name=None, temperature=0.7, **kwargs):
        """Factory function for Anthropic LLMs.

        Args:
            model_name: Name of the model to use
            temperature: Model temperature (0.0-1.0)
            **kwargs: Additional arguments including settings_snapshot

        Returns:
            A configured ChatAnthropic instance

        Raises:
            ValueError: If API key is not configured
        """
        settings_snapshot = kwargs.get("settings_snapshot")

        # Get API key from settings
        api_key = get_setting_from_snapshot(
            cls.api_key_setting,
            default=None,
            settings_snapshot=settings_snapshot,
        )

        if not api_key:
            logger.error(f"{cls.provider_name} API key not found in settings")
            raise ValueError(
                f"{cls.provider_name} API key not configured. "
                f"Please set {cls.api_key_setting} in settings."
            )

        # Use default model if none specified
        if not model_name:
            model_name = cls.default_model

        # Build Anthropic-specific parameters
        anthropic_params = {
            "model": model_name,
            "anthropic_api_key": api_key,
            "temperature": temperature,
        }

        # Add max_tokens if specified in settings. The setting is optional,
        # so a missing settings context or a malformed value must not abort
        # LLM creation.
        try:
            max_tokens = get_setting_from_snapshot(
                "llm.max_tokens",
                default=None,
                settings_snapshot=settings_snapshot,
            )
            if max_tokens:
                # Guard the conversion: a non-numeric setting value would
                # otherwise raise an uncaught ValueError/TypeError here and
                # crash creation over an optional parameter.
                try:
                    anthropic_params["max_tokens"] = int(max_tokens)
                except (ValueError, TypeError):
                    logger.warning(
                        f"Ignoring invalid llm.max_tokens value: {max_tokens!r}"
                    )
        except NoSettingsContextError:
            pass  # Optional parameter

        logger.info(
            f"Creating {cls.provider_name} LLM with model: {model_name}, "
            f"temperature: {temperature}"
        )

        return ChatAnthropic(**anthropic_params)

    @classmethod
    def is_available(cls, settings_snapshot=None):
        """Check if this provider is available.

        Args:
            settings_snapshot: Optional settings snapshot to use

        Returns:
            True if API key is configured, False otherwise
        """
        try:
            # Check if API key is configured and non-blank.
            api_key = get_setting_from_snapshot(
                cls.api_key_setting,
                default=None,
                settings_snapshot=settings_snapshot,
            )
            return bool(api_key and str(api_key).strip())
        except Exception:
            # Any settings-layer failure means the provider is unusable;
            # availability checks must never raise.
            return False