Coverage for src/local_deep_research/llm/providers/implementations/anthropic.py: 46%

50 statements  

coverage.py v7.12.0, created at 2026-01-11 07:37 +0000

1"""Anthropic LLM provider for Local Deep Research.""" 

2 

3from langchain_anthropic import ChatAnthropic 

4from loguru import logger 

5 

6from ....config.thread_settings import ( 

7 get_setting_from_snapshot as _get_setting_from_snapshot, 

8 NoSettingsContextError, 

9) 

10from ...llm_registry import register_llm 

11from ..openai_base import OpenAICompatibleProvider 

12 

13 

def get_setting_from_snapshot(
    key, default=None, username=None, settings_snapshot=None
):
    """Get setting from context only - no database access from threads.

    This is a wrapper around the shared function that enables the fallback LLM check.
    """
    return _get_setting_from_snapshot(
        key, default, username, settings_snapshot, check_fallback_llm=True
    )

class AnthropicProvider(OpenAICompatibleProvider):
    """Anthropic provider for Local Deep Research.

    This is the official Anthropic API provider.
    """

    provider_name = "Anthropic"
    api_key_setting = "llm.anthropic.api_key"
    default_model = "claude-3-sonnet-20240229"
    default_base_url = "https://api.anthropic.com/v1"

    # Metadata for auto-discovery
    provider_key = "ANTHROPIC"
    company_name = "Anthropic"
    region = "US"
    country = "United States"
    data_location = "United States"
    is_cloud = True

    @classmethod
    def create_llm(cls, model_name=None, temperature=0.7, **kwargs):
        """Factory function for Anthropic LLMs.

        Args:
            model_name: Name of the model to use
            temperature: Model temperature (0.0-1.0)
            **kwargs: Additional arguments including settings_snapshot

        Returns:
            A configured ChatAnthropic instance

        Raises:
            ValueError: If API key is not configured
        """
        settings_snapshot = kwargs.get("settings_snapshot")

        # Get API key from settings
        api_key = get_setting_from_snapshot(
            cls.api_key_setting,
            default=None,
            settings_snapshot=settings_snapshot,
        )

        if not api_key:
            logger.error(f"{cls.provider_name} API key not found in settings")
            raise ValueError(
                f"{cls.provider_name} API key not configured. "
                f"Please set {cls.api_key_setting} in settings."
            )

        # Use default model if none specified
        if not model_name:
            model_name = cls.default_model

        # Build Anthropic-specific parameters
        anthropic_params = {
            "model": model_name,
            "anthropic_api_key": api_key,
            "temperature": temperature,
        }

        # Add max_tokens if specified in settings
        try:
            max_tokens = get_setting_from_snapshot(
                "llm.max_tokens",
                default=None,
                settings_snapshot=settings_snapshot,
            )
            if max_tokens:
                anthropic_params["max_tokens"] = int(max_tokens)
        except NoSettingsContextError:
            pass  # Optional parameter

        logger.info(
            f"Creating {cls.provider_name} LLM with model: {model_name}, "
            f"temperature: {temperature}"
        )

        return ChatAnthropic(**anthropic_params)

    @classmethod
    def is_available(cls, settings_snapshot=None):
        """Check if this provider is available.

        Args:
            settings_snapshot: Optional settings snapshot to use

        Returns:
            True if API key is configured, False otherwise
        """
        try:
            # Check if API key is configured
            api_key = get_setting_from_snapshot(
                cls.api_key_setting,
                default=None,
                settings_snapshot=settings_snapshot,
            )
            return bool(api_key)
        except Exception:
            return False


# Keep the standalone functions for backward compatibility and registration
def create_anthropic_llm(model_name=None, temperature=0.7, **kwargs):
    """Factory function for Anthropic LLMs.

    Args:
        model_name: Name of the model to use (e.g., "claude-3-opus-20240229", "claude-3-sonnet-20240229", etc.)
        temperature: Model temperature (0.0-1.0)
        **kwargs: Additional arguments including settings_snapshot

    Returns:
        A configured ChatAnthropic instance

    Raises:
        ValueError: If Anthropic API key is not configured
    """
    return AnthropicProvider.create_llm(model_name, temperature, **kwargs)


def is_anthropic_available(settings_snapshot=None):
    """Check if Anthropic is available.

    Args:
        settings_snapshot: Optional settings snapshot to use

    Returns:
        True if Anthropic API key is configured, False otherwise
    """
    return AnthropicProvider.is_available(settings_snapshot)


def register_anthropic_provider():
    """Register the Anthropic provider with the LLM registry."""
    register_llm("anthropic", create_anthropic_llm)
    logger.info("Registered Anthropic LLM provider")
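
A minimal usage sketch of the functions above (not part of anthropic.py), assuming the module is importable as local_deep_research.llm.providers.implementations.anthropic and that settings_snapshot is a flat dict mapping setting keys to values; both the import path and the snapshot shape are assumptions here, as they are defined elsewhere in the project:

from local_deep_research.llm.providers.implementations.anthropic import (
    create_anthropic_llm,
    is_anthropic_available,
    register_anthropic_provider,
)

register_anthropic_provider()  # registers "anthropic" with the LLM registry

# Assumed snapshot shape: a flat dict of setting keys to values.
snapshot = {
    "llm.anthropic.api_key": "sk-ant-...",  # placeholder, not a real key
    "llm.max_tokens": 1024,  # optional; forwarded to ChatAnthropic if present
}

if is_anthropic_available(settings_snapshot=snapshot):
    llm = create_anthropic_llm(
        model_name="claude-3-sonnet-20240229",
        temperature=0.2,
        settings_snapshot=snapshot,
    )
    # llm is a ChatAnthropic instance and follows the standard LangChain
    # runnable interface, e.g. llm.invoke("...").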