Coverage for src/local_deep_research/llm/providers/implementations/google.py: 46% (51 statements)
1"""Google/Gemini LLM provider for Local Deep Research."""
3from loguru import logger
5from ...llm_registry import register_llm
6from ..openai_base import OpenAICompatibleProvider
9class GoogleProvider(OpenAICompatibleProvider):
10 """Google Gemini provider using OpenAI-compatible endpoint.
12 This uses Google's OpenAI-compatible API endpoint to access Gemini models,
13 which automatically supports all current and future Gemini models without
14 needing to update the code.
15 """
17 provider_name = "Google Gemini"
18 api_key_setting = "llm.google.api_key"
19 default_base_url = "https://generativelanguage.googleapis.com/v1beta/openai"
20 default_model = "gemini-1.5-flash"
22 # Metadata for auto-discovery
23 provider_key = "GOOGLE"
24 company_name = "Google"
25 region = "US"
26 country = "United States"
27 data_location = "Worldwide" # Google has data centers worldwide
28 is_cloud = True
30 @classmethod
31 def requires_auth_for_models(cls):
32 """Google requires authentication for listing models.
34 Note: Google's OpenAI-compatible /models endpoint has a bug (returns 401).
35 The native Gemini API endpoint requires an API key.
36 """
37 return True
39 @classmethod
40 def list_models_for_api(cls, api_key=None, base_url=None):
41 """List available models using Google's native API.
43 Args:
44 api_key: Google API key
45 base_url: Not used - Google uses a fixed endpoint
47 Google's OpenAI-compatible /models endpoint returns 401 (bug),
48 so we use the native Gemini API endpoint instead.
49 """
50 if not api_key: 50 ↛ 54line 50 didn't jump to line 54 because the condition on line 50 was always true
51 logger.debug("Google Gemini requires API key for listing models")
52 return []
54 try:
55 from ....security import safe_get
57 # Use the native Gemini API endpoint (not OpenAI-compatible)
58 # Note: Google's API requires the key as a query parameter, not in headers
59 # This is their documented approach: https://ai.google.dev/api/rest
60 url = f"https://generativelanguage.googleapis.com/v1beta/models?key={api_key}"
62 response = safe_get(url, timeout=10)
64 if response.status_code == 200:
65 data = response.json()
66 models = []
68 for model in data.get("models", []):
69 model_name = model.get("name", "")
70 # Extract just the model ID from "models/gemini-1.5-flash"
71 if model_name.startswith("models/"):
72 model_id = model_name[7:] # Remove "models/" prefix
73 else:
74 model_id = model_name
76 # Only include generative models (not embedding models)
77 supported_methods = model.get(
78 "supportedGenerationMethods", []
79 )
80 if "generateContent" in supported_methods and model_id:
81 models.append(
82 {
83 "value": model_id,
84 "label": model_id,
85 }
86 )
88 logger.info(
89 f"Found {len(models)} generative models from Google Gemini API"
90 )
91 return models
92 else:
93 logger.warning(
94 f"Google Gemini API returned status {response.status_code}"
95 )
96 return []
98 except Exception:
99 logger.exception("Error fetching Google Gemini models")
100 return []
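

# Example (illustrative sketch, not part of the original module): the method
# returns a list of {"value", "label"} dicts built from generative models only.
# "YOUR_GOOGLE_API_KEY" is a placeholder; with no key the method returns [].
#
#     models = GoogleProvider.list_models_for_api(api_key="YOUR_GOOGLE_API_KEY")
#     # -> [{"value": "gemini-1.5-flash", "label": "gemini-1.5-flash"}, ...]
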
# Keep the standalone functions for backward compatibility and registration
def create_google_llm(model_name=None, temperature=0.7, **kwargs):
    """Factory function for Google/Gemini LLMs using OpenAI-compatible endpoint.

    Args:
        model_name: Name of the model to use (e.g., "gemini-1.5-flash", "gemini-2.0-flash-exp", etc.)
        temperature: Model temperature (0.0-1.0)
        **kwargs: Additional arguments including settings_snapshot

    Returns:
        A configured ChatOpenAI instance pointing to Google's API

    Raises:
        ValueError: If Google API key is not configured
    """
    return GoogleProvider.create_llm(model_name, temperature, **kwargs)
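

# Usage sketch (illustrative, not a prescribed API): the arguments mirror the
# docstring above; the settings_snapshot value is assumed to come from the caller.
#
#     llm = create_google_llm(
#         model_name="gemini-1.5-flash",
#         temperature=0.2,
#         settings_snapshot=settings_snapshot,
#     )
#     # Raises ValueError when no Google API key is configured.
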
def is_google_available(settings_snapshot=None):
    """Check if Google/Gemini is available.

    Args:
        settings_snapshot: Optional settings snapshot to use

    Returns:
        True if Google API key is configured, False otherwise
    """
    return GoogleProvider.is_available(settings_snapshot)


def register_google_provider():
    """Register the Google/Gemini provider with the LLM registry."""
    register_llm("google", create_google_llm)
    logger.info("Registered Google/Gemini LLM provider")
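

# Registration sketch (illustrative; the import path and startup guard below are
# assumptions, not part of this module): a caller would typically register the
# provider once at startup so that the "google" key resolves to the factory.
#
#     from .google import is_google_available, register_google_provider
#
#     if is_google_available(settings_snapshot):
#         register_google_provider()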