Coverage for src / local_deep_research / llm / providers / implementations / google.py: 100%

40 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-04-14 23:55 +0000

1"""Google/Gemini LLM provider for Local Deep Research.""" 

2 

3from loguru import logger 

4 

5from ..openai_base import OpenAICompatibleProvider 

6 

7 

class GoogleProvider(OpenAICompatibleProvider):
    """Google Gemini provider using OpenAI-compatible endpoint.

    This uses Google's OpenAI-compatible API endpoint to access Gemini models,
    which automatically supports all current and future Gemini models without
    needing to update the code.
    """

    provider_name = "Google Gemini"
    api_key_setting = "llm.google.api_key"
    default_base_url = "https://generativelanguage.googleapis.com/v1beta/openai"
    default_model = "gemini-1.5-flash"

    # Metadata for auto-discovery
    provider_key = "GOOGLE"
    company_name = "Google"
    is_cloud = True

    @classmethod
    def requires_auth_for_models(cls):
        """Google requires authentication for listing models.

        Note: Google's OpenAI-compatible /models endpoint has a bug (returns 401).
        The native Gemini API endpoint requires an API key.
        """
        return True

    @classmethod
    def list_models_for_api(cls, api_key=None, base_url=None):
        """List available models using Google's native API.

        Args:
            api_key: Google API key
            base_url: Not used - Google uses a fixed endpoint

        Google's OpenAI-compatible /models endpoint returns 401 (bug),
        so we use the native Gemini API endpoint instead.
        """
        if not api_key:
            logger.debug("Google Gemini requires API key for listing models")
            return []

        try:
            from ....security import safe_get

            # Use the native Gemini API endpoint (not OpenAI-compatible).
            # Note: Google's API requires the key as a query parameter, not in headers.
            # This is their documented approach: https://ai.google.dev/api/rest
            url = f"https://generativelanguage.googleapis.com/v1beta/models?key={api_key}"
            response = safe_get(url, timeout=10)

            # Guard clause: anything other than 200 means no usable model list.
            if response.status_code != 200:
                logger.warning(
                    f"Google Gemini API returned status {response.status_code}"
                )
                return []

            payload = response.json()
            discovered = []
            for entry in payload.get("models", []):
                raw_name = entry.get("name", "")
                # Entries arrive as "models/gemini-1.5-flash"; keep only the ID part.
                model_id = (
                    raw_name[7:] if raw_name.startswith("models/") else raw_name
                )

                # Skip embedding-only models; keep those that can generate content.
                generation_methods = entry.get("supportedGenerationMethods", [])
                if "generateContent" in generation_methods and model_id:
                    discovered.append({"value": model_id, "label": model_id})

            logger.info(
                f"Found {len(discovered)} generative models from Google Gemini API"
            )
            return discovered

        except Exception:
            logger.exception("Error fetching Google Gemini models")
            return []