Coverage for src/local_deep_research/embeddings/providers/implementations/ollama.py: 93%
41 statements
« prev ^ index » next — coverage.py v7.12.0, created at 2026-01-11 00:51 +0000
"""Ollama embedding provider."""

from typing import Any, Dict, List, Optional

from langchain_community.embeddings import OllamaEmbeddings
from langchain_core.embeddings import Embeddings
from loguru import logger

from ....config.thread_settings import get_setting_from_snapshot
from ....security import safe_get
from ....utilities.llm_utils import get_ollama_base_url
from ..base import BaseEmbeddingProvider
class OllamaEmbeddingsProvider(BaseEmbeddingProvider):
    """
    Embedding provider backed by a locally running Ollama server.

    Embeddings are produced through the Ollama HTTP API on the local
    machine, so no API key is required.
    """

    provider_name = "Ollama"
    provider_key = "OLLAMA"
    requires_api_key = False
    supports_local = True
    default_model = "nomic-embed-text"

    @classmethod
    def create_embeddings(
        cls,
        model: Optional[str] = None,
        settings_snapshot: Optional[Dict[str, Any]] = None,
        **kwargs,
    ) -> Embeddings:
        """
        Build an ``OllamaEmbeddings`` instance.

        Args:
            model: Embedding model name; when omitted, the value from the
                settings snapshot (or ``default_model``) is used.
            settings_snapshot: Optional settings snapshot to read from.
            **kwargs: Extra options; only ``base_url`` is consulted here.

        Returns:
            A configured ``OllamaEmbeddings`` object.
        """
        # Resolve the model: an explicit argument wins over configuration.
        model = (
            get_setting_from_snapshot(
                "embeddings.ollama.model",
                default=cls.default_model,
                settings_snapshot=settings_snapshot,
            )
            if model is None
            else model
        )

        # Resolve the server URL: an explicit kwarg wins over configuration.
        base_url = kwargs.get("base_url")
        if base_url is None:
            base_url = get_ollama_base_url(settings_snapshot)

        logger.info(
            f"Creating OllamaEmbeddings with model={model}, base_url={base_url}"
        )

        return OllamaEmbeddings(model=model, base_url=base_url)

    @classmethod
    def is_available(
        cls, settings_snapshot: Optional[Dict[str, Any]] = None
    ) -> bool:
        """Return True when a responsive Ollama server can be reached."""
        try:
            import requests

            base_url = get_ollama_base_url(settings_snapshot)

            # Probe the tags endpoint; any transport failure means "down".
            try:
                response = safe_get(
                    f"{base_url}/api/tags",
                    timeout=3.0,
                    allow_localhost=True,
                    allow_private_ips=True,
                )
                return response.status_code == 200
            except requests.exceptions.RequestException:
                return False

        except Exception:
            # Unexpected failure (bad config, import error, ...): log it and
            # report the provider as unavailable rather than crashing.
            logger.exception("Error checking Ollama availability")
            return False

    @classmethod
    def get_available_models(
        cls, settings_snapshot: Optional[Dict[str, Any]] = None
    ) -> List[Dict[str, str]]:
        """List embedding models reported by the configured Ollama server."""
        # Imported lazily to keep module import light.
        from ....utilities.llm_utils import fetch_ollama_models

        return fetch_ollama_models(
            get_ollama_base_url(settings_snapshot), timeout=3.0
        )