Coverage for src / local_deep_research / web / routes / api_routes.py: 74%
196 statements
« prev ^ index » next — coverage.py v7.13.4, created at 2026-02-25 01:07 +0000
1import requests
2from flask import (
3 Blueprint,
4 current_app,
5 jsonify,
6 request,
7)
8from loguru import logger
10from ...database.models import ResearchHistory
11from ...database.session_context import get_user_db_session
12from ...utilities.url_utils import normalize_url
13from ..auth.decorators import login_required
14from ..services.research_service import (
15 cancel_research,
16)
17from ..services.resource_service import (
18 add_resource,
19 delete_resource,
20 get_resources_for_research,
21)
22from local_deep_research.settings import SettingsManager
23from ...security import safe_get, strip_settings_snapshot
25# Create blueprint
26api_bp = Blueprint("api", __name__)
@api_bp.route("/settings/current-config", methods=["GET"])
@login_required
def get_current_config():
    """Return the active LLM/search configuration read from the settings DB.

    Responds with {"success": True, "config": {...}} on success, or a
    generic {"success": False, ...} with HTTP 500 on any internal error.
    """
    try:
        with get_user_db_session() as session:
            manager = SettingsManager(session)

            # (settings key, fallback) for every value this endpoint exposes.
            wanted = {
                "provider": ("llm.provider", "Not configured"),
                "model": ("llm.model", "Not configured"),
                "search_tool": ("search.tool", "searxng"),
                "iterations": ("search.iterations", 8),
                "questions_per_iteration": (
                    "search.questions_per_iteration",
                    5,
                ),
                "search_strategy": (
                    "search.search_strategy",
                    "focused_iteration",
                ),
            }
            config = {
                field: manager.get_setting(key, fallback)
                for field, (key, fallback) in wanted.items()
            }

        return jsonify({"success": True, "config": config})

    except Exception:
        logger.exception("Error getting current config")
        return jsonify(
            {"success": False, "error": "An internal error occurred"}
        ), 500
# API Routes
@api_bp.route("/start", methods=["POST"])
@login_required
def api_start_research():
    """Kick off a new research process.

    All of the heavy lifting (reading settings from the database, queue
    handling, starting the research thread) lives in
    research_routes.start_research(); this endpoint just forwards to it.
    """
    # Imported lazily to avoid a circular import at module load time.
    from ..routes.research_routes import start_research

    return start_research()
@api_bp.route("/status/<string:research_id>", methods=["GET"])
@login_required
def api_research_status(research_id):
    """Report the current status of a research process as JSON."""
    try:
        # Use a dedicated session so we don't clash with the session held
        # by the running research process.
        with get_user_db_session() as db_session:
            record = (
                db_session.query(ResearchHistory)
                .filter_by(id=research_id)
                .first()
            )

            if record is None:
                return jsonify({"error": "Research not found"}), 404

            # Copy every attribute we need while the session is still open;
            # touching the ORM object afterwards would raise
            # DetachedInstanceError.
            payload = {
                "status": record.status,
                "progress": record.progress,
                "completed_at": record.completed_at,
                "report_path": record.report_path,
                "metadata": strip_settings_snapshot(record.research_meta),
            }

        return jsonify(payload)
    except Exception:
        logger.exception("Error getting research status")
        return jsonify(
            {"status": "error", "message": "Failed to get research status"}
        ), 500
@api_bp.route("/terminate/<string:research_id>", methods=["POST"])
@login_required
def api_terminate_research(research_id):
    """Request cancellation of a running research process.

    Always reports status "success" when no exception occurs; the message
    distinguishes an actual termination from a no-op (unknown id or an
    already-finished run).
    """
    try:
        # Imported here to keep the module-level import list lean.
        from flask import session

        username = session.get("username")
        result = cancel_research(research_id, username)

        message = (
            "Research terminated"
            if result
            else "Research not found or already completed"
        )
        return jsonify(
            {
                "status": "success",
                "message": message,
                "result": result,
            }
        )
    except Exception:
        logger.exception("Error terminating research")
        return (
            jsonify({"status": "error", "message": "Failed to stop research."}),
            500,
        )
@api_bp.route("/resources/<string:research_id>", methods=["GET"])
@login_required
def api_get_resources(research_id):
    """
    Get resources for a specific research.

    Returns {"status": "success", "resources": [...]} on success, or a
    {"status": "error", ...} body with HTTP 500 on failure.
    """
    try:
        resources = get_resources_for_research(research_id)
        return jsonify({"status": "success", "resources": resources})
    except Exception:
        logger.exception("Error getting resources for research")
        # BUG FIX: previously `500` was passed as a second positional
        # argument *inside* jsonify(), which made Flask serialize a
        # two-element JSON array and return it with HTTP 200. The status
        # code must be the second element of the view's return tuple.
        return jsonify(
            {"status": "error", "message": "Failed to get resources"}
        ), 500
@api_bp.route("/resources/<string:research_id>", methods=["POST"])
@login_required
def api_add_resource(research_id):
    """
    Add a new resource to a research project.

    Expects a JSON body with required ``title`` and ``url`` fields, plus
    optional ``content_preview``, ``source_type`` (defaults to "web") and
    ``metadata`` (defaults to {}).

    Returns:
        400 when required fields are missing or the URL fails SSRF checks,
        404 when the research id is unknown,
        500 on unexpected internal errors.
    """
    try:
        # Robustness fix: request.json raises / returns None for a missing
        # or malformed body, which previously surfaced as a 500 via
        # AttributeError. silent=True + empty-dict fallback turns that into
        # a clean 400 from the required-field check below.
        data = request.get_json(silent=True) or {}

        # Required fields
        title = data.get("title")
        url = data.get("url")

        # Optional fields
        content_preview = data.get("content_preview")
        source_type = data.get("source_type", "web")
        metadata = data.get("metadata", {})

        # Validate required fields
        if not title or not url:
            return (
                jsonify(
                    {"status": "error", "message": "Title and URL are required"}
                ),
                400,
            )

        # Security: Validate URL to prevent SSRF attacks
        from ...security.ssrf_validator import validate_url

        if not validate_url(url):
            logger.warning(f"SSRF protection: Rejected URL {url}")
            return (
                jsonify({"status": "error", "message": "Invalid URL"}),
                400,
            )

        # Check if the research exists before attaching a resource to it.
        with get_user_db_session() as db_session:
            research = (
                db_session.query(ResearchHistory)
                .filter_by(id=research_id)
                .first()
            )

            if not research:
                return jsonify(
                    {"status": "error", "message": "Research not found"}
                ), 404

            # Add the resource
            resource_id = add_resource(
                research_id=research_id,
                title=title,
                url=url,
                content_preview=content_preview,
                source_type=source_type,
                metadata=metadata,
            )

            return jsonify(
                {
                    "status": "success",
                    "message": "Resource added successfully",
                    "resource_id": resource_id,
                }
            )
    except Exception:
        logger.exception("Error adding resource")
        return jsonify(
            {"status": "error", "message": "Failed to add resource"}
        ), 500
@api_bp.route(
    "/resources/<string:research_id>/delete/<int:resource_id>",
    methods=["DELETE"],
)
@login_required
def api_delete_resource(research_id, resource_id):
    """Remove a single resource from a research project.

    Returns 404 when the resource id is unknown and 500 on internal errors.
    """
    try:
        if delete_resource(resource_id):
            return jsonify(
                {
                    "status": "success",
                    "message": "Resource deleted successfully",
                }
            )

        # delete_resource reported nothing was removed: unknown resource id.
        return jsonify(
            {"status": "error", "message": "Resource not found"}
        ), 404
    except Exception:
        logger.exception("Error deleting resource")
        return jsonify(
            {
                "status": "error",
                "message": "An internal error occurred while deleting the resource.",
            }
        ), 500
@api_bp.route("/check/ollama_status", methods=["GET"])
@login_required
def check_ollama_status():
    """
    Check if Ollama API is running.

    Queries ``<ollama_base_url>/api/tags`` with a 5-second timeout and
    reports reachability. When the configured provider is not Ollama the
    check is skipped and "running" is reported as True.

    Note: every outcome is returned with HTTP 200; the ``running`` flag in
    the JSON body carries the actual result.
    """
    try:
        # Get Ollama URL from config
        llm_config = current_app.config.get("LLM_CONFIG", {})
        provider = llm_config.get("provider", "ollama")

        # Non-Ollama providers have nothing to check — report "running".
        if provider.lower() != "ollama":
            return jsonify(
                {
                    "running": True,
                    "message": f"Using provider: {provider}, not Ollama",
                }
            )

        # Get Ollama API URL from LLM config
        raw_ollama_base_url = llm_config.get(
            "ollama_base_url", "http://localhost:11434"
        )
        # normalize_url cleans up the configured value; fall back to the
        # default local endpoint when the setting is empty/falsy.
        ollama_base_url = (
            normalize_url(raw_ollama_base_url)
            if raw_ollama_base_url
            else "http://localhost:11434"
        )

        logger.info(f"Checking Ollama status at: {ollama_base_url}")

        # Check if Ollama is running
        try:
            # safe_get is the project's SSRF-guarded wrapper; localhost and
            # private IPs are explicitly allowed since Ollama is local.
            response = safe_get(
                f"{ollama_base_url}/api/tags",
                timeout=5,
                allow_localhost=True,
                allow_private_ips=True,
            )

            # Add response details for debugging
            logger.debug(
                f"Ollama status check response code: {response.status_code}"
            )

            if response.status_code == 200:
                # Try to validate the response content
                try:
                    data = response.json()

                    # Check the format: newer API wraps the list under a
                    # "models" key, older API returns a bare array.
                    if "models" in data:
                        model_count = len(data.get("models", []))
                        logger.info(
                            f"Ollama service is running with {model_count} models (new API format)"
                        )
                    else:
                        # Older API format
                        model_count = len(data)
                        logger.info(
                            f"Ollama service is running with {model_count} models (old API format)"
                        )

                    return jsonify(
                        {
                            "running": True,
                            "message": f"Ollama service is running with {model_count} models",
                            "model_count": model_count,
                        }
                    )
                except ValueError as json_err:
                    logger.warning(f"Ollama returned invalid JSON: {json_err}")
                    # It's running but returned invalid JSON
                    return jsonify(
                        {
                            "running": True,
                            "message": "Ollama service is running but returned invalid data format",
                            "error_details": "Invalid response format from the service.",
                        }
                    )
            else:
                logger.warning(
                    f"Ollama returned non-200 status code: {response.status_code}"
                )
                return jsonify(
                    {
                        "running": False,
                        "message": f"Ollama service returned status code: {response.status_code}",
                        "status_code": response.status_code,
                    }
                )
        except requests.exceptions.ConnectionError as conn_err:
            # Service not reachable at all (not started, wrong port, ...).
            logger.warning(f"Ollama connection error: {conn_err}")
            return jsonify(
                {
                    "running": False,
                    "message": "Ollama service is not running or not accessible",
                    "error_type": "connection_error",
                    "error_details": "Unable to connect to the service. Please check if the service is running.",
                }
            )
        except requests.exceptions.Timeout as timeout_err:
            # Reached the host but got no reply within the 5s timeout.
            logger.warning(f"Ollama request timed out: {timeout_err}")
            return jsonify(
                {
                    "running": False,
                    "message": "Ollama service request timed out after 5 seconds",
                    "error_type": "timeout",
                    "error_details": "Request timed out. The service may be overloaded.",
                }
            )
    except Exception:
        # Catch-all boundary: never let this health check raise to Flask.
        logger.exception("Error checking Ollama status")
        return jsonify(
            {
                "running": False,
                "message": "An internal error occurred while checking Ollama status.",
                "error_type": "exception",
                "error_details": "An internal error occurred.",
            }
        )
@api_bp.route("/check/ollama_model", methods=["GET"])
@login_required
def check_ollama_model():
    """
    Check if the configured Ollama model is available.

    The model name is taken from the ``model`` query parameter, falling
    back to the configured default. Availability is decided by a
    case-insensitive name match against ``<ollama_base_url>/api/tags``.
    For non-Ollama providers the check is skipped and "available" is True.

    Note: every outcome is returned with HTTP 200; the ``available`` flag
    in the JSON body carries the actual result.
    """
    try:
        # Get Ollama configuration
        llm_config = current_app.config.get("LLM_CONFIG", {})
        provider = llm_config.get("provider", "ollama")

        # Non-Ollama providers have no local model list to check.
        if provider.lower() != "ollama":
            return jsonify(
                {
                    "available": True,
                    "message": f"Using provider: {provider}, not Ollama",
                    "provider": provider,
                }
            )

        # Get model name from request or use config default
        model_name = request.args.get("model")
        if not model_name:
            model_name = llm_config.get("model", "gemma3:12b")

        # Log which model we're checking for debugging
        logger.info(f"Checking availability of Ollama model: {model_name}")

        # Get Ollama API URL from LLM config
        raw_ollama_base_url = llm_config.get(
            "ollama_base_url", "http://localhost:11434"
        )
        ollama_base_url = (
            normalize_url(raw_ollama_base_url)
            if raw_ollama_base_url
            else "http://localhost:11434"
        )

        # Check if the model is available
        try:
            # SSRF-guarded GET; Ollama is expected on a local/private address.
            response = safe_get(
                f"{ollama_base_url}/api/tags",
                timeout=5,
                allow_localhost=True,
                allow_private_ips=True,
            )

            # Log response details for debugging
            logger.debug(f"Ollama API response status: {response.status_code}")

            if response.status_code != 200:
                logger.warning(
                    f"Ollama API returned non-200 status: {response.status_code}"
                )
                return jsonify(
                    {
                        "available": False,
                        "model": model_name,
                        "message": f"Could not access Ollama service - status code: {response.status_code}",
                        "status_code": response.status_code,
                    }
                )

            # Try to parse the response
            try:
                data = response.json()

                # Debug log the first bit of the response (truncated to
                # keep log lines bounded).
                response_preview = (
                    str(data)[:500] + "..."
                    if len(str(data)) > 500
                    else str(data)
                )
                logger.debug(f"Ollama API response data: {response_preview}")

                # Get models based on API format: newer API nests the list
                # under "models", older API returns a bare array.
                models = []
                if "models" in data:
                    # Newer Ollama API
                    logger.debug("Using new Ollama API format (models key)")
                    models = data.get("models", [])
                else:
                    # Older Ollama API format
                    logger.debug("Using old Ollama API format (array)")
                    models = data

                # Log available models for debugging (first 10 only)
                model_names = [m.get("name", "") for m in models]
                logger.debug(
                    f"Available Ollama models: {', '.join(model_names[:10])}"
                    + (
                        f" and {len(model_names) - 10} more"
                        if len(model_names) > 10
                        else ""
                    )
                )

                # Case-insensitive model name comparison
                model_exists = any(
                    m.get("name", "").lower() == model_name.lower()
                    for m in models
                )

                if model_exists:
                    logger.info(f"Ollama model {model_name} is available")
                    return jsonify(
                        {
                            "available": True,
                            "model": model_name,
                            "message": f"Model {model_name} is available",
                            "all_models": model_names,
                        }
                    )
                else:
                    # Check if models were found at all
                    if not models:
                        logger.warning("No models found in Ollama")
                        message = "No models found in Ollama. Please pull models first."
                    else:
                        logger.warning(
                            f"Model {model_name} not found among {len(models)} available models"
                        )
                        # Don't expose available models for security reasons
                        message = f"Model {model_name} is not available"

                    return jsonify(
                        {
                            "available": False,
                            "model": model_name,
                            "message": message,
                            # Remove all_models to prevent information disclosure
                        }
                    )
            except ValueError as json_err:
                # JSON parsing error
                logger.exception(
                    f"Failed to parse Ollama API response: {json_err}"
                )
                return jsonify(
                    {
                        "available": False,
                        "model": model_name,
                        "message": "Invalid response from Ollama API",
                        "error_type": "json_parse_error",
                    }
                )
        except requests.exceptions.ConnectionError as conn_err:
            # Connection error
            logger.warning(f"Connection error to Ollama API: {conn_err}")
            return jsonify(
                {
                    "available": False,
                    "model": model_name,
                    "message": "Could not connect to Ollama service",
                    "error_type": "connection_error",
                    "error_details": "Unable to connect to the service. Please check if the service is running.",
                }
            )
        except requests.exceptions.Timeout:
            # Timeout error
            logger.warning("Timeout connecting to Ollama API")
            return jsonify(
                {
                    "available": False,
                    "model": model_name,
                    "message": "Connection to Ollama service timed out",
                    "error_type": "timeout",
                }
            )
    except Exception:
        # General exception — may fire before model_name is bound, hence
        # the locals() check when building the response below.
        logger.exception("Error checking Ollama model")

        return jsonify(
            {
                "available": False,
                "model": (
                    model_name
                    if "model_name" in locals()
                    else llm_config.get("model", "gemma3:12b")
                ),
                "message": "An internal error occurred while checking the model.",
                "error_type": "exception",
                "error_details": "An internal error occurred.",
            }
        )
# Helper route to get system configuration
@api_bp.route("/config", methods=["GET"])
@login_required
def api_get_config():
    """Expose the non-sensitive subset of the system configuration.

    Only values safe for any authenticated client are included; nothing
    secret (keys, URLs, credentials) is ever returned here.
    """
    app_config = current_app.config
    llm_config = app_config.get("LLM_CONFIG", {})
    search_config = app_config.get("SEARCH_CONFIG", {})

    public_config = {
        "version": app_config.get("VERSION", "0.1.0"),
        "llm_provider": llm_config.get("provider", "ollama"),
        "search_tool": search_config.get("search_tool", "auto"),
        "features": {
            "notifications": app_config.get("ENABLE_NOTIFICATIONS", False)
        },
    }

    return jsonify(public_config)