open-webui/backend/open_webui/routers/retrieval.py

import json
import logging
import mimetypes
import os
import shutil
import asyncio
import uuid
from datetime import datetime
from pathlib import Path
from typing import Iterator, List, Optional, Sequence, Union
from fastapi import (
Depends,
FastAPI,
File,
Form,
HTTPException,
UploadFile,
Request,
status,
APIRouter,
)
from fastapi.middleware.cors import CORSMiddleware
from fastapi.concurrency import run_in_threadpool
from pydantic import BaseModel
import tiktoken
from langchain.text_splitter import RecursiveCharacterTextSplitter, TokenTextSplitter
from langchain_text_splitters import MarkdownHeaderTextSplitter
from langchain_core.documents import Document
from open_webui.models.files import FileModel, Files
from open_webui.models.knowledge import Knowledges
from open_webui.storage.provider import Storage
from open_webui.retrieval.vector.factory import VECTOR_DB_CLIENT
# Document loaders
from open_webui.retrieval.loaders.main import Loader
from open_webui.retrieval.loaders.youtube import YoutubeLoader
# Web search engines
from open_webui.retrieval.web.main import SearchResult
from open_webui.retrieval.web.utils import get_web_loader
from open_webui.retrieval.web.brave import search_brave
from open_webui.retrieval.web.kagi import search_kagi
from open_webui.retrieval.web.mojeek import search_mojeek
from open_webui.retrieval.web.bocha import search_bocha
from open_webui.retrieval.web.duckduckgo import search_duckduckgo
from open_webui.retrieval.web.google_pse import search_google_pse
from open_webui.retrieval.web.jina_search import search_jina
from open_webui.retrieval.web.searchapi import search_searchapi
from open_webui.retrieval.web.serpapi import search_serpapi
from open_webui.retrieval.web.searxng import search_searxng
from open_webui.retrieval.web.yacy import search_yacy
from open_webui.retrieval.web.serper import search_serper
from open_webui.retrieval.web.serply import search_serply
from open_webui.retrieval.web.serpstack import search_serpstack
from open_webui.retrieval.web.tavily import search_tavily
from open_webui.retrieval.web.bing import search_bing
from open_webui.retrieval.web.exa import search_exa
from open_webui.retrieval.web.perplexity import search_perplexity
from open_webui.retrieval.web.sougou import search_sougou
from open_webui.retrieval.web.firecrawl import search_firecrawl
from open_webui.retrieval.web.external import search_external
from open_webui.retrieval.utils import (
get_embedding_function,
get_reranking_function,
get_model_path,
query_collection,
query_collection_with_hybrid_search,
query_doc,
query_doc_with_hybrid_search,
)
from open_webui.utils.misc import (
calculate_sha256_string,
)
from open_webui.utils.auth import get_admin_user, get_verified_user
from open_webui.config import (
ENV,
RAG_EMBEDDING_MODEL_AUTO_UPDATE,
RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
RAG_RERANKING_MODEL_AUTO_UPDATE,
RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
UPLOAD_DIR,
DEFAULT_LOCALE,
RAG_EMBEDDING_CONTENT_PREFIX,
RAG_EMBEDDING_QUERY_PREFIX,
)
from open_webui.env import (
SRC_LOG_LEVELS,
DEVICE_TYPE,
DOCKER,
SENTENCE_TRANSFORMERS_BACKEND,
SENTENCE_TRANSFORMERS_MODEL_KWARGS,
SENTENCE_TRANSFORMERS_CROSS_ENCODER_BACKEND,
SENTENCE_TRANSFORMERS_CROSS_ENCODER_MODEL_KWARGS,
)
from open_webui.constants import ERROR_MESSAGES
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])
##########################################
#
# Utility functions
#
##########################################
def get_ef(
engine: str,
embedding_model: str,
auto_update: bool = False,
):
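    """Load the local SentenceTransformer embedding model.

    Only used when no external embedding engine is configured (engine == "").
    Returns the loaded model, or None when an external engine is in use or the
    model fails to load.
    """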
ef = None
if embedding_model and engine == "":
from sentence_transformers import SentenceTransformer
try:
ef = SentenceTransformer(
get_model_path(embedding_model, auto_update),
device=DEVICE_TYPE,
trust_remote_code=RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
backend=SENTENCE_TRANSFORMERS_BACKEND,
model_kwargs=SENTENCE_TRANSFORMERS_MODEL_KWARGS,
)
except Exception as e:
log.debug(f"Error loading SentenceTransformer: {e}")
return ef
def get_rf(
engine: str = "",
reranking_model: Optional[str] = None,
external_reranker_url: str = "",
external_reranker_api_key: str = "",
auto_update: bool = False,
):
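    """Load the reranking function used for hybrid search.

    Picks a ColBERT reranker for jina-colbert models, an ExternalReranker when
    the external engine is selected, and a sentence-transformers CrossEncoder
    otherwise. Returns None when no reranking model is configured; raises if
    the model cannot be loaded.
    """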
rf = None
if reranking_model:
if any(model in reranking_model for model in ["jinaai/jina-colbert-v2"]):
try:
from open_webui.retrieval.models.colbert import ColBERT
rf = ColBERT(
get_model_path(reranking_model, auto_update),
env="docker" if DOCKER else None,
)
except Exception as e:
log.error(f"ColBERT: {e}")
raise Exception(ERROR_MESSAGES.DEFAULT(e))
else:
if engine == "external":
try:
from open_webui.retrieval.models.external import ExternalReranker
rf = ExternalReranker(
url=external_reranker_url,
api_key=external_reranker_api_key,
model=reranking_model,
)
except Exception as e:
log.error(f"ExternalReranking: {e}")
raise Exception(ERROR_MESSAGES.DEFAULT(e))
else:
import sentence_transformers
try:
rf = sentence_transformers.CrossEncoder(
get_model_path(reranking_model, auto_update),
device=DEVICE_TYPE,
trust_remote_code=RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
backend=SENTENCE_TRANSFORMERS_CROSS_ENCODER_BACKEND,
model_kwargs=SENTENCE_TRANSFORMERS_CROSS_ENCODER_MODEL_KWARGS,
)
except Exception as e:
log.error(f"CrossEncoder: {e}")
raise Exception(ERROR_MESSAGES.DEFAULT("CrossEncoder error"))
return rf
##########################################
#
# API routes
#
##########################################
router = APIRouter()
class CollectionNameForm(BaseModel):
collection_name: Optional[str] = None
class ProcessUrlForm(CollectionNameForm):
url: str
class SearchForm(BaseModel):
queries: List[str]
@router.get("/")
async def get_status(request: Request):
return {
"status": True,
"chunk_size": request.app.state.config.CHUNK_SIZE,
"chunk_overlap": request.app.state.config.CHUNK_OVERLAP,
"template": request.app.state.config.RAG_TEMPLATE,
"embedding_engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
"embedding_model": request.app.state.config.RAG_EMBEDDING_MODEL,
"reranking_model": request.app.state.config.RAG_RERANKING_MODEL,
"embedding_batch_size": request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
}
@router.get("/embedding")
async def get_embedding_config(request: Request, user=Depends(get_admin_user)):
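    """Return the current embedding engine, model, and per-engine connection settings."""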
return {
"status": True,
"embedding_engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
"embedding_model": request.app.state.config.RAG_EMBEDDING_MODEL,
"embedding_batch_size": request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
"openai_config": {
"url": request.app.state.config.RAG_OPENAI_API_BASE_URL,
"key": request.app.state.config.RAG_OPENAI_API_KEY,
},
"ollama_config": {
"url": request.app.state.config.RAG_OLLAMA_BASE_URL,
"key": request.app.state.config.RAG_OLLAMA_API_KEY,
},
"azure_openai_config": {
"url": request.app.state.config.RAG_AZURE_OPENAI_BASE_URL,
"key": request.app.state.config.RAG_AZURE_OPENAI_API_KEY,
"version": request.app.state.config.RAG_AZURE_OPENAI_API_VERSION,
},
}
class OpenAIConfigForm(BaseModel):
url: str
key: str
class OllamaConfigForm(BaseModel):
url: str
key: str
class AzureOpenAIConfigForm(BaseModel):
url: str
key: str
version: str
class EmbeddingModelUpdateForm(BaseModel):
openai_config: Optional[OpenAIConfigForm] = None
ollama_config: Optional[OllamaConfigForm] = None
azure_openai_config: Optional[AzureOpenAIConfigForm] = None
embedding_engine: str
embedding_model: str
embedding_batch_size: Optional[int] = 1
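# Illustrative request body for POST /embedding/update (example values only; any
# of the *_config blocks may be omitted):
# {
#     "embedding_engine": "openai",
#     "embedding_model": "text-embedding-3-small",
#     "embedding_batch_size": 1,
#     "openai_config": {"url": "https://api.openai.com/v1", "key": "sk-..."}
# }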
@router.post("/embedding/update")
async def update_embedding_config(
request: Request, form_data: EmbeddingModelUpdateForm, user=Depends(get_admin_user)
):
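    """Switch the embedding engine/model and rebuild the embedding function.

    When the currently configured engine is the local one, the loaded
    SentenceTransformer is released first so GPU memory can be reclaimed
    before the new configuration is applied.
    """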
log.info(
f"Updating embedding model: {request.app.state.config.RAG_EMBEDDING_MODEL} to {form_data.embedding_model}"
)
if request.app.state.config.RAG_EMBEDDING_ENGINE == "":
# unloads current internal embedding model and clears VRAM cache
request.app.state.ef = None
request.app.state.EMBEDDING_FUNCTION = None
import gc
gc.collect()
if DEVICE_TYPE == "cuda":
import torch
if torch.cuda.is_available():
torch.cuda.empty_cache()
try:
request.app.state.config.RAG_EMBEDDING_ENGINE = form_data.embedding_engine
request.app.state.config.RAG_EMBEDDING_MODEL = form_data.embedding_model
if request.app.state.config.RAG_EMBEDDING_ENGINE in [
"ollama",
"openai",
"azure_openai",
]:
if form_data.openai_config is not None:
request.app.state.config.RAG_OPENAI_API_BASE_URL = (
form_data.openai_config.url
)
request.app.state.config.RAG_OPENAI_API_KEY = (
form_data.openai_config.key
)
if form_data.ollama_config is not None:
request.app.state.config.RAG_OLLAMA_BASE_URL = (
form_data.ollama_config.url
)
request.app.state.config.RAG_OLLAMA_API_KEY = (
form_data.ollama_config.key
)
if form_data.azure_openai_config is not None:
request.app.state.config.RAG_AZURE_OPENAI_BASE_URL = (
form_data.azure_openai_config.url
)
request.app.state.config.RAG_AZURE_OPENAI_API_KEY = (
form_data.azure_openai_config.key
)
request.app.state.config.RAG_AZURE_OPENAI_API_VERSION = (
form_data.azure_openai_config.version
)
request.app.state.config.RAG_EMBEDDING_BATCH_SIZE = (
form_data.embedding_batch_size
)
request.app.state.ef = get_ef(
request.app.state.config.RAG_EMBEDDING_ENGINE,
request.app.state.config.RAG_EMBEDDING_MODEL,
)
request.app.state.EMBEDDING_FUNCTION = get_embedding_function(
request.app.state.config.RAG_EMBEDDING_ENGINE,
request.app.state.config.RAG_EMBEDDING_MODEL,
request.app.state.ef,
(
request.app.state.config.RAG_OPENAI_API_BASE_URL
if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
else (
request.app.state.config.RAG_OLLAMA_BASE_URL
if request.app.state.config.RAG_EMBEDDING_ENGINE == "ollama"
else request.app.state.config.RAG_AZURE_OPENAI_BASE_URL
)
),
(
request.app.state.config.RAG_OPENAI_API_KEY
if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
else (
request.app.state.config.RAG_OLLAMA_API_KEY
if request.app.state.config.RAG_EMBEDDING_ENGINE == "ollama"
else request.app.state.config.RAG_AZURE_OPENAI_API_KEY
)
),
request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
azure_api_version=(
request.app.state.config.RAG_AZURE_OPENAI_API_VERSION
if request.app.state.config.RAG_EMBEDDING_ENGINE == "azure_openai"
else None
),
)
return {
"status": True,
"embedding_engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
"embedding_model": request.app.state.config.RAG_EMBEDDING_MODEL,
"embedding_batch_size": request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
"openai_config": {
"url": request.app.state.config.RAG_OPENAI_API_BASE_URL,
"key": request.app.state.config.RAG_OPENAI_API_KEY,
},
"ollama_config": {
"url": request.app.state.config.RAG_OLLAMA_BASE_URL,
"key": request.app.state.config.RAG_OLLAMA_API_KEY,
},
"azure_openai_config": {
"url": request.app.state.config.RAG_AZURE_OPENAI_BASE_URL,
"key": request.app.state.config.RAG_AZURE_OPENAI_API_KEY,
"version": request.app.state.config.RAG_AZURE_OPENAI_API_VERSION,
},
}
except Exception as e:
log.exception(f"Problem updating embedding model: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ERROR_MESSAGES.DEFAULT(e),
)
@router.get("/config")
async def get_rag_config(request: Request, user=Depends(get_admin_user)):
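    """Return the full RAG, content-extraction, chunking, and web-search configuration."""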
return {
"status": True,
# RAG settings
"RAG_TEMPLATE": request.app.state.config.RAG_TEMPLATE,
"TOP_K": request.app.state.config.TOP_K,
"BYPASS_EMBEDDING_AND_RETRIEVAL": request.app.state.config.BYPASS_EMBEDDING_AND_RETRIEVAL,
"RAG_FULL_CONTEXT": request.app.state.config.RAG_FULL_CONTEXT,
# Hybrid search settings
"ENABLE_RAG_HYBRID_SEARCH": request.app.state.config.ENABLE_RAG_HYBRID_SEARCH,
"TOP_K_RERANKER": request.app.state.config.TOP_K_RERANKER,
"RELEVANCE_THRESHOLD": request.app.state.config.RELEVANCE_THRESHOLD,
"HYBRID_BM25_WEIGHT": request.app.state.config.HYBRID_BM25_WEIGHT,
# Content extraction settings
"CONTENT_EXTRACTION_ENGINE": request.app.state.config.CONTENT_EXTRACTION_ENGINE,
"PDF_EXTRACT_IMAGES": request.app.state.config.PDF_EXTRACT_IMAGES,
"DATALAB_MARKER_API_KEY": request.app.state.config.DATALAB_MARKER_API_KEY,
"DATALAB_MARKER_API_BASE_URL": request.app.state.config.DATALAB_MARKER_API_BASE_URL,
"DATALAB_MARKER_ADDITIONAL_CONFIG": request.app.state.config.DATALAB_MARKER_ADDITIONAL_CONFIG,
"DATALAB_MARKER_SKIP_CACHE": request.app.state.config.DATALAB_MARKER_SKIP_CACHE,
"DATALAB_MARKER_FORCE_OCR": request.app.state.config.DATALAB_MARKER_FORCE_OCR,
"DATALAB_MARKER_PAGINATE": request.app.state.config.DATALAB_MARKER_PAGINATE,
"DATALAB_MARKER_STRIP_EXISTING_OCR": request.app.state.config.DATALAB_MARKER_STRIP_EXISTING_OCR,
"DATALAB_MARKER_DISABLE_IMAGE_EXTRACTION": request.app.state.config.DATALAB_MARKER_DISABLE_IMAGE_EXTRACTION,
"DATALAB_MARKER_FORMAT_LINES": request.app.state.config.DATALAB_MARKER_FORMAT_LINES,
"DATALAB_MARKER_USE_LLM": request.app.state.config.DATALAB_MARKER_USE_LLM,
"DATALAB_MARKER_OUTPUT_FORMAT": request.app.state.config.DATALAB_MARKER_OUTPUT_FORMAT,
"EXTERNAL_DOCUMENT_LOADER_URL": request.app.state.config.EXTERNAL_DOCUMENT_LOADER_URL,
"EXTERNAL_DOCUMENT_LOADER_API_KEY": request.app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY,
"TIKA_SERVER_URL": request.app.state.config.TIKA_SERVER_URL,
"DOCLING_SERVER_URL": request.app.state.config.DOCLING_SERVER_URL,
"DOCLING_DO_OCR": request.app.state.config.DOCLING_DO_OCR,
"DOCLING_FORCE_OCR": request.app.state.config.DOCLING_FORCE_OCR,
"DOCLING_OCR_ENGINE": request.app.state.config.DOCLING_OCR_ENGINE,
"DOCLING_OCR_LANG": request.app.state.config.DOCLING_OCR_LANG,
"DOCLING_PDF_BACKEND": request.app.state.config.DOCLING_PDF_BACKEND,
"DOCLING_TABLE_MODE": request.app.state.config.DOCLING_TABLE_MODE,
"DOCLING_PIPELINE": request.app.state.config.DOCLING_PIPELINE,
"DOCLING_DO_PICTURE_DESCRIPTION": request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION,
"DOCLING_PICTURE_DESCRIPTION_MODE": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_MODE,
"DOCLING_PICTURE_DESCRIPTION_LOCAL": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_LOCAL,
"DOCLING_PICTURE_DESCRIPTION_API": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_API,
"DOCUMENT_INTELLIGENCE_ENDPOINT": request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT,
"DOCUMENT_INTELLIGENCE_KEY": request.app.state.config.DOCUMENT_INTELLIGENCE_KEY,
"MISTRAL_OCR_API_KEY": request.app.state.config.MISTRAL_OCR_API_KEY,
# Reranking settings
"RAG_RERANKING_MODEL": request.app.state.config.RAG_RERANKING_MODEL,
"RAG_RERANKING_ENGINE": request.app.state.config.RAG_RERANKING_ENGINE,
"RAG_EXTERNAL_RERANKER_URL": request.app.state.config.RAG_EXTERNAL_RERANKER_URL,
"RAG_EXTERNAL_RERANKER_API_KEY": request.app.state.config.RAG_EXTERNAL_RERANKER_API_KEY,
# Chunking settings
"TEXT_SPLITTER": request.app.state.config.TEXT_SPLITTER,
"CHUNK_SIZE": request.app.state.config.CHUNK_SIZE,
"CHUNK_OVERLAP": request.app.state.config.CHUNK_OVERLAP,
# File upload settings
"FILE_MAX_SIZE": request.app.state.config.FILE_MAX_SIZE,
"FILE_MAX_COUNT": request.app.state.config.FILE_MAX_COUNT,
"FILE_IMAGE_COMPRESSION_WIDTH": request.app.state.config.FILE_IMAGE_COMPRESSION_WIDTH,
"FILE_IMAGE_COMPRESSION_HEIGHT": request.app.state.config.FILE_IMAGE_COMPRESSION_HEIGHT,
"ALLOWED_FILE_EXTENSIONS": request.app.state.config.ALLOWED_FILE_EXTENSIONS,
# Integration settings
"ENABLE_GOOGLE_DRIVE_INTEGRATION": request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION,
"ENABLE_ONEDRIVE_INTEGRATION": request.app.state.config.ENABLE_ONEDRIVE_INTEGRATION,
# Web search settings
"web": {
"ENABLE_WEB_SEARCH": request.app.state.config.ENABLE_WEB_SEARCH,
"WEB_SEARCH_ENGINE": request.app.state.config.WEB_SEARCH_ENGINE,
"WEB_SEARCH_TRUST_ENV": request.app.state.config.WEB_SEARCH_TRUST_ENV,
"WEB_SEARCH_RESULT_COUNT": request.app.state.config.WEB_SEARCH_RESULT_COUNT,
"WEB_SEARCH_CONCURRENT_REQUESTS": request.app.state.config.WEB_SEARCH_CONCURRENT_REQUESTS,
"WEB_LOADER_CONCURRENT_REQUESTS": request.app.state.config.WEB_LOADER_CONCURRENT_REQUESTS,
"WEB_SEARCH_DOMAIN_FILTER_LIST": request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
"BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL": request.app.state.config.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL,
"BYPASS_WEB_SEARCH_WEB_LOADER": request.app.state.config.BYPASS_WEB_SEARCH_WEB_LOADER,
"SEARXNG_QUERY_URL": request.app.state.config.SEARXNG_QUERY_URL,
"YACY_QUERY_URL": request.app.state.config.YACY_QUERY_URL,
"YACY_USERNAME": request.app.state.config.YACY_USERNAME,
"YACY_PASSWORD": request.app.state.config.YACY_PASSWORD,
"GOOGLE_PSE_API_KEY": request.app.state.config.GOOGLE_PSE_API_KEY,
"GOOGLE_PSE_ENGINE_ID": request.app.state.config.GOOGLE_PSE_ENGINE_ID,
"BRAVE_SEARCH_API_KEY": request.app.state.config.BRAVE_SEARCH_API_KEY,
"KAGI_SEARCH_API_KEY": request.app.state.config.KAGI_SEARCH_API_KEY,
"MOJEEK_SEARCH_API_KEY": request.app.state.config.MOJEEK_SEARCH_API_KEY,
"BOCHA_SEARCH_API_KEY": request.app.state.config.BOCHA_SEARCH_API_KEY,
"SERPSTACK_API_KEY": request.app.state.config.SERPSTACK_API_KEY,
"SERPSTACK_HTTPS": request.app.state.config.SERPSTACK_HTTPS,
"SERPER_API_KEY": request.app.state.config.SERPER_API_KEY,
"SERPLY_API_KEY": request.app.state.config.SERPLY_API_KEY,
"TAVILY_API_KEY": request.app.state.config.TAVILY_API_KEY,
"SEARCHAPI_API_KEY": request.app.state.config.SEARCHAPI_API_KEY,
"SEARCHAPI_ENGINE": request.app.state.config.SEARCHAPI_ENGINE,
"SERPAPI_API_KEY": request.app.state.config.SERPAPI_API_KEY,
"SERPAPI_ENGINE": request.app.state.config.SERPAPI_ENGINE,
"JINA_API_KEY": request.app.state.config.JINA_API_KEY,
"BING_SEARCH_V7_ENDPOINT": request.app.state.config.BING_SEARCH_V7_ENDPOINT,
"BING_SEARCH_V7_SUBSCRIPTION_KEY": request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY,
"EXA_API_KEY": request.app.state.config.EXA_API_KEY,
"PERPLEXITY_API_KEY": request.app.state.config.PERPLEXITY_API_KEY,
"PERPLEXITY_MODEL": request.app.state.config.PERPLEXITY_MODEL,
"PERPLEXITY_SEARCH_CONTEXT_USAGE": request.app.state.config.PERPLEXITY_SEARCH_CONTEXT_USAGE,
"SOUGOU_API_SID": request.app.state.config.SOUGOU_API_SID,
"SOUGOU_API_SK": request.app.state.config.SOUGOU_API_SK,
"WEB_LOADER_ENGINE": request.app.state.config.WEB_LOADER_ENGINE,
"ENABLE_WEB_LOADER_SSL_VERIFICATION": request.app.state.config.ENABLE_WEB_LOADER_SSL_VERIFICATION,
"PLAYWRIGHT_WS_URL": request.app.state.config.PLAYWRIGHT_WS_URL,
"PLAYWRIGHT_TIMEOUT": request.app.state.config.PLAYWRIGHT_TIMEOUT,
"FIRECRAWL_API_KEY": request.app.state.config.FIRECRAWL_API_KEY,
"FIRECRAWL_API_BASE_URL": request.app.state.config.FIRECRAWL_API_BASE_URL,
"TAVILY_EXTRACT_DEPTH": request.app.state.config.TAVILY_EXTRACT_DEPTH,
"EXTERNAL_WEB_SEARCH_URL": request.app.state.config.EXTERNAL_WEB_SEARCH_URL,
"EXTERNAL_WEB_SEARCH_API_KEY": request.app.state.config.EXTERNAL_WEB_SEARCH_API_KEY,
"EXTERNAL_WEB_LOADER_URL": request.app.state.config.EXTERNAL_WEB_LOADER_URL,
"EXTERNAL_WEB_LOADER_API_KEY": request.app.state.config.EXTERNAL_WEB_LOADER_API_KEY,
"YOUTUBE_LOADER_LANGUAGE": request.app.state.config.YOUTUBE_LOADER_LANGUAGE,
"YOUTUBE_LOADER_PROXY_URL": request.app.state.config.YOUTUBE_LOADER_PROXY_URL,
"YOUTUBE_LOADER_TRANSLATION": request.app.state.YOUTUBE_LOADER_TRANSLATION,
},
}
class WebConfig(BaseModel):
ENABLE_WEB_SEARCH: Optional[bool] = None
WEB_SEARCH_ENGINE: Optional[str] = None
WEB_SEARCH_TRUST_ENV: Optional[bool] = None
WEB_SEARCH_RESULT_COUNT: Optional[int] = None
WEB_SEARCH_CONCURRENT_REQUESTS: Optional[int] = None
WEB_LOADER_CONCURRENT_REQUESTS: Optional[int] = None
WEB_SEARCH_DOMAIN_FILTER_LIST: Optional[List[str]] = []
BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL: Optional[bool] = None
BYPASS_WEB_SEARCH_WEB_LOADER: Optional[bool] = None
SEARXNG_QUERY_URL: Optional[str] = None
YACY_QUERY_URL: Optional[str] = None
YACY_USERNAME: Optional[str] = None
YACY_PASSWORD: Optional[str] = None
GOOGLE_PSE_API_KEY: Optional[str] = None
GOOGLE_PSE_ENGINE_ID: Optional[str] = None
BRAVE_SEARCH_API_KEY: Optional[str] = None
KAGI_SEARCH_API_KEY: Optional[str] = None
MOJEEK_SEARCH_API_KEY: Optional[str] = None
BOCHA_SEARCH_API_KEY: Optional[str] = None
SERPSTACK_API_KEY: Optional[str] = None
SERPSTACK_HTTPS: Optional[bool] = None
SERPER_API_KEY: Optional[str] = None
SERPLY_API_KEY: Optional[str] = None
TAVILY_API_KEY: Optional[str] = None
SEARCHAPI_API_KEY: Optional[str] = None
SEARCHAPI_ENGINE: Optional[str] = None
SERPAPI_API_KEY: Optional[str] = None
SERPAPI_ENGINE: Optional[str] = None
JINA_API_KEY: Optional[str] = None
BING_SEARCH_V7_ENDPOINT: Optional[str] = None
BING_SEARCH_V7_SUBSCRIPTION_KEY: Optional[str] = None
EXA_API_KEY: Optional[str] = None
PERPLEXITY_API_KEY: Optional[str] = None
PERPLEXITY_MODEL: Optional[str] = None
PERPLEXITY_SEARCH_CONTEXT_USAGE: Optional[str] = None
SOUGOU_API_SID: Optional[str] = None
SOUGOU_API_SK: Optional[str] = None
WEB_LOADER_ENGINE: Optional[str] = None
ENABLE_WEB_LOADER_SSL_VERIFICATION: Optional[bool] = None
PLAYWRIGHT_WS_URL: Optional[str] = None
PLAYWRIGHT_TIMEOUT: Optional[int] = None
FIRECRAWL_API_KEY: Optional[str] = None
FIRECRAWL_API_BASE_URL: Optional[str] = None
TAVILY_EXTRACT_DEPTH: Optional[str] = None
EXTERNAL_WEB_SEARCH_URL: Optional[str] = None
EXTERNAL_WEB_SEARCH_API_KEY: Optional[str] = None
EXTERNAL_WEB_LOADER_URL: Optional[str] = None
EXTERNAL_WEB_LOADER_API_KEY: Optional[str] = None
YOUTUBE_LOADER_LANGUAGE: Optional[List[str]] = None
YOUTUBE_LOADER_PROXY_URL: Optional[str] = None
YOUTUBE_LOADER_TRANSLATION: Optional[str] = None
class ConfigForm(BaseModel):
# RAG settings
RAG_TEMPLATE: Optional[str] = None
TOP_K: Optional[int] = None
BYPASS_EMBEDDING_AND_RETRIEVAL: Optional[bool] = None
RAG_FULL_CONTEXT: Optional[bool] = None
# Hybrid search settings
ENABLE_RAG_HYBRID_SEARCH: Optional[bool] = None
TOP_K_RERANKER: Optional[int] = None
RELEVANCE_THRESHOLD: Optional[float] = None
HYBRID_BM25_WEIGHT: Optional[float] = None
# Content extraction settings
CONTENT_EXTRACTION_ENGINE: Optional[str] = None
PDF_EXTRACT_IMAGES: Optional[bool] = None
DATALAB_MARKER_API_KEY: Optional[str] = None
DATALAB_MARKER_API_BASE_URL: Optional[str] = None
DATALAB_MARKER_ADDITIONAL_CONFIG: Optional[str] = None
DATALAB_MARKER_SKIP_CACHE: Optional[bool] = None
DATALAB_MARKER_FORCE_OCR: Optional[bool] = None
DATALAB_MARKER_PAGINATE: Optional[bool] = None
DATALAB_MARKER_STRIP_EXISTING_OCR: Optional[bool] = None
DATALAB_MARKER_DISABLE_IMAGE_EXTRACTION: Optional[bool] = None
DATALAB_MARKER_FORMAT_LINES: Optional[bool] = None
DATALAB_MARKER_USE_LLM: Optional[bool] = None
DATALAB_MARKER_OUTPUT_FORMAT: Optional[str] = None
EXTERNAL_DOCUMENT_LOADER_URL: Optional[str] = None
EXTERNAL_DOCUMENT_LOADER_API_KEY: Optional[str] = None
TIKA_SERVER_URL: Optional[str] = None
DOCLING_SERVER_URL: Optional[str] = None
DOCLING_DO_OCR: Optional[bool] = None
DOCLING_FORCE_OCR: Optional[bool] = None
DOCLING_OCR_ENGINE: Optional[str] = None
DOCLING_OCR_LANG: Optional[str] = None
DOCLING_PDF_BACKEND: Optional[str] = None
DOCLING_TABLE_MODE: Optional[str] = None
DOCLING_PIPELINE: Optional[str] = None
DOCLING_DO_PICTURE_DESCRIPTION: Optional[bool] = None
DOCLING_PICTURE_DESCRIPTION_MODE: Optional[str] = None
DOCLING_PICTURE_DESCRIPTION_LOCAL: Optional[dict] = None
DOCLING_PICTURE_DESCRIPTION_API: Optional[dict] = None
DOCUMENT_INTELLIGENCE_ENDPOINT: Optional[str] = None
DOCUMENT_INTELLIGENCE_KEY: Optional[str] = None
MISTRAL_OCR_API_KEY: Optional[str] = None
# Reranking settings
RAG_RERANKING_MODEL: Optional[str] = None
RAG_RERANKING_ENGINE: Optional[str] = None
RAG_EXTERNAL_RERANKER_URL: Optional[str] = None
RAG_EXTERNAL_RERANKER_API_KEY: Optional[str] = None
# Chunking settings
TEXT_SPLITTER: Optional[str] = None
CHUNK_SIZE: Optional[int] = None
CHUNK_OVERLAP: Optional[int] = None
# File upload settings
FILE_MAX_SIZE: Optional[int] = None
FILE_MAX_COUNT: Optional[int] = None
FILE_IMAGE_COMPRESSION_WIDTH: Optional[int] = None
FILE_IMAGE_COMPRESSION_HEIGHT: Optional[int] = None
ALLOWED_FILE_EXTENSIONS: Optional[List[str]] = None
# Integration settings
ENABLE_GOOGLE_DRIVE_INTEGRATION: Optional[bool] = None
ENABLE_ONEDRIVE_INTEGRATION: Optional[bool] = None
# Web search settings
web: Optional[WebConfig] = None
@router.post("/config/update")
async def update_rag_config(
request: Request, form_data: ConfigForm, user=Depends(get_admin_user)
):
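    """Apply a partial RAG configuration update.

    Most fields are only applied when present in the form data; omitted fields
    keep their current values. Reranking changes may reload the reranker when
    hybrid search is enabled, and web settings are applied as a block when
    `web` is provided.
    """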
# RAG settings
request.app.state.config.RAG_TEMPLATE = (
form_data.RAG_TEMPLATE
if form_data.RAG_TEMPLATE is not None
else request.app.state.config.RAG_TEMPLATE
)
request.app.state.config.TOP_K = (
form_data.TOP_K
if form_data.TOP_K is not None
else request.app.state.config.TOP_K
)
request.app.state.config.BYPASS_EMBEDDING_AND_RETRIEVAL = (
form_data.BYPASS_EMBEDDING_AND_RETRIEVAL
if form_data.BYPASS_EMBEDDING_AND_RETRIEVAL is not None
else request.app.state.config.BYPASS_EMBEDDING_AND_RETRIEVAL
)
request.app.state.config.RAG_FULL_CONTEXT = (
form_data.RAG_FULL_CONTEXT
if form_data.RAG_FULL_CONTEXT is not None
else request.app.state.config.RAG_FULL_CONTEXT
)
# Hybrid search settings
request.app.state.config.ENABLE_RAG_HYBRID_SEARCH = (
form_data.ENABLE_RAG_HYBRID_SEARCH
if form_data.ENABLE_RAG_HYBRID_SEARCH is not None
else request.app.state.config.ENABLE_RAG_HYBRID_SEARCH
)
request.app.state.config.TOP_K_RERANKER = (
form_data.TOP_K_RERANKER
if form_data.TOP_K_RERANKER is not None
else request.app.state.config.TOP_K_RERANKER
)
request.app.state.config.RELEVANCE_THRESHOLD = (
form_data.RELEVANCE_THRESHOLD
if form_data.RELEVANCE_THRESHOLD is not None
else request.app.state.config.RELEVANCE_THRESHOLD
)
request.app.state.config.HYBRID_BM25_WEIGHT = (
form_data.HYBRID_BM25_WEIGHT
if form_data.HYBRID_BM25_WEIGHT is not None
else request.app.state.config.HYBRID_BM25_WEIGHT
)
# Content extraction settings
request.app.state.config.CONTENT_EXTRACTION_ENGINE = (
form_data.CONTENT_EXTRACTION_ENGINE
if form_data.CONTENT_EXTRACTION_ENGINE is not None
else request.app.state.config.CONTENT_EXTRACTION_ENGINE
)
request.app.state.config.PDF_EXTRACT_IMAGES = (
form_data.PDF_EXTRACT_IMAGES
if form_data.PDF_EXTRACT_IMAGES is not None
else request.app.state.config.PDF_EXTRACT_IMAGES
)
request.app.state.config.DATALAB_MARKER_API_KEY = (
form_data.DATALAB_MARKER_API_KEY
if form_data.DATALAB_MARKER_API_KEY is not None
else request.app.state.config.DATALAB_MARKER_API_KEY
)
request.app.state.config.DATALAB_MARKER_API_BASE_URL = (
form_data.DATALAB_MARKER_API_BASE_URL
if form_data.DATALAB_MARKER_API_BASE_URL is not None
else request.app.state.config.DATALAB_MARKER_API_BASE_URL
)
request.app.state.config.DATALAB_MARKER_ADDITIONAL_CONFIG = (
form_data.DATALAB_MARKER_ADDITIONAL_CONFIG
if form_data.DATALAB_MARKER_ADDITIONAL_CONFIG is not None
else request.app.state.config.DATALAB_MARKER_ADDITIONAL_CONFIG
)
request.app.state.config.DATALAB_MARKER_SKIP_CACHE = (
form_data.DATALAB_MARKER_SKIP_CACHE
if form_data.DATALAB_MARKER_SKIP_CACHE is not None
else request.app.state.config.DATALAB_MARKER_SKIP_CACHE
)
request.app.state.config.DATALAB_MARKER_FORCE_OCR = (
form_data.DATALAB_MARKER_FORCE_OCR
if form_data.DATALAB_MARKER_FORCE_OCR is not None
else request.app.state.config.DATALAB_MARKER_FORCE_OCR
)
request.app.state.config.DATALAB_MARKER_PAGINATE = (
form_data.DATALAB_MARKER_PAGINATE
if form_data.DATALAB_MARKER_PAGINATE is not None
else request.app.state.config.DATALAB_MARKER_PAGINATE
)
request.app.state.config.DATALAB_MARKER_STRIP_EXISTING_OCR = (
form_data.DATALAB_MARKER_STRIP_EXISTING_OCR
if form_data.DATALAB_MARKER_STRIP_EXISTING_OCR is not None
else request.app.state.config.DATALAB_MARKER_STRIP_EXISTING_OCR
)
request.app.state.config.DATALAB_MARKER_DISABLE_IMAGE_EXTRACTION = (
form_data.DATALAB_MARKER_DISABLE_IMAGE_EXTRACTION
if form_data.DATALAB_MARKER_DISABLE_IMAGE_EXTRACTION is not None
else request.app.state.config.DATALAB_MARKER_DISABLE_IMAGE_EXTRACTION
)
request.app.state.config.DATALAB_MARKER_FORMAT_LINES = (
form_data.DATALAB_MARKER_FORMAT_LINES
if form_data.DATALAB_MARKER_FORMAT_LINES is not None
else request.app.state.config.DATALAB_MARKER_FORMAT_LINES
)
request.app.state.config.DATALAB_MARKER_OUTPUT_FORMAT = (
form_data.DATALAB_MARKER_OUTPUT_FORMAT
if form_data.DATALAB_MARKER_OUTPUT_FORMAT is not None
else request.app.state.config.DATALAB_MARKER_OUTPUT_FORMAT
)
request.app.state.config.DATALAB_MARKER_USE_LLM = (
form_data.DATALAB_MARKER_USE_LLM
if form_data.DATALAB_MARKER_USE_LLM is not None
else request.app.state.config.DATALAB_MARKER_USE_LLM
)
request.app.state.config.EXTERNAL_DOCUMENT_LOADER_URL = (
form_data.EXTERNAL_DOCUMENT_LOADER_URL
if form_data.EXTERNAL_DOCUMENT_LOADER_URL is not None
else request.app.state.config.EXTERNAL_DOCUMENT_LOADER_URL
)
request.app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY = (
form_data.EXTERNAL_DOCUMENT_LOADER_API_KEY
if form_data.EXTERNAL_DOCUMENT_LOADER_API_KEY is not None
else request.app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY
)
request.app.state.config.TIKA_SERVER_URL = (
form_data.TIKA_SERVER_URL
if form_data.TIKA_SERVER_URL is not None
else request.app.state.config.TIKA_SERVER_URL
)
request.app.state.config.DOCLING_SERVER_URL = (
form_data.DOCLING_SERVER_URL
if form_data.DOCLING_SERVER_URL is not None
else request.app.state.config.DOCLING_SERVER_URL
)
request.app.state.config.DOCLING_DO_OCR = (
form_data.DOCLING_DO_OCR
if form_data.DOCLING_DO_OCR is not None
else request.app.state.config.DOCLING_DO_OCR
)
request.app.state.config.DOCLING_FORCE_OCR = (
form_data.DOCLING_FORCE_OCR
if form_data.DOCLING_FORCE_OCR is not None
else request.app.state.config.DOCLING_FORCE_OCR
)
request.app.state.config.DOCLING_OCR_ENGINE = (
form_data.DOCLING_OCR_ENGINE
if form_data.DOCLING_OCR_ENGINE is not None
else request.app.state.config.DOCLING_OCR_ENGINE
)
request.app.state.config.DOCLING_OCR_LANG = (
form_data.DOCLING_OCR_LANG
if form_data.DOCLING_OCR_LANG is not None
else request.app.state.config.DOCLING_OCR_LANG
)
request.app.state.config.DOCLING_PDF_BACKEND = (
form_data.DOCLING_PDF_BACKEND
if form_data.DOCLING_PDF_BACKEND is not None
else request.app.state.config.DOCLING_PDF_BACKEND
)
request.app.state.config.DOCLING_TABLE_MODE = (
form_data.DOCLING_TABLE_MODE
if form_data.DOCLING_TABLE_MODE is not None
else request.app.state.config.DOCLING_TABLE_MODE
)
request.app.state.config.DOCLING_PIPELINE = (
form_data.DOCLING_PIPELINE
if form_data.DOCLING_PIPELINE is not None
else request.app.state.config.DOCLING_PIPELINE
)
request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION = (
form_data.DOCLING_DO_PICTURE_DESCRIPTION
if form_data.DOCLING_DO_PICTURE_DESCRIPTION is not None
else request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION
)
request.app.state.config.DOCLING_PICTURE_DESCRIPTION_MODE = (
form_data.DOCLING_PICTURE_DESCRIPTION_MODE
if form_data.DOCLING_PICTURE_DESCRIPTION_MODE is not None
else request.app.state.config.DOCLING_PICTURE_DESCRIPTION_MODE
)
request.app.state.config.DOCLING_PICTURE_DESCRIPTION_LOCAL = (
form_data.DOCLING_PICTURE_DESCRIPTION_LOCAL
if form_data.DOCLING_PICTURE_DESCRIPTION_LOCAL is not None
else request.app.state.config.DOCLING_PICTURE_DESCRIPTION_LOCAL
)
request.app.state.config.DOCLING_PICTURE_DESCRIPTION_API = (
form_data.DOCLING_PICTURE_DESCRIPTION_API
if form_data.DOCLING_PICTURE_DESCRIPTION_API is not None
else request.app.state.config.DOCLING_PICTURE_DESCRIPTION_API
)
request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT = (
form_data.DOCUMENT_INTELLIGENCE_ENDPOINT
if form_data.DOCUMENT_INTELLIGENCE_ENDPOINT is not None
else request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT
)
request.app.state.config.DOCUMENT_INTELLIGENCE_KEY = (
form_data.DOCUMENT_INTELLIGENCE_KEY
if form_data.DOCUMENT_INTELLIGENCE_KEY is not None
else request.app.state.config.DOCUMENT_INTELLIGENCE_KEY
)
request.app.state.config.MISTRAL_OCR_API_KEY = (
form_data.MISTRAL_OCR_API_KEY
if form_data.MISTRAL_OCR_API_KEY is not None
else request.app.state.config.MISTRAL_OCR_API_KEY
)
# Reranking settings
if request.app.state.config.RAG_RERANKING_ENGINE == "":
        # Unload the internal reranker and clear the VRAM cache
request.app.state.rf = None
request.app.state.RERANKING_FUNCTION = None
import gc
gc.collect()
if DEVICE_TYPE == "cuda":
import torch
if torch.cuda.is_available():
torch.cuda.empty_cache()
request.app.state.config.RAG_RERANKING_ENGINE = (
form_data.RAG_RERANKING_ENGINE
if form_data.RAG_RERANKING_ENGINE is not None
else request.app.state.config.RAG_RERANKING_ENGINE
)
request.app.state.config.RAG_EXTERNAL_RERANKER_URL = (
form_data.RAG_EXTERNAL_RERANKER_URL
if form_data.RAG_EXTERNAL_RERANKER_URL is not None
else request.app.state.config.RAG_EXTERNAL_RERANKER_URL
)
request.app.state.config.RAG_EXTERNAL_RERANKER_API_KEY = (
form_data.RAG_EXTERNAL_RERANKER_API_KEY
if form_data.RAG_EXTERNAL_RERANKER_API_KEY is not None
else request.app.state.config.RAG_EXTERNAL_RERANKER_API_KEY
)
log.info(
f"Updating reranking model: {request.app.state.config.RAG_RERANKING_MODEL} to {form_data.RAG_RERANKING_MODEL}"
)
try:
request.app.state.config.RAG_RERANKING_MODEL = (
form_data.RAG_RERANKING_MODEL
if form_data.RAG_RERANKING_MODEL is not None
else request.app.state.config.RAG_RERANKING_MODEL
)
try:
if (
request.app.state.config.ENABLE_RAG_HYBRID_SEARCH
and not request.app.state.config.BYPASS_EMBEDDING_AND_RETRIEVAL
):
request.app.state.rf = get_rf(
request.app.state.config.RAG_RERANKING_ENGINE,
request.app.state.config.RAG_RERANKING_MODEL,
request.app.state.config.RAG_EXTERNAL_RERANKER_URL,
request.app.state.config.RAG_EXTERNAL_RERANKER_API_KEY,
True,
)
request.app.state.RERANKING_FUNCTION = get_reranking_function(
request.app.state.config.RAG_RERANKING_ENGINE,
request.app.state.config.RAG_RERANKING_MODEL,
request.app.state.rf,
)
except Exception as e:
log.error(f"Error loading reranking model: {e}")
request.app.state.config.ENABLE_RAG_HYBRID_SEARCH = False
except Exception as e:
log.exception(f"Problem updating reranking model: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ERROR_MESSAGES.DEFAULT(e),
)
# Chunking settings
request.app.state.config.TEXT_SPLITTER = (
form_data.TEXT_SPLITTER
if form_data.TEXT_SPLITTER is not None
else request.app.state.config.TEXT_SPLITTER
)
request.app.state.config.CHUNK_SIZE = (
form_data.CHUNK_SIZE
if form_data.CHUNK_SIZE is not None
else request.app.state.config.CHUNK_SIZE
)
request.app.state.config.CHUNK_OVERLAP = (
form_data.CHUNK_OVERLAP
if form_data.CHUNK_OVERLAP is not None
else request.app.state.config.CHUNK_OVERLAP
)
# File upload settings
request.app.state.config.FILE_MAX_SIZE = form_data.FILE_MAX_SIZE
request.app.state.config.FILE_MAX_COUNT = form_data.FILE_MAX_COUNT
request.app.state.config.FILE_IMAGE_COMPRESSION_WIDTH = (
form_data.FILE_IMAGE_COMPRESSION_WIDTH
)
request.app.state.config.FILE_IMAGE_COMPRESSION_HEIGHT = (
form_data.FILE_IMAGE_COMPRESSION_HEIGHT
)
request.app.state.config.ALLOWED_FILE_EXTENSIONS = (
form_data.ALLOWED_FILE_EXTENSIONS
if form_data.ALLOWED_FILE_EXTENSIONS is not None
else request.app.state.config.ALLOWED_FILE_EXTENSIONS
)
# Integration settings
request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION = (
form_data.ENABLE_GOOGLE_DRIVE_INTEGRATION
if form_data.ENABLE_GOOGLE_DRIVE_INTEGRATION is not None
else request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION
)
request.app.state.config.ENABLE_ONEDRIVE_INTEGRATION = (
form_data.ENABLE_ONEDRIVE_INTEGRATION
if form_data.ENABLE_ONEDRIVE_INTEGRATION is not None
else request.app.state.config.ENABLE_ONEDRIVE_INTEGRATION
)
if form_data.web is not None:
# Web search settings
request.app.state.config.ENABLE_WEB_SEARCH = form_data.web.ENABLE_WEB_SEARCH
request.app.state.config.WEB_SEARCH_ENGINE = form_data.web.WEB_SEARCH_ENGINE
request.app.state.config.WEB_SEARCH_TRUST_ENV = (
form_data.web.WEB_SEARCH_TRUST_ENV
)
request.app.state.config.WEB_SEARCH_RESULT_COUNT = (
form_data.web.WEB_SEARCH_RESULT_COUNT
)
request.app.state.config.WEB_SEARCH_CONCURRENT_REQUESTS = (
form_data.web.WEB_SEARCH_CONCURRENT_REQUESTS
)
request.app.state.config.WEB_LOADER_CONCURRENT_REQUESTS = (
form_data.web.WEB_LOADER_CONCURRENT_REQUESTS
)
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST = (
form_data.web.WEB_SEARCH_DOMAIN_FILTER_LIST
)
request.app.state.config.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL = (
form_data.web.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL
)
request.app.state.config.BYPASS_WEB_SEARCH_WEB_LOADER = (
form_data.web.BYPASS_WEB_SEARCH_WEB_LOADER
)
request.app.state.config.SEARXNG_QUERY_URL = form_data.web.SEARXNG_QUERY_URL
request.app.state.config.YACY_QUERY_URL = form_data.web.YACY_QUERY_URL
request.app.state.config.YACY_USERNAME = form_data.web.YACY_USERNAME
request.app.state.config.YACY_PASSWORD = form_data.web.YACY_PASSWORD
request.app.state.config.GOOGLE_PSE_API_KEY = form_data.web.GOOGLE_PSE_API_KEY
request.app.state.config.GOOGLE_PSE_ENGINE_ID = (
form_data.web.GOOGLE_PSE_ENGINE_ID
)
request.app.state.config.BRAVE_SEARCH_API_KEY = (
form_data.web.BRAVE_SEARCH_API_KEY
)
request.app.state.config.KAGI_SEARCH_API_KEY = form_data.web.KAGI_SEARCH_API_KEY
request.app.state.config.MOJEEK_SEARCH_API_KEY = (
form_data.web.MOJEEK_SEARCH_API_KEY
)
request.app.state.config.BOCHA_SEARCH_API_KEY = (
form_data.web.BOCHA_SEARCH_API_KEY
)
request.app.state.config.SERPSTACK_API_KEY = form_data.web.SERPSTACK_API_KEY
request.app.state.config.SERPSTACK_HTTPS = form_data.web.SERPSTACK_HTTPS
request.app.state.config.SERPER_API_KEY = form_data.web.SERPER_API_KEY
request.app.state.config.SERPLY_API_KEY = form_data.web.SERPLY_API_KEY
request.app.state.config.TAVILY_API_KEY = form_data.web.TAVILY_API_KEY
request.app.state.config.SEARCHAPI_API_KEY = form_data.web.SEARCHAPI_API_KEY
request.app.state.config.SEARCHAPI_ENGINE = form_data.web.SEARCHAPI_ENGINE
request.app.state.config.SERPAPI_API_KEY = form_data.web.SERPAPI_API_KEY
request.app.state.config.SERPAPI_ENGINE = form_data.web.SERPAPI_ENGINE
request.app.state.config.JINA_API_KEY = form_data.web.JINA_API_KEY
request.app.state.config.BING_SEARCH_V7_ENDPOINT = (
form_data.web.BING_SEARCH_V7_ENDPOINT
)
request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY = (
form_data.web.BING_SEARCH_V7_SUBSCRIPTION_KEY
)
request.app.state.config.EXA_API_KEY = form_data.web.EXA_API_KEY
request.app.state.config.PERPLEXITY_API_KEY = form_data.web.PERPLEXITY_API_KEY
request.app.state.config.PERPLEXITY_MODEL = form_data.web.PERPLEXITY_MODEL
request.app.state.config.PERPLEXITY_SEARCH_CONTEXT_USAGE = (
form_data.web.PERPLEXITY_SEARCH_CONTEXT_USAGE
)
request.app.state.config.SOUGOU_API_SID = form_data.web.SOUGOU_API_SID
request.app.state.config.SOUGOU_API_SK = form_data.web.SOUGOU_API_SK
# Web loader settings
request.app.state.config.WEB_LOADER_ENGINE = form_data.web.WEB_LOADER_ENGINE
request.app.state.config.ENABLE_WEB_LOADER_SSL_VERIFICATION = (
form_data.web.ENABLE_WEB_LOADER_SSL_VERIFICATION
)
request.app.state.config.PLAYWRIGHT_WS_URL = form_data.web.PLAYWRIGHT_WS_URL
request.app.state.config.PLAYWRIGHT_TIMEOUT = form_data.web.PLAYWRIGHT_TIMEOUT
request.app.state.config.FIRECRAWL_API_KEY = form_data.web.FIRECRAWL_API_KEY
request.app.state.config.FIRECRAWL_API_BASE_URL = (
form_data.web.FIRECRAWL_API_BASE_URL
)
request.app.state.config.EXTERNAL_WEB_SEARCH_URL = (
form_data.web.EXTERNAL_WEB_SEARCH_URL
)
request.app.state.config.EXTERNAL_WEB_SEARCH_API_KEY = (
form_data.web.EXTERNAL_WEB_SEARCH_API_KEY
)
request.app.state.config.EXTERNAL_WEB_LOADER_URL = (
form_data.web.EXTERNAL_WEB_LOADER_URL
)
request.app.state.config.EXTERNAL_WEB_LOADER_API_KEY = (
form_data.web.EXTERNAL_WEB_LOADER_API_KEY
)
request.app.state.config.TAVILY_EXTRACT_DEPTH = (
form_data.web.TAVILY_EXTRACT_DEPTH
)
request.app.state.config.YOUTUBE_LOADER_LANGUAGE = (
form_data.web.YOUTUBE_LOADER_LANGUAGE
)
request.app.state.config.YOUTUBE_LOADER_PROXY_URL = (
form_data.web.YOUTUBE_LOADER_PROXY_URL
)
request.app.state.YOUTUBE_LOADER_TRANSLATION = (
form_data.web.YOUTUBE_LOADER_TRANSLATION
)
return {
"status": True,
# RAG settings
"RAG_TEMPLATE": request.app.state.config.RAG_TEMPLATE,
"TOP_K": request.app.state.config.TOP_K,
"BYPASS_EMBEDDING_AND_RETRIEVAL": request.app.state.config.BYPASS_EMBEDDING_AND_RETRIEVAL,
"RAG_FULL_CONTEXT": request.app.state.config.RAG_FULL_CONTEXT,
# Hybrid search settings
"ENABLE_RAG_HYBRID_SEARCH": request.app.state.config.ENABLE_RAG_HYBRID_SEARCH,
"TOP_K_RERANKER": request.app.state.config.TOP_K_RERANKER,
"RELEVANCE_THRESHOLD": request.app.state.config.RELEVANCE_THRESHOLD,
"HYBRID_BM25_WEIGHT": request.app.state.config.HYBRID_BM25_WEIGHT,
# Content extraction settings
"CONTENT_EXTRACTION_ENGINE": request.app.state.config.CONTENT_EXTRACTION_ENGINE,
"PDF_EXTRACT_IMAGES": request.app.state.config.PDF_EXTRACT_IMAGES,
"DATALAB_MARKER_API_KEY": request.app.state.config.DATALAB_MARKER_API_KEY,
"DATALAB_MARKER_API_BASE_URL": request.app.state.config.DATALAB_MARKER_API_BASE_URL,
"DATALAB_MARKER_ADDITIONAL_CONFIG": request.app.state.config.DATALAB_MARKER_ADDITIONAL_CONFIG,
"DATALAB_MARKER_SKIP_CACHE": request.app.state.config.DATALAB_MARKER_SKIP_CACHE,
"DATALAB_MARKER_FORCE_OCR": request.app.state.config.DATALAB_MARKER_FORCE_OCR,
"DATALAB_MARKER_PAGINATE": request.app.state.config.DATALAB_MARKER_PAGINATE,
"DATALAB_MARKER_STRIP_EXISTING_OCR": request.app.state.config.DATALAB_MARKER_STRIP_EXISTING_OCR,
"DATALAB_MARKER_DISABLE_IMAGE_EXTRACTION": request.app.state.config.DATALAB_MARKER_DISABLE_IMAGE_EXTRACTION,
"DATALAB_MARKER_USE_LLM": request.app.state.config.DATALAB_MARKER_USE_LLM,
"DATALAB_MARKER_OUTPUT_FORMAT": request.app.state.config.DATALAB_MARKER_OUTPUT_FORMAT,
"EXTERNAL_DOCUMENT_LOADER_URL": request.app.state.config.EXTERNAL_DOCUMENT_LOADER_URL,
"EXTERNAL_DOCUMENT_LOADER_API_KEY": request.app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY,
"TIKA_SERVER_URL": request.app.state.config.TIKA_SERVER_URL,
"DOCLING_SERVER_URL": request.app.state.config.DOCLING_SERVER_URL,
"DOCLING_DO_OCR": request.app.state.config.DOCLING_DO_OCR,
"DOCLING_FORCE_OCR": request.app.state.config.DOCLING_FORCE_OCR,
"DOCLING_OCR_ENGINE": request.app.state.config.DOCLING_OCR_ENGINE,
"DOCLING_OCR_LANG": request.app.state.config.DOCLING_OCR_LANG,
"DOCLING_PDF_BACKEND": request.app.state.config.DOCLING_PDF_BACKEND,
"DOCLING_TABLE_MODE": request.app.state.config.DOCLING_TABLE_MODE,
"DOCLING_PIPELINE": request.app.state.config.DOCLING_PIPELINE,
"DOCLING_DO_PICTURE_DESCRIPTION": request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION,
"DOCLING_PICTURE_DESCRIPTION_MODE": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_MODE,
"DOCLING_PICTURE_DESCRIPTION_LOCAL": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_LOCAL,
"DOCLING_PICTURE_DESCRIPTION_API": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_API,
"DOCUMENT_INTELLIGENCE_ENDPOINT": request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT,
"DOCUMENT_INTELLIGENCE_KEY": request.app.state.config.DOCUMENT_INTELLIGENCE_KEY,
"MISTRAL_OCR_API_KEY": request.app.state.config.MISTRAL_OCR_API_KEY,
# Reranking settings
"RAG_RERANKING_MODEL": request.app.state.config.RAG_RERANKING_MODEL,
"RAG_RERANKING_ENGINE": request.app.state.config.RAG_RERANKING_ENGINE,
"RAG_EXTERNAL_RERANKER_URL": request.app.state.config.RAG_EXTERNAL_RERANKER_URL,
"RAG_EXTERNAL_RERANKER_API_KEY": request.app.state.config.RAG_EXTERNAL_RERANKER_API_KEY,
# Chunking settings
"TEXT_SPLITTER": request.app.state.config.TEXT_SPLITTER,
"CHUNK_SIZE": request.app.state.config.CHUNK_SIZE,
"CHUNK_OVERLAP": request.app.state.config.CHUNK_OVERLAP,
# File upload settings
"FILE_MAX_SIZE": request.app.state.config.FILE_MAX_SIZE,
"FILE_MAX_COUNT": request.app.state.config.FILE_MAX_COUNT,
"FILE_IMAGE_COMPRESSION_WIDTH": request.app.state.config.FILE_IMAGE_COMPRESSION_WIDTH,
"FILE_IMAGE_COMPRESSION_HEIGHT": request.app.state.config.FILE_IMAGE_COMPRESSION_HEIGHT,
"ALLOWED_FILE_EXTENSIONS": request.app.state.config.ALLOWED_FILE_EXTENSIONS,
# Integration settings
"ENABLE_GOOGLE_DRIVE_INTEGRATION": request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION,
"ENABLE_ONEDRIVE_INTEGRATION": request.app.state.config.ENABLE_ONEDRIVE_INTEGRATION,
# Web search settings
"web": {
"ENABLE_WEB_SEARCH": request.app.state.config.ENABLE_WEB_SEARCH,
"WEB_SEARCH_ENGINE": request.app.state.config.WEB_SEARCH_ENGINE,
"WEB_SEARCH_TRUST_ENV": request.app.state.config.WEB_SEARCH_TRUST_ENV,
"WEB_SEARCH_RESULT_COUNT": request.app.state.config.WEB_SEARCH_RESULT_COUNT,
"WEB_SEARCH_CONCURRENT_REQUESTS": request.app.state.config.WEB_SEARCH_CONCURRENT_REQUESTS,
"WEB_LOADER_CONCURRENT_REQUESTS": request.app.state.config.WEB_LOADER_CONCURRENT_REQUESTS,
"WEB_SEARCH_DOMAIN_FILTER_LIST": request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
"BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL": request.app.state.config.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL,
"BYPASS_WEB_SEARCH_WEB_LOADER": request.app.state.config.BYPASS_WEB_SEARCH_WEB_LOADER,
"SEARXNG_QUERY_URL": request.app.state.config.SEARXNG_QUERY_URL,
"YACY_QUERY_URL": request.app.state.config.YACY_QUERY_URL,
"YACY_USERNAME": request.app.state.config.YACY_USERNAME,
"YACY_PASSWORD": request.app.state.config.YACY_PASSWORD,
"GOOGLE_PSE_API_KEY": request.app.state.config.GOOGLE_PSE_API_KEY,
"GOOGLE_PSE_ENGINE_ID": request.app.state.config.GOOGLE_PSE_ENGINE_ID,
"BRAVE_SEARCH_API_KEY": request.app.state.config.BRAVE_SEARCH_API_KEY,
"KAGI_SEARCH_API_KEY": request.app.state.config.KAGI_SEARCH_API_KEY,
"MOJEEK_SEARCH_API_KEY": request.app.state.config.MOJEEK_SEARCH_API_KEY,
"BOCHA_SEARCH_API_KEY": request.app.state.config.BOCHA_SEARCH_API_KEY,
"SERPSTACK_API_KEY": request.app.state.config.SERPSTACK_API_KEY,
"SERPSTACK_HTTPS": request.app.state.config.SERPSTACK_HTTPS,
"SERPER_API_KEY": request.app.state.config.SERPER_API_KEY,
"SERPLY_API_KEY": request.app.state.config.SERPLY_API_KEY,
"TAVILY_API_KEY": request.app.state.config.TAVILY_API_KEY,
"SEARCHAPI_API_KEY": request.app.state.config.SEARCHAPI_API_KEY,
"SEARCHAPI_ENGINE": request.app.state.config.SEARCHAPI_ENGINE,
"SERPAPI_API_KEY": request.app.state.config.SERPAPI_API_KEY,
"SERPAPI_ENGINE": request.app.state.config.SERPAPI_ENGINE,
"JINA_API_KEY": request.app.state.config.JINA_API_KEY,
"BING_SEARCH_V7_ENDPOINT": request.app.state.config.BING_SEARCH_V7_ENDPOINT,
"BING_SEARCH_V7_SUBSCRIPTION_KEY": request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY,
"EXA_API_KEY": request.app.state.config.EXA_API_KEY,
"PERPLEXITY_API_KEY": request.app.state.config.PERPLEXITY_API_KEY,
"PERPLEXITY_MODEL": request.app.state.config.PERPLEXITY_MODEL,
"PERPLEXITY_SEARCH_CONTEXT_USAGE": request.app.state.config.PERPLEXITY_SEARCH_CONTEXT_USAGE,
"SOUGOU_API_SID": request.app.state.config.SOUGOU_API_SID,
"SOUGOU_API_SK": request.app.state.config.SOUGOU_API_SK,
"WEB_LOADER_ENGINE": request.app.state.config.WEB_LOADER_ENGINE,
"ENABLE_WEB_LOADER_SSL_VERIFICATION": request.app.state.config.ENABLE_WEB_LOADER_SSL_VERIFICATION,
"PLAYWRIGHT_WS_URL": request.app.state.config.PLAYWRIGHT_WS_URL,
"PLAYWRIGHT_TIMEOUT": request.app.state.config.PLAYWRIGHT_TIMEOUT,
"FIRECRAWL_API_KEY": request.app.state.config.FIRECRAWL_API_KEY,
"FIRECRAWL_API_BASE_URL": request.app.state.config.FIRECRAWL_API_BASE_URL,
"TAVILY_EXTRACT_DEPTH": request.app.state.config.TAVILY_EXTRACT_DEPTH,
"EXTERNAL_WEB_SEARCH_URL": request.app.state.config.EXTERNAL_WEB_SEARCH_URL,
"EXTERNAL_WEB_SEARCH_API_KEY": request.app.state.config.EXTERNAL_WEB_SEARCH_API_KEY,
"EXTERNAL_WEB_LOADER_URL": request.app.state.config.EXTERNAL_WEB_LOADER_URL,
"EXTERNAL_WEB_LOADER_API_KEY": request.app.state.config.EXTERNAL_WEB_LOADER_API_KEY,
"YOUTUBE_LOADER_LANGUAGE": request.app.state.config.YOUTUBE_LOADER_LANGUAGE,
"YOUTUBE_LOADER_PROXY_URL": request.app.state.config.YOUTUBE_LOADER_PROXY_URL,
"YOUTUBE_LOADER_TRANSLATION": request.app.state.YOUTUBE_LOADER_TRANSLATION,
},
}
####################################
#
# Document process and retrieval
#
####################################
def save_docs_to_vector_db(
request: Request,
docs,
collection_name,
metadata: Optional[dict] = None,
overwrite: bool = False,
split: bool = True,
add: bool = False,
user=None,
) -> bool:
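    """Split, embed, and store documents in the vector database collection.

    Rejects content whose hash already exists in the collection, splits the
    documents with the configured text splitter (character, token, or
    markdown_header) unless `split` is False, and persists the resulting
    chunks. Raises ValueError for duplicate or empty content.
    """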
def _get_docs_info(docs: list[Document]) -> str:
docs_info = set()
# Trying to select relevant metadata identifying the document.
for doc in docs:
metadata = getattr(doc, "metadata", {})
doc_name = metadata.get("name", "")
if not doc_name:
doc_name = metadata.get("title", "")
if not doc_name:
doc_name = metadata.get("source", "")
if doc_name:
docs_info.add(doc_name)
return ", ".join(docs_info)
log.info(
f"save_docs_to_vector_db: document {_get_docs_info(docs)} {collection_name}"
)
# Check if entries with the same hash (metadata.hash) already exist
if metadata and "hash" in metadata:
result = VECTOR_DB_CLIENT.query(
collection_name=collection_name,
filter={"hash": metadata["hash"]},
)
if result is not None:
existing_doc_ids = result.ids[0]
if existing_doc_ids:
log.info(f"Document with hash {metadata['hash']} already exists")
raise ValueError(ERROR_MESSAGES.DUPLICATE_CONTENT)
if split:
if request.app.state.config.TEXT_SPLITTER in ["", "character"]:
text_splitter = RecursiveCharacterTextSplitter(
chunk_size=request.app.state.config.CHUNK_SIZE,
chunk_overlap=request.app.state.config.CHUNK_OVERLAP,
add_start_index=True,
)
docs = text_splitter.split_documents(docs)
elif request.app.state.config.TEXT_SPLITTER == "token":
log.info(
f"Using token text splitter: {request.app.state.config.TIKTOKEN_ENCODING_NAME}"
)
tiktoken.get_encoding(str(request.app.state.config.TIKTOKEN_ENCODING_NAME))
text_splitter = TokenTextSplitter(
encoding_name=str(request.app.state.config.TIKTOKEN_ENCODING_NAME),
chunk_size=request.app.state.config.CHUNK_SIZE,
chunk_overlap=request.app.state.config.CHUNK_OVERLAP,
add_start_index=True,
)
docs = text_splitter.split_documents(docs)
elif request.app.state.config.TEXT_SPLITTER == "markdown_header":
log.info("Using markdown header text splitter")
# Define headers to split on - covering most common markdown header levels
headers_to_split_on = [
("#", "Header 1"),
("##", "Header 2"),
("###", "Header 3"),
("####", "Header 4"),
("#####", "Header 5"),
("######", "Header 6"),
]
markdown_splitter = MarkdownHeaderTextSplitter(
headers_to_split_on=headers_to_split_on,
strip_headers=False, # Keep headers in content for context
)
md_split_docs = []
for doc in docs:
md_header_splits = markdown_splitter.split_text(doc.page_content)
text_splitter = RecursiveCharacterTextSplitter(
chunk_size=request.app.state.config.CHUNK_SIZE,
chunk_overlap=request.app.state.config.CHUNK_OVERLAP,
add_start_index=True,
)
md_header_splits = text_splitter.split_documents(md_header_splits)
# Convert back to Document objects, preserving original metadata
for split_chunk in md_header_splits:
headings_list = []
# Extract header values in order based on headers_to_split_on
for _, header_meta_key_name in headers_to_split_on:
if header_meta_key_name in split_chunk.metadata:
headings_list.append(
split_chunk.metadata[header_meta_key_name]
)
md_split_docs.append(
Document(
page_content=split_chunk.page_content,
metadata={**doc.metadata, "headings": headings_list},
)
)
docs = md_split_docs
else:
raise ValueError(ERROR_MESSAGES.DEFAULT("Invalid text splitter"))
if len(docs) == 0:
raise ValueError(ERROR_MESSAGES.EMPTY_CONTENT)
texts = [doc.page_content for doc in docs]
metadatas = [
{
**doc.metadata,
**(metadata if metadata else {}),
"embedding_config": {
"engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
"model": request.app.state.config.RAG_EMBEDDING_MODEL,
},
}
for doc in docs
]
try:
if VECTOR_DB_CLIENT.has_collection(collection_name=collection_name):
log.info(f"collection {collection_name} already exists")
if overwrite:
VECTOR_DB_CLIENT.delete_collection(collection_name=collection_name)
log.info(f"deleting existing collection {collection_name}")
elif add is False:
log.info(
f"collection {collection_name} already exists, overwrite is False and add is False"
)
return True
log.info(f"generating embeddings for {collection_name}")
embedding_function = get_embedding_function(
request.app.state.config.RAG_EMBEDDING_ENGINE,
request.app.state.config.RAG_EMBEDDING_MODEL,
request.app.state.ef,
(
request.app.state.config.RAG_OPENAI_API_BASE_URL
if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
else (
request.app.state.config.RAG_OLLAMA_BASE_URL
if request.app.state.config.RAG_EMBEDDING_ENGINE == "ollama"
else request.app.state.config.RAG_AZURE_OPENAI_BASE_URL
)
),
(
request.app.state.config.RAG_OPENAI_API_KEY
if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
else (
request.app.state.config.RAG_OLLAMA_API_KEY
if request.app.state.config.RAG_EMBEDDING_ENGINE == "ollama"
else request.app.state.config.RAG_AZURE_OPENAI_API_KEY
)
),
request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
azure_api_version=(
request.app.state.config.RAG_AZURE_OPENAI_API_VERSION
if request.app.state.config.RAG_EMBEDDING_ENGINE == "azure_openai"
else None
),
)
embeddings = embedding_function(
list(map(lambda x: x.replace("\n", " "), texts)),
prefix=RAG_EMBEDDING_CONTENT_PREFIX,
user=user,
)
items = [
{
"id": str(uuid.uuid4()),
"text": text,
"vector": embeddings[idx],
"metadata": metadatas[idx],
}
for idx, text in enumerate(texts)
]
log.info(f"adding to collection {collection_name}")
VECTOR_DB_CLIENT.insert(
collection_name=collection_name,
items=items,
)
return True
except Exception as e:
log.exception(e)
raise e
class ProcessFileForm(BaseModel):
file_id: str
content: Optional[str] = None
collection_name: Optional[str] = None
@router.post("/process/file")
def process_file(
request: Request,
form_data: ProcessFileForm,
user=Depends(get_verified_user),
):
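    """
    Extract, store, and index the content of an uploaded file.

    Serves three callers: direct content updates, adding/updating a file in a
    knowledge collection, and the initial upload pipeline, which runs the
    configured content extraction engine. Indexing is skipped when
    BYPASS_EMBEDDING_AND_RETRIEVAL is enabled.
    """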
try:
file = Files.get_file_by_id(form_data.file_id)
collection_name = form_data.collection_name
if collection_name is None:
collection_name = f"file-{file.id}"
if form_data.content:
# Update the content in the file
# Usage: /files/{file_id}/data/content/update, /files/ (audio file upload pipeline)
try:
# /files/{file_id}/data/content/update
VECTOR_DB_CLIENT.delete_collection(collection_name=f"file-{file.id}")
            except Exception:
# Audio file upload pipeline
pass
docs = [
Document(
page_content=form_data.content.replace("<br/>", "\n"),
metadata={
**file.meta,
"name": file.filename,
"created_by": file.user_id,
"file_id": file.id,
"source": file.filename,
},
)
]
text_content = form_data.content
elif form_data.collection_name:
# Check if the file has already been processed and save the content
# Usage: /knowledge/{id}/file/add, /knowledge/{id}/file/update
result = VECTOR_DB_CLIENT.query(
collection_name=f"file-{file.id}", filter={"file_id": file.id}
)
if result is not None and len(result.ids[0]) > 0:
docs = [
Document(
page_content=result.documents[0][idx],
metadata=result.metadatas[0][idx],
)
for idx, id in enumerate(result.ids[0])
]
else:
docs = [
Document(
page_content=file.data.get("content", ""),
metadata={
**file.meta,
"name": file.filename,
"created_by": file.user_id,
"file_id": file.id,
"source": file.filename,
},
)
]
text_content = file.data.get("content", "")
else:
# Process the file and save the content
# Usage: /files/
file_path = file.path
if file_path:
file_path = Storage.get_file(file_path)
loader = Loader(
engine=request.app.state.config.CONTENT_EXTRACTION_ENGINE,
DATALAB_MARKER_API_KEY=request.app.state.config.DATALAB_MARKER_API_KEY,
DATALAB_MARKER_API_BASE_URL=request.app.state.config.DATALAB_MARKER_API_BASE_URL,
DATALAB_MARKER_ADDITIONAL_CONFIG=request.app.state.config.DATALAB_MARKER_ADDITIONAL_CONFIG,
DATALAB_MARKER_SKIP_CACHE=request.app.state.config.DATALAB_MARKER_SKIP_CACHE,
DATALAB_MARKER_FORCE_OCR=request.app.state.config.DATALAB_MARKER_FORCE_OCR,
DATALAB_MARKER_PAGINATE=request.app.state.config.DATALAB_MARKER_PAGINATE,
DATALAB_MARKER_STRIP_EXISTING_OCR=request.app.state.config.DATALAB_MARKER_STRIP_EXISTING_OCR,
DATALAB_MARKER_DISABLE_IMAGE_EXTRACTION=request.app.state.config.DATALAB_MARKER_DISABLE_IMAGE_EXTRACTION,
DATALAB_MARKER_FORMAT_LINES=request.app.state.config.DATALAB_MARKER_FORMAT_LINES,
DATALAB_MARKER_USE_LLM=request.app.state.config.DATALAB_MARKER_USE_LLM,
DATALAB_MARKER_OUTPUT_FORMAT=request.app.state.config.DATALAB_MARKER_OUTPUT_FORMAT,
EXTERNAL_DOCUMENT_LOADER_URL=request.app.state.config.EXTERNAL_DOCUMENT_LOADER_URL,
EXTERNAL_DOCUMENT_LOADER_API_KEY=request.app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY,
TIKA_SERVER_URL=request.app.state.config.TIKA_SERVER_URL,
DOCLING_SERVER_URL=request.app.state.config.DOCLING_SERVER_URL,
DOCLING_PARAMS={
"do_ocr": request.app.state.config.DOCLING_DO_OCR,
"force_ocr": request.app.state.config.DOCLING_FORCE_OCR,
"ocr_engine": request.app.state.config.DOCLING_OCR_ENGINE,
"ocr_lang": request.app.state.config.DOCLING_OCR_LANG,
"pdf_backend": request.app.state.config.DOCLING_PDF_BACKEND,
"table_mode": request.app.state.config.DOCLING_TABLE_MODE,
"pipeline": request.app.state.config.DOCLING_PIPELINE,
"do_picture_description": request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION,
"picture_description_mode": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_MODE,
"picture_description_local": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_LOCAL,
"picture_description_api": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_API,
},
PDF_EXTRACT_IMAGES=request.app.state.config.PDF_EXTRACT_IMAGES,
DOCUMENT_INTELLIGENCE_ENDPOINT=request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT,
DOCUMENT_INTELLIGENCE_KEY=request.app.state.config.DOCUMENT_INTELLIGENCE_KEY,
MISTRAL_OCR_API_KEY=request.app.state.config.MISTRAL_OCR_API_KEY,
)
docs = loader.load(
file.filename, file.meta.get("content_type"), file_path
)
docs = [
Document(
page_content=doc.page_content,
metadata={
**doc.metadata,
"name": file.filename,
"created_by": file.user_id,
"file_id": file.id,
"source": file.filename,
},
)
for doc in docs
]
else:
docs = [
Document(
page_content=file.data.get("content", ""),
metadata={
**file.meta,
"name": file.filename,
"created_by": file.user_id,
"file_id": file.id,
"source": file.filename,
},
)
]
text_content = " ".join([doc.page_content for doc in docs])
log.debug(f"text_content: {text_content}")
Files.update_file_data_by_id(
file.id,
{"status": "completed", "content": text_content},
)
hash = calculate_sha256_string(text_content)
Files.update_file_hash_by_id(file.id, hash)
if not request.app.state.config.BYPASS_EMBEDDING_AND_RETRIEVAL:
try:
result = save_docs_to_vector_db(
request,
docs=docs,
collection_name=collection_name,
metadata={
"file_id": file.id,
"name": file.filename,
"hash": hash,
},
add=(True if form_data.collection_name else False),
user=user,
)
if result:
Files.update_file_metadata_by_id(
file.id,
{
"collection_name": collection_name,
},
)
return {
"status": True,
"collection_name": collection_name,
"filename": file.filename,
"content": text_content,
}
except Exception as e:
raise e
else:
return {
"status": True,
"collection_name": None,
"filename": file.filename,
"content": text_content,
}
except Exception as e:
log.exception(e)
if "No pandoc was found" in str(e):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.PANDOC_NOT_INSTALLED,
)
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e),
)
class ProcessTextForm(BaseModel):
name: str
content: str
collection_name: Optional[str] = None
@router.post("/process/text")
def process_text(
request: Request,
form_data: ProcessTextForm,
user=Depends(get_verified_user),
):
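    """Index a raw text snippet into a vector DB collection; the collection
    name defaults to the SHA-256 hash of the content."""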
collection_name = form_data.collection_name
if collection_name is None:
collection_name = calculate_sha256_string(form_data.content)
docs = [
Document(
page_content=form_data.content,
metadata={"name": form_data.name, "created_by": user.id},
)
]
text_content = form_data.content
log.debug(f"text_content: {text_content}")
result = save_docs_to_vector_db(request, docs, collection_name, user=user)
if result:
return {
"status": True,
"collection_name": collection_name,
"content": text_content,
}
else:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ERROR_MESSAGES.DEFAULT(),
)
@router.post("/process/youtube")
def process_youtube_video(
request: Request, form_data: ProcessUrlForm, user=Depends(get_verified_user)
):
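    """Load the transcript of a YouTube video and index it into a collection
    derived from the video URL."""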
try:
collection_name = form_data.collection_name
if not collection_name:
collection_name = calculate_sha256_string(form_data.url)[:63]
loader = YoutubeLoader(
form_data.url,
language=request.app.state.config.YOUTUBE_LOADER_LANGUAGE,
proxy_url=request.app.state.config.YOUTUBE_LOADER_PROXY_URL,
)
docs = loader.load()
content = " ".join([doc.page_content for doc in docs])
log.debug(f"text_content: {content}")
save_docs_to_vector_db(
request, docs, collection_name, overwrite=True, user=user
)
return {
"status": True,
"collection_name": collection_name,
"filename": form_data.url,
"file": {
"data": {
"content": content,
},
"meta": {
"name": form_data.url,
},
},
}
except Exception as e:
log.exception(e)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.DEFAULT(e),
)
@router.post("/process/web")
def process_web(
request: Request, form_data: ProcessUrlForm, user=Depends(get_verified_user)
):
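    """Load a single web page and index it into a collection derived from the
    URL, unless BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL is enabled."""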
try:
collection_name = form_data.collection_name
if not collection_name:
collection_name = calculate_sha256_string(form_data.url)[:63]
loader = get_web_loader(
form_data.url,
verify_ssl=request.app.state.config.ENABLE_WEB_LOADER_SSL_VERIFICATION,
requests_per_second=request.app.state.config.WEB_LOADER_CONCURRENT_REQUESTS,
)
docs = loader.load()
content = " ".join([doc.page_content for doc in docs])
log.debug(f"text_content: {content}")
if not request.app.state.config.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL:
save_docs_to_vector_db(
request, docs, collection_name, overwrite=True, user=user
)
else:
collection_name = None
return {
"status": True,
"collection_name": collection_name,
"filename": form_data.url,
"file": {
"data": {
"content": content,
},
"meta": {
"name": form_data.url,
"source": form_data.url,
},
},
}
except Exception as e:
log.exception(e)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.DEFAULT(e),
)
def search_web(request: Request, engine: str, query: str) -> list[SearchResult]:
    """Search the web using the configured search engine and return the results
    as a list of SearchResult objects.

    The engine is selected by `engine`; the credentials below are read from the
    application config (usually populated from environment variables):
    - SEARXNG_QUERY_URL
    - YACY_QUERY_URL + YACY_USERNAME + YACY_PASSWORD
    - GOOGLE_PSE_API_KEY + GOOGLE_PSE_ENGINE_ID
    - BRAVE_SEARCH_API_KEY
    - KAGI_SEARCH_API_KEY
    - MOJEEK_SEARCH_API_KEY
    - BOCHA_SEARCH_API_KEY
    - SERPSTACK_API_KEY
    - SERPER_API_KEY
    - SERPLY_API_KEY
    - TAVILY_API_KEY
    - EXA_API_KEY
    - PERPLEXITY_API_KEY
    - SOUGOU_API_SID + SOUGOU_API_SK
    - SEARCHAPI_API_KEY + SEARCHAPI_ENGINE (by default `google`)
    - SERPAPI_API_KEY + SERPAPI_ENGINE (by default `google`)

    Args:
        request (Request): The current request, used to read the app config
        engine (str): The search engine to use
        query (str): The query to search for
    """
# TODO: add playwright to search the web
if engine == "searxng":
if request.app.state.config.SEARXNG_QUERY_URL:
return search_searxng(
request.app.state.config.SEARXNG_QUERY_URL,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No SEARXNG_QUERY_URL found in environment variables")
elif engine == "yacy":
if request.app.state.config.YACY_QUERY_URL:
return search_yacy(
request.app.state.config.YACY_QUERY_URL,
request.app.state.config.YACY_USERNAME,
request.app.state.config.YACY_PASSWORD,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No YACY_QUERY_URL found in environment variables")
elif engine == "google_pse":
if (
request.app.state.config.GOOGLE_PSE_API_KEY
and request.app.state.config.GOOGLE_PSE_ENGINE_ID
):
return search_google_pse(
request.app.state.config.GOOGLE_PSE_API_KEY,
request.app.state.config.GOOGLE_PSE_ENGINE_ID,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception(
"No GOOGLE_PSE_API_KEY or GOOGLE_PSE_ENGINE_ID found in environment variables"
)
elif engine == "brave":
if request.app.state.config.BRAVE_SEARCH_API_KEY:
return search_brave(
request.app.state.config.BRAVE_SEARCH_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No BRAVE_SEARCH_API_KEY found in environment variables")
elif engine == "kagi":
if request.app.state.config.KAGI_SEARCH_API_KEY:
return search_kagi(
request.app.state.config.KAGI_SEARCH_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No KAGI_SEARCH_API_KEY found in environment variables")
elif engine == "mojeek":
if request.app.state.config.MOJEEK_SEARCH_API_KEY:
return search_mojeek(
request.app.state.config.MOJEEK_SEARCH_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No MOJEEK_SEARCH_API_KEY found in environment variables")
elif engine == "bocha":
if request.app.state.config.BOCHA_SEARCH_API_KEY:
return search_bocha(
request.app.state.config.BOCHA_SEARCH_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No BOCHA_SEARCH_API_KEY found in environment variables")
elif engine == "serpstack":
if request.app.state.config.SERPSTACK_API_KEY:
return search_serpstack(
request.app.state.config.SERPSTACK_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
https_enabled=request.app.state.config.SERPSTACK_HTTPS,
)
else:
raise Exception("No SERPSTACK_API_KEY found in environment variables")
elif engine == "serper":
if request.app.state.config.SERPER_API_KEY:
return search_serper(
request.app.state.config.SERPER_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No SERPER_API_KEY found in environment variables")
elif engine == "serply":
if request.app.state.config.SERPLY_API_KEY:
return search_serply(
request.app.state.config.SERPLY_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
filter_list=request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No SERPLY_API_KEY found in environment variables")
elif engine == "duckduckgo":
return search_duckduckgo(
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
concurrent_requests=request.app.state.config.WEB_SEARCH_CONCURRENT_REQUESTS,
)
elif engine == "tavily":
if request.app.state.config.TAVILY_API_KEY:
return search_tavily(
request.app.state.config.TAVILY_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No TAVILY_API_KEY found in environment variables")
elif engine == "exa":
if request.app.state.config.EXA_API_KEY:
return search_exa(
request.app.state.config.EXA_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No EXA_API_KEY found in environment variables")
elif engine == "searchapi":
if request.app.state.config.SEARCHAPI_API_KEY:
return search_searchapi(
request.app.state.config.SEARCHAPI_API_KEY,
request.app.state.config.SEARCHAPI_ENGINE,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No SEARCHAPI_API_KEY found in environment variables")
elif engine == "serpapi":
if request.app.state.config.SERPAPI_API_KEY:
return search_serpapi(
request.app.state.config.SERPAPI_API_KEY,
request.app.state.config.SERPAPI_ENGINE,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No SERPAPI_API_KEY found in environment variables")
elif engine == "jina":
return search_jina(
request.app.state.config.JINA_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
)
elif engine == "bing":
return search_bing(
request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY,
request.app.state.config.BING_SEARCH_V7_ENDPOINT,
str(DEFAULT_LOCALE),
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
elif engine == "perplexity":
return search_perplexity(
request.app.state.config.PERPLEXITY_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
model=request.app.state.config.PERPLEXITY_MODEL,
search_context_usage=request.app.state.config.PERPLEXITY_SEARCH_CONTEXT_USAGE,
)
elif engine == "sougou":
if (
request.app.state.config.SOUGOU_API_SID
and request.app.state.config.SOUGOU_API_SK
):
return search_sougou(
request.app.state.config.SOUGOU_API_SID,
request.app.state.config.SOUGOU_API_SK,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception(
"No SOUGOU_API_SID or SOUGOU_API_SK found in environment variables"
)
elif engine == "firecrawl":
return search_firecrawl(
request.app.state.config.FIRECRAWL_API_BASE_URL,
request.app.state.config.FIRECRAWL_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
elif engine == "external":
return search_external(
request.app.state.config.EXTERNAL_WEB_SEARCH_URL,
request.app.state.config.EXTERNAL_WEB_SEARCH_API_KEY,
query,
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
)
else:
raise Exception("No search engine API key found in environment variables")
@router.post("/process/web/search")
async def process_web_search(
request: Request, form_data: SearchForm, user=Depends(get_verified_user)
):
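    """
    Run the configured web search engine for every query, load the resulting
    pages (unless the web loader is bypassed), and either return the documents
    directly or index them into a single `web-search-*` collection.
    """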
urls = []
result_items = []
try:
        log.info(
f"trying to web search with {request.app.state.config.WEB_SEARCH_ENGINE, form_data.queries}"
)
search_tasks = [
run_in_threadpool(
search_web,
request,
request.app.state.config.WEB_SEARCH_ENGINE,
query,
)
for query in form_data.queries
]
search_results = await asyncio.gather(*search_tasks)
for result in search_results:
if result:
for item in result:
if item and item.link:
result_items.append(item)
urls.append(item.link)
urls = list(dict.fromkeys(urls))
log.debug(f"urls: {urls}")
except Exception as e:
log.exception(e)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.WEB_SEARCH_ERROR(e),
)
try:
if request.app.state.config.BYPASS_WEB_SEARCH_WEB_LOADER:
search_results = [
item for result in search_results for item in result if result
]
docs = [
Document(
page_content=result.snippet,
metadata={
"source": result.link,
"title": result.title,
"snippet": result.snippet,
"link": result.link,
},
)
for result in search_results
if hasattr(result, "snippet") and result.snippet is not None
]
else:
loader = get_web_loader(
urls,
verify_ssl=request.app.state.config.ENABLE_WEB_LOADER_SSL_VERIFICATION,
requests_per_second=request.app.state.config.WEB_LOADER_CONCURRENT_REQUESTS,
trust_env=request.app.state.config.WEB_SEARCH_TRUST_ENV,
)
docs = await loader.aload()
urls = [
doc.metadata.get("source") for doc in docs if doc.metadata.get("source")
] # only keep the urls returned by the loader
result_items = [
dict(item) for item in result_items if item.link in urls
] # only keep the search results that have been loaded
if request.app.state.config.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL:
return {
"status": True,
"collection_name": None,
"filenames": urls,
"items": result_items,
"docs": [
{
"content": doc.page_content,
"metadata": doc.metadata,
}
for doc in docs
],
"loaded_count": len(docs),
}
else:
# Create a single collection for all documents
collection_name = (
f"web-search-{calculate_sha256_string('-'.join(form_data.queries))}"[
:63
]
)
try:
await run_in_threadpool(
save_docs_to_vector_db,
request,
docs,
collection_name,
overwrite=True,
user=user,
)
except Exception as e:
log.debug(f"error saving docs: {e}")
return {
"status": True,
"collection_names": [collection_name],
"items": result_items,
"filenames": urls,
"loaded_count": len(docs),
}
except Exception as e:
log.exception(e)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.DEFAULT(e),
)
class QueryDocForm(BaseModel):
collection_name: str
query: str
k: Optional[int] = None
k_reranker: Optional[int] = None
r: Optional[float] = None
    hybrid: Optional[bool] = None
    hybrid_bm25_weight: Optional[float] = None
@router.post("/query/doc")
def query_doc_handler(
request: Request,
form_data: QueryDocForm,
user=Depends(get_verified_user),
):
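    """Query a single collection, using hybrid (BM25 + embedding + reranking)
    search when enabled, otherwise plain embedding similarity search."""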
try:
if request.app.state.config.ENABLE_RAG_HYBRID_SEARCH and (
form_data.hybrid is None or form_data.hybrid
):
collection_results = {}
collection_results[form_data.collection_name] = VECTOR_DB_CLIENT.get(
collection_name=form_data.collection_name
)
return query_doc_with_hybrid_search(
collection_name=form_data.collection_name,
collection_result=collection_results[form_data.collection_name],
2024-09-28 08:23:09 +08:00
query=form_data.query,
embedding_function=lambda query, prefix: request.app.state.EMBEDDING_FUNCTION(
query, prefix=prefix, user=user
),
k=form_data.k if form_data.k else request.app.state.config.TOP_K,
reranking_function=(
(
lambda sentences: request.app.state.RERANKING_FUNCTION(
sentences, user=user
)
)
if request.app.state.RERANKING_FUNCTION
else None
),
k_reranker=form_data.k_reranker
or request.app.state.config.TOP_K_RERANKER,
r=(
form_data.r
if form_data.r
else request.app.state.config.RELEVANCE_THRESHOLD
),
hybrid_bm25_weight=(
form_data.hybrid_bm25_weight
if form_data.hybrid_bm25_weight
else request.app.state.config.HYBRID_BM25_WEIGHT
),
user=user,
)
else:
return query_doc(
collection_name=form_data.collection_name,
query_embedding=request.app.state.EMBEDDING_FUNCTION(
form_data.query, prefix=RAG_EMBEDDING_QUERY_PREFIX, user=user
),
k=form_data.k if form_data.k else request.app.state.config.TOP_K,
user=user,
)
except Exception as e:
log.exception(e)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.DEFAULT(e),
)
class QueryCollectionsForm(BaseModel):
collection_names: list[str]
query: str
k: Optional[int] = None
k_reranker: Optional[int] = None
r: Optional[float] = None
hybrid: Optional[bool] = None
hybrid_bm25_weight: Optional[float] = None
@router.post("/query/collection")
def query_collection_handler(
request: Request,
form_data: QueryCollectionsForm,
user=Depends(get_verified_user),
):
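    """Query several collections at once, using hybrid search when enabled,
    otherwise plain embedding similarity search."""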
try:
if request.app.state.config.ENABLE_RAG_HYBRID_SEARCH and (
form_data.hybrid is None or form_data.hybrid
):
return query_collection_with_hybrid_search(
collection_names=form_data.collection_names,
queries=[form_data.query],
embedding_function=lambda query, prefix: request.app.state.EMBEDDING_FUNCTION(
query, prefix=prefix, user=user
),
k=form_data.k if form_data.k else request.app.state.config.TOP_K,
reranking_function=(
(
lambda sentences: request.app.state.RERANKING_FUNCTION(
sentences, user=user
)
)
if request.app.state.RERANKING_FUNCTION
else None
),
k_reranker=form_data.k_reranker
or request.app.state.config.TOP_K_RERANKER,
r=(
form_data.r
if form_data.r
else request.app.state.config.RELEVANCE_THRESHOLD
),
hybrid_bm25_weight=(
form_data.hybrid_bm25_weight
if form_data.hybrid_bm25_weight
else request.app.state.config.HYBRID_BM25_WEIGHT
),
)
else:
return query_collection(
collection_names=form_data.collection_names,
queries=[form_data.query],
embedding_function=lambda query, prefix: request.app.state.EMBEDDING_FUNCTION(
query, prefix=prefix, user=user
),
k=form_data.k if form_data.k else request.app.state.config.TOP_K,
)
except Exception as e:
log.exception(e)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.DEFAULT(e),
)
####################################
#
# Vector DB operations
#
####################################
class DeleteForm(BaseModel):
collection_name: str
file_id: str
@router.post("/delete")
def delete_entries_from_collection(form_data: DeleteForm, user=Depends(get_admin_user)):
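    """Delete a file's entries (matched by content hash) from a collection."""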
try:
if VECTOR_DB_CLIENT.has_collection(collection_name=form_data.collection_name):
file = Files.get_file_by_id(form_data.file_id)
hash = file.hash
VECTOR_DB_CLIENT.delete(
collection_name=form_data.collection_name,
metadata={"hash": hash},
)
return {"status": True}
else:
return {"status": False}
except Exception as e:
log.exception(e)
return {"status": False}
@router.post("/reset/db")
def reset_vector_db(user=Depends(get_admin_user)):
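    """Reset the entire vector database and delete all knowledge bases."""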
VECTOR_DB_CLIENT.reset()
Knowledges.delete_all_knowledge()
@router.post("/reset/uploads")
def reset_upload_dir(user=Depends(get_admin_user)) -> bool:
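    """Remove every file and subdirectory inside the upload directory."""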
folder = f"{UPLOAD_DIR}"
try:
# Check if the directory exists
if os.path.exists(folder):
# Iterate over all the files and directories in the specified directory
for filename in os.listdir(folder):
file_path = os.path.join(folder, filename)
try:
if os.path.isfile(file_path) or os.path.islink(file_path):
os.unlink(file_path) # Remove the file or link
elif os.path.isdir(file_path):
shutil.rmtree(file_path) # Remove the directory
except Exception as e:
log.exception(f"Failed to delete {file_path}. Reason: {e}")
else:
log.warning(f"The directory {folder} does not exist")
except Exception as e:
log.exception(f"Failed to process the directory {folder}. Reason: {e}")
return True
if ENV == "dev":
@router.get("/ef/{text}")
async def get_embeddings(request: Request, text: Optional[str] = "Hello World!"):
return {
"result": request.app.state.EMBEDDING_FUNCTION(
text, prefix=RAG_EMBEDDING_QUERY_PREFIX
)
}
class BatchProcessFilesForm(BaseModel):
files: List[FileModel]
collection_name: str
class BatchProcessFilesResult(BaseModel):
file_id: str
status: str
error: Optional[str] = None
class BatchProcessFilesResponse(BaseModel):
results: List[BatchProcessFilesResult]
errors: List[BatchProcessFilesResult]
@router.post("/process/files/batch")
def process_files_batch(
request: Request,
form_data: BatchProcessFilesForm,
user=Depends(get_verified_user),
) -> BatchProcessFilesResponse:
"""
Process a batch of files and save them to the vector database.
"""
results: List[BatchProcessFilesResult] = []
errors: List[BatchProcessFilesResult] = []
collection_name = form_data.collection_name
# Prepare all documents first
all_docs: List[Document] = []
for file in form_data.files:
try:
text_content = file.data.get("content", "")
docs: List[Document] = [
Document(
page_content=text_content.replace("<br/>", "\n"),
metadata={
**file.meta,
"name": file.filename,
"created_by": file.user_id,
"file_id": file.id,
"source": file.filename,
},
)
]
hash = calculate_sha256_string(text_content)
Files.update_file_hash_by_id(file.id, hash)
Files.update_file_data_by_id(file.id, {"content": text_content})
all_docs.extend(docs)
results.append(BatchProcessFilesResult(file_id=file.id, status="prepared"))
except Exception as e:
log.error(f"process_files_batch: Error processing file {file.id}: {str(e)}")
errors.append(
BatchProcessFilesResult(file_id=file.id, status="failed", error=str(e))
)
# Save all documents in one batch
if all_docs:
try:
save_docs_to_vector_db(
2024-12-31 00:36:34 +08:00
request=request,
docs=all_docs,
collection_name=collection_name,
add=True,
user=user,
)
# Update all files with collection name
for result in results:
Files.update_file_metadata_by_id(
result.file_id, {"collection_name": collection_name}
)
result.status = "completed"
except Exception as e:
log.error(
f"process_files_batch: Error saving documents to vector DB: {str(e)}"
)
for result in results:
result.status = "failed"
                errors.append(
                    BatchProcessFilesResult(
                        file_id=result.file_id, status="failed", error=str(e)
                    )
                )
return BatchProcessFilesResponse(results=results, errors=errors)