From fd62724b4ca99bf7a0882d859ed6603cc903b313 Mon Sep 17 00:00:00 2001 From: Ruben Lucas Date: Fri, 18 Apr 2025 13:12:13 +0200 Subject: [PATCH] =?UTF-8?q?=F0=9F=8E=A8=20Add=20reranker=20to=20config.yaml?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- config.yaml | 1 + generic_rag/app.py | 6 +++--- generic_rag/parsers/config.py | 1 + 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/config.yaml b/config.yaml index 4c877d8..672f4c7 100644 --- a/config.yaml +++ b/config.yaml @@ -4,6 +4,7 @@ chat_backend: local # Select the primary chat backend (azure, openai, google_ver emb_backend: local # Select the primary embedding backend (azure, openai, google_vertex, aws, local, huggingface) use_conditional_graph: false # Use a conditional RAG model with historical chat context, or a non-conditional model without access to the current conversation +use_reranker: false # Use an LLM to rerank the retrieved context documents # --- Provider Specific Settings --- diff --git a/generic_rag/app.py b/generic_rag/app.py index 3f6e68d..fbde75a 100644 --- a/generic_rag/app.py +++ b/generic_rag/app.py @@ -67,9 +67,9 @@ else: chat_model=chat_function, embedding_model=embedding_function, system_prompt=system_prompt, - compression_model=get_compression_model( - "BAAI/bge-reranker-base", vector_store - ), # TODO: implement in config parser + compression_model=( + get_compression_model("BAAI/bge-reranker-base", vector_store) if settings.use_reranker else None + ), ) diff --git a/generic_rag/parsers/config.py b/generic_rag/parsers/config.py index 7ef2dee..d7f14f5 100644 --- a/generic_rag/parsers/config.py +++ b/generic_rag/parsers/config.py @@ -119,6 +119,7 @@ class AppSettings(BaseModel): chat_backend: ChatBackend = Field(default=ChatBackend.local) emb_backend: EmbeddingBackend = Field(default=EmbeddingBackend.huggingface) use_conditional_graph: bool = Field(default=False) + use_reranker: bool = Field(default=False) # --- 
Provider-specific settings --- azure: Optional[AzureSettings] = None