🎨 Add reranker to config.ini

This commit is contained in:
Ruben Lucas 2025-04-18 13:12:13 +02:00
parent ad60c9d52f
commit fd62724b4c
3 changed files with 5 additions and 3 deletions

View File

@ -4,6 +4,7 @@ chat_backend: local # Select the primary chat backend (azure, openai, google_vertex, aws, local, huggingface)
emb_backend: local # Select the primary embedding backend (azure, openai, google_vertex, aws, local, huggingface)
use_conditional_graph: false # Use a conditional RAG model with historical chat context, or a non-conditional model without access to the current conversation
use_reranker: false # Use an LLM to rerank the retrieved context documents
# --- Provider Specific Settings ---

View File

@ -67,9 +67,9 @@ else:
chat_model=chat_function,
embedding_model=embedding_function,
system_prompt=system_prompt,
compression_model=get_compression_model(
"BAAI/bge-reranker-base", vector_store
), # TODO: implement in config parser
compression_model=(
get_compression_model("BAAI/bge-reranker-base", vector_store) if settings.use_reranker else None
),
)

View File

@ -119,6 +119,7 @@ class AppSettings(BaseModel):
chat_backend: ChatBackend = Field(default=ChatBackend.local)
emb_backend: EmbeddingBackend = Field(default=EmbeddingBackend.huggingface)
use_conditional_graph: bool = Field(default=False)
use_reranker: bool = Field(default=False)
# --- Provider-specific settings ---
azure: Optional[AzureSettings] = None