Factor out processing of responses into functions

Nielson Janné 2025-03-26 10:34:34 +01:00
parent 5258127ae1
commit 506857deeb
2 changed files with 36 additions and 28 deletions

View File

@@ -88,39 +88,47 @@ else:

 @cl.on_message
 async def on_message(message: cl.Message):
     if isinstance(graph, CondRetGenLangGraph):
-        config = {"configurable": {"thread_id": cl.user_session.get("id")}}
-        chainlit_response = cl.Message(content="")
-        for response in graph.stream(message.content, config=config):
-            await chainlit_response.stream_token(response)
-        await chainlit_response.send()
+        await process_cond_response(message)
     elif isinstance(graph, RetGenLangGraph):
-        config = {"configurable": {"thread_id": cl.user_session.get("id")}}
-        response = graph.invoke(message.content, config=config)
-        answer = response["answer"]
-        answer += "\n\n"
-        pdf_sources = graph.get_last_pdf_sources()
-        web_sources = graph.get_last_web_sources()
-        elements = []
-        if len(pdf_sources) > 0:
-            answer += "The following PDF source were consulted:\n"
-            for source, page_numbers in pdf_sources.items():
-                page_numbers = list(page_numbers)
-                page_numbers.sort()
-                # display="side" seems to be not supported by chainlit for PDF's, so we use "inline" instead.
-                elements.append(cl.Pdf(name="pdf", display="inline", path=source, page=page_numbers[0]))
-                answer += f"'{source}' on page(s): {page_numbers}\n"
-        if len(web_sources) > 0:
-            answer += f"The following web sources were consulted: {web_sources}\n"
-        await cl.Message(content=answer, elements=elements).send()
+        await process_response(message)
+
+
+async def process_response(message):
+    config = {"configurable": {"thread_id": cl.user_session.get("id")}}
+    chainlit_response = cl.Message(content="")
+    response = graph.invoke(message.content, config=config)
+    await chainlit_response.stream_token(f"{response}\n\n")
+    pdf_sources = graph.get_last_pdf_sources()
+    if len(pdf_sources) > 0:
+        await chainlit_response.stream_token("The following PDF source were consulted:\n")
+        for source, page_numbers in pdf_sources.items():
+            page_numbers = list(page_numbers)
+            page_numbers.sort()
+            # display="side" seems to be not supported by chainlit for PDF's, so we use "inline" instead.
+            chainlit_response.elements.append(cl.Pdf(name="pdf", display="inline", path=source, page=page_numbers[0]))
+            await chainlit_response.update()
+            await chainlit_response.stream_token(f"'{source}' on page(s): {page_numbers}\n")
+    web_sources = graph.get_last_web_sources()
+    if len(web_sources) > 0:
+        await chainlit_response.stream_token(f"The following web sources were consulted: {web_sources}\n")
+    await chainlit_response.send()
+
+
+async def process_cond_response(message):
+    config = {"configurable": {"thread_id": cl.user_session.get("id")}}
+    chainlit_response = cl.Message(content="")
+    for response in graph.stream(message.content, config=config):
+        await chainlit_response.stream_token(response)
+    await chainlit_response.send()


 @cl.set_starters
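
For reference, a minimal, self-contained sketch of the Chainlit streaming pattern the new helpers rely on: build an empty cl.Message, stream tokens into it, attach elements, push the element list with update(), and finalize with send(). The handler name, the placeholder tokens, and the PDF path below are illustrative and not part of this commit.

import chainlit as cl

@cl.on_message
async def echo_with_pdf(message: cl.Message):  # illustrative handler, not part of the commit
    msg = cl.Message(content="")
    for token in ["Hello", ", ", "world"]:     # stand-in for tokens from graph.stream(...)
        await msg.stream_token(token)          # appends the token to the streamed message
    # Attach an element after streaming, then push it to the UI before finishing.
    msg.elements.append(cl.Pdf(name="pdf", display="inline", path="example.pdf"))  # placeholder path
    await msg.update()                         # sends the updated element list to the client
    await msg.send()                           # finalizes the streamed message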

View File

@@ -31,7 +31,7 @@ class RetGenLangGraph:

     def invoke(self, message: str, config: dict) -> Union[dict[str, Any], Any]:
         self.last_invoke = self.graph.invoke({"question": message}, config=config)
-        return self.last_invoke
+        return self.last_invoke["answer"]

     def _retrieve(self, state: State) -> dict:
         retrieved_docs = self.vector_store.similarity_search(state["question"])
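
For context, a hedged sketch of the calling contract after this hunk: invoke() now returns the answer string itself rather than the full state dict, while sources are still fetched through the graph's accessors, as in the first file's diff. The question text and thread id are placeholders, and graph is assumed to be an already-constructed RetGenLangGraph.

config = {"configurable": {"thread_id": "example-thread"}}              # placeholder thread id
answer = graph.invoke("What does the report conclude?", config=config)  # now a plain answer string
pdf_sources = graph.get_last_pdf_sources()  # per the first file's diff: source path -> page numbers
web_sources = graph.get_last_web_sources()
print(answer, pdf_sources, web_sources)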