import gradio as gr

from backend import backend_chat
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage


def respond(message: str, history: list[dict]) -> str:
    """
    message: the latest user input
    history: OpenAI-style list of dicts with keys 'role' and 'content'
    """
    # Convert the OpenAI-style history into LangChain message objects.
    backend_history: list[BaseMessage] = []
    for msg in history:
        if msg["role"] == "user":
            backend_history.append(HumanMessage(content=msg["content"]))
        elif msg["role"] == "assistant":
            backend_history.append(AIMessage(content=msg["content"]))

    # Let the langgraph backend process the new message; it returns the updated history.
    updated_backend_history = backend_chat(backend_history, message)

    # The most recent non-human message is the assistant's reply.
    latest_assistant = None
    for msg in reversed(updated_backend_history):
        if not isinstance(msg, HumanMessage):
            latest_assistant = msg
            break

    if latest_assistant is None:
        return "Sorry, I couldn't generate a response."
    return latest_assistant.content


def main():
    demo = gr.ChatInterface(
        fn=respond,
        type="messages",
        title="ChatBot",
        description="Chatbot using a langgraph backend with memory checkpointing",
    )
    demo.launch()


if __name__ == "__main__":
    main()
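
The backend module is not shown on this page. Below is a minimal sketch of what backend_chat could look like, assuming a single-node langgraph graph compiled with a MemorySaver checkpointer and a ChatOpenAI model; the model choice, thread id, and node layout are assumptions, not the repository's actual code.

# backend.py — hypothetical sketch, not the repository's real implementation.
from langchain_core.messages import BaseMessage, HumanMessage
from langchain_openai import ChatOpenAI
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, MessagesState, StateGraph

llm = ChatOpenAI(model="gpt-4o-mini")  # assumed model; any chat model would do


def call_model(state: MessagesState):
    # Append the model's reply to the conversation state.
    return {"messages": [llm.invoke(state["messages"])]}


builder = StateGraph(MessagesState)
builder.add_node("model", call_model)
builder.add_edge(START, "model")
builder.add_edge("model", END)
# MemorySaver keeps per-thread conversation state between invocations.
graph = builder.compile(checkpointer=MemorySaver())


def backend_chat(history: list[BaseMessage], message: str) -> list[BaseMessage]:
    # The checkpointer already holds earlier turns for this thread, so only the
    # new user message is sent; `history` is accepted for interface compatibility.
    result = graph.invoke(
        {"messages": [HumanMessage(content=message)]},
        config={"configurable": {"thread_id": "default"}},
    )
    return result["messages"]

Because the checkpointer stores the conversation under a thread_id, each call returns the full accumulated message list, which is what the respond function above scans in reverse for the latest assistant reply.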