| import gradio as gr | |
| from backend import backend_chat | |
| from langchain_core.messages import HumanMessage, BaseMessage, AIMessage | |
def respond(message: str, history: list[dict]):
    """Produce the assistant's reply for the newest user message.

    Args:
        message: the latest user input.
        history: OpenAI-style list of dicts with keys 'role' and 'content'.

    Returns:
        The assistant's reply text, or a fallback apology string when the
        backend produced no non-human message.
    """
    # Convert the OpenAI-style dict history into LangChain message objects.
    # Roles other than 'user'/'assistant' are intentionally dropped.
    converted = []
    for turn in history:
        role = turn["role"]
        if role == "user":
            converted.append(HumanMessage(content=turn["content"]))
        elif role == "assistant":
            converted.append(AIMessage(content=turn["content"]))

    updated = backend_chat(converted, message)

    # Walk backwards to find the most recent message that is not from the
    # human — i.e. the backend's latest response.
    reply = next(
        (m for m in reversed(updated) if not isinstance(m, HumanMessage)),
        None,
    )
    if reply is None:
        return "Sorry, I couldn't generate a response."
    return reply.content
def main():
    """Build the Gradio chat UI and start the local web server (blocking)."""
    ui = gr.ChatInterface(
        fn=respond,
        type="messages",
        title="ChatBot",
        description="Chatbot using langgraph backend and Memory Checkpointing",
    )
    ui.launch()


if __name__ == "__main__":
    main()