`app.py` is the Streamlit entry point; it wires the embeddings, the vector store, and the conversational retrieval chain into a chat UI:

```python
# app.py
import streamlit as st

# Local imports
from embedding import load_embeddings
from vectorstore import load_or_build_vectorstore
from chain_setup import build_conversational_chain


def main():
    # Page title (Arabic): "Interactive chat - data management and personal data protection"
    st.title("💬 المحادثة التفاعلية - ادارة البيانات و حماية البيانات الشخصية")

    # Paths and constants
    local_file = "Policies001.pdf"
    index_folder = "faiss_index"

    # Step 1: Load embeddings
    embeddings = load_embeddings()

    # Step 2: Build or load the vector store
    vectorstore = load_or_build_vectorstore(local_file, index_folder, embeddings)

    # Step 3: Build the conversational retrieval chain
    qa_chain = build_conversational_chain(vectorstore)

    # Step 4: Session state for the chat UI
    if "messages" not in st.session_state:
        # Greeting (Arabic): "Hi! Ask me anything about data management
        # and personal data protection"
        st.session_state["messages"] = [
            {"role": "assistant", "content": "👋 مرحبًا! اسألني أي شيء عن إدارة البيانات وحماية البيانات الشخصية"}
        ]

    # Display existing messages
    for msg in st.session_state["messages"]:
        with st.chat_message(msg["role"]):
            st.markdown(msg["content"])

    # Step 5: Chat input
    user_input = st.chat_input("Type your question...")

    # Step 6: Process user input
    if user_input:
        # a) Display the user message
        st.session_state["messages"].append({"role": "user", "content": user_input})
        with st.chat_message("user"):
            st.markdown(user_input)

        # b) Run the chain; its internal memory supplies the chat history,
        # so only the new question is passed in.
        response_dict = qa_chain({"question": user_input})
        answer = response_dict["answer"]

        # c) Display the assistant response
        st.session_state["messages"].append({"role": "assistant", "content": answer})
        with st.chat_message("assistant"):
            st.markdown(answer)


if __name__ == "__main__":
    main()
```
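The three local modules are imported but not shown in the listing. A minimal sketch of what `embedding.py` might contain follows; the specific model name is an assumption (any multilingual embedding model that handles Arabic text would fit, since the app's content is Arabic):

```python
# embedding.py -- illustrative sketch; the real module is not shown above.
from langchain_community.embeddings import HuggingFaceEmbeddings


def load_embeddings():
    """Return the embedding model used to index and query the PDF.

    The model name below is an assumption, chosen because it is a
    small multilingual model that covers Arabic.
    """
    return HuggingFaceEmbeddings(
        model_name="sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
    )
```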
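The `faiss_index` folder name suggests a persisted FAISS index. A plausible sketch of `load_or_build_vectorstore`, assuming `PyPDFLoader` and a recursive character splitter (both assumptions; only the function's signature appears in `app.py`):

```python
# vectorstore.py -- illustrative sketch built around FAISS persistence.
import os

from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import FAISS
from langchain.text_splitter import RecursiveCharacterTextSplitter


def load_or_build_vectorstore(local_file, index_folder, embeddings):
    """Load a saved FAISS index if present; otherwise build it from the PDF."""
    if os.path.exists(index_folder):
        # Recent LangChain versions require this flag when loading a
        # locally pickled index.
        return FAISS.load_local(
            index_folder, embeddings, allow_dangerous_deserialization=True
        )

    # Split the PDF into overlapping chunks so retrieval stays precise.
    docs = PyPDFLoader(local_file).load()
    chunks = RecursiveCharacterTextSplitter(
        chunk_size=1000, chunk_overlap=100
    ).split_documents(docs)

    vectorstore = FAISS.from_documents(chunks, embeddings)
    vectorstore.save_local(index_folder)
    return vectorstore
```

Persisting the index this way means the PDF is only embedded once; later app restarts load the saved index directly.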
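Finally, `build_conversational_chain` must attach memory to the chain, since `app.py` calls it with only `{"question": ...}` and still reads a conversational answer. A hedged sketch, with the LLM choice (`ChatOpenAI` and its model name) purely an assumption:

```python
# chain_setup.py -- illustrative sketch; the LLM and retriever settings
# are assumptions, not taken from the original code.
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory
from langchain_openai import ChatOpenAI


def build_conversational_chain(vectorstore):
    """Wire the retriever, an LLM, and conversation memory into one chain.

    Because the chain carries its own memory, app.py can invoke it with
    just {"question": ...}; the chat history is filled in automatically.
    """
    memory = ConversationBufferMemory(
        memory_key="chat_history", return_messages=True, output_key="answer"
    )
    return ConversationalRetrievalChain.from_llm(
        llm=ChatOpenAI(model="gpt-4o-mini", temperature=0),  # assumed model
        retriever=vectorstore.as_retriever(search_kwargs={"k": 4}),
        memory=memory,
    )
```

With this wiring, the `response_dict["answer"]` lookup in `app.py` matches the chain's output key, and follow-up questions are answered in the context of the earlier turns.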