import streamlit as st
import time
import os
import sys
from pathlib import Path

sys.path.append(str(Path(__file__).parent))

from utils.config import config
from core.llm import send_to_ollama, send_to_hf
from core.session import session_manager

st.set_page_config(page_title="AI Life Coach", page_icon="🧠", layout="wide")

# Initialize session state
if "messages" not in st.session_state:
    st.session_state.messages = []

# Sidebar
with st.sidebar:
    st.title("AI Life Coach")
    st.markdown("Your personal AI-powered life development assistant")

    # Model selection
    model_options = {
        "Mistral 7B (Local)": "mistral:latest",
        "Llama 2 7B (Local)": "llama2:latest",
        "OpenChat 3.5 (Local)": "openchat:latest",
    }
    selected_model_name = st.selectbox(
        "Select Model",
        options=list(model_options.keys()),
        index=0,
    )
    st.session_state.selected_model = model_options[selected_model_name]

    # Ollama URL input
    st.session_state.ngrok_url = st.text_input(
        "Ollama Server URL",
        value=st.session_state.get("ngrok_url", "http://localhost:11434"),
        help="Enter the URL to your Ollama server",
    )

    # Conversation history controls
    st.subheader("Conversation History")
    if st.button("Clear History"):
        st.session_state.messages = []
        st.success("History cleared!")

# Main chat interface
st.title("🧠 AI Life Coach")
st.markdown("Ask me anything about personal development, goal setting, or life advice!")

# Display chat messages
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Chat input and send button
col1, col2 = st.columns([4, 1])
with col1:
    user_input = st.text_input(
        "Your message...",
        key="user_message_input",
        placeholder="Type your message here...",
        label_visibility="collapsed",
    )
with col2:
    send_button = st.button("Send", key="send_message_button", use_container_width=True)

if send_button and user_input.strip():
    # Display the user message immediately
    with st.chat_message("user"):
        st.markdown(user_input)

    # Add the user message to the on-screen history
    st.session_state.messages.append({"role": "user", "content": user_input})

    # Build the conversation context from the persisted session
    user_session = session_manager.get_session("default_user")
    conversation = user_session.get("conversation", [])
    conversation_history = conversation[-5:]  # Last 5 messages
    conversation_history.append({"role": "user", "content": user_input})

    # Send to the backend
    with st.chat_message("assistant"):
        with st.spinner("AI Coach is thinking..."):
            ai_response = None
            backend_used = ""

            # Try Ollama first unless the config forces the fallback
            if not config.use_fallback:
                ai_response = send_to_ollama(
                    user_input,
                    conversation_history,
                    st.session_state.ngrok_url,
                    st.session_state.selected_model,
                )
                backend_used = "Ollama"

            # Fall back to Hugging Face if Ollama returned nothing
            if not ai_response and config.hf_token:
                ai_response = send_to_hf(user_input, conversation_history)
                backend_used = "Hugging Face"

            if ai_response:
                st.markdown(ai_response)

                # Append this exchange to the persisted conversation
                conversation.append({"role": "user", "content": user_input})
                conversation.append({"role": "assistant", "content": ai_response})

                # Save the updated session
                user_session["conversation"] = conversation
                session_manager.save_session("default_user", user_session)

                # Add the assistant response to the on-screen history
                st.session_state.messages.append({"role": "assistant", "content": ai_response})
            else:
                st.error("Failed to get response from both providers.")

    # Force a rerun to refresh the chat view
    # (st.rerun replaces the deprecated st.experimental_rerun)
    st.rerun()