"""Streamlit front end for the AI Life Coach chat application.

Renders a sidebar (model selection, Ollama server URL, history controls,
and a debug panel) plus a chat interface. User messages are sent to a
local Ollama server first; when Ollama is unavailable or fallback mode is
forced, the app falls back to the Hugging Face inference backend.
"""

import os
import sys
import time
from datetime import datetime, timezone
from pathlib import Path

import streamlit as st

# Make sibling packages importable when run as a script.
sys.path.append(str(Path(__file__).parent))

from utils.config import config
from core.llm import send_to_ollama, send_to_hf
from core.session import session_manager
from core.memory import check_redis_health

st.set_page_config(page_title="AI Life Coach", page_icon="🧠", layout="wide")

# ---------------------------------------------------------------------------
# Session-state bootstrap
# ---------------------------------------------------------------------------
# Every session-state key the app reads, with its initial value. setdefault
# only assigns on the first run of a session, so user state survives reruns.
_STATE_DEFAULTS = {
    "messages": [],
    "last_error": "",
    "last_ollama_call_success": None,
    "last_ollama_call_time": "",
    "last_ollama_response_preview": "",
    "last_hf_call_success": None,
    "last_hf_call_time": "",
    "last_hf_response_preview": "",
}
for _key, _default in _STATE_DEFAULTS.items():
    st.session_state.setdefault(_key, _default)


def _record_call(provider: str, success: bool, preview_text: str) -> None:
    """Record outcome metadata for a backend call in session state.

    Args:
        provider: Session-state key prefix, either ``"ollama"`` or ``"hf"``.
        success: Whether the call returned without raising.
        preview_text: Response text (or error text) to preview; capped at
            200 characters before storing.
    """
    # datetime.utcnow() is deprecated (3.12+) and naive; use aware UTC time.
    st.session_state[f"last_{provider}_call_success"] = success
    st.session_state[f"last_{provider}_call_time"] = str(datetime.now(timezone.utc))
    st.session_state[f"last_{provider}_response_preview"] = (preview_text or "")[:200]


def _truncated_preview(text: str) -> str:
    """Return *text* capped at 200 chars, appending an ellipsis if truncated."""
    return text[:200] + ("..." if len(text) > 200 else "")


# ---------------------------------------------------------------------------
# Sidebar
# ---------------------------------------------------------------------------
with st.sidebar:
    st.title("AI Life Coach")
    st.markdown("Your personal AI-powered life development assistant")

    # Model selection: display name -> Ollama model tag.
    model_options = {
        "Mistral 7B (Local)": "mistral:latest",
        "Llama 2 7B (Local)": "llama2:latest",
        "OpenChat 3.5 (Local)": "openchat:latest",
    }
    selected_model_name = st.selectbox(
        "Select Model",
        options=list(model_options.keys()),
        index=0,
    )
    st.session_state.selected_model = model_options[selected_model_name]

    # Ollama URL input (e.g. a local address or an ngrok tunnel).
    st.session_state.ngrok_url = st.text_input(
        "Ollama Server URL",
        value=st.session_state.get("ngrok_url", "http://localhost:11434"),
        help="Enter the URL to your Ollama server",
    )

    # Conversation history controls.
    st.subheader("Conversation History")
    if st.button("Clear History"):
        st.session_state.messages = []
        st.success("History cleared!")

    # Debug panel: configuration, backend health, and last-call telemetry.
    with st.sidebar.expander("🔧 Debug Info"):
        st.write(f"**OLLAMA_HOST**: {st.session_state.ngrok_url}")
        st.write(f"**Selected Model**: {st.session_state.selected_model}")
        st.write(f"Fallback Mode: {'✅ On' if config.use_fallback else '❌ Off'}")
        st.write(f"Redis Status: {'✅ Healthy' if check_redis_health() else '⚠️ Unavailable'}")
        st.write(f"Env Detected As: {'☁️ HF Space' if config.is_hf_space else '🏠 Local'}")
        st.write(f"HF Token Set: {'✅ Yes' if config.hf_token else '❌ No'}")
        if st.session_state.last_error:
            st.warning(f"Last Error: {st.session_state.last_error}")

        # Ollama API call tracking.
        if st.session_state.last_ollama_call_success is not None:
            status_icon = "✅ Success" if st.session_state.last_ollama_call_success else "❌ Failed"
            st.write(f"Last Ollama Call: {status_icon}")
            st.write(f"At: {st.session_state.last_ollama_call_time}")
            if st.session_state.last_ollama_response_preview:
                st.code(
                    _truncated_preview(st.session_state.last_ollama_response_preview),
                    language="text",
                )

        # Hugging Face API call tracking.
        if st.session_state.last_hf_call_success is not None:
            status_icon = "✅ Success" if st.session_state.last_hf_call_success else "❌ Failed"
            st.write(f"Last HF Call: {status_icon}")
            st.write(f"At: {st.session_state.last_hf_call_time}")
            if st.session_state.last_hf_response_preview:
                st.code(
                    _truncated_preview(st.session_state.last_hf_response_preview),
                    language="text",
                )

        # Manual refresh of the Ollama status, shown via st.sidebar.info
        # (st.toast is unavailable on older Streamlit releases).
        if st.button("🔄 Refresh Ollama Status"):
            # Imported lazily so the page still loads if the monitor is absent.
            from services.ollama_monitor import check_ollama_status

            status = check_ollama_status()
            st.sidebar.info(
                f"Ollama Status: {'Running' if status['running'] else 'Unavailable'}"
            )

# ---------------------------------------------------------------------------
# Main chat interface
# ---------------------------------------------------------------------------
st.title("🧠 AI Life Coach")
st.markdown("Ask me anything about personal development, goal setting, or life advice!")

# Replay the conversation so far.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Chat input box with an explicit Send button beside it.
col1, col2 = st.columns([4, 1])
with col1:
    user_input = st.text_input(
        "Your message...",
        key="user_message_input",
        placeholder="Type your message here...",
        label_visibility="collapsed",
    )
with col2:
    send_button = st.button("Send", key="send_message_button", use_container_width=True)

if send_button and user_input.strip():
    # Echo the user message immediately and record it.
    with st.chat_message("user"):
        st.markdown(user_input)
    st.session_state.messages.append({"role": "user", "content": user_input})

    # Reset error state for this attempt.
    st.session_state.last_error = ""

    # Pull recent context from the persisted server-side session.
    user_session = session_manager.get_session("default_user")
    conversation = user_session.get("conversation", [])
    conversation_history = conversation[-5:]  # last 5 messages (slice copies)
    conversation_history.append({"role": "user", "content": user_input})

    with st.chat_message("assistant"):
        with st.spinner("AI Coach is thinking..."):
            ai_response = None
            backend_used = ""  # informational: which provider answered
            error_msg = ""

            # Try Ollama first unless fallback mode is forced in config.
            if not config.use_fallback:
                try:
                    ai_response = send_to_ollama(
                        user_input,
                        conversation_history,
                        st.session_state.ngrok_url,
                        st.session_state.selected_model,
                    )
                    backend_used = "Ollama"
                    _record_call("ollama", True, ai_response)
                except Exception as e:
                    error_msg = f"Ollama error: {str(e)}"
                    _record_call("ollama", False, str(e))

            # Fall back to Hugging Face when Ollama produced nothing
            # and an HF token is configured.
            if not ai_response and config.hf_token:
                try:
                    ai_response = send_to_hf(user_input, conversation_history)
                    backend_used = "Hugging Face"
                    _record_call("hf", True, ai_response)
                except Exception as e:
                    error_msg = f"Hugging Face error: {str(e)}"
                    _record_call("hf", False, str(e))

            if ai_response:
                st.markdown(f"{ai_response}")

                # Persist the completed turn to the server-side session.
                conversation.append({"role": "user", "content": user_input})
                conversation.append({"role": "assistant", "content": ai_response})
                user_session["conversation"] = conversation
                session_manager.update_session("default_user", user_session)

                # Mirror the assistant response into the UI history.
                st.session_state.messages.append(
                    {"role": "assistant", "content": ai_response}
                )
            else:
                st.error("Failed to get response from both providers.")
                st.session_state.last_error = error_msg or "No response from either provider"

    # Force a rerun so the refreshed history renders.
    # st.experimental_rerun() was removed in Streamlit 1.27+; prefer st.rerun().
    if hasattr(st, "rerun"):
        st.rerun()
    else:
        st.experimental_rerun()