"""Streamlit front end for the AI Life Coach.

Renders a chat UI and talks to an Ollama server (optionally exposed through
an ngrok tunnel) for completions. Saved user state is read via core.memory.
"""

import json
import os

import requests
import streamlit as st

from core.memory import load_user_state
from utils.config import config
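# Assumed interface for the local imports above (defined elsewhere in this
# repo): `config` is expected to expose `ollama_host` (base URL of the Ollama
# server, e.g. an ngrok HTTPS URL) and `local_model_name` (the model tag to
# chat with); `load_user_state(user_id)` is expected to return a dict-like
# object or None.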

st.set_page_config(page_title="AI Life Coach", page_icon="🧠", layout="centered")

# Sidebar: choose whose saved state to load.
st.sidebar.title("🧠 AI Life Coach")
user = st.sidebar.selectbox("Select User", ["Rob", "Sarah"])
st.sidebar.markdown("---")

# Hugging Face Spaces sets SPACE_ID (e.g. "user/space-name") in the
# environment, so its presence tells us we are running on a Space. It is
# only used for display in the connection-status expander below.
SPACE_ID = os.environ.get("SPACE_ID", "")
IS_HF_SPACE = bool(SPACE_ID)

# ngrok's free tier shows an HTML warning page to browser-like requests.
# This header (plus a non-default User-Agent) tells ngrok to skip the
# interstitial so Ollama's JSON responses come through untouched.
NGROK_HEADERS = {
    "ngrok-skip-browser-warning": "true",
    "User-Agent": "AI-Life-Coach-App",
}
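
# To sanity-check the tunnel by hand (hypothetical URL, substitute your own):
#   curl -H "ngrok-skip-browser-warning: true" \
#        https://<your-tunnel>.ngrok-free.app/api/tags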

def get_ollama_status():
    """Ping the Ollama server and report whether a model is available.

    Returns a dict with `running`, `model_loaded`, `remote_host`, and
    (on connection failure) `error`. `running` is only True when the server
    answers and reports at least one installed model.
    """
    try:
        # /api/tags lists the models installed on the Ollama server.
        response = requests.get(
            f"{config.ollama_host}/api/tags",
            headers=NGROK_HEADERS,
            timeout=10,
        )
        if response.status_code == 200:
            models = response.json().get("models", [])
            if models:
                return {
                    "running": True,
                    "model_loaded": models[0].get("name"),
                    "remote_host": config.ollama_host,
                }
    except Exception as e:
        return {
            "running": False,
            "model_loaded": None,
            "error": str(e),
            "remote_host": config.ollama_host,
        }

    # Reached when the server answered but with a non-200 status or an
    # empty model list.
    return {
        "running": False,
        "model_loaded": None,
        "remote_host": config.ollama_host,
    }
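
# For reference, a successful GET {host}/api/tags response looks roughly
# like this (abridged; the exact fields vary by Ollama version):
#   {"models": [{"name": "llama3:latest", ...}]}
# get_ollama_status() relies only on the "models" list and each entry's
# "name".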

def get_conversation_history(user_id):
    """Return the saved conversation for `user_id`, or [] if none exists.

    The conversation is assumed to be stored as a JSON string under the
    "conversation" key of the user state.
    """
    user_state = load_user_state(user_id)
    if user_state and "conversation" in user_state:
        return json.loads(user_state["conversation"])
    return []
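
# Example of the stored value this parses (assumed shape, inferred from how
# the messages are consumed below):
#   '[{"role": "user", "content": "Hi"}, {"role": "assistant", "content": "Hello!"}]'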

ollama_status = get_ollama_status()

# Sidebar connection indicator.
if ollama_status["running"]:
    st.sidebar.success(f"🧠 Model Running: {ollama_status['model_loaded']}")
    st.sidebar.info(f"Connected to: {ollama_status['remote_host']}")
else:
    st.sidebar.error("🧠 Ollama is not accessible")
    st.sidebar.info(f"Configured host: {ollama_status['remote_host']}")
    if "error" in ollama_status:
        st.sidebar.caption(f"Error: {ollama_status['error']}")

st.title("🧠 AI Life Coach")
st.markdown("Talk to your personal development assistant.")

# Debug panel with the raw status dict and environment details.
with st.expander("🔍 Connection Status"):
    st.write("Ollama Status:", ollama_status)
    st.write("Environment Info:")
    st.write("- Is HF Space:", IS_HF_SPACE)
    st.write("- Space ID:", SPACE_ID or "Not in HF Space")
    st.write("- Configured Ollama Host:", config.ollama_host)

if not ollama_status["running"]:
    st.warning("⚠️ Ollama is not accessible. Please check your Ollama/ngrok setup.")
    st.info("""
    Troubleshooting tips:
    1. Ensure your Ollama service is running locally
    2. Verify your ngrok tunnel is active and pointing to Ollama (port 11434)
    3. Check that the ngrok URL in your .env file matches your active tunnel
    4. Confirm that your ngrok account allows connections from Hugging Face Spaces
    """)
else:
    # Replay the saved conversation so the chat has on-screen context.
    conversation = get_conversation_history(user)
    for msg in conversation:
        role = msg["role"].capitalize()
        content = msg["content"]
        st.markdown(f"**{role}:** {content}")

    user_input = st.text_input("Your message...", key="input")
    if st.button("Send"):
        if user_input.strip() == "":
            st.warning("Please enter a message.")
        else:
            st.markdown(f"**You:** {user_input}")

            with st.spinner("AI Coach is thinking..."):
                try:
                    # Send only the last five saved turns plus the new
                    # message to keep the prompt small.
                    conversation_history = [
                        {"role": msg["role"], "content": msg["content"]}
                        for msg in conversation[-5:]
                    ]
                    conversation_history.append(
                        {"role": "user", "content": user_input}
                    )

                    payload = {
                        "model": config.local_model_name,
                        "messages": conversation_history,
                        "stream": False,  # ask for one JSON object, not a stream
                    }

                    response = requests.post(
                        f"{config.ollama_host}/api/chat",
                        json=payload,
                        headers=NGROK_HEADERS,
                        timeout=60,
                    )
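
                    # With "stream": False, /api/chat returns a single JSON
                    # object shaped roughly like (abridged):
                    #   {"message": {"role": "assistant", "content": "..."}}
                    # Only message.content is used below.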
                    if response.status_code == 200:
                        response_data = response.json()
                        ai_response = response_data.get("message", {}).get("content", "")
                        st.markdown(f"**AI Coach:** {ai_response}")
                    else:
                        st.error(f"Failed to get response from Ollama: {response.status_code}")
                        st.error(response.text[:200])
                except Exception as e:
                    st.error(f"Connection error: {e}")
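
# Note: the new exchange is rendered but never written back through
# core.memory, so it disappears on the next Streamlit rerun. Persisting it
# would require a save counterpart to load_user_state (not provided here).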