import os

import gradio as gr
from huggingface_hub import InferenceClient

from retriever import find_similar_foundations
from retriever_m3 import find_similar_foundations_api
from chat import chat_with_model

# -------------------------------------------------------------------
# 1. Set up client for chatbot
# -------------------------------------------------------------------
# Use my token stored as a Space secret for inference
client_chat = InferenceClient(
    provider="featherless-ai",
    api_key=os.environ["HF_TOKEN_inf"],
)

# -------------------------------------------------------------------
# 2. Set up client for bge-m3 similarity search
# -------------------------------------------------------------------
client_m3 = InferenceClient(
    provider="hf-inference",  # for embedding similarity
    api_key=os.environ["HF_TOKEN_inf"],
)

# -------------------------------------------------------------------
# 3. Foundations retriever: bge-m3 via Inference API
# -------------------------------------------------------------------
def retrieve_foundations_m3(perspective, top_k=5):
    """Find foundations aligned with the user-provided perspective (bge-m3 API)."""
    results = find_similar_foundations_api(perspective, client=client_m3, top_k=int(top_k))
    # Rows match the Dataframe headers: Title, Purpose, Score
    return [(r["Title"], r["Purpose"], f"{r['score']:.4f}") for r in results]

# -------------------------------------------------------------------
# 4. Foundations retriever: bge-en-icl with local FAISS index
# -------------------------------------------------------------------
def retrieve_foundations(perspective, top_k=5):
    """Find foundations aligned with the user-provided perspective (FAISS / bge-en-icl)."""
    results = find_similar_foundations(perspective, top_k=int(top_k))
    # Rows match the Dataframe headers: Title, Purpose, Similarity
    return [(res["Title"], res["Purpose"], f"{res['Score']:.3f}") for res in results]

# -------------------------------------------------------------------
# 5. Gradio interface
# -------------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("# Mistral Perspective Chatbot & Foundation Finder")

    with gr.Tab("💬 Chatbot"):
        perspective_input = gr.Textbox(
            label="Enter your philanthropic perspective (optional)",
            placeholder="e.g. Environmental philanthropist emphasizing animal protection while fostering children's education",
        )
        chatbot = gr.Chatbot(type="messages")
        msg = gr.Textbox(placeholder="Ask me anything...", show_label=False)
        state = gr.State([])  # stores the conversation in messages format

        # Streaming callback from chat.py
        msg.submit(
            chat_with_model,
            [msg, state, perspective_input],
            [chatbot, state],
        )

    with gr.Tab("🔎 M3 Aligned Foundations"):
        perspective_api = gr.Textbox(label="Enter your philanthropic perspective")
        top_k_api = gr.Slider(1, 5, value=2, step=1, label="Number of results")
        output_api = gr.Dataframe(headers=["Title", "Purpose", "Score"])
        gr.Button("Find Foundations").click(
            fn=retrieve_foundations_m3,
            inputs=[perspective_api, top_k_api],
            outputs=output_api,
        )

    with gr.Tab("🔎 FAISS ICL Aligned Foundations"):
        perspective = gr.Textbox(
            label="Enter your philanthropic perspective",
            placeholder="e.g. Environmental philanthropist emphasizing animal protection while fostering children's education",
        )
        top_k = gr.Slider(1, 5, value=2, step=1, label="Number of results")
        output = gr.Dataframe(headers=["Title", "Purpose", "Similarity"], wrap=True)
        btn = gr.Button("Find Foundations")
        btn.click(fn=retrieve_foundations, inputs=[perspective, top_k], outputs=output)

demo.launch(server_name="0.0.0.0", server_port=7860)