Gary
Initial commit
cca58a9
raw
history blame
1.75 kB
from indexer import (
load_raw_dataset,
create_vector_database,
get_llm,
get_prompt_template,
)
import gradio as gr
def format_contexts(contexts):
    """Render retrieved documents as one numbered reference list.

    Each document is expected to carry ``metadata['question']`` and
    ``metadata['answer']``; every entry becomes a "Reference N:" section,
    and sections are joined by newlines.
    """
    sections = []
    for index, document in enumerate(contexts, start=1):
        meta = document.metadata
        sections.append(f"Reference {index}:\n{meta['question']}\n{meta['answer']}")
    return "\n".join(sections)
class CustomRAG:
    """Minimal retrieval-augmented generation pipeline.

    Wires together a vector store (for retrieval), an LLM (for generation),
    and a prompt template that receives the retrieved context plus the
    user's question.
    """

    def __init__(self, vector_db, llm, prompt_template):
        self.vector_db = vector_db
        self.llm = llm
        self.prompt_template = prompt_template

    def run(self, query, k=3):
        """Answer *query* using the top-*k* retrieved documents.

        Args:
            query: The user's free-text question.
            k: Number of documents to retrieve (default 3, matching the
               previous hard-coded behavior).

        Returns:
            A ``(response, contexts)`` tuple: the LLM output and the raw
            retrieved documents (so callers can inspect the evidence).
        """
        retriever = self.vector_db.as_retriever(search_kwargs={"k": k})
        contexts = retriever.get_relevant_documents(query)
        formatted_context = format_contexts(contexts)
        prompt = self.prompt_template.format(context=formatted_context, question=query)
        return self.llm.invoke(prompt), contexts
# Memoized pipeline instance — see _build_rag().
_RAG = None


def _build_rag():
    """Build the RAG pipeline once and reuse it for every query.

    Loading the raw dataset, embedding it into a vector database, and
    loading a 7B-parameter LLM are all expensive; the original code
    repeated that work on every single request.
    """
    global _RAG
    if _RAG is None:
        docs = load_raw_dataset()
        _RAG = CustomRAG(
            create_vector_database(docs, "all-MiniLM-L6-v2"),
            get_llm("FreedomIntelligence/HuatuoGPT-o1-7B"),
            get_prompt_template(),
        )
    return _RAG


def answer_question(query):
    """Gradio callback: return the model's answer for *query*.

    The retrieved context documents are discarded; only the generated
    response text is shown to the user.
    """
    response, _ = _build_rag().run(query)
    return response
# Gradio UI wiring: one free-text symptom box in, plain-text answer out.
symptom_box = gr.Textbox(
    label="Describe your medical concern",
    placeholder="e.g. I've been feeling tired and dizzy lately.",
    lines=3,
)

demo = gr.Interface(
    fn=answer_question,
    inputs=[symptom_box],
    outputs="text",
    title="Medical Assistant – Powered by AI & RAG",
    description=(
        "Get helpful insights based on your described symptoms. "
        "This assistant uses medical reference data to provide informative responses. "
        "Note: This is not a substitute for professional medical advice."
    ),
)

# Start the web app (blocks until the server is stopped).
demo.launch()