Update app.py
app.py CHANGED
@@ -36,7 +36,7 @@ def create_web_search_vectors(search_results):
         documents.append(Document(page_content=content, metadata={"source": result['href']}))
     return FAISS.from_documents(documents, embed)
 
-async def get_response_with_search(query, model, use_embeddings, num_calls=3, temperature=0.2):
+async def get_response_with_search(query, model, use_embeddings, conversation_history, num_calls=3, temperature=0.2):
     search_results = duckduckgo_search(query)
 
     if not search_results:
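For context, the duckduckgo_search helper and the embed object used in this hunk are defined elsewhere in app.py and are not part of this change. A minimal sketch of what such a helper could look like, assuming the duckduckgo_search package's DDGS client (its result dicts carry the same 'title', 'body', and 'href' keys the code above reads):

# Sketch only; app.py's actual helper may differ.
from duckduckgo_search import DDGS

def duckduckgo_search(query, max_results=5):
    # Returns a list of dicts shaped like {"title": ..., "href": ..., "body": ...},
    # which is the shape create_web_search_vectors consumes.
    with DDGS() as ddgs:
        return list(ddgs.text(query, max_results=max_results))

# `embed` is assumed to be an embeddings object defined elsewhere in app.py and
# passed to FAISS.from_documents().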
@@ -51,14 +51,20 @@ async def get_response_with_search(query, model, use_embeddings, num_calls=3, te
     else:
         context = "\n".join([f"{result['title']}\n{result['body']}\nSource: {result['href']}" for result in search_results])
 
-    system_message = """
+    system_message = """You are a world-class AI system, capable of complex reasoning and reflection.
 Reason through the query inside <thinking> tags, and then provide your final response inside <output> tags.
 Providing comprehensive and accurate information based on web search results is essential.
 Your goal is to synthesize the given context into a coherent and detailed response that directly addresses the user's query.
 Please ensure that your response is well-structured, factual, and cites sources where appropriate.
-If you detect that you made a mistake in your reasoning at any point, correct yourself inside <reflection> tags."""
+If you detect that you made a mistake in your reasoning at any point, correct yourself inside <reflection> tags.
+Consider the conversation history when formulating your response to maintain context and coherence."""
 
-    user_message = f"""Using the following context from web search results:
+    conversation_context = "\n".join([f"Human: {msg['human']}\nAI: {msg['ai']}" for msg in conversation_history])
+
+    user_message = f"""Conversation history:
+{conversation_context}
+
+Using the following context from web search results:
 {context}
 
 Write a detailed and complete research document that fulfills the following user request: '{query}'
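Purely as an illustration (the history values are made up), the new conversation_context built in this hunk renders like this for a one-turn history:

# Hypothetical history, just to show the formatting produced above.
conversation_history = [
    {"human": "What is FAISS?", "ai": "FAISS is a library for fast similarity search."},
]
conversation_context = "\n".join([f"Human: {msg['human']}\nAI: {msg['ai']}" for msg in conversation_history])
print(conversation_context)
# Human: What is FAISS?
# AI: FAISS is a library for fast similarity search.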
@@ -104,8 +110,11 @@ async def respond(message, history, model, temperature, num_calls, use_embedding
     logging.info(f"Number of API Calls: {num_calls}")
     logging.info(f"Use Embeddings: {use_embeddings}")
 
+    # Convert Gradio history to a list of dictionaries
+    conversation_history = [{"human": h, "ai": a} for h, a in history]
+
     try:
-        async for main_content, sources in get_response_with_search(message, model, use_embeddings, num_calls=num_calls, temperature=temperature):
+        async for main_content, sources in get_response_with_search(message, model, use_embeddings, conversation_history, num_calls=num_calls, temperature=temperature):
            response = f"{main_content}\n\n{sources}"
            yield response
     except asyncio.CancelledError:
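The conversion added here assumes Gradio hands respond() the chat history in its tuple format, i.e. a list of (user, assistant) pairs. A small worked example of the mapping:

# Example input/output for the history conversion above (assumed tuple-format history).
history = [
    ("Hi", "Hello! How can I help?"),
    ("Summarize recent FAISS papers", "Here is a summary..."),
]
conversation_history = [{"human": h, "ai": a} for h, a in history]
# [{'human': 'Hi', 'ai': 'Hello! How can I help?'},
#  {'human': 'Summarize recent FAISS papers', 'ai': 'Here is a summary...'}]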
@@ -138,8 +147,8 @@ def create_gradio_interface():
            gr.Slider(minimum=1, maximum=5, value=1, step=1, label="Number of API Calls"),
            gr.Checkbox(label="Use Embeddings", value=False),
        ],
-       title="AI-powered Web Search Assistant",
-       description="Use web search to answer questions or generate summaries.",
+       title="AI-powered Conversational Web Search Assistant",
+       description="Use web search to answer questions or generate summaries. The assistant remembers previous interactions.",
        theme=gr.Theme.from_hub("allenai/gradio-theme"),
        css=css,
        examples=[
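The retitled title= and description= keywords sit inside the gr.ChatInterface(...) call that create_gradio_interface() builds around respond(). A self-contained sketch of that wiring under assumptions: the model dropdown, temperature slider, MODELS list, css string, and the stub respond() are placeholders, since only the two controls shown above and the title/description appear in this diff.

import gradio as gr

MODELS = ["mistralai/Mistral-7B-Instruct-v0.3"]  # hypothetical; app.py defines its own list
css = ""                                          # placeholder for the app's custom CSS

async def respond(message, history, model, temperature, num_calls, use_embeddings):
    # Stub with the same signature as app.py's respond(); the real function
    # streams results from get_response_with_search().
    yield f"(stub) you asked: {message}"

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Dropdown(choices=MODELS, value=MODELS[0], label="Select Model"),             # assumed control
        gr.Slider(minimum=0.1, maximum=1.0, value=0.2, step=0.1, label="Temperature"),  # assumed control
        gr.Slider(minimum=1, maximum=5, value=1, step=1, label="Number of API Calls"),
        gr.Checkbox(label="Use Embeddings", value=False),
    ],
    title="AI-powered Conversational Web Search Assistant",
    description="Use web search to answer questions or generate summaries. The assistant remembers previous interactions.",
    theme=gr.Theme.from_hub("allenai/gradio-theme"),
    css=css,
)

if __name__ == "__main__":
    demo.launch()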
@@ -168,6 +177,7 @@ def create_gradio_interface():
    5. Check or uncheck the "Use Embeddings" box to toggle between using embeddings or direct text summarization.
    6. Press Enter or click the submit button to get your answer.
    7. Use the provided examples or ask your own questions.
+   8. The assistant will remember previous interactions and maintain context throughout the conversation.
    """)
 
    return demo