Fix: Dynamic follow-up examples
Changed files:
- app.py +7 -2
- climateqa/engine/graph.py +2 -12
- front/tabs/chat_interface.py +1 -1
- front/tabs/main_tab.py +2 -1
app.py (CHANGED)

@@ -208,8 +208,8 @@ def event_handling(
             .then(finish_chat, None, [textbox], api_name=f"finish_chat_{examples_hidden.elem_id}")
         )
         (follow_up_examples_hidden
-            .change(start_chat, [
-            .then(chat, [
+            .change(start_chat, [follow_up_examples_hidden, chatbot, search_only], [follow_up_examples_hidden, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{examples_hidden.elem_id}")
+            .then(chat, [follow_up_examples_hidden, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs,follow_up_examples.dataset], concurrency_limit=8, api_name=f"chat_{examples_hidden.elem_id}")
             .then(finish_chat, None, [textbox], api_name=f"finish_chat_{follow_up_examples_hidden.elem_id}")
         )

@@ -227,6 +227,11 @@ def event_handling(
             .then(chat_poc, [examples_hidden, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs], concurrency_limit=8, api_name=f"chat_{examples_hidden.elem_id}")
             .then(finish_chat, None, [textbox], api_name=f"finish_chat_{examples_hidden.elem_id}")
         )
+        (follow_up_examples_hidden
+            .change(start_chat, [follow_up_examples_hidden, chatbot, search_only], [follow_up_examples_hidden, tabs, chatbot, sources_raw], queue=False, api_name=f"start_chat_{examples_hidden.elem_id}")
+            .then(chat, [follow_up_examples_hidden, chatbot, dropdown_audience, dropdown_sources, dropdown_reports, dropdown_external_sources, search_only], [chatbot, new_sources_hmtl, output_query, output_language, new_figures, current_graphs,follow_up_examples.dataset], concurrency_limit=8, api_name=f"chat_{examples_hidden.elem_id}")
+            .then(finish_chat, None, [textbox], api_name=f"finish_chat_{follow_up_examples_hidden.elem_id}")
+        )

         new_sources_hmtl.change(lambda x : x, inputs = [new_sources_hmtl], outputs = [sources_textbox])
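The chain above follows Gradio's usual pattern for clickable suggestions: selecting a follow-up example writes its text into the hidden follow_up_examples_hidden textbox, whose .change event runs the same start_chat / chat / finish_chat pipeline as a typed question, and chat refreshes the suggestions by listing follow_up_examples.dataset among its outputs. Below is a minimal, self-contained sketch of that pattern with simplified, hypothetical handlers; it assumes Gradio 4.x, where an Examples helper exposes its underlying gr.Dataset as .dataset and the samples can be replaced by returning gr.Dataset(samples=...).

import gradio as gr


def chat(question, history):
    # Stand-in for the real chat handler: answer the question, then propose
    # fresh follow-up suggestions for the Examples' underlying Dataset.
    history = history + [(question, f"(answer to: {question})")]
    follow_ups = [["Can you give more detail?"], ["What are the main sources?"]]
    return history, gr.Dataset(samples=follow_ups)


with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    textbox = gr.Textbox(label="Ask a question")
    follow_up_examples_hidden = gr.Textbox(visible=False)
    follow_up_examples = gr.Examples(
        examples=[["What is climate change?"]],  # hypothetical seed example
        inputs=[follow_up_examples_hidden],
        label="Follow up questions",
        run_on_click=False,
    )

    # Typed questions and clicked follow-ups share the same handler: clicking
    # an example only fills the hidden textbox, and its .change event drives
    # the chat and refreshes follow_up_examples.dataset.
    textbox.submit(chat, [textbox, chatbot], [chatbot, follow_up_examples.dataset])
    follow_up_examples_hidden.change(chat, [follow_up_examples_hidden, chatbot],
                                     [chatbot, follow_up_examples.dataset])

demo.launch()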
climateqa/engine/graph.py (CHANGED)

@@ -218,8 +218,6 @@ def make_graph_agent(llm, vectorstore_ipcc, vectorstore_graphs, vectorstore_regi
     workflow.add_edge("retrieve_graphs", END)
     workflow.add_edge("answer_rag", "generate_follow_up")
     workflow.add_edge("answer_rag_no_docs", "generate_follow_up")
-    # workflow.add_edge("answer_rag", END)
-    # workflow.add_edge("answer_rag_no_docs", END)
     workflow.add_edge("answer_chitchat", "chitchat_categorize_intent")
     workflow.add_edge("retrieve_graphs_chitchat", END)

@@ -287,7 +285,6 @@ def make_graph_agent_poc(llm, vectorstore_ipcc, vectorstore_graphs, vectorstore_
     workflow.add_node("answer_rag", answer_rag)
     workflow.add_node("answer_rag_no_docs", answer_rag_no_docs)
     workflow.add_node("generate_follow_up", generate_follow_up)
-    workflow.add_node("process_follow_up", standalone_question_node)

     # Entry point
     workflow.set_entry_point("standalone_question")

@@ -322,12 +319,6 @@ def make_graph_agent_poc(llm, vectorstore_ipcc, vectorstore_graphs, vectorstore_
         make_id_dict(["retrieve_graphs", END])
     )

-    workflow.add_conditional_edges(
-        "generate_follow_up",
-        route_follow_up,
-        make_id_dict(["process_follow_up", END])
-    )
-
     # Define the edges
     workflow.add_edge("standalone_question", "categorize_intent")
     workflow.add_edge("translate_query", "transform_query")

@@ -338,14 +329,13 @@ def make_graph_agent_poc(llm, vectorstore_ipcc, vectorstore_graphs, vectorstore_
     workflow.add_edge("retrieve_graphs", END)
     workflow.add_edge("answer_rag", "generate_follow_up")
     workflow.add_edge("answer_rag_no_docs", "generate_follow_up")
-    workflow.add_edge("answer_rag", END)
-    workflow.add_edge("answer_rag_no_docs", END)
     workflow.add_edge("answer_chitchat", "chitchat_categorize_intent")
     workflow.add_edge("retrieve_graphs_chitchat", END)

     workflow.add_edge("retrieve_local_data", "answer_search")
     workflow.add_edge("retrieve_documents", "answer_search")
-    workflow.add_edge("
+    workflow.add_edge("generate_follow_up",END)
+

     # Compile
     app = workflow.compile()
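The net effect on the POC graph is that follow-up generation becomes a terminal step: answer_rag and answer_rag_no_docs both feed generate_follow_up, which now goes straight to END instead of detouring through process_follow_up / route_follow_up. The sketch below shows only that simplified tail of the graph, with stand-in state and node functions (the real nodes and routing live in climateqa/engine); it assumes the current langgraph API (StateGraph, add_node, add_edge, set_conditional_entry_point, END).

from typing import TypedDict

from langgraph.graph import StateGraph, END


class State(TypedDict):
    documents: list
    answer: str
    follow_up_questions: list


def answer_rag(state: State) -> dict:
    # Stand-in: the real node builds an answer from the retrieved documents.
    return {"answer": "answer grounded in retrieved documents"}


def answer_rag_no_docs(state: State) -> dict:
    # Stand-in: fallback answer when retrieval returned nothing.
    return {"answer": "fallback answer (no documents retrieved)"}


def generate_follow_up(state: State) -> dict:
    # Stand-in: the real node asks the LLM for follow-up suggestions.
    return {"follow_up_questions": ["Suggested follow-up 1", "Suggested follow-up 2"]}


def route_retrieval(state: State) -> str:
    # Stand-in router: take the no-docs branch when nothing was retrieved.
    return "answer_rag" if state.get("documents") else "answer_rag_no_docs"


workflow = StateGraph(State)
workflow.add_node("answer_rag", answer_rag)
workflow.add_node("answer_rag_no_docs", answer_rag_no_docs)
workflow.add_node("generate_follow_up", generate_follow_up)

workflow.set_conditional_entry_point(
    route_retrieval,
    {"answer_rag": "answer_rag", "answer_rag_no_docs": "answer_rag_no_docs"},
)
# Both answer nodes feed follow-up generation, which is now a terminal node.
workflow.add_edge("answer_rag", "generate_follow_up")
workflow.add_edge("answer_rag_no_docs", "generate_follow_up")
workflow.add_edge("generate_follow_up", END)

app = workflow.compile()
print(app.invoke({"documents": [], "answer": "", "follow_up_questions": []}))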
front/tabs/chat_interface.py (CHANGED)

@@ -56,7 +56,7 @@ def create_chat_interface(tab):
         )
         with gr.Row(elem_id="follow-up-examples"):
             follow_up_examples_hidden = gr.Textbox(visible=False, elem_id="follow-up-hidden")
-            follow_up_examples = gr.Examples(examples=[
+            follow_up_examples = gr.Examples(examples=[ ], label="Follow up questions", inputs= [follow_up_examples_hidden], elem_id="follow-up-button", run_on_click=False)

         with gr.Row(elem_id="input-message"):
front/tabs/main_tab.py (CHANGED)

@@ -1,4 +1,5 @@
 import gradio as gr
+from gradio.helpers import Examples
 from typing import TypedDict
 from .chat_interface import create_chat_interface
 from .tab_examples import create_examples_tab

@@ -29,7 +30,7 @@ class MainTabPanel:
     tab_graphs: gr.Tab
     tab_papers: gr.Tab
     graph_container: gr.HTML
-    follow_up_examples :
+    follow_up_examples : Examples
     follow_up_examples_hidden : gr.Textbox

 def cqa_tab(tab_name):