Fix bug on frontend and backend

Files changed:
- app.py +45 -14
- explorepage.html +3 -3
- search.html +3 -3
- static/Home.png +0 -0
app.py
CHANGED

@@ -5,7 +5,7 @@ from http.server import BaseHTTPRequestHandler
 from urllib.parse import urlparse, parse_qs
 import traceback
 from pydantic import BaseModel, Field
-from typing import List, Dict, Tuple
+from typing import List, Dict, Tuple, Optional
 import os
 from langchain_community.vectorstores import FAISS
 from langchain_community.embeddings import FakeEmbeddings
@@ -13,9 +13,16 @@ from langchain_community.vectorstores.utils import DistanceStrategy
 from together import Together
 import numpy as np
 from collections import defaultdict
+from fastapi import FastAPI, HTTPException
+from fastapi.responses import FileResponse
+from fastapi.middleware.cors import CORSMiddleware
+from dotenv import load_dotenv
+from fastapi.staticfiles import StaticFiles
 
 app = FastAPI(title="Knowledge Graph API")
 
+app.mount("/static", StaticFiles(directory="static"), name="static")
+
 # Enable CORS for frontend access
 app.add_middleware(
     CORSMiddleware,
@@ -116,7 +123,7 @@ def get_news_db():
 def get_definitions_db():
     conn = None
     try:
-        conn =
+        conn = sqlite3.connect(DATABASE_CONFIG["definitions_db"])
         yield conn
     finally:
         if conn:
@@ -132,6 +139,7 @@ def retrieve_triplets(query: str) -> Tuple[List[Tuple[str, str, str]], List[Tupl
         - List of triplets: [(head, relation, tail), ...]
         - List of relations with definitions: [(relation, definition), ...]
     """
+    load_dotenv()
     API_KEY = os.environ.get("TOGETHER_API_KEY")
     client = Together(api_key = API_KEY)
 
@@ -200,6 +208,7 @@ def retrieve_news(query: str) -> Dict[str, str]:
         - Related content
         - Links of the related content
     """
+    load_dotenv()
     API_KEY = os.environ.get("TOGETHER_API_KEY")
     client = Together(api_key = API_KEY)
 
@@ -263,7 +272,8 @@ def extract_information_from_triplets(query: str,
     list of relations and their definition: {relations}
     extracted information:
     '''
-
+
+    load_dotenv()
     API_KEY = os.environ.get("TOGETHER_API_KEY")
     client = Together(api_key = API_KEY)
 
@@ -286,15 +296,15 @@ def extract_information_from_triplets(query: str,
 
     return response.choices[0].message.content
 
-def extract_information_from_news(query: str,
-
-    """
+def extract_information_from_news(query: str, news_list: Dict[str, str]) -> Tuple[str, List[str]]:
+    """
     Args:
         news_list: List from retrieve_news
 
     Returns:
         Extracted information string
     """
+
     system_prompt = f'''Given a list of some information related to the query, extract all important information from the list to answer query question.
     Every item in the list represent one information, if the information is ambiguous (e.g. contains unknown pronoun to which it refers), do not use that information to answer the query.
     You don't have to use all the information, only use the information that has clarity and a good basis, but try to use as many information as possible.
@@ -309,6 +319,10 @@ def extract_information_from_news(query: str,
     output:
     '''
 
+    load_dotenv()
+    API_KEY = os.environ.get("TOGETHER_API_KEY")
+    client = Together(api_key = API_KEY)
+
     response = client.chat.completions.create(
         model="meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
         response_format={
@@ -343,7 +357,6 @@ def extract_information(query:str, triplet_info: str, news_info: str, language:s
     Returns:
         str: Final answer for the user
     """
-    client = Together(api_key = API_KEY)
     system_prompt = f'''Given information from two sources, combine the information and make a comprehensive and informative paragraph that answer the query.
     Make sure the output paragraph includes all crucial information and given in detail.
     If there is no related or useful information can be extracted from the triplets to answer the query question, inform "No related information found."
@@ -358,6 +371,10 @@ def extract_information(query:str, triplet_info: str, news_info: str, language:s
     extracted information:
     '''
 
+    load_dotenv()
+    API_KEY = os.environ.get("TOGETHER_API_KEY")
+    client = Together(api_key = API_KEY)
+
     response = client.chat.completions.create(
         model="meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
         response_format={
@@ -413,6 +430,9 @@ def query_language(query):
     query: {query}
     output:
     '''
+
+    load_dotenv()
+    API_KEY = os.environ.get("TOGETHER_API_KEY")
     client = Together(api_key = API_KEY)
 
     response = client.chat.completions.create(
@@ -439,6 +459,18 @@ def query_language(query):
 
 #API ENDPOINTS
 
+@app.get("/", response_class=FileResponse)
+def serve_index():
+    return FileResponse("index.html")
+
+@app.get("/explorepage.html", response_class=FileResponse)
+def serve_explore_page():
+    return FileResponse("explorepage.html")
+
+@app.get("/search.html", response_class=FileResponse)
+def serve_search_page():
+    return FileResponse("search.html")
+
 @app.post("/api/query", response_model=QueryResponse)
 def process_query(request: QueryRequest):
     """Process user query and return comprehensive response"""
@@ -517,9 +549,7 @@ def process_query(request: QueryRequest):
 
 @app.get("/api/graph", response_model=GraphData)
 def get_graph_data(
-    search: Optional[str] = None
-    triplets_db: sqlite3.Connection = Depends(get_triplets_connection),
-    definitions_db: sqlite3.Connection = Depends(get_definitions_connection)
+    search: Optional[str] = None
 ):
     """Get complete graph data with nodes and edges."""
 
@@ -541,8 +571,9 @@ def get_graph_data(
     base_query += " LIMIT 1000"
 
     # Get triplets
-
-
+    with get_triplets_db() as conn:
+        cursor = conn.execute(base_query, params)
+        triplets = cursor.fetchall()
 
     with get_definitions_db() as conn:
         # Get definitions
@@ -590,7 +621,7 @@ if __name__ == "__main__":
     print(f"Definitions DB: {DATABASE_CONFIG['definitions_db']}")
 
     import uvicorn
-    port = int(os.environ.get("PORT",
-    uvicorn.run(app, host="
+    port = int(os.environ.get("PORT", 8000))
+    uvicorn.run(app, host="localhost", port=port)
 
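Note on the backend change: the new "with get_triplets_db() as conn:" block above only works if the database helpers can be used as context managers. A minimal sketch of that pattern follows; the contextlib.contextmanager decorator, the triplets_db config key, and the file paths are assumptions (they sit outside the hunks shown), while the sqlite3.connect(DATABASE_CONFIG["definitions_db"]) call mirrors the fixed line in the diff (new line 126).

import sqlite3
from contextlib import contextmanager

# Assumed layout: "definitions_db" appears in the diff; the other key and the paths are illustrative.
DATABASE_CONFIG = {
    "triplets_db": "triplets.db",
    "definitions_db": "definitions.db",
}

@contextmanager
def get_definitions_db():
    conn = None
    try:
        # Same connect call as the fixed line in the diff above.
        conn = sqlite3.connect(DATABASE_CONFIG["definitions_db"])
        yield conn
    finally:
        if conn:
            conn.close()  # release the handle even if the caller raised

@contextmanager
def get_triplets_db():
    conn = None
    try:
        conn = sqlite3.connect(DATABASE_CONFIG["triplets_db"])
        yield conn
    finally:
        if conn:
            conn.close()

Used this way, "with get_triplets_db() as conn:" opens the connection, hands it to the endpoint body, and closes it when the block exits, which is what the new code in get_graph_data relies on.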
explorepage.html
CHANGED

@@ -412,7 +412,7 @@
     <div class="main-content">
         <!-- Home Button in Top-Right -->
         <button class="main-home-btn" id="mainHomeBtn">
-            <img src="Home.png" alt="Home" style="width: 20px; height: 20px;">
+            <img src="/static/Home.png" alt="Home" style="width: 20px; height: 20px;">
         </button>
         <div id="loading" class="loading">
             <div class="loading-spinner"></div>
@@ -426,7 +426,7 @@
 
     <script>
         // Configuration
-        const API_BASE = '/api';
+        const API_BASE = 'http://localhost:8000/api';
 
         // Global variables
         let graphData = { nodes: [], edges: [] };
@@ -468,7 +468,7 @@
 
         function goHome() {
            // Navigate back to main page
-            window.location.href = '
+            window.location.href = 'http://localhost:8000/'; // Adjust path as needed
         }
 
         function setupVisualizationSVG() {
search.html
CHANGED

@@ -425,7 +425,7 @@
     <!-- Header with Search -->
     <div class="header">
         <button class="home-btn" id="homeBtn" title="Go to Home">
-            <img src="Home.png" alt="Home" style="width: 20px; height: 20px;">
+            <img src="/static/Home.png" alt="Home" style="width: 20px; height: 20px;">
         </button>
         <div class="search-container">
             <input
@@ -484,7 +484,7 @@
 
     <script>
         // Configuration
-        const API_BASE = '/api';
+        const API_BASE = 'http://localhost:8000/api';
 
         // Global variables
         let currentQuery = '';
@@ -530,7 +530,7 @@
         }
 
         function goHome() {
-            window.location.href = '
+            window.location.href = 'http://localhost:8000/';
         }
 
         function setupMiniGraph() {
static/Home.png
ADDED
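Taken together, the changes wire the frontend to the backend: app.py now serves index.html, explorepage.html, search.html and the static/ directory, and both pages call the API at http://localhost:8000/api. A quick sanity check after starting the server (python app.py) is sketched below; it assumes the server is reachable on localhost:8000 as configured in app.py and that the requests package is installed.

import requests

BASE = "http://localhost:8000"  # host and port as set in app.py's __main__ block

# The root route should now return index.html instead of a 404.
assert requests.get(f"{BASE}/").status_code == 200

# Home.png is served from the newly mounted /static directory.
assert requests.get(f"{BASE}/static/Home.png").status_code == 200

# The graph endpoint the pages reach through API_BASE; the search parameter is optional.
resp = requests.get(f"{BASE}/api/graph", params={"search": "example"})
print(resp.status_code, resp.json() if resp.ok else resp.text)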