import gradio as gr
from huggingface_hub import InferenceClient
from sentence_transformers import SentenceTransformer
import torch

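# Visual styling: a Soft Gradio theme plus CSS that forces a dark page background.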
theme = gr.themes.Soft(
    primary_hue="gray",
    secondary_hue="red",
    neutral_hue="orange"
)

custom_css = """
:root {
    --background-fill-primary: #1A1A1A !important;
}

.dark {
    --background-fill-primary: #1A1A1A !important;
}
"""

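# research.txt holds the reference notes the chatbot answers from; it is read
# once at startup, then chunked and embedded below.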
with open("research.txt", "r", encoding="utf-8") as file:
    research_text = file.read()

def preprocess_text(text):
    cleaned_text = text.strip()
    chunks = cleaned_text.split("\n")
    cleaned_chunks = [chunk.strip() for chunk in chunks if chunk.strip() != ""]
    return cleaned_chunks

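# Illustrative example: preprocess_text("  line one\n\n line two ") returns
# ["line one", "line two"], i.e. one chunk per non-empty line of the file.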
cleaned_chunks = preprocess_text(research_text)

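# all-MiniLM-L6-v2 is a compact sentence-embedding model; the same instance
# embeds both the research chunks and, later, each user query.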
model = SentenceTransformer('all-MiniLM-L6-v2')

def create_embeddings(text_chunks):
    chunk_embeddings = model.encode(text_chunks, convert_to_tensor=True)
    return chunk_embeddings

chunk_embeddings = create_embeddings(cleaned_chunks)

def get_top_chunks(query, chunk_embeddings, text_chunks):
    # Embed the query, score every chunk by cosine similarity (dot product of
    # L2-normalized vectors), and return the 3 best-matching chunks.
    query_embedding = model.encode(query, convert_to_tensor=True)
    query_embedding_normalized = query_embedding / query_embedding.norm()
    chunk_embeddings_normalized = chunk_embeddings / chunk_embeddings.norm(dim=1, keepdim=True)
    similarities = torch.matmul(chunk_embeddings_normalized, query_embedding_normalized)
    top_indices = torch.topk(similarities, k=3).indices
    top_chunks = [text_chunks[i] for i in top_indices]
    return top_chunks

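# Illustrative call, using the objects defined above:
#   get_top_chunks("What engine does the R34 use?", chunk_embeddings, cleaned_chunks)
# would return the three research.txt chunks most similar to that question.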
client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")

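# Chat callback: retrieve the three most relevant chunks, fold them into the
# system prompt, replay the conversation history, then ask the hosted model
# for a short, complete answer.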
def respond(message, history):
    top_results = get_top_chunks(message, chunk_embeddings, cleaned_chunks)
    str_top_results = '\n'.join(top_results)
    messages = [
        {'role': 'system', 'content': f'You are a chatbot. Complete all your sentences, be concise, and do not cut yourself off. The word limit is 100 words. Start off by naming a car in a complete, informative sentence, and then, if prompted by the user, provide more information such as engine type, problems, solutions, transmission, drivetrain, pros, cons, etc. Base your response on the provided context:\n{str_top_results}'}
    ]
    if history:
        messages.extend(history)
    messages.append({'role': 'user', 'content': message})

    response = client.chat_completion(
        messages,
        max_tokens=1000,
        temperature=0.2
    )
    return response['choices'][0]['message']['content'].strip()

def display_image():
    return "banner.jpg"

def show_info(topic):
    responses = {
        "Fastest Nissan GTR Models": "1. GTR Nismo (R35)\n2. GTR SpecV (R35)\n3. GTR Track Edition (R35)\n4. GTR R34 V-Spec II Nür\n5. GTR R33 V-Spec",
        "Most Powerful Nissan Engines": "1. VR38DETT (GTR R35 Nismo)\n2. RB26DETT (GTR R34)\n3. VQ38DETT (Frontier/Patrol Prototype)\n4. VK56DETT (GT-R LM Nismo LMP1)\n5. RB30DET (Custom R31 Skyline Builds)",
        "Quickest 0–60 Times": "1. Nissan GTR Nismo – 2.5 sec\n2. Lucid Air Sapphire – 1.86 sec\n3. Porsche 911 Turbo S – 2.2 sec\n4. Ferrari SF90 Stradale – 2.3 sec\n5. Lamborghini Huracán Performante – 2.4 sec",
        "Best Handling Sports Cars": "1. Mazda MX-5 Miata\n2. Porsche Cayman GT4\n3. Chevrolet Corvette Z06\n4. Nissan GTR R35\n5. BMW M2 Competition",
        "Top Speed Monsters": "1. Bugatti Chiron Super Sport 300+ – 304 mph\n2. Koenigsegg Jesko Absolut – 330+ mph (theoretical)\n3. Hennessey Venom F5 – 311 mph\n4. SSC Tuatara – 282 mph (verified)\n5. Bugatti Mistral – 282 mph"
    }
    return responses.get(topic, "Select a category to see the corresponding cars.")

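# Career-resources helper (not referenced by the UI below). It expects a
# module-level dict that is not defined in this file, assumed to look roughly
# like:
#   resources = {career: {"links": [(label, url), ...],
#                         "college": {"major": str, "classes": [str, ...]}}}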
def show_resources(career):  # placeholder signature; see the note above
    content = resources.get(career)
    if not content:
        return "Select a career to see resources.", ""

    # Build an HTML list of links from (label, url) pairs.
    link_html = "<ul>"
    for label, url in content["links"]:
        link_html += f'<li><strong>{label}</strong>: <a href="{url}" target="_blank">{url}</a></li>'
    link_html += "</ul>"

    # Optional college/major details.
    college_html = ""
    if "college" in content:
        college = content["college"]
        college_html += "<p><strong>College & Classes</strong></p><ul>"
        college_html += f"<li><em>Common Major(s):</em> {college['major']}</li>"
        classes_list = college.get("classes", [])
        if isinstance(classes_list, list):
            classes_html = ", ".join(classes_list)
        else:
            classes_html = str(classes_list)
        college_html += f"<li><em>Helpful College Classes:</em> {classes_html}</li>"
        college_html += "</ul>"

    return link_html + college_html, ""

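# UI layout: banner image on top, a "ChatBot" tab backed by respond(), and an
# "Explore Now" tab that shows preset GT-R lists via show_info().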
with gr.Blocks(theme=theme, css=custom_css) as chatbot:
    gr.Image(display_image)

    with gr.Tab("ChatBot"):
        gr.ChatInterface(
            respond,
            type="messages",
            title="Hi, I'm RB26 AI!",
            textbox=gr.Textbox(placeholder="Describe your car's problems and I can provide steps to solve them! If you don't have any problems and are here to learn about Nissan Skylines and GT-Rs, you are welcome to type your questions."),
            description='This tool provides information on Nissan Skylines and GT-Rs.'
        )

    with gr.Tab("Explore Now"):
        gr.Markdown("### Explore More Nissan GT-R Records")
        dropdown_explore = gr.Dropdown(
            choices=[
                "Fastest Nissan GTR Models",
                "Most Powerful Nissan Engines",
                "Quickest 0–60 Times",
                "Best Handling Sports Cars",
                "Top Speed Monsters"
            ],
            label="Choose a Category"
        )
        output_explore = gr.Markdown()
        dropdown_explore.change(fn=show_info, inputs=dropdown_explore, outputs=output_explore)

chatbot.launch()