import datetime
import os.path
import sys
import uuid
from os import PathLike

import gradio as gr
import pandas as pd
import torch

from config import APP_CONFIG
from data_repository import REPOSITORY_INSTANCE, ModelScoringResult
from designs_submission_validations import validate_github_link, validate_user_designs
from domain_constants import SCORE_NAMES_MAP

# Make the vendored bike_bench_internal package importable before importing bikebench.
sys.path.append(os.path.join(os.path.dirname(__file__), "bike_bench_internal/src/"))

from bikebench.benchmarking.benchmarking_utils import evaluate_designs
from bikebench.data_loading import data_loading

# Expected column order for submitted designs, taken from the training dataset.
COLUMNS = list(data_loading.load_bike_bench_train().columns)
def compute_scores(user_gen_designs: pd.DataFrame) -> ModelScoringResult:
    # Reindex the submission against the expected column set and order.
    user_gen_designs = pd.DataFrame(user_gen_designs, columns=COLUMNS)
    designs_length = len(user_gen_designs)
    if designs_length < 10_000:
        raise ValueError(f"Too few designs to evaluate. Expected at least 10,000, got {designs_length}")

    data_tens = torch.tensor(user_gen_designs.values, dtype=torch.float32)
    main_scores, detailed_scores, all_evaluation_scores = evaluate_designs(data_tens)

    return ModelScoringResult(
        uuid=str(uuid.uuid4()),
        submission_time=datetime.datetime.now(),
        design_quality=main_scores[SCORE_NAMES_MAP["design_quality"]],
        diversity_dpp=main_scores[SCORE_NAMES_MAP["diversity_dpp"]],
        mean_novelty=main_scores[SCORE_NAMES_MAP["mean_novelty"]],
        sim_to_data_mmd=main_scores[SCORE_NAMES_MAP["sim_to_data_mmd"]],
        mean_violations=main_scores[SCORE_NAMES_MAP["mean_violations"]],
        binary_validity=main_scores[SCORE_NAMES_MAP["binary_validity"]],
    )
def process_generated_designs(github_link: str, file: PathLike[str]):
    validate_github_link(github_link)

    # gr.File passes the path of the uploaded file; parse it as CSV.
    with open(file, 'r') as user_file:
        user_gen_designs = pd.read_csv(user_file)

    validate_user_designs(user_gen_designs)
    scores = compute_scores(user_gen_designs)
    REPOSITORY_INSTANCE.add_row(scores)
    return f"File uploaded successfully, uuid {scores.uuid}"
def build_approval_app():
    # Placeholder: submission-approval UI not implemented yet.
    pass
def build_app():
    with gr.Blocks() as gradio_app:
        with gr.Tab("Bike Bench Leaderboard"):
            gr.Markdown("Hello beautiful people!")
            # Passing a callable makes Gradio re-fetch the leaderboard data on each app load.
            gr.Dataframe(REPOSITORY_INSTANCE.get_data_to_display, label="Scores of Previous Files")
        with gr.Tab("Upload File"):
            gr.Interface(
                fn=process_generated_designs,
                inputs=[
                    gr.Textbox(label="Github Link"),
                    gr.File(label="Upload a file"),
                ],
                outputs="text",
                title="Bike Bench Leaderboard",
                description="Upload a file to see the result.",
            )
    return gradio_app


build_app().launch(debug=(not APP_CONFIG.production))
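
# Illustrative local smoke test (a hypothetical sketch, not part of the deployed app;
# it assumes compute_scores and COLUMNS are in scope, e.g. in an interactive session).
# Random values will score poorly, but the call exercises the full evaluation path:
#
#   import numpy as np
#   dummy = pd.DataFrame(np.random.rand(10_000, len(COLUMNS)), columns=COLUMNS)
#   print(compute_scores(dummy))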