# app.py — FINAL VERSION
import gradio as gr
from backend import (
login_user,
signup_user,
get_user_runs,
get_run_logs,
queue_training_run,
publish_run_to_hub,
run_inference
)
from utils import ARCH_ANALOGIES, get_auto_hyperparams
# ------------------------------ STATE ------------------------------
# Module-level session state shared by every UI callback below.
#   user_id:     set by login_action on success; None means "not logged in".
#   username:    the logged-in user's name (set alongside user_id).
#   arch_config: wizard state written by page_architecture_next and read by
#                page_hyperparams_next (arch_type, num_layers, auto_config).
# NOTE(review): module-level dict means one shared session for all visitors —
# fine for a single-user demo; confirm before multi-user deployment.
user_state = {"user_id": None, "username": "", "arch_config": {}}
# ------------------------------ BACKEND WRAPPERS (UI LOGIC) ------------------------------
def login_action(username, password):
    """Attempt a login; on success swap the login page for the processes page.

    Returns updates for (page_login_ui, page_processes_ui, login_msg,
    runs_display) in that order, matching the click() wiring.
    """
    uid, message = login_user(username, password)
    if not uid:
        # Failed login: leave page visibility unchanged, surface the message.
        return gr.update(), gr.update(), message, ""
    user_state["user_id"] = uid
    user_state["username"] = username
    return gr.update(visible=False), gr.update(visible=True), message, page_processes()
def signup_action(username, password):
    """Create an account; on success move from the sign-up page to the login page.

    Returns updates for (page_signup_ui, page_login_ui, signup_msg).
    """
    uid, message = signup_user(username, password)
    if not uid:
        # Sign-up failed: keep both pages as they are, show the backend message.
        return gr.update(), gr.update(), message
    return gr.update(visible=False), gr.update(visible=True), message
def page_processes():
    """Render the current user's training runs, one line per run.

    Each run row is expected as (id, arch_type, num_layers, status, ...);
    only the first four fields are displayed.
    """
    uid = user_state.get("user_id")
    if not uid:
        return "Login required."
    lines = [
        f"π³ Run #{run_id} | {arch.upper()} x{layers}L | Status: {status}"
        for run_id, arch, layers, status in (row[:4] for row in get_user_runs(uid))
    ]
    return "\n".join(lines) or "No runs yet."
def load_run_logs(run_id_str):
    """Fetch and format the logs for one run, validating the ID first.

    Fix: the original used a bare ``except:`` around the whole body, which
    silently swallowed *every* error (including KeyError/NameError from the
    lookup code) and misreported it as a format problem. The narrow
    (TypeError, ValueError) handler now guards only the int() conversion.
    """
    try:
        run_id = int(run_id_str)
    except (TypeError, ValueError):
        # Non-numeric, empty, or None input from the textbox.
        return "Invalid Run ID format. Please enter a number."
    user_id = user_state["user_id"]
    logs, status = get_run_logs(user_id, run_id)
    if status == "unknown":
        # Backend signals "unknown" both for missing runs and ownership failures.
        return "Error: Run not found or you do not have permission to view it."
    return f"Status: {status}\n\n{logs}"
def page_architecture_next(arch_type, num_layers):
    """Record the architecture choice, then advance to the hyperparameter page.

    Returns updates for (page_arch_ui, page_hyper_ui, arch_analogy,
    auto_suggestion, lr_input, epochs_input, batch_input).
    """
    auto = get_auto_hyperparams(arch_type, num_layers)
    # Stash the full choice so page_hyperparams_next can fall back to autos.
    user_state["arch_config"] = {
        "arch_type": arch_type,
        "num_layers": num_layers,
        "auto_config": auto,
    }
    hint = (
        "π§ Auto-Seasoningβ’ Suggestion:\n"
        f"LR: {auto['learning_rate']} | Epochs: {auto['epochs']} | Batch: {auto['batch_size']}"
    )
    return (
        gr.update(visible=False),
        gr.update(visible=True),
        ARCH_ANALOGIES.get(arch_type, ""),
        hint,
        auto['learning_rate'],
        auto['epochs'],
        auto['batch_size'],
    )
def page_hyperparams_next(lr, epochs, batch_size):
    """Finalize the run config (blank fields fall back to the auto values),
    queue the training run, and return to the processes page.
    """
    cfg = user_state["arch_config"]
    auto = cfg["auto_config"]
    final_config = {
        "arch_type": cfg["arch_type"],
        "num_layers": cfg["num_layers"],
        # Empty/zero inputs mean "use the auto-suggested value".
        "learning_rate": float(lr) if lr else auto["learning_rate"],
        "epochs": int(epochs) if epochs else auto["epochs"],
        "batch_size": int(batch_size) if batch_size else auto["batch_size"],
    }
    queue_training_run(user_state["user_id"], final_config)
    return gr.update(visible=False), gr.update(visible=True), page_processes()
def get_completed_runs():
    """Return only the current user's runs whose status field is 'completed'."""
    every_run = get_user_runs(user_state["user_id"])
    return list(filter(lambda run: run[3] == 'completed', every_run))
def go_to_page_with_run_list(page_to_show):
    """Leave the processes page for `page_to_show`, filling its model dropdown.

    Refuses to navigate (with a warning toast) when no completed runs exist.
    Returns updates for (page_processes_ui, target page, dropdown).
    """
    completed = get_completed_runs()
    if not completed:
        gr.Warning("You have no completed models! Finish a training run first.")
        return gr.update(), gr.update(), gr.update(choices=[])
    choices = []
    for run in completed:
        label = f"Run #{run[0]}: {run[1].upper()} x{run[2]}L"
        choices.append((label, run[0]))
    # Pre-select the first completed run so the dropdown is never empty.
    return gr.update(visible=False), page_to_show, gr.update(choices=choices, value=choices[0][1])
def inference_action(run_id, prompt):
    """Run `prompt` through the selected model; return the text or an error string."""
    if not run_id:
        return "Error: Please select a model."
    try:
        return run_inference(run_id, prompt)
    except Exception as e:
        # UI boundary: show the failure instead of crashing the app.
        return f"Error: {str(e)}"
def publish_action(run_id, hf_token, description):
    """Publish a completed run to the Hugging Face Hub; return a status string."""
    if not run_id:
        return "Error: Please select a model to publish."
    if not hf_token:
        return "Error: Hugging Face Token is required."
    try:
        matches = [r for r in get_completed_runs() if r[0] == run_id]
        if matches:
            # Name the repo after the run's architecture and depth.
            run = matches[0]
            repo_name = f"llm-kitchen-{run[1]}-{run[2]}L-run{run_id}"
        else:
            repo_name = f"llm-kitchen-run-{run_id}"
        return publish_run_to_hub(run_id, hf_token, repo_name, description.strip())
    except Exception as e:
        # UI boundary: report the failure rather than raising into Gradio.
        return f"Publish failed: {str(e)}"
# ------------------------------ UI ------------------------------
with gr.Blocks(title="LLM Kitchen π³", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# π³ Welcome to LLM Kitchen")
    gr.Markdown("### Cook your own language model β from scratch!")

    # --- Page: sign-up (hidden until requested from the login page) ---
    with gr.Group(visible=False) as page_signup_ui:
        gr.Markdown("### π Create a New Account")
        signup_user_input = gr.Textbox(label="Username")
        signup_pass_input = gr.Textbox(label="Password", type="password")
        signup_btn = gr.Button("Sign Up", variant="primary")
        signup_msg = gr.Markdown()
        go_to_login_btn = gr.Button("Already have an account? Log In")

    # --- Page: login (the only page visible at startup) ---
    with gr.Group() as page_login_ui:
        gr.Markdown("### π Login to the Kitchen")
        login_user_input = gr.Textbox(label="Username")
        login_pass_input = gr.Textbox(label="Password", type="password")
        login_btn = gr.Button("Login", variant="primary")
        login_msg = gr.Markdown()
        go_to_signup_btn = gr.Button("Don't have an account? Sign Up")

    # --- Page: processes dashboard ---
    with gr.Group(visible=False) as page_processes_ui:
        # Fix: original had an f-string with no placeholders here.
        gr.Markdown("### π§βπ³ Your Processes")
        with gr.Row():
            refresh_btn = gr.Button("π Refresh")
            inference_btn = gr.Button("π§ͺ Inference Kitchen")
            publish_btn = gr.Button("π Publishing Bay")
        runs_display = gr.Textbox(label="Your Training Runs", lines=8, interactive=False)
        with gr.Accordion("View Raw Logs", open=False):
            run_id_input = gr.Textbox(label="Enter a Run ID")
            view_logs_btn = gr.Button("View Logs")
            logs_display = gr.Textbox(label="Training Logs", lines=10, interactive=False)
        new_run_btn = gr.Button("β Start New Process", variant="primary")

    # --- Page: inference ---
    with gr.Group(visible=False) as page_inference_ui:
        gr.Markdown("### π§ͺ Inference Kitchen")
        inf_run_id_dropdown = gr.Dropdown(label="Select a Completed Model")
        prompt_input = gr.Textbox(label="Your Prompt", lines=3)
        infer_btn = gr.Button("Generate Answer")
        output_text = gr.Textbox(label="Model's Answer", lines=5, interactive=False)
        # Fix: this label's string literal was split across two lines
        # (a syntax error) by an extraction artifact; rejoined here.
        back_from_inf = gr.Button("β¬οΈ Back to Processes")

    # --- Page: publishing ---
    with gr.Group(visible=False) as page_publish_ui:
        gr.Markdown("### π Publishing Bay")
        pub_run_id_dropdown = gr.Dropdown(label="Select a Completed Model to Publish")
        pub_hf_token_input = gr.Textbox(label="Your Hugging Face Token (with write permissions)", type="password")
        pub_description_input = gr.Textbox(label="Model Card Description", lines=4)
        publish_now_btn = gr.Button("Publish to Hugging Face Hub", variant="primary")
        publish_status = gr.Markdown()
        # Fix: same split-literal syntax error as above; rejoined.
        back_from_pub = gr.Button("β¬οΈ Back to Processes")

    # --- Page: architecture wizard (step 2) ---
    with gr.Group(visible=False) as page_arch_ui:
        gr.Markdown("### ποΈ Step 2: Choose Your Architecture")
        arch_dropdown = gr.Dropdown(["cnn", "rnn", "transformer"], label="Architecture Type")
        layers_slider = gr.Slider(1, 16, value=4, step=1, label="Number of Layers")
        arch_next_btn = gr.Button("Next β Hyperparameters")
        arch_analogy = gr.Markdown()
        auto_suggestion = gr.Markdown()

    # --- Page: hyperparameters wizard (step 3) ---
    with gr.Group(visible=False) as page_hyper_ui:
        gr.Markdown("### π§ Step 3: Season Your Model")
        lr_input = gr.Number(label="Learning Rate")
        epochs_input = gr.Number(label="Epochs", precision=0)
        batch_input = gr.Number(label="Batch Size", precision=0)
        hyper_next_btn = gr.Button("Start Cooking! π²")

    # ------------------------------ EVENTS ------------------------------
    # Event listeners are registered inside the Blocks context, as Gradio
    # requires. Page "navigation" is toggling Group visibility.
    go_to_signup_btn.click(lambda: (gr.update(visible=False), gr.update(visible=True)), outputs=[page_login_ui, page_signup_ui])
    go_to_login_btn.click(lambda: (gr.update(visible=False), gr.update(visible=True)), outputs=[page_signup_ui, page_login_ui])
    login_btn.click(login_action, inputs=[login_user_input, login_pass_input], outputs=[page_login_ui, page_processes_ui, login_msg, runs_display])
    signup_btn.click(signup_action, inputs=[signup_user_input, signup_pass_input], outputs=[page_signup_ui, page_login_ui, signup_msg])
    refresh_btn.click(page_processes, outputs=runs_display)
    view_logs_btn.click(load_run_logs, inputs=run_id_input, outputs=logs_display)
    new_run_btn.click(lambda: (gr.update(visible=False), gr.update(visible=True)), outputs=[page_processes_ui, page_arch_ui])
    arch_next_btn.click(page_architecture_next, inputs=[arch_dropdown, layers_slider], outputs=[page_arch_ui, page_hyper_ui, arch_analogy, auto_suggestion, lr_input, epochs_input, batch_input])
    hyper_next_btn.click(page_hyperparams_next, inputs=[lr_input, epochs_input, batch_input], outputs=[page_hyper_ui, page_processes_ui, runs_display])
    inference_btn.click(lambda: go_to_page_with_run_list(gr.update(visible=True)), outputs=[page_processes_ui, page_inference_ui, inf_run_id_dropdown])
    infer_btn.click(inference_action, inputs=[inf_run_id_dropdown, prompt_input], outputs=output_text)
    back_from_inf.click(lambda: (gr.update(visible=False), gr.update(visible=True)), outputs=[page_inference_ui, page_processes_ui])
    publish_btn.click(lambda: go_to_page_with_run_list(gr.update(visible=True)), outputs=[page_processes_ui, page_publish_ui, pub_run_id_dropdown])
    publish_now_btn.click(publish_action, inputs=[pub_run_id_dropdown, pub_hf_token_input, pub_description_input], outputs=publish_status)
    back_from_pub.click(lambda: (gr.update(visible=False), gr.update(visible=True)), outputs=[page_publish_ui, page_processes_ui])

demo.queue().launch()