Update app.py
app.py CHANGED

@@ -45,15 +45,12 @@ def generate_response(prompt, history, model, temperature, max_tokens, top_p, se
     additional_inputs = [
         gr.Dropdown(
             choices=[
-                "llama-3.
-                "llama-3.2-1b-preview",
-                "llama-3.1-70b-versatile",
+                "llama-3.3-70b-versatile",
                 "llama-3.1-8b-instant",
-                "llama3-70b-8192",
-                "llama3-8b-8192",
-                "mixtral-8x7b-32768",
                 "gemma2-9b-it",
-                "
+                "meta-llama/llama-4-maverick-17b-128e-instruct",
+                "meta-llama/llama-4-scout-17b-16e-instruct",
+                "deepseek-r1-distill-llama-70b"
             ],
             value="llama-3.1-70b-versatile",
             label="Model"
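For context, below is a minimal sketch of what this dropdown could look like after the commit, assuming the usual Gradio pattern where `additional_inputs` is later passed to a chat interface driven by `generate_response` (that wiring is outside the diff, so it is an assumption). Note that the commit keeps `value="llama-3.1-70b-versatile"` even though that model is removed from `choices`; since Gradio may warn about a default that is not among the listed choices, the sketch swaps in the new `llama-3.3-70b-versatile` entry as a hypothetical fix, not as part of the actual commit.

import gradio as gr

# Sketch of the updated dropdown only, not the full app.py.
additional_inputs = [
    gr.Dropdown(
        choices=[
            "llama-3.3-70b-versatile",
            "llama-3.1-8b-instant",
            "gemma2-9b-it",
            "meta-llama/llama-4-maverick-17b-128e-instruct",
            "meta-llama/llama-4-scout-17b-16e-instruct",
            "deepseek-r1-distill-llama-70b",
        ],
        # Hypothetical fix: the real diff still uses "llama-3.1-70b-versatile",
        # which is no longer in choices; defaulting to a listed model avoids
        # an unselectable default in the UI.
        value="llama-3.3-70b-versatile",
        label="Model",
    ),
]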