Fix model weight
Browse files
README.md CHANGED

@@ -82,7 +82,7 @@ Download model weights and run inference using the following example:
 82  import torch
 83  from transformers import AutoModelForCausalLM
 84  INPUT ="When was Rome founded?"
-85  MODEL_ID = "Aleph-Alpha/
+85  MODEL_ID = "Aleph-Alpha/tfree-hat-pretrained-7b-base"
 86  model = AutoModelForCausalLM.from_pretrained(
 87      trust_remote_code=True,
 88      pretrained_model_name_or_path=MODEL_ID