update readme.md
README.md CHANGED

@@ -1,4 +1,16 @@
-
+---
+language:
+- English
+tags:
+- llama2
+- llama-2
+- llama
+- llama2 architecture
+datasets:
+- Redpajama
+metrics:
+- MMLU
+---

# LLaMa Lite: Reduced-Scale, Experimental Versions of LLaMA and LLaMa 2

@@ -23,13 +35,25 @@ model = AutoModelForCausalLM.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)
model.eval()

-prompt = 'Q: What is the
+prompt = 'Q: What is the largest bird?\nA:'
input_ids = tokenizer(prompt, return_tensors="pt").input_ids
tokens = model.generate(input_ids, max_length=20)
print( tokenizer.decode(tokens[0].tolist(), skip_special_tokens=True) )
# Q: What is the largest bird?\nA: The largest bird is the bald eagle.
```
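
For context, the hunk above shows only the tail of the README's generation example; the hunk header indicates it is preceded by `model = AutoModelForCausalLM.from_pretrained(model_path)`. A self-contained sketch of the full snippet might look like the following, with the imports reconstructed and `model_path` left as a placeholder rather than the actual checkpoint id:

```python
# Reconstructed, self-contained version of the README generation example.
# model_path is a placeholder; substitute the actual llama2_xs_460M checkpoint id.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "path/to/llama2_xs_460M"  # placeholder

model = AutoModelForCausalLM.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)
model.eval()

prompt = 'Q: What is the largest bird?\nA:'
input_ids = tokenizer(prompt, return_tensors="pt").input_ids
with torch.no_grad():
    tokens = model.generate(input_ids, max_length=20)
print(tokenizer.decode(tokens[0].tolist(), skip_special_tokens=True))
# Expected output along the lines of:
# Q: What is the largest bird?\nA: The largest bird is the bald eagle.
```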

+## Evaluation
+
+We evaluate our models on the MMLU task:
+
+| Models | #parameters | zero-shot | 5-shot |
+| --- | --- | --- | --- |
+| llama | 7B | 28.46 | 35.05 |
+| openllama | 3B | 24.90 | 26.71 |
+| TinyLlama-1.1B-step-50K-105b | 1.1B | 19.00 | 26.53 |
+| llama2_xs_460M | 0.46B | 21.13 | 26.39 |
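
The diff does not include the script that produced these numbers. Scores like the zero-shot and 5-shot columns above are typically obtained with a harness such as EleutherAI's lm-evaluation-harness; the sketch below only illustrates the underlying idea for the zero-shot case, scoring each answer letter by the log-likelihood the model assigns to it as a continuation of the question. The prompt format, the toy question, and `model_path` are illustrative assumptions, not the authors' evaluation setup.

```python
# Minimal sketch of zero-shot multiple-choice scoring in the MMLU style.
# NOT the evaluation code behind the table above; prompt format, the toy
# question, and model_path are illustrative assumptions.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "path/to/llama2_xs_460M"  # placeholder
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(model_path)
model.eval()

def choice_logprob(prompt: str, choice: str) -> float:
    """Log-likelihood the model assigns to `choice` as a continuation of `prompt`."""
    prompt_ids = tokenizer(prompt, return_tensors="pt").input_ids
    full_ids = tokenizer(prompt + choice, return_tensors="pt").input_ids
    with torch.no_grad():
        logits = model(full_ids).logits
    # token i (for i >= 1) is predicted from position i - 1
    log_probs = torch.log_softmax(logits[:, :-1, :], dim=-1)
    token_lp = log_probs.gather(-1, full_ids[:, 1:].unsqueeze(-1)).squeeze(-1)
    # keep only the log-probs of the answer tokens
    return token_lp[0, prompt_ids.shape[1] - 1:].sum().item()

question = (
    "The following is a multiple choice question about biology.\n"
    "What is the largest living bird?\n"
    "A. sparrow\nB. ostrich\nC. pigeon\nD. penguin\n"
    "Answer:"
)
choices = [" A", " B", " C", " D"]
scores = [choice_logprob(question, c) for c in choices]
print("prediction:", choices[scores.index(max(scores))].strip())
# Accuracy over the MMLU test set is the fraction of questions where the
# top-scoring choice matches the gold answer.
```
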
## Contact