Training in progress, step 2000
- config.json +37 -0
- pytorch_model.bin +3 -0
- runs/Sep16_13-28-46_nid006603/events.out.tfevents.1758022141.nid006603.208166.0 +3 -0
- runs/Sep16_22-37-21_nid007081/events.out.tfevents.1758055057.nid007081.55404.0 +3 -0
- runs/Sep17_11-23-31_nid007191/events.out.tfevents.1758101018.nid007191.214208.0 +3 -0
- runs/Sep17_11-29-28_nid007081/events.out.tfevents.1758101384.nid007081.193734.0 +3 -0
- training_args.bin +3 -0
config.json
ADDED
@@ -0,0 +1,37 @@
+{
+  "adapter_reduction": 16,
+  "architectures": [
+    "DistillationWrapper"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "classifier_dropout": null,
+  "dtype": "float32",
+  "embedding_size": 128,
+  "expert_intermediate_size": 2624,
+  "group_depth": 4,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 1024,
+  "initializer_range": 0.02,
+  "intermediate_size": 2624,
+  "layer_norm_eps": 1e-06,
+  "load_balancing_loss_coef": 0.2,
+  "lora_alpha": 32,
+  "lora_rank": 16,
+  "max_position_embeddings": 8192,
+  "model_type": "ModernALBERT",
+  "num_attention_heads": 16,
+  "num_expert_modules": 3,
+  "num_experts": 8,
+  "num_hidden_layers": 16,
+  "output_hidden_states": true,
+  "pad_token_id": 0,
+  "router_jitter_noise": 0.01,
+  "top_k": 2,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.51.3",
+  "use_adapter": true,
+  "use_cache": true,
+  "use_moa": true,
+  "vocab_size": 50368
+}
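The config describes a custom ModernALBERT architecture wrapped for distillation, mixing ALBERT-style layer grouping with mixture-of-experts routing and LoRA/adapter settings. A minimal sketch of inspecting it is below; since "ModernALBERT" is not a model_type registered in stock transformers 4.51.3, the raw JSON is read directly rather than through AutoConfig, and the reading of "group_depth" as the size of an ALBERT-style weight-sharing group is an assumption, not something the config states.

# Minimal sketch: sanity-check this checkpoint's config. The model_type
# "ModernALBERT" is custom, so AutoModel.from_pretrained would not resolve it
# without the repo's own modeling code; we only read the raw JSON here.
import json

with open("config.json") as f:
    cfg = json.load(f)

# Assumption: group_depth means ALBERT-style weight sharing, i.e. the 16
# layers reuse parameters in groups of 4, leaving 4 unique layer groups.
assert cfg["num_hidden_layers"] % cfg["group_depth"] == 0
unique_groups = cfg["num_hidden_layers"] // cfg["group_depth"]

# MoE routing fields taken verbatim from the config: each token is dispatched
# to top_k of num_experts experts, with router jitter during training and an
# auxiliary load-balancing loss weighted by load_balancing_loss_coef.
print(f"unique layer groups : {unique_groups}")
print(f"experts (top-k)     : {cfg['num_experts']} (top-{cfg['top_k']})")
print(f"expert FFN width    : {cfg['expert_intermediate_size']}")
print(f"router jitter       : {cfg['router_jitter_noise']}")
print(f"balancing loss coef : {cfg['load_balancing_loss_coef']}")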
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b07e2e45544a061b54af89084b5eaee507af0501d209758bb170ff5c9c22370
+size 1059459406
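The binary weights are stored as a Git LFS pointer: three plain-text lines giving the spec version, the SHA-256 of the real blob, and its size (about 1.06 GB here). A minimal sketch of verifying a downloaded blob against such a pointer, using only the format shown above:

# Minimal sketch: verify a downloaded blob against its Git LFS pointer file.
# Pointer format (three lines): "version <url>", "oid sha256:<hex>", "size <bytes>".
import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path: str) -> dict:
    fields = dict(line.split(" ", 1) for line in Path(pointer_path).read_text().splitlines())
    return {"oid": fields["oid"].split(":", 1)[1], "size": int(fields["size"])}

def verify_blob(blob_path: str, pointer_path: str) -> bool:
    meta = parse_lfs_pointer(pointer_path)
    blob = Path(blob_path)
    if blob.stat().st_size != meta["size"]:
        return False
    h = hashlib.sha256()
    with blob.open("rb") as f:
        # Hash in 1 MiB chunks to avoid loading the full ~1 GB file into memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == meta["oid"]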
runs/Sep16_13-28-46_nid006603/events.out.tfevents.1758022141.nid006603.208166.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:409ff9e69becc0373bc9e6fdb4d87862ce67d1eaac85ec890fe60067c2f5177f
+size 364278
runs/Sep16_22-37-21_nid007081/events.out.tfevents.1758055057.nid007081.55404.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7acdc4f03085e2343dfd59272ab71e31c4b31c8bec1be746b06ad834a39fadb0
+size 426414
runs/Sep17_11-23-31_nid007191/events.out.tfevents.1758101018.nid007191.214208.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0dfcb957eeeb8b30a4853a0550e5f2d8f957da386b1e8d793e699058a26f8f72
+size 5973
runs/Sep17_11-29-28_nid007081/events.out.tfevents.1758101384.nid007081.193734.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8df1add6546cb1fc51df0ace915a0e91ec398746a37b601678b96d95bbe8391a
+size 47536
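The four runs/ files are TensorBoard event logs from the training sessions that produced this step-2000 checkpoint. A minimal sketch of reading the scalars back, assuming tensorboard is installed; the tag names depend on what the Trainer logged, so they are listed rather than guessed:

# Minimal sketch: read training scalars from one of the tfevents files above.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator(
    "runs/Sep16_13-28-46_nid006603/events.out.tfevents.1758022141.nid006603.208166.0"
)
ea.Reload()  # parse the event file

# Discover what was logged before querying; tag names vary by Trainer setup.
tags = ea.Tags()["scalars"]
print("scalar tags:", tags)
for event in ea.Scalars(tags[0]):
    print(f"step {event.step}: {event.value:.4f}")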
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:239c6eb602e2f8ad26ad20793c5485f8285cb0674b765c9c2a7f3c210b2330b3
+size 5432
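training_args.bin is the Trainer's pickled TrainingArguments object. A minimal sketch of inspecting it; since it is a Python pickle, load it only from a source you trust, and note that recent PyTorch defaults to weights-only loading, which must be disabled for arbitrary pickled objects:

# Minimal sketch: inspect the pickled TrainingArguments from this commit.
import torch

args = torch.load("training_args.bin", weights_only=False)  # pickle, not tensors
print(args.learning_rate, args.per_device_train_batch_size, args.max_steps)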