TiberiuCristianLeon committed
Commit 45fb578 · verified · 1 Parent(s): f2ee2e1

Upload 4 files

.gitattributes CHANGED
@@ -32,3 +32,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ vocab.deen.spm filter=lfs diff=lfs merge=lfs -text
lex.50.50.deen.s2t.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4fde63dcdd04fe8d38f001c53ed634fe79a93bace020f5bbfcaeba948807e23d
+ size 5167324
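
The binaries in this commit are stored through Git LFS, so each diff above and below only adds a small pointer file: a spec version line, the sha256 oid of the real content, and its size in bytes. As a rough sketch (not part of this upload; the local paths are hypothetical), such a pointer can be parsed and used to verify a downloaded copy of the file:

```python
# Hedged sketch: parse a Git LFS pointer file (version / oid / size lines)
# and check a locally downloaded blob against the recorded sha256 and size.
import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path: str) -> dict:
    """Read the space-separated key/value lines of a Git LFS pointer."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def verify_blob(blob_path: str, pointer_path: str) -> bool:
    """True if blob_path matches the oid and size stored in the pointer."""
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].split(":", 1)[1]   # strip the "sha256:" prefix
    expected_size = int(fields["size"])
    data = Path(blob_path).read_bytes()
    return (len(data) == expected_size
            and hashlib.sha256(data).hexdigest() == expected_oid)

# Hypothetical usage:
# verify_blob("downloads/lex.50.50.deen.s2t.bin", "pointers/lex.50.50.deen.s2t.bin")
```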
metadata.json ADDED
@@ -0,0 +1,77 @@
+ {
+ "architecture": "base",
+ "byteSize": 42992955,
+ "flores": {
+ "bleu": 41.6,
+ "comet": 0.8854
+ },
+ "hash": "10a4b912a44ee02541935b1cd34b4a0094ab5c4bcf22ba4d16ed49761ea69b65",
+ "modelConfig": {
+ "bert-train-type-embeddings": true,
+ "bert-type-vocab-size": 2,
+ "dec-cell": "ssru",
+ "dec-cell-base-depth": 2,
+ "dec-cell-high-depth": 1,
+ "dec-depth": 2,
+ "dim-emb": 512,
+ "dim-rnn": 1024,
+ "dim-vocabs": [
+ 32000,
+ 32000
+ ],
+ "enc-cell": "gru",
+ "enc-cell-depth": 1,
+ "enc-depth": 6,
+ "enc-type": "bidirectional",
+ "factors-combine": "sum",
+ "factors-dim-emb": 0,
+ "input-types": [],
+ "layer-normalization": false,
+ "lemma-dependency": "",
+ "lemma-dim-emb": 0,
+ "output-omit-bias": false,
+ "right-left": false,
+ "skip": false,
+ "tied-embeddings": false,
+ "tied-embeddings-all": true,
+ "tied-embeddings-src": false,
+ "transformer-aan-activation": "swish",
+ "transformer-aan-depth": 2,
+ "transformer-aan-nogate": false,
+ "transformer-decoder-autoreg": "rnn",
+ "transformer-decoder-dim-ffn": 0,
+ "transformer-decoder-ffn-depth": 0,
+ "transformer-dim-aan": 2048,
+ "transformer-dim-ffn": 2048,
+ "transformer-ffn-activation": "relu",
+ "transformer-ffn-depth": 2,
+ "transformer-guided-alignment-layer": "last",
+ "transformer-heads": 8,
+ "transformer-no-affine": false,
+ "transformer-no-bias": false,
+ "transformer-no-projection": false,
+ "transformer-pool": false,
+ "transformer-postprocess": "dan",
+ "transformer-postprocess-emb": "d",
+ "transformer-postprocess-top": "",
+ "transformer-preprocess": "",
+ "transformer-rnn-projection": false,
+ "transformer-tied-layers": [],
+ "transformer-train-position-embeddings": false,
+ "type": "transformer",
+ "ulr": false,
+ "ulr-dim-emb": 0,
+ "ulr-trainable-transformation": false,
+ "version": "v1.12.14 2d067af 2024-02-16 11:44:13 -0500"
+ },
+ "modelStatistics": {
+ "decoder_bytes": 7533632,
+ "decoder_parameters": 7388432,
+ "embeddings_bytes": 16384000,
+ "encoder_bytes": 19034256,
+ "encoder_parameters": 18914340,
+ "parameters": 42686774
+ },
+ "sourceLanguage": "de",
+ "targetLanguage": "en"
+ }
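
Note that the "hash" and "byteSize" (42992955) fields in metadata.json match the sha256 oid and size recorded in the LFS pointer for model.deen.intgemm.alphas.bin, added just below. A minimal sketch, assuming locally downloaded copies of both files (paths are hypothetical), of using those fields as an integrity check:

```python
# Hedged sketch: cross-check a downloaded model binary against the "hash"
# and "byteSize" fields that metadata.json records for it.
import hashlib
import json
from pathlib import Path

def check_model(metadata_path: str, model_path: str) -> None:
    meta = json.loads(Path(metadata_path).read_text())
    data = Path(model_path).read_bytes()
    assert len(data) == meta["byteSize"], "size mismatch"
    assert hashlib.sha256(data).hexdigest() == meta["hash"], "sha256 mismatch"
    print(f'{meta["sourceLanguage"]}->{meta["targetLanguage"]} model OK: '
          f'{meta["modelConfig"]["type"]}, FLORES BLEU {meta["flores"]["bleu"]}')

# Hypothetical usage:
# check_model("metadata.json", "model.deen.intgemm.alphas.bin")
```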
model.deen.intgemm.alphas.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:10a4b912a44ee02541935b1cd34b4a0094ab5c4bcf22ba4d16ed49761ea69b65
+ size 42992955
vocab.deen.spm ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:54d7bd26db251297c1bb638ea50e77e981dcf6acaf767f9e72c331981284cfde
+ size 809663