Upload modeling_fastesm.py with huggingface_hub
modeling_fastesm.py  CHANGED  (+1, -1)
@@ -58,7 +58,6 @@ class FastEsmConfig(PretrainedConfig):
         super().__init__(
             pad_token_id=pad_token_id,
             mask_token_id=mask_token_id,
-            tie_word_embeddings=False,
             **kwargs,
         )
 
@@ -74,6 +73,7 @@
         self.layer_norm_eps = layer_norm_eps
         self.position_embedding_type = position_embedding_type
         self.emb_layer_norm_before = emb_layer_norm_before
+        self.tie_word_embeddings = False
 
     def to_dict(self) -> Dict[str, Any]:
         """
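For context, here is a minimal sketch of the pattern this diff applies, assuming FastEsmConfig follows the usual transformers PretrainedConfig convention (BaseConfig and SketchConfig below are illustrative stand-ins, not the real classes): tie_word_embeddings is no longer hard-coded as a keyword to super().__init__(), but forced to False as an attribute afterwards, so it cannot collide with or be overridden by the same key arriving via **kwargs.

# Illustrative sketch only; class names and the base-class behavior are
# assumptions, not code from modeling_fastesm.py or transformers.
class BaseConfig:
    """Stand-in for transformers.PretrainedConfig."""

    def __init__(self, pad_token_id=None, mask_token_id=None, **kwargs):
        self.pad_token_id = pad_token_id
        self.mask_token_id = mask_token_id
        # Assumed base-class behavior: consume tie_word_embeddings from
        # kwargs, defaulting to True.
        self.tie_word_embeddings = kwargs.pop("tie_word_embeddings", True)


class SketchConfig(BaseConfig):
    def __init__(self, pad_token_id=None, mask_token_id=None, **kwargs):
        super().__init__(
            pad_token_id=pad_token_id,
            mask_token_id=mask_token_id,
            **kwargs,
        )
        # As on the new line 76: set the attribute after super().__init__,
        # so the value is always False even if a caller passes
        # tie_word_embeddings=True. The old form (tie_word_embeddings=False
        # inside the super() call) would instead raise a "multiple values
        # for keyword argument" error if that key also arrived via **kwargs.
        self.tie_word_embeddings = False


config = SketchConfig(pad_token_id=0, tie_word_embeddings=True)
print(config.tie_word_embeddings)  # False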