feat: select first LoRA upon initialization
modeling_lora.py  CHANGED  (+1, −0)
@@ -181,6 +181,7 @@ class BertLoRA(BertPreTrainedModel):
         for name, param in super().named_parameters():
             if "lora" not in name:
                 param.requires_grad_(False)
+        self.select_task(0)
 
     def from_bert(self, *args, num_adaptions=1, **kwargs):
         self.bert = BertModel.from_pretrained(*args, **kwargs)
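With this change the model starts with its first LoRA adapter already selected, instead of leaving the active adapter undefined after initialization. The body of select_task is not part of this diff; the following is a minimal sketch of what such a selector could look like, assuming each LoRA module holds num_adaptions adapters and exposes a current_task index (the LoRAParametrization name and its attributes here are hypothetical, not taken from the repository).

import torch.nn as nn

class LoRAParametrization(nn.Module):
    """Hypothetical multi-adapter LoRA wrapper: keeps `num_adaptions` low-rank
    (A, B) pairs and applies the one indexed by `current_task` in forward()."""
    def __init__(self, num_adaptions: int = 1):
        super().__init__()
        self.num_adaptions = num_adaptions
        self.current_task = None  # no adapter active until select_task() is called

class BertLoRA(nn.Module):  # simplified stand-in for the real BertPreTrainedModel subclass
    def select_task(self, task_idx: int) -> None:
        # Point every LoRA layer at the adapter with index `task_idx`;
        # calling self.select_task(0) during init activates the first adapter.
        for module in self.modules():
            if isinstance(module, LoRAParametrization):
                module.current_task = task_idx

Under that assumption, selecting adapter 0 at initialization is a sensible default: a freshly constructed model would otherwise run with no adapter (or an arbitrary one) active until the caller explicitly picks a task.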