author    Aarni Koskela <akx@iki.fi>    2023-05-11 18:28:15 +0300
committer Aarni Koskela <akx@iki.fi>    2023-05-11 20:29:11 +0300
commit    49a55b410b66b7dd9be9335d8a2e3a71e4f8b15c (patch)
tree      d79f004eae46bc1c49832f3c668a524107c30034 /modules/xlmr.py
parent    431bc5a297ff7c17231b92b6c8f8152b2fab8553 (diff)
Autofix Ruff W (not W605) (mostly whitespace)
Diffstat (limited to 'modules/xlmr.py')
-rw-r--r--  modules/xlmr.py  6
1 file changed, 3 insertions(+), 3 deletions(-)
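
The commit title says these changes were produced by Ruff's autofix for the pycodestyle-derived W rules (trailing whitespace, whitespace on blank lines, missing newline at end of file), with W605 (invalid escape sequence) excluded. The exact invocation is not recorded in the commit, so the following is a sketch of a command that would reproduce this class of fix:

    # Hypothetical reproduction: auto-fix all W-class warnings except W605
    ruff check . --select W --ignore W605 --fix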
diff --git a/modules/xlmr.py b/modules/xlmr.py
index e056c3f6..a407a3ca 100644
--- a/modules/xlmr.py
+++ b/modules/xlmr.py
@@ -28,7 +28,7 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
     config_class = BertSeriesConfig
 
     def __init__(self, config=None, **kargs):
-        # modify initialization for autoloading 
+        # modify initialization for autoloading
         if config is None:
             config = XLMRobertaConfig()
             config.attention_probs_dropout_prob= 0.1
@@ -74,7 +74,7 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
text["attention_mask"] = torch.tensor(
text['attention_mask']).to(device)
features = self(**text)
- return features['projection_state']
+ return features['projection_state']
def forward(
self,
@@ -134,4 +134,4 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
 
 class RobertaSeriesModelWithTransformation(BertSeriesModelWithTransformation):
     base_model_prefix = 'roberta'
-    config_class= RobertaSeriesConfig
\ No newline at end of file
+    config_class= RobertaSeriesConfig