author    AUTOMATIC <16777216c@gmail.com>  2023-05-27 20:13:26 +0300
committer AUTOMATIC <16777216c@gmail.com>  2023-05-27 20:13:26 +0300
commit    50906bf78bce9cdb872a56d498000316ec2dfdcd (patch)
tree      5811cd211f383f759120c80ed266ed0b1befbaf0 /modules/xlmr.py
parent    89f9faa63388756314e8a1d96cf86bf5e0663045 (diff)
parent    b186045fee0d384addcdc2a759fd33dba51b070e (diff)
Merge branch 'release_candidate'
Diffstat (limited to 'modules/xlmr.py')
-rw-r--r--  modules/xlmr.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/modules/xlmr.py b/modules/xlmr.py
index beab3fdf..a407a3ca 100644
--- a/modules/xlmr.py
+++ b/modules/xlmr.py
@@ -1,4 +1,4 @@
-from transformers import BertPreTrainedModel,BertModel,BertConfig
+from transformers import BertPreTrainedModel, BertConfig
import torch.nn as nn
import torch
from transformers.models.xlm_roberta.configuration_xlm_roberta import XLMRobertaConfig
@@ -28,7 +28,7 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
config_class = BertSeriesConfig
def __init__(self, config=None, **kargs):
- # modify initialization for autoloading
+ # modify initialization for autoloading
if config is None:
config = XLMRobertaConfig()
config.attention_probs_dropout_prob= 0.1
@@ -74,7 +74,7 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
text["attention_mask"] = torch.tensor(
text['attention_mask']).to(device)
features = self(**text)
- return features['projection_state']
+ return features['projection_state']
def forward(
self,
@@ -134,4 +134,4 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
class RobertaSeriesModelWithTransformation(BertSeriesModelWithTransformation):
base_model_prefix = 'roberta'
-    config_class= RobertaSeriesConfig
\ No newline at end of file
+    config_class= RobertaSeriesConfig
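
For orientation, a minimal usage sketch of the code path touched by the second hunk (@@ -74,7). Several names here are assumptions, since the hunk shows only the tail of the method: the method name `encode`, its raw-string argument, and the checkpoint directory are hypothetical. What the hunk does confirm is that the method tensorizes text['attention_mask'], moves it to the device, runs the forward pass via self(**text), and returns features['projection_state'].

# Minimal sketch, assuming the enclosing method is named `encode`,
# accepts raw prompt strings, and "path/to/checkpoint" (hypothetical)
# holds a compatible pretrained checkpoint.
import torch
from modules.xlmr import BertSeriesModelWithTransformation

device = "cuda" if torch.cuda.is_available() else "cpu"
model = BertSeriesModelWithTransformation.from_pretrained(
    "path/to/checkpoint"  # hypothetical checkpoint directory
).to(device).eval()

with torch.no_grad():
    # Per the hunk: attention mask is tensorized and moved to `device`,
    # then self(**text) runs and features['projection_state'] is returned.
    projection_state = model.encode(["a photo of a cat"])

print(projection_state.shape)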