author    Roman Beltiukov <maybe.hello.world@gmail.com> 2023-05-25 15:10:10 -0700
committer GitHub <noreply@github.com> 2023-05-25 15:10:10 -0700
commit    b2530c965c2afd5512c5f9020251fd4be8f067e5 (patch)
tree      0c1620e00ac4eddea514706a5c3bf3e03bd46c70 /modules/xlmr.py
parent    09d9c3d287ee4543d285e0fde8b81603c9751a7e (diff)
parent    a6e653be26cc05f4438145fa0082816e9fbbf5fc (diff)
Merge branch 'dev' into master
Diffstat (limited to 'modules/xlmr.py')
-rw-r--r--  modules/xlmr.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/modules/xlmr.py b/modules/xlmr.py
index beab3fdf..a407a3ca 100644
--- a/modules/xlmr.py
+++ b/modules/xlmr.py
@@ -1,4 +1,4 @@
-from transformers import BertPreTrainedModel,BertModel,BertConfig
+from transformers import BertPreTrainedModel, BertConfig
 import torch.nn as nn
 import torch
 from transformers.models.xlm_roberta.configuration_xlm_roberta import XLMRobertaConfig
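Of the four changed lines in this commit, the import cleanup above is the only non-whitespace one: BertModel is never referenced in the module, while BertConfig stays because the file's own config class builds on it. A minimal sketch of that relationship (that BertSeriesConfig subclasses BertConfig is an assumption; the hunk itself only shows the import):

# Sketch only: assumes BertSeriesConfig extends BertConfig, which would
# explain why BertConfig survives the cleanup while BertModel does not.
from transformers import BertConfig

class BertSeriesConfig(BertConfig):
    pass  # stand-in; the real class in modules/xlmr.py defines its own fields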
@@ -28,7 +28,7 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
     config_class = BertSeriesConfig
 
     def __init__(self, config=None, **kargs):
-        # modify initialization for autoloading 
+        # modify initialization for autoloading
         if config is None:
             config = XLMRobertaConfig()
             config.attention_probs_dropout_prob= 0.1
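Beyond the trailing-whitespace fix on the comment line, the context in this hunk shows the autoloading pattern the comment refers to: when no config is passed, a default XLMRobertaConfig is built and its dropout pinned, so the class can be instantiated with no arguments. A compact sketch of the same idea (the class name and the super().__init__ call are assumptions for illustration):

from transformers import BertPreTrainedModel
from transformers.models.xlm_roberta.configuration_xlm_roberta import XLMRobertaConfig

class AutoloadDemo(BertPreTrainedModel):  # hypothetical stand-in class
    def __init__(self, config=None, **kargs):
        if config is None:
            # Build a default config so AutoloadDemo() works bare,
            # mirroring the hunk above.
            config = XLMRobertaConfig()
            config.attention_probs_dropout_prob = 0.1
        super().__init__(config)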
@@ -74,7 +74,7 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
         text["attention_mask"] = torch.tensor(
             text['attention_mask']).to(device)
         features = self(**text)
-        return features['projection_state'] 
+        return features['projection_state']
 
     def forward(
         self,
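The only change in this hunk is trailing whitespace on the return line; the surrounding context documents encode()'s contract: tokenize the prompt, move the tensors to the model's device, run forward(), and return just the 'projection_state' entry of the feature dict. A hedged usage sketch ("<checkpoint>" is a placeholder, not from this commit):

# Hypothetical usage; "<checkpoint>" stands in for a real model path.
model = BertSeriesModelWithTransformation.from_pretrained("<checkpoint>")
projected = model.encode("a photo of a cat")  # the 'projection_state' tensor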
@@ -134,4 +134,4 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
 
 class RobertaSeriesModelWithTransformation(BertSeriesModelWithTransformation):
     base_model_prefix = 'roberta'
-    config_class= RobertaSeriesConfig
\ No newline at end of file
+    config_class= RobertaSeriesConfig
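The final hunk only adds the missing newline at end of file. The class itself is the thin-subclass pattern: reuse the whole Bert-series implementation and swap only base_model_prefix and config_class. Loading would mirror the parent class (checkpoint path again a placeholder):

# Hypothetical: the Roberta variant loads exactly like its parent.
model = RobertaSeriesModelWithTransformation.from_pretrained("<checkpoint>")
assert model.base_model_prefix == 'roberta'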