author      AUTOMATIC1111 <16777216c@gmail.com>   2023-05-17 09:26:26 +0300
committer   GitHub <noreply@github.com>           2023-05-17 09:26:26 +0300
commit      85232a5b26666854deae59cf950f744740dd5c37 (patch)
tree        3af76d8c6ba3173ffd925336d902da058df4e02d /modules/xlmr.py
parent      56a2672831751480f94a018f861f0143a8234ae8 (diff)
parent      4b07f2f584596604c4499efb0b0295e96985080f (diff)
Merge branch 'dev' into taesd-a
Diffstat (limited to 'modules/xlmr.py')
-rw-r--r--   modules/xlmr.py   8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/modules/xlmr.py b/modules/xlmr.py
index beab3fdf..a407a3ca 100644
--- a/modules/xlmr.py
+++ b/modules/xlmr.py
@@ -1,4 +1,4 @@
-from transformers import BertPreTrainedModel,BertModel,BertConfig
+from transformers import BertPreTrainedModel, BertConfig
 import torch.nn as nn
 import torch
 from transformers.models.xlm_roberta.configuration_xlm_roberta import XLMRobertaConfig
@@ -28,7 +28,7 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
     config_class = BertSeriesConfig
 
     def __init__(self, config=None, **kargs):
-        # modify initialization for autoloading 
+        # modify initialization for autoloading
         if config is None:
             config = XLMRobertaConfig()
             config.attention_probs_dropout_prob= 0.1
@@ -74,7 +74,7 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
text["attention_mask"] = torch.tensor(
text['attention_mask']).to(device)
features = self(**text)
- return features['projection_state']
+ return features['projection_state']
def forward(
self,
@@ -134,4 +134,4 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
 
 class RobertaSeriesModelWithTransformation(BertSeriesModelWithTransformation):
     base_model_prefix = 'roberta'
-    config_class= RobertaSeriesConfig
\ No newline at end of file
+    config_class= RobertaSeriesConfig
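
For orientation only (not part of the patch): a minimal sketch of why the trimmed import in the first hunk still covers what the visible hunks use -- BertPreTrainedModel as the base class of BertSeriesModelWithTransformation and BertConfig for the series config, while BertModel is no longer referenced. The BertSeriesConfig body below is a placeholder assumption; the module's real definition is not shown in this diff.

# Illustrative sketch, assuming BertSeriesConfig subclasses BertConfig;
# only the names kept by the new import line are needed here.
from transformers import BertPreTrainedModel, BertConfig  # BertModel dropped as unused
from transformers.models.xlm_roberta.configuration_xlm_roberta import XLMRobertaConfig


class BertSeriesConfig(BertConfig):
    pass  # placeholder; the real definition lives in modules/xlmr.py


class BertSeriesModelWithTransformation(BertPreTrainedModel):
    config_class = BertSeriesConfig

    def __init__(self, config=None, **kargs):
        if config is None:
            config = XLMRobertaConfig()  # fallback config, as in the hunk above
        super().__init__(config)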