diff options
author | Aarni Koskela <akx@iki.fi> | 2023-05-11 15:28:15 +0000 |
---|---|---|
committer | Aarni Koskela <akx@iki.fi> | 2023-05-11 17:29:11 +0000 |
commit | 49a55b410b66b7dd9be9335d8a2e3a71e4f8b15c (patch) | |
tree | d79f004eae46bc1c49832f3c668a524107c30034 /modules/xlmr.py | |
parent | 431bc5a297ff7c17231b92b6c8f8152b2fab8553 (diff) | |
download | stable-diffusion-webui-gfx803-49a55b410b66b7dd9be9335d8a2e3a71e4f8b15c.tar.gz stable-diffusion-webui-gfx803-49a55b410b66b7dd9be9335d8a2e3a71e4f8b15c.tar.bz2 stable-diffusion-webui-gfx803-49a55b410b66b7dd9be9335d8a2e3a71e4f8b15c.zip |
Autofix Ruff W (not W605) (mostly whitespace)
Diffstat (limited to 'modules/xlmr.py')
-rw-r--r-- | modules/xlmr.py | 6 |
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/modules/xlmr.py b/modules/xlmr.py
index e056c3f6..a407a3ca 100644
--- a/modules/xlmr.py
+++ b/modules/xlmr.py
@@ -28,7 +28,7 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
     config_class = BertSeriesConfig
 
     def __init__(self, config=None, **kargs):
-        # modify initialization for autoloading 
+        # modify initialization for autoloading
         if config is None:
             config = XLMRobertaConfig()
         config.attention_probs_dropout_prob= 0.1
@@ -74,7 +74,7 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
         text["attention_mask"] = torch.tensor(
             text['attention_mask']).to(device)
         features = self(**text)
-        return features['projection_state'] 
+        return features['projection_state']
 
     def forward(
         self,
@@ -134,4 +134,4 @@ class BertSeriesModelWithTransformation(BertPreTrainedModel):
 
 class RobertaSeriesModelWithTransformation(BertSeriesModelWithTransformation):
     base_model_prefix = 'roberta'
-    config_class= RobertaSeriesConfig
\ No newline at end of file
+    config_class= RobertaSeriesConfig