about summary refs log tree commit diff
path: root/modules/sd_hijack_open_clip.py
diff options
context:
space:
mode:
authorAUTOMATIC1111 <16777216c@gmail.com>2023-07-14 09:56:01 +0300
committerAUTOMATIC1111 <16777216c@gmail.com>2023-07-14 09:56:01 +0300
commit9a3f35b028a8026291679c35e1df5b2aea327a1d (patch)
treeefb86fcbcbe81b1a993cf90cd42b6e3d816b5974 /modules/sd_hijack_open_clip.py
parentabb948dab09841571dd24c6be9ff9d6b212778ea (diff)
repair medvram and lowvram
Diffstat (limited to 'modules/sd_hijack_open_clip.py')
-rw-r--r--modules/sd_hijack_open_clip.py4
1 file changed, 2 insertions, 2 deletions
diff --git a/modules/sd_hijack_open_clip.py b/modules/sd_hijack_open_clip.py
index fcf5ad07..bb0b96c7 100644
--- a/modules/sd_hijack_open_clip.py
+++ b/modules/sd_hijack_open_clip.py
@@ -32,7 +32,7 @@ class FrozenOpenCLIPEmbedderWithCustomWords(sd_hijack_clip.FrozenCLIPEmbedderWit
def encode_embedding_init_text(self, init_text, nvpt):
ids = tokenizer.encode(init_text)
ids = torch.asarray([ids], device=devices.device, dtype=torch.int)
- embedded = self.wrapped.model.token_embedding.wrapped(ids).squeeze(0)
+ embedded = self.wrapped.model.token_embedding.wrapped(ids.to(self.wrapped.model.token_embedding.wrapped.weight.device)).squeeze(0)
return embedded
@@ -66,6 +66,6 @@ class FrozenOpenCLIPEmbedder2WithCustomWords(sd_hijack_clip.FrozenCLIPEmbedderWi
def encode_embedding_init_text(self, init_text, nvpt):
ids = tokenizer.encode(init_text)
ids = torch.asarray([ids], device=devices.device, dtype=torch.int)
- embedded = self.wrapped.model.token_embedding.wrapped(ids).squeeze(0)
+ embedded = self.wrapped.model.token_embedding.wrapped(ids.to(self.wrapped.model.token_embedding.wrapped.weight.device)).squeeze(0)
return embedded