aboutsummaryrefslogtreecommitdiff
path: root/modules/textual_inversion
diff options
context:
space:
mode:
authorShondoit <shondoit@gmail.com>2023-01-12 15:34:11 +0100
committerShondoit <shondoit@gmail.com>2023-02-15 10:03:59 +0100
commitbc50936745e1a349afdc28cf1540109ba20bc71a (patch)
tree10c5a91caf895e58dffbf5a2d5d30924651e0bbf /modules/textual_inversion
parent21642000b33a3069e3408ea1a50239006176badb (diff)
Call weighted_forward during training
Diffstat (limited to 'modules/textual_inversion')
-rw-r--r--modules/textual_inversion/textual_inversion.py3
1 file changed, 2 insertions, 1 deletion
diff --git a/modules/textual_inversion/textual_inversion.py b/modules/textual_inversion/textual_inversion.py
index a1a406c2..8853c868 100644
--- a/modules/textual_inversion/textual_inversion.py
+++ b/modules/textual_inversion/textual_inversion.py
@@ -480,6 +480,7 @@ def train_embedding(id_task, embedding_name, learn_rate, batch_size, gradient_st
with devices.autocast():
x = batch.latent_sample.to(devices.device, non_blocking=pin_memory)
+ w = batch.weight.to(devices.device, non_blocking=pin_memory)
c = shared.sd_model.cond_stage_model(batch.cond_text)
if is_training_inpainting_model:
@@ -490,7 +491,7 @@ def train_embedding(id_task, embedding_name, learn_rate, batch_size, gradient_st
else:
cond = c
- loss = shared.sd_model(x, cond)[0] / gradient_step
+ loss = shared.sd_model.weighted_forward(x, cond, w)[0] / gradient_step
del x
_loss_step += loss.item()