author     shirase-0 <ultra.gamer.ebf@gmail.com>    2022-10-02 00:43:24 +1000
committer  shirase-0 <ultra.gamer.ebf@gmail.com>    2022-10-02 00:43:24 +1000
commit     27fbf3de4adf6ba8dfa43876db3599bb8159ef44 (patch)
tree       63fe38509d3574db06ec98553d34c5129f191378
parent     3f417566b0bda8eab05d247567aebf001c1d1725 (diff)
Added tag parsing for prompts from file
-rw-r--r--  scripts/prompts_from_file.py  58
1 file changed, 57 insertions, 1 deletion
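Based on the parsing added below (tags are introduced by "--" and split from their value at the first space, with string values quoted), a line in the prompts file that overrides per-image settings would presumably look something like:

    --prompt "a watercolor painting of a lighthouse" --steps 20 --cfg_scale 7.5 --width 512

The line above is only an illustrative assumption; the recognized tag names come from the prompt_tags table in the diff.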
diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 513d9a1c..36e199b3 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -2,6 +2,7 @@ import math
import os
import sys
import traceback
+from xml.etree.ElementTree import tostring
import modules.scripts as scripts
import gradio as gr
@@ -29,6 +30,44 @@ class Script(scripts.Script):
checkbox_txt.change(fn=lambda x: [gr.File.update(visible = not x), gr.TextArea.update(visible = x)], inputs=[checkbox_txt], outputs=[file, prompt_txt])
return [checkbox_txt, file, prompt_txt]
+ def process_string_tag(self, tag):
+ return tag[1:-2]
+
+ def process_int_tag(self, tag):
+ return int(tag)
+
+ def process_float_tag(self, tag):
+ return float(tag)
+
+ def process_boolean_tag(self, tag):
+ return True if (tag == "true") else False
+
+ prompt_tags = {
+ "sd_model": None,
+ "outpath_samples": process_string_tag,
+ "outpath_grids": process_string_tag,
+ "prompt_for_display": process_string_tag,
+ "prompt": process_string_tag,
+ "negative_prompt": process_string_tag,
+ "styles": process_string_tag,
+ "seed": process_int_tag,
+ "subseed_strength": process_float_tag,
+ "subseed": process_int_tag,
+ "seed_resize_from_h": process_int_tag,
+ "seed_resize_from_w": process_int_tag,
+ "sampler_index": process_int_tag,
+ "batch_size": process_int_tag,
+ "n_iter": process_int_tag,
+ "steps": process_int_tag,
+ "cfg_scale": process_float_tag,
+ "width": process_int_tag,
+ "height": process_int_tag,
+ "restore_faces": process_boolean_tag,
+ "tiling": process_boolean_tag,
+ "do_not_save_samples": process_boolean_tag,
+ "do_not_save_grid": process_boolean_tag
+ }
+
def run(self, p, checkbox_txt, data: bytes, prompt_txt: str):
if (checkbox_txt):
lines = [x.strip() for x in prompt_txt.splitlines()]
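The converters above turn the raw text of each tag into the type the processing object expects, and prompt_tags maps every overridable attribute name to its converter (sd_model maps to None, so the lookup in run() never sets it). A minimal standalone sketch of the same dispatch-table idea, using hypothetical names that are not part of the webui code:

    # Standalone illustration of the converter-table pattern used in the diff;
    # the table here is trimmed and the names are illustrative only.
    converters = {
        "steps": int,                      # integer tags, e.g. "--steps 20"
        "cfg_scale": float,                # float tags, e.g. "--cfg_scale 7.5"
        "tiling": lambda s: s == "true",   # boolean tags
        "prompt": lambda s: s.strip('"'),  # quoted string tags
    }

    def convert(name, raw_value):
        # Look up the converter for this tag name; fall back to the raw text.
        func = converters.get(name)
        return func(raw_value) if func is not None else raw_value

    print(convert("steps", "20"))       # -> 20
    print(convert("cfg_scale", "7.5"))  # -> 7.5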
@@ -39,6 +78,7 @@ class Script(scripts.Script):
img_count = len(lines) * p.n_iter
batch_count = math.ceil(img_count / p.batch_size)
loop_count = math.ceil(batch_count / p.n_iter)
+ # These numbers no longer accurately reflect the total images and number of batches
print(f"Will process {img_count} images in {batch_count} batches.")
p.do_not_save_grid = True
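As a concrete example of the arithmetic above (assuming a file of 8 prompt lines with p.n_iter = 2 and p.batch_size = 4, values chosen purely for illustration): img_count = 8 * 2 = 16, batch_count = ceil(16 / 4) = 4, and loop_count = ceil(4 / 2) = 2. As the new comment notes, once tag lines can override settings per prompt, these precomputed totals may no longer match what is actually processed.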
@@ -48,7 +88,23 @@ class Script(scripts.Script):
images = []
for loop_no in range(loop_count):
state.job = f"{loop_no + 1} out of {loop_count}"
- p.prompt = lines[loop_no*p.batch_size:(loop_no+1)*p.batch_size] * p.n_iter
+ # The following line may need revising to remove batch_size references
+ current_line = lines[loop_no*p.batch_size:(loop_no+1)*p.batch_size] * p.n_iter
+ if(current_line[0][:2] != "--"):
+ p.prompt = current_line
+ else:
+ tokenized_line = current_line[0].split("--")
+
+ for tag in tokenized_line:
+ tag_split = tag.split(" ", 1)
+ if(tag_split[0] != ''):
+ value_func = self.prompt_tags.get(tag_split[0], None)
+ if(value_func != None):
+ value = value_func(self, tag_split[1])
+ setattr(p, tag_split[0], value)
+ else:
+ print(f"Unknown option \"{tag_split}\"")
+
proc = process_images(p)
images += proc.images
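Taken together, the new branch in run() treats any line that starts with "--" as a list of per-image overrides rather than a plain prompt: the line is split on "--", each piece is split at the first space into a tag name and a value, the value is converted via prompt_tags, and the result is set on the processing object with setattr. A simplified, self-contained sketch of that parsing path, written outside the Script class with a trimmed converter table (an illustration under those assumptions, not the exact webui code):

    # Standalone sketch of the "--" tag parsing added in this commit;
    # quoted-string handling is simplified and the converter table is trimmed.
    def parse_tag_line(line, converters):
        overrides = {}
        for tag in line.split("--"):
            tag = tag.strip()
            if not tag:
                continue
            name, _, raw = tag.partition(" ")
            func = converters.get(name)
            if func is None:
                print(f'Unknown option "{name}"')
                continue
            overrides[name] = func(raw.strip())
        return overrides

    converters = {
        "prompt": lambda s: s.strip('"'),
        "steps": int,
        "cfg_scale": float,
        "restore_faces": lambda s: s == "true",
    }

    line = '--prompt "a lighthouse at dusk" --steps 20 --cfg_scale 7.5'
    print(parse_tag_line(line, converters))
    # -> {'prompt': 'a lighthouse at dusk', 'steps': 20, 'cfg_scale': 7.5}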