Commit 5b066074 authored by ljleb's avatar ljleb
Browse files

simplify

parent 6b68b590
Loading
Loading
Loading
Loading
+5 −3
Original line number Diff line number Diff line
@@ -718,24 +718,26 @@ def process_images_inner(p: StableDiffusionProcessing) -> Processed:

    def infotext(iteration=0, position_in_batch=0, use_main_prompt=False):
        """Build the infotext (generation-parameters string) for one image.

        Works on copies of the per-run prompt/seed lists, overlays the
        current batch's values, and temporarily swaps p.all_negative_prompts
        (which create_infotext reads) — restoring it afterwards so the
        caller's state is unchanged.

        NOTE(review): the rendered diff contained both the old slice-based
        save/restore and the new whole-list swap; executing both would
        snapshot an already-mutated list. Only the post-commit version is
        kept here.
        """
        # work on copies so the overlay below does not permanently alter p's lists
        all_prompts = p.all_prompts[:]
        all_negative_prompts = p.all_negative_prompts[:]
        all_seeds = p.all_seeds[:]
        all_subseeds = p.all_subseeds[:]

        # apply changes to generation data: overlay this iteration's batch slice
        # (extensions may have changed p.prompts etc., possibly even the batch size,
        # which is why slice assignment rather than per-index writes is used)
        all_prompts[iteration * p.batch_size:(iteration + 1) * p.batch_size] = p.prompts
        all_negative_prompts[iteration * p.batch_size:(iteration + 1) * p.batch_size] = p.negative_prompts
        all_seeds[iteration * p.batch_size:(iteration + 1) * p.batch_size] = p.seeds
        all_subseeds[iteration * p.batch_size:(iteration + 1) * p.batch_size] = p.subseeds

        # update p.all_negative_prompts in case extensions changed the size of the batch
        # create_infotext below uses it; save the original so it can be restored
        old_negative_prompts = p.all_negative_prompts
        p.all_negative_prompts = all_negative_prompts

        try:
            return create_infotext(p, all_prompts, all_seeds, all_subseeds, comments, iteration, position_in_batch, use_main_prompt)
        finally:
            # restore p.all_negative_prompts in case extensions changed the size of the batch
            p.all_negative_prompts = old_negative_prompts

    # Refresh textual-inversion embeddings from the embeddings directory before
    # generation, unless the caller explicitly opted out via
    # p.do_not_reload_embeddings (presumably to pick up newly added embedding
    # files without a restart — confirm against embedding_db's loader).
    if os.path.exists(cmd_opts.embeddings_dir) and not p.do_not_reload_embeddings:
        model_hijack.embedding_db.load_textual_inversion_embeddings()