Commit 2053745c authored by AUTOMATIC

Merge branch 'v1.2.0-hotfix' into release_candidate

parents 231562ea 27f7fbf3
CHANGELOG.md  +11 −0
+## Upcoming 1.2.1
+
+### Features:
+ * add an option to always refer to loras by filename
+
+### Bug Fixes:
+ * never refer to a lora by its alias if multiple loras share that alias or the alias is "none"
+ * fix upscalers disappearing after the user reloads the UI
+ * allow bf16 in the safe unpickler (resolves problems with loading some loras)
+ * allow the web UI to be run fully offline
+
 ## 1.2.0
 
 ### Features:
extensions-builtin/Lora/lora.py  +6 −0
@@ -393,6 +393,8 @@ def lora_MultiheadAttention_load_state_dict(self, *args, **kwargs):
 def list_available_loras():
     available_loras.clear()
     available_lora_aliases.clear()
+    forbidden_lora_aliases.clear()
+    forbidden_lora_aliases.update({"none": 1})
 
     os.makedirs(shared.cmd_opts.lora_dir, exist_ok=True)

@@ -406,6 +408,9 @@ def list_available_loras():
 
         available_loras[name] = entry
 
+        if entry.alias in available_lora_aliases:
+            forbidden_lora_aliases[entry.alias.lower()] = 1
+
         available_lora_aliases[name] = entry
         available_lora_aliases[entry.alias] = entry

@@ -445,6 +450,7 @@ def infotext_pasted(infotext, params):
 
 available_loras = {}
 available_lora_aliases = {}
+forbidden_lora_aliases = {}
 loaded_loras = []
 
 list_available_loras()
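Taken together, the three lora.py hunks implement the forbidden-alias rule from the changelog: "none" is forbidden from the start, and any alias claimed by more than one lora is added to the forbidden set so the UI can fall back to the unambiguous filename. Below is a minimal standalone sketch of that bookkeeping; the entries are hypothetical stand-ins for webui's on-disk lora records.

```python
# Standalone sketch of the forbidden-alias bookkeeping added above.
# The entries are hypothetical; in webui each value is a lora object,
# simplified here to plain strings.
available_loras = {}
available_lora_aliases = {}
forbidden_lora_aliases = {"none": 1}  # "none" is always forbidden

# (name derived from the filename, alias read from the file's metadata)
entries = [("style_v1", "style"), ("style_v2", "style"), ("glow", "none")]

for name, alias in entries:
    available_loras[name] = alias

    # collision: a second lora claims an alias that is already registered
    if alias in available_lora_aliases:
        forbidden_lora_aliases[alias.lower()] = 1

    available_lora_aliases[name] = alias
    available_lora_aliases[alias] = alias

print(sorted(forbidden_lora_aliases))  # ['none', 'style']
```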
extensions-builtin/Lora/scripts/lora_script.py  +1 −0
@@ -54,6 +54,7 @@ script_callbacks.on_infotext_pasted(lora.infotext_pasted)
 
 shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
     "sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": ["None"] + [x for x in lora.available_loras]}, refresh=lora.list_available_loras),
+    "lora_preferred_name": shared.OptionInfo("Alias from file", "When adding to prompt, refer to lora by", gr.Radio, {"choices": ["Alias from file", "Filename"]}),
 }))
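The new `lora_preferred_name` radio is registered next to the existing `sd_lora` option; keys registered through `shared.options_templates` are read back as attributes of `shared.opts`, which is how the page code in the next file consumes the setting. A stubbed illustration of that read path (the `Opts` class is a stand-in, not webui internals):

```python
# Stand-in for shared.opts: each registered option key becomes an attribute.
class Opts:
    lora_preferred_name = "Alias from file"  # the OptionInfo default above

opts = Opts()

# The check the Lora extra-networks page performs (see the next file):
prefer_filename = opts.lora_preferred_name == "Filename"
print(prefer_filename)  # False until the user switches the radio to "Filename"
```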


extensions-builtin/Lora/ui_extra_networks_lora.py  +7 −1
@@ -15,13 +15,19 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
     def list_items(self):
         for name, lora_on_disk in lora.available_loras.items():
             path, ext = os.path.splitext(lora_on_disk.filename)
+
+            if shared.opts.lora_preferred_name == "Filename" or lora_on_disk.alias.lower() in lora.forbidden_lora_aliases:
+                alias = name
+            else:
+                alias = lora_on_disk.alias
+
             yield {
                 "name": name,
                 "filename": path,
                 "preview": self.find_preview(path),
                 "description": self.find_description(path),
                 "search_term": self.search_terms_from_path(lora_on_disk.filename),
-                "prompt": json.dumps(f"<lora:{lora_on_disk.alias}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">"),
+                "prompt": json.dumps(f"<lora:{alias}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">"),
                 "local_preview": f"{path}.{shared.opts.samples_format}",
                 "metadata": json.dumps(lora_on_disk.metadata, indent=4) if lora_on_disk.metadata else None,
             }
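Note that the "prompt" value is not literal prompt text but a JavaScript expression: `json.dumps` supplies the quoting, and the browser side appears to splice in `opts.extra_networks_default_multiplier` when the card is clicked. A quick illustration with a hypothetical alias:

```python
import json

alias = "style"  # hypothetical
prompt = json.dumps(f"<lora:{alias}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">")
print(prompt)
# "<lora:style:" + opts.extra_networks_default_multiplier + ">"
# with a client-side multiplier of 0.8, that expression evaluates to <lora:style:0.8>
```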
modules/modelloader.py  +10 −17
@@ -117,20 +117,6 @@ def move_files(src_path: str, dest_path: str, ext_filter: str = None):
         pass
 
 
-builtin_upscaler_classes = []
-forbidden_upscaler_classes = set()
-
-
-def list_builtin_upscalers():
-    builtin_upscaler_classes.clear()
-    builtin_upscaler_classes.extend(Upscaler.__subclasses__())
-
-def forbid_loaded_nonbuiltin_upscalers():
-    for cls in Upscaler.__subclasses__():
-        if cls not in builtin_upscaler_classes:
-            forbidden_upscaler_classes.add(cls)
-
-
 def load_upscalers():
     # We can only do this 'magic' method to dynamically load upscalers if they are referenced,
     # so we'll try to import any _model.py files before looking in __subclasses__
@@ -146,10 +132,17 @@ def load_upscalers():
 
     datas = []
     commandline_options = vars(shared.cmd_opts)
-    for cls in Upscaler.__subclasses__():
-        if cls in forbidden_upscaler_classes:
-            continue
 
+    # some of the upscaler classes will not go away after reloading their modules, and we'll end
+    # up with two copies of those classes. The newest copy will always be the last in the list,
+    # so we go from end to beginning and ignore duplicates
+    used_classes = {}
+    for cls in reversed(Upscaler.__subclasses__()):
+        classname = str(cls)
+        if classname not in used_classes:
+            used_classes[classname] = cls
+
+    for cls in reversed(used_classes.values()):
         name = cls.__name__
         cmd_name = f"{name.lower().replace('upscaler', '')}_models_path"
         scaler = cls(commandline_options.get(cmd_name, None))
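The rewritten loop drops the builtin/forbidden bookkeeping in favor of deduplicating by class name. A self-contained sketch (not webui code) of why `Upscaler.__subclasses__()` can hold stale duplicates after a reload, and how keying on `str(cls)` keeps only the newest copy:

```python
# Each execution of a class statement creates a brand-new class object,
# and the base class remembers every one of them via __subclasses__().
class Upscaler:
    pass

def load_plugin():
    # stands in for importing (or re-importing) an extension's *_model.py
    class DemoUpscaler(Upscaler):
        pass
    return DemoUpscaler

first = load_plugin()
second = load_plugin()  # the "reload": same name, different class object

print(len(Upscaler.__subclasses__()))  # 2

# The dedup from load_upscalers(): the newest entries are last in the
# list, so walk it in reverse and keep the first occurrence of each name.
used_classes = {}
for cls in reversed(Upscaler.__subclasses__()):
    classname = str(cls)
    if classname not in used_classes:
        used_classes[classname] = cls

print(list(used_classes.values()) == [second])  # True: the newest copy wins
```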