Commit 39ec4f06 authored by AUTOMATIC

calculate hashes for Lora

add lora hashes to infotext
when pasting infotext, use the infotext's Lora hashes to find local Loras for <lora:xxx:1> entries whose hashes match Loras the user has
parent 87702feb
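For illustration (alias and hash invented), an image generated with <lora:myLora:0.8> in the prompt gains an infotext entry like:

    Lora hashes: "myLora: 1a2b3c4d5e6f"

where 1a2b3c4d5e6f is the first 12 hex digits of the file's SHA-256. On paste, that entry lets the webui replace the alias with whichever local Lora file carries the same hash.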
extensions-builtin/Lora/extra_networks_lora.py  +18 −0
@@ -23,5 +23,23 @@ class ExtraNetworkLora(extra_networks.ExtraNetwork):
 
         lora.load_loras(names, multipliers)
 
+        if shared.opts.lora_add_hashes_to_infotext:
+            lora_hashes = []
+            for item in lora.loaded_loras:
+                shorthash = item.lora_on_disk.shorthash
+                if not shorthash:
+                    continue
+
+                alias = item.mentioned_name
+                if not alias:
+                    continue
+
+                alias = alias.replace(":", "").replace(",", "")
+
+                lora_hashes.append(f"{alias}: {shorthash}")
+
+            if lora_hashes:
+                p.extra_generation_params["Lora hashes"] = ", ".join(lora_hashes)
+
     def deactivate(self, p):
         pass
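The alias is stripped of ':' and ',' above because those characters delimit the serialized entries. A sketch of the value this block produces, with invented names and hashes:

    # loaded Loras with aliases "detail:v2" and "style,soft" (hypothetical)
    # become "alias: shorthash" pairs joined by ", ":
    #     Lora hashes: "detailv2: 1a2b3c4d5e6f, stylesoft: abcdef012345"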
extensions-builtin/Lora/lora.py  +48 −11
@@ -3,7 +3,7 @@ import re
 import torch
 from typing import Union
 
-from modules import shared, devices, sd_models, errors, scripts, sd_hijack
+from modules import shared, devices, sd_models, errors, scripts, sd_hijack, hashes
 
 metadata_tags_order = {"ss_sd_model_name": 1, "ss_resolution": 2, "ss_clip_skip": 3, "ss_num_train_images": 10, "ss_tag_frequency": 20}

@@ -76,9 +76,9 @@ class LoraOnDisk:
         self.name = name
         self.filename = filename
         self.metadata = {}
+        self.is_safetensors = os.path.splitext(filename)[1].lower() == ".safetensors"
 
-        _, ext = os.path.splitext(filename)
-        if ext.lower() == ".safetensors":
+        if self.is_safetensors:
             try:
                 self.metadata = sd_models.read_metadata_from_safetensors(filename)
             except Exception as e:
@@ -94,14 +94,43 @@ class LoraOnDisk:
         self.ssmd_cover_images = self.metadata.pop('ssmd_cover_images', None)  # those are cover images and they are too big to display in UI as text
         self.alias = self.metadata.get('ss_output_name', self.name)
 
+        self.hash = None
+        self.shorthash = None
+        self.set_hash(
+            self.metadata.get('sshs_model_hash') or
+            hashes.sha256_from_cache(self.filename, "lora/" + self.name, use_addnet_hash=self.is_safetensors) or
+            ''
+        )
+
+    def set_hash(self, v):
+        self.hash = v
+        self.shorthash = self.hash[0:12]
+
+        if self.shorthash:
+            available_lora_hash_lookup[self.shorthash] = self
+
+    def read_hash(self):
+        if not self.hash:
+            self.set_hash(hashes.sha256(self.filename, "lora/" + self.name, use_addnet_hash=self.is_safetensors) or '')
+
+    def get_alias(self):
+        if shared.opts.lora_preferred_name == "Filename" or self.alias.lower() in forbidden_lora_aliases:
+            return self.name
+        else:
+            return self.alias
+
 
 class LoraModule:
-    def __init__(self, name):
+    def __init__(self, name, lora_on_disk: LoraOnDisk):
         self.name = name
+        self.lora_on_disk = lora_on_disk
         self.multiplier = 1.0
         self.modules = {}
         self.mtime = None
 
+        self.mentioned_name = None
+        """the text that was used to add lora to prompt - can be either name or an alias"""
+
 
 class LoraUpDownModule:
     def __init__(self):
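The use_addnet_hash=self.is_safetensors argument makes hashes of safetensors files comparable with kohya-ss AddNet hashes, which cover only the tensor payload rather than the whole file. A minimal standalone sketch of that scheme, assuming the standard safetensors layout (an 8-byte little-endian header length, the JSON header, then tensor data) - an illustration, not the webui's hashes module:

    import hashlib

    def addnet_hash_safetensors_sketch(path):
        # safetensors layout: 8-byte little-endian header size, JSON header, tensor data
        h = hashlib.sha256()
        with open(path, "rb") as f:
            header_size = int.from_bytes(f.read(8), "little")
            f.seek(8 + header_size)  # skip the header; hash only the tensor payload
            for chunk in iter(lambda: f.read(1 << 20), b""):
                h.update(chunk)
        return h.hexdigest()

    # shorthash, as used by set_hash() above, is the first 12 hex digits:
    # shorthash = addnet_hash_safetensors_sketch(path)[0:12]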
@@ -126,11 +155,11 @@ def assign_lora_names_to_compvis_modules(sd_model):
     sd_model.lora_layer_mapping = lora_layer_mapping
 
 
-def load_lora(name, filename):
-    lora = LoraModule(name)
-    lora.mtime = os.path.getmtime(filename)
+def load_lora(name, lora_on_disk):
+    lora = LoraModule(name, lora_on_disk)
+    lora.mtime = os.path.getmtime(lora_on_disk.filename)
 
-    sd = sd_models.read_state_dict(filename)
+    sd = sd_models.read_state_dict(lora_on_disk.filename)
 
     # this should not be needed but is here as an emergency fix for an unknown error people are experiencing in 1.2.0
     if not hasattr(shared.sd_model, 'lora_layer_mapping'):
@@ -191,7 +220,7 @@ def load_lora(name, filename):
             raise AssertionError(f"Bad Lora layer name: {key_diffusers} - must end in lora_up.weight, lora_down.weight or alpha")
 
     if len(keys_failed_to_match) > 0:
-        print(f"Failed to match keys when loading Lora {filename}: {keys_failed_to_match}")
+        print(f"Failed to match keys when loading Lora {lora_on_disk.filename}: {keys_failed_to_match}")
 
     return lora

@@ -217,14 +246,19 @@ def load_loras(names, multipliers=None):
         lora = already_loaded.get(name, None)
 
         lora_on_disk = loras_on_disk[i]
+
         if lora_on_disk is not None:
             if lora is None or os.path.getmtime(lora_on_disk.filename) > lora.mtime:
                 try:
-                    lora = load_lora(name, lora_on_disk.filename)
+                    lora = load_lora(name, lora_on_disk)
                 except Exception as e:
                     errors.display(e, f"loading Lora {lora_on_disk.filename}")
                     continue
 
+            lora.mentioned_name = name
+
+            lora_on_disk.read_hash()
+
         if lora is None:
             failed_to_load_loras.append(name)
             print(f"Couldn't find Lora with name {name}")
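Hashing is deferred: the constructor only consults the sshs_model_hash metadata field and the hash cache, and read_hash() computes the actual SHA-256 the first time a Lora is used in a generation. A hypothetical session:

    ld = LoraOnDisk("myLora", "/path/myLora.safetensors")  # hypothetical file
    ld.shorthash      # '' - nothing in metadata or cache yet
    ld.read_hash()    # computes the sha256 once; cached for later runs
    ld.shorthash      # e.g. '1a2b3c4d5e6f', now in available_lora_hash_lookup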
@@ -403,7 +437,8 @@ def list_available_loras():
     available_loras.clear()
     available_lora_aliases.clear()
     forbidden_lora_aliases.clear()
-    forbidden_lora_aliases.update({"none": 1})
+    available_lora_hash_lookup.clear()
+    forbidden_lora_aliases.update({"none": 1, "Addams": 1})
 
     os.makedirs(shared.cmd_opts.lora_dir, exist_ok=True)

@@ -457,8 +492,10 @@ def infotext_pasted(infotext, params):
     if added:
         params["Prompt"] += "\n" + "".join(added)
 
 
 available_loras = {}
 available_lora_aliases = {}
+available_lora_hash_lookup = {}
 forbidden_lora_aliases = {}
 loaded_loras = []

extensions-builtin/Lora/scripts/lora_script.py  +31 −1
+import re
+
 import torch
 import gradio as gr
 from fastapi import FastAPI
@@ -54,7 +56,8 @@ script_callbacks.on_infotext_pasted(lora.infotext_pasted)
 
 shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
     "sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": ["None", *lora.available_loras]}, refresh=lora.list_available_loras),
-    "lora_preferred_name": shared.OptionInfo("Alias from file", "When adding to prompt, refer to lora by", gr.Radio, {"choices": ["Alias from file", "Filename"]}),
+    "lora_preferred_name": shared.OptionInfo("Alias from file", "When adding to prompt, refer to Lora by", gr.Radio, {"choices": ["Alias from file", "Filename"]}),
+    "lora_add_hashes_to_infotext": shared.OptionInfo(True, "Add Lora hashes to infotext"),
 }))
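The new setting defaults to True, so hashes are written unless the user opts out under Settings > Extra Networks. It is the flag that ExtraNetworkLora.activate() checks in the first file of this commit:

    shared.opts.lora_add_hashes_to_infotext   # True by default; gates the infotext entry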


@@ -84,3 +87,30 @@ def api_loras(_: gr.Blocks, app: FastAPI):
 
 script_callbacks.on_app_started(api_loras)
 
+re_lora = re.compile("<lora:([^:]+):")
+
+
+def infotext_pasted(infotext, d):
+    hashes = d.get("Lora hashes")
+    if not hashes:
+        return
+
+    hashes = [x.strip().split(':', 1) for x in hashes.split(",")]
+    hashes = {x[0].strip().replace(",", ""): x[1].strip() for x in hashes}
+
+    def lora_replacement(m):
+        alias = m.group(1)
+        shorthash = hashes.get(alias)
+        if shorthash is None:
+            return m.group(0)
+
+        lora_on_disk = lora.available_lora_hash_lookup.get(shorthash)
+        if lora_on_disk is None:
+            return m.group(0)
+
+        return f'<lora:{lora_on_disk.get_alias()}:'
+
+    d["Prompt"] = re.sub(re_lora, lora_replacement, d["Prompt"])
+
+
+script_callbacks.on_infotext_pasted(infotext_pasted)
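A worked round trip, with invented aliases and hashes. Suppose a pasted infotext carries:

    d = {
        "Prompt": "a portrait <lora:detailTweaker:0.8>",
        "Lora hashes": "detailTweaker: 1a2b3c4d5e6f",
    }

If the user's own copy of that file registered shorthash 1a2b3c4d5e6f under the local alias detail_tweaker_xl, re_lora matches only the "<lora:detailTweaker:" span, so the multiplier survives and the prompt becomes "a portrait <lora:detail_tweaker_xl:0.8>". Unknown hashes leave the entry untouched.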
extensions-builtin/Lora/ui_extra_networks_lora.py  +1 −4
@@ -16,10 +16,7 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
         for name, lora_on_disk in lora.available_loras.items():
             path, ext = os.path.splitext(lora_on_disk.filename)
 
-            if shared.opts.lora_preferred_name == "Filename" or lora_on_disk.alias.lower() in lora.forbidden_lora_aliases:
-                alias = name
-            else:
-                alias = lora_on_disk.alias
+            alias = lora_on_disk.get_alias()
 
             yield {
                 "name": name,
modules/extra_networks.py  +9 −0
@@ -17,6 +17,15 @@ def register_extra_network(extra_network):
 class ExtraNetworkParams:
     def __init__(self, items=None):
         self.items = items or []
+        self.positional = []
+        self.named = {}
+
+        for item in self.items:
+            parts = item.split('=', 2)
+            if len(parts) == 2:
+                self.named[parts[0]] = parts[1]
+            else:
+                self.positional.append(item)
 
 
 class ExtraNetwork:
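This gives every extra-network argument list a positional/named split. Note the maxsplit of 2: an item containing two or more '=' characters splits into three parts and therefore stays positional. An illustration with hypothetical items:

    p = ExtraNetworkParams(items=["0.8", "te=0.5", "a=b=c"])
    p.positional   # ["0.8", "a=b=c"]  ("a=b=c" yields 3 parts, so it is not named)
    p.named        # {"te": "0.5"}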