Commit 122d4268 authored by Fampai's avatar Fampai Committed by AUTOMATIC1111
Browse files

Fix VRAM Issue by only loading in hypernetwork when selected in settings

parent e00b4df7
(per-file diff summaries did not render — four changed files follow)
+15 −8
Original line number Diff line number Diff line
@@ -40,18 +40,25 @@ class Hypernetwork:
            self.layers[size] = (HypernetworkModule(size, sd[0]), HypernetworkModule(size, sd[1]))


def load_hypernetworks(path):
def list_hypernetworks(path):
    """Recursively scan *path* for ``.pt`` hypernetwork files.

    Returns a dict mapping each file's bare name (extension stripped) to its
    full path; if two files share a name, the one found later wins.
    """
    pattern = os.path.join(path, '**/*.pt')
    return {
        os.path.splitext(os.path.basename(filename))[0]: filename
        for filename in glob.iglob(pattern, recursive=True)
    }


def load_hypernetwork(filename):
    """Load the hypernetwork named *filename* into ``shared.loaded_hypernetwork``.

    The name is resolved to a file path via ``shared.hypernetworks`` (built by
    ``list_hypernetworks``).  If the name is unknown, the currently loaded
    hypernetwork is cleared so no VRAM is held by an unselected hypernetwork.
    On a load error the traceback is printed to stderr and the previously
    loaded hypernetwork is left in place.  Returns ``None``.
    """
    print(f"Loading hypernetwork {filename}")
    path = shared.hypernetworks.get(filename, None)
    if path is not None:
        try:
            shared.loaded_hypernetwork = Hypernetwork(path)
        except Exception:
            # Keep the previous hypernetwork usable; just report the failure.
            print(f"Error loading hypernetwork {path}", file=sys.stderr)
            print(traceback.format_exc(), file=sys.stderr)
    else:
        # Unknown (or empty) selection: release the current hypernetwork.
        shared.loaded_hypernetwork = None


def attention_CrossAttention_forward(self, x, context=None, mask=None):
@@ -60,7 +67,7 @@ def attention_CrossAttention_forward(self, x, context=None, mask=None):
    q = self.to_q(x)
    context = default(context, x)

    hypernetwork = shared.selected_hypernetwork()
    hypernetwork = shared.loaded_hypernetwork
    hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)

    if hypernetwork_layers is not None:
+3 −3
Original line number Diff line number Diff line
@@ -28,7 +28,7 @@ def split_cross_attention_forward_v1(self, x, context=None, mask=None):
    q_in = self.to_q(x)
    context = default(context, x)

    hypernetwork = shared.selected_hypernetwork()
    hypernetwork = shared.loaded_hypernetwork
    hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)

    if hypernetwork_layers is not None:
@@ -68,7 +68,7 @@ def split_cross_attention_forward(self, x, context=None, mask=None):
    q_in = self.to_q(x)
    context = default(context, x)

    hypernetwork = shared.selected_hypernetwork()
    hypernetwork = shared.loaded_hypernetwork
    hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)

    if hypernetwork_layers is not None:
@@ -132,7 +132,7 @@ def xformers_attention_forward(self, x, context=None, mask=None):
    h = self.heads
    q_in = self.to_q(x)
    context = default(context, x)
    hypernetwork = shared.selected_hypernetwork()
    hypernetwork = shared.loaded_hypernetwork
    hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
    if hypernetwork_layers is not None:
        k_in = self.to_k(hypernetwork_layers[0](context))
+2 −5
Original line number Diff line number Diff line
@@ -79,11 +79,8 @@ parallel_processing_allowed = not cmd_opts.lowvram and not cmd_opts.medvram
xformers_available = False
config_filename = cmd_opts.ui_settings_file

# Only enumerate the available hypernetwork files here; the selected one is
# loaded on demand (see hypernetwork.load_hypernetwork), which avoids holding
# every hypernetwork in VRAM at startup.
hypernetworks = hypernetwork.list_hypernetworks(os.path.join(models_path, 'hypernetworks'))
# The currently loaded Hypernetwork instance, or None when none is selected.
loaded_hypernetwork = None


class State:
+3 −0
Original line number Diff line number Diff line
@@ -82,6 +82,9 @@ modules.scripts.load_scripts(os.path.join(script_path, "scripts"))
shared.sd_model = modules.sd_models.load_model()
shared.opts.onchange("sd_model_checkpoint", wrap_queued_call(lambda: modules.sd_models.reload_model_weights(shared.sd_model)))

# Load the initially selected hypernetwork once at startup, and reload
# whenever the setting changes.  load_hypernetwork stores the result on
# shared.loaded_hypernetwork and returns None, so its return value is not bound.
modules.hypernetwork.load_hypernetwork(shared.opts.sd_hypernetwork)
shared.opts.onchange("sd_hypernetwork", wrap_queued_call(lambda: modules.hypernetwork.load_hypernetwork(shared.opts.sd_hypernetwork)))


def webui():
    # make the program just exit at ctrl+c without waiting for anything