Commit 1c6efdbb authored by v0xie

inference working but SLOW

parent ec718f76
+36 −37
@@ -12,6 +12,7 @@ class ModuleTypeOFT(network.ModuleType):
# adapted from https://github.com/kohya-ss/sd-scripts/blob/main/networks/oft.py
class NetworkModuleOFT(network.NetworkModule):
    def __init__(self,  net: network.Network, weights: network.NetworkWeights):

        super().__init__(net, weights)

        self.oft_blocks = weights.w["oft_blocks"]
@@ -20,24 +21,29 @@ class NetworkModuleOFT(network.NetworkModule):
        self.dim = self.oft_blocks.shape[0]
        self.num_blocks = self.dim

        #if type(self.alpha) == torch.Tensor:
        #    self.alpha = self.alpha.detach().numpy()

        if "Linear" in self.sd_module.__class__.__name__:
            self.out_dim = self.sd_module.out_features
        elif "Conv" in self.sd_module.__class__.__name__:
            self.out_dim = self.sd_module.out_channels

        self.constraint = self.alpha * self.out_dim
        self.constraint = self.alpha
        #self.constraint = self.alpha * self.out_dim
        self.block_size = self.out_dim // self.num_blocks

        self.oft_multiplier = self.multiplier()
        self.org_module: list[torch.Module] = [self.sd_module]

        self.R = self.get_weight()

        self.apply_to()

    # replace forward method of original linear rather than replacing the module
        # self.org_forward = self.sd_module.forward
        # self.sd_module.forward = self.forward
    def apply_to(self):
        self.org_forward = self.org_module[0].forward
        self.org_module[0].forward = self.forward
    
    def get_weight(self):
    def get_weight(self, multiplier=None):
        if not multiplier:
            multiplier = self.multiplier()
        block_Q = self.oft_blocks - self.oft_blocks.transpose(1, 2)
        norm_Q = torch.norm(block_Q.flatten())
        new_norm_Q = torch.clamp(norm_Q, max=self.constraint)
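
For context, apply_to() above uses the usual forward-replacement (monkey-patch) pattern: keep a reference to the wrapped module's original forward and assign the wrapper's own forward in its place, so calls through nn.Module.__call__ get routed into the wrapper. A minimal sketch under that assumption (OFTWrapper and the Linear shapes are illustrative, not part of this commit):

import torch
import torch.nn as nn

class OFTWrapper:
    # Illustrative stand-in for NetworkModuleOFT's forward replacement.
    def __init__(self, module: nn.Module):
        self.org_module = [module]  # list-wrapped reference, mirroring self.org_module in the diff

    def apply_to(self):
        self.org_forward = self.org_module[0].forward  # remember the original bound method
        self.org_module[0].forward = self.forward      # instance attribute shadows the class's forward

    def forward(self, x):
        x = self.org_forward(x)  # run the original computation first
        return x                 # a real OFT module would rotate x by R here

linear = nn.Linear(4, 4)
wrapper = OFTWrapper(linear)
wrapper.apply_to()
out = linear(torch.randn(2, 4))  # now dispatched through wrapper.forward
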
@@ -45,38 +51,31 @@ class NetworkModuleOFT(network.NetworkModule):
        I = torch.eye(self.block_size, device=self.oft_blocks.device).unsqueeze(0).repeat(self.num_blocks, 1, 1)
        block_R = torch.matmul(I + block_Q, (I - block_Q).inverse())

        block_R_weighted = self.oft_multiplier * block_R + (1 - self.oft_multiplier) * I
        block_R_weighted = multiplier * block_R + (1 - multiplier) * I
        R = torch.block_diag(*block_R_weighted)

        return R

    def calc_updown(self, orig_weight):
        oft_blocks = self.oft_blocks.to(orig_weight.device, dtype=orig_weight.dtype)
        block_Q = oft_blocks - oft_blocks.transpose(1, 2)
        norm_Q = torch.norm(block_Q.flatten())
        new_norm_Q = torch.clamp(norm_Q, max=self.constraint)
        block_Q = block_Q * ((new_norm_Q + 1e-8) / (norm_Q + 1e-8))
        I = torch.eye(self.block_size, device=oft_blocks.device).unsqueeze(0).repeat(self.num_blocks, 1, 1)
        block_R = torch.matmul(I + block_Q, (I - block_Q).inverse())

        block_R_weighted = self.oft_multiplier * block_R + (1 - self.oft_multiplier) * I
        R = torch.block_diag(*block_R_weighted)
        #R = self.get_weight().to(orig_weight.device, dtype=orig_weight.dtype)
        # W = R*W_0
        updown = orig_weight + R
        output_shape = [R.size(0), orig_weight.size(1)]
        R = self.R
        if orig_weight.dim() == 4:
            weight = torch.einsum("oihw, op -> pihw", orig_weight, R)
        else:
            weight = torch.einsum("oi, op -> pi", orig_weight, R)
        updown = orig_weight @ R
        output_shape = [orig_weight.size(0), R.size(1)]
        #output_shape = [R.size(0), orig_weight.size(1)]
        return self.finalize_updown(updown, orig_weight, output_shape)
    
    # def forward(self, x, y=None):
    #     x = self.org_forward(x)
    #     if self.oft_multiplier == 0.0:
    #         return x

    #     R = self.get_weight().to(x.device, dtype=x.dtype)
    #     if x.dim() == 4:
    #         x = x.permute(0, 2, 3, 1)
    #         x = torch.matmul(x, R)
    #         x = x.permute(0, 3, 1, 2)
    #     else:
    #         x = torch.matmul(x, R)
    #     return x
    def forward(self, x, y=None):
        x = self.org_forward(x)
        if self.multiplier() == 0.0:
            return x
        R = self.get_weight().to(x.device, dtype=x.dtype)
        if x.dim() == 4:
            x = x.permute(0, 2, 3, 1)
            x = torch.matmul(x, R)
            x = x.permute(0, 3, 1, 2)
        else:
            x = torch.matmul(x, R)
        return x
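
For reference, get_weight() above is the Cayley parametrization used by OFT: each learned block is made skew-symmetric (Q = B - B^T), its norm is clamped to the constraint, the Cayley transform R = (I + Q)(I - Q)^-1 turns it into an orthogonal block, and the blocks are assembled into one block-diagonal rotation. A self-contained sketch of that construction (the function name, shapes, and constraint value are chosen for illustration; the clamp and 1e-8 terms mirror the diff):

import torch

def oft_rotation(oft_blocks: torch.Tensor, constraint: float, multiplier: float = 1.0) -> torch.Tensor:
    # oft_blocks: (num_blocks, block_size, block_size) learned parameters
    num_blocks, block_size, _ = oft_blocks.shape
    block_Q = oft_blocks - oft_blocks.transpose(1, 2)               # skew-symmetric: Q^T = -Q
    norm_Q = torch.norm(block_Q.flatten())
    new_norm_Q = torch.clamp(norm_Q, max=constraint)                # bound the deviation from identity
    block_Q = block_Q * ((new_norm_Q + 1e-8) / (norm_Q + 1e-8))
    I = torch.eye(block_size).unsqueeze(0).repeat(num_blocks, 1, 1)
    block_R = torch.matmul(I + block_Q, (I - block_Q).inverse())    # Cayley transform -> orthogonal blocks
    block_R_weighted = multiplier * block_R + (1 - multiplier) * I  # blend toward identity at low multiplier
    return torch.block_diag(*block_R_weighted)                      # (out_dim, out_dim) rotation

R = oft_rotation(torch.randn(4, 8, 8) * 0.01, constraint=1.0)
print(torch.allclose(R @ R.T, torch.eye(R.size(0)), atol=1e-4))     # R is (numerically) orthogonal

Note that the new forward() calls get_weight() on every invocation and applies the dense R with a matmul per call, which is consistent with the commit message's "inference working but SLOW".
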
+39 −3
@@ -170,6 +170,10 @@ def load_network(name, network_on_disk):
                emb_dict[vec_name] = weight
            bundle_embeddings[emb_name] = emb_dict
        
        #if key_network_without_network_parts == "oft_unet":
        #    print(key_network_without_network_parts)
        #    pass

        key = convert_diffusers_name_to_compvis(key_network_without_network_parts, is_sd2)
        sd_module = shared.sd_model.network_layer_mapping.get(key, None)

@@ -185,15 +189,39 @@ def load_network(name, network_on_disk):
        elif sd_module is None and "lora_te1_text_model" in key_network_without_network_parts:
            key = key_network_without_network_parts.replace("lora_te1_text_model", "0_transformer_text_model")
            sd_module = shared.sd_model.network_layer_mapping.get(key, None)
        elif sd_module is None and "oft_unet" in key_network_without_network_parts:
            key = key_network_without_network_parts.replace("oft_unet", "diffusion_model")
            sd_module = shared.sd_model.network_layer_mapping.get(key, None)

            # some SD1 Loras also have correct compvis keys
            if sd_module is None:
                key = key_network_without_network_parts.replace("lora_te1_text_model", "transformer_text_model")
                sd_module = shared.sd_model.network_layer_mapping.get(key, None)

        elif sd_module is None and "oft_unet" in key_network_without_network_parts:
        #    UNET_TARGET_REPLACE_MODULE_ALL_LINEAR = ["Transformer2DModel"]
        #    UNET_TARGET_REPLACE_MODULE_CONV2D_3X3 = ["ResnetBlock2D", "Downsample2D", "Upsample2D"]
            UNET_TARGET_REPLACE_MODULE_ATTN_ONLY = ["CrossAttention"]
            # TODO: Change matched modules based on whether all linear, conv, etc

            key = key_network_without_network_parts.replace("oft_unet", "diffusion_model")
            sd_module = shared.sd_model.network_layer_mapping.get(key, None)
            #key_no_suffix = key.rsplit("_to_", 1)[0]
            ## Match all modules of class CrossAttention
            #replace_module_list = []
            #for module_type in UNET_TARGET_REPLACE_MODULE_ATTN_ONLY:
            #    replace_module_list += [module for k, module in shared.sd_model.network_layer_mapping.items() if module_type in module.__class__.__name__]

            #matched_module = replace_module_list.get(key_no_suffix, None)
            #if key.endswith('to_q'):
            #    sd_module = matched_module.to_q or None
            #if key.endswith('to_k'):
            #    sd_module = matched_module.to_k or None
            #if key.endswith('to_v'):
            #    sd_module = matched_module.to_v or None
            #if key.endswith('to_out_0'):
            #    sd_module = matched_module.to_out[0] or None
            #if key.endswith('to_out_1'):
            #    sd_module = matched_module.to_out[1] or None


        if sd_module is None:
            keys_failed_to_match[key_network] = key
            continue
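
The oft_unet branch above matches kohya-style OFT keys to webui modules by a plain string replacement into the network_layer_mapping namespace. A hedged example of that lookup (the concrete key is hypothetical, chosen only to show the format):

# hypothetical key, shown only to illustrate the replacement done above
key_network_without_network_parts = "oft_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_q"
key = key_network_without_network_parts.replace("oft_unet", "diffusion_model")
# -> "diffusion_model_output_blocks_3_1_transformer_blocks_0_attn1_to_q"
sd_module = shared.sd_model.network_layer_mapping.get(key, None)  # None if no module has that name
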
@@ -215,6 +243,14 @@ def load_network(name, network_on_disk):

        net.modules[key] = net_module
    
    # replaces forward method of original Linear
    # applied_to_count = 0
    #for key, created_module in net.modules.items():
    #    if isinstance(created_module, network_oft.NetworkModuleOFT):
    #        net_module.apply_to()
            #applied_to_count += 1
    # print(f'Applied OFT modules: {applied_to_count}')

    embeddings = {}
    for emb_name, data in bundle_embeddings.items():
        embedding = textual_inversion.create_embedding_from_data(data, emb_name, filename=network_on_disk.filename + "/" + emb_name)