Commit f9c5da15 authored by AUTOMATIC
Browse files

add fallback for xformers_attnblock_forward

parent a5550f02
Loading
Loading
Loading
Loading
+4 −1
Original line number Diff line number Diff line
@@ -211,6 +211,7 @@ def cross_attention_attnblock_forward(self, x):
        return h3
    
def xformers_attnblock_forward(self, x):
    try:
        h_ = x
        h_ = self.norm(h_)
        q1 = self.q(h_).contiguous()
@@ -219,3 +220,5 @@ def xformers_attnblock_forward(self, x):
        out = xformers.ops.memory_efficient_attention(q1, k1, v)
        out = self.proj_out(out)
        return x + out
    except NotImplementedError:
        return cross_attention_attnblock_forward(self, x)