Mirror of https://github.com/comfyanonymous/ComfyUI.git,
synced 2025-04-20 03:13:30 +00:00.
Remove prints that are redundant when xformers is enabled.
This commit is contained in:
parent commit: a7328e4945
this commit: c9daec4c89
@ -343,7 +343,7 @@ class CrossAttentionDoggettx(nn.Module):
|
|||||||
|
|
||||||
return self.to_out(r2)
|
return self.to_out(r2)
|
||||||
|
|
||||||
class OriginalCrossAttention(nn.Module):
|
class CrossAttention(nn.Module):
|
||||||
def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.):
|
def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.):
|
||||||
super().__init__()
|
super().__init__()
|
||||||
inner_dim = dim_head * heads
|
inner_dim = dim_head * heads
|
||||||
@ -395,14 +395,13 @@ class OriginalCrossAttention(nn.Module):
|
|||||||
return self.to_out(out)
|
return self.to_out(out)
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
if "--use-split-cross-attention" in sys.argv:
|
if XFORMERS_IS_AVAILBLE == False:
|
||||||
print("Using split optimization for cross attention")
|
if "--use-split-cross-attention" in sys.argv:
|
||||||
class CrossAttention(CrossAttentionDoggettx):
|
print("Using split optimization for cross attention")
|
||||||
pass
|
CrossAttention = CrossAttentionDoggettx
|
||||||
else:
|
else:
|
||||||
print("Using sub quadratic optimization for cross attention, if you have memory or speed issues try using: --use-split-cross-attention")
|
print("Using sub quadratic optimization for cross attention, if you have memory or speed issues try using: --use-split-cross-attention")
|
||||||
class CrossAttention(CrossAttentionBirchSan):
|
CrossAttention = CrossAttentionBirchSan
|
||||||
pass
|
|
||||||
|
|
||||||
class MemoryEfficientCrossAttention(nn.Module):
|
class MemoryEfficientCrossAttention(nn.Module):
|
||||||
# https://github.com/MatthieuTPHR/diffusers/blob/d80b531ff8060ec1ea982b65a1b8df70f73aa67c/src/diffusers/models/attention.py#L223
|
# https://github.com/MatthieuTPHR/diffusers/blob/d80b531ff8060ec1ea982b65a1b8df70f73aa67c/src/diffusers/models/attention.py#L223
|
||||||
|
Loading…
Reference in New Issue
Block a user