Skip to content

Commit 33b4c02

Browse files
committed
add_{k,v}_proj should be projecting to inner_dim
1 parent 072a5dd commit 33b4c02

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

src/diffusers/models/attention_processor.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -93,8 +93,8 @@ def __init__(
9393
self.to_v = nn.Linear(cross_attention_dim, inner_dim, bias=bias)
9494

9595
if self.added_kv_proj_dim is not None:
96-
self.add_k_proj = nn.Linear(added_kv_proj_dim, cross_attention_dim)
97-
self.add_v_proj = nn.Linear(added_kv_proj_dim, cross_attention_dim)
96+
self.add_k_proj = nn.Linear(added_kv_proj_dim, inner_dim)
97+
self.add_v_proj = nn.Linear(added_kv_proj_dim, inner_dim)
9898

9999
self.to_out = nn.ModuleList([])
100100
self.to_out.append(nn.Linear(inner_dim, query_dim, bias=out_bias))

0 commit comments

Comments (0)