[Versatile Diffusion] Fix cross_attention_kwargs (open-mmlab#1849)
fix versatile
patrickvonplaten committed Dec 28, 2022
1 parent 2ba42aa commit debc74f
Showing 1 changed file with 8 additions and 1 deletion.
src/diffusers/models/attention.py (8 additions, 1 deletion)
@@ -703,7 +703,13 @@ def __init__(
         self.transformer_index_for_condition = [1, 0]
 
     def forward(
-        self, hidden_states, encoder_hidden_states, timestep=None, attention_mask=None, return_dict: bool = True
+        self,
+        hidden_states,
+        encoder_hidden_states,
+        timestep=None,
+        attention_mask=None,
+        cross_attention_kwargs=None,
+        return_dict: bool = True,
     ):
         """
         Args:
@@ -738,6 +744,7 @@ def forward(
                 input_states,
                 encoder_hidden_states=condition_state,
                 timestep=timestep,
+                cross_attention_kwargs=cross_attention_kwargs,
                 return_dict=False,
             )[0]
             encoded_states.append(encoded_state - input_states)
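
For context, the bug was that the dual-transformer wrapper's `forward` did not accept `cross_attention_kwargs`, so attention-processor options supplied by a caller never reached the two inner transformers. Below is a minimal, self-contained sketch of the pass-through pattern this commit establishes; `DualTransformerSketch` and its blending logic are illustrative stand-ins, not the actual diffusers class.

```python
# Sketch of forwarding an optional cross_attention_kwargs dict through a
# wrapper module to each inner transformer. Assumes the inner modules accept
# the same keyword arguments as diffusers' Transformer2DModel-style blocks.
import torch.nn as nn


class DualTransformerSketch(nn.Module):
    """Wraps two transformers and blends their conditioned outputs."""

    def __init__(self, transformer_a: nn.Module, transformer_b: nn.Module):
        super().__init__()
        self.transformers = nn.ModuleList([transformer_a, transformer_b])

    def forward(
        self,
        hidden_states,
        encoder_hidden_states,
        timestep=None,
        attention_mask=None,          # accepted for API parity with the wrapper
        cross_attention_kwargs=None,  # the parameter this commit adds
        return_dict: bool = True,
    ):
        encoded_states = []
        for i, transformer in enumerate(self.transformers):
            # Each inner transformer gets its own conditioning stream.
            condition_state = encoder_hidden_states[i]
            encoded_state = transformer(
                hidden_states,
                encoder_hidden_states=condition_state,
                timestep=timestep,
                # Before the fix this argument was never forwarded, so any
                # per-call attention options the caller supplied were lost.
                cross_attention_kwargs=cross_attention_kwargs,
                return_dict=False,
            )[0]
            # Store each branch's residual relative to the input.
            encoded_states.append(encoded_state - hidden_states)

        # Even residual blend of the two conditioned branches (illustrative;
        # the real class weights branches by a configurable mix ratio).
        output = hidden_states + 0.5 * (encoded_states[0] + encoded_states[1])
        return {"sample": output} if return_dict else (output,)
```

The fix is purely a plumbing change: the new keyword defaults to `None`, so existing callers are unaffected, while pipelines that pass `cross_attention_kwargs` now have it reach both inner transformers.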
