Commit 06fe2294 authored by one
Browse files

Fix vortex attention interface by adding dropout parameters and updating function signature

parent 1516fed0
......@@ -37,8 +37,8 @@
+ q,
+ kv,
+ self.drop.p if self.training else 0.0,
+ softmax_scale=None,
+ causal=False,
+ causal=causal,
+ softmax_scale=self.softmax_scale,
+ alibi_slopes=self.alibi_slopes,
+ window_size=self.window_size,
+ deterministic=self.deterministic,
......@@ -60,3 +60,20 @@
q,
k,
v,
@@ -72,6 +72,9 @@
softcap,
return_softmax,
None,
+ False,
+ None,
+ 0.0,
)
return out, softmax_lse, S_dmask, rng_state
@@ -1624,5 +1627,6 @@
softcap,
rotary_interleaved,
num_splits,
+ None,
)
return (out, softmax_lse) if return_softmax_lse else out
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment