Skip to content

Commit

Permalink
remove 2 extra warp_uniform calls
Browse files · Browse the repository at this point in the history
ghstack-source-id: 888c31adf5df834d1b5ca749354bc5ae595a6c70
Pull Request resolved: fairinternal/xformers#695

__original_commit__ = fairinternal/xformers@fd35f16
Loading branch information
bottler authored and xFormers Bot committed Jun 21, 2023
1 parent a4e311a commit 6e88cf8
Showing 1 changed file with 1 addition and 2 deletions.
3 changes: 1 addition & 2 deletions xformers/csrc/attention/cuda/fmha/kernel_forward.h
Original file line number Diff line number Diff line change
Expand Up @@ -309,6 +309,7 @@ struct AttentionKernel {

// Make sure the compiler knows these variables are the same on all
// the threads of the warp.
// Only worth doing if they could have been modified above.
query_ptr = warp_uniform(query_ptr);
key_ptr = warp_uniform(key_ptr);
value_ptr = warp_uniform(value_ptr);
Expand All @@ -321,8 +322,6 @@ struct AttentionKernel {
num_queries = warp_uniform(num_queries);
num_keys = warp_uniform(num_keys);
num_heads = warp_uniform(num_heads);
head_dim = warp_uniform(head_dim);
head_dim_value = warp_uniform(head_dim_value);
o_strideM = warp_uniform(o_strideM);
custom_mask_type = warp_uniform(custom_mask_type);
return true;
Expand Down

0 comments on commit 6e88cf8

Please sign in to comment.