[Test] attention implementation #228
fedebotu committed Oct 24, 2024
1 parent 3f9eb35 commit 28c1166
Showing 2 changed files with 17 additions and 1 deletion.
3 changes: 2 additions & 1 deletion rl4co/models/nn/attention.py
@@ -19,7 +19,8 @@
 def scaled_dot_product_attention_simple(
     q, k, v, attn_mask=None, dropout_p=0.0, is_causal=False
 ):
-    """Simple Scaled Dot-Product Attention in PyTorch without Flash Attention"""
+    """Simple (exact) Scaled Dot-Product Attention in RL4CO without customized kernels (i.e. no Flash Attention)."""
+
     # Check for causal and attn_mask conflict
     if is_causal and attn_mask is not None:
         raise ValueError("Cannot set both is_causal and attn_mask")
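
For context, an exact scaled dot-product attention simply computes softmax(Q K^T / sqrt(d)) V without any fused or custom kernel. The snippet below is not the RL4CO implementation, just a minimal PyTorch sketch of the same idea (the helper name sdpa_reference is hypothetical), assuming a boolean attn_mask where True marks positions that may be attended to and that is_causal and attn_mask are mutually exclusive, as the check above enforces:

    import math

    import torch
    import torch.nn.functional as F


    def sdpa_reference(q, k, v, attn_mask=None, dropout_p=0.0, is_causal=False):
        # Hypothetical reference helper, not part of rl4co
        if is_causal and attn_mask is not None:
            raise ValueError("Cannot set both is_causal and attn_mask")
        # (..., seq_q, seq_k) attention scores, scaled by sqrt(head dim)
        scores = q @ k.transpose(-2, -1) / math.sqrt(q.size(-1))
        if is_causal:
            # lower-triangular mask: each query attends only to earlier keys
            causal = torch.ones(scores.shape[-2:], dtype=torch.bool).tril()
            scores = scores.masked_fill(~causal, float("-inf"))
        if attn_mask is not None:
            # boolean mask: False positions are excluded from attention
            scores = scores.masked_fill(~attn_mask, float("-inf"))
        weights = torch.softmax(scores, dim=-1)
        if dropout_p > 0.0:
            weights = F.dropout(weights, p=dropout_p)
        return weights @ v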
15 changes: 15 additions & 0 deletions tests/test_utils.py
@@ -2,7 +2,9 @@
 import torch
 
 from tensordict import TensorDict
+from torch.nn.functional import scaled_dot_product_attention
 
+from rl4co.models.nn.attention import scaled_dot_product_attention_simple
 from rl4co.utils.decoding import process_logits
 from rl4co.utils.ops import batchify, unbatchify
 
@@ -35,3 +37,16 @@ def test_top_k_top_p_sampling(top_p, top_k):
     mask = torch.ones(8, 10).bool()
     logprobs = process_logits(logits, mask, top_p=top_p, top_k=top_k)
     assert len(logprobs) == logits.size(0)
+
+
+def test_scaled_dot_product_attention():
+    bs, ns, ds = 2, 3, 4
+    q = torch.rand(bs, ns, ds)
+    k = torch.rand(bs, ns, ds)
+    v = torch.rand(bs, ns, ds)
+    attn_mask = torch.rand(bs, ns, ns) > 0.5
+    attn_mask[:, 0, :] = True  # at least one row element is True
+    attn_mask[:, :, 0] = True  # at least one column element is True
+    attn_torch = scaled_dot_product_attention(q, k, v, attn_mask)
+    attn_rl4co = scaled_dot_product_attention_simple(q, k, v, attn_mask)
+    assert torch.allclose(attn_torch, attn_rl4co)
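
The random boolean mask is forced to keep at least one True per row and column so that no query ends up with an all-masked key set; a fully masked row would make the softmax ill-defined and both implementations would produce NaNs, breaking the comparison. To run just this check locally, something like the following should work from the repository root, assuming pytest is installed:

    pytest tests/test_utils.py -k test_scaled_dot_product_attention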
