
Commit 78c7653

Commit message: delta
Parent: 19097d6

1 file changed (+2 lines, -1 line)

src/transformers/models/falcon/modeling_falcon.py

Lines changed: 2 additions & 1 deletion
@@ -23,6 +23,7 @@
 from torch import nn
 from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, LayerNorm, MSELoss
 from torch.nn import functional as F
+from transformers.activations import get_activation
 
 from ...modeling_attn_mask_utils import (
     AttentionMaskConverter,
@@ -733,7 +734,7 @@ def _upad_input(self, query_layer, key_layer, value_layer, attention_mask, query
             (max_seqlen_in_batch_q, max_seqlen_in_batch_k),
         )
 
-from transformers.activations import get_activation
+
 class FalconMLP(nn.Module):
     def __init__(self, config: FalconConfig):
         super().__init__()
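
The change relocates `from transformers.activations import get_activation` from line 736, where it sat between a flash-attention helper and the `FalconMLP` class definition, up to the module's top-level import block, leaving a blank line in its place. For context, here is a minimal sketch of how `get_activation` is typically consumed inside an MLP block such as `FalconMLP`; the class and parameter names below (`MLPSketch`, `hidden_size`, `ffn_hidden_size`, `activation`) are illustrative assumptions, not code taken from `modeling_falcon.py`:

import torch
from torch import nn

# Third-party import kept at module top, as this commit now does for modeling_falcon.py.
from transformers.activations import get_activation


class MLPSketch(nn.Module):
    """Illustrative MLP block: expand, apply a configurable activation, project back."""

    def __init__(self, hidden_size: int, ffn_hidden_size: int, activation: str = "gelu"):
        super().__init__()
        self.dense_h_to_4h = nn.Linear(hidden_size, ffn_hidden_size)
        # get_activation resolves a string name (e.g. "gelu", "relu") to an nn.Module.
        self.act = get_activation(activation)
        self.dense_4h_to_h = nn.Linear(ffn_hidden_size, hidden_size)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.dense_4h_to_h(self.act(self.dense_h_to_4h(x)))


mlp = MLPSketch(hidden_size=8, ffn_hidden_size=32)
print(mlp(torch.randn(2, 4, 8)).shape)  # torch.Size([2, 4, 8])

Keeping such imports in the top-level import block, rather than interleaved with class definitions mid-file, follows PEP 8 and makes the module's dependencies visible at a glance.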
