diff --git a/src/transformers/activations.py b/src/transformers/activations.py
index 08a30b3dd88c..1312bede777e 100644
--- a/src/transformers/activations.py
+++ b/src/transformers/activations.py
@@ -345,8 +345,8 @@ def forward(self, input: Tensor) -> Tensor:
 def get_activation(activation_string):
     if activation_string in ACT2FN:
         return ACT2FN[activation_string]
-    else:
-        raise KeyError(f"function {activation_string} not found in ACT2FN mapping {list(ACT2FN.keys())}")
+
+    raise KeyError(f"function {activation_string} not found in ACT2FN mapping {list(ACT2FN.keys())}")
 
 
 # For backwards compatibility with: from activations import gelu_python