diff --git a/tinygrad/tensor.py b/tinygrad/tensor.py
index fcc665df074e..682feac9e3bf 100644
--- a/tinygrad/tensor.py
+++ b/tinygrad/tensor.py
@@ -1160,7 +1160,7 @@ def reciprocal(self): return F.Reciprocal.apply(self.cast(least_upper_float(self
   def elu(self, alpha=1.0): return self.relu() - alpha*(1-self.exp()).relu()
   def celu(self, alpha=1.0): return self.maximum(0) + (alpha * ((self / alpha).exp() - 1)).minimum(0)
   def swish(self): return self * self.sigmoid()
-  def silu(self): return self.swish() # The SiLU function is also known as the swish F.
+  def silu(self): return self.swish() # The SiLU function is also known as the swish function.
   def relu6(self): return self.relu() - (self-6).relu()
   def hardswish(self): return self * (self+3).relu6() * (1/6)
   def tanh(self): return 2.0 * ((2.0 * self).sigmoid()) - 1.0
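
As a side note on the comment being corrected: SiLU and swish name the same activation, x * sigmoid(x). A minimal sanity check, not part of the patch, assuming a standard tinygrad install where Tensor exposes .silu(), .swish(), .sigmoid(), and .numpy():

import numpy as np
from tinygrad.tensor import Tensor

# Hypothetical check (not in the patch): SiLU(x) = x * sigmoid(x), so .silu(),
# .swish(), and the explicit formula should all agree elementwise.
x = Tensor([-3.0, -1.0, 0.0, 1.0, 3.0])
assert np.allclose(x.silu().numpy(), x.swish().numpy())
assert np.allclose(x.silu().numpy(), (x * x.sigmoid()).numpy())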