redo simpler abs and sign (tinygrad#4611)
moved Sign logic to function.py, and backward always returns 0 to match torch.
rewrote abs as `self * self.sign()`, so its backward also matches torch.
chenyuxyz authored May 15, 2024
1 parent eb96893 commit 2119e04
Showing 3 changed files with 16 additions and 2 deletions.
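
For reference, a minimal PyTorch sketch (assuming torch is installed) of the gradient behavior the commit message describes: torch defines sign's gradient as 0 everywhere, so by the product rule the gradient of abs(x) = x * sign(x) is sign(x) + x * 0 = sign(x), which is 0 at x == 0.

import torch

# torch.sign's gradient is 0 everywhere
x = torch.tensor([-1.0, 0.0, 1.0], requires_grad=True)
x.sign().sum().backward()
print(x.grad)  # tensor([0., 0., 0.])

# torch.abs's gradient is sign(x): 0 at x == 0
y = torch.tensor([-1.0, 0.0, 1.0], requires_grad=True)
y.abs().sum().backward()
print(y.grad)  # tensor([-1., 0., 1.])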
7 changes: 7 additions & 0 deletions test/test_ops.py
@@ -487,6 +487,8 @@ def test_celu(self):
   def test_abs(self):
     helper_test_op([(45,65)], torch.abs, Tensor.abs)
     helper_test_op([()], torch.abs, Tensor.abs)
+  def test_abs_exact(self):
+    helper_test_op(None, torch.abs, Tensor.abs, vals=[[-1.,0,1]])
 
   def test_log(self):
     helper_test_op([(45,65)], torch.log, Tensor.log)
@@ -505,9 +507,14 @@ def test_exp2(self):
   def test_sign(self):
     helper_test_op([(45,65)], torch.sign, Tensor.sign)
     helper_test_op([()], torch.sign, Tensor.sign)
+  def test_sign_exact(self):
+    helper_test_op(None, torch.sign, Tensor.sign, vals=[[-1.,0,1]])
 
   def test_softsign(self):
     helper_test_op([(45,65)], torch.nn.functional.softsign, Tensor.softsign)
     helper_test_op([()], torch.nn.functional.softsign, Tensor.softsign)
+  def test_softsign_exact(self):
+    helper_test_op(None, torch.nn.functional.softsign, Tensor.softsign, vals=[[-1.,0,1]])
 
   def test_sigmoid(self):
     helper_test_op([(45,65)], torch.sigmoid, Tensor.sigmoid)
7 changes: 7 additions & 0 deletions tinygrad/function.py
@@ -86,6 +86,13 @@ def forward(self, x:LazyBuffer) -> LazyBuffer:
   def backward(self, grad_output:LazyBuffer) -> LazyBuffer:
     return self.ret.e(BinaryOps.MUL, self.ret.const(1).e(BinaryOps.SUB, self.ret)).e(BinaryOps.MUL, grad_output)
 
+class Sign(Function):
+  def forward(self, x:LazyBuffer) -> LazyBuffer:
+    return x.e(BinaryOps.CMPEQ, x.const(0)).e(TernaryOps.WHERE, x.const(0),
+               x.e(BinaryOps.CMPLT, x.const(0)).e(TernaryOps.WHERE, x.const(-1), x.const(1)))
+  # backward always returns 0 to match torch
+  def backward(self, grad_output:LazyBuffer) -> LazyBuffer: return grad_output.const(0)
+
 # ************* binary ops *************
 
 class Less(Function):
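
For readability, the nested WHERE in Sign.forward computes where(x == 0, 0, where(x < 0, -1, 1)). A NumPy sketch of the same element-wise logic (NumPy stands in for LazyBuffer here purely for illustration; the commit itself does not use it):

import numpy as np

def sign(x: np.ndarray) -> np.ndarray:
  # 0 where x == 0, otherwise -1 for negatives and 1 for positives
  return np.where(x == 0, 0.0, np.where(x < 0, -1.0, 1.0))

print(sign(np.array([-3.0, 0.0, 5.0])))  # [-1.  0.  1.]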
4 changes: 2 additions & 2 deletions tinygrad/tensor.py
@@ -1151,8 +1151,8 @@ def round(self: Tensor) -> Tensor:
   def lerp(self, end: Tensor, weight: Union[Tensor, float]) -> Tensor: return self + (end - self) * weight
   def square(self): return self*self
   def clip(self, min_, max_): return self.maximum(min_).minimum(max_)
-  def abs(self): return self.relu() + (-self).relu()
-  def sign(self): return ((self.float()) / (self.float().abs() + 1e-12)).cast(self.dtype)
+  def sign(self): return F.Sign.apply(self)
+  def abs(self): return self * self.sign()
   def reciprocal(self): return F.Reciprocal.apply(self.cast(least_upper_float(self.dtype)))
 
   # ***** activation functions (unary) *****
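
A quick end-to-end check of the new behavior (a sketch, assuming a tinygrad checkout at or after this commit):

from tinygrad import Tensor

t = Tensor([-1.0, 0.0, 1.0], requires_grad=True)
t.abs().sum().backward()
print(t.grad.numpy())  # expected [-1.  0.  1.]: grad of abs is sign(x), so 0 at x == 0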
