Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

update pow and rpow #36

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 34 additions & 8 deletions micrograd/engine.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import math

class Value:
""" stores a single scalar value and its gradient """
Expand Down Expand Up @@ -33,14 +34,22 @@ def _backward():
return out

def __pow__(self, other):
    """Return self ** other as a new Value node.

    Accepts an int/float exponent or another Value. When the exponent is
    a plain number it is wrapped in a Value so the graph bookkeeping is
    uniform, but no gradient flows into that wrapper.
    """
    exponent_needs_grad = isinstance(other, Value)
    if not exponent_needs_grad:
        other = Value(other)

    result = Value(self.data ** other.data, (self, other), '**')

    def _backward():
        # d(a**b)/da = b * a**(b-1); chain rule multiplies by result.grad.
        self.grad += other.data * self.data ** (other.data - 1) * result.grad
        if exponent_needs_grad:
            # d(a**b)/db = a**b * ln(a); must also be scaled by result.grad
            # (the PR version dropped this factor and rounded the gradient,
            # which corrupts backprop). NOTE(review): math.log requires
            # self.data > 0 — confirm callers never raise a non-positive
            # base to a Value power.
            other.grad += (self.data ** other.data) * math.log(self.data) * result.grad

    result._backward = _backward

    return result

def relu(self):
out = Value(0 if self.data < 0 else self.data, (self,), 'ReLU')
Expand Down Expand Up @@ -90,5 +99,22 @@ def __truediv__(self, other): # self / other
def __rtruediv__(self, other):
    """Reflected division: evaluate other / self via other * self**-1."""
    reciprocal = self ** -1
    return other * reciprocal

def __rpow__(self, other):
    """Return other ** self (reflected pow) as a new Value node.

    Python dispatches here only when ``other`` is NOT a Value (a Value
    base is handled by Value.__pow__), so ``other`` is always a plain
    number: it gets wrapped for graph bookkeeping but receives no
    gradient — the ``other_grad`` branch in the PR version was dead code
    and, besides, applied the wrong derivative formula.
    """
    other = Value(other)

    result = Value(other.data ** self.data, (self, other), '**')

    def _backward():
        # d(b**a)/da = b**a * ln(b); accumulate with += (the PR version
        # overwrote self.grad with '='), scale by result.grad (chain
        # rule), and do not round — rounding corrupts gradients.
        # NOTE(review): math.log requires other.data > 0 — confirm.
        self.grad += (other.data ** self.data) * math.log(other.data) * result.grad

    result._backward = _backward

    return result

def __repr__(self):
    """Debug representation showing the node's value and gradient."""
    # The diff left a duplicated (unreachable) return line; keep one.
    return f"Value(data={self.data}, grad={self.grad})"