Skip to content

Commit

Permalink
fix autodiff
Browse files Browse the repository at this point in the history
  • Loading branch information
Tokisakix committed Apr 7, 2024
1 parent 714d624 commit db4d40a
Show file tree
Hide file tree
Showing 5 changed files with 74 additions and 19 deletions.
16 changes: 12 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,24 @@

**Terox is an open source tiny Deep Learning System based on Python, Cython and CUDA.**

Terox is a tiny Python package that provides some features:
- [x] Support automatic differentiation.
- [ ] Provides convenient tensor calculation.
- [ ] Control the parameters and the model.
- [ ] Provides common computing functions for deep learning.
- [ ] Provides common deep learning components.
- [ ] Provides deep learning model optimizer.
- [ ] Accelerate computing on CPU and GPU.
- [ ] Support distributed computing.

---

## Setup

Terox requires **Python 3.8** or higher. To check your version of Python, run either:

```Shell
python --version # expect python version >= 3.8
python3 --version
```

The next step is to install packages. There are several packages used throughout Terox, and you can install them in your environment by running:
Expand All @@ -31,5 +40,4 @@ Make sure that everything is installed by running python and then checking. If y

```Python
import terox
print(terox.__version__) # expect output: "Terox v0.1 by Tokisakix."
```
15 changes: 12 additions & 3 deletions README_cn.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,24 @@

**Terox 是一个基于 Python、Cython 和 CUDA 的开源微型深度学习系统。**

Terox 是一个很精简的 Python 包,它提供了一些特性:
- [x] 支持自动微分。
- [ ] 提供方便的张量计算。
- [ ] 便捷控制参数和模型。
- [ ] 提供深度学习常用的计算函数。
- [ ] 提供常用的深度学习组件。
- [ ] 提供深度学习模型优化器。
- [ ] 提高在 CPU 和 GPU 上的计算速度。
- [ ] 支持分布式计算。

---

## 设置

Terox 要求 **Python 3.8** 或更高版本。要检查你的 Python 的版本,请运行:

```Shell
python --version # 期望 python 版本 >= 3.8
python3 --version
```

下一步是安装第三方软件包。在 Terox 项目中使用了几个包,您可以通过运行以下命令将它们安装到您的环境中:
Expand All @@ -31,5 +40,5 @@ python -m pip install -Ue .

```Python
import terox
print(terox.__version__) # 期望输出: "Terox v0.1 by Tokisakix."
```
8 changes: 7 additions & 1 deletion terox/autodiff/scalar.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,20 @@
from typing import Optional
from typing import Iterable, Optional

from .variable import Variable, VarHistory
from .scalar_opts import ScalarOptsBackend

scalar_count:int = 0

class Scalar(Variable):

_id: int
_item: float

def __init__(self, _item:float=None, _history:Optional[VarHistory]=None, _gradient:Optional["Scalar"]=None, _backend:ScalarOptsBackend=ScalarOptsBackend()) -> None:
super().__init__(_history, _gradient)
global scalar_count
self._id = scalar_count
scalar_count += 1
if _item == None:
_item = 0.0
self._item = float(_item)
Expand Down
46 changes: 39 additions & 7 deletions terox/autodiff/variable.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ def __init__(self, _func:VarFunction, _args:Iterable["Variable"]) -> None:

class Variable():

_id:int
_history: Optional[VarHistory]
_gradient: Optional[object]

Expand Down Expand Up @@ -55,12 +56,11 @@ def _chainRule(self) -> None:
arg._gradient += grad
return

def backward(self, first=True):
if first:
self._oneGrad()
self._chainRule()
for parent in self._parent():
parent.backward(first=False)
def backward(self):
self._oneGrad()
TopoList = getTopoList(self)
for var in TopoList:
var._chainRule()
return

def new(self) -> "Variable":
Expand All @@ -82,4 +82,36 @@ def detach(self) -> "Variable":
raise NotImplementedError

def item(self) -> object:
raise NotImplementedError
raise NotImplementedError

def _getTopoChain(var:"Variable") -> Iterable["Variable"]:
topoChainId = []
topoChainVar = []
for parent in var._parent():
topoChainId.append((var._id, parent._id))
topoChainVar.append(parent)
temp = _getTopoChain(parent)
topoChainId += temp[0]
topoChainVar += temp[1]
return topoChainId, topoChainVar

def getTopoList(var:"Variable") -> Iterable["Variable"]:
topoChainId, topoChainVar = _getTopoChain(var)
topoId2Var = {var._id:var}
topoId2Degree = {var._id:1}
for (_, temp_id), temp_var in zip(topoChainId, topoChainVar):
topoId2Var[temp_id] = temp_var
topoChainId = list(set(topoChainId))
for _, parent_id in topoChainId:
if not parent_id in topoId2Degree:
topoId2Degree[parent_id] = 0
topoId2Degree[parent_id] += 1
topoList, queue = [], [var]
while len(queue) > 0:
variable = queue[0]
queue += variable._parent()
topoId2Degree[variable._id] -= 1
if topoId2Degree[variable._id] == 0:
topoList.append(variable)
del queue[0]
return topoList
8 changes: 4 additions & 4 deletions test/autodiff/test_backward.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,15 +152,15 @@ def test_tanh(a:float=-2.0) -> None:
return

@pytest.mark.test_scalar_overload
def test_add_backward() -> None:
def test_complex_backward() -> None:
A = Scalar(1.0, None, None)
B = Scalar(2.0, None, None)
C = A + B
D = A + C
E = (D - C) * (D - C)
E.backward()
assert A._gradient == 5.0
assert B._gradient == 2.0
assert C._gradient == 1.0
assert A._gradient == 2.0
assert B._gradient == 0.0
assert C._gradient == 0.0
assert D._gradient == 2.0
return

0 comments on commit db4d40a

Please sign in to comment.