backward pass working on long graph
joey00072 committed Aug 20, 2023
1 parent c5a9452 commit f1fb4c1
Showing 9 changed files with 107 additions and 40 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -82,6 +82,8 @@ target/
profile_default/
ipython_config.py

dev.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
28 changes: 21 additions & 7 deletions README.md
@@ -4,10 +4,20 @@

Newest ML framework that you probably don't need, <br>
this is really just an autograd engine backed by numpy<br><br>
> `tinytorch.py` shall always remain under 1000 lines. If not, we will revert the commit.
#### why this exists
Bcs I was bored


$$
f(x) = x^3 + x
$$
<p align="center">
<img src="images/image-1.png" alt="Alt text" width="70%">
</p>

<p align="center">
<img src="images/image.png" alt="Alt text">
</p>
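
For reference, the derivative the engine has to recover here is

$$
f'(x) = 3x^2 + 1
$$

so at $x = 1.2$ (the input used in the demo below) the expected grad is $3(1.2)^2 + 1 = 5.32$.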

#### Visualization
If you want to see your computation graph, run `visulize.py`.
@@ -17,15 +27,19 @@ requirements
pip install graphviz
sudo apt-get install -y graphviz # IDK what to do for Windows, I use WSL
```
<p align="center">
<img src="images/image-2.png" width="50%" >
</p>


### DEV BLOG
Part 1: [pythonstuff/build-tensors](https://www.pythonstuff.com/blog/buinging%20own%20autograd%20engine%20tinytorch)

#### powerlevel
1.0 - karpathy [micrograd](https://github.com/karpathy/micrograd) (really simple, not much you can do with it) <br>
3.14 - [tinytorch](https://github.com/joey00072/nanograd) (simple, and you can do a lot of things with it) <= ❤️ <br>
69 - [tinygrad](https://github.com/tinygrad/tinygrad) (no longer simple, but you can do a lot more)<br>
∞ - [pytorch](https://github.com/pytorch/pytorch) (GOAT library that makes GPUs go brrr)<br>



Binary file added images/image-1.png
Binary file added images/image-2.png
Binary file added images/image.png
39 changes: 19 additions & 20 deletions tinytorch.py
@@ -24,6 +24,9 @@ def __repr__(self):
        return f"tensor({self.data})"

    def backward(self,grad=None):
        if self._ctx is None:
            return

        if grad is None:
            grad = Tensor([1.])
        self.grad = grad
@@ -34,8 +37,11 @@ def backward(self,grad=None):
        grads = op.backward(self._ctx,grad)

        for tensor,grad in zip(child_nodes,grads):
            tensor.grad = grad

            if tensor.grad is None:
                tensor.grad = Tensor(np.zeros_like(self.data))
            tensor.grad += grad
            tensor.backward(grad)


class Function:
    def __init__(self, op, *args):
@@ -51,7 +57,7 @@ def forward(x, y):
    @staticmethod
    def backward(ctx, grad):
        x, y = ctx.args
        return Tensor([1]), Tensor([1])
        return Tensor([1])*grad, Tensor([1])*grad


class Mul:
@@ -62,28 +68,21 @@ def forward(x, y):
    @staticmethod
    def backward(ctx, grad):
        x, y = ctx.args
        return Tensor(y.data), Tensor(x.data) # dz/dx, dz/dy
        return Tensor(y.data)*grad, Tensor(x.data)*grad # dz/dx, dz/dy
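
The `*grad` change in both `backward` methods is the chain rule: each op now multiplies its local derivative by the incoming gradient instead of returning the local derivative alone. For `Mul`, with $z = x \cdot y$:

$$
\frac{\partial L}{\partial x} = y \cdot \frac{\partial L}{\partial z}, \qquad \frac{\partial L}{\partial y} = x \cdot \frac{\partial L}{\partial z}
$$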




if __name__ == "__main__":
    x = Tensor([8])
    y = Tensor([5])

    print("Add")
    z = x + y
    print(z)
    z.backward()
    print(f"x: {x} , grad {x.grad}")
    print(f"y: {y} , grad {y.grad}")
    print("="*100)
    def f(x):
        return x*x*x + x

    print("Mul")
    z = x * y
    print(z)
    z.backward()
    print(f"x: {x} , grad {x.grad}")
    print(f"y: {y} , grad {y.grad}")
    print("="*100)
    x = Tensor([1.2])

    z = f(x)
    z.backward()
    print(f"X: {x} grad: {x.grad}")
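
The other half of the fix is gradient accumulation in `backward`: `x` feeds into three ops in `x*x*x + x` (twice into the first `Mul`), so four gradient contributions must be summed; the old `tensor.grad = grad` kept only the last one. A minimal sketch of the same bookkeeping in plain numpy (variable names here are illustrative, not tinytorch's API):

```python
import numpy as np

# f(x) = x*x*x + x at x = 1.2, unrolled by hand.
x = np.array([1.2])
a = x * x                  # a = x^2
b = a * x                  # b = x^3
y = b + x                  # y = x^3 + x

grad_y = np.array([1.0])   # seed, like Tensor([1.]) in backward()
grad_x = np.zeros_like(x)  # start at zero and accumulate

grad_b = grad_y * 1.0      # Add: dy/db = 1
grad_x += grad_y * 1.0     # Add: dy/dx = 1        (path 1)
grad_a = grad_b * x        # Mul: db/da = x
grad_x += grad_b * a       # Mul: db/dx = a = x^2  (path 2)
grad_x += grad_a * x       # Mul: da/dx, left x    (path 3)
grad_x += grad_a * x       # Mul: da/dx, right x   (path 4)

print(grad_x)              # [5.32] == 3*1.2**2 + 1
```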



26 changes: 19 additions & 7 deletions vis/Digraph.gv
@@ -1,9 +1,21 @@
digraph {
139935156534576 [label="Tensor: [13] "]
139935156533664 [label="Context: Add"]
139935156534576 -> 139935156533664
139935156533664 -> 139935160270656
139935160270656 [label="Tensor: [8] "]
139935156533664 -> 139935156533232
139935156533232 [label="Tensor: [5] "]
140589163168192 [label="Tensor: ([2.928] grad: [1.]) "]
140589163168528 [label="Context: Add"]
140589163168192 -> 140589163168528
140589163168528 -> 140589163168480
140589163168480 [label="Tensor: ([1.728] grad: [1.]) "]
140589163168912 [label="Context: Mul"]
140589163168480 -> 140589163168912
140589163168912 -> 140589163167856
140589163167856 [label="Tensor: ([1.44] grad: [1.2]) "]
140589163167952 [label="Context: Mul"]
140589163167856 -> 140589163167952
140589163167952 -> 140589555449664
140589555449664 [label="Tensor: ([1.2] grad: [5.32]) "]
140589163167952 -> 140589555449664
140589555449664 [label="Tensor: ([1.2] grad: [5.32]) "]
140589163168912 -> 140589555449664
140589555449664 [label="Tensor: ([1.2] grad: [5.32]) "]
140589163168528 -> 140589555449664
140589555449664 [label="Tensor: ([1.2] grad: [5.32]) "]
}
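
Sanity-checking the new labels: with $f(x) = x^3 + x$ at $x = 1.2$,

$$
f(1.2) = 1.728 + 1.2 = 2.928, \qquad f'(1.2) = 3(1.2)^2 + 1 = 5.32
$$

which matches the root tensor `[2.928]` and the leaf grad `[5.32]`; the intermediate `[1.44]` node ($x^2$) carries grad `[1.2]` because $\partial(a \cdot x)/\partial a = x = 1.2$. The repeated `140589555449664` entries are the same leaf `x` emitted once per incoming edge by `visit_nodes`.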
Binary file modified vis/Digraph.gv.png
52 changes: 46 additions & 6 deletions visulize.py
@@ -1,4 +1,5 @@
import graphviz
import matplotlib.pyplot as plt
from tinytorch import *

G = graphviz.Digraph(format="png")
@@ -7,7 +8,7 @@

def visit_nodes(G: graphviz.Digraph, node: Tensor):
    uid = str(id(node))
    G.node(uid, f"Tensor: {str(node.data) } ")
    G.node(uid, f"Tensor: ({str(node.data) } { 'grad: '+str(node.grad.data) if node.grad is not None else ''}) ")
    if node._ctx:
        ctx_uid = str(id(node._ctx))
        G.node(ctx_uid, f"Context: {str(node._ctx.op.__name__)}")
@@ -17,12 +18,51 @@ def visit_nodes(G: graphviz.Digraph, node: Tensor):
        visit_nodes(G, child)




def f(x):
    return x * x * x + x

# Defining the function to plot the given function and its derivative using the custom Tensor class
def plot_function_and_derivative():

    # Values for x ranging from -3 to 3
    x_values_custom = np.linspace(-3, 3, 100)
    y_values_custom = []
    derivative_values_custom = []

    # Using the custom Tensor class to calculate the function and its derivative for each x value
    for x_val in x_values_custom:
        x_tensor = Tensor([x_val])
        y_tensor = f(x_tensor)
        y_tensor.backward()
        y_values_custom.append(y_tensor.data[0])
        derivative_values_custom.append(x_tensor.grad.data[0])

    # Plotting the original function and its derivative using the custom implementation
    plt.plot(x_values_custom, y_values_custom, label="f(x) = x^3 + x (custom)")
    plt.plot(x_values_custom, derivative_values_custom, label="f'(x) = 3x^2 + 1 (custom)")

    plt.xlabel('x')
    plt.ylabel('y')
    plt.title('Plot of the Function and its Derivative (Custom Implementation)')
    plt.legend()
    plt.grid(True)
    plt.show()




if __name__ == "__main__":
    x = Tensor([8])
    y = Tensor([5])
    z = x + y
    plot_function_and_derivative()

    x = Tensor([1.2])
    z = f(x)
    z.backward()
    visit_nodes(G, z)
    G.render(directory="vis", view=True)
    print(z)
    print(f"Z:{x} grad:{x.grad}")

    print(len(G.body))
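
As a quick numerical check of the plot (a sketch reusing the `Tensor` and `f` defined above; the sample points are my choice, not part of the repo):

```python
import numpy as np
from tinytorch import Tensor

def f(x):
    return x * x * x + x

# Compare autograd's gradient with the analytic derivative f'(x) = 3x^2 + 1
# at a handful of points, mirroring what the matplotlib plot shows visually.
for x_val in [-2.0, -0.5, 0.0, 1.2, 3.0]:
    x = Tensor([x_val])
    y = f(x)
    y.backward()
    analytic = 3 * x_val ** 2 + 1
    assert np.allclose(x.grad.data, analytic), (x_val, x.grad.data, analytic)

print("autograd gradient matches 3x^2 + 1 at all sample points")
```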


