mini_autograd.py
from __future__ import annotations
import math
class Value:
    def __init__(self, data: float, requires_grad: bool = False,
                 prev_values: list[Value] | None = None,
                 name: str | None = None, op: str | None = None) -> None:
        self.data = data
        self.requires_grad = requires_grad
        self.name = name
        self.op = op
        if self.requires_grad:
            self.grad = 0.0
        self._backward = lambda: None
        # Avoid a mutable default argument: each node gets its own list.
        self.prev_values = prev_values if prev_values is not None else []

    def __repr__(self) -> str:
        return f"data:{'%9.4f' % self.data}, grad:{('%9.4f' % self.grad) if self.requires_grad else '-'}, name:{self.name or ''}, op:{self.op or ''}"
    def __add__(self, other: Value | int | float) -> Value:
        other = other if isinstance(other, Value) else Value(other)
        if other.data == 0 and not other.requires_grad:
            return self  # adding a constant zero is a no-op; skip the graph node
        out = Value(self.data + other.data,
                    requires_grad=(self.requires_grad or other.requires_grad),
                    prev_values=[self, other], op="+")
        def _backward():
            # d(a + b)/da = d(a + b)/db = 1, so the upstream gradient passes through.
            if self.requires_grad: self.grad += out.grad
            if other.requires_grad: other.grad += out.grad
        out._backward = _backward
        return out

    def __radd__(self, other: int | float) -> Value:  # other + self
        return self + other
    def __neg__(self) -> Value:
        return self * -1

    def __sub__(self, other: Value | int | float) -> Value:
        return self + (-other)

    def __rsub__(self, other) -> Value:  # other - self
        return -(self - other)

    def __truediv__(self, other) -> Value:  # self / other
        return self * (other ** -1)

    def __rtruediv__(self, other) -> Value:  # other / self
        return other * (self ** -1)
    def __mul__(self, other: Value | int | float) -> Value:
        other = other if isinstance(other, Value) else Value(other)
        out = Value(self.data * other.data,
                    requires_grad=(self.requires_grad or other.requires_grad),
                    prev_values=[self, other], op="*")
        def _backward():
            # d(a * b)/da = b and d(a * b)/db = a.
            if self.requires_grad: self.grad += out.grad * other.data
            if other.requires_grad: other.grad += out.grad * self.data
        out._backward = _backward
        return out

    def __rmul__(self, other: int | float) -> Value:  # other * self
        return self * other
    def __pow__(self, other: int | float | Value) -> Value:
        other = other if isinstance(other, Value) else Value(other)
        out = Value(self.data ** other.data,
                    requires_grad=(self.requires_grad or other.requires_grad),
                    prev_values=[self, other], op="^")
        def _backward():
            # d(a^b)/da = b * a^(b-1); d(a^b)/db = ln(a) * a^b (needs a > 0).
            if self.requires_grad: self.grad += out.grad * other.data * (self.data ** (other.data - 1))
            if other.requires_grad: other.grad += out.grad * math.log(self.data) * (self.data ** other.data)
        out._backward = _backward
        return out
    def log(self) -> Value:
        out = Value(math.log(self.data), requires_grad=self.requires_grad, prev_values=[self], op="log")
        def _backward():
            # d(ln a)/da = 1 / a
            if self.requires_grad: self.grad += out.grad * (1.0 / self.data)
        out._backward = _backward
        return out

    def relu(self) -> Value:
        out = Value(self.data if self.data > 0 else 0, self.requires_grad, [self], op="relu")
        def _backward():
            # The gradient flows through only where the input was positive.
            if self.requires_grad: self.grad += (out.grad if self.data > 0 else 0)
        out._backward = _backward
        return out

    def sigmoid(self) -> Value:
        # sigmoid(x) = 1 / (1 + e^(-x)), built from the primitives above.
        return 1.0 / (1.0 + (Value(math.e) ** (-self)))
    def backward(self):
        if not self.requires_grad:
            return
        self.grad = 1.0  # seed: d(self)/d(self) = 1
        # Topologically order the graph via depth-first search, then apply the
        # chain rule from the output back toward the leaves.
        all_values: list[Value] = []
        visited: set[Value] = set()
        def depth_first(cur: Value):
            if cur in visited:
                return
            visited.add(cur)
            for n in cur.prev_values:
                depth_first(n)
            all_values.append(cur)
        depth_first(self)
        for value in reversed(all_values):
            if value.requires_grad:
                value._backward()
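
# Usage sketch (illustrative, not part of the original file; the names x, y, f
# are arbitrary): build f(x, y) = (x*y + y)**2 and check the gradients by hand.
if __name__ == "__main__":
    x = Value(2.0, requires_grad=True, name="x")
    y = Value(3.0, requires_grad=True, name="y")
    f = (x * y + y) ** 2  # f = (6 + 3)^2 = 81
    f.backward()
    # df/dx = 2*(x*y + y)*y       = 2 * 9 * 3 = 54
    # df/dy = 2*(x*y + y)*(x + 1) = 2 * 9 * 3 = 54
    print(f)  # data:  81.0000, grad:   1.0000
    print(x)  # x.grad == 54.0
    print(y)  # y.grad == 54.0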