utils.py
"""Some auxiliary files used for honor track numpy assignment"""
import numpy as np
from random import randrange
def eval_numerical_gradient(f, x, verbose=False, h=0.00001):
"""Evaluates gradient df/dx via finite differences:
df/dx ~ (f(x+h) - f(x-h)) / 2h
Adopted from https://github.com/ddtm/dl-course/ (our ysda course).
"""
fx = f(x) # evaluate function value at original point
grad = np.zeros_like(x)
# iterate over all indexes in x
it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
while not it.finished:
# evaluate function at x+h
ix = it.multi_index
oldval = x[ix]
x[ix] = oldval + h # increment by h
fxph = f(x) # evalute f(x + h)
x[ix] = oldval - h
fxmh = f(x) # evaluate f(x - h)
x[ix] = oldval # restore
# compute the partial derivative with centered formula
grad[ix] = (fxph - fxmh) / (2 * h) # the slope
if verbose:
print (ix, grad[ix])
it.iternext() # step to next dimension
return grad
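

# Usage sketch (not part of the original assignment file): checking an analytic
# gradient against eval_numerical_gradient for a simple quadratic loss. The names
# below (quadratic_loss, A, W) are hypothetical and chosen only for illustration.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    A = rng.standard_normal((4, 4))
    W = rng.standard_normal((4, 4))

    def quadratic_loss(W):
        # f(W) = 0.5 * ||A @ W||_F^2, whose analytic gradient is A.T @ (A @ W)
        return 0.5 * np.sum((A @ W) ** 2)

    numerical = eval_numerical_gradient(quadratic_loss, W)
    analytic = A.T @ (A @ W)

    # with the centered formula and h = 1e-5, the two should agree very closely
    max_abs_diff = np.abs(numerical - analytic).max()
    rel_error = max_abs_diff / max(np.abs(numerical).max(), np.abs(analytic).max())
    print("max abs difference:", max_abs_diff)
    print("relative error:", rel_error)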