q2_gradcheck.py
#!/usr/bin/env python
import numpy as np
import random
from q2_sigmoid import sigmoid, sigmoid_grad
# First implement a gradient checker by filling in the following functions

def gradcheck_naive(f, x):
    """ Gradient check for a function f.

    Arguments:
    f -- a function that takes a single argument and outputs the
         cost and its gradients
    x -- the point (numpy array) to check the gradient at
    """

    rndstate = random.getstate()
    random.setstate(rndstate)
    fx, grad = f(x)  # Evaluate function value at original point
    h = 1e-4         # Do not change this!

    # Iterate over all indexes in x
    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        ix = it.multi_index

        # Try modifying x[ix] with h defined above to compute
        # numerical gradients. Make sure you call random.setstate(rndstate)
        # before calling f(x) each time. This will make it possible
        # to test cost functions with built-in randomness later.

        ### YOUR CODE HERE:
        x[ix] += h              # increment by h
        random.setstate(rndstate)
        fxh, _ = f(x)           # evaluate f(x + h)
        x[ix] -= 2 * h          # decrement by 2h to reach x - h
        random.setstate(rndstate)
        fxnh, _ = f(x)          # evaluate f(x - h)
        x[ix] += h              # restore to the original value (very important!)
        # Central difference: (f(x + h) - f(x - h)) / (2h) approximates
        # the partial derivative at ix with O(h^2) error.
        numgrad = (fxh - fxnh) / (2 * h)
        ### END YOUR CODE

        # Compare gradients
        reldiff = abs(numgrad - grad[ix]) / max(1, abs(numgrad), abs(grad[ix]))
        if reldiff > 1e-5:
            print("Gradient check failed.")
            print("First gradient error found at index %s" % str(ix))
            print("Your gradient: %f \t Numerical gradient: %f" % (
                grad[ix], numgrad))
            return

        it.iternext()  # Step to next dimension

    print("Gradient check passed!")

def sanity_check():
    """
    Some basic sanity checks.
    """
    quad = lambda x: (np.sum(x ** 2), x * 2)

    print("Running sanity checks...")
    gradcheck_naive(quad, np.array(123.456))      # scalar test
    gradcheck_naive(quad, np.random.randn(3,))    # 1-D test
    gradcheck_naive(quad, np.random.randn(4, 5))  # 2-D test
    print("")

def your_sanity_checks():
    """
    Use this space to add any additional sanity checks by running:
        python q2_gradcheck.py
    This function will not be called by the autograder, nor will
    your additional tests be graded.
    """
    print("Running your sanity checks...")
    ### YOUR CODE HERE
    # sigmoid_grad expects the *output* of sigmoid, so the gradient of
    # sum(sigmoid(x)) with respect to x is sigmoid_grad(sigmoid(x)).
    sigmoid_and_grad = lambda x: (np.sum(sigmoid(x)), sigmoid_grad(sigmoid(x)))
    gradcheck_naive(sigmoid_and_grad, np.array(1.23456))          # scalar test
    gradcheck_naive(sigmoid_and_grad, np.random.randn(3,))        # 1-D test
    gradcheck_naive(sigmoid_and_grad, np.random.randn(4, 5))      # 2-D test
    gradcheck_naive(sigmoid_and_grad, np.arange(-5.0, 5.0, 0.1))  # range test

    sincos_and_grad = lambda x: (np.sin(x) + np.cos(x), np.cos(x) - np.sin(x))
    gradcheck_naive(sincos_and_grad, np.array(1.0))
    print("")
    ### END YOUR CODE

if __name__ == "__main__":
    sanity_check()
    your_sanity_checks()