File tree Expand file tree Collapse file tree 1 file changed +27
-0
lines changed Expand file tree Collapse file tree 1 file changed +27
-0
lines changed Original file line number Diff line number Diff line change @@ -83,3 +83,30 @@ def loss(target, nn_out):
83
83
def softmax(x):
    """Return the softmax of x, normalized along axis 0.

    Subtracts the global max before exponentiating for numerical
    stability; the result is unchanged because the shift cancels
    in the ratio.
    """
    shifted = x - np.max(x)
    exps = np.exp(shifted)
    return exps / np.sum(exps, axis=0)
86
+
87
+
88
def numericalgradient(fun, w, e):
    """
    Provides a numerical estimate of the gradient of fun w.r.t. to parameters w,
    using the central finite-difference formula (f(w+e) - f(w-e)) / (2e) per
    dimension.

    :param fun: callable taking a weight vector; its return value's first
        element (e.g. ``(loss, ...)``) is used as the scalar loss
    :param w: 1-D parameter vector (numpy array); not modified
    :param e: epsilon, the perturbation step size
    :return: numerical gradient estimate (shape of w)
    """
    # get dimensionality
    d = len(w)
    # initialize numerical derivative
    dh = np.zeros(d)
    # go through dimensions
    for i in range(d):
        # copy the weight vector so the caller's w is never mutated
        # NOTE(review): assumes w has a float dtype — an integer array
        # would silently truncate the += e perturbation; confirm callers.
        nw = w.copy()
        # perturb dimension i upward
        nw[i] += e
        # loss at w[i] + e (only the first element of fun's return is used)
        l1 = fun(nw)[0]
        # perturb dimension i downward (net offset is now -e from original)
        nw[i] -= 2 * e
        # loss at w[i] - e
        l2 = fun(nw)[0]
        # the gradient is the central-difference slope of the loss
        dh[i] = (l1 - l2) / (2 * e)
    return dh
You can’t perform that action at this time.
0 commit comments