lib/node_modules/@stdlib/ml/online-sgd-regression/test/test.loss.huber.js
+3 −3
@@ -1,3 +1,4 @@
+/* eslint-disable no-underscore-dangle */
 'use strict';

 // MODULES //
@@ -27,7 +28,7 @@ tape( 'the sub-gradient of the squared-error loss times the learning rate is add
 	lambda = 0.0;

 	weights = new WeightVector( 3, false );
-	weights.add( [ 1.0, 2.0, 3.0 ] );
+	weights.add( [ 1.0, 2.0, 3.0 ] );
 	epsilon = 0.1;
 	eta = 0.02;

@@ -43,7 +44,6 @@ tape( 'the sub-gradient of the squared-error loss times the learning rate is add
 	t.end();
 });

-
 tape( 'the sub-gradient of the linear loss times the learning rate is added to the weights for absolute errors greater or equal than epsilon (no regularization)', function test( t ) {
 	var expected;
 	var weights;
@@ -56,7 +56,7 @@ tape( 'the sub-gradient of the linear loss times the learning rate is added to t
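The tape tests touched by this diff exercise the two regimes of the Huber loss used by online SGD regression: for absolute errors smaller than epsilon the squared-error sub-gradient applies, and for absolute errors greater than or equal to epsilon the linear (absolute-error) sub-gradient applies, each scaled by the learning rate before being applied to the weight vector. The following is a minimal JavaScript sketch of that update, not the stdlib implementation; it assumes a plain array weight vector, and the names huberUpdate, x, y, eta, and epsilon are illustrative.

// A minimal sketch (assumption, not the stdlib implementation) of the
// Huber-loss sub-gradient step that the tests above exercise.
function huberUpdate( weights, x, y, eta, epsilon ) {
	var err;
	var g;
	var i;

	// Prediction error: <weights, x> - y...
	err = 0.0;
	for ( i = 0; i < x.length; i++ ) {
		err += weights[ i ] * x[ i ];
	}
	err -= y;

	// Sub-gradient of the Huber loss with respect to the prediction:
	if ( Math.abs( err ) < epsilon ) {
		// Squared-error regime (absolute error smaller than epsilon):
		g = err;
	} else {
		// Linear regime (absolute error greater than or equal to epsilon):
		g = ( err > 0.0 ) ? epsilon : -epsilon;
	}
	// Scale the sub-gradient by the learning rate and apply it to the weights:
	for ( i = 0; i < x.length; i++ ) {
		weights[ i ] -= eta * g * x[ i ];
	}
	return weights;
}

Sign conventions differ between implementations; the test titles describe the scaled sub-gradient as being "added to the weights", which is the same step up to the sign attached to the error term.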