-	t.deepEqual( weights._data, expected, 'weights are correctly updated' );
+	t.deepEqual( weights._data, expected, 'weights are correctly updated' ); // eslint-disable-line no-underscore-dangle
 
 	t.end();
 });
 
 tape( 'the sub-gradient of the linear loss times the learning rate is added to the weights for absolute errors greater or equal than epsilon (no regularization)', function test( t ) {
+	/* eslint-disable no-underscore-dangle */
 	var expected;
 	var weights;
 	var epsilon;
@@ -54,7 +55,7 @@ tape( 'the sub-gradient of the linear loss times the learning rate is added to t
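For context, the two suppression styles appearing in this diff differ in scope: `// eslint-disable-line no-underscore-dangle` silences the rule for that single line only, while a `/* eslint-disable no-underscore-dangle */` block comment disables the rule from that point until a matching `/* eslint-enable */` comment (or the end of the file). A minimal sketch of both forms, assuming a tape-style test; the object literal standing in for the `weights` ndarray is hypothetical:

var tape = require( 'tape' );

tape( 'per-line suppression', function test( t ) {
	var weights = { '_data': [ 1.0, 2.0 ] }; // hypothetical stand-in for an ndarray
	t.deepEqual( weights._data, [ 1.0, 2.0 ], 'weights are correctly updated' ); // eslint-disable-line no-underscore-dangle
	t.end();
});

tape( 'broader suppression', function test( t ) {
	/* eslint-disable no-underscore-dangle */
	var weights = { '_data': [ 1.0, 2.0 ] }; // hypothetical stand-in for an ndarray

	// The rule is off from the comment above until re-enabled (or end-of-file):
	t.deepEqual( weights._data, [ 1.0, 2.0 ], 'weights are correctly updated' );

	/* eslint-enable no-underscore-dangle */
	t.end();
});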
lib/node_modules/@stdlib/ml/online-sgd-regression/test/test.loss.huber.js (+2 -1)
@@ -40,11 +40,12 @@ tape( 'the sub-gradient of the squared-error loss times the learning rate is add
 
 	huberLoss( weights, x, y, eta, lambda, epsilon );
 
-	t.deepEqual( weights._data, expected, 'weights are correctly updated' );
+	t.deepEqual( weights._data, expected, 'weights are correctly updated' ); // eslint-disable-line no-underscore-dangle
 	t.end();
 });
 
 tape( 'the sub-gradient of the linear loss times the learning rate is added to the weights for absolute errors greater or equal than epsilon (no regularization)', function test( t ) {
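The two test names above describe the update being asserted: when the absolute prediction error is below epsilon, the squared-error sub-gradient times the learning rate is added to the weights; at or above epsilon, the linear (clipped) sub-gradient is used instead. A rough sketch of that update, assuming plain arrays and omitting the `lambda` regularization term (the tests quoted here use no regularization); the `huberUpdate` function is illustrative only and is not the package's `huberLoss` implementation:

// Illustrative only; not the package's `huberLoss` implementation.
function huberUpdate( weights, x, y, eta, epsilon ) {
	var err;
	var i;

	// Prediction error: e = y - w'x
	err = y;
	for ( i = 0; i < weights.length; i++ ) {
		err -= weights[ i ] * x[ i ];
	}
	for ( i = 0; i < weights.length; i++ ) {
		if ( Math.abs( err ) < epsilon ) {
			// Squared-error branch: w += eta * e * x
			weights[ i ] += eta * err * x[ i ];
		} else {
			// Linear branch (|e| >= epsilon): gradient magnitude clipped at epsilon
			weights[ i ] += eta * epsilon * ( ( err > 0.0 ) ? 1.0 : -1.0 ) * x[ i ];
		}
	}
	return weights;
}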