from time import time
from sklearn.cluster import k_means
from elm import ELMClassifier, ELMRegressor, SimpleELMClassifier, SimpleELMRegressor
- from random_hidden_layer import SimpleRandomHiddenLayer, RBFRandomHiddenLayer
+ from random_layer import SimpleRandomLayer, RBFRandomLayer

# <codecell>

@@ -64,11 +64,10 @@ def res_dist(x, y, e, n_runs=100, random_state=None):
dgx_train, dgx_test, dgy_train, dgy_test = train_test_split(dgx, dgy, test_size=0.2)

diabetes = load_diabetes()
- #dbx, dby = stdsc.fit_transform(diabetes.data), stdsc.fit_transform(diabetes.target)
dbx, dby = stdsc.fit_transform(diabetes.data), diabetes.target
dbx_train, dbx_test, dby_train, dby_test = train_test_split(dbx, dby, test_size=0.2)

- mrx, mry = make_regression(n_samples=2000)
+ mrx, mry = make_regression(n_samples=2000, n_targets=2)
mrx_train, mrx_test, mry_train, mry_test = train_test_split(mrx, mry, test_size=0.2)

xtoy, ytoy = make_toy()
@@ -78,50 +77,63 @@ def res_dist(x, y, e, n_runs=100, random_state=None):

# <codecell>

+ # SimpleELMClassifier test
+ elmc = SimpleELMClassifier(n_hidden=500)
+ elmc.fit(dgx_train, dgy_train)
+ print elmc.score(dgx_train, dgy_train), elmc.score(dgx_test, dgy_test)
+
+ # <codecell>
+
+ # SimpleELMRegressor test
+ elmr = SimpleELMRegressor()
+ elmr.fit(xtoy_train, ytoy_train)
+ print elmr.score(xtoy_train, ytoy_train), elmr.score(xtoy_test, ytoy_test)
+ plot(xtoy, ytoy, xtoy, elmr.predict(xtoy))
+
+ # <codecell>
+
# RBF tests
- elmc = ELMClassifier(RBFRandomHiddenLayer(activation_func='gaussian'))
+ elmc = ELMClassifier(RBFRandomLayer(activation_func='gaussian'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

- elmc = ELMClassifier(RBFRandomHiddenLayer(activation_func='poly_spline', gamma=2))
+ elmc = ELMClassifier(RBFRandomLayer(activation_func='poly_spline', gamma=2))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

- elmc = ELMClassifier(RBFRandomHiddenLayer(activation_func='multiquadric'))
+ elmc = ELMClassifier(RBFRandomLayer(activation_func='multiquadric'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

# Simple tests
- elmc = ELMClassifier(SimpleRandomHiddenLayer(activation_func='sine'))
+ elmc = ELMClassifier(SimpleRandomLayer(activation_func='sine'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

- elmc = ELMClassifier(SimpleRandomHiddenLayer(activation_func='tanh'))
+ elmc = ELMClassifier(SimpleRandomLayer(activation_func='tanh'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

- elmc = ELMClassifier(SimpleRandomHiddenLayer(activation_func='tribas'))
+ elmc = ELMClassifier(SimpleRandomLayer(activation_func='tribas'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

- elmc = ELMClassifier(SimpleRandomHiddenLayer(activation_func='sigmoid'))
+ elmc = ELMClassifier(SimpleRandomLayer(activation_func='sigmoid'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

- elmc = ELMClassifier(SimpleRandomHiddenLayer(activation_func='hardlim'))
+ elmc = ELMClassifier(SimpleRandomLayer(activation_func='hardlim'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

# <codecell>

- hardlim = (lambda a: np.array(a > 0.0, dtype=float))
- tribas = (lambda a: np.clip(1.0 - np.fabs(a), 0.0, 1.0))
- elmr = ELMRegressor(SimpleRandomHiddenLayer(random_state=0, activation_func=tribas))
+ elmr = ELMRegressor(SimpleRandomLayer(random_state=0, activation_func='tribas'))
elmr.fit(xtoy_train, ytoy_train)
print elmr.score(xtoy_train, ytoy_train), elmr.score(xtoy_test, ytoy_test)
plot(xtoy, ytoy, xtoy, elmr.predict(xtoy))

# <codecell>

- rhl = SimpleRandomHiddenLayer(n_hidden=200)
+ rhl = SimpleRandomLayer(n_hidden=200)
elmr = ELMRegressor(hidden_layer=rhl)
tr, ts = res_dist(mrx, mry, elmr, n_runs=20, random_state=0)

# <codecell>

- rhl = RBFRandomHiddenLayer(n_hidden=15, gamma=0.25)
+ rhl = RBFRandomLayer(n_hidden=15, gamma=0.25)
elmr = ELMRegressor(hidden_layer=rhl)
elmr.fit(xtoy_train, ytoy_train)
print elmr.score(xtoy_train, ytoy_train), elmr.score(xtoy_test, ytoy_test)
@@ -132,33 +144,33 @@ def res_dist(x, y, e, n_runs=100, random_state=None):
nh = 10
(ctrs, _, _) = k_means(xtoy_train, nh)
unit_rs = np.ones(nh)
- rhl = RBFRandomHiddenLayer(n_hidden=nh, activation_func='poly_spline', gamma=3)
- # rhl = RBFRandomHiddenLayer(n_hidden=nh, activation_func='multiquadric', gamma=1)
- # rhl = RBFRandomHiddenLayer(n_hidden=nh, centers=ctrs, radii=unit_rs, gamma=4)
+ # rhl = RBFRandomLayer(n_hidden=nh, activation_func='poly_spline', gamma=3)
+ # rhl = RBFRandomLayer(n_hidden=nh, activation_func='multiquadric', gamma=1)
+ rhl = RBFRandomLayer(n_hidden=nh, centers=ctrs, radii=unit_rs)
elmr = ELMRegressor(hidden_layer=rhl)
elmr.fit(xtoy_train, ytoy_train)
print elmr.score(xtoy_train, ytoy_train), elmr.score(xtoy_test, ytoy_test)
plot(xtoy, ytoy, xtoy, elmr.predict(xtoy))

# <codecell>

- rbf_rhl = RBFRandomHiddenLayer(n_hidden=100, random_state=0, gamma=0.1)
+ rbf_rhl = RBFRandomLayer(n_hidden=100, random_state=0, gamma=0.1)
elmc_rbf = ELMClassifier(hidden_layer=rbf_rhl)
elmc_rbf.fit(dgx_train, dgy_train)
print elmc_rbf.score(dgx_train, dgy_train), elmc_rbf.score(dgx_test, dgy_test)

def powtanh_xfer(activations, power=1.0):
    return pow(np.tanh(activations), power)

- #tanh_rhl = SimpleRandomHiddenLayer(n_hidden=5000, random_state=0)
- tanh_rhl = SimpleRandomHiddenLayer(n_hidden=5000, activation_func=powtanh_xfer, activation_args={'power': 2.0})
+ #tanh_rhl = SimpleRandomLayer(n_hidden=5000, random_state=0)
+ tanh_rhl = SimpleRandomLayer(n_hidden=5000, activation_func=powtanh_xfer, activation_args={'power': 2.0})
elmc_tanh = ELMClassifier(hidden_layer=tanh_rhl)
elmc_tanh.fit(dgx_train, dgy_train)
print elmc_tanh.score(dgx_train, dgy_train), elmc_tanh.score(dgx_test, dgy_test)

# <codecell>

- rbf_rhl = RBFRandomHiddenLayer(n_hidden=100, gamma=0.1)
+ rbf_rhl = RBFRandomLayer(n_hidden=100, gamma=0.1)
tr, ts = res_dist(dgx, dgy, ELMClassifier(hidden_layer=rbf_rhl), n_runs=100, random_state=0)

# <codecell>
@@ -168,13 +180,12 @@ def powtanh_xfer(activations, power=1.0):

# <codecell>

- from sklearn.svm import SVR
from sklearn.ensemble import RandomForestRegressor
tr, ts = res_dist(dbx, dby, RandomForestRegressor(n_estimators=15), n_runs=100, random_state=0)
hist(tr), hist(ts)
print

- rhl = RBFRandomHiddenLayer(n_hidden=15, gamma=0.01)
+ rhl = RBFRandomLayer(n_hidden=15, gamma=0.01)
tr, ts = res_dist(dbx, dby, ELMRegressor(rhl), n_runs=100, random_state=0)
hist(tr), hist(ts)
print
@@ -192,19 +203,6 @@ def powtanh_xfer(activations, power=1.0):

# <codecell>

- elmc = SimpleELMClassifier(n_hidden=500, activation_func='hardlim')
- elmc.fit(dgx_train, dgy_train)
- print elmc.score(dgx_train, dgy_train), elmc.score(dgx_test, dgy_test)
-
- # <codecell>
-
- elmr = SimpleELMRegressor()
- elmr.fit(xtoy_train, ytoy_train)
- print elmr.score(xtoy_train, ytoy_train), elmr.score(xtoy_test, ytoy_test)
- plot(xtoy, ytoy, xtoy, elmr.predict(xtoy))
-
- # <codecell>
-
elmr = SimpleELMRegressor(activation_func='tribas')
elmr.fit(xtoy_train, ytoy_train)
print elmr.score(xtoy_train, ytoy_train), elmr.score(xtoy_test, ytoy_test)
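Note: the hunk headers above reference `def res_dist(x, y, e, n_runs=100, random_state=None)`, but its body lies outside this diff. The sketch below is a hypothetical stand-in consistent with how it is called above (`tr, ts = res_dist(...)` followed by `hist(tr), hist(ts)`); the 80/20 split size, the use of the estimator's default `.score()`, and the omission of the timing that `from time import time` suggests are all assumptions, not the repository's implementation.

```python
# Hypothetical stand-in for res_dist (not the repo's code): collect train/test
# score distributions for estimator e over n_runs random 80/20 splits.
import numpy as np
from sklearn.cross_validation import train_test_split  # module path of this sklearn era

def res_dist(x, y, e, n_runs=100, random_state=None):
    rng = np.random.RandomState(random_state)
    train_scores, test_scores = [], []
    for _ in range(n_runs):
        # fresh random split each run, seeded from the shared RNG for reproducibility
        x_tr, x_ts, y_tr, y_ts = train_test_split(x, y, test_size=0.2,
                                                   random_state=rng.randint(2 ** 31))
        e.fit(x_tr, y_tr)
        train_scores.append(e.score(x_tr, y_tr))
        test_scores.append(e.score(x_ts, y_ts))
    return np.array(train_scores), np.array(test_scores)
```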