@@ -566,10 +566,6 @@ class LinearBoostRegressor(_LinearBoosting, RegressorMixin):
         A node will be split if this split induces a decrease of the impurity
         greater than or equal to this value.
 
-    min_impurity_split : float, default=0
-        Threshold for early stopping in tree growth. A node will split
-        if its impurity is above the threshold, otherwise it is a leaf.
-
     ccp_alpha : non-negative float, default=0.0
         Complexity parameter used for Minimal Cost-Complexity Pruning. The
         subtree with the largest cost complexity that is smaller than
@@ -619,8 +615,7 @@ def __init__(self, base_estimator, *, loss='linear', n_estimators=10,
                  max_depth=3, min_samples_split=2, min_samples_leaf=1,
                  min_weight_fraction_leaf=0.0, max_features=None,
                  random_state=None, max_leaf_nodes=None,
-                 min_impurity_decrease=0.0, min_impurity_split=None,
-                 ccp_alpha=0.0):
+                 min_impurity_decrease=0.0, ccp_alpha=0.0):
 
         self.base_estimator = base_estimator
         self.loss = loss
@@ -633,7 +628,6 @@ def __init__(self, base_estimator, *, loss='linear', n_estimators=10,
         self.random_state = random_state
         self.max_leaf_nodes = max_leaf_nodes
         self.min_impurity_decrease = min_impurity_decrease
-        self.min_impurity_split = min_impurity_split
         self.ccp_alpha = ccp_alpha
 
     def fit(self, X, y, sample_weight=None):
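With min_impurity_split removed, tree growth in the boosting estimator is controlled only by the remaining pre-pruning parameters (max_depth, min_impurity_decrease, ccp_alpha, and friends). A minimal usage sketch of the updated constructor; the lineartree import path and the choice of Ridge as base_estimator are assumptions for illustration, not shown in this diff.

# Sketch only: import path and base_estimator choice are assumptions.
from sklearn.datasets import make_regression
from sklearn.linear_model import Ridge
from lineartree import LinearBoostRegressor

X, y = make_regression(n_samples=200, n_features=5, random_state=0)

# Growth is bounded through min_impurity_decrease, max_depth, ccp_alpha, etc.;
# min_impurity_split is no longer a constructor keyword.
model = LinearBoostRegressor(
    base_estimator=Ridge(),
    loss='linear',
    n_estimators=10,
    max_depth=3,
    min_impurity_decrease=0.0,
    ccp_alpha=0.0,
)
model.fit(X, y)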
@@ -777,10 +771,6 @@ class LinearBoostClassifier(_LinearBoosting, ClassifierMixin):
         A node will be split if this split induces a decrease of the impurity
         greater than or equal to this value.
 
-    min_impurity_split : float, default=0
-        Threshold for early stopping in tree growth. A node will split
-        if its impurity is above the threshold, otherwise it is a leaf.
-
     ccp_alpha : non-negative float, default=0.0
         Complexity parameter used for Minimal Cost-Complexity Pruning. The
         subtree with the largest cost complexity that is smaller than
@@ -830,8 +820,7 @@ def __init__(self, base_estimator, *, loss='hamming', n_estimators=10,
                  max_depth=3, min_samples_split=2, min_samples_leaf=1,
                  min_weight_fraction_leaf=0.0, max_features=None,
                  random_state=None, max_leaf_nodes=None,
-                 min_impurity_decrease=0.0, min_impurity_split=None,
-                 ccp_alpha=0.0):
+                 min_impurity_decrease=0.0, ccp_alpha=0.0):
 
         self.base_estimator = base_estimator
         self.loss = loss
@@ -844,7 +833,6 @@ def __init__(self, base_estimator, *, loss='hamming', n_estimators=10,
         self.random_state = random_state
         self.max_leaf_nodes = max_leaf_nodes
         self.min_impurity_decrease = min_impurity_decrease
-        self.min_impurity_split = min_impurity_split
         self.ccp_alpha = ccp_alpha
 
     def fit(self, X, y, sample_weight=None):
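Because the keyword is dropped from __init__ (and there is no **kwargs catch-all), code that still passes min_impurity_split now fails at construction time with a TypeError. A hedged sketch of that failure mode; the import path and the RidgeClassifier base_estimator are assumptions used purely for illustration, and nothing else is validated because __init__ only stores attributes.

# Sketch only: import path and base_estimator choice are assumptions.
from sklearn.linear_model import RidgeClassifier
from lineartree import LinearBoostClassifier

try:
    LinearBoostClassifier(base_estimator=RidgeClassifier(),
                          min_impurity_split=0.0)
except TypeError as exc:
    # e.g. "__init__() got an unexpected keyword argument 'min_impurity_split'"
    print(exc)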
@@ -1039,10 +1027,6 @@ class LinearForestClassifier(_LinearForest, ClassifierMixin):
         A node will be split if this split induces a decrease of the impurity
         greater than or equal to this value.
 
-    min_impurity_split : float, default=None
-        Threshold for early stopping in tree growth. A node will split
-        if its impurity is above the threshold, otherwise it is a leaf.
-
     bootstrap : bool, default=True
         Whether bootstrap samples are used when building trees. If False, the
         whole dataset is used to build each tree.
@@ -1076,7 +1060,7 @@ class LinearForestClassifier(_LinearForest, ClassifierMixin):
         - If None (default), then draw `X.shape[0]` samples.
         - If int, then draw `max_samples` samples.
         - If float, then draw `max_samples * X.shape[0]` samples. Thus,
-          `max_samples` should be in the interval `(0, 1)`.
+          `max_samples` should be in the interval `(0, 1]`.
 
     Attributes
     ----------
@@ -1129,9 +1113,8 @@ def __init__(self, base_estimator, *, n_estimators=100,
                  max_depth=None, min_samples_split=2, min_samples_leaf=1,
                  min_weight_fraction_leaf=0., max_features="auto",
                  max_leaf_nodes=None, min_impurity_decrease=0.,
-                 min_impurity_split=None, bootstrap=True,
-                 oob_score=False, n_jobs=None, random_state=None,
-                 ccp_alpha=0.0, max_samples=None):
+                 bootstrap=True, oob_score=False, n_jobs=None,
+                 random_state=None, ccp_alpha=0.0, max_samples=None):
 
         self.base_estimator = base_estimator
         self.n_estimators = n_estimators
@@ -1142,7 +1125,6 @@ def __init__(self, base_estimator, *, n_estimators=100,
         self.max_features = max_features
         self.max_leaf_nodes = max_leaf_nodes
         self.min_impurity_decrease = min_impurity_decrease
-        self.min_impurity_split = min_impurity_split
         self.bootstrap = bootstrap
         self.oob_score = oob_score
         self.n_jobs = n_jobs
@@ -1351,10 +1333,6 @@ class LinearForestRegressor(_LinearForest, RegressorMixin):
         A node will be split if this split induces a decrease of the impurity
         greater than or equal to this value.
 
-    min_impurity_split : float, default=None
-        Threshold for early stopping in tree growth. A node will split
-        if its impurity is above the threshold, otherwise it is a leaf.
-
     bootstrap : bool, default=True
         Whether bootstrap samples are used when building trees. If False, the
         whole dataset is used to build each tree.
@@ -1388,7 +1366,7 @@ class LinearForestRegressor(_LinearForest, RegressorMixin):
         - If None (default), then draw `X.shape[0]` samples.
         - If int, then draw `max_samples` samples.
         - If float, then draw `max_samples * X.shape[0]` samples. Thus,
-          `max_samples` should be in the interval `(0, 1)`.
+          `max_samples` should be in the interval `(0, 1]`.
 
     Attributes
     ----------
@@ -1437,14 +1415,12 @@ class LinearForestRegressor(_LinearForest, RegressorMixin):
     Authors: Haozhe Zhang, Dan Nettleton, Zhengyuan Zhu.
     (https://arxiv.org/abs/1904.10416)
     """
-
     def __init__(self, base_estimator, *, n_estimators=100,
                  max_depth=None, min_samples_split=2, min_samples_leaf=1,
                  min_weight_fraction_leaf=0., max_features="auto",
                  max_leaf_nodes=None, min_impurity_decrease=0.,
-                 min_impurity_split=None, bootstrap=True,
-                 oob_score=False, n_jobs=None, random_state=None,
-                 ccp_alpha=0.0, max_samples=None):
+                 bootstrap=True, oob_score=False, n_jobs=None,
+                 random_state=None, ccp_alpha=0.0, max_samples=None):
 
         self.base_estimator = base_estimator
         self.n_estimators = n_estimators
@@ -1455,7 +1431,6 @@ def __init__(self, base_estimator, *, n_estimators=100,
         self.max_features = max_features
         self.max_leaf_nodes = max_leaf_nodes
         self.min_impurity_decrease = min_impurity_decrease
-        self.min_impurity_split = min_impurity_split
         self.bootstrap = bootstrap
         self.oob_score = oob_score
         self.n_jobs = n_jobs
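The other visible change in the forest docstrings is the max_samples interval: a float is now documented as lying in (0, 1] rather than (0, 1). A minimal usage sketch of the updated forest constructor under the same assumptions (lineartree import path, Ridge as base_estimator, neither shown in this diff):

# Sketch only: import path and base_estimator choice are assumptions.
from sklearn.datasets import make_regression
from sklearn.linear_model import Ridge
from lineartree import LinearForestRegressor

X, y = make_regression(n_samples=300, n_features=8, random_state=42)

# Per the corrected docstring, any float in (0, 1] is allowed (including 1.0);
# min_impurity_split is no longer accepted by the constructor.
forest = LinearForestRegressor(
    base_estimator=Ridge(),
    n_estimators=100,
    bootstrap=True,
    max_samples=0.8,
    random_state=42,
)
forest.fit(X, y)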