@@ -297,7 +297,9 @@ def __init__(self, name: Optional[str],
                 cache_folder: str = None,
                 nr_of_processes: int = 1,
                 multi_threading: bool = True,
-                allow_multidim_targets: bool = False):
+                allow_multidim_targets: bool = False,
+                raise_error: bool = False,
+                score_train: bool = True):
        """
        Initialize the object.

@@ -420,6 +422,12 @@ def __init__(self, name: Optional[str],
        allow_multidim_targets:
            Allows multidimensional targets.

+        score_train:
+            Metrics for the training set are only calculated if score_train is True.
+
+        raise_error:
+            If True, errors occurring in the inner folds are raised instead of only being logged as warnings.
+
        """

        self.name = re.sub(r'\W+', '', name)
@@ -514,6 +522,8 @@ def __init__(self, name: Optional[str],
        self.permutation_id = permutation_id
        self.allow_multidim_targets = allow_multidim_targets
        self.is_final_fit = False
+        self.score_train = score_train
+        self.raise_error = raise_error

        # ====================== Random Seed ===========================
        self.random_state = random_seed
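For context, here is a minimal usage sketch of the two new flags. Everything apart from raise_error and score_train (dataset, cross-validation setup, pipeline elements) is an illustrative placeholder in the usual PHOTONAI example style and may differ slightly between versions:

# Minimal usage sketch (illustrative only) of the two new constructor flags.
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import KFold
from photonai.base import Hyperpipe, PipelineElement

X, y = load_breast_cancer(return_X_y=True)

pipe = Hyperpipe('flags_example',
                 inner_cv=KFold(n_splits=3),
                 outer_cv=KFold(n_splits=3),
                 metrics=['accuracy'],
                 best_config_metric='accuracy',
                 raise_error=True,    # new: fail fast on inner-fold errors
                 score_train=False)   # new: skip train-set metric computation
pipe += PipelineElement('StandardScaler')
pipe += PipelineElement('SVC')
pipe.fit(X, y)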
@@ -933,7 +943,7 @@ def _finalize_optimization(self):
            logger.error(str(e))

        # get feature importances of optimum pipe
-        logger.info("Mapping back feature importances...")
+        # logger.info("Mapping back feature importances...")
        feature_importances = self.optimum_pipe.feature_importances_

        if not feature_importances:
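As a side note, this block consumes optimum_pipe.feature_importances_. A standalone illustration of the underlying scikit-learn attribute, shown on a plain estimator rather than a PHOTONAI pipeline (which wraps this per element and is not reproduced here):

# Standalone illustration of the feature_importances_ attribute read above.
from sklearn.datasets import load_iris
from sklearn.tree import DecisionTreeClassifier

X, y = load_iris(return_X_y=True)
tree = DecisionTreeClassifier(random_state=0).fit(X, y)
print(tree.feature_importances_)  # one importance value per input feature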
@@ -943,18 +953,18 @@ def _finalize_optimization(self):

        # write backmapping file only if optimum_pipes inverse_transform works completely.
        # restriction: only a faulty inverse_transform is considered, missing ones are further ignored.
-        with warnings.catch_warnings(record=True) as w:
-            # get backmapping
-            backmapping, _, _ = self.optimum_pipe.\
-                inverse_transform(np.array(feature_importances).reshape(1, -1), None)
-
-            if not any("The inverse transformation is not possible for" in s
-                       for s in [e.message.args[0] for e in w]):
-                # save backmapping
-                self.results_handler.save_backmapping(
-                    filename='optimum_pipe_feature_importances_backmapped', backmapping=backmapping)
-            else:
-                logger.info('Could not save feature importance: backmapping NOT successful.')
+        # with warnings.catch_warnings(record=True) as w:
+        #     # get backmapping
+        #     backmapping, _, _ = self.optimum_pipe.\
+        #         inverse_transform(np.array(feature_importances).reshape(1, -1), None)
+        #
+        #     if not any("The inverse transformation is not possible for" in s
+        #                for s in [e.message.args[0] for e in w]):
+        #         # save backmapping
+        #         self.results_handler.save_backmapping(
+        #             filename='optimum_pipe_feature_importances_backmapped', backmapping=backmapping)
+        #     else:
+        #         logger.info('Could not save feature importance: backmapping NOT successful.')

        # save learning curves
        if self.cross_validation.learning_curves:
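For reference, the disabled block relied on recording warnings to decide whether the inverse transform succeeded. A self-contained sketch of that pattern, with hypothetical names:

# Self-contained sketch of the warnings-capture pattern used by the disabled
# block: run an operation while recording warnings, then inspect their messages.
import warnings

def run_and_check(operation, *args):
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")   # ensure every warning is recorded
        result = operation(*args)
    if any("not possible" in str(w.message) for w in caught):
        return None                       # treat the warned case as a failure
    return result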
@@ -1085,7 +1095,9 @@ def fit(self, data: np.ndarray, targets: np.ndarray, **kwargs):
                    cache_folder=self.cache_folder,
                    cache_updater=self.recursive_cache_folder_propagation,
                    dummy_estimator=dummy_estimator,
-                    result_obj=outer_fold)
+                    result_obj=outer_fold,
+                    score_train=self.score_train,
+                    raise_error=self.raise_error)
            # 2. monitor outputs
            self.results.outer_folds.append(outer_fold)
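A hypothetical sketch of how a fold computation could honor the two forwarded flags; the real OuterFoldManager/InnerFoldManager logic is not reproduced here:

# Hypothetical sketch of a fold worker honoring score_train and raise_error.
import logging

logger = logging.getLogger(__name__)

def score_fold(estimator, X_train, y_train, X_test, y_test, metric,
               score_train=True, raise_error=False):
    try:
        estimator.fit(X_train, y_train)
        test_score = metric(y_test, estimator.predict(X_test))
        # train-set metrics are optional diagnostics, skipped unless requested
        train_score = metric(y_train, estimator.predict(X_train)) if score_train else None
        return train_score, test_score
    except Exception as exc:
        if raise_error:
            raise                         # surface the failure instead of hiding it
        logger.warning("Inner fold failed: %s", exc)
        return None, None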
@@ -1243,6 +1255,7 @@ def train_and_get_fimps(pipeline, train_idx, test_idx, data_X, data_y, data_kwar

        # get feature importances
        logger.photon_system_log("Permutation Importances: Calculating performances for " + fold_str)
+
        perm_imps = permutation_importance(pipeline, test_X, test_y, **kwargs)

        # store into list
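Assuming the call above resolves to scikit-learn's permutation_importance, here is a minimal standalone example of that function:

# Minimal example of sklearn.inspection.permutation_importance, which the
# perm_imps line above appears to rely on.
from sklearn.datasets import load_wine
from sklearn.ensemble import RandomForestClassifier
from sklearn.inspection import permutation_importance

X, y = load_wine(return_X_y=True)
clf = RandomForestClassifier(random_state=0).fit(X, y)
result = permutation_importance(clf, X, y, n_repeats=5, random_state=0)
print(result.importances_mean)  # mean importance per feature across repeats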