diff --git a/_images/65904496ae44a4a185e7c69818a8751bd821541b100b26091cf76db157f5a3f6.png b/_images/65904496ae44a4a185e7c69818a8751bd821541b100b26091cf76db157f5a3f6.png
deleted file mode 100644
index f228b2018..000000000
Binary files a/_images/65904496ae44a4a185e7c69818a8751bd821541b100b26091cf76db157f5a3f6.png and /dev/null differ
diff --git a/_images/96da411b5c4ebfaa3ebeb9c05c1fa91e8164f132b58558585e11ad4b7d55a671.png b/_images/96da411b5c4ebfaa3ebeb9c05c1fa91e8164f132b58558585e11ad4b7d55a671.png
new file mode 100644
index 000000000..2cd51eead
Binary files /dev/null and b/_images/96da411b5c4ebfaa3ebeb9c05c1fa91e8164f132b58558585e11ad4b7d55a671.png differ
diff --git a/_images/a830e52976bbc92558a4865dcfe3ea4ee88afda4db3236c609b4f65dca9f6558.png b/_images/a830e52976bbc92558a4865dcfe3ea4ee88afda4db3236c609b4f65dca9f6558.png
deleted file mode 100644
index f1ad9cd52..000000000
Binary files a/_images/a830e52976bbc92558a4865dcfe3ea4ee88afda4db3236c609b4f65dca9f6558.png and /dev/null differ
diff --git a/_images/aa30de15e213b5786f4300f81791f9ae43dbe0b3edb40fcea1c7d5ec46154031.png b/_images/aa30de15e213b5786f4300f81791f9ae43dbe0b3edb40fcea1c7d5ec46154031.png
new file mode 100644
index 000000000..cbb8c48f7
Binary files /dev/null and b/_images/aa30de15e213b5786f4300f81791f9ae43dbe0b3edb40fcea1c7d5ec46154031.png differ
diff --git a/_images/d98cf10afde42d00cba794b3555d7e9ba000cebdef829967a70a33ccba1b60db.png b/_images/d98cf10afde42d00cba794b3555d7e9ba000cebdef829967a70a33ccba1b60db.png
deleted file mode 100644
index 6e70264fa..000000000
Binary files a/_images/d98cf10afde42d00cba794b3555d7e9ba000cebdef829967a70a33ccba1b60db.png and /dev/null differ
diff --git a/_images/fb409cbf68b13df4149fba8f25d820751b1e5ad85c2612c3fe73045a40c0c004.png b/_images/fb409cbf68b13df4149fba8f25d820751b1e5ad85c2612c3fe73045a40c0c004.png
new file mode 100644
index 000000000..fcf776779
Binary files /dev/null and b/_images/fb409cbf68b13df4149fba8f25d820751b1e5ad85c2612c3fe73045a40c0c004.png differ
diff --git a/_sources/python_scripts/linear_models_feature_engineering_classification.py b/_sources/python_scripts/linear_models_feature_engineering_classification.py
index 9fd203f34..12a2997da 100644
--- a/_sources/python_scripts/linear_models_feature_engineering_classification.py
+++ b/_sources/python_scripts/linear_models_feature_engineering_classification.py
@@ -235,7 +235,10 @@ def plot_decision_boundary(model, title=None):
 # %%
 from sklearn.preprocessing import KBinsDiscretizer
 
-classifier = make_pipeline(KBinsDiscretizer(n_bins=5), LogisticRegression())
+classifier = make_pipeline(
+    KBinsDiscretizer(n_bins=5, encode="onehot"),  # already the default params
+    LogisticRegression(),
+)
 classifier
 
 # %%
@@ -279,15 +282,20 @@ def plot_decision_boundary(model, title=None):
 # We can see that the decision boundary is now smooth, and while it favors
 # axis-aligned decision rules when extrapolating in low density regions, it can
 # adopt a more curvy decision boundary in the high density regions.
-#
-# Note however, that the number of knots is a hyperparameter that needs to be
-# tuned. If we use too few knots, the model would underfit the data, as shown on
-# the moons dataset. If we use too many knots, the model would overfit the data.
-#
 # However, as for the binning transformation, the model still fails to separate
 # the data for the XOR dataset, irrespective of the number of knots, for the
 # same reasons: **the spline transformation is a feature-wise transformation**
 # and thus **cannot capture interactions** between features.
+#
+# Take into account that the number of knots is a hyperparameter that needs to be
+# tuned. If we use too few knots, the model would underfit the data, as shown on
+# the moons dataset. If we use too many knots, the model would overfit the data.
+#
+# ```{note}
+# Notice that `KBinsDiscretizer(encode="onehot")` and `SplineTransformer` do not
+# require additional scaling. Indeed, they can replace the scaling step for
+# numerical features: they both create features with values in the [0, 1] range.
+# ```
 
 # %% [markdown]
 #
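As a quick sanity check of the claim in the added note, here is a minimal standalone sketch (not part of the diff) that fits both preprocessing choices discussed above and prints the range of the transformed features. The `make_moons` data and its parameters are only illustrative stand-ins for the notebook's synthetic datasets; the transformer settings mirror the ones shown in the diff.

```python
# Minimal sketch supporting the added note: one-hot binned features and
# B-spline features already lie in the [0, 1] range, so no extra scaling
# step is needed before LogisticRegression. make_moons is a stand-in dataset.
from sklearn.datasets import make_moons
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import KBinsDiscretizer, SplineTransformer

X, y = make_moons(n_samples=200, noise=0.3, random_state=0)

classifiers = {
    "binning": make_pipeline(
        KBinsDiscretizer(n_bins=5, encode="onehot"),  # 0/1 indicator columns
        LogisticRegression(),
    ),
    "spline": make_pipeline(
        SplineTransformer(degree=3, n_knots=5),  # B-spline basis values in [0, 1]
        LogisticRegression(),
    ),
}

for name, clf in classifiers.items():
    transformed = clf[:-1].fit_transform(X)  # apply only the preprocessing step
    print(name, "feature range:", transformed.min(), "to", transformed.max())
    print(name, "train accuracy:", clf.fit(X, y).score(X, y))
```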
diff --git a/appendix/notebook_timings.html b/appendix/notebook_timings.html
index a28d4c9be..f3547b35d 100644
--- a/appendix/notebook_timings.html
+++ b/appendix/notebook_timings.html
@@ -1004,9 +1004,9 @@
 python_scripts/linear_models_feature_engineering_classification
-2023-10-20 14:15
+2023-10-26 11:59
 cache
-10.8
+10.37
 ✅
diff --git a/searchindex.js b/searchindex.js
index 7e9182e05..cb504cba3 100644
"python_scripts/02_numerical_pipeline_cross_validation", "python_scripts/02_numerical_pipeline_ex_00", "python_scripts/02_numerical_pipeline_ex_01", "python_scripts/02_numerical_pipeline_hands_on", "python_scripts/02_numerical_pipeline_introduction", "python_scripts/02_numerical_pipeline_scaling", "python_scripts/02_numerical_pipeline_sol_00", "python_scripts/02_numerical_pipeline_sol_01", "python_scripts/03_categorical_pipeline", "python_scripts/03_categorical_pipeline_column_transformer", "python_scripts/03_categorical_pipeline_ex_01", "python_scripts/03_categorical_pipeline_ex_02", "python_scripts/03_categorical_pipeline_sol_01", "python_scripts/03_categorical_pipeline_sol_02", "python_scripts/03_categorical_pipeline_visualization", "python_scripts/cross_validation_baseline", "python_scripts/cross_validation_ex_01", "python_scripts/cross_validation_ex_02", "python_scripts/cross_validation_grouping", "python_scripts/cross_validation_learning_curve", "python_scripts/cross_validation_nested", "python_scripts/cross_validation_sol_01", "python_scripts/cross_validation_sol_02", "python_scripts/cross_validation_stratification", "python_scripts/cross_validation_time", "python_scripts/cross_validation_train_test", "python_scripts/cross_validation_validation_curve", "python_scripts/datasets_ames_housing", "python_scripts/datasets_bike_rides", "python_scripts/datasets_blood_transfusion", "python_scripts/datasets_california_housing", "python_scripts/dev_features_importance", "python_scripts/ensemble_adaboost", "python_scripts/ensemble_bagging", "python_scripts/ensemble_ex_01", "python_scripts/ensemble_ex_02", "python_scripts/ensemble_ex_03", "python_scripts/ensemble_ex_04", "python_scripts/ensemble_gradient_boosting", "python_scripts/ensemble_hist_gradient_boosting", "python_scripts/ensemble_hyperparameters", "python_scripts/ensemble_introduction", "python_scripts/ensemble_random_forest", "python_scripts/ensemble_sol_01", "python_scripts/ensemble_sol_02", "python_scripts/ensemble_sol_03", "python_scripts/ensemble_sol_04", "python_scripts/feature_selection_ex_01", "python_scripts/feature_selection_introduction", "python_scripts/feature_selection_limitation_model", "python_scripts/feature_selection_sol_01", "python_scripts/linear_models_ex_01", "python_scripts/linear_models_ex_02", "python_scripts/linear_models_ex_03", "python_scripts/linear_models_feature_engineering_classification", "python_scripts/linear_models_regularization", "python_scripts/linear_models_sol_01", "python_scripts/linear_models_sol_02", "python_scripts/linear_models_sol_03", "python_scripts/linear_regression_in_sklearn", "python_scripts/linear_regression_non_linear_link", "python_scripts/linear_regression_without_sklearn", "python_scripts/logistic_regression", "python_scripts/logistic_regression_non_linear", "python_scripts/metrics_classification", "python_scripts/metrics_ex_01", "python_scripts/metrics_ex_02", "python_scripts/metrics_regression", "python_scripts/metrics_sol_01", "python_scripts/metrics_sol_02", "python_scripts/parameter_tuning_ex_02", "python_scripts/parameter_tuning_ex_03", "python_scripts/parameter_tuning_grid_search", "python_scripts/parameter_tuning_manual", "python_scripts/parameter_tuning_nested", "python_scripts/parameter_tuning_parallel_plot", "python_scripts/parameter_tuning_randomized_search", "python_scripts/parameter_tuning_sol_02", "python_scripts/parameter_tuning_sol_03", "python_scripts/trees_classification", "python_scripts/trees_dataset", "python_scripts/trees_ex_01", 
"python_scripts/trees_ex_02", "python_scripts/trees_hyperparameters", "python_scripts/trees_regression", "python_scripts/trees_sol_01", "python_scripts/trees_sol_02", "toc", "trees/slides", "trees/trees_classification_index", "trees/trees_hyperparameters_index", "trees/trees_intuitions_index", "trees/trees_module_intro", "trees/trees_module_take_away", "trees/trees_quiz_m5_01", "trees/trees_quiz_m5_02", "trees/trees_quiz_m5_03", "trees/trees_quiz_m5_04", "trees/trees_regression_index", "trees/trees_wrap_up_quiz", "tuning/parameter_tuning_automated_index", "tuning/parameter_tuning_automated_quiz_m3_02", "tuning/parameter_tuning_manual_index", "tuning/parameter_tuning_manual_quiz_m3_01", "tuning/parameter_tuning_module_intro", "tuning/parameter_tuning_module_take_away", "tuning/parameter_tuning_parallel_plot_video", "tuning/parameter_tuning_wrap_up_quiz"], "filenames": ["appendix/acknowledgement.md", "appendix/adult_census_description.md", "appendix/datasets_intro.md", "appendix/glossary.md", "appendix/notebook_timings.md", "appendix/toc_redirect.md", "concluding_remarks.md", "concluding_remarks_video.md", "ensemble/bagging_slides.md", "ensemble/boosting_slides.md", "ensemble/ensemble_boosting_index.md", "ensemble/ensemble_bootstrap_index.md", "ensemble/ensemble_hyperparameters_index.md", "ensemble/ensemble_module_intro.md", "ensemble/ensemble_module_take_away.md", "ensemble/ensemble_quiz_m6_01.md", "ensemble/ensemble_quiz_m6_02.md", "ensemble/ensemble_quiz_m6_03.md", "ensemble/ensemble_wrap_up_quiz.md", "evaluation/cross_validation_baseline_index.md", "evaluation/cross_validation_choices_index.md", "evaluation/cross_validation_nested_index.md", "evaluation/evaluation_module_intro.md", "evaluation/evaluation_module_take_away.md", "evaluation/evaluation_quiz_m7_01.md", "evaluation/evaluation_quiz_m7_02.md", "evaluation/evaluation_quiz_m7_03.md", "evaluation/evaluation_quiz_m7_04.md", "evaluation/evaluation_quiz_m7_05.md", "evaluation/evaluation_wrap_up_quiz.md", "evaluation/metrics_classification_index.md", "evaluation/metrics_regression_index.md", "feature_selection/feature_selection_limitation_index.md", "feature_selection/feature_selection_module_intro.md", "feature_selection/feature_selection_module_take_away.md", "feature_selection/feature_selection_quiz.md", "index.md", "interpretation/interpretation_quiz.md", "linear_models/linear_models_intuitions_index.md", "linear_models/linear_models_module_intro.md", "linear_models/linear_models_module_take_away.md", "linear_models/linear_models_non_linear_index.md", "linear_models/linear_models_quiz_m4_01.md", "linear_models/linear_models_quiz_m4_02.md", "linear_models/linear_models_quiz_m4_03.md", "linear_models/linear_models_regularization_index.md", "linear_models/linear_models_slides.md", "linear_models/linear_models_wrap_up_quiz.md", "linear_models/regularized_linear_models_slides.md", "ml_concepts/quiz_intro_01.md", "ml_concepts/slides.md", "overfit/bias_vs_variance_quiz_m2_03.md", "overfit/bias_vs_variance_slides.md", "overfit/learning_validation_curves_quiz_m2_02.md", "overfit/learning_validation_curves_slides.md", "overfit/overfit_bias_variance_index.md", "overfit/overfit_module_intro.md", "overfit/overfit_overfitting_underfitting_index.md", "overfit/overfit_take_away.md", "overfit/overfit_validation_learning_curves_index.md", "overfit/overfit_wrap_up_quiz.md", "overfit/overfitting_vs_under_fitting_quiz_m2_01.md", "overfit/overfitting_vs_under_fitting_slides.md", "predictive_modeling_pipeline/01_tabular_data_exploration_index.md", 
"predictive_modeling_pipeline/01_tabular_data_exploration_quiz_m1_01.md", "predictive_modeling_pipeline/02_numerical_pipeline_index.md", "predictive_modeling_pipeline/02_numerical_pipeline_quiz_m1_02.md", "predictive_modeling_pipeline/02_numerical_pipeline_video_cross_validation.md", "predictive_modeling_pipeline/03_categorical_pipeline_index.md", "predictive_modeling_pipeline/03_categorical_pipeline_quiz_m1_03.md", "predictive_modeling_pipeline/03_categorical_pipeline_visualization_video.md", "predictive_modeling_pipeline/predictive_modeling_module_intro.md", "predictive_modeling_pipeline/predictive_modeling_module_take_away.md", "predictive_modeling_pipeline/wrap_up_quiz.md", "python_scripts/01_tabular_data_exploration.py", "python_scripts/01_tabular_data_exploration_ex_01.py", "python_scripts/01_tabular_data_exploration_sol_01.py", "python_scripts/02_numerical_pipeline_cross_validation.py", "python_scripts/02_numerical_pipeline_ex_00.py", "python_scripts/02_numerical_pipeline_ex_01.py", "python_scripts/02_numerical_pipeline_hands_on.py", "python_scripts/02_numerical_pipeline_introduction.py", "python_scripts/02_numerical_pipeline_scaling.py", "python_scripts/02_numerical_pipeline_sol_00.py", "python_scripts/02_numerical_pipeline_sol_01.py", "python_scripts/03_categorical_pipeline.py", "python_scripts/03_categorical_pipeline_column_transformer.py", "python_scripts/03_categorical_pipeline_ex_01.py", "python_scripts/03_categorical_pipeline_ex_02.py", "python_scripts/03_categorical_pipeline_sol_01.py", "python_scripts/03_categorical_pipeline_sol_02.py", "python_scripts/03_categorical_pipeline_visualization.py", "python_scripts/cross_validation_baseline.py", "python_scripts/cross_validation_ex_01.py", "python_scripts/cross_validation_ex_02.py", "python_scripts/cross_validation_grouping.py", "python_scripts/cross_validation_learning_curve.py", "python_scripts/cross_validation_nested.py", "python_scripts/cross_validation_sol_01.py", "python_scripts/cross_validation_sol_02.py", "python_scripts/cross_validation_stratification.py", "python_scripts/cross_validation_time.py", "python_scripts/cross_validation_train_test.py", "python_scripts/cross_validation_validation_curve.py", "python_scripts/datasets_ames_housing.py", "python_scripts/datasets_bike_rides.py", "python_scripts/datasets_blood_transfusion.py", "python_scripts/datasets_california_housing.py", "python_scripts/dev_features_importance.py", "python_scripts/ensemble_adaboost.py", "python_scripts/ensemble_bagging.py", "python_scripts/ensemble_ex_01.py", "python_scripts/ensemble_ex_02.py", "python_scripts/ensemble_ex_03.py", "python_scripts/ensemble_ex_04.py", "python_scripts/ensemble_gradient_boosting.py", "python_scripts/ensemble_hist_gradient_boosting.py", "python_scripts/ensemble_hyperparameters.py", "python_scripts/ensemble_introduction.py", "python_scripts/ensemble_random_forest.py", "python_scripts/ensemble_sol_01.py", "python_scripts/ensemble_sol_02.py", "python_scripts/ensemble_sol_03.py", "python_scripts/ensemble_sol_04.py", "python_scripts/feature_selection_ex_01.py", "python_scripts/feature_selection_introduction.py", "python_scripts/feature_selection_limitation_model.py", "python_scripts/feature_selection_sol_01.py", "python_scripts/linear_models_ex_01.py", "python_scripts/linear_models_ex_02.py", "python_scripts/linear_models_ex_03.py", "python_scripts/linear_models_feature_engineering_classification.py", "python_scripts/linear_models_regularization.py", "python_scripts/linear_models_sol_01.py", 
"python_scripts/linear_models_sol_02.py", "python_scripts/linear_models_sol_03.py", "python_scripts/linear_regression_in_sklearn.py", "python_scripts/linear_regression_non_linear_link.py", "python_scripts/linear_regression_without_sklearn.py", "python_scripts/logistic_regression.py", "python_scripts/logistic_regression_non_linear.py", "python_scripts/metrics_classification.py", "python_scripts/metrics_ex_01.py", "python_scripts/metrics_ex_02.py", "python_scripts/metrics_regression.py", "python_scripts/metrics_sol_01.py", "python_scripts/metrics_sol_02.py", "python_scripts/parameter_tuning_ex_02.py", "python_scripts/parameter_tuning_ex_03.py", "python_scripts/parameter_tuning_grid_search.py", "python_scripts/parameter_tuning_manual.py", "python_scripts/parameter_tuning_nested.py", "python_scripts/parameter_tuning_parallel_plot.py", "python_scripts/parameter_tuning_randomized_search.py", "python_scripts/parameter_tuning_sol_02.py", "python_scripts/parameter_tuning_sol_03.py", "python_scripts/trees_classification.py", "python_scripts/trees_dataset.py", "python_scripts/trees_ex_01.py", "python_scripts/trees_ex_02.py", "python_scripts/trees_hyperparameters.py", "python_scripts/trees_regression.py", "python_scripts/trees_sol_01.py", "python_scripts/trees_sol_02.py", "toc.md", "trees/slides.md", "trees/trees_classification_index.md", "trees/trees_hyperparameters_index.md", "trees/trees_intuitions_index.md", "trees/trees_module_intro.md", "trees/trees_module_take_away.md", "trees/trees_quiz_m5_01.md", "trees/trees_quiz_m5_02.md", "trees/trees_quiz_m5_03.md", "trees/trees_quiz_m5_04.md", "trees/trees_regression_index.md", "trees/trees_wrap_up_quiz.md", "tuning/parameter_tuning_automated_index.md", "tuning/parameter_tuning_automated_quiz_m3_02.md", "tuning/parameter_tuning_manual_index.md", "tuning/parameter_tuning_manual_quiz_m3_01.md", "tuning/parameter_tuning_module_intro.md", "tuning/parameter_tuning_module_take_away.md", "tuning/parameter_tuning_parallel_plot_video.md", "tuning/parameter_tuning_wrap_up_quiz.md"], "titles": ["Acknowledgement", "The adult census dataset", "Datasets description", "Glossary", "Notebook timings", "Table of contents", "Concluding remarks", "\ud83c\udfa5 Concluding remarks", "\ud83c\udfa5 Intuitions on ensemble models: bagging", "\ud83c\udfa5 Intuitions on ensemble models: boosting", "Ensemble based on boosting", "Ensemble method using bootstrapping", "Hyperparameter tuning with ensemble methods", "Module overview", "Main take-away", "\u2705 Quiz M6.01", "\u2705 Quiz M6.02", "\u2705 Quiz M6.03", "\ud83c\udfc1 Wrap-up quiz 6", "Comparing a model with simple baselines", "Choice of cross-validation", "Nested cross-validation", "Module overview", "Main take-away", "\u2705 Quiz M7.01", "\u2705 Quiz M7.02", "\u2705 Quiz M7.03", "\u2705 Quiz M7.04", "\u2705 Quiz M7.05", "\ud83c\udfc1 Wrap-up quiz 7", "Classification metrics", "Regression metrics", "Caveats of feature selection", "Module overview", "Main take-away", "\u2705 Quiz", "Introduction", "\u2705 Quiz", "Intuitions on linear models", "Module overview", "Main take-away", "Non-linear feature engineering for linear models", "\u2705 Quiz M4.01", "\u2705 Quiz M4.02", "\u2705 Quiz M4.03", "Regularization in linear model", "\ud83c\udfa5 Intuitions on linear models", "\ud83c\udfc1 Wrap-up quiz 4", "\ud83c\udfa5 Intuitions on regularized linear models", "\u2705 Quiz Intro.01", "\ud83c\udfa5 Introducing machine-learning concepts", "\u2705 Quiz M2.03", "\ud83c\udfa5 Bias versus Variance", "\u2705 Quiz M2.02", "\ud83c\udfa5 
Comparing train and test errors", "Bias versus variance trade-off", "Module overview", "Overfitting and underfitting", "Main take-away", "Validation and learning curves", "\ud83c\udfc1 Wrap-up quiz 2", "\u2705 Quiz M2.01", "\ud83c\udfa5 Overfitting and Underfitting", "Tabular data exploration", "\u2705 Quiz M1.01", "Fitting a scikit-learn model on numerical data", "\u2705 Quiz M1.02", "\ud83c\udfa5 Validation of a model", "Handling categorical data", "\u2705 Quiz M1.03", "\ud83c\udfa5 Visualizing scikit-learn pipelines in Jupyter", "Module overview", "Main take-away", "\ud83c\udfc1 Wrap-up quiz 1", "First look at our dataset", "\ud83d\udcdd Exercise M1.01", "\ud83d\udcc3 Solution for Exercise M1.01", "Model evaluation using cross-validation", "\ud83d\udcdd Exercise M1.02", "\ud83d\udcdd Exercise M1.03", "Working with numerical data", "First model with scikit-learn", "Preprocessing for numerical features", "\ud83d\udcc3 Solution for Exercise M1.02", "\ud83d\udcc3 Solution for Exercise M1.03", "Encoding of categorical variables", "Using numerical and categorical variables together", "\ud83d\udcdd Exercise M1.04", "\ud83d\udcdd Exercise M1.05", "\ud83d\udcc3 Solution for Exercise M1.04", "\ud83d\udcc3 Solution for Exercise M1.05", "Visualizing scikit-learn pipelines in Jupyter", "Comparing model performance with a simple baseline", "\ud83d\udcdd Exercise M2.01", "\ud83d\udcdd Exercise M7.01", "Sample grouping", "Effect of the sample size in cross-validation", "Nested cross-validation", "\ud83d\udcc3 Solution for Exercise M2.01", "\ud83d\udcc3 Solution for Exercise M7.01", "Stratification", "Non i.i.d. data", "Cross-validation framework", "Overfit-generalization-underfit", "The Ames housing dataset", "The bike rides dataset", "The blood transfusion dataset", "The California housing dataset", "Feature importance", "Adaptive Boosting (AdaBoost)", "Bagging", "\ud83d\udcdd Exercise M6.01", "\ud83d\udcdd Exercise M6.02", "\ud83d\udcdd Exercise M6.03", "\ud83d\udcdd Exercise M6.04", "Gradient-boosting decision tree (GBDT)", "Speeding-up gradient-boosting", "Hyperparameter tuning", "Introductory example to ensemble models", "Random forests", "\ud83d\udcc3 Solution for Exercise M6.01", "\ud83d\udcc3 Solution for Exercise M6.02", "\ud83d\udcc3 Solution for Exercise M6.03", "\ud83d\udcc3 Solution for Exercise M6.04", "\ud83d\udcdd Exercise 01", "Benefits of using feature selection", "Limitation of selecting feature using a model", "\ud83d\udcc3 Solution for Exercise 01", "\ud83d\udcdd Exercise M4.01", "\ud83d\udcdd Exercise M4.02", "\ud83d\udcdd Exercise M4.03", "Non-linear feature engineering for Logistic Regression", "Regularization of linear regression model", "\ud83d\udcc3 Solution for Exercise M4.01", "\ud83d\udcc3 Solution for Exercise M4.02", "\ud83d\udcc3 Solution for Exercise M4.03", "Linear regression using scikit-learn", "Non-linear feature engineering for Linear Regression", "Linear regression without scikit-learn", "Linear models for classification", "Beyond linear separation in classification", "Classification", "\ud83d\udcdd Exercise M7.02", "\ud83d\udcdd Exercise M7.03", "Regression", "\ud83d\udcc3 Solution for Exercise M7.02", "\ud83d\udcc3 Solution for Exercise M7.03", "\ud83d\udcdd Exercise M3.01", "\ud83d\udcdd Exercise M3.02", "Hyperparameter tuning by grid-search", "Set and get hyperparameters in scikit-learn", "Evaluation and hyperparameter tuning", "Analysis of hyperparameter search results", "Hyperparameter tuning by randomized-search", "\ud83d\udcc3 Solution for Exercise 
M3.01", "\ud83d\udcc3 Solution for Exercise M3.02", "Build a classification decision tree", "The penguins datasets", "\ud83d\udcdd Exercise M5.01", "\ud83d\udcdd Exercise M5.02", "Importance of decision tree hyperparameters on generalization", "Decision tree for regression", "\ud83d\udcc3 Solution for Exercise M5.01", "\ud83d\udcc3 Solution for Exercise M5.02", "Table of contents", "\ud83c\udfa5 Intuitions on tree-based models", "Decision tree in classification", "Hyperparameters of decision tree", "Intuitions on tree-based models", "Module overview", "Main take-away", "\u2705 Quiz M5.01", "\u2705 Quiz M5.02", "\u2705 Quiz M5.03", "\u2705 Quiz M5.04", "Decision tree in regression", "\ud83c\udfc1 Wrap-up quiz 5", "Automated tuning", "\u2705 Quiz M3.02", "Manual tuning", "\u2705 Quiz M3.01", "Module overview", "Main take-away", "\ud83c\udfa5 Analysis of hyperparameter search results", "\ud83c\udfc1 Wrap-up quiz 3"], "terms": {"The": [0, 2, 3, 13, 18, 22, 33, 36, 37, 39, 42, 44, 47, 49, 56, 58, 60, 66, 71, 73, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 108, 109, 110, 111, 112, 113, 114, 116, 117, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 142, 143, 144, 145, 146, 147, 148, 149, 151, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 169, 171, 173, 176, 181, 182, 184], "diagram": [0, 3, 82], "present": [0, 8, 9, 13, 22, 23, 29, 34, 39, 46, 48, 50, 52, 54, 56, 62, 67, 71, 73, 74, 81, 85, 92, 96, 100, 101, 103, 104, 105, 106, 107, 109, 110, 115, 116, 118, 119, 126, 136, 138, 141, 142, 144, 145, 151, 153, 157, 161, 165, 169, 170, 171], "api": [0, 6, 72, 78, 81, 83, 86, 141], "design": [0, 3, 6, 36, 80, 104, 137, 144], "modul": [0, 1, 3, 14, 18, 23, 34, 36, 40, 47, 58, 72, 74, 80, 86, 107, 131, 134, 135, 141, 149, 150, 151, 158, 162, 164, 170, 182], "predict": [0, 1, 13, 16, 18, 22, 23, 24, 25, 29, 33, 36, 39, 40, 42, 43, 47, 49, 51, 56, 58, 60, 61, 66, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 82, 83, 84, 86, 87, 89, 91, 92, 93, 94, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 115, 117, 118, 119, 120, 121, 122, 124, 125, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 140, 144, 146, 148, 150, 152, 155, 156, 157, 159, 160, 161, 163, 169, 173, 176, 180, 181, 184], "model": [0, 1, 10, 11, 13, 14, 15, 17, 18, 22, 23, 24, 25, 26, 27, 28, 29, 32, 33, 34, 35, 36, 37, 39, 40, 42, 43, 44, 47, 51, 53, 56, 58, 60, 61, 64, 66, 69, 71, 72, 73, 74, 76, 78, 79, 80, 83, 84, 85, 87, 88, 89, 90, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 106, 107, 110, 111, 113, 114, 116, 117, 119, 120, 122, 123, 124, 125, 127, 129, 130, 134, 135, 136, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 152, 154, 155, 156, 159, 160, 161, 162, 163, 169, 170, 176, 178, 180, 181, 182, 184], "pipelin": [0, 1, 3, 13, 22, 33, 34, 36, 39, 43, 47, 50, 56, 60, 66, 68, 71, 72, 73, 77, 82, 86, 87, 89, 93, 94, 95, 97, 98, 99, 100, 104, 107, 108, 116, 119, 124, 125, 126, 127, 129, 130, 131, 132, 134, 135, 137, 139, 140, 147, 148, 149, 150, 151, 153, 154, 155, 169, 178, 180, 181, 184], "us": [0, 14, 17, 18, 22, 23, 24, 26, 27, 28, 29, 32, 33, 34, 35, 36, 37, 38, 39, 40, 42, 43, 44, 47, 49, 56, 60, 64, 65, 66, 68, 69, 71, 72, 73, 74, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 127, 
128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 152, 154, 155, 156, 157, 158, 159, 161, 162, 163, 164, 171, 172, 176, 178, 181, 184], "follow": [0, 3, 13, 14, 17, 18, 22, 23, 27, 29, 33, 34, 40, 42, 44, 47, 49, 56, 58, 60, 66, 69, 71, 72, 73, 74, 78, 81, 82, 83, 85, 86, 93, 94, 95, 97, 98, 99, 101, 108, 110, 114, 115, 117, 119, 123, 130, 131, 132, 135, 136, 137, 138, 139, 140, 141, 144, 147, 148, 149, 150, 151, 153, 154, 155, 157, 161, 169, 170, 176, 178, 181, 182, 184], "paramet": [0, 6, 15, 18, 26, 27, 28, 29, 37, 40, 42, 44, 47, 53, 56, 58, 60, 66, 73, 77, 78, 79, 80, 82, 83, 84, 85, 87, 89, 93, 97, 98, 99, 102, 103, 108, 109, 110, 111, 113, 114, 117, 118, 119, 120, 122, 123, 126, 129, 130, 131, 134, 136, 138, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 162, 169, 176, 178, 180, 181, 184], "free": [0, 6, 36, 85, 114, 117, 123, 132, 184], "icon": [0, 36], "licens": [0, 36], "under": [0, 24, 29, 36, 40, 74, 93, 98, 103, 118, 131, 137, 140, 141, 144, 160, 169], "cc": [0, 36], "BY": [0, 36], "3": [0, 4, 18, 29, 36, 42, 47, 56, 60, 73, 74, 76, 80, 81, 82, 83, 84, 85, 91, 92, 93, 95, 97, 98, 99, 100, 102, 103, 104, 105, 106, 107, 109, 110, 114, 115, 116, 117, 118, 120, 121, 123, 125, 126, 129, 131, 132, 134, 137, 138, 139, 141, 142, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 159, 160, 161, 162, 163, 164, 169, 176, 181], "0": [0, 3, 4, 18, 29, 37, 42, 44, 47, 66, 73, 74, 76, 77, 80, 81, 82, 83, 84, 85, 86, 88, 89, 90, 91, 92, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 162, 163, 176, 178, 184], "sourc": [0, 117], "set": [0, 6, 18, 26, 29, 35, 37, 39, 40, 42, 44, 47, 51, 53, 56, 58, 60, 61, 64, 66, 73, 74, 77, 79, 80, 81, 82, 84, 85, 86, 87, 89, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 117, 118, 119, 120, 121, 122, 123, 124, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 144, 145, 146, 147, 148, 149, 151, 153, 154, 155, 156, 157, 160, 161, 162, 164, 174, 176, 178, 179, 180, 181, 184], "gear": 0, "svg": 0, "vector": [0, 42, 43, 93, 98, 100, 102, 115, 124, 127, 128, 131, 133, 137, 140, 141], "cc0": 0, "close": [0, 3, 29, 44, 49, 74, 82, 97, 100, 101, 102, 107, 108, 115, 122, 123, 131, 132, 135, 136, 137, 139, 144, 149, 151, 153, 156], "mit": 0, "thi": [1, 13, 14, 18, 22, 23, 27, 29, 33, 34, 36, 39, 40, 43, 47, 49, 50, 56, 58, 60, 71, 72, 73, 74, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 169, 170, 173, 176, 178, 181, 182, 184], "collect": [1, 3, 6, 29, 74, 77, 94, 99, 102, 118, 125, 146], "inform": [1, 6, 24, 29, 47, 73, 74, 77, 81, 85, 86, 92, 95, 96, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 115, 117, 118, 125, 126, 132, 137, 139, 141, 142, 145, 149, 150, 153, 155, 157], "relat": [1, 3, 14, 18, 23, 34, 40, 56, 58, 72, 74, 86, 135, 137, 141, 
170, 181, 182], "person": [1, 6, 64, 74, 80, 106, 141, 149], "task": [1, 6, 42, 73, 74, 80, 91, 102, 106, 131, 139, 171], "whether": [1, 3, 25, 64, 73, 74, 77, 82, 86, 88, 90, 91, 93, 98, 102, 103, 106, 108, 141, 146, 149, 151, 155, 159, 160, 163], "earn": [1, 74, 149], "salari": [1, 64, 107], "abov": [1, 3, 6, 18, 29, 47, 73, 74, 78, 82, 83, 91, 97, 100, 102, 103, 107, 108, 110, 115, 116, 128, 132, 133, 134, 138, 139, 140, 141, 142, 144, 145, 149, 151, 155, 157, 159, 161, 162, 163, 176, 178, 184], "below": [1, 3, 18, 47, 60, 77, 84, 85, 91, 107, 108, 116, 122, 128, 133, 138, 141, 144, 153, 157, 159, 162, 163, 176, 178, 180, 184], "50": [1, 18, 29, 33, 60, 74, 78, 80, 82, 83, 92, 99, 100, 102, 103, 104, 105, 106, 107, 113, 115, 116, 117, 118, 119, 122, 129, 132, 134, 144, 149, 150, 152, 153], "k": [1, 3, 25, 60, 77, 80, 81, 82, 92, 96, 102, 103, 109, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 125, 126, 127, 144, 146, 148, 149, 151, 152, 155, 162, 181], "we": [1, 3, 13, 14, 18, 22, 23, 25, 28, 29, 33, 34, 37, 39, 40, 42, 47, 49, 53, 56, 58, 60, 64, 66, 69, 71, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 169, 170, 176, 178, 180, 181, 184], "extens": [1, 102], "explor": [1, 6, 47, 75, 76, 80, 82, 85, 98, 112, 114, 121, 123, 131, 132, 137, 148, 149, 151, 152, 153, 155, 164, 184], "first": [1, 8, 9, 29, 46, 48, 50, 52, 54, 56, 62, 63, 64, 65, 67, 71, 75, 76, 77, 78, 79, 80, 82, 83, 84, 85, 86, 87, 88, 89, 90, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 114, 115, 116, 117, 118, 119, 121, 123, 124, 125, 126, 127, 129, 130, 131, 132, 134, 136, 137, 139, 140, 141, 142, 143, 144, 145, 146, 149, 151, 156, 157, 159, 160, 161, 162, 163, 164, 165, 181], "sequenc": [1, 85, 115, 147, 154], "tabular": [1, 6, 64, 71, 74, 81, 86, 164], "data": [1, 18, 20, 22, 23, 24, 25, 28, 29, 36, 39, 40, 43, 44, 47, 58, 60, 66, 69, 71, 72, 73, 75, 76, 78, 79, 83, 84, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 129, 131, 132, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 172, 176, 178, 181, 184], "notebook": [1, 18, 23, 36, 47, 64, 73, 76, 78, 79, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 97, 99, 100, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 113, 115, 116, 117, 118, 119, 120, 122, 125, 126, 129, 131, 132, 134, 136, 138, 139, 140, 141, 142, 144, 145, 146, 148, 149, 150, 151, 152, 153, 155, 156, 157, 158, 160, 161, 162, 163, 164, 178], "look": [1, 6, 29, 63, 64, 66, 75, 76, 78, 80, 81, 83, 85, 95, 96, 98, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 117, 129, 130, 132, 134, 135, 137, 138, 140, 141, 149, 156, 157, 164, 178, 184], "our": [1, 6, 18, 25, 29, 42, 47, 63, 64, 71, 77, 79, 80, 81, 82, 84, 86, 87, 89, 91, 92, 93, 95, 96, 97, 98, 100, 101, 102, 103, 104, 105, 106, 108, 109, 110, 113, 114, 115, 117, 122, 123, 124, 125, 127, 128, 130, 131, 132, 133, 135, 136, 137, 138, 139, 141, 142, 144, 145, 150, 152, 156, 157, 161, 163, 
164, 184], "To": [1, 3, 8, 9, 29, 37, 46, 47, 48, 50, 52, 54, 62, 67, 73, 74, 80, 81, 82, 86, 87, 89, 93, 95, 96, 97, 98, 100, 101, 102, 103, 108, 109, 110, 113, 116, 117, 122, 125, 129, 131, 132, 134, 135, 136, 137, 140, 141, 144, 153, 155, 158, 161, 165, 176], "avoid": [1, 3, 6, 40, 47, 73, 74, 97, 108, 109, 110, 113, 121, 122, 127, 129, 134, 135, 136, 137, 149, 152, 153, 155], "repeat": [1, 3, 18, 29, 47, 58, 60, 77, 79, 84, 87, 89, 95, 96, 97, 100, 102, 125, 135, 140, 142, 145, 148, 155, 158, 159, 161, 162, 163, 184], "same": [1, 3, 18, 26, 29, 37, 40, 44, 47, 66, 74, 79, 80, 81, 82, 84, 85, 86, 90, 95, 96, 97, 99, 100, 102, 103, 104, 108, 109, 110, 115, 116, 125, 128, 129, 131, 132, 133, 134, 136, 137, 138, 140, 141, 144, 146, 152, 153, 161, 162, 176, 184], "redirect": 1, "reader": [1, 74, 110, 137, 139, 144], "particular": [1, 3, 6, 27, 71, 74, 77, 78, 80, 81, 83, 86, 90, 92, 97, 98, 102, 119, 131, 135, 139, 141, 144, 149, 151, 152, 155, 162], "penguin": [2, 18, 75, 76, 109, 112, 121, 128, 129, 130, 133, 134, 135, 136, 138, 139, 156, 158, 159, 161, 162, 163, 164, 184], "adult": [2, 64, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 94, 99, 119, 147, 149, 150, 151, 153, 154, 164], "censu": [2, 64, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 94, 99, 102, 107, 119, 147, 149, 150, 151, 153, 154, 164], "california": [2, 92, 102, 104, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 164], "hous": [2, 3, 49, 73, 91, 92, 96, 102, 103, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 132, 143, 144, 146, 164], "am": [2, 132, 143, 144, 146, 164], "blood": [2, 93, 98, 141, 142, 145, 164], "transfus": [2, 93, 98, 141, 142, 145, 164], "bike": [2, 29, 164], "ride": [2, 29, 164], "aim": [3, 36, 80, 93, 98, 101, 102, 109, 111, 112, 113, 114, 118, 120, 121, 122, 123, 124, 125, 127, 128, 129, 133, 134, 138, 141, 142, 145, 158, 159, 162, 163], "describ": [3, 69, 80, 81, 82, 92, 107, 184], "For": [3, 6, 36, 42, 53, 58, 71, 74, 77, 80, 81, 82, 85, 86, 91, 92, 95, 97, 99, 102, 103, 105, 107, 108, 112, 113, 115, 117, 119, 121, 122, 124, 125, 126, 127, 129, 131, 132, 134, 135, 137, 138, 139, 141, 144, 146, 149, 150, 151, 153, 156, 160, 162, 163, 172, 176, 181], "you": [3, 6, 14, 18, 23, 29, 34, 36, 40, 47, 50, 58, 60, 66, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 96, 98, 99, 100, 102, 103, 105, 107, 108, 109, 111, 112, 113, 114, 116, 117, 118, 119, 120, 121, 122, 123, 124, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 149, 150, 152, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 170, 174, 176, 178, 180, 182, 184], "don": [3, 6, 78, 83, 93, 98, 102, 106, 124, 127, 132, 149], "t": [3, 6, 78, 83, 85, 93, 95, 98, 101, 102, 106, 108, 116, 123, 124, 127, 132, 141, 144, 145, 149], "find": [3, 6, 49, 58, 74, 80, 81, 82, 85, 93, 94, 97, 98, 99, 101, 103, 111, 112, 114, 120, 121, 123, 126, 127, 129, 130, 131, 132, 134, 135, 136, 140, 147, 148, 149, 151, 153, 154, 155, 156, 158, 159, 162, 163, 169, 178], "ad": [3, 18, 40, 47, 53, 93, 96, 98, 108, 113, 117, 119, 122, 137, 140, 149, 153], "bottom": [3, 74, 141], "page": [3, 36, 80, 82, 83, 86, 91, 97, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "acronym": [3, 101], "stand": [3, 81, 110, 178], "applic": [3, 6, 81, 95, 101, 141, 144, 160], "program": [3, 18, 29, 36, 47, 60, 71, 73, 81, 95, 124, 127, 176, 184], "interfac": [3, 81], "It": [3, 6, 29, 36, 58, 74, 78, 81, 82, 83, 85, 86, 95, 96, 97, 98, 100, 101, 
102, 103, 104, 105, 106, 107, 108, 110, 115, 116, 118, 119, 123, 124, 127, 131, 132, 135, 138, 141, 142, 144, 145, 146, 149, 151, 152, 153, 156, 160, 161, 162], "can": [3, 6, 14, 15, 18, 22, 23, 28, 29, 34, 36, 39, 40, 44, 47, 53, 56, 58, 60, 66, 69, 71, 72, 73, 74, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 125, 126, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 148, 149, 150, 151, 152, 153, 155, 156, 157, 158, 159, 160, 161, 162, 163, 170, 171, 173, 176, 178, 181, 182, 184], "have": [3, 25, 29, 42, 47, 49, 53, 58, 60, 66, 69, 74, 77, 79, 80, 81, 82, 84, 85, 89, 92, 96, 97, 100, 101, 102, 104, 105, 106, 107, 108, 109, 112, 115, 116, 117, 118, 119, 121, 122, 124, 125, 127, 129, 131, 132, 133, 134, 137, 138, 139, 140, 141, 144, 147, 149, 150, 151, 152, 153, 154, 155, 156, 157, 160, 161, 162, 169, 176, 178, 181, 182, 184], "slightli": [3, 16, 74, 83, 85, 86, 102, 105, 109, 110, 117, 118, 119, 122, 135], "differ": [3, 6, 15, 16, 18, 22, 25, 29, 37, 42, 44, 47, 53, 58, 60, 64, 66, 69, 73, 74, 75, 76, 77, 81, 82, 85, 86, 90, 92, 93, 95, 96, 97, 98, 100, 101, 102, 104, 105, 106, 107, 108, 109, 110, 115, 116, 117, 119, 127, 128, 131, 132, 133, 134, 135, 136, 137, 142, 144, 145, 146, 148, 150, 151, 152, 153, 155, 156, 157, 160, 161, 176, 184], "mean": [3, 6, 28, 29, 47, 60, 73, 74, 77, 78, 80, 81, 82, 83, 84, 85, 86, 88, 89, 90, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 127, 129, 131, 132, 133, 134, 136, 137, 138, 139, 141, 143, 144, 145, 146, 149, 150, 151, 153, 154, 155, 156, 161, 162, 173, 184], "context": [3, 6, 15, 101, 139, 141], "some": [3, 6, 18, 22, 23, 29, 33, 36, 47, 60, 64, 66, 71, 73, 74, 75, 76, 77, 79, 80, 81, 82, 84, 85, 87, 89, 91, 95, 97, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 112, 114, 115, 116, 117, 118, 119, 121, 123, 124, 126, 127, 131, 132, 135, 137, 138, 139, 140, 144, 146, 149, 151, 152, 153, 155, 156, 160, 161, 163, 176, 184], "case": [3, 29, 33, 49, 74, 77, 80, 82, 85, 86, 88, 90, 91, 92, 94, 95, 97, 98, 99, 100, 101, 102, 103, 108, 115, 117, 119, 124, 127, 129, 131, 132, 134, 135, 138, 139, 140, 141, 142, 144, 145, 146, 148, 150, 151, 153, 155, 156], "an": [3, 16, 22, 23, 26, 28, 35, 36, 40, 42, 43, 47, 49, 51, 53, 56, 58, 69, 71, 72, 73, 74, 77, 81, 82, 84, 86, 87, 88, 89, 90, 91, 93, 95, 96, 97, 98, 100, 101, 102, 105, 106, 107, 108, 110, 113, 115, 116, 117, 118, 119, 122, 124, 125, 127, 129, 130, 131, 132, 133, 134, 135, 136, 137, 139, 140, 141, 142, 144, 145, 146, 147, 149, 150, 151, 154, 155, 156, 160, 161, 162, 169, 172, 176, 178, 180, 181, 182, 184], "onlin": [3, 92, 94, 99], "servic": [3, 36, 86], "access": [3, 15, 29, 36, 47, 58, 78, 83, 93, 98, 102, 108, 110, 112, 121, 132, 135, 139, 184], "remot": 3, "In": [3, 6, 15, 22, 23, 29, 34, 36, 37, 39, 40, 42, 44, 47, 58, 64, 72, 74, 77, 78, 80, 81, 82, 83, 84, 85, 86, 87, 89, 91, 92, 93, 94, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 114, 115, 116, 117, 118, 119, 123, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 162, 163, 170, 176, 178, 181], "both": [3, 15, 16, 18, 22, 29, 39, 40, 47, 53, 64, 73, 74, 76, 81, 82, 85, 86, 87, 88, 
89, 90, 94, 95, 96, 97, 98, 99, 100, 103, 104, 108, 109, 113, 115, 116, 119, 122, 124, 125, 127, 128, 131, 132, 133, 135, 137, 139, 140, 141, 142, 144, 145, 149, 151, 155, 156, 157, 159, 160, 163, 169, 170, 176, 178, 184], "itself": [3, 18, 42, 82, 95, 97, 98, 102, 106, 110, 144, 151, 181], "technic": [3, 13, 22, 33, 36, 39, 56, 71, 81, 169, 181], "specif": [3, 13, 22, 23, 37, 64, 81, 82, 87, 88, 89, 90, 95, 102, 103, 105, 107, 108, 109, 115, 116, 119, 125, 132, 141, 149, 150, 152, 156, 160, 161, 176], "peopl": [3, 6, 74, 95, 102, 107, 108, 141], "who": [3, 6, 86, 141], "write": [3, 36, 75, 78, 79, 87, 88, 90, 93, 94, 95, 111, 112, 113, 114, 124, 128, 129, 130, 137, 142, 143, 147, 148, 154, 158, 159], "client": 3, "connect": 3, "offlin": 3, "librari": [3, 36, 71, 74, 155], "scikit": [3, 13, 14, 22, 23, 24, 27, 28, 33, 34, 37, 38, 39, 40, 42, 56, 58, 60, 64, 66, 68, 71, 72, 77, 78, 80, 82, 83, 85, 86, 87, 89, 94, 99, 100, 102, 107, 109, 112, 114, 116, 119, 121, 123, 124, 127, 130, 131, 132, 137, 139, 141, 142, 144, 145, 146, 149, 151, 162, 164, 169, 170, 172, 173, 179, 181, 182, 184], "list": [3, 29, 36, 47, 60, 85, 88, 90, 100, 108, 110, 111, 113, 120, 122, 132, 142, 143, 145, 146, 150, 184], "all": [3, 15, 16, 17, 18, 24, 26, 27, 28, 29, 37, 42, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 77, 78, 80, 81, 82, 83, 85, 88, 90, 92, 94, 95, 96, 97, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 116, 117, 118, 119, 125, 131, 132, 133, 135, 137, 138, 141, 144, 146, 148, 149, 150, 151, 152, 155, 156, 159, 161, 162, 163, 171, 172, 174, 176, 178, 180, 184], "public": 3, "function": [3, 6, 18, 29, 36, 43, 51, 58, 60, 66, 73, 77, 80, 81, 82, 85, 94, 99, 102, 103, 104, 107, 108, 109, 110, 114, 115, 123, 124, 127, 128, 130, 131, 133, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 149, 151, 152, 156, 158, 161, 162, 173, 184], "class": [3, 15, 25, 27, 42, 60, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 95, 98, 99, 100, 104, 105, 106, 107, 108, 109, 110, 116, 119, 130, 131, 132, 135, 137, 138, 139, 140, 142, 145, 146, 147, 149, 150, 151, 153, 154, 156, 158, 160, 172, 184], "method": [3, 4, 6, 14, 42, 60, 73, 74, 75, 76, 80, 81, 82, 86, 95, 101, 108, 111, 115, 118, 119, 120, 126, 129, 131, 132, 134, 137, 139, 140, 141, 144, 150, 151, 153, 164, 184], "along": [3, 74, 77, 85, 87, 89, 104, 128, 133, 144, 156], "document": [3, 4, 6, 15, 60, 78, 80, 81, 82, 83, 85, 87, 89, 92, 94, 95, 99, 137, 141, 142, 143, 145, 146, 150], "via": [3, 29, 36, 77, 85, 97, 103, 107, 114, 115, 116, 118, 119, 123, 125, 139, 149, 150, 160], "docstr": [3, 144], "brows": 3, "http": [3, 6, 36, 74, 81, 95, 102, 107], "org": [3, 74, 80, 81, 82, 83, 86, 91, 97, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "stabl": [3, 108, 110, 132, 176, 184], "html": [3, 80, 82, 83, 86, 91, 97, 102, 107, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "try": [3, 6, 29, 47, 74, 80, 82, 83, 86, 87, 89, 91, 97, 102, 105, 109, 110, 115, 117, 119, 129, 131, 132, 134, 135, 136, 137, 140, 141, 142, 144, 145, 146, 147, 149, 151, 153, 154, 155, 156, 162, 163, 176, 181], "adopt": [3, 131, 135], "simpl": [3, 6, 47, 74, 79, 84, 93, 98, 104, 109, 110, 118, 119, 132, 135, 137, 138, 139, 150, 156, 162, 164], "convent": [3, 74, 81, 82, 137], "limit": [3, 18, 29, 32, 34, 42, 56, 58, 61, 80, 86, 102, 103, 110, 131, 132, 137, 140, 144, 153, 157, 159, 163, 164], "minimum": [3, 117, 135, 141, 153, 159, 160, 161, 163], "number": [3, 6, 16, 17, 18, 29, 37, 42, 44, 
47, 49, 53, 60, 69, 73, 74, 77, 78, 80, 81, 82, 83, 85, 86, 87, 89, 93, 95, 96, 98, 100, 102, 103, 104, 105, 106, 107, 108, 109, 110, 113, 114, 115, 116, 117, 119, 122, 123, 125, 126, 127, 129, 131, 132, 133, 134, 135, 137, 141, 143, 144, 146, 148, 149, 151, 152, 153, 155, 156, 160, 161, 171, 176, 178, 182, 184], "object": [3, 6, 29, 47, 58, 78, 80, 81, 82, 83, 85, 86, 87, 88, 89, 90, 102, 104, 105, 106, 119, 132, 139, 141, 142, 145, 147, 149, 150, 151, 153, 154, 155, 156, 161, 176], "must": [3, 6, 44, 108, 132, 151, 184], "implement": [3, 90, 100, 109, 110, 116, 136, 139, 142, 145, 149, 152], "furthermor": [3, 47, 102, 131, 132, 134, 139], "tri": [3, 6, 66, 105, 109, 132, 135], "consist": [3, 40, 47, 73, 77, 81, 102, 129, 134, 137, 146], "name": [3, 15, 29, 60, 74, 76, 80, 81, 82, 83, 84, 85, 86, 92, 93, 98, 99, 100, 102, 104, 105, 106, 107, 108, 110, 115, 117, 120, 123, 129, 132, 134, 135, 138, 139, 141, 149, 150, 152, 153, 180, 184], "categori": [3, 69, 73, 74, 80, 86, 87, 89, 104, 106, 119, 132, 149, 157], "e": [3, 6, 18, 29, 47, 60, 64, 66, 69, 71, 72, 73, 74, 80, 81, 82, 84, 85, 86, 89, 95, 96, 97, 100, 101, 102, 108, 115, 116, 117, 118, 125, 129, 132, 134, 135, 136, 139, 141, 144, 146, 147, 149, 153, 154, 155, 156, 176, 178, 180, 184], "g": [3, 6, 18, 29, 73, 74, 85, 86, 95, 97, 101, 108, 112, 117, 118, 121, 125, 128, 129, 132, 133, 134, 135, 136, 138, 139, 147, 149, 153, 154, 155, 157, 159, 160, 161, 163, 176, 184], "expos": [3, 82, 85, 119, 141], "fit_transform": [3, 66, 82, 85, 86, 104, 116, 127, 137], "accept": [3, 85, 102], "similar": [3, 44, 47, 60, 66, 78, 82, 83, 85, 88, 90, 104, 110, 117, 125, 129, 131, 132, 134, 137, 140, 141, 146, 149, 150, 151, 152, 153, 156, 184], "argument": [3, 47, 60, 78, 82, 83, 85, 87, 89, 105, 184], "type": [3, 14, 23, 29, 43, 47, 50, 71, 72, 74, 80, 81, 87, 88, 89, 90, 102, 104, 105, 106, 107, 110, 124, 127, 130, 141, 144, 176], "shape": [3, 42, 43, 74, 80, 81, 85, 104, 105, 107, 108, 109, 110, 131, 137, 139, 144, 156, 161], "those": [3, 15, 28, 56, 80, 94, 99, 103, 110, 116, 132, 137, 144, 149, 151, 155, 162, 173, 180, 184], "problem": [3, 15, 18, 22, 29, 39, 40, 44, 49, 50, 56, 60, 64, 73, 74, 79, 84, 85, 87, 89, 91, 93, 95, 98, 99, 100, 102, 105, 106, 107, 115, 125, 131, 132, 136, 137, 138, 139, 141, 144, 149, 151, 152, 155, 156, 157, 160, 161, 169, 170, 176, 184], "where": [3, 6, 29, 40, 42, 43, 44, 60, 69, 73, 77, 82, 87, 89, 93, 95, 98, 100, 102, 107, 108, 113, 119, 122, 125, 131, 132, 135, 138, 139, 140, 144, 149, 151, 153, 155, 156, 160, 178, 184], "goal": [3, 25, 36, 74, 77, 78, 79, 83, 84, 87, 88, 89, 90, 91, 95, 117, 118, 119, 125, 144, 147, 148, 154, 155, 184], "take": [3, 16, 29, 47, 74, 77, 80, 81, 82, 86, 90, 92, 94, 95, 97, 99, 101, 102, 104, 105, 106, 107, 110, 128, 133, 138, 141, 149, 152, 153, 156, 164, 176, 178], "finit": [3, 64, 69, 74, 85], "valu": [3, 13, 15, 18, 24, 28, 29, 36, 37, 40, 42, 44, 47, 49, 60, 64, 66, 69, 73, 74, 78, 80, 83, 85, 86, 87, 89, 92, 95, 97, 98, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 113, 114, 115, 116, 117, 118, 119, 121, 122, 123, 125, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 144, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 159, 160, 161, 162, 163, 172, 173, 176, 178, 180, 181, 184], "exampl": [3, 6, 11, 14, 23, 34, 40, 49, 58, 71, 72, 74, 77, 79, 82, 84, 85, 86, 91, 92, 95, 100, 101, 103, 105, 109, 110, 116, 119, 125, 132, 133, 137, 138, 140, 144, 146, 149, 150, 160, 162, 164, 170, 181, 182], "ar": [3, 13, 14, 15, 16, 17, 18, 22, 23, 25, 27, 
28, 29, 33, 34, 36, 37, 39, 40, 43, 44, 47, 49, 53, 56, 58, 60, 66, 69, 71, 72, 73, 74, 75, 76, 77, 78, 80, 81, 82, 83, 85, 86, 89, 90, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 115, 116, 117, 118, 119, 120, 123, 125, 126, 127, 129, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 144, 146, 148, 149, 150, 151, 152, 153, 154, 155, 156, 160, 161, 162, 163, 169, 170, 172, 176, 178, 180, 181, 182, 184], "iri": [3, 100], "setosa": 3, "versicolor": 3, "virginica": 3, "from": [3, 6, 14, 18, 24, 25, 29, 36, 37, 44, 47, 49, 56, 58, 60, 74, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 130, 131, 132, 134, 135, 136, 137, 138, 139, 140, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 171, 173, 176, 178, 180, 181, 184], "petal": 3, "sepal": 3, "measur": [3, 6, 18, 29, 66, 73, 75, 76, 77, 80, 81, 105, 108, 122, 125, 128, 133, 138, 141, 151, 156, 157], "patient": [3, 6, 25, 129, 134], "ha": [3, 25, 37, 43, 44, 47, 64, 71, 73, 74, 80, 81, 82, 84, 85, 86, 90, 95, 99, 102, 103, 104, 107, 108, 109, 110, 116, 117, 122, 129, 131, 132, 134, 135, 136, 138, 141, 142, 144, 145, 148, 150, 153, 155, 156, 162, 176, 178, 184], "diseas": [3, 6, 25, 74, 129, 134], "result": [3, 29, 66, 74, 77, 79, 80, 81, 82, 84, 85, 86, 89, 90, 92, 94, 95, 97, 99, 100, 101, 102, 103, 110, 113, 114, 115, 117, 118, 119, 122, 123, 124, 125, 126, 127, 129, 131, 132, 134, 135, 136, 137, 141, 144, 149, 150, 151, 153, 155, 156, 162, 164, 177, 178, 180, 184], "medic": [3, 6, 25, 74], "email": 3, "spam": 3, "content": [3, 80, 86, 104, 146, 149], "sender": 3, "titl": [3, 92, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 122, 123, 125, 126, 131, 132, 134, 135, 136, 137, 138, 139, 140, 141, 145, 152, 156, 160, 161, 162, 163], "etc": [3, 25, 29, 36, 74, 85, 97, 105, 118, 129, 134], "when": [3, 6, 16, 17, 18, 22, 23, 27, 28, 29, 35, 40, 44, 53, 58, 60, 66, 73, 74, 77, 80, 81, 82, 85, 86, 87, 89, 93, 95, 96, 97, 98, 99, 100, 101, 102, 104, 105, 106, 108, 109, 113, 115, 116, 117, 119, 122, 123, 124, 125, 126, 127, 129, 131, 132, 134, 135, 136, 138, 139, 140, 141, 146, 149, 150, 151, 153, 156, 157, 173, 178, 181, 182], "two": [3, 13, 14, 29, 37, 47, 64, 66, 73, 74, 75, 76, 80, 81, 82, 85, 86, 95, 97, 99, 100, 102, 103, 104, 106, 108, 115, 117, 118, 125, 126, 127, 128, 131, 132, 133, 135, 136, 137, 139, 140, 141, 147, 149, 151, 152, 153, 154, 155, 156, 157, 159, 161, 162, 163, 176, 184], "call": [3, 13, 14, 29, 44, 58, 60, 66, 69, 73, 74, 80, 81, 82, 85, 86, 87, 89, 92, 93, 96, 98, 99, 102, 103, 106, 109, 115, 116, 117, 118, 119, 127, 129, 131, 132, 134, 136, 137, 138, 139, 141, 142, 144, 145, 146, 147, 149, 151, 153, 154, 181], "binari": [3, 6, 42, 60, 74, 91, 99, 139, 141, 171, 184], "least": [3, 100, 117, 135, 160, 184], "three": [3, 77, 86, 99, 100, 103, 107, 108, 109, 112, 121, 131, 156, 157], "multi": [3, 125, 129, 134, 139, 152], "illustr": [3, 33, 58, 77, 81, 82, 85, 94, 97, 99, 100, 102, 108, 119, 131, 137, 140, 141, 151, 156, 157, 160, 161], "provid": [3, 37, 73, 80, 82, 85, 86, 102, 105, 107, 111, 114, 115, 116, 120, 123, 127, 132, 135, 136, 138, 139, 140, 141, 142, 145, 146, 149, 151, 184], "user": [3, 6, 77, 86, 91, 103, 117, 118, 119, 131, 132, 134, 139, 144, 149, 153, 181, 184], "contain": [3, 18, 29, 43, 47, 
60, 64, 66, 73, 74, 77, 80, 81, 85, 86, 87, 89, 95, 100, 104, 105, 106, 107, 112, 119, 121, 124, 126, 127, 132, 137, 138, 139, 140, 141, 149, 153, 159, 161, 163, 176, 184], "2": [3, 4, 18, 28, 29, 37, 42, 43, 47, 49, 66, 71, 74, 76, 77, 80, 81, 82, 83, 85, 91, 92, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 109, 110, 113, 115, 116, 117, 118, 119, 120, 122, 123, 125, 126, 127, 129, 131, 132, 133, 134, 137, 138, 139, 140, 141, 143, 144, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 162, 164, 176, 184], "repres": [3, 6, 64, 66, 69, 73, 74, 80, 81, 82, 85, 86, 100, 104, 106, 108, 115, 116, 128, 132, 133, 137, 138, 139, 140, 141, 144, 156, 162], "x": [3, 24, 28, 29, 42, 43, 66, 74, 80, 81, 82, 105, 107, 108, 109, 110, 115, 121, 128, 129, 131, 133, 134, 135, 136, 137, 138, 139, 140, 148, 149, 152, 155, 156, 157, 160, 161, 162, 163, 178], "y": [3, 28, 29, 42, 66, 74, 80, 81, 82, 100, 107, 108, 109, 110, 115, 121, 128, 131, 133, 135, 136, 137, 138, 139, 140, 144, 152, 156, 157, 160, 161, 162, 163, 178], "axi": [3, 18, 29, 76, 92, 99, 100, 104, 107, 108, 110, 123, 125, 126, 131, 132, 135, 137, 138, 140, 141, 144, 149, 152, 153, 155, 156, 160, 178], "becaus": [3, 6, 18, 73, 74, 77, 78, 81, 82, 83, 85, 86, 87, 89, 99, 100, 101, 102, 103, 108, 110, 119, 122, 125, 131, 132, 137, 142, 144, 145, 146, 150, 151, 153, 155, 157, 161], "onli": [3, 6, 18, 29, 37, 42, 47, 49, 64, 66, 69, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 87, 88, 89, 90, 91, 93, 94, 96, 97, 98, 99, 100, 102, 105, 106, 109, 115, 117, 119, 125, 127, 129, 130, 131, 132, 134, 135, 138, 139, 140, 141, 142, 144, 145, 149, 150, 151, 155, 156, 160, 173, 176, 180, 182], "here": [3, 29, 60, 74, 75, 78, 79, 80, 81, 82, 85, 86, 87, 88, 89, 92, 93, 94, 98, 101, 103, 105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 124, 126, 127, 128, 129, 130, 132, 133, 135, 137, 138, 139, 140, 141, 142, 143, 147, 148, 149, 150, 151, 154, 157, 158, 159, 161], "encod": [3, 6, 29, 47, 68, 69, 80, 86, 87, 89, 104, 105, 107, 116, 119, 131, 132, 139, 164, 176], "color": [3, 74, 97, 107, 108, 109, 110, 115, 121, 123, 125, 126, 131, 132, 133, 135, 136, 137, 138, 139, 141, 145, 152, 155, 156, 160, 161, 162, 163, 178], "blue": [3, 74, 77, 95, 102, 109, 110, 131, 135, 139, 140, 141, 151, 156, 160, 162], "orang": [3, 74, 110, 115, 141, 156, 161, 162], "point": [3, 6, 15, 74, 78, 80, 82, 83, 90, 95, 100, 105, 106, 107, 108, 110, 116, 117, 119, 122, 131, 132, 135, 137, 139, 140, 141, 144, 151, 153, 156, 160, 162], "thu": [3, 6, 29, 37, 61, 80, 82, 86, 93, 95, 97, 98, 101, 102, 103, 105, 106, 107, 108, 109, 113, 116, 117, 118, 119, 122, 124, 127, 131, 132, 138, 139, 140, 144, 149, 150, 151, 152, 155, 156, 160, 161, 176], "each": [3, 6, 18, 25, 29, 36, 40, 47, 49, 66, 69, 74, 75, 76, 77, 80, 81, 82, 85, 86, 94, 95, 97, 99, 100, 101, 102, 104, 105, 106, 107, 108, 109, 110, 112, 113, 114, 115, 116, 117, 118, 119, 121, 122, 123, 125, 126, 128, 131, 132, 133, 137, 138, 139, 141, 147, 149, 150, 151, 153, 154, 155, 156, 157, 159, 160, 161, 162, 163, 172, 176, 178, 184], "entir": [3, 35, 77, 85, 86, 106, 107, 117, 122, 124, 127, 135, 176], "wa": [3, 74, 81, 82, 90, 91, 95, 97, 98, 100, 102, 104, 107, 109, 110, 115, 119, 125, 128, 132, 133, 135, 137, 141, 151, 156, 158, 161, 162], "linear": [3, 6, 13, 15, 16, 29, 37, 39, 40, 42, 43, 44, 47, 53, 58, 74, 80, 82, 85, 86, 87, 88, 89, 90, 93, 95, 98, 107, 110, 118, 126, 128, 129, 133, 134, 143, 144, 146, 150, 156, 157, 159, 161, 163, 171, 173, 176, 181], "decis": [3, 6, 10, 
13, 15, 16, 18, 27, 35, 42, 58, 82, 86, 88, 90, 92, 96, 101, 102, 103, 109, 110, 112, 113, 116, 118, 119, 120, 121, 122, 126, 130, 131, 137, 139, 140, 141, 142, 145, 157, 158, 159, 162, 163, 169, 170, 171, 172, 173, 174, 176], "rule": [3, 80, 81, 118, 131, 132, 135, 139, 156, 172], "black": [3, 74, 80, 85, 92, 95, 97, 99, 102, 103, 104, 105, 106, 107, 109, 110, 115, 121, 123, 125, 126, 132, 133, 136, 137, 138, 145, 149, 153, 160, 161, 163], "dot": 3, "line": [3, 29, 43, 69, 74, 80, 108, 110, 115, 129, 131, 134, 135, 137, 139, 140, 141, 142, 144, 145, 152, 153, 156, 160, 161, 178, 184], "new": [3, 6, 29, 36, 40, 47, 74, 78, 80, 81, 82, 83, 85, 86, 87, 89, 93, 94, 95, 96, 98, 99, 101, 102, 108, 109, 112, 113, 115, 117, 121, 122, 129, 134, 137, 141, 147, 149, 150, 154, 159, 161, 162, 163], "accord": [3, 74, 135], "its": [3, 6, 18, 23, 29, 58, 61, 74, 79, 81, 82, 84, 85, 86, 93, 97, 98, 99, 101, 102, 103, 108, 111, 115, 117, 119, 120, 125, 131, 132, 135, 136, 138, 139, 141, 142, 144, 145, 148, 155, 156, 178], "posit": [3, 6, 27, 28, 29, 44, 47, 66, 103, 108, 110, 118, 132, 135, 138, 139, 141, 142, 144, 145, 153, 184], "respect": [3, 29, 42, 43, 85, 86, 90, 93, 98, 99, 101, 103, 117, 119, 131, 132, 134, 138, 139, 144, 146], "ly": [3, 132], "left": [3, 73, 74, 81, 92, 95, 99, 100, 101, 105, 107, 108, 109, 110, 115, 117, 118, 121, 127, 133, 135, 141, 151, 152, 156, 160, 162], "while": [3, 18, 29, 80, 81, 82, 85, 98, 102, 103, 105, 107, 108, 109, 110, 115, 116, 117, 119, 122, 125, 127, 129, 131, 132, 134, 135, 140, 141, 144, 152, 155, 160, 176], "right": [3, 74, 75, 76, 80, 85, 87, 89, 99, 108, 117, 122, 124, 127, 135, 141, 151, 160], "defin": [3, 18, 29, 36, 47, 56, 66, 73, 77, 82, 85, 86, 87, 89, 91, 94, 95, 99, 100, 101, 108, 109, 114, 123, 128, 129, 131, 132, 133, 134, 135, 137, 138, 140, 141, 147, 148, 149, 152, 153, 154, 155, 156, 158, 176, 178, 180, 181, 184], "higher": [3, 6, 27, 39, 43, 83, 84, 86, 97, 102, 104, 105, 108, 109, 118, 131, 132, 135, 136, 146, 151, 178], "dimens": [3, 43, 74, 131, 155], "would": [3, 18, 29, 49, 74, 77, 79, 80, 81, 84, 85, 87, 89, 97, 98, 99, 100, 101, 102, 104, 106, 107, 108, 110, 115, 116, 117, 119, 122, 125, 129, 131, 132, 134, 135, 136, 137, 138, 139, 141, 142, 144, 145, 148, 151, 155, 156, 160, 161], "hyperplan": 3, "howev": [3, 6, 22, 36, 80, 81, 82, 85, 86, 88, 90, 92, 95, 97, 99, 100, 102, 103, 104, 105, 106, 108, 109, 110, 113, 115, 116, 117, 118, 119, 122, 124, 125, 127, 131, 132, 133, 136, 137, 139, 141, 142, 144, 145, 146, 151, 153, 155, 156, 160, 161, 181], "depend": [3, 18, 22, 27, 28, 29, 37, 40, 77, 82, 85, 86, 97, 99, 101, 107, 108, 110, 117, 125, 129, 131, 132, 134, 135, 139, 141, 146, 149, 153, 156, 157], "A": [3, 6, 29, 36, 44, 53, 61, 64, 66, 74, 82, 86, 95, 97, 101, 102, 107, 116, 117, 132, 136, 137, 141, 144, 155, 161, 171, 172, 176, 182], "These": [3, 6, 23, 74, 77, 81, 85, 104, 119, 141, 150, 161, 181], "handl": [3, 71, 72, 80, 85, 86, 90, 104, 115, 137, 149, 164], "discret": [3, 85, 102, 106, 116, 139, 144], "1": [3, 4, 18, 27, 29, 42, 43, 44, 47, 49, 60, 66, 74, 76, 77, 80, 81, 82, 83, 85, 86, 88, 89, 90, 91, 92, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 109, 110, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 144, 145, 146, 147, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 162, 164, 176, 178, 184], "cat": [3, 91], "dog": 3, "logisticregress": [3, 44, 73, 77, 79, 80, 82, 84, 85, 86, 87, 89, 91, 95, 99, 100, 
127, 130, 131, 135, 139, 141, 146, 150, 156, 178, 180], "histgradientboostingclassifi": [3, 86, 88, 90, 116, 147, 149, 151, 152, 153, 154], "note": [3, 8, 9, 36, 46, 47, 48, 50, 52, 54, 62, 67, 74, 77, 80, 81, 82, 85, 86, 90, 92, 93, 97, 98, 99, 102, 103, 106, 107, 108, 110, 118, 119, 131, 135, 138, 139, 149, 150, 151, 155, 156, 161, 165, 176], "histor": 3, "reason": [3, 6, 18, 47, 58, 74, 76, 82, 88, 90, 97, 101, 102, 105, 108, 115, 117, 127, 131, 135, 141, 155], "confus": [3, 115, 150, 161, 181], "contrari": [3, 74, 85, 102, 108, 161], "what": [3, 6, 15, 24, 27, 28, 29, 35, 42, 43, 47, 49, 50, 58, 60, 73, 74, 75, 76, 78, 79, 80, 82, 83, 84, 86, 94, 97, 99, 100, 101, 102, 103, 108, 118, 119, 131, 134, 135, 136, 138, 139, 140, 141, 150, 151, 156, 161, 176, 178], "suggest": 3, "procedur": [3, 6, 18, 47, 77, 80, 97, 101, 102, 110, 117, 119, 122, 125, 126, 137, 151, 155, 171, 178, 180], "how": [3, 18, 22, 23, 26, 29, 33, 34, 39, 42, 47, 56, 58, 64, 69, 73, 74, 75, 76, 77, 79, 80, 81, 82, 84, 85, 86, 91, 92, 96, 98, 100, 101, 103, 108, 109, 110, 115, 117, 119, 129, 131, 132, 134, 135, 136, 141, 142, 145, 149, 150, 151, 152, 153, 155, 156, 158, 160, 161, 162, 169, 174, 178, 180, 181], "well": [3, 6, 13, 15, 22, 66, 76, 82, 95, 99, 100, 103, 106, 107, 108, 109, 110, 115, 117, 118, 126, 135, 141, 150, 153, 160], "idea": [3, 74, 91, 98, 108, 115, 151], "behind": [3, 6, 13, 22, 118], "dataset": [3, 16, 18, 25, 29, 35, 37, 39, 40, 42, 43, 47, 53, 56, 60, 63, 64, 66, 69, 71, 73, 75, 76, 77, 78, 79, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 164, 176, 178, 184], "evalu": [3, 18, 22, 23, 26, 29, 37, 44, 47, 56, 60, 65, 66, 74, 79, 80, 81, 84, 87, 88, 89, 90, 91, 92, 93, 95, 97, 98, 100, 101, 102, 106, 108, 110, 111, 113, 115, 116, 120, 122, 126, 127, 132, 136, 142, 143, 144, 145, 146, 147, 148, 149, 150, 153, 154, 155, 161, 176, 177, 181, 184], "separ": [3, 29, 41, 42, 44, 47, 64, 74, 76, 79, 80, 84, 86, 99, 104, 105, 106, 131, 135, 139, 151, 156, 160, 162, 164, 172, 176], "sever": [3, 13, 16, 23, 66, 77, 95, 97, 102, 108, 109, 110, 113, 115, 116, 118, 119, 122, 128, 131, 133, 135, 137, 140, 153], "time": [3, 18, 29, 47, 66, 74, 77, 80, 82, 85, 86, 87, 88, 89, 90, 91, 95, 97, 100, 101, 102, 103, 105, 106, 108, 110, 113, 115, 116, 117, 118, 119, 122, 125, 132, 135, 139, 141, 149, 152, 153, 156, 158, 162, 164, 171, 176], "get": [3, 6, 18, 34, 42, 47, 53, 60, 66, 74, 75, 76, 77, 78, 80, 81, 82, 83, 86, 87, 88, 89, 90, 95, 96, 97, 100, 101, 102, 103, 105, 107, 109, 110, 113, 114, 115, 122, 123, 124, 125, 127, 132, 135, 137, 141, 142, 143, 144, 145, 146, 149, 151, 152, 153, 155, 156, 159, 160, 161, 163, 164, 178, 179, 180], "s": [3, 4, 6, 17, 23, 25, 26, 29, 35, 36, 42, 47, 73, 74, 77, 78, 80, 81, 82, 83, 85, 86, 87, 88, 89, 90, 91, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 115, 117, 118, 120, 121, 125, 126, 129, 130, 131, 132, 133, 134, 135, 136, 137, 139, 140, 141, 144, 146, 149, 150, 151, 152, 153, 156, 157, 160, 161, 181, 184], "uncertainti": [3, 77, 86, 110, 124, 127, 151], "see": [3, 8, 9, 39, 46, 48, 50, 52, 54, 62, 67, 74, 80, 81, 82, 83, 84, 85, 87, 88, 89, 90, 91, 92, 95, 96, 98, 99, 100, 101, 104, 105, 106, 107, 108, 109, 110, 115, 116, 118, 120, 
122, 123, 125, 127, 131, 132, 134, 136, 137, 138, 139, 140, 141, 144, 146, 149, 150, 151, 152, 153, 155, 156, 157, 158, 160, 161, 162, 165], "more": [3, 18, 29, 36, 43, 44, 47, 53, 56, 60, 74, 77, 80, 81, 82, 85, 87, 89, 90, 92, 93, 96, 97, 98, 99, 103, 104, 105, 107, 108, 109, 110, 115, 116, 117, 118, 119, 125, 129, 131, 132, 134, 135, 136, 137, 138, 139, 140, 141, 142, 144, 145, 149, 150, 151, 152, 153, 155, 156, 157, 160, 161, 184], "detail": [3, 13, 18, 29, 39, 61, 74, 77, 78, 80, 81, 83, 85, 86, 87, 89, 93, 95, 98, 105, 107, 108, 115, 117, 118, 131, 137, 139, 141, 142, 145, 146, 149, 157, 169, 170, 184], "n_sampl": [3, 110, 115, 125, 126, 131, 137, 139, 140, 160], "row": [3, 18, 60, 64, 74, 85, 91, 102, 104, 107, 110, 129, 132, 134, 137, 149, 152, 153, 155, 184], "n_featur": [3, 43, 117, 119, 125, 126, 131, 137, 139, 140], "column": [3, 6, 18, 29, 37, 42, 43, 47, 60, 64, 69, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 87, 88, 89, 90, 91, 93, 94, 98, 99, 101, 102, 104, 105, 106, 107, 108, 110, 115, 117, 119, 120, 121, 123, 125, 126, 129, 131, 132, 134, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 149, 151, 152, 153, 154, 155, 160, 161, 163, 176, 184], "equal": [3, 6, 29, 42, 44, 60, 82, 100, 115, 132, 149, 150, 161], "flower": 3, "4": [3, 4, 18, 29, 73, 74, 76, 80, 81, 82, 83, 84, 85, 91, 92, 95, 98, 99, 100, 102, 104, 105, 106, 107, 108, 110, 115, 117, 120, 123, 125, 129, 131, 132, 134, 135, 137, 138, 141, 146, 149, 150, 151, 152, 153, 155, 157, 160, 162, 164, 176], "length": [3, 6, 18, 29, 75, 76, 80, 81, 104, 105, 109, 112, 121, 128, 129, 130, 133, 134, 135, 136, 138, 139, 141, 149, 153, 156, 157, 158, 159, 160, 161, 162, 163, 184], "width": [3, 116], "common": [3, 58, 77, 85, 101, 110, 132, 144, 162, 184], "math": [3, 104], "matric": [3, 85], "capit": [3, 74, 77, 79, 80, 81, 82, 84, 85, 86, 149, 150, 153], "letter": [3, 102, 107], "f": [3, 8, 9, 18, 29, 46, 48, 50, 52, 54, 62, 67, 74, 77, 80, 81, 82, 83, 84, 85, 86, 88, 89, 90, 91, 95, 97, 98, 100, 101, 102, 107, 108, 109, 110, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 127, 128, 132, 133, 134, 135, 136, 137, 138, 139, 141, 144, 145, 146, 149, 150, 151, 153, 154, 156, 160, 162, 165, 178], "iter": [3, 29, 66, 77, 82, 85, 100, 101, 102, 113, 117, 122, 132, 151, 152, 153], "optim": [3, 22, 23, 44, 64, 82, 93, 97, 98, 109, 114, 115, 116, 118, 123, 127, 129, 132, 134, 136, 141, 144, 146, 149, 150, 151, 153, 155, 160, 176, 182, 184], "befor": [3, 44, 47, 74, 78, 80, 82, 83, 85, 100, 102, 104, 108, 116, 119, 124, 127, 132, 138, 141, 144, 158], "converg": [3, 66, 82, 85], "algorithm": [3, 13, 16, 18, 24, 35, 74, 81, 82, 85, 95, 108, 109, 113, 115, 116, 117, 119, 122, 124, 125, 127, 129, 134, 137, 139, 156, 157], "over": [3, 15, 24, 29, 39, 40, 74, 93, 95, 97, 98, 101, 103, 108, 112, 116, 118, 121, 144, 147, 149, 154, 158, 160, 169, 172], "done": [3, 6, 73, 81, 89, 116, 117, 119, 127, 129, 131, 132, 134, 150, 151, 153, 156, 160], "monitor": [3, 6, 29], "score": [3, 18, 27, 28, 29, 37, 47, 58, 60, 66, 73, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 95, 96, 97, 98, 99, 102, 103, 107, 108, 113, 114, 115, 116, 117, 118, 120, 122, 123, 124, 125, 126, 127, 132, 134, 135, 137, 139, 141, 142, 143, 144, 145, 146, 147, 149, 150, 151, 152, 153, 154, 155, 156, 162, 174, 176, 178, 184], "jargon": [3, 81, 82], "onc": [3, 6, 18, 29, 80, 82, 90, 95, 100, 102, 110, 117, 119, 139, 141, 143, 146, 147, 148, 149, 152, 153, 154, 155, 158, 178], "quantiti": [3, 6, 85, 86, 105, 108], "size": [3, 49, 56, 58, 59, 85, 92, 
93, 98, 105, 107, 109, 110, 117, 127, 131, 156, 164], "weight": [3, 29, 40, 42, 44, 47, 80, 93, 95, 98, 108, 109, 115, 128, 129, 130, 132, 133, 134, 136, 137, 138, 139, 156, 171], "dure": [3, 13, 22, 25, 29, 33, 34, 39, 40, 56, 58, 72, 73, 74, 80, 85, 87, 89, 100, 102, 105, 109, 118, 125, 132, 136, 142, 145, 149, 151, 153, 156, 159, 161, 163, 169, 170, 181, 182], "four": [3, 105, 106, 141], "never": [3, 18, 56, 74, 80, 81, 85, 109, 110, 135, 144, 149, 153, 162, 178], "seen": [3, 25, 77, 79, 81, 82, 84, 85, 102, 135, 138, 141, 149, 150, 151, 159, 163], "aspect": [3, 6, 22, 34, 77, 82, 96, 126, 132, 171], "configur": [3, 82, 114, 123, 137, 149], "learnt": [3, 58, 96, 108, 137, 156], "nearest": [3, 60, 80, 81, 82, 103, 181, 184], "neighbor": [3, 60, 78, 80, 81, 82, 83, 103, 148, 155, 181, 184], "approach": [3, 6, 14, 23, 34, 40, 47, 58, 72, 73, 85, 100, 116, 131, 136, 139, 149, 151, 153, 155, 170, 182], "polynomi": [3, 43, 53, 103, 110, 131, 137], "sai": [3, 60, 108, 135, 156], "degre": [3, 43, 47, 53, 103, 108, 110, 129, 131, 132, 134, 137], "between": [3, 6, 13, 15, 18, 22, 29, 40, 47, 53, 56, 58, 60, 74, 76, 77, 80, 82, 85, 93, 97, 98, 101, 103, 105, 106, 108, 110, 112, 115, 117, 121, 124, 125, 127, 129, 131, 132, 133, 134, 135, 137, 138, 139, 141, 144, 152, 153, 155, 157, 160, 161, 169, 184], "10": [3, 4, 18, 25, 36, 42, 47, 60, 73, 74, 77, 78, 80, 83, 85, 90, 92, 94, 95, 96, 97, 98, 99, 102, 103, 104, 105, 106, 107, 108, 110, 113, 115, 116, 117, 118, 120, 122, 127, 129, 130, 131, 132, 134, 135, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 160, 162, 176, 178, 184], "impact": [3, 18, 44, 47, 56, 85, 87, 88, 89, 90, 96, 97, 103, 113, 117, 122, 130, 132, 144, 152, 155, 160, 178, 180, 181, 182, 184], "comput": [3, 6, 29, 47, 60, 66, 74, 77, 78, 81, 82, 83, 85, 86, 88, 90, 92, 93, 94, 96, 97, 98, 99, 100, 102, 103, 108, 109, 110, 113, 115, 116, 117, 118, 122, 124, 125, 127, 129, 131, 133, 134, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 148, 151, 152, 153, 155, 156, 158, 161, 162, 178], "inde": [3, 18, 29, 74, 80, 81, 82, 86, 90, 93, 95, 97, 98, 101, 102, 104, 105, 106, 107, 108, 109, 110, 115, 116, 117, 118, 124, 125, 126, 127, 132, 134, 135, 136, 137, 139, 140, 141, 142, 144, 145, 146, 150, 151, 153, 156, 157, 160, 161, 162], "usual": [3, 17, 80, 82, 86, 95, 101, 103, 105, 129, 134, 141, 152, 153], "inspect": [3, 6, 29, 60, 64, 82, 91, 102, 109, 114, 117, 123, 126, 130, 131, 135, 137, 139, 140, 149, 152, 153, 156, 158, 160, 162, 184], "regard": [3, 13, 14, 22, 24, 33, 34, 39, 42, 71, 73, 82, 85, 87, 89, 92, 93, 95, 96, 98, 100, 103, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 127, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 141, 142, 143, 144, 145, 146, 147, 154, 156, 158, 159, 160, 161, 162, 163], "tune": [3, 13, 17, 26, 35, 40, 43, 44, 47, 93, 97, 98, 103, 111, 118, 119, 120, 125, 127, 131, 135, 148, 152, 154, 155, 170, 176, 181, 184], "maxim": [3, 47, 95, 97, 132, 146, 147, 148, 149, 153, 154, 155, 156, 174, 181], "involv": [3, 6, 74, 99, 102, 150], "grid": [3, 97, 105, 114, 117, 118, 123, 132, 148, 151, 153, 155, 160, 164, 174, 176, 177, 178, 182, 184], "search": [3, 47, 96, 97, 114, 116, 117, 118, 119, 120, 123, 132, 136, 147, 148, 151, 154, 155, 160, 164, 174, 176, 177, 178, 181, 182, 184], "random": [3, 11, 13, 14, 15, 17, 18, 37, 47, 51, 73, 80, 95, 99, 100, 101, 102, 105, 107, 108, 110, 112, 113, 115, 116, 118, 121, 122, 124, 125, 126, 127, 131, 137, 148, 149, 152, 155, 164, 172, 177, 178, 182], 
"further": [3, 39, 74, 95, 96, 131, 137, 138, 151, 160, 162], "read": [3, 6, 74, 95, 105, 108, 144, 184], "post": [3, 49, 69, 170], "machin": [3, 23, 29, 34, 36, 40, 56, 58, 64, 69, 72, 74, 80, 81, 82, 84, 85, 86, 87, 89, 91, 93, 94, 97, 98, 99, 100, 101, 102, 110, 119, 124, 125, 126, 127, 131, 132, 136, 137, 139, 140, 141, 144, 149, 153], "mooc": [3, 74, 81, 91, 92, 93, 96, 98, 102, 103, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 141, 142, 143, 144, 145, 146, 156, 158, 159, 160, 161, 162, 163], "refer": [3, 14, 23, 34, 40, 58, 72, 73, 74, 81, 82, 87, 89, 92, 93, 94, 95, 96, 98, 99, 102, 103, 107, 109, 110, 111, 112, 113, 115, 116, 118, 119, 120, 121, 122, 128, 129, 130, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 150, 156, 158, 159, 160, 161, 162, 163, 170, 182], "process": [3, 6, 36, 58, 66, 72, 73, 74, 86, 101, 110, 116, 124, 125, 127, 132, 146, 150, 151, 158, 181], "make": [3, 6, 24, 25, 29, 44, 47, 51, 56, 60, 61, 71, 73, 74, 77, 78, 80, 82, 83, 85, 86, 87, 89, 90, 91, 92, 93, 95, 97, 98, 99, 101, 102, 103, 104, 105, 107, 108, 109, 112, 113, 114, 115, 116, 117, 118, 119, 121, 122, 123, 124, 125, 126, 127, 129, 131, 132, 134, 135, 136, 137, 138, 139, 140, 141, 143, 144, 146, 147, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 171], "appli": [3, 6, 15, 16, 17, 18, 24, 26, 27, 29, 39, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 82, 85, 86, 91, 100, 103, 104, 105, 107, 131, 138, 140, 146, 149, 150, 152, 155, 160, 162, 171, 172, 174, 178, 180, 184], "unlabel": 3, "word": [3, 29, 136, 159, 163, 178], "equival": [3, 18, 29, 47, 73, 74, 81, 93, 98, 102, 117, 130, 135, 137, 139, 141, 155, 176], "unseen": [3, 66, 74, 100, 101, 117, 140, 156], "notion": 3, "out": [3, 6, 25, 73, 74, 77, 80, 81, 85, 94, 96, 99, 101, 102, 108, 110, 113, 114, 117, 118, 122, 123, 127, 130, 132, 135, 141, 149, 151, 158, 159, 162, 163], "ti": 3, "definit": [3, 137, 150], "distribut": [3, 6, 43, 74, 75, 76, 82, 94, 95, 99, 100, 101, 102, 103, 104, 105, 106, 107, 116, 131, 132, 139, 144, 146, 148, 151, 153, 155, 157, 161, 178], "condit": [3, 6, 108, 132, 139], "check": [3, 6, 29, 47, 60, 74, 77, 80, 81, 82, 84, 85, 86, 93, 95, 96, 98, 100, 101, 102, 103, 105, 107, 109, 110, 111, 112, 114, 115, 116, 118, 119, 120, 121, 123, 124, 125, 126, 127, 129, 132, 134, 136, 137, 138, 140, 141, 142, 145, 149, 151, 152, 156, 157, 159, 160, 161, 163, 169, 176], "wikipedia": [3, 58, 110], "articl": [3, 6, 58, 110], "finish": [3, 132], "_": [3, 74, 76, 81, 82, 86, 92, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 120, 121, 122, 123, 125, 126, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 144, 145, 150, 152, 156, 157, 160, 161, 162, 163], "end": [3, 82, 85, 92, 93, 95, 96, 98, 99, 100, 102, 103, 105, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 141, 142, 143, 144, 145, 146, 154, 156, 158, 159, 160, 161, 162, 163], "thei": [3, 6, 29, 47, 56, 66, 74, 78, 80, 82, 83, 85, 86, 90, 99, 100, 103, 104, 107, 108, 110, 116, 117, 119, 131, 132, 137, 141, 144, 150, 156, 161, 163, 170, 176, 181], "avail": [3, 6, 29, 36, 49, 73, 74, 75, 76, 77, 80, 81, 91, 95, 96, 104, 106, 107, 112, 121, 125, 127, 132, 136, 138, 141, 150, 153, 176], "after": [3, 18, 28, 47, 77, 85, 102, 104, 110, 113, 116, 117, 120, 122, 136, 150, 154, 155], "been": [3, 74, 77, 80, 85, 92, 100, 102, 108, 109, 116, 122, 125, 134, 139, 150, 151, 156], "slope": [3, 29, 105, 138], 
"intercept": [3, 40, 128, 129, 133, 134, 135, 136, 137, 138, 139, 156], "one": [3, 6, 14, 18, 25, 37, 42, 43, 47, 58, 66, 69, 73, 74, 76, 77, 79, 80, 81, 82, 84, 85, 86, 88, 90, 93, 94, 95, 97, 98, 99, 100, 102, 105, 107, 108, 109, 115, 116, 117, 119, 126, 131, 132, 134, 136, 137, 139, 141, 142, 144, 145, 149, 150, 151, 155, 156, 157, 160, 162, 172, 184], "section": [3, 77, 80, 85, 86, 92, 93, 95, 96, 98, 102, 103, 109, 111, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 144, 145, 146, 153, 156, 158, 159, 160, 161, 162, 163], "about": [3, 13, 17, 22, 24, 29, 33, 36, 39, 56, 60, 64, 71, 72, 74, 77, 81, 82, 85, 94, 96, 99, 102, 103, 104, 106, 108, 115, 118, 131, 135, 139, 140, 144, 149, 157, 161, 169, 178, 181, 184], "also": [3, 13, 22, 25, 27, 29, 33, 36, 58, 60, 74, 80, 81, 82, 85, 86, 87, 88, 89, 90, 93, 96, 97, 98, 101, 102, 103, 104, 105, 108, 109, 110, 113, 114, 116, 117, 119, 122, 123, 125, 131, 132, 135, 137, 138, 139, 140, 141, 144, 150, 151, 152, 153, 155, 156, 162], "python": [3, 6, 29, 36, 66, 71, 74, 77, 87, 89, 102, 110, 116, 132, 142, 145], "pass": [3, 18, 29, 47, 60, 66, 73, 77, 85, 86, 87, 89, 93, 97, 98, 102, 110, 138, 142, 143, 145, 146, 148, 149, 151, 155, 156, 162, 184], "anoth": [3, 6, 18, 58, 73, 74, 77, 97, 99, 101, 110, 119, 125, 126, 128, 132, 133, 137, 140, 141, 144, 160], "includ": [3, 29, 34, 36, 64, 85, 97, 104, 125, 126, 129, 132, 134, 135, 137, 150, 173], "gridsearchcv": [3, 6, 97, 118, 123, 148, 149, 151, 153, 155, 160, 176, 178, 184], "someth": [3, 90, 140, 150], "occur": [3, 85, 132], "your": [3, 6, 34, 39, 60, 73, 74, 75, 78, 79, 83, 85, 86, 87, 88, 89, 90, 93, 94, 98, 100, 111, 112, 113, 114, 117, 124, 127, 128, 129, 130, 134, 142, 143, 147, 148, 154, 158, 159, 163, 176, 178], "stick": 3, "too": [3, 6, 61, 81, 91, 97, 102, 103, 113, 116, 117, 119, 122, 131, 132, 134, 135, 149, 155, 160, 178], "so": [3, 6, 14, 29, 47, 66, 74, 76, 82, 84, 85, 87, 89, 92, 93, 98, 99, 100, 101, 102, 104, 105, 108, 109, 114, 115, 117, 123, 129, 131, 132, 134, 135, 136, 137, 141, 142, 145, 150, 151, 155, 162, 176], "up": [3, 6, 10, 35, 44, 74, 77, 80, 85, 95, 97, 99, 102, 107, 108, 125, 128, 133, 137, 138, 141, 149, 161, 164, 181], "nois": [3, 47, 53, 58, 103, 110, 115, 131, 135, 137, 140], "rather": [3, 29, 34, 77, 80, 105, 106, 139, 141, 146, 153, 155], "than": [3, 6, 18, 27, 28, 29, 34, 36, 43, 47, 53, 60, 73, 74, 77, 80, 81, 82, 83, 84, 85, 86, 89, 90, 92, 94, 95, 97, 98, 99, 101, 102, 103, 104, 105, 108, 109, 110, 116, 117, 118, 119, 122, 124, 126, 127, 128, 129, 131, 132, 133, 134, 135, 139, 140, 141, 144, 146, 148, 149, 151, 152, 153, 155, 156, 159, 160, 163, 173, 176, 178, 184], "relev": [3, 60, 80, 95, 108, 141], "pattern": [3, 23, 74, 95, 124, 127, 142, 145, 151], "tell": [3, 74, 103, 108], "great": [3, 6, 49], "poorli": [3, 89], "real": [3, 49, 74, 80, 81, 85, 86, 100, 102, 128, 133, 141, 144, 161], "world": [3, 161], "fit_predict": 3, "kneighborsclassifi": [3, 60, 78, 81, 83, 184], "decisiontreeregressor": [3, 18, 92, 96, 101, 102, 103, 110, 111, 115, 118, 119, 120, 137, 160, 161, 163, 176], "One": [3, 49, 60, 69, 73, 77, 78, 83, 85, 100, 101, 106, 108, 116, 141, 144, 151], "focu": [3, 18, 74, 80, 102, 104, 107, 109, 115, 132, 141, 144, 149, 150, 156], "were": [3, 29, 66, 77, 80, 86, 95, 102, 103, 106, 115, 124, 127, 128, 131, 133, 141, 153, 155, 160, 163], "If": [3, 6, 28, 29, 37, 42, 53, 66, 74, 77, 81, 85, 92, 93, 95, 96, 97, 98, 101, 102, 103, 107, 108, 109, 111, 112, 113, 116, 117, 118, 
119, 120, 121, 122, 125, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 150, 151, 155, 156, 158, 159, 160, 161, 162, 163, 174, 184], "do": [3, 6, 17, 18, 29, 58, 66, 74, 75, 76, 77, 78, 79, 80, 82, 83, 84, 85, 86, 87, 89, 90, 91, 94, 97, 99, 101, 102, 103, 104, 107, 113, 116, 117, 119, 122, 124, 125, 127, 129, 131, 132, 134, 136, 137, 141, 142, 145, 149, 150, 151, 152, 161, 178, 180, 184], "1d": [3, 29, 137], "5": [3, 4, 18, 29, 60, 66, 73, 74, 76, 77, 81, 82, 83, 84, 85, 86, 91, 92, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 126, 129, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 144, 146, 149, 151, 152, 153, 155, 156, 157, 160, 161, 162, 163, 164, 180, 181, 184], "someon": [3, 74], "come": [3, 6, 29, 56, 74, 80, 115, 118, 125, 128, 133, 138, 150, 151, 161], "doe": [3, 6, 18, 24, 29, 36, 47, 74, 77, 82, 85, 86, 88, 89, 90, 92, 95, 96, 97, 98, 100, 101, 105, 108, 117, 120, 131, 132, 135, 139, 140, 144, 146, 149, 153, 155, 156, 161, 178, 182], "15": [3, 4, 74, 80, 82, 85, 92, 103, 104, 114, 116, 117, 120, 123, 141, 153, 155, 156, 162, 176], "continu": [3, 6, 37, 42, 43, 64, 74, 80, 102, 104, 108, 138, 139, 141, 144, 146, 157, 160], "price": [3, 49, 73, 91, 92, 102, 104, 107, 108, 143, 144, 146], "descript": [3, 29, 49, 73, 74, 78, 83, 92, 93, 96, 98, 102, 103, 107, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 141, 142, 143, 144, 145, 146, 156, 158, 159, 160, 161, 162, 163, 164], "room": [3, 49, 102, 107, 108], "surfac": [3, 29], "locat": [3, 75, 76, 92, 104, 105, 107, 139, 152], "ag": [3, 74, 77, 79, 80, 81, 82, 84, 85, 86, 102, 107, 108, 132, 149, 150, 153], "mri": 3, "scan": [3, 6, 150], "want": [3, 18, 74, 75, 76, 80, 81, 85, 88, 90, 91, 92, 93, 95, 96, 98, 100, 101, 102, 103, 105, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 125, 126, 128, 129, 130, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 149, 150, 151, 156, 157, 158, 159, 160, 161, 162, 163, 176, 178, 184], "tree": [3, 6, 10, 13, 15, 16, 17, 18, 58, 74, 82, 85, 86, 88, 90, 92, 96, 101, 102, 103, 108, 109, 110, 112, 113, 114, 116, 118, 119, 120, 121, 122, 123, 126, 131, 137, 142, 145, 147, 149, 153, 154, 157, 158, 159, 162, 163, 169, 170, 171, 172, 173, 174, 176], "piecewis": [3, 131, 161, 173], "constant": [3, 24, 84, 108, 129, 131, 134, 135, 137, 161, 173], "given": [3, 6, 18, 29, 42, 49, 58, 66, 80, 82, 85, 94, 97, 99, 100, 102, 104, 106, 107, 108, 110, 115, 119, 126, 130, 131, 132, 134, 135, 136, 138, 139, 141, 146, 151, 153, 162, 171, 178, 180, 181], "output": [3, 18, 42, 74, 77, 82, 85, 86, 101, 102, 105, 108, 110, 128, 129, 131, 133, 134, 139, 141, 146, 162], "correspond": [3, 18, 29, 47, 74, 77, 80, 85, 86, 87, 89, 95, 97, 102, 103, 104, 105, 106, 107, 108, 110, 112, 121, 134, 138, 139, 141, 149, 150, 152, 153, 161, 173, 176], "ridg": [3, 40, 44, 47, 107, 108, 110, 132, 135], "order": [3, 6, 18, 29, 40, 47, 69, 74, 82, 86, 89, 90, 91, 95, 100, 102, 104, 114, 119, 123, 132, 138, 149, 152, 153, 184], "shrink": [3, 44, 132, 135], "constrain": [3, 40, 44, 61, 103, 131], "toward": [3, 44, 108, 125, 132, 135, 139], "zero": [3, 28, 29, 42, 43, 44, 53, 56, 74, 85, 103, 108, 132, 135, 137, 139], "2d": [3, 131, 137], "singl": [3, 15, 16, 18, 24, 25, 27, 28, 29, 35, 42, 43, 44, 47, 49, 53, 60, 64, 66, 69, 73, 74, 77, 80, 82, 85, 86, 91, 94, 97, 99, 100, 101, 102, 108, 110, 115, 118, 119, 120, 125, 
126, 128, 129, 131, 133, 134, 137, 141, 142, 144, 145, 149, 150, 151, 156, 160, 162, 171, 172, 173, 174, 176, 178, 180, 184], "orient": [3, 108, 135, 162], "clf": 3, "give": [3, 6, 14, 22, 24, 29, 33, 53, 56, 58, 71, 74, 77, 82, 84, 85, 86, 95, 96, 97, 100, 102, 105, 106, 108, 109, 110, 115, 116, 117, 118, 119, 125, 132, 135, 136, 137, 139, 141, 144, 151, 155, 156, 162], "concret": [3, 29, 58], "graphic": [3, 66, 86, 100, 107, 138], "plot": [3, 18, 29, 47, 60, 66, 74, 75, 76, 82, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 114, 115, 121, 123, 125, 126, 128, 130, 131, 132, 133, 135, 136, 137, 138, 139, 140, 141, 142, 144, 145, 151, 152, 153, 155, 156, 157, 158, 159, 160, 161, 162, 163, 178], "compos": [3, 73, 74, 81, 85, 86, 87, 88, 89, 90, 91, 94, 99, 104, 119, 144, 147, 149, 151, 153, 154, 160], "sinc": [3, 74, 77, 82, 93, 97, 98, 99, 100, 102, 105, 106, 107, 108, 109, 110, 115, 116, 117, 119, 125, 132, 136, 139, 140, 141, 144, 149, 156, 157, 161, 162], "potenti": [3, 44, 49, 74, 82, 86, 96, 102, 103, 116, 136, 140, 141, 151, 180], "choic": [3, 29, 36, 44, 53, 56, 58, 85, 98, 99, 102, 105, 107, 108, 110, 132, 136, 146, 149, 153, 155, 164, 184], "circl": [3, 74, 107, 110, 131, 140], "vs": [3, 82, 85, 135, 141, 162], "squar": [3, 28, 58, 105, 132, 133, 136, 137, 144, 146], "boil": 3, "down": [3, 6, 117], "fact": [3, 18, 82, 84, 90, 105, 115, 132, 153, 163, 176], "exactli": [3, 44, 56, 60, 80, 90, 95, 140, 184], "know": [3, 6, 86, 93, 98, 102, 103, 104, 107, 110, 115, 127, 132, 137, 141, 149], "frame": [3, 104, 105, 106, 107], "scienc": [3, 6, 36, 95, 105], "solv": [3, 6, 29, 44, 60, 74, 80, 86, 95, 98, 100, 102, 105, 106, 132, 136, 137, 138, 139, 144, 161, 184], "might": [3, 6, 29, 37, 49, 73, 84, 85, 86, 88, 90, 95, 96, 100, 101, 102, 103, 105, 108, 117, 133, 141, 144, 149, 153, 162], "speci": [3, 18, 75, 76, 109, 130, 135, 139, 156, 157, 158, 160, 162, 184], "commonli": [3, 74, 80, 81], "denot": 3, "eventu": 3, "ideal": [3, 102, 141, 144], "let": [3, 6, 18, 25, 43, 47, 73, 74, 77, 78, 80, 81, 82, 83, 85, 86, 87, 88, 89, 90, 91, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 115, 117, 125, 126, 129, 130, 131, 132, 133, 134, 135, 136, 137, 140, 141, 144, 146, 149, 150, 153, 156, 157, 160, 161, 184], "On": [3, 6, 29, 80, 82, 83, 86, 91, 97, 102, 108, 109, 110, 117, 122, 124, 127, 131, 132, 134, 135, 136, 137, 140, 141, 144, 149, 151, 153, 156, 161, 162, 163], "figur": [3, 27, 77, 80, 100, 102, 108, 109, 110, 115, 139, 151, 152, 153, 156, 162, 178], "mathemat": [3, 58, 93, 98, 131, 137, 138, 139, 144], "b": [3, 15, 16, 17, 18, 24, 25, 26, 27, 28, 29, 35, 37, 42, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 128, 133, 138, 153, 155, 171, 172, 173, 174, 176, 178, 180, 184], "creat": [3, 29, 44, 47, 60, 69, 71, 72, 73, 77, 78, 80, 82, 83, 85, 86, 93, 94, 96, 98, 99, 100, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 118, 120, 121, 122, 123, 124, 125, 127, 128, 129, 130, 132, 133, 134, 135, 137, 139, 140, 141, 142, 143, 145, 146, 149, 150, 151, 153, 156, 158, 159, 161, 162, 163, 172, 176, 181], "infin": 3, "vari": [3, 18, 80, 93, 96, 98, 100, 103, 105, 108, 123, 125, 132, 136, 138, 141, 150, 155, 176], "fulfil": 3, "requir": [3, 6, 13, 18, 22, 29, 33, 36, 39, 40, 43, 47, 56, 60, 71, 73, 74, 77, 80, 82, 86, 93, 98, 99, 105, 109, 116, 117, 132, 146, 151, 153, 155, 160, 169, 176, 181, 184], "minim": [3, 6, 40, 58, 64, 92, 97, 102, 103, 135, 136, 144, 146, 156, 172], "sum": [3, 40, 42, 58, 74, 81, 83, 115, 
133, 139, 141], "error": [3, 6, 16, 28, 29, 40, 42, 51, 53, 56, 58, 59, 61, 74, 77, 81, 87, 89, 92, 95, 96, 97, 103, 109, 111, 112, 113, 115, 116, 117, 119, 120, 121, 122, 128, 129, 132, 133, 134, 136, 137, 140, 141, 143, 144, 146, 147, 154, 164, 172], "red": [3, 27, 77, 102, 109, 110, 115, 131, 135, 139, 140, 151, 160], "best": [3, 13, 18, 22, 33, 39, 53, 58, 60, 74, 91, 97, 98, 100, 103, 111, 113, 114, 116, 117, 119, 120, 122, 123, 128, 132, 133, 135, 136, 137, 141, 144, 147, 148, 149, 150, 151, 153, 154, 155, 156, 169, 178, 181, 182, 184], "possibl": [3, 6, 15, 29, 36, 42, 43, 58, 60, 64, 69, 74, 82, 85, 92, 96, 97, 100, 102, 103, 108, 110, 116, 117, 119, 127, 128, 129, 131, 132, 133, 134, 135, 137, 139, 140, 144, 146, 148, 149, 151, 152, 155, 159, 161, 162, 163, 178, 184], "abstract": [3, 105], "manner": [3, 14, 29, 85, 95, 132, 146], "state": [3, 6, 14, 60, 74, 80, 81, 82, 85, 86, 107, 149, 153], "jockei": 3, "wheel": 3, "i": [3, 13, 18, 20, 22, 29, 47, 71, 72, 73, 74, 77, 80, 81, 82, 84, 85, 86, 89, 90, 96, 97, 100, 102, 108, 110, 115, 116, 117, 118, 128, 129, 131, 132, 133, 134, 136, 139, 141, 144, 146, 149, 155, 156, 164, 178, 184], "support": [3, 6, 82, 88, 90, 93, 98, 131, 137, 140, 142, 145, 162], "standardscal": [3, 29, 47, 60, 66, 73, 77, 82, 86, 88, 90, 91, 93, 98, 99, 100, 107, 108, 130, 131, 135, 139, 140, 148, 150, 155, 176, 178, 180, 184], "columntransform": [3, 72, 86, 88, 90, 91, 131, 147, 149, 151, 153, 154], "enough": [3, 6, 89, 90, 99, 102, 103, 115, 117, 135, 140, 144, 150, 156, 160, 162, 176], "flexibl": [3, 6, 53, 56, 58, 61, 93, 98, 103, 119, 140, 156], "opposit": 3, "cluster": [3, 102, 171], "whose": [3, 81, 116, 150], "group": [3, 6, 20, 25, 29, 74, 100, 101, 102, 107, 164], "subset": [3, 17, 42, 44, 47, 71, 74, 77, 80, 81, 86, 91, 102, 105, 107, 117, 119, 122, 124, 125, 126, 127, 147, 154, 156, 157, 160, 176], "them": [3, 6, 13, 60, 74, 76, 77, 82, 85, 86, 94, 97, 99, 102, 108, 110, 112, 116, 117, 118, 121, 129, 131, 132, 134, 137, 139, 141, 144, 149, 150, 151, 155, 159, 162, 163, 176, 184], "broad": 3, "topic": [3, 102, 107], "custom": [3, 137], "commerc": 3, "websit": [3, 36, 49, 78, 83], "although": 3, "mention": [3, 49, 77, 93, 98, 101, 104, 109, 116, 118, 131, 132, 135, 136, 139, 141, 142, 144, 145, 150, 153, 163], "cover": [3, 50, 74, 77, 80, 85, 86, 135], "impli": [3, 160], "fix": [3, 29, 47, 53, 58, 85, 93, 98, 117, 132, 148, 149, 155, 160, 162, 176, 178, 182], "like": [3, 6, 18, 25, 29, 44, 53, 74, 80, 81, 85, 86, 87, 89, 97, 99, 102, 104, 106, 108, 109, 110, 117, 125, 132, 133, 137, 138, 139, 141, 142, 145, 146, 150], "necessari": [3, 6, 44, 74, 117, 131, 151], "subdivid": [3, 156], "select": [3, 6, 13, 15, 16, 17, 18, 22, 23, 24, 25, 26, 27, 28, 29, 33, 34, 35, 39, 42, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 75, 76, 77, 79, 80, 82, 84, 87, 88, 89, 90, 97, 100, 102, 108, 110, 115, 117, 119, 123, 124, 127, 128, 133, 136, 137, 139, 149, 150, 151, 152, 155, 157, 169, 171, 172, 173, 174, 176, 178, 180, 184], "final": [3, 13, 23, 39, 44, 56, 60, 71, 81, 82, 85, 86, 97, 101, 102, 105, 107, 110, 114, 115, 117, 122, 123, 124, 126, 127, 132, 135, 142, 143, 145, 146, 149, 150, 151, 159, 163, 181], "sometim": [3, 6, 58, 86, 99, 141, 144, 151, 153], "clear": [3, 74, 98, 103, 160], "mani": [3, 6, 29, 42, 60, 73, 74, 75, 76, 81, 82, 85, 86, 97, 99, 100, 102, 103, 104, 107, 110, 117, 119, 129, 131, 132, 134, 141, 150, 155, 184], "need": [3, 6, 22, 24, 29, 37, 40, 60, 66, 74, 78, 80, 82, 83, 85, 86, 91, 93, 97, 98, 102, 103, 105, 108, 109, 110, 115, 117, 
118, 119, 120, 131, 132, 135, 136, 137, 140, 142, 143, 145, 146, 147, 149, 150, 151, 153, 154, 160, 174, 176], "criteria": [3, 117], "ml": [3, 6, 95], "cheatsheet": 3, "readthedoc": 3, "io": [3, 36], "en": 3, "latest": [3, 28], "googl": 3, "develop": [3, 6, 36, 71, 74, 87, 89, 105, 129, 134, 137], "com": [3, 6, 36], "advanc": [3, 6, 36, 71, 88, 90], "terminolog": 3, "modifi": [4, 6, 116, 118, 137], "run": [4, 6, 18, 44, 66, 78, 83, 92, 96, 97, 108, 114, 123, 132, 152, 153, 176, 178, 184], "statu": [4, 25, 74, 80, 85, 86, 149, 151, 153], "python_script": 4, "01_tabular_data_explor": 4, "2023": 4, "20": [4, 18, 29, 60, 74, 76, 85, 91, 92, 97, 102, 103, 104, 105, 106, 107, 113, 117, 118, 120, 122, 129, 132, 134, 141, 146, 147, 148, 154, 155, 157], "13": [4, 74, 80, 82, 92, 95, 104, 105, 106, 117, 118, 120, 131, 140, 144, 153, 155], "57": [4, 104, 105, 120], "cach": 4, "12": [4, 74, 82, 91, 92, 104, 105, 106, 107, 117, 120, 141, 149, 155, 162], "36": [4, 73, 76, 102, 104, 117, 120, 129, 134, 157, 161], "01_tabular_data_exploration_ex_01": 4, "31": [4, 104, 114, 117, 123, 134, 141, 149, 153], "01_tabular_data_exploration_sol_01": 4, "76": [4, 84, 98, 104, 106, 141], "02_numerical_pipeline_cross_valid": 4, "02_numerical_pipeline_ex_00": 4, "45": [4, 74, 81, 82, 85, 92, 102, 104, 106, 116, 120, 133, 138, 139, 156], "02_numerical_pipeline_ex_01": 4, "02_numerical_pipeline_hands_on": 4, "49": [4, 85, 104, 116, 117, 125, 136, 153], "02_numerical_pipeline_introduct": 4, "7": [4, 18, 47, 73, 74, 76, 82, 83, 85, 92, 95, 98, 99, 100, 102, 104, 105, 107, 108, 116, 117, 118, 120, 129, 132, 134, 141, 146, 149, 152, 153, 155, 157, 160, 162, 164, 176, 184], "21": [4, 85, 90, 92, 102, 104, 105, 107, 108, 120, 146, 153, 155], "02_numerical_pipeline_sc": 4, "32": [4, 86, 104, 115, 117, 149], "02_numerical_pipeline_sol_00": 4, "84": [4, 91, 104, 134], "02_numerical_pipeline_sol_01": 4, "51": [4, 104, 120, 184], "03_categorical_pipelin": 4, "08": [4, 29, 105, 132, 153], "03_categorical_pipeline_column_transform": 4, "9": [4, 29, 73, 74, 83, 91, 92, 95, 98, 99, 100, 102, 104, 107, 108, 116, 117, 120, 132, 134, 142, 144, 145, 146, 149, 153, 155, 157, 184], "95": [4, 107, 153], "03_categorical_pipeline_ex_01": 4, "58": [4, 104, 105, 134, 153], "44": [4, 74, 80, 92, 102, 104, 134, 149, 150, 153], "03_categorical_pipeline_ex_02": 4, "8": [4, 60, 74, 76, 80, 83, 85, 91, 92, 95, 98, 99, 100, 101, 102, 104, 107, 108, 110, 114, 115, 116, 117, 118, 120, 121, 123, 131, 132, 135, 137, 141, 146, 147, 149, 152, 153, 154, 155, 156, 161, 162, 176, 178, 184], "03_categorical_pipeline_sol_01": 4, "03_categorical_pipeline_sol_02": 4, "17": [4, 29, 74, 76, 80, 82, 92, 102, 104, 116, 117, 120, 129, 134, 139, 144, 146, 152, 155, 156, 157], "03_categorical_pipeline_visu": 4, "61": [4, 104, 117, 132, 153], "cross_validation_baselin": 4, "59": [4, 82, 85, 104, 105], "16": [4, 74, 82, 85, 92, 95, 101, 103, 104, 106, 116, 117, 120, 146, 152, 155, 156, 162], "cross_validation_ex_01": 4, "03": [4, 10, 12, 21, 29, 31, 45, 55, 65, 68, 164, 175], "cross_validation_ex_02": 4, "53": [4, 104, 105, 116, 125, 153], "cross_validation_group": 4, "24": [4, 86, 92, 102, 104, 105, 106, 107, 108, 120, 141, 146, 149, 153], "cross_validation_learning_curv": 4, "18": [4, 29, 36, 74, 76, 80, 92, 104, 105, 116, 117, 120, 129, 132, 134, 137, 142, 145, 149, 150, 152, 153, 155, 157], "cross_validation_nest": 4, "14": [4, 74, 92, 95, 104, 105, 116, 117, 120, 131, 141, 144, 155], "00": [4, 29, 82, 102, 105, 133, 136, 149], "25": [4, 60, 74, 80, 81, 82, 86, 92, 102, 
103, 104, 105, 107, 108, 120, 133, 138, 149, 150, 153], "39": [4, 76, 85, 86, 104, 116, 120, 129, 134, 157], "cross_validation_sol_01": 4, "65": [4, 85, 91, 104], "cross_validation_sol_02": 4, "cross_validation_stratif": 4, "52": [4, 102, 104, 105, 107, 108, 125, 156], "cross_validation_tim": 4, "63": [4, 104, 110, 120], "cross_validation_train_test": 4, "01": [4, 11, 19, 32, 38, 57, 59, 63, 82, 97, 105, 117, 118, 130, 135, 149, 150, 152, 164, 166, 168, 179], "07": [4, 101, 108, 132], "cross_validation_validation_curv": 4, "27": [4, 74, 85, 92, 104, 105, 120], "38": [4, 74, 80, 82, 85, 104, 120, 149, 150, 153], "datasets_ames_h": 4, "datasets_bike_rid": 4, "89": [4, 82, 92, 125], "datasets_blood_transfus": 4, "02": [4, 10, 11, 20, 30, 41, 59, 65, 82, 164, 166, 175, 177], "46": [4, 85, 92, 102, 104, 115, 116, 120, 153, 155], "datasets_california_h": 4, "dev_features_import": 4, "80": [4, 29, 73, 91, 92, 104, 147, 154], "09": [4, 105, 132], "ensemble_adaboost": 4, "ensemble_bag": 4, "04": [4, 12, 30, 68, 82, 109, 117, 132, 162, 164, 167], "6": [4, 13, 22, 39, 47, 71, 73, 74, 76, 82, 83, 88, 90, 91, 92, 95, 98, 99, 100, 102, 104, 105, 107, 108, 116, 117, 120, 129, 132, 134, 135, 141, 144, 146, 149, 152, 153, 155, 156, 157, 160, 161, 164, 176], "99": [4, 82], "ensemble_ex_01": 4, "ensemble_ex_02": 4, "ensemble_ex_03": 4, "22": [4, 92, 102, 104, 105, 107, 108, 118, 144, 146, 152], "ensemble_ex_04": 4, "ensemble_gradient_boost": 4, "06": 4, "127": [4, 85], "87": [4, 85, 101, 117, 149], "ensemble_hist_gradient_boost": 4, "54": [4, 104, 105, 117, 118], "88": [4, 85, 101, 102, 107, 108, 117, 149, 153], "ensemble_hyperparamet": 4, "123": [4, 76], "ensemble_introduct": 4, "ensemble_random_forest": 4, "28": [4, 74, 80, 82, 85, 92, 102, 104, 105, 106, 116, 149, 150, 153], "ensemble_sol_01": 4, "35": [4, 74, 104, 106, 117], "83": [4, 74, 116], "ensemble_sol_02": 4, "11": [4, 74, 85, 92, 104, 117, 120, 132, 149, 153, 155, 160, 176], "ensemble_sol_03": 4, "104": 4, "ensemble_sol_04": 4, "60": [4, 29, 74, 81, 91, 104, 105, 108, 110, 120, 152, 160, 162], "feature_selection_ex_01": 4, "feature_selection_introduct": 4, "feature_selection_limitation_model": 4, "78": [4, 104, 141, 153, 156], "43": [4, 86, 92, 95, 102, 104, 105, 116], "feature_selection_sol_01": 4, "05": [4, 29, 31, 68, 92, 95, 99, 100, 101, 105, 107, 110, 115, 118, 121, 132, 135, 141, 151, 156, 160, 162, 164], "linear_models_ex_01": 4, "linear_models_ex_02": 4, "linear_models_ex_03": 4, "linear_models_feature_engineering_classif": 4, "linear_models_regular": 4, "linear_models_sol_01": 4, "linear_models_sol_02": 4, "linear_models_sol_03": 4, "33": [4, 102, 104, 107, 117, 152, 153], "linear_regression_in_sklearn": 4, "linear_regression_non_linear_link": 4, "linear_regression_without_sklearn": 4, "79": [4, 73, 104, 120], "logistic_regress": [4, 130, 131, 135, 139], "logistic_regression_non_linear": 4, "77": [4, 104, 106, 116, 141], "metrics_classif": 4, "metrics_ex_01": 4, "34": [4, 104, 107, 122, 152], "metrics_ex_02": 4, "metrics_regress": 4, "55": [4, 96, 104, 105], "metrics_sol_01": 4, "19": [4, 76, 77, 85, 92, 104, 105, 116, 117, 120, 129, 132, 134, 136, 152, 155, 157], "metrics_sol_02": 4, "parameter_tuning_ex_02": 4, "71": [4, 104, 149, 153, 161], "parameter_tuning_ex_03": 4, "parameter_tuning_grid_search": 4, "42": [4, 73, 80, 82, 84, 85, 86, 104, 117, 127, 131, 140, 147, 148, 149, 151, 153, 154, 155], "parameter_tuning_manu": 4, "parameter_tuning_nest": 4, "parameter_tuning_parallel_plot": 4, "parameter_tuning_randomized_search": 4, 
"parameter_tuning_sol_02": 4, "73": [4, 81, 104], "parameter_tuning_sol_03": 4, "26": [4, 92, 104, 105, 153], "trees_classif": 4, "23": [4, 85, 92, 102, 104, 105, 106, 107, 108, 116, 118, 120, 162], "93": [4, 116], "trees_dataset": 4, "trees_ex_01": 4, "trees_ex_02": 4, "trees_hyperparamet": 4, "trees_regress": 4, "trees_sol_01": 4, "trees_sol_02": 4, "lot": [6, 74, 107, 108, 127, 132, 141], "materi": 6, "far": [6, 47, 92, 103, 115, 122, 126, 132], "congratul": 6, "And": [6, 101], "thank": [6, 131, 151], "everyon": 6, "instructor": 6, "staff": 6, "help": [6, 60, 66, 82, 84, 88, 90, 93, 94, 98, 99, 103, 104, 105, 106, 107, 108, 117, 124, 127, 131, 132, 135, 139, 141, 144, 184], "forum": [6, 36], "student": [6, 36], "hard": [6, 73, 74, 75, 76, 124, 127, 139, 141, 146], "work": [6, 40, 60, 65, 74, 82, 85, 86, 100, 101, 105, 110, 117, 119, 126, 127, 132, 147, 150, 152, 154, 156, 158, 161, 162, 164, 169, 184], "summar": [6, 102, 109, 117, 119], "train": [6, 15, 16, 17, 18, 25, 26, 29, 34, 35, 37, 40, 42, 44, 47, 51, 53, 56, 58, 59, 60, 61, 66, 69, 71, 72, 74, 77, 78, 79, 82, 83, 84, 85, 86, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 103, 105, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 129, 130, 131, 132, 134, 135, 136, 137, 139, 140, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 164, 173, 174, 176, 181, 184], "test": [6, 18, 26, 29, 34, 35, 37, 44, 47, 53, 56, 58, 59, 60, 66, 72, 73, 77, 78, 79, 82, 83, 84, 85, 86, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 103, 108, 110, 111, 112, 113, 114, 115, 117, 118, 120, 121, 122, 123, 124, 125, 126, 127, 132, 134, 135, 139, 140, 141, 142, 144, 145, 147, 149, 150, 151, 153, 154, 155, 158, 161, 162, 164, 174, 176, 178, 184], "built": [6, 37, 94, 99, 108, 110, 137, 141, 160, 162, 172], "matrix": [6, 29, 82, 85, 95, 119, 124, 127, 132, 137], "featur": [6, 15, 16, 17, 18, 24, 29, 33, 34, 35, 37, 40, 42, 43, 47, 49, 58, 60, 64, 65, 66, 69, 71, 72, 73, 74, 75, 76, 77, 80, 81, 86, 87, 89, 91, 92, 94, 98, 99, 102, 104, 105, 106, 107, 109, 110, 115, 116, 117, 119, 124, 127, 129, 134, 138, 139, 140, 144, 149, 150, 151, 155, 156, 157, 158, 159, 160, 161, 162, 163, 171, 172, 176, 184], "observ": [6, 28, 44, 47, 51, 53, 74, 76, 82, 84, 85, 86, 88, 90, 92, 94, 95, 96, 97, 98, 99, 100, 102, 103, 105, 106, 109, 110, 113, 115, 117, 119, 122, 123, 125, 131, 132, 134, 135, 137, 139, 140, 141, 144, 149, 152, 155, 156, 157, 160, 161, 163, 173], "transform": [6, 29, 40, 43, 66, 69, 81, 82, 85, 86, 91, 94, 97, 99, 102, 103, 105, 110, 116, 119, 124, 127, 129, 131, 132, 134, 135, 137, 144, 149, 150, 151, 152, 153, 178], "often": [6, 40, 42, 44, 58, 61, 74, 85, 86, 97, 99, 110, 135, 137, 144, 146, 151, 176, 181, 184], "typic": [6, 13, 49, 71, 74, 85, 102, 107, 117, 124, 127, 131, 139, 144, 146, 151, 153, 171], "categor": [6, 37, 47, 64, 69, 71, 72, 73, 74, 75, 76, 87, 89, 104, 119, 131, 132, 139, 141, 149, 157, 164, 176], "variabl": [6, 24, 29, 36, 40, 47, 49, 58, 60, 64, 66, 68, 69, 73, 77, 78, 81, 82, 83, 87, 89, 92, 95, 96, 101, 102, 103, 104, 106, 107, 110, 117, 119, 132, 138, 139, 144, 149, 151, 153, 155, 157, 164, 176, 178, 184], "seek": [6, 34, 117, 118, 141], "suffic": [6, 117], "But": [6, 91, 98, 101, 102, 107, 131, 132, 151, 152], "larg": [6, 18, 47, 69, 86, 97, 102, 105, 107, 113, 114, 116, 117, 122, 123, 126, 127, 130, 132, 134, 135, 144, 148, 151, 152, 153, 155, 178], "detect": 6, "underfit": [6, 13, 22, 29, 33, 39, 43, 44, 51, 53, 56, 58, 59, 60, 61, 
96, 115, 117, 118, 119, 131, 134, 135, 137, 140, 156, 164, 169, 171], "multipl": [6, 85, 95, 108, 110, 115, 129, 134, 137, 142, 143, 145, 146, 152, 171, 172], "hyper": [6, 18, 37, 97, 99, 151, 155, 178], "control": [6, 44, 78, 82, 83, 93, 97, 98, 103, 117, 119, 125, 130, 131, 135, 137, 147, 148, 150, 152, 154, 155, 156, 160, 171, 181], "import": [6, 13, 18, 22, 29, 36, 37, 47, 58, 60, 72, 73, 74, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 161, 162, 163, 164, 167, 169, 176, 178, 180, 184], "randomsearchcv": 6, "understand": [6, 13, 22, 33, 39, 56, 78, 80, 83, 85, 95, 96, 100, 103, 128, 133, 141, 160, 161, 169, 170, 181], "suit": [6, 131, 144, 170], "intuit": [6, 10, 11, 13, 14, 22, 39, 45, 56, 71, 80, 81, 85, 105, 107, 109, 110, 115, 123, 131, 135, 136, 137, 139, 140, 156, 157, 161, 162, 164], "debug": 6, "build": [6, 18, 29, 47, 71, 73, 74, 81, 91, 93, 98, 110, 116, 126, 129, 131, 134, 149, 160, 164, 166], "combin": [6, 13, 14, 16, 42, 43, 66, 74, 80, 82, 86, 95, 97, 106, 107, 108, 109, 110, 114, 115, 116, 117, 118, 119, 123, 126, 131, 132, 134, 136, 137, 146, 147, 148, 149, 151, 153, 154, 155, 156, 171, 172, 178, 181, 182], "particularli": [6, 73, 87, 89, 117], "few": [6, 74, 75, 76, 79, 80, 84, 102, 104, 106, 107, 117, 119, 129, 131, 134, 135, 151, 157], "benefit": [6, 17, 24, 33, 82, 85, 96, 115, 118, 119, 132, 144, 158, 164], "non": [6, 20, 29, 39, 42, 43, 44, 47, 66, 69, 74, 82, 90, 93, 97, 98, 99, 104, 105, 106, 107, 108, 110, 125, 129, 132, 134, 139, 155, 156, 161, 163, 164, 170, 171, 182], "engin": [6, 40, 43, 47, 95, 105, 129, 132, 134, 164], "base": [6, 13, 14, 15, 16, 18, 29, 34, 36, 42, 49, 60, 74, 75, 76, 82, 88, 90, 95, 102, 105, 110, 115, 118, 119, 125, 126, 129, 134, 135, 137, 139, 140, 141, 146, 149, 151, 156, 157, 164, 184], "seri": [6, 92, 94, 95, 99, 101, 108, 110, 115, 135, 139, 156], "threshold": [6, 15, 27, 74, 91, 107, 156, 161, 162, 172], "variou": [6, 56, 91], "attribut": [6, 37, 74, 82, 85, 95, 97, 102, 107, 108, 110, 112, 121, 135, 136, 137, 139, 148, 149, 153, 155, 178, 184], "natur": [6, 22, 36, 81, 85, 86, 102, 105, 110, 131, 132, 153], "miss": [6, 74, 85, 95, 102, 104, 106, 107, 129, 134, 153, 184], "histgradientboostingregressor": [6, 18, 29, 116, 117, 123, 146], "classifi": [6, 15, 24, 27, 42, 69, 75, 76, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90, 91, 93, 94, 95, 98, 99, 100, 106, 109, 110, 119, 125, 130, 131, 135, 139, 140, 142, 145, 146, 147, 149, 150, 151, 153, 154, 156, 158, 160, 162, 170, 171, 178, 180, 184], "goto": 6, "strongli": [6, 108], "advis": [6, 108], "pointer": 6, "doc": 6, "rich": 6, "didact": [6, 36, 85, 86, 104], "improv": [6, 36, 66, 90, 91, 95, 96, 98, 99, 103, 111, 113, 117, 120, 122, 137, 144, 152, 178, 181], "compris": [6, 141], "guid": [6, 131, 139, 144, 184], "everi": [6, 74, 77, 102, 105, 108, 136, 151], "explain": [6, 18, 29, 39, 58, 86, 108, 115, 116, 122, 126, 135, 144, 149, 169], "demonstr": [6, 86, 92, 109, 115, 116, 132, 137, 160], "good": [6, 22, 25, 47, 74, 77, 79, 80, 81, 84, 85, 86, 88, 90, 100, 101, 102, 103, 106, 108, 117, 118, 119, 126, 128, 129, 132, 133, 134, 135, 136, 139, 140, 141, 149, 151, 152, 153, 155, 156, 178], "softwar": [6, 36, 81], "ask": [6, 124, 127, 128, 130, 
133, 135, 141], "question": [6, 80, 88, 90, 104, 128, 129, 133, 134, 135, 141, 155], "stackoverflow": 6, "github": [6, 36, 80, 82, 83, 86, 91, 97, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "discuss": [6, 14, 18, 36, 47, 74, 77, 81, 109, 115, 117, 118, 132, 135, 156], "driven": [6, 135], "inclus": 6, "contribut": [6, 60, 82, 95, 132, 155, 178], "other": [6, 14, 27, 29, 36, 44, 60, 73, 74, 77, 81, 82, 85, 86, 87, 89, 92, 93, 94, 98, 99, 100, 101, 105, 108, 110, 117, 125, 129, 132, 134, 136, 137, 139, 140, 144, 149, 151, 152, 153, 159, 162, 163, 178, 181, 184], "advocaci": 6, "curat": 6, "overflow": 6, "code": [6, 29, 36, 49, 69, 75, 78, 79, 87, 93, 94, 110, 111, 112, 113, 114, 116, 124, 127, 128, 129, 130, 132, 133, 137, 139, 142, 143, 147, 148, 149, 151, 152, 154, 155, 158, 159, 178], "start": [6, 36, 47, 74, 78, 79, 80, 82, 83, 84, 85, 86, 88, 90, 92, 95, 96, 99, 100, 101, 102, 105, 106, 115, 117, 118, 124, 125, 127, 129, 130, 131, 132, 134, 135, 138, 139, 141, 144, 146, 148, 150, 155, 156, 157, 158], "carpentri": 6, "resourc": [6, 36, 71, 74, 105, 117, 151], "git": 6, "lab": [6, 36], "unsupervis": [6, 49], "structur": [6, 58, 71, 74, 82, 85, 86, 95, 117, 144, 155, 156, 161, 170], "instanc": [6, 47, 64, 74, 80, 81, 82, 85, 95, 102, 104, 105, 107, 108, 111, 114, 120, 123, 124, 125, 126, 127, 129, 131, 132, 134, 136, 138, 141, 143, 144, 146, 150, 153, 174, 180, 181], "sampl": [6, 14, 15, 16, 18, 20, 29, 42, 44, 49, 59, 61, 64, 74, 75, 76, 77, 80, 81, 82, 85, 86, 93, 94, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 113, 115, 116, 117, 119, 122, 124, 127, 128, 131, 132, 133, 135, 137, 138, 139, 140, 141, 146, 148, 149, 151, 153, 155, 156, 157, 159, 160, 161, 163, 164, 173, 182], "supervis": [6, 49, 102, 171], "recov": [6, 18, 95, 132], "link": [6, 15, 79, 80, 84, 95, 104, 105, 106, 107, 108, 117, 124, 127, 141, 144], "drive": 6, "system": [6, 74, 95], "hand": [6, 29, 91, 95, 109, 122, 132, 134, 137, 149, 151], "nuanc": 6, "deep": [6, 90, 117, 118, 160], "better": [6, 17, 18, 28, 47, 73, 77, 81, 84, 88, 89, 90, 91, 92, 93, 94, 95, 97, 98, 99, 100, 102, 103, 110, 117, 118, 119, 124, 127, 128, 132, 133, 135, 136, 146, 150, 151, 153, 155, 160, 176, 184], "gradient": [6, 10, 13, 14, 16, 17, 18, 29, 82, 86, 109, 113, 114, 118, 122, 123, 146, 149, 153, 164], "boost": [6, 13, 14, 16, 17, 18, 29, 86, 113, 114, 118, 122, 123, 149, 153, 164], "classif": [6, 15, 22, 38, 39, 41, 42, 49, 60, 64, 73, 74, 80, 81, 87, 89, 91, 93, 94, 95, 98, 99, 100, 102, 106, 109, 129, 131, 134, 138, 142, 143, 144, 145, 146, 158, 160, 161, 162, 164, 169, 170, 171, 184], "regress": [6, 18, 22, 27, 38, 39, 40, 41, 42, 43, 44, 45, 49, 53, 58, 60, 66, 73, 80, 82, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 98, 99, 100, 102, 107, 110, 112, 115, 121, 124, 127, 129, 130, 134, 135, 139, 140, 141, 143, 146, 148, 150, 155, 156, 159, 160, 163, 164, 169, 170, 171, 176, 184], "nativ": [6, 74, 80, 85, 86, 102, 125, 137, 149, 151, 153], "input": [6, 24, 40, 42, 58, 73, 74, 79, 81, 82, 84, 85, 88, 89, 90, 92, 95, 98, 99, 108, 110, 128, 133, 137, 138, 139, 151, 156, 157, 171, 184], "speech": 6, "text": [6, 36, 49, 105, 131, 138], "imag": [6, 95], "voic": 6, "pretrain": 6, "human": [6, 74, 105, 131], "cost": [6, 81, 102, 105, 118, 135, 152, 153], "mainten": 6, "Not": [6, 86, 87, 89, 97], "pytorch": 6, "tensorflow": 6, "introduct": [6, 56, 71, 164], "andrea": 6, "c": [6, 15, 16, 17, 18, 24, 25, 26, 27, 28, 29, 35, 37, 42, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 95, 97, 106, 130, 131, 
139, 141, 150, 162, 171, 172, 173, 174, 176, 178, 180, 184], "m\u00fcller": 6, "sarah": 6, "guido": 6, "handbook": 6, "jake": 6, "van": 6, "der": 6, "pla": 6, "broader": [6, 159, 163], "statist": [6, 18, 56, 74, 76, 81, 82, 85, 88, 90, 102, 107, 109, 110, 124, 127, 141, 184], "jame": 6, "witten": 6, "hasti": 6, "tibshirani": 6, "theori": [6, 109], "concept": [6, 13, 14, 22, 23, 33, 34, 39, 40, 56, 58, 72, 95, 100, 102, 146, 169, 170, 182], "kera": 6, "aur\u00e9lien": 6, "g\u00e9ron": 6, "kaggl": 6, "particip": 6, "challeng": [6, 36, 106, 137], "team": 6, "solut": [6, 10, 11, 12, 18, 19, 30, 31, 32, 36, 38, 41, 45, 59, 63, 65, 68, 85, 101, 124, 125, 136, 140, 149, 164, 166, 175, 177, 179], "share": [6, 110], "winner": 6, "wai": [6, 71, 73, 74, 77, 80, 84, 85, 97, 98, 99, 101, 109, 110, 115, 116, 117, 124, 125, 127, 137, 141, 144, 161, 176, 184], "now": [6, 18, 29, 47, 60, 73, 77, 78, 80, 81, 82, 83, 84, 85, 86, 87, 89, 92, 93, 94, 95, 96, 98, 99, 100, 101, 102, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 127, 128, 129, 131, 132, 133, 134, 135, 137, 138, 139, 141, 142, 145, 147, 149, 151, 152, 153, 154, 156, 159, 160, 162, 163, 176, 184], "touch": 6, "briefli": 6, "fit": [6, 14, 24, 26, 29, 39, 40, 42, 43, 44, 47, 51, 64, 66, 77, 78, 80, 83, 84, 85, 88, 90, 91, 93, 97, 98, 99, 101, 102, 103, 108, 109, 110, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 124, 125, 126, 127, 128, 131, 132, 133, 134, 135, 136, 137, 139, 140, 141, 144, 146, 149, 150, 151, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 164, 169, 173, 178], "wider": [6, 36, 53], "mai": [6, 25, 43, 44, 47, 49, 74, 102, 107, 117, 129, 132, 134, 135, 136, 139, 141, 146, 153, 155, 162], "fail": [6, 101, 131, 142, 145], "weak": [6, 16, 117, 135, 153], "analysi": [6, 71, 80, 93, 98, 106, 107, 132, 144, 153, 155, 156, 164, 177], "kei": [6, 8, 9, 46, 48, 50, 52, 54, 62, 66, 67, 93, 98, 100, 103, 117, 120, 123, 125, 126, 135, 149, 150, 153, 160, 165], "achiev": [6, 18, 74, 79, 82, 84, 96, 98, 106, 131, 132, 160], "reliabl": [6, 95, 139], "even": [6, 36, 39, 61, 76, 77, 81, 85, 86, 88, 89, 90, 95, 99, 101, 102, 109, 116, 117, 122, 125, 131, 132, 135, 137, 139, 140, 141, 142, 143, 145, 146, 149, 155, 181, 182], "cross": [6, 13, 18, 22, 23, 24, 25, 26, 29, 33, 34, 39, 40, 44, 47, 56, 57, 59, 60, 65, 66, 73, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 98, 99, 100, 101, 103, 107, 108, 114, 115, 116, 117, 118, 119, 122, 123, 124, 125, 126, 127, 129, 132, 134, 140, 141, 142, 143, 145, 146, 147, 149, 150, 151, 152, 153, 154, 155, 160, 164, 169, 174, 176, 178, 181, 182, 184], "accuraci": [6, 18, 27, 60, 66, 73, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 88, 89, 90, 91, 93, 95, 97, 98, 99, 100, 106, 109, 115, 125, 126, 127, 128, 133, 135, 139, 142, 145, 146, 149, 150, 151, 152, 153, 154, 155, 156, 158, 162, 184], "imperfect": [6, 108], "estim": [6, 13, 17, 22, 29, 33, 39, 44, 47, 56, 60, 66, 71, 77, 81, 82, 85, 86, 91, 97, 99, 101, 107, 108, 109, 110, 111, 113, 114, 116, 117, 118, 119, 120, 122, 123, 124, 125, 127, 129, 132, 134, 137, 143, 144, 146, 149, 150, 151, 153, 169, 176, 178, 181, 184], "actual": [6, 74, 78, 81, 83, 92, 101, 102, 103, 117, 131, 141, 144, 151], "gener": [6, 17, 18, 22, 23, 28, 29, 47, 50, 56, 58, 59, 60, 66, 77, 79, 80, 81, 82, 84, 85, 86, 87, 89, 90, 92, 93, 95, 96, 97, 98, 99, 100, 101, 102, 104, 106, 107, 109, 110, 111, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 124, 125, 126, 127, 129, 132, 134, 135, 137, 139, 140, 141, 143, 144, 146, 147, 148, 
[91, 104], "allei": [91, 104], "lotshap": [91, 104], "landcontour": [91, 104], "poolqc": [91, 104], "fenc": [91, 104], "miscfeatur": [91, 104], "mosold": [91, 104], "yrsold": [91, 104, 132], "saletyp": [91, 104], "salecondit": [91, 104], "rl": [91, 104], "8450": [91, 104], "pave": [91, 104], "reg": [91, 104, 108], "lvl": [91, 104], "allpub": [91, 104], "2008": [91, 104], "wd": [91, 104], "9600": [91, 104], "2007": [91, 104], "11250": [91, 104], "ir1": [91, 104], "9550": [91, 104], "2006": [91, 104], "abnorml": [91, 104], "14260": [91, 104], "1455": 91, "1456": 91, "62": [91, 104], "7917": 91, "1457": 91, "13175": 91, "mnprv": [91, 104], "2010": 91, "1458": 91, "66": [91, 104, 105, 153], "9042": 91, "gdprv": 91, "shed": [91, 104], "2500": 91, "1459": [91, 104], "9717": 91, "1460": [91, 104], "9937": 91, "cherri": 91, "retain": [91, 132], "numeric_featur": 91, "fullbath": [91, 104], "halfbath": [91, 104], "neighborhood": [91, 92, 104, 108], "housestyl": [91, 104], "imput": [91, 104], "simpleimput": [91, 104], "numeric_transform": 91, "categorical_transform": 91, "join": 91, "simpleimputersimpleimput": 91, "859": [91, 154], "018": [91, 116], "dollar": [91, 92, 102, 107, 132], "necessarili": [91, 102, 103, 118, 131, 132, 146, 149, 181], "richer": [91, 137], "level": [91, 97, 99, 117, 119, 124, 125, 127, 131, 135, 141, 158, 159, 160, 162, 163, 176], "coars": 91, "dummyregressor": [92, 144], "overview": [92, 93, 96, 98, 102, 103, 107, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 130, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 156, 158, 159, 160, 161, 162, 163, 164], "fetch_california_h": [92, 96, 102, 103, 107, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 148, 155], "return_x_i": [92, 97, 100, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 148, 155, 178], "as_fram": [92, 96, 100, 102, 103, 107, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 148, 155], "rescal": [92, 96, 103, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 132, 144, 148, 150, 155], "splitter": 92, "cv_results_tree_regressor": 92, "n_job": [92, 95, 96, 97, 98, 99, 101, 103, 107, 108, 115, 116, 117, 118, 119, 120, 122, 123, 125, 127, 132, 134, 149, 151, 153, 155], "errors_tree_regressor": 92, "774882": 92, "137125": 92, "533398": 92, "841918": 92, "033639": 92, "512544": 92, "47": [92, 102, 104, 153], "969062": 92, "result_dummi": 92, "errors_dummy_regressor": 92, "91": [92, 104], "140009": 92, "821140": 92, "757566": 92, "543652": 92, "034555": 92, "979007": 92, "477244": 92, "all_error": 92, "concat": [92, 99, 100, 108, 123, 125, 126, 135], "170466": 92, "713153": 92, "605570": 92, "539353": 92, "483618": 92, "941912": 92, "982259": 92, "213912": 92, "692293": 92, "015862": 92, "422448": 92, "542490": 92, "893328": 92, "130930": 92, "732185": 92, "947952": 92, "793400": 92, "991373": 92, "416833": 92, "023571": 92, "020024": 92, "556965": 92, "047253": 92, "539567": 92, "987471": 92, "185225": 92, "910118": 92, "298971": 92, "738824": 92, "084639": 92, "252201": 92, "984471": 92, "060866": 92, "981744": 92, "731943": 92, "547140": 92, "962591": 92, "820219": 92, "768721": 92, "092553": 92, "305556": 92, "604933": 92, "503017": 92, "544447": 92, "147974": 92, "352055": 92, "386320": 92, "121120": 92, "815660": 92, "307338": 92, "216574": 92, "138339": 92, "107460": 92, "548585": 92, "620318": 92, "29": [92, 104, 108, 116, 125, 137], "165331": 92, "linspac": [92, 96, 98, 99, 110, 115, 121, 128, 133, 136, 138, 152, 162], "edgecolor": [92, 95, 99, 102, 103, 104, 
105, 106, 107, 131, 152, 162], "legend": [92, 95, 99, 100, 101, 105, 107, 109, 110, 115, 121, 133, 135, 139, 141, 152, 156, 160, 161, 162, 163], "bbox_to_anchor": [92, 95, 99, 100, 101, 105, 107, 109, 110, 115, 121, 133, 135, 141, 152, 156, 160, 162], "loc": [92, 95, 99, 100, 101, 105, 107, 109, 110, 115, 121, 130, 133, 135, 139, 141, 152, 156, 160, 162], "upper": [92, 95, 99, 100, 101, 105, 107, 108, 110, 115, 121, 135, 141, 156, 160, 162], "xlabel": [92, 95, 97, 98, 99, 100, 102, 103, 104, 105, 106, 108, 122, 123, 125, 126, 131, 132, 134, 139, 141], "Such": [92, 132], "extrem": [92, 97, 106, 107, 124, 127, 131, 132], "gamma": [93, 97, 98, 131, 135, 140], "svm": [93, 97, 98, 137, 140], "form": [93, 95, 97, 98, 108, 128, 129, 133, 134, 136, 138, 144, 150], "accomplish": [93, 98], "rbf": [93, 98, 131, 135, 140], "svc": [93, 97, 98, 140], "scheme": [93, 98, 101, 109, 161], "validationcurvedisplai": [93, 98, 103, 122, 129, 134], "10e": [93, 98], "10e2": [93, 98], "logarithm": [93, 98], "svc__gamma": [93, 98], "retriev": [93, 98, 102, 108, 132], "learningcurvedisplai": [93, 96, 98], "half": [94, 99, 140, 141], "uniform": [94, 99, 107, 110, 131, 153], "handwritten": 95, "digit": 95, "load_digit": 95, "recreat": 95, "minmaxscal": [95, 110, 132, 184], "kfold": [95, 97, 100, 123, 143, 146, 151], "test_score_no_shuffl": 95, "931": 95, "026": 95, "test_score_with_shuffl": 95, "966": 95, "010": [95, 154], "all_scor": [95, 97], "xlim": [95, 108, 115, 141], "impos": [95, 117, 153], "94166667": 95, "89722222": 95, "94986072": 95, "9637883": 95, "90250696": 95, "ship": 95, "descr": [95, 102, 107], "_digits_dataset": 95, "optic": 95, "recognit": 95, "characterist": [95, 102, 107, 141], "1797": 95, "64": [95, 104, 105, 145, 153], "8x8": 95, "pixel": 95, "creator": 95, "alpaydin": 95, "boun": 95, "edu": 95, "tr": 95, "juli": 95, "1998": 95, "copi": [95, 102, 105, 108, 128, 133, 178], "uci": 95, "ic": 95, "nist": 95, "bitmap": 95, "preprint": 95, "32x32": 95, "nonoverlap": 95, "block": [95, 101, 102, 107, 108, 116], "4x4": 95, "invari": [95, 131], "distort": 95, "garri": 95, "j": 95, "candela": 95, "dimmick": 95, "geist": 95, "grother": 95, "janet": 95, "wilson": 95, "handprint": 95, "nistir": 95, "5469": 95, "kaynak": 95, "1995": 95, "Their": [95, 139], "msc": 95, "thesi": 95, "institut": 95, "graduat": 95, "bogazici": 95, "univers": 95, "cascad": 95, "kybernetika": 95, "ken": 95, "tang": 95, "ponnuthurai": 95, "n": [95, 97, 98, 107, 110, 116, 120, 123, 128, 132, 133, 135, 139, 141, 150, 151, 154, 156], "suganthan": 95, "xi": 95, "yao": 95, "kai": 95, "qin": 95, "dimensionalityreduct": 95, "lda": 95, "electr": [95, 104], "electron": 95, "nanyang": 95, "2005": 95, "claudio": 95, "gentil": 95, "nip": 95, "2000": 95, "writer": 95, "wrote": 95, "certain": [95, 123], "130": [95, 116], "hypothesi": [95, 101, 132], "itertool": [95, 104], "bound": [95, 141, 144], "writer_boundari": 95, "256": [95, 116, 117, 153], "386": 95, "516": 95, "646": 95, "776": 95, "915": [95, 115], "1029": 95, "1157": 95, "1287": 95, "1415": 95, "1545": 95, "1667": 95, "zeros_lik": [95, 109], "lower_bound": 95, "upper_bound": 95, "group_id": 95, "lb": 95, "zip": [95, 105, 115, 128, 131, 133], "ytick": [95, 100], "xtick": 95, "ylabel": [95, 100, 101, 103, 122, 123, 131, 132, 134, 139, 141, 156], "groupkfold": 95, "928": 95, "014": [95, 97, 116], "realiti": 95, "synthet": [96, 110, 115, 125, 131, 141, 159, 160, 161, 163], "train_siz": [96, 98, 147, 154], "endpoint": 96, "325": [96, 105], "775": 96, "displai": [96, 112, 121, 139, 141, 152, 
178], "from_estim": [96, 98, 103, 109, 122, 130, 131, 134, 135, 139, 140, 141, 156, 160, 162], "score_typ": [96, 98], "negate_scor": [96, 103, 122, 134], "neg_": [96, 102, 129, 134, 146], "score_nam": [96, 98], "std_display_styl": [96, 98, 103, 122, 134], "errorbar": [96, 98, 103, 122, 132, 134], "ax_": [96, 98, 103, 122, 134, 135, 141], "xscale": [96, 132], "log": [96, 132, 144, 152, 153], "alon": [96, 131], "anymor": [96, 99, 101, 102, 117, 140], "bay": 96, "especi": [96, 132], "report": [96, 97, 102], "problemat": [97, 132, 153], "underestim": 97, "philosoph": 97, "breast": 97, "cancer": 97, "load_breast_canc": 97, "param_grid": [97, 118, 120, 149, 151, 160, 178, 184], "model_to_tun": 97, "gridsearchcvgridsearchcv": [97, 149, 151], "svcsvc": [97, 140], "best_params_": [97, 123, 148, 149, 151, 153, 155, 160, 178, 184], "best_score_": 97, "627": 97, "stage": [97, 100, 116, 124, 125, 127, 137, 141, 162], "misinterpret": 97, "forget": 97, "pitfal": 97, "emb": [97, 151], "dedic": [97, 144], "declar": 97, "inner_cv": 97, "outer_cv": 97, "trial": 97, "test_score_not_nest": 97, "test_score_nest": 97, "n_trial": 97, "non_nest": 97, "append": [97, 100, 108, 110, 121, 123, 135, 146], "merg": [97, 125], "whisker": [97, 107, 123, 125, 126, 132, 145], "vert": [97, 107, 123, 125, 126, 132, 145], "highest": [97, 109, 124, 125, 127, 141, 144, 152, 153], "lure": 97, "overli": [97, 102], "021278": 98, "003896": 98, "680000": 98, "021131": 98, "003630": 98, "746667": 98, "020016": 98, "003548": 98, "786667": 98, "019274": 98, "003748": 98, "800000": 98, "020104": 98, "003524": 98, "019360": 98, "003528": 98, "019182": 98, "003642": 98, "018040": 98, "003457": 98, "826667": 98, "018451": 98, "003804": 98, "018714": 98, "003438": 98, "733333": 98, "765": 98, "043": 98, "param_nam": [98, 103, 122, 134, 149, 152, 153, 178, 184], "disp": [98, 103, 122, 134, 135, 141], "errorbar_kw": 98, "transpar": 98, "regim": 98, "oscil": 98, "donat": [98, 106, 141, 142, 145], "simplist": 98, "imposs": [98, 140], "cv_results_logistic_regress": 99, "test_score_logistic_regress": 99, "815937": 99, "813849": 99, "815036": 99, "815569": 99, "810982": 99, "814709": 99, "813112": 99, "810327": 99, "812416": 99, "816388": 99, "most_frequent_classifi": 99, "cv_results_most_frequ": 99, "test_score_most_frequ": 99, "760329": 99, "756808": 99, "759142": 99, "760739": 99, "761681": 99, "761885": 99, "757463": 99, "757176": 99, "763114": 99, "all_test_scor": 99, "stratified_dummi": 99, "cv_results_stratifi": 99, "test_score_dummy_stratifi": 99, "uniform_dummi": 99, "cv_results_uniform": 99, "test_score_dummy_uniform": 99, "wrong": [99, 124, 127, 152], "henc": [99, 108, 115, 132, 153], "uniformli": [99, 110, 135], "weakest": 99, "argu": 99, "permutation_test_scor": 99, "permut": [99, 178], "quit": [99, 100, 101, 103, 105, 116, 139], "strongest": 99, "load_iri": [100, 178], "toi": [100, 137, 140], "nine": 100, "data_random": 100, "randn": [100, 110, 115, 124, 127, 137], "train_index": 100, "test_index": 100, "six": 100, "train_cv_count": 100, "test_cv_count": 100, "fold_idx": 100, "train_idx": 100, "test_idx": 100, "enumer": [100, 109, 110, 121, 123, 125, 128, 133, 151], "idx": [100, 125, 162], "953": 100, "009": 100, "frequenc": [100, 106, 141], "stratifiedkfold": [100, 142, 145], "960": 100, "016": 100, "past": [101, 106, 128, 133, 141], "ident": [101, 102, 116, 141, 151], "financi": 101, "quotat": 101, "tot": 101, "xom": 101, "exxon": 101, "cvx": 101, "chevron": 101, "cop": 101, "conocophillip": 101, "vlo": 101, "valero": 101, 
"template_nam": 101, "quot": 101, "stock": 101, "2f": [101, 102, 117, 120, 122, 128, 132, 133, 135, 136, 137, 138, 141, 149, 153, 156, 162], "94": [101, 109], "surprisingli": [101, 102, 107, 127], "outstand": 101, "eas": [101, 102, 131, 137, 140], "r2_score": 101, "verifi": [101, 113, 122, 137], "doesn": 101, "proper": [101, 107, 137, 151], "to_period": 101, "q": 101, "69": [101, 104, 109], "forecast": 101, "ulterior": 101, "timeseriessplit": 101, "nuniqu": [101, 105, 160, 184], "74": [101, 104], "shelv": 101, "absurd": 101, "intend": [102, 106, 176], "dive": 102, "area": [102, 103, 104, 110, 135, 140, 141, 160], "geograph": [102, 107, 118], "_california_housing_dataset": [102, 107], "20640": [102, 107], "medinc": [102, 107, 108], "houseag": [102, 107, 108], "averoom": [102, 107, 108, 155], "household": [102, 107], "avebedrm": [102, 107, 108], "aveoccup": [102, 107, 108], "member": [102, 107], "latitud": [102, 107, 108], "longitud": [102, 107, 108], "statlib": [102, 107], "dcc": [102, 107], "fc": [102, 107], "pt": [102, 107], "ltorgo": [102, 107], "cal_hous": [102, 107], "district": [102, 107, 108, 118], "hundr": [102, 107, 125], "deriv": [102, 105, 107, 129, 134, 137], "1990": [102, 107], "u": [102, 107], "smallest": [102, 107, 116, 132], "bureau": [102, 107], "600": [102, 107], "resid": [102, 107], "home": [102, 107], "empti": [102, 107], "vacat": [102, 107], "resort": [102, 107], "pace": [102, 107], "kellei": [102, 107], "ronald": [102, 107], "barri": [102, 107], "spatial": [102, 107], "autoregress": [102, 107], "1997": [102, 107], "291": [102, 107], "297": [102, 107], "3252": [102, 107, 108], "984127": [102, 107, 108], "023810": [102, 107, 108], "322": [102, 107, 108, 116], "555556": [102, 107, 108], "3014": [102, 107, 108], "238137": [102, 107, 108], "971880": [102, 107, 108], "2401": [102, 107, 108], "109842": [102, 107, 108], "2574": [102, 107, 108], "288136": [102, 107, 108], "073446": [102, 107, 108], "496": [102, 107, 108, 152, 153], "802260": [102, 107, 108], "6431": [102, 107, 108], "817352": [102, 107, 108], "073059": [102, 107, 108], "558": [102, 107, 108], "547945": [102, 107, 108], "8462": [102, 107, 108], "281853": [102, 107, 108], "081081": [102, 107, 108], "565": [102, 107, 108], "181467": [102, 107, 108], "452": 102, "358": 102, "352": 102, "341": 102, "342": 102, "medhousev": [102, 107, 108], "decisiontreeregressordecisiontreeregressor": [102, 137, 163], "mean_absolute_error": [102, 113, 120, 121, 122, 136, 144], "grown": [102, 113, 117, 122], "leaf": [102, 117, 149, 153, 154, 156, 160, 162, 172, 173], "node": [102, 117, 119, 149, 154, 156, 160, 162, 171, 172, 173], "phenomena": 102, "unstabl": [102, 132], "wouldn": 102, "unlimit": [102, 117], "lucki": 102, "easiest": 102, "variant": 102, "226245": 102, "004727": 102, "909797": 102, "228429": 102, "004796": 102, "421170": 102, "224813": 102, "004612": 102, "411089": 102, "225663": 102, "004545": 102, "319824": 102, "220283": 102, "004752": 102, "607875": 102, "front": 102, "revert": [102, 129, 134], "negat": 102, "test_error": [102, 132], "226878": 102, "004689": 102, "901300": 102, "224951": 102, "004616": 102, "572767": 102, "225966": 102, "004643": 102, "194585": 102, "227141": 102, "004674": 102, "590236": 102, "229660": 102, "004721": 102, "727998": 102, "percentag": [102, 110, 144], "tag": [102, 128, 133], "expert": [102, 137, 140], "25903583": 102, "25394607": 102, "25143719": 102, "2595458": 102, "25119472": 102, "003268": 102, "00340438": 102, "00354171": 102, "00333428": 102, "00355721": 102, "26291527": 102, 
"41947109": 102, "44492564": 102, "23357874": 102, "40788361": 102, "overal": [102, 113, 117, 118, 122, 132, 135, 141, 160], "fluctuat": [103, 137, 149], "hopefulli": [103, 117, 136], "302": 103, "318": 103, "harm": 103, "matter": [103, 126, 151], "compromis": [103, 141], "dispers": [103, 116], "directori": [104, 105, 106], "charact": 104, "marker": [104, 109, 141, 162], "pars": [104, 105], "lotconfig": 104, "208500": 104, "fr2": 104, "181500": 104, "223500": 104, "corner": [104, 141], "140000": 104, "250000": 104, "nin": 104, "tail": [104, 105, 107, 139], "coupl": [104, 105, 107, 116, 117, 132, 153], "core": [104, 105, 106, 107, 115, 116], "rangeindex": [104, 105, 106, 107], "null": [104, 105, 106, 107, 139], "1201": 104, "landslop": 104, "condition1": 104, "condition2": 104, "bldgtype": 104, "yearremodadd": 104, "roofstyl": 104, "roofmatl": 104, "exterior1st": 104, "exterior2nd": 104, "masvnrtyp": 104, "588": 104, "1452": 104, "exterqu": 104, "extercond": 104, "foundat": 104, "bsmtqual": 104, "1423": 104, "bsmtcond": 104, "bsmtexposur": 104, "1422": 104, "bsmtfintype1": 104, "bsmtfintype2": 104, "heat": 104, "heatingqc": 104, "centralair": 104, "bsmtfullbath": 104, "bsmthalfbath": 104, "kitchenqu": 104, "fireplacequ": 104, "770": 104, "garagetyp": 104, "1379": 104, "garageyrblt": 104, "garagefinish": 104, "garagequ": 104, "garagecond": 104, "paveddr": 104, "72": 104, "281": 104, "901": 104, "kb": [104, 106], "numerical_data": 104, "410": 104, "layout": 104, "subplots_adjust": [104, 105, 107, 108], "hspace": [104, 105, 107], "wspace": [104, 107], "criterion": [104, 108, 156], "swim": 104, "pool": [104, 127], "string_data": 104, "490": 104, "ceil": 104, "zip_longest": 104, "n_string_featur": 104, "nrow": [104, 141, 162], "ncol": [104, 131, 133, 141, 144, 162], "fig": [104, 108, 131, 132, 141, 144, 152, 155, 156, 157, 178], "subplot": [104, 108, 131, 132, 141, 144, 156, 160, 161, 162], "ravel": [104, 105, 133, 139, 162], "barh": [104, 106, 108, 135, 139, 141], "set_titl": [104, 137, 138, 141, 162], "databas": [104, 155], "grvl": 104, "gd": 104, "make_column_transform": [104, 119], "most_frequent_imput": 104, "mean_imput": 104, "ames_housing_preprocess": 104, "tolist": [104, 156, 162], "timestamp": 105, "150": [105, 109], "0880": 105, "033870": 105, "161": [105, 116, 156], "336": 105, "0842": 105, "033571": 105, "163": 105, "409": 105, "0234": 105, "033223": 105, "156": 105, "445": 105, "0016": 105, "032908": 105, "148": 105, "441": 105, "1144": 105, "38254": 105, "38253": 105, "mb": [105, 107], "str": 105, "datetim": 105, "direct": [105, 131, 135, 153], "reopen": 105, "explan": [105, 157], "soup": 105, "blender": 105, "blend": [105, 131], "veget": 105, "instantan": 105, "profession": 105, "calibr": 105, "track": 105, "spent": [105, 125], "food": 105, "uranium": 105, "petrol": 105, "ga": 105, "coal": 105, "plant": 105, "400": 105, "cheaper": [105, 108], "w": [105, 144, 162], "deliv": 105, "breakout": 105, "kilomet": 105, "costli": [105, 135, 148, 149, 155], "cruis": 105, "datetime64": 105, "ns": 105, "freq": 105, "august": 105, "septemb": 105, "date_first_rid": 105, "cycling_rid": 105, "data_rid": 105, "target_rid": 105, "tempor": 105, "resolut": [105, 153], "smoother": [105, 110], "set_xlabel": [105, 144, 162], "extremum": 105, "rng": [105, 107, 108, 110, 115, 127, 137], "randomst": [105, 107, 108, 110, 115, 127, 131, 137], "arang": [105, 107, 108, 110, 160, 161, 163], "quantiz": [105, 107], "midpoint": [105, 107], "interv": [105, 107, 110, 112, 115, 121, 159, 161, 163], "qcut": [105, 107], 
"retbin": [105, 107], "lambda": [105, 107, 152, 178], "mid": [105, 107], "palett": [105, 107, 109, 135, 139, 140, 156, 160, 162], "viridi": [105, 107, 152, 162, 178], "uphil": 105, "physiolog": 105, "stimuli": 105, "recenc": [106, 141], "monetari": [106, 141], "12500": 106, "98": [106, 108, 156], "3250": [106, 138], "4000": 106, "6000": 106, "748": 106, "747": 106, "noth": [106, 110], "shock": 106, "her": 106, "762032": 106, "237968": 106, "strike": 106, "fetch": 107, "internet": 107, "california_h": 107, "526": 107, "585": 107, "521": [107, 141], "413": [107, 153], "422": [107, 141], "demographi": 107, "granular": [107, 141], "20639": 107, "640": [107, 156], "unnotic": 107, "features_of_interest": [107, 132], "429000": 107, "096675": 107, "070655": 107, "1425": 107, "476744": 107, "474173": 107, "473911": 107, "386050": 107, "1132": 107, "462122": 107, "846154": 107, "333333": 107, "692308": 107, "440716": 107, "006079": 107, "429741": 107, "787": [107, 150], "229129": 107, "048780": 107, "818116": 107, "1166": 107, "052381": 107, "099526": 107, "282261": 107, "1725": 107, "141": 107, "909091": 107, "066667": 107, "1243": 107, "35682": 107, "huge": 107, "datapoint": [107, 135], "coast": 107, "big": [107, 144], "citi": [107, 144], "san": 107, "diego": 107, "lo": 107, "angel": 107, "jose": 107, "francisco": 107, "columns_drop": 107, "distinguish": 107, "curiou": [107, 140, 184], "553": [107, 141], "062": 107, "coef": [107, 108, 132, 135, 139], "est": [107, 132], "spot": [107, 132], "10000": 108, "100k": 108, "assert": [108, 117, 184], "un": [108, 132], "bin_var": 108, "randint": [108, 120, 124, 127], "rnd_bin": 108, "num_var": 108, "rnd_num": 108, "x_with_rnd_feat": 108, "x_train": 108, "x_test": 108, "y_train": [108, 173], "y_test": 108, "train_dataset": 108, "insert": [108, 135], "kde": 108, "scatter_kw": 108, "x_i": 108, "versu": [108, 144, 164], "6013466090490024": 108, "5975757793803438": 108, "Its": 108, "somehow": 108, "rest": [108, 137, 162], "worth": 108, "magnitud": [108, 130, 132, 135, 171], "habit": 108, "nb": 108, "outcom": [108, 139, 141, 155], "shall": [108, 110], "rise": 108, "80k": 108, "gaug": 108, "decad": 108, "visibl": [108, 144], "dev": 108, "6013157556102924": 108, "5972410717953726": 108, "safe": 108, "perturb": 108, "repeatedkfold": 108, "cv_model": 108, "n_repeat": [108, 125, 126], "boxplot": 108, "cyan": 108, "satur": 108, "pretti": 108, "l1": 108, "015": 108, "5899811014945939": 108, "5769786920519312": 108, "partli": 108, "multivari": 108, "instabl": 108, "teas": 108, "9796463093530234": 108, "8467693464367002": 108, "formal": 108, "brought": 108, "argsort": [108, 125], "set_ytick": 108, "set_yticklabel": 108, "9798863545214676": 108, "8465346522200555": 108, "def": [108, 110, 115, 128, 131, 133, 135, 137, 138, 149, 152, 153, 160, 178], "get_score_after_permut": 108, "curr_feat": 108, "x_permut": 108, "col_idx": 108, "permuted_scor": 108, "get_feature_import": 108, "baseline_score_train": 108, "permuted_score_train": 108, "feature_import": 108, "661": [108, 121], "list_feature_import": 108, "n_round": 108, "00879": 108, "heavili": 108, "permutation_import": 108, "calcul": [108, 109, 141], "importances_mean": 108, "importances_std": 108, "plot_feature_import": 108, "perm_importance_result": 108, "feat_nam": 108, "xerr": 108, "perm_importance_result_train": 108, "realist": [108, 139], "unclear": 108, "culmen_column": [109, 130, 135, 139, 156, 157, 158, 162], "purposefulli": 109, "unlik": [109, 113, 122, 156], "misclassifi": [109, 131, 135], "decisiontreeclassifi": 
[109, 119, 142, 145, 156, 160, 162], "tab": [109, 110, 115, 131, 135, 139, 140, 141, 156, 160, 161, 162], "decisiontreeclassifierdecisiontreeclassifi": [109, 156, 162], "misclassified_samples_idx": 109, "flatnonzero": 109, "data_misclassifi": 109, "decisionboundarydisplai": [109, 130, 131, 135, 139, 140, 156, 158, 160, 162], "response_method": [109, 131, 135, 139, 140, 156, 160, 162], "cmap": [109, 131, 135, 139, 140, 149, 156, 160, 162], "rdbu": [109, 131, 140, 160], "center": [109, 131, 133, 152, 155, 160], "nwith": [109, 115], "misclassif": [109, 135, 141], "sample_weight": 109, "trick": [109, 140], "drastic": 109, "qualit": [109, 110, 128, 133, 162], "newly_misclassified_samples_idx": 109, "remaining_misclassified_samples_idx": 109, "intersect1d": 109, "ensemble_weight": 109, "935672514619883": 109, "6929824561403509": 109, "adaboostclassifi": 109, "samm": 109, "adaboostclassifieradaboostclassifi": 109, "boosting_round": 109, "estimators_": [109, 110, 112, 121], "to_numpi": [109, 110, 121, 141], "640x480": 109, "estimator_weights_": 109, "58351894": 109, "46901998": 109, "03303773": 109, "estimator_errors_": 109, "05263158": 109, "05864198": 109, "08787269": 109, "sens": [109, 163], "generate_data": [110, 115], "x_min": [110, 115], "x_max": [110, 115], "capabl": [110, 115, 132, 141, 159, 161, 163], "y_pred": [110, 142, 144, 145], "data_bootstrap": 110, "target_bootstrap": 110, "bootstrap_sampl": 110, "bootstrap_indic": 110, "n_bootstrap": 110, "bootstrap_idx": 110, "facecolor": 110, "180": [110, 138], "linewidth": [110, 131, 135, 160], "darker": [110, 135, 139], "data_train_hug": 110, "data_test_hug": 110, "target_train_hug": 110, "100_000": 110, "data_bootstrap_sampl": 110, "target_bootstrap_sampl": 110, "ratio_unique_sampl": 110, "bag_of_tre": 110, "tree_idx": [110, 121], "tree_predict": [110, 121], "feed": 110, "bag_predict": 110, "unbroken": [110, 115], "whole": [110, 112, 117, 119, 121, 132, 137], "meta": 110, "wrap": [110, 132, 135, 164], "snippet": [110, 178], "smooth": [110, 131, 135, 140], "bagged_tre": [110, 119], "bagged_trees_predict": 110, "els": [110, 137, 160], "opac": 110, "appreci": 110, "space": [110, 112, 114, 115, 121, 123, 127, 129, 131, 132, 134, 135, 139, 156, 161, 162], "polynomialfeatur": [110, 129, 131, 132, 134, 137], "polynomial_regressor": 110, "1e": [110, 116, 135, 150, 153], "intention": 110, "simpli": [110, 162], "regressor_predict": 110, "base_model_lin": 110, "bagging_predict": 110, "ylim": [110, 141], "shade": 110, "randomizedsearchcv": [111, 117, 120, 148, 153, 155, 178], "penguins_regress": [112, 121, 128, 133, 136, 138, 157, 159, 160, 161, 163], "evenli": [112, 121], "170": [112, 121], "230": [112, 121], "newli": [112, 121], "conduct": [113, 122, 155], "learning_r": [113, 114, 117, 122, 123, 147, 149, 152, 153, 154, 178, 180], "slower": [113, 122, 134, 151], "offer": [113, 122, 153], "certainli": [113, 122], "n_iter_no_chang": [113, 122], "max_leaf_nod": [114, 117, 123, 147, 149, 151, 152, 153, 154, 160, 178, 180], "residu": [115, 117, 144, 153], "back": [115, 139, 141, 152, 156], "len_x": 115, "rand": [115, 137], "target_train_predict": 115, "target_test_predict": 115, "line_predict": 115, "lines_residu": 115, "edit": 115, "initi": [115, 151, 181], "tree_residu": 115, "target_train_predicted_residu": 115, "target_test_predicted_residu": 115, "manag": 115, "x_sampl": 115, "target_tru": 115, "target_true_residu": 115, "commit": [115, 144], "y_pred_first_tre": 115, "517": 115, "393": 115, "145": 115, "248": [115, 116], "y_pred_first_and_second_tre": 
115, "gradientboostingregressor": [115, 116, 122], "gradient_boost": [115, 116], "cv_results_gbdt": [115, 116], "416": 115, "144": [115, 153], "012": 115, "random_forest": [115, 119], "cv_results_rf": 115, "465": 115, "315": 115, "032": 115, "197": 115, "brute": [116, 136], "overcom": [116, 118, 131, 137], "benchmark": 116, "392": 116, "914": 116, "042": 116, "011": 116, "kbinsdiscret": [116, 131, 137], "n_bin": [116, 131, 137], "quantil": [116, 131, 140], "data_tran": 116, "opt": [116, 132, 142, 145], "hostedtoolcach": [116, 132, 142, 145], "x64": [116, 132, 142, 145], "lib": [116, 132, 142, 145], "python3": [116, 132, 142, 145], "site": [116, 132, 142, 145], "_discret": 116, "py": [116, 132, 142, 145], "userwarn": [116, 142, 145], "249": 116, "231": 116, "162": 116, "203": 116, "242": 116, "125": 116, "160": 116, "126": 116, "136": [116, 142, 145], "199": 116, "col": 116, "253": [116, 154], "207": 116, "235": [116, 121, 163], "773": 116, "273": 116, "histogram_gradient_boost": 116, "cv_results_hgbdt": 116, "758": 116, "694": 116, "862": 116, "077": 116, "clariti": 117, "doubl": [117, 149, 150], "max_featur": [117, 119, 120], "grow": [117, 118, 160, 178], "uncorrel": 117, "symmetr": [117, 132, 144, 160], "constraint": [117, 132, 160], "min_samples_leaf": [117, 118, 152, 153, 160, 178], "branch": [117, 160], "promot": 117, "altogeth": 117, "param_distribut": [117, 153, 155], "search_cv": 117, "n_iter": [117, 120, 148, 153, 155, 178], "param_": [117, 120, 123, 149, 153], "mean_test_error": [117, 120], "std_test_error": [117, 120], "cv_results_": [117, 120, 123, 149, 151, 153, 155, 178], "mean_test_scor": [117, 120, 123, 149, 151, 152, 153, 155, 178], "std_test_scor": [117, 120, 149, 151, 152, 153], "sort_valu": [117, 120, 149, 153, 155], "param_max_featur": [117, 120], "param_max_leaf_nod": 117, "param_min_samples_leaf": 117, "996708": 117, "575388": 117, "013965": 117, "522837": 117, "290532": 117, "320069": 117, "169996": 117, "486971": 117, "425679": 117, "597833": 117, "856788": 117, "543134": 117, "927604": 117, "800344": 117, "100456": 117, "635957": 117, "515785": 117, "833755": 117, "640989": 117, "856759": 117, "role": 117, "inter": 117, "refit": [117, 147, 151, 154], "overlook": 117, "stat": [117, 120, 153], "loguniform": [117, 153], "param_max_it": 117, "param_learning_r": 117, "01864": 117, "043016": 117, "262257": 117, "047293": 117, "811893": 117, "229961": 117, "176656": 117, "410615": 117, "243557": 117, "297739": 117, "740945": 117, "360870": 117, "083745": 117, "095718": 117, "274735": 117, "215543": 117, "275814": 117, "216063": 117, "067503": 117, "780658": 117, "237595": 117, "05929": 117, "855942": 117, "418406": 117, "160519": 117, "270716": 117, "416068": 117, "125207": 117, "914995": 117, "557058": 117, "054511": 117, "224344": 117, "623883": 117, "248463": 117, "147930": 117, "842348": 117, "906226": 117, "494647": 117, "710124": 117, "061034": 117, "568261": 117, "551379": 117, "079415": 117, "455489": 117, "944949": 117, "0351": 117, "503834": 117, "949876": 117, "019923": 117, "624869": 117, "045625": 117, "039361": 117, "818311": 117, "083471": 117, "019351": 117, "377257": 117, "051528": 117, "01724": 117, "941795": 117, "084528": 117, "rank": [117, 123, 184], "hgbt": 117, "hassl": 118, "354": 118, "087": 118, "min_samples_split": [118, 160], "523": [118, 133], "107": 118, "bagging_regressor": 118, "642": 118, "083": 118, "decent": [118, 152, 153], "modif": 119, "inject": 119, "decorrel": 119, "categorical_encod": 119, "scores_tre": 119, "820": 119, "006": 
[119, 123], "scores_bagged_tre": 119, "846": 119, "005": 119, "randomforestclassifi": [119, 125, 126], "scores_random_forest": 119, "004": 119, "disabl": 119, "sqrt": 119, "literatur": 119, "agnost": 119, "param": [120, 123, 149, 152], "bootstrap_featur": 120, "estimator__ccp_alpha": 120, "estimator__criterion": 120, "estimator__max_depth": 120, "estimator__max_featur": 120, "estimator__max_leaf_nod": 120, "estimator__min_impurity_decreas": 120, "estimator__min_samples_leaf": 120, "estimator__min_samples_split": 120, "estimator__min_weight_fraction_leaf": 120, "estimator__random_st": 120, "estimator__splitt": 120, "max_sampl": 120, "oob_scor": 120, "verbos": [120, 150, 153, 155, 180], "warm_start": 120, "param_n_estim": 120, "param_max_sampl": 120, "param_estimator__max_depth": 120, "281680": 120, "061146": 120, "475610": 120, "121340": 120, "602077": 120, "070860": 120, "326435": 120, "174542": 120, "956380": 120, "278850": 120, "017761": 120, "674627": 120, "135453": 120, "005112": [120, 152], "224306": 120, "316641": 120, "070459": 120, "053769": 120, "759904": 120, "679971": 120, "334637": 120, "125204": 120, "528335": 120, "972150": 120, "872540": 120, "686614": 120, "949551": 120, "721352": 120, "529438": 120, "429014": 120, "750573": 120, "081410": 120, "841505": 120, "968520": 120, "258303": 120, "351126": 120, "840351": 120, "744600": 120, "889776": 120, "075650": 120, "gram": [121, 129, 134, 136], "366": 121, "data_rang": 121, "forest_predict": 121, "n_estimators_": 122, "243": [122, 146], "hist_gbdt": 123, "839": [123, 142, 145], "best_estimator_": 123, "528": 123, "447": 123, "576": 123, "290": 123, "414": 123, "index_column": 123, "inner_cv_result": 123, "cv_idx": 123, "search_cv_result": 123, "set_index": [123, 130, 135, 139, 146], "renam": [123, 149, 152, 153, 155, 178], "coincid": [123, 141], "bioinformat": [124, 127], "rna": [124, 127], "seq": [124, 127], "ten": [124, 127], "anova": [124, 125, 127], "feature_select": [124, 125, 126, 127], "selectkbest": [124, 125, 127], "f_classif": [124, 125, 127], "pre": [124, 127], "princip": 125, "make_classif": [125, 126], "n_inform": [125, 126], "n_redund": [125, 126], "univari": 125, "model_without_select": [125, 126], "model_with_select": [125, 126], "score_func": [125, 127], "cv_results_without_select": [125, 126], "incorpor": 125, "cv_results_with_select": [125, 126], "analyz": [125, 132, 178], "swap": 125, "swaplevel": [125, 126], "Of": 125, "scores_": 125, "percentil": 125, "alien": 125, "primari": 125, "feature_importances_": 126, "suffici": [126, 131], "class_sep": 126, "selectfrommodel": 126, "feature_selector": [126, 127], "overestim": 126, "100000": 127, "550": 127, "data_subset": 127, "940": 127, "succeed": 127, "legit": 127, "leak": 127, "data_train_subset": 127, "520": 127, "460": 127, "boilerpl": 127, "linear_model_flipper_mass": [128, 133, 138], "flipper_length": [128, 133, 138], "weight_flipper_length": [128, 133, 136, 138], "intercept_body_mass": [128, 133, 136, 138], "body_mass": [128, 133, 138], "flipper_length_rang": [128, 133, 136, 138], "goodness_fit_measur": [128, 133], "true_valu": [128, 133], "scalar": [128, 133], "model_idx": [128, 133], "x1": [129, 134, 139], "x2": [129, 134], "x3": [129, 134], "penguins_non_miss": [129, 134, 184], "181": [129, 134, 138], "186": [129, 134, 138], "195": [129, 134, 138], "193": [129, 134, 138], "190": [129, 134, 138, 153], "sign": [129, 134], "interaction_onli": [129, 134], "intermedi": [129, 134, 137, 151, 152], "infinit": [130, 135], "l2": [130, 135], "yourself": [130, 
135], "penguins_train": [130, 135, 139], "penguins_test": [130, 135, 139], "candid": [130, 135, 153, 155, 156], "cs": [130, 135], "nevertheless": 131, "moon": [131, 140], "crescent": 131, "make_moon": [131, 140], "newaxi": [131, 140, 160], "data_moon": [131, 140], "target_moon": [131, 140], "gaussian": [131, 140], "edg": 131, "concentr": 131, "make_gaussian_quantil": [131, 140], "n_class": [131, 139, 140, 162], "gauss": [131, 140], "data_gauss": [131, 140], "target_gauss": [131, 140], "xor": 131, "OR": 131, "target_xor": 131, "logical_xor": 131, "int32": [131, 148, 155, 160], "data_xor": 131, "glanc": 131, "listedcolormap": 131, "constrained_layout": 131, "common_scatter_plot_param": 131, "plot_decision_boundari": [131, 135], "plot_method": [131, 135], "pcolormesh": [131, 135], "vmin": [131, 135, 149, 156, 162], "vmax": [131, 135, 149, 156, 162], "middl": [131, 150], "colormap": [131, 135, 139, 162], "contour": [131, 135], "set_ylabel": [131, 144, 162], "soft": [131, 139], "unsur": [131, 139], "attempt": [131, 132, 135], "leverag": 131, "spline": [131, 137], "kbinsdiscretizerkbinsdiscret": [131, 137], "segment": 131, "rectangular": 131, "drawn": 131, "n_knot": 131, "splinetransformersplinetransform": [131, 137], "favor": 131, "curvi": [131, 135], "knot": 131, "include_bia": [131, 132, 134, 137], "polynomialfeaturespolynomialfeatur": [131, 132, 137], "nystr\u00f6m": [131, 134], "kernel_approxim": [131, 134, 135, 137], "coef0": [131, 139], "nystroemnystroem": [131, 137], "expans": [131, 137, 140], "intract": 131, "radial": [131, 140], "basi": [131, 140], "furthemor": 131, "induct": 131, "rotation": 131, "everywher": [131, 135], "drawback": 131, "orign": 131, "despit": 131, "augment": [131, 132, 140], "interplai": 131, "linear_regress": [132, 134, 136, 137, 163], "train_error": 132, "2e": 132, "85e": 132, "63e": 132, "69e": 132, "47e": 132, "fortun": 132, "feature_names_in_": 132, "model_first_fold": 132, "linearregressionlinearregress": [132, 136, 137], "queri": [132, 135], "weights_linear_regress": 132, "symlog": 132, "homogen": 132, "choleski": 132, "_ridg": 132, "linalgwarn": 132, "rcond": 132, "59923e": 132, "linalg": 132, "xy": 132, "assume_a": 132, "po": 132, "overwrite_a": 132, "59556e": 132, "59609e": 132, "11828e": 132, "06109e": 132, "60121e": 132, "61694e": 132, "59735e": 132, "59566e": 132, "72304e": 132, "60047e": 132, "59824e": 132, "59593e": 132, "59564e": 132, "5959e": 132, "59553e": 132, "59686e": 132, "60737e": 132, "5957e": 132, "60243e": 132, "90e": 132, "56e": 132, "55e": 132, "68e": 132, "weights_ridg": 132, "shrunk": 132, "worst": [132, 141], "saga": 132, "lsqr": 132, "re": [132, 176, 184], "resolv": 132, "omit": 132, "annual": 132, "neutral": [132, 162], "ahead": 132, "scaled_ridg": 132, "78e": 132, "21e": 132, "83e": 132, "17e": 132, "sweet": 132, "weights_ridge_scaled_data": 132, "ridge_large_alpha": 132, "1_000_000": 132, "unpredict": 132, "occurr": 132, "presenc": [132, 146], "divis": 132, "beforehand": 132, "store_cv_valu": 132, "12e": 132, "25e": 132, "50e": 132, "40e": 132, "mse_alpha": 132, "cv_values_": 132, "cv_alpha": 132, "000000e": 132, "841881e": 132, "347783e": 132, "321941e": 132, "837563e": 132, "343115e": 132, "747528e": 132, "831866e": 132, "336956e": 132, "310130e": 132, "824352e": 132, "328835e": 132, "053856e": 132, "814452e": 132, "318133e": 132, "274549e": 132, "319038e": 132, "337394e": 132, "328761e": 132, "324503e": 132, "338181e": 132, "722368e": 132, "328652e": 132, "338778e": 132, "564633e": 132, "331799e": 132, "339232e": 132, "334185e": 
132, "339576e": 132, "yerr": 132, "yscale": 132, "salt": 132, "cook": 132, "best_alpha": 132, "11497569953977356": 132, "35111917342151344": 132, "1519911082952933": 132, "4641588833612782": 132, "08697490026177834": 132, "6135907273413176": 132, "stem": [132, 144], "summari": 132, "wasn": 132, "disproportion": 132, "15000": 133, "14000": 133, "predicted_body_mass": [133, 136, 138], "misleadingli": 133, "mse": [133, 137, 144, 146], "ab": [133, 137], "2764": 133, "854": 133, "338": 133, "573": 133, "041": 133, "337": 134, "071": 134, "868": 134, "poly_featur": 134, "linear_regression_interact": 134, "7077": 134, "3384": 134, "731": 134, "7347": 134, "3236": 134, "687": 134, "7858": 134, "3510": 134, "725": 134, "7083": 134, "3724": 134, "708": 134, "7467": 134, "3914": 134, "809": 134, "flipper_length_first_sampl": 134, "culmen_depth_first_sampl": 134, "301": 134, "790": 134, "340": 134, "spread": [134, 135, 152, 178], "enrich": 134, "nystroem_regress": [134, 137], "nystroem__n_compon": 134, "set_param": [134, 135, 150, 154, 180, 184], "331": 134, "832": 134, "4950": 134, "5050": 134, "footprint": 134, "scalabl": 134, "metion": 135, "invers": 135, "diverg": [135, 139, 155, 162], "rdbu_r": [135, 139], "1e6": 135, "logisticregression__c": [135, 178, 180], "sigmoid": [135, 139], "dark": 135, "nearli": 135, "steep": 135, "deduc": [135, 157], "lai": 135, "zone": 135, "weaker": 135, "light": 135, "lr_weight": 135, "perpendicular": [135, 156], "lowest": [135, 136, 144], "anywher": 135, "minor": 135, "blob": [135, 160], "frontier": 135, "conjunct": 135, "certainti": [135, 162], "68556640610011": 136, "5780": 136, "831358077066": 136, "mean_squared_error": [136, 137, 144], "inferred_body_mass": 136, "model_error": 136, "154546": 136, "313": 136, "occas": 137, "cubic": [137, 173], "said": [137, 144, 146], "data_max": 137, "data_min": 137, "len_data": 137, "sort": 137, "full_data": 137, "input_featur": 137, "reshap": [137, 144, 162], "fit_score_plot_regress": 137, "global": 137, "data_expand": 137, "polynomial_expans": 137, "polynomial_regress": 137, "encourag": [137, 144], "svr": 137, "svrsvr": 137, "medium": 137, "10_000": [137, 156], "binned_regress": 137, "spline_regress": 137, "expand": 137, "3750": 138, "3800": 138, "3450": 138, "3650": 138, "2700": 138, "6300": 138, "heavier": [138, 157], "formula": 138, "shorter": 138, "13000": 138, "millimet": 138, "body_mass_180": 138, "body_mass_181": 138, "7200": 138, "7240": 138, "goe": [138, 141], "170mm": 138, "230mm": 138, "redefin": 138, "groupbi": 139, "inclin": 139, "x0": 139, "coef1": 139, "obliqu": [139, 156], "724988": 139, "096500": 139, "readi": 139, "barplot": 139, "horizont": [139, 160, 162], "vertic": 139, "coordin": [139, 151, 152, 155, 178], "hypothet": 139, "test_penguin": 139, "y_pred_proba": [139, 156], "17145312": 139, "82854688": 139, "y_proba_sampl": 139, "classes_": [139, 141, 156, 162, 180], "insist": 139, "overconfid": 139, "underconfid": 139, "softer": 139, "asymptot": 139, "softmax": 139, "hold": [140, 144, 152, 155, 178, 184], "interlac": [140, 160], "depict": [140, 157], "push": 140, "surround": 140, "kernel_model": 140, "donor": 141, "ago": 141, "new_donor": 141, "That": [141, 146, 149, 151], "258": 141, "505": 141, "665": 141, "615": 141, "743": 141, "374": 141, "7780748663101604": 141, "accuracy_scor": 141, "778": 141, "finer": 141, "confusionmatrixdisplai": 141, "incorrect": 141, "erron": 141, "tp": 141, "tn": 141, "fn": 141, "fp": 141, "precision_scor": [141, 142, 145], "recall_scor": 141, "pos_label": [141, 142, 145], 
"688": 141, "124": 141, "mislabel": 141, "ratio": 141, "dummy_classifi": 141, "762": 141, "balanced_accuracy_scor": 141, "haven": 141, "target_proba_predict": 141, "271820": 141, "728180": 141, "451764": 141, "548236": 141, "445211": 141, "554789": 141, "441577": 141, "558423": 141, "870583": 141, "129417": 141, "equivalence_pred_proba": 141, "idxmax": 141, "graph": 141, "precisionrecalldisplai": 141, "tpr": 141, "ppv": 141, "ap": 141, "preval": 141, "discrimin": 141, "roccurvedisplai": 141, "dash": 141, "plot_chance_level": 141, "pr": 141, "chance_level_kw": 141, "ambigu": [142, 145], "valueerror": [142, 145], "exc": [142, 145], "_valid": [142, 145], "recent": [142, 145], "_scorer": [142, 145], "__call__": [142, 145], "scorer": [142, 143, 145, 146], "_score": [142, 145], "355": [142, 145], "_sign": [142, 145], "_score_func": [142, 145], "y_true": [142, 144, 145], "scoring_kwarg": [142, 145], "_param_valid": [142, 145], "211": [142, 145], "wrapper": [142, 145], "arg": [142, 145, 153], "kwarg": [142, 145, 153], "_classif": [142, 145], "2127": [142, 145], "precision_recall_fscore_support": [142, 145], "1721": [142, 145], "_check_set_wise_label": [142, 145], "1507": [142, 145], "catch": [142, 145], "make_scor": [142, 145], "syntax": [143, 146], "iowa": 144, "intro": [144, 164], "996": 144, "902": 144, "2064": 144, "736": 144, "6872520581075487": 144, "dummy_regressor": 144, "608": 144, "disadvantag": 144, "median_absolute_error": 144, "137": 144, "mean_absolute_percentage_error": 144, "574": 144, "obsev": 144, "unobserv": 144, "extern": [144, 151], "cloud": 144, "against": 144, "exhibit": 144, "predictionerrordisplai": 144, "from_predict": 144, "actual_vs_predict": 144, "scatter_kwarg": 144, "residual_vs_predict": 144, "nwithout": 144, "banana": 144, "smile": 144, "clue": 144, "monoton": 144, "quantiletransform": [144, 184], "transformedtargetregressor": 144, "n_quantil": [144, 184], "900": 144, "output_distribut": 144, "model_transformed_target": 144, "ntransform": 144, "406": 144, "327": [144, 153], "disapprov": 144, "statistician": 144, "justifi": 144, "poissonregressor": 144, "tweedieregressor": 144, "reachabl": 144, "623": 145, "507": 145, "108": 145, "255": [145, 153], "166": 145, "00379062": 145, "00376248": 145, "0038867": 145, "00365186": 145, "00371385": 145, "00387788": 145, "00366783": 145, "00371575": 145, "00371671": 145, "00378752": 145, "00309134": 145, "00321937": 145, "00310946": 145, "00320387": 145, "00321031": 145, "00326419": 145, "00317121": 145, "00320053": 145, "00322223": 145, "00360751": 145, "test_accuraci": 145, "29333333": 145, "53333333": 145, "74666667": 145, "65333333": 145, "69333333": 145, "77333333": 145, "63513514": 145, "75675676": 145, "test_balanced_accuraci": 145, "42105263": 145, "48391813": 145, "62426901": 145, "40643275": 145, "48684211": 145, "55116959": 145, "73684211": 145, "45356037": 145, "51186791": 145, "794": 146, "892": 146, "225": 146, "test_r2": 146, "test_neg_mean_absolute_error": 146, "848721": 146, "256799": 146, "816374": 146, "084083": 146, "813513": 146, "113367": 146, "814138": 146, "448279": 146, "637473": 146, "370341": 146, "defaultdict": 146, "loss_funct": 146, "squared_error": 146, "absolute_error": 146, "loss_func": 146, "test_neg_mean_squared_error": 146, "923": 146, "344": [146, 153], "evolv": 146, "discontinu": 146, "surrog": 146, "substitut": 146, "log_loss": 146, "exhaust": [147, 154, 181], "cat_preprocessor": [147, 149, 151, 153, 154], "kneighborsregressor": [148, 155], "with_mean": [148, 155], "with_std": [148, 155], 
"reload": [149, 153], "dealt": 149, "ordinalencoderordinalencod": [149, 151, 153], "remainderpassthroughpassthroughhistgradientboostingclassifierhistgradientboostingclassifi": [149, 151, 153], "classifier__learning_r": [149, 151, 153, 154], "classifier__max_leaf_nod": [149, 151, 153, 154], "model_grid_search": [149, 151], "charg": 149, "rapidli": 149, "ascend": [149, 153, 155], "mean_fit_tim": [149, 152], "std_fit_tim": [149, 152], "mean_score_tim": [149, 152], "std_score_tim": [149, 152], "param_classifier__learning_r": [149, 151, 152], "param_classifier__max_leaf_nod": [149, 151, 152], "split0_test_scor": [149, 152], "split1_test_scor": [149, 152], "rank_test_scor": [149, 151, 152, 153], "489168": 149, "050132": 149, "224362": 149, "016874": 149, "868912": 149, "867213": 149, "868063": 149, "000850": 149, "370372": 149, "009651": 149, "203127": 149, "004503": 149, "866783": 149, "866066": 149, "866425": 149, "000359": 149, "118699": 149, "000410": 149, "093414": 149, "007336": 149, "classifier__": 149, "858648": 149, "862408": [149, 151, 152], "860528": 149, "001880": 149, "128271": 149, "002882": 149, "099234": 149, "011471": 149, "859358": 149, "859514": 149, "859436": 149, "000078": 149, "132225": 149, "003397": 149, "083718": 149, "000626": 149, "855536": 149, "856129": 149, "855832": 149, "000296": 149, "shorten": 149, "param_classifier__": 149, "prefix": [149, 152], "column_result": [149, 153], "shorten_param": [149, 152, 153, 178], "__": [149, 150, 152, 153, 178], "rsplit": [149, 152, 153, 178], "853266": 149, "000515": 149, "843330": 149, "002917": 149, "817832": 149, "001124": 149, "797166": 149, "000715": 149, "288200": 149, "050539": 149, "283476": 149, "003775": 149, "262564": 149, "006326": 149, "heatmap": [149, 152], "pivoted_cv_result": 149, "pivot_t": 149, "ylgnbu": 149, "invert_yaxi": 149, "degrad": 149, "patholog": 149, "accordingli": 149, "hyperparamt": [149, 156], "recogniz": 150, "spell": 150, "classifier__c": [150, 178, 180], "hyperparameter_nam": 150, "preprocessor__copi": 150, "preprocessor__with_mean": 150, "preprocessor__with_std": 150, "classifier__class_weight": 150, "classifier__du": 150, "classifier__fit_intercept": 150, "classifier__intercept_sc": 150, "classifier__l1_ratio": 150, "classifier__max_it": 150, "classifier__multi_class": 150, "classifier__n_job": 150, "classifier__penalti": 150, "classifier__random_st": 150, "classifier__solv": 150, "classifier__tol": 150, "classifier__verbos": 150, "classifier__warm_start": 150, "001": [150, 153], "799": 150, "523512": 151, "084637": 151, "863241": 151, "519701": 151, "086653": 151, "860784": 151, "521355": 151, "085747": 151, "860360": [151, 152], "517670": 151, "087460": 151, "523147": 151, "086819": 151, "866912": 151, "863": 151, "embed": 151, "864195": 151, "000061": 151, "870910": 151, "869743": 151, "000532": 151, "866058": 151, "001515": 151, "concern": 151, "877": 151, "schemat": 151, "green": [151, 156, 162], "rough": 151, "cv_test_scor": 151, "871": 151, "apprehend": 151, "cv_inner": 151, "cv_outer": 151, "greed": 151, "cv_fold": 151, "estimator_in_fold": 151, "vote": 151, "randomized_search_result": [152, 153, 178], "param_classifier__l2_regular": 152, "param_classifier__max_bin": 152, "param_classifier__min_samples_leaf": 152, "split2_test_scor": 152, "split3_test_scor": 152, "split4_test_scor": 152, "540456": 152, "062725": 152, "052069": 152, "002661": 152, "467047": 152, "550075": 152, "classifier__l2_regular": [152, 153], "4670474863": 152, "856558": 152, "862271": 152, "857767": 152, 
"854491": 152, "856675": 152, "857552": 152, "002586": 152, "110536": 152, "033403": 152, "074142": 152, "002165": 152, "015449": 152, "001146": 152, "0154488709": 152, "758974": 152, "758941": 152, "758947": [152, 153], "000013": [152, 153], "323": [152, 156], "137484": 152, "053150": 152, "092993": 152, "029005": 152, "095093": 152, "004274": 152, "0950934559": 152, "783267": 152, "776413": 152, "779143": 152, "771341": 152, "010357": 152, "311": 152, "935108": 152, "202993": 152, "118105": 152, "023658": 152, "003621": 152, "001305": 152, "164": 152, "0036210968": 152, "255219": 152, "038301": 152, "056048": 152, "016736": 152, "000081": 152, "407382": 152, "97": [152, 162, 184], "1060737427": 152, "495": 152, "452411": 152, "023006": 152, "055563": 152, "000846": 152, "000075": 152, "364373": 152, "4813767874": 152, "858332": 152, "865001": 152, "862681": 152, "860770": 152, "861429": 152, "002258": 152, "133042": 152, "014456": 152, "078186": 152, "002199": 152, "065946": 152, "001222": 152, "0659455480": 152, "497": [152, 153], "911828": 152, "017167": 152, "076563": 152, "005130": 152, "460025": 152, "044408": 152, "4600250010": 152, "839907": 152, "849713": 152, "846847": 152, "846028": 152, "844390": 152, "845377": 152, "003234": 152, "140": 152, "498": 152, "168120": 152, "121819": 152, "061283": 152, "000760": 152, "000068": 152, "287904": 152, "227": 152, "146": [152, 153], "7755366885": 152, "861881": 152, "859951": 152, "861862": 152, "862221": 152, "001623": 152, "499": [152, 153], "823774": 152, "120686": 152, "060351": 152, "014958": 152, "445218": 152, "4452178932": 152, "764569": 152, "765902": 152, "764947": 152, "765083": 152, "765281": 152, "000535": 152, "319": 152, "l2_regular": [152, 153, 178], "max_bin": [152, 153, 178], "score_bin": 152, "cut": [152, 160], "set_palett": 152, "ylgnbu_r": 152, "set_xscal": 152, "set_yscal": 152, "band": 152, "plotli": [152, 155, 178], "px": [152, 155, 178], "parallel_coordin": [152, 155, 178], "log10": [152, 178], "log2": [152, 178], "color_continuous_scal": [152, 155, 178], "undo": 152, "yellow": [152, 162], "tick": 152, "invert": 152, "consecut": 153, "untract": 153, "situat": 153, "stochast": 153, "loguniform_int": 153, "__init__": 153, "_distribut": 153, "rv": 153, "processor": 153, "1e3": 153, "classifier__min_samples_leaf": 153, "classifier__max_bin": 153, "model_random_search": [153, 155], "histgradientboostingc": 153, "_distn_infrastructur": 153, "rv_continuous_frozen": 153, "0x7fcf3f891a00": 153, "0x7fcf3ebd97f0": 153, "__main__": 153, "0x7fcf3ebe0100": 153, "0x7fcf3ec7dfa0": 153, "0x7fcf3ebd9340": 153, "randomizedsearchcvrandomizedsearchcv": 153, "pprint": 153, "05267903307568315": 153, "10798958387414": 153, "232": 153, "052679": 153, "10799": 153, "870738": 153, "001633": 153, "001174": 153, "02105": 153, "855478": 153, "003486": 153, "000003": 153, "322713": 153, "854741": 153, "003185": 153, "000026": 153, "026509": 153, "853075": 153, "002667": 153, "428258": 153, "272481": 153, "813901": 153, "001062": 153, "906324": 153, "026156": 153, "806448": 153, "001279": 153, "000267": 153, "029741": 153, "183": 153, "799541": 153, "001546": 153, "000007": 153, "00541": 153, "762278": 153, "000332": 153, "000002": 153, "001527": 153, "171": 153, "005833": 153, "001013": 153, "to_csv": 153, "208": 153, "011775": 153, "076653": 153, "871393": 153, "001588": 153, "343": 153, "000404": 153, "244503": 153, "229": 153, "871339": 153, "002741": 153, "994918": 153, "077047": 153, "192": 153, "870793": 153, "001993": 153, "328": 153, 
"036232": 153, "224702": 153, "236": 153, "869837": 153, "000808": 153, "733808": 153, "036786": 153, "241": 153, "869673": 153, "002417": 153, "000097": 153, "976823": 153, "448205": 153, "253714": 153, "000001": 153, "828574": 153, "091079": 153, "000444": 153, "236325": 153, "344629": 153, "207156": 153, "357": 153, "075318": 153, "241053": 153, "valuabl": 153, "allevi": 153, "best_scor": 154, "best_param": 154, "lr": 154, "mln": 154, "mean_scor": 154, "789": 154, "813": 154, "842": 154, "847": 154, "855": 154, "835": 154, "828": 154, "288": 154, "437": 154, "best_lr": 154, "best_mln": 154, "870": 154, "kneighborsregressor__n_neighbor": 155, "standardscaler__with_mean": 155, "standardscaler__with_std": 155, "welcom": 155, "column_name_map": 155, "param_kneighborsregressor__n_neighbor": 155, "param_standardscaler__with_mean": 155, "param_standardscaler__with_std": 155, "boolean": 155, "column_scal": 155, "687926": 155, "674812": 155, "668778": 155, "648317": 155, "629772": 155, "215": 155, "617295": 155, "464": 155, "567164": 155, "508809": 155, "486503": 155, "103390": 155, "061394": 155, "033122": 155, "017583": 155, "007987": 155, "002900": 155, "238830": 155, "tealros": 155, "kneighbor": 155, "mpl": [156, 162], "tab10_norm": [156, 162], "dbd": 156, "tab10": [156, 162], "norm": [156, 162], "plot_tre": [156, 158, 160, 161, 162], "class_nam": [156, 162], "impur": [156, 162], "inferior": 156, "superior": 156, "settabl": 156, "45mm": 156, "test_penguin_1": 156, "test_penguin_2": 156, "y_proba_class_0": 156, "adelie_proba": 156, "chinstrap_proba": 156, "gentoo_proba": 156, "037": 156, "disregard": 156, "moment": 156, "test_penguin_3": 156, "63975155": 156, "32298137": 156, "03726708": 156, "fairli": 156, "palmer": 157, "anatom": 157, "set_size_inch": 157, "superimpos": [159, 163], "data_clf_column": 160, "target_clf_column": 160, "data_clf": 160, "data_reg_column": 160, "target_reg_column": 160, "data_reg": 160, "fit_and_plot_classif": 160, "fit_and_plot_regress": 160, "tree_clf": 160, "tree_reg": 160, "adequ": 160, "asymmetri": 160, "make_blob": 160, "x_1": 160, "y_1": 160, "x_2": 160, "y_2": 160, "min_impurity_decreas": 160, "asymmetr": 160, "priori": 161, "3698": 161, "5032": 161, "tricki": 162, "spectr": 162, "purpl": 162, "xx": 162, "yy": 162, "meshgrid": 162, "xfull": 162, "proba": 162, "sharei": 162, "class_of_interest": 162, "imshow_handl": 162, "imshow": 162, "extent": 162, "colorbar": 162, "cax": 162, "binar": 162, "impress": 162, "target_predicted_linear_regress": 163, "target_predicted_tre": 163, "interpol": 163, "offset": 163, "175": 163, "shortest": 163, "longest": 163, "m3": [164, 177, 179], "m5": [164, 166, 167, 168, 175], "acknowledg": 164, "prune": 170, "children": 171, "increment": 172, "refin": 172, "author": 176, "circular": 178, "budget": [178, 182], "badli": 178, "histgradientbosstingclassifi": 180, "get_paramet": 180, "anim": 184, "param_valu": 184, "powertransform": 184, "all_preprocessor": 184, "cox": 184, "classifier__n_neighbor": 184, "forgot": 184}, "objects": {}, "objtypes": {}, "objnames": {}, "titleterms": {"acknowledg": 0, "figur": 0, "attribut": [0, 3], "The": [1, 6, 74, 77, 104, 105, 106, 107, 141, 157, 164], "adult": [1, 74], "censu": [1, 74], "dataset": [1, 2, 6, 74, 80, 81, 91, 104, 105, 106, 107, 108, 151, 157], "descript": 2, "glossari": 3, "main": [3, 14, 23, 34, 40, 58, 72, 128, 133, 170, 182], "term": 3, "us": [3, 6, 11, 77, 86, 125, 126, 136, 149, 153], "thi": [3, 6], "cours": [3, 36], "api": 3, "classif": [3, 30, 139, 140, 141, 156, 157, 166], 
"classifi": [3, 141], "cross": [3, 20, 21, 77, 86, 96, 97, 102], "valid": [3, 6, 20, 21, 59, 67, 77, 86, 96, 97, 102, 103], "data": [3, 6, 63, 65, 68, 74, 77, 80, 81, 82, 85, 86, 101, 128, 133], "matrix": [3, 141], "input": 3, "earli": 3, "stop": 3, "estim": [3, 102, 139, 162], "featur": [3, 32, 41, 82, 85, 88, 90, 108, 125, 126, 131, 132, 135, 137, 164], "variabl": [3, 74, 85, 86, 88, 90, 108], "descriptor": 3, "covari": 3, "gener": [3, 103, 160], "perform": [3, 92, 164], "predict": [3, 6, 81, 85, 139, 141, 149, 151, 153, 162, 164], "statist": 3, "hyperparamet": [3, 12, 117, 119, 149, 150, 151, 152, 153, 160, 164, 167, 183], "infer": 3, "learn": [3, 6, 13, 22, 33, 36, 39, 50, 56, 59, 65, 70, 71, 81, 91, 96, 108, 110, 136, 138, 150, 164, 169, 181], "paramet": [3, 132, 135, 160], "meta": 3, "model": [3, 6, 8, 9, 19, 38, 41, 45, 46, 48, 65, 67, 77, 81, 82, 86, 91, 92, 108, 118, 126, 128, 131, 132, 133, 139, 149, 151, 153, 164, 165, 168], "overfit": [3, 57, 62, 103], "predictor": 3, "regress": [3, 31, 131, 132, 136, 137, 138, 144, 157, 161, 175], "regressor": 3, "regular": [3, 45, 48, 132, 135], "penal": 3, "sampl": [3, 95, 96], "instanc": 3, "observ": 3, "supervis": 3, "target": [3, 81], "label": [3, 6], "annot": 3, "test": [3, 54, 80, 81, 102], "set": [3, 150], "train": [3, 54, 80, 81, 102], "fit": [3, 65, 81, 82, 86], "transform": 3, "underfit": [3, 57, 62, 103], "unsupervis": 3, "other": [3, 160], "notebook": [4, 74, 77, 80, 81, 137], "time": [4, 6, 13, 22, 33, 39, 56, 71, 169, 181], "tabl": [5, 164], "content": [5, 164], "conclud": [6, 7, 164], "remark": [6, 7, 164], "last": 6, "lesson": [6, 108], "goal": 6, "big": 6, "messag": [6, 131], "mooc": [6, 36], "1": [6, 73, 108], "machin": [6, 50, 164], "pipelin": [6, 70, 85, 88, 90, 91, 110, 164], "2": [6, 60, 108], "adapt": [6, 109], "complex": [6, 110], "3": [6, 108, 184], "specif": [6, 86], "go": [6, 14, 23, 34, 40, 58, 72, 170, 182], "further": [6, 14, 23, 34, 40, 58, 72, 170, 182], "more": [6, 86, 102], "about": [6, 119], "scikit": [6, 36, 65, 70, 81, 91, 110, 136, 138, 150], "we": [6, 91], "ar": 6, "an": [6, 85], "open": 6, "sourc": 6, "commun": 6, "topic": 6, "have": 6, "cover": 6, "studi": 6, "bring": 6, "valu": 6, "bigger": 6, "pictur": 6, "beyond": [6, 140], "evalu": [6, 77, 85, 86, 141, 151, 164], "matter": 6, "small": 6, "part": 6, "problem": [6, 162], "most": 6, "technic": 6, "craft": 6, "all": 6, "how": 6, "choic": [6, 20], "output": 6, "bias": 6, "versu": [6, 52, 55], "causal": 6, "societ": 6, "impact": [6, 135], "intuit": [8, 9, 38, 46, 48, 165, 168], "ensembl": [8, 9, 10, 11, 12, 118, 164], "bag": [8, 110], "boost": [9, 10, 109, 115, 116, 117], "base": [10, 85, 86, 165, 168], "method": [11, 12], "bootstrap": [11, 110], "tune": [12, 117, 132, 149, 151, 153, 164, 177, 179], "modul": [13, 22, 33, 39, 56, 71, 169, 181], "overview": [13, 22, 33, 39, 56, 71, 169, 181], "what": [13, 22, 33, 39, 56, 71, 169, 181], "you": [13, 22, 33, 39, 56, 71, 169, 181], "befor": [13, 22, 33, 39, 56, 71, 169, 181], "get": [13, 22, 33, 39, 56, 71, 150, 169, 181], "start": [13, 22, 33, 39, 56, 71, 169, 181], "object": [13, 22, 33, 39, 56, 71, 169, 181], "schedul": [13, 22, 33, 39, 56, 71, 169, 181], "take": [14, 23, 34, 40, 58, 72, 108, 131, 170, 182], "awai": [14, 23, 34, 40, 58, 72, 108, 131, 170, 182], "wrap": [14, 18, 23, 29, 34, 40, 47, 58, 60, 72, 73, 170, 176, 182, 184], "up": [14, 18, 23, 29, 34, 40, 47, 58, 60, 72, 73, 116, 170, 176, 182, 184], "To": [14, 23, 34, 40, 58, 72, 170, 182], "quiz": [15, 16, 17, 18, 24, 25, 26, 27, 28, 29, 35, 37, 
42, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 171, 172, 173, 174, 176, 178, 180, 184], "m6": [15, 16, 17, 111, 112, 113, 114, 120, 121, 122, 123], "01": [15, 24, 42, 49, 61, 64, 75, 76, 93, 94, 98, 99, 111, 120, 124, 127, 128, 133, 147, 154, 158, 162, 171, 180], "question": [15, 16, 17, 18, 24, 25, 26, 27, 28, 29, 35, 37, 42, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 171, 172, 173, 174, 176, 178, 180, 184], "02": [16, 25, 43, 53, 66, 78, 83, 112, 121, 129, 134, 142, 145, 148, 155, 159, 163, 172, 178], "03": [17, 26, 44, 51, 69, 79, 84, 113, 122, 130, 135, 143, 146, 173], "6": 18, "compar": [19, 54, 92], "simpl": [19, 92], "baselin": [19, 92, 141], "nest": [21, 97], "m7": [24, 25, 26, 27, 28, 94, 99, 142, 143, 145, 146], "04": [27, 87, 89, 114, 123, 174], "05": [28, 88, 90], "7": 29, "metric": [30, 31, 141], "caveat": 32, "select": [32, 85, 86, 125, 126, 164], "introduct": 36, "present": [36, 108], "welcom": 36, "follow": 36, "prerequisit": [36, 128, 133], "materi": 36, "social": 36, "network": 36, "linear": [38, 41, 45, 46, 48, 108, 131, 132, 135, 136, 137, 138, 139, 140, 164], "non": [41, 101, 131, 135, 137], "engin": [41, 131, 135, 137], "m4": [42, 43, 44, 128, 129, 130, 133, 134, 135], "4": 47, "intro": 49, "introduc": 50, "concept": [50, 164], "m2": [51, 53, 61, 93, 98], "bia": [52, 55], "varianc": [52, 55], "error": [54, 102], "trade": 55, "off": 55, "curv": [59, 96, 103], "tabular": 63, "explor": 63, "m1": [64, 66, 69, 75, 76, 78, 79, 83, 84, 87, 88, 89, 90], "numer": [65, 80, 82, 86, 88, 90], "handl": 68, "categor": [68, 85, 86, 88, 90], "visual": [70, 74, 91], "jupyt": [70, 91], "first": [74, 81, 91], "look": [74, 119], "our": [74, 85, 149, 151, 153], "load": [74, 80, 81, 91, 128, 133, 151], "column": [74, 86], "inspect": [74, 108], "creat": [74, 91, 160], "decis": [74, 115, 117, 135, 156, 160, 161, 164, 166, 167, 175], "rule": 74, "hand": 74, "recap": [74, 77, 80, 81, 137], "exercis": [75, 76, 78, 79, 83, 84, 87, 88, 89, 90, 93, 94, 98, 99, 111, 112, 113, 114, 120, 121, 122, 123, 124, 127, 128, 129, 130, 133, 134, 135, 142, 143, 145, 146, 147, 148, 154, 155, 158, 159, 162, 163], "solut": [76, 83, 84, 89, 90, 98, 99, 120, 121, 122, 123, 127, 133, 134, 135, 145, 146, 154, 155, 162, 163], "prepar": [77, 82], "need": 77, "work": 80, "entir": 80, "identifi": [80, 85], "split": [80, 81], "panda": 81, "separ": [81, 140], "make": 81, "preprocess": 82, "encod": [85, 88, 90], "type": [85, 86], "strategi": 85, "categori": [85, 88, 90], "ordin": 85, "nomin": 85, "without": [85, 138, 151], "assum": 85, "ani": 85, "order": 85, "choos": 85, "togeth": 86, "dispatch": 86, "processor": 86, "power": 86, "refer": [88, 90], "scale": [88, 90, 108, 132], "integ": [88, 90], "code": [88, 90], "One": [88, 90], "hot": [88, 90], "analysi": [90, 152, 183], "Then": 91, "final": 91, "score": 91, "group": 95, "effect": [96, 132, 160], "size": 96, "summari": [96, 102, 103, 131], "stratif": 100, "i": 101, "d": 101, "framework": 102, "vs": [102, 103], "stabil": 102, "detail": [102, 119], "regard": 102, "cross_valid": 102, "am": 104, "hous": [104, 107], "bike": 105, "ride": 105, "blood": 106, "transfus": 106, "california": 107, "import": [108, 160], "0": 108, "sign": 108, "coeffici": 108, "A": [108, 119], "surpris": 108, "associ": 108, "check": 108, "spars": 108, "lasso": 108, "randomforest": 108, "feature_importances_": 108, "permut": 108, "discuss": 108, "adaboost": 109, "resampl": 110, "aggreg": 110, "gradient": [115, 116, 117], "tree": [115, 117, 156, 160, 161, 164, 165, 166, 167, 168, 175], "gbdt": 
115, "speed": 116, "random": [117, 119, 153], "forest": [117, 119], "histogram": 117, "introductori": 118, "exampl": 118, "default": 119, "benefit": 125, "limit": 126, "definit": [128, 133], "logist": 131, "addit": 131, "interact": 131, "multi": [131, 162], "step": 131, "influenc": 135, "c": 135, "boundari": 135, "weight": 135, "probabl": [139, 141, 162], "accuraci": 141, "confus": 141, "deriv": 141, "issu": 141, "class": [141, 162], "imbal": 141, "differ": 141, "threshold": 141, "m3": [147, 148, 154, 155, 178, 180], "grid": 149, "search": [149, 152, 153, 183], "With": 151, "result": [152, 183], "build": 156, "penguin": 157, "m5": [158, 159, 162, 163, 171, 172, 173, 174], "helper": 160, "function": 160, "max_depth": 160, "best": 164, "appendix": 164, "interpret": 164, "5": 176, "autom": 177, "manual": 179}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 56}}) \ No newline at end of file +Search.setIndex({"docnames": ["appendix/acknowledgement", "appendix/adult_census_description", "appendix/datasets_intro", "appendix/glossary", "appendix/notebook_timings", "appendix/toc_redirect", "concluding_remarks", "concluding_remarks_video", "ensemble/bagging_slides", "ensemble/boosting_slides", "ensemble/ensemble_boosting_index", "ensemble/ensemble_bootstrap_index", "ensemble/ensemble_hyperparameters_index", "ensemble/ensemble_module_intro", "ensemble/ensemble_module_take_away", "ensemble/ensemble_quiz_m6_01", "ensemble/ensemble_quiz_m6_02", "ensemble/ensemble_quiz_m6_03", "ensemble/ensemble_wrap_up_quiz", "evaluation/cross_validation_baseline_index", "evaluation/cross_validation_choices_index", "evaluation/cross_validation_nested_index", "evaluation/evaluation_module_intro", "evaluation/evaluation_module_take_away", "evaluation/evaluation_quiz_m7_01", "evaluation/evaluation_quiz_m7_02", "evaluation/evaluation_quiz_m7_03", "evaluation/evaluation_quiz_m7_04", "evaluation/evaluation_quiz_m7_05", "evaluation/evaluation_wrap_up_quiz", "evaluation/metrics_classification_index", "evaluation/metrics_regression_index", "feature_selection/feature_selection_limitation_index", "feature_selection/feature_selection_module_intro", "feature_selection/feature_selection_module_take_away", "feature_selection/feature_selection_quiz", "index", "interpretation/interpretation_quiz", "linear_models/linear_models_intuitions_index", "linear_models/linear_models_module_intro", "linear_models/linear_models_module_take_away", "linear_models/linear_models_non_linear_index", "linear_models/linear_models_quiz_m4_01", "linear_models/linear_models_quiz_m4_02", "linear_models/linear_models_quiz_m4_03", "linear_models/linear_models_regularization_index", "linear_models/linear_models_slides", "linear_models/linear_models_wrap_up_quiz", "linear_models/regularized_linear_models_slides", "ml_concepts/quiz_intro_01", "ml_concepts/slides", "overfit/bias_vs_variance_quiz_m2_03", "overfit/bias_vs_variance_slides", "overfit/learning_validation_curves_quiz_m2_02", "overfit/learning_validation_curves_slides", "overfit/overfit_bias_variance_index", "overfit/overfit_module_intro", "overfit/overfit_overfitting_underfitting_index", "overfit/overfit_take_away", "overfit/overfit_validation_learning_curves_index", "overfit/overfit_wrap_up_quiz", 
"overfit/overfitting_vs_under_fitting_quiz_m2_01", "overfit/overfitting_vs_under_fitting_slides", "predictive_modeling_pipeline/01_tabular_data_exploration_index", "predictive_modeling_pipeline/01_tabular_data_exploration_quiz_m1_01", "predictive_modeling_pipeline/02_numerical_pipeline_index", "predictive_modeling_pipeline/02_numerical_pipeline_quiz_m1_02", "predictive_modeling_pipeline/02_numerical_pipeline_video_cross_validation", "predictive_modeling_pipeline/03_categorical_pipeline_index", "predictive_modeling_pipeline/03_categorical_pipeline_quiz_m1_03", "predictive_modeling_pipeline/03_categorical_pipeline_visualization_video", "predictive_modeling_pipeline/predictive_modeling_module_intro", "predictive_modeling_pipeline/predictive_modeling_module_take_away", "predictive_modeling_pipeline/wrap_up_quiz", "python_scripts/01_tabular_data_exploration", "python_scripts/01_tabular_data_exploration_ex_01", "python_scripts/01_tabular_data_exploration_sol_01", "python_scripts/02_numerical_pipeline_cross_validation", "python_scripts/02_numerical_pipeline_ex_00", "python_scripts/02_numerical_pipeline_ex_01", "python_scripts/02_numerical_pipeline_hands_on", "python_scripts/02_numerical_pipeline_introduction", "python_scripts/02_numerical_pipeline_scaling", "python_scripts/02_numerical_pipeline_sol_00", "python_scripts/02_numerical_pipeline_sol_01", "python_scripts/03_categorical_pipeline", "python_scripts/03_categorical_pipeline_column_transformer", "python_scripts/03_categorical_pipeline_ex_01", "python_scripts/03_categorical_pipeline_ex_02", "python_scripts/03_categorical_pipeline_sol_01", "python_scripts/03_categorical_pipeline_sol_02", "python_scripts/03_categorical_pipeline_visualization", "python_scripts/cross_validation_baseline", "python_scripts/cross_validation_ex_01", "python_scripts/cross_validation_ex_02", "python_scripts/cross_validation_grouping", "python_scripts/cross_validation_learning_curve", "python_scripts/cross_validation_nested", "python_scripts/cross_validation_sol_01", "python_scripts/cross_validation_sol_02", "python_scripts/cross_validation_stratification", "python_scripts/cross_validation_time", "python_scripts/cross_validation_train_test", "python_scripts/cross_validation_validation_curve", "python_scripts/datasets_ames_housing", "python_scripts/datasets_bike_rides", "python_scripts/datasets_blood_transfusion", "python_scripts/datasets_california_housing", "python_scripts/dev_features_importance", "python_scripts/ensemble_adaboost", "python_scripts/ensemble_bagging", "python_scripts/ensemble_ex_01", "python_scripts/ensemble_ex_02", "python_scripts/ensemble_ex_03", "python_scripts/ensemble_ex_04", "python_scripts/ensemble_gradient_boosting", "python_scripts/ensemble_hist_gradient_boosting", "python_scripts/ensemble_hyperparameters", "python_scripts/ensemble_introduction", "python_scripts/ensemble_random_forest", "python_scripts/ensemble_sol_01", "python_scripts/ensemble_sol_02", "python_scripts/ensemble_sol_03", "python_scripts/ensemble_sol_04", "python_scripts/feature_selection_ex_01", "python_scripts/feature_selection_introduction", "python_scripts/feature_selection_limitation_model", "python_scripts/feature_selection_sol_01", "python_scripts/linear_models_ex_01", "python_scripts/linear_models_ex_02", "python_scripts/linear_models_ex_03", "python_scripts/linear_models_feature_engineering_classification", "python_scripts/linear_models_regularization", "python_scripts/linear_models_sol_01", "python_scripts/linear_models_sol_02", "python_scripts/linear_models_sol_03", 
"python_scripts/linear_regression_in_sklearn", "python_scripts/linear_regression_non_linear_link", "python_scripts/linear_regression_without_sklearn", "python_scripts/logistic_regression", "python_scripts/logistic_regression_non_linear", "python_scripts/metrics_classification", "python_scripts/metrics_ex_01", "python_scripts/metrics_ex_02", "python_scripts/metrics_regression", "python_scripts/metrics_sol_01", "python_scripts/metrics_sol_02", "python_scripts/parameter_tuning_ex_02", "python_scripts/parameter_tuning_ex_03", "python_scripts/parameter_tuning_grid_search", "python_scripts/parameter_tuning_manual", "python_scripts/parameter_tuning_nested", "python_scripts/parameter_tuning_parallel_plot", "python_scripts/parameter_tuning_randomized_search", "python_scripts/parameter_tuning_sol_02", "python_scripts/parameter_tuning_sol_03", "python_scripts/trees_classification", "python_scripts/trees_dataset", "python_scripts/trees_ex_01", "python_scripts/trees_ex_02", "python_scripts/trees_hyperparameters", "python_scripts/trees_regression", "python_scripts/trees_sol_01", "python_scripts/trees_sol_02", "toc", "trees/slides", "trees/trees_classification_index", "trees/trees_hyperparameters_index", "trees/trees_intuitions_index", "trees/trees_module_intro", "trees/trees_module_take_away", "trees/trees_quiz_m5_01", "trees/trees_quiz_m5_02", "trees/trees_quiz_m5_03", "trees/trees_quiz_m5_04", "trees/trees_regression_index", "trees/trees_wrap_up_quiz", "tuning/parameter_tuning_automated_index", "tuning/parameter_tuning_automated_quiz_m3_02", "tuning/parameter_tuning_manual_index", "tuning/parameter_tuning_manual_quiz_m3_01", "tuning/parameter_tuning_module_intro", "tuning/parameter_tuning_module_take_away", "tuning/parameter_tuning_parallel_plot_video", "tuning/parameter_tuning_wrap_up_quiz"], "filenames": ["appendix/acknowledgement.md", "appendix/adult_census_description.md", "appendix/datasets_intro.md", "appendix/glossary.md", "appendix/notebook_timings.md", "appendix/toc_redirect.md", "concluding_remarks.md", "concluding_remarks_video.md", "ensemble/bagging_slides.md", "ensemble/boosting_slides.md", "ensemble/ensemble_boosting_index.md", "ensemble/ensemble_bootstrap_index.md", "ensemble/ensemble_hyperparameters_index.md", "ensemble/ensemble_module_intro.md", "ensemble/ensemble_module_take_away.md", "ensemble/ensemble_quiz_m6_01.md", "ensemble/ensemble_quiz_m6_02.md", "ensemble/ensemble_quiz_m6_03.md", "ensemble/ensemble_wrap_up_quiz.md", "evaluation/cross_validation_baseline_index.md", "evaluation/cross_validation_choices_index.md", "evaluation/cross_validation_nested_index.md", "evaluation/evaluation_module_intro.md", "evaluation/evaluation_module_take_away.md", "evaluation/evaluation_quiz_m7_01.md", "evaluation/evaluation_quiz_m7_02.md", "evaluation/evaluation_quiz_m7_03.md", "evaluation/evaluation_quiz_m7_04.md", "evaluation/evaluation_quiz_m7_05.md", "evaluation/evaluation_wrap_up_quiz.md", "evaluation/metrics_classification_index.md", "evaluation/metrics_regression_index.md", "feature_selection/feature_selection_limitation_index.md", "feature_selection/feature_selection_module_intro.md", "feature_selection/feature_selection_module_take_away.md", "feature_selection/feature_selection_quiz.md", "index.md", "interpretation/interpretation_quiz.md", "linear_models/linear_models_intuitions_index.md", "linear_models/linear_models_module_intro.md", "linear_models/linear_models_module_take_away.md", "linear_models/linear_models_non_linear_index.md", "linear_models/linear_models_quiz_m4_01.md", 
"linear_models/linear_models_quiz_m4_02.md", "linear_models/linear_models_quiz_m4_03.md", "linear_models/linear_models_regularization_index.md", "linear_models/linear_models_slides.md", "linear_models/linear_models_wrap_up_quiz.md", "linear_models/regularized_linear_models_slides.md", "ml_concepts/quiz_intro_01.md", "ml_concepts/slides.md", "overfit/bias_vs_variance_quiz_m2_03.md", "overfit/bias_vs_variance_slides.md", "overfit/learning_validation_curves_quiz_m2_02.md", "overfit/learning_validation_curves_slides.md", "overfit/overfit_bias_variance_index.md", "overfit/overfit_module_intro.md", "overfit/overfit_overfitting_underfitting_index.md", "overfit/overfit_take_away.md", "overfit/overfit_validation_learning_curves_index.md", "overfit/overfit_wrap_up_quiz.md", "overfit/overfitting_vs_under_fitting_quiz_m2_01.md", "overfit/overfitting_vs_under_fitting_slides.md", "predictive_modeling_pipeline/01_tabular_data_exploration_index.md", "predictive_modeling_pipeline/01_tabular_data_exploration_quiz_m1_01.md", "predictive_modeling_pipeline/02_numerical_pipeline_index.md", "predictive_modeling_pipeline/02_numerical_pipeline_quiz_m1_02.md", "predictive_modeling_pipeline/02_numerical_pipeline_video_cross_validation.md", "predictive_modeling_pipeline/03_categorical_pipeline_index.md", "predictive_modeling_pipeline/03_categorical_pipeline_quiz_m1_03.md", "predictive_modeling_pipeline/03_categorical_pipeline_visualization_video.md", "predictive_modeling_pipeline/predictive_modeling_module_intro.md", "predictive_modeling_pipeline/predictive_modeling_module_take_away.md", "predictive_modeling_pipeline/wrap_up_quiz.md", "python_scripts/01_tabular_data_exploration.py", "python_scripts/01_tabular_data_exploration_ex_01.py", "python_scripts/01_tabular_data_exploration_sol_01.py", "python_scripts/02_numerical_pipeline_cross_validation.py", "python_scripts/02_numerical_pipeline_ex_00.py", "python_scripts/02_numerical_pipeline_ex_01.py", "python_scripts/02_numerical_pipeline_hands_on.py", "python_scripts/02_numerical_pipeline_introduction.py", "python_scripts/02_numerical_pipeline_scaling.py", "python_scripts/02_numerical_pipeline_sol_00.py", "python_scripts/02_numerical_pipeline_sol_01.py", "python_scripts/03_categorical_pipeline.py", "python_scripts/03_categorical_pipeline_column_transformer.py", "python_scripts/03_categorical_pipeline_ex_01.py", "python_scripts/03_categorical_pipeline_ex_02.py", "python_scripts/03_categorical_pipeline_sol_01.py", "python_scripts/03_categorical_pipeline_sol_02.py", "python_scripts/03_categorical_pipeline_visualization.py", "python_scripts/cross_validation_baseline.py", "python_scripts/cross_validation_ex_01.py", "python_scripts/cross_validation_ex_02.py", "python_scripts/cross_validation_grouping.py", "python_scripts/cross_validation_learning_curve.py", "python_scripts/cross_validation_nested.py", "python_scripts/cross_validation_sol_01.py", "python_scripts/cross_validation_sol_02.py", "python_scripts/cross_validation_stratification.py", "python_scripts/cross_validation_time.py", "python_scripts/cross_validation_train_test.py", "python_scripts/cross_validation_validation_curve.py", "python_scripts/datasets_ames_housing.py", "python_scripts/datasets_bike_rides.py", "python_scripts/datasets_blood_transfusion.py", "python_scripts/datasets_california_housing.py", "python_scripts/dev_features_importance.py", "python_scripts/ensemble_adaboost.py", "python_scripts/ensemble_bagging.py", "python_scripts/ensemble_ex_01.py", "python_scripts/ensemble_ex_02.py", 
"python_scripts/ensemble_ex_03.py", "python_scripts/ensemble_ex_04.py", "python_scripts/ensemble_gradient_boosting.py", "python_scripts/ensemble_hist_gradient_boosting.py", "python_scripts/ensemble_hyperparameters.py", "python_scripts/ensemble_introduction.py", "python_scripts/ensemble_random_forest.py", "python_scripts/ensemble_sol_01.py", "python_scripts/ensemble_sol_02.py", "python_scripts/ensemble_sol_03.py", "python_scripts/ensemble_sol_04.py", "python_scripts/feature_selection_ex_01.py", "python_scripts/feature_selection_introduction.py", "python_scripts/feature_selection_limitation_model.py", "python_scripts/feature_selection_sol_01.py", "python_scripts/linear_models_ex_01.py", "python_scripts/linear_models_ex_02.py", "python_scripts/linear_models_ex_03.py", "python_scripts/linear_models_feature_engineering_classification.py", "python_scripts/linear_models_regularization.py", "python_scripts/linear_models_sol_01.py", "python_scripts/linear_models_sol_02.py", "python_scripts/linear_models_sol_03.py", "python_scripts/linear_regression_in_sklearn.py", "python_scripts/linear_regression_non_linear_link.py", "python_scripts/linear_regression_without_sklearn.py", "python_scripts/logistic_regression.py", "python_scripts/logistic_regression_non_linear.py", "python_scripts/metrics_classification.py", "python_scripts/metrics_ex_01.py", "python_scripts/metrics_ex_02.py", "python_scripts/metrics_regression.py", "python_scripts/metrics_sol_01.py", "python_scripts/metrics_sol_02.py", "python_scripts/parameter_tuning_ex_02.py", "python_scripts/parameter_tuning_ex_03.py", "python_scripts/parameter_tuning_grid_search.py", "python_scripts/parameter_tuning_manual.py", "python_scripts/parameter_tuning_nested.py", "python_scripts/parameter_tuning_parallel_plot.py", "python_scripts/parameter_tuning_randomized_search.py", "python_scripts/parameter_tuning_sol_02.py", "python_scripts/parameter_tuning_sol_03.py", "python_scripts/trees_classification.py", "python_scripts/trees_dataset.py", "python_scripts/trees_ex_01.py", "python_scripts/trees_ex_02.py", "python_scripts/trees_hyperparameters.py", "python_scripts/trees_regression.py", "python_scripts/trees_sol_01.py", "python_scripts/trees_sol_02.py", "toc.md", "trees/slides.md", "trees/trees_classification_index.md", "trees/trees_hyperparameters_index.md", "trees/trees_intuitions_index.md", "trees/trees_module_intro.md", "trees/trees_module_take_away.md", "trees/trees_quiz_m5_01.md", "trees/trees_quiz_m5_02.md", "trees/trees_quiz_m5_03.md", "trees/trees_quiz_m5_04.md", "trees/trees_regression_index.md", "trees/trees_wrap_up_quiz.md", "tuning/parameter_tuning_automated_index.md", "tuning/parameter_tuning_automated_quiz_m3_02.md", "tuning/parameter_tuning_manual_index.md", "tuning/parameter_tuning_manual_quiz_m3_01.md", "tuning/parameter_tuning_module_intro.md", "tuning/parameter_tuning_module_take_away.md", "tuning/parameter_tuning_parallel_plot_video.md", "tuning/parameter_tuning_wrap_up_quiz.md"], "titles": ["Acknowledgement", "The adult census dataset", "Datasets description", "Glossary", "Notebook timings", "Table of contents", "Concluding remarks", "\ud83c\udfa5 Concluding remarks", "\ud83c\udfa5 Intuitions on ensemble models: bagging", "\ud83c\udfa5 Intuitions on ensemble models: boosting", "Ensemble based on boosting", "Ensemble method using bootstrapping", "Hyperparameter tuning with ensemble methods", "Module overview", "Main take-away", "\u2705 Quiz M6.01", "\u2705 Quiz M6.02", "\u2705 Quiz M6.03", "\ud83c\udfc1 Wrap-up quiz 6", "Comparing a model 
with simple baselines", "Choice of cross-validation", "Nested cross-validation", "Module overview", "Main take-away", "\u2705 Quiz M7.01", "\u2705 Quiz M7.02", "\u2705 Quiz M7.03", "\u2705 Quiz M7.04", "\u2705 Quiz M7.05", "\ud83c\udfc1 Wrap-up quiz 7", "Classification metrics", "Regression metrics", "Caveats of feature selection", "Module overview", "Main take-away", "\u2705 Quiz", "Introduction", "\u2705 Quiz", "Intuitions on linear models", "Module overview", "Main take-away", "Non-linear feature engineering for linear models", "\u2705 Quiz M4.01", "\u2705 Quiz M4.02", "\u2705 Quiz M4.03", "Regularization in linear model", "\ud83c\udfa5 Intuitions on linear models", "\ud83c\udfc1 Wrap-up quiz 4", "\ud83c\udfa5 Intuitions on regularized linear models", "\u2705 Quiz Intro.01", "\ud83c\udfa5 Introducing machine-learning concepts", "\u2705 Quiz M2.03", "\ud83c\udfa5 Bias versus Variance", "\u2705 Quiz M2.02", "\ud83c\udfa5 Comparing train and test errors", "Bias versus variance trade-off", "Module overview", "Overfitting and underfitting", "Main take-away", "Validation and learning curves", "\ud83c\udfc1 Wrap-up quiz 2", "\u2705 Quiz M2.01", "\ud83c\udfa5 Overfitting and Underfitting", "Tabular data exploration", "\u2705 Quiz M1.01", "Fitting a scikit-learn model on numerical data", "\u2705 Quiz M1.02", "\ud83c\udfa5 Validation of a model", "Handling categorical data", "\u2705 Quiz M1.03", "\ud83c\udfa5 Visualizing scikit-learn pipelines in Jupyter", "Module overview", "Main take-away", "\ud83c\udfc1 Wrap-up quiz 1", "First look at our dataset", "\ud83d\udcdd Exercise M1.01", "\ud83d\udcc3 Solution for Exercise M1.01", "Model evaluation using cross-validation", "\ud83d\udcdd Exercise M1.02", "\ud83d\udcdd Exercise M1.03", "Working with numerical data", "First model with scikit-learn", "Preprocessing for numerical features", "\ud83d\udcc3 Solution for Exercise M1.02", "\ud83d\udcc3 Solution for Exercise M1.03", "Encoding of categorical variables", "Using numerical and categorical variables together", "\ud83d\udcdd Exercise M1.04", "\ud83d\udcdd Exercise M1.05", "\ud83d\udcc3 Solution for Exercise M1.04", "\ud83d\udcc3 Solution for Exercise M1.05", "Visualizing scikit-learn pipelines in Jupyter", "Comparing model performance with a simple baseline", "\ud83d\udcdd Exercise M2.01", "\ud83d\udcdd Exercise M7.01", "Sample grouping", "Effect of the sample size in cross-validation", "Nested cross-validation", "\ud83d\udcc3 Solution for Exercise M2.01", "\ud83d\udcc3 Solution for Exercise M7.01", "Stratification", "Non i.i.d. 
data", "Cross-validation framework", "Overfit-generalization-underfit", "The Ames housing dataset", "The bike rides dataset", "The blood transfusion dataset", "The California housing dataset", "Feature importance", "Adaptive Boosting (AdaBoost)", "Bagging", "\ud83d\udcdd Exercise M6.01", "\ud83d\udcdd Exercise M6.02", "\ud83d\udcdd Exercise M6.03", "\ud83d\udcdd Exercise M6.04", "Gradient-boosting decision tree (GBDT)", "Speeding-up gradient-boosting", "Hyperparameter tuning", "Introductory example to ensemble models", "Random forests", "\ud83d\udcc3 Solution for Exercise M6.01", "\ud83d\udcc3 Solution for Exercise M6.02", "\ud83d\udcc3 Solution for Exercise M6.03", "\ud83d\udcc3 Solution for Exercise M6.04", "\ud83d\udcdd Exercise 01", "Benefits of using feature selection", "Limitation of selecting feature using a model", "\ud83d\udcc3 Solution for Exercise 01", "\ud83d\udcdd Exercise M4.01", "\ud83d\udcdd Exercise M4.02", "\ud83d\udcdd Exercise M4.03", "Non-linear feature engineering for Logistic Regression", "Regularization of linear regression model", "\ud83d\udcc3 Solution for Exercise M4.01", "\ud83d\udcc3 Solution for Exercise M4.02", "\ud83d\udcc3 Solution for Exercise M4.03", "Linear regression using scikit-learn", "Non-linear feature engineering for Linear Regression", "Linear regression without scikit-learn", "Linear models for classification", "Beyond linear separation in classification", "Classification", "\ud83d\udcdd Exercise M7.02", "\ud83d\udcdd Exercise M7.03", "Regression", "\ud83d\udcc3 Solution for Exercise M7.02", "\ud83d\udcc3 Solution for Exercise M7.03", "\ud83d\udcdd Exercise M3.01", "\ud83d\udcdd Exercise M3.02", "Hyperparameter tuning by grid-search", "Set and get hyperparameters in scikit-learn", "Evaluation and hyperparameter tuning", "Analysis of hyperparameter search results", "Hyperparameter tuning by randomized-search", "\ud83d\udcc3 Solution for Exercise M3.01", "\ud83d\udcc3 Solution for Exercise M3.02", "Build a classification decision tree", "The penguins datasets", "\ud83d\udcdd Exercise M5.01", "\ud83d\udcdd Exercise M5.02", "Importance of decision tree hyperparameters on generalization", "Decision tree for regression", "\ud83d\udcc3 Solution for Exercise M5.01", "\ud83d\udcc3 Solution for Exercise M5.02", "Table of contents", "\ud83c\udfa5 Intuitions on tree-based models", "Decision tree in classification", "Hyperparameters of decision tree", "Intuitions on tree-based models", "Module overview", "Main take-away", "\u2705 Quiz M5.01", "\u2705 Quiz M5.02", "\u2705 Quiz M5.03", "\u2705 Quiz M5.04", "Decision tree in regression", "\ud83c\udfc1 Wrap-up quiz 5", "Automated tuning", "\u2705 Quiz M3.02", "Manual tuning", "\u2705 Quiz M3.01", "Module overview", "Main take-away", "\ud83c\udfa5 Analysis of hyperparameter search results", "\ud83c\udfc1 Wrap-up quiz 3"], "terms": {"The": [0, 2, 3, 13, 18, 22, 33, 36, 37, 39, 42, 44, 47, 49, 56, 58, 60, 66, 71, 73, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 108, 109, 110, 111, 112, 113, 114, 116, 117, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 142, 143, 144, 145, 146, 147, 148, 149, 151, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 169, 171, 173, 176, 181, 182, 184], "diagram": [0, 3, 82], "present": [0, 8, 9, 13, 22, 23, 29, 34, 39, 46, 48, 50, 52, 54, 56, 62, 67, 71, 73, 74, 81, 85, 92, 96, 100, 101, 103, 104, 105, 106, 107, 109, 110, 115, 116, 118, 119, 126, 136, 138, 
141, 142, 144, 145, 151, 153, 157, 161, 165, 169, 170, 171], "api": [0, 6, 72, 78, 81, 83, 86, 141], "design": [0, 3, 6, 36, 80, 104, 137, 144], "modul": [0, 1, 3, 14, 18, 23, 34, 36, 40, 47, 58, 72, 74, 80, 86, 107, 131, 134, 135, 141, 149, 150, 151, 158, 162, 164, 170, 182], "predict": [0, 1, 13, 16, 18, 22, 23, 24, 25, 29, 33, 36, 39, 40, 42, 43, 47, 49, 51, 56, 58, 60, 61, 66, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 82, 83, 84, 86, 87, 89, 91, 92, 93, 94, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 115, 117, 118, 119, 120, 121, 122, 124, 125, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 140, 144, 146, 148, 150, 152, 155, 156, 157, 159, 160, 161, 163, 169, 173, 176, 180, 181, 184], "model": [0, 1, 10, 11, 13, 14, 15, 17, 18, 22, 23, 24, 25, 26, 27, 28, 29, 32, 33, 34, 35, 36, 37, 39, 40, 42, 43, 44, 47, 51, 53, 56, 58, 60, 61, 64, 66, 69, 71, 72, 73, 74, 76, 78, 79, 80, 83, 84, 85, 87, 88, 89, 90, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 106, 107, 110, 111, 113, 114, 116, 117, 119, 120, 122, 123, 124, 125, 127, 129, 130, 134, 135, 136, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 152, 154, 155, 156, 159, 160, 161, 162, 163, 169, 170, 176, 178, 180, 181, 182, 184], "pipelin": [0, 1, 3, 13, 22, 33, 34, 36, 39, 43, 47, 50, 56, 60, 66, 68, 71, 72, 73, 77, 82, 86, 87, 89, 93, 94, 95, 97, 98, 99, 100, 104, 107, 108, 116, 119, 124, 125, 126, 127, 129, 130, 131, 132, 134, 135, 137, 139, 140, 147, 148, 149, 150, 151, 153, 154, 155, 169, 178, 180, 181, 184], "us": [0, 14, 17, 18, 22, 23, 24, 26, 27, 28, 29, 32, 33, 34, 35, 36, 37, 38, 39, 40, 42, 43, 44, 47, 49, 56, 60, 64, 65, 66, 68, 69, 71, 72, 73, 74, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 127, 128, 129, 130, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 152, 154, 155, 156, 157, 158, 159, 161, 162, 163, 164, 171, 172, 176, 178, 181, 184], "follow": [0, 3, 13, 14, 17, 18, 22, 23, 27, 29, 33, 34, 40, 42, 44, 47, 49, 56, 58, 60, 66, 69, 71, 72, 73, 74, 78, 81, 82, 83, 85, 86, 93, 94, 95, 97, 98, 99, 101, 108, 110, 114, 115, 117, 119, 123, 130, 131, 132, 135, 136, 137, 138, 139, 140, 141, 144, 147, 148, 149, 150, 151, 153, 154, 155, 157, 161, 169, 170, 176, 178, 181, 182, 184], "paramet": [0, 6, 15, 18, 26, 27, 28, 29, 37, 40, 42, 44, 47, 53, 56, 58, 60, 66, 73, 77, 78, 79, 80, 82, 83, 84, 85, 87, 89, 93, 97, 98, 99, 102, 103, 108, 109, 110, 111, 113, 114, 117, 118, 119, 120, 122, 123, 126, 129, 130, 131, 134, 136, 138, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 162, 169, 176, 178, 180, 181, 184], "free": [0, 6, 36, 85, 114, 117, 123, 132, 184], "icon": [0, 36], "licens": [0, 36], "under": [0, 24, 29, 36, 40, 74, 93, 98, 103, 118, 131, 137, 140, 141, 144, 160, 169], "cc": [0, 36], "BY": [0, 36], "3": [0, 4, 18, 29, 36, 42, 47, 56, 60, 73, 74, 76, 80, 81, 82, 83, 84, 85, 91, 92, 93, 95, 97, 98, 99, 100, 102, 103, 104, 105, 106, 107, 109, 110, 114, 115, 116, 117, 118, 120, 121, 123, 125, 126, 129, 131, 132, 134, 137, 138, 139, 141, 142, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 159, 160, 161, 162, 163, 164, 169, 176, 181], "0": [0, 3, 4, 18, 29, 37, 42, 44, 47, 66, 73, 74, 76, 77, 80, 81, 82, 83, 84, 85, 86, 88, 89, 90, 91, 92, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 
109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 162, 163, 176, 178, 184], "sourc": [0, 117], "set": [0, 6, 18, 26, 29, 35, 37, 39, 40, 42, 44, 47, 51, 53, 56, 58, 60, 61, 64, 66, 73, 74, 77, 79, 80, 81, 82, 84, 85, 86, 87, 89, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 117, 118, 119, 120, 121, 122, 123, 124, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 144, 145, 146, 147, 148, 149, 151, 153, 154, 155, 156, 157, 160, 161, 162, 164, 174, 176, 178, 179, 180, 181, 184], "gear": 0, "svg": 0, "vector": [0, 42, 43, 93, 98, 100, 102, 115, 124, 127, 128, 131, 133, 137, 140, 141], "cc0": 0, "close": [0, 3, 29, 44, 49, 74, 82, 97, 100, 101, 102, 107, 108, 115, 122, 123, 131, 132, 135, 136, 137, 139, 144, 149, 151, 153, 156], "mit": 0, "thi": [1, 13, 14, 18, 22, 23, 27, 29, 33, 34, 36, 39, 40, 43, 47, 49, 50, 56, 58, 60, 71, 72, 73, 74, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 169, 170, 173, 176, 178, 181, 182, 184], "collect": [1, 3, 6, 29, 74, 77, 94, 99, 102, 118, 125, 146], "inform": [1, 6, 24, 29, 47, 73, 74, 77, 81, 85, 86, 92, 95, 96, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 115, 117, 118, 125, 126, 132, 137, 139, 141, 142, 145, 149, 150, 153, 155, 157], "relat": [1, 3, 14, 18, 23, 34, 40, 56, 58, 72, 74, 86, 135, 137, 141, 170, 181, 182], "person": [1, 6, 64, 74, 80, 106, 141, 149], "task": [1, 6, 42, 73, 74, 80, 91, 102, 106, 131, 139, 171], "whether": [1, 3, 25, 64, 73, 74, 77, 82, 86, 88, 90, 91, 93, 98, 102, 103, 106, 108, 141, 146, 149, 151, 155, 159, 160, 163], "earn": [1, 74, 149], "salari": [1, 64, 107], "abov": [1, 3, 6, 18, 29, 47, 73, 74, 78, 82, 83, 91, 97, 100, 102, 103, 107, 108, 110, 115, 116, 128, 132, 133, 134, 138, 139, 140, 141, 142, 144, 145, 149, 151, 155, 157, 159, 161, 162, 163, 176, 178, 184], "below": [1, 3, 18, 47, 60, 77, 84, 85, 91, 107, 108, 116, 122, 128, 133, 138, 141, 144, 153, 157, 159, 162, 163, 176, 178, 180, 184], "50": [1, 18, 29, 33, 60, 74, 78, 80, 82, 83, 92, 99, 100, 102, 103, 104, 105, 106, 107, 113, 115, 116, 117, 118, 119, 122, 129, 132, 134, 144, 149, 150, 152, 153], "k": [1, 3, 25, 60, 77, 80, 81, 82, 92, 96, 102, 103, 109, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 125, 126, 127, 144, 146, 148, 149, 151, 152, 155, 162, 181], "we": [1, 3, 13, 14, 18, 22, 23, 25, 28, 29, 33, 34, 37, 39, 40, 42, 47, 49, 53, 56, 58, 60, 64, 66, 69, 71, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 169, 170, 176, 178, 180, 181, 184], "extens": [1, 102], "explor": [1, 6, 47, 75, 76, 80, 82, 85, 98, 112, 114, 121, 123, 
131, 132, 137, 148, 149, 151, 152, 153, 155, 164, 184], "first": [1, 8, 9, 29, 46, 48, 50, 52, 54, 56, 62, 63, 64, 65, 67, 71, 75, 76, 77, 78, 79, 80, 82, 83, 84, 85, 86, 87, 88, 89, 90, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 114, 115, 116, 117, 118, 119, 121, 123, 124, 125, 126, 127, 129, 130, 131, 132, 134, 136, 137, 139, 140, 141, 142, 143, 144, 145, 146, 149, 151, 156, 157, 159, 160, 161, 162, 163, 164, 165, 181], "sequenc": [1, 85, 115, 147, 154], "tabular": [1, 6, 64, 71, 74, 81, 86, 164], "data": [1, 18, 20, 22, 23, 24, 25, 28, 29, 36, 39, 40, 43, 44, 47, 58, 60, 66, 69, 71, 72, 73, 75, 76, 78, 79, 83, 84, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 129, 131, 132, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 172, 176, 178, 181, 184], "notebook": [1, 18, 23, 36, 47, 64, 73, 76, 78, 79, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 97, 99, 100, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 113, 115, 116, 117, 118, 119, 120, 122, 125, 126, 129, 131, 132, 134, 136, 138, 139, 140, 141, 142, 144, 145, 146, 148, 149, 150, 151, 152, 153, 155, 156, 157, 158, 160, 161, 162, 163, 164, 178], "look": [1, 6, 29, 63, 64, 66, 75, 76, 78, 80, 81, 83, 85, 95, 96, 98, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 117, 129, 130, 132, 134, 135, 137, 138, 140, 141, 149, 156, 157, 164, 178, 184], "our": [1, 6, 18, 25, 29, 42, 47, 63, 64, 71, 77, 79, 80, 81, 82, 84, 86, 87, 89, 91, 92, 93, 95, 96, 97, 98, 100, 101, 102, 103, 104, 105, 106, 108, 109, 110, 113, 114, 115, 117, 122, 123, 124, 125, 127, 128, 130, 131, 132, 133, 135, 136, 137, 138, 139, 141, 142, 144, 145, 150, 152, 156, 157, 161, 163, 164, 184], "To": [1, 3, 8, 9, 29, 37, 46, 47, 48, 50, 52, 54, 62, 67, 73, 74, 80, 81, 82, 86, 87, 89, 93, 95, 96, 97, 98, 100, 101, 102, 103, 108, 109, 110, 113, 116, 117, 122, 125, 129, 131, 132, 134, 135, 136, 137, 140, 141, 144, 153, 155, 158, 161, 165, 176], "avoid": [1, 3, 6, 40, 47, 73, 74, 97, 108, 109, 110, 113, 121, 122, 127, 129, 134, 135, 136, 137, 149, 152, 153, 155], "repeat": [1, 3, 18, 29, 47, 58, 60, 77, 79, 84, 87, 89, 95, 96, 97, 100, 102, 125, 135, 140, 142, 145, 148, 155, 158, 159, 161, 162, 163, 184], "same": [1, 3, 18, 26, 29, 37, 40, 44, 47, 66, 74, 79, 80, 81, 82, 84, 85, 86, 90, 95, 96, 97, 99, 100, 102, 103, 104, 108, 109, 110, 115, 116, 125, 128, 129, 131, 132, 133, 134, 136, 137, 138, 140, 141, 144, 146, 152, 153, 161, 162, 176, 184], "redirect": 1, "reader": [1, 74, 110, 137, 139, 144], "particular": [1, 3, 6, 27, 71, 74, 77, 78, 80, 81, 83, 86, 90, 92, 97, 98, 102, 119, 131, 135, 139, 141, 144, 149, 151, 152, 155, 162], "penguin": [2, 18, 75, 76, 109, 112, 121, 128, 129, 130, 133, 134, 135, 136, 138, 139, 156, 158, 159, 161, 162, 163, 164, 184], "adult": [2, 64, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 94, 99, 119, 147, 149, 150, 151, 153, 154, 164], "censu": [2, 64, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 94, 99, 102, 107, 119, 147, 149, 150, 151, 153, 154, 164], "california": [2, 92, 102, 104, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 164], "hous": [2, 3, 49, 73, 91, 92, 96, 102, 103, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 132, 143, 144, 146, 164], "am": [2, 132, 143, 144, 146, 164], "blood": [2, 93, 98, 141, 
142, 145, 164], "transfus": [2, 93, 98, 141, 142, 145, 164], "bike": [2, 29, 164], "ride": [2, 29, 164], "aim": [3, 36, 80, 93, 98, 101, 102, 109, 111, 112, 113, 114, 118, 120, 121, 122, 123, 124, 125, 127, 128, 129, 133, 134, 138, 141, 142, 145, 158, 159, 162, 163], "describ": [3, 69, 80, 81, 82, 92, 107, 184], "For": [3, 6, 36, 42, 53, 58, 71, 74, 77, 80, 81, 82, 85, 86, 91, 92, 95, 97, 99, 102, 103, 105, 107, 108, 112, 113, 115, 117, 119, 121, 122, 124, 125, 126, 127, 129, 131, 132, 134, 135, 137, 138, 139, 141, 144, 146, 149, 150, 151, 153, 156, 160, 162, 163, 172, 176, 181], "you": [3, 6, 14, 18, 23, 29, 34, 36, 40, 47, 50, 58, 60, 66, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 96, 98, 99, 100, 102, 103, 105, 107, 108, 109, 111, 112, 113, 114, 116, 117, 118, 119, 120, 121, 122, 123, 124, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 149, 150, 152, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 170, 174, 176, 178, 180, 182, 184], "don": [3, 6, 78, 83, 93, 98, 102, 106, 124, 127, 132, 149], "t": [3, 6, 78, 83, 85, 93, 95, 98, 101, 102, 106, 108, 116, 123, 124, 127, 132, 141, 144, 145, 149], "find": [3, 6, 49, 58, 74, 80, 81, 82, 85, 93, 94, 97, 98, 99, 101, 103, 111, 112, 114, 120, 121, 123, 126, 127, 129, 130, 131, 132, 134, 135, 136, 140, 147, 148, 149, 151, 153, 154, 155, 156, 158, 159, 162, 163, 169, 178], "ad": [3, 18, 40, 47, 53, 93, 96, 98, 108, 113, 117, 119, 122, 137, 140, 149, 153], "bottom": [3, 74, 141], "page": [3, 36, 80, 82, 83, 86, 91, 97, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "acronym": [3, 101], "stand": [3, 81, 110, 178], "applic": [3, 6, 81, 95, 101, 141, 144, 160], "program": [3, 18, 29, 36, 47, 60, 71, 73, 81, 95, 124, 127, 176, 184], "interfac": [3, 81], "It": [3, 6, 29, 36, 58, 74, 78, 81, 82, 83, 85, 86, 95, 96, 97, 98, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 115, 116, 118, 119, 123, 124, 127, 131, 132, 135, 138, 141, 142, 144, 145, 146, 149, 151, 152, 153, 156, 160, 161, 162], "can": [3, 6, 14, 15, 18, 22, 23, 28, 29, 34, 36, 39, 40, 44, 47, 53, 56, 58, 60, 66, 69, 71, 72, 73, 74, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 125, 126, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 148, 149, 150, 151, 152, 153, 155, 156, 157, 158, 159, 160, 161, 162, 163, 170, 171, 173, 176, 178, 181, 182, 184], "have": [3, 25, 29, 42, 47, 49, 53, 58, 60, 66, 69, 74, 77, 79, 80, 81, 82, 84, 85, 89, 92, 96, 97, 100, 101, 102, 104, 105, 106, 107, 108, 109, 112, 115, 116, 117, 118, 119, 121, 122, 124, 125, 127, 129, 131, 132, 133, 134, 137, 138, 139, 140, 141, 144, 147, 149, 150, 151, 152, 153, 154, 155, 156, 157, 160, 161, 162, 169, 176, 178, 181, 182, 184], "slightli": [3, 16, 74, 83, 85, 86, 102, 105, 109, 110, 117, 118, 119, 122, 135], "differ": [3, 6, 15, 16, 18, 22, 25, 29, 37, 42, 44, 47, 53, 58, 60, 64, 66, 69, 73, 74, 75, 76, 77, 81, 82, 85, 86, 90, 92, 93, 95, 96, 97, 98, 100, 101, 102, 104, 105, 106, 107, 108, 109, 110, 115, 116, 117, 119, 127, 128, 131, 132, 133, 134, 135, 136, 137, 142, 144, 145, 146, 148, 150, 151, 152, 153, 155, 156, 157, 160, 161, 176, 184], "mean": [3, 6, 28, 29, 47, 60, 73, 74, 77, 78, 80, 81, 82, 83, 84, 85, 86, 88, 89, 90, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 
107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 127, 129, 131, 132, 133, 134, 136, 137, 138, 139, 141, 143, 144, 145, 146, 149, 150, 151, 153, 154, 155, 156, 161, 162, 173, 184], "context": [3, 6, 15, 101, 139, 141], "some": [3, 6, 18, 22, 23, 29, 33, 36, 47, 60, 64, 66, 71, 73, 74, 75, 76, 77, 79, 80, 81, 82, 84, 85, 87, 89, 91, 95, 97, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 112, 114, 115, 116, 117, 118, 119, 121, 123, 124, 126, 127, 131, 132, 135, 137, 138, 139, 140, 144, 146, 149, 151, 152, 153, 155, 156, 160, 161, 163, 176, 184], "case": [3, 29, 33, 49, 74, 77, 80, 82, 85, 86, 88, 90, 91, 92, 94, 95, 97, 98, 99, 100, 101, 102, 103, 108, 115, 117, 119, 124, 127, 129, 131, 132, 134, 135, 138, 139, 140, 141, 142, 144, 145, 146, 148, 150, 151, 153, 155, 156], "an": [3, 16, 22, 23, 26, 28, 35, 36, 40, 42, 43, 47, 49, 51, 53, 56, 58, 69, 71, 72, 73, 74, 77, 81, 82, 84, 86, 87, 88, 89, 90, 91, 93, 95, 96, 97, 98, 100, 101, 102, 105, 106, 107, 108, 110, 113, 115, 116, 117, 118, 119, 122, 124, 125, 127, 129, 130, 131, 132, 133, 134, 135, 136, 137, 139, 140, 141, 142, 144, 145, 146, 147, 149, 150, 151, 154, 155, 156, 160, 161, 162, 169, 172, 176, 178, 180, 181, 182, 184], "onlin": [3, 92, 94, 99], "servic": [3, 36, 86], "access": [3, 15, 29, 36, 47, 58, 78, 83, 93, 98, 102, 108, 110, 112, 121, 132, 135, 139, 184], "remot": 3, "In": [3, 6, 15, 22, 23, 29, 34, 36, 37, 39, 40, 42, 44, 47, 58, 64, 72, 74, 77, 78, 80, 81, 82, 83, 84, 85, 86, 87, 89, 91, 92, 93, 94, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 114, 115, 116, 117, 118, 119, 123, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 162, 163, 170, 176, 178, 181], "both": [3, 15, 16, 18, 22, 29, 39, 40, 47, 53, 64, 73, 74, 76, 81, 82, 85, 86, 87, 88, 89, 90, 94, 95, 96, 97, 98, 99, 100, 103, 104, 108, 109, 113, 115, 116, 119, 122, 124, 125, 127, 128, 131, 132, 133, 135, 137, 139, 140, 141, 142, 144, 145, 149, 151, 155, 156, 157, 159, 160, 163, 169, 170, 176, 178, 184], "itself": [3, 18, 42, 82, 95, 97, 98, 102, 106, 110, 144, 151, 181], "technic": [3, 13, 22, 33, 36, 39, 56, 71, 81, 169, 181], "specif": [3, 13, 22, 23, 37, 64, 81, 82, 87, 88, 89, 90, 95, 102, 103, 105, 107, 108, 109, 115, 116, 119, 125, 132, 141, 149, 150, 152, 156, 160, 161, 176], "peopl": [3, 6, 74, 95, 102, 107, 108, 141], "who": [3, 6, 86, 141], "write": [3, 36, 75, 78, 79, 87, 88, 90, 93, 94, 95, 111, 112, 113, 114, 124, 128, 129, 130, 137, 142, 143, 147, 148, 154, 158, 159], "client": 3, "connect": 3, "offlin": 3, "librari": [3, 36, 71, 74, 155], "scikit": [3, 13, 14, 22, 23, 24, 27, 28, 33, 34, 37, 38, 39, 40, 42, 56, 58, 60, 64, 66, 68, 71, 72, 77, 78, 80, 82, 83, 85, 86, 87, 89, 94, 99, 100, 102, 107, 109, 112, 114, 116, 119, 121, 123, 124, 127, 130, 131, 132, 137, 139, 141, 142, 144, 145, 146, 149, 151, 162, 164, 169, 170, 172, 173, 179, 181, 182, 184], "list": [3, 29, 36, 47, 60, 85, 88, 90, 100, 108, 110, 111, 113, 120, 122, 132, 142, 143, 145, 146, 150, 184], "all": [3, 15, 16, 17, 18, 24, 26, 27, 28, 29, 37, 42, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 77, 78, 80, 81, 82, 83, 85, 88, 90, 92, 94, 95, 96, 97, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 116, 117, 118, 119, 125, 131, 132, 133, 135, 137, 138, 141, 144, 146, 148, 149, 150, 151, 152, 155, 156, 159, 161, 162, 163, 171, 172, 174, 176, 178, 180, 184], "public": 3, "function": [3, 6, 
18, 29, 36, 43, 51, 58, 60, 66, 73, 77, 80, 81, 82, 85, 94, 99, 102, 103, 104, 107, 108, 109, 110, 114, 115, 123, 124, 127, 128, 130, 131, 133, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 149, 151, 152, 156, 158, 161, 162, 173, 184], "class": [3, 15, 25, 27, 42, 60, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 95, 98, 99, 100, 104, 105, 106, 107, 108, 109, 110, 116, 119, 130, 131, 132, 135, 137, 138, 139, 140, 142, 145, 146, 147, 149, 150, 151, 153, 154, 156, 158, 160, 172, 184], "method": [3, 4, 6, 14, 42, 60, 73, 74, 75, 76, 80, 81, 82, 86, 95, 101, 108, 111, 115, 118, 119, 120, 126, 129, 131, 132, 134, 137, 139, 140, 141, 144, 150, 151, 153, 164, 184], "along": [3, 74, 77, 85, 87, 89, 104, 128, 133, 144, 156], "document": [3, 4, 6, 15, 60, 78, 80, 81, 82, 83, 85, 87, 89, 92, 94, 95, 99, 137, 141, 142, 143, 145, 146, 150], "via": [3, 29, 36, 77, 85, 97, 103, 107, 114, 115, 116, 118, 119, 123, 125, 139, 149, 150, 160], "docstr": [3, 144], "brows": 3, "http": [3, 6, 36, 74, 81, 95, 102, 107], "org": [3, 74, 80, 81, 82, 83, 86, 91, 97, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "stabl": [3, 108, 110, 132, 176, 184], "html": [3, 80, 82, 83, 86, 91, 97, 102, 107, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "try": [3, 6, 29, 47, 74, 80, 82, 83, 86, 87, 89, 91, 97, 102, 105, 109, 110, 115, 117, 119, 129, 131, 132, 134, 135, 136, 137, 140, 141, 142, 144, 145, 146, 147, 149, 151, 153, 154, 155, 156, 162, 163, 176, 181], "adopt": [3, 131, 135], "simpl": [3, 6, 47, 74, 79, 84, 93, 98, 104, 109, 110, 118, 119, 132, 135, 137, 138, 139, 150, 156, 162, 164], "convent": [3, 74, 81, 82, 137], "limit": [3, 18, 29, 32, 34, 42, 56, 58, 61, 80, 86, 102, 103, 110, 131, 132, 137, 140, 144, 153, 157, 159, 163, 164], "minimum": [3, 117, 135, 141, 153, 159, 160, 161, 163], "number": [3, 6, 16, 17, 18, 29, 37, 42, 44, 47, 49, 53, 60, 69, 73, 74, 77, 78, 80, 81, 82, 83, 85, 86, 87, 89, 93, 95, 96, 98, 100, 102, 103, 104, 105, 106, 107, 108, 109, 110, 113, 114, 115, 116, 117, 119, 122, 123, 125, 126, 127, 129, 131, 132, 133, 134, 135, 137, 141, 143, 144, 146, 148, 149, 151, 152, 153, 155, 156, 160, 161, 171, 176, 178, 182, 184], "object": [3, 6, 29, 47, 58, 78, 80, 81, 82, 83, 85, 86, 87, 88, 89, 90, 102, 104, 105, 106, 119, 132, 139, 141, 142, 145, 147, 149, 150, 151, 153, 154, 155, 156, 161, 176], "must": [3, 6, 44, 108, 132, 151, 184], "implement": [3, 90, 100, 109, 110, 116, 136, 139, 142, 145, 149, 152], "furthermor": [3, 47, 102, 131, 132, 134, 139], "tri": [3, 6, 66, 105, 109, 132, 135], "consist": [3, 40, 47, 73, 77, 81, 102, 129, 134, 137, 146], "name": [3, 15, 29, 60, 74, 76, 80, 81, 82, 83, 84, 85, 86, 92, 93, 98, 99, 100, 102, 104, 105, 106, 107, 108, 110, 115, 117, 120, 123, 129, 132, 134, 135, 138, 139, 141, 149, 150, 152, 153, 180, 184], "categori": [3, 69, 73, 74, 80, 86, 87, 89, 104, 106, 119, 132, 149, 157], "e": [3, 6, 18, 29, 47, 60, 64, 66, 69, 71, 72, 73, 74, 80, 81, 82, 84, 85, 86, 89, 95, 96, 97, 100, 101, 102, 108, 115, 116, 117, 118, 125, 129, 132, 134, 135, 136, 139, 141, 144, 146, 147, 149, 153, 154, 155, 156, 176, 178, 180, 184], "g": [3, 6, 18, 29, 73, 74, 85, 86, 95, 97, 101, 108, 112, 117, 118, 121, 125, 128, 129, 132, 133, 134, 135, 136, 138, 139, 147, 149, 153, 154, 155, 157, 159, 160, 161, 163, 176, 184], "expos": [3, 82, 85, 119, 141], "fit_transform": [3, 66, 82, 85, 86, 104, 116, 127, 137], "accept": [3, 85, 102], "similar": [3, 44, 47, 60, 66, 78, 82, 83, 85, 88, 90, 104, 110, 
117, 125, 129, 131, 132, 134, 137, 140, 141, 146, 149, 150, 151, 152, 153, 156, 184], "argument": [3, 47, 60, 78, 82, 83, 85, 87, 89, 105, 184], "type": [3, 14, 23, 29, 43, 47, 50, 71, 72, 74, 80, 81, 87, 88, 89, 90, 102, 104, 105, 106, 107, 110, 124, 127, 130, 141, 144, 176], "shape": [3, 42, 43, 74, 80, 81, 85, 104, 105, 107, 108, 109, 110, 131, 137, 139, 144, 156, 161], "those": [3, 15, 28, 56, 80, 94, 99, 103, 110, 116, 132, 137, 144, 149, 151, 155, 162, 173, 180, 184], "problem": [3, 15, 18, 22, 29, 39, 40, 44, 49, 50, 56, 60, 64, 73, 74, 79, 84, 85, 87, 89, 91, 93, 95, 98, 99, 100, 102, 105, 106, 107, 115, 125, 131, 132, 136, 137, 138, 139, 141, 144, 149, 151, 152, 155, 156, 157, 160, 161, 169, 170, 176, 184], "where": [3, 6, 29, 40, 42, 43, 44, 60, 69, 73, 77, 82, 87, 89, 93, 95, 98, 100, 102, 107, 108, 113, 119, 122, 125, 131, 132, 135, 138, 139, 140, 144, 149, 151, 153, 155, 156, 160, 178, 184], "goal": [3, 25, 36, 74, 77, 78, 79, 83, 84, 87, 88, 89, 90, 91, 95, 117, 118, 119, 125, 144, 147, 148, 154, 155, 184], "take": [3, 16, 29, 47, 74, 77, 80, 81, 82, 86, 90, 92, 94, 95, 97, 99, 101, 102, 104, 105, 106, 107, 110, 128, 133, 138, 141, 149, 152, 153, 156, 164, 176, 178], "finit": [3, 64, 69, 74, 85], "valu": [3, 13, 15, 18, 24, 28, 29, 36, 37, 40, 42, 44, 47, 49, 60, 64, 66, 69, 73, 74, 78, 80, 83, 85, 86, 87, 89, 92, 95, 97, 98, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 113, 114, 115, 116, 117, 118, 119, 121, 122, 123, 125, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 144, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 159, 160, 161, 162, 163, 172, 173, 176, 178, 180, 181, 184], "exampl": [3, 6, 11, 14, 23, 34, 40, 49, 58, 71, 72, 74, 77, 79, 82, 84, 85, 86, 91, 92, 95, 100, 101, 103, 105, 109, 110, 116, 119, 125, 132, 133, 137, 138, 140, 144, 146, 149, 150, 160, 162, 164, 170, 181, 182], "ar": [3, 13, 14, 15, 16, 17, 18, 22, 23, 25, 27, 28, 29, 33, 34, 36, 37, 39, 40, 43, 44, 47, 49, 53, 56, 58, 60, 66, 69, 71, 72, 73, 74, 75, 76, 77, 78, 80, 81, 82, 83, 85, 86, 89, 90, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 115, 116, 117, 118, 119, 120, 123, 125, 126, 127, 129, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 144, 146, 148, 149, 150, 151, 152, 153, 154, 155, 156, 160, 161, 162, 163, 169, 170, 172, 176, 178, 180, 181, 182, 184], "iri": [3, 100], "setosa": 3, "versicolor": 3, "virginica": 3, "from": [3, 6, 14, 18, 24, 25, 29, 36, 37, 44, 47, 49, 56, 58, 60, 74, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 130, 131, 132, 134, 135, 136, 137, 138, 139, 140, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 171, 173, 176, 178, 180, 181, 184], "petal": 3, "sepal": 3, "measur": [3, 6, 18, 29, 66, 73, 75, 76, 77, 80, 81, 105, 108, 122, 125, 128, 133, 138, 141, 151, 156, 157], "patient": [3, 6, 25, 129, 134], "ha": [3, 25, 37, 43, 44, 47, 64, 71, 73, 74, 80, 81, 82, 84, 85, 86, 90, 95, 99, 102, 103, 104, 107, 108, 109, 110, 116, 117, 122, 129, 131, 132, 134, 135, 136, 138, 141, 142, 144, 145, 148, 150, 153, 155, 156, 162, 176, 178, 184], "diseas": [3, 6, 25, 74, 129, 134], "result": [3, 29, 66, 74, 77, 79, 80, 81, 82, 84, 85, 86, 89, 90, 92, 94, 95, 97, 99, 100, 101, 102, 103, 110, 113, 114, 115, 117, 118, 119, 122, 123, 124, 125, 126, 127, 129, 
118, 119, 122, 126, 131, 132, 134, 135, 140, 151, 155, 160, 164, 169, 171, 174], "valid": [13, 18, 22, 23, 24, 25, 26, 29, 33, 34, 39, 40, 44, 47, 56, 57, 58, 60, 65, 66, 73, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 98, 99, 100, 101, 107, 108, 113, 114, 115, 116, 117, 118, 119, 122, 123, 124, 125, 126, 127, 129, 132, 134, 140, 141, 142, 143, 145, 146, 147, 149, 150, 151, 152, 153, 154, 160, 161, 164, 169, 174, 176, 178, 181, 182, 184], "principl": [13, 22, 34, 141], "through": [13, 22, 29, 33, 36, 39, 56, 71, 93, 98, 103, 108, 114, 123, 132, 138, 139, 142, 145, 149, 169, 178, 181], "hour": [13, 22, 39, 56, 71, 74, 77, 79, 80, 81, 82, 84, 85, 86, 149, 150, 153, 169, 181], "saw": [14, 29, 40, 47, 80, 82, 85, 86, 102, 103, 115, 117, 132, 136, 139, 140, 142, 145, 149, 151, 152, 153, 156, 160, 161, 162, 170], "parallel": [14, 16, 115, 151, 152, 155, 178], "sequenti": [14, 16, 53, 82, 116, 117, 152, 178], "intern": [14, 44, 66, 82, 86, 97, 102, 109, 110, 113, 114, 122, 123, 132, 144, 146, 149, 151, 178], "machineri": [14, 109, 115], "art": 14, "learn": [14, 16, 17, 23, 24, 27, 28, 29, 34, 37, 38, 40, 42, 51, 58, 60, 64, 66, 68, 69, 72, 74, 77, 78, 80, 82, 83, 84, 85, 86, 87, 89, 93, 94, 95, 97, 98, 99, 100, 101, 102, 103, 106, 107, 109, 112, 114, 116, 117, 119, 121, 123, 124, 125, 126, 127, 130, 131, 132, 134, 137, 139, 140, 141, 142, 144, 145, 146, 149, 151, 152, 154, 157, 161, 162, 170, 171, 172, 173, 179, 180, 182, 184], "earli": [14, 29, 113, 114, 122, 123], "stop": [14, 29, 87, 89, 92, 99, 105, 113, 114, 117, 122, 123], "stack": 14, "By": [15, 18, 29, 47, 74, 82, 85, 97, 100, 103, 105, 119, 127, 132, 141, 144, 146, 159, 163, 176], "default": [15, 18, 27, 28, 47, 60, 66, 77, 78, 82, 83, 85, 93, 98, 100, 105, 111, 117, 118, 120, 130, 131, 132, 141, 142, 143, 144, 145, 146, 150, 151, 176, 181], "baggingclassifi": [15, 119], "baggingregressor": [15, 110, 111, 118, 119, 120], "draw": [15, 29, 74, 94, 99, 110, 117, 125, 153, 156, 178], "replac": [15, 29, 47, 85, 104, 105, 107, 110, 131], "without": [15, 36, 38, 47, 60, 74, 78, 81, 83, 92, 95, 97, 98, 101, 102, 104, 105, 106, 117, 118, 125, 126, 129, 132, 134, 135, 136, 140, 141, 142, 144, 145, 162, 164, 184], "d": [15, 16, 17, 18, 20, 25, 27, 29, 37, 42, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 74, 81, 95, 164, 171, 176, 178, 180, 184], "answer": [15, 16, 17, 18, 24, 25, 26, 27, 28, 29, 35, 42, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 85, 104, 129, 134, 135, 155, 171, 172, 173, 174, 176, 178, 180, 184], "hint": [15, 28, 29, 47, 60, 73, 75, 76, 78, 79, 83, 84, 88, 90, 129, 132, 134, 135, 162, 184], "base_estim": [15, 120], "decid": [15, 22, 74, 80, 102, 107, 124, 127, 129, 134, 151], "resampl": [15, 51, 73, 97, 105, 109, 132], "perform": [15, 17, 18, 19, 22, 23, 24, 26, 29, 34, 35, 47, 60, 64, 66, 73, 76, 77, 79, 80, 81, 82, 84, 85, 86, 87, 89, 91, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 106, 108, 109, 111, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 124, 125, 126, 127, 129, 132, 134, 137, 140, 141, 143, 144, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 160, 169, 176, 178, 181, 182, 184], "correct": [16, 17, 18, 29, 60, 81, 83, 109, 115, 117, 129, 134, 141, 147, 153, 154, 160, 184], "statement": [16, 18, 27, 29, 42, 73, 178, 184], "simultan": 16, "histogram": [16, 29, 66, 74, 75, 76, 82, 94, 99, 104, 107, 114, 116, 123, 139, 149, 153], "acceler": [16, 29, 105, 116], "subsampl": [16, 107, 119, 124, 127], "origin": [16, 66, 77, 80, 81, 85, 86, 100, 101, 104, 109, 110, 115, 116, 119, 129, 131, 134, 135, 137, 138, 144, 
156, 157, 161, 162], "bin": [16, 74, 82, 92, 95, 99, 102, 103, 104, 105, 106, 107, 116, 131, 137, 152, 153], "numer": [16, 47, 64, 68, 69, 71, 72, 73, 74, 75, 76, 77, 78, 79, 81, 83, 84, 85, 94, 99, 102, 104, 105, 107, 129, 131, 132, 134, 137, 148, 149, 150, 155, 164, 176, 184], "tend": [16, 29, 95, 117, 118, 131, 132, 135, 144, 151], "true": [16, 18, 27, 29, 42, 47, 53, 58, 60, 66, 73, 81, 92, 95, 96, 97, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 125, 128, 129, 131, 132, 133, 134, 139, 141, 144, 146, 148, 149, 151, 155, 156, 162, 174, 178, 184], "shallow": [17, 109, 115, 117, 160], "deeper": [17, 92, 93, 96, 98, 102, 103, 109, 111, 112, 113, 116, 117, 118, 119, 120, 121, 122, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 141, 142, 143, 144, 145, 146, 156, 158, 159, 160, 161, 162, 163], "exist": [17, 74, 77, 95], "maximum": [17, 29, 82, 85, 97, 103, 137, 141, 149, 153, 156, 158, 159, 161, 162, 163, 171, 174], "depth": [17, 18, 36, 75, 76, 103, 107, 109, 115, 116, 117, 118, 129, 130, 134, 135, 139, 147, 154, 156, 157, 158, 159, 160, 161, 162, 163, 171, 174, 176], "rate": [17, 29, 81, 92, 96, 102, 105, 117, 141, 149, 152, 154], "option": [17, 82, 85, 92, 100, 102, 105, 113, 122, 181, 184], "reduc": [17, 18, 40, 44, 47, 91, 95, 96, 116, 117, 118, 119, 125, 132, 133], "sensit": [17, 51, 135, 140, 141, 152, 155, 170], "notic": [18, 81, 82, 98, 102, 107, 108, 131, 132, 134, 135, 137, 139, 141, 146, 148, 155, 162], "tradit": 18, "panda": [18, 29, 36, 47, 60, 64, 71, 73, 74, 75, 76, 77, 78, 79, 80, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 115, 117, 119, 120, 121, 123, 125, 126, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 176, 178, 184], "pd": [18, 29, 47, 60, 64, 73, 74, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 115, 117, 119, 120, 121, 123, 125, 126, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 176, 178, 184], "read_csv": [18, 29, 47, 60, 64, 73, 74, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 93, 94, 98, 99, 101, 104, 105, 106, 109, 112, 119, 121, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 141, 142, 143, 144, 145, 146, 147, 149, 150, 151, 152, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 176, 178, 184], "csv": [18, 29, 47, 60, 64, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 93, 94, 98, 99, 101, 104, 105, 106, 109, 112, 119, 121, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 141, 142, 143, 144, 145, 146, 147, 149, 150, 151, 152, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 176, 178, 184], "feature_nam": [18, 104, 107, 112, 121, 128, 131, 132, 133, 136, 138, 139, 140, 156, 157, 159, 160, 161, 162, 163], "culmen": [18, 75, 76, 109, 129, 130, 134, 135, 139, 156, 157, 158, 160, 162, 184], "mm": [18, 76, 109, 112, 121, 128, 129, 130, 133, 134, 135, 136, 138, 139, 156, 157, 158, 159, 160, 161, 162, 163, 184], "flipper": [18, 112, 121, 128, 129, 133, 134, 136, 138, 157, 159, 160, 161, 163, 184], "target_nam": [18, 29, 47, 60, 73, 77, 79, 81, 82, 84, 85, 86, 87, 88, 89, 90, 91, 104, 105, 112, 119, 121, 128, 129, 131, 132, 
133, 134, 136, 138, 140, 147, 149, 150, 151, 153, 154, 159, 160, 161, 163, 176, 184], "bodi": [18, 75, 76, 105, 112, 121, 128, 129, 133, 134, 136, 138, 157, 159, 160, 161, 163, 184], "mass": [18, 29, 112, 121, 128, 129, 133, 134, 136, 138, 157, 159, 160, 161, 163, 184], "dropna": [18, 129, 134, 184], "frac": [18, 29], "random_st": [18, 37, 80, 82, 84, 86, 92, 93, 97, 98, 99, 100, 101, 102, 103, 108, 109, 110, 111, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 123, 125, 126, 127, 130, 131, 132, 134, 135, 137, 139, 140, 141, 144, 147, 148, 149, 151, 153, 154, 155, 156, 158, 160, 162, 176], "reset_index": [18, 130, 135, 139], "drop": [18, 29, 37, 47, 60, 73, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 93, 94, 98, 99, 101, 104, 105, 106, 107, 108, 119, 141, 142, 143, 144, 145, 146, 147, 149, 151, 153, 154, 176], "therefor": [18, 29, 73, 74, 77, 82, 84, 86, 92, 95, 96, 97, 99, 102, 103, 107, 109, 115, 116, 117, 119, 125, 126, 127, 132, 133, 134, 137, 139, 140, 141, 144, 146, 151, 155, 160], "randomli": [18, 29, 97, 99, 102, 108, 110, 119, 153], "shuffl": [18, 37, 80, 95, 97, 100, 101, 102, 108, 114, 123, 141, 144], "break": [18, 95, 132], "spuriou": 18, "troubl": [18, 100], "outsid": [18, 149, 156, 159, 163], "scope": [18, 74, 137, 139, 144, 146], "regressor": [18, 24, 28, 29, 40, 44, 47, 81, 82, 92, 96, 101, 102, 103, 110, 111, 114, 115, 116, 117, 118, 119, 120, 122, 123, 132, 137, 144, 170, 173, 176], "sklearn": [18, 29, 44, 47, 60, 73, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 130, 131, 132, 134, 135, 136, 137, 139, 140, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 155, 156, 158, 160, 161, 162, 163, 176, 178, 180, 184], "randomforestregressor": [18, 108, 115, 117, 119, 121, 122], "except": [18, 87, 89, 142, 145, 156, 162], "exact": [18, 29, 115], "fold": [18, 25, 29, 47, 60, 73, 77, 95, 100, 102, 108, 114, 123, 125, 126, 127, 128, 129, 132, 133, 134, 143, 146, 151, 153, 155, 176, 184], "model_select": [18, 29, 47, 60, 77, 79, 80, 82, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 107, 108, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 130, 132, 134, 135, 139, 141, 142, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 155, 156, 158, 160, 162, 176, 178, 184], "cross_valid": [18, 29, 47, 60, 66, 73, 77, 85, 86, 87, 88, 89, 90, 91, 92, 93, 98, 99, 100, 103, 107, 108, 114, 115, 116, 118, 123, 125, 126, 132, 134, 142, 143, 145, 146, 149, 150, 151, 176, 184], "cv": [18, 29, 47, 66, 73, 77, 86, 91, 92, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 108, 114, 118, 123, 126, 132, 134, 142, 145, 146, 149, 151, 153, 154, 160, 176, 178, 184], "store": [18, 29, 74, 80, 82, 92, 94, 99, 100, 102, 104, 105, 106, 110, 118, 125, 126, 131, 132, 136, 140, 148, 149, 155, 184], "return_train_scor": [18, 29, 60, 103, 132], "count": [18, 47, 60, 73, 74, 76, 80, 82, 84, 85, 92, 95, 100, 104, 105, 106, 107, 156, 176, 184], "rang": [18, 40, 47, 60, 66, 73, 80, 82, 95, 97, 100, 102, 105, 106, 107, 108, 110, 131, 132, 138, 151, 152, 153, 155, 157, 159, 162, 163, 173, 176, 178, 184], "substanti": [18, 176, 184], "almost": [18, 47, 73, 81, 90, 102, 108, 135, 138, 144, 151, 162, 176], "100": [18, 47, 60, 80, 81, 92, 96, 99, 102, 103, 107, 108, 110, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 124, 125, 126, 127, 129, 131, 132, 134, 135, 
137, 140, 141, 144, 148, 153, 154, 155, 162, 184], "again": [18, 93, 98, 108, 110, 113, 122, 135, 137, 141, 152], "curv": [18, 42, 58, 60, 93, 98, 113, 122, 134, 138, 139, 141, 164], "n_estim": [18, 109, 110, 113, 115, 116, 117, 118, 119, 120, 121, 122], "numpi": [18, 29, 36, 60, 64, 66, 71, 82, 92, 95, 96, 98, 99, 100, 103, 105, 107, 108, 109, 110, 115, 116, 121, 122, 124, 125, 127, 128, 131, 132, 133, 134, 136, 137, 138, 140, 141, 143, 144, 146, 152, 155, 160, 161, 162, 163, 178], "np": [18, 29, 47, 60, 92, 93, 95, 96, 98, 99, 100, 103, 105, 107, 108, 109, 110, 113, 115, 116, 119, 121, 122, 124, 125, 127, 128, 129, 131, 132, 133, 134, 136, 137, 138, 140, 141, 143, 144, 146, 148, 152, 155, 160, 161, 162, 163, 173, 178], "arrai": [18, 29, 42, 43, 60, 66, 77, 80, 81, 82, 83, 85, 86, 95, 96, 102, 103, 108, 109, 110, 113, 116, 121, 122, 129, 132, 134, 139, 141, 145, 149, 156], "200": [18, 60, 73, 95, 105, 115, 116, 131, 132, 153], "500": [18, 60, 85, 86, 89, 102, 105, 107, 144, 152, 153], "1_000": [18, 29, 95, 113, 122], "decreas": [18, 29, 44, 47, 53, 58, 82, 108, 116, 125, 132, 144, 149, 155], "becom": [18, 93, 96, 98, 103, 108, 116, 117, 149, 153, 162, 178], "reach": [18, 96, 103, 113, 122, 149, 152, 160, 178], "plateau": [18, 96, 113, 122], "experi": [18, 36, 47, 60, 71, 79, 84, 93, 95, 96, 97, 98, 102, 103, 107, 113, 114, 116, 122, 123, 135, 140, 142, 145, 155, 158, 161, 162], "instead": [18, 29, 47, 60, 73, 77, 81, 82, 85, 87, 88, 89, 90, 92, 93, 95, 96, 98, 99, 102, 110, 115, 117, 118, 119, 129, 132, 134, 137, 141, 142, 143, 144, 145, 146, 147, 148, 151, 152, 153, 154, 155, 156, 157, 161, 162, 176], "max_depth": [18, 103, 109, 110, 113, 114, 115, 117, 118, 122, 123, 137, 156, 161, 162, 163, 176], "gap": [18, 103, 132], "begin": [18, 81, 113, 122, 131, 132, 151], "consid": [18, 43, 60, 69, 73, 85, 101, 103, 104, 107, 108, 109, 115, 116, 117, 119, 131, 132, 137, 142, 145, 156, 162, 171, 180, 184], "none": [18, 47, 93, 95, 98, 102, 104, 105, 106, 107, 110, 113, 117, 118, 122, 130, 131, 137, 152, 184], "rf_1_tree": 18, "cv_results_tre": 18, "train_scor": [18, 29, 103, 132], "return": [18, 28, 29, 42, 44, 66, 77, 80, 81, 85, 87, 89, 108, 110, 115, 128, 132, 133, 138, 139, 141, 142, 145, 149, 150, 152, 153, 178], "83120264": 18, "83309064": 18, "83195043": 18, "84834224": 18, "85790323": 18, "86235297": 18, "84791111": 18, "85183089": 18, "82241954": 18, "85045978": 18, "perfect": [18, 42, 56, 92, 102, 108, 115, 124, 126, 127, 140, 141, 160], "r2": [18, 28, 101, 107, 118, 123, 144, 146], "surpris": [18, 84, 95, 100, 101, 124, 127, 156, 162], "memor": [18, 81, 101, 102, 103], "expect": [18, 29, 66, 74, 76, 80, 85, 86, 97, 100, 101, 102, 107, 110, 117, 127, 129, 132, 134, 140, 144, 146, 151, 155, 160], "automat": [18, 66, 74, 80, 82, 85, 87, 89, 102, 150, 151, 176, 181], "prevent": [18, 43, 85, 89, 110, 117, 178], "max_it": [18, 29, 82, 85, 86, 89, 95, 116, 117, 123, 135], "recal": [18, 27, 60, 85, 97, 102, 116, 118, 135, 141, 150, 151, 153, 155, 184], "averag": [18, 29, 60, 81, 92, 95, 99, 100, 102, 105, 107, 108, 110, 115, 116, 117, 118, 119, 122, 125, 132, 135, 136, 141, 142, 144, 145, 146, 184], "small": [18, 40, 73, 76, 77, 86, 95, 98, 100, 102, 103, 108, 110, 115, 116, 117, 129, 132, 134, 135, 137, 147, 149, 154, 178], "behav": [18, 93, 98, 99, 132, 135, 176], "high": [18, 29, 37, 51, 53, 58, 64, 74, 76, 80, 84, 85, 98, 103, 104, 105, 106, 107, 108, 131, 135, 139, 149, 150, 153], "optimum": 18, "m7": [19, 20, 21, 30, 31, 164], "stratif": [20, 164], "framework": [22, 23, 34, 56, 57, 96, 
100, 103, 114, 123, 143, 146, 151, 164, 181], "keep": [22, 29, 74, 91, 95, 102, 104, 105, 107, 108, 125, 126, 127, 129, 130, 134, 135, 137, 139, 140, 151, 153], "mind": [22, 74, 102, 108, 117, 124, 125, 126, 127, 140, 153], "metric": [22, 27, 29, 66, 77, 81, 96, 101, 102, 106, 110, 111, 113, 120, 121, 122, 125, 129, 132, 133, 134, 136, 137, 142, 143, 144, 145, 146, 152, 161, 164, 184], "besid": [22, 23, 33, 39, 80, 86, 88, 90, 92, 95, 96, 114, 123, 125, 156, 169, 182], "insight": [22, 33, 36, 47, 64, 74, 101, 103, 110, 124, 127, 138, 139, 144, 153], "addit": [22, 29, 43, 69, 77, 81, 85, 102, 105, 107, 113, 114, 118, 119, 122, 123, 132, 134, 137, 138, 141, 144, 149, 150, 151, 153, 155, 176], "necess": [22, 102], "appropri": [22, 74], "nest": [22, 23, 26, 117, 132, 147, 149, 151, 154, 164, 176, 182, 184], "wise": [23, 131, 151, 152, 182], "encount": [23, 73, 85, 87, 89, 100], "show": [23, 39, 74, 75, 76, 77, 79, 80, 81, 82, 83, 84, 85, 86, 91, 96, 97, 100, 102, 103, 104, 107, 109, 110, 115, 117, 118, 119, 126, 128, 129, 131, 132, 133, 134, 136, 137, 138, 139, 140, 141, 142, 145, 149, 150, 151, 152, 153, 155, 156, 158, 161, 162, 163, 169, 178, 181], "comparison": [23, 40, 97, 118, 141], "remov": [24, 35, 47, 74, 108, 116, 127, 129, 134, 149, 152, 184], "dummi": [24, 60, 79, 84, 89, 92, 94, 99, 141, 144], "reli": [24, 74, 80, 82, 108, 109, 126, 141], "ye": [24, 42, 43], "whatev": [24, 152], "chosen": [24, 85, 89, 106, 113, 122, 144, 182], "record": [25, 29, 49, 74, 81, 85, 102, 105, 106, 108], "suppos": [25, 85, 95], "imbalanc": [25, 60, 74, 99, 132, 141, 184], "addition": [25, 82, 132], "suspect": 25, "systemat": [25, 51, 58, 73, 98, 144, 150], "bias": [25, 131], "due": [25, 29, 84, 86, 100, 115, 119, 146, 156], "factor": [25, 29, 92, 135, 144], "devic": [25, 29], "socioeconom": 25, "most": [25, 29, 47, 60, 74, 80, 81, 84, 85, 89, 90, 93, 94, 95, 98, 99, 102, 104, 108, 109, 110, 116, 125, 132, 135, 136, 139, 141, 142, 145, 149, 152, 156, 162, 171, 173, 176, 178], "suitabl": 25, "abil": [25, 103, 141, 147, 154], "stratifi": [25, 94, 99, 100, 184], "leav": [25, 81, 93, 98, 117, 153, 160, 162], "inner": [26, 97, 114, 123, 132, 151, 176], "outer": [26, 97, 114, 122, 123, 131, 132, 151, 176, 184], "balanc": [27, 60, 103, 117, 132, 141, 142, 144, 145, 184], "roc": [27, 141], "auc": [27, 141], "precis": [27, 29, 58, 86, 117, 138, 141, 142, 145, 149], "regular": [27, 37, 39, 40, 44, 47, 108, 110, 130, 131, 153, 161, 164, 182], "assum": [27, 29, 43, 44, 53, 69, 89, 100, 101, 108, 132, 137, 139, 176], "logist": [27, 41, 42, 44, 66, 80, 82, 85, 86, 87, 88, 89, 90, 93, 94, 95, 98, 99, 100, 124, 127, 130, 135, 139, 141, 150, 156, 164], "stronger": [27, 105, 110, 135, 155], "lead": [27, 58, 74, 85, 86, 89, 95, 101, 115, 117, 118, 119, 132, 134, 135, 141, 148, 151, 152, 153, 155], "lower": [27, 28, 29, 47, 53, 89, 95, 96, 99, 102, 105, 108, 117, 122, 132, 134, 135, 141, 144, 146, 151, 155, 162], "r": [28, 98, 101, 102, 107, 108, 143, 144, 146], "absolut": [28, 29, 47, 92, 96, 102, 103, 111, 112, 113, 115, 116, 120, 121, 122, 129, 133, 134, 136, 137, 143, 144, 146], "median": [28, 91, 92, 97, 102, 107, 108, 118, 123, 125, 126, 132, 144, 145, 172, 173], "cross_val_scor": [28, 60, 95, 97, 101, 102, 119, 127, 142, 143, 145, 146, 147, 154], "model_a": 28, "neg_mean_squared_error": [28, 132, 146], "strictli": 28, "model_b": 28, "rememb": [28, 47, 74, 110, 129, 134, 135, 180, 184], "alia": 28, "neg": [28, 29, 47, 66, 102, 103, 108, 132, 133, 138, 141], "guarante": [28, 108, 160], "either": [28, 69, 81, 95, 98, 
100, 124, 125, 127, 133, 139, 141, 144], "open": [29, 47, 60, 73, 78, 80, 83, 101, 104, 105, 176], "bike_rid": [29, 105], "command": [29, 47, 60, 73, 176, 184], "cycl": [29, 105], "index_col": [29, 101, 105, 152, 153, 178], "parse_d": [29, 101, 105], "index": [29, 47, 74, 81, 95, 100, 101, 105, 108, 110, 116, 125, 129, 132, 134, 135, 139, 145, 149, 151, 156], "appendix": [29, 92, 93, 96, 98, 102, 103, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 141, 142, 143, 144, 145, 146, 156, 158, 159, 160, 161, 162, 163], "remind": 29, "cheap": [29, 49, 86], "sensor": [29, 74, 105], "gp": [29, 105], "cyclist": [29, 105], "meter": [29, 105], "expens": [29, 73, 116, 149, 182], "blindli": 29, "introduc": [29, 36, 81, 82, 102, 108, 110, 125, 129, 132, 134, 137, 138, 144, 146, 164], "flavor": 29, "classic": 29, "newton": 29, "second": [29, 37, 73, 82, 85, 88, 90, 94, 99, 100, 105, 109, 115, 116, 131, 140, 144, 153, 160, 162], "p_": 29, "meca": 29, "rho": 29, "sc_x": 29, "v_": 29, "c_r": 29, "mg": 29, "co": 29, "alpha": [29, 44, 47, 74, 95, 98, 107, 108, 109, 110, 115, 121, 131, 132, 133, 135, 136, 137, 138, 139, 140, 144, 156, 160, 161, 162, 163], "sin": 29, "ma": 29, "v_d": 29, "air": 29, "densiti": [29, 131], "kg": [29, 157], "m": [29, 85, 95], "frontal": 29, "c_x": 29, "drag": 29, "coeffici": [29, 37, 40, 43, 47, 107, 132, 134, 135, 137, 138, 139, 144, 171], "v_a": 29, "roll": 29, "rider": 29, "bicycl": 29, "standard": [29, 47, 74, 77, 82, 86, 87, 89, 94, 95, 97, 99, 102, 103, 108, 114, 123, 132, 140, 141, 151], "graviti": 29, "81": [29, 73, 79, 84, 116, 117], "radian": 29, "equat": [29, 136, 139], "complex": [29, 44, 61, 80, 86, 87, 89, 91, 96, 104, 115, 135, 142, 145, 160, 161], "term": [29, 74, 81, 82, 101, 102, 110, 112, 115, 116, 119, 121, 125, 138, 156, 161], "within": [29, 34, 77, 95, 97, 100, 102, 107, 108, 110, 114, 116, 123, 125, 131, 132, 143, 146, 151, 152, 156, 163], "parenthesi": 29, "produc": [29, 162], "fight": [29, 39], "wind": 29, "resist": 29, "tire": 29, "floor": 29, "third": [29, 115, 131, 134], "hill": 29, "forward": [29, 105], "fourth": 29, "last": [29, 36, 47, 69, 85, 100, 104, 106, 131, 132, 137, 139, 141, 142, 145], "hi": [29, 106], "simplifi": [29, 47, 101, 102, 125, 139, 155, 157, 176], "beta_": 29, "closer": [29, 44, 100, 105, 132, 135, 139], "previous": [29, 47, 73, 85, 86, 93, 98, 101, 104, 109, 115, 116, 117, 118, 129, 131, 134, 135, 137, 139, 144, 147, 149, 151, 153, 154, 160, 161], "part": [29, 74, 85, 86, 97, 103, 108, 116, 127, 128, 132, 133, 135, 141, 156, 158, 162], "cube": 29, "multipli": [29, 146], "sine": 29, "angl": 29, "arc": 29, "tangent": 29, "arctan": 29, "ourself": [29, 141], "clip": 29, "brake": 29, "preprocess": [29, 36, 47, 60, 65, 66, 73, 74, 77, 85, 86, 87, 88, 89, 90, 91, 93, 95, 98, 99, 100, 107, 108, 110, 116, 119, 130, 131, 132, 134, 135, 137, 139, 140, 144, 147, 149, 150, 151, 153, 154, 155, 164, 176, 178, 180, 184], "linear_model": [29, 44, 47, 73, 77, 80, 82, 85, 86, 87, 89, 91, 95, 99, 100, 107, 108, 110, 127, 130, 131, 132, 134, 135, 136, 137, 139, 140, 141, 144, 146, 150, 156, 161, 163, 176, 178, 180], "ridgecv": [29, 47, 107, 108, 132], "shufflesplit": [29, 92, 93, 94, 96, 98, 99, 100, 101, 102, 103, 132], "n_split": [29, 92, 96, 97, 99, 100, 101, 102, 103, 108, 123, 132, 145, 151], "mae": [29, 122, 129, 133, 134, 146], "return_estim": [29, 47, 102, 107, 108, 114, 118, 123, 125, 132, 151, 184], "subsequ": [29, 71, 95, 103, 114, 117, 123, 124, 127, 129, 131, 132, 134, 180, 181], "Be": 
[29, 74, 85, 86, 87, 89, 103, 117, 140, 149, 150, 176], "awar": [29, 33, 34, 74, 81, 85, 86, 87, 89, 91, 103, 104, 117, 140, 149, 150, 176], "investig": [29, 82, 101, 108, 111, 114, 118, 120, 123, 125, 156], "consequ": [29, 100, 110, 116, 119], "003": [29, 77, 86, 90, 150, 151], "obtain": [29, 47, 60, 73, 77, 81, 85, 86, 87, 89, 91, 95, 97, 99, 101, 102, 105, 107, 108, 111, 117, 118, 120, 127, 131, 132, 135, 136, 140, 141, 144, 151, 152, 153, 154, 178], "closest": [29, 81, 131], "watt": [29, 105], "70": [29, 74, 91, 104, 118], "90": [29, 74, 80, 82, 92, 95, 100, 133], "neg_mean_absolute_error": [29, 92, 96, 102, 103, 115, 116, 117, 120, 122, 129, 134, 146], "request": [29, 85, 116, 132], "h": [29, 108], "beta": 29, "cadenc": [29, 105], "turn": [29, 73, 105, 129, 134], "pedal": [29, 105], "rotat": [29, 74, 95, 105], "per": [29, 74, 77, 79, 80, 81, 82, 84, 85, 86, 97, 102, 105, 107, 116, 119, 137, 141, 149, 150, 151, 153], "minut": [29, 33, 86, 105, 153], "beat": [29, 105], "1000": [29, 47, 86, 105, 117, 122, 123, 135, 141, 143, 144, 146, 155], "activ": [29, 69, 152, 178], "early_stop": [29, 117, 123], "40": [29, 74, 76, 80, 81, 82, 86, 101, 102, 104, 117, 120, 129, 133, 134, 138, 148, 149, 150, 153, 155, 157], "consider": [29, 117, 138], "test_scor": [29, 77, 85, 86, 88, 89, 90, 91, 92, 95, 97, 98, 99, 100, 101, 102, 103, 107, 115, 116, 118, 123, 125, 126, 127, 132, 134, 150, 151, 154, 156, 162], "dictionari": [29, 66, 77, 102], "made": [29, 35, 42, 74, 93, 95, 98, 101, 102, 106, 115, 141, 146, 150], "ignor": [29, 73, 85, 86, 88, 89, 90, 91, 92, 108, 155], "datafram": [29, 47, 60, 73, 74, 75, 76, 80, 81, 82, 85, 86, 91, 92, 94, 95, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 110, 115, 117, 118, 120, 121, 123, 125, 126, 131, 132, 137, 138, 139, 140, 141, 145, 146, 149, 151, 152, 153, 155, 156, 160, 161, 162, 163, 176, 184], "account": [29, 81, 92, 95, 104, 108, 131, 141, 176], "date": [29, 86, 95, 104, 105], "hesit": 29, "uniqu": [29, 60, 80, 85, 95, 100, 110, 116, 149, 153, 162], "dai": 29, "datetimeindex": [29, 105], "went": 29, "df": [29, 73, 152], "capac": [29, 96], "leaveonegroupout": [29, 101], "had": [29, 102, 108, 134, 137, 139], "indic": [29, 74, 86, 95, 102, 105, 107, 108, 110, 125, 128, 132, 133, 155], "differenti": [29, 71, 74, 151, 157], "integ": [29, 69, 77, 80, 85, 87, 89, 95, 104, 106, 108, 110, 153, 155, 184], "align": [29, 106, 131, 135, 137, 153], "pessimist": 29, "optimist": [29, 81, 95, 97, 102], "deviat": [29, 47, 77, 82, 95, 97, 102, 103, 108, 114, 123, 132, 151], "analys": [29, 80, 108], "reus": [29, 128, 133, 142, 145, 151], "train_indic": 29, "test_indic": 29, "data_linear_model_train": 29, "data_linear_model": 29, "iloc": [29, 83, 100, 105, 107, 108, 109, 110, 115, 149, 160], "data_linear_model_test": 29, "data_train": [29, 80, 82, 86, 101, 102, 110, 111, 112, 113, 115, 117, 120, 121, 122, 127, 130, 135, 139, 141, 144, 147, 148, 149, 151, 153, 154, 155, 156, 158, 159, 161, 162, 163], "data_test": [29, 80, 81, 82, 83, 86, 101, 102, 110, 111, 112, 113, 115, 117, 120, 121, 122, 127, 130, 135, 139, 141, 144, 147, 148, 149, 151, 153, 154, 155, 156, 158, 160, 161, 162, 163], "target_train": [29, 80, 82, 84, 86, 100, 101, 102, 110, 111, 112, 113, 115, 117, 120, 121, 122, 127, 130, 135, 139, 141, 144, 147, 148, 149, 151, 153, 154, 155, 156, 158, 159, 161, 162, 163], "target_test": [29, 80, 81, 82, 83, 84, 86, 100, 101, 102, 111, 112, 113, 117, 120, 121, 122, 127, 130, 135, 139, 141, 144, 147, 148, 149, 151, 153, 154, 155, 156, 158, 162], "scatter": [29, 76, 107, 
110, 112, 115, 121, 131, 140, 157, 159, 161, 162, 163], "catastroph": [29, 82], "portion": 29, "time_slic": 29, "slice": 29, "2020": [29, 105], "data_test_linear_model_subset": 29, "data_test_subset": [29, 127], "target_test_subset": 29, "pm": 29, "until": [29, 117, 141], "accur": [29, 43, 109, 132, 141], "motiv": [33, 118], "known": [33, 74, 106, 107, 132, 133, 139, 141, 144, 149, 156], "caveat": [33, 124, 127, 151, 164], "practic": [33, 58, 74, 77, 80, 81, 84, 91, 97, 99, 100, 102, 117, 119, 132, 140, 141, 142, 144, 145, 151, 152, 153], "magic": [34, 101], "tool": [34, 36, 77, 86, 110, 139, 151, 153], "margin": [34, 95, 108], "gain": [34, 36, 64, 74, 77, 79, 80, 81, 82, 84, 85, 86, 96, 111, 120, 124, 125, 127, 139, 144, 149, 150, 153], "tackl": [34, 58, 126], "selector": [34, 85, 86, 87, 88, 89, 90, 124, 127, 147, 149, 151, 153, 154], "recurs": 34, "main": [35, 47, 77, 82, 88, 90, 117, 125, 134, 137, 155, 164], "advantag": [35, 77, 125, 131], "fine": [35, 40, 86, 90, 151, 155, 181], "noisi": [35, 61, 103, 108, 110, 135, 140, 155, 160], "teach": [36, 50], "beginn": 36, "strong": [36, 108, 135, 155], "background": 36, "bring": 36, "vast": 36, "busi": 36, "intellig": 36, "industri": 36, "scientif": [36, 119], "discoveri": 36, "pillar": 36, "modern": 36, "field": [36, 74, 184], "central": 36, "easili": [36, 73, 74, 81, 82, 85, 131, 134, 160], "yet": [36, 73, 86, 88, 90, 132, 137], "dovetail": 36, "ecosystem": 36, "languag": 36, "step": [36, 47, 60, 74, 80, 81, 82, 86, 91, 117, 119, 125, 126, 129, 132, 134, 135, 137, 139, 140, 143, 146, 149, 150, 151, 153, 184], "lesson": [36, 150], "fundament": [36, 56, 98, 144], "stone": 36, "artifici": 36, "mine": 36, "cookbook": 36, "failur": [36, 56], "session": [36, 151, 153], "octob": 36, "2022": 36, "month": [36, 106, 141], "enrol": 36, "quizz": 36, "execut": [36, 129, 134, 147, 153, 154, 178], "platform": 36, "purpos": [36, 85, 86, 97, 99, 101, 102, 103, 112, 121, 124, 125, 127, 132, 135, 141, 150, 176], "educ": [36, 74, 80, 85, 86, 87, 88, 89, 90, 119, 139, 147, 149, 151, 153, 154], "prior": [36, 71], "matplotlib": [36, 71, 74, 82, 92, 95, 97, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 121, 123, 125, 126, 131, 132, 133, 135, 136, 138, 139, 140, 141, 144, 145, 156, 160, 161, 162, 163], "quick": [36, 71, 74, 104, 107, 116, 157], "publicli": 36, "cite": 36, "project": [36, 152], "zenodo": 36, "archiv": [36, 91, 95], "doi": 36, "5281": 36, "7220306": 36, "repositori": [36, 102, 107], "inria": 36, "publish": [36, 102, 107], "static": 36, "rocket": 36, "top": [36, 74, 141, 152, 153, 159, 160, 163], "interact": [36, 47, 74, 105, 129, 132, 134, 137, 152, 153, 155, 178], "cell": [36, 74, 78, 80, 82, 83, 86, 91, 97, 102, 109, 115, 116, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "binder": 36, "video": [36, 91, 171], "youtub": 36, "playlist": 36, "channel": 36, "www": [36, 74, 81, 102, 107], "pl2oka_2qdj": 36, "m44koooi7x8tu85wr4ez4f": 36, "version": [36, 82, 104, 116, 118, 131, 153, 162], "host": [36, 153], "fun": 36, "infer": [37, 105, 125, 138, 181], "importance_permut": 37, "correl": [37, 47, 74, 106, 108, 119, 125, 132, 137], "divid": [37, 82, 92, 95, 103, 133, 141, 151, 153], "receiv": [37, 141], "cardin": [37, 85, 108], "independ": [37, 85, 95, 97, 101, 110, 116, 117, 131, 132, 141, 144, 150, 153], "could": [37, 60, 73, 74, 80, 81, 82, 84, 85, 86, 92, 93, 95, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 116, 118, 119, 123, 125, 127, 128, 131, 132, 133, 136, 137, 138, 139, 140, 141, 142, 144, 145, 150, 
151, 152, 155, 160], "m4": [38, 41, 45, 164], "parametr": [39, 128, 133, 136, 138, 153, 156, 161, 163, 170], "implic": 39, "dimension": [39, 66, 71, 74, 95, 124, 127, 129, 131, 134, 135, 139, 149, 152, 156], "effect": [39, 44, 47, 59, 60, 82, 93, 97, 98, 107, 108, 110, 117, 118, 119, 130, 135, 161, 162, 164], "relationship": [39, 74, 80, 85, 86, 101, 108, 110, 131, 132, 137, 138, 141, 149, 151, 153, 157], "adjust": [40, 47, 58, 85, 129, 134, 149, 152, 156], "successfulli": [40, 99, 115, 131], "scale": [40, 44, 47, 60, 72, 73, 82, 85, 86, 93, 95, 98, 102, 110, 116, 131, 144, 149, 150, 153, 155, 176, 182, 184], "approxim": [40, 60, 77, 81, 82, 90, 92, 95, 99, 110, 122, 131, 132, 134, 135, 155, 184], "dynam": 40, "linearli": [40, 42, 44, 80, 131, 137, 140], "extra": [40, 73, 105, 135, 142, 145, 151], "beyond": [41, 74, 137, 139, 144, 146, 164], "Is": [42, 43, 79, 84, 104], "linearregress": [42, 44, 132, 134, 136, 137, 144, 146, 161, 163, 176], "coef_": [42, 43, 47, 107, 108, 126, 132, 135, 136, 137, 139, 150, 181], "intercept_": [42, 43, 136, 137], "boundari": [42, 130, 131, 139, 140, 156, 158, 160, 161, 162], "predict_proba": [42, 44, 82, 131, 135, 139, 141, 146, 149, 156, 162], "probabl": [42, 91, 102, 107, 108, 109, 131, 134, 135, 146, 156, 178], "extract": [43, 77, 95, 105, 108, 110, 132, 149, 153, 184], "straight": [43, 86, 131, 135, 137, 139, 140, 161], "float": [43, 105, 107, 116, 153], "express": [43, 47, 61, 73, 90, 93, 98, 102, 107, 108, 115, 129, 131, 134, 135, 137, 140, 152, 155, 160, 178], "ensur": [43, 95, 97, 100, 117, 131, 135], "extrapol": [43, 131, 159, 163, 170], "regardless": [43, 131, 132, 156], "inher": [43, 108], "robust": [44, 90, 102, 108, 118, 132], "outlier": [44, 92, 107, 132, 146, 160], "wide": [44, 80], "forc": [44, 82, 88, 90, 109, 116, 119, 132, 136], "penal": [44, 132], "scientist": [44, 97], "prepar": 44, "plan": [44, 162], "strength": [44, 47, 130, 132, 135, 153], "confid": [44, 131, 135, 139, 141], "ames_housing_no_miss": [47, 73, 104, 132, 176], "ames_h": [47, 73, 91, 104, 132, 143, 144, 146, 176], "salepric": [47, 73, 91, 104, 132, 143, 144, 146, 176], "numerical_featur": [47, 73, 104, 176], "lotfrontag": [47, 73, 91, 104, 132, 176], "lotarea": [47, 73, 91, 104, 132, 176], "masvnrarea": [47, 73, 104, 176], "bsmtfinsf1": [47, 73, 104, 176], "bsmtfinsf2": [47, 73, 104, 176], "bsmtunfsf": [47, 73, 104, 176], "totalbsmtsf": [47, 73, 104, 176], "1stflrsf": [47, 73, 104, 176], "2ndflrsf": [47, 73, 104, 176], "lowqualfinsf": [47, 73, 104, 176], "grlivarea": [47, 73, 104, 176], "bedroomabvgr": [47, 73, 104, 176], "kitchenabvgr": [47, 73, 104, 176], "totrmsabvgrd": [47, 73, 104, 176], "fireplac": [47, 73, 104, 176], "garagecar": [47, 73, 104, 176], "garagearea": [47, 73, 104, 176], "wooddecksf": [47, 73, 104, 176], "openporchsf": [47, 73, 104, 176], "enclosedporch": [47, 73, 104, 176], "3ssnporch": [47, 73, 104, 176], "screenporch": [47, 73, 91, 104, 176], "poolarea": [47, 73, 91, 104, 132, 176], "miscval": [47, 73, 91, 104, 176], "data_numer": [47, 77, 79, 80, 82, 84, 176], "penalti": [47, 108, 130, 135], "largest": [47, 107], "1e0": 47, "000": [47, 49, 73, 89, 92, 100, 102, 107, 115, 116, 124, 127, 139, 144, 154, 155], "1e5": 47, "larger": [47, 82, 85, 103, 117, 122, 134, 135, 137, 138, 147, 148, 151, 154, 155, 156, 173], "notat": 47, "box": [47, 97, 107, 114, 123, 125, 126, 132, 142, 145, 184], "garag": 47, "just": [47, 89, 101, 102, 103, 105, 108, 109, 110, 113, 117, 119, 122, 132, 137, 139, 141], "logspac": [47, 93, 98, 107, 132, 148, 155], "num": [47, 74, 
80, 85, 86, 87, 88, 89, 90, 91, 92, 93, 96, 98, 99, 107, 110, 115, 119, 121, 128, 132, 133, 136, 138, 147, 148, 149, 151, 153, 154, 155], "101": [47, 184], "alpha_": [47, 132], "fall": [47, 102, 144, 153], "preprocessor": [47, 86, 87, 88, 89, 90, 91, 95, 104, 119, 147, 149, 150, 151, 153, 154, 176, 184], "deal": [47, 81, 85, 86, 90, 100, 101, 106, 124, 127, 131, 134, 137, 139, 146, 157, 176], "onehotencod": [47, 72, 73, 85, 86, 87, 88, 89, 90, 91, 132], "categorical_featur": [47, 91, 104], "yield": [47, 90, 117, 136], "long": [47, 90, 104, 105, 107, 129, 134, 150], "splinetransform": [47, 131, 137], "influenc": [47, 96, 103, 108, 132, 137, 144, 153, 169], "nystroem": [47, 129, 131, 134, 135, 137], "kernel": [47, 93, 98, 129, 131, 134, 135, 137, 140], "poli": [47, 129, 131, 134, 137], "n_compon": [47, 129, 131, 134, 135, 137], "300": [47, 82, 105, 110, 115, 117, 121, 128, 133, 136, 138, 160], "studi": [49, 60, 74, 93, 95, 98, 102, 140, 184], "apart": [49, 108], "estat": [49, 102], "thousand": [49, 102, 107, 108], "entertain": 49, "spaciou": 49, "updat": [49, 91, 184], "bedroom": [49, 102, 107, 108], "bathroom": 49, "lakeview": 49, "97630": 49, "1st": [49, 74, 85, 134], "nightlif": 49, "privat": [49, 74, 80, 85, 86, 149, 153], "backyard": 49, "buyer": 49, "market": 49, "kind": [49, 74, 86, 90, 108, 122, 137, 144, 171, 184], "sub": [50, 97, 98, 156], "vocabulari": 50, "varianc": [51, 56, 58, 95, 103, 108, 144, 164], "low": [53, 64, 74, 76, 80, 84, 95, 105, 107, 108, 110, 117, 131, 135, 141, 156, 160, 178], "littl": [53, 80, 95, 100, 152], "reduct": [53, 108], "steadi": 53, "label": [53, 58, 69, 76, 80, 85, 86, 89, 99, 100, 101, 109, 110, 115, 121, 133, 138, 139, 141, 142, 145, 161, 163], "slow": [53, 90, 117], "tradeoff": [53, 58, 103], "m2": [55, 57, 59, 164], "trade": [56, 58, 77, 131, 135, 160, 164, 169, 171], "off": [56, 58, 74, 77, 92, 104, 131, 135, 141, 160, 164, 169, 171], "character": [56, 108, 141], "why": [56, 60, 66, 74, 86, 94, 99, 105, 108, 135, 146, 178], "aris": [56, 74], "Then": [56, 71, 77, 82, 86, 92, 102, 110, 114, 118, 123, 124, 125, 126, 127, 131, 135, 137, 139, 142, 143, 144, 145, 146, 151, 153, 156], "quantifi": [56, 74, 103, 108, 128, 133, 184], "contrast": [56, 74, 85, 102, 115, 138, 161], "importantli": 56, "emphas": [56, 118], "happen": [58, 66, 74, 87, 89, 119, 130, 134, 156], "suffer": [58, 82, 106], "lack": 58, "captur": [58, 74, 103, 108, 131, 132], "neither": [58, 100], "nor": 58, "still": [58, 77, 82, 85, 86, 90, 103, 107, 108, 109, 111, 120, 131, 132, 135, 137, 144, 152, 153, 156, 160], "variat": [58, 77, 102, 103, 108, 110, 132, 144], "fulli": [58, 85, 102, 113, 117, 122], "determin": [58, 85, 93, 98, 141, 144], "irreduc": 58, "decompos": 58, "chapter": [58, 184], "diagnos": 58, "blood_transfus": [60, 93, 98, 106, 141, 142, 145], "propos": [60, 184], "multiclass": [60, 156, 162, 184], "proport": [60, 96, 99, 106, 135, 141, 144, 184], "twice": [60, 141, 184], "value_count": [60, 74, 75, 76, 84, 85, 100, 104, 106, 141, 184], "dummyclassifi": [60, 79, 84, 89, 94, 99, 141], "most_frequ": [60, 84, 89, 99, 104, 141], "75": [60, 80, 82, 85, 91, 92, 99, 101, 104, 107], "balanced_accuraci": [60, 141, 142, 145, 184], "remaind": [60, 85, 86, 88, 90, 119, 147, 149, 151, 153, 154], "add": [60, 105, 108, 115, 116, 125, 129, 132, 134, 137, 138, 141, 142, 145, 159, 163], "faster": [60, 66, 74, 82, 117, 147, 154], "distanc": [60, 82, 135, 155], "normal": [60, 82, 91, 95, 104, 105, 106, 107, 108, 116, 139, 141, 144, 148, 149, 155, 156, 162], "irrelev": 60, "make_pipelin": 
[60, 66, 77, 82, 85, 86, 87, 88, 89, 90, 95, 98, 99, 100, 104, 107, 108, 110, 116, 119, 124, 125, 126, 127, 129, 130, 131, 132, 134, 135, 137, 139, 140, 155], "get_param": [60, 93, 98, 111, 120, 150, 180, 184], "n_neighbor": [60, 78, 83, 148, 155, 181, 184], "clearli": [60, 84, 96, 98], "param_rang": [60, 98, 103, 113, 122, 129, 134], "affirm": 60, "highli": [61, 74, 131], "much": [61, 74, 89, 90, 92, 99, 102, 103, 108, 116, 117, 119, 120, 125, 131, 134, 178], "m1": [63, 65, 68, 164], "adult_censu": [64, 74, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 94, 99, 119, 147, 149, 150, 151, 152, 153, 154], "comma": [64, 74, 104, 105, 106], "file": [64, 74, 80, 81, 104, 105, 106, 142, 145, 184], "alreadi": [64, 74, 80, 81, 82, 98, 108, 117, 119, 131, 132, 148, 153, 155], "packag": [64, 116, 132, 142, 145], "survei": 64, "incom": [64, 74, 80, 84, 92, 102, 107, 108], "seaborn": [64, 74, 75, 76, 82, 105, 106, 107, 108, 109, 110, 115, 121, 133, 135, 136, 137, 138, 139, 140, 149, 152, 156, 157, 160, 161, 162, 163], "visual": [64, 68, 73, 82, 85, 96, 98, 101, 102, 105, 110, 115, 128, 131, 132, 133, 139, 141, 144, 149, 152, 156, 157, 161, 162, 164, 181], "scipi": [64, 117, 120, 153], "organ": [64, 162], "five": [66, 81, 86, 102], "overlap": [66, 76, 77, 110, 146, 153], "lie": 66, "fewer": [66, 144], "jupyt": [68, 80, 82, 83, 86, 97, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163, 164], "ordin": [69, 73, 86, 89, 116], "string": [69, 80, 85, 86, 87, 89, 102, 104, 142, 145, 146, 151, 184], "meaning": [69, 85, 90, 124, 126, 127, 138, 144, 149], "hot": [69, 85, 86, 131], "represent": [69, 80, 82, 83, 85, 86, 88, 90, 91, 96, 97, 102, 106, 107, 109, 116, 119, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 160, 162, 163], "compani": [69, 101], "sector": 69, "construct": [69, 110, 119, 153], "retail": 69, "energi": [69, 101, 105], "insur": 69, "phone": 69, "sale": [69, 73, 86], "depart": 69, "employe": 69, "profit": 69, "quarter": [69, 101], "head": [69, 73, 74, 76, 80, 81, 85, 86, 102, 104, 105, 106, 107, 108, 129, 134, 138, 149, 150, 153, 157], "tabl": [71, 74, 117, 119, 149], "progress": [71, 148, 155], "attent": [71, 109], "extend": [71, 80], "mix": [71, 72, 74, 86, 95, 160], "unknown": [71, 85, 87, 89, 119, 144], "notabl": [72, 141], "ordinalencod": [72, 85, 86, 87, 88, 89, 90, 119, 147, 149, 151, 153, 154, 176], "200_000": [73, 91], "astyp": [73, 91, 104, 131, 148, 153, 155, 160], "int": [73, 77, 91, 109, 153], "did": [73, 74, 82, 85, 94, 99, 101, 102, 104, 105, 117, 125, 127, 131, 135, 137, 141, 142, 145, 149, 150, 151, 153, 155, 158, 162, 181], "convert": [73, 102, 109, 110, 121, 155], "info": [73, 81, 95, 104, 105, 106, 107], "examin": [73, 132], "select_dtyp": [73, 104, 143, 144, 146], "make_column_selector": [73, 85, 86, 87, 88, 89, 90, 119, 147, 149, 151, 153, 154], "shown": [73, 74, 102, 115, 131, 135, 139, 144, 151, 158, 162], "among": [73, 85, 86, 100, 125, 155], "quantit": [73, 80, 115, 128, 133, 136], "exclud": [73, 74, 125], "overallqu": [73, 104], "overallcond": [73, 104], "yearbuilt": [73, 104, 132], "sole": [73, 127, 144], "treat": [73, 86, 91, 132], "issu": [73, 74, 85, 86, 100, 101, 102, 105, 106, 116, 131, 132, 137], "rare": [73, 74, 85, 86, 104, 119, 132], "handle_unknown": [73, 85, 86, 87, 88, 89, 90, 91, 119, 147, 149, 151, 153, 154], "mere": 73, "chanc": [73, 99, 102, 110, 124, 127, 141, 162], "partit": [73, 77, 142, 145, 156, 158, 160, 161, 162], "classifact": 73, "li": [73, 85, 110], "place": [74, 127, 132, 137], "workflow": 74, "1994": [74, 95], 
"download": [74, 102, 107], "openml": [74, 81], "webpag": 74, "1590": [74, 81], "manipul": [74, 78, 83, 93, 98, 102], "tutori": 74, "50k": [74, 79, 80, 81, 82, 83, 84, 86, 149, 153], "year": [74, 80, 104, 132, 149], "heterogen": [74, 80, 86, 104, 132], "employ": 74, "covari": 74, "workclass": [74, 80, 85, 86, 149, 151, 153], "marit": [74, 80, 85, 86, 149, 151, 153], "occup": [74, 80, 85, 86, 107, 108, 149, 151, 153], "race": [74, 80, 85, 86, 105, 149, 151, 153], "sex": [74, 80, 85, 86, 149, 151, 153], "loss": [74, 77, 79, 80, 81, 82, 84, 85, 86, 144, 146, 149, 150, 153], "week": [74, 77, 79, 80, 81, 82, 84, 85, 86, 149, 150, 153], "countri": [74, 80, 85, 86, 149, 151, 153], "11th": [74, 80, 85, 149, 153], "marri": [74, 80, 85, 86, 149, 153], "op": [74, 80, 85, 149, 153], "inspct": [74, 80, 85, 149, 153], "own": [74, 80, 85, 86, 117, 137, 149, 153], "child": [74, 80, 85, 86, 149, 153], "male": [74, 80, 85, 86, 149, 153], "lt": [74, 80, 81, 153], "hs": [74, 80, 85, 86, 149, 153], "grad": [74, 80, 85, 86, 149, 153], "civ": [74, 80, 85, 86, 149, 153], "spous": [74, 80, 85, 86, 149, 153], "farm": [74, 80, 85, 149, 153], "fish": [74, 80, 85, 149, 153], "husband": [74, 80, 85, 86, 149, 153], "white": [74, 80, 85, 86, 131, 135, 139, 149, 153, 162], "local": [74, 80, 85, 91, 106, 137, 149, 153], "gov": [74, 80, 85, 149, 153], "assoc": [74, 80, 85, 149, 153], "acdm": [74, 80, 85, 149, 153], "protect": [74, 80, 85, 149, 153], "serv": [74, 80, 85, 88, 90, 105, 141, 149, 153], "gt": [74, 80, 153], "colleg": [74, 80, 85, 149, 153], "7688": [74, 80, 149, 150, 153], "femal": [74, 80, 85, 86, 149, 153], "30": [74, 80, 85, 86, 92, 93, 95, 96, 98, 103, 104, 106, 107, 110, 116, 117, 118, 120, 134, 147, 149, 150, 151, 153, 154, 160, 162, 163], "revenu": [74, 84, 85, 132], "target_column": [74, 109, 130, 135, 139, 156, 157, 158, 162], "37155": [74, 84], "11687": [74, 84], "dtype": [74, 76, 80, 81, 82, 83, 84, 85, 86, 87, 89, 92, 99, 102, 104, 105, 106, 107, 109, 132, 139, 141, 149, 153, 155, 156], "int64": [74, 76, 80, 84, 85, 104, 106, 155], "imbal": [74, 106, 135], "special": [74, 105], "healthi": 74, "ill": [74, 132], "numerical_column": [74, 77, 79, 80, 82, 84, 86, 88, 90, 150], "categorical_column": [74, 85, 86, 87, 88, 89, 90, 149, 151, 153], "all_column": 74, "print": [74, 77, 80, 81, 82, 83, 84, 85, 86, 88, 89, 90, 91, 95, 97, 98, 100, 101, 102, 107, 108, 109, 110, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 125, 127, 128, 132, 133, 134, 136, 137, 138, 139, 141, 142, 144, 145, 146, 148, 149, 150, 151, 153, 154, 155, 156, 162], "48842": [74, 80, 85, 149, 153], "subtract": [74, 82], "mayb": [74, 95, 103, 107], "peculiar": [74, 104], "malfunct": 74, "afterward": [74, 81, 108], "cap": [74, 97, 107, 123, 125, 126, 132, 145], "hist": [74, 76, 92, 95, 99, 102, 103, 104, 105, 106, 107, 139], "figsiz": [74, 76, 104, 105, 106, 107, 108, 131, 132, 141, 144, 156, 160, 161, 162], "func": [74, 102, 107, 142, 145], "assign": [74, 82, 99, 104, 109, 115, 131, 137, 139, 143, 146], "underscor": [74, 82, 150], "garbag": 74, "comment": 74, "retir": 74, "filter": [74, 85, 105, 149], "peak": 74, "ll": 74, "32650": 74, "16192": 74, "disproport": 74, "fair": [74, 97, 118], "deploi": [74, 86, 102, 151, 161], "mitig": [74, 117], "deploy": [74, 151], "compon": [74, 129, 134, 137, 156, 180], "unexpect": [74, 100], "gender": 74, "15784": 74, "10878": 74, "bachelor": [74, 85, 86], "8025": 74, "master": [74, 85], "2657": 74, "voc": [74, 85], "2061": 74, "1812": 74, "1601": 74, "10th": [74, 85], "1389": 74, "7th": [74, 85], 
"8th": [74, 85], "955": 74, "prof": [74, 85, 86], "school": [74, 85, 95], "834": 74, "9th": [74, 85], "756": 74, "12th": [74, 85], "657": 74, "doctor": [74, 85], "594": 74, "5th": [74, 85, 134], "6th": [74, 85], "509": 74, "4th": [74, 85], "247": 74, "preschool": [74, 85], "crosstab": 74, "entri": [74, 77, 102, 104, 105, 106, 107, 114, 123, 128, 133, 139], "lose": 74, "redund": [74, 102, 125, 129, 134, 137], "upcom": [74, 141, 150], "latter": [74, 82, 97, 125, 141], "pairplot": [74, 75, 76, 105, 106, 107, 108, 157], "diagon": [74, 106, 131, 141, 144, 149, 157], "reveal": [74, 102], "sn": [74, 82, 105, 106, 107, 108, 109, 110, 115, 121, 133, 135, 136, 137, 138, 139, 140, 149, 152, 156, 157, 160, 161, 162, 163], "readabl": [74, 149, 152, 178], "n_samples_to_plot": 74, "5000": [74, 106, 125, 126, 133, 138], "var": 74, "hue": [74, 76, 105, 106, 107, 109, 135, 139, 140, 152, 156, 157, 160, 162], "plot_kw": [74, 108], "height": [74, 76, 129, 134], "diag_kind": [74, 108], "diag_kw": 74, "written": [74, 95, 107], "scatterplot": [74, 82, 107, 109, 110, 115, 121, 133, 135, 136, 137, 138, 139, 140, 152, 156, 157, 160, 161, 162, 163], "region": [74, 102, 103, 131, 135, 139, 149, 153], "pyplot": [74, 82, 92, 95, 97, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 121, 123, 125, 126, 131, 132, 133, 135, 136, 139, 140, 141, 144, 145, 156, 160, 161, 162, 163], "plt": [74, 82, 92, 95, 97, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 121, 123, 125, 126, 131, 132, 133, 135, 136, 139, 140, 141, 144, 145, 156, 160, 161, 162, 163], "ax": [74, 82, 104, 105, 108, 109, 119, 131, 132, 133, 135, 137, 138, 140, 141, 144, 149, 152, 156, 160, 161, 162], "age_limit": 74, "axvlin": [74, 107, 108], "ymin": [74, 107], "ymax": [74, 107], "linestyl": [74, 107, 110, 121, 131, 135, 141, 161], "hours_per_week_limit": 74, "axhlin": 74, "xmin": 74, "xmax": 74, "annot": [74, 149], "fontsiz": 74, "AND": 74, "seem": [74, 77, 80, 83, 84, 88, 90, 98, 101, 107, 108, 117, 119, 122, 132, 133, 144, 150, 156], "complic": [74, 107, 125], "similarli": [74, 93, 94, 96, 98, 99, 101, 112, 117, 119, 121, 132, 139, 141, 153, 163], "somewhat": [74, 110], "arbitrari": [74, 85, 86, 87, 89, 90, 91, 108, 138], "straightforward": [74, 110], "obviou": [74, 95, 105, 140], "highlight": [74, 81, 85, 95, 97, 101, 109, 115, 124, 125, 127, 132, 141, 149, 151, 156, 161], "imagin": [75, 76, 108, 140], "feel": [75, 76, 85, 114, 117, 123, 132, 146, 184], "penguins_classif": [75, 76, 109, 130, 135, 139, 156, 157, 158, 160, 162], "There": [76, 82, 102, 106], "adeli": [76, 130, 135, 139, 156, 157, 162], "151": [76, 85, 152, 153], "gentoo": [76, 156, 157, 162], "chinstrap": [76, 130, 135, 139, 156, 157], "68": [76, 91, 104, 118, 153, 155], "pairplot_figur": [76, 157], "prioriti": 76, "tweak": 76, "subfigur": 76, "perfectli": [76, 90, 106, 115, 117, 137, 144, 160], "downsid": [77, 134], "amount": [77, 95, 102, 106, 119, 132], "smaller": [77, 102, 116, 117, 122, 134, 135, 146, 173], "repetit": [77, 95, 149], "aggreg": [77, 107, 114, 123, 132, 141], "clone": [77, 110], "earlier": [77, 86, 107, 118, 141, 156], "computation": [77, 116, 137, 149, 178], "intens": [77, 99, 178], "cv_result": [77, 85, 86, 88, 89, 90, 91, 98, 102, 103, 107, 117, 118, 120, 125, 126, 132, 134, 146, 149, 150, 151, 152, 153, 155, 178], "cpu": [77, 86, 103, 115, 118, 149, 153], "894": 77, "ms": [77, 86, 103, 118, 149], "sy": [77, 86, 103, 118, 149, 153], "299": [77, 134], "total": [77, 86, 95, 101, 102, 103, 104, 105, 106, 107, 108, 118, 134, 141, 149, 153, 155], 
"wall": [77, 86, 103, 118, 149, 153], "680": 77, "fit_tim": [77, 85, 86, 98, 102, 115, 116, 125, 145, 151], "10083437": 77, "09485173": 77, "09516358": 77, "09389663": 77, "09959269": 77, "score_tim": [77, 85, 86, 98, 102, 115, 116, 125, 145, 151], "02249742": 77, "02220821": 77, "02236342": 77, "02161741": 77, "02207828": 77, "79557785": 77, "80049135": 77, "79965192": 77, "79873055": 77, "80436118": 77, "iii": 77, "distinct": [77, 80, 97, 100], "match": [77, 78, 83, 97, 139], "stabil": [77, 108], "discard": [77, 102, 107, 109, 156], "round": [77, 94, 99, 109], "themselv": 77, "3f": [77, 80, 81, 82, 84, 85, 86, 88, 89, 90, 91, 95, 97, 98, 100, 107, 115, 116, 118, 119, 121, 123, 127, 128, 133, 134, 139, 141, 144, 145, 146, 150, 151, 154, 156], "std": [77, 80, 82, 85, 86, 88, 89, 90, 91, 92, 95, 97, 98, 100, 101, 102, 107, 108, 110, 115, 116, 118, 119, 123, 129, 132, 134, 145, 146, 150, 151], "800": [77, 150], "crucial": [77, 108, 117], "bar": [77, 100, 104, 108, 132, 139, 156], "decim": 77, "trustworthi": [77, 97], "compat": [77, 151], "familiar": [78, 83, 107, 114, 123, 143, 146], "conveni": [78, 83, 132], "directli": [78, 81, 83, 86, 108, 115, 137, 146, 156], "insid": [78, 83, 85, 104, 142, 145, 176], "pager": [78, 83], "roughli": [79, 84, 110, 122, 138], "simplest": [79, 84], "irrespect": [79, 84, 99, 131, 178, 184], "82": [79, 81, 84, 101, 116, 117], "train_test_split": [79, 80, 82, 84, 86, 101, 102, 108, 111, 112, 113, 117, 120, 121, 122, 124, 127, 130, 135, 139, 141, 144, 147, 148, 149, 151, 153, 154, 155, 156, 158, 162], "behavior": [79, 84, 132, 135, 140, 161], "oversimplifi": 80, "exclus": [80, 131], "helper": [80, 82, 85, 86, 128, 133, 137, 142, 145], "duplic": [80, 85, 86, 110, 149, 153], "48837": [80, 85, 149, 153], "48838": [80, 85, 149, 153], "48839": [80, 85, 149, 153], "48840": [80, 85, 149, 153], "48841": [80, 85, 149, 153], "explicit": [80, 81, 102, 137, 143, 146], "At": [80, 97, 100, 102, 110, 141, 162], "moreov": 80, "o": [80, 162], "self": [80, 81, 85, 97, 142, 145, 153], "explanatori": [80, 101], "000000": [80, 82, 92, 105, 107], "643585": 80, "710510": 80, "min": [80, 82, 92, 105, 107, 110, 128, 132, 133, 136, 138, 160, 161, 163, 173], "48": [80, 81, 82, 104, 117, 118, 120, 152], "max": [80, 82, 92, 105, 107, 110, 128, 132, 133, 136, 137, 138, 154, 160, 161, 163, 173, 178], "float64": [80, 92, 99, 102, 104, 105, 106, 107, 139], "unusu": 80, "memori": [80, 101, 104, 105, 106, 107, 134, 137, 150, 151], "test_siz": [80, 92, 96, 99, 102, 103, 111, 113, 120, 122, 135, 141, 151], "determinist": [80, 99, 137], "specifi": [80, 85, 86, 104, 105, 119, 138, 151, 153, 181], "remain": [80, 81, 100, 101, 108, 125, 132, 140, 153], "quickli": [80, 104, 107, 108, 117, 137, 139, 141, 152, 153, 162], "got": [80, 114, 123, 140, 163], "1f": [80, 110, 146], "12211": 80, "36631": [80, 82], "cours": [80, 85, 104, 125, 128, 133, 137, 142, 145, 158, 162], "environ": [80, 82, 83, 86, 91, 97, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "pleas": [80, 82, 83, 86, 91, 94, 97, 99, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163, 176, 184], "rerun": [80, 82, 83, 86, 91, 97, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "unabl": [80, 82, 83, 86, 91, 97, 100, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "render": [80, 82, 83, 86, 91, 97, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 162, 163], "nbviewer": [80, 82, 83, 86, 91, 97, 102, 109, 131, 132, 136, 137, 140, 141, 149, 151, 153, 156, 
162, 163], "logisticregressionlogisticregress": [80, 82, 86, 91, 131, 141, 156], "807": [80, 82], "fraction": [80, 103, 117, 141, 144], "correctli": [80, 90, 91, 100, 109, 141], "visit": 81, "glossari": [81, 164], "fed": 81, "41": [81, 102, 104, 107, 108, 120], "92": [81, 85, 92, 144, 152, 184], "3273": 81, "side": [81, 92, 119, 122, 151, 161], "39068": 81, "39069": 81, "39070": 81, "39071": 81, "39072": 81, "39073": 81, "linger": [81, 85, 95, 102], "denomin": 81, "major": [81, 84, 99, 135], "seldom": 81, "target_predict": [81, 101, 102, 109, 120, 121, 128, 133, 137, 141, 144, 160, 161], "sake": [81, 91, 94, 99, 117, 137, 140, 151, 156, 176], "simplic": [81, 91, 94, 99, 137, 140, 156, 176], "agre": [81, 122, 139], "bool": [81, 104, 141], "mistak": [81, 109, 127, 135, 141, 160], "success": [81, 82, 115, 162], "8242776341719346": 81, "harder": [81, 125], "conclud": [81, 87, 89, 99, 110, 135, 156], "ones": [81, 85, 135, 149, 150, 153], "adult_census_test": [81, 83], "9769": 81, "manual": [81, 85, 86, 110, 115, 124, 127, 137, 141, 150, 156, 164, 181], "model_nam": [81, 82, 150], "__class__": [81, 82], "__name__": [81, 82], "804": 81, "underli": [81, 82, 85, 95, 109, 115, 141], "wrongli": [81, 86], "held": [81, 92, 113, 117, 122, 151], "642352": 82, "1087": 82, "077721": 82, "665311": 82, "431247": 82, "725748": 82, "7522": 82, "692939": 82, "407": 82, "110175": 82, "423952": 82, "99999": 82, "4356": 82, "span": [82, 132], "assumpt": [82, 85, 89, 101, 126, 140, 160, 161], "address": 82, "pair": [82, 105, 106, 107, 128, 133, 152], "solver": [82, 132, 146], "descent": [82, 105, 146], "scaler": [82, 91, 150, 155, 178, 180, 184], "standardscalerstandardscal": [82, 86, 91, 131, 140], "wherea": [82, 117, 130, 134, 135, 146, 148, 155], "fashion": [82, 162], "mean_": 82, "64235211": 82, "07772106": 82, "6653108": 82, "43124676": 82, "scale_": 82, "72556083": 82, "59025606": 82, "10461772": 82, "42378265": 82, "data_train_sc": 82, "17177061": 82, "14450843": 82, "71188483": 82, "28845333": 82, "02605707": 82, "22025127": 82, "27618374": 82, "33822677": 82, "77019645": 82, "77536738": 82, "03471139": 82, "53605445": 82, "48319243": 82, "69090725": 82, "perspect": [82, 88, 90, 103], "predefin": 82, "shorthand": 82, "preserv": [82, 100, 140, 149], "set_output": [82, 85, 129, 132, 134], "behaviour": [82, 101, 117, 126], "663100e": 82, "273364e": 82, "530310e": 82, "840667e": 82, "844684e": 82, "000014e": 82, "576792e": 82, "445084e": 82, "202513e": 82, "173852e": 82, "753674e": 82, "471139e": 82, "196565e": 82, "817680e": 82, "677425e": 82, "741752e": 82, "314865e": 82, "047970e": 82, "714245e": 82, "jointplot": 82, "clearer": 82, "num_points_to_plot": 82, "marginal_kw": 82, "dict": [82, 131, 150], "suptitl": [82, 131, 141, 144, 162], "nbefor": 82, "nafter": 82, "x27": [82, 86, 91, 97, 109, 131, 132, 137, 140, 149, 151, 153], "pipelinepipelin": [82, 86, 91, 131, 132, 137, 140, 149, 151, 153], "named_step": 82, "decision_funct": 82, "elapsed_tim": [82, 88, 90], "predicted_target": 82, "n_iter_": [82, 123], "093": 82, "174": 82, "scenario": [82, 86, 132, 139, 144], "kneighborsclassifierkneighborsclassifi": 83, "first_data_valu": 83, "first_predict": 83, "first_target_valu": 83, "number_of_correct_predict": 83, "number_of_predict": 83, "len": [83, 102, 108, 109, 116, 117, 131, 162], "8290379545978042": 83, "8177909714402702": 83, "data_numeric_train": 84, "data_numeric_test": 84, "class_to_predict": 84, "high_revenue_clf": 84, "234": 84, "low_revenue_clf": 84, "766": 84, "7607182343065395": 84, "appear": [84, 
110], "most_freq_revenue_clf": 84, "frequent": [84, 89, 94, 99, 104, 153, 173], "reassur": [84, 122, 151], "arithmet": 85, "instruct": 85, "taken": [85, 108, 119, 138], "symbol": [85, 101], "sort_index": 85, "857": [85, 154], "cambodia": 85, "canada": 85, "182": 85, "china": 85, "122": [85, 102, 107, 108], "columbia": 85, "85": [85, 91, 102, 107, 108, 178], "cuba": 85, "138": 85, "dominican": 85, "republ": 85, "103": [85, 105, 146, 153, 156], "ecuador": 85, "el": 85, "salvador": 85, "155": [85, 153], "england": 85, "franc": 85, "germani": 85, "206": [85, 161], "greec": 85, "guatemala": 85, "haiti": 85, "holand": 85, "netherland": 85, "hondura": 85, "hong": 85, "hungari": 85, "india": 85, "iran": 85, "ireland": 85, "itali": 85, "105": [85, 105], "jamaica": 85, "106": [85, 105], "japan": 85, "lao": 85, "mexico": 85, "951": 85, "nicaragua": 85, "outli": 85, "guam": 85, "usvi": 85, "peru": 85, "philippin": 85, "295": 85, "poland": 85, "portug": 85, "67": [85, 104, 105, 108], "puerto": 85, "rico": 85, "184": [85, 142, 145], "scotland": 85, "south": [85, 108], "115": [85, 102], "taiwan": 85, "thailand": 85, "trinadad": 85, "tobago": 85, "43832": 85, "vietnam": 85, "86": [85, 91, 102, 107, 108, 152], "yugoslavia": 85, "recogn": [85, 95], "categorical_columns_selector": [85, 86, 87, 88, 89, 90, 149, 151, 153], "dtype_includ": [85, 86, 87, 88, 89, 90, 119, 147, 149, 151, 153, 154], "unwant": [85, 107], "data_categor": [85, 87, 89], "education_column": 85, "education_encod": 85, "map": [85, 89, 131, 132, 135, 139, 155], "categories_": 85, "data_encod": 85, "downstream": [85, 134], "lexicograph": 85, "meaningless": [85, 110, 161], "l": [85, 95], "xl": 85, "alphabet": 85, "constructor": 85, "explicitli": [85, 142, 145, 153, 155], "mislead": [85, 90, 108], "altern": [85, 129, 134, 137, 139, 153, 160], "sparse_output": [85, 88, 90], "education_": 85, "spars": [85, 88, 90, 102, 107], "effici": [85, 109, 116, 119, 132, 137], "won": [85, 108], "becam": 85, "workclass_": 85, "feder": 85, "emp": 85, "inc": 85, "country_": 85, "amp": 85, "102": [85, 105], "violat": [85, 101], "realli": [85, 95, 101, 103, 105, 109, 135, 141], "misord": 85, "misus": 85, "ineffici": 85, "integr": [85, 116, 132], "abl": [85, 86, 98, 105, 110, 111, 115, 116, 119, 120, 140, 141, 142, 145, 152, 156, 159, 161, 163, 170, 181, 184], "bypass": 85, "keyword": 85, "min_frequ": 85, "collaps": 85, "rarest": 85, "enabl": [85, 184], "infrequent_if_exist": 85, "sandbox": [85, 178], "use_encoded_valu": [85, 86, 87, 88, 89, 90, 119, 147, 149, 151, 153, 154], "unknown_valu": [85, 86, 87, 88, 89, 90, 119, 147, 149, 151, 153, 154], "silenc": 85, "convergencewarn": 85, "87112451": 85, "77881455": 85, "79158235": 85, "78124523": 85, "78233767": 85, "03798509": 85, "03630471": 85, "03779101": 85, "03987718": 85, "03801203": 85, "83222438": 85, "83560242": 85, "82872645": 85, "83312858": 85, "83466421": 85, "833": [85, 89], "002": [85, 88, 89, 90, 150], "decoupl": [86, 141], "numerical_columns_selector": [86, 88, 90], "dtype_exclud": [86, 88, 90], "properli": [86, 97, 105, 113, 122, 140, 160], "format": [86, 101, 105, 133, 138], "elaps": [86, 125], "introspect": [86, 184], "send": 86, "columntransfom": 86, "categorical_preprocessor": [86, 88, 90, 147, 149, 151, 153, 154], "numerical_preprocessor": 86, "associ": [86, 95, 105, 107, 132, 141, 144, 149], "standard_scal": 86, "concaten": [86, 94, 95, 99, 100, 131, 137, 140, 151, 160], "columntransformercolumntransform": [86, 91, 149, 151, 153], "onehotencoderonehotencod": [86, 91], "prefer": 86, "raw": [86, 
132, 135, 144, 184], "7762": 86, "56": [86, 104, 105, 120, 122, 145], "divorc": 86, "unmarri": 86, "23881": 86, "transport": 86, "30507": 86, "specialti": 86, "14344": 86, "28911": 86, "19484": 86, "wife": 86, "8575055278028008": 86, "usabl": 86, "0058949": 86, "03596926": 86, "93540072": 86, "00388241": 86, "9987452": 86, "04645991": 86, "04427958": 86, "04497743": 86, "04708195": 86, "04461789": 86, "8512642": 86, "8498311": 86, "84756347": 86, "8523751": 86, "85524161": 86, "851": [86, 119], "compound": 86, "isol": [86, 102, 117], "nice": [86, 109, 137], "fast": [86, 90, 115], "passthrough": [86, 88, 90, 119, 147, 149, 151, 153, 154], "977": 86, "8808451396282041": 86, "significantli": [86, 103, 108], "whenev": [86, 105], "popular": [86, 119], "datasci": 86, "practition": 86, "outperform": 86, "assembl": [87, 89, 115, 117], "rais": [87, 89, 109, 110, 119, 121, 142, 145], "warn": [87, 89, 109, 110, 116, 121, 132, 142, 145], "nan": [87, 89, 91, 100, 104, 142, 145], "traceback": [87, 89, 142, 145], "error_scor": [87, 89], "awai": [87, 89, 90, 110, 135, 144, 164], "handi": [87, 89, 102, 105, 142, 145], "empir": [88, 90, 102], "util": [88, 90, 91, 99, 104, 142, 145], "874": [88, 90], "131": 88, "detriment": [88, 90, 117, 119, 132], "dens": [88, 90], "workaround": [88, 90], "755": 89, "rel": [89, 94, 99, 102, 118, 126, 137, 141, 144], "anyth": [89, 101, 124, 127, 141], "constantli": [89, 94, 99], "761": 89, "messag": [89, 90], "873": 90, "194": 90, "217": 90, "signific": [90, 108, 117, 125, 131, 132, 151], "useless": [90, 122], "580": 90, "view": [90, 151], "longer": [90, 132, 135, 138, 150, 157, 162], "current": [90, 122, 176], "incomplet": 90, "unnecessari": [90, 113, 122], "unless": 90, "reproduc": [91, 105, 151], "script": 91, "event": 91, "rerecord": 91, "ui": 91, "releas": 91, "house_pric": [91, 104, 143, 144, 146], "na_valu": [91, 104], "id": [91, 95, 104], "mssubclass": [91, 104], "mszone": [91, 104], "street": [91, 104], "allei": [91, 104], "lotshap": [91, 104], "landcontour": [91, 104], "poolqc": [91, 104], "fenc": [91, 104], "miscfeatur": [91, 104], "mosold": [91, 104], "yrsold": [91, 104, 132], "saletyp": [91, 104], "salecondit": [91, 104], "rl": [91, 104], "8450": [91, 104], "pave": [91, 104], "reg": [91, 104, 108], "lvl": [91, 104], "allpub": [91, 104], "2008": [91, 104], "wd": [91, 104], "9600": [91, 104], "2007": [91, 104], "11250": [91, 104], "ir1": [91, 104], "9550": [91, 104], "2006": [91, 104], "abnorml": [91, 104], "14260": [91, 104], "1455": 91, "1456": 91, "62": [91, 104], "7917": 91, "1457": 91, "13175": 91, "mnprv": [91, 104], "2010": 91, "1458": 91, "66": [91, 104, 105, 153], "9042": 91, "gdprv": 91, "shed": [91, 104], "2500": 91, "1459": [91, 104], "9717": 91, "1460": [91, 104], "9937": 91, "cherri": 91, "retain": [91, 132], "numeric_featur": 91, "fullbath": [91, 104], "halfbath": [91, 104], "neighborhood": [91, 92, 104, 108], "housestyl": [91, 104], "imput": [91, 104], "simpleimput": [91, 104], "numeric_transform": 91, "categorical_transform": 91, "join": 91, "simpleimputersimpleimput": 91, "859": [91, 154], "018": [91, 116], "dollar": [91, 92, 102, 107, 132], "necessarili": [91, 102, 103, 118, 131, 132, 146, 149, 181], "richer": [91, 137], "level": [91, 97, 99, 117, 119, 124, 125, 127, 131, 135, 141, 158, 159, 160, 162, 163, 176], "coars": 91, "dummyregressor": [92, 144], "overview": [92, 93, 96, 98, 102, 103, 107, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 130, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 146, 156, 158, 159, 
160, 161, 162, 163, 164], "fetch_california_h": [92, 96, 102, 103, 107, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 148, 155], "return_x_i": [92, 97, 100, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 148, 155, 178], "as_fram": [92, 96, 100, 102, 103, 107, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 148, 155], "rescal": [92, 96, 103, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 132, 144, 148, 150, 155], "splitter": 92, "cv_results_tree_regressor": 92, "n_job": [92, 95, 96, 97, 98, 99, 101, 103, 107, 108, 115, 116, 117, 118, 119, 120, 122, 123, 125, 127, 132, 134, 149, 151, 153, 155], "errors_tree_regressor": 92, "774882": 92, "137125": 92, "533398": 92, "841918": 92, "033639": 92, "512544": 92, "47": [92, 102, 104, 153], "969062": 92, "result_dummi": 92, "errors_dummy_regressor": 92, "91": [92, 104], "140009": 92, "821140": 92, "757566": 92, "543652": 92, "034555": 92, "979007": 92, "477244": 92, "all_error": 92, "concat": [92, 99, 100, 108, 123, 125, 126, 135], "170466": 92, "713153": 92, "605570": 92, "539353": 92, "483618": 92, "941912": 92, "982259": 92, "213912": 92, "692293": 92, "015862": 92, "422448": 92, "542490": 92, "893328": 92, "130930": 92, "732185": 92, "947952": 92, "793400": 92, "991373": 92, "416833": 92, "023571": 92, "020024": 92, "556965": 92, "047253": 92, "539567": 92, "987471": 92, "185225": 92, "910118": 92, "298971": 92, "738824": 92, "084639": 92, "252201": 92, "984471": 92, "060866": 92, "981744": 92, "731943": 92, "547140": 92, "962591": 92, "820219": 92, "768721": 92, "092553": 92, "305556": 92, "604933": 92, "503017": 92, "544447": 92, "147974": 92, "352055": 92, "386320": 92, "121120": 92, "815660": 92, "307338": 92, "216574": 92, "138339": 92, "107460": 92, "548585": 92, "620318": 92, "29": [92, 104, 108, 116, 125, 137], "165331": 92, "linspac": [92, 96, 98, 99, 110, 115, 121, 128, 133, 136, 138, 152, 162], "edgecolor": [92, 95, 99, 102, 103, 104, 105, 106, 107, 131, 152, 162], "legend": [92, 95, 99, 100, 101, 105, 107, 109, 110, 115, 121, 133, 135, 139, 141, 152, 156, 160, 161, 162, 163], "bbox_to_anchor": [92, 95, 99, 100, 101, 105, 107, 109, 110, 115, 121, 133, 135, 141, 152, 156, 160, 162], "loc": [92, 95, 99, 100, 101, 105, 107, 109, 110, 115, 121, 130, 133, 135, 139, 141, 152, 156, 160, 162], "upper": [92, 95, 99, 100, 101, 105, 107, 108, 110, 115, 121, 135, 141, 156, 160, 162], "xlabel": [92, 95, 97, 98, 99, 100, 102, 103, 104, 105, 106, 108, 122, 123, 125, 126, 131, 132, 134, 139, 141], "Such": [92, 132], "extrem": [92, 97, 106, 107, 124, 127, 131, 132], "gamma": [93, 97, 98, 131, 135, 140], "svm": [93, 97, 98, 137, 140], "form": [93, 95, 97, 98, 108, 128, 129, 133, 134, 136, 138, 144, 150], "accomplish": [93, 98], "rbf": [93, 98, 131, 135, 140], "svc": [93, 97, 98, 140], "scheme": [93, 98, 101, 109, 161], "validationcurvedisplai": [93, 98, 103, 122, 129, 134], "10e": [93, 98], "10e2": [93, 98], "logarithm": [93, 98], "svc__gamma": [93, 98], "retriev": [93, 98, 102, 108, 132], "learningcurvedisplai": [93, 96, 98], "half": [94, 99, 140, 141], "uniform": [94, 99, 107, 110, 131, 153], "handwritten": 95, "digit": 95, "load_digit": 95, "recreat": 95, "minmaxscal": [95, 110, 132, 184], "kfold": [95, 97, 100, 123, 143, 146, 151], "test_score_no_shuffl": 95, "931": 95, "026": 95, "test_score_with_shuffl": 95, "966": 95, "010": [95, 154], "all_scor": [95, 97], "xlim": [95, 108, 115, 141], "impos": [95, 117, 153], "94166667": 95, "89722222": 95, "94986072": 95, "9637883": 95, "90250696": 95, "ship": 95, "descr": [95, 102, 
107], "_digits_dataset": 95, "optic": 95, "recognit": 95, "characterist": [95, 102, 107, 141], "1797": 95, "64": [95, 104, 105, 145, 153], "8x8": 95, "pixel": 95, "creator": 95, "alpaydin": 95, "boun": 95, "edu": 95, "tr": 95, "juli": 95, "1998": 95, "copi": [95, 102, 105, 108, 128, 133, 178], "uci": 95, "ic": 95, "nist": 95, "bitmap": 95, "preprint": 95, "32x32": 95, "nonoverlap": 95, "block": [95, 101, 102, 107, 108, 116], "4x4": 95, "invari": [95, 131], "distort": 95, "garri": 95, "j": 95, "candela": 95, "dimmick": 95, "geist": 95, "grother": 95, "janet": 95, "wilson": 95, "handprint": 95, "nistir": 95, "5469": 95, "kaynak": 95, "1995": 95, "Their": [95, 139], "msc": 95, "thesi": 95, "institut": 95, "graduat": 95, "bogazici": 95, "univers": 95, "cascad": 95, "kybernetika": 95, "ken": 95, "tang": 95, "ponnuthurai": 95, "n": [95, 97, 98, 107, 110, 116, 120, 123, 128, 132, 133, 135, 139, 141, 150, 151, 154, 156], "suganthan": 95, "xi": 95, "yao": 95, "kai": 95, "qin": 95, "dimensionalityreduct": 95, "lda": 95, "electr": [95, 104], "electron": 95, "nanyang": 95, "2005": 95, "claudio": 95, "gentil": 95, "nip": 95, "2000": 95, "writer": 95, "wrote": 95, "certain": [95, 123], "130": [95, 116], "hypothesi": [95, 101, 132], "itertool": [95, 104], "bound": [95, 141, 144], "writer_boundari": 95, "256": [95, 116, 117, 153], "386": 95, "516": 95, "646": 95, "776": 95, "915": [95, 115], "1029": 95, "1157": 95, "1287": 95, "1415": 95, "1545": 95, "1667": 95, "zeros_lik": [95, 109], "lower_bound": 95, "upper_bound": 95, "group_id": 95, "lb": 95, "zip": [95, 105, 115, 128, 131, 133], "ytick": [95, 100], "xtick": 95, "ylabel": [95, 100, 101, 103, 122, 123, 131, 132, 134, 139, 141, 156], "groupkfold": 95, "928": 95, "014": [95, 97, 116], "realiti": 95, "synthet": [96, 110, 115, 125, 131, 141, 159, 160, 161, 163], "train_siz": [96, 98, 147, 154], "endpoint": 96, "325": [96, 105], "775": 96, "displai": [96, 112, 121, 139, 141, 152, 178], "from_estim": [96, 98, 103, 109, 122, 130, 131, 134, 135, 139, 140, 141, 156, 160, 162], "score_typ": [96, 98], "negate_scor": [96, 103, 122, 134], "neg_": [96, 102, 129, 134, 146], "score_nam": [96, 98], "std_display_styl": [96, 98, 103, 122, 134], "errorbar": [96, 98, 103, 122, 132, 134], "ax_": [96, 98, 103, 122, 134, 135, 141], "xscale": [96, 132], "log": [96, 132, 144, 152, 153], "alon": [96, 131], "anymor": [96, 99, 101, 102, 117, 140], "bay": 96, "especi": [96, 132], "report": [96, 97, 102], "problemat": [97, 132, 153], "underestim": 97, "philosoph": 97, "breast": 97, "cancer": 97, "load_breast_canc": 97, "param_grid": [97, 118, 120, 149, 151, 160, 178, 184], "model_to_tun": 97, "gridsearchcvgridsearchcv": [97, 149, 151], "svcsvc": [97, 140], "best_params_": [97, 123, 148, 149, 151, 153, 155, 160, 178, 184], "best_score_": 97, "627": 97, "stage": [97, 100, 116, 124, 125, 127, 137, 141, 162], "misinterpret": 97, "forget": 97, "pitfal": 97, "emb": [97, 151], "dedic": [97, 144], "declar": 97, "inner_cv": 97, "outer_cv": 97, "trial": 97, "test_score_not_nest": 97, "test_score_nest": 97, "n_trial": 97, "non_nest": 97, "append": [97, 100, 108, 110, 121, 123, 135, 146], "merg": [97, 125], "whisker": [97, 107, 123, 125, 126, 132, 145], "vert": [97, 107, 123, 125, 126, 132, 145], "highest": [97, 109, 124, 125, 127, 141, 144, 152, 153], "lure": 97, "overli": [97, 102], "021278": 98, "003896": 98, "680000": 98, "021131": 98, "003630": 98, "746667": 98, "020016": 98, "003548": 98, "786667": 98, "019274": 98, "003748": 98, "800000": 98, "020104": 98, "003524": 98, "019360": 98, 
"003528": 98, "019182": 98, "003642": 98, "018040": 98, "003457": 98, "826667": 98, "018451": 98, "003804": 98, "018714": 98, "003438": 98, "733333": 98, "765": 98, "043": 98, "param_nam": [98, 103, 122, 134, 149, 152, 153, 178, 184], "disp": [98, 103, 122, 134, 135, 141], "errorbar_kw": 98, "transpar": 98, "regim": 98, "oscil": 98, "donat": [98, 106, 141, 142, 145], "simplist": 98, "imposs": [98, 140], "cv_results_logistic_regress": 99, "test_score_logistic_regress": 99, "815937": 99, "813849": 99, "815036": 99, "815569": 99, "810982": 99, "814709": 99, "813112": 99, "810327": 99, "812416": 99, "816388": 99, "most_frequent_classifi": 99, "cv_results_most_frequ": 99, "test_score_most_frequ": 99, "760329": 99, "756808": 99, "759142": 99, "760739": 99, "761681": 99, "761885": 99, "757463": 99, "757176": 99, "763114": 99, "all_test_scor": 99, "stratified_dummi": 99, "cv_results_stratifi": 99, "test_score_dummy_stratifi": 99, "uniform_dummi": 99, "cv_results_uniform": 99, "test_score_dummy_uniform": 99, "wrong": [99, 124, 127, 152], "henc": [99, 108, 115, 132, 153], "uniformli": [99, 110, 135], "weakest": 99, "argu": 99, "permutation_test_scor": 99, "permut": [99, 178], "quit": [99, 100, 101, 103, 105, 116, 139], "strongest": 99, "load_iri": [100, 178], "toi": [100, 137, 140], "nine": 100, "data_random": 100, "randn": [100, 110, 115, 124, 127, 137], "train_index": 100, "test_index": 100, "six": 100, "train_cv_count": 100, "test_cv_count": 100, "fold_idx": 100, "train_idx": 100, "test_idx": 100, "enumer": [100, 109, 110, 121, 123, 125, 128, 133, 151], "idx": [100, 125, 162], "953": 100, "009": 100, "frequenc": [100, 106, 141], "stratifiedkfold": [100, 142, 145], "960": 100, "016": 100, "past": [101, 106, 128, 133, 141], "ident": [101, 102, 116, 141, 151], "financi": 101, "quotat": 101, "tot": 101, "xom": 101, "exxon": 101, "cvx": 101, "chevron": 101, "cop": 101, "conocophillip": 101, "vlo": 101, "valero": 101, "template_nam": 101, "quot": 101, "stock": 101, "2f": [101, 102, 117, 120, 122, 128, 132, 133, 135, 136, 137, 138, 141, 149, 153, 156, 162], "94": [101, 109], "surprisingli": [101, 102, 107, 127], "outstand": 101, "eas": [101, 102, 131, 137, 140], "r2_score": 101, "verifi": [101, 113, 122, 137], "doesn": 101, "proper": [101, 107, 137, 151], "to_period": 101, "q": 101, "69": [101, 104, 109], "forecast": 101, "ulterior": 101, "timeseriessplit": 101, "nuniqu": [101, 105, 160, 184], "74": [101, 104], "shelv": 101, "absurd": 101, "intend": [102, 106, 176], "dive": 102, "area": [102, 103, 104, 110, 135, 140, 141, 160], "geograph": [102, 107, 118], "_california_housing_dataset": [102, 107], "20640": [102, 107], "medinc": [102, 107, 108], "houseag": [102, 107, 108], "averoom": [102, 107, 108, 155], "household": [102, 107], "avebedrm": [102, 107, 108], "aveoccup": [102, 107, 108], "member": [102, 107], "latitud": [102, 107, 108], "longitud": [102, 107, 108], "statlib": [102, 107], "dcc": [102, 107], "fc": [102, 107], "pt": [102, 107], "ltorgo": [102, 107], "cal_hous": [102, 107], "district": [102, 107, 108, 118], "hundr": [102, 107, 125], "deriv": [102, 105, 107, 129, 134, 137], "1990": [102, 107], "u": [102, 107], "smallest": [102, 107, 116, 132], "bureau": [102, 107], "600": [102, 107], "resid": [102, 107], "home": [102, 107], "empti": [102, 107], "vacat": [102, 107], "resort": [102, 107], "pace": [102, 107], "kellei": [102, 107], "ronald": [102, 107], "barri": [102, 107], "spatial": [102, 107], "autoregress": [102, 107], "1997": [102, 107], "291": [102, 107], "297": [102, 107], "3252": [102, 
107, 108], "984127": [102, 107, 108], "023810": [102, 107, 108], "322": [102, 107, 108, 116], "555556": [102, 107, 108], "3014": [102, 107, 108], "238137": [102, 107, 108], "971880": [102, 107, 108], "2401": [102, 107, 108], "109842": [102, 107, 108], "2574": [102, 107, 108], "288136": [102, 107, 108], "073446": [102, 107, 108], "496": [102, 107, 108, 152, 153], "802260": [102, 107, 108], "6431": [102, 107, 108], "817352": [102, 107, 108], "073059": [102, 107, 108], "558": [102, 107, 108], "547945": [102, 107, 108], "8462": [102, 107, 108], "281853": [102, 107, 108], "081081": [102, 107, 108], "565": [102, 107, 108], "181467": [102, 107, 108], "452": 102, "358": 102, "352": 102, "341": 102, "342": 102, "medhousev": [102, 107, 108], "decisiontreeregressordecisiontreeregressor": [102, 137, 163], "mean_absolute_error": [102, 113, 120, 121, 122, 136, 144], "grown": [102, 113, 117, 122], "leaf": [102, 117, 149, 153, 154, 156, 160, 162, 172, 173], "node": [102, 117, 119, 149, 154, 156, 160, 162, 171, 172, 173], "phenomena": 102, "unstabl": [102, 132], "wouldn": 102, "unlimit": [102, 117], "lucki": 102, "easiest": 102, "variant": 102, "226245": 102, "004727": 102, "909797": 102, "228429": 102, "004796": 102, "421170": 102, "224813": 102, "004612": 102, "411089": 102, "225663": 102, "004545": 102, "319824": 102, "220283": 102, "004752": 102, "607875": 102, "front": 102, "revert": [102, 129, 134], "negat": 102, "test_error": [102, 132], "226878": 102, "004689": 102, "901300": 102, "224951": 102, "004616": 102, "572767": 102, "225966": 102, "004643": 102, "194585": 102, "227141": 102, "004674": 102, "590236": 102, "229660": 102, "004721": 102, "727998": 102, "percentag": [102, 110, 144], "tag": [102, 128, 133], "expert": [102, 137, 140], "25903583": 102, "25394607": 102, "25143719": 102, "2595458": 102, "25119472": 102, "003268": 102, "00340438": 102, "00354171": 102, "00333428": 102, "00355721": 102, "26291527": 102, "41947109": 102, "44492564": 102, "23357874": 102, "40788361": 102, "overal": [102, 113, 117, 118, 122, 132, 135, 141, 160], "fluctuat": [103, 137, 149], "hopefulli": [103, 117, 136], "302": 103, "318": 103, "harm": 103, "matter": [103, 126, 151], "compromis": [103, 141], "dispers": [103, 116], "directori": [104, 105, 106], "charact": 104, "marker": [104, 109, 141, 162], "pars": [104, 105], "lotconfig": 104, "208500": 104, "fr2": 104, "181500": 104, "223500": 104, "corner": [104, 141], "140000": 104, "250000": 104, "nin": 104, "tail": [104, 105, 107, 139], "coupl": [104, 105, 107, 116, 117, 132, 153], "core": [104, 105, 106, 107, 115, 116], "rangeindex": [104, 105, 106, 107], "null": [104, 105, 106, 107, 139], "1201": 104, "landslop": 104, "condition1": 104, "condition2": 104, "bldgtype": 104, "yearremodadd": 104, "roofstyl": 104, "roofmatl": 104, "exterior1st": 104, "exterior2nd": 104, "masvnrtyp": 104, "588": 104, "1452": 104, "exterqu": 104, "extercond": 104, "foundat": 104, "bsmtqual": 104, "1423": 104, "bsmtcond": 104, "bsmtexposur": 104, "1422": 104, "bsmtfintype1": 104, "bsmtfintype2": 104, "heat": 104, "heatingqc": 104, "centralair": 104, "bsmtfullbath": 104, "bsmthalfbath": 104, "kitchenqu": 104, "fireplacequ": 104, "770": 104, "garagetyp": 104, "1379": 104, "garageyrblt": 104, "garagefinish": 104, "garagequ": 104, "garagecond": 104, "paveddr": 104, "72": 104, "281": 104, "901": 104, "kb": [104, 106], "numerical_data": 104, "410": 104, "layout": 104, "subplots_adjust": [104, 105, 107, 108], "hspace": [104, 105, 107], "wspace": [104, 107], "criterion": [104, 108, 156], "swim": 
104, "pool": [104, 127], "string_data": 104, "490": 104, "ceil": 104, "zip_longest": 104, "n_string_featur": 104, "nrow": [104, 141, 162], "ncol": [104, 131, 133, 141, 144, 162], "fig": [104, 108, 131, 132, 141, 144, 152, 155, 156, 157, 178], "subplot": [104, 108, 131, 132, 141, 144, 156, 160, 161, 162], "ravel": [104, 105, 133, 139, 162], "barh": [104, 106, 108, 135, 139, 141], "set_titl": [104, 137, 138, 141, 162], "databas": [104, 155], "grvl": 104, "gd": 104, "make_column_transform": [104, 119], "most_frequent_imput": 104, "mean_imput": 104, "ames_housing_preprocess": 104, "tolist": [104, 156, 162], "timestamp": 105, "150": [105, 109], "0880": 105, "033870": 105, "161": [105, 116, 156], "336": 105, "0842": 105, "033571": 105, "163": 105, "409": 105, "0234": 105, "033223": 105, "156": 105, "445": 105, "0016": 105, "032908": 105, "148": 105, "441": 105, "1144": 105, "38254": 105, "38253": 105, "mb": [105, 107], "str": 105, "datetim": 105, "direct": [105, 131, 135, 153], "reopen": 105, "explan": [105, 157], "soup": 105, "blender": 105, "blend": [105, 131], "veget": 105, "instantan": 105, "profession": 105, "calibr": 105, "track": 105, "spent": [105, 125], "food": 105, "uranium": 105, "petrol": 105, "ga": 105, "coal": 105, "plant": 105, "400": 105, "cheaper": [105, 108], "w": [105, 144, 162], "deliv": 105, "breakout": 105, "kilomet": 105, "costli": [105, 135, 148, 149, 155], "cruis": 105, "datetime64": 105, "ns": 105, "freq": 105, "august": 105, "septemb": 105, "date_first_rid": 105, "cycling_rid": 105, "data_rid": 105, "target_rid": 105, "tempor": 105, "resolut": [105, 153], "smoother": [105, 110], "set_xlabel": [105, 144, 162], "extremum": 105, "rng": [105, 107, 108, 110, 115, 127, 137], "randomst": [105, 107, 108, 110, 115, 127, 131, 137], "arang": [105, 107, 108, 110, 160, 161, 163], "quantiz": [105, 107], "midpoint": [105, 107], "interv": [105, 107, 110, 112, 115, 121, 159, 161, 163], "qcut": [105, 107], "retbin": [105, 107], "lambda": [105, 107, 152, 178], "mid": [105, 107], "palett": [105, 107, 109, 135, 139, 140, 156, 160, 162], "viridi": [105, 107, 152, 162, 178], "uphil": 105, "physiolog": 105, "stimuli": 105, "recenc": [106, 141], "monetari": [106, 141], "12500": 106, "98": [106, 108, 156], "3250": [106, 138], "4000": 106, "6000": 106, "748": 106, "747": 106, "noth": [106, 110], "shock": 106, "her": 106, "762032": 106, "237968": 106, "strike": 106, "fetch": 107, "internet": 107, "california_h": 107, "526": 107, "585": 107, "521": [107, 141], "413": [107, 153], "422": [107, 141], "demographi": 107, "granular": [107, 141], "20639": 107, "640": [107, 156], "unnotic": 107, "features_of_interest": [107, 132], "429000": 107, "096675": 107, "070655": 107, "1425": 107, "476744": 107, "474173": 107, "473911": 107, "386050": 107, "1132": 107, "462122": 107, "846154": 107, "333333": 107, "692308": 107, "440716": 107, "006079": 107, "429741": 107, "787": [107, 150], "229129": 107, "048780": 107, "818116": 107, "1166": 107, "052381": 107, "099526": 107, "282261": 107, "1725": 107, "141": 107, "909091": 107, "066667": 107, "1243": 107, "35682": 107, "huge": 107, "datapoint": [107, 135], "coast": 107, "big": [107, 144], "citi": [107, 144], "san": 107, "diego": 107, "lo": 107, "angel": 107, "jose": 107, "francisco": 107, "columns_drop": 107, "distinguish": 107, "curiou": [107, 140, 184], "553": [107, 141], "062": 107, "coef": [107, 108, 132, 135, 139], "est": [107, 132], "spot": [107, 132], "10000": 108, "100k": 108, "assert": [108, 117, 184], "un": [108, 132], "bin_var": 108, "randint": [108, 
120, 124, 127], "rnd_bin": 108, "num_var": 108, "rnd_num": 108, "x_with_rnd_feat": 108, "x_train": 108, "x_test": 108, "y_train": [108, 173], "y_test": 108, "train_dataset": 108, "insert": [108, 135], "kde": 108, "scatter_kw": 108, "x_i": 108, "versu": [108, 144, 164], "6013466090490024": 108, "5975757793803438": 108, "Its": 108, "somehow": 108, "rest": [108, 137, 162], "worth": 108, "magnitud": [108, 130, 132, 135, 171], "habit": 108, "nb": 108, "outcom": [108, 139, 141, 155], "shall": [108, 110], "rise": 108, "80k": 108, "gaug": 108, "decad": 108, "visibl": [108, 144], "dev": 108, "6013157556102924": 108, "5972410717953726": 108, "safe": 108, "perturb": 108, "repeatedkfold": 108, "cv_model": 108, "n_repeat": [108, 125, 126], "boxplot": 108, "cyan": 108, "satur": 108, "pretti": 108, "l1": 108, "015": 108, "5899811014945939": 108, "5769786920519312": 108, "partli": 108, "multivari": 108, "instabl": 108, "teas": 108, "9796463093530234": 108, "8467693464367002": 108, "formal": 108, "brought": 108, "argsort": [108, 125], "set_ytick": 108, "set_yticklabel": 108, "9798863545214676": 108, "8465346522200555": 108, "def": [108, 110, 115, 128, 131, 133, 135, 137, 138, 149, 152, 153, 160, 178], "get_score_after_permut": 108, "curr_feat": 108, "x_permut": 108, "col_idx": 108, "permuted_scor": 108, "get_feature_import": 108, "baseline_score_train": 108, "permuted_score_train": 108, "feature_import": 108, "661": [108, 121], "list_feature_import": 108, "n_round": 108, "00879": 108, "heavili": 108, "permutation_import": 108, "calcul": [108, 109, 141], "importances_mean": 108, "importances_std": 108, "plot_feature_import": 108, "perm_importance_result": 108, "feat_nam": 108, "xerr": 108, "perm_importance_result_train": 108, "realist": [108, 139], "unclear": 108, "culmen_column": [109, 130, 135, 139, 156, 157, 158, 162], "purposefulli": 109, "unlik": [109, 113, 122, 156], "misclassifi": [109, 131, 135], "decisiontreeclassifi": [109, 119, 142, 145, 156, 160, 162], "tab": [109, 110, 115, 131, 135, 139, 140, 141, 156, 160, 161, 162], "decisiontreeclassifierdecisiontreeclassifi": [109, 156, 162], "misclassified_samples_idx": 109, "flatnonzero": 109, "data_misclassifi": 109, "decisionboundarydisplai": [109, 130, 131, 135, 139, 140, 156, 158, 160, 162], "response_method": [109, 131, 135, 139, 140, 156, 160, 162], "cmap": [109, 131, 135, 139, 140, 149, 156, 160, 162], "rdbu": [109, 131, 140, 160], "center": [109, 131, 133, 152, 155, 160], "nwith": [109, 115], "misclassif": [109, 135, 141], "sample_weight": 109, "trick": [109, 140], "drastic": 109, "qualit": [109, 110, 128, 133, 162], "newly_misclassified_samples_idx": 109, "remaining_misclassified_samples_idx": 109, "intersect1d": 109, "ensemble_weight": 109, "935672514619883": 109, "6929824561403509": 109, "adaboostclassifi": 109, "samm": 109, "adaboostclassifieradaboostclassifi": 109, "boosting_round": 109, "estimators_": [109, 110, 112, 121], "to_numpi": [109, 110, 121, 141], "640x480": 109, "estimator_weights_": 109, "58351894": 109, "46901998": 109, "03303773": 109, "estimator_errors_": 109, "05263158": 109, "05864198": 109, "08787269": 109, "sens": [109, 163], "generate_data": [110, 115], "x_min": [110, 115], "x_max": [110, 115], "capabl": [110, 115, 132, 141, 159, 161, 163], "y_pred": [110, 142, 144, 145], "data_bootstrap": 110, "target_bootstrap": 110, "bootstrap_sampl": 110, "bootstrap_indic": 110, "n_bootstrap": 110, "bootstrap_idx": 110, "facecolor": 110, "180": [110, 138], "linewidth": [110, 131, 135, 160], "darker": [110, 135, 139], "data_train_hug": 
110, "data_test_hug": 110, "target_train_hug": 110, "100_000": 110, "data_bootstrap_sampl": 110, "target_bootstrap_sampl": 110, "ratio_unique_sampl": 110, "bag_of_tre": 110, "tree_idx": [110, 121], "tree_predict": [110, 121], "feed": 110, "bag_predict": 110, "unbroken": [110, 115], "whole": [110, 112, 117, 119, 121, 132, 137], "meta": 110, "wrap": [110, 132, 135, 164], "snippet": [110, 178], "smooth": [110, 131, 135, 140], "bagged_tre": [110, 119], "bagged_trees_predict": 110, "els": [110, 137, 160], "opac": 110, "appreci": 110, "space": [110, 112, 114, 115, 121, 123, 127, 129, 131, 132, 134, 135, 139, 156, 161, 162], "polynomialfeatur": [110, 129, 131, 132, 134, 137], "polynomial_regressor": 110, "1e": [110, 116, 135, 150, 153], "intention": 110, "simpli": [110, 162], "regressor_predict": 110, "base_model_lin": 110, "bagging_predict": 110, "ylim": [110, 141], "shade": 110, "randomizedsearchcv": [111, 117, 120, 148, 153, 155, 178], "penguins_regress": [112, 121, 128, 133, 136, 138, 157, 159, 160, 161, 163], "evenli": [112, 121], "170": [112, 121], "230": [112, 121], "newli": [112, 121], "conduct": [113, 122, 155], "learning_r": [113, 114, 117, 122, 123, 147, 149, 152, 153, 154, 178, 180], "slower": [113, 122, 134, 151], "offer": [113, 122, 153], "certainli": [113, 122], "n_iter_no_chang": [113, 122], "max_leaf_nod": [114, 117, 123, 147, 149, 151, 152, 153, 154, 160, 178, 180], "residu": [115, 117, 144, 153], "back": [115, 139, 141, 152, 156], "len_x": 115, "rand": [115, 137], "target_train_predict": 115, "target_test_predict": 115, "line_predict": 115, "lines_residu": 115, "edit": 115, "initi": [115, 151, 181], "tree_residu": 115, "target_train_predicted_residu": 115, "target_test_predicted_residu": 115, "manag": 115, "x_sampl": 115, "target_tru": 115, "target_true_residu": 115, "commit": [115, 144], "y_pred_first_tre": 115, "517": 115, "393": 115, "145": 115, "248": [115, 116], "y_pred_first_and_second_tre": 115, "gradientboostingregressor": [115, 116, 122], "gradient_boost": [115, 116], "cv_results_gbdt": [115, 116], "416": 115, "144": [115, 153], "012": 115, "random_forest": [115, 119], "cv_results_rf": 115, "465": 115, "315": 115, "032": 115, "197": 115, "brute": [116, 136], "overcom": [116, 118, 131, 137], "benchmark": 116, "392": 116, "914": 116, "042": 116, "011": 116, "kbinsdiscret": [116, 131, 137], "n_bin": [116, 131, 137], "quantil": [116, 131, 140], "data_tran": 116, "opt": [116, 132, 142, 145], "hostedtoolcach": [116, 132, 142, 145], "x64": [116, 132, 142, 145], "lib": [116, 132, 142, 145], "python3": [116, 132, 142, 145], "site": [116, 132, 142, 145], "_discret": 116, "py": [116, 132, 142, 145], "userwarn": [116, 142, 145], "249": 116, "231": 116, "162": 116, "203": 116, "242": 116, "125": 116, "160": 116, "126": 116, "136": [116, 142, 145], "199": 116, "col": 116, "253": [116, 154], "207": 116, "235": [116, 121, 163], "773": 116, "273": 116, "histogram_gradient_boost": 116, "cv_results_hgbdt": 116, "758": 116, "694": 116, "862": 116, "077": 116, "clariti": 117, "doubl": [117, 149, 150], "max_featur": [117, 119, 120], "grow": [117, 118, 160, 178], "uncorrel": 117, "symmetr": [117, 132, 144, 160], "constraint": [117, 132, 160], "min_samples_leaf": [117, 118, 152, 153, 160, 178], "branch": [117, 160], "promot": 117, "altogeth": 117, "param_distribut": [117, 153, 155], "search_cv": 117, "n_iter": [117, 120, 148, 153, 155, 178], "param_": [117, 120, 123, 149, 153], "mean_test_error": [117, 120], "std_test_error": [117, 120], "cv_results_": [117, 120, 123, 149, 151, 153, 155, 
178], "mean_test_scor": [117, 120, 123, 149, 151, 152, 153, 155, 178], "std_test_scor": [117, 120, 149, 151, 152, 153], "sort_valu": [117, 120, 149, 153, 155], "param_max_featur": [117, 120], "param_max_leaf_nod": 117, "param_min_samples_leaf": 117, "996708": 117, "575388": 117, "013965": 117, "522837": 117, "290532": 117, "320069": 117, "169996": 117, "486971": 117, "425679": 117, "597833": 117, "856788": 117, "543134": 117, "927604": 117, "800344": 117, "100456": 117, "635957": 117, "515785": 117, "833755": 117, "640989": 117, "856759": 117, "role": 117, "inter": 117, "refit": [117, 147, 151, 154], "overlook": 117, "stat": [117, 120, 153], "loguniform": [117, 153], "param_max_it": 117, "param_learning_r": 117, "01864": 117, "043016": 117, "262257": 117, "047293": 117, "811893": 117, "229961": 117, "176656": 117, "410615": 117, "243557": 117, "297739": 117, "740945": 117, "360870": 117, "083745": 117, "095718": 117, "274735": 117, "215543": 117, "275814": 117, "216063": 117, "067503": 117, "780658": 117, "237595": 117, "05929": 117, "855942": 117, "418406": 117, "160519": 117, "270716": 117, "416068": 117, "125207": 117, "914995": 117, "557058": 117, "054511": 117, "224344": 117, "623883": 117, "248463": 117, "147930": 117, "842348": 117, "906226": 117, "494647": 117, "710124": 117, "061034": 117, "568261": 117, "551379": 117, "079415": 117, "455489": 117, "944949": 117, "0351": 117, "503834": 117, "949876": 117, "019923": 117, "624869": 117, "045625": 117, "039361": 117, "818311": 117, "083471": 117, "019351": 117, "377257": 117, "051528": 117, "01724": 117, "941795": 117, "084528": 117, "rank": [117, 123, 184], "hgbt": 117, "hassl": 118, "354": 118, "087": 118, "min_samples_split": [118, 160], "523": [118, 133], "107": 118, "bagging_regressor": 118, "642": 118, "083": 118, "decent": [118, 152, 153], "modif": 119, "inject": 119, "decorrel": 119, "categorical_encod": 119, "scores_tre": 119, "820": 119, "006": [119, 123], "scores_bagged_tre": 119, "846": 119, "005": 119, "randomforestclassifi": [119, 125, 126], "scores_random_forest": 119, "004": 119, "disabl": 119, "sqrt": 119, "literatur": 119, "agnost": 119, "param": [120, 123, 131, 149, 152], "bootstrap_featur": 120, "estimator__ccp_alpha": 120, "estimator__criterion": 120, "estimator__max_depth": 120, "estimator__max_featur": 120, "estimator__max_leaf_nod": 120, "estimator__min_impurity_decreas": 120, "estimator__min_samples_leaf": 120, "estimator__min_samples_split": 120, "estimator__min_weight_fraction_leaf": 120, "estimator__random_st": 120, "estimator__splitt": 120, "max_sampl": 120, "oob_scor": 120, "verbos": [120, 150, 153, 155, 180], "warm_start": 120, "param_n_estim": 120, "param_max_sampl": 120, "param_estimator__max_depth": 120, "281680": 120, "061146": 120, "475610": 120, "121340": 120, "602077": 120, "070860": 120, "326435": 120, "174542": 120, "956380": 120, "278850": 120, "017761": 120, "674627": 120, "135453": 120, "005112": [120, 152], "224306": 120, "316641": 120, "070459": 120, "053769": 120, "759904": 120, "679971": 120, "334637": 120, "125204": 120, "528335": 120, "972150": 120, "872540": 120, "686614": 120, "949551": 120, "721352": 120, "529438": 120, "429014": 120, "750573": 120, "081410": 120, "841505": 120, "968520": 120, "258303": 120, "351126": 120, "840351": 120, "744600": 120, "889776": 120, "075650": 120, "gram": [121, 129, 134, 136], "366": 121, "data_rang": 121, "forest_predict": 121, "n_estimators_": 122, "243": [122, 146], "hist_gbdt": 123, "839": [123, 142, 145], "best_estimator_": 123, "528": 123, 
"447": 123, "576": 123, "290": 123, "414": 123, "index_column": 123, "inner_cv_result": 123, "cv_idx": 123, "search_cv_result": 123, "set_index": [123, 130, 135, 139, 146], "renam": [123, 149, 152, 153, 155, 178], "coincid": [123, 141], "bioinformat": [124, 127], "rna": [124, 127], "seq": [124, 127], "ten": [124, 127], "anova": [124, 125, 127], "feature_select": [124, 125, 126, 127], "selectkbest": [124, 125, 127], "f_classif": [124, 125, 127], "pre": [124, 127], "princip": 125, "make_classif": [125, 126], "n_inform": [125, 126], "n_redund": [125, 126], "univari": 125, "model_without_select": [125, 126], "model_with_select": [125, 126], "score_func": [125, 127], "cv_results_without_select": [125, 126], "incorpor": 125, "cv_results_with_select": [125, 126], "analyz": [125, 132, 178], "swap": 125, "swaplevel": [125, 126], "Of": 125, "scores_": 125, "percentil": 125, "alien": 125, "primari": 125, "feature_importances_": 126, "suffici": [126, 131], "class_sep": 126, "selectfrommodel": 126, "feature_selector": [126, 127], "overestim": 126, "100000": 127, "550": 127, "data_subset": 127, "940": 127, "succeed": 127, "legit": 127, "leak": 127, "data_train_subset": 127, "520": 127, "460": 127, "boilerpl": 127, "linear_model_flipper_mass": [128, 133, 138], "flipper_length": [128, 133, 138], "weight_flipper_length": [128, 133, 136, 138], "intercept_body_mass": [128, 133, 136, 138], "body_mass": [128, 133, 138], "flipper_length_rang": [128, 133, 136, 138], "goodness_fit_measur": [128, 133], "true_valu": [128, 133], "scalar": [128, 133], "model_idx": [128, 133], "x1": [129, 134, 139], "x2": [129, 134], "x3": [129, 134], "penguins_non_miss": [129, 134, 184], "181": [129, 134, 138], "186": [129, 134, 138], "195": [129, 134, 138], "193": [129, 134, 138], "190": [129, 134, 138, 153], "sign": [129, 134], "interaction_onli": [129, 134], "intermedi": [129, 134, 137, 151, 152], "infinit": [130, 135], "l2": [130, 135], "yourself": [130, 135], "penguins_train": [130, 135, 139], "penguins_test": [130, 135, 139], "candid": [130, 135, 153, 155, 156], "cs": [130, 135], "nevertheless": 131, "moon": [131, 140], "crescent": 131, "make_moon": [131, 140], "newaxi": [131, 140, 160], "data_moon": [131, 140], "target_moon": [131, 140], "gaussian": [131, 140], "edg": 131, "concentr": 131, "make_gaussian_quantil": [131, 140], "n_class": [131, 139, 140, 162], "gauss": [131, 140], "data_gauss": [131, 140], "target_gauss": [131, 140], "xor": 131, "OR": 131, "target_xor": 131, "logical_xor": 131, "int32": [131, 148, 155, 160], "data_xor": 131, "glanc": 131, "listedcolormap": 131, "constrained_layout": 131, "common_scatter_plot_param": 131, "plot_decision_boundari": [131, 135], "plot_method": [131, 135], "pcolormesh": [131, 135], "vmin": [131, 135, 149, 156, 162], "vmax": [131, 135, 149, 156, 162], "middl": [131, 150], "colormap": [131, 135, 139, 162], "contour": [131, 135], "set_ylabel": [131, 144, 162], "soft": [131, 139], "unsur": [131, 139], "attempt": [131, 132, 135], "leverag": 131, "spline": [131, 137], "onehot": 131, "kbinsdiscretizerkbinsdiscret": [131, 137], "segment": 131, "rectangular": 131, "drawn": 131, "n_knot": 131, "splinetransformersplinetransform": [131, 137], "favor": 131, "curvi": [131, 135], "knot": 131, "include_bia": [131, 132, 134, 137], "polynomialfeaturespolynomialfeatur": [131, 132, 137], "nystr\u00f6m": [131, 134], "kernel_approxim": [131, 134, 135, 137], "coef0": [131, 139], "nystroemnystroem": [131, 137], "expans": [131, 137, 140], "intract": 131, "radial": [131, 140], "basi": [131, 140], "furthemor": 
131, "induct": 131, "rotation": 131, "everywher": [131, 135], "drawback": 131, "orign": 131, "despit": 131, "augment": [131, 132, 140], "interplai": 131, "linear_regress": [132, 134, 136, 137, 163], "train_error": 132, "2e": 132, "85e": 132, "63e": 132, "69e": 132, "47e": 132, "fortun": 132, "feature_names_in_": 132, "model_first_fold": 132, "linearregressionlinearregress": [132, 136, 137], "queri": [132, 135], "weights_linear_regress": 132, "symlog": 132, "homogen": 132, "choleski": 132, "_ridg": 132, "linalgwarn": 132, "rcond": 132, "59923e": 132, "linalg": 132, "xy": 132, "assume_a": 132, "po": 132, "overwrite_a": 132, "59556e": 132, "59609e": 132, "11828e": 132, "06109e": 132, "60121e": 132, "61694e": 132, "59735e": 132, "59566e": 132, "72304e": 132, "60047e": 132, "59824e": 132, "59593e": 132, "59564e": 132, "5959e": 132, "59553e": 132, "59686e": 132, "60737e": 132, "5957e": 132, "60243e": 132, "90e": 132, "56e": 132, "55e": 132, "68e": 132, "weights_ridg": 132, "shrunk": 132, "worst": [132, 141], "saga": 132, "lsqr": 132, "re": [132, 176, 184], "resolv": 132, "omit": 132, "annual": 132, "neutral": [132, 162], "ahead": 132, "scaled_ridg": 132, "78e": 132, "21e": 132, "83e": 132, "17e": 132, "sweet": 132, "weights_ridge_scaled_data": 132, "ridge_large_alpha": 132, "1_000_000": 132, "unpredict": 132, "occurr": 132, "presenc": [132, 146], "divis": 132, "beforehand": 132, "store_cv_valu": 132, "12e": 132, "25e": 132, "50e": 132, "40e": 132, "mse_alpha": 132, "cv_values_": 132, "cv_alpha": 132, "000000e": 132, "841881e": 132, "347783e": 132, "321941e": 132, "837563e": 132, "343115e": 132, "747528e": 132, "831866e": 132, "336956e": 132, "310130e": 132, "824352e": 132, "328835e": 132, "053856e": 132, "814452e": 132, "318133e": 132, "274549e": 132, "319038e": 132, "337394e": 132, "328761e": 132, "324503e": 132, "338181e": 132, "722368e": 132, "328652e": 132, "338778e": 132, "564633e": 132, "331799e": 132, "339232e": 132, "334185e": 132, "339576e": 132, "yerr": 132, "yscale": 132, "salt": 132, "cook": 132, "best_alpha": 132, "11497569953977356": 132, "35111917342151344": 132, "1519911082952933": 132, "4641588833612782": 132, "08697490026177834": 132, "6135907273413176": 132, "stem": [132, 144], "summari": 132, "wasn": 132, "disproportion": 132, "15000": 133, "14000": 133, "predicted_body_mass": [133, 136, 138], "misleadingli": 133, "mse": [133, 137, 144, 146], "ab": [133, 137], "2764": 133, "854": 133, "338": 133, "573": 133, "041": 133, "337": 134, "071": 134, "868": 134, "poly_featur": 134, "linear_regression_interact": 134, "7077": 134, "3384": 134, "731": 134, "7347": 134, "3236": 134, "687": 134, "7858": 134, "3510": 134, "725": 134, "7083": 134, "3724": 134, "708": 134, "7467": 134, "3914": 134, "809": 134, "flipper_length_first_sampl": 134, "culmen_depth_first_sampl": 134, "301": 134, "790": 134, "340": 134, "spread": [134, 135, 152, 178], "enrich": 134, "nystroem_regress": [134, 137], "nystroem__n_compon": 134, "set_param": [134, 135, 150, 154, 180, 184], "331": 134, "832": 134, "4950": 134, "5050": 134, "footprint": 134, "scalabl": 134, "metion": 135, "invers": 135, "diverg": [135, 139, 155, 162], "rdbu_r": [135, 139], "1e6": 135, "logisticregression__c": [135, 178, 180], "sigmoid": [135, 139], "dark": 135, "nearli": 135, "steep": 135, "deduc": [135, 157], "lai": 135, "zone": 135, "weaker": 135, "light": 135, "lr_weight": 135, "perpendicular": [135, 156], "lowest": [135, 136, 144], "anywher": 135, "minor": 135, "blob": [135, 160], "frontier": 135, "conjunct": 135, "certainti": [135, 
162], "68556640610011": 136, "5780": 136, "831358077066": 136, "mean_squared_error": [136, 137, 144], "inferred_body_mass": 136, "model_error": 136, "154546": 136, "313": 136, "occas": 137, "cubic": [137, 173], "said": [137, 144, 146], "data_max": 137, "data_min": 137, "len_data": 137, "sort": 137, "full_data": 137, "input_featur": 137, "reshap": [137, 144, 162], "fit_score_plot_regress": 137, "global": 137, "data_expand": 137, "polynomial_expans": 137, "polynomial_regress": 137, "encourag": [137, 144], "svr": 137, "svrsvr": 137, "medium": 137, "10_000": [137, 156], "binned_regress": 137, "spline_regress": 137, "expand": 137, "3750": 138, "3800": 138, "3450": 138, "3650": 138, "2700": 138, "6300": 138, "heavier": [138, 157], "formula": 138, "shorter": 138, "13000": 138, "millimet": 138, "body_mass_180": 138, "body_mass_181": 138, "7200": 138, "7240": 138, "goe": [138, 141], "170mm": 138, "230mm": 138, "redefin": 138, "groupbi": 139, "inclin": 139, "x0": 139, "coef1": 139, "obliqu": [139, 156], "724988": 139, "096500": 139, "readi": 139, "barplot": 139, "horizont": [139, 160, 162], "vertic": 139, "coordin": [139, 151, 152, 155, 178], "hypothet": 139, "test_penguin": 139, "y_pred_proba": [139, 156], "17145312": 139, "82854688": 139, "y_proba_sampl": 139, "classes_": [139, 141, 156, 162, 180], "insist": 139, "overconfid": 139, "underconfid": 139, "softer": 139, "asymptot": 139, "softmax": 139, "hold": [140, 144, 152, 155, 178, 184], "interlac": [140, 160], "depict": [140, 157], "push": 140, "surround": 140, "kernel_model": 140, "donor": 141, "ago": 141, "new_donor": 141, "That": [141, 146, 149, 151], "258": 141, "505": 141, "665": 141, "615": 141, "743": 141, "374": 141, "7780748663101604": 141, "accuracy_scor": 141, "778": 141, "finer": 141, "confusionmatrixdisplai": 141, "incorrect": 141, "erron": 141, "tp": 141, "tn": 141, "fn": 141, "fp": 141, "precision_scor": [141, 142, 145], "recall_scor": 141, "pos_label": [141, 142, 145], "688": 141, "124": 141, "mislabel": 141, "ratio": 141, "dummy_classifi": 141, "762": 141, "balanced_accuracy_scor": 141, "haven": 141, "target_proba_predict": 141, "271820": 141, "728180": 141, "451764": 141, "548236": 141, "445211": 141, "554789": 141, "441577": 141, "558423": 141, "870583": 141, "129417": 141, "equivalence_pred_proba": 141, "idxmax": 141, "graph": 141, "precisionrecalldisplai": 141, "tpr": 141, "ppv": 141, "ap": 141, "preval": 141, "discrimin": 141, "roccurvedisplai": 141, "dash": 141, "plot_chance_level": 141, "pr": 141, "chance_level_kw": 141, "ambigu": [142, 145], "valueerror": [142, 145], "exc": [142, 145], "_valid": [142, 145], "recent": [142, 145], "_scorer": [142, 145], "__call__": [142, 145], "scorer": [142, 143, 145, 146], "_score": [142, 145], "355": [142, 145], "_sign": [142, 145], "_score_func": [142, 145], "y_true": [142, 144, 145], "scoring_kwarg": [142, 145], "_param_valid": [142, 145], "211": [142, 145], "wrapper": [142, 145], "arg": [142, 145, 153], "kwarg": [142, 145, 153], "_classif": [142, 145], "2127": [142, 145], "precision_recall_fscore_support": [142, 145], "1721": [142, 145], "_check_set_wise_label": [142, 145], "1507": [142, 145], "catch": [142, 145], "make_scor": [142, 145], "syntax": [143, 146], "iowa": 144, "intro": [144, 164], "996": 144, "902": 144, "2064": 144, "736": 144, "6872520581075487": 144, "dummy_regressor": 144, "608": 144, "disadvantag": 144, "median_absolute_error": 144, "137": 144, "mean_absolute_percentage_error": 144, "574": 144, "obsev": 144, "unobserv": 144, "extern": [144, 151], "cloud": 144, 
"against": 144, "exhibit": 144, "predictionerrordisplai": 144, "from_predict": 144, "actual_vs_predict": 144, "scatter_kwarg": 144, "residual_vs_predict": 144, "nwithout": 144, "banana": 144, "smile": 144, "clue": 144, "monoton": 144, "quantiletransform": [144, 184], "transformedtargetregressor": 144, "n_quantil": [144, 184], "900": 144, "output_distribut": 144, "model_transformed_target": 144, "ntransform": 144, "406": 144, "327": [144, 153], "disapprov": 144, "statistician": 144, "justifi": 144, "poissonregressor": 144, "tweedieregressor": 144, "reachabl": 144, "623": 145, "507": 145, "108": 145, "255": [145, 153], "166": 145, "00379062": 145, "00376248": 145, "0038867": 145, "00365186": 145, "00371385": 145, "00387788": 145, "00366783": 145, "00371575": 145, "00371671": 145, "00378752": 145, "00309134": 145, "00321937": 145, "00310946": 145, "00320387": 145, "00321031": 145, "00326419": 145, "00317121": 145, "00320053": 145, "00322223": 145, "00360751": 145, "test_accuraci": 145, "29333333": 145, "53333333": 145, "74666667": 145, "65333333": 145, "69333333": 145, "77333333": 145, "63513514": 145, "75675676": 145, "test_balanced_accuraci": 145, "42105263": 145, "48391813": 145, "62426901": 145, "40643275": 145, "48684211": 145, "55116959": 145, "73684211": 145, "45356037": 145, "51186791": 145, "794": 146, "892": 146, "225": 146, "test_r2": 146, "test_neg_mean_absolute_error": 146, "848721": 146, "256799": 146, "816374": 146, "084083": 146, "813513": 146, "113367": 146, "814138": 146, "448279": 146, "637473": 146, "370341": 146, "defaultdict": 146, "loss_funct": 146, "squared_error": 146, "absolute_error": 146, "loss_func": 146, "test_neg_mean_squared_error": 146, "923": 146, "344": [146, 153], "evolv": 146, "discontinu": 146, "surrog": 146, "substitut": 146, "log_loss": 146, "exhaust": [147, 154, 181], "cat_preprocessor": [147, 149, 151, 153, 154], "kneighborsregressor": [148, 155], "with_mean": [148, 155], "with_std": [148, 155], "reload": [149, 153], "dealt": 149, "ordinalencoderordinalencod": [149, 151, 153], "remainderpassthroughpassthroughhistgradientboostingclassifierhistgradientboostingclassifi": [149, 151, 153], "classifier__learning_r": [149, 151, 153, 154], "classifier__max_leaf_nod": [149, 151, 153, 154], "model_grid_search": [149, 151], "charg": 149, "rapidli": 149, "ascend": [149, 153, 155], "mean_fit_tim": [149, 152], "std_fit_tim": [149, 152], "mean_score_tim": [149, 152], "std_score_tim": [149, 152], "param_classifier__learning_r": [149, 151, 152], "param_classifier__max_leaf_nod": [149, 151, 152], "split0_test_scor": [149, 152], "split1_test_scor": [149, 152], "rank_test_scor": [149, 151, 152, 153], "489168": 149, "050132": 149, "224362": 149, "016874": 149, "868912": 149, "867213": 149, "868063": 149, "000850": 149, "370372": 149, "009651": 149, "203127": 149, "004503": 149, "866783": 149, "866066": 149, "866425": 149, "000359": 149, "118699": 149, "000410": 149, "093414": 149, "007336": 149, "classifier__": 149, "858648": 149, "862408": [149, 151, 152], "860528": 149, "001880": 149, "128271": 149, "002882": 149, "099234": 149, "011471": 149, "859358": 149, "859514": 149, "859436": 149, "000078": 149, "132225": 149, "003397": 149, "083718": 149, "000626": 149, "855536": 149, "856129": 149, "855832": 149, "000296": 149, "shorten": 149, "param_classifier__": 149, "prefix": [149, 152], "column_result": [149, 153], "shorten_param": [149, 152, 153, 178], "__": [149, 150, 152, 153, 178], "rsplit": [149, 152, 153, 178], "853266": 149, "000515": 149, "843330": 149, "002917": 
149, "817832": 149, "001124": 149, "797166": 149, "000715": 149, "288200": 149, "050539": 149, "283476": 149, "003775": 149, "262564": 149, "006326": 149, "heatmap": [149, 152], "pivoted_cv_result": 149, "pivot_t": 149, "ylgnbu": 149, "invert_yaxi": 149, "degrad": 149, "patholog": 149, "accordingli": 149, "hyperparamt": [149, 156], "recogniz": 150, "spell": 150, "classifier__c": [150, 178, 180], "hyperparameter_nam": 150, "preprocessor__copi": 150, "preprocessor__with_mean": 150, "preprocessor__with_std": 150, "classifier__class_weight": 150, "classifier__du": 150, "classifier__fit_intercept": 150, "classifier__intercept_sc": 150, "classifier__l1_ratio": 150, "classifier__max_it": 150, "classifier__multi_class": 150, "classifier__n_job": 150, "classifier__penalti": 150, "classifier__random_st": 150, "classifier__solv": 150, "classifier__tol": 150, "classifier__verbos": 150, "classifier__warm_start": 150, "001": [150, 153], "799": 150, "523512": 151, "084637": 151, "863241": 151, "519701": 151, "086653": 151, "860784": 151, "521355": 151, "085747": 151, "860360": [151, 152], "517670": 151, "087460": 151, "523147": 151, "086819": 151, "866912": 151, "863": 151, "embed": 151, "864195": 151, "000061": 151, "870910": 151, "869743": 151, "000532": 151, "866058": 151, "001515": 151, "concern": 151, "877": 151, "schemat": 151, "green": [151, 156, 162], "rough": 151, "cv_test_scor": 151, "871": 151, "apprehend": 151, "cv_inner": 151, "cv_outer": 151, "greed": 151, "cv_fold": 151, "estimator_in_fold": 151, "vote": 151, "randomized_search_result": [152, 153, 178], "param_classifier__l2_regular": 152, "param_classifier__max_bin": 152, "param_classifier__min_samples_leaf": 152, "split2_test_scor": 152, "split3_test_scor": 152, "split4_test_scor": 152, "540456": 152, "062725": 152, "052069": 152, "002661": 152, "467047": 152, "550075": 152, "classifier__l2_regular": [152, 153], "4670474863": 152, "856558": 152, "862271": 152, "857767": 152, "854491": 152, "856675": 152, "857552": 152, "002586": 152, "110536": 152, "033403": 152, "074142": 152, "002165": 152, "015449": 152, "001146": 152, "0154488709": 152, "758974": 152, "758941": 152, "758947": [152, 153], "000013": [152, 153], "323": [152, 156], "137484": 152, "053150": 152, "092993": 152, "029005": 152, "095093": 152, "004274": 152, "0950934559": 152, "783267": 152, "776413": 152, "779143": 152, "771341": 152, "010357": 152, "311": 152, "935108": 152, "202993": 152, "118105": 152, "023658": 152, "003621": 152, "001305": 152, "164": 152, "0036210968": 152, "255219": 152, "038301": 152, "056048": 152, "016736": 152, "000081": 152, "407382": 152, "97": [152, 162, 184], "1060737427": 152, "495": 152, "452411": 152, "023006": 152, "055563": 152, "000846": 152, "000075": 152, "364373": 152, "4813767874": 152, "858332": 152, "865001": 152, "862681": 152, "860770": 152, "861429": 152, "002258": 152, "133042": 152, "014456": 152, "078186": 152, "002199": 152, "065946": 152, "001222": 152, "0659455480": 152, "497": [152, 153], "911828": 152, "017167": 152, "076563": 152, "005130": 152, "460025": 152, "044408": 152, "4600250010": 152, "839907": 152, "849713": 152, "846847": 152, "846028": 152, "844390": 152, "845377": 152, "003234": 152, "140": 152, "498": 152, "168120": 152, "121819": 152, "061283": 152, "000760": 152, "000068": 152, "287904": 152, "227": 152, "146": [152, 153], "7755366885": 152, "861881": 152, "859951": 152, "861862": 152, "862221": 152, "001623": 152, "499": [152, 153], "823774": 152, "120686": 152, "060351": 152, "014958": 152, "445218": 
152, "4452178932": 152, "764569": 152, "765902": 152, "764947": 152, "765083": 152, "765281": 152, "000535": 152, "319": 152, "l2_regular": [152, 153, 178], "max_bin": [152, 153, 178], "score_bin": 152, "cut": [152, 160], "set_palett": 152, "ylgnbu_r": 152, "set_xscal": 152, "set_yscal": 152, "band": 152, "plotli": [152, 155, 178], "px": [152, 155, 178], "parallel_coordin": [152, 155, 178], "log10": [152, 178], "log2": [152, 178], "color_continuous_scal": [152, 155, 178], "undo": 152, "yellow": [152, 162], "tick": 152, "invert": 152, "consecut": 153, "untract": 153, "situat": 153, "stochast": 153, "loguniform_int": 153, "__init__": 153, "_distribut": 153, "rv": 153, "processor": 153, "1e3": 153, "classifier__min_samples_leaf": 153, "classifier__max_bin": 153, "model_random_search": [153, 155], "histgradientboostingc": 153, "_distn_infrastructur": 153, "rv_continuous_frozen": 153, "0x7fcf3f891a00": 153, "0x7fcf3ebd97f0": 153, "__main__": 153, "0x7fcf3ebe0100": 153, "0x7fcf3ec7dfa0": 153, "0x7fcf3ebd9340": 153, "randomizedsearchcvrandomizedsearchcv": 153, "pprint": 153, "05267903307568315": 153, "10798958387414": 153, "232": 153, "052679": 153, "10799": 153, "870738": 153, "001633": 153, "001174": 153, "02105": 153, "855478": 153, "003486": 153, "000003": 153, "322713": 153, "854741": 153, "003185": 153, "000026": 153, "026509": 153, "853075": 153, "002667": 153, "428258": 153, "272481": 153, "813901": 153, "001062": 153, "906324": 153, "026156": 153, "806448": 153, "001279": 153, "000267": 153, "029741": 153, "183": 153, "799541": 153, "001546": 153, "000007": 153, "00541": 153, "762278": 153, "000332": 153, "000002": 153, "001527": 153, "171": 153, "005833": 153, "001013": 153, "to_csv": 153, "208": 153, "011775": 153, "076653": 153, "871393": 153, "001588": 153, "343": 153, "000404": 153, "244503": 153, "229": 153, "871339": 153, "002741": 153, "994918": 153, "077047": 153, "192": 153, "870793": 153, "001993": 153, "328": 153, "036232": 153, "224702": 153, "236": 153, "869837": 153, "000808": 153, "733808": 153, "036786": 153, "241": 153, "869673": 153, "002417": 153, "000097": 153, "976823": 153, "448205": 153, "253714": 153, "000001": 153, "828574": 153, "091079": 153, "000444": 153, "236325": 153, "344629": 153, "207156": 153, "357": 153, "075318": 153, "241053": 153, "valuabl": 153, "allevi": 153, "best_scor": 154, "best_param": 154, "lr": 154, "mln": 154, "mean_scor": 154, "789": 154, "813": 154, "842": 154, "847": 154, "855": 154, "835": 154, "828": 154, "288": 154, "437": 154, "best_lr": 154, "best_mln": 154, "870": 154, "kneighborsregressor__n_neighbor": 155, "standardscaler__with_mean": 155, "standardscaler__with_std": 155, "welcom": 155, "column_name_map": 155, "param_kneighborsregressor__n_neighbor": 155, "param_standardscaler__with_mean": 155, "param_standardscaler__with_std": 155, "boolean": 155, "column_scal": 155, "687926": 155, "674812": 155, "668778": 155, "648317": 155, "629772": 155, "215": 155, "617295": 155, "464": 155, "567164": 155, "508809": 155, "486503": 155, "103390": 155, "061394": 155, "033122": 155, "017583": 155, "007987": 155, "002900": 155, "238830": 155, "tealros": 155, "kneighbor": 155, "mpl": [156, 162], "tab10_norm": [156, 162], "dbd": 156, "tab10": [156, 162], "norm": [156, 162], "plot_tre": [156, 158, 160, 161, 162], "class_nam": [156, 162], "impur": [156, 162], "inferior": 156, "superior": 156, "settabl": 156, "45mm": 156, "test_penguin_1": 156, "test_penguin_2": 156, "y_proba_class_0": 156, "adelie_proba": 156, "chinstrap_proba": 156, 
"gentoo_proba": 156, "037": 156, "disregard": 156, "moment": 156, "test_penguin_3": 156, "63975155": 156, "32298137": 156, "03726708": 156, "fairli": 156, "palmer": 157, "anatom": 157, "set_size_inch": 157, "superimpos": [159, 163], "data_clf_column": 160, "target_clf_column": 160, "data_clf": 160, "data_reg_column": 160, "target_reg_column": 160, "data_reg": 160, "fit_and_plot_classif": 160, "fit_and_plot_regress": 160, "tree_clf": 160, "tree_reg": 160, "adequ": 160, "asymmetri": 160, "make_blob": 160, "x_1": 160, "y_1": 160, "x_2": 160, "y_2": 160, "min_impurity_decreas": 160, "asymmetr": 160, "priori": 161, "3698": 161, "5032": 161, "tricki": 162, "spectr": 162, "purpl": 162, "xx": 162, "yy": 162, "meshgrid": 162, "xfull": 162, "proba": 162, "sharei": 162, "class_of_interest": 162, "imshow_handl": 162, "imshow": 162, "extent": 162, "colorbar": 162, "cax": 162, "binar": 162, "impress": 162, "target_predicted_linear_regress": 163, "target_predicted_tre": 163, "interpol": 163, "offset": 163, "175": 163, "shortest": 163, "longest": 163, "m3": [164, 177, 179], "m5": [164, 166, 167, 168, 175], "acknowledg": 164, "prune": 170, "children": 171, "increment": 172, "refin": 172, "author": 176, "circular": 178, "budget": [178, 182], "badli": 178, "histgradientbosstingclassifi": 180, "get_paramet": 180, "anim": 184, "param_valu": 184, "powertransform": 184, "all_preprocessor": 184, "cox": 184, "classifier__n_neighbor": 184, "forgot": 184}, "objects": {}, "objtypes": {}, "objnames": {}, "titleterms": {"acknowledg": 0, "figur": 0, "attribut": [0, 3], "The": [1, 6, 74, 77, 104, 105, 106, 107, 141, 157, 164], "adult": [1, 74], "censu": [1, 74], "dataset": [1, 2, 6, 74, 80, 81, 91, 104, 105, 106, 107, 108, 151, 157], "descript": 2, "glossari": 3, "main": [3, 14, 23, 34, 40, 58, 72, 128, 133, 170, 182], "term": 3, "us": [3, 6, 11, 77, 86, 125, 126, 136, 149, 153], "thi": [3, 6], "cours": [3, 36], "api": 3, "classif": [3, 30, 139, 140, 141, 156, 157, 166], "classifi": [3, 141], "cross": [3, 20, 21, 77, 86, 96, 97, 102], "valid": [3, 6, 20, 21, 59, 67, 77, 86, 96, 97, 102, 103], "data": [3, 6, 63, 65, 68, 74, 77, 80, 81, 82, 85, 86, 101, 128, 133], "matrix": [3, 141], "input": 3, "earli": 3, "stop": 3, "estim": [3, 102, 139, 162], "featur": [3, 32, 41, 82, 85, 88, 90, 108, 125, 126, 131, 132, 135, 137, 164], "variabl": [3, 74, 85, 86, 88, 90, 108], "descriptor": 3, "covari": 3, "gener": [3, 103, 160], "perform": [3, 92, 164], "predict": [3, 6, 81, 85, 139, 141, 149, 151, 153, 162, 164], "statist": 3, "hyperparamet": [3, 12, 117, 119, 149, 150, 151, 152, 153, 160, 164, 167, 183], "infer": 3, "learn": [3, 6, 13, 22, 33, 36, 39, 50, 56, 59, 65, 70, 71, 81, 91, 96, 108, 110, 136, 138, 150, 164, 169, 181], "paramet": [3, 132, 135, 160], "meta": 3, "model": [3, 6, 8, 9, 19, 38, 41, 45, 46, 48, 65, 67, 77, 81, 82, 86, 91, 92, 108, 118, 126, 128, 131, 132, 133, 139, 149, 151, 153, 164, 165, 168], "overfit": [3, 57, 62, 103], "predictor": 3, "regress": [3, 31, 131, 132, 136, 137, 138, 144, 157, 161, 175], "regressor": 3, "regular": [3, 45, 48, 132, 135], "penal": 3, "sampl": [3, 95, 96], "instanc": 3, "observ": 3, "supervis": 3, "target": [3, 81], "label": [3, 6], "annot": 3, "test": [3, 54, 80, 81, 102], "set": [3, 150], "train": [3, 54, 80, 81, 102], "fit": [3, 65, 81, 82, 86], "transform": 3, "underfit": [3, 57, 62, 103], "unsupervis": 3, "other": [3, 160], "notebook": [4, 74, 77, 80, 81, 137], "time": [4, 6, 13, 22, 33, 39, 56, 71, 169, 181], "tabl": [5, 164], "content": [5, 164], "conclud": [6, 7, 164], 
"remark": [6, 7, 164], "last": 6, "lesson": [6, 108], "goal": 6, "big": 6, "messag": [6, 131], "mooc": [6, 36], "1": [6, 73, 108], "machin": [6, 50, 164], "pipelin": [6, 70, 85, 88, 90, 91, 110, 164], "2": [6, 60, 108], "adapt": [6, 109], "complex": [6, 110], "3": [6, 108, 184], "specif": [6, 86], "go": [6, 14, 23, 34, 40, 58, 72, 170, 182], "further": [6, 14, 23, 34, 40, 58, 72, 170, 182], "more": [6, 86, 102], "about": [6, 119], "scikit": [6, 36, 65, 70, 81, 91, 110, 136, 138, 150], "we": [6, 91], "ar": 6, "an": [6, 85], "open": 6, "sourc": 6, "commun": 6, "topic": 6, "have": 6, "cover": 6, "studi": 6, "bring": 6, "valu": 6, "bigger": 6, "pictur": 6, "beyond": [6, 140], "evalu": [6, 77, 85, 86, 141, 151, 164], "matter": 6, "small": 6, "part": 6, "problem": [6, 162], "most": 6, "technic": 6, "craft": 6, "all": 6, "how": 6, "choic": [6, 20], "output": 6, "bias": 6, "versu": [6, 52, 55], "causal": 6, "societ": 6, "impact": [6, 135], "intuit": [8, 9, 38, 46, 48, 165, 168], "ensembl": [8, 9, 10, 11, 12, 118, 164], "bag": [8, 110], "boost": [9, 10, 109, 115, 116, 117], "base": [10, 85, 86, 165, 168], "method": [11, 12], "bootstrap": [11, 110], "tune": [12, 117, 132, 149, 151, 153, 164, 177, 179], "modul": [13, 22, 33, 39, 56, 71, 169, 181], "overview": [13, 22, 33, 39, 56, 71, 169, 181], "what": [13, 22, 33, 39, 56, 71, 169, 181], "you": [13, 22, 33, 39, 56, 71, 169, 181], "befor": [13, 22, 33, 39, 56, 71, 169, 181], "get": [13, 22, 33, 39, 56, 71, 150, 169, 181], "start": [13, 22, 33, 39, 56, 71, 169, 181], "object": [13, 22, 33, 39, 56, 71, 169, 181], "schedul": [13, 22, 33, 39, 56, 71, 169, 181], "take": [14, 23, 34, 40, 58, 72, 108, 131, 170, 182], "awai": [14, 23, 34, 40, 58, 72, 108, 131, 170, 182], "wrap": [14, 18, 23, 29, 34, 40, 47, 58, 60, 72, 73, 170, 176, 182, 184], "up": [14, 18, 23, 29, 34, 40, 47, 58, 60, 72, 73, 116, 170, 176, 182, 184], "To": [14, 23, 34, 40, 58, 72, 170, 182], "quiz": [15, 16, 17, 18, 24, 25, 26, 27, 28, 29, 35, 37, 42, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 171, 172, 173, 174, 176, 178, 180, 184], "m6": [15, 16, 17, 111, 112, 113, 114, 120, 121, 122, 123], "01": [15, 24, 42, 49, 61, 64, 75, 76, 93, 94, 98, 99, 111, 120, 124, 127, 128, 133, 147, 154, 158, 162, 171, 180], "question": [15, 16, 17, 18, 24, 25, 26, 27, 28, 29, 35, 37, 42, 43, 44, 47, 49, 51, 53, 60, 61, 64, 66, 69, 73, 171, 172, 173, 174, 176, 178, 180, 184], "02": [16, 25, 43, 53, 66, 78, 83, 112, 121, 129, 134, 142, 145, 148, 155, 159, 163, 172, 178], "03": [17, 26, 44, 51, 69, 79, 84, 113, 122, 130, 135, 143, 146, 173], "6": 18, "compar": [19, 54, 92], "simpl": [19, 92], "baselin": [19, 92, 141], "nest": [21, 97], "m7": [24, 25, 26, 27, 28, 94, 99, 142, 143, 145, 146], "04": [27, 87, 89, 114, 123, 174], "05": [28, 88, 90], "7": 29, "metric": [30, 31, 141], "caveat": 32, "select": [32, 85, 86, 125, 126, 164], "introduct": 36, "present": [36, 108], "welcom": 36, "follow": 36, "prerequisit": [36, 128, 133], "materi": 36, "social": 36, "network": 36, "linear": [38, 41, 45, 46, 48, 108, 131, 132, 135, 136, 137, 138, 139, 140, 164], "non": [41, 101, 131, 135, 137], "engin": [41, 131, 135, 137], "m4": [42, 43, 44, 128, 129, 130, 133, 134, 135], "4": 47, "intro": 49, "introduc": 50, "concept": [50, 164], "m2": [51, 53, 61, 93, 98], "bia": [52, 55], "varianc": [52, 55], "error": [54, 102], "trade": 55, "off": 55, "curv": [59, 96, 103], "tabular": 63, "explor": 63, "m1": [64, 66, 69, 75, 76, 78, 79, 83, 84, 87, 88, 89, 90], "numer": [65, 80, 82, 86, 88, 90], "handl": 68, "categor": [68, 85, 86, 
88, 90], "visual": [70, 74, 91], "jupyt": [70, 91], "first": [74, 81, 91], "look": [74, 119], "our": [74, 85, 149, 151, 153], "load": [74, 80, 81, 91, 128, 133, 151], "column": [74, 86], "inspect": [74, 108], "creat": [74, 91, 160], "decis": [74, 115, 117, 135, 156, 160, 161, 164, 166, 167, 175], "rule": 74, "hand": 74, "recap": [74, 77, 80, 81, 137], "exercis": [75, 76, 78, 79, 83, 84, 87, 88, 89, 90, 93, 94, 98, 99, 111, 112, 113, 114, 120, 121, 122, 123, 124, 127, 128, 129, 130, 133, 134, 135, 142, 143, 145, 146, 147, 148, 154, 155, 158, 159, 162, 163], "solut": [76, 83, 84, 89, 90, 98, 99, 120, 121, 122, 123, 127, 133, 134, 135, 145, 146, 154, 155, 162, 163], "prepar": [77, 82], "need": 77, "work": 80, "entir": 80, "identifi": [80, 85], "split": [80, 81], "panda": 81, "separ": [81, 140], "make": 81, "preprocess": 82, "encod": [85, 88, 90], "type": [85, 86], "strategi": 85, "categori": [85, 88, 90], "ordin": 85, "nomin": 85, "without": [85, 138, 151], "assum": 85, "ani": 85, "order": 85, "choos": 85, "togeth": 86, "dispatch": 86, "processor": 86, "power": 86, "refer": [88, 90], "scale": [88, 90, 108, 132], "integ": [88, 90], "code": [88, 90], "One": [88, 90], "hot": [88, 90], "analysi": [90, 152, 183], "Then": 91, "final": 91, "score": 91, "group": 95, "effect": [96, 132, 160], "size": 96, "summari": [96, 102, 103, 131], "stratif": 100, "i": 101, "d": 101, "framework": 102, "vs": [102, 103], "stabil": 102, "detail": [102, 119], "regard": 102, "cross_valid": 102, "am": 104, "hous": [104, 107], "bike": 105, "ride": 105, "blood": 106, "transfus": 106, "california": 107, "import": [108, 160], "0": 108, "sign": 108, "coeffici": 108, "A": [108, 119], "surpris": 108, "associ": 108, "check": 108, "spars": 108, "lasso": 108, "randomforest": 108, "feature_importances_": 108, "permut": 108, "discuss": 108, "adaboost": 109, "resampl": 110, "aggreg": 110, "gradient": [115, 116, 117], "tree": [115, 117, 156, 160, 161, 164, 165, 166, 167, 168, 175], "gbdt": 115, "speed": 116, "random": [117, 119, 153], "forest": [117, 119], "histogram": 117, "introductori": 118, "exampl": 118, "default": 119, "benefit": 125, "limit": 126, "definit": [128, 133], "logist": 131, "addit": 131, "interact": 131, "multi": [131, 162], "step": 131, "influenc": 135, "c": 135, "boundari": 135, "weight": 135, "probabl": [139, 141, 162], "accuraci": 141, "confus": 141, "deriv": 141, "issu": 141, "class": [141, 162], "imbal": 141, "differ": 141, "threshold": 141, "m3": [147, 148, 154, 155, 178, 180], "grid": 149, "search": [149, 152, 153, 183], "With": 151, "result": [152, 183], "build": 156, "penguin": 157, "m5": [158, 159, 162, 163, 171, 172, 173, 174], "helper": 160, "function": 160, "max_depth": 160, "best": 164, "appendix": 164, "interpret": 164, "5": 176, "autom": 177, "manual": 179}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 56}}) \ No newline at end of file