diff --git a/assets/cc_accuracy_path.png b/assets/cc_accuracy_path.png
new file mode 100644
index 0000000..bdb3d3d
Binary files /dev/null and b/assets/cc_accuracy_path.png differ
diff --git a/assets/dt_cc_path.png b/assets/dt_cc_path.png
new file mode 100644
index 0000000..c900320
Binary files /dev/null and b/assets/dt_cc_path.png differ
diff --git a/assets/dt_confusion_matrix.png b/assets/dt_confusion_matrix.png
index 395949d..188b950 100644
Binary files a/assets/dt_confusion_matrix.png and b/assets/dt_confusion_matrix.png differ
diff --git a/assets/rf_confusion_mat.png b/assets/rf_confusion_mat.png
index 395949d..e3a88a5 100644
Binary files a/assets/rf_confusion_mat.png and b/assets/rf_confusion_mat.png differ
diff --git a/assets/xgboost_model1_confusion_matrix.png b/assets/xgboost_model1_confusion_matrix.png
new file mode 100644
index 0000000..3722e82
Binary files /dev/null and b/assets/xgboost_model1_confusion_matrix.png differ
diff --git a/assets/xgboost_model2_confusion_matrix.png b/assets/xgboost_model2_confusion_matrix.png
new file mode 100644
index 0000000..0e65a1c
Binary files /dev/null and b/assets/xgboost_model2_confusion_matrix.png differ
diff --git a/assets/xgboost_model3_confusion_matrix.png b/assets/xgboost_model3_confusion_matrix.png
new file mode 100644
index 0000000..ce2b5ab
Binary files /dev/null and b/assets/xgboost_model3_confusion_matrix.png differ
diff --git a/src/musicNet/main.py b/src/musicNet/main.py
index 235acc4..34d6e2c 100644
--- a/src/musicNet/main.py
+++ b/src/musicNet/main.py
@@ -6,6 +6,7 @@
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import confusion_matrix
from sklearn.metrics import ConfusionMatrixDisplay
+from sklearn.metrics import f1_score
import xgboost as xgb
path = 'src/musicNet/processed_data'
@@ -28,18 +29,58 @@
labels = ['Bach', 'Beethoven', 'Brahms', 'Mozart', 'Schubert']
dt_clf = DecisionTreeClassifier(random_state=42)
-
dt_clf.fit(X_train, y_train)
+y_pred = dt_clf.predict(X_test)
training_accuracy = dt_clf.score(X_train, y_train)
accuracy = dt_clf.score(X_test, y_test)
-print(training_accuracy)
-print(accuracy)
+print("Decision Tree Classifier")
+print(f"Training Accuracy: {training_accuracy}")
+print(f"Test Accuracy: {accuracy}")
+print(f"Test F1-Score{f1_score(y_test, y_pred, average='weighted')}\n")
+print(dt_clf.get_depth())
+
+path = dt_clf.cost_complexity_pruning_path(X_train, y_train)
+ccp_alphas, impurities = path.ccp_alphas, path.impurities
+fig, ax = plt.subplots()
+ax.plot(ccp_alphas[:-1], impurities[:-1], marker="o", drawstyle="steps-post")
+ax.set_xlabel("Effective alpha")
+ax.set_ylabel("Total impurity of leaves")
+ax.set_title("Total Impurity vs Effective alpha for training set")
+plt.show()
+plt.close()
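+# Each ccp_alpha is the effective alpha at which one weakest-link subtree is
+# pruned away; total leaf impurity rises monotonically with alpha, and the
+# largest alpha collapses the tree to its root (hence the plot drops the last point).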
+
+dt_clfs1 = []
+for ccp_alpha in ccp_alphas:
+ dt_clf = DecisionTreeClassifier(random_state=42, ccp_alpha=ccp_alpha)
+ dt_clf.fit(X_train, y_train)
+ dt_clfs1.append(dt_clf)
+print(
+ "Number of nodes in the last tree is: {} with ccp_alpha: {}".format(
+ dt_clfs1[-1].tree_.node_count, ccp_alphas[-1]
+ )
+)
+
+train_scores1 = [dt_clf.score(X_train, y_train) for dt_clf in dt_clfs1]
+test_scores1 = [dt_clf.score(X_test, y_test) for dt_clf in dt_clfs1]
+
+fig, ax = plt.subplots()
+ax.set_xlabel("Alpha")
+ax.set_ylabel("Accuracy")
+ax.set_title("Accuracy vs Alpha for training and testing sets")
+ax.plot(ccp_alphas, train_scores1, marker="o", label="train", drawstyle="steps-post")
+ax.plot(ccp_alphas, test_scores1, marker="o", label="test", drawstyle="steps-post")
+ax.legend()
+plt.show()
+plt.close()
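+# Illustrative sketch (not used below): one could refit at the alpha that
+# maximizes test accuracy instead of refitting the unpruned tree, e.g.
+#   best_alpha = ccp_alphas[test_scores1.index(max(test_scores1))]
+#   dt_clf = DecisionTreeClassifier(random_state=42, ccp_alpha=best_alpha).fit(X_train, y_train)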
+dt_clf = DecisionTreeClassifier(random_state=42)
+dt_clf.fit(X_train, y_train)
ypred = dt_clf.predict(X_test)
confusion_mat = confusion_matrix(y_test, ypred)
conf_mat_display = ConfusionMatrixDisplay(confusion_matrix=confusion_mat, display_labels=labels)
conf_mat_display.plot()
+plt.title("Decision Tree Classifier - Confusion Matrix")
plt.show()
plt.close()
@@ -49,82 +90,115 @@
rf_clf.fit(X_train, y_train)
training_accuracy = rf_clf.score(X_train, y_train)
accuracy = rf_clf.score(X_test, y_test)
-print(training_accuracy)
-print(accuracy)
-
-ypred = dt_clf.predict(X_test)
+y_pred = rf_clf.predict(X_test)
+print("Random Forest Classifier")
+print(f"Training Accuracy: {training_accuracy}")
+print(f"Test Accuracy: {accuracy}")
+print(f"Test F1-Score{f1_score(y_test, y_pred, average='weighted')}")
+# Deepest individual tree across the forest's estimators
+max_depth = max(tree.get_depth() for tree in rf_clf.estimators_)
+print(f"Maximum depth of Random Forest: {max_depth}\n")
-confusion_mat = confusion_matrix(y_test, ypred)
+confusion_mat = confusion_matrix(y_test, y_pred)
conf_mat_display = ConfusionMatrixDisplay(confusion_matrix=confusion_mat, display_labels=labels)
conf_mat_display.plot()
+plt.title("Random Forest Classifier - Confusion Matrix")
plt.show()
plt.close()
-bst = xgb.XGBClassifier(n_estimators=20, max_depth=15, learning_rate=0.8, objective='multi:softmax')
+# ------------- XGBoost ----------------
+# Training model 1
+
+bst = xgb.XGBClassifier(n_estimators=20, max_depth=15, learning_rate=0.8, objective='multi:softmax', verbosity=2, subsample=0.25)
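+# subsample=0.25 above means each boosting round sees ~25% of the training rows,
+# which helps curb overfitting; verbosity=2 makes the library log its progress.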
# fit model
-bst.fit(X_train, y_train)
+bst.fit(X_train, y_train, verbose=True)
# make predictions
-preds = bst.predict(X_test)
training_accuracy = bst.score(X_train, y_train)
test_accuracy = bst.score(X_test, y_test)
-print(training_accuracy)
-print(test_accuracy)
-
ypred = bst.predict(X_test)
+print("XGBoost Classifier - 20 estimators, max_depth of 15, learning rate of 0.8, softmax objective function.")
+print(f"Training Accuracy: {training_accuracy}")
+print(f"Test Accuracy: {accuracy}")
+print(f"Test F1-Score{f1_score(y_test, y_pred, average='weighted')}\n")
confusion_mat = confusion_matrix(y_test, ypred)
conf_mat_display = ConfusionMatrixDisplay(confusion_matrix=confusion_mat, display_labels=labels)
conf_mat_display.plot()
+plt.title("XGBoost Classifier - Model 1 - Confusion Matrix")
+plt.show()
+plt.close()
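+# Note: with objective='multi:softmax', predict() returns class indices directly;
+# 'multi:softprob' would instead return per-class probabilities of shape (n_samples, num_class).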
+# Model 1 again via the native API, so xgb.train prints a per-round table of training results
+
+dtrain = xgb.DMatrix(X_train, label=y_train)
+dtest = xgb.DMatrix(X_test, label=y_test)
-print(training_accuracy)
-print(test_accuracy)
+param = {'max_depth': 15, 'eta': 0.8, 'objective': 'multi:softmax'}
+param['nthread'] = 4
+param['num_class'] = 5
+param['subsample'] = 0.25
+param['eval_metric'] = ['auc', 'merror']
+evallist = [(dtrain, 'train'), (dtest, 'eval')]
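+# Passing evals to xgb.train prints AUC and merror for both sets after every
+# round; this is the per-round table of training results referred to above.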
+
+num_round = 20
+bst = xgb.train(param, dtrain, num_round, evals=evallist, early_stopping_rounds=20)
+bst.save_model('src\\musicNet\\saved_models\\bt\\austin1.model')
+bst.dump_model('src\\musicNet\\saved_models\\bt\\dump.raw.txt')
+
+ypred = bst.predict(dtest)
+confusion_mat = confusion_matrix(y_test, ypred)
+conf_mat_display = ConfusionMatrixDisplay(confusion_matrix=confusion_mat, display_labels=labels)
+conf_mat_display.plot()
+plt.title("XGBoost Classifier - Model 1 - Confusion Matrix")
+plt.show()
+plt.close()
+
+# Training model 2
dtrain = xgb.DMatrix(X_train, label=y_train)
dtest = xgb.DMatrix(X_test, label=y_test)
dtrain.save_binary('src/musicNet/data/xgboost/train.buffer')
-param = {'max_depth': 3, 'eta': 1, 'objective': 'multi:softmax'}
+param = {'max_depth': 10, 'eta': 1, 'objective': 'multi:softmax'}
param['nthread'] = 4
-param['eval_metric'] = 'auc'
+param['subsample'] = 0.25
param['num_class'] = 5
-param['eval_metric'] = ['auc', 'ams@0']
-
+param['eval_metric'] = ['auc', 'merror']
evallist = [(dtrain, 'train'), (dtest, 'eval')]
num_round = 10000
-bst = xgb.train(param, dtrain, num_round, evals=evallist, early_stopping_rounds=10)
-
+bst = xgb.train(param, dtrain, num_round, evals=evallist, early_stopping_rounds=100)
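+# Early stopping halts training once the last metric ('merror') on the last
+# eval set has not improved for 100 rounds; bst.best_iteration then records
+# the best round.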
bst.save_model('src\\musicNet\\saved_models\\bt\\austin1.model')
-# dump model
bst.dump_model('src\\musicNet\\saved_models\\bt\\dump.raw.txt')
-# dump model with feature map
-#bst.dump_model('src/musicNet/saved_models/bt/dump.raw.txt', 'src/musicNet/saved_models/bt/featmap.txt')
-#xgb.plot_importance(bst)
-#xgb.plot_tree(bst, num_trees=2)
-#xgb.to_graphviz(bst, num_trees=2)
-ypred = bst.predict(dtest)
+ypred = bst.predict(dtest)
confusion_mat = confusion_matrix(y_test, ypred)
conf_mat_display = ConfusionMatrixDisplay(confusion_matrix=confusion_mat, display_labels=labels)
conf_mat_display.plot()
-
+plt.title("XGBoost Classifier - Model 2 - Confusion Matrix")
plt.show()
+plt.close()
+
+# Repackage model 2 so we can make actual predictions
xgb_clf = xgb.XGBClassifier(**param)
-xgb_clf._Boster = bst
+xgb_clf._Booster = bst
-xgb_clf.fit(X_train, y_train)
+xgb_clf.fit(X_train, y_train, verbose=True)
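+# Caveat: fit() above retrains from scratch, so the _Booster assignment has no
+# effect on what follows. A sketch of reusing the trained booster instead
+# (assumes a compatible xgboost version and the model file saved above):
+#   xgb_clf = xgb.XGBClassifier()
+#   xgb_clf.load_model('src/musicNet/saved_models/bt/austin1.model')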
# make predictions
-preds = xgb_clf.predict(X_test)
training_accuracy = xgb_clf.score(X_train, y_train)
test_accuracy = xgb_clf.score(X_test, y_test)
-print("final bt")
-print(training_accuracy)
-print(test_accuracy)
-
ypred = xgb_clf.predict(X_test)
+print("XGBoost Classifier - 1000 estimators, max_depth of 15, learning rate of 0.8, softmax objective function.")
+print(f"Training Accuracy: {training_accuracy}")
+print(f"Test Accuracy: {accuracy}")
+print(f"Test F1-Score{f1_score(y_test, y_pred, average='weighted')}\n")
confusion_mat = confusion_matrix(y_test, ypred)
conf_mat_display = ConfusionMatrixDisplay(confusion_matrix=confusion_mat, display_labels=labels)
-conf_mat_display.plot()
\ No newline at end of file
+conf_mat_display.plot()
+plt.title("XGBoost Classifier - Model 3 - Confusion Matrix")
+plt.show()
+plt.close()
\ No newline at end of file
diff --git a/src/musicNet/saved_models/bt/austin1.model b/src/musicNet/saved_models/bt/austin1.model
index ca61554..a37975d 100644
Binary files a/src/musicNet/saved_models/bt/austin1.model and b/src/musicNet/saved_models/bt/austin1.model differ
diff --git a/src/musicNet/saved_models/bt/dump.raw.txt b/src/musicNet/saved_models/bt/dump.raw.txt
index 844250f..8045e55 100644
--- a/src/musicNet/saved_models/bt/dump.raw.txt
+++ b/src/musicNet/saved_models/bt/dump.raw.txt
@@ -1,498 +1,2176 @@
booster[0]:
-0:[f87<0.00254154997] yes=1,no=2,missing=2
- 1:[f75<0.944010079] yes=3,no=4,missing=4
- 3:[f60<5.05201197] yes=7,no=8,missing=8
- 7:leaf=-0.175718844
- 8:leaf=1.72839499
- 4:[f167<0.0698904023] yes=9,no=10,missing=10
- 9:leaf=2.02702689
- 10:leaf=0.0877192989
- 2:[f76<10.0500002] yes=5,no=6,missing=6
- 5:[f79<7.85804462] yes=11,no=12,missing=12
- 11:leaf=-0.597920239
- 12:leaf=0.0877192989
- 6:leaf=0.526315749
+0:[f73<4.3460207] yes=1,no=2,missing=2
+ 1:[f181<0.430878669] yes=3,no=4,missing=4
+ 3:leaf=-0.564202309
+ 4:[f61<0.481400996] yes=5,no=6,missing=6
+ 5:leaf=0.964912236
+ 6:leaf=-0.350877196
+ 2:leaf=1.41975307
booster[1]:
-0:[f185<0.00162497326] yes=1,no=2,missing=2
- 1:[f703<0.0268955752] yes=3,no=4,missing=4
- 3:[f65<0.0027700041] yes=7,no=8,missing=8
- 7:leaf=0.112359546
- 8:leaf=-0.575079858
- 4:leaf=1.41975307
- 2:[f443<0.489249408] yes=5,no=6,missing=6
- 5:[f297<0.246753246] yes=9,no=10,missing=10
- 9:leaf=1.78646922
- 10:leaf=0.172413781
- 6:[f70<0.115673803] yes=11,no=12,missing=12
- 11:leaf=0.526315749
- 12:leaf=-0.532544374
+0:[f316<1.72227454] yes=1,no=2,missing=2
+ 1:[f195<0.00720387883] yes=3,no=4,missing=4
+ 3:[f54<0.00417737011] yes=5,no=6,missing=6
+ 5:leaf=0.0877192989
+ 6:leaf=-0.350877196
+ 4:[f429<0.969717503] yes=7,no=8,missing=8
+ 7:leaf=1.89655161
+ 8:leaf=0.185185179
+ 2:leaf=-0.432098746
booster[2]:
-0:[f445<0.543396235] yes=1,no=2,missing=2
- 1:leaf=-0.609756052
- 2:[f60<0.00182459841] yes=3,no=4,missing=4
- 3:leaf=-0.384615362
- 4:leaf=1.86131382
+0:[f445<0.429720819] yes=1,no=2,missing=2
+ 1:leaf=-0.572390556
+ 2:[f55<0.0824693665] yes=3,no=4,missing=4
+ 3:leaf=0.0877192989
+ 4:leaf=1.64383554
booster[3]:
-0:[f438<0.137525409] yes=1,no=2,missing=2
- 1:[f81<6.99873781] yes=3,no=4,missing=4
- 3:leaf=-0.605875134
- 4:leaf=0.0877192989
- 2:[f73<1.09797299] yes=5,no=6,missing=6
- 5:[f91<0.212570623] yes=7,no=8,missing=8
- 7:leaf=1.43790841
- 8:leaf=-0.384615362
- 6:[f57<0.491558105] yes=9,no=10,missing=10
- 9:leaf=-0.522875786
- 10:leaf=0.0877192989
+0:[f450<0.334011048] yes=1,no=2,missing=2
+ 1:leaf=-0.567765534
+ 2:leaf=0.526315749
booster[4]:
-0:[f94<0.0323196128] yes=1,no=2,missing=2
- 1:[f289<0.254577011] yes=3,no=4,missing=4
- 3:[f569<3.75950408] yes=7,no=8,missing=8
- 7:leaf=-0.529100478
- 8:leaf=0.526315749
- 4:[f57<0.746287286] yes=9,no=10,missing=10
- 9:leaf=-0.350877196
- 10:leaf=1.40350878
- 2:[f59<0.107758619] yes=5,no=6,missing=6
- 5:leaf=0.0877192989
- 6:leaf=2.01550388
+0:[f193<0.158469751] yes=1,no=2,missing=2
+ 1:[f85<0.109970674] yes=3,no=4,missing=4
+ 3:leaf=-0.123456784
+ 4:leaf=1.51685393
+ 2:leaf=-0.552995384
booster[5]:
0:[f87<0.00254154997] yes=1,no=2,missing=2
- 1:[f328<0.00590079278] yes=3,no=4,missing=4
- 3:[f588<0.0208916739] yes=7,no=8,missing=8
- 7:leaf=0.663875401
- 8:leaf=-0.326518506
- 4:[f312<1.15380073] yes=9,no=10,missing=10
- 9:leaf=-0.432485551
- 10:leaf=-0.0363297164
- 2:[f201<2.24534035] yes=5,no=6,missing=6
- 5:[f83<4.16011667] yes=11,no=12,missing=12
- 11:leaf=-0.50160259
- 12:leaf=0.126775458
- 6:leaf=0.45542115
+ 1:[f324<0.00976640545] yes=3,no=4,missing=4
+ 3:[f433<0.135974303] yes=5,no=6,missing=6
+ 5:leaf=1.94483006
+ 6:leaf=0.250039965
+ 4:leaf=0.0348691829
+ 2:leaf=-0.506277204
booster[6]:
-0:[f193<0.00294380006] yes=1,no=2,missing=2
- 1:[f64<4.15852976] yes=3,no=4,missing=4
- 3:[f428<0.0186915882] yes=7,no=8,missing=8
- 7:leaf=-0.621671736
- 8:leaf=0.282755256
- 4:leaf=0.325133085
- 2:[f72<1.46342051] yes=5,no=6,missing=6
- 5:[f173<0.349695027] yes=9,no=10,missing=10
- 9:leaf=0.767218888
- 10:leaf=-0.414930969
- 6:[f187<1.60060978] yes=11,no=12,missing=12
- 11:leaf=-0.376081884
- 12:leaf=0.579494476
+0:[f190<3.69198418] yes=1,no=2,missing=2
+ 1:[f313<0.00554144522] yes=3,no=4,missing=4
+ 3:[f89<0.00474229362] yes=5,no=6,missing=6
+ 5:leaf=0.902582884
+ 6:leaf=-0.360640496
+ 4:[f320<1.43377554] yes=7,no=8,missing=8
+ 7:leaf=-0.831936359
+ 8:[f189<0.299121469] yes=9,no=10,missing=10
+ 9:leaf=-0.0618941486
+ 10:leaf=0.079280369
+ 2:leaf=0.763889849
booster[7]:
-0:[f455<0.146782488] yes=1,no=2,missing=2
- 1:[f423<1.42857146] yes=3,no=4,missing=4
- 3:leaf=-0.550613344
- 4:leaf=-0.00177071837
- 2:[f69<0.160256416] yes=5,no=6,missing=6
- 5:leaf=-0.064219445
- 6:leaf=0.576930344
+0:[f445<0.343375236] yes=1,no=2,missing=2
+ 1:leaf=-0.485949427
+ 2:leaf=0.857486606
booster[8]:
-0:[f446<0.0821859464] yes=1,no=2,missing=2
- 1:[f325<0.791208804] yes=3,no=4,missing=4
- 3:leaf=-0.553426743
- 4:leaf=0.165459722
- 2:[f75<1.47030771] yes=5,no=6,missing=6
- 5:[f211<0.0268793423] yes=7,no=8,missing=8
- 7:leaf=0.660314083
- 8:leaf=-0.27326411
- 6:leaf=-0.393650621
+0:[f447<0.301598847] yes=1,no=2,missing=2
+ 1:[f67<0.0073910295] yes=3,no=4,missing=4
+ 3:leaf=0.155594081
+ 4:leaf=-0.509337068
+ 2:leaf=0.648961008
booster[9]:
-0:[f91<0.00899621192] yes=1,no=2,missing=2
- 1:[f574<1.54189491] yes=3,no=4,missing=4
- 3:[f66<6.14457083] yes=7,no=8,missing=8
- 7:leaf=-0.514576375
- 8:leaf=0.0317992792
- 4:leaf=0.664710224
- 2:[f199<0.709219873] yes=5,no=6,missing=6
- 5:[f72<5.04865265] yes=9,no=10,missing=10
- 9:leaf=0.884503901
- 10:leaf=0.061729975
- 6:[f188<3.79882383] yes=11,no=12,missing=12
- 11:leaf=-0.43122673
- 12:leaf=-0.0310954824
+0:[f90<0.0146927638] yes=1,no=2,missing=2
+ 1:[f422<0.00939098932] yes=3,no=4,missing=4
+ 3:leaf=-0.49747476
+ 4:leaf=0.154166833
+ 2:[f435<0.0109614898] yes=5,no=6,missing=6
+ 5:leaf=0.800356388
+ 6:leaf=-0.0274251774
booster[10]:
-0:[f73<4.3460207] yes=1,no=2,missing=2
- 1:[f324<0.00976640545] yes=3,no=4,missing=4
- 3:[f73<1.45155513] yes=7,no=8,missing=8
- 7:leaf=0.232409865
- 8:leaf=-0.472100168
- 4:leaf=-0.457112402
- 2:[f183<0.571247041] yes=5,no=6,missing=6
- 5:leaf=0.563556969
- 6:leaf=0.0376733504
+0:[f441<1.1542033] yes=1,no=2,missing=2
+ 1:[f59<0.918547571] yes=3,no=4,missing=4
+ 3:leaf=-0.630332232
+ 4:leaf=0.0164065026
+ 2:[f185<0.00162497326] yes=5,no=6,missing=6
+ 5:leaf=0.908046603
+ 6:leaf=-0.0725948811
booster[11]:
-0:[f212<0.0654808432] yes=1,no=2,missing=2
- 1:[f183<5.19623041] yes=3,no=4,missing=4
- 3:[f422<0.548450589] yes=7,no=8,missing=8
- 7:leaf=-0.232411027
- 8:leaf=0.35087949
- 4:leaf=0.56779182
- 2:[f437<0.980098963] yes=5,no=6,missing=6
- 5:leaf=0.592321455
- 6:leaf=-0.146391183
+0:[f81<0.702127635] yes=1,no=2,missing=2
+ 1:[f50<3.44616318] yes=3,no=4,missing=4
+ 3:[f83<0.239606127] yes=7,no=8,missing=8
+ 7:leaf=0.779112041
+ 8:leaf=-0.0690309778
+ 4:leaf=-0.336453766
+ 2:[f88<0.945022285] yes=5,no=6,missing=6
+ 5:leaf=-0.714582622
+ 6:leaf=0.184592083
booster[12]:
-0:[f455<0.146782488] yes=1,no=2,missing=2
- 1:[f58<2.60423064] yes=3,no=4,missing=4
- 3:leaf=-0.508303046
- 4:leaf=-0.063859202
- 2:[f57<0.0226346217] yes=5,no=6,missing=6
- 5:leaf=0.0336143672
- 6:leaf=0.44030109
+0:[f449<0.827547193] yes=1,no=2,missing=2
+ 1:leaf=-0.513014853
+ 2:leaf=0.511008024
booster[13]:
-0:[f328<0.152615771] yes=1,no=2,missing=2
- 1:[f453<2.72904444] yes=3,no=4,missing=4
- 3:leaf=-0.512104332
- 4:leaf=0.382151335
- 2:[f320<2.19173932] yes=5,no=6,missing=6
- 5:[f164<0.446741015] yes=7,no=8,missing=8
- 7:leaf=-0.399359494
- 8:leaf=0.359576225
- 6:[f211<0.0070309611] yes=9,no=10,missing=10
- 9:leaf=0.703581929
- 10:leaf=0.116129138
+0:[f434<1.33528721] yes=1,no=2,missing=2
+ 1:leaf=-0.49301675
+ 2:leaf=0.686393976
booster[14]:
-0:[f202<0.328565896] yes=1,no=2,missing=2
- 1:[f85<0.203416556] yes=3,no=4,missing=4
- 3:[f422<0.0573710427] yes=7,no=8,missing=8
- 7:leaf=-0.372644097
- 8:leaf=0.138698533
- 4:[f72<5.04865265] yes=9,no=10,missing=10
- 9:leaf=0.639881134
- 10:leaf=-0.030372601
- 2:[f93<0.456261516] yes=5,no=6,missing=6
- 5:leaf=-0.451451212
- 6:leaf=0.0924881548
+0:[f199<0.000769724196] yes=1,no=2,missing=2
+ 1:[f317<0.000554144557] yes=3,no=4,missing=4
+ 3:leaf=-0.297527492
+ 4:leaf=0.804812253
+ 2:leaf=-0.668853223
booster[15]:
-0:[f197<1.47799802] yes=1,no=2,missing=2
- 1:[f197<0.00316442153] yes=3,no=4,missing=4
- 3:[f436<0.00181481615] yes=7,no=8,missing=8
- 7:leaf=0.256487668
- 8:leaf=-0.382459372
- 4:[f192<0.12212304] yes=9,no=10,missing=10
- 9:leaf=-0.0104340464
- 10:leaf=-0.435325056
- 2:[f86<0.230991736] yes=5,no=6,missing=6
- 5:[f321<0.00474229362] yes=11,no=12,missing=12
- 11:leaf=0.631554246
- 12:leaf=0.14194043
- 6:[f74<3.25640583] yes=13,no=14,missing=14
- 13:leaf=-0.297789067
- 14:leaf=0.00607704651
+0:[f87<0.00254154997] yes=1,no=2,missing=2
+ 1:[f72<0.9983657] yes=3,no=4,missing=4
+ 3:[f316<0.215517238] yes=5,no=6,missing=6
+ 5:leaf=0.111601718
+ 6:leaf=-0.351899952
+ 4:leaf=0.755083025
+ 2:leaf=-0.424322933
booster[16]:
-0:[f313<1.21230185] yes=1,no=2,missing=2
- 1:[f573<0.25] yes=3,no=4,missing=4
- 3:[f82<0.100772589] yes=7,no=8,missing=8
- 7:leaf=0.492248833
- 8:leaf=0.050601773
- 4:leaf=-0.354725808
- 2:[f214<0.169751868] yes=5,no=6,missing=6
- 5:[f423<0.0929095373] yes=9,no=10,missing=10
- 9:leaf=-0.511735082
- 10:leaf=0.0315202475
- 6:leaf=0.311563313
+0:[f164<0.0750050396] yes=1,no=2,missing=2
+ 1:[f77<1.12375379] yes=3,no=4,missing=4
+ 3:[f186<0.0263307169] yes=5,no=6,missing=6
+ 5:leaf=-0.0296916962
+ 6:leaf=0.356004298
+ 4:[f89<0.114865929] yes=7,no=8,missing=8
+ 7:leaf=-0.918346643
+ 8:leaf=-0.261185735
+ 2:leaf=0.557905018
booster[17]:
-0:[f445<0.543396235] yes=1,no=2,missing=2
- 1:leaf=-0.424833
- 2:[f91<0.0246083625] yes=3,no=4,missing=4
- 3:leaf=-0.106609948
- 4:leaf=0.375838935
+0:[f454<0.127558082] yes=1,no=2,missing=2
+ 1:leaf=-0.434622735
+ 2:leaf=0.352711916
booster[18]:
-0:[f448<0.072580643] yes=1,no=2,missing=2
- 1:[f320<2.11398315] yes=3,no=4,missing=4
- 3:leaf=-0.434023142
- 4:leaf=0.04388294
- 2:[f305<0.00299071311] yes=5,no=6,missing=6
- 5:[f186<0.00283330702] yes=7,no=8,missing=8
- 7:leaf=0.121790521
- 8:leaf=0.463174105
- 6:leaf=-0.229198262
+0:[f67<1.30293417] yes=1,no=2,missing=2
+ 1:[f438<0.209724054] yes=3,no=4,missing=4
+ 3:leaf=0.0982856527
+ 4:leaf=1.23159897
+ 2:leaf=-0.416783512
booster[19]:
-0:[f305<0.632365823] yes=1,no=2,missing=2
- 1:[f596<0.282752126] yes=3,no=4,missing=4
- 3:[f42<0.0295734536] yes=7,no=8,missing=8
- 7:leaf=-0.384347051
- 8:leaf=0.141131997
- 4:leaf=0.383073688
- 2:[f203<0.071676299] yes=5,no=6,missing=6
- 5:leaf=0.485984713
- 6:leaf=-0.0532288775
+0:[f304<0.786907792] yes=1,no=2,missing=2
+ 1:[f91<0.145982504] yes=3,no=4,missing=4
+ 3:leaf=-0.436879277
+ 4:leaf=0.17804347
+ 2:leaf=0.951328695
booster[20]:
-0:[f85<0.000542578113] yes=1,no=2,missing=2
- 1:[f78<0.0318066962] yes=3,no=4,missing=4
- 3:[f60<4.13189507] yes=7,no=8,missing=8
- 7:leaf=-0.276262879
- 8:leaf=0.285161316
- 4:[f82<0.057781104] yes=9,no=10,missing=10
- 9:leaf=0.0557740256
- 10:leaf=0.396938324
- 2:[f81<5.1960516] yes=5,no=6,missing=6
- 5:leaf=-0.345226794
- 6:leaf=0.223902807
+0:[f177<0.00772585254] yes=1,no=2,missing=2
+ 1:[f179<0.151009291] yes=3,no=4,missing=4
+ 3:[f78<0.466097236] yes=5,no=6,missing=6
+ 5:leaf=-0.295145184
+ 6:leaf=0.442563623
+ 4:leaf=0.862622261
+ 2:leaf=-0.3486619
booster[21]:
-0:[f187<1.27792811] yes=1,no=2,missing=2
- 1:[f62<0.219178081] yes=3,no=4,missing=4
- 3:[f196<1.5606786] yes=7,no=8,missing=8
- 7:leaf=0.296430796
- 8:leaf=-0.286030948
- 4:[f212<0.300674617] yes=9,no=10,missing=10
- 9:leaf=-0.347832948
- 10:leaf=0.199596107
- 2:[f436<0.245750293] yes=5,no=6,missing=6
- 5:leaf=0.367717385
- 6:leaf=-0.0417781174
+0:[f185<0.00162497326] yes=1,no=2,missing=2
+ 1:leaf=-0.467426181
+ 2:[f64<0.56934166] yes=3,no=4,missing=4
+ 3:leaf=0.609626055
+ 4:leaf=0.020636661
booster[22]:
-0:[f445<0.543396235] yes=1,no=2,missing=2
- 1:leaf=-0.366818398
- 2:[f195<1.82498491] yes=3,no=4,missing=4
- 3:leaf=-0.0743194595
- 4:leaf=0.324283719
+0:[f66<0.74448061] yes=1,no=2,missing=2
+ 1:leaf=-0.51560694
+ 2:leaf=-0.0637221709
booster[23]:
-0:[f433<0.135974303] yes=1,no=2,missing=2
- 1:[f173<0.231295526] yes=3,no=4,missing=4
- 3:leaf=-0.390636533
- 4:leaf=0.127287298
- 2:[f317<0.891269624] yes=5,no=6,missing=6
- 5:leaf=0.328657329
- 6:leaf=-0.117119104
+0:[f453<0.0319375433] yes=1,no=2,missing=2
+ 1:leaf=-0.558456361
+ 2:leaf=0.497382849
booster[24]:
-0:[f91<0.00899621192] yes=1,no=2,missing=2
- 1:[f586<2.59809113] yes=3,no=4,missing=4
- 3:leaf=-0.349017948
- 4:leaf=0.193898857
- 2:[f57<0.198313609] yes=5,no=6,missing=6
- 5:leaf=-0.104912318
- 6:[f91<0.170440629] yes=7,no=8,missing=8
- 7:leaf=0.40668267
- 8:leaf=0.107181005
+0:[f183<0.0793883651] yes=1,no=2,missing=2
+ 1:leaf=0.32784161
+ 2:leaf=-0.321850538
booster[25]:
-0:[f73<4.3460207] yes=1,no=2,missing=2
- 1:[f72<3.67264318] yes=3,no=4,missing=4
- 3:[f299<0.769990146] yes=5,no=6,missing=6
- 5:leaf=-0.304331928
- 6:leaf=0.129486054
- 4:[f60<0.314127684] yes=7,no=8,missing=8
- 7:leaf=0.272618562
- 8:leaf=-0.0508474633
- 2:leaf=0.238640293
+0:[f73<3.61614656] yes=1,no=2,missing=2
+ 1:leaf=-0.318015665
+ 2:leaf=0.57998395
booster[26]:
-0:[f186<1.78944111] yes=1,no=2,missing=2
- 1:[f60<0.00182459841] yes=3,no=4,missing=4
- 3:[f325<0.173522875] yes=7,no=8,missing=8
- 7:leaf=-0.30531171
- 8:leaf=-0.00682514347
- 4:[f326<0.326487631] yes=9,no=10,missing=10
- 9:leaf=0.189298183
- 10:leaf=-0.274427563
- 2:[f195<0.779556096] yes=5,no=6,missing=6
- 5:leaf=0.0102596488
- 6:leaf=0.301863611
+0:[f70<0.483377516] yes=1,no=2,missing=2
+ 1:leaf=0.701207519
+ 2:leaf=-0.347316891
booster[27]:
-0:[f455<0.146782488] yes=1,no=2,missing=2
- 1:leaf=-0.273433208
- 2:leaf=0.209762409
+0:[f433<0.0642609] yes=1,no=2,missing=2
+ 1:leaf=0.00955190416
+ 2:leaf=-0.299489617
booster[28]:
-0:[f436<0.0163068101] yes=1,no=2,missing=2
- 1:leaf=-0.297996283
- 2:[f194<0.417046517] yes=3,no=4,missing=4
- 3:leaf=0.245405763
- 4:leaf=-0.107895374
+0:[f309<0.215517238] yes=1,no=2,missing=2
+ 1:leaf=0.399858296
+ 2:leaf=-0.544919252
booster[29]:
-0:[f196<0.370752245] yes=1,no=2,missing=2
- 1:[f315<0.00110828911] yes=3,no=4,missing=4
- 3:leaf=-0.151524931
- 4:leaf=0.320254683
- 2:leaf=-0.24900423
+0:[f419<0.0121615045] yes=1,no=2,missing=2
+ 1:leaf=-0.412690103
+ 2:leaf=0.559529245
booster[30]:
-0:[f87<0.00254154997] yes=1,no=2,missing=2
- 1:[f309<0.773809552] yes=3,no=4,missing=4
- 3:[f65<0.0027700041] yes=5,no=6,missing=6
- 5:leaf=0.0143320039
- 6:leaf=0.2649391
- 4:leaf=-0.0937432349
- 2:leaf=-0.164835647
+0:[f86<0.00592786726] yes=1,no=2,missing=2
+ 1:leaf=0.261431664
+ 2:leaf=-0.646429718
booster[31]:
-0:[f193<0.00294380006] yes=1,no=2,missing=2
- 1:[f60<0.00182459841] yes=3,no=4,missing=4
- 3:leaf=-0.232425064
- 4:leaf=-0.055137869
- 2:[f72<1.9127804] yes=5,no=6,missing=6
- 5:[f317<0.270247936] yes=7,no=8,missing=8
- 7:leaf=0.347235203
- 8:leaf=0.00531240739
- 6:[f171<0.00868621096] yes=9,no=10,missing=10
- 9:leaf=-0.218183398
- 10:leaf=0.122564167
+0:[f57<0.0997442454] yes=1,no=2,missing=2
+ 1:leaf=0.410557956
+ 2:[f195<2.50544572] yes=3,no=4,missing=4
+ 3:leaf=-0.660697877
+ 4:leaf=0.0277274251
booster[32]:
-0:[f445<0.543396235] yes=1,no=2,missing=2
- 1:leaf=-0.264462113
- 2:leaf=0.166693076
+0:[f451<0.576678455] yes=1,no=2,missing=2
+ 1:leaf=-0.0869682655
+ 2:leaf=0.524466932
booster[33]:
-0:[f320<2.19173932] yes=1,no=2,missing=2
- 1:[f450<0.0421259254] yes=3,no=4,missing=4
- 3:leaf=-0.301022321
- 4:leaf=0.0576705411
- 2:leaf=0.160322711
+0:[f73<0.243880212] yes=1,no=2,missing=2
+ 1:leaf=0.500130653
+ 2:[f319<0.75848639] yes=3,no=4,missing=4
+ 3:leaf=-0.069238469
+ 4:leaf=-0.543482184
booster[34]:
-0:[f92<0.0224381629] yes=1,no=2,missing=2
- 1:[f317<0.000554144557] yes=3,no=4,missing=4
- 3:leaf=-0.269063264
- 4:leaf=0.0355622731
- 2:leaf=0.184870735
+0:[f60<0.356652081] yes=1,no=2,missing=2
+ 1:leaf=-0.29107058
+ 2:[f55<1.3471415] yes=3,no=4,missing=4
+ 3:leaf=0.97496736
+ 4:leaf=0.119154371
booster[35]:
-0:[f188<0.00299071311] yes=1,no=2,missing=2
- 1:[f81<1.50085533] yes=3,no=4,missing=4
- 3:leaf=-0.0687485114
- 4:leaf=0.284221798
- 2:[f86<0.00592786726] yes=5,no=6,missing=6
- 5:[f182<0.0789473653] yes=7,no=8,missing=8
- 7:leaf=-0.0496564284
- 8:leaf=0.160247326
- 6:leaf=-0.278983802
+0:[f82<0.328947365] yes=1,no=2,missing=2
+ 1:leaf=1.12803173
+ 2:leaf=-0.458722323
booster[36]:
-0:[f212<0.0654808432] yes=1,no=2,missing=2
- 1:[f174<0.210972592] yes=3,no=4,missing=4
- 3:[f423<0.0123041812] yes=5,no=6,missing=6
- 5:leaf=-0.259819537
- 6:leaf=0.0930926427
- 4:[f58<0.398453683] yes=7,no=8,missing=8
- 7:leaf=0.269197673
- 8:leaf=-0.056416072
- 2:leaf=0.221564591
+0:[f82<0.057781104] yes=1,no=2,missing=2
+ 1:leaf=0.445793599
+ 2:[f85<0.904361844] yes=3,no=4,missing=4
+ 3:leaf=-0.707021713
+ 4:leaf=-0.134077296
booster[37]:
-0:[f73<1.40631914] yes=1,no=2,missing=2
- 1:leaf=-0.157754987
- 2:leaf=0.121088035
+0:[f455<0.101519339] yes=1,no=2,missing=2
+ 1:leaf=-0.439361244
+ 2:leaf=0.270291239
booster[38]:
-0:[f328<0.152615771] yes=1,no=2,missing=2
- 1:leaf=-0.169083342
- 2:leaf=0.108819708
+0:[f193<0.0372928195] yes=1,no=2,missing=2
+ 1:leaf=0.676307857
+ 2:leaf=-0.428191751
booster[39]:
-0:[f199<0.709219873] yes=1,no=2,missing=2
- 1:[f317<0.000554144557] yes=3,no=4,missing=4
- 3:leaf=-0.0986715108
- 4:leaf=0.234420851
- 2:leaf=-0.213346735
+0:[f185<0.0680045411] yes=1,no=2,missing=2
+ 1:leaf=0.714303851
+ 2:[f68<1.31640887] yes=3,no=4,missing=4
+ 3:leaf=-0.411577195
+ 4:leaf=0.0209199842
booster[40]:
-0:[f197<1.47799802] yes=1,no=2,missing=2
- 1:[f58<1.10991788] yes=3,no=4,missing=4
- 3:leaf=-0.206050158
- 4:leaf=0.112298831
- 2:leaf=0.135670498
+0:[f186<0.00283330702] yes=1,no=2,missing=2
+ 1:leaf=0.725978911
+ 2:leaf=-0.588061571
booster[41]:
-0:[f62<1.00321639] yes=1,no=2,missing=2
- 1:[f76<1.81992447] yes=3,no=4,missing=4
- 3:[f437<0.0203389823] yes=7,no=8,missing=8
- 7:leaf=0.0312347859
- 8:leaf=0.273703307
- 4:leaf=-0.0847936422
- 2:[f71<0.977812767] yes=5,no=6,missing=6
- 5:leaf=-0.229477286
- 6:leaf=0.0391496904
+0:[f68<0.231347606] yes=1,no=2,missing=2
+ 1:[f304<0.00598142622] yes=3,no=4,missing=4
+ 3:leaf=-0.806711435
+ 4:leaf=-0.124220558
+ 2:[f65<3.72517824] yes=5,no=6,missing=6
+ 5:leaf=0.0392437205
+ 6:leaf=0.430936128
booster[42]:
-0:leaf=-0.0133904722
+0:[f205<1.66711903] yes=1,no=2,missing=2
+ 1:leaf=-0.216072142
+ 2:leaf=0.367930323
booster[43]:
-0:[f433<0.135974303] yes=1,no=2,missing=2
- 1:leaf=-0.148565814
- 2:leaf=0.0961077958
+0:[f320<2.72495103] yes=1,no=2,missing=2
+ 1:leaf=-0.344734311
+ 2:leaf=0.872487605
booster[44]:
-0:[f92<0.0224381629] yes=1,no=2,missing=2
- 1:leaf=-0.127172247
- 2:leaf=0.136539012
+0:[f71<1.53985429] yes=1,no=2,missing=2
+ 1:leaf=-0.0914518759
+ 2:leaf=-0.407454491
booster[45]:
-0:[f85<0.000542578113] yes=1,no=2,missing=2
- 1:[f78<0.0318066962] yes=3,no=4,missing=4
- 3:leaf=-0.0495181493
- 4:leaf=0.187555581
- 2:leaf=-0.132087171
+0:[f197<0.00316442153] yes=1,no=2,missing=2
+ 1:leaf=0.567224443
+ 2:[f61<0.00647588493] yes=3,no=4,missing=4
+ 3:leaf=0.207850933
+ 4:leaf=-0.366284281
booster[46]:
-0:[f187<1.27792811] yes=1,no=2,missing=2
- 1:[f59<0.0257823896] yes=3,no=4,missing=4
- 3:[f186<0.00283330702] yes=5,no=6,missing=6
- 5:leaf=-0.135928482
- 6:leaf=0.189854056
- 4:leaf=-0.194462657
- 2:leaf=0.164950982
+0:[f68<0.383479148] yes=1,no=2,missing=2
+ 1:[f196<0.211263731] yes=3,no=4,missing=4
+ 3:leaf=0.105062343
+ 4:leaf=-0.436443239
+ 2:[f185<0.250380278] yes=5,no=6,missing=6
+ 5:leaf=0.00719080959
+ 6:leaf=0.775642872
booster[47]:
-0:leaf=0.00713210041
+0:leaf=-0.329960436
booster[48]:
-0:[f318<1.04129589] yes=1,no=2,missing=2
- 1:leaf=0.0943119749
- 2:leaf=-0.135280624
+0:[f436<0.152650177] yes=1,no=2,missing=2
+ 1:leaf=-0.0418393649
+ 2:leaf=0.229026541
booster[49]:
-0:[f191<0.119760476] yes=1,no=2,missing=2
- 1:leaf=0.113984205
- 2:leaf=-0.1410999
+0:[f187<0.155844152] yes=1,no=2,missing=2
+ 1:leaf=0.105118938
+ 2:leaf=-0.513294697
booster[50]:
-0:[f186<0.00283330702] yes=1,no=2,missing=2
- 1:leaf=0.109875768
- 2:[f181<0.287803948] yes=3,no=4,missing=4
- 3:leaf=-0.145542577
- 4:leaf=0.020898385
+0:[f196<1.05168617] yes=1,no=2,missing=2
+ 1:leaf=-0.493719906
+ 2:leaf=0.244076028
booster[51]:
-0:[f82<0.0843750015] yes=1,no=2,missing=2
- 1:[f68<0.00254074251] yes=3,no=4,missing=4
- 3:leaf=-0.0356240347
- 4:leaf=0.225002453
- 2:[f189<0.459874541] yes=5,no=6,missing=6
- 5:leaf=-0.199208379
- 6:leaf=0.0423403606
+0:[f65<0.309998602] yes=1,no=2,missing=2
+ 1:leaf=0.739876926
+ 2:[f318<0.172593743] yes=3,no=4,missing=4
+ 3:leaf=-0.0229104403
+ 4:leaf=-0.703690052
booster[52]:
-0:leaf=0.000538185181
+0:leaf=0.130310327
booster[53]:
-0:[f60<0.0341453478] yes=1,no=2,missing=2
- 1:leaf=0.0992323831
- 2:leaf=-0.116487078
+0:[f428<0.169331402] yes=1,no=2,missing=2
+ 1:leaf=-0.392787576
+ 2:leaf=0.419829011
booster[54]:
-0:[f91<0.00899621192] yes=1,no=2,missing=2
- 1:leaf=-0.117201962
- 2:leaf=0.0840489417
+0:leaf=-0.0839198232
booster[55]:
-0:[f85<0.000542578113] yes=1,no=2,missing=2
- 1:[f78<0.0318066962] yes=3,no=4,missing=4
- 3:leaf=-0.0343049392
- 4:leaf=0.163552389
- 2:leaf=-0.113592722
+0:[f58<0.00216070865] yes=1,no=2,missing=2
+ 1:[f68<0.886882722] yes=3,no=4,missing=4
+ 3:leaf=0.0366720669
+ 4:leaf=0.546127856
+ 2:leaf=-0.337356985
booster[56]:
-0:[f443<0.0974542573] yes=1,no=2,missing=2
- 1:[f186<0.00283330702] yes=3,no=4,missing=4
- 3:leaf=-0.0673530996
- 4:[f182<0.0780843347] yes=5,no=6,missing=6
- 5:leaf=0.197955459
- 6:leaf=0.0340558849
- 2:leaf=-0.131139934
+0:[f436<0.0922832116] yes=1,no=2,missing=2
+ 1:[f188<0.463673383] yes=3,no=4,missing=4
+ 3:leaf=-0.178408608
+ 4:leaf=0.631894886
+ 2:leaf=-0.626312375
booster[57]:
-0:leaf=0.00858247653
+0:leaf=0.155400068
booster[58]:
-0:[f60<0.0341453478] yes=1,no=2,missing=2
- 1:leaf=0.0697831661
- 2:leaf=-0.0930704698
+0:leaf=0.247760579
booster[59]:
-0:[f191<0.119760476] yes=1,no=2,missing=2
- 1:leaf=0.0769971833
- 2:leaf=-0.105873823
+0:[f446<0.144030347] yes=1,no=2,missing=2
+ 1:leaf=0.612641275
+ 2:leaf=-0.548725605
booster[60]:
-0:[f188<0.00299071311] yes=1,no=2,missing=2
- 1:leaf=0.100825779
- 2:leaf=-0.0659725666
+0:[f58<0.927242875] yes=1,no=2,missing=2
+ 1:[f195<0.189571828] yes=3,no=4,missing=4
+ 3:leaf=-0.0180547331
+ 4:leaf=-0.478642046
+ 2:leaf=0.997799635
booster[61]:
-0:[f82<0.156536818] yes=1,no=2,missing=2
- 1:[f68<0.00254074251] yes=3,no=4,missing=4
- 3:leaf=-0.0103040524
- 4:leaf=0.163746133
- 2:[f82<1.22081995] yes=5,no=6,missing=6
- 5:leaf=-0.174394935
- 6:leaf=0.0320722349
+0:[f87<0.0399737544] yes=1,no=2,missing=2
+ 1:leaf=0.442117065
+ 2:leaf=-0.585561514
booster[62]:
-0:leaf=-0.00528587122
+0:[f319<3.99797893] yes=1,no=2,missing=2
+ 1:leaf=-0.394323021
+ 2:leaf=0.234927416
booster[63]:
-0:leaf=-0.0202977192
+0:[f211<0.0070309611] yes=1,no=2,missing=2
+ 1:[f320<1.14809537] yes=3,no=4,missing=4
+ 3:leaf=0.153392926
+ 4:leaf=0.68980068
+ 2:leaf=-0.269105792
booster[64]:
-0:[f321<0.233766228] yes=1,no=2,missing=2
- 1:leaf=-0.0945843905
- 2:leaf=0.073758021
+0:[f202<0.143326283] yes=1,no=2,missing=2
+ 1:leaf=0.160502359
+ 2:leaf=-0.431377679
+booster[65]:
+0:[f446<0.0821859464] yes=1,no=2,missing=2
+ 1:[f184<0.368630022] yes=3,no=4,missing=4
+ 3:leaf=0.215633482
+ 4:leaf=1.3382417
+ 2:leaf=-0.28713727
+booster[66]:
+0:[f185<0.00162497326] yes=1,no=2,missing=2
+ 1:leaf=-0.293222994
+ 2:[f73<1.18437481] yes=3,no=4,missing=4
+ 3:leaf=0.0113580255
+ 4:leaf=0.560500741
+booster[67]:
+0:leaf=-0.0445166044
+booster[68]:
+0:[f436<0.166131169] yes=1,no=2,missing=2
+ 1:leaf=0.218885049
+ 2:leaf=-0.36515671
+booster[69]:
+0:[f77<1.50082362] yes=1,no=2,missing=2
+ 1:leaf=0.328168571
+ 2:leaf=-0.380794883
+booster[70]:
+0:[f446<0.541083395] yes=1,no=2,missing=2
+ 1:[f55<0.0110116359] yes=3,no=4,missing=4
+ 3:leaf=0.41797024
+ 4:leaf=-0.104700685
+ 2:leaf=-0.294257462
+booster[71]:
+0:[f87<0.00254154997] yes=1,no=2,missing=2
+ 1:leaf=-0.659140944
+ 2:leaf=0.206559569
+booster[72]:
+0:[f449<0.68928951] yes=1,no=2,missing=2
+ 1:leaf=-0.125212952
+ 2:leaf=0.329967231
+booster[73]:
+0:[f446<0.613981783] yes=1,no=2,missing=2
+ 1:leaf=0.29865256
+ 2:leaf=-0.392374843
+booster[74]:
+0:leaf=0.235877275
+booster[75]:
+0:[f77<0.990853667] yes=1,no=2,missing=2
+ 1:leaf=-0.334794044
+ 2:leaf=0.0307995155
+booster[76]:
+0:[f187<0.70175612] yes=1,no=2,missing=2
+ 1:leaf=-0.234293312
+ 2:leaf=0.359858423
+booster[77]:
+0:[f451<0.546731591] yes=1,no=2,missing=2
+ 1:leaf=-0.341680676
+ 2:leaf=0.28513822
+booster[78]:
+0:[f420<1.06860554] yes=1,no=2,missing=2
+ 1:leaf=-0.425027251
+ 2:leaf=0.535925448
+booster[79]:
+0:[f313<0.00554144522] yes=1,no=2,missing=2
+ 1:leaf=-0.239411026
+ 2:leaf=0.839625657
+booster[80]:
+0:[f72<3.25559115] yes=1,no=2,missing=2
+ 1:[f65<0.107412875] yes=3,no=4,missing=4
+ 3:leaf=0.0247959439
+ 4:leaf=-0.279670805
+ 2:leaf=0.543463826
+booster[81]:
+0:[f194<0.928785443] yes=1,no=2,missing=2
+ 1:[f61<0.0662841648] yes=3,no=4,missing=4
+ 3:leaf=-0.403669536
+ 4:leaf=0.0223644767
+ 2:leaf=0.668486118
+booster[82]:
+0:leaf=0
+booster[83]:
+0:leaf=0.238602668
+booster[84]:
+0:leaf=-0.372457713
+booster[85]:
+0:[f68<0.372667551] yes=1,no=2,missing=2
+ 1:leaf=0.760666311
+ 2:leaf=-0.181681424
+booster[86]:
+0:[f197<0.00720387883] yes=1,no=2,missing=2
+ 1:[f186<0.00283330702] yes=3,no=4,missing=4
+ 3:leaf=0.027232714
+ 4:leaf=0.478941858
+ 2:[f83<0.659115195] yes=5,no=6,missing=6
+ 5:leaf=-0.64958328
+ 6:leaf=-0.145912141
+booster[87]:
+0:leaf=-0.226252496
+booster[88]:
+0:[f84<0.0242106579] yes=1,no=2,missing=2
+ 1:leaf=0.268600702
+ 2:leaf=-0.367298663
+booster[89]:
+0:leaf=-0.0895910263
+booster[90]:
+0:[f62<1.59131598] yes=1,no=2,missing=2
+ 1:[f72<1.9127804] yes=3,no=4,missing=4
+ 3:leaf=-0.490935653
+ 4:leaf=-0.0581952594
+ 2:leaf=0.45694989
+booster[91]:
+0:[f422<0.786091983] yes=1,no=2,missing=2
+ 1:[f52<0.089488633] yes=3,no=4,missing=4
+ 3:[f77<2.65421295] yes=5,no=6,missing=6
+ 5:leaf=-0.0880598426
+ 6:leaf=-0.557022512
+ 4:leaf=0.191702828
+ 2:leaf=0.744949996
+booster[92]:
+0:leaf=-0.207906589
+booster[93]:
+0:[f439<0.507768333] yes=1,no=2,missing=2
+ 1:[f306<0.00168911414] yes=3,no=4,missing=4
+ 3:leaf=0.0905954689
+ 4:leaf=0.467606813
+ 2:leaf=-0.321550995
+booster[94]:
+0:[f193<0.780843318] yes=1,no=2,missing=2
+ 1:leaf=0.278535336
+ 2:leaf=-0.370406836
+booster[95]:
+0:[f64<0.0318917856] yes=1,no=2,missing=2
+ 1:leaf=-0.243313268
+ 2:[f65<0.876804709] yes=3,no=4,missing=4
+ 3:leaf=0.628424942
+ 4:leaf=-0.0602799319
+booster[96]:
+0:[f320<0.446246147] yes=1,no=2,missing=2
+ 1:leaf=0.470871955
+ 2:leaf=-0.592198193
+booster[97]:
+0:leaf=0
+booster[98]:
+0:[f448<0.072580643] yes=1,no=2,missing=2
+ 1:leaf=-0.334959149
+ 2:leaf=0.0728676394
+booster[99]:
+0:[f188<0.0471253544] yes=1,no=2,missing=2
+ 1:leaf=0.346880049
+ 2:leaf=-0.32620433
+booster[100]:
+0:leaf=-0.158323571
+booster[101]:
+0:[f316<0.0978942961] yes=1,no=2,missing=2
+ 1:[f181<0.311015755] yes=3,no=4,missing=4
+ 3:leaf=0.40385139
+ 4:leaf=-0.172112226
+ 2:leaf=-0.824565172
+booster[102]:
+0:leaf=0.148181856
+booster[103]:
+0:leaf=0.278944641
+booster[104]:
+0:[f79<1.53718245] yes=1,no=2,missing=2
+ 1:leaf=0.190652668
+ 2:leaf=-0.269898951
+booster[105]:
+0:[f312<0.279267788] yes=1,no=2,missing=2
+ 1:leaf=0.0680199638
+ 2:leaf=-0.338387311
+booster[106]:
+0:[f205<0.689256728] yes=1,no=2,missing=2
+ 1:leaf=-0.167301729
+ 2:leaf=0.39944607
+booster[107]:
+0:leaf=0
+booster[108]:
+0:[f61<0.136278391] yes=1,no=2,missing=2
+ 1:leaf=0.271934539
+ 2:leaf=-0.35588178
+booster[109]:
+0:leaf=-0.112803891
+booster[110]:
+0:[f325<0.00237114681] yes=1,no=2,missing=2
+ 1:leaf=0.34350872
+ 2:leaf=-0.358963579
+booster[111]:
+0:[f58<0.092180863] yes=1,no=2,missing=2
+ 1:leaf=0.403366834
+ 2:leaf=-0.370072693
+booster[112]:
+0:leaf=-0.347817451
+booster[113]:
+0:[f426<0.13636364] yes=1,no=2,missing=2
+ 1:leaf=-0.410891116
+ 2:leaf=0.363634765
+booster[114]:
+0:[f440<0.00326666911] yes=1,no=2,missing=2
+ 1:leaf=0.214501128
+ 2:leaf=-0.528729558
+booster[115]:
+0:[f194<0.180212021] yes=1,no=2,missing=2
+ 1:leaf=0.245721951
+ 2:leaf=-0.301460803
+booster[116]:
+0:[f69<1.18120158] yes=1,no=2,missing=2
+ 1:[f316<0.0152925532] yes=3,no=4,missing=4
+ 3:leaf=0.201560408
+ 4:leaf=0.877753496
+ 2:leaf=-0.484046131
+booster[117]:
+0:leaf=0
+booster[118]:
+0:leaf=-0.245808512
+booster[119]:
+0:leaf=-0.163874283
+booster[120]:
+0:[f319<0.135770798] yes=1,no=2,missing=2
+ 1:leaf=-0.274952739
+ 2:leaf=0.379072994
+booster[121]:
+0:leaf=0.409025073
+booster[122]:
+0:leaf=-0.180766672
+booster[123]:
+0:leaf=0.00324052107
+booster[124]:
+0:[f76<1.23827791] yes=1,no=2,missing=2
+ 1:leaf=0.352185398
+ 2:leaf=-0.404994428
+booster[125]:
+0:leaf=-0.195675552
+booster[126]:
+0:[f422<0.340100527] yes=1,no=2,missing=2
+ 1:leaf=-0.309930235
+ 2:leaf=0.59685421
+booster[127]:
+0:leaf=0.181603253
+booster[128]:
+0:leaf=-0.105346859
+booster[129]:
+0:leaf=0.0165106244
+booster[130]:
+0:[f65<0.460513562] yes=1,no=2,missing=2
+ 1:leaf=0.594037771
+ 2:leaf=-0.266176432
+booster[131]:
+0:[f56<0.102305673] yes=1,no=2,missing=2
+ 1:[f437<0.0203389823] yes=3,no=4,missing=4
+ 3:leaf=-0.0557907969
+ 4:leaf=0.589171112
+ 2:leaf=-0.892551124
+booster[132]:
+0:leaf=-0.117940888
+booster[133]:
+0:leaf=0.195106849
+booster[134]:
+0:leaf=-0.193403125
+booster[135]:
+0:leaf=-0.125377476
+booster[136]:
+0:[f196<0.126033053] yes=1,no=2,missing=2
+ 1:leaf=-0.434828341
+ 2:leaf=0.186503962
+booster[137]:
+0:leaf=0
+booster[138]:
+0:[f67<0.0724484548] yes=1,no=2,missing=2
+ 1:leaf=0.47110939
+ 2:leaf=-0.198572263
+booster[139]:
+0:leaf=0.24175784
+booster[140]:
+0:leaf=-0.312530905
+booster[141]:
+0:[f323<0.938569486] yes=1,no=2,missing=2
+ 1:leaf=0.0131518431
+ 2:leaf=-0.467352241
+booster[142]:
+0:leaf=-0.181658357
+booster[143]:
+0:leaf=0
+booster[144]:
+0:leaf=-0.106984414
+booster[145]:
+0:[f197<1.47799802] yes=1,no=2,missing=2
+ 1:leaf=-0.419312119
+ 2:leaf=0.423877716
+booster[146]:
+0:[f434<0.903700173] yes=1,no=2,missing=2
+ 1:leaf=-0.492385447
+ 2:leaf=0.115755193
+booster[147]:
+0:leaf=0
+booster[148]:
+0:leaf=-0.272684962
+booster[149]:
+0:leaf=-0.271277696
+booster[150]:
+0:[f87<0.00254154997] yes=1,no=2,missing=2
+ 1:leaf=0.29398939
+ 2:leaf=-0.280705839
+booster[151]:
+0:[f200<0.0922832116] yes=1,no=2,missing=2
+ 1:leaf=-0.374623924
+ 2:leaf=0.245490476
+booster[152]:
+0:leaf=0
+booster[153]:
+0:leaf=-0.187716797
+booster[154]:
+0:leaf=-0.207333758
+booster[155]:
+0:[f198<1.34113014] yes=1,no=2,missing=2
+ 1:leaf=-0.270397663
+ 2:leaf=0.116552822
+booster[156]:
+0:[f62<0.00599026587] yes=1,no=2,missing=2
+ 1:leaf=-0.180136174
+ 2:[f79<0.685975611] yes=3,no=4,missing=4
+ 3:leaf=0.450865239
+ 4:leaf=0.098741807
+booster[157]:
+0:leaf=0.167336836
+booster[158]:
+0:leaf=0.103568964
+booster[159]:
+0:leaf=0
+booster[160]:
+0:leaf=-0.0554513969
+booster[161]:
+0:leaf=-0.186526671
+booster[162]:
+0:leaf=0
+booster[163]:
+0:leaf=0
+booster[164]:
+0:leaf=0.126298159
+booster[165]:
+0:[f52<0.125950053] yes=1,no=2,missing=2
+ 1:leaf=0.182235703
+ 2:leaf=0.0158135332
+booster[166]:
+0:[f194<0.347222209] yes=1,no=2,missing=2
+ 1:leaf=0.245925635
+ 2:leaf=-0.434213936
+booster[167]:
+0:leaf=0.0425694957
+booster[168]:
+0:leaf=0.169790551
+booster[169]:
+0:[f185<0.0680045411] yes=1,no=2,missing=2
+ 1:leaf=0.10937579
+ 2:leaf=-0.303385466
+booster[170]:
+0:leaf=-0.118090175
+booster[171]:
+0:[f76<1.72693026] yes=1,no=2,missing=2
+ 1:leaf=0.393711299
+ 2:leaf=-0.255188674
+booster[172]:
+0:leaf=0
+booster[173]:
+0:[f448<0.510638297] yes=1,no=2,missing=2
+ 1:leaf=0.0157413166
+ 2:leaf=0.264824837
+booster[174]:
+0:leaf=-0.259619892
+booster[175]:
+0:[f187<0.00168911414] yes=1,no=2,missing=2
+ 1:leaf=0.321178883
+ 2:leaf=-0.357806057
+booster[176]:
+0:[f184<0.558020949] yes=1,no=2,missing=2
+ 1:leaf=0.252019376
+ 2:leaf=-0.015752092
+booster[177]:
+0:leaf=0
+booster[178]:
+0:[f68<0.231347606] yes=1,no=2,missing=2
+ 1:leaf=0.31809473
+ 2:leaf=-0.266782284
+booster[179]:
+0:leaf=0
+booster[180]:
+0:leaf=0
+booster[181]:
+0:leaf=-0.103835225
+booster[182]:
+0:leaf=0
+booster[183]:
+0:[f312<0.010418239] yes=1,no=2,missing=2
+ 1:leaf=0.346265495
+ 2:leaf=-0.0911380053
+booster[184]:
+0:leaf=-0.139149591
+booster[185]:
+0:[f71<0.322188437] yes=1,no=2,missing=2
+ 1:leaf=0.266529381
+ 2:leaf=-0.135085791
+booster[186]:
+0:[f307<0.069531247] yes=1,no=2,missing=2
+ 1:leaf=0.262776971
+ 2:leaf=-0.311238974
+booster[187]:
+0:leaf=0
+booster[188]:
+0:leaf=-0.140348941
+booster[189]:
+0:leaf=0
+booster[190]:
+0:[f78<1.81134725] yes=1,no=2,missing=2
+ 1:leaf=-0.0686582103
+ 2:leaf=0.370208353
+booster[191]:
+0:leaf=-0.0573393553
+booster[192]:
+0:leaf=0
+booster[193]:
+0:leaf=-0.0490174778
+booster[194]:
+0:leaf=-0.00832609367
+booster[195]:
+0:leaf=-0.0127542997
+booster[196]:
+0:[f63<0.213675216] yes=1,no=2,missing=2
+ 1:leaf=0.221018896
+ 2:leaf=-0.2310801
+booster[197]:
+0:leaf=0
+booster[198]:
+0:leaf=0.0930084959
+booster[199]:
+0:leaf=-0.0813163966
+booster[200]:
+0:leaf=0.231274277
+booster[201]:
+0:leaf=0.199822828
+booster[202]:
+0:leaf=0
+booster[203]:
+0:leaf=0.0985031053
+booster[204]:
+0:leaf=0
+booster[205]:
+0:[f313<0.00554144522] yes=1,no=2,missing=2
+ 1:leaf=0.243169993
+ 2:leaf=-0.199492097
+booster[206]:
+0:leaf=0.0190465413
+booster[207]:
+0:leaf=0
+booster[208]:
+0:leaf=0.191377759
+booster[209]:
+0:leaf=-0.0991649404
+booster[210]:
+0:leaf=-0.211447597
+booster[211]:
+0:[f65<0.152439028] yes=1,no=2,missing=2
+ 1:leaf=-0.340389937
+ 2:leaf=0.0357078351
+booster[212]:
+0:leaf=0
+booster[213]:
+0:[f445<0.253875971] yes=1,no=2,missing=2
+ 1:leaf=-0.387060076
+ 2:leaf=0.22609295
+booster[214]:
+0:leaf=0
+booster[215]:
+0:[f68<2.09192181] yes=1,no=2,missing=2
+ 1:leaf=-0.30868572
+ 2:leaf=0.131534189
+booster[216]:
+0:leaf=-0.165763721
+booster[217]:
+0:leaf=-0.490885496
+booster[218]:
+0:[f80<0.968705297] yes=1,no=2,missing=2
+ 1:leaf=0.307756484
+ 2:leaf=-0.107414469
+booster[219]:
+0:leaf=0
+booster[220]:
+0:[f194<0.928785443] yes=1,no=2,missing=2
+ 1:leaf=-0.0573502518
+ 2:leaf=0.261782616
+booster[221]:
+0:[f182<0.0442262143] yes=1,no=2,missing=2
+ 1:leaf=0.0404722765
+ 2:leaf=0.365329057
+booster[222]:
+0:leaf=0
+booster[223]:
+0:[f80<1.10042536] yes=1,no=2,missing=2
+ 1:leaf=0.0131952874
+ 2:leaf=-0.279987097
+booster[224]:
+0:leaf=0
+booster[225]:
+0:[f72<0.00392506691] yes=1,no=2,missing=2
+ 1:leaf=0.278853714
+ 2:leaf=-0.28128019
+booster[226]:
+0:leaf=0.261558324
+booster[227]:
+0:leaf=0
+booster[228]:
+0:leaf=-0.197845489
+booster[229]:
+0:leaf=0.254046291
+booster[230]:
+0:leaf=0.112367906
+booster[231]:
+0:leaf=0.0517889448
+booster[232]:
+0:leaf=0.111316703
+booster[233]:
+0:leaf=-0.227643073
+booster[234]:
+0:leaf=0.180826128
+booster[235]:
+0:[f72<4.21655369] yes=1,no=2,missing=2
+ 1:leaf=-0.262073219
+ 2:leaf=0.311843216
+booster[236]:
+0:leaf=-0.243169487
+booster[237]:
+0:leaf=0
+booster[238]:
+0:leaf=0.08254347
+booster[239]:
+0:[f80<0.250347406] yes=1,no=2,missing=2
+ 1:leaf=0.67415148
+ 2:leaf=-0.216243863
+booster[240]:
+0:leaf=0.169173226
+booster[241]:
+0:[f189<0.370626152] yes=1,no=2,missing=2
+ 1:leaf=-0.203993618
+ 2:leaf=0.377635628
+booster[242]:
+0:leaf=0
+booster[243]:
+0:leaf=-0.0612060912
+booster[244]:
+0:leaf=-0.23245509
+booster[245]:
+0:leaf=-0.0484034009
+booster[246]:
+0:[f80<0.222265631] yes=1,no=2,missing=2
+ 1:leaf=0.243911162
+ 2:leaf=-0.262701899
+booster[247]:
+0:leaf=-0.209984094
+booster[248]:
+0:leaf=-0.157820135
+booster[249]:
+0:leaf=0
+booster[250]:
+0:[f61<0.00647588493] yes=1,no=2,missing=2
+ 1:leaf=0.0135222487
+ 2:leaf=0.0574669503
+booster[251]:
+0:[f434<0.0233385358] yes=1,no=2,missing=2
+ 1:leaf=0.304569632
+ 2:leaf=-0.395981133
+booster[252]:
+0:leaf=0
+booster[253]:
+0:[f63<0.950759828] yes=1,no=2,missing=2
+ 1:leaf=0.319494158
+ 2:leaf=-0.219853252
+booster[254]:
+0:leaf=0
+booster[255]:
+0:[f311<0.609375] yes=1,no=2,missing=2
+ 1:leaf=0.0118538449
+ 2:leaf=0.359045684
+booster[256]:
+0:leaf=-0.255679101
+booster[257]:
+0:leaf=0
+booster[258]:
+0:leaf=0
+booster[259]:
+0:leaf=0
+booster[260]:
+0:leaf=0.162781611
+booster[261]:
+0:leaf=0.0787618607
+booster[262]:
+0:leaf=0
+booster[263]:
+0:leaf=-0.124999516
+booster[264]:
+0:[f56<0.00582402013] yes=1,no=2,missing=2
+ 1:leaf=0.259242922
+ 2:leaf=-0.264139593
+booster[265]:
+0:[f437<0.0203389823] yes=1,no=2,missing=2
+ 1:leaf=0.160480723
+ 2:leaf=-0.300688267
+booster[266]:
+0:[f436<0.0736677125] yes=1,no=2,missing=2
+ 1:leaf=0.352146834
+ 2:leaf=-0.217038572
+booster[267]:
+0:leaf=0
+booster[268]:
+0:leaf=0.260378629
+booster[269]:
+0:leaf=-0.218969271
+booster[270]:
+0:leaf=0.00579656987
+booster[271]:
+0:leaf=-0.164750382
+booster[272]:
+0:leaf=0
+booster[273]:
+0:leaf=0.050133884
+booster[274]:
+0:leaf=0
+booster[275]:
+0:leaf=-0.0330995545
+booster[276]:
+0:leaf=0.168015763
+booster[277]:
+0:leaf=0
+booster[278]:
+0:leaf=-0.237562865
+booster[279]:
+0:leaf=0
+booster[280]:
+0:leaf=-0.0901121721
+booster[281]:
+0:leaf=-0.0910040289
+booster[282]:
+0:leaf=0
+booster[283]:
+0:leaf=-0.0265094601
+booster[284]:
+0:leaf=0.196153879
+booster[285]:
+0:leaf=-0.0945694
+booster[286]:
+0:leaf=0.0264753085
+booster[287]:
+0:leaf=0
+booster[288]:
+0:leaf=-0.00419106754
+booster[289]:
+0:[f82<0.137103736] yes=1,no=2,missing=2
+ 1:leaf=0.454877287
+ 2:leaf=-0.196899369
+booster[290]:
+0:leaf=-0.0239210743
+booster[291]:
+0:[f189<0.263736278] yes=1,no=2,missing=2
+ 1:leaf=-0.229080126
+ 2:leaf=0.167442188
+booster[292]:
+0:leaf=0
+booster[293]:
+0:leaf=-0.124437869
+booster[294]:
+0:leaf=-0.0350768752
+booster[295]:
+0:leaf=0.020020254
+booster[296]:
+0:[f197<0.00316442153] yes=1,no=2,missing=2
+ 1:leaf=-0.320849806
+ 2:leaf=0.21560961
+booster[297]:
+0:leaf=0
+booster[298]:
+0:[f317<0.752136767] yes=1,no=2,missing=2
+ 1:leaf=0.12783353
+ 2:leaf=-0.29163608
+booster[299]:
+0:leaf=-0.213500947
+booster[300]:
+0:leaf=0.0906879529
+booster[301]:
+0:[f186<0.0555316396] yes=1,no=2,missing=2
+ 1:leaf=0.363644809
+ 2:leaf=-0.300976574
+booster[302]:
+0:leaf=0
+booster[303]:
+0:[f320<1.43377554] yes=1,no=2,missing=2
+ 1:leaf=-0.102948442
+ 2:leaf=0.324674129
+booster[304]:
+0:leaf=0
+booster[305]:
+0:leaf=0.0267060548
+booster[306]:
+0:leaf=-0.132790521
+booster[307]:
+0:leaf=-0.363579184
+booster[308]:
+0:leaf=0.168703124
+booster[309]:
+0:leaf=0
+booster[310]:
+0:[f189<1.50265038] yes=1,no=2,missing=2
+ 1:leaf=-0.0152510172
+ 2:leaf=0.309961528
+booster[311]:
+0:leaf=-0.0306581948
+booster[312]:
+0:leaf=0
+booster[313]:
+0:leaf=-0.155926406
+booster[314]:
+0:leaf=0
+booster[315]:
+0:leaf=-0.132180691
+booster[316]:
+0:[f183<0.00805729628] yes=1,no=2,missing=2
+ 1:leaf=-0.321083695
+ 2:leaf=0.142538533
+booster[317]:
+0:leaf=0
+booster[318]:
+0:leaf=-0.0655456558
+booster[319]:
+0:leaf=0
+booster[320]:
+0:[f192<0.457906574] yes=1,no=2,missing=2
+ 1:leaf=-0.117715746
+ 2:leaf=0.173888072
+booster[321]:
+0:[f67<0.479999989] yes=1,no=2,missing=2
+ 1:leaf=0.384940565
+ 2:leaf=0.0395562053
+booster[322]:
+0:leaf=0
+booster[323]:
+0:leaf=-0.22033298
+booster[324]:
+0:leaf=0.0577816106
+booster[325]:
+0:leaf=0
+booster[326]:
+0:leaf=-0.0626825094
+booster[327]:
+0:leaf=0.0859293714
+booster[328]:
+0:leaf=-0.0602162741
+booster[329]:
+0:leaf=0.134382263
+booster[330]:
+0:leaf=-0.0186994597
+booster[331]:
+0:leaf=-0.223600626
+booster[332]:
+0:leaf=0
+booster[333]:
+0:leaf=0.103203714
+booster[334]:
+0:leaf=0.0252831355
+booster[335]:
+0:leaf=-0.0326567553
+booster[336]:
+0:[f422<0.0918013006] yes=1,no=2,missing=2
+ 1:leaf=0.300289303
+ 2:leaf=-0.167651862
+booster[337]:
+0:leaf=0
+booster[338]:
+0:leaf=-0.159141257
+booster[339]:
+0:leaf=0.331280351
+booster[340]:
+0:leaf=0.0810985714
+booster[341]:
+0:[f190<0.903031349] yes=1,no=2,missing=2
+ 1:leaf=0.346239537
+ 2:leaf=-0.261830539
+booster[342]:
+0:leaf=0
+booster[343]:
+0:leaf=0
+booster[344]:
+0:leaf=0.230893701
+booster[345]:
+0:leaf=0.00481863786
+booster[346]:
+0:[f428<0.0186915882] yes=1,no=2,missing=2
+ 1:leaf=-0.0626903102
+ 2:leaf=0.333191246
+booster[347]:
+0:leaf=-0.355384171
+booster[348]:
+0:leaf=0.00125494716
+booster[349]:
+0:leaf=0.0436680056
+booster[350]:
+0:leaf=0.0745423436
+booster[351]:
+0:[f187<0.00168911414] yes=1,no=2,missing=2
+ 1:leaf=-0.0845193341
+ 2:leaf=0.207011238
+booster[352]:
+0:leaf=0
+booster[353]:
+0:leaf=0
+booster[354]:
+0:[f73<1.25459063] yes=1,no=2,missing=2
+ 1:leaf=-0.372717232
+ 2:leaf=-0.0453739017
+booster[355]:
+0:leaf=0.0879485831
+booster[356]:
+0:leaf=0.231912121
+booster[357]:
+0:leaf=0
+booster[358]:
+0:leaf=0
+booster[359]:
+0:leaf=-0.294127494
+booster[360]:
+0:leaf=0.165816188
+booster[361]:
+0:[f194<0.199226841] yes=1,no=2,missing=2
+ 1:leaf=0.231085971
+ 2:leaf=-0.153474078
+booster[362]:
+0:leaf=0
+booster[363]:
+0:leaf=0.139063969
+booster[364]:
+0:leaf=0.514561296
+booster[365]:
+0:leaf=0.0897258967
+booster[366]:
+0:leaf=-0.0228701178
+booster[367]:
+0:leaf=0
+booster[368]:
+0:leaf=0
+booster[369]:
+0:leaf=0.0257352721
+booster[370]:
+0:leaf=-0.124482967
+booster[371]:
+0:[f64<0.0318917856] yes=1,no=2,missing=2
+ 1:leaf=0.309544086
+ 2:leaf=-0.118615612
+booster[372]:
+0:leaf=0.185174838
+booster[373]:
+0:leaf=-0.100315891
+booster[374]:
+0:leaf=0
+booster[375]:
+0:leaf=-0.00570160151
+booster[376]:
+0:[f88<0.017710261] yes=1,no=2,missing=2
+ 1:leaf=0.248982459
+ 2:leaf=-0.169166729
+booster[377]:
+0:leaf=0
+booster[378]:
+0:leaf=0
+booster[379]:
+0:leaf=-0.288597226
+booster[380]:
+0:leaf=0.0975358039
+booster[381]:
+0:leaf=-0.299434185
+booster[382]:
+0:leaf=0
+booster[383]:
+0:leaf=0.0163476262
+booster[384]:
+0:leaf=0.561085999
+booster[385]:
+0:leaf=0.127542257
+booster[386]:
+0:leaf=0.128701255
+booster[387]:
+0:leaf=0
+booster[388]:
+0:leaf=0.268823177
+booster[389]:
+0:leaf=0.0632381514
+booster[390]:
+0:leaf=-0.0785977766
+booster[391]:
+0:leaf=0.0901997909
+booster[392]:
+0:leaf=0.279047489
+booster[393]:
+0:leaf=0.0326008089
+booster[394]:
+0:leaf=-0.242862329
+booster[395]:
+0:leaf=0.0845660344
+booster[396]:
+0:[f189<1.50265038] yes=1,no=2,missing=2
+ 1:leaf=-0.079352051
+ 2:leaf=0.324401081
+booster[397]:
+0:leaf=0
+booster[398]:
+0:leaf=-0.0641298741
+booster[399]:
+0:leaf=-0.0694152489
+booster[400]:
+0:leaf=0
+booster[401]:
+0:[f184<0.148687467] yes=1,no=2,missing=2
+ 1:leaf=0.261024803
+ 2:leaf=0.0589630455
+booster[402]:
+0:leaf=0
+booster[403]:
+0:leaf=0.175141126
+booster[404]:
+0:leaf=0.466041952
+booster[405]:
+0:leaf=-0.156656861
+booster[406]:
+0:leaf=-0.00364190992
+booster[407]:
+0:leaf=0
+booster[408]:
+0:leaf=0.218695685
+booster[409]:
+0:leaf=0.0951728225
+booster[410]:
+0:leaf=-0.0189549457
+booster[411]:
+0:[f193<0.387985855] yes=1,no=2,missing=2
+ 1:leaf=0.0972707346
+ 2:leaf=-0.291214079
+booster[412]:
+0:leaf=-0.050919842
+booster[413]:
+0:leaf=0
+booster[414]:
+0:[f71<0.756501198] yes=1,no=2,missing=2
+ 1:leaf=0.0565872975
+ 2:leaf=-0.31827566
+booster[415]:
+0:leaf=0.0466371365
+booster[416]:
+0:[f183<0.0793883651] yes=1,no=2,missing=2
+ 1:leaf=0.150991321
+ 2:leaf=-0.232519701
+booster[417]:
+0:leaf=0
+booster[418]:
+0:leaf=0.0525718257
+booster[419]:
+0:leaf=-0.256926537
+booster[420]:
+0:leaf=-0.137148499
+booster[421]:
+0:[f188<0.00299071311] yes=1,no=2,missing=2
+ 1:leaf=-0.304259628
+ 2:leaf=0.218529329
+booster[422]:
+0:leaf=0
+booster[423]:
+0:leaf=-0.0891201049
+booster[424]:
+0:leaf=0.418811917
+booster[425]:
+0:leaf=0.146890327
+booster[426]:
+0:[f314<0.0736998543] yes=1,no=2,missing=2
+ 1:leaf=0.228764802
+ 2:leaf=-0.213755086
+booster[427]:
+0:leaf=0
+booster[428]:
+0:leaf=0.0733654499
+booster[429]:
+0:leaf=-0.246126905
+booster[430]:
+0:leaf=0.0720836744
+booster[431]:
+0:[f200<0.694549739] yes=1,no=2,missing=2
+ 1:leaf=0.0206411798
+ 2:leaf=0.277169734
+booster[432]:
+0:leaf=-0.073422946
+booster[433]:
+0:leaf=-0.0147545561
+booster[434]:
+0:leaf=-0.305886656
+booster[435]:
+0:leaf=-0.144744277
+booster[436]:
+0:leaf=-0.145266756
+booster[437]:
+0:leaf=0
+booster[438]:
+0:leaf=-0.0504671633
+booster[439]:
+0:leaf=0.0598066337
+booster[440]:
+0:leaf=0
+booster[441]:
+0:leaf=0.125197902
+booster[442]:
+0:leaf=0
+booster[443]:
+0:[f194<0.367263854] yes=1,no=2,missing=2
+ 1:leaf=0.239430904
+ 2:leaf=-0.155381247
+booster[444]:
+0:leaf=-0.213025123
+booster[445]:
+0:leaf=0.151535228
+booster[446]:
+0:leaf=0.0641338527
+booster[447]:
+0:leaf=0
+booster[448]:
+0:leaf=-0.137584746
+booster[449]:
+0:leaf=-0.112594984
+booster[450]:
+0:leaf=-0.081460461
+booster[451]:
+0:[f84<0.0876842365] yes=1,no=2,missing=2
+ 1:leaf=-0.312166154
+ 2:leaf=0.0552647598
+booster[452]:
+0:leaf=0
+booster[453]:
+0:leaf=0
+booster[454]:
+0:leaf=0.142104492
+booster[455]:
+0:leaf=-0.124098368
+booster[456]:
+0:leaf=0.216716871
+booster[457]:
+0:leaf=0
+booster[458]:
+0:leaf=0
+booster[459]:
+0:leaf=0
+booster[460]:
+0:leaf=0.0743658021
+booster[461]:
+0:leaf=-0.288410544
+booster[462]:
+0:leaf=-0.286461949
+booster[463]:
+0:[f67<0.459448159] yes=1,no=2,missing=2
+ 1:leaf=0.301282525
+ 2:leaf=-0.128853947
+booster[464]:
+0:leaf=-0.218828171
+booster[465]:
+0:leaf=-0.0451834872
+booster[466]:
+0:[f68<0.15565896] yes=1,no=2,missing=2
+ 1:leaf=-0.356358021
+ 2:leaf=0.295220673
+booster[467]:
+0:leaf=0
+booster[468]:
+0:leaf=-0.0456718393
+booster[469]:
+0:leaf=0
+booster[470]:
+0:leaf=-0.0146109564
+booster[471]:
+0:[f316<0.0152925532] yes=1,no=2,missing=2
+ 1:leaf=-0.273110121
+ 2:leaf=0.126689583
+booster[472]:
+0:leaf=0
+booster[473]:
+0:leaf=-0.102619037
+booster[474]:
+0:leaf=0.196234092
+booster[475]:
+0:leaf=0.144808874
+booster[476]:
+0:[f202<0.0283111315] yes=1,no=2,missing=2
+ 1:leaf=0.224919021
+ 2:leaf=-0.163933232
+booster[477]:
+0:leaf=-0.291219085
+booster[478]:
+0:leaf=0
+booster[479]:
+0:leaf=-0.0726405308
+booster[480]:
+0:leaf=-0.0196002647
+booster[481]:
+0:[f80<0.222265631] yes=1,no=2,missing=2
+ 1:leaf=0.222458437
+ 2:leaf=-0.327085018
+booster[482]:
+0:leaf=0.244151875
+booster[483]:
+0:leaf=0.123805054
+booster[484]:
+0:[f422<0.0840862989] yes=1,no=2,missing=2
+ 1:leaf=-0.291021585
+ 2:leaf=0.608690023
+booster[485]:
+0:leaf=0.0273476169
+booster[486]:
+0:leaf=-0.0567220524
+booster[487]:
+0:leaf=0
+booster[488]:
+0:leaf=0
+booster[489]:
+0:leaf=0
+booster[490]:
+0:leaf=-0.157677382
+booster[491]:
+0:leaf=0
+booster[492]:
+0:leaf=0
+booster[493]:
+0:[f59<0.118526347] yes=1,no=2,missing=2
+ 1:leaf=-0.206727117
+ 2:leaf=0.235031262
+booster[494]:
+0:leaf=0.200649679
+booster[495]:
+0:leaf=0.0317698605
+booster[496]:
+0:[f325<0.00237114681] yes=1,no=2,missing=2
+ 1:leaf=-0.284638166
+ 2:leaf=0.302714676
+booster[497]:
+0:leaf=-0.16149658
+booster[498]:
+0:leaf=-0.0489787236
+booster[499]:
+0:leaf=-0.186543077
+booster[500]:
+0:leaf=0.0224552471
+booster[501]:
+0:leaf=0
+booster[502]:
+0:leaf=0.299140871
+booster[503]:
+0:leaf=-0.0182776842
+booster[504]:
+0:leaf=-0.148193553
+booster[505]:
+0:leaf=0
+booster[506]:
+0:leaf=0.135648489
+booster[507]:
+0:leaf=0
+booster[508]:
+0:leaf=-0.0297831148
+booster[509]:
+0:leaf=0
+booster[510]:
+0:leaf=0.0221739095
+booster[511]:
+0:leaf=-0.139465645
+booster[512]:
+0:leaf=0
+booster[513]:
+0:leaf=0.0620453022
+booster[514]:
+0:leaf=0.456754714
+booster[515]:
+0:leaf=0.0437158048
+booster[516]:
+0:leaf=0.161179513
+booster[517]:
+0:leaf=0.287626654
+booster[518]:
+0:leaf=0.038669508
+booster[519]:
+0:[f315<0.00110828911] yes=1,no=2,missing=2
+ 1:leaf=-0.284854591
+ 2:leaf=0.373995692
+booster[520]:
+0:leaf=0.0332719162
+booster[521]:
+0:leaf=0.0723555684
+booster[522]:
+0:leaf=0
+booster[523]:
+0:leaf=-0.101404607
+booster[524]:
+0:leaf=-0.207072467
+booster[525]:
+0:leaf=-0.0200187173
+booster[526]:
+0:leaf=0
+booster[527]:
+0:leaf=-0.267842501
+booster[528]:
+0:leaf=0.0191121157
+booster[529]:
+0:leaf=-0.0690090507
+booster[530]:
+0:leaf=0.0274512935
+booster[531]:
+0:leaf=-0.198984712
+booster[532]:
+0:leaf=0.27007252
+booster[533]:
+0:leaf=0
+booster[534]:
+0:leaf=-0.267539501
+booster[535]:
+0:leaf=-0.164048776
+booster[536]:
+0:leaf=0.0458427444
+booster[537]:
+0:leaf=0.228087142
+booster[538]:
+0:leaf=0
+booster[539]:
+0:leaf=0
+booster[540]:
+0:leaf=-0.218895689
+booster[541]:
+0:leaf=0.0669749826
+booster[542]:
+0:leaf=0
+booster[543]:
+0:leaf=0.0352085717
+booster[544]:
+0:leaf=0
+booster[545]:
+0:leaf=0
+booster[546]:
+0:leaf=0
+booster[547]:
+0:leaf=0
+booster[548]:
+0:leaf=0.097897172
+booster[549]:
+0:leaf=-0.017699657
+booster[550]:
+0:leaf=0.0537817962
+booster[551]:
+0:[f63<0.705408156] yes=1,no=2,missing=2
+ 1:leaf=-0.148012564
+ 2:leaf=0.193798929
+booster[552]:
+0:leaf=0.137364894
+booster[553]:
+0:leaf=-0.103206053
+booster[554]:
+0:leaf=0
+booster[555]:
+0:leaf=-0.0260114595
+booster[556]:
+0:[f195<0.141535029] yes=1,no=2,missing=2
+ 1:leaf=-0.237951264
+ 2:leaf=0.15758951
+booster[557]:
+0:leaf=0
+booster[558]:
+0:leaf=0.0641413108
+booster[559]:
+0:leaf=0.092469655
+booster[560]:
+0:leaf=0
+booster[561]:
+0:leaf=-0.0384781323
+booster[562]:
+0:leaf=0
+booster[563]:
+0:leaf=0
+booster[564]:
+0:leaf=0
+booster[565]:
+0:leaf=0
+booster[566]:
+0:leaf=0.0985744745
+booster[567]:
+0:leaf=0
+booster[568]:
+0:leaf=-0.0500385389
+booster[569]:
+0:leaf=-0.137037188
+booster[570]:
+0:leaf=-0.0244771205
+booster[571]:
+0:leaf=-0.135052413
+booster[572]:
+0:leaf=0
+booster[573]:
+0:leaf=0
+booster[574]:
+0:leaf=0.556281924
+booster[575]:
+0:leaf=0
+booster[576]:
+0:leaf=0
+booster[577]:
+0:leaf=0
+booster[578]:
+0:leaf=0.117288172
+booster[579]:
+0:leaf=-0.14623332
+booster[580]:
+0:leaf=0.0273251031
+booster[581]:
+0:leaf=0.00970449485
+booster[582]:
+0:leaf=-0.384331584
+booster[583]:
+0:leaf=0.02117984
+booster[584]:
+0:leaf=0
+booster[585]:
+0:leaf=0.113668881
+booster[586]:
+0:leaf=0.0476031564
+booster[587]:
+0:leaf=0
+booster[588]:
+0:leaf=4.28918574e-05
+booster[589]:
+0:leaf=-0.274787098
+booster[590]:
+0:leaf=0.0369812027
+booster[591]:
+0:leaf=0.098219417
+booster[592]:
+0:leaf=0
+booster[593]:
+0:leaf=0
+booster[594]:
+0:leaf=0
+booster[595]:
+0:leaf=0
+booster[596]:
+0:[f56<0.060425058] yes=1,no=2,missing=2
+ 1:leaf=0.0698521063
+ 2:leaf=-0.237216234
+booster[597]:
+0:leaf=0
+booster[598]:
+0:leaf=-0.0196564775
+booster[599]:
+0:leaf=0.403221697
+booster[600]:
+0:leaf=0.17692624
+booster[601]:
+0:leaf=0
+booster[602]:
+0:leaf=0.257648706
+booster[603]:
+0:leaf=-0.0663581789
+booster[604]:
+0:leaf=-0.236843303
+booster[605]:
+0:leaf=0.00621640543
+booster[606]:
+0:leaf=0.145256564
+booster[607]:
+0:leaf=0
+booster[608]:
+0:leaf=0.0263286307
+booster[609]:
+0:leaf=0
+booster[610]:
+0:leaf=0.0213002469
+booster[611]:
+0:leaf=0.0726298392
+booster[612]:
+0:leaf=0
+booster[613]:
+0:leaf=0.0956920981
+booster[614]:
+0:leaf=-0.132853314
+booster[615]:
+0:leaf=0
+booster[616]:
+0:leaf=0
+booster[617]:
+0:leaf=0
+booster[618]:
+0:leaf=0.0548972376
+booster[619]:
+0:leaf=0
+booster[620]:
+0:leaf=-0.0447600186
+booster[621]:
+0:leaf=-0.064706929
+booster[622]:
+0:leaf=0
+booster[623]:
+0:leaf=0.231768057
+booster[624]:
+0:leaf=0
+booster[625]:
+0:leaf=0
+booster[626]:
+0:leaf=0
+booster[627]:
+0:leaf=0
+booster[628]:
+0:leaf=0.0199343488
+booster[629]:
+0:leaf=0.0121009052
+booster[630]:
+0:leaf=0.106188416
+booster[631]:
+0:leaf=-0.054367505
+booster[632]:
+0:leaf=0
+booster[633]:
+0:leaf=-0.0948510692
+booster[634]:
+0:leaf=-0.0372618735
+booster[635]:
+0:leaf=-0.0879520774
+booster[636]:
+0:leaf=0.0586206391
+booster[637]:
+0:leaf=0
+booster[638]:
+0:leaf=-0.201971099
+booster[639]:
+0:leaf=0
+booster[640]:
+0:leaf=0.0732396692
+booster[641]:
+0:leaf=0
+booster[642]:
+0:leaf=0
+booster[643]:
+0:leaf=-0.127517864
+booster[644]:
+0:leaf=0.427871943
+booster[645]:
+0:leaf=-0.0306168497
+booster[646]:
+0:leaf=-0.023027217
+booster[647]:
+0:leaf=-0.112215467
+booster[648]:
+0:[f332<0.063832365] yes=1,no=2,missing=2
+ 1:leaf=-0.00539944367
+ 2:leaf=0.324248344
+booster[649]:
+0:leaf=-0.21985285
+booster[650]:
+0:leaf=-0.0447630286
+booster[651]:
+0:leaf=-0.0337308757
+booster[652]:
+0:leaf=0
+booster[653]:
+0:leaf=-0.0128007708
+booster[654]:
+0:leaf=0.105086111
+booster[655]:
+0:leaf=0.0600714572
+booster[656]:
+0:leaf=-0.0293469038
+booster[657]:
+0:leaf=0
+booster[658]:
+0:leaf=0
+booster[659]:
+0:leaf=-0.296649992
+booster[660]:
+0:leaf=-0.041189421
+booster[661]:
+0:leaf=0.2145634
+booster[662]:
+0:leaf=0
+booster[663]:
+0:leaf=0
+booster[664]:
+0:leaf=0.0717535317
+booster[665]:
+0:leaf=0.0337947682
+booster[666]:
+0:leaf=-0.193690389
+booster[667]:
+0:leaf=0
+booster[668]:
+0:leaf=0.059520442
+booster[669]:
+0:leaf=0
+booster[670]:
+0:leaf=-0.138662696
+booster[671]:
+0:leaf=-0.0212818179
+booster[672]:
+0:leaf=0
+booster[673]:
+0:leaf=0.0249625184
+booster[674]:
+0:leaf=0.572179735
+booster[675]:
+0:leaf=0.0400350466
+booster[676]:
+0:leaf=0.0267442241
+booster[677]:
+0:leaf=0.219164521
+booster[678]:
+0:leaf=0.0617500991
+booster[679]:
+0:leaf=0.362464517
+booster[680]:
+0:leaf=0.0937912986
+booster[681]:
+0:leaf=0.115155734
+booster[682]:
+0:leaf=0
+booster[683]:
+0:leaf=0.0796291232
+booster[684]:
+0:leaf=-0.293864816
+booster[685]:
+0:leaf=0
+booster[686]:
+0:leaf=0.0233119633
+booster[687]:
+0:leaf=0.179095477
+booster[688]:
+0:leaf=0
+booster[689]:
+0:leaf=0.155905813
+booster[690]:
+0:leaf=0
+booster[691]:
+0:leaf=0.0905218422
+booster[692]:
+0:leaf=0
+booster[693]:
+0:leaf=0.1132107
+booster[694]:
+0:leaf=0.199984461
+booster[695]:
+0:leaf=0
+booster[696]:
+0:leaf=0.0966109112
+booster[697]:
+0:leaf=-0.124367915
+booster[698]:
+0:leaf=0.182395339
+booster[699]:
+0:leaf=-0.275950909
+booster[700]:
+0:leaf=0
+booster[701]:
+0:[f196<0.211263731] yes=1,no=2,missing=2
+ 1:leaf=-0.177376553
+ 2:leaf=0.108613975
+booster[702]:
+0:leaf=0.284308016
+booster[703]:
+0:leaf=-0.00302518345
+booster[704]:
+0:leaf=-0.251259357
+booster[705]:
+0:leaf=0.0958471596
+booster[706]:
+0:[f78<0.259042025] yes=1,no=2,missing=2
+ 1:leaf=-0.277236581
+ 2:leaf=0.0831548646
+booster[707]:
+0:leaf=0
+booster[708]:
+0:leaf=0
+booster[709]:
+0:leaf=0
+booster[710]:
+0:leaf=-0.00376203656
+booster[711]:
+0:[f187<0.474989116] yes=1,no=2,missing=2
+ 1:leaf=0.252456993
+ 2:leaf=-0.16043219
+booster[712]:
+0:leaf=0
+booster[713]:
+0:leaf=-0.0363339074
+booster[714]:
+0:leaf=0
+booster[715]:
+0:leaf=0.0920268893
+booster[716]:
+0:leaf=-0.0906013474
+booster[717]:
+0:leaf=0
+booster[718]:
+0:leaf=0.112595811
+booster[719]:
+0:leaf=0
+booster[720]:
+0:leaf=0.158913374
+booster[721]:
+0:leaf=0.0102665368
+booster[722]:
+0:leaf=0
+booster[723]:
+0:leaf=0.0388549082
+booster[724]:
+0:leaf=-0.126467779
+booster[725]:
+0:leaf=0.0879370719
+booster[726]:
+0:leaf=0
+booster[727]:
+0:leaf=0
+booster[728]:
+0:leaf=-0.0916913226
+booster[729]:
+0:leaf=0.292742938
+booster[730]:
+0:leaf=-0.161035761
+booster[731]:
+0:leaf=0.147430763
+booster[732]:
+0:leaf=0.212285504
+booster[733]:
+0:leaf=0
+booster[734]:
+0:leaf=0
+booster[735]:
+0:leaf=-0.0989863873
+booster[736]:
+0:leaf=-0.0720007643
+booster[737]:
+0:leaf=0
+booster[738]:
+0:leaf=0
+booster[739]:
+0:leaf=-0.0531552508
+booster[740]:
+0:leaf=0.032374233
+booster[741]:
+0:leaf=0
+booster[742]:
+0:leaf=0
+booster[743]:
+0:leaf=-0.0994831324
+booster[744]:
+0:leaf=0
+booster[745]:
+0:leaf=0
+booster[746]:
+0:leaf=0.020354677
+booster[747]:
+0:leaf=0
+booster[748]:
+0:leaf=-0.0227231272
+booster[749]:
+0:leaf=0
+booster[750]:
+0:leaf=0
+booster[751]:
+0:[f60<0.0706880316] yes=1,no=2,missing=2
+ 1:leaf=0.0919953883
+ 2:leaf=-0.206213549
+booster[752]:
+0:leaf=0
+booster[753]:
+0:leaf=0
+booster[754]:
+0:leaf=0
+booster[755]:
+0:leaf=0
+booster[756]:
+0:leaf=0.0435093231
+booster[757]:
+0:leaf=0
+booster[758]:
+0:leaf=0.0755924433
+booster[759]:
+0:leaf=0
+booster[760]:
+0:leaf=-0.0193935242
+booster[761]:
+0:leaf=-0.0832129195
+booster[762]:
+0:leaf=-0.368999749
+booster[763]:
+0:leaf=0
+booster[764]:
+0:leaf=-0.121224403
+booster[765]:
+0:leaf=-0.00859790668
+booster[766]:
+0:leaf=-0.0105826128
+booster[767]:
+0:leaf=0
+booster[768]:
+0:leaf=-0.0128897764
+booster[769]:
+0:leaf=0
+booster[770]:
+0:leaf=0.0791016743
+booster[771]:
+0:leaf=0.0645926595
+booster[772]:
+0:leaf=0
+booster[773]:
+0:leaf=-0.056024801
+booster[774]:
+0:leaf=0
+booster[775]:
+0:leaf=-0.0579343811
+booster[776]:
+0:leaf=-0.0142135713
+booster[777]:
+0:leaf=0
+booster[778]:
+0:leaf=-0.15224503
+booster[779]:
+0:leaf=0.292450577
+booster[780]:
+0:leaf=0
+booster[781]:
+0:leaf=0.0572832935
+booster[782]:
+0:leaf=0
+booster[783]:
+0:leaf=0.012957179
+booster[784]:
+0:leaf=0.0182308462
+booster[785]:
+0:leaf=0.160303384
+booster[786]:
+0:leaf=0.0790827125
+booster[787]:
+0:leaf=0.227460474
+booster[788]:
+0:leaf=-0.0515763238
+booster[789]:
+0:leaf=-0.181491107
+booster[790]:
+0:leaf=0.036938414
+booster[791]:
+0:leaf=0
+booster[792]:
+0:leaf=0
+booster[793]:
+0:leaf=0.230895698
+booster[794]:
+0:leaf=-0.0862668753
+booster[795]:
+0:leaf=0.0716816261
+booster[796]:
+0:leaf=0.0818000808
+booster[797]:
+0:leaf=0
+booster[798]:
+0:leaf=0.06917537
+booster[799]:
+0:leaf=-0.0712724626
+booster[800]:
+0:leaf=-0.185514167
+booster[801]:
+0:leaf=-0.00289999158
+booster[802]:
+0:leaf=0
+booster[803]:
+0:leaf=0.016830612
+booster[804]:
+0:leaf=0.0105461543
+booster[805]:
+0:leaf=0.0374329388
+booster[806]:
+0:leaf=0.115151942
+booster[807]:
+0:leaf=0
+booster[808]:
+0:leaf=0
+booster[809]:
+0:leaf=0.264319986
+booster[810]:
+0:leaf=0
+booster[811]:
+0:leaf=-0.0489072911
+booster[812]:
+0:leaf=0
+booster[813]:
+0:leaf=0
+booster[814]:
+0:leaf=0
+booster[815]:
+0:leaf=0.0690706149
+booster[816]:
+0:leaf=0.0339610316
+booster[817]:
+0:leaf=0.254622906
+booster[818]:
+0:leaf=-0.0604241565
+booster[819]:
+0:leaf=-0.198131844
+booster[820]:
+0:leaf=0
+booster[821]:
+0:leaf=-0.00695313094
+booster[822]:
+0:leaf=0
+booster[823]:
+0:leaf=0
+booster[824]:
+0:leaf=0
+booster[825]:
+0:leaf=-0.0848834291
+booster[826]:
+0:[f192<0.398366272] yes=1,no=2,missing=2
+ 1:leaf=0.171179324
+ 2:leaf=-0.260447145
+booster[827]:
+0:leaf=0
+booster[828]:
+0:leaf=0
+booster[829]:
+0:leaf=0.0433525778
+booster[830]:
+0:leaf=-0.0276667252
+booster[831]:
+0:leaf=0
+booster[832]:
+0:leaf=-0.405350089
+booster[833]:
+0:leaf=0
+booster[834]:
+0:leaf=0.432347596
diff --git a/tabs/final_report.md b/tabs/final_report.md
index 85fc3e9..b72e3f9 100644
--- a/tabs/final_report.md
+++ b/tabs/final_report.md
@@ -24,6 +24,9 @@ Members: Austin Barton, Karpagam Karthikeyan, Keyang Lu, Isabelle Murray, Aditya
- [Dimensionality Reduction - t-SNE](#dimensionality-reduction---t-sne)
- [Classification](#classification)
- [**MusicNet** - Choice of Model and Algorithms:](#musicnet---choice-of-model-and-algorithms)
+ - [Decision Trees](#decision-trees)
+ - [Random Forests](#random-forests)
+ - [Gradient-Boosted Trees](#gradient-boosted-trees)
- [**GTZAN** - Choice of Model and Algorithms:](#gtzan---choice-of-model-and-algorithms)
- [Results and Discussion](#results-and-discussion)
- [Discussion](#discussion)
@@ -116,7 +119,137 @@ Here are the data points but in a 3-dimensional space reduced by t-SNE from the
### Classification
#### **MusicNet** - Choice of Model and Algorithms:
-**Chosen Model(s)**: We opted to only perform classification on the GTZAN dataset. MusicNet requires more thorough processing and either trimming the dataset down to obtain a better distribution of data by class or retrieving data manually. This is discussed more in the Discussion section.
+**Chosen Model(s)**: We decided to use decision trees, random forests, and gradient-boosted trees for our models.
+
+#### Decision Trees
+Methods in this section were inspired by a previous course, MATH 4210, and [scikit-learn's documentation](https://scikit-learn.org/stable/auto_examples/tree/plot_cost_complexity_pruning.html).
+
+Before jumping to more complicated, expensive, and generally less interpretable models, we analyze the results of classification with a single decision tree. A proper analysis and hyperparameter search for a single decision tree provides insight even if the model does not perform well, setting us up for success and narrowing the hyperparameter search spaces for the subsequent models.
+
+We perform a search for the best value of the cost complexity pruning penalty. This is a penalty coefficient on the complexity of the decision tree, where complexity is measured by the number of leaves in the tree (very similar in spirit to ridge and LASSO regression). Below we can see that, as we increase the cost complexity hyperparameter (alpha), the total impurity of the leaves increases.
+
+
+
+However, this does not mean the model performs worse as the cost complexity penalty increases. As shown below, there is an optimal cost complexity penalty, found at around 0.09, that results in the best test accuracy for the model. This is the cost complexity penalty we use for our decision tree.
+
+
+
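+A minimal sketch of how this selection can be made, assuming the `ccp_alphas` array and the per-alpha `test_scores1` list from our training script (names are illustrative):
+
+```python
+import numpy as np
+from sklearn.tree import DecisionTreeClassifier
+
+# Choose the alpha whose pruned tree scores highest on the held-out set
+# (about 0.09 in the plot above).
+best_alpha = ccp_alphas[int(np.argmax(test_scores1))]
+
+# Refit a single pruned tree with that penalty as the final model.
+best_dt = DecisionTreeClassifier(random_state=42, ccp_alpha=best_alpha)
+best_dt.fit(X_train, y_train)
+```
+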
+We then fit our decision tree with the cost complexity hyperparameter selected above. The depth of the resulting tree is 10, which gives subsequent models guidance on how deep a tree should (or should not) be. The results of this tree are summarized below in a confusion matrix, training and testing accuracy, and F1 score.
+
+
+
+Decision Tree Classifier
+Training Accuracy: 1.0
+Test Accuracy: 0.6458333333333334
+Test F1-Score: 0.6475694444444445
+
+We can see the model actually does quite well given how little training data there is and how poorly the data is distributed across classes. This baseline shows that our MIDI processing algorithm is, at least to some extent, effective at distinguishing certain composers from others.
+
+#### Random Forests
+| Hyperparameter | Description | Value(s) |
+|-----------------------|------------------------------------------------------|------------------------|
+| `n_estimators`        | Number of trees in the forest                        | 100                    |
+| `max_depth` | Maximum depth of the individual trees | 13 |
+| `max_features` | Number of features to consider for the best split | 1024 |
+| `random_state`        | Seed for random number generation                    | 42                     |
+
+Since random forests are, in our case, very computationally feasible, and since our analysis of decision tree performance by depth provides insight, we opted to search for the `max_depth` hyperparameter that would perform best. We experimentally found a `max_depth` of 13 to work best for random forests, in contrast to a best depth of 10 for a single decision tree. Our choice of `max_features` was based on the fact that many of the data samples are sparse, and only a few contain more than 1024 non-zero entries (and not by much more); we therefore felt 1024 to be reasonable and, through experimentation, found it to be effective.
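+
+A minimal sketch of a fit with these hyperparameters (assuming the same `X_train`/`y_train`/`X_test`/`y_test` splits used for the decision tree; the exact training code may differ):
+
+```python
+from sklearn.ensemble import RandomForestClassifier
+from sklearn.metrics import f1_score
+
+rf_clf = RandomForestClassifier(
+    n_estimators=100,    # number of trees in the forest
+    max_depth=13,        # found experimentally; deeper than the single tree's 10
+    max_features=1024,   # features considered per split; our rows are sparse
+    random_state=42,
+)
+rf_clf.fit(X_train, y_train)
+y_pred = rf_clf.predict(X_test)
+print(f"Test F1-Score: {f1_score(y_test, y_pred, average='weighted')}")
+```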
+
+Random Forest Classifier
+Training Accuracy: 1.0
+Test Accuracy: 0.8541666666666666
+Test F1-Score: 0.8519282808470453
+Maximum depth of Random Forest: 13
+
+#### Gradient-Boosted Trees
+**Model 1 Hyperparameters**:
+| Hyperparameter | Description | Value(s) |
+|-----------------------|------------------------------------------------------|------------------------|
+| `n_estimators` | Number of boosting stages to be run | 20 |
+| `learning_rate` | Step size shrinkage to prevent overfitting | 0.8 |
+| `max_depth` | Maximum depth of the individual trees | 10 |
+| `subsample`           | Fraction of training samples used per boosting round | 0.5                    |
+| `objective` | The objective function this model is minimizing | `multi:softmax` |
+| `early_stopping` | Stop training early if evaluation doesn't improve | None |
+| `random_state`        | Seed for random number generation                    | 42                     |
+
+**Model 2 Hyperparameters**:
+| Hyperparameter | Description | Value(s) |
+|-----------------------|------------------------------------------------------|------------------------|
+| `n_estimators` | Number of boosting stages to be run | 1000 |
+| `learning_rate` | Step size shrinkage to prevent overfitting | 0.8 |
+| `max_depth` | Maximum depth of the individual trees | 10 |
+| `subsample`           | Fraction of training samples used per boosting round | 0.5                    |
+| `objective` | The objective function this model is minimizing | `multi:softmax` |
+| `early_stopping` | Stop training early if evaluation doesn't improve | 100 |
+| `random_state`        | Seed for random number generation                    | 42                     |
+
+We chose these hyperparameters based on 1) the results from decision trees and random forests, and 2) our own experimentation searching through the space of possible hyperparameters. These two models are essentially the same, but we want to showcase how gradient-boosted trees, although effective, reach limits that adding more iterations will not fix. The learning rate was tuned through experimentation and search. The `max_depth` was experimented with, guided by the results from random forests and decision trees. We found that including all of the features reduced performance and caused the models to overfit extremely quickly. Because many of the row vectors are sparse, with only a few containing more than 1000 non-zero entries, we felt a `subsample` of 0.5 to be reasonable and, through experimentation, found it to be effective.
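+
+A minimal sketch of how Model 2 could be configured through xgboost's scikit-learn API (hyperparameters taken from the tables above; the eval-set and metric wiring is illustrative and assumes the `X_train`/`X_test` splits from our script):
+
+```python
+import xgboost as xgb
+
+# Model 2 configuration; Model 1 is the same but with n_estimators=20
+# and no early stopping.
+xgb_clf = xgb.XGBClassifier(
+    n_estimators=1000,
+    learning_rate=0.8,
+    max_depth=10,
+    subsample=0.5,               # fraction of training rows sampled per tree
+    objective="multi:softmax",
+    eval_metric="merror",        # the tables below also track AUC
+    early_stopping_rounds=100,   # halt once the eval metric stops improving
+    random_state=42,
+)
+# The held-out set drives early stopping and the per-iteration logs.
+xgb_clf.fit(X_train, y_train, eval_set=[(X_train, y_train), (X_test, y_test)])
+```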
+
+- **Gradient-Boosted Trees Training Results**
+Model 1 Training Table:
+| Iteration | Train AUC | Train Misclassification Error | Eval AUC | Eval Misclassification Error |
+|-----------|-----------|-------------------------------|----------|-------------------------------|
+| 0 | 0.86054 | 0.36111 | 0.77116 | 0.52083 |
+| 1 | 0.93284 | 0.21528 | 0.82366 | 0.47917 |
+| 2 | 0.95528 | 0.19444 | 0.84713 | 0.29167 |
+| 3 | 0.96822 | 0.17361 | 0.88281 | 0.25000 |
+| 4 | 0.97271 | 0.15972 | 0.88940 | 0.31250 |
+| 5 | 0.97109 | 0.13889 | 0.90380 | 0.33333 |
+| 6 | 0.97126 | 0.15278 | 0.89037 | 0.29167 |
+| 7 | 0.97764 | 0.13889 | 0.90454 | 0.27083 |
+| 8 | 0.97766 | 0.12500 | 0.92452 | 0.22917 |
+| 9 | 0.98132 | 0.12500 | 0.90117 | 0.31250 |
+| 10 | 0.98462 | 0.12500 | 0.92574 | 0.25000 |
+| 11 | 0.98734 | 0.11806 | 0.92663 | 0.22917 |
+| 12 | 0.98723 | 0.08333 | 0.92991 | 0.20833 |
+| 13 | 0.98879 | 0.07639 | 0.93026 | 0.22917 |
+| 14 | 0.99139 | 0.06944 | 0.93374 | 0.22917 |
+| 15 | 0.99309 | 0.07639 | 0.93643 | 0.22917 |
+| 16 | 0.99436 | 0.07639 | 0.93824 | 0.20833 |
+| 17 | 0.99524 | 0.04861 | 0.93467 | 0.22917 |
+| 18 | 0.99714 | 0.05556 | 0.93164 | 0.20833 |
+| 19 | 0.99742 | 0.03472 | 0.93645 | 0.20833 |
+
+Test results:
+XGBoost Classifier - 20 estimators, max_depth of 10, learning rate of 0.8, softmax objective function.
+Training Accuracy: 0.9652777777777778
+Test Accuracy: 0.8541666666666666
+Test F1-Score: 0.8519282808470453
+
+
+
+Model 2 Training Table:
+| Iteration | Train AUC | Train Misclassification Error | Eval AUC | Eval Misclassification Error |
+|-----------|-----------|-------------------------------|----------|-------------------------------|
+| 0 | 0.85925 | 0.36111 | 0.77116 | 0.52083 |
+| 1 | 0.92848 | 0.22917 | 0.84076 | 0.41667 |
+| 2 | 0.94987 | 0.20833 | 0.87133 | 0.27083 |
+| 3 | 0.95769 | 0.18056 | 0.89643 | 0.25000 |
+| 4 | 0.96958 | 0.15972 | 0.88770 | 0.22917 |
+| 5 | 0.96794 | 0.15278 | 0.90044 | 0.31250 |
+| 6 | 0.97244 | 0.11806 | 0.88905 | 0.33333 |
+| 7 | 0.97616 | 0.11806 | 0.87536 | 0.33333 |
+| 8 | 0.98422 | 0.10417 | 0.88341 | 0.33333 |
+| 9 | 0.98428 | 0.10417 | 0.88773 | 0.27083 |
+| 10 | 0.98491 | 0.09028 | 0.89605 | 0.25000 |
+| ... | ... | ... | ... | ... |
+| 160 | 0.99983 | 0.00694 | 0.91817 | 0.22917 |
+| 161 | 0.99983 | 0.00694 | 0.91692 | 0.22917 |
+| 162 | 0.99983 | 0.00694 | 0.91692 | 0.25000 |
+| 163 | 0.99983 | 0.00694 | 0.91742 | 0.18750 |
+| 164 | 0.99983 | 0.00694 | 0.91742 | 0.18750 |
+| 165 | 0.99983 | 0.00694 | 0.91519 | 0.25000 |
+| 166 | 0.99983 | 0.00694 | 0.91418 | 0.25000 |
+
+XGBoost Classifier - 1000 estimators, max_depth of 10, learning rate of 0.8, softmax objective function.
+Training Accuracy: 0.9930555555555556
+Test Accuracy: 0.8541666666666666
+Test F1-Score: 0.8519282808470453
+
+
+
+As we can see, training the model for more iterations does not result in better test performance. This is a prime example of overfitting, and the main takeaway is that boosting rounds past the point where the evaluation metric plateaus are wasted computation; early stopping exists precisely to cut them off.
#### **GTZAN** - Choice of Model and Algorithms:
**Chosen Model(s)**:
@@ -163,7 +296,7 @@ F1 Scores, confusion matrix, etc.
| Contributor Name | Contribution Type |
|------------------------|----------------------------------------|
-| Austin Barton | MusicNet Data Pre-Processing, MusicNet PCA, MIDI Parsing, Data Visualization, GitHub Pages|
+| Austin Barton | MusicNet Data Pre-Processing, MusicNet PCA, t-SNE, CNN framework, Decision Trees, Random Forests, Gradient-Boosted Trees, Figure generation and analysis, MIDI Parsing, Data Visualization, EDA, GitHub Pages|
| Aditya Radhakrishnan | Model Design & Implementation, Development/Iteration, Validation, Testing, Results Generation & Visualization, and Early Dataset Balancing Exploration |
| Isabelle Murray | GanttChart, Model Implementation/development, Testing, Results Generation & Visualization |
| Karpagam Karthikeyan | GanttChart, MusicNet Data Pre-Processing, Github Pages, Data Visualization, MIDI Parsing |