@@ -785,12 +785,12 @@ def capabilities() -> abstract_learner_pb2.LearnerCapabilities:
 class GradientBoostedTreesModel(core.CoreModel):
   r"""Gradient Boosted Trees learning algorithm.
 
-  A GBT (Gradient Boosted [Decision] Tree;
-  https://statweb.stanford.edu/~jhf/ftp/trebst.pdf) is a set of shallow
-  decision trees trained sequentially. Each tree is trained to predict and then
-  "correct" for the errors of the previously trained trees (more precisely each
-  tree predict the gradient of the loss relative to the model output).
-  GBTs use [early stopping](early_stopping.md) to avoid overfitting.
+  A [Gradient Boosted Trees](https://statweb.stanford.edu/~jhf/ftp/trebst.pdf)
+  (GBT), also known as Gradient Boosted Decision Trees (GBDT) or Gradient
+  Boosted Machines (GBM), is a set of shallow decision trees trained
+  sequentially. Each tree is trained to predict and then "correct" for the
+  errors of the previously trained trees (more precisely, each tree predicts
+  the gradient of the loss relative to the model output).
 
   Usage example:
 
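The boosting loop described in the new docstring text can be illustrated with a short, self-contained sketch. This is not this library's implementation: it assumes squared-error loss (so the negative gradient of the loss with respect to the model output is simply the residual) and uses scikit-learn's `DecisionTreeRegressor` as the shallow base learner purely for illustration.

```python
# Minimal sketch of gradient boosting, assuming squared-error loss and
# scikit-learn trees as base learners (illustration only, not this library).
import numpy as np
from sklearn.tree import DecisionTreeRegressor


def fit_gbt(x, y, num_trees=50, shrinkage=0.1, max_depth=3):
  """Trains shallow trees sequentially, each one fitting the loss gradient."""
  prediction = np.full(len(y), y.mean())  # Initial constant model.
  trees = []
  for _ in range(num_trees):
    # For squared-error loss, the negative gradient is the residual.
    negative_gradient = y - prediction
    tree = DecisionTreeRegressor(max_depth=max_depth)
    tree.fit(x, negative_gradient)
    # Each new tree "corrects" the errors of the previously trained trees.
    prediction += shrinkage * tree.predict(x)
    trees.append(tree)
  return trees, y.mean()


def predict_gbt(trees, initial_prediction, shrinkage, x):
  """Sums the scaled tree outputs on top of the initial constant prediction."""
  prediction = np.full(len(x), initial_prediction)
  for tree in trees:
    prediction += shrinkage * tree.predict(x)
  return prediction


# Example usage on synthetic data (use the same shrinkage as during training).
rng = np.random.default_rng(0)
x = rng.uniform(size=(200, 3))
y = x[:, 0] + np.sin(3 * x[:, 1]) + 0.1 * rng.normal(size=200)
trees, init = fit_gbt(x, y)
print(predict_gbt(trees, init, 0.1, x[:5]))
```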
@@ -1169,7 +1169,7 @@ class GradientBoostedTreesModel(core.CoreModel):
     subsample: Ratio of the dataset (sampling without replacement) used to train
       individual trees for the random sampling method. If \\"subsample\\" is set
       and if \\"sampling_method\\" is NOT set or set to \\"NONE\\", then
-      \\"sampling_method\\" is implicitely set to \\"RANDOM\\". In other words,
+      \\"sampling_method\\" is implicitly set to \\"RANDOM\\". In other words,
       to enable random subsampling, you only need to set "\\"subsample\\".
       Default: 1.0.
     uplift_min_examples_in_treatment: For uplift models only. Minimum number of
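The `subsample` / `sampling_method` interaction documented in the hunk above can be shown with a hedged usage sketch. It assumes the TensorFlow Decision Forests Keras API (`tensorflow_decision_forests.keras.GradientBoostedTreesModel` and `pd_dataframe_to_tf_dataset`); the synthetic dataset and column names are invented for illustration.

```python
# Usage sketch (assumptions: TF-DF Keras API; synthetic data and column names
# are hypothetical).
import numpy as np
import pandas as pd
import tensorflow_decision_forests as tfdf

# Hypothetical synthetic dataset.
rng = np.random.default_rng(0)
train_df = pd.DataFrame({
    "feature_1": rng.uniform(size=500),
    "feature_2": rng.uniform(size=500),
})
train_df["label"] = (train_df["feature_1"] + train_df["feature_2"] > 1.0).astype(int)
train_ds = tfdf.keras.pd_dataframe_to_tf_dataset(train_df, label="label")

# Setting `subsample` alone is enough: per the docstring above,
# `sampling_method` is implicitly switched to "RANDOM", so each tree is
# trained on 80% of the examples, sampled without replacement.
model = tfdf.keras.GradientBoostedTreesModel(subsample=0.8)
model.fit(train_ds)
```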