@@ -686,7 +686,6 @@ class XGBRegressor(Regressor, XGBoost):
686
686
versioning. It's highly recommended to provide a name if you
687
687
plan to reuse the model later.
688
688
689
-
690
689
Model Training
691
690
^^^^^^^^^^^^^^^
692
691
@@ -738,8 +737,8 @@ class XGBRegressor(Regressor, XGBoost):
738
737
739
738
.. note::
740
739
741
- In models such as ``XGB ``, feature importance is calculated
742
- using the MDI (Mean Decreased Impurity) . To determine the final score,
740
+ In models such as ``XGBoost``, feature importance is calculated
741
+ using the average gain of each tree. To determine the final score,
743
742
VerticaPy sums the scores of each tree, normalizes them and applies an
744
743
activation function to scale them.
745
744
@@ -873,11 +872,11 @@ class XGBRegressor(Regressor, XGBoost):
873
872
874
873
.. important::
875
874
876
- Machine learning models with two predictors can usually
877
- benefit from their own contour plot. This visual representation
878
- aids in exploring predictions and gaining a deeper understanding
879
- of how these models perform in different scenarios.
880
- Please refer to :ref:`chart_gallery.contour` for more examples.
875
+ Machine learning models with two predictors can usually
876
+ benefit from their own contour plot. This visual representation
877
+ aids in exploring predictions and gaining a deeper understanding
878
+ of how these models perform in different scenarios.
879
+ Please refer to :ref:`chart_gallery.contour` for more examples.
881
880
882
881
Model Register
883
882
^^^^^^^^^^^^^^
@@ -1469,8 +1468,8 @@ class XGBClassifier(MulticlassClassifier, XGBoost):
1469
1468
1470
1469
.. note::
1471
1470
1472
- In models such as ``XGB ``, feature importance is calculated
1473
- using the MDI (Mean Decreased Impurity) . To determine the final score,
1471
+ In models such as ``XGBoost``, feature importance is calculated
1472
+ using the average gain of each tree. To determine the final score,
1474
1473
VerticaPy sums the scores of each tree, normalizes them and applies an
1475
1474
activation function to scale them.
1476
1475
@@ -1519,7 +1518,6 @@ class XGBClassifier(MulticlassClassifier, XGBoost):
1519
1518
.. raw:: html
1520
1519
:file: SPHINX_DIRECTORY/figures/machine_learning_vertica_xgb_classifier_report_cutoff.html
1521
1520
1522
-
1523
1521
You can also use the
1524
1522
:py:mod:`verticapy.machine_learning.vertica.ensemble.XGBClassifier.score`
1525
1523
function to compute any classification metric. The default metric is the accuracy:
@@ -1734,11 +1732,11 @@ class XGBClassifier(MulticlassClassifier, XGBoost):
1734
1732
1735
1733
.. important::
1736
1734
1737
- Machine learning models with two predictors can usually
1738
- benefit from their own contour plot. This visual representation
1739
- aids in exploring predictions and gaining a deeper understanding
1740
- of how these models perform in different scenarios.
1741
- Please refer to :ref:`chart_gallery.contour` for more examples.
1735
+ Machine learning models with two predictors can usually
1736
+ benefit from their own contour plot. This visual representation
1737
+ aids in exploring predictions and gaining a deeper understanding
1738
+ of how these models perform in different scenarios.
1739
+ Please refer to :ref:`chart_gallery.contour` for more examples.
1742
1740
1743
1741
Parameter Modification
1744
1742
^^^^^^^^^^^^^^^^^^^^^^^
0 commit comments