Merge pull request #29 from EchoDel/feature/package_updates
Updated LightGBM to 4.1 and Optuna to 3.4, along with the other packages
StatMixedML authored Dec 6, 2023
2 parents 4812206 + 2fa3eac commit 5f31fc2
Showing 3 changed files with 40 additions and 21 deletions.
17 changes: 17 additions & 0 deletions lightgbmlss/logger.py
@@ -0,0 +1,17 @@
+import logging
+
+
+class CustomLogger:
+    def __init__(self):
+        self.logger = logging.getLogger('lightgbm_custom')
+        self.logger.setLevel(logging.ERROR)
+
+    def info(self, message):
+        self.logger.info(message)
+
+    def warning(self, message):
+        # Suppress warnings by not doing anything
+        pass
+
+    def error(self, message):
+        self.logger.error(message)
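The new CustomLogger exposes the info/warning/error trio that LightGBM's logging hook calls into: warning() swallows messages outright, and the ERROR level filters info(), so only errors surface. A minimal sketch (not part of this commit) of wiring it up, assuming LightGBM >= 3.0 where register_logger() was introduced:

    import lightgbm as lgb
    from lightgbmlss.logger import CustomLogger

    # Route LightGBM's log output through the custom logger: warnings are
    # dropped, info is filtered by the ERROR level, errors still propagate.
    lgb.register_logger(CustomLogger())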
18 changes: 10 additions & 8 deletions lightgbmlss/model.py
@@ -9,6 +9,9 @@


 import lightgbm as lgb
+
+from lightgbmlss.distributions.distribution_utils import DistributionClass
+from lightgbmlss.logger import CustomLogger
 from lightgbmlss.utils import *
 import optuna
 from optuna.samplers import TPESampler
Expand Down Expand Up @@ -47,6 +50,7 @@
         Tuple[Dataset, Dataset, Dict[str, Any]]
     ]
 
+lgb.register_logger(CustomLogger())
 
 class LightGBMLSS:
     """
@@ -59,7 +63,7 @@ class LightGBMLSS:
     start_values : np.ndarray
         Starting values for each distributional parameter.
     """
-    def __init__(self, dist):
+    def __init__(self, dist: DistributionClass):
         self.dist = dist  # Distribution object
         self.start_values = None  # Starting values for distributional parameters

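With the new annotation, dist must be a DistributionClass instance. A hypothetical usage sketch, assuming the Gaussian distribution class shipped in lightgbmlss.distributions:

    from lightgbmlss.model import LightGBMLSS
    from lightgbmlss.distributions.Gaussian import Gaussian

    # Both Gaussian parameters (loc, scale) are modelled by the booster.
    lgblss = LightGBMLSS(Gaussian())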
@@ -79,9 +83,9 @@ def set_params(self, params: Dict[str, Any]) -> Dict[str, Any]:
"""
params_adj = {"num_class": self.dist.n_dist_param,
"metric": "None",
"objective": "None",
"random_seed": 123,
"verbose": -1}
"verbose": -1,
"objective": self.dist.objective_fn}
params.update(params_adj)

return params
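Moving the objective into params follows the LightGBM 4.x API: the fobj= keyword was removed from lgb.train() and lgb.cv(), and a custom objective is instead passed as a callable under params["objective"] (which is why fobj disappears from the train and cv calls below). A minimal sketch of that calling convention, using a toy squared-error objective in place of the library's objective_fn:

    import lightgbm as lgb
    import numpy as np

    def squared_error(preds, train_data):
        # LightGBM >= 4.0 custom objective: return the gradient and hessian
        # of 0.5 * (preds - y)**2 with respect to preds.
        residual = preds - train_data.get_label()
        return residual, np.ones_like(residual)

    X, y = np.random.rand(100, 3), np.random.rand(100)
    params = {"objective": squared_error, "verbose": -1}
    booster = lgb.train(params, lgb.Dataset(X, label=y), num_boost_round=10)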
@@ -171,7 +175,6 @@ def train(self,
         self.booster = lgb.train(params,
                                  train_set,
                                  num_boost_round=num_boost_round,
-                                 fobj=self.dist.objective_fn,
                                  feval=self.dist.metric_fn,
                                  valid_sets=valid_sets,
                                  valid_names=valid_names,
@@ -265,7 +268,6 @@ def cv(self,

         self.bstLSS_cv = lgb.cv(params,
                                 train_set,
-                                fobj=self.dist.objective_fn,
                                 feval=self.dist.metric_fn,
                                 num_boost_round=num_boost_round,
                                 folds=folds,
@@ -389,13 +391,13 @@ def objective(trial):
                 callbacks=[pruning_callback, early_stopping_callback],
                 seed=seed,
             )
-
+            print(lgblss_param_tuning)
             # Extract the optimal number of boosting rounds
-            opt_rounds = np.argmin(np.array(lgblss_param_tuning[f"{self.dist.loss_fn}-mean"])) + 1
+            opt_rounds = np.argmin(np.array(lgblss_param_tuning[f"valid {self.dist.loss_fn}-mean"])) + 1
             trial.set_user_attr("opt_round", int(opt_rounds))
 
             # Extract the best score
-            best_score = np.min(np.array(lgblss_param_tuning[f"{self.dist.loss_fn}-mean"]))
+            best_score = np.min(np.array(lgblss_param_tuning[f"valid {self.dist.loss_fn}-mean"]))
 
             return best_score
 
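The added "valid " prefix tracks another LightGBM 4.x change: lgb.cv() now keys its results by evaluation-set name, so "<loss>-mean" becomes "valid <loss>-mean". A quick sketch with a built-in metric to show the key shape:

    import lightgbm as lgb
    import numpy as np

    X, y = np.random.rand(200, 4), np.random.rand(200)
    cv_results = lgb.cv({"objective": "regression", "verbose": -1},
                        lgb.Dataset(X, label=y),
                        num_boost_round=5, nfold=3)
    print(list(cv_results))  # ['valid l2-mean', 'valid l2-stdv'] on 4.x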
26 changes: 13 additions & 13 deletions setup.py
@@ -16,20 +16,20 @@
     zip_safe=True,
     python_requires=">=3.9",
     install_requires=[
-        "lightgbm~=3.3.5",
-        "torch~=2.0.1",
-        "pyro-ppl~=1.8.5",
-        "optuna~=3.2.0",
+        "lightgbm~=4.1.0",
+        "torch~=2.1.1",
+        "pyro-ppl~=1.8.6",
+        "optuna~=3.4.0",
         "properscoring~=0.1",
-        "scikit-learn~=1.2.2",
-        "numpy~=1.24.3",
-        "pandas~=2.0.3",
-        "plotnine~=0.12.1",
-        "scipy~=1.11.1",
-        "seaborn~=0.12.2",
-        "tqdm~=4.65.0",
-        "matplotlib~=3.7.2",
-        "ipython~=8.14.0",
+        "scikit-learn~=1.3.2",
+        "numpy~=1.26.2",
+        "pandas~=2.1.3",
+        "plotnine~=0.12.4",
+        "scipy~=1.11.4",
+        "seaborn~=0.13.0",
+        "tqdm~=4.66.1",
+        "matplotlib~=3.8.2",
+        "ipython~=8.18.1",
     ],
     extras_require={
         "docs": ["mkdocs", "mkdocstrings[python]", "mkdocs-jupyter"]
