
Commit

Merge pull request #453 from MannLabs/fix-max-iterations-param-in-classifier

Add missing max iterations parameter
anna-charlotte authored Jan 27, 2025
2 parents 1a6909c + 1999c01 commit 9f240ef
Showing 1 changed file with 8 additions and 0 deletions.
8 changes: 8 additions & 0 deletions alphadia/workflow/peptidecentric.py
@@ -99,6 +99,7 @@

def get_classifier_base(
enable_two_step_classifier: bool = False,
two_step_classifier_max_iterations: int = 5,
enable_nn_hyperparameter_tuning: bool = False,
fdr_cutoff: float = 0.01,
):
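
For context, a hedged usage sketch of the patched factory; the call below is illustrative, not taken from the repository, and the import path is inferred from the file shown above.

from alphadia.workflow.peptidecentric import get_classifier_base

# Illustrative only: exercises the keyword added in this PR.
classifier = get_classifier_base(
    enable_two_step_classifier=True,
    two_step_classifier_max_iterations=10,  # overrides the default of 5
    fdr_cutoff=0.01,
)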
@@ -110,6 +111,9 @@ def get_classifier_base(
If True, uses logistic regression + neural network.
If False (default), uses only neural network.
two_step_classifier_max_iterations : int
Maximum number of iterations within .fit_predict() of the two-step classifier.
enable_nn_hyperparameter_tuning: bool, optional
If True, uses hyperparameter tuning for the neural network.
If False (default), uses default hyperparameters for the neural network.
@@ -136,6 +140,7 @@
first_classifier=LogisticRegressionClassifier(),
second_classifier=nn_classifier,
second_fdr_cutoff=fdr_cutoff,
max_iterations=two_step_classifier_max_iterations,
)
else:
return nn_classifier
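
The new max_iterations argument caps the refinement loop inside the two-step classifier's .fit_predict(). Below is a hypothetical sketch of the kind of loop such a cap bounds; it is not AlphaDIA's actual TwoStepClassifier, and the class body, thresholds, and convergence rule are all assumptions.

import numpy as np

# Hypothetical sketch -- NOT AlphaDIA's real TwoStepClassifier; it only
# illustrates what a max_iterations cap typically bounds.
class TwoStepClassifierSketch:
    def __init__(self, first_classifier, second_classifier,
                 second_fdr_cutoff=0.01, max_iterations=5):
        self.first_classifier = first_classifier
        self.second_classifier = second_classifier
        self.second_fdr_cutoff = second_fdr_cutoff
        self.max_iterations = max_iterations  # the parameter wired up in this PR

    def fit_predict(self, X, y):
        selected = np.ones(len(X), dtype=bool)
        previous = -1
        for _ in range(self.max_iterations):  # hard cap on refinement rounds
            # Pass 1: the cheap linear classifier prunes the candidate set.
            self.first_classifier.fit(X[selected], y[selected])
            selected = self.first_classifier.predict_proba(X)[:, 1] > 0.5
            # Pass 2: the neural network refines the survivors. (In the real
            # flow, second_fdr_cutoff would gate acceptance at this point.)
            self.second_classifier.fit(X[selected], y[selected])
            if selected.sum() == previous:  # assumed convergence check
                break
            previous = selected.sum()
        return selected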
@@ -179,6 +184,9 @@ def init_fdr_manager(self):
enable_two_step_classifier=self.config["fdr"][
"enable_two_step_classifier"
],
two_step_classifier_max_iterations=self.config["fdr"][
"two_step_classifier_max_iterations"
],
enable_nn_hyperparameter_tuning=self.config["fdr"][
"enable_nn_hyperparameter_tuning"
],
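The three lookups above imply an "fdr" section in the workflow config. A hedged sketch of the corresponding mapping; only the key names are confirmed by the diff, while the values and surrounding structure are assumptions.

# Assumed shape of self.config as consumed by init_fdr_manager.
config = {
    "fdr": {
        "enable_two_step_classifier": True,
        "two_step_classifier_max_iterations": 5,  # the key this PR wires up
        "enable_nn_hyperparameter_tuning": False,
    },
}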
