Commit

Fix PR comments
benglewis committed Jan 14, 2025
1 parent 7059161 commit 182bef6
Showing 3 changed files with 9 additions and 9 deletions.
8 changes: 4 additions & 4 deletions docs/source-pytorch/visualize/loggers.rst
@@ -60,24 +60,24 @@ Track and Visualize Experiments
 MLflow Logger
 -------------

-The MLflow logger in PyTorch Lightning now includes a `checkpoint_artifact_path_prefix` parameter. This parameter allows you to prefix the checkpoint artifact's path when logging checkpoints as artifacts.
+The MLflow logger in PyTorch Lightning now includes a `checkpoint_path_prefix` parameter. This parameter allows you to prefix the checkpoint artifact's path when logging checkpoints as artifacts.

 Example usage:

 .. code-block:: python

-    from lightning.pytorch import Trainer
+    import lightning as L
     from lightning.pytorch.loggers import MLFlowLogger

     mlf_logger = MLFlowLogger(
         experiment_name="lightning_logs",
         tracking_uri="file:./ml-runs",
         checkpoint_artifact_path_prefix="my_prefix"
     )
-    trainer = Trainer(logger=mlf_logger)
+    trainer = L.Trainer(logger=mlf_logger)

     # Your LightningModule definition
-    class LitModel(LightningModule):
+    class LitModel(L.LightningModule):
         def training_step(self, batch, batch_idx):
             # example
             self.logger.experiment.whatever_ml_flow_supports(...)
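
For reference, a minimal sketch of the example's constructor call using the parameter name this commit settles on (the argument values are just the ones from the snippet above):

    from lightning.pytorch.loggers import MLFlowLogger

    # Sketch only: same MLFlowLogger arguments as the docs example, with the
    # parameter name renamed to checkpoint_path_prefix.
    mlf_logger = MLFlowLogger(
        experiment_name="lightning_logs",
        tracking_uri="file:./ml-runs",
        checkpoint_path_prefix="my_prefix",
    )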
8 changes: 4 additions & 4 deletions src/lightning/pytorch/loggers/mlflow.py
@@ -97,7 +97,7 @@ def any_lightning_module_function_or_hook(self):
               :paramref:`~lightning.pytorch.callbacks.Checkpoint.save_top_k` ``== -1``
               which also logs every checkpoint during training.
             * if ``log_model == False`` (default), no checkpoint is logged.
-        checkpoint_artifact_path_prefix: A string to prefix the checkpoint artifact's path.
+        checkpoint_path_prefix: A string to prefix the checkpoint artifact's path.
         prefix: A string to put at the beginning of metric keys.
         artifact_location: The location to store run artifacts. If not provided, the server picks an appropriate
             default.
@@ -121,7 +121,7 @@ def __init__(
         tags: Optional[dict[str, Any]] = None,
         save_dir: Optional[str] = "./mlruns",
         log_model: Literal[True, False, "all"] = False,
-        checkpoint_artifact_path_prefix: str = "",
+        checkpoint_path_prefix: str = "",
         prefix: str = "",
         artifact_location: Optional[str] = None,
         run_id: Optional[str] = None,
@@ -148,7 +148,7 @@ def __init__(
         self._artifact_location = artifact_location
         self._log_batch_kwargs = {} if synchronous is None else {"synchronous": synchronous}
         self._initialized = False
-        self._checkpoint_artifact_path_prefix = checkpoint_artifact_path_prefix
+        self._checkpoint_path_prefix = checkpoint_path_prefix

         from mlflow.tracking import MlflowClient

@@ -363,7 +363,7 @@ def _scan_and_log_checkpoints(self, checkpoint_callback: ModelCheckpoint) -> None:
             aliases = ["latest", "best"] if p == checkpoint_callback.best_model_path else ["latest"]

             # Artifact path on mlflow
-            artifact_path = Path(self._checkpoint_artifact_path_prefix) / Path(p).stem
+            artifact_path = Path(self._checkpoint_path_prefix) / Path(p).stem

             # Log the checkpoint
             self.experiment.log_artifact(self._run_id, p, artifact_path)
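
As an aside, a minimal sketch of what this path construction produces (the checkpoint filename and prefix below are hypothetical):

    from pathlib import Path

    # Hypothetical values for illustration only.
    checkpoint_path_prefix = "my_prefix"
    p = "checkpoints/epoch=2-step=150.ckpt"

    # Same construction as in _scan_and_log_checkpoints: the prefix becomes the
    # parent directory and the checkpoint file's stem becomes the artifact name.
    artifact_path = Path(checkpoint_path_prefix) / Path(p).stem
    print(artifact_path)  # my_prefix/epoch=2-step=150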
2 changes: 1 addition & 1 deletion tests/tests_pytorch/loggers/test_mlflow.py
@@ -436,7 +436,7 @@ def test_mlflow_log_model_with_checkpoint_artifact_path_prefix(mlflow_mock, tmp_path):

     # Get model, logger, trainer and train
     model = BoringModel()
-    logger = MLFlowLogger("test", save_dir=str(tmp_path), log_model="all", checkpoint_artifact_path_prefix="my_prefix")
+    logger = MLFlowLogger("test", save_dir=str(tmp_path), log_model="all", checkpoint_path_prefix="my_prefix")
     logger = mock_mlflow_run_creation(logger, experiment_id="test-id")

     trainer = Trainer(
