Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
12ceecf
feat: implement requirements validation for custom blocks.
sayakpaul Aug 20, 2025
127e9a3
up
sayakpaul Aug 20, 2025
37d3887
unify.
sayakpaul Aug 20, 2025
d2731ab
Merge branch 'main' into requirements-custom-blocks
sayakpaul Aug 21, 2025
756a156
Merge branch 'main' into requirements-custom-blocks
sayakpaul Aug 29, 2025
d159ae0
Merge branch 'main' into requirements-custom-blocks
sayakpaul Sep 2, 2025
ecbd907
Merge branch 'main' into requirements-custom-blocks
sayakpaul Sep 12, 2025
b7fba89
Merge branch 'main' into requirements-custom-blocks
sayakpaul Sep 23, 2025
046be83
up
sayakpaul Oct 2, 2025
2c4d73d
Merge branch 'main' into requirements-custom-blocks
sayakpaul Oct 20, 2025
e52cabe
Merge branch 'main' into requirements-custom-blocks
sayakpaul Oct 22, 2025
35d8d97
Merge branch 'main' into requirements-custom-blocks
sayakpaul Oct 22, 2025
024c2b9
Merge branch 'main' into requirements-custom-blocks
sayakpaul Oct 27, 2025
1de4402
up
sayakpaul Oct 27, 2025
5b7d0df
Merge branch 'main' into requirements-custom-blocks
sayakpaul Oct 29, 2025
eeb3445
Merge branch 'main' into requirements-custom-blocks
sayakpaul Nov 1, 2025
a9165eb
Merge branch 'main' into requirements-custom-blocks
sayakpaul Nov 3, 2025
a88d11b
resolve conflicts.
sayakpaul Nov 6, 2025
3879e32
Merge branch 'main' into requirements-custom-blocks
sayakpaul Jan 20, 2026
7b43d0e
add tests
sayakpaul Jan 20, 2026
60e3284
Merge branch 'main' into requirements-custom-blocks
sayakpaul Jan 20, 2026
79fa0e2
resolve merge conflicts.
sayakpaul Feb 16, 2026
445c42e
Merge branch 'main' into requirements-custom-blocks
sayakpaul Feb 17, 2026
2927353
Apply suggestions from code review
sayakpaul Feb 17, 2026
e8d4612
reviewer feedback.
sayakpaul Feb 17, 2026
2504341
Merge branch 'main' into requirements-custom-blocks
sayakpaul Feb 18, 2026
f274df4
[docs] validation for custom blocks (#13156)
stevhliu Feb 18, 2026
5316fe2
Merge branch 'main' into requirements-custom-blocks
sayakpaul Feb 25, 2026
ba73d82
Merge branch 'main' into requirements-custom-blocks
sayakpaul Feb 26, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
47 changes: 46 additions & 1 deletion docs/source/en/modular_diffusers/custom_blocks.md
Original file line number Diff line number Diff line change
Expand Up @@ -332,4 +332,49 @@ Make your custom block work with Mellon's visual interface. See the [Mellon Cust
Browse the [Modular Diffusers Custom Blocks](https://huggingface.co/collections/diffusers/modular-diffusers-custom-blocks) collection for inspiration and ready-to-use blocks.

</hfoption>
</hfoptions>
</hfoptions>

## Dependencies

Declaring package dependencies in custom blocks prevents runtime import errors later on. Diffusers validates the dependencies and logs a warning if a package is missing or incompatible.

Set a `_requirements` attribute in your block class, mapping package names to version specifiers.

```py
from diffusers.modular_pipelines import PipelineBlock

class MyCustomBlock(PipelineBlock):
_requirements = {
"transformers": ">=4.44.0",
"sentencepiece": ">=0.2.0"
}
```

When there are blocks with different requirements, Diffusers merges their requirements.

```py
from diffusers.modular_pipelines import SequentialPipelineBlocks

class BlockA(PipelineBlock):
_requirements = {"transformers": ">=4.44.0"}
# ...

class BlockB(PipelineBlock):
_requirements = {"sentencepiece": ">=0.2.0"}
# ...

pipe = SequentialPipelineBlocks.from_blocks_dict({
"block_a": BlockA,
"block_b": BlockB,
})
```

When this block is saved with [`~ModularPipeline.save_pretrained`], the requirements are saved to the `modular_config.json` file. When the block is loaded, Diffusers checks each requirement against the current environment. If a package is missing or its installed version doesn't satisfy the specifier, Diffusers logs one of the following warnings.

```md
# missing package
xyz-package was specified in the requirements but wasn't found in the current environment.

# version mismatch
xyz requirement 'specific-version' is not satisfied by the installed version 'actual-version'. Things might work unexpected.
```
2 changes: 0 additions & 2 deletions src/diffusers/commands/custom_blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,6 @@ def run(self):
# automap = self._create_automap(parent_class=parent_class, child_class=child_class)
# with open(CONFIG, "w") as f:
# json.dump(automap, f)
with open("requirements.txt", "w") as f:
f.write("")

def _choose_block(self, candidates, chosen=None):
for cls, base in candidates:
Expand Down
20 changes: 19 additions & 1 deletion src/diffusers/modular_pipelines/modular_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
InputParam,
InsertableDict,
OutputParam,
_validate_requirements,
combine_inputs,
combine_outputs,
format_components,
Expand Down Expand Up @@ -290,6 +291,7 @@ class ModularPipelineBlocks(ConfigMixin, PushToHubMixin):

config_name = "modular_config.json"
model_name = None
_requirements: dict[str, str] | None = None
_workflow_map = None

@classmethod
Expand Down Expand Up @@ -404,6 +406,9 @@ def from_pretrained(
"Selected model repository does not happear to have any custom code or does not have a valid `config.json` file."
)

if "requirements" in config and config["requirements"] is not None:
_ = _validate_requirements(config["requirements"])

class_ref = config["auto_map"][cls.__name__]
module_file, class_name = class_ref.split(".")
module_file = module_file + ".py"
Expand All @@ -428,8 +433,13 @@ def save_pretrained(self, save_directory, push_to_hub=False, **kwargs):
module = full_mod.rsplit(".", 1)[-1].replace("__dynamic__", "")
parent_module = self.save_pretrained.__func__.__qualname__.split(".", 1)[0]
auto_map = {f"{parent_module}": f"{module}.{cls_name}"}

self.register_to_config(auto_map=auto_map)

# resolve requirements
requirements = _validate_requirements(getattr(self, "_requirements", None))
if requirements:
self.register_to_config(requirements=requirements)

self.save_config(save_directory=save_directory, push_to_hub=push_to_hub, **kwargs)
config = dict(self.config)
self._internal_dict = FrozenDict(config)
Expand Down Expand Up @@ -1240,6 +1250,14 @@ def doc(self):
expected_configs=self.expected_configs,
)

@property
def _requirements(self) -> dict[str, str]:
    """Collect the declared ``_requirements`` of every sub-block, keyed by sub-block name.

    Sub-blocks that declare no requirements (or empty ones) are skipped. Flattening the
    per-block mappings into a single package->specifier dict happens later, at
    validation time.
    """
    return {
        name: sub_block._requirements
        for name, sub_block in self.sub_blocks.items()
        if getattr(sub_block, "_requirements", None)
    }


class LoopSequentialPipelineBlocks(ModularPipelineBlocks):
"""
Expand Down
85 changes: 85 additions & 0 deletions src/diffusers/modular_pipelines/modular_pipeline_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,12 @@

import PIL.Image
import torch
from packaging.specifiers import InvalidSpecifier, SpecifierSet

from ..configuration_utils import ConfigMixin, FrozenDict
from ..loaders.single_file_utils import _is_single_file_path_or_url
from ..utils import DIFFUSERS_LOAD_ID_FIELDS, is_torch_available, logging
from ..utils.import_utils import _is_package_available


if is_torch_available():
Expand Down Expand Up @@ -972,6 +974,89 @@ def make_doc_string(
return output


def _validate_requirements(reqs):
if reqs is None:
normalized_reqs = {}
else:
if not isinstance(reqs, dict):
raise ValueError(
"Requirements must be provided as a dictionary mapping package names to version specifiers."
)
normalized_reqs = _normalize_requirements(reqs)

if not normalized_reqs:
return {}

final: dict[str, str] = {}
for req, specified_ver in normalized_reqs.items():
req_available, req_actual_ver = _is_package_available(req)
if not req_available:
logger.warning(f"{req} was specified in the requirements but wasn't found in the current environment.")

if specified_ver:
try:
specifier = SpecifierSet(specified_ver)
except InvalidSpecifier as err:
raise ValueError(f"Requirement specifier '{specified_ver}' for {req} is invalid.") from err

if req_actual_ver == "N/A":
logger.warning(
f"Version of {req} could not be determined to validate requirement '{specified_ver}'. Things might work unexpected."
)
elif not specifier.contains(req_actual_ver, prereleases=True):
logger.warning(
f"{req} requirement '{specified_ver}' is not satisfied by the installed version {req_actual_ver}. Things might work unexpected."
)

final[req] = specified_ver

return final


def _normalize_requirements(reqs):
if not reqs:
return {}

normalized: "OrderedDict[str, str]" = OrderedDict()

def _accumulate(mapping: dict[str, Any]):
for pkg, spec in mapping.items():
if isinstance(spec, dict):
# This is recursive because blocks are composable. This way, we can merge requirements
# from multiple blocks.
_accumulate(spec)
continue

pkg_name = str(pkg).strip()
if not pkg_name:
raise ValueError("Requirement package name cannot be empty.")

spec_str = "" if spec is None else str(spec).strip()
if spec_str and not spec_str.startswith(("<", ">", "=", "!", "~")):
spec_str = f"=={spec_str}"

existing_spec = normalized.get(pkg_name)
if existing_spec is not None:
if not existing_spec and spec_str:
normalized[pkg_name] = spec_str
elif existing_spec and spec_str and existing_spec != spec_str:
try:
combined_spec = SpecifierSet(",".join(filter(None, [existing_spec, spec_str])))
except InvalidSpecifier:
logger.warning(
f"Conflicting requirements for '{pkg_name}' detected: '{existing_spec}' vs '{spec_str}'. Keeping '{existing_spec}'."
)
else:
normalized[pkg_name] = str(combined_spec)
continue

normalized[pkg_name] = spec_str

_accumulate(reqs)

return normalized


def combine_inputs(*named_input_lists: list[tuple[str, list[InputParam]]]) -> list[InputParam]:
"""
Combines multiple lists of InputParam objects from different blocks. For duplicate inputs, updates only if current
Expand Down
61 changes: 60 additions & 1 deletion tests/modular_pipelines/test_modular_pipelines_common.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import gc
import json
import os
import tempfile
from typing import Callable

Expand All @@ -8,6 +10,7 @@
import diffusers
from diffusers import AutoModel, ComponentsManager, ModularPipeline, ModularPipelineBlocks
from diffusers.guiders import ClassifierFreeGuidance
from diffusers.modular_pipelines import SequentialPipelineBlocks
from diffusers.modular_pipelines.modular_pipeline_utils import (
ComponentSpec,
ConfigSpec,
Expand All @@ -17,7 +20,13 @@
)
from diffusers.utils import logging

from ..testing_utils import backend_empty_cache, numpy_cosine_similarity_distance, require_accelerator, torch_device
from ..testing_utils import (
CaptureLogger,
backend_empty_cache,
numpy_cosine_similarity_distance,
require_accelerator,
torch_device,
)


class ModularPipelineTesterMixin:
Expand Down Expand Up @@ -400,6 +409,56 @@ def test_guider_cfg(self, expected_max_diff=1e-2):
assert max_diff > expected_max_diff, "Output with CFG must be different from normal inference"


class TestCustomBlockRequirements:
    def get_dummy_block_pipe(self):
        """Build a two-block sequential pipeline whose blocks declare `_requirements`."""

        class DummyBlockOne:
            # keep two arbitrary deps so that we can test warnings.
            _requirements = {"xyz": ">=0.8.0", "abc": ">=10.0.0"}

        class DummyBlockTwo:
            # keep two dependencies that will be available during testing.
            _requirements = {"transformers": ">=4.44.0", "diffusers": ">=0.2.0"}

        blocks_dict = {"dummy_block_one": DummyBlockOne, "dummy_block_two": DummyBlockTwo}
        return SequentialPipelineBlocks.from_blocks_dict(blocks_dict)

    def test_custom_requirements_save_load(self):
        pipe = self.get_dummy_block_pipe()
        expected_requirements = {
            "xyz": ">=0.8.0",
            "abc": ">=10.0.0",
            "transformers": ">=4.44.0",
            "diffusers": ">=0.2.0",
        }
        with tempfile.TemporaryDirectory() as tmpdir:
            pipe.save_pretrained(tmpdir)
            with open(os.path.join(tmpdir, "modular_config.json"), "r") as f:
                saved_config = json.load(f)

            # Requirements from both sub-blocks must be merged into one flat mapping.
            assert "requirements" in saved_config
            assert saved_config["requirements"] == expected_requirements

    def test_warnings(self):
        pipe = self.get_dummy_block_pipe()
        logger = logging.get_logger("diffusers.modular_pipelines.modular_pipeline_utils")
        logger.setLevel(30)  # WARNING

        with tempfile.TemporaryDirectory() as tmpdir:
            with CaptureLogger(logger) as cap_logger:
                pipe.save_pretrained(tmpdir)

        captured = str(cap_logger.out)
        template = "{req} was specified in the requirements but wasn't found in the current environment"
        for missing_pkg in ("xyz", "abc"):
            assert template.format(req=missing_pkg) in captured


class TestModularModelCardContent:
def create_mock_block(self, name="TestBlock", description="Test block description"):
class MockBlock:
Expand Down
Loading