Skip to content

Commit aa1a565

Browse files
committed
lint
Signed-off-by: Vladimir Mandic <mandic00@live.com>
1 parent ac8c918 commit aa1a565

File tree

3 files changed

+2
-8
lines changed

3 files changed

+2
-8
lines changed

installer.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -495,12 +495,6 @@ def check_python(supported_minors=[9, 10, 11, 12], reason=None):
495495
if args.quick:
496496
return
497497
log.info(f'Python: version={platform.python_version()} platform={platform.system()} bin="{sys.executable}" venv="{sys.prefix}"')
498-
# if int(sys.version_info.major) == 3 and int(sys.version_info.minor) == 12 and int(sys.version_info.micro) > 3: # TODO install: python 3.12.4 or higher cause a mess with pydantic
499-
# log.error(f"Python version incompatible: {sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro} required 3.12.3 or lower")
500-
# if reason is not None:
501-
# log.error(reason)
502-
# if not args.ignore:
503-
# sys.exit(1)
504498
if not (int(sys.version_info.major) == 3 and int(sys.version_info.minor) in supported_minors):
505499
log.error(f"Python version incompatible: {sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro} required 3.{supported_minors}")
506500
if reason is not None:

modules/interrogate/vqa.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
from PIL import Image
99
from modules import shared, devices, errors
1010

11-
# TODO add additional vlmn
11+
# TODO vlm: add additional models
1212
# https://huggingface.co/nvidia/Eagle2-1B not compatible with latest transformers
1313
# https://huggingface.co/deepseek-ai/deepseek-vl2-tiny requires custom code
1414

modules/model_flux.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@ def load_transformer(file_path): # triggered by opts.sd_unet change
219219
_transformer, _text_encoder_2 = load_flux_bnb(file_path, diffusers_load_config)
220220
if _transformer is not None:
221221
transformer = _transformer
222-
elif 'nf4' in quant: # TODO flux: fix loader for civitai nf4 models
222+
elif 'nf4' in quant: # TODO flux: loader for civitai nf4 models
223223
from modules.model_flux_nf4 import load_flux_nf4
224224
_transformer, _text_encoder_2 = load_flux_nf4(file_path, prequantized=True)
225225
if _transformer is not None:

0 commit comments

Comments (0)