Skip to content

Commit 3cd12c5

Browse files
committed
fix loading models
1 parent 7ea8145 commit 3cd12c5

File tree

4 files changed

+11
-3
lines changed

4 files changed

+11
-3
lines changed

CHANGELOG.md

+1
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
- fix negative prompt parsing from metadata
1111
- fix stable cascade progress monitoring
1212
- fix variation seed with hires pass
13+
- fix loading models trained with onetrainer
1314
- add variation seed info to metadata
1415
- workaround for scale-by when using modernui
1516
- lock torch-directml version

modules/processing_helpers.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -216,7 +216,8 @@ def decode_first_stage(model, x, full_quality=True):
216216

217217
def get_fixed_seed(seed):
218218
if seed is None or seed == '' or seed == -1:
219-
return int(random.randrange(4294967294))
219+
random.seed()
220+
seed = int(random.randrange(4294967294))
220221
return seed
221222

222223

modules/sd_models.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -12,10 +12,11 @@
1212
from os import mkdir
1313
from urllib import request
1414
from enum import Enum
15+
import diffusers
16+
import diffusers.loaders.single_file_utils
1517
from rich import progress # pylint: disable=redefined-builtin
1618
import torch
1719
import safetensors.torch
18-
import diffusers
1920
from omegaconf import OmegaConf
2021
from transformers import logging as transformers_logging
2122
from ldm.util import instantiate_from_config
@@ -1056,6 +1057,7 @@ def load_diffuser(checkpoint_info=None, already_loaded_state_dict=None, timer=No
10561057
else:
10571058
diffusers_load_config['config'] = get_load_config(checkpoint_info.path, model_type, config_type='json')
10581059
if hasattr(pipeline, 'from_single_file'):
1060+
diffusers.loaders.single_file_utils.CHECKPOINT_KEY_NAMES["clip"] = "cond_stage_model.transformer.text_model.embeddings.position_embedding.weight" # TODO patch for diffusers==0.28.0
10591061
diffusers_load_config['use_safetensors'] = True
10601062
diffusers_load_config['cache_dir'] = shared.opts.hfcache_dir # use hfcache instead of diffusers dir as this is for config only in case of single-file
10611063
if shared.opts.disable_accelerate:

modules/styles.py

+5-1
Original file line numberDiff line numberDiff line change
@@ -81,8 +81,10 @@ def check_files(prompt, wildcard, files):
8181
def apply_wildcards_to_prompt(prompt, all_wildcards, seed=-1, silent=False):
8282
if len(prompt) == 0:
8383
return prompt
84-
if seed > 0:
84+
old_state = None
85+
if seed > 0 and len(all_wildcards) > 0:
8586
random.seed(seed)
87+
old_state = random.getstate()
8688
replaced = {}
8789
t0 = time.time()
8890
for style_wildcards in all_wildcards:
@@ -104,6 +106,8 @@ def apply_wildcards_to_prompt(prompt, all_wildcards, seed=-1, silent=False):
104106
shared.log.debug(f'Wildcards applied: {replaced} path="{shared.opts.wildcards_dir}" type=style time={t1-t0:.2f}')
105107
if (len(replaced_file) > 0 or len(not_found) > 0) and not silent:
106108
shared.log.debug(f'Wildcards applied: {replaced_file} missing: {not_found} path="{shared.opts.wildcards_dir}" type=file time={t2-t1:.2f}')
109+
if old_state is not None:
110+
random.setstate(old_state)
107111
return prompt
108112

109113

0 commit comments

Comments
 (0)