Commit 4d1cca9
Fix error logging (#1103)
* fix error logging

* fix bug in multilayer_preprocess_tod

---------

Co-authored-by: Michael McCrackan <mmccrack@login13.chn.perlmutter.nersc.gov>
mmccrackan and Michael McCrackan authored Jan 22, 2025
1 parent 46d26f0 · commit 4d1cca9
Showing 3 changed files with 26 additions and 11 deletions.
sotodlib/preprocess/preprocess_util.py (6 additions, 0 deletions)

@@ -913,14 +913,20 @@ def cleanup_mandb(error, outputs, configs, logger=None, overwrite=False):
         start=os.path.dirname(configs['archive']['index']))

     src_file = outputs['temp_file']
+
+    logger.debug(f"Source file: {src_file}")
+    logger.debug(f"Destination file: {dest_file}")
+
     with h5py.File(dest_file,'a') as f_dest:
         with h5py.File(src_file,'r') as f_src:
             for dts in f_src.keys():
+                logger.debug(f"\t{dts}")
                 # If the dataset or group already exists, delete it to overwrite
                 if overwrite and dts in f_dest:
                     del f_dest[dts]
                 f_src.copy(f_src[f'{dts}'], f_dest, f'{dts}')
                 for member in f_src[dts]:
+                    logger.debug(f"\t{dts}/{member}")
                     if isinstance(f_src[f'{dts}/{member}'], h5py.Dataset):
                         f_src.copy(f_src[f'{dts}/{member}'], f_dest[f'{dts}'], f'{dts}/{member}')
     logger.info(f"Saving to database under {outputs['db_data']}")
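For context, the member-wise copy in this hunk relies on standard h5py behavior: Group.copy duplicates an object from one open file into a group of another, and deleting an existing key first gives overwrite semantics. A minimal runnable sketch of that pattern, with invented file and dataset names (the real archive layout comes from the preprocess configs):

    import h5py
    import numpy as np

    # Build a toy source file: one group holding two datasets.
    with h5py.File('src.h5', 'w') as f:
        grp = f.create_group('preproc_archive')
        grp.create_dataset('signal', data=np.arange(8))
        grp.create_dataset('flags', data=np.zeros(8, dtype=bool))

    # Copy member by member into another file, dropping any stale group first.
    with h5py.File('dest.h5', 'a') as f_dest, h5py.File('src.h5', 'r') as f_src:
        for dts in f_src.keys():
            if dts in f_dest:
                del f_dest[dts]              # overwrite: remove the old group
            f_dest.create_group(dts)
            for member in f_src[dts]:
                if isinstance(f_src[f'{dts}/{member}'], h5py.Dataset):
                    # Group.copy(source, dest, name) duplicates one dataset.
                    f_src.copy(f_src[f'{dts}/{member}'], f_dest[dts], member)

    with h5py.File('dest.h5', 'r') as f:
        print(sorted(f['preproc_archive'].keys()))   # ['flags', 'signal']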
sotodlib/site_pipeline/multilayer_preprocess_tod.py (13 additions, 8 deletions)

@@ -143,8 +143,7 @@ def multilayer_preprocess_tod(obs_id,
         dets = {gb:gg for gb, gg in zip(group_by_proc, group)}
         try:
             error, outputs_grp_init, _, aman = pp_util.preproc_or_load_group(obs_id, configs_init,
-                                                                             dets=dets, logger=logger,
-                                                                             context_init=context_init)
+                                                                             dets=dets, logger=logger)
             if error is None:
                 outputs_init.append(outputs_grp_init)

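The bug fix in the hunk above is the removal of the context_init keyword. Passing a keyword argument a function does not accept raises a TypeError at call time, which is presumably the failure mode being fixed; the real signature of preproc_or_load_group is not shown in this diff, so the stand-in below is purely illustrative:

    # Hypothetical stand-in; the real function lives in sotodlib's pp_util.
    def preproc_or_load_group(obs_id, configs_init, dets=None, logger=None):
        return None, [], None, None

    try:
        # Mirrors the removed call: the extra keyword fails immediately.
        preproc_or_load_group('obs_0', {}, dets={}, logger=None,
                              context_init=None)
    except TypeError as err:
        print(err)   # ...got an unexpected keyword argument 'context_init'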
@@ -354,14 +353,20 @@ def main(configs_init: str,
             futures.remove(future)

             if db_datasets_init:
-                logger.info(f'Processing future result db_dataset: {db_datasets_init}')
-                for db_dataset in db_datasets_init:
-                    pp_util.cleanup_mandb(err, db_dataset, configs_init, logger, overwrite)
+                if err is None:
+                    for db_dataset in db_datasets_init:
+                        logger.info(f'Processing future result db_dataset: {db_datasets_init}')
+                        pp_util.cleanup_mandb(err, db_dataset, configs_init, logger, overwrite)
+                else:
+                    pp_util.cleanup_mandb(err, db_datasets_init, configs_init, logger, overwrite)

             if db_datasets_proc:
-                logger.info(f'Processing future dependent result db_dataset: {db_datasets_proc}')
-                for db_dataset in db_datasets_proc:
-                    pp_util.cleanup_mandb(err, db_dataset, configs_proc, logger, overwrite)
+                if err is None:
+                    logger.info(f'Processing future dependent result db_dataset: {db_datasets_proc}')
+                    for db_dataset in db_datasets_proc:
+                        pp_util.cleanup_mandb(err, db_dataset, configs_proc, logger, overwrite)
+                else:
+                    pp_util.cleanup_mandb(err, db_datasets_proc, configs_proc, logger, overwrite)

 if __name__ == '__main__':
     sp_util.main_launcher(main, get_parser)
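The restructuring in this hunk (and the matching one in preprocess_tod.py below) changes how results returned by the worker futures are cleaned up: when err is None the future's payload is a list of per-group outputs and each one is archived individually, while on failure the whole payload is handed to cleanup_mandb once so the error itself can be recorded. A minimal sketch of that branching, assuming simplified shapes for err and the payload (the real types come from sotodlib):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('preprocess')

    def cleanup_mandb(err, output, configs, logger, overwrite=False):
        # Stand-in for pp_util.cleanup_mandb: archive a result or log an error.
        if err is None:
            logger.info('archiving %s', output)
        else:
            logger.error('recording failure %r for %s', err, output)

    def handle_future_result(err, db_datasets, configs, logger, overwrite=False):
        if not db_datasets:
            return
        if err is None:
            # Success: db_datasets is a list of per-group outputs.
            for db_dataset in db_datasets:
                cleanup_mandb(err, db_dataset, configs, logger, overwrite)
        else:
            # Failure: db_datasets describes the failed group as a whole,
            # so it is passed through once rather than iterated.
            cleanup_mandb(err, db_datasets, configs, logger, overwrite)

    handle_future_result(None, ['group_0', 'group_1'], {}, logger)
    handle_future_result('load_error', {'obs_id': 'obs_0'}, {}, logger)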
sotodlib/site_pipeline/preprocess_tod.py (7 additions, 3 deletions)

@@ -76,6 +76,7 @@ def preprocess_tod(obs_id,
     If true preprocess_tod is called in a parallel process which returns
     dB info and errors and does no sqlite writing inside the function.
     """
+
     outputs = []
     logger = sp_util.init_logger("preprocess", verbosity=verbosity)

@@ -379,9 +380,12 @@ def main(
             futures.remove(future)

             if db_datasets:
-                logger.info(f'Processing future result db_dataset: {db_datasets}')
-                for db_dataset in db_datasets:
-                    pp_util.cleanup_mandb(err, db_dataset, configs, logger)
+                if err is None:
+                    logger.info(f'Processing future result db_dataset: {db_datasets}')
+                    for db_dataset in db_datasets:
+                        pp_util.cleanup_mandb(err, db_dataset, configs, logger)
+                else:
+                    pp_util.cleanup_mandb(err, db_datasets, configs, logger)

 if __name__ == '__main__':
     sp_util.main_launcher(main, get_parser)
