@@ -86,7 +86,10 @@ def preprocess_tod(obs_id,
     group_by, groups, error = pp_util.get_groups(obs_id, configs, context)

     if error is not None:
-        return error[0], [None, None]
+        if run_parallel:
+            return error[0], [None, None]
+        else:
+            return

     all_groups = groups.copy()
     for g in all_groups:
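Note: with this change a parallel worker hands the group-lookup error back to the caller as a (tag, [None, None]) pair instead of exiting silently. A minimal sketch of a caller consuming that shape; the executor wiring, argument order, and names like run_all and obs_ids are assumptions, only the return shape under run_parallel=True comes from the hunk above.

    from concurrent.futures import ProcessPoolExecutor, as_completed

    def run_all(obs_ids, configs, context, logger):
        # Submit one worker per obs_id; each returns (err, db_datasets)
        # because run_parallel=True (argument order is an assumption).
        with ProcessPoolExecutor() as ex:
            futures = {ex.submit(preprocess_tod, oid, configs, context,
                                 run_parallel=True): oid for oid in obs_ids}
            for fut in as_completed(futures):
                err, db_datasets = fut.result()
                if err is not None:
                    # Error path from the hunk: a tag plus [None, None] placeholders.
                    logger.info(f'{futures[fut]} failed: {err}')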
@@ -330,7 +333,6 @@ def main(

     # clean up lingering files from previous incomplete runs
     policy_dir = os.path.dirname(configs['archive']['policy']['filename']) + '/temp/'
-    print(policy_dir)
     for obs in obs_list:
         obs_id = obs['obs_id']
         found = False
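Note: the stray debug print is dropped; the temp policy directory, derived from the archive policy filename, still holds partial files from interrupted runs. A hypothetical sketch of the purge idea under that layout; the purge_temp name and the '*.h5*' glob are assumptions, not the patch's actual matching logic.

    import glob
    import os

    def purge_temp(configs):
        # Hypothetical cleanup of leftovers from an interrupted run; the
        # '*.h5*' pattern is an assumption about the archive file naming.
        policy_dir = os.path.dirname(configs['archive']['policy']['filename']) + '/temp/'
        for stale in glob.glob(os.path.join(policy_dir, '*.h5*')):
            os.remove(stale)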
@@ -361,7 +363,6 @@ def main(
             run_list.append((obs, None))
         else:
             group_by, groups, _ = pp_util.get_groups(obs["obs_id"], configs, context)
-
             if len(x) != len(groups):
                 [groups.remove([a[f'dets:{gb}'] for gb in group_by]) for a in x]
                 run_list.append((obs, groups))
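Note: the side-effecting list comprehension here is really set subtraction, dropping every group that already has a database entry so only unprocessed groups are queued. An equivalent plain-loop sketch, with an added membership guard the original lacks (the original groups.remove raises ValueError if a group is absent):

    # Plain-loop equivalent of the comprehension above: drop every group
    # that already has a database entry in x, keeping only unprocessed
    # groups for this obs_id. The membership check is an added guard.
    for entry in x:
        done = [entry[f'dets:{gb}'] for gb in group_by]
        if done in groups:
            groups.remove(done)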
@@ -403,7 +404,7 @@ def main(
                 continue
             futures.remove(future)

-            if db_datasets:
+            if err is None and db_datasets:
                 logger.info(f'Processing future result db_dataset: {db_datasets}')
                 for db_dataset in db_datasets:
                     pp_util.cleanup_mandb(err, db_dataset, configs, logger)
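Note: the new guard ensures only clean results feed the manifest database, since an errored worker returns [None, None] placeholders rather than real datasets. A condensed sketch of the per-future handling after this change; the unpacking of future.result() and the skip branch are assumptions based on the first hunk's return shape.

    # Condensed sketch of the per-future handling after this change.
    err, db_datasets = future.result()  # shape assumed from the first hunk
    futures.remove(future)
    if err is None and db_datasets:
        logger.info(f'Processing future result db_dataset: {db_datasets}')
        for db_dataset in db_datasets:
            pp_util.cleanup_mandb(err, db_dataset, configs, logger)
    else:
        # Assumption: errored results are reported and skipped, not archived.
        logger.info(f'Skipping archive for errored result: {err}')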