@@ -234,33 +234,33 @@ def _retrieve_single_job(self, job) -> slurm_status:
 
             # initialize the log extractor, it will try to read basic info from the file
             le = gaussian_log_extractor(log_file.local)
-            if len(job.tasks) == le.n_tasks:
-                job.status = slurm_status.done
-            else:
-                try:  # look for more specific exception
-                    le.check_for_exceptions()
+
+            try:  # look for more specific exception
+                le.check_for_exceptions()
 
-                except NoGeometryException:
-                    job.status = slurm_status.failed
-                    logger.warning(
-                        f"Job {job.base_name} failed - the log file does not contain geometry. Cannot resubmit.")
+            except NoGeometryException:
+                job.status = slurm_status.failed
+                logger.warning(
+                    f"Job {job.base_name} failed - the log file does not contain geometry. Cannot resubmit.")
 
-                except NegativeFrequencyException:
-                    job.status = slurm_status.incomplete
-                    logger.warning(
-                        f"Job {job.base_name} incomplete - log file contains negative frequencies. Resubmit job.")
+            except NegativeFrequencyException:
+                job.status = slurm_status.incomplete
+                logger.warning(
+                    f"Job {job.base_name} incomplete - log file contains negative frequencies. Resubmit job.")
 
-                except OptimizationIncompleteException:
-                    job.status = slurm_status.incomplete
-                    logger.warning(f"Job {job.base_name} incomplete - geometry optimization did not complete.")
+            except OptimizationIncompleteException:
+                job.status = slurm_status.incomplete
+                logger.warning(f"Job {job.base_name} incomplete - geometry optimization did not complete.")
 
-                except Exception as e:
-                    job.status = slurm_status.failed
-                    logger.warning(f"Job {job.base_name} failed with unhandled exception: {e}")
+            except Exception as e:
+                job.status = slurm_status.failed
+                logger.warning(f"Job {job.base_name} failed with unhandled exception: {e}")
 
-                else:  # no exceptions were thrown, but still the job is incomplete
-                    job.status = slurm_status.incomplete
-                    logger.warning(f"Job {job.base_name} incomplete.")
+            if len(job.tasks) == le.n_tasks:
+                job.status = slurm_status.done
+            else:  # no exceptions were thrown, but still the job is incomplete
+                job.status = slurm_status.incomplete
+                logger.warning(f"Job {job.base_name} incomplete.")
 
         except FileNotFoundError:
             job.status = slurm_status.failed
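
For review context: after this hunk, the exception check runs unconditionally before the task count is compared, rather than only when the task count disagreed. A condensed, self-contained sketch of the new flow (the classify helper and the stub job/le objects are hypothetical illustrations, not autoqchem API):

    from enum import Enum
    from types import SimpleNamespace

    class slurm_status(Enum):
        done = 1
        incomplete = 2
        failed = 3

    class NoGeometryException(Exception): pass
    class NegativeFrequencyException(Exception): pass
    class OptimizationIncompleteException(Exception): pass

    def classify(job, le):
        try:  # exceptions are inspected first, as in the patched code
            le.check_for_exceptions()
        except NoGeometryException:
            job.status = slurm_status.failed  # no geometry, cannot resubmit
        except (NegativeFrequencyException, OptimizationIncompleteException):
            job.status = slurm_status.incomplete  # resubmittable problems
        except Exception:
            job.status = slurm_status.failed  # anything unhandled
        # as in the patch, this check is not behind an else clause, so it
        # runs last and settles done vs. incomplete when the log is clean
        if len(job.tasks) == le.n_tasks:
            job.status = slurm_status.done
        else:
            job.status = slurm_status.incomplete
        return job.status

    # a two-task job whose log parsed cleanly and finished both tasks
    job = SimpleNamespace(tasks=["opt", "freq"], status=None)
    le = SimpleNamespace(n_tasks=2, check_for_exceptions=lambda: None)
    assert classify(job, le) is slurm_status.done
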
@@ -369,20 +369,25 @@ def upload_done_molecules_to_db(self, tags, RMSD_threshold=0.35) -> None:
 
         for done_can in done_cans:
             (keys, jobs) = zip(*self.get_jobs(can=done_can).items())
-            rdmol, energies = rdmol_from_slurm_jobs(jobs, postDFT=True)
-            keep = prune_rmsds(rdmol, RMSD_threshold)
-            logger.info(f"Molecule {done_can} has {len(keys) - len(keep)}/{len(keys)} duplicate conformers.")
-
-            # remove duplicate jobs
-            can_keys_to_remove = [key for i, key in enumerate(keys) if i not in keep]
-            to_remove_jobs = {name: job for name, job in self.jobs.items() if name in can_keys_to_remove}
-            logger.info(
-                f"Removing {len(keys) - len(keep)}/{len(keys)} jobs and log files that contain duplicate conformers.")
-            self.remove_jobs(to_remove_jobs)
-
-            # upload non-duplicate jobs
-            can_keys_to_keep = [key for i, key in enumerate(keys) if i in keep]
-            self._upload_can_to_db(can_keys_to_keep, tags)
+            rdmol, energies, labels_ok = rdmol_from_slurm_jobs(jobs, postDFT=True)
+            if labels_ok:
+                keep = prune_rmsds(rdmol, RMSD_threshold)
+                logger.info(f"Molecule {done_can} has {len(keys) - len(keep)}/{len(keys)} duplicate conformers.")
+
+                # remove duplicate jobs
+                can_keys_to_remove = [key for i, key in enumerate(keys) if i not in keep]
+                to_remove_jobs = {name: job for name, job in self.jobs.items() if name in can_keys_to_remove}
+                logger.info(
+                    f"Removing {len(keys) - len(keep)}/{len(keys)} jobs and log files that contain duplicate conformers.")
+                self.remove_jobs(to_remove_jobs)
+
+                # upload non-duplicate jobs
+                can_keys_to_keep = [key for i, key in enumerate(keys) if i in keep]
+                self._upload_can_to_db(can_keys_to_keep, tags)
+            else:
+                for key in keys:
+                    self.jobs[key].status = slurm_status.inspect
+                self._cache()
 
     def _upload_can_to_db(self, keys, tags) -> None:
         """Uploading single molecule conformers to database.