Merge pull request #153 from ENCODE-DCC/dev
v2.1.1
leepc12 authored Nov 20, 2021
2 parents 0a8b36d + 0e65408 commit cddba79
Showing 4 changed files with 20 additions and 10 deletions.
2 changes: 1 addition & 1 deletion caper/__init__.py
@@ -2,4 +2,4 @@
 from .caper_runner import CaperRunner
 
 __all__ = ['CaperClient', 'CaperClientSubmit', 'CaperRunner']
-__version__ = '2.1.0'
+__version__ = '2.1.1'
13 changes: 7 additions & 6 deletions caper/caper_init.py
@@ -24,7 +24,13 @@
 # Metadata DB for call-caching (reusing previous outputs):
 # Cromwell supports restarting workflows based on a metadata DB
 # DB is in-memory by default
-db=in-memory
+#db=in-memory
+# If you use 'caper server' then you can use one unified '--file-db'
+# for all submitted workflows. In such case, uncomment the following two lines
+# and define file-db as an absolute path to store metadata of all workflows
+#db=file
+#file-db=
 # If you use 'caper run' and want to use call-caching:
 # Make sure to define different 'caper run ... --db file --file-db DB_PATH'
@@ -33,11 +39,6 @@
 # then Caper will collect/re-use previous outputs without running the same task again
 # Previous outputs will be simply hard/soft-linked.
-# If you use 'caper server' then you can use one unified '--file-db'
-# for all submitted workflows. In such case, uncomment the following two lines
-# and define file-db as an absolute path to store metadata of all workflows
-#db=file
-#file-db=
 """

 CONF_CONTENTS_LOCAL_HASH_STRAT = """
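
The conf comments above describe two call-caching setups. A minimal sketch of each, assuming typical paths (both paths below are hypothetical examples, not shipped defaults). For 'caper server', uncomment the two lines and point file-db at one shared metadata DB for all submitted workflows:

    db=file
    file-db=/home/user/.caper/caper_server_db

For 'caper run', per the comment kept in the template, pass a workflow-specific DB on the command line instead:

    caper run my_workflow.wdl --db file --file-db /path/to/run_specific_db
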
9 changes: 6 additions & 3 deletions caper/cromwell_backend.py
@@ -772,7 +772,8 @@ class CromwellBackendSlurm(CromwellBackendHpc):
 )
 SLURM_CHECK_ALIVE = dedent(
 """
-for ITER in 1 2 3; do
+for ITER in 1 2 3
+do
 CHK_ALIVE=$(squeue --noheader -j ${job_id} --format=%i | grep ${job_id})
 if [ -z "$CHK_ALIVE" ]
 then
@@ -801,14 +802,16 @@ class CromwellBackendSlurm(CromwellBackendHpc):
 {submit}
 EOF
-for ITER in 1 2 3; do
+for ITER in 1 2 3
+do
 sbatch --export=ALL -J ${{job_name}} -D ${{cwd}} -o ${{out}} -e ${{err}} \\
 ${{'-p ' + slurm_partition}} ${{'--account ' + slurm_account}} \\
 {slurm_resource_param} \\
 ${{slurm_extra_param}} \\
-${{script}}.caper && break
+${{script}}.caper && exit 0
 sleep 30
 done
+exit 1
 """
 )
 DEFAULT_SLURM_RESOURCE_PARAM = (
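
The sbatch retry edit above also fixes error propagation: with '&& break', three failed submission attempts still fell through 'done' and the script exited 0, so the failure looked like a successful submit; '&& exit 0' plus the trailing 'exit 1' makes both outcomes explicit. A standalone sketch of the resulting pattern (submit_cmd stands in for the real sbatch command line):

    for ITER in 1 2 3
    do
      submit_cmd && exit 0  # submission accepted: stop retrying, report success
      sleep 30              # wait before the next attempt
    done
    exit 1                  # all three attempts failed: report failure
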
6 changes: 6 additions & 0 deletions caper/cromwell_metadata.py
@@ -208,6 +208,7 @@ def troubleshoot_call(call_name, call, parent_call_names):
 job_id = call.get('jobId')
 stdout = call.get('stdout')
 stderr = call.get('stderr')
+strerr_background = stderr + '.background'
 run_start = None
 run_end = None
 for event in call.get('executionEvents', []):
@@ -245,6 +246,11 @@ def troubleshoot_call(call_name, call, parent_call_names):
 help_msg += 'STDOUT_CONTENTS=\n{s}\n'.format(
     s=AutoURI(stdout).read()
 )
+if strerr_background:
+    if AutoURI(strerr_background).exists:
+        help_msg += 'STDERR_BACKGROUND_CONTENTS=\n{s}\n'.format(
+            s=AutoURI(strerr_background).read()
+        )

 return help_msg

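
The troubleshoot change appends '.background' to each call's stderr path and, when that file exists, includes its contents in the help message; backends that run the task script in the background typically write the wrapper shell's stderr to this separate file. To inspect the same log by hand for a failed call (the path is a hypothetical example):

    ERR=/path/to/workflow/call-task/execution/stderr
    [ -f "${ERR}.background" ] && cat "${ERR}.background"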
