This repository has been archived by the owner on Oct 10, 2019. It is now read-only.

Commit

Merge pull request #55 from brianhlin/sw2628_pbs_pro_config
Specify PBS Pro in blah.config (SOFTWARE-2628)
brianhlin authored Jul 12, 2017
2 parents b40efa5 + 497fb84 commit 448a226
Showing 8 changed files with 47 additions and 17 deletions.
2 changes: 2 additions & 0 deletions config/blah.config.template
@@ -99,6 +99,8 @@ pbs_fallback=no
#Set to 'yes' to request pvmem when submitting jobs
pbs_set_pvmem=no

#Set to 'yes' if you are running PBS Pro
pbs_pro=no

##LSF common variables

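The status and submit scripts changed later in this diff consume the new flag from the environment once blah.config has been sourced. A minimal sketch of that check in Python, assuming the variable is exported as-is and treating an unset value as 'no' (the template default above):

import os

# blah.config is plain shell key=value syntax, so after it has been
# sourced the setting is visible as an ordinary environment variable.
is_pbs_pro = os.environ.get('pbs_pro', 'no').lower() == 'yes'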
2 changes: 2 additions & 0 deletions src/CMakeLists.txt
@@ -139,7 +139,9 @@ set(blah_scripts
scripts/slurm_cancel.sh scripts/slurm_resume.sh scripts/slurm_status.sh
scripts/slurm_hold.sh scripts/slurm_submit.sh
scripts/slurm_local_submit_attributes.sh
scripts/blah.py scripts/__init__.py
scripts/pbs_status.py
scripts/slurm_status.py
)

install(FILES
1 change: 1 addition & 0 deletions src/scripts/Makefile.am
@@ -39,6 +39,7 @@ libexec_SCRIPTS = blah_load_config.sh blah_common_submit_functions.sh \
sge_hold.sh sge_status.sh runcmd.pl.template sge_local_submit_attributes.sh \
slurm_cancel.sh slurm_hold.sh slurm_resume.sh slurm_status.sh \
slurm_submit.sh slurm_local_submit_attributes.sh \
blah.py __init__.py \
pbs_status.py \
slurm_status.py

Empty file added src/scripts/__init__.py
Empty file.
18 changes: 18 additions & 0 deletions src/scripts/blah.py
@@ -0,0 +1,18 @@
"""Common functions for BLAH python scripts"""

import os
import subprocess

def load_env(config_dir):
    """Load blah.config into the environment"""
    load_config_path = os.path.join(config_dir, 'blah_load_config.sh')
    command = ['bash', '-c', 'source %s && env' % load_config_path]
    try:
        config_proc = subprocess.Popen(command, stdout=subprocess.PIPE)
        config_out, _ = config_proc.communicate()

        for line in config_out.splitlines():
            (key, _, val) = line.partition('=')
            os.environ[key] = val
    except IOError:
        pass
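A rough usage sketch of the new helper, assuming it is called with whatever directory holds blah_load_config.sh (the status scripts below pass their own install directory): load_env runs bash -c 'source blah_load_config.sh && env' and copies the resulting key=value pairs into os.environ.

import os
import blah  # the module added above

# Populate os.environ from blah.config, then read any setting from it.
config_dir = os.path.dirname(os.path.abspath(__file__))
blah.load_env(config_dir)
print(os.environ.get('pbs_binpath', ''))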
19 changes: 11 additions & 8 deletions src/scripts/pbs_status.py
@@ -43,6 +43,9 @@
import pickle
import csv

sys.path.insert(0, os.path.dirname(__file__))
import blah

cache_timeout = 60

launchtime = time.time()
@@ -226,14 +229,11 @@ def qstat(jobid=""):
Returns a python dictionary with the job info.
"""
qstat_bin = get_qstat_location()
command = (qstat_bin, '--version')
qstat_process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
qstat_version, _ = qstat_process.communicate()

starttime = time.time()
log("Starting qstat.")
command = (qstat_bin, '-f')
if not re.search(r'PBSPro', qstat_version):
if os.environ.get('pbs_pro').lower() != 'yes':
command += ('-1',) # -1 conflicts with -f in PBS Pro
if jobid:
command += (jobid,)
@@ -357,10 +357,9 @@ def get_qstat_location():
global _qstat_location_cache
if _qstat_location_cache != None:
return _qstat_location_cache
load_config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'blah_load_config.sh')
if os.path.exists(load_config_path) and os.access(load_config_path, os.R_OK):
cmd = 'source %s && echo "$pbs_binpath/qstat"' % load_config_path
else:
try:
cmd = os.path.join(os.environ['pbs_binpath'], 'qstat')
except KeyError:
cmd = 'which qstat'
child_stdout = os.popen(cmd)
output = child_stdout.read()
@@ -526,6 +525,10 @@ def main():
print "1Usage: pbs_status.sh pbs/<date>/<jobid>"
return 1
jobid = jobid_arg.split("/")[-1].split(".")[0]

config_dir = os.path.dirname(os.path.abspath(__file__))
blah.load_env(config_dir)

log("Checking cache for jobid %s" % jobid)
cache_contents = None
try:
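Net effect in pbs_status.py: main() now loads blah.config into the environment via blah.load_env(), and qstat() keys the PBS Pro decision off that instead of probing qstat --version. A condensed sketch of the resulting command assembly, with a defensive 'no' default added here as an assumption:

import os

def build_qstat_command(qstat_bin, jobid=""):
    """Mirror of the qstat() hunk above: only non-PBS Pro sites get -1."""
    command = (qstat_bin, '-f')
    if os.environ.get('pbs_pro', 'no').lower() != 'yes':
        command += ('-1',)  # -1 conflicts with -f in PBS Pro
    if jobid:
        command += (jobid,)
    return command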
8 changes: 3 additions & 5 deletions src/scripts/pbs_submit.sh
@@ -117,8 +117,6 @@ fi
#local batch system-specific file output must be added to the submit file
bls_local_submit_attributes_file=${blah_libexec_directory}/pbs_local_submit_attributes.sh

${pbs_binpath}/qstat --version 2>&1 | grep PBSPro > /dev/null 2>&1
is_pbs_pro=$?
# Begin building the select statement: select=x where x is the number of 'chunks'
# to request. Chunk requests should precede any resource requests (resource
# requests are order independent). An example from the PBS Pro manual:
@@ -137,7 +135,7 @@ if [ "x$bls_opt_req_mem" != "x" ]; then
fi
# Total amount of memory allocated to the job
pbs_select="$pbs_select:mem=${bls_opt_req_mem}mb"
if [ "$is_pbs_pro" != 0 ]; then
if [ "x$pbs_pro" != "xyes" ]; then
echo "#PBS -l mem=${bls_opt_req_mem}mb" >> $bls_tmp_file
fi
fi
@@ -149,7 +147,7 @@ bls_set_up_local_and_extra_args
[ -z "$bls_opt_queue" ] || grep -q "^#PBS -q" $bls_tmp_file || echo "#PBS -q $bls_opt_queue" >> $bls_tmp_file

# Extended support for MPI attributes
if [ "$is_pbs_pro" == 0 ]; then
if [ "x$pbs_pro" == "xyes" ]; then
pbs_select="$pbs_select:ncpus=$bls_opt_smpgranularity"
else
if [ "x$bls_opt_wholenodes" == "xyes" ]; then
@@ -209,7 +207,7 @@ else
[ -z "$bls_fl_subst_and_accumulate_result" ] || echo "#PBS -W stageout=\\'$bls_fl_subst_and_accumulate_result\\'" >> $bls_tmp_file
fi

if [ "$is_pbs_pro" == 0 ]; then
if [ "x$pbs_pro" == "xyes" ]; then
echo $pbs_select >> $bls_tmp_file
fi

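pbs_submit.sh drops the runtime qstat --version | grep PBSPro probe and keys every PBS Pro branch off the same $pbs_pro value from blah.config. A loose Python rendering of the memory hunk, for illustration only (the helper and its names are hypothetical; they follow the shell variables):

import os

def memory_directives(req_mem_mb, pbs_select):
    """Always extend the select chunk; only non-PBS Pro sites also get
    the classic mem= resource request line."""
    extra_lines = []
    pbs_select += ':mem=%smb' % req_mem_mb
    if os.environ.get('pbs_pro', 'no') != 'yes':
        extra_lines.append('#PBS -l mem=%smb' % req_mem_mb)
    return pbs_select, extra_lines

The remaining hunks are the same substitution: comparisons against the old grep exit code in $is_pbs_pro become string comparisons of $pbs_pro against "yes".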
14 changes: 10 additions & 4 deletions src/scripts/slurm_status.py
@@ -43,6 +43,9 @@
import pickle
import csv

sys.path.insert(0, os.path.dirname(__file__))
import blah

cache_timeout = 60

launchtime = time.time()
@@ -332,10 +335,9 @@ def get_slurm_location(program):
global _slurm_location_cache
if _slurm_location_cache != None:
return os.path.join(_slurm_location_cache, program)
load_config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'blah_load_config.sh')
if os.path.exists(load_config_path) and os.access(load_config_path, os.R_OK):
cmd = 'source %s && echo "${slurm_binpath:-/usr/bin}/%s"' % (load_config_path, program)
else:
try:
cmd = os.path.join(os.environ['slurm_binpath'], program)
except KeyError:
cmd = 'which %s' % program
child_stdout = os.popen(cmd)
output = child_stdout.read()
@@ -486,6 +488,10 @@ def main():
print "1Usage: slurm_status.py slurm/<date>/<jobid>"
return 1
jobid = jobid_arg.split("/")[-1].split(".")[0]

config_dir = os.path.dirname(os.path.abspath(__file__))
blah.load_env(config_dir)

log("Checking cache for jobid %s" % jobid)
cache_contents = None
try:
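slurm_status.py mirrors the pbs_status.py changes: main() calls blah.load_env(), and get_slurm_location() reads slurm_binpath straight from the environment instead of re-sourcing blah_load_config.sh. A compact sketch of the shared lookup pattern (the helper name is illustrative):

import os

def locate(program, binpath_var):
    """Return the command string the status scripts hand to os.popen():
    either an absolute path or a 'which' fallback."""
    try:
        return os.path.join(os.environ[binpath_var], program)
    except KeyError:
        return 'which %s' % program

# e.g. locate('qstat', 'pbs_binpath')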
