Skip to content

Commit

Permalink
Merge pull request #64 from mgermain/msub_support
Browse files Browse the repository at this point in the history
Added msub support
  • Loading branch information
MarcCote committed Dec 4, 2014
2 parents 3167c93 + 9ee1815 commit 053ac62
Show file tree
Hide file tree
Showing 4 changed files with 30 additions and 4 deletions.
6 changes: 4 additions & 2 deletions scripts/smart_dispatch.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
LOGS_FOLDERNAME = "SMART_DISPATCH_LOGS"
CLUSTER_NAME = utils.detect_cluster()
AVAILABLE_QUEUES = get_available_queues(CLUSTER_NAME)
LAUNCHER = utils.get_launcher(CLUSTER_NAME)


def main():
Expand Down Expand Up @@ -85,14 +86,15 @@ def main():
# Launch the jobs with QSUB
if not args.doNotLaunch:
for pbs_filename in pbs_filenames:
qsub_output = check_output('qsub ' + pbs_filename, shell=True)
qsub_output = check_output('{launcher} {pbs_filename}'.format(launcher=LAUNCHER if args.launcher is None else args.launcher, pbs_filename=pbs_filename), shell=True)
print qsub_output,


def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('-q', '--queueName', required=True, help='Queue used (ex: qwork@mp2, qfat256@mp2, qfat512@mp2)')
parser.add_argument('-t', '--walltime', required=False, help='Set the estimated running time of your jobs using the DD:HH:MM:SS format. Note that they will be killed when this time limit is reached.')
parser.add_argument('-L', '--launcher', choices=['qsub', 'msub'], required=False, help='Which launcher to use. Default: qsub')
parser.add_argument('-C', '--coresPerNode', type=int, required=False, help='How many cores there are per node.')
parser.add_argument('-G', '--gpusPerNode', type=int, required=False, help='How many gpus there are per node.')
#parser.add_argument('-M', '--memPerNode', type=int, required=False, help='How much memory there are per node (in Gb).')
Expand Down Expand Up @@ -121,7 +123,7 @@ def parse_arguments():
if args.commandsFile is None and len(args.commandAndOptions) < 1:
parser.error("You need to specify a command to launch.")
if args.queueName not in AVAILABLE_QUEUES and ((args.coresPerNode is None and args.gpusPerNode is None) or args.walltime is None):
parser.error("Unknown queue, --coresPerCommand/--gpusPerCommand and --walltime must be set.")
parser.error("Unknown queue, --coresPerNode/--gpusPerNode and --walltime must be set.")
else:
if args.pool is None:
resume_parser.error("The resume feature only works with the --pool argument.")
Expand Down
18 changes: 17 additions & 1 deletion smartdispatch/config/helios.json
Original file line number Diff line number Diff line change
@@ -1,10 +1,26 @@
{
"maint": {
"gpu_8": {
"ram": 128,
"modules": ["cuda/6.0.37"],
"cores": 20,
"max_walltime": "12:00:00",
"gpus": 8,
"nodes": 15
},
"gpu_4": {
"ram": 64,
"modules": ["cuda/6.0.37"],
"cores": 10,
"max_walltime": "12:00:00",
"gpus": 4,
"nodes": 15
},
"test": {
"ram": 128,
"modules": ["cuda/6.0.37"],
"cores": 20,
"max_walltime": "15:00",
"gpus": 8,
"nodes": 15
}
}
2 changes: 1 addition & 1 deletion smartdispatch/tests/test_job_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ class TestHeliosQueue(unittest.TestCase):

def setUp(self):
self.commands = ["echo 1", "echo 2", "echo 3", "echo 4"]
self.queue = Queue("maint", "helios")
self.queue = Queue("gpu_8", "helios")

self.env_val = 'RAP'

Expand Down
8 changes: 8 additions & 0 deletions smartdispatch/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ def unhexify(match):

return re.sub(r"\\x..", unhexify, text)


@contextmanager
def open_with_lock(*args, **kwargs):
""" Context manager for opening file with an exclusive lock. """
Expand Down Expand Up @@ -95,3 +96,10 @@ def detect_cluster():
elif server_name.split('.')[-1] == 'helios':
cluster_name = "helios"
return cluster_name


def get_launcher(cluster_name):
    """ Return the job-submission command to use for a given cluster.

    The Helios cluster schedules jobs through Moab's `msub`; every other
    (or unknown) cluster falls back to Torque's `qsub`.
    """
    launcher_by_cluster = {"helios": "msub"}
    return launcher_by_cluster.get(cluster_name, "qsub")

0 comments on commit 053ac62

Please sign in to comment.