added --max-retries to retry failing tasks
leepc12 committed Jun 12, 2019
1 parent c654958 commit 9e2ac56
Showing 4 changed files with 18 additions and 1 deletion.
1 change: 1 addition & 0 deletions DETAILS.md
@@ -89,6 +89,7 @@ We highly recommend to use a default configuration file described in the section
 cromwell|--cromwell|[cromwell-40.jar](https://github.com/broadinstitute/cromwell/releases/download/40/cromwell-40.jar)|Path or URL for Cromwell JAR file
 max-concurrent-tasks|--max-concurrent-tasks|1000|Maximum number of concurrent tasks
 max-concurrent-workflows|--max-concurrent-workflows|40|Maximum number of concurrent workflows
+max-retries|--max-retries|1|Maximum number of retries for failing tasks
 disable-call-caching|--disable-call-caching| |Disable Cromwell's call-caching (re-using outputs)
 backend-file|--backend-file| |Custom Cromwell backend conf file. This will override Caper's built-in backends
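As an illustration of the conf-key / CLI-flag mapping in the table above, here is a minimal, self-contained argparse sketch using the defaults from the table; it is a simplified stand-in, not the actual caper_args.py:

```python
# Hypothetical, simplified stand-in for caper_args.py: each conf key
# in the table doubles as a CLI flag with the listed default.
import argparse

parser = argparse.ArgumentParser(prog='caper')
parser.add_argument('--max-concurrent-tasks', type=int, default=1000)
parser.add_argument('--max-concurrent-workflows', type=int, default=40)
parser.add_argument('--max-retries', type=int, default=1,
                    help='Maximum number of retries for failing tasks')
parser.add_argument('--disable-call-caching', action='store_true')

args = parser.parse_args(['--max-retries', '2'])
print(args.max_retries)  # 2
```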
5 changes: 5 additions & 0 deletions caper/caper.py
@@ -103,6 +103,7 @@ def __init__(self, args):
         self._disable_call_caching = args.get('disable_call_caching')
         self._max_concurrent_workflows = args.get('max_concurrent_workflows')
         self._max_concurrent_tasks = args.get('max_concurrent_tasks')
+        self._max_retries = args.get('max_retries')
         self._tmp_dir = args.get('tmp_dir')
         self._out_dir = args.get('out_dir')
         if self._out_dir is not None:
@@ -814,6 +815,10 @@ def __create_workflow_opts_json_file(
             template['default_runtime_attributes']['sge_extra_param'] = \
                 self._sge_extra_param

+        if self._max_retries is not None:
+            template['default_runtime_attributes']['maxRetries'] = \
+                self._max_retries
+
         # if workflow opts file is given by a user, merge it to template
         if self._workflow_opts is not None:
             f = CaperURI(self._workflow_opts).get_local_file()
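The effect of the new branch in `__create_workflow_opts_json_file` is that a non-None retry count lands in the workflow options JSON as `maxRetries` under `default_runtime_attributes`. A minimal sketch of that shape (template merging and file writing omitted):

```python
# Sketch of the options template after the new branch runs; Caper's
# real method also merges backend defaults and a user opts file.
import json

def build_workflow_opts(max_retries=None):
    template = {'default_runtime_attributes': {}}
    if max_retries is not None:
        template['default_runtime_attributes']['maxRetries'] = max_retries
    return template

print(json.dumps(build_workflow_opts(max_retries=1), indent=4))
# {"default_runtime_attributes": {"maxRetries": 1}}
```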
11 changes: 11 additions & 0 deletions caper/caper_args.py
@@ -20,6 +20,7 @@
 DEFAULT_MYSQL_DB_PORT = 3306
 DEFAULT_MAX_CONCURRENT_WORKFLOWS = 40
 DEFAULT_MAX_CONCURRENT_TASKS = 1000
+DEFAULT_MAX_RETRIES = 1
 DEFAULT_PORT = 8000
 DEFAULT_IP = 'localhost'
 DEFAULT_FORMAT = 'id,status,name,str_label,submission'
@@ -230,6 +231,11 @@ def parse_caper_arguments():
         type=int,
         help='Number of concurrent workflows. '
              '"system.max-concurrent-workflows" in backend configuration')
+    group_cromwell.add_argument(
+        '--max-retries', default=DEFAULT_MAX_RETRIES,
+        type=int,
+        help='Number of retries for failing tasks. '
+             'equivalent to "maxRetries" in workflow options JSON file.')
     group_cromwell.add_argument(
         '--disable-call-caching', action='store_true',
         help='Disable Cromwell\'s call caching, which re-uses outputs from '
@@ -527,6 +533,11 @@ def parse_caper_arguments():
             and isinstance(max_concurrent_workflows, str):
         args_d['max_concurrent_workflows'] = int(max_concurrent_workflows)

+    max_retries = args_d.get('max_retries')
+    if max_retries is not None \
+            and isinstance(max_retries, str):
+        args_d['max_retries'] = int(max_retries)
+
     # init some important path variables
     if args_d.get('out_dir') is None:
         args_d['out_dir'] = os.getcwd()
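The `isinstance(..., str)` guard exists because values read from a conf file arrive as strings, while values supplied via argparse are already ints. A small sketch of that normalization, mirroring the hunk above (dict contents illustrative):

```python
# Normalize max_retries to int whether it came from the conf file
# (string) or from argparse (already int), as in the diff above.
def normalize_max_retries(args_d):
    max_retries = args_d.get('max_retries')
    if max_retries is not None and isinstance(max_retries, str):
        args_d['max_retries'] = int(max_retries)
    return args_d

print(normalize_max_retries({'max_retries': '2'}))  # {'max_retries': 2}
print(normalize_max_retries({'max_retries': 3}))    # {'max_retries': 3}
```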
2 changes: 1 addition & 1 deletion setup.py
@@ -5,7 +5,7 @@

 setuptools.setup(
     name='caper',
-    version='v0.2.7',
+    version='v0.2.8',
     python_requires='>3.4.1',
     scripts=['bin/caper', 'mysql/run_mysql_server_docker.sh',
              'mysql/run_mysql_server_singularity.sh'],
