locuszoom.py
#! /usr/bin/env python3
"""LocusZoom"""
import TopmedPipeline
import os
from argparse import ArgumentParser

description = """
LocusZoom plots.
"""
parser = ArgumentParser(description=description)
parser.add_argument("config_file", help="configuration file")
parser.add_argument("--cluster_type", default="UW_Cluster",
help="type of compute cluster environment [default %(default)s]")
parser.add_argument("--cluster_file", default=None,
help="json file containing options to pass to the cluster")
parser.add_argument("--verbose", action="store_true", default=False,
help="enable verbose output to help debug")
parser.add_argument("-e", "--email", default=None,
help="email address for job reporting")
parser.add_argument("--print_only", action="store_true", default=False,
help="print cluster commands without submitting")
parser.add_argument("--version", action="version",
version="TopmedPipeline "+TopmedPipeline.__version__,
help="show the version number and exit")

args = parser.parse_args()
configfile = args.config_file
cluster_file = args.cluster_file
cluster_type = args.cluster_type
email = args.email
print_only = args.print_only
verbose = args.verbose
version = "--version " + TopmedPipeline.__version__
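
# create the cluster interface and locate the pipeline and submit paths;
# runRscript.sh is the driver script used to launch each R job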
cluster = TopmedPipeline.ClusterFactory.createCluster(cluster_type, cluster_file, verbose)
pipeline = cluster.getPipelinePath()
submitPath = cluster.getSubmitPath()
driver = os.path.join(submitPath, "runRscript.sh")
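
# read the analysis configuration and set up log/ and plots/ output directories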
configdict = TopmedPipeline.readConfig(configfile)
configdict = TopmedPipeline.directorySetup(configdict, subdirs=["log", "plots"])
# initialize analysis tracking on the cluster (name, log, start time)
cluster.analysisInit(print_only=print_only)
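
# main job: run the LocusZoom R script as an array job, one task per locus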
job = "locuszoom"
rscript = os.path.join(pipeline, "R", job + ".R")

# number of array tasks = lines in the locus file minus one (header line)
n = TopmedPipeline.countLines(configdict["locus_file"])
array_range = "1-" + str(n-1)

args = ["-s", rscript, configfile, version]
jobid = cluster.submitJob(job_name=job, cmd=driver, args=args, array_range=array_range,
                          email=email, print_only=print_only)

# post-analysis: run post_analysis.py from the submit path, held (holdid) until the LocusZoom array job completes
bname = "post_analysis"
job = "locuszoom" + "_" + bname
jobpy = bname + ".py"
pcmd = os.path.join(submitPath, jobpy)
argList = ["-a", cluster.getAnalysisName(), "-l", cluster.getAnalysisLog(),
           "-s", cluster.getAnalysisStartSec()]
cluster.submitJob(binary=True, job_name=job, cmd=pcmd, args=argList,
                  holdid=[jobid], print_only=print_only)