Skip to content

Commit

Permalink
Merge pull request #35 from ENCODE-DCC/PIP-399_ATAQC
Browse files Browse the repository at this point in the history
Pip 399 ataqc (2)
  • Loading branch information
leepc12 authored Nov 13, 2018
2 parents 3c87ffd + ed7cefb commit ddcc810
Show file tree
Hide file tree
Showing 70 changed files with 1,302 additions and 2,080 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -113,3 +113,4 @@ cromwell*.jar
output_*.json
test_genome*
test_sample*
tmp
10 changes: 5 additions & 5 deletions Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,9 @@ pipeline {
slackSend "The images will be tagged as $TAG"

// pull the cache template image (the image is going to stay pretty much the same so it is no need to be dynamic)
sh "docker pull quay.io/encode-dcc/chip-seq-pipeline:v1.1"
sh "docker pull quay.io/encode-dcc/chip-seq-pipeline:v1.1.1"
sh "docker login -u=${QUAY_USER} -p=${QUAY_PASS} quay.io"
sh "docker build --cache-from quay.io/encode-dcc/chip-seq-pipeline:v1.1 -f docker_image/Dockerfile -t chip-seq-pipeline ."
sh "docker build --cache-from quay.io/encode-dcc/chip-seq-pipeline:v1.1.1 -f docker_image/Dockerfile -t chip-seq-pipeline ."
sh "docker tag chip-seq-pipeline $TAG"
sh "docker push $TAG"
sh "docker logout"
Expand All @@ -57,9 +57,9 @@ pipeline {
slackSend (color: '#7CFC00', message: "started job: ${env.JOB_NAME}, build number ${env.BUILD_NUMBER} on branch: ${env.BRANCH_NAME}.")
slackSend "The images will be tagged as quay.io/encode-dcc/chip-seq-pipeline:latest"
// pull the cache template image (the image is going to stay pretty much the same so it is no need to be dynamic)
sh "docker pull quay.io/encode-dcc/chip-seq-pipeline:v1.1"
sh "docker pull quay.io/encode-dcc/chip-seq-pipeline:v1.1.1"
sh "docker login -u=${QUAY_USER} -p=${QUAY_PASS} quay.io"
sh "docker build --cache-from quay.io/encode-dcc/chip-seq-pipeline:v1.1 -f docker_image/Dockerfile -t chip-seq-pipeline ."
sh "docker build --cache-from quay.io/encode-dcc/chip-seq-pipeline:v1.1.1 -f docker_image/Dockerfile -t chip-seq-pipeline ."
sh "docker tag chip-seq-pipeline quay.io/encode-dcc/chip-seq-pipeline:latest"
sh "docker push quay.io/encode-dcc/chip-seq-pipeline:latest"
sh "docker logout"
Expand All @@ -69,7 +69,7 @@ pipeline {
stage('Run-Task-Level-Tests-Non-Master'){
agent {label 'slave-w-docker-cromwell-60GB-ebs'}
steps{
sh "cd test/test_task && git clone https://github.com/ENCODE-DCC/chip-seq-pipeline-test-data"
sh "cd test/test_task && rm -rf chip-seq-pipeline-test-data && git clone https://github.com/ENCODE-DCC/chip-seq-pipeline-test-data"
sh """cd test/test_task
./test.sh test_bam2ta.wdl test_bam2ta.json $TAG
python -c "import sys; import json; data=json.loads(sys.stdin.read()); sys.exit(int(not data[u'match_overall']))" < test_bam2ta.result.json
Expand Down
26 changes: 9 additions & 17 deletions backends/backend.conf
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,10 @@ backend {
String? slurm_account
String? slurm_extra_param
String singularity_container
String? singularity_options
String singularity_command = "exec"
String? singularity_command_options
String? singularity_bindpath
"""
submit = """
sbatch \
ls ${singularity_container} $(echo ${singularity_bindpath} | tr , ' ') 1>/dev/null && (sbatch \
--export=ALL \
-J ${job_name} \
-D ${cwd} \
Expand All @@ -38,7 +36,7 @@ backend {
${"--account " + slurm_account} \
${"--gres gpu:" + gpu} \
${slurm_extra_param} \
--wrap "chmod u+x ${script} && SINGULARITY_BINDPATH=$(echo ${cwd} | sed 's/cromwell-executions/\n/g' | head -n1) singularity ${singularity_options} ${singularity_command} ${singularity_command_options} --home ${cwd} ${if defined(gpu) then "--nv" else ""} ${singularity_container} ${script}"
--wrap "chmod u+x ${script} && SINGULARITY_BINDPATH=$(echo ${cwd} | sed 's/cromwell-executions/\n/g' | head -n1),${singularity_bindpath} singularity exec --home ${cwd} ${if defined(gpu) then '--nv' else ''} ${singularity_container} ${script}")
"""
kill = "scancel ${job_id}"
check-alive = "squeue -j ${job_id}"
Expand All @@ -60,12 +58,10 @@ backend {
String? sge_queue
String? sge_extra_param
String singularity_container
String? singularity_options
String singularity_command = "exec"
String? singularity_command_options
String? singularity_bindpath
"""
submit = """
echo "chmod u+x ${script} && SINGULARITY_BINDPATH=$(echo ${cwd} | sed 's/cromwell-executions/\n/g' | head -n1) singularity ${singularity_options} ${singularity_command} ${singularity_command_options} --home ${cwd} ${if defined(gpu) then "--nv" else ""} ${singularity_container} ${script}" | qsub \
ls ${singularity_container} $(echo ${singularity_bindpath} | tr , ' ') 1>/dev/null && (echo "chmod u+x ${script} && SINGULARITY_BINDPATH=$(echo ${cwd} | sed 's/cromwell-executions/\n/g' | head -n1),${singularity_bindpath} singularity exec --home ${cwd} ${if defined(gpu) then '--nv' else ''} ${singularity_container} ${script}" | qsub \
-S /bin/sh \
-terse \
-b n \
Expand All @@ -81,7 +77,7 @@ backend {
${"-q " + sge_queue} \
${"-l gpu=" + gpu} \
${sge_extra_param} \
-V
-V)
"""
kill = "qdel ${job_id}"
check-alive = "qstat -j ${job_id}"
Expand All @@ -92,19 +88,15 @@ backend {
singularity {
actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory"
config {
script-epilogue = "sleep 30 && sync"
script-epilogue = "sleep 5 && sync"
concurrent-job-limit = 10
runtime-attributes = """
Int? gpu
String singularity_container
String? singularity_options
String singularity_command = "exec"
String? singularity_command_options
String? singularity_bindpath
"""
submit = """
chmod u+x ${script} && SINGULARITY_BINDPATH=$(echo ${cwd} | sed 's/cromwell-executions/\n/g' | head -n1) singularity ${singularity_options} \
${singularity_command} ${singularity_command_options} --home ${cwd} ${if defined(gpu) then "--nv" else ""} \
${singularity_container} ${script} & echo $!
ls ${singularity_container} $(echo ${singularity_bindpath} | tr , ' ') 1>/dev/null && (chmod u+x ${script} && SINGULARITY_BINDPATH=$(echo ${cwd} | sed 's/cromwell-executions/\n/g' | head -n1),${singularity_bindpath} singularity exec --home ${cwd} ${if defined(gpu) then '--nv' else ''} ${singularity_container} ${script} & echo $! && disown)
"""
job-id-regex = "(\\d+)"
check-alive = "ps -ef | grep -v grep | grep ${job_id}"
Expand Down
24 changes: 19 additions & 5 deletions chip.wdl
Original file line number Diff line number Diff line change
Expand Up @@ -691,6 +691,8 @@ workflow chip {
paired_end = paired_end,
pipeline_type = pipeline_type,
peak_caller = peak_caller_,
macs2_cap_num_peak = macs2_cap_num_peak,
spp_cap_num_peak = spp_cap_num_peak,
idr_thresh = idr_thresh,
flagstat_qcs = bwa.flagstat_qc,
nodup_flagstat_qcs = filter.flagstat_qc,
Expand Down Expand Up @@ -985,7 +987,7 @@ task xcor {
#@docker : "quay.io/encode-dcc/atac-seq-pipeline:v1"
cpu : select_first([cpu,2])
memory : "${select_first([mem_mb,'16000'])} MB"
time : select_first([time_hr,6])
time : select_first([time_hr,24])
disks : select_first([disks,"local-disk 100 HDD"])
}
}
Expand Down Expand Up @@ -1087,6 +1089,7 @@ task macs2 {
File npeak = glob("*[!.][!b][!f][!i][!l][!t].narrowPeak.gz")[0]
File bfilt_npeak = glob("*.bfilt.narrowPeak.gz")[0]
File bfilt_npeak_bb = glob("*.bfilt.narrowPeak.bb")[0]
Array[File] bfilt_npeak_hammock = glob("*.bfilt.narrowPeak.hammock.gz*")
File sig_pval = if select_first([make_signal,false]) then glob("*.pval.signal.bigwig")[0] else glob("null")[0]
File sig_fc = if select_first([make_signal,false]) then glob("*.fc.signal.bigwig")[0] else glob("null")[0]
File frip_qc = glob("*.frip.qc")[0]
Expand Down Expand Up @@ -1125,6 +1128,7 @@ task spp {
File rpeak = glob("*[!.][!b][!f][!i][!l][!t].regionPeak.gz")[0]
File bfilt_rpeak = glob("*.bfilt.regionPeak.gz")[0]
File bfilt_rpeak_peak_bb = glob("*.bfilt.regionPeak.bb")[0]
Array[File] bfilt_rpeak_hammock = glob("*.bfilt.regionPeak.hammock.gz*")
File frip_qc = glob("*.frip.qc")[0]
}
runtime {
Expand Down Expand Up @@ -1171,6 +1175,7 @@ task idr {
File idr_peak = glob("*[!.][!b][!f][!i][!l][!t]."+peak_type+".gz")[0]
File bfilt_idr_peak = glob("*.bfilt."+peak_type+".gz")[0]
File bfilt_idr_peak_bb = glob("*.bfilt."+peak_type+".bb")[0]
Array[File] bfilt_idr_peak_hammock = glob("*.bfilt."+peak_type+".hammock.gz*")
File idr_plot = glob("*.txt.png")[0]
File idr_unthresholded_peak = glob("*.txt.gz")[0]
File idr_log = glob("*.log")[0]
Expand Down Expand Up @@ -1215,6 +1220,7 @@ task overlap {
File overlap_peak = glob("*[!.][!b][!f][!i][!l][!t]."+peak_type+".gz")[0]
File bfilt_overlap_peak = glob("*.bfilt."+peak_type+".gz")[0]
File bfilt_overlap_peak_bb = glob("*.bfilt."+peak_type+".bb")[0]
Array[File] bfilt_overlap_peak_hammock = glob("*.bfilt."+peak_type+".hammock.gz*")
File frip_qc = if defined(ta) then glob("*.frip.qc")[0] else glob("null")[0]
}
runtime {
Expand Down Expand Up @@ -1247,10 +1253,12 @@ task reproducibility {
${"--chrsz " + chrsz}
}
output {
File optimal_peak = glob("optimal_peak.gz")[0]
File conservative_peak = glob("conservative_peak.gz")[0]
File optimal_peak = glob("optimal_peak.*.gz")[0]
File conservative_peak = glob("conservative_peak.*.gz")[0]
File optimal_peak_bb = glob("optimal_peak.*.bb")[0]
File conservative_peak_bb = glob("conservative_peak.*.bb")[0]
Array[File] optimal_peak_hammock = glob("optimal_peak.*.hammock.gz*")
Array[File] conservative_peak_hammock = glob("conservative_peak.*.hammock.gz*")
File reproducibility_qc = glob("*reproducibility.qc")[0]
}
runtime {
Expand All @@ -1270,9 +1278,12 @@ task qc_report {
String? desc # description for sample
#String? encode_accession_id # ENCODE accession ID of sample
# workflow params
Int? multimapping
Boolean paired_end
String pipeline_type
String peak_caller
Int? macs2_cap_num_peak
Int? spp_cap_num_peak
Float idr_thresh
# QCs
Array[File]? flagstat_qcs
Expand Down Expand Up @@ -1315,11 +1326,14 @@ task qc_report {

command {
python $(which encode_qc_report.py) \
${"--name '" + name + "'"} \
${"--desc '" + desc + "'"} \
${"--name '" + sub(select_first([name,""]),"'","_") + "'"} \
${"--desc '" + sub(select_first([desc,""]),"'","_") + "'"} \
${"--multimapping " + multimapping} \
${if paired_end then "--paired-end" else ""} \
--pipeline-type ${pipeline_type} \
--peak-caller ${peak_caller} \
${"--macs2-cap-num-peak " + macs2_cap_num_peak} \
${"--spp-cap-num-peak " + spp_cap_num_peak} \
--idr-thresh ${idr_thresh} \
--flagstat-qcs ${sep=' ' flagstat_qcs} \
--nodup-flagstat-qcs ${sep=' ' nodup_flagstat_qcs} \
Expand Down
5 changes: 5 additions & 0 deletions conda/install_dependencies.sh
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,11 @@ source activate ${CONDA_ENV}
echo "export PYTHONNOUSERSITE=True" >> ${CONDA_ACTIVATE_SH}
echo "unset OPENBLAS_NUM_THREADS MKL_NUM_THREADS PYTHONNOUSERSITE" > ${CONDA_DEACTIVATE_SH}

# to prevent conflict between Conda's R and global(local) R
echo "export R_HOME=${CONDA_LIB}/R" >> ${CONDA_ACTIVATE_SH}
echo "export R_LIBS=${CONDA_LIB}/R/library" >> ${CONDA_ACTIVATE_SH}
echo "unset R_HOME R_LIBS" >> ${CONDA_DEACTIVATE_SH}

# hack around the need for both python2 and python3 in the same environment
CONDA_BIN="${CONDA_PREFIX}/bin"
cd ${CONDA_BIN}
Expand Down
1 change: 1 addition & 0 deletions conda/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -42,3 +42,4 @@ deeptools ==2.5.4 #2.2.3 does not support plotFingerprint --outQualityMetrics
pybigwig==0.3.11
#openssl ==1.0.2g-0
phantompeakqualtools ==1.2
tabix==0.2.6
1 change: 1 addition & 0 deletions conda/requirements_py3.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,4 @@ java-jdk ==8.0.92
libgcc==5.2.0 # this does not work with MacOS...
matplotlib ==1.5.1
ncurses ==6.1
tabix==0.2.6
21 changes: 12 additions & 9 deletions docker_image/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -37,13 +37,9 @@ RUN apt-get update && apt-get install -y \
r-base-core \
default-jre \
apt-transport-https \
tabix \
&& rm -rf /var/lib/apt/lists/*

# Make directory for all softwares
RUN mkdir /software
WORKDIR /software
ENV PATH="/software:${PATH}"

# Install Intel MKL for BLAS
RUN wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && rm -rf GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list' && apt-get update && apt-get install intel-mkl-64bit-2018.0-033 -y && rm -rf /var/lib/apt/lists/*
ENV LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/opt/intel/compilers_and_libraries_2018.0.128/linux/mkl/lib/intel64_lin
Expand All @@ -52,10 +48,14 @@ ENV LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/opt/intel/compilers_and_libraries_2018.0
RUN pip install --no-cache-dir common python-dateutil cython==0.27.3 && \
pip3 install --no-cache-dir common python-dateutil cython==0.27.3

# Make directory for all softwares
RUN mkdir /software
WORKDIR /software
ENV PATH="/software:${PATH}"

# Install numpy 1.11.3 (python2/3, linked with MKL)
COPY docker_image/numpy/site.cfg .
RUN git clone --branch v1.11.3 https://github.com/numpy/numpy && \
cd numpy && cp ../site.cfg . && python setup.py install && python3 setup.py install && cd ../ && rm -rf numpy*
RUN git clone --branch v1.11.3 https://github.com/numpy/numpy && cd numpy && \
/bin/bash -c 'echo -e "[mkl]\nlibrary_dirs = /opt/intel/compilers_and_libraries_2018/linux/mkl/lib/intel64\ninclude_dirs = /opt/intel/compilers_and_libraries_2018/linux/mkl/include\nmkl_libs = mkl_rt\nlapack_libs =" > site.cfg' && python setup.py install && python3 setup.py install && cd ../ && rm -rf numpy*

# Install scipy 1.0.0 (python2/3)
RUN git clone --branch v1.0.0 --single-branch https://github.com/scipy/scipy && \
Expand Down Expand Up @@ -92,7 +92,7 @@ RUN wget https://github.com/broadinstitute/picard/releases/download/2.10.6/picar
RUN wget https://github.com/lomereiter/sambamba/releases/download/v0.6.6/sambamba_v0.6.6_linux.tar.bz2 && tar -xvjf sambamba_v0.6.6_linux.tar.bz2 && mv sambamba_v0.6.6 sambamba && rm -rf sambamba_*

# Install R packages
RUN echo "r <- getOption('repos'); r['CRAN'] <- 'http://cran.us.r-project.org'; options(repos = r);" > ~/.Rprofile && \
RUN echo "r <- getOption('repos'); r['CRAN'] <- 'http://cran.r-project.org'; options(repos = r);" > ~/.Rprofile && \
Rscript -e "install.packages('snow')" && \
Rscript -e "install.packages('snowfall')" && \
Rscript -e "install.packages('bitops')" && \
Expand Down Expand Up @@ -127,6 +127,9 @@ RUN pip install --no-cache-dir pyfaidx==0.4.7.1
# Install UCSC tools (Kent utils) latest for peak2bigbed conversion
RUN wget http://hgdownload.soe.ucsc.edu/admin/exe/linux.x86_64/bedToBigBed && chmod +x bedToBigBed

# Install bgzip/tabix for Wash U browser track (hammock type)
#RUN apt-get update && apt-get install -y tabix && rm -rf /var/lib/apt/lists/*

# Prevent conflict with locally installed python outside of singularity container
ENV PYTHONNOUSERSITE=True

Expand Down
4 changes: 2 additions & 2 deletions docs/dev.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ Run the following command line locally to build out DX workflows for this pipeli

```
# version
VER=v1.1
VER=v1.1.1
# general
java -jar ~/dxWDL-0.77.jar compile chip.wdl -project "ENCODE Uniform Processing Pipelines" -extras workflow_opts/docker.json -f -folder /ChIP-seq2/workflows/$VER/general -defaults examples/dx/template_general.json
Expand All @@ -33,7 +33,7 @@ java -jar ~/dxWDL-0.77.jar compile chip.wdl -project "ENCODE Uniform Processing
## DX Azure
```
# version
VER=v1.1
VER=v1.1.1
# general
java -jar ~/dxWDL-0.77.jar compile chip.wdl -project "ENCODE Uniform Processing Pipelines Azure" -extras workflow_opts/docker.json -f -folder /ChIP-seq2/workflows/$VER/general -defaults examples/dx_azure/template_general.json
Expand Down
2 changes: 1 addition & 1 deletion docs/input.md
Original file line number Diff line number Diff line change
Expand Up @@ -202,6 +202,6 @@ Walltime (`time`) settings (for SGE and SLURM only).
* `"chip.bwa.time_hr"` : (optional) Walltime for `bwa` (default: 48).
* `"chip.filter.time_hr"` : (optional) Walltime for `filter` (default: 24).
* `"chip.bam2ta.time_hr"` : (optional) Walltime for `bam2ta` (default: 6).
* `"chip.xcor.time_hr"` : (optional) Walltime for `xcor` (default: 6).
* `"chip.xcor.time_hr"` : (optional) Walltime for `xcor` (default: 24).
* `"chip.macs2_time_hr"` : (optional) Walltime for `macs2` (default: 24).

20 changes: 10 additions & 10 deletions docs/tutorial_dx_web.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,16 +41,16 @@ This document describes instruction for the item 2).
1. DNANexus allows only one copy of a workflow per project. The example workflow in the previous section is pre-built for the subsampled test sample [ENCSR936XTK](https://www.encodeproject.org/experiments/ENCSR936XTK/) with all parameters defined already.

2. Copy one of the following workflows according to the platform you have chosen for your project (AWS or Azure).
* [AWS general](https://platform.dnanexus.com/projects/BKpvFg00VBPV975PgJ6Q03v6/data/ChIP-seq2/workflows/v1.1/general) without pre-defined reference genome.
* [AWS hg38](https://platform.dnanexus.com/projects/BKpvFg00VBPV975PgJ6Q03v6/data/ChIP-seq2/workflows/v1.1/hg38) with pre-defined hg38 reference genome.
* [AWS hg19](https://platform.dnanexus.com/projects/BKpvFg00VBPV975PgJ6Q03v6/data/ChIP-seq2/workflows/v1.1/hg19) with pre-defined hg38 reference genome.
* [AWS mm10](https://platform.dnanexus.com/projects/BKpvFg00VBPV975PgJ6Q03v6/data/ChIP-seq2/workflows/v1.1/mm10) with pre-defined mm10 reference genome.
* [AWS mm9](https://platform.dnanexus.com/projects/BKpvFg00VBPV975PgJ6Q03v6/data/ChIP-seq2/workflows/v1.1/mm9) with pre-defined mm9 reference genome.
* [Azure general](https://platform.dnanexus.com/projects/F6K911Q9xyfgJ36JFzv03Z5J/data/ChIP-seq2/workflows/v1.1/general) without pre-defined reference genome.
* [Azure hg38](https://platform.dnanexus.com/projects/F6K911Q9xyfgJ36JFzv03Z5J/data/ChIP-seq2/workflows/v1.1/hg38) with pre-defined hg38 reference genome.
* [Azure hg19](https://platform.dnanexus.com/projects/F6K911Q9xyfgJ36JFzv03Z5J/data/ChIP-seq2/workflows/v1.1/hg19) with pre-defined hg38 reference genome.
* [Azure mm10](https://platform.dnanexus.com/projects/F6K911Q9xyfgJ36JFzv03Z5J/data/ChIP-seq2/workflows/v1.1/mm10) with pre-defined mm10 reference genome.
* [Azure mm9](https://platform.dnanexus.com/projects/F6K911Q9xyfgJ36JFzv03Z5J/data/ChIP-seq2/workflows/v1.1/mm9) with pre-defined mm9 reference genome.
* [AWS general](https://platform.dnanexus.com/projects/BKpvFg00VBPV975PgJ6Q03v6/data/ChIP-seq2/workflows/v1.1.1/general) without pre-defined reference genome.
* [AWS hg38](https://platform.dnanexus.com/projects/BKpvFg00VBPV975PgJ6Q03v6/data/ChIP-seq2/workflows/v1.1.1/hg38) with pre-defined hg38 reference genome.
* [AWS hg19](https://platform.dnanexus.com/projects/BKpvFg00VBPV975PgJ6Q03v6/data/ChIP-seq2/workflows/v1.1.1/hg19) with pre-defined hg19 reference genome.
* [AWS mm10](https://platform.dnanexus.com/projects/BKpvFg00VBPV975PgJ6Q03v6/data/ChIP-seq2/workflows/v1.1.1/mm10) with pre-defined mm10 reference genome.
* [AWS mm9](https://platform.dnanexus.com/projects/BKpvFg00VBPV975PgJ6Q03v6/data/ChIP-seq2/workflows/v1.1.1/mm9) with pre-defined mm9 reference genome.
* [Azure general](https://platform.dnanexus.com/projects/F6K911Q9xyfgJ36JFzv03Z5J/data/ChIP-seq2/workflows/v1.1.1/general) without pre-defined reference genome.
* [Azure hg38](https://platform.dnanexus.com/projects/F6K911Q9xyfgJ36JFzv03Z5J/data/ChIP-seq2/workflows/v1.1.1/hg38) with pre-defined hg38 reference genome.
* [Azure hg19](https://platform.dnanexus.com/projects/F6K911Q9xyfgJ36JFzv03Z5J/data/ChIP-seq2/workflows/v1.1.1/hg19) with pre-defined hg19 reference genome.
* [Azure mm10](https://platform.dnanexus.com/projects/F6K911Q9xyfgJ36JFzv03Z5J/data/ChIP-seq2/workflows/v1.1.1/mm10) with pre-defined mm10 reference genome.
* [Azure mm9](https://platform.dnanexus.com/projects/F6K911Q9xyfgJ36JFzv03Z5J/data/ChIP-seq2/workflows/v1.1.1/mm9) with pre-defined mm9 reference genome.

3. Click on the DX workflow `chip`.

Expand Down
Loading

0 comments on commit ddcc810

Please sign in to comment.