Skip to content

Commit

Permalink
Merge pull request #41 from ENCODE-DCC/PIP-428_v1.1.3
Browse files Browse the repository at this point in the history
PIP-428 v1.1.3
  • Loading branch information
leepc12 authored Dec 4, 2018
2 parents ad87d92 + 1337e49 commit 0e61a88
Show file tree
Hide file tree
Showing 27 changed files with 131 additions and 208 deletions.
2 changes: 1 addition & 1 deletion .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ jobs:
name: build image
command: |
source ${BASH_ENV}
export DOCKER_CACHE_TAG=v1.1.2
export DOCKER_CACHE_TAG=v1.1.3
echo "pulling ${DOCKER_CACHE_TAG}!"
docker pull quay.io/encode-dcc/chip-seq-pipeline:${DOCKER_CACHE_TAG}
docker login -u=${QUAY_ROBOT_USER} -p=${QUAY_ROBOT_USER_TOKEN} quay.io
Expand Down
149 changes: 0 additions & 149 deletions Jenkinsfile

This file was deleted.

57 changes: 57 additions & 0 deletions backends/backend.conf
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,63 @@ backend {
default = "Local"
providers {

# PBS backend: runs each Cromwell task as a qsub batch job.
pbs {
  actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory"
  config {
    # Runs after the task script exits; presumably gives a shared
    # filesystem time to flush outputs before Cromwell reads them — TODO confirm.
    script-epilogue = "sleep 30 && sync"
    concurrent-job-limit = 50
    # Runtime attributes a WDL task may declare; gpu/time/memory_mb are optional.
    runtime-attributes = """
    Int cpu = 1
    Int? gpu
    Int? time
    Int? memory_mb
    """
    # NOTE(review): memory_mb/1024 is integer division, so e.g. 1500 MB
    # requests only 1gb — consider rounding up; left as-is to match
    # the sibling pbs_singularity stanza.
    # Fixed: GPU request used "gpu>1", which silently dropped the
    # -lngpus flag when a task asked for exactly 1 GPU; now "gpu>0".
    submit = """
    qsub \
    -N ${job_name} \
    -o ${out} \
    -e ${err} \
    ${"-lselect=1:ncpus=" + cpu + ":mem=" + memory_mb/1024 + "gb"} \
    ${"-lwalltime=" + time + ":0:0"} \
    ${if gpu>0 then "-lngpus=" + gpu else ""} \
    -V \
    ${script}
    """
    kill = "qdel ${job_id}"
    check-alive = "qstat ${job_id}"
    # Capture the numeric job id from qsub output (PBS ids look like "12345.host").
    job-id-regex = "(\\d+).+"
  }
}

# PBS + Singularity backend: wraps the task script in a "singularity exec"
# command and pipes it to qsub on stdin.
pbs_singularity {
  actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory"
  config {
    # Runs after the task script exits; presumably gives a shared
    # filesystem time to flush outputs before Cromwell reads them — TODO confirm.
    script-epilogue = "sleep 30 && sync"
    concurrent-job-limit = 50
    # Runtime attributes a WDL task may declare; singularity_container is required.
    runtime-attributes = """
    Int cpu = 1
    Int? gpu
    Int? time
    Int? memory_mb
    String singularity_container
    String? singularity_bindpath
    """
    # The leading "ls" fails fast if the image or a bind path is missing.
    # The sed|head pipeline derives the bind root from ${cwd} by splitting
    # at "cromwell-executions" and keeping the prefix.
    # Fixed: GPU request used "gpu>1", which silently dropped the
    # -lngpus flag when a task asked for exactly 1 GPU; now "gpu>0"
    # (matches the --nv toggle, which already fires for any defined gpu).
    submit = """
    ls ${singularity_container} $(echo ${singularity_bindpath} | tr , ' ') 1>/dev/null && (echo "chmod u+x ${script} && SINGULARITY_BINDPATH=$(echo ${cwd} | sed 's/cromwell-executions/\n/g' | head -n1),${singularity_bindpath} singularity exec --home ${cwd} ${if defined(gpu) then '--nv' else ''} ${singularity_container} ${script}" | qsub \
    -N ${job_name} \
    -o ${out} \
    -e ${err} \
    ${"-lselect=1:ncpus=" + cpu + ":mem=" + memory_mb/1024 + "gb"} \
    ${"-lwalltime=" + time + ":0:0"} \
    ${if gpu>0 then "-lngpus=" + gpu else ""} \
    -V)
    """
    kill = "qdel ${job_id}"
    # Fixed: was "qstat -j ${job_id}" — "-j" is a Grid Engine flag, not PBS,
    # so the liveness check errored; now consistent with the pbs stanza.
    check-alive = "qstat ${job_id}"
    job-id-regex = "(\\d+)"
  }
}

slurm_singularity {
actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory"
config {
Expand Down
19 changes: 10 additions & 9 deletions chip.wdl
Original file line number Diff line number Diff line change
Expand Up @@ -1518,10 +1518,10 @@ task qc_report {
task read_genome_tsv {
File genome_tsv
command {
cat ${genome_tsv}
cat ${genome_tsv} > 'tmp.tsv'
}
output {
Map[String,String] genome = read_map(stdout())
Map[String,String] genome = read_map('tmp.tsv')
}
runtime {
cpu : 1
Expand All @@ -1536,16 +1536,17 @@ task rounded_mean {
command <<<
python <<CODE
arr = [${sep=',' ints}]
if len(arr):
sum_ = sum(arr)
mean_ = sum(arr)/float(len(arr))
print(int(round(mean_)))
else:
print(0)
with open('tmp.txt','w') as fp:
if len(arr):
sum_ = sum(arr)
mean_ = sum(arr)/float(len(arr))
fp.write('{}'.format(int(round(mean_))))
else:
fp.write('0')
CODE
>>>
output {
Int rounded_mean = read_int(stdout())
Int rounded_mean = read_int('tmp.txt')
}
runtime {
cpu : 1
Expand Down
14 changes: 12 additions & 2 deletions docs/dev.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,16 @@
Dev
===

## Command line for version change
```
PREV_VER=v1.1.2
NEW_VER=v1.1.3
for f in $(grep -rl ${PREV_VER} --include=*.{json,md,sh,yml})
do
sed -i "s/${PREV_VER}/${NEW_VER}/g" ${f}
done
```

## Building templates on DX for each genome

Make sure that you have [`dxWDL-0.77.jar`](https://github.com/dnanexus/dxWDL/releases/download/0.77/dxWDL-0.77.jar) on your `$HOME`. Install [DNANexus Platform SDK](https://wiki.dnanexus.com/downloads) with `pip install dxpy`. Log-in on DNANexus with `dx login` and choose "ENCODE Uniform Processing Pipelines" (name of our official DNANexus project for pipelines).
Expand All @@ -9,7 +19,7 @@ Run the following command line locally to build out DX workflows for this pipeli

```
# version
VER=v1.1.2
VER=v1.1.3
# general
java -jar ~/dxWDL-0.77.jar compile chip.wdl -project "ENCODE Uniform Processing Pipelines" -extras workflow_opts/docker.json -f -folder /ChIP-seq2/workflows/$VER/general -defaults examples/dx/template_general.json
Expand Down Expand Up @@ -42,7 +52,7 @@ java -jar ~/dxWDL-0.77.jar compile chip.wdl -project "ENCODE Uniform Processing
## DX Azure
```
# version
VER=v1.1.2
VER=v1.1.3
# general
java -jar ~/dxWDL-0.77.jar compile chip.wdl -project "ENCODE Uniform Processing Pipelines Azure" -extras workflow_opts/docker.json -f -folder /ChIP-seq2/workflows/$VER/general -defaults examples/dx_azure/template_general.json
Expand Down
2 changes: 1 addition & 1 deletion docs/tutorial_dx_cli.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,6 @@ This document describes instruction for the item 1).
12. Click on "Run as Analysis..." and you will be automatically redirected to the "Monitor" tab.
13. It will take about an hour. You will be able to find all outputs on your output folder. Final QC report (`qc.html`)/JSON (`qc.json`) will be found on it.
13. It will take about 6 hours. You will be able to find all outputs on your output folder. Final QC report (`qc.html`)/JSON (`qc.json`) will be found on it.
14. See full specification for [input JSON file](input.md).
Loading

0 comments on commit 0e61a88

Please sign in to comment.