Skip to content

Commit 0b7bd13

Browse files
authored
Merge pull request #162 from drpatelh/master
Update docs, CI, README and environment
2 parents bfe7eb3 + 2db5f5a commit 0b7bd13

30 files changed

+720
-573
lines changed

.github/markdownlint.yml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,3 +3,7 @@ default: true,
33
line-length: false
44
no-duplicate-header:
55
siblings_only: true
6+
MD033:
7+
allowed_elements: [details, summary, p, img]
8+
MD007:
9+
indent: 4

.github/workflows/awsfulltest.yml

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
name: nf-core AWS full size tests
2+
# This workflow is triggered on releases.
3+
# It runs the -profile 'test_full' on AWS batch
4+
5+
on:
6+
release:
7+
types: [published]
8+
9+
jobs:
10+
run-awstest:
11+
if: github.repository == 'nf-core/chipseq'
12+
name: Run AWS test
13+
runs-on: ubuntu-latest
14+
steps:
15+
- name: Setup Miniconda
16+
uses: goanpeca/setup-miniconda@v1.0.2
17+
with:
18+
auto-update-conda: true
19+
python-version: 3.7
20+
- name: Install awscli
21+
run: conda install -c conda-forge awscli
22+
- name: Start AWS batch job
23+
env:
24+
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
25+
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
26+
TOWER_ACCESS_TOKEN: ${{secrets.AWS_TOWER_TOKEN}}
27+
#AWS_JOB_DEFINITION: ${{secrets.AWS_JOB_DEFINITION}}
28+
AWS_JOB_QUEUE: ${{secrets.AWS_JOB_QUEUE}}
29+
AWS_S3_BUCKET: ${{secrets.AWS_S3_BUCKET}}
30+
run: | # Submits job to AWS batch using a 'nextflow-4GiB' job definition. Setting JVM options to "-XX:+UseG1GC" for more efficient garbage collection when staging remote files.
31+
aws batch submit-job \
32+
--region eu-west-1 \
33+
--job-name nf-core-chipseq \
34+
--job-queue $AWS_JOB_QUEUE \
35+
--job-definition nextflow-4GiB \
36+
--container-overrides '{"command": ["nf-core/chipseq", "-r '"${GITHUB_SHA}"' -profile test_full --outdir s3://'"${AWS_S3_BUCKET}"'/chipseq/results-'"${GITHUB_SHA}"' -w s3://'"${AWS_S3_BUCKET}"'/chipseq/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}, {"name": "NXF_OPTS", "value": "-XX:+UseG1GC"}]}'

.github/workflows/awstest.yml

Lines changed: 38 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -1,37 +1,38 @@
1-
name: nf-core AWS test
2-
# This workflow is triggered on PRs to the master branch.
3-
# It runs the -profile 'test_full' on AWS batch
4-
5-
on:
6-
push:
7-
branches:
8-
- master
9-
- dev
10-
release:
11-
types: [published]
12-
13-
jobs:
14-
run-awstest:
15-
if: github.repository == 'nf-core/chipseq'
16-
name: Run AWS test
17-
runs-on: ubuntu-latest
18-
steps:
19-
- name: Setup Miniconda
20-
uses: goanpeca/setup-miniconda@v1.0.2
21-
with:
22-
auto-update-conda: true
23-
python-version: 3.7
24-
- name: Install awscli
25-
run: conda install -c conda-forge awscli
26-
- name: Start AWS batch job
27-
env:
28-
AWS_ACCESS_KEY_ID: ${{secrets.AWS_KEY_ID}}
29-
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_KEY_SECRET}}
30-
TOWER_ACCESS_TOKEN: ${{secrets.TOWER_ACCESS_TOKEN}}
31-
run: | # Submits job to AWS batch using a 'nextflow-4GiB' instance. Setting JVM options to "-XX:+UseG1GC" for more efficient garbage collection when staging remote files.
32-
aws batch submit-job \
33-
--region eu-west-1 \
34-
--job-name nf-core-chipseq \
35-
--job-queue 'default-8b3836e0-5eda-11ea-96e5-0a2c3f6a2a32' \
36-
--job-definition nextflow-4GiB \
37-
--container-overrides '{"command": ["nf-core/chipseq", "-r '"${GITHUB_SHA}"' -profile test --outdir s3://nf-core-awsmegatests/chipseq/results-'"${GITHUB_SHA}"' -w s3://nf-core-awsmegatests/chipseq/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}, {"name": "NXF_OPTS", "value": "-XX:+UseG1GC"}]}'
1+
name: nf-core AWS test
2+
# This workflow is triggered on push to the master branch.
3+
# It runs the -profile 'test' on AWS batch
4+
5+
on:
6+
push:
7+
branches:
8+
- master
9+
- dev # just for testing purposes, to be removed
10+
11+
jobs:
12+
run-awstest:
13+
if: github.repository == 'nf-core/chipseq'
14+
name: Run AWS test
15+
runs-on: ubuntu-latest
16+
steps:
17+
- name: Setup Miniconda
18+
uses: goanpeca/setup-miniconda@v1.0.2
19+
with:
20+
auto-update-conda: true
21+
python-version: 3.7
22+
- name: Install awscli
23+
run: conda install -c conda-forge awscli
24+
- name: Start AWS batch job
25+
env:
26+
AWS_ACCESS_KEY_ID: ${{secrets.AWS_ACCESS_KEY_ID}}
27+
AWS_SECRET_ACCESS_KEY: ${{secrets.AWS_SECRET_ACCESS_KEY}}
28+
TOWER_ACCESS_TOKEN: ${{secrets.AWS_TOWER_TOKEN}}
29+
#AWS_JOB_DEFINITION: ${{secrets.AWS_JOB_DEFINITION}}
30+
AWS_JOB_QUEUE: ${{secrets.AWS_JOB_QUEUE}}
31+
AWS_S3_BUCKET: ${{secrets.AWS_S3_BUCKET}}
32+
run: | # Submits job to AWS batch using a 'nextflow-4GiB' job definition. Setting JVM options to "-XX:+UseG1GC" for more efficient garbage collection when staging remote files.
33+
aws batch submit-job \
34+
--region eu-west-1 \
35+
--job-name nf-core-chipseq \
36+
--job-queue $AWS_JOB_QUEUE \
37+
--job-definition nextflow-4GiB \
38+
--container-overrides '{"command": ["nf-core/chipseq", "-r '"${GITHUB_SHA}"' -profile test --outdir s3://'"${AWS_S3_BUCKET}"'/chipseq/results-'"${GITHUB_SHA}"' -w s3://'"${AWS_S3_BUCKET}"'/chipseq/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}, {"name": "NXF_OPTS", "value": "-XX:+UseG1GC"}]}'

.github/workflows/ci.yml

File mode changed: 100644 → 100755
Lines changed: 90 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,50 +1,125 @@
11
name: nf-core CI
2-
# This workflow is triggered on pushes and PRs to the repository.
2+
# This workflow is triggered on releases and pull-requests.
33
# It runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
4-
on: [push, pull_request]
4+
on:
5+
push:
6+
branches:
7+
- dev
8+
pull_request:
9+
release:
10+
types: [published]
511

612
jobs:
713
test:
14+
name: Run workflow tests
15+
# Only run on push if this is the nf-core dev branch (merged PRs)
16+
if: ${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/chipseq') }}
17+
runs-on: ubuntu-latest
818
env:
919
NXF_VER: ${{ matrix.nxf_ver }}
1020
NXF_ANSI_LOG: false
11-
runs-on: ubuntu-latest
1221
strategy:
1322
matrix:
1423
# Nextflow versions: check pipeline minimum and current latest
1524
nxf_ver: ['19.10.0', '']
1625
steps:
17-
- uses: actions/checkout@v2
18-
- name: Install Nextflow
19-
run: |
20-
wget -qO- get.nextflow.io | bash
21-
sudo mv nextflow /usr/local/bin/
26+
- name: Check out pipeline code
27+
uses: actions/checkout@v2
28+
29+
- name: Check if Dockerfile or Conda environment changed
30+
uses: technote-space/get-diff-action@v1
31+
with:
32+
PREFIX_FILTER: |
33+
Dockerfile
34+
environment.yml
35+
36+
- name: Build new docker image
37+
if: env.GIT_DIFF
38+
run: docker build --no-cache . -t nfcore/chipseq:dev
39+
2240
- name: Pull docker image
41+
if: ${{ !env.GIT_DIFF }}
2342
run: |
2443
docker pull nfcore/chipseq:dev
2544
docker tag nfcore/chipseq:dev nfcore/chipseq:dev
45+
46+
- name: Install Nextflow
47+
run: |
48+
wget -qO- get.nextflow.io | bash
49+
sudo mv nextflow /usr/local/bin/
50+
2651
- name: Run pipeline with test data
2752
run: |
2853
nextflow run ${GITHUB_WORKSPACE} -profile test,docker
2954
3055
parameters:
56+
name: Test workflow parameters
57+
if: ${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/chipseq') }}
58+
runs-on: ubuntu-latest
3159
env:
3260
NXF_VER: '19.10.0'
3361
NXF_ANSI_LOG: false
34-
runs-on: ubuntu-latest
3562
strategy:
3663
matrix:
3764
parameters: [--single_end, --skip_trimming, --skip_consensus_peaks]
3865
steps:
39-
- uses: actions/checkout@v2
40-
- name: Install Nextflow
41-
run: |
42-
wget -qO- get.nextflow.io | bash
43-
sudo mv nextflow /usr/local/bin/
66+
- name: Check out pipeline code
67+
uses: actions/checkout@v2
68+
69+
- name: Check if Dockerfile or Conda environment changed
70+
uses: technote-space/get-diff-action@v1
71+
with:
72+
PREFIX_FILTER: |
73+
Dockerfile
74+
environment.yml
75+
76+
- name: Build new docker image
77+
if: env.GIT_DIFF
78+
run: docker build --no-cache . -t nfcore/chipseq:dev
79+
4480
- name: Pull docker image
81+
if: ${{ !env.GIT_DIFF }}
4582
run: |
4683
docker pull nfcore/chipseq:dev
4784
docker tag nfcore/chipseq:dev nfcore/chipseq:dev
48-
- name: Run pipeline with various options
85+
86+
- name: Install Nextflow
87+
run: |
88+
wget -qO- get.nextflow.io | bash
89+
sudo mv nextflow /usr/local/bin/
90+
91+
- name: Run pipeline with various parameters
4992
run: |
5093
nextflow run ${GITHUB_WORKSPACE} -profile test,docker ${{ matrix.parameters }}
94+
95+
push_dockerhub:
96+
name: Push new Docker image to Docker Hub
97+
runs-on: ubuntu-latest
98+
# Only run if the tests passed
99+
needs: test
100+
# Only run for the nf-core repo, for releases and merged PRs
101+
if: ${{ github.repository == 'nf-core/chipseq' && (github.event_name == 'release' || github.event_name == 'push') }}
102+
env:
103+
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
104+
DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }}
105+
steps:
106+
- name: Check out pipeline code
107+
uses: actions/checkout@v2
108+
109+
- name: Build new docker image
110+
run: docker build --no-cache . -t nfcore/chipseq:latest
111+
112+
- name: Push Docker image to DockerHub (dev)
113+
if: ${{ github.event_name == 'push' }}
114+
run: |
115+
echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin
116+
docker tag nfcore/chipseq:latest nfcore/chipseq:dev
117+
docker push nfcore/chipseq:dev
118+
119+
- name: Push Docker image to DockerHub (release)
120+
if: ${{ github.event_name == 'release' }}
121+
run: |
122+
echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin
123+
docker push nfcore/chipseq:latest
124+
docker tag nfcore/chipseq:latest nfcore/chipseq:${{ github.event.release.tag_name }}
125+
docker push nfcore/chipseq:${{ github.event.release.tag_name }}

.github/workflows/linting.yml

File mode changed: 100644 → 100755
Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,18 +33,29 @@ jobs:
3333
nf-core:
3434
runs-on: ubuntu-latest
3535
steps:
36-
- uses: actions/checkout@v2
36+
37+
- name: Check out pipeline code
38+
uses: actions/checkout@v2
39+
3740
- name: Install Nextflow
3841
run: |
3942
wget -qO- get.nextflow.io | bash
4043
sudo mv nextflow /usr/local/bin/
44+
4145
- uses: actions/setup-python@v1
4246
with:
4347
python-version: '3.6'
4448
architecture: 'x64'
49+
4550
- name: Install dependencies
4651
run: |
4752
python -m pip install --upgrade pip
4853
pip install nf-core
54+
4955
- name: Run nf-core lint
56+
env:
57+
GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }}
58+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
59+
GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }}
5060
run: nf-core lint ${GITHUB_WORKSPACE}
61+

CHANGELOG.md

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,11 +19,15 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
1919
* Replace `set` with `tuple` and `file()` with `path()` in all processes
2020
* Capitalise process names
2121
* Parameters:
22-
* `--skip_peak_qc` to skip MACS2 peak QC plot generation
23-
* `--skip_peak_annotation` to skip annotation of MACS2 and consensus peaks with HOMER
24-
* `--skip_consensus_peaks` to skip consensus peak generation
25-
* `--deseq2_vst` to use variance stabilizing transformation (VST) instead of regularized log transformation (rlog) with DESeq2
26-
* `--publish_dir_mode` to customise method of publishing results to output directory [nf-core/tools#585](https://github.com/nf-core/tools/issues/585)
22+
* `--skip_peak_qc` to skip MACS2 peak QC plot generation
23+
* `--skip_peak_annotation` to skip annotation of MACS2 and consensus peaks with HOMER
24+
* `--skip_consensus_peaks` to skip consensus peak generation
25+
* `--deseq2_vst` to use variance stabilizing transformation (VST) instead of regularized log transformation (rlog) with DESeq2
26+
* `--publish_dir_mode` to customise method of publishing results to output directory [nf-core/tools#585](https://github.com/nf-core/tools/issues/585)
27+
28+
### `Removed`
29+
30+
* `--tss_bed`
2731

2832
### `Fixed`
2933

README.md

Lines changed: 17 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -3,10 +3,11 @@
33
[![GitHub Actions CI Status](https://github.com/nf-core/chipseq/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/chipseq/actions)
44
[![GitHub Actions Linting Status](https://github.com/nf-core/chipseq/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/chipseq/actions)
55
[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A519.10.0-brightgreen.svg)](https://www.nextflow.io/)
6+
[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3240506.svg)](https://doi.org/10.5281/zenodo.3240506)
67

78
[![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](http://bioconda.github.io/)
89
[![Docker](https://img.shields.io/docker/automated/nfcore/chipseq.svg)](https://hub.docker.com/r/nfcore/chipseq/)
9-
[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3240506.svg)](https://doi.org/10.5281/zenodo.3240506)
10+
[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23chipseq-4A154B?logo=slack)](https://nfcore.slack.com/channels/chipseq)
1011

1112
## Introduction
1213

@@ -48,23 +49,23 @@ The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool
4849

4950
## Quick Start
5051

51-
i. Install [`nextflow`](https://nf-co.re/usage/installation)
52+
1. Install [`nextflow`](https://nf-co.re/usage/installation)
5253

53-
ii. Install either [`Docker`](https://docs.docker.com/engine/installation/) or [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/) for full pipeline reproducibility (please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))
54+
2. Install either [`Docker`](https://docs.docker.com/engine/installation/) or [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_
5455

55-
iii. Download the pipeline and test it on a minimal dataset with a single command
56+
3. Download the pipeline and test it on a minimal dataset with a single command:
5657

57-
```bash
58-
nextflow run nf-core/chipseq -profile test,<docker/singularity/conda/institute>
59-
```
58+
```bash
59+
nextflow run nf-core/chipseq -profile test,<docker/singularity/conda/institute>
60+
```
6061

61-
> Please check [nf-core/configs](https://github.com/nf-core/configs#documentation) to see if a custom config file to run nf-core pipelines already exists for your Institute. If so, you can simply use `-profile <institute>` in your command. This will enable either `docker` or `singularity` and set the appropriate execution settings for your local compute environment.
62+
> Please check [nf-core/configs](https://github.com/nf-core/configs#documentation) to see if a custom config file to run nf-core pipelines already exists for your Institute. If so, you can simply use `-profile <institute>` in your command. This will enable either `docker` or `singularity` and set the appropriate execution settings for your local compute environment.
6263

63-
iv. Start running your own analysis!
64+
4. Start running your own analysis!
6465

65-
```bash
66-
nextflow run nf-core/chipseq -profile <docker/singularity/conda/institute> --input design.csv --genome GRCh37
67-
```
66+
```bash
67+
nextflow run nf-core/chipseq -profile <docker/singularity/conda/institute> --input design.csv --genome GRCh37
68+
```
6869

6970
See [usage docs](docs/usage.md) for all of the available options when running the pipeline.
7071

@@ -83,27 +84,27 @@ The nf-core/chipseq pipeline comes with documentation about the pipeline, found
8384

8485
## Credits
8586

86-
These scripts were originally written by Chuan Wang ([@chuan-wang](https://github.com/chuan-wang)) and Phil Ewels ([@ewels](https://github.com/ewels)) for use at the [National Genomics Infrastructure](https://portal.scilifelab.se/genomics/) at [SciLifeLab](http://www.scilifelab.se/) in Stockholm, Sweden. It has since been re-implemented by Harshil Patel ([@drpatelh](https://github.com/drpatelh)) from [The Bioinformatics & Biostatistics Group](https://www.crick.ac.uk/research/science-technology-platforms/bioinformatics-and-biostatistics/) at [The Francis Crick Institute](https://www.crick.ac.uk/), London.
87+
These scripts were originally written by Chuan Wang ([@chuan-wang](https://github.com/chuan-wang)) and Phil Ewels ([@ewels](https://github.com/ewels)) for use at the [National Genomics Infrastructure](https://portal.scilifelab.se/genomics/) at [SciLifeLab](http://www.scilifelab.se/) in Stockholm, Sweden. The pipeline has since been re-implemented by Harshil Patel ([@drpatelh](https://github.com/drpatelh)) from [The Bioinformatics & Biostatistics Group](https://www.crick.ac.uk/research/science-technology-platforms/bioinformatics-and-biostatistics/) at [The Francis Crick Institute](https://www.crick.ac.uk/), London.
8788

8889
Many thanks to others who have helped out and contributed along the way too, including (but not limited to): [@apeltzer](https://github.com/apeltzer), [@bc2zb](https://github.com/bc2zb), [@crickbabs](https://github.com/crickbabs), [@drejom](https://github.com/drejom), [@houghtos](https://github.com/houghtos), [@KevinMenden](https://github.com/KevinMenden), [@mashehu](https://github.com/mashehu), [@pditommaso](https://github.com/pditommaso), [@Rotholandus](https://github.com/Rotholandus), [@sofiahaglund](https://github.com/sofiahaglund), [@tiagochst](https://github.com/tiagochst) and [@winni2k](https://github.com/winni2k).
8990

9091
## Contributions and Support
9192

9293
If you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md).
9394

94-
For further information or help, don't hesitate to get in touch on [Slack](https://nfcore.slack.com/channels/chipseq) (you can join with [this invite](https://nf-co.re/join/slack)).
95+
For further information or help, don't hesitate to get in touch on the [Slack `#chipseq` channel](https://nfcore.slack.com/channels/chipseq) (you can join with [this invite](https://nf-co.re/join/slack)).
9596
9697
## Citation
9798
9899
If you use nf-core/chipseq for your analysis, please cite it using the following doi: [10.5281/zenodo.3240506](https://doi.org/10.5281/zenodo.3240506)
99100
100101
You can cite the `nf-core` publication as follows:
101102
103+
An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file.
104+
102105
> **The nf-core framework for community-curated bioinformatics pipelines.**
103106
>
104107
> Philip Ewels, Alexander Peltzer, Sven Fillinger, Harshil Patel, Johannes Alneberg, Andreas Wilm, Maxime Ulysse Garcia, Paolo Di Tommaso & Sven Nahnsen.
105108
>
106109
> _Nat Biotechnol._ 2020 Feb 13. doi: [10.1038/s41587-020-0439-x](https://dx.doi.org/10.1038/s41587-020-0439-x).
107110
> ReadCube: [Full Access Link](https://rdcu.be/b1GjZ)
108-
109-
An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file.

0 commit comments

Comments
 (0)