Skip to content

Commit

Permalink
Merge pull request #291 from nf-core/91-implement-tests-with-nf-test
Browse files Browse the repository at this point in the history
Adding nf-tests for pipeline. Currently, only the main aligners: alevin, kallisto, star and cellranger.
  • Loading branch information
fmalmeida authored Feb 14, 2024
2 parents e6201e1 + 379f8ad commit 7c8989f
Show file tree
Hide file tree
Showing 13 changed files with 518 additions and 16 deletions.
59 changes: 43 additions & 16 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -1,46 +1,48 @@
name: nf-core CI
# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
name: nf-core CI
on:
push:
branches:
- dev
pull_request:
release:
types: [published]
merge_group:
types:
- checks_requested
branches:
- master
- dev

env:
NXF_ANSI_LOG: false
NFTEST_VER: "0.8.1"

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}"
cancel-in-progress: true

jobs:
test:
name: Run pipeline with test data
# Only run on push if this is the nf-core dev branch (merged PRs)
name: "aligner: ${{ matrix.profile }} ; NF: ${{ matrix.NXF_VER }}"
if: "${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/scrnaseq') }}"
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
NXF_VER:
- "23.04.0"
- "latest-everything"
profile: [
"test,docker --aligner alevin",
"test,docker --aligner kallisto",
"test,docker --aligner star",
"test,docker --aligner cellranger",
# "test,docker --aligner cellrangerarc", // this currently lacks a suitable test profile, see issue https://github.com/nf-core/scrnaseq/issues/290
# "test,docker --aligner universc", // this is broken, see issue https://github.com/nf-core/scrnaseq/issues/289
]
profile: ["alevin", "cellranger", "kallisto", "star"]

steps:
- name: Free some space
run: |
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Check out pipeline code
uses: actions/checkout@v4

Expand All @@ -49,8 +51,33 @@ jobs:
with:
version: "${{ matrix.NXF_VER }}"

- name: Run pipeline with test data
# For example: adding multiple test runs with different parameters
# Remember that you can parallelise this by using strategy.matrix
- name: Cache nf-test installation
id: cache-software
uses: actions/cache@v3
with:
path: |
/usr/local/bin/nf-test
/home/runner/.nf-test/nf-test.jar
key: ${{ runner.os }}-${{ env.NFTEST_VER }}-nftest

- name: Install nf-test
if: steps.cache-software.outputs.cache-hit != 'true'
run: |
wget -qO- https://code.askimed.com/install/nf-test | bash
sudo mv nf-test /usr/local/bin/
- name: Run nf-test
run: |
nextflow run ${GITHUB_WORKSPACE} -profile ${{ matrix.profile }} --outdir ./results
nf-test test tests/main_pipeline_${{ matrix.profile }}.test --junitxml=test.xml
- name: Output log on failure
if: failure()
run: |
sudo apt install bat > /dev/null
batcat --decorations=always --color=always ${{ github.workspace }}/.nf-test/tests/*/meta/nextflow.log
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
if: always() # always run even if the previous step fails
with:
report_paths: test.xml
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,4 @@ testing*
log/
reports/
testme.sh
.nf-test/
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## v2.6.0dev - [date]

- [[#91](https://github.com/nf-core/scrnaseq/issues/91)] - Change from pytests to nf-test

## v2.5.1

- Template update to v2.12 ([#298](https://github.com/nf-core/scrnaseq/pull/298)).
- Fix that cellranger workflow couldn't be run and enable CI for this workflow ([#288](https://github.com/nf-core/scrnaseq/pull/288)).
- Update modules ([#288](https://github.com/nf-core/scrnaseq/pull/288)).
Expand Down
8 changes: 8 additions & 0 deletions nf-test.config
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
// Global nf-test configuration for this pipeline.
// See https://www.nf-test.com/docs/configuration/ for available options.
config {

    // Directory (relative to the project root) that is scanned for *.test files.
    testsDir "tests"
    // Working directory where nf-test keeps per-run artifacts (git-ignored, see .gitignore).
    workDir ".nf-test"
    // Shared Nextflow configuration applied to every test run.
    configFile "tests/nextflow.config"
    // Default profile used when a test does not specify one; tests run in Docker containers.
    profile "docker"

}
67 changes: 67 additions & 0 deletions tests/main_pipeline_alevin.test
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
// Pipeline-level nf-test: runs the full workflow with the 'alevin' aligner
// against the shared test profile (tests/nextflow.config) and snapshots the
// key alevin quantification outputs for Sample_X and Sample_Y.
nextflow_pipeline {

    name "Test Workflow main.nf"
    script "main.nf"

    test("test-dataset_alevin_aligner") {

        when {
            // the rest is taken from shared config (tests/nextflow.config)
            params {
                aligner = 'alevin'
                outdir = "${outputDir}/results_alevin"

                // Limit resources so that this can run on GitHub Actions -- for some reason it had not been taken from shared config
                max_cpus = 2
                max_memory = '6.GB'
                max_time = '6.h'
            }
        }

        then {

            assertAll(

                //
                // General assertions
                //

                // Did it finish successfully?
                {assert workflow.success},

                // How many tasks were executed?
                // NOTE(review): this count is tied to the current test dataset and
                // workflow topology; it must be updated whenever either changes.
                {assert workflow.trace.tasks().size() == 16},

                // How many results were produced?
                {assert path("${outputDir}/results_alevin").list().size() == 5},
                {assert path("${outputDir}/results_alevin/alevin").list().size() == 4},
                {assert path("${outputDir}/results_alevin/alevin/mtx_conversions").list().size() == 4},
                {assert path("${outputDir}/results_alevin/alevinqc").list().size() == 2},
                {assert path("${outputDir}/results_alevin/fastqc").list().size() == 12},
                {assert path("${outputDir}/results_alevin/multiqc").list().size() == 3},

                //
                // Check if files were produced
                //
                {assert new File( "${outputDir}/results_alevin/alevin/mtx_conversions/Sample_X/Sample_X_matrix.h5ad" ).exists()},
                {assert new File( "${outputDir}/results_alevin/alevin/mtx_conversions/Sample_Y/Sample_Y_matrix.h5ad" ).exists()},

                //
                // Check if files are the same (compared against main_pipeline_alevin.test.snap)
                //
                {assert snapshot(
                    workflow,
                    path( "${outputDir}/results_alevin/alevin/Sample_X_alevin_results/af_quant/alevin/quants_mat_cols.txt" ),
                    path( "${outputDir}/results_alevin/alevin/Sample_X_alevin_results/af_quant/alevin/quants_mat.mtx" ),
                    path( "${outputDir}/results_alevin/alevin/Sample_X_alevin_results/af_quant/alevin/quants_mat_rows.txt" ),
                    path( "${outputDir}/results_alevin/alevin/Sample_Y_alevin_results/af_quant/alevin/quants_mat_cols.txt" ),
                    path( "${outputDir}/results_alevin/alevin/Sample_Y_alevin_results/af_quant/alevin/quants_mat.mtx" ),
                    path( "${outputDir}/results_alevin/alevin/Sample_Y_alevin_results/af_quant/alevin/quants_mat_rows.txt" )
                ).match()}

            ) // end of assertAll()

        }
    }

}
32 changes: 32 additions & 0 deletions tests/main_pipeline_alevin.test.snap
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
{
"test-dataset_alevin_aligner": {
"content": [
{
"stderr": [

],
"errorReport": "",
"exitStatus": 0,
"failed": false,
"stdout": [

],
"errorMessage": "",
"trace": {
"tasksFailed": 0,
"tasksCount": 16,
"tasksSucceeded": 16
},
"name": "workflow",
"success": true
},
"quants_mat_cols.txt:md5,e9868982c17a330392e38c2a5933cf97",
"quants_mat.mtx:md5,b8aa7b3c488fd8923de50a3621d4991f",
"quants_mat_rows.txt:md5,6227df5a13127b71c71fb18cd8574857",
"quants_mat_cols.txt:md5,e9868982c17a330392e38c2a5933cf97",
"quants_mat.mtx:md5,54cd12666016adce94c025b2e07f4b02",
"quants_mat_rows.txt:md5,6b458a7777260ba90eccbe7919df934b"
],
"timestamp": "2024-01-19T10:28:35.652763852"
}
}
75 changes: 75 additions & 0 deletions tests/main_pipeline_cellranger.test
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
// Pipeline-level nf-test: runs the full workflow with the 'cellranger' aligner
// against the shared test profile (tests/nextflow.config) and snapshots the
// filtered/raw feature-barcode matrices plus the RDS conversions.
nextflow_pipeline {

    name "Test Workflow main.nf"
    script "main.nf"

    test("test-dataset_cellranger_aligner") {

        when {
            // the rest is taken from shared config (tests/nextflow.config)
            params {
                aligner = 'cellranger'
                outdir = "${outputDir}/results_cellranger"

                // Limit resources so that this can run on GitHub Actions -- for some reason it had not been taken from shared config
                max_cpus = 2
                max_memory = '6.GB'
                max_time = '6.h'
            }
        }

        then {

            assertAll(

                //
                // General assertions
                //

                // Did it finish successfully?
                {assert workflow.success},

                // How many tasks were executed?
                // NOTE(review): this count is tied to the current test dataset and
                // workflow topology; it must be updated whenever either changes.
                {assert workflow.trace.tasks().size() == 15},

                // How many results were produced?
                {assert path("${outputDir}/results_cellranger").list().size() == 4},
                {assert path("${outputDir}/results_cellranger/cellranger").list().size() == 4},
                {assert path("${outputDir}/results_cellranger/cellranger/mtx_conversions").list().size() == 4},
                {assert path("${outputDir}/results_cellranger/cellranger/count").list().size() == 3},
                {assert path("${outputDir}/results_cellranger/fastqc").list().size() == 12},
                {assert path("${outputDir}/results_cellranger/multiqc").list().size() == 3},

                //
                // Check if files were produced
                //
                {assert new File( "${outputDir}/results_cellranger/cellranger/mtx_conversions/Sample_X/Sample_X_matrix.h5ad" ).exists()},
                {assert new File( "${outputDir}/results_cellranger/cellranger/mtx_conversions/Sample_Y/Sample_Y_matrix.h5ad" ).exists()},

                //
                // Check if files are the same (compared against main_pipeline_cellranger.test.snap)
                //
                {assert snapshot(
                    workflow,
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_X/outs/filtered_feature_bc_matrix/barcodes.tsv.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_X/outs/filtered_feature_bc_matrix/features.tsv.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_X/outs/filtered_feature_bc_matrix/matrix.mtx.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_Y/outs/filtered_feature_bc_matrix/barcodes.tsv.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_Y/outs/filtered_feature_bc_matrix/features.tsv.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_Y/outs/filtered_feature_bc_matrix/matrix.mtx.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_X/outs/raw_feature_bc_matrix/barcodes.tsv.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_X/outs/raw_feature_bc_matrix/features.tsv.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_X/outs/raw_feature_bc_matrix/matrix.mtx.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_Y/outs/raw_feature_bc_matrix/barcodes.tsv.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_Y/outs/raw_feature_bc_matrix/features.tsv.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/count/Sample_Y/outs/raw_feature_bc_matrix/matrix.mtx.gz" ),
                    path( "${outputDir}/results_cellranger/cellranger/mtx_conversions/Sample_X/Sample_X_matrix.rds" ),
                    path( "${outputDir}/results_cellranger/cellranger/mtx_conversions/Sample_Y/Sample_Y_matrix.rds" )
                ).match()}

            ) // end of assertAll()

        }
    }

}
40 changes: 40 additions & 0 deletions tests/main_pipeline_cellranger.test.snap
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
{
"test-dataset_cellranger_aligner": {
"content": [
{
"stderr": [

],
"errorReport": "",
"exitStatus": 0,
"failed": false,
"stdout": [

],
"errorMessage": "",
"trace": {
"tasksFailed": 0,
"tasksCount": 15,
"tasksSucceeded": 15
},
"name": "workflow",
"success": true
},
"barcodes.tsv.gz:md5,fe6e51564b4405b37ca8604a844b1f2e",
"features.tsv.gz:md5,99e453cb1443a3e43e99405184e51a5e",
"matrix.mtx.gz:md5,79471f700ec5bab852a960f0d1537705",
"barcodes.tsv.gz:md5,77afe9a76631fc7b44236d3962a55aa5",
"features.tsv.gz:md5,99e453cb1443a3e43e99405184e51a5e",
"matrix.mtx.gz:md5,e224aa759250fa5730dc069bce9be253",
"barcodes.tsv.gz:md5,85da6b6e0c78dfe81af8c07c2017ab5e",
"features.tsv.gz:md5,99e453cb1443a3e43e99405184e51a5e",
"matrix.mtx.gz:md5,1bc71041a425bfcee38472bfec5997f8",
"barcodes.tsv.gz:md5,081f72b5252ccaf5ffd535ffbd235c4c",
"features.tsv.gz:md5,99e453cb1443a3e43e99405184e51a5e",
"matrix.mtx.gz:md5,a4db04e43e650accc96361a287126a6b",
"Sample_X_matrix.rds:md5,f9191ba575a3ab79ada4807715f18573",
"Sample_Y_matrix.rds:md5,7be3f7b29d668dcf7e951b9f4d371a5e"
],
"timestamp": "2024-01-22T15:19:20.134275449"
}
}
Loading

0 comments on commit 7c8989f

Please sign in to comment.