Skip to content
Closed

Dev #150

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
233 changes: 125 additions & 108 deletions .github/workflows/build_deploy_container.yaml
Original file line number Diff line number Diff line change
@@ -1,108 +1,125 @@
# Workflow: build the Docker image, convert it to a signed Singularity
# (Apptainer) SIF, and push both to Docker Hub and the Sylabs library.
# Runs on release creation, or manually with an optional tag input.
name: Build and Push Docker and Singularity Images

on:
  release:
    types: [created]
  workflow_dispatch:
    inputs:
      tag:
        description: 'Tag for the release'
        required: false
        default: ''

jobs:
  build-and-deploy:
    runs-on: ubuntu-latest

    env:
      DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }}
      DOCKER_HUB_PASSWORD: ${{ secrets.DOCKER_HUB_PASSWORD }}
      SINGULARITY_LIBRARY_TOKEN: ${{ secrets.SINGULARITY_LIBRARY_TOKEN }}
      SIGNING_PRIVATE_KEY_BASE64: ${{ secrets.SIGNING_PRIVATE_KEY_BASE64 }}
      SIGNING_PUBLIC_KEY_BASE64: ${{ secrets.SIGNING_PUBLIC_KEY_BASE64 }}
      SIGNING_KEY_PASSWORD: ${{ secrets.SIGNING_KEY_PASSWORD }}
      DOCKER_IMAGE_NAME: opaino/genopred_pipeline
      SINGULARITY_IMAGE_NAME: library://opain/genopred/genopred_pipeline
      IMAGE_TAG: ${{ github.ref_name }}
      DOCKERFILE_PATH: pipeline/misc/docker/Dockerfile

    steps:
      - name: Checkout code
        # v2 runs on the retired node12 runtime; v4 is the supported release.
        uses: actions/checkout@v4

      # The former "Export Secrets to Environment" step was removed: it only
      # re-set SINGULARITY_LIBRARY_TOKEN, which is already in the job env above.

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Hub
        run: echo "${{ secrets.DOCKER_HUB_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_HUB_USERNAME }}" --password-stdin

      - name: Set Tag
        id: set_tag
        # ::set-output is disabled by GitHub Actions; write to $GITHUB_OUTPUT.
        # Release events use the release tag; manual runs fall back to the
        # supplied input or, failing that, the commit SHA.
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            echo "tag=${{ github.event.release.tag_name }}" >> "$GITHUB_OUTPUT"
          else
            echo "tag=${{ github.event.inputs.tag || github.sha }}" >> "$GITHUB_OUTPUT"
          fi

      - name: Build and push Docker image
        # Renamed: this step duplicated the name "Set up Docker Buildx".
        run: |
          docker buildx build --platform linux/amd64 -f $DOCKERFILE_PATH -t $DOCKER_IMAGE_NAME:latest -t $DOCKER_IMAGE_NAME:${{ steps.set_tag.outputs.tag }} --push .

      - name: Setup Apptainer
        uses: eWaterCycle/setup-apptainer@v2.0.0

      - name: Convert Docker Image to Singularity SIF
        run: |
          apptainer pull image.sif docker://${DOCKER_IMAGE_NAME}:latest

      - name: Install expect
        run: |
          sudo apt-get update
          sudo apt-get install -y expect

      - name: Decode and Import Signing Keys
        # The passphrase is read inside expect via Tcl's $env(...) instead of
        # interpolating the secret into the script text: interpolation breaks
        # if the secret contains quotes and exposes it in process arguments.
        run: |
          echo "${{ secrets.SIGNING_PRIVATE_KEY_BASE64 }}" | base64 --decode > private.key
          echo "${{ secrets.SIGNING_PUBLIC_KEY_BASE64 }}" | base64 --decode > public.key
          expect -c '
            spawn apptainer key import private.key
            expect "Enter your key passphrase : "
            send -- "$env(SIGNING_KEY_PASSWORD)\r"
            expect eof
          '
          expect -c '
            spawn apptainer key import public.key
            expect "Enter your key passphrase : "
            send -- "$env(SIGNING_KEY_PASSWORD)\r"
            expect eof
          '

      - name: Sign the Singularity Image
        run: |
          expect -c '
            spawn apptainer sign image.sif
            expect "Enter your key passphrase : "
            send -- "$env(SIGNING_KEY_PASSWORD)\r"
            expect eof
          '

      - name: Create Token File
        run: echo "${{ secrets.SINGULARITY_LIBRARY_TOKEN }}" > tokenfile

      - name: Login to Singularity Library
        # NOTE(review): endpoint corrected from cloud.sycloud.io — the Sylabs
        # container library lives at cloud.sylabs.io.
        run: |
          apptainer remote add --no-login SylabsCloud cloud.sylabs.io
          apptainer remote use SylabsCloud
          apptainer remote login --tokenfile tokenfile

      - name: Push Singularity Image to Singularity Library
        run: |
          apptainer push image.sif $SINGULARITY_IMAGE_NAME:latest

      # Open an interactive debug session only when an earlier step failed.
      - name: Setup tmate session
        if: ${{ failure() }}
        uses: mxschmitt/action-tmate@v3
        timeout-minutes: 60
# Workflow: convert the (pre-built) Docker image to a signed Singularity
# (Apptainer) SIF and push it to the Sylabs library. The Docker build step
# itself is currently disabled (commented out below), so the pull reuses the
# image already published as :latest on Docker Hub.
name: Build and Push Docker and Singularity Images

on:
  release:
    types: [created]
  workflow_dispatch:
    inputs:
      tag:
        description: 'Tag for the release'
        required: false
        default: ''

jobs:
  build-and-deploy:
    runs-on: ubuntu-latest

    env:
      DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }}
      DOCKER_HUB_PASSWORD: ${{ secrets.DOCKER_HUB_PASSWORD }}
      SINGULARITY_LIBRARY_TOKEN: ${{ secrets.SINGULARITY_LIBRARY_TOKEN }}
      SIGNING_PRIVATE_KEY_BASE64: ${{ secrets.SIGNING_PRIVATE_KEY_BASE64 }}
      SIGNING_PUBLIC_KEY_BASE64: ${{ secrets.SIGNING_PUBLIC_KEY_BASE64 }}
      SIGNING_KEY_PASSWORD: ${{ secrets.SIGNING_KEY_PASSWORD }}
      DOCKER_IMAGE_NAME: opaino/genopred_pipeline
      SINGULARITY_IMAGE_NAME: library://opain/genopred/genopred_pipeline
      IMAGE_TAG: ${{ github.ref_name }}
      DOCKERFILE_PATH: pipeline/misc/docker/Dockerfile

    steps:
      - name: Checkout code
        # v2 runs on the retired node12 runtime; v4 is the supported release.
        uses: actions/checkout@v4

      # The former "Export Secrets to Environment" step was removed: it only
      # re-set SINGULARITY_LIBRARY_TOKEN, which is already in the job env above.

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Hub
        run: echo "${{ secrets.DOCKER_HUB_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_HUB_USERNAME }}" --password-stdin

      - name: Set Tag
        id: set_tag
        # ::set-output is disabled by GitHub Actions; write to $GITHUB_OUTPUT.
        # Release events use the release tag; manual runs fall back to the
        # supplied input or, failing that, the commit SHA.
        run: |
          if [ "${{ github.event_name }}" == "release" ]; then
            echo "tag=${{ github.event.release.tag_name }}" >> "$GITHUB_OUTPUT"
          else
            echo "tag=${{ github.event.inputs.tag || github.sha }}" >> "$GITHUB_OUTPUT"
          fi

      # Docker build intentionally disabled in this revision; the SIF is
      # converted from the image already pushed to Docker Hub.
      # - name: Build docker container
      #   run: |
      #     docker buildx build --platform linux/amd64 -f $DOCKERFILE_PATH -t $DOCKER_IMAGE_NAME:latest -t $DOCKER_IMAGE_NAME:${{ steps.set_tag.outputs.tag }} --push .

      - name: Clean up Docker build artifacts
        # Reclaim runner disk space before the large SIF conversion.
        run: |
          docker system prune -af

      - name: Setup Apptainer
        uses: eWaterCycle/setup-apptainer@v2.0.0

      - name: Configure Singularity temp directories
        # Point Apptainer at the runner's large temp volume so the SIF build
        # does not exhaust the default (smaller) filesystem.
        run: |
          mkdir -p $RUNNER_TEMP/singularity
          echo "APPTAINER_TMPDIR=$RUNNER_TEMP/singularity" >> $GITHUB_ENV
          mkdir -p $RUNNER_TEMP/singularity/cache
          echo "APPTAINER_CACHEDIR=$RUNNER_TEMP/singularity/cache" >> $GITHUB_ENV

      - name: Check free space
        run: |
          df -h

      - name: Convert Docker Image to Singularity SIF
        run: |
          apptainer pull --disable-cache --tmpdir $RUNNER_TEMP/singularity image.sif docker://${DOCKER_IMAGE_NAME}:latest

      - name: Install expect
        run: |
          sudo apt-get update
          sudo apt-get install -y expect

      - name: Decode and Import Signing Keys
        # The passphrase is read inside expect via Tcl's $env(...) instead of
        # interpolating the secret into the script text: interpolation breaks
        # if the secret contains quotes and exposes it in process arguments.
        run: |
          echo "${{ secrets.SIGNING_PRIVATE_KEY_BASE64 }}" | base64 --decode > private.key
          echo "${{ secrets.SIGNING_PUBLIC_KEY_BASE64 }}" | base64 --decode > public.key
          expect -c '
            spawn apptainer key import private.key
            expect "Enter your key passphrase : "
            send -- "$env(SIGNING_KEY_PASSWORD)\r"
            expect eof
          '
          expect -c '
            spawn apptainer key import public.key
            expect "Enter your key passphrase : "
            send -- "$env(SIGNING_KEY_PASSWORD)\r"
            expect eof
          '

      - name: Sign the Singularity Image
        run: |
          expect -c '
            spawn apptainer sign image.sif
            expect "Enter your key passphrase : "
            send -- "$env(SIGNING_KEY_PASSWORD)\r"
            expect eof
          '

      - name: Create Token File
        run: echo "${{ secrets.SINGULARITY_LIBRARY_TOKEN }}" > tokenfile

      - name: Login to Singularity Library
        # NOTE(review): endpoint corrected from cloud.sycloud.io — the Sylabs
        # container library lives at cloud.sylabs.io.
        run: |
          apptainer remote add --no-login SylabsCloud cloud.sylabs.io
          apptainer remote use SylabsCloud
          apptainer remote login --tokenfile tokenfile

      - name: Push Singularity Image to Singularity Library
        run: |
          apptainer push image.sif $SINGULARITY_IMAGE_NAME:latest

      # Open an interactive debug session only when an earlier step failed.
      - name: Setup tmate session
        if: ${{ failure() }}
        uses: mxschmitt/action-tmate@v3
        timeout-minutes: 60
37 changes: 7 additions & 30 deletions Scripts/external_score_processor/external_score_processor.R
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,6 @@ option_list = list(
help="Path to per chromosome reference PLINK files [required]"),
make_option("--ref_pcs", action="store", default=NULL, type='character',
help="Reference PCs for continuous ancestry correction [optional]"),
make_option("--pop_data", action="store", default=NULL, type='character',
help="File containing the population code and location of the keep file [required]"),
make_option("--plink2", action="store", default='plink2', type='character',
help="Path PLINKv2 software binary [optional]"),
make_option("--output", action="store", default=NULL, type='character',
Expand All @@ -32,9 +30,7 @@ source_all('../functions')
if(is.null(opt$ref_plink_chr)){
stop('--ref_plink_chr must be specified.\n')
}
if(is.null(opt$pop_data)){
stop('--pop_data must be specified.\n')
}

if(is.null(opt$output)){
stop('--output must be specified.\n')
}
Expand Down Expand Up @@ -143,7 +139,7 @@ if(chr_bp_avail){
}

if(!is.na(target_build)){
for(i in chrs){
for(i in unique(score$CHR)){
# Read reference data
ref_i<-readRDS(file = paste0(ref_rds,i,'.rds'))

Expand Down Expand Up @@ -174,7 +170,7 @@ if(chr_bp_avail){
matched<-rbind(matched, flipped)

# Retain reference SNP and REF.FREQ data
matched<-matched[, names(matched) %in% c('CHR','BP','A1','A2','effect_weight'), with=F]
matched<-matched[, names(matched) %in% c('CHR','BP','REF.SNP', 'A1','A2','effect_weight'), with=F]
names(matched)[names(matched) == 'REF.SNP']<-'SNP'

targ_matched<-rbind(targ_matched, matched)
Expand Down Expand Up @@ -242,6 +238,10 @@ if(nrow(targ_matched) < 0.75*n_snp_orig){
# Format as score file for GenoPred
####

score <- targ_matched

print(head(targ_matched))

score$CHR <- NULL
score$BP <- NULL
score <- score[, c('SNP','A1','A2','effect_weight'), with = F]
Expand All @@ -258,29 +258,6 @@ if(nrow(targ_matched) < 0.75*n_snp_orig){
}

system(paste0('gzip ',opt$output,'.score'))

####
# Calculate mean and sd of polygenic scores
####

log_add(log_file = log_file, message = 'Calculating polygenic scores in reference.')

# Calculate scores in the full reference
ref_pgs <- plink_score(pfile = opt$ref_plink_chr, chr = CHROMS, plink2 = opt$plink2, score = paste0(opt$output,'.score.gz'))

if(!is.null(opt$ref_pcs)){
log_add(log_file = log_file, message = 'Deriving trans-ancestry PGS models...')
# Derive trans-ancestry PGS models and estimate PGS residual scale
model_trans_pgs(scores=ref_pgs, pcs=opt$ref_pcs, output=opt$output)
}

# Calculate scale within each reference population
pop_data <- read_pop_data(opt$pop_data)

for(pop_i in unique(pop_data$POP)){
ref_pgs_scale_i <- score_mean_sd(scores = ref_pgs, keep = pop_data[pop_data$POP == pop_i, c('FID','IID'), with=F])
fwrite(ref_pgs_scale_i, paste0(opt$output, '-', pop_i, '.scale'), row.names = F, quote=F, sep=' ', na='NA')
}
}

end.time <- Sys.time()
Expand Down
Loading
Loading