From a643677632bc688bc5e7599d957778a3c24a30cd Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 16:59:44 -0500 Subject: [PATCH 01/16] Create bootstrap-xcp-qc-testing.sh Copy-pasted from aslprep script, will work on editing for xcp --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 241 ++++++++++++++++++++++ 1 file changed, 241 insertions(+) create mode 100644 scripts/cubic/bootstrap-xcp-qc-testing.sh diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh new file mode 100644 index 0000000..8597bac --- /dev/null +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -0,0 +1,241 @@ +## NOTE ## +# THIS SCRIPT IS STILL BEING TESTED AND NOT READY FOR USE +# This workflow is derived from the Datalad Handbook +## Ensure the environment is ready to bootstrap the analysis workspace +# Check that we have conda installed + +DATALAD_VERSION=$(datalad --version) + +if [ $? -gt 0 ]; then + echo "No datalad available in your conda environment." + echo "Try pip install datalad" + # exit 1 +fi + +echo USING DATALAD VERSION ${DATALAD_VERSION} + +set -e -u + + +## Set up the directory that will contain the necessary directories +PROJECTROOT=${PWD}/ASLPREP_QC +if [[ -d ${PROJECTROOT} ]] +then + echo ${PROJECTROOT} already exists + # exit 1 +fi + +if [[ ! -w $(dirname ${PROJECTROOT}) ]] +then + echo Unable to write to ${PROJECTROOT}\'s parent. Change permissions and retry + # exit 1 +fi + +mkdir -p $PROJECTROOT + +## DERIVATIVE_BOOTSTRAP_DIR will be the path to the bootstrap directory containing your derivatives +DERIVATIVE_BOOTSTRAP_DIR=$1 +DERIVATIVE_INPUT=ria+file://${DERIVATIVE_BOOTSTRAP_DIR}"/output_ria#~data" +if [[ -z ${DERIVATIVE_BOOTSTRAP_DIR} ]] +then + echo "Required argument is the path to the aslprep bootstrap directory." + echo "This directory should contain analysis/, input_ria/ and output_ria/." + # exit 1 +fi + +# Jobs are set up to not require a shared filesystem (except for the lockfile) +# ------------------------------------------------------------------------------ +# RIA-URL to a different RIA store from which the dataset will be cloned from. +# Both RIA stores will be created +input_store="ria+file://${PROJECTROOT}/input_ria" +output_store="ria+file://${PROJECTROOT}/output_ria" + +# Create a source dataset with all analysis components as an analysis access +# point. +cd $PROJECTROOT +datalad create -c yoda analysis +cd analysis + +# create dedicated input and output locations. Results will be pushed into the +# output sibling and the analysis will start with a clone from the input sibling. +datalad create-sibling-ria -s output "${output_store}" +pushremote=$(git remote get-url --push output) +datalad create-sibling-ria -s input --storage-sibling off "${input_store}" + +datalad install -d . 
-r --source ${DERIVATIVE_INPUT} inputs/data + +# amend the previous commit with a nicer commit message +git commit --amend -m 'Register input data dataset as a subdataset' + +ZIPS=$(find inputs/data -name 'sub-*aslprep*' | cut -d '/' -f 3 | sort) +if [ -z "${ZIPS}" ] +then + echo "No subjects found in input data" + # exit 1 +fi + +## the actual compute job specification +cat > code/participant_job.sh << "EOT" +#!/bin/bash +#$ -S /bin/bash +#$ -l h_vmem=25G +#$ -l tmpfree=200G +#$ -R y +#$ -l h_rt=24:00:00 +# Set up the correct conda environment +source ${CONDA_PREFIX}/bin/activate base +echo I\'m in $PWD using `which python` +# fail whenever something is fishy, use -x to get verbose logfiles +set -e -u -x +# Set up the remotes and get the subject id from the call +dssource="$1" +pushgitremote="$2" +subid="$3" +# change into the cluster-assigned temp directory. Not done by default in SGE +cd ${CBICA_TMPDIR} +# OR Run it on a shared network drive +# cd /cbica/comp_space/$(basename $HOME) +# Used for the branch names and the temp dir +BRANCH="job-${JOB_ID}-${subid}" +mkdir ${BRANCH} +cd ${BRANCH} +# get the analysis dataset, which includes the inputs as well +# importantly, we do not clone from the lcoation that we want to push the +# results to, in order to avoid too many jobs blocking access to +# the same location and creating a throughput bottleneck +datalad clone "${dssource}" ds +# all following actions are performed in the context of the superdataset +cd ds +git remote add outputstore "$pushgitremote" +git checkout -b "${BRANCH}" +# ------------------------------------------------------------------------------ +# Do the run! +datalad run \ + -i code/get_files.sh \ + -i inputs/data/${subid}_aslprep*.zip \ + --explicit \ + -o ${subid}*quality*.csv \ + -m "unzipped ${subid}" \ + "bash code/get_files.sh inputs/data/${subid}_aslprep*.zip" +# file content first -- does not need a lock, no interaction with Git +datalad push --to output-storage +# and the output branch +flock $DSLOCKFILE git push outputstore +echo TMPDIR TO DELETE +echo ${BRANCH} +datalad drop -r . --nocheck +datalad uninstall -r inputs/data +git annex dead here +cd ../.. +rm -rf $BRANCH +echo SUCCESS +# job handler should clean up workspace +EOT + +chmod +x code/participant_job.sh + +## the actual compute job specification +echo writing script to file... +cat > code/get_files.sh << "EOT" +#!/bin/bash +set -e -u -x +ZIP_FILE=$1 +subid=$(basename $ZIP_FILE | cut -d '_' -f 1) +# unzip outputs +unzip -n $ZIP_FILE 'aslprep/*' -d . +cp aslprep/${subid}/*/perf/*quality*.csv . +# remove unzip dir +rm -rf aslprep +EOT + +chmod +x code/get_files.sh + +##### concat_outputs.sh START #### + +cat > code/concat_outputs.sh << "EOT" +#!/bin/bash +set -e -u -x +EOT + +echo "PROJECT_ROOT=${PROJECTROOT}" >> code/concat_outputs.sh +echo "cd ${PROJECTROOT}" >> code/concat_outputs.sh + +cat >> code/concat_outputs.sh << "EOT" +# set up concat_ds and run concatenator on it +cd ${CBICA_TMPDIR} +datalad clone ria+file://${PROJECT_ROOT}/output_ria#~data concat_ds +cd concat_ds/code +wget https://raw.githubusercontent.com/PennLINC/RBC/master/PennLINC/Generic/concatenator.py +cd .. +datalad save -m "added concatenator script" +datalad run -i 'sub-*quality*.csv' -o '${PROJECT_ROOT}/ASLPREP_QC.csv' --expand inputs --explicit "python code/concatenator.py $PWD ${PROJECT_ROOT}/ASLPREP_QC.csv" +datalad save -m "generated report" +# push changes +datalad push +# remove concat_ds +git annex dead here +cd .. 
+chmod +w -R concat_ds +rm -rf concat_ds +echo SUCCESS +EOT + +#### concat_output.sh END #### + +mkdir logs +echo .SGE_datalad_lock >> .gitignore +echo logs >> .gitignore + +datalad save -m "Participant compute job implementation" + +# Add a script for merging outputs +MERGE_POSTSCRIPT=https://raw.githubusercontent.com/PennLINC/TheWay/main/scripts/cubic/merge_outputs_postscript.sh +cat > code/merge_outputs.sh << "EOT" +#!/bin/bash +set -e -u -x +EOT +echo "outputsource=${output_store}#$(datalad -f '{infos[dataset][id]}' wtf -S dataset)" \ + >> code/merge_outputs.sh +echo "cd ${PROJECTROOT}" >> code/merge_outputs.sh +wget -qO- ${MERGE_POSTSCRIPT} >> code/merge_outputs.sh + +################################################################################ +# SGE SETUP START - remove or adjust to your needs +################################################################################ +env_flags="-v DSLOCKFILE=${PWD}/.SGE_datalad_lock" + +echo '#!/bin/bash' > code/qsub_calls.sh +dssource="${input_store}#$(datalad -f '{infos[dataset][id]}' wtf -S dataset)" +pushgitremote=$(git remote get-url --push output) +eo_args="-e ${PWD}/logs -o ${PWD}/logs" + +for zip in ${ZIPS}; do + subject=`echo ${zip} | cut -d '_' -f 1` + echo "qsub -cwd ${env_flags} -N UNZIP${subject} ${eo_args} \ + ${PWD}/code/participant_job.sh \ + ${dssource} ${pushgitremote} ${subject}" >> code/qsub_calls.sh +done +datalad save -m "SGE submission setup" code/ .gitignore + +################################################################################ +# SGE SETUP END +################################################################################ + +# cleanup - we have generated the job definitions, we do not need to keep a +# massive input dataset around. Having it around wastes resources and makes many +# git operations needlessly slow +datalad uninstall -r --nocheck inputs/data + + +# make sure the fully configured output dataset is available from the designated +# store for initial cloning and pushing the results. 
+datalad push --to input +datalad push --to output + +# Add an alias to the data in the RIA store +RIA_DIR=$(find $PROJECTROOT/output_ria/???/ -maxdepth 1 -type d | sort | tail -n 1) +mkdir -p ${PROJECTROOT}/output_ria/alias +ln -s ${RIA_DIR} ${PROJECTROOT}/output_ria/alias/data + +# if we get here, we are happy +echo SUCCESS From 374869f8fb9971fd9dd6b06fb2e8f1c0cb81070a Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 17:11:44 -0500 Subject: [PATCH 02/16] Update bootstrap-xcp-qc-testing.sh changed all ASL to XCP --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh index 8597bac..247ae8d 100644 --- a/scripts/cubic/bootstrap-xcp-qc-testing.sh +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -1,5 +1,4 @@ ## NOTE ## -# THIS SCRIPT IS STILL BEING TESTED AND NOT READY FOR USE # This workflow is derived from the Datalad Handbook ## Ensure the environment is ready to bootstrap the analysis workspace # Check that we have conda installed @@ -18,7 +17,7 @@ set -e -u ## Set up the directory that will contain the necessary directories -PROJECTROOT=${PWD}/ASLPREP_QC +PROJECTROOT=${PWD}/XCP _QC if [[ -d ${PROJECTROOT} ]] then echo ${PROJECTROOT} already exists @@ -38,7 +37,7 @@ DERIVATIVE_BOOTSTRAP_DIR=$1 DERIVATIVE_INPUT=ria+file://${DERIVATIVE_BOOTSTRAP_DIR}"/output_ria#~data" if [[ -z ${DERIVATIVE_BOOTSTRAP_DIR} ]] then - echo "Required argument is the path to the aslprep bootstrap directory." + echo "Required argument is the path to the xcp bootstrap directory." echo "This directory should contain analysis/, input_ria/ and output_ria/." # exit 1 fi @@ -67,7 +66,7 @@ datalad install -d . -r --source ${DERIVATIVE_INPUT} inputs/data # amend the previous commit with a nicer commit message git commit --amend -m 'Register input data dataset as a subdataset' -ZIPS=$(find inputs/data -name 'sub-*aslprep*' | cut -d '/' -f 3 | sort) +ZIPS=$(find inputs/data -name 'sub-*xcp*' | cut -d '/' -f 3 | sort) if [ -z "${ZIPS}" ] then echo "No subjects found in input data" @@ -112,11 +111,11 @@ git checkout -b "${BRANCH}" # Do the run! datalad run \ -i code/get_files.sh \ - -i inputs/data/${subid}_aslprep*.zip \ + -i inputs/data/${subid}_xcp.zip \ --explicit \ -o ${subid}*quality*.csv \ -m "unzipped ${subid}" \ - "bash code/get_files.sh inputs/data/${subid}_aslprep*.zip" + "bash code/get_files.sh inputs/data/${subid}_xcp*.zip" # file content first -- does not need a lock, no interaction with Git datalad push --to output-storage # and the output branch @@ -142,10 +141,10 @@ set -e -u -x ZIP_FILE=$1 subid=$(basename $ZIP_FILE | cut -d '_' -f 1) # unzip outputs -unzip -n $ZIP_FILE 'aslprep/*' -d . -cp aslprep/${subid}/*/perf/*quality*.csv . +unzip -n $ZIP_FILE 'xcp/*' -d . +cp xcp/${subid}/*/perf/*quality*.csv . # remove unzip dir -rm -rf aslprep +rm -rf xcp EOT chmod +x code/get_files.sh @@ -168,7 +167,7 @@ cd concat_ds/code wget https://raw.githubusercontent.com/PennLINC/RBC/master/PennLINC/Generic/concatenator.py cd .. 
datalad save -m "added concatenator script" -datalad run -i 'sub-*quality*.csv' -o '${PROJECT_ROOT}/ASLPREP_QC.csv' --expand inputs --explicit "python code/concatenator.py $PWD ${PROJECT_ROOT}/ASLPREP_QC.csv" +datalad run -i 'sub-*quality*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator.py $PWD ${PROJECT_ROOT}/XCP_QC.csv" datalad save -m "generated report" # push changes datalad push From 9f13f1f5d4bd921249e189fe3474b75dc2b2a685 Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 17:14:48 -0500 Subject: [PATCH 03/16] Update bootstrap-xcp-qc-testing.sh Updated concatenator to be used --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh index 247ae8d..3607f1d 100644 --- a/scripts/cubic/bootstrap-xcp-qc-testing.sh +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -164,10 +164,10 @@ cat >> code/concat_outputs.sh << "EOT" cd ${CBICA_TMPDIR} datalad clone ria+file://${PROJECT_ROOT}/output_ria#~data concat_ds cd concat_ds/code -wget https://raw.githubusercontent.com/PennLINC/RBC/master/PennLINC/Generic/concatenator.py +wget https://raw.githubusercontent.com/PennLINC/RBC/kahinimehta-patch-1/PennLINC/Generic/concatenator_task.py cd .. datalad save -m "added concatenator script" -datalad run -i 'sub-*quality*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator.py $PWD ${PROJECT_ROOT}/XCP_QC.csv" +datalad run -i 'sub-*quality*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator_task.py $PWD ${PROJECT_ROOT}/XCP_QC.csv" datalad save -m "generated report" # push changes datalad push From 44d34af9e262d28841c1317caa69a65fa5c1b3d8 Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 17:30:20 -0500 Subject: [PATCH 04/16] Update bootstrap-xcp-qc-testing.sh updated file --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh index 3607f1d..7c6cb6d 100644 --- a/scripts/cubic/bootstrap-xcp-qc-testing.sh +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -113,7 +113,7 @@ datalad run \ -i code/get_files.sh \ -i inputs/data/${subid}_xcp.zip \ --explicit \ - -o ${subid}*quality*.csv \ + -o ${subid}*qc*.csv \ -m "unzipped ${subid}" \ "bash code/get_files.sh inputs/data/${subid}_xcp*.zip" # file content first -- does not need a lock, no interaction with Git @@ -142,7 +142,7 @@ ZIP_FILE=$1 subid=$(basename $ZIP_FILE | cut -d '_' -f 1) # unzip outputs unzip -n $ZIP_FILE 'xcp/*' -d . -cp xcp/${subid}/*/perf/*quality*.csv . +cp xcp/${subid}/*/func/*qc*.csv . # remove unzip dir rm -rf xcp EOT @@ -167,7 +167,7 @@ cd concat_ds/code wget https://raw.githubusercontent.com/PennLINC/RBC/kahinimehta-patch-1/PennLINC/Generic/concatenator_task.py cd .. 
datalad save -m "added concatenator script" -datalad run -i 'sub-*quality*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator_task.py $PWD ${PROJECT_ROOT}/XCP_QC.csv" +datalad run -i 'sub-*qc*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator_task.py $PWD ${PROJECT_ROOT}/XCP_QC.csv" datalad save -m "generated report" # push changes datalad push From 5acf7bf4bb27f52624eac1bb1e86c0c9435275fa Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 17:31:38 -0500 Subject: [PATCH 05/16] Update bootstrap-xcp-qc-testing.sh --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh index 7c6cb6d..4c20ce4 100644 --- a/scripts/cubic/bootstrap-xcp-qc-testing.sh +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -17,7 +17,7 @@ set -e -u ## Set up the directory that will contain the necessary directories -PROJECTROOT=${PWD}/XCP _QC +PROJECTROOT=${PWD}/XCP_QC if [[ -d ${PROJECTROOT} ]] then echo ${PROJECTROOT} already exists From d5d575f0249d1922ee5e6f0a54e20429867f390e Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 18:18:21 -0500 Subject: [PATCH 06/16] Update bootstrap-xcp-qc-testing.sh --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh index 4c20ce4..ec58502 100644 --- a/scripts/cubic/bootstrap-xcp-qc-testing.sh +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -111,7 +111,7 @@ git checkout -b "${BRANCH}" # Do the run! datalad run \ -i code/get_files.sh \ - -i inputs/data/${subid}_xcp.zip \ + -i inputs/data/${subid}_xcp*.zip \ --explicit \ -o ${subid}*qc*.csv \ -m "unzipped ${subid}" \ From e384ebfa75630b41783739c7d74a2b5c8e47f6e4 Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 19:19:37 -0500 Subject: [PATCH 07/16] Update bootstrap-xcp-qc-testing.sh --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh index ec58502..6dc0c6e 100644 --- a/scripts/cubic/bootstrap-xcp-qc-testing.sh +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -82,7 +82,7 @@ cat > code/participant_job.sh << "EOT" #$ -R y #$ -l h_rt=24:00:00 # Set up the correct conda environment -source ${CONDA_PREFIX}/bin/activate base +source ${CONDA_PREFIX}/bin/activate flywheel echo I\'m in $PWD using `which python` # fail whenever something is fishy, use -x to get verbose logfiles set -e -u -x @@ -91,9 +91,9 @@ dssource="$1" pushgitremote="$2" subid="$3" # change into the cluster-assigned temp directory. Not done by default in SGE -cd ${CBICA_TMPDIR} +#cd ${CBICA_TMPDIR} # OR Run it on a shared network drive -# cd /cbica/comp_space/$(basename $HOME) + cd /cbica/comp_space/$(basename $HOME) # Used for the branch names and the temp dir BRANCH="job-${JOB_ID}-${subid}" mkdir ${BRANCH} @@ -141,10 +141,10 @@ set -e -u -x ZIP_FILE=$1 subid=$(basename $ZIP_FILE | cut -d '_' -f 1) # unzip outputs -unzip -n $ZIP_FILE 'xcp/*' -d . -cp xcp/${subid}/*/func/*qc*.csv . +unzip -n $ZIP_FILE 'xcp*/*' -d . +cp xcp*/${subid}/*/func/*qc*.csv . 
# remove unzip dir -rm -rf xcp +rm -rf xcp* EOT chmod +x code/get_files.sh From 9570b6b9026c0b0e96bfa62dbe7cfbb2fbe7614b Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 21:20:02 -0500 Subject: [PATCH 08/16] Update bootstrap-xcp-qc-testing.sh --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh index 6dc0c6e..5234692 100644 --- a/scripts/cubic/bootstrap-xcp-qc-testing.sh +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -166,8 +166,9 @@ datalad clone ria+file://${PROJECT_ROOT}/output_ria#~data concat_ds cd concat_ds/code wget https://raw.githubusercontent.com/PennLINC/RBC/kahinimehta-patch-1/PennLINC/Generic/concatenator_task.py cd .. +ARGS=$1 datalad save -m "added concatenator script" -datalad run -i 'sub-*qc*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator_task.py $PWD ${PROJECT_ROOT}/XCP_QC.csv" +datalad run -i 'sub-*qc*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator_task.py $PWD ${PROJECT_ROOT} $ARGS" datalad save -m "generated report" # push changes datalad push @@ -177,6 +178,7 @@ cd .. chmod +w -R concat_ds rm -rf concat_ds echo SUCCESS + EOT #### concat_output.sh END #### From 62e9b67a60dae9cddc052a3010e64545bc685fbe Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 21:29:36 -0500 Subject: [PATCH 09/16] Update bootstrap-xcp-qc-testing.sh --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh index 5234692..09de705 100644 --- a/scripts/cubic/bootstrap-xcp-qc-testing.sh +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -82,7 +82,7 @@ cat > code/participant_job.sh << "EOT" #$ -R y #$ -l h_rt=24:00:00 # Set up the correct conda environment -source ${CONDA_PREFIX}/bin/activate flywheel +source ${CONDA_PREFIX}/bin/activate base echo I\'m in $PWD using `which python` # fail whenever something is fishy, use -x to get verbose logfiles set -e -u -x From 69fa70c1ca017e277fc5191b8b2e1e4b3b33230d Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 21:43:35 -0500 Subject: [PATCH 10/16] Update bootstrap-xcp-qc-testing.sh --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh index 09de705..04073c9 100644 --- a/scripts/cubic/bootstrap-xcp-qc-testing.sh +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -166,7 +166,7 @@ datalad clone ria+file://${PROJECT_ROOT}/output_ria#~data concat_ds cd concat_ds/code wget https://raw.githubusercontent.com/PennLINC/RBC/kahinimehta-patch-1/PennLINC/Generic/concatenator_task.py cd .. 
-ARGS=$1 +ARGS=$@ datalad save -m "added concatenator script" datalad run -i 'sub-*qc*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator_task.py $PWD ${PROJECT_ROOT} $ARGS" datalad save -m "generated report" From 4c36e5ef28a8518deba6e80524cb657fe8180522 Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 22:10:10 -0500 Subject: [PATCH 11/16] Update bootstrap-xcp-qc-testing.sh --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh index 04073c9..e6ca2c3 100644 --- a/scripts/cubic/bootstrap-xcp-qc-testing.sh +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -160,6 +160,7 @@ echo "PROJECT_ROOT=${PROJECTROOT}" >> code/concat_outputs.sh echo "cd ${PROJECTROOT}" >> code/concat_outputs.sh cat >> code/concat_outputs.sh << "EOT" +# takes argument in format of individual QC file names without subject and session, eg: task-fracback_acq-singleband_space-fsLR_desc-qc_bold.csv # set up concat_ds and run concatenator on it cd ${CBICA_TMPDIR} datalad clone ria+file://${PROJECT_ROOT}/output_ria#~data concat_ds From 2f630e585a10bbcb5d93f389998beff3e3799632 Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 24 Feb 2022 22:11:10 -0500 Subject: [PATCH 12/16] Update bootstrap-xcp-qc-testing.sh --- scripts/cubic/bootstrap-xcp-qc-testing.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc-testing.sh index e6ca2c3..e02c4a2 100644 --- a/scripts/cubic/bootstrap-xcp-qc-testing.sh +++ b/scripts/cubic/bootstrap-xcp-qc-testing.sh @@ -160,7 +160,8 @@ echo "PROJECT_ROOT=${PROJECTROOT}" >> code/concat_outputs.sh echo "cd ${PROJECTROOT}" >> code/concat_outputs.sh cat >> code/concat_outputs.sh << "EOT" -# takes argument in format of individual QC file names without subject and session, eg: task-fracback_acq-singleband_space-fsLR_desc-qc_bold.csv +# takes argument in format of individual QC file names without subject and session separated by space, eg. 
below: +# task-fracback_acq-singleband_space-fsLR_desc-qc_bold.csv task-rest_acq-singleband_space-MNI152NLin6Asym_desc-qc_res-2_bold.csv # set up concat_ds and run concatenator on it cd ${CBICA_TMPDIR} datalad clone ria+file://${PROJECT_ROOT}/output_ria#~data concat_ds From 56505a164ac6efd12f25828ecab722c760318404 Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Fri, 25 Feb 2022 07:28:26 -0500 Subject: [PATCH 13/16] Rename bootstrap-xcp-qc-testing.sh to bootstrap-xcp-qc.sh --- .../cubic/{bootstrap-xcp-qc-testing.sh => bootstrap-xcp-qc.sh} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename scripts/cubic/{bootstrap-xcp-qc-testing.sh => bootstrap-xcp-qc.sh} (100%) diff --git a/scripts/cubic/bootstrap-xcp-qc-testing.sh b/scripts/cubic/bootstrap-xcp-qc.sh similarity index 100% rename from scripts/cubic/bootstrap-xcp-qc-testing.sh rename to scripts/cubic/bootstrap-xcp-qc.sh From 5495e4c77707ed67965322e0c23239021876775e Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Fri, 25 Feb 2022 16:01:36 -0500 Subject: [PATCH 14/16] Update bootstrap-xcp-qc.sh --- scripts/cubic/bootstrap-xcp-qc.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/cubic/bootstrap-xcp-qc.sh b/scripts/cubic/bootstrap-xcp-qc.sh index e02c4a2..eb8adde 100644 --- a/scripts/cubic/bootstrap-xcp-qc.sh +++ b/scripts/cubic/bootstrap-xcp-qc.sh @@ -160,13 +160,13 @@ echo "PROJECT_ROOT=${PROJECTROOT}" >> code/concat_outputs.sh echo "cd ${PROJECTROOT}" >> code/concat_outputs.sh cat >> code/concat_outputs.sh << "EOT" -# takes argument in format of individual QC file names without subject and session separated by space, eg. below: -# task-fracback_acq-singleband_space-fsLR_desc-qc_bold.csv task-rest_acq-singleband_space-MNI152NLin6Asym_desc-qc_res-2_bold.csv +# takes argument in format of individual QC file names without subject and session separated by space and * after band, eg. below: +# task-fracback_acq-singleband*_space-fsLR_desc-qc_bold.csv task-rest_acq-singleband*_space-MNI152NLin6Asym_desc-qc_res-2_bold.csv # set up concat_ds and run concatenator on it cd ${CBICA_TMPDIR} datalad clone ria+file://${PROJECT_ROOT}/output_ria#~data concat_ds cd concat_ds/code -wget https://raw.githubusercontent.com/PennLINC/RBC/kahinimehta-patch-1/PennLINC/Generic/concatenator_task.py +wget https://raw.githubusercontent.com/PennLINC/RBC/kahinimehta-patch-1/PennLINC/Generic/concatenator_task.py # will need to be edited cd .. ARGS=$@ datalad save -m "added concatenator script" From 2e174677b7a2a17e009e0c4ba7ed2235af9c3932 Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 10 Mar 2022 15:02:10 -0500 Subject: [PATCH 15/16] Update bootstrap-xcp-qc.sh --- scripts/cubic/bootstrap-xcp-qc.sh | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/scripts/cubic/bootstrap-xcp-qc.sh b/scripts/cubic/bootstrap-xcp-qc.sh index eb8adde..9d2dd58 100644 --- a/scripts/cubic/bootstrap-xcp-qc.sh +++ b/scripts/cubic/bootstrap-xcp-qc.sh @@ -160,17 +160,19 @@ echo "PROJECT_ROOT=${PROJECTROOT}" >> code/concat_outputs.sh echo "cd ${PROJECTROOT}" >> code/concat_outputs.sh cat >> code/concat_outputs.sh << "EOT" -# takes argument in format of individual QC file names without subject and session separated by space and * after band, eg. 
below: -# task-fracback_acq-singleband*_space-fsLR_desc-qc_bold.csv task-rest_acq-singleband*_space-MNI152NLin6Asym_desc-qc_res-2_bold.csv # set up concat_ds and run concatenator on it cd ${CBICA_TMPDIR} datalad clone ria+file://${PROJECT_ROOT}/output_ria#~data concat_ds cd concat_ds/code -wget https://raw.githubusercontent.com/PennLINC/RBC/kahinimehta-patch-1/PennLINC/Generic/concatenator_task.py # will need to be edited +rm -rf concatenator_task.py* +wget https://raw.githubusercontent.com/PennLINC/RBC/kahinimehta-patch-1/PennLINC/Generic/concatenator_task.py #might need to change this if pull request is resolved cd .. -ARGS=$@ +tasks=$1 +spaces=$2 +bands=$3 +echo USAGE = bash code/concat_outputs.sh rest fsLR multi OR comma-separated for multiple arguments, eg: bash code/concat_outputs.sh rest,fracback,face MNI152NLin6Asym,fsLR multi,single datalad save -m "added concatenator script" -datalad run -i 'sub-*qc*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator_task.py $PWD ${PROJECT_ROOT} $ARGS" +datalad run -i 'sub-*qc*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator_task.py $PWD ${PROJECT_ROOT} $tasks $spaces $bands" datalad save -m "generated report" # push changes datalad push From 83ba269d127cfbda2ded67ed08cc0fabb8d54f46 Mon Sep 17 00:00:00 2001 From: kahinimehta <68132139+kahinimehta@users.noreply.github.com> Date: Thu, 10 Mar 2022 17:02:49 -0500 Subject: [PATCH 16/16] Update bootstrap-xcp-qc.sh --- scripts/cubic/bootstrap-xcp-qc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/cubic/bootstrap-xcp-qc.sh b/scripts/cubic/bootstrap-xcp-qc.sh index 9d2dd58..8316a1b 100644 --- a/scripts/cubic/bootstrap-xcp-qc.sh +++ b/scripts/cubic/bootstrap-xcp-qc.sh @@ -170,9 +170,9 @@ cd .. tasks=$1 spaces=$2 bands=$3 -echo USAGE = bash code/concat_outputs.sh rest fsLR multi OR comma-separated for multiple arguments, eg: bash code/concat_outputs.sh rest,fracback,face MNI152NLin6Asym,fsLR multi,single +res=$4 datalad save -m "added concatenator script" -datalad run -i 'sub-*qc*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator_task.py $PWD ${PROJECT_ROOT} $tasks $spaces $bands" +datalad run -i 'sub-*qc*.csv' -o '${PROJECT_ROOT}/XCP_QC.csv' --expand inputs --explicit "python code/concatenator_task.py $PWD ${PROJECT_ROOT} $tasks $spaces $bands $res" datalad save -m "generated report" # push changes datalad push
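
Usage notes — not part of the patches above; a minimal sketch of how the bootstrapped workspace is expected to be used, based on the final state of the script after PATCH 16. The bootstrap-directory path is a placeholder, and the manual submission step is inferred from the generated code/qsub_calls.sh rather than stated in the patches.

# bootstrap the XCP QC workspace; the single required argument is the path to an
# existing xcp bootstrap directory containing analysis/, input_ria/ and output_ria/
bash scripts/cubic/bootstrap-xcp-qc.sh /path/to/xcp_bootstrap_dir

# the bootstrap writes ${PWD}/XCP_QC/analysis/code/qsub_calls.sh with one qsub call
# per sub-*xcp* zip found in inputs/data; submitting those calls (assumed to be done
# by hand) runs code/participant_job.sh, which unzips each subject's outputs and
# keeps only the per-run *qc*.csv files
cd XCP_QC/analysis
bash code/qsub_calls.sh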
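
Once the per-job branches have been merged back into the output dataset (code/merge_outputs.sh is generated for that step), code/concat_outputs.sh clones the output RIA store into a temporary location and runs concatenator_task.py over the collected sub-*qc*.csv files to produce ${PROJECT_ROOT}/XCP_QC.csv. As of PATCH 16 it takes four positional arguments (tasks, spaces, bands, res), each a comma-separated list forwarded to concatenator_task.py. The call below is an illustrative sketch adapted from the usage line that PATCH 16 removed; the resolution value 2 is an assumption taken from the res-2 filename example added in PATCH 12.

# run from the analysis dataset; arguments are comma-separated lists of tasks,
# output spaces, single-/multi-band labels, and resolutions to concatenate
bash code/concat_outputs.sh rest,fracback,face MNI152NLin6Asym,fsLR multi,single 2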