diff --git a/examples/gcp_datalab/01-prep.sh b/examples/gcp_datalab/01-prep.sh
index 4f644c48a4..993b81b636 100755
--- a/examples/gcp_datalab/01-prep.sh
+++ b/examples/gcp_datalab/01-prep.sh
@@ -21,22 +21,21 @@ fi

 #Create persistent hive metadata store
 #Reference link
-gsutil mb gs://$bucketname
+gcloud storage buckets create gs://$bucketname

 #Update the init script with the bucketname
 sed -i -e "s//$bucketname/g" ./init-scripts/datalab_fhir.sh
-gsutil cp ./init-scripts/* gs://$bucketname/scripts/
-gsutil ls gs://$bucketname/scripts/
+gcloud storage cp ./init-scripts/* gs://$bucketname/scripts/
+gcloud storage ls gs://$bucketname/scripts/

 #Create the sql database if you want to leverage an sql hive metastore
 if [[ "$1" == "hivemeta" ]] ; then
-  gsutil ls gs://$hivebucketname
+  gcloud storage ls gs://$hivebucketname
   if [ $? -eq 0 ]; then
     echo "Bucket $hivebucketname exists and the dataproc cluster will leverage this existing bucket"
   else
-    gsutil mb gs://$hivebucketname
+    gcloud storage buckets create gs://$hivebucketname
   fi
   gcloud sql instances create $hivedbname --database-version="MYSQL_5_7" --activation-policy=ALWAYS --gce-zone $ZONE
 fi
-
diff --git a/examples/gcp_datalab/02-gen-fhirdependencies.sh b/examples/gcp_datalab/02-gen-fhirdependencies.sh
index f7c51579e8..8852749d34 100755
--- a/examples/gcp_datalab/02-gen-fhirdependencies.sh
+++ b/examples/gcp_datalab/02-gen-fhirdependencies.sh
@@ -24,7 +24,7 @@ fi

 # Start with a clean gcs folder destined for the fhir dependencies
-gsutil -m rm -r gs://$bucketname/fhir
+gcloud storage rm --recursive gs://$bucketname/fhir

 cd $(pwd)/../../
 pip install -r bazel/requirements.txt
@@ -68,9 +68,9 @@ EOT
 cp ./out-bin/py/google/fhir/seqex/*.so ./py/google/fhir/seqex/.

 # Finally copying over the files to the designated gcs bucket
-gsutil -m cp -r proto gs://$bucketname/fhir/
-gsutil -m cp -r py gs://$bucketname/fhir/
-gsutil -m cp -r out-genfiles gs://$bucketname/fhir/
+gcloud storage cp --recursive proto gs://$bucketname/fhir/
+gcloud storage cp --recursive py gs://$bucketname/fhir/
+gcloud storage cp --recursive out-genfiles gs://$bucketname/fhir/

 # Change out of the directory
 cd examples/gcp_datalab
diff --git a/examples/gcp_datalab/04-create-training-data.sh b/examples/gcp_datalab/04-create-training-data.sh
index cdb5ee3629..a0af994d6b 100755
--- a/examples/gcp_datalab/04-create-training-data.sh
+++ b/examples/gcp_datalab/04-create-training-data.sh
@@ -41,4 +41,4 @@ fi
 ./run_synthea Massachusetts -p 200

 #Copy generated data to a bucket in Google cloud storage
-gsutil -m cp ${1}/synthea/output/fhir/*.json gs://$bucketname/synthea/fhir
+gcloud storage cp ${1}/synthea/output/fhir/*.json gs://$bucketname/synthea/fhir
diff --git a/examples/gcp_datalab/05-gen-bundles.sh b/examples/gcp_datalab/05-gen-bundles.sh
index c61653cd91..e6835301f4 100755
--- a/examples/gcp_datalab/05-gen-bundles.sh
+++ b/examples/gcp_datalab/05-gen-bundles.sh
@@ -28,4 +28,4 @@ $SYNTHEA_TO_TF_RECORDS --input=${1}/synthea/output/fhir/* \

 echo "Uploading bundles to GCS Bucket: $bucketname"
-gsutil -m cp $1/bundles/bundles* gs://$bucketname/synthea/bundles
+gcloud storage cp $1/bundles/bundles* gs://$bucketname/synthea/bundles
diff --git a/examples/gcp_datalab/init-scripts/datalab_fhir.sh b/examples/gcp_datalab/init-scripts/datalab_fhir.sh
index feac8252bb..457c0992d3 100644
--- a/examples/gcp_datalab/init-scripts/datalab_fhir.sh
+++ b/examples/gcp_datalab/init-scripts/datalab_fhir.sh
@@ -93,7 +93,7 @@ function configure_master(){

   # Add dependencies for fhir -> label
   # https://github.com/google/fhir
-  gsutil -m cp -r gs:///fhir .
+  gcloud storage cp --recursive gs:///fhir .
   cat << EOF > Dockerfile
 FROM ${DOCKER_IMAGE}
 ADD backports.list /etc/apt/sources.list.d/
diff --git a/examples/gcp_datalab/notebooks/1_train_and_eval_ml_model_to_predict_los.ipynb b/examples/gcp_datalab/notebooks/1_train_and_eval_ml_model_to_predict_los.ipynb
index 046b4d5000..284c16f519 100644
--- a/examples/gcp_datalab/notebooks/1_train_and_eval_ml_model_to_predict_los.ipynb
+++ b/examples/gcp_datalab/notebooks/1_train_and_eval_ml_model_to_predict_los.ipynb
@@ -163,7 +163,7 @@
    ],
    "source": [
     "%bash\n",
-    "gsutil ls -l ${BUNDLES_IN_GCS}"
+    "gcloud storage ls --long ${BUNDLES_IN_GCS}"
    ]
   },
   {
@@ -190,8 +190,7 @@
    ],
    "source": [
     "%bash\n",
-    "gsutil rm ${LABELS_IN_GCS}"
-   ]
+    "gcloud storage rm ${LABELS_IN_GCS}" ]
   },
   {
    "cell_type": "markdown",
@@ -395,8 +394,7 @@
    ],
    "source": [
     "%bash\n",
-    "gsutil ls -l ${LABELS_IN_GCS}"
-   ]
+    "gcloud storage ls --long ${LABELS_IN_GCS}" ]
   },
   {
    "cell_type": "markdown",
@@ -439,8 +437,7 @@
    ],
    "source": [
     "%bash\n",
-    "gsutil rm ${SEQEX_IN_GCS}"
-   ]
+    "gcloud storage rm ${SEQEX_IN_GCS}" ]
   },
   {
    "cell_type": "markdown",
@@ -571,9 +568,7 @@
    ],
    "source": [
     "%bash\n",
-    "gsutil ls -l ${SEQEX_IN_GCS}\n",
-    "gsutil cp ${SERV_DS} ${SERV_LOC}"
-   ]
+    "gcloud storage ls --long ${SEQEX_IN_GCS}\n", "gcloud storage cp ${SERV_DS} ${SERV_LOC}" ]
   },
   {
    "cell_type": "markdown",
@@ -638,7 +633,7 @@
    ],
    "source": [
     "%bash\n",
-    "gsutil -m rm -r ${MODEL_IN_GCS}"
+    "gcloud storage rm --recursive ${MODEL_IN_GCS}"
    ]
   },
   {
@@ -2362,7 +2357,7 @@
    "outputs": [],
    "source": [
     "%bash\n",
-    "gsutil ls -l ${MODEL_IN_GCS}"
+    "gcloud storage ls --long ${MODEL_IN_GCS}"
    ]
   },
   {
diff --git a/examples/gcp_datalab/notebooks/2_deploy_and_run_ml_model_to_predict_los.ipynb b/examples/gcp_datalab/notebooks/2_deploy_and_run_ml_model_to_predict_los.ipynb
index 8c084b6b14..10b9dab47c 100644
--- a/examples/gcp_datalab/notebooks/2_deploy_and_run_ml_model_to_predict_los.ipynb
+++ b/examples/gcp_datalab/notebooks/2_deploy_and_run_ml_model_to_predict_los.ipynb
@@ -146,7 +146,7 @@
    ],
    "source": [
     "%bash\n",
-    "gsutil ls -l ${SEQEX_IN_GCS}"
+    "gcloud storage ls --long ${SEQEX_IN_GCS}"
    ]
   },
   {
@@ -794,7 +794,7 @@
    ],
    "source": [
     "%%bash\n",
-    "gsutil cat ${INFERENCE_PATH}/prediction.results-00000-of-00001"
+    "gcloud storage cat ${INFERENCE_PATH}/prediction.results-00000-of-00001"
    ]
   },
   {
@@ -842,4 +842,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 2
-}
\ No newline at end of file
+}
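
For reviewers: every substitution in this patch follows the same fixed gsutil-to-gcloud-storage mapping. Below is a minimal illustrative sketch of that mapping, not part of the patch itself; BUCKET is a hypothetical placeholder, not a name taken from these scripts.

#!/bin/bash
# Hypothetical bucket name for illustration only.
BUCKET=example-bucket

gcloud storage buckets create gs://$BUCKET         # was: gsutil mb gs://$BUCKET
gcloud storage ls --long gs://$BUCKET              # was: gsutil ls -l gs://$BUCKET
gcloud storage cp --recursive ./dir gs://$BUCKET/  # was: gsutil -m cp -r ./dir gs://$BUCKET/
gcloud storage rm --recursive gs://$BUCKET/dir     # was: gsutil -m rm -r gs://$BUCKET/dir
gcloud storage cat gs://$BUCKET/dir/object.txt     # was: gsutil cat gs://$BUCKET/dir/object.txt

Note that gcloud storage parallelizes transfers by default, so gsutil's -m flag has no counterpart and is dropped rather than translated.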