From b1c4fa3b9a0c0497e48dad2de3124f74d5e3cbc9 Mon Sep 17 00:00:00 2001
From: gurusai-voleti
Date: Wed, 21 Jan 2026 01:57:41 +0530
Subject: [PATCH 1/7] chore: Migrate gsutil usage to gcloud storage (#1711)

Co-authored-by: Andrew Gold <41129777+agold-rh@users.noreply.github.com>
---
 .../basic/dbt-project/cloudbuild.yaml     | 10 +++++-----
 .../optimized/dbt-project/cloudbuild.yaml | 10 +++++-----
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/examples/dbt-on-cloud-composer/basic/dbt-project/cloudbuild.yaml b/examples/dbt-on-cloud-composer/basic/dbt-project/cloudbuild.yaml
index 9ef42d7e03c..0a8c12b72a5 100644
--- a/examples/dbt-on-cloud-composer/basic/dbt-project/cloudbuild.yaml
+++ b/examples/dbt-on-cloud-composer/basic/dbt-project/cloudbuild.yaml
@@ -47,14 +47,14 @@ steps:
   dbt docs generate --vars '{"project_id": "pso-dbt-airflow-demo","bigquery_location": "us","impersonate_service_account": ${_DBT_SERVICE_ACCOUNT},"execution_date": "1970-01-01","source_data_project": "bigquery-public-data"}' --profiles-dir .dbt --target cloud-build

 # _GCS_BUCKET is the GCS Bucket that will store the dbt documentation
-- name: gcr.io/cloud-builders/gsutil
+- name: gcr.io/cloud-builders/gcloud
   id: Copy the target to GCS
   args:
-  - -m
+  - storage
   - rsync
-  - -r
-  - -c
-  - -x
+  - --recursive
+  - --checksums-only
+  - --exclude
   - .dockerignore|key|logs|models|tests|.dockerignore|.gitignore|cloudbuild.yaml|Dockerfile|README.md|.git
   - .
   - gs://${_GCS_BUCKET}/data/dbt-docs-basic/
diff --git a/examples/dbt-on-cloud-composer/optimized/dbt-project/cloudbuild.yaml b/examples/dbt-on-cloud-composer/optimized/dbt-project/cloudbuild.yaml
index 566262a2901..abfbcae1e59 100644
--- a/examples/dbt-on-cloud-composer/optimized/dbt-project/cloudbuild.yaml
+++ b/examples/dbt-on-cloud-composer/optimized/dbt-project/cloudbuild.yaml
@@ -47,14 +47,14 @@ steps:
   dbt docs generate --vars '{"project_id": "pso-dbt-airflow-demo","bigquery_location": "us","impersonate_service_account": ${_DBT_SERVICE_ACCOUNT},"execution_date": "1970-01-01","source_data_project": "bigquery-public-data"}' --profiles-dir .dbt --target cloud-build

 # _GCS_BUCKET is the GCS Bucket that will store the dbt documentation
-- name: gcr.io/cloud-builders/gsutil
+- name: gcr.io/cloud-builders/gcloud
   id: Copy the target to GCS
   args:
-  - -m
+  - storage
   - rsync
-  - -r
-  - -c
-  - -x
+  - --recursive
+  - --checksums-only
+  - --exclude
   - .dockerignore|key|logs|models|tests|.dockerignore|.gitignore|cloudbuild.yaml|Dockerfile|README.md|.git
   - .
   - gs://${_GCS_BUCKET}/data/dbt-docs-optimized/

From 119a87ec3053843ac49ebc2afea065916a59b8c9 Mon Sep 17 00:00:00 2001
From: gurusai-voleti
Date: Wed, 21 Jan 2026 02:03:10 +0530
Subject: [PATCH 2/7] chore: Migrate gsutil usage to gcloud storage (#1712)

Co-authored-by: Andrew Gold <41129777+agold-rh@users.noreply.github.com>
---
 examples/direct-upload-to-gcs/README.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/direct-upload-to-gcs/README.md b/examples/direct-upload-to-gcs/README.md
index 7071cc7f8b6..2ae2f83479c 100644
--- a/examples/direct-upload-to-gcs/README.md
+++ b/examples/direct-upload-to-gcs/README.md
@@ -56,13 +56,13 @@ DISTRIBUTION_BUCKET=""
 LIFECYCLE_POLICY_FILE="./lifecycle.json"

 # Creates the uploadable bucket
-gsutil mb -p $PROJECT_ID -l $REGION --retention 900s gs://$UPLOADABLE_BUCKET
+gcloud storage buckets create gs://$UPLOADABLE_BUCKET --project=$PROJECT_ID --location=$REGION --retention-period=900s

 # Creates the bucket for distribution
-gsutil mb -p $PROJECT_ID -l $REGION gs://$DISTRIBUTION_BUCKET
+gcloud storage buckets create gs://$DISTRIBUTION_BUCKET --project=$PROJECT_ID --location=$REGION

 # Set lifecycle for the uploadable bucket
-gsutil lifecycle set $LIFECYCLE_POLICY_FILE gs://$UPLOADABLE_BUCKET
+gcloud storage buckets update gs://$UPLOADABLE_BUCKET --lifecycle-file=$LIFECYCLE_POLICY_FILE

 # Publish all objects to all users
-gsutil iam ch allUsers:objectViewer gs://$DISTRIBUTION_BUCKET
+gcloud storage buckets add-iam-policy-binding gs://$DISTRIBUTION_BUCKET --member=allUsers --role=roles/storage.objectViewer
 ```

 ### Step.2 Deploy to App Engine Standard

From efbb169840539fbc5c8fb4c0e0e971d02c992cd7 Mon Sep 17 00:00:00 2001
From: gurusai-voleti
Date: Wed, 21 Jan 2026 02:07:46 +0530
Subject: [PATCH 3/7] chore: Migrate gsutil usage to gcloud storage (#1713)

Co-authored-by: Andrew Gold <41129777+agold-rh@users.noreply.github.com>
---
 examples/e2e-home-appliance-status-monitoring/README.md | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/examples/e2e-home-appliance-status-monitoring/README.md b/examples/e2e-home-appliance-status-monitoring/README.md
index 28a0ec539eb..9ace97c1f7a 100644
--- a/examples/e2e-home-appliance-status-monitoring/README.md
+++ b/examples/e2e-home-appliance-status-monitoring/README.md
@@ -32,7 +32,7 @@ GOOGLE_APPLICATION_CREDENTIALS=${PWD}"/e2e_demo_credential.json"

 # create a new GCS bucket if you don't have one
 BUCKET_NAME=[your-bucket-name]
-gsutil mb -p ${GOOGLE_PROJECT_ID} gs://${BUCKET_NAME}/
+gcloud storage buckets create gs://${BUCKET_NAME} --project=${GOOGLE_PROJECT_ID}
 ```

 You also need to enable the following APIs in the APIs & Services menu.
@@ -50,7 +50,7 @@ If you are using our trained model:
 tar jxvf data/model.tar.bz2

 # upload the model to your bucket
-gsutil cp -r model gs://${BUCKET_NAME}
+gcloud storage cp --recursive model gs://${BUCKET_NAME}
 ```

 If you want to train your own model:
@@ -186,4 +186,3 @@ jupyter notebook

 ![Demo system sample output](./img/demo03.gif)

-

From 35a25b2a312374dea1a9365dc6f2f34e5458d8c4 Mon Sep 17 00:00:00 2001
From: gurusai-voleti
Date: Wed, 21 Jan 2026 02:11:57 +0530
Subject: [PATCH 4/7] chore: Migrate gsutil usage to gcloud storage (#1716)

Co-authored-by: Andrew Gold <41129777+agold-rh@users.noreply.github.com>
---
 examples/gcs-to-bq-serverless-services/Readme.md | 16 ++++++++--------
 .../gcs-to-bq/data-ingestion/cloudbuild.yaml     | 10 +++++-----
 2 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/examples/gcs-to-bq-serverless-services/Readme.md b/examples/gcs-to-bq-serverless-services/Readme.md
index 07b55e1f69a..4fc4873eeed 100644
--- a/examples/gcs-to-bq-serverless-services/Readme.md
+++ b/examples/gcs-to-bq-serverless-services/Readme.md
@@ -10,20 +10,20 @@ In this solution, we build an approch to ingestion flat files (in GCS) to BigQue
 ```
 PROJECT_ID=<>
 GCS_BUCKET_NAME=<>
-  gsutil mb gs://${GCS_BUCKET_NAME}
-  gsutil notification create \
-    -t projects/${PROJECT_ID}/topics/create_notification_${GCS_BUCKET_NAME} \
-    -e OBJECT_FINALIZE \
-    -f json gs://${GCS_BUCKET_NAME}
+  gcloud storage buckets create gs://${GCS_BUCKET_NAME}
+  gcloud storage buckets notifications create gs://${GCS_BUCKET_NAME} \
+    --topic=projects/${PROJECT_ID}/topics/create_notification_${GCS_BUCKET_NAME} \
+    --event-types=OBJECT_FINALIZE \
+    --payload-format=json
 ```
 - **Step 2:** Build and copy the jar to a GCS bucket (create a GCS bucket to store the jar if you don't have one). There are a number of Dataproc templates that are available to [use](https://github.com/GoogleCloudPlatform/dataproc-templates).
 ```
 GCS_ARTIFACT_REPO=<>
-  gsutil mb gs://${GCS_ARTIFACT_REPO}
+  gcloud storage buckets create gs://${GCS_ARTIFACT_REPO}
   cd gcs2bq-spark
   mvn clean install
-  gsutil cp target/GCS2BQWithSpark-1.0-SNAPSHOT.jar gs://${GCS_ARTIFACT_REPO}/
+  gcloud storage cp target/GCS2BQWithSpark-1.0-SNAPSHOT.jar gs://${GCS_ARTIFACT_REPO}/
 ```
 - **Step 3:** [The page](https://cloud.google.com/dataproc-serverless/docs/concepts/network) describes the network configuration required to run serverless Spark
@@ -76,7 +76,7 @@ In this solution, we build an approch to ingestion flat files (in GCS) to BigQue
 - **Create BQ temp Bucket** GCS to BigQuery requires a temporary bucket. Let's create a temporary bucket
 ```
 GCS_TEMP_BUCKET=<>
-  gsutil mb gs://${GCS_TEMP_BUCKET}
+  gcloud storage buckets create gs://${GCS_TEMP_BUCKET}
 ```
 - **Create Deadletter Topic and Subscription** Let's create a dead letter topic and subscription
diff --git a/examples/gcs-to-bq/data-ingestion/cloudbuild.yaml b/examples/gcs-to-bq/data-ingestion/cloudbuild.yaml
index b4f92cedef5..6c7772bfa1b 100644
--- a/examples/gcs-to-bq/data-ingestion/cloudbuild.yaml
+++ b/examples/gcs-to-bq/data-ingestion/cloudbuild.yaml
@@ -1,14 +1,14 @@
 steps:
 # _COMPOSER_BUCKET_NAME is the GCS Bucket that will store the dags
-- name: gcr.io/cloud-builders/gsutil
+- name: gcr.io/cloud-builders/gcloud
   id: Copy dag definition (with its dependencies) to the Composer folder (dags and plugins folders included)
   dir: 'data-ingestion'
   args:
-  - -m
+  - storage
   - rsync
-  - -r
-  - -c
-  - -x
+  - --recursive
+  - --checksums-only
+  - --exclude
   - .dockerignore|.gitignore|cloudbuild.yaml|README.md|.git|imgs|tests|deps|deploy|dbt|config|sql|terraform
   - .
   - gs://${_COMPOSER_BUCKET_NAME}

From 7038ba43b00b7451efd06a094ebe4edadd39041d Mon Sep 17 00:00:00 2001
From: gurusai-voleti
Date: Wed, 21 Jan 2026 03:04:03 +0530
Subject: [PATCH 5/7] chore: Migrate gsutil usage to gcloud storage (#1717)

Co-authored-by: Andrew Gold <41129777+agold-rh@users.noreply.github.com>
---
 examples/iot-nirvana/README.md                |  7 +++----
 .../app-engine/src/main/webapp/startup.sh     |  2 +-
 examples/iot-nirvana/setup_gcp_environment.sh | 20 +++++++++----------
 3 files changed, 14 insertions(+), 15 deletions(-)

diff --git a/examples/iot-nirvana/README.md b/examples/iot-nirvana/README.md
index 7e150d07532..7e6b2602bb2 100644
--- a/examples/iot-nirvana/README.md
+++ b/examples/iot-nirvana/README.md
@@ -111,12 +111,12 @@ Copy the JAR package containing the client binaries to Google Cloud Storage in
 the bucket previously created. Run the following command in the `/client`
 folder:

-`gsutil cp target/google-cloud-demo-iot-nirvana-client-jar-with-dependencies.jar gs://$BUCKET_NAME/client/`
+`gcloud storage cp target/google-cloud-demo-iot-nirvana-client-jar-with-dependencies.jar gs://$BUCKET_NAME/client/`

 Check that the JAR file has been correctly copied in the Google Cloud Storage
 bucket with the following command:

-`gsutil ls gs://$BUCKET_NAME/client/google-cloud-demo-iot-nirvana-client-jar-with-dependencies.jar`
+`gcloud storage ls gs://$BUCKET_NAME/client/google-cloud-demo-iot-nirvana-client-jar-with-dependencies.jar`

 ## AppEngine Web frontend

@@ -136,7 +136,7 @@ from the temperature sensors:
    bootstrapping script
 2. Copy the `startup.sh` file in the Google Cloud Storage bucket by running
    the following command in the `/app-engine` folder:
-   `gsutil cp src/main/webapp/startup.sh gs://$BUCKET_NAME/`
+   `gcloud storage cp src/main/webapp/startup.sh gs://$BUCKET_NAME/`
 3. Modify the `/pom.xml` file in the `/app-engine` folder:
    * Update the `` node with the **[PROJECT_ID]** of your GCP project
    * Update the `` with the desired version of the application
@@ -194,4 +194,3 @@ following:

 To stop the simulation click on the **Stop** button at the bottom right of the
 page `https://[YOUR_PROJECT_ID].appspot.com/index.html`.
-
diff --git a/examples/iot-nirvana/app-engine/src/main/webapp/startup.sh b/examples/iot-nirvana/app-engine/src/main/webapp/startup.sh
index fbeffd95a74..3e44c6424e4 100644
--- a/examples/iot-nirvana/app-engine/src/main/webapp/startup.sh
+++ b/examples/iot-nirvana/app-engine/src/main/webapp/startup.sh
@@ -32,7 +32,7 @@ INDEX_START=$[$INSTANCE_NUMBER*10]

 # Create a temporary folder and copy the client
 echo "Creating temporary folder and downloading the client"
 mkdir ${TMP_FOLDER}
-/usr/bin/gsutil cp \
+/usr/bin/gcloud storage cp \
   gs://${BUCKET_NAME}/client/${CLIENT_JAR} \
   ${TMP_FOLDER}/${CLIENT_JAR} 1>${TMP_FOLDER}/startup_log.txt 2>&1

diff --git a/examples/iot-nirvana/setup_gcp_environment.sh b/examples/iot-nirvana/setup_gcp_environment.sh
index 608181bdb02..0c425c47c05 100755
--- a/examples/iot-nirvana/setup_gcp_environment.sh
+++ b/examples/iot-nirvana/setup_gcp_environment.sh
@@ -67,17 +67,17 @@ echo "Executing gcloud config set project ${PROJECT_ID}"
 gcloud config set project ${PROJECT_ID}

 # create a bucket with the name of the project-id
-echo "Executing gsutil mb gs://${PROJECT_ID}"
-gsutil mb gs://${PROJECT_ID}
+echo "Executing gcloud storage buckets create gs://${PROJECT_ID}"
+gcloud storage buckets create gs://${PROJECT_ID}

 #create DataFlow folders
 touch delete.me
-echo "Executing gsutil cp delete.me gs://${PROJECT_ID}/dataflow/"
-gsutil cp delete.me gs://${PROJECT_ID}/dataflow/
-echo "Executing gsutil cp delete.me gs://${PROJECT_ID}/dataflow/temp/"
-gsutil cp delete.me gs://${PROJECT_ID}/dataflow/temp/
-echo "Executing gsutil cp delete.me gs://${PROJECT_ID}/dataflow/staging/"
-gsutil cp delete.me gs://${PROJECT_ID}/dataflow/staging/
+echo "Executing gcloud storage cp delete.me gs://${PROJECT_ID}/dataflow/"
+gcloud storage cp delete.me gs://${PROJECT_ID}/dataflow/
+echo "Executing gcloud storage cp delete.me gs://${PROJECT_ID}/dataflow/temp/"
+gcloud storage cp delete.me gs://${PROJECT_ID}/dataflow/temp/
+echo "Executing gcloud storage cp delete.me gs://${PROJECT_ID}/dataflow/staging/"
+gcloud storage cp delete.me gs://${PROJECT_ID}/dataflow/staging/

 # create PubSub topic
 echo "Executing gcloud beta pubsub topics create ${PUBSUB_TOPIC}"
@@ -96,8 +96,8 @@ echo "Executing bq --location=US mk --dataset ${PROJECT_ID}:${BIGQUERY_DATASET}"
 bq --location=US mk --dataset ${PROJECT_ID}:${BIGQUERY_DATASET}

 # copy VM startup-script into Google Cloud Storage
-echo "Executing gsutil cp startup_install_java8.sh gs://${PROJECT_ID}"
-gsutil cp startup_install_java8.sh gs://${PROJECT_ID}
+echo "Executing gcloud storage cp startup_install_java8.sh gs://${PROJECT_ID}"
+gcloud storage cp startup_install_java8.sh gs://${PROJECT_ID}

 # generate a temporary VM that will be used to generate custom image
 echo "Executing gcloud compute instances create debian9-java8-img --zone ${ZONE} --image-family debian-9 --image-project debian-cloud --metadata startup-script-url=gs://$1/startup_install_java8.sh"

From 8ee7711b135cdb70298d4a9ae3e6c116daf8e502 Mon Sep 17 00:00:00 2001
From: gurusai-voleti
Date: Wed, 21 Jan 2026 03:12:13 +0530
Subject: [PATCH 6/7] chore: Migrate gsutil usage to gcloud storage (#1718)

Co-authored-by: Andrew Gold <41129777+agold-rh@users.noreply.github.com>
---
 .../kerberized_data_lake/scripts/destroy-generated-secrets.sh | 2 +-
 .../scripts/init-actions/create-users.sh.logic                | 2 +-
 .../scripts/init-actions/export-hadoop-configs.sh             | 2 +-
 .../scripts/init-actions/setup-kerberos-trust.sh.logic        | 4 ++--
 .../scripts/init-actions/setup-users-config.sh.logic          | 2 +-
 .../scripts/shutdown-scripts/shutdown-cleanup-trust.sh.logic  | 2 +-
 .../kerberized_data_lake/scripts/stage-generated-secrets.sh   | 2 +-
 7 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/examples/kerberized_data_lake/scripts/destroy-generated-secrets.sh b/examples/kerberized_data_lake/scripts/destroy-generated-secrets.sh
index a6c14925abf..338e63912d6 100755
--- a/examples/kerberized_data_lake/scripts/destroy-generated-secrets.sh
+++ b/examples/kerberized_data_lake/scripts/destroy-generated-secrets.sh
@@ -39,7 +39,7 @@ function log_and_fail() {

 # GCS copy wrapper
 function g_rm() {
-  CMD="gsutil rm ${1}"
+  CMD="gcloud storage rm ${1}"
   ${CMD} || log_and_fail "Unable to execute ${CMD}"
 }

diff --git a/examples/kerberized_data_lake/scripts/init-actions/create-users.sh.logic b/examples/kerberized_data_lake/scripts/init-actions/create-users.sh.logic
index 2992822015e..4f41efbbf68 100644
--- a/examples/kerberized_data_lake/scripts/init-actions/create-users.sh.logic
+++ b/examples/kerberized_data_lake/scripts/init-actions/create-users.sh.logic
@@ -84,7 +84,7 @@ function log_and_fail() {

 # gcs copy
 function g_cp() {
-  CMD="gsutil cp ${1} ${2}"
+  CMD="gcloud storage cp ${1} ${2}"
   ${CMD} || log_and_fail "Unable to execute ${CMD}"
 }

diff --git a/examples/kerberized_data_lake/scripts/init-actions/export-hadoop-configs.sh b/examples/kerberized_data_lake/scripts/init-actions/export-hadoop-configs.sh
index 387e925138f..3b796e82145 100644
--- a/examples/kerberized_data_lake/scripts/init-actions/export-hadoop-configs.sh
+++ b/examples/kerberized_data_lake/scripts/init-actions/export-hadoop-configs.sh
@@ -49,7 +49,7 @@ function log_and_fail() {

 # gcs copy
 function g_cp_r() {
-  CMD="gsutil -m cp -r ${1} ${2}"
+  CMD="gcloud storage cp --recursive ${1} ${2}"
   ${CMD} || log_and_fail "Unable to execute ${CMD}"
 }

diff --git a/examples/kerberized_data_lake/scripts/init-actions/setup-kerberos-trust.sh.logic b/examples/kerberized_data_lake/scripts/init-actions/setup-kerberos-trust.sh.logic
index 3bfe49c8fbb..d59d6164b15 100644
--- a/examples/kerberized_data_lake/scripts/init-actions/setup-kerberos-trust.sh.logic
+++ b/examples/kerberized_data_lake/scripts/init-actions/setup-kerberos-trust.sh.logic
@@ -49,7 +49,7 @@ function decrypt_file_with_kms_key() {
   local encrypted_file_uri=$1
   local kms_key_uri=$2

-  gsutil cat "${encrypted_file_uri}" | gcloud kms decrypt \
+  gcloud storage cat "${encrypted_file_uri}" | gcloud kms decrypt \
     --key "${kms_key_uri}" \
     --ciphertext-file - --plaintext-file -
 }
@@ -68,7 +68,7 @@ function set_property_hive_site() {
 }

 function g_download() {
-  gsutil cp "$1" "$2" || log_and_fail "Unable to download $1"
+  gcloud storage cp "$1" "$2" || log_and_fail "Unable to download $1"
 }

 # hive config for remote hive metastore
diff --git a/examples/kerberized_data_lake/scripts/init-actions/setup-users-config.sh.logic b/examples/kerberized_data_lake/scripts/init-actions/setup-users-config.sh.logic
index 4514a6f56bc..75dad5e42d5 100644
--- a/examples/kerberized_data_lake/scripts/init-actions/setup-users-config.sh.logic
+++ b/examples/kerberized_data_lake/scripts/init-actions/setup-users-config.sh.logic
@@ -126,7 +126,7 @@ function log_and_fail() {

 # gcs copy
 function g_cp() {
-  CMD="gsutil cp ${1} ${2}"
+  CMD="gcloud storage cp ${1} ${2}"
   ${CMD} || log_and_fail "Unable to execute ${CMD}"
 }

diff --git a/examples/kerberized_data_lake/scripts/shutdown-scripts/shutdown-cleanup-trust.sh.logic b/examples/kerberized_data_lake/scripts/shutdown-scripts/shutdown-cleanup-trust.sh.logic
index 7545ecea512..19fdce0462b 100644
--- a/examples/kerberized_data_lake/scripts/shutdown-scripts/shutdown-cleanup-trust.sh.logic
+++ b/examples/kerberized_data_lake/scripts/shutdown-scripts/shutdown-cleanup-trust.sh.logic
@@ -34,7 +34,7 @@ function log_and_fail() {
 }

 function g_download() {
-  gsutil cp "$1" "$2" || log_and_fail "Unable to download $1"
+  gcloud storage cp "$1" "$2" || log_and_fail "Unable to download $1"
 }

 function set_env_helpers() {
diff --git a/examples/kerberized_data_lake/scripts/stage-generated-secrets.sh b/examples/kerberized_data_lake/scripts/stage-generated-secrets.sh
index a467e235b48..d3f4e6b4021 100755
--- a/examples/kerberized_data_lake/scripts/stage-generated-secrets.sh
+++ b/examples/kerberized_data_lake/scripts/stage-generated-secrets.sh
@@ -65,7 +65,7 @@ function encrypt_to_file_with_kms_key() {

 # GCS copy wrapper
 function g_cp() {
-  CMD="gsutil cp ${1} ${2}"
+  CMD="gcloud storage cp ${1} ${2}"
   ${CMD} || log_and_fail "Unable to execute ${CMD}"
 }

From 84333ac168a95222c2dd9c669974584c72957196 Mon Sep 17 00:00:00 2001
From: gurusai-voleti
Date: Wed, 21 Jan 2026 03:19:10 +0530
Subject: [PATCH 7/7] chore: Migrate gsutil usage to gcloud storage (#1720)

Co-authored-by: Andrew Gold <41129777+agold-rh@users.noreply.github.com>
---
 examples/ml-audio-content-profiling/README.md | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/examples/ml-audio-content-profiling/README.md b/examples/ml-audio-content-profiling/README.md
index 003868ca1d5..5fb81b919bc 100644
--- a/examples/ml-audio-content-profiling/README.md
+++ b/examples/ml-audio-content-profiling/README.md
@@ -21,7 +21,7 @@ your file into an accepted encoding type.
 The solution involves creating five GCS buckets using default configuration settings. Because of
 this, no [object lifecycle management](https://cloud.google.com/storage/docs/lifecycle) policies are
 configured. If you would like to specify different retention policies you can [enable](https://cloud.google.com/storage/docs/managing-lifecycles#enable)
-this using `gsutil` while following the deployment process.
+this using `gcloud storage` while following the deployment process.

 During processing, audio files are moved between buckets as they progress through various stages of
 the pipeline. Specifically, the audio file should first be moved to the
@@ -189,28 +189,28 @@ export STATIC_UUID=$(echo $(uuidgen | tr '[:upper:]' '[:lower:]') | cut -c1-20)

 ````
 export staging_audio_bucket=staging-audio-files-$STATIC_UUID
-gsutil mb gs://$staging_audio_bucket
+gcloud storage buckets create gs://$staging_audio_bucket
 ````

 ````
 export processed_audio_bucket=processed-audio-files-$STATIC_UUID
-gsutil mb gs://$processed_audio_bucket
+gcloud storage buckets create gs://$processed_audio_bucket
 ````

 ````
 export error_audio_bucket=error-audio-files-$STATIC_UUID
-gsutil mb gs://$error_audio_bucket
+gcloud storage buckets create gs://$error_audio_bucket
 ````

 ````
 export transcription_bucket=transcription-files-$STATIC_UUID
-gsutil mb gs://$transcription_bucket
+gcloud storage buckets create gs://$transcription_bucket
 ````

 ````
 export output_bucket=output-files-$STATIC_UUID
-gsutil mb gs://$output_bucket
+gcloud storage buckets create gs://$output_bucket
 ````


@@ -342,7 +342,7 @@ All of the resources should be deployed.

 ### View Results

Test it out

-1. You can start by trying to upload an audio file in GCS. You can do this using `gsutil` or in the
+1. You can start by trying to upload an audio file in GCS. You can do this using `gcloud storage` or in the
 UI under the staging bucket. This will trigger `send_stt_api_function`. This submits the request
 to the Speech API and publishes the job id to PubSub.