From 7623a8f71321cffb44b5e60cb8fc1fb55f910a0c Mon Sep 17 00:00:00 2001
From: Margubur Rahman
Date: Wed, 15 Oct 2025 18:14:24 +0000
Subject: [PATCH 1/5] Migrate gsutil usage to gcloud storage

---
 ...del_garden_huggingface_tei_deployment.ipynb | 15 +++++++--------
 ..._garden_mediapipe_text_classification.ipynb | 16 ++++++----------
 ...torch_deployed_model_reasoning_engine.ipynb | 3 +--
 ...pytorch_llama3_1_qwen3_deployment_tpu.ipynb | 13 +++++--------
 ..._pytorch_sd_2_1_finetuning_dreambooth.ipynb | 13 +++++--------
 ...mage_classification_online_prediction.ipynb | 15 +++++----------
 .../official/generative_ai/tune_peft.ipynb | 18 +++++++-----------
 ...oud_pipeline_components_automl_images.ipynb | 11 ++++-------
 ...ud_pipeline_components_automl_tabular.ipynb | 17 +++++++----------
 .../get_started_with_raw_predict.ipynb | 9 +++------
 10 files changed, 50 insertions(+), 80 deletions(-)

diff --git a/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb b/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb
index 6f9470169..25d15a9dd 100644
--- a/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb
+++ b/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb
@@ -160,11 +160,10 @@
 "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
 " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
 " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
- " ! gsutil mb -l {REGION} {BUCKET_URI}\n",
- "else:\n",
+ " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
- " shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
- " bucket_region = shell_output[0].strip().lower()\n",
+ # Note: The format of the full listing output is different. gcloud storage uses a title case for keys and will not display a field if its value is "None".
+ " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",
@@ -188,8 +187,9 @@
 "\n",
 "\n",
 "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
- "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
- "\n",
+ "# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n",
+ "# Note: gsutil iam ch does not support modifying IAM policies that contain conditions. gcloud storage commands do support conditions.\n",
+ "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n",
 "! gcloud config set project $PROJECT_ID\n",
 "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
 "! 
gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"\n", @@ -507,8 +507,7 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_NAME" - ] + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb b/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb index 220bea22f..42ba35f53 100644 --- a/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb +++ b/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb @@ -157,11 +157,9 @@ "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n", " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n", " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", - " ! gsutil mb -l {REGION} {BUCKET_URI}\n", - "else:\n", + " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", - " shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", - " bucket_region = shell_output[0].strip().lower()\n", + " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", " if bucket_region != REGION:\n", " raise ValueError(\n", " \"Bucket region %s is different from notebook region %s\"\n", @@ -185,8 +183,8 @@ "\n", "\n", "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n", - "\n", + "# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n", "! gcloud config set project $PROJECT_ID\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"\n", @@ -444,8 +442,7 @@ "\n", "# @markdown After finetuning, you can save the Tensorflow Lite model, try it out in the [Text Classification](https://mediapipe-studio.webapps.google.com/demo/text_classifier) demo in MediaPipe Studio or integrate it with your on-device application by following the [Text classification task guide](https://developers.google.com/mediapipe/solutions/text/text_classifier). The exported model contains the generates required model metadata, as well as a classification label file.\n", "\n", - "! gsutil cp $EXPORTED_MODEL_OUTPUT_FILE text_classification_model.tflite" - ] + "! gcloud storage cp $EXPORTED_MODEL_OUTPUT_FILE text_classification_model.tflite" ] }, { "cell_type": "markdown", @@ -470,8 +467,7 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! 
gsutil -m rm -r $BUCKET_NAME\n", - "\n", + " ! gcloud storage rm --recursive $BUCKET_NAME\n", "\n", "# Delete training data and jobs.\n", "if training_job.list(filter=f'display_name=\"{TRAINING_JOB_DISPLAY_NAME}\"'):\n", " training_job.delete()" diff --git a/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb b/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb index 2c1a235f0..06031ebb0 100644 --- a/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb +++ b/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb @@ -587,8 +587,7 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_NAME\n", - "\n", + " ! gcloud storage rm --recursive $BUCKET_NAME\n", "\n", "delete_reasoning_engine = False # @param {type:\"boolean\"}\n", "\n", "if delete_reasoning_engine:\n", diff --git a/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb b/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb index b0ee54320..4a300f52d 100644 --- a/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb +++ b/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb @@ -171,11 +171,9 @@ "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n", " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n", " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", - " ! gsutil mb -l {REGION} {BUCKET_URI}\n", - "else:\n", + " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", - " shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", - " bucket_region = shell_output[0].strip().lower()\n", + " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", " if bucket_region != REGION:\n", " raise ValueError(\n", " \"Bucket region %s is different from notebook region %s\"\n", @@ -199,8 +197,8 @@ "\n", "\n", "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n", - "\n", + "! # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n", "! gcloud config set project $PROJECT_ID\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"" @@ -536,8 +534,7 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_NAME" - ] + " ! 
gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb b/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb index eb37103ce..a412948c5 100644 --- a/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb +++ b/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb @@ -154,11 +154,10 @@ "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n", " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n", " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", - " ! gsutil mb -l {REGION} {BUCKET_URI}\n", - "else:\n", + " ! gcloud storage buckets create --location {REGION} {BUCKET_URI}\n", "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", - " shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", - " bucket_region = shell_output[0].strip().lower()\n", + # Note: The format of the full listing output is different. gcloud storage uses a title case for keys and will not display a field if its value is "None". + " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", " if bucket_region != REGION:\n", " raise ValueError(\n", " \"Bucket region %s is different from notebook region %s\"\n", @@ -182,8 +181,7 @@ "\n", "\n", "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n", - "\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n", "! gcloud config set project $PROJECT_ID\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"\n", @@ -531,8 +529,7 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_NAME" - ] + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], "metadata": { diff --git a/notebooks/official/automl/automl_image_classification_online_prediction.ipynb b/notebooks/official/automl/automl_image_classification_online_prediction.ipynb index ab9bf359d..fe1e0e1a9 100644 --- a/notebooks/official/automl/automl_image_classification_online_prediction.ipynb +++ b/notebooks/official/automl/automl_image_classification_online_prediction.ipynb @@ -280,8 +280,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $LOCATION -p $PROJECT_ID $BUCKET_URI" - ] + "! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI" ] }, { "cell_type": "markdown", @@ -387,12 +386,10 @@ "else:\n", " FILE = IMPORT_FILE\n", "\n", - "count = ! gsutil cat $FILE | wc -l\n", - "print(\"Number of Examples\", int(count[0]))\n", + "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gsutil cat $FILE | head" - ] + "! 
gcloud storage cat $FILE | head" ] }, { "cell_type": "markdown", @@ -597,8 +594,7 @@ }, "outputs": [], "source": [ - "test_item = !gsutil cat $IMPORT_FILE | head -n1\n", - "if len(str(test_item[0]).split(\",\")) == 3:\n", + "test_item = !gcloud storage cat $IMPORT_FILE | head -n1\n", "if len(str(test_item[0]).split(\",\")) == 3:\n", " _, test_item, test_label = str(test_item[0]).split(\",\")\n", "else:\n", " test_item, test_label = str(test_item[0]).split(\",\")\n", @@ -718,8 +714,7 @@ "# Delete Cloud Storage objects that were created\n", "delete_bucket = False # Set True for deletion\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_URI" - ] + " ! gcloud storage rm --recursive $BUCKET_URI" ] } ], "metadata": { diff --git a/notebooks/official/generative_ai/tune_peft.ipynb b/notebooks/official/generative_ai/tune_peft.ipynb index cb1b9ef62..6817a4646 100644 --- a/notebooks/official/generative_ai/tune_peft.ipynb +++ b/notebooks/official/generative_ai/tune_peft.ipynb @@ -286,8 +286,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}" - ] + "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ] }, { "cell_type": "markdown", @@ -359,10 +358,10 @@ }, "outputs": [], "source": [ - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n", - "\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI" - ] + "! # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", + # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop. + ! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer ] }, { "cell_type": "markdown", @@ -464,9 +463,7 @@ "outputs": [], "source": [ "# Download dataset\n", - "! gsutil cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_eval_sample.jsonl {BUCKET_URI}/peft_eval_sample.jsonl\n", - "! gsutil cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_train_sample.jsonl {BUCKET_URI}/peft_train_sample.jsonl" - ] + "! gcloud storage cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_eval_sample.jsonl {BUCKET_URI}/peft_eval_sample.jsonl\n", "! gcloud storage cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_train_sample.jsonl {BUCKET_URI}/peft_train_sample.jsonl" ] }, { "cell_type": "code", @@ -662,8 +659,7 @@ "# Delete bucket\n", "delete_bucket = True\n", "if delete_bucket:\n", - " ! gsutil rm -rf {BUCKET_URI}" - ] + " ! 
gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ] } ], "metadata": { diff --git a/notebooks/official/pipelines/google_cloud_pipeline_components_automl_images.ipynb b/notebooks/official/pipelines/google_cloud_pipeline_components_automl_images.ipynb index ecf72bdc8..a1e3bc8f4 100644 --- a/notebooks/official/pipelines/google_cloud_pipeline_components_automl_images.ipynb +++ b/notebooks/official/pipelines/google_cloud_pipeline_components_automl_images.ipynb @@ -330,8 +330,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {LOCATION} {BUCKET_URI}" - ] + "! gcloud storage buckets create --location={LOCATION} {BUCKET_URI}" ] }, { "cell_type": "markdown", @@ -403,10 +402,9 @@ }, "outputs": [], "source": [ - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n", + "# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI" - ] + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ] }, { "cell_type": "markdown", @@ -707,8 +705,7 @@ "delete_bucket = False\n", "\n", "if delete_bucket:\n", - " ! gsutil rm -r $BUCKET_URI" - ] + " ! gcloud storage rm --recursive $BUCKET_URI" ] } ], "metadata": { diff --git a/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb b/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb index 88a5ea398..245dc5336 100644 --- a/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb +++ b/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb @@ -325,8 +325,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}" - ] + "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ] }, { "cell_type": "markdown", @@ -399,10 +398,10 @@ }, "outputs": [], "source": [ - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n", - "\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI" - ] + # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop. + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", + # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop. + ! 
gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer ] }, { "cell_type": "markdown", @@ -492,8 +491,7 @@ "outputs": [], "source": [ "TRAIN_FILE_NAME = \"california_housing_train.csv\"\n", - "! gsutil cp gs://cloud-samples-data/vertex-ai/pipeline-deployment/datasets/california_housing/california_housing_train.csv {PIPELINE_ROOT}/data/\n", - "\n", + "! gcloud storage cp gs://cloud-samples-data/vertex-ai/pipeline-deployment/datasets/california_housing/california_housing_train.csv {PIPELINE_ROOT}/data/\n", "\n", "gcs_csv_path = f\"{PIPELINE_ROOT}/data/{TRAIN_FILE_NAME}\"\n", "\n", "\n", @@ -682,8 +680,7 @@ "\n", "\n", "if delete_bucket:\n", - " ! gsutil rm -r $BUCKET_URI" - ] + " ! gcloud storage rm --recursive $BUCKET_URI" ] } ], "metadata": { diff --git a/notebooks/official/prediction/get_started_with_raw_predict.ipynb b/notebooks/official/prediction/get_started_with_raw_predict.ipynb index 5667ef3b2..2f53a5453 100644 --- a/notebooks/official/prediction/get_started_with_raw_predict.ipynb +++ b/notebooks/official/prediction/get_started_with_raw_predict.ipynb @@ -286,8 +286,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}" - ] + "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ] }, { "cell_type": "markdown", @@ -439,8 +438,7 @@ "source": [ "MODEL_DIR = BUCKET_URI + \"/model\"\n", "\n", - "! gsutil cp -r gs://cloud-samples-data/vertex-ai/google-cloud-aiplatform-ci-artifacts/models/penguins/estimator/ {MODEL_DIR}" - ] + "! gcloud storage cp --recursive gs://cloud-samples-data/vertex-ai/google-cloud-aiplatform-ci-artifacts/models/penguins/estimator/ {MODEL_DIR}" ] }, { "cell_type": "markdown", @@ -681,8 +679,7 @@ " print(e)\n", "\n", "if delete_bucket:\n", - " ! gsutil rm -rf {BUCKET_URI}" - ] + " ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ] } ], "metadata": { From 4eadf3fe35457377dadc1f87a77984dc3b6ad81a Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Wed, 10 Dec 2025 14:22:34 +0000 Subject: [PATCH 2/5] changes for 4297 --- ...el_garden_huggingface_tei_deployment.ipynb | 15 ++++++++------- ...garden_mediapipe_text_classification.ipynb | 16 ++++++++++------ ...ytorch_llama3_1_qwen3_deployment_tpu.ipynb | 13 ++++++++----- ...pytorch_sd_2_1_finetuning_dreambooth.ipynb | 13 ++++++++----- .../official/generative_ai/tune_peft.ipynb | 19 ++++++++++++------- ...ud_pipeline_components_automl_images.ipynb | 11 +++++++---- ...d_pipeline_components_automl_tabular.ipynb | 4 +--- 7 files changed, 54 insertions(+), 37 deletions(-) diff --git a/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb b/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb index 25d15a9dd..4730b8290 100644 --- a/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb +++ b/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb @@ -160,10 +160,11 @@ "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n", " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n", " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", - " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n", + " ! 
gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", + "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", - # Note: The format of the full listing output is different. gcloud storage uses a title case for keys and will not display a field if its value is "None". - " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", + " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n", + " bucket_region = shell_output[0].strip().lower()\n", " if bucket_region != REGION:\n", " raise ValueError(\n", " \"Bucket region %s is different from notebook region %s\"\n", @@ -187,9 +188,8 @@ "\n", "\n", "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n", - "# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", - "# Note: gsutil iam ch does not support modifying IAM policies that contain conditions. gcloud storage commands do support conditions.\n", - "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", + "\n", "! gcloud config set project $PROJECT_ID\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"\n", @@ -507,7 +507,8 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive $BUCKET_NAME" ] + " ! gcloud storage rm --recursive $BUCKET_NAME" + ] } ], "metadata": { diff --git a/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb b/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb index 42ba35f53..87b006572 100644 --- a/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb +++ b/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb @@ -157,9 +157,11 @@ "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n", " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n", " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", - " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n", + " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", + "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", - " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", + " shell_output = ! 
gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n", + " bucket_region = shell_output[0].strip().lower()\n", " if bucket_region != REGION:\n", " raise ValueError(\n", " \"Bucket region %s is different from notebook region %s\"\n", @@ -183,8 +185,8 @@ "\n", "\n", "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n", - "# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", - "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", + "\n", "! gcloud config set project $PROJECT_ID\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"\n", @@ -442,7 +444,8 @@ "\n", "# @markdown After finetuning, you can save the Tensorflow Lite model, try it out in the [Text Classification](https://mediapipe-studio.webapps.google.com/demo/text_classifier) demo in MediaPipe Studio or integrate it with your on-device application by following the [Text classification task guide](https://developers.google.com/mediapipe/solutions/text/text_classifier). The exported model contains the generates required model metadata, as well as a classification label file.\n", "\n", - "! gcloud storage cp $EXPORTED_MODEL_OUTPUT_FILE text_classification_model.tflite" ] + "! gcloud storage cp $EXPORTED_MODEL_OUTPUT_FILE text_classification_model.tflite" + ] }, { "cell_type": "markdown", @@ -467,7 +470,8 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive $BUCKET_NAME\n", "\n", + " ! gcloud storage rm --recursive $BUCKET_NAME\n", + "\n", "# Delete training data and jobs.\n", "if training_job.list(filter=f'display_name=\"{TRAINING_JOB_DISPLAY_NAME}\"'):\n", " training_job.delete()" diff --git a/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb b/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb index 4a300f52d..c4a0a7627 100644 --- a/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb +++ b/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb @@ -171,9 +171,11 @@ "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n", " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n", " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", - " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n", + " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", + "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", - " shell_output = ! 
gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", + " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n", + " bucket_region = shell_output[0].strip().lower()\n", " if bucket_region != REGION:\n", " raise ValueError(\n", " \"Bucket region %s is different from notebook region %s\"\n", @@ -197,8 +199,8 @@ "\n", "\n", "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n", - "! # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", - "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", + "\n", "! gcloud config set project $PROJECT_ID\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"" @@ -534,7 +536,8 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive $BUCKET_NAME" ] + " ! gcloud storage rm --recursive $BUCKET_NAME" + ] } ], "metadata": { diff --git a/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb b/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb index a412948c5..d1b68fb85 100644 --- a/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb +++ b/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb @@ -154,10 +154,11 @@ "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n", " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n", " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", - " ! gcloud storage buckets create --location {REGION} {BUCKET_URI}\n", "else:\n", + " ! gcloud storage buckets create --location {REGION} {BUCKET_URI}\n", + "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", - # Note: The format of the full listing output is different. gcloud storage uses a title case for keys and will not display a field if its value is "None". - " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", + " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n", + " bucket_region = shell_output[0].strip().lower()\n", " if bucket_region != REGION:\n", " raise ValueError(\n", " \"Bucket region %s is different from notebook region %s\"\n", @@ -181,7 +182,8 @@ "\n", "\n", "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n", - "! 
gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", + "\n", "! gcloud config set project $PROJECT_ID\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"\n", @@ -529,7 +531,8 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive $BUCKET_NAME" ] + " ! gcloud storage rm --recursive $BUCKET_NAME" + ] } ], "metadata": { diff --git a/notebooks/official/generative_ai/tune_peft.ipynb b/notebooks/official/generative_ai/tune_peft.ipynb index 6817a4646..f7b467458 100644 --- a/notebooks/official/generative_ai/tune_peft.ipynb +++ b/notebooks/official/generative_ai/tune_peft.ipynb @@ -286,7 +286,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ] + "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" + ] }, { "cell_type": "markdown", @@ -358,10 +359,11 @@ }, "outputs": [], "source": [ - "! # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", - "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", - # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop. - ! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer ] + "\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", + "\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer\n" + ] }, { "cell_type": "markdown", @@ -463,7 +465,9 @@ "outputs": [], "source": [ "# Download dataset\n", - "! gcloud storage cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_eval_sample.jsonl {BUCKET_URI}/peft_eval_sample.jsonl\n", "! gcloud storage cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_train_sample.jsonl {BUCKET_URI}/peft_train_sample.jsonl" ] + "! gcloud storage cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_eval_sample.jsonl {BUCKET_URI}/peft_eval_sample.jsonl\n", + "! gcloud storage cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_train_sample.jsonl {BUCKET_URI}/peft_train_sample.jsonl" + ] }, { "cell_type": "code", @@ -659,7 +663,8 @@ "# Delete bucket\n", "delete_bucket = True\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ] + " ! 
gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" + ] } ], "metadata": { diff --git a/notebooks/official/pipelines/google_cloud_pipeline_components_automl_images.ipynb b/notebooks/official/pipelines/google_cloud_pipeline_components_automl_images.ipynb index a1e3bc8f4..32b118c40 100644 --- a/notebooks/official/pipelines/google_cloud_pipeline_components_automl_images.ipynb +++ b/notebooks/official/pipelines/google_cloud_pipeline_components_automl_images.ipynb @@ -330,7 +330,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location={LOCATION} {BUCKET_URI}" ] + "! gcloud storage buckets create --location={LOCATION} {BUCKET_URI}" + ] }, { "cell_type": "markdown", @@ -402,9 +403,10 @@ }, "outputs": [], "source": [ - "# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", - "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ] + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" + ] }, { "cell_type": "markdown", @@ -705,7 +707,8 @@ "delete_bucket = False\n", "\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive $BUCKET_URI" ] + " ! gcloud storage rm --recursive $BUCKET_URI" + ] } ], "metadata": { diff --git a/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb b/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb index 245dc5336..6c67c3649 100644 --- a/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb +++ b/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb @@ -398,10 +398,8 @@ }, "outputs": [], "source": [ - # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop. "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", - # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop. - ! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer ] + "! 
gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer\n" ] }, { "cell_type": "markdown", From 79d489efcacbdafbe744ee8b0f0227bd19ebf943 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Tue, 16 Dec 2025 09:35:54 +0000 Subject: [PATCH 3/5] Apply automated linter fixes --- .../model_garden_huggingface_tei_deployment.ipynb | 4 +++- ...del_garden_mediapipe_text_classification.ipynb | 12 +++++++++--- ..._pytorch_deployed_model_reasoning_engine.ipynb | 3 ++- ...l_image_classification_online_prediction.ipynb | 15 ++++++++++----- notebooks/official/generative_ai/tune_peft.ipynb | 1 - ...cloud_pipeline_components_automl_tabular.ipynb | 15 ++++++++++----- .../prediction/get_started_with_raw_predict.ipynb | 9 ++++++--- 7 files changed, 40 insertions(+), 19 deletions(-) diff --git a/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb b/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb index 4730b8290..251b84748 100644 --- a/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb +++ b/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb @@ -211,7 +211,9 @@ "\n", "# @markdown Set Hugging Face model id and deployment configs.\n", "\n", - "HUGGING_FACE_MODEL_ID = \"Qwen/Qwen3-Embedding-8B\" # @param {type: \"string\", isTemplate: true}\n", + "HUGGING_FACE_MODEL_ID = (\n", + " \"Qwen/Qwen3-Embedding-8B\" # @param {type: \"string\", isTemplate: true}\n", + ")\n", "\n", "# The pre-built serving docker images for TEI.\n", "TEI_DOCKER_URI = \"us-docker.pkg.dev/deeplearning-platform-release/vertex-model-garden/hf-tei.cu125.0-1.ubuntu2204.py310:model-garden.hf-tei-0-1-release_20251003.00_p0\"\n", diff --git a/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb b/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb index 87b006572..a8025a4a4 100644 --- a/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb +++ b/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb @@ -223,9 +223,13 @@ "\n", "# @markdown The SST-2 dataset is stored as a TSV file. The only difference between the TSV and CSV formats is that TSV uses a tab `\\t` character as its delimiter and CSV uses a comma `,`.\n", "\n", - "training_data_path = \"gs://mediapipe-tasks/text_classifier/SST-2/train.tsv\" # @param {type:\"string\"}\n", + "training_data_path = (\n", + " \"gs://mediapipe-tasks/text_classifier/SST-2/train.tsv\" # @param {type:\"string\"}\n", + ")\n", "\n", - "validation_data_path = \"gs://mediapipe-tasks/text_classifier/SST-2/dev.tsv\" # @param {type:\"string\"}\n", + "validation_data_path = (\n", + " \"gs://mediapipe-tasks/text_classifier/SST-2/dev.tsv\" # @param {type:\"string\"}\n", + ")\n", "\n", "# The delimiter used in the dataset.\n", "delimiter = \"\\t\" # @param {type:\"string\"}\n", @@ -263,7 +267,9 @@ "\n", "# @markdown To set the model architecture and other training parameters, adjust the below values:\n", "\n", - "model_architecture = \"average_word_embedding\" # @param [\"average_word_embedding\", \"mobilebert\"]\n", + "model_architecture = (\n", + " \"average_word_embedding\" # @param [\"average_word_embedding\", \"mobilebert\"]\n", + ")\n", "\n", "# The learning rate to use for gradient descent-based\n", "# optimizers. 
Defaults to 3e-5 for the BERT-based classifier\n", diff --git a/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb b/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb index 06031ebb0..5b1f332c4 100644 --- a/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb +++ b/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb @@ -587,7 +587,8 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive $BUCKET_NAME\n", "\n", + " ! gcloud storage rm --recursive $BUCKET_NAME\n", + "\n", "delete_reasoning_engine = False # @param {type:\"boolean\"}\n", "\n", "if delete_reasoning_engine:\n", diff --git a/notebooks/official/automl/automl_image_classification_online_prediction.ipynb b/notebooks/official/automl/automl_image_classification_online_prediction.ipynb index fe1e0e1a9..5aca9847c 100644 --- a/notebooks/official/automl/automl_image_classification_online_prediction.ipynb +++ b/notebooks/official/automl/automl_image_classification_online_prediction.ipynb @@ -280,7 +280,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI" ] + "! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI" + ] }, { "cell_type": "markdown", @@ -386,10 +387,12 @@ "else:\n", " FILE = IMPORT_FILE\n", "\n", - "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", + "count = ! gcloud storage cat $FILE | wc -l\n", + "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gcloud storage cat $FILE | head" ] + "! gcloud storage cat $FILE | head" + ] }, { "cell_type": "markdown", @@ -594,7 +597,8 @@ }, "outputs": [], "source": [ - "test_item = !gcloud storage cat $IMPORT_FILE | head -n1\n", "if len(str(test_item[0]).split(\",\")) == 3:\n", + "test_item = !gcloud storage cat $IMPORT_FILE | head -n1\n", + "if len(str(test_item[0]).split(\",\")) == 3:\n", " _, test_item, test_label = str(test_item[0]).split(\",\")\n", "else:\n", " test_item, test_label = str(test_item[0]).split(\",\")\n", @@ -714,7 +718,8 @@ "# Delete Cloud Storage objects that were created\n", "delete_bucket = False # Set True for deletion\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive $BUCKET_URI" ] + " ! gcloud storage rm --recursive $BUCKET_URI" + ] } ], "metadata": { diff --git a/notebooks/official/generative_ai/tune_peft.ipynb b/notebooks/official/generative_ai/tune_peft.ipynb index f7b467458..a4b03f5fb 100644 --- a/notebooks/official/generative_ai/tune_peft.ipynb +++ b/notebooks/official/generative_ai/tune_peft.ipynb @@ -359,7 +359,6 @@ }, "outputs": [], "source": [ - "\n", "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", "! 
gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer\n" diff --git a/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb b/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb index 6c67c3649..997346628 100644 --- a/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb +++ b/notebooks/official/pipelines/google_cloud_pipeline_components_automl_tabular.ipynb @@ -325,7 +325,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ] + "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" + ] }, { "cell_type": "markdown", @@ -398,8 +399,10 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", - "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer\n" ] + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", + "\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer\n" + ] }, { "cell_type": "markdown", @@ -489,7 +492,8 @@ "outputs": [], "source": [ "TRAIN_FILE_NAME = \"california_housing_train.csv\"\n", - "! gcloud storage cp gs://cloud-samples-data/vertex-ai/pipeline-deployment/datasets/california_housing/california_housing_train.csv {PIPELINE_ROOT}/data/\n", "\n", + "! gcloud storage cp gs://cloud-samples-data/vertex-ai/pipeline-deployment/datasets/california_housing/california_housing_train.csv {PIPELINE_ROOT}/data/\n", + "\n", "gcs_csv_path = f\"{PIPELINE_ROOT}/data/{TRAIN_FILE_NAME}\"\n", "\n", "\n", @@ -678,7 +682,8 @@ "\n", "\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive $BUCKET_URI" ] + " ! gcloud storage rm --recursive $BUCKET_URI" + ] } ], "metadata": { diff --git a/notebooks/official/prediction/get_started_with_raw_predict.ipynb b/notebooks/official/prediction/get_started_with_raw_predict.ipynb index 2f53a5453..141d46907 100644 --- a/notebooks/official/prediction/get_started_with_raw_predict.ipynb +++ b/notebooks/official/prediction/get_started_with_raw_predict.ipynb @@ -286,7 +286,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ] + "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" + ] }, { "cell_type": "markdown", @@ -438,7 +439,8 @@ "source": [ "MODEL_DIR = BUCKET_URI + \"/model\"\n", "\n", - "! gcloud storage cp --recursive gs://cloud-samples-data/vertex-ai/google-cloud-aiplatform-ci-artifacts/models/penguins/estimator/ {MODEL_DIR}" ] + "! gcloud storage cp --recursive gs://cloud-samples-data/vertex-ai/google-cloud-aiplatform-ci-artifacts/models/penguins/estimator/ {MODEL_DIR}" + ] }, { "cell_type": "markdown", @@ -679,7 +681,8 @@ " print(e)\n", "\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ] + " ! 
gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
+   ]
   }
  ],
  "metadata": {

From d3aaf6ff1650baf333ff13caa79e334b783e3cba Mon Sep 17 00:00:00 2001
From: gurusai-voleti
Date: Fri, 19 Dec 2025 16:36:38 +0000
Subject: [PATCH 4/5] removed changes from model_garden

---
 ...el_garden_huggingface_tei_deployment.ipynb | 12 +++++-----
 ...garden_mediapipe_text_classification.ipynb | 22 +++++++------------
 ...orch_deployed_model_reasoning_engine.ipynb | 3 +--
 ...ytorch_llama3_1_qwen3_deployment_tpu.ipynb | 8 +++----
 ...pytorch_sd_2_1_finetuning_dreambooth.ipynb | 8 +++----
 5 files changed, 22 insertions(+), 31 deletions(-)

diff --git a/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb b/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb
index 251b84748..6f9470169 100644
--- a/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb
+++ b/notebooks/community/model_garden/model_garden_huggingface_tei_deployment.ipynb
@@ -160,10 +160,10 @@
     "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
     "    BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
     "    BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
-    "    ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n",
+    "    ! gsutil mb -l {REGION} {BUCKET_URI}\n",
     "else:\n",
     "    assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-    "    shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
+    "    shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
     "    bucket_region = shell_output[0].strip().lower()\n",
     "    if bucket_region != REGION:\n",
     "        raise ValueError(\n",
@@ -188,7 +188,7 @@
     "\n",
     "\n",
     "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
-    "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
+    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
     "\n",
     "! gcloud config set project $PROJECT_ID\n",
     "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
@@ -211,9 +211,7 @@
     "\n",
     "# @markdown Set Hugging Face model id and deployment configs.\n",
     "\n",
-    "HUGGING_FACE_MODEL_ID = (\n",
-    "    \"Qwen/Qwen3-Embedding-8B\" # @param {type: \"string\", isTemplate: true}\n",
-    ")\n",
+    "HUGGING_FACE_MODEL_ID = \"Qwen/Qwen3-Embedding-8B\" # @param {type: \"string\", isTemplate: true}\n",
     "\n",
     "# The pre-built serving docker images for TEI.\n",
     "TEI_DOCKER_URI = \"us-docker.pkg.dev/deeplearning-platform-release/vertex-model-garden/hf-tei.cu125.0-1.ubuntu2204.py310:model-garden.hf-tei-0-1-release_20251003.00_p0\"\n",
@@ -509,7 +507,7 @@
     "\n",
     "delete_bucket = False # @param {type:\"boolean\"}\n",
     "if delete_bucket:\n",
-    "    ! gcloud storage rm --recursive $BUCKET_NAME"
+    "    ! gsutil -m rm -r $BUCKET_NAME"
    ]
   }
  ],
diff --git a/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb b/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb
index a8025a4a4..acbf3a066 100644
--- a/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb
+++ b/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb
@@ -157,10 +157,10 @@
     "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
     "    BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
     "    BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
-    "    ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n",
+    "    ! gsutil mb -l {REGION} {BUCKET_URI}\n",
     "else:\n",
     "    assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-    "    shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
+    "    shell_output = ! gsutil ls -Lb | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
     "    bucket_region = shell_output[0].strip().lower()\n",
     "    if bucket_region != REGION:\n",
     "        raise ValueError(\n",
@@ -185,7 +185,7 @@
     "\n",
     "\n",
     "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
-    "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
+    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
     "\n",
     "! gcloud config set project $PROJECT_ID\n",
     "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
@@ -223,13 +223,9 @@
     "\n",
     "# @markdown The SST-2 dataset is stored as a TSV file. The only difference between the TSV and CSV formats is that TSV uses a tab `\t` character as its delimiter and CSV uses a comma `,`.\n",
     "\n",
-    "training_data_path = (\n",
-    "    \"gs://mediapipe-tasks/text_classifier/SST-2/train.tsv\" # @param {type:\"string\"}\n",
-    ")\n",
+    "training_data_path = \"gs://mediapipe-tasks/text_classifier/SST-2/train.tsv\" # @param {type:\"string\"}\n",
     "\n",
-    "validation_data_path = (\n",
-    "    \"gs://mediapipe-tasks/text_classifier/SST-2/dev.tsv\" # @param {type:\"string\"}\n",
-    ")\n",
+    "validation_data_path = \"gs://mediapipe-tasks/text_classifier/SST-2/dev.tsv\" # @param {type:\"string\"}\n",
     "\n",
     "# The delimiter used in the dataset.\n",
     "delimiter = \"\\t\" # @param {type:\"string\"}\n",
@@ -267,9 +263,7 @@
     "\n",
     "# @markdown To set the model architecture and other training parameters, adjust the below values:\n",
     "\n",
-    "model_architecture = (\n",
-    "    \"average_word_embedding\" # @param [\"average_word_embedding\", \"mobilebert\"]\n",
-    ")\n",
+    "model_architecture = \"average_word_embedding\" # @param [\"average_word_embedding\", \"mobilebert\"]\n",
     "\n",
     "# The learning rate to use for gradient descent-based\n",
     "# optimizers. Defaults to 3e-5 for the BERT-based classifier\n",
@@ -450,7 +444,7 @@
     "\n",
     "# @markdown After finetuning, you can save the Tensorflow Lite model, try it out in the [Text Classification](https://mediapipe-studio.webapps.google.com/demo/text_classifier) demo in MediaPipe Studio or integrate it with your on-device application by following the [Text classification task guide](https://developers.google.com/mediapipe/solutions/text/text_classifier). The exported model contains the generates required model metadata, as well as a classification label file.\n",
     "\n",
-    "! gcloud storage cp $EXPORTED_MODEL_OUTPUT_FILE text_classification_model.tflite"
+    "! gsutil cp $EXPORTED_MODEL_OUTPUT_FILE text_classification_model.tflite"
    ]
   },
   {
@@ -476,7 +470,7 @@
     "\n",
     "delete_bucket = False # @param {type:\"boolean\"}\n",
     "if delete_bucket:\n",
-    "    ! gcloud storage rm --recursive $BUCKET_NAME\n",
+    "    ! gsutil -m rm -r $BUCKET_NAME\n",
     "\n",
     "# Delete training data and jobs.\n",
     "if training_job.list(filter=f'display_name=\"{TRAINING_JOB_DISPLAY_NAME}\"'):\n",
diff --git a/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb b/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb
index 5b1f332c4..bfaa63eb2 100644
--- a/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb
+++ b/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb
@@ -93,7 +93,6 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "language": "python",
    "metadata": {
     "cellView": "form",
     "id": "YXFGIp1l-qtT"
@@ -587,7 +586,7 @@
     "\n",
     "delete_bucket = False # @param {type:\"boolean\"}\n",
     "if delete_bucket:\n",
-    "    ! gcloud storage rm --recursive $BUCKET_NAME\n",
+    "    ! gsutil -m rm -r $BUCKET_NAME\n",
     "\n",
     "delete_reasoning_engine = False # @param {type:\"boolean\"}\n",
     "\n",
diff --git a/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb b/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb
index c4a0a7627..b0ee54320 100644
--- a/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb
+++ b/notebooks/community/model_garden/model_garden_pytorch_llama3_1_qwen3_deployment_tpu.ipynb
@@ -171,10 +171,10 @@
     "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
     "    BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
     "    BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
-    "    ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n",
+    "    ! gsutil mb -l {REGION} {BUCKET_URI}\n",
     "else:\n",
     "    assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-    "    shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
+    "    shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
     "    bucket_region = shell_output[0].strip().lower()\n",
     "    if bucket_region != REGION:\n",
     "        raise ValueError(\n",
@@ -199,7 +199,7 @@
     "\n",
     "\n",
     "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
-    "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
+    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
     "\n",
     "! gcloud config set project $PROJECT_ID\n",
     "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
@@ -536,7 +536,7 @@
     "\n",
     "delete_bucket = False # @param {type:\"boolean\"}\n",
     "if delete_bucket:\n",
-    "    ! gcloud storage rm --recursive $BUCKET_NAME"
+    "    ! gsutil -m rm -r $BUCKET_NAME"
    ]
   }
  ],
diff --git a/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb b/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb
index d1b68fb85..eb37103ce 100644
--- a/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb
+++ b/notebooks/community/model_garden/model_garden_pytorch_sd_2_1_finetuning_dreambooth.ipynb
@@ -154,10 +154,10 @@
     "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
     "    BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
     "    BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
-    "    ! gcloud storage buckets create --location {REGION} {BUCKET_URI}\n",
+    "    ! gsutil mb -l {REGION} {BUCKET_URI}\n",
     "else:\n",
     "    assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-    "    shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
+    "    shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
     "    bucket_region = shell_output[0].strip().lower()\n",
     "    if bucket_region != REGION:\n",
     "        raise ValueError(\n",
@@ -182,7 +182,7 @@
     "\n",
     "\n",
     "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
-    "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
+    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
     "\n",
     "! gcloud config set project $PROJECT_ID\n",
     "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
@@ -531,7 +531,7 @@
     "\n",
     "delete_bucket = False # @param {type:\"boolean\"}\n",
     "if delete_bucket:\n",
-    "    ! gcloud storage rm --recursive $BUCKET_NAME"
+    "    ! gsutil -m rm -r $BUCKET_NAME"
    ]
   }
  ],

From 0dcff2f9453d6803134063b10b991b2c3ccc4530 Mon Sep 17 00:00:00 2001
From: gurusai-voleti
Date: Fri, 19 Dec 2025 16:39:39 +0000
Subject: [PATCH 5/5] removed changes from model_garden

---
 .../model_garden_mediapipe_text_classification.ipynb | 2 +-
 .../model_garden_pytorch_deployed_model_reasoning_engine.ipynb | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb b/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb
index acbf3a066..220bea22f 100644
--- a/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb
+++ b/notebooks/community/model_garden/model_garden_mediapipe_text_classification.ipynb
@@ -160,7 +160,7 @@
     "    ! gsutil mb -l {REGION} {BUCKET_URI}\n",
     "else:\n",
     "    assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-    "    shell_output = ! gsutil ls -Lb | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
+    "    shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
     "    bucket_region = shell_output[0].strip().lower()\n",
     "    if bucket_region != REGION:\n",
     "        raise ValueError(\n",
diff --git a/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb b/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb
index bfaa63eb2..2c1a235f0 100644
--- a/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb
+++ b/notebooks/community/model_garden/model_garden_pytorch_deployed_model_reasoning_engine.ipynb
@@ -93,6 +93,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
+   "language": "python",
    "metadata": {
    "cellView": "form",
    "id": "YXFGIp1l-qtT"