From d84ae4f7a2d25f55ec1e732984f037cc60df0d6d Mon Sep 17 00:00:00 2001
From: Margubur Rahman
Date: Fri, 17 Oct 2025 19:34:03 +0000
Subject: [PATCH 01/20] Migrate gsutil usage to gcloud storage

---
 ..._agents_bandits_movie_recommendation.ipynb | 12 +++----
 ...se_automl_image_classification_batch.ipynb | 34 ++++++-------------
 ..._automl_image_object_detection_batch.ipynb | 34 ++++++-------------
 ...abular_classification_online_explain.ipynb | 12 +++----
 ..._automl_text_entity_extraction_batch.ipynb | 27 +++++----------
 ...tured data with Vertex AI Regression.ipynb | 31 ++++++-----------
 ... with Vertex AI Video Classification.ipynb | 33 ++++++------------
 ...e with Vertex AI Text Classification.ipynb | 31 ++++++-----------
 ..._fine_tuning_batch_deployment_on_rov.ipynb | 15 +++-----
 ...model_garden_vllm_text_only_tutorial.ipynb | 12 +++----
 10 files changed, 79 insertions(+), 162 deletions(-)

diff --git a/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb b/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb
index 82638488f..027d9c6ca 100644
--- a/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb
+++ b/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb
@@ -472,8 +472,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -492,8 +491,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -565,8 +563,7 @@
 "outputs": [],
 "source": [
 "# Copy the sample data into your DATA_PATH\n",
- "! gsutil cp \"gs://cloud-samples-data/vertex-ai/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/u.data\" $DATA_PATH"
- ]
+ "! gcloud storage cp \"gs://cloud-samples-data/vertex-ai/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/u.data\" $DATA_PATH" ]
 },
 {
 "cell_type": "code",
@@ -1784,8 +1781,7 @@
 "! gcloud ai models delete $model.name --quiet\n",
 "\n",
 "# Delete Cloud Storage objects that were created\n",
- "! gsutil -m rm -r $ARTIFACTS_DIR"
- ]
+ "! gcloud storage rm --recursive $ARTIFACTS_DIR" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
index 747b149c0..a25e4b9e9 100644
--- a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
@@ -421,8 +421,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -441,8 +440,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -886,12 +884,10 @@
 "else:\n",
 " FILE = IMPORT_FILE\n",
 "\n",
- "count = ! gsutil cat $FILE | wc -l\n",
- "print(\"Number of Examples\", int(count[0]))\n",
+ "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
- "! gsutil cat $FILE | head"
- ]
+ "! gcloud storage cat $FILE | head" ]
 },
 {
 "cell_type": "markdown",
@@ -1329,8 +1325,7 @@
 },
 "outputs": [],
 "source": [
- "test_items = !gsutil cat $IMPORT_FILE | head -n2\n",
- "if len(str(test_items[0]).split(\",\")) == 3:\n",
+ "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", "if len(str(test_items[0]).split(\",\")) == 3:\n",
 " _, test_item_1, test_label_1 = str(test_items[0]).split(\",\")\n",
 " _, test_item_2, test_label_2 = str(test_items[1]).split(\",\")\n",
 "else:\n",
@@ -1363,9 +1358,7 @@
 "file_1 = test_item_1.split(\"/\")[-1]\n",
 "file_2 = test_item_2.split(\"/\")[-1]\n",
 "\n",
- "! gsutil cp $test_item_1 $BUCKET_NAME/$file_1\n",
- "! gsutil cp $test_item_2 $BUCKET_NAME/$file_2\n",
- "\n",
+ "! gcloud storage cp $test_item_1 $BUCKET_NAME/$file_1\n", "! gcloud storage cp $test_item_2 $BUCKET_NAME/$file_2\n", "\n",
 "test_item_1 = BUCKET_NAME + \"/\" + file_1\n",
 "test_item_2 = BUCKET_NAME + \"/\" + file_2"
 ]
@@ -1408,8 +1401,7 @@
 " f.write(json.dumps(data) + \"\\n\")\n",
 "\n",
 "print(gcs_input_uri)\n",
- "! gsutil cat $gcs_input_uri"
- ]
+ "! gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1693,8 +1685,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
- " folders = !gsutil ls $gcs_out_dir\n",
- " latest = \"\"\n",
+ " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split(\"/\")[-2]\n",
 " if subfolder.startswith(\"prediction-\"):\n",
@@ -1711,10 +1702,8 @@
 " raise Exception(\"Batch Job Failed\")\n",
 " else:\n",
 " folder = get_latest_predictions(predictions)\n",
- " ! gsutil ls $folder/prediction*.jsonl\n",
- "\n",
- " ! gsutil cat $folder/prediction*.jsonl\n",
- " break\n",
+ " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+ " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -1808,8 +1797,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r $BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb
index 96c79f565..be3cddc91 100644
--- a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb
@@ -421,8 +421,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -441,8 +440,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -887,12 +885,10 @@
 "else:\n",
 " FILE = IMPORT_FILE\n",
 "\n",
- "count = ! gsutil cat $FILE | wc -l\n",
- "print(\"Number of Examples\", int(count[0]))\n",
+ "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
- "! gsutil cat $FILE | head"
- ]
+ "! gcloud storage cat $FILE | head" ]
 },
 {
 "cell_type": "markdown",
@@ -1333,8 +1329,7 @@
 },
 "outputs": [],
 "source": [
- "test_items = !gsutil cat $IMPORT_FILE | head -n2\n",
- "cols_1 = str(test_items[0]).split(\",\")\n",
+ "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", "cols_1 = str(test_items[0]).split(\",\")\n",
 "cols_2 = str(test_items[1]).split(\",\")\n",
 "if len(cols_1) == 11:\n",
 " test_item_1 = str(cols_1[1])\n",
@@ -1373,9 +1368,7 @@
 "file_1 = test_item_1.split(\"/\")[-1]\n",
 "file_2 = test_item_2.split(\"/\")[-1]\n",
 "\n",
- "! gsutil cp $test_item_1 $BUCKET_NAME/$file_1\n",
- "! gsutil cp $test_item_2 $BUCKET_NAME/$file_2\n",
- "\n",
+ "! gcloud storage cp $test_item_1 $BUCKET_NAME/$file_1\n", "! gcloud storage cp $test_item_2 $BUCKET_NAME/$file_2\n", "\n",
 "test_item_1 = BUCKET_NAME + \"/\" + file_1\n",
 "test_item_2 = BUCKET_NAME + \"/\" + file_2"
 ]
@@ -1418,8 +1411,7 @@
 " f.write(json.dumps(data) + \"\\n\")\n",
 "\n",
 "print(gcs_input_uri)\n",
- "! gsutil cat $gcs_input_uri"
- ]
+ "! gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1705,8 +1697,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
- " folders = !gsutil ls $gcs_out_dir\n",
- " latest = \"\"\n",
+ " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split(\"/\")[-2]\n",
 " if subfolder.startswith(\"prediction-\"):\n",
@@ -1723,10 +1714,8 @@
 " raise Exception(\"Batch Job Failed\")\n",
 " else:\n",
 " folder = get_latest_predictions(predictions)\n",
- " ! gsutil ls $folder/prediction*.jsonl\n",
- "\n",
- " ! gsutil cat $folder/prediction*.jsonl\n",
- " break\n",
+ " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+ " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -1820,8 +1809,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r $BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb
index b8e0039f0..928769361 100644
--- a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb
@@ -735,14 +735,11 @@
 },
 "outputs": [],
 "source": [
- "count = ! gsutil cat $IMPORT_FILE | wc -l\n",
- "print(\"Number of Examples\", int(count[0]))\n",
+ "count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
- "! gsutil cat $IMPORT_FILE | head\n",
- "\n",
- "heading = ! gsutil cat $IMPORT_FILE | head -n1\n",
- "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
+ "! gcloud storage cat $IMPORT_FILE | head\n", "\n",
+ "heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
 "print(\"Label Column Name\", label_column)\n",
 "if label_column is None:\n",
 " raise Exception(\"label column missing\")"
@@ -1820,8 +1817,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r $BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb
index 6c9fec862..9f1f6a33c 100644
--- a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb
@@ -421,8 +421,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -441,8 +440,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -888,12 +886,10 @@
 "else:\n",
 " FILE = IMPORT_FILE\n",
 "\n",
- "count = ! gsutil cat $FILE | wc -l\n",
- "print(\"Number of Examples\", int(count[0]))\n",
+ "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
- "! gsutil cat $FILE | head"
- ]
+ "! gcloud storage cat $FILE | head" ]
 },
 {
 "cell_type": "markdown",
@@ -1377,8 +1373,7 @@
 " f.write(json.dumps(data) + \"\\n\")\n",
 "\n",
 "print(gcs_input_uri)\n",
- "! gsutil cat $gcs_input_uri"
- ]
+ "! gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1653,8 +1648,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
- " folders = !gsutil ls $gcs_out_dir\n",
- " latest = \"\"\n",
+ " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split(\"/\")[-2]\n",
 " if subfolder.startswith(\"prediction-\"):\n",
@@ -1671,10 +1665,8 @@
 " raise Exception(\"Batch Job Failed\")\n",
 " else:\n",
 " folder = get_latest_predictions(predictions)\n",
- " ! gsutil ls $folder/prediction*.jsonl\n",
- "\n",
- " ! gsutil cat $folder/prediction*.jsonl\n",
- " break\n",
+ " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+ " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -1768,8 +1760,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r $BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb b/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb
index 809951b31..ada16ce66 100644
--- a/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb
+++ b/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb
@@ -325,8 +325,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION gs://$BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -345,8 +344,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al gs://$BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -545,8 +543,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil cat $IMPORT_FILE | head -n 10"
- ]
+ "! gcloud storage cat $IMPORT_FILE | head -n 10" ]
 },
 {
 "cell_type": "markdown",
@@ -1493,15 +1490,12 @@
 },
 "outputs": [],
 "source": [
- "! gsutil cat $IMPORT_FILE | head -n 1 > tmp.csv\n",
- "! gsutil cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n",
- "\n",
+ "! gcloud storage cat $IMPORT_FILE | head -n 1 > tmp.csv\n", "! gcloud storage cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n", "\n",
 "! cut -d, -f1-16 tmp.csv > batch.csv\n",
 "\n",
 "gcs_input_uri = \"gs://\" + BUCKET_NAME + \"/test.csv\"\n",
 "\n",
- "! gsutil cp batch.csv $gcs_input_uri"
- ]
+ "! gcloud storage cp batch.csv $gcs_input_uri" ]
 },
 {
 "cell_type": "code",
@@ -1511,8 +1505,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil cat $gcs_input_uri"
- ]
+ "! gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1819,8 +1812,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
- " folders = !gsutil ls $gcs_out_dir\n",
- " latest = \"\"\n",
+ " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split(\"/\")[-2]\n",
 " if subfolder.startswith(\"prediction-\"):\n",
@@ -1839,10 +1831,8 @@
 " folder = get_latest_predictions(\n",
 " response.output_config.gcs_destination.output_uri_prefix\n",
 " )\n",
- " ! gsutil ls $folder/prediction*\n",
- "\n",
- " ! gsutil cat $folder/prediction*\n",
- " break\n",
+ " ! gcloud storage ls $folder/prediction*\n", "\n",
+ " ! gcloud storage cat $folder/prediction*\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -2452,8 +2442,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r gs://$BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb b/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb
index 5474e8ec7..988c3914d 100644
--- a/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb
+++ b/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb
@@ -325,8 +325,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION gs://$BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -345,8 +344,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al gs://$BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -542,8 +540,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil cat $IMPORT_FILE | head -n 10"
- ]
+ "! gcloud storage cat $IMPORT_FILE | head -n 10" ]
 },
 {
 "cell_type": "markdown",
@@ -1428,17 +1425,14 @@
 },
 "outputs": [],
 "source": [
- "test_items = ! gsutil cat $IMPORT_FILE | head -n2\n",
- "\n",
+ "test_items = ! gcloud storage cat $IMPORT_FILE | head -n2\n", "\n",
 "test_item_1, test_label_1 = test_items[0].split(\",\")[1], test_items[0].split(\",\")[2]\n",
 "test_item_2, test_label_2 = test_items[0].split(\",\")[1], test_items[0].split(\",\")[2]\n",
 "\n",
 "file_1 = test_item_1.split(\"/\")[-1]\n",
 "file_2 = test_item_2.split(\"/\")[-1]\n",
 "\n",
- "! gsutil cp $test_item_1 gs://$BUCKET_NAME/$file_1\n",
- "! gsutil cp $test_item_2 gs://$BUCKET_NAME/$file_2\n",
- "\n",
+ "! gcloud storage cp $test_item_1 gs://$BUCKET_NAME/$file_1\n", "! gcloud storage cp $test_item_2 gs://$BUCKET_NAME/$file_2\n", "\n",
 "test_item_1 = \"gs://\" + BUCKET_NAME + \"/\" + file_1\n",
 "test_item_2 = \"gs://\" + BUCKET_NAME + \"/\" + file_2\n",
@@ -1478,8 +1472,7 @@
 " data = {\"content\": test_item_2, \"mime_type\": \"image/jpeg\"}\n",
 " f.write(json.dumps(data) + \"\\n\")\n",
 "\n",
- "!gsutil cat $gcs_input_uri"
- ]
+ "!gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1800,8 +1793,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
- " folders = !gsutil ls $gcs_out_dir\n",
- " latest = \"\"\n",
+ " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split(\"/\")[-2]\n",
 " if subfolder.startswith(\"prediction-\"):\n",
@@ -1820,9 +1812,8 @@
 " folder = get_latest_predictions(\n",
 " response.output_config.gcs_destination.output_uri_prefix\n",
 " )\n",
- " ! gsutil ls $folder/prediction*.jsonl\n",
- "\n",
- " ! gsutil cat $folder/prediction*.jsonl\n",
+ " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+ " ! gcloud storage cat $folder/prediction*.jsonl\n",
 " break\n",
 " time.sleep(60)"
 ]
@@ -2172,8 +2163,7 @@
 "\n",
 "import tensorflow as tf\n",
 "\n",
- "single_file = ! gsutil cat $IMPORT_FILE | head -n 1\n",
- "single_file = single_file[0].split(\",\")[1]\n",
+ "single_file = ! gcloud storage cat $IMPORT_FILE | head -n 1\n", "single_file = single_file[0].split(\",\")[1]\n",
 "\n",
 "with tf.io.gfile.GFile(single_file, \"rb\") as f:\n",
 " content = f.read()\n",
@@ -2443,8 +2433,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r gs://$BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb b/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb
index df9e2fdd0..d86a33bcc 100644
--- a/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb
+++ b/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb
@@ -325,8 +325,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION gs://$BUCKET_NAME"
- ]
+ "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -345,8 +344,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al gs://$BUCKET_NAME"
- ]
+ "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -543,8 +541,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil cat $IMPORT_FILE | head -n 10"
- ]
+ "! gcloud storage cat $IMPORT_FILE | head -n 10" ]
 },
 {
 "cell_type": "markdown",
@@ -1552,8 +1549,7 @@
 },
 "outputs": [],
 "source": [
- "test_item = ! gsutil cat $IMPORT_FILE | head -n1\n",
- "test_item, test_label = str(test_item[0]).split(\",\")\n",
+ "test_item = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "test_item, test_label = str(test_item[0]).split(\",\")\n",
 "\n",
 "print(test_item, test_label)"
 ]
@@ -1614,9 +1610,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil cat $gcs_input_uri\n",
- "! gsutil cat $test_item_uri"
- ]
+ "! gcloud storage cat $gcs_input_uri\n", "! gcloud storage cat $test_item_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1917,8 +1911,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
- " folders = !gsutil ls $gcs_out_dir\n",
- " latest = \"\"\n",
+ " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split(\"/\")[-2]\n",
 " if subfolder.startswith(\"prediction-\"):\n",
@@ -1937,10 +1930,8 @@
 " folder = get_latest_predictions(\n",
 " response.output_config.gcs_destination.output_uri_prefix\n",
 " )\n",
- " ! gsutil ls $folder/prediction*.jsonl\n",
- "\n",
- " ! gsutil cat $folder/prediction*.jsonl\n",
- " break\n",
+ " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+ " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -2260,8 +2251,7 @@
 },
 "outputs": [],
 "source": [
- "test_item = ! gsutil cat $IMPORT_FILE | head -n1\n",
- "test_item, test_label = str(test_item[0]).split(\",\")\n",
+ "test_item = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "test_item, test_label = str(test_item[0]).split(\",\")\n",
 "\n",
 "instances_list = [{\"content\": test_item}]\n",
 "instances = [json_format.ParseDict(s, Value()) for s in instances_list]\n",
@@ -2510,8 +2500,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r gs://$BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
 }
 ],
 "metadata": {
diff --git a/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb b/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb
index 4bb16bb38..190873fdc 100644
--- a/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb
+++ b/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb
@@ -360,8 +360,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l {REGION} -p {PROJECT_ID} {BUCKET_URI}"
- ]
+ "! gcloud storage buckets create --location={REGION} --project={PROJECT_ID} {BUCKET_URI}" ]
 },
 {
 "cell_type": "markdown",
@@ -1431,8 +1430,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -l {train_experiment_uri}"
- ]
+ "! gcloud storage ls --long {train_experiment_uri}" ]
 },
 {
 "cell_type": "markdown",
@@ -1501,8 +1499,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil -q cp -r {train_experiment_uri}/* {experiments_path}"
- ]
+ "! gcloud storage cp --recursive {train_experiment_uri}/* {experiments_path}" ]
 },
 {
 "cell_type": "markdown",
@@ -1896,8 +1893,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil -q cp -r {models_path} {MODELS_PATH}"
- ]
+ "! gcloud storage cp --recursive {models_path} {MODELS_PATH}" ]
 },
 {
 "cell_type": "markdown",
@@ -2207,8 +2203,7 @@
 "\n",
 "# Delete Cloud Storage objects that were created\n",
 "if delete_bucket:\n",
- " ! gsutil -q -m rm -r {BUCKET_URI}\n",
- "\n",
+ " ! gcloud storage rm --recursive {BUCKET_URI}\n", "\n",
 "# Delete tutorial folder\n",
 "if delete_tutorial:\n",
 " shutil.rmtree(tutorial_path)"
diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
index 9c2ad6759..ef096d9fc 100644
--- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
+++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
@@ -229,11 +229,9 @@
 "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
 " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
 " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
- " ! gsutil mb -l {REGION} {BUCKET_URI}\n",
- "else:\n",
+ " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
- " shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
- " bucket_region = shell_output[0].strip().lower()\n",
+ " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",
@@ -257,8 +255,7 @@
 "\n",
 "\n",
 "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
- "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
- "\n",
+ "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n",
 "! gcloud config set project $PROJECT_ID\n",
 "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
 "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\""
@@ -1259,8 +1256,7 @@
 "\n",
 "delete_bucket = False # @param {type:\"boolean\"}\n",
 "if delete_bucket:\n",
- " ! gsutil -m rm -r $BUCKET_NAME"
- ]
+ " ! gcloud storage rm --recursive $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",

From 5d897fd682136e8b269006a8abe7ed056e0355b4 Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Fri, 5 Dec 2025 09:20:16 +0000
Subject: [PATCH 02/20] Manual Changes

---
 .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
index ef096d9fc..9c5197ae8 100644
--- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
+++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
@@ -231,7 +231,8 @@
 " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
 " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
- " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
+ " shell_output = ! gcloud storage buckets describe {BUCKET_NAME} | grep \"^location:\" | sed \"s/location: //\"\n",
+ " bucket_region = shell_output[0].strip().lower()\n"
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",

From 71c777d5f1cac3c9ac8dddddbb955166d9d5ae22 Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Fri, 5 Dec 2025 10:14:37 +0000
Subject: [PATCH 03/20] Manual Changes

---
 .../automl/showcase_automl_image_classification_batch.ipynb | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
index a25e4b9e9..8e17c03a0 100644
--- a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
@@ -475,10 +475,7 @@
 "source": [
 "import time\n",
 "\n",
- "from google.cloud.aiplatform import gapic as aip\n",
- "from google.protobuf import json_format\n",
- "from google.protobuf.json_format import MessageToJson, ParseDict\n",
- "from google.protobuf.struct_pb2 import Struct, Value"
+ "from google.cloud.aiplatform import gapic as aip\n"
 ]
 },
 {

From eed921bcebdb010f6cd038eff13b4c262db7ee9c Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Fri, 5 Dec 2025 16:31:35 +0000
Subject: [PATCH 04/20] Fix: Updated gcloud storage command without formatting

---
 .../automl/showcase_automl_image_classification_batch.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
index 8e17c03a0..797be64e8 100644
--- a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
@@ -421,7 +421,7 @@
 },
 "outputs": [],
 "source": [
- "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
+ "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",

From fd8bb1655dda4fb99ebaf9249ec85ad4389dc9af Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Tue, 9 Dec 2025 15:37:59 +0000
Subject: [PATCH 05/20] Manual Changes

---
 .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
index 9c5197ae8..f996e118e 100644
--- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
+++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
@@ -232,7 +232,7 @@
 " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
 " shell_output = ! gcloud storage buckets describe {BUCKET_NAME} | grep \"^location:\" | sed \"s/location: //\"\n",
- " bucket_region = shell_output[0].strip().lower()\n"
+ " bucket_region = shell_output[0].strip().lower()\n",
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",

From a7a7bda0f9baf25c0279afa72b095cb47f85d9cd Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Tue, 9 Dec 2025 16:19:31 +0000
Subject: [PATCH 06/20] Manual Changes

---
 .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
index f996e118e..a7560febe 100644
--- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
+++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
@@ -231,8 +231,12 @@
 " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
 " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
+<<<<<<< HEAD
 " shell_output = ! gcloud storage buckets describe {BUCKET_NAME} | grep \"^location:\" | sed \"s/location: //\"\n",
 " bucket_region = shell_output[0].strip().lower()\n",
+=======
+ " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
+>>>>>>> parent of 5d897fd6 (Manual Changes)
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",

From 3f6893396890da4045af83f515f65c2126ddb52b Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Tue, 9 Dec 2025 16:21:06 +0000
Subject: [PATCH 07/20] Revert "Manual Changes"

This reverts commit a7a7bda0f9baf25c0279afa72b095cb47f85d9cd.

---
 .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
index a7560febe..f996e118e 100644
--- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
+++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
@@ -231,12 +231,8 @@
 " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
 " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-<<<<<<< HEAD
 " shell_output = ! gcloud storage buckets describe {BUCKET_NAME} | grep \"^location:\" | sed \"s/location: //\"\n",
 " bucket_region = shell_output[0].strip().lower()\n",
-=======
- " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
->>>>>>> parent of 5d897fd6 (Manual Changes)
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",

From 3959c2bcb9ff3bcf788fd65ed95dfc4f1860a77f Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Tue, 9 Dec 2025 17:13:14 +0000
Subject: [PATCH 08/20] Manual Changes

---
 .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
index f996e118e..a7560febe 100644
--- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
+++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
@@ -231,8 +231,12 @@
 " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
 " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
+<<<<<<< HEAD
 " shell_output = ! gcloud storage buckets describe {BUCKET_NAME} | grep \"^location:\" | sed \"s/location: //\"\n",
 " bucket_region = shell_output[0].strip().lower()\n",
+=======
+ " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
+>>>>>>> parent of 5d897fd6 (Manual Changes)
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",

From 5c2c3f980dff69b8ccd376992cde44159a77db07 Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Tue, 9 Dec 2025 17:16:21 +0000
Subject: [PATCH 09/20] Revert "Manual Changes"

This reverts commit a7a7bda0f9baf25c0279afa72b095cb47f85d9cd.

---
 .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
index a7560febe..f996e118e 100644
--- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
+++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
@@ -231,12 +231,8 @@
 " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
 " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-<<<<<<< HEAD
 " shell_output = ! gcloud storage buckets describe {BUCKET_NAME} | grep \"^location:\" | sed \"s/location: //\"\n",
 " bucket_region = shell_output[0].strip().lower()\n",
-=======
- " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
->>>>>>> parent of 5d897fd6 (Manual Changes)
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",

From 8bd0d77703ad00eeffde4892ec214745644993b7 Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Tue, 9 Dec 2025 17:43:54 +0000
Subject: [PATCH 10/20] Manual Changes

---
 .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
index f996e118e..b5a15520a 100644
--- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
+++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
@@ -232,7 +232,7 @@
 " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
 " shell_output = ! gcloud storage buckets describe {BUCKET_NAME} | grep \"^location:\" | sed \"s/location: //\"\n",
- " bucket_region = shell_output[0].strip().lower()\n",
+ " bucket_region = shell_output[0].strip().lower()\n",
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",

From 501a589521faf07256093df54fc68f53ee36b0b7 Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Tue, 9 Dec 2025 18:01:02 +0000
Subject: [PATCH 11/20] Revert "Manual Changes"

This reverts commit 71c777d5f1cac3c9ac8dddddbb955166d9d5ae22.

---
 .../automl/showcase_automl_image_classification_batch.ipynb | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
index 797be64e8..d9643f169 100644
--- a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
@@ -475,7 +475,10 @@
 "source": [
 "import time\n",
 "\n",
- "from google.cloud.aiplatform import gapic as aip\n"
+ "from google.cloud.aiplatform import gapic as aip\n",
+ "from google.protobuf import json_format\n",
+ "from google.protobuf.json_format import MessageToJson, ParseDict\n",
+ "from google.protobuf.struct_pb2 import Struct, Value"
 ]
 },
 {

From d44e3b1ed82e97c7253ec2a4aa24a88ca2a8800a Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Tue, 9 Dec 2025 18:14:25 +0000
Subject: [PATCH 12/20] Manual Changes

---
 .../automl/showcase_automl_image_classification_batch.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
index d9643f169..a25e4b9e9 100644
--- a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
+++ b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb
@@ -421,7 +421,7 @@
 },
 "outputs": [],
 "source": [
- "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
+ "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",

From 46fb663d1c9e3c59af33f183208ee6005a3f2cdc Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Tue, 9 Dec 2025 19:16:53 +0000
Subject: [PATCH 13/20] Manual changes

---
 ..._agents_bandits_movie_recommendation.ipynb | 123 +++++++++++-------
 ...se_automl_image_classification_batch.ipynb | 36 +++--
 ..._automl_image_object_detection_batch.ipynb | 36 +++--
 ...abular_classification_online_explain.ipynb | 12 +-
 ..._automl_text_entity_extraction_batch.ipynb | 29 +++--
 ...tured data with Vertex AI Regression.ipynb | 35 +++--
 ... with Vertex AI Video Classification.ipynb | 35 +++--
 ...e with Vertex AI Text Classification.ipynb | 35 +++--
 ..._fine_tuning_batch_deployment_on_rov.ipynb | 15 ++-
 ...model_garden_vllm_text_only_tutorial.ipynb | 11 +-
 10 files changed, 240 insertions(+), 127 deletions(-)

diff --git a/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb b/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb
index 027d9c6ca..834f7e0f4 100644
--- a/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb
+++ b/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb
@@ -398,6 +398,7 @@
 "if not IS_GOOGLE_CLOUD_NOTEBOOK:\n",
 " if \"google.colab\" in sys.modules:\n",
 " from google.colab import auth as google_auth\n",
+ "\n",
 " google_auth.authenticate_user()\n",
 "\n",
 " # If you are running this notebook locally, replace the string below with the\n",
@@ -472,7 +473,8 @@
 },
 "outputs": [],
 "source": [
- "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
+ "! gcloud storage buckets create --location $REGION $BUCKET_NAME"
+ ]
 },
 {
 "cell_type": "markdown",
@@ -491,7 +493,8 @@
 },
 "outputs": [],
 "source": [
- "! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME"
+ ]
 },
 {
 "cell_type": "markdown",
@@ -563,7 +566,8 @@
 "outputs": [],
 "source": [
 "# Copy the sample data into your DATA_PATH\n",
- "! gcloud storage cp \"gs://cloud-samples-data/vertex-ai/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/u.data\" $DATA_PATH" ]
+ "! gcloud storage cp \"gs://cloud-samples-data/vertex-ai/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/u.data\" $DATA_PATH"
+ ]
 },
 {
 "cell_type": "code",
@@ -576,11 +580,15 @@
 "# Set hyperparameters.\n",
 "BATCH_SIZE = 8 # @param {type:\"integer\"} Training and prediction batch size.\n",
 "TRAINING_LOOPS = 5 # @param {type:\"integer\"} Number of training iterations.\n",
- "STEPS_PER_LOOP = 2 # @param {type:\"integer\"} Number of driver steps per training iteration.\n",
+ "STEPS_PER_LOOP = (\n",
+ " 2 # @param {type:\"integer\"} Number of driver steps per training iteration.\n",
+ ")\n",
 "\n",
 "# Set MovieLens simulation environment parameters.\n",
 "RANK_K = 20 # @param {type:\"integer\"} Rank for matrix factorization in the MovieLens environment; also the observation dimension.\n",
- "NUM_ACTIONS = 20 # @param {type:\"integer\"} Number of actions (movie items) to choose from.\n",
+ "NUM_ACTIONS = (\n",
+ " 20 # @param {type:\"integer\"} Number of actions (movie items) to choose from.\n",
+ ")\n",
 "PER_ARM = False # Use the non-per-arm version of the MovieLens environment.\n",
 "\n",
 "# Set agent parameters.\n",
@@ -618,7 +626,8 @@
 "# Define RL environment.\n",
 "env = movielens_py_environment.MovieLensPyEnvironment(\n",
- " DATA_PATH, RANK_K, BATCH_SIZE, num_movies=NUM_ACTIONS, csv_delimiter=\"\\t\")\n",
+ " DATA_PATH, RANK_K, BATCH_SIZE, num_movies=NUM_ACTIONS, csv_delimiter=\"\\t\"\n",
+ ")\n",
 "environment = tf_py_environment.TFPyEnvironment(env)\n",
 "\n",
 "# Define RL agent/algorithm.\n",
@@ -628,7 +637,8 @@
 " tikhonov_weight=TIKHONOV_WEIGHT,\n",
 " alpha=AGENT_ALPHA,\n",
 " dtype=tf.float32,\n",
- " accepts_per_arm_features=PER_ARM)\n",
+ " accepts_per_arm_features=PER_ARM,\n",
+ ")\n",
 "print(\"TimeStep Spec (for each batch):\\n\", agent.time_step_spec, \"\\n\")\n",
 "print(\"Action Spec (for each batch):\\n\", agent.action_spec, \"\\n\")\n",
 "print(\"Reward Spec (for each batch):\\n\", environment.reward_spec(), \"\\n\")\n",
@@ -636,7 +646,8 @@
 "# Define RL metric.\n",
 "optimal_reward_fn = functools.partial(\n",
 " environment_utilities.compute_optimal_reward_with_movielens_environment,\n",
- " environment=environment)\n",
+ " environment=environment,\n",
+ ")\n",
 "regret_metric = tf_bandit_metrics.RegretMetric(optimal_reward_fn)\n",
 "metrics = [regret_metric]"
@@ -701,35 +712,38 @@
 " if training_data_spec_transformation_fn is None:\n",
 " data_spec = agent.policy.trajectory_spec\n",
 " else:\n",
- " data_spec = training_data_spec_transformation_fn(\n",
- " agent.policy.trajectory_spec)\n",
- " replay_buffer = trainer.get_replay_buffer(data_spec, environment.batch_size,\n",
- " steps_per_loop)\n",
+ " data_spec = training_data_spec_transformation_fn(agent.policy.trajectory_spec)\n",
+ " replay_buffer = trainer.get_replay_buffer(\n",
+ " data_spec, environment.batch_size, steps_per_loop\n",
+ " )\n",
 "\n",
 " # `step_metric` records the number of individual rounds of bandit interaction;\n",
 " # that is, (number of trajectories) * batch_size.\n",
 " step_metric = tf_metrics.EnvironmentSteps()\n",
 " metrics = [\n",
 " tf_metrics.NumberOfEpisodes(),\n",
- " tf_metrics.AverageEpisodeLengthMetric(batch_size=environment.batch_size)\n",
+ " tf_metrics.AverageEpisodeLengthMetric(batch_size=environment.batch_size),\n",
 " ]\n",
 " if additional_metrics:\n",
 " metrics += additional_metrics\n",
 "\n",
 " if isinstance(environment.reward_spec(), dict):\n",
- " metrics += [tf_metrics.AverageReturnMultiMetric(\n",
- " reward_spec=environment.reward_spec(),\n",
- " batch_size=environment.batch_size)]\n",
+ " metrics += [\n",
+ " tf_metrics.AverageReturnMultiMetric(\n",
+ " reward_spec=environment.reward_spec(), batch_size=environment.batch_size\n",
+ " )\n",
+ " ]\n",
 " else:\n",
- " metrics += [\n",
- " tf_metrics.AverageReturnMetric(batch_size=environment.batch_size)]\n",
+ " metrics += [tf_metrics.AverageReturnMetric(batch_size=environment.batch_size)]\n",
 "\n",
 " # Store intermediate metric results, indexed by metric names.\n",
 " metric_results = defaultdict(list)\n",
 "\n",
 " if training_data_spec_transformation_fn is not None:\n",
- " def add_batch_fn(data): return replay_buffer.add_batch(training_data_spec_transformation_fn(data)) \n",
- " \n",
+ "\n",
+ " def add_batch_fn(data):\n",
+ " return replay_buffer.add_batch(training_data_spec_transformation_fn(data))\n",
+ "\n",
 " else:\n",
 " add_batch_fn = replay_buffer.add_batch\n",
 "\n",
@@ -739,10 +753,12 @@
 " env=environment,\n",
 " policy=agent.collect_policy,\n",
 " num_steps=steps_per_loop * environment.batch_size,\n",
- " observers=observers)\n",
+ " observers=observers,\n",
+ " )\n",
 "\n",
- " training_loop = trainer.get_training_loop_fn(\n",
- " driver, replay_buffer, agent, steps_per_loop)\n",
+ " training_loop = trainer.get_training_loop_fn(\n",
+ " driver, replay_buffer, agent, steps_per_loop\n",
+ " )\n",
 " saver = policy_saver.PolicySaver(agent.policy)\n",
 "\n",
 " for _ in range(training_loops):\n",
@@ -780,7 +796,8 @@
 " environment=environment,\n",
 " training_loops=TRAINING_LOOPS,\n",
 " steps_per_loop=STEPS_PER_LOOP,\n",
- " additional_metrics=metrics)\n",
+ " additional_metrics=metrics,\n",
+ ")\n",
 "\n",
 "tf.profiler.experimental.stop()"
@@ -1089,11 +1106,15 @@
 },
 "outputs": [],
 "source": [
- "RUN_HYPERPARAMETER_TUNING = True # Execute hyperparameter tuning instead of regular training.\n",
+ "RUN_HYPERPARAMETER_TUNING = (\n",
+ " True # Execute hyperparameter tuning instead of regular training.\n",
+ ")\n",
 "TRAIN_WITH_BEST_HYPERPARAMETERS = False # Do not train.\n",
 "\n",
 "HPTUNING_RESULT_DIR = \"hptuning/\" # @param {type: \"string\"} Directory to store the best hyperparameter(s) in `BUCKET_NAME` and locally (temporarily).\n",
- "HPTUNING_RESULT_PATH = os.path.join(HPTUNING_RESULT_DIR, \"result.json\") # @param {type: \"string\"} Path to the file containing the best hyperparameter(s)."
+ "HPTUNING_RESULT_PATH = os.path.join(\n",
+ " HPTUNING_RESULT_DIR, \"result.json\"\n",
+ ") # @param {type: \"string\"} Path to the file containing the best hyperparameter(s)."
 ]
@@ -1121,7 +1142,7 @@
 " image_uri: str,\n",
 " args: List[str],\n",
 " location: str = \"us-central1\",\n",
- " api_endpoint: str = \"us-central1-aiplatform.googleapis.com\"\n",
+ " api_endpoint: str = \"us-central1-aiplatform.googleapis.com\",\n",
 ") -> None:\n",
 " \"\"\"Creates a hyperparameter tuning job using a custom container.\n",
 "\n",
@@ -1194,8 +1215,8 @@
 "\n",
 " # Create job\n",
 " response = client.create_hyperparameter_tuning_job(\n",
- " parent=parent,\n",
- " hyperparameter_tuning_job=hyperparameter_tuning_job)\n",
+ " parent=parent, hyperparameter_tuning_job=hyperparameter_tuning_job\n",
+ " )\n",
 " job_id = response.name.split(\"/\")[-1]\n",
 " print(\"Job ID:\", job_id)\n",
 " print(\"Job config:\", response)\n",
@@ -1239,7 +1260,8 @@
 " image_uri=f\"gcr.io/{PROJECT_ID}/{HPTUNING_TRAINING_CONTAINER}:latest\",\n",
 " args=args,\n",
 " location=REGION,\n",
- " api_endpoint=f\"{REGION}-aiplatform.googleapis.com\")"
+ " api_endpoint=f\"{REGION}-aiplatform.googleapis.com\",\n",
+ ")"
@@ -1289,7 +1311,8 @@
 " name = client.hyperparameter_tuning_job_path(\n",
 " project=project,\n",
 " location=location,\n",
- " hyperparameter_tuning_job=hyperparameter_tuning_job_id)\n",
+ " hyperparameter_tuning_job=hyperparameter_tuning_job_id,\n",
+ " )\n",
 " response = client.get_hyperparameter_tuning_job(name=name)\n",
 " return response"
@@ -1310,7 +1333,8 @@
 " location=REGION,\n",
 " api_endpoint=f\"{REGION}-aiplatform.googleapis.com\")\n",
 " if response.state.name == 'JOB_STATE_SUCCEEDED':\n",
- " print(\"Job succeeded.\\nJob Time:\", response.update_time - response.create_time)\n",
+ " print(\"Job succeeded.\n",
+ "Job Time:\", response.update_time - response.create_time)\n",
 " trials = response.trials\n",
 " print(\"Trials:\", trials)\n",
 " break\n",
@@ -1345,8 +1369,8 @@
 "if trials:\n",
 " # Dict mapping from metric names to the best metric values seen so far\n",
 " best_objective_values = dict.fromkeys(\n",
- " [metric.metric_id for metric in trials[0].final_measurement.metrics],\n",
- " -np.inf)\n",
+ " [metric.metric_id for metric in trials[0].final_measurement.metrics], -np.inf\n",
+ " )\n",
 " # Dict mapping from metric names to a list of the best combination(s) of\n",
 " # hyperparameter(s). Each combination is a dict mapping from hyperparameter\n",
 " # names to their values.\n",
@@ -1355,12 +1379,13 @@
 " # `final_measurement` and `parameters` are `RepeatedComposite` objects.\n",
 " # Reference the structure above to extract the value of your interest.\n",
 " for metric in trial.final_measurement.metrics:\n",
- " params = {\n",
- " param.parameter_id: param.value for param in trial.parameters}\n",
+ " params = {param.parameter_id: param.value for param in trial.parameters}\n",
 " if metric.value > best_objective_values[metric.metric_id]:\n",
 " best_params[metric.metric_id] = [params]\n",
 " elif metric.value == best_objective_values[metric.metric_id]:\n",
- " best_params[param.parameter_id].append(params) # Handle cases where multiple hyperparameter values lead to the same performance.\n",
+ " best_params[param.parameter_id].append(\n",
+ " params\n",
+ " ) # Handle cases where multiple hyperparameter values lead to the same performance.\n",
 " print(\"Best hyperparameter value(s):\")\n",
 " for metric, params in best_params.items():\n",
 " print(f\"Metric={metric}: {sorted(params)}\")\n",
@@ -1440,7 +1465,9 @@
 },
 "outputs": [],
 "source": [
- "PREDICTION_CONTAINER = \"prediction-custom-container\" # @param {type:\"string\"} Name of the container image."
+ "PREDICTION_CONTAINER = (\n", + " \"prediction-custom-container\" # @param {type:\"string\"} Name of the container image.\n", + ")" ] }, { @@ -1472,7 +1499,7 @@ " machineType: 'E2_HIGHCPU_8'\"\"\".format(\n", " PROJECT_ID=PROJECT_ID,\n", " PREDICTION_CONTAINER=PREDICTION_CONTAINER,\n", - " ARTIFACTS_DIR=ARTIFACTS_DIR\n", + " ARTIFACTS_DIR=ARTIFACTS_DIR,\n", ")\n", "\n", "with open(\"cloudbuild.yaml\", \"w\") as fp:\n", @@ -1589,8 +1616,12 @@ }, "outputs": [], "source": [ - "RUN_HYPERPARAMETER_TUNING = False # Execute regular training instead of hyperparameter tuning.\n", - "TRAIN_WITH_BEST_HYPERPARAMETERS = True # @param {type:\"bool\"} Whether to use learned hyperparameters in training." + "RUN_HYPERPARAMETER_TUNING = (\n", + " False # Execute regular training instead of hyperparameter tuning.\n", + ")\n", + "TRAIN_WITH_BEST_HYPERPARAMETERS = (\n", + " True # @param {type:\"bool\"} Whether to use learned hyperparameters in training.\n", + ")" ] }, { @@ -1630,10 +1661,12 @@ "job = aiplatform.CustomContainerTrainingJob(\n", " display_name=\"train-movielens\",\n", " container_uri=f\"gcr.io/{PROJECT_ID}/{HPTUNING_TRAINING_CONTAINER}:latest\",\n", - " command=[\"python3\", \"-m\", \"src.training.task\"] + args, # Pass in training arguments, including hyperparameters.\n", + " command=[\"python3\", \"-m\", \"src.training.task\"]\n", + " + args, # Pass in training arguments, including hyperparameters.\n", " model_serving_container_image_uri=f\"gcr.io/{PROJECT_ID}/{PREDICTION_CONTAINER}:latest\",\n", " model_serving_container_predict_route=\"/predict\",\n", - " model_serving_container_health_route=\"/health\")\n", + " model_serving_container_health_route=\"/health\",\n", + ")\n", "\n", "print(\"Training Spec:\", job._managed_model)\n", "\n", @@ -1642,7 +1675,8 @@ " replica_count=1,\n", " machine_type=\"n1-standard-4\",\n", " accelerator_type=\"ACCELERATOR_TYPE_UNSPECIFIED\",\n", - " accelerator_count=0)" + " accelerator_count=0,\n", + ")" ] }, { @@ -1781,7 +1815,8 @@ "! gcloud ai models delete $model.name --quiet\n", "\n", "# Delete Cloud Storage objects that were created\n", - "! gcloud storage rm --recursive $ARTIFACTS_DIR" ] + "! gcloud storage rm --recursive $ARTIFACTS_DIR" + ] } ], "metadata": { diff --git a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb index a25e4b9e9..a0dc81176 100644 --- a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb @@ -421,7 +421,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ] + "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -440,7 +441,8 @@ }, "outputs": [], "source": [ - "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] + "! gcloud storage ls --all-versions --long $BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -884,10 +886,12 @@ "else:\n", " FILE = IMPORT_FILE\n", "\n", - "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", + "count = ! gcloud storage cat $FILE | wc -l\n", + "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gcloud storage cat $FILE | head" ] + "! 
gcloud storage cat $FILE | head" + ] }, { "cell_type": "markdown", @@ -1325,7 +1329,8 @@ }, "outputs": [], "source": [ - "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", "if len(str(test_items[0]).split(\",\")) == 3:\n", + "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", + "if len(str(test_items[0]).split(\",\")) == 3:\n", " _, test_item_1, test_label_1 = str(test_items[0]).split(\",\")\n", " _, test_item_2, test_label_2 = str(test_items[1]).split(\",\")\n", "else:\n", @@ -1358,7 +1363,9 @@ "file_1 = test_item_1.split(\"/\")[-1]\n", "file_2 = test_item_2.split(\"/\")[-1]\n", "\n", - "! gcloud storage cp $test_item_1 $BUCKET_NAME/$file_1\n", "! gcloud storage cp $test_item_2 $BUCKET_NAME/$file_2\n", "\n", + "! gcloud storage cp $test_item_1 $BUCKET_NAME/$file_1\n", + "! gcloud storage cp $test_item_2 $BUCKET_NAME/$file_2\n", + "\n", "test_item_1 = BUCKET_NAME + \"/\" + file_1\n", "test_item_2 = BUCKET_NAME + \"/\" + file_2" ] @@ -1401,7 +1408,8 @@ " f.write(json.dumps(data) + \"\\n\")\n", "\n", "print(gcs_input_uri)\n", - "! gcloud storage cat $gcs_input_uri" ] + "! gcloud storage cat $gcs_input_uri" + ] }, { "cell_type": "markdown", @@ -1684,8 +1692,9 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", + " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", " if subfolder.startswith(\"prediction-\"):\n", @@ -1702,8 +1711,10 @@ " raise Exception(\"Batch Job Failed\")\n", " else:\n", " folder = get_latest_predictions(predictions)\n", - " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n", - " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n", + " ! gcloud storage ls $folder/prediction*.jsonl\n", + "\n", + " ! gcloud storage cat $folder/prediction*.jsonl\n", + " break\n", " time.sleep(60)" ] }, @@ -1797,7 +1808,8 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gcloud storage rm --recursive $BUCKET_NAME" ] + " ! gcloud storage rm --recursive $BUCKET_NAME" + ] } ], "metadata": { diff --git a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb index be3cddc91..f8ce0592d 100644 --- a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb @@ -421,7 +421,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ] + "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -440,7 +441,8 @@ }, "outputs": [], "source": [ - "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] + "! gcloud storage ls --all-versions --long $BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -885,10 +887,12 @@ "else:\n", " FILE = IMPORT_FILE\n", "\n", - "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", + "count = ! gcloud storage cat $FILE | wc -l\n", + "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gcloud storage cat $FILE | head" ] + "! 
gcloud storage cat $FILE | head" + ] }, { "cell_type": "markdown", @@ -1329,7 +1333,8 @@ }, "outputs": [], "source": [ - "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", "cols_1 = str(test_items[0]).split(\",\")\n", + "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", + "cols_1 = str(test_items[0]).split(\",\")\n", "cols_2 = str(test_items[1]).split(\",\")\n", "if len(cols_1) == 11:\n", " test_item_1 = str(cols_1[1])\n", @@ -1368,7 +1373,9 @@ "file_1 = test_item_1.split(\"/\")[-1]\n", "file_2 = test_item_2.split(\"/\")[-1]\n", "\n", - "! gcloud storage cp $test_item_1 $BUCKET_NAME/$file_1\n", "! gcloud storage cp $test_item_2 $BUCKET_NAME/$file_2\n", "\n", + "! gcloud storage cp $test_item_1 $BUCKET_NAME/$file_1\n", + "! gcloud storage cp $test_item_2 $BUCKET_NAME/$file_2\n", + "\n", "test_item_1 = BUCKET_NAME + \"/\" + file_1\n", "test_item_2 = BUCKET_NAME + \"/\" + file_2" ] @@ -1411,7 +1418,8 @@ " f.write(json.dumps(data) + \"\\n\")\n", "\n", "print(gcs_input_uri)\n", - "! gcloud storage cat $gcs_input_uri" ] + "! gcloud storage cat $gcs_input_uri" + ] }, { "cell_type": "markdown", @@ -1696,8 +1704,9 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", + " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", " if subfolder.startswith(\"prediction-\"):\n", @@ -1714,8 +1723,10 @@ " raise Exception(\"Batch Job Failed\")\n", " else:\n", " folder = get_latest_predictions(predictions)\n", - " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n", - " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n", + " ! gcloud storage ls $folder/prediction*.jsonl\n", + "\n", + " ! gcloud storage cat $folder/prediction*.jsonl\n", + " break\n", " time.sleep(60)" ] }, @@ -1809,7 +1820,8 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gcloud storage rm --recursive $BUCKET_NAME" ] + " ! gcloud storage rm --recursive $BUCKET_NAME" + ] } ], "metadata": { diff --git a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb index 928769361..47204dd63 100644 --- a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb @@ -735,11 +735,14 @@ }, "outputs": [], "source": [ - "count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", + "count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", + "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gcloud storage cat $IMPORT_FILE | head\n", "\n", - "heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n", + "! gcloud storage cat $IMPORT_FILE | head\n", + "\n", + "heading = ! 
gcloud storage cat $IMPORT_FILE | head -n1\n", + "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n", "print(\"Label Column Name\", label_column)\n", "if label_column is None:\n", " raise Exception(\"label column missing\")" @@ -1817,7 +1820,8 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gcloud storage rm --recursive $BUCKET_NAME" ] + " ! gcloud storage rm --recursive $BUCKET_NAME" + ] } ], "metadata": { diff --git a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb index 9f1f6a33c..c1e983cf9 100644 --- a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb @@ -421,7 +421,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ] + "! gcloud storage buckets create --location $REGION $BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -440,7 +441,8 @@ }, "outputs": [], "source": [ - "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] + "! gcloud storage ls --all-versions --long $BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -886,10 +888,12 @@ "else:\n", " FILE = IMPORT_FILE\n", "\n", - "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", + "count = ! gcloud storage cat $FILE | wc -l\n", + "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gcloud storage cat $FILE | head" ] + "! gcloud storage cat $FILE | head" + ] }, { "cell_type": "markdown", @@ -1373,7 +1377,8 @@ " f.write(json.dumps(data) + \"\\n\")\n", "\n", "print(gcs_input_uri)\n", - "! gcloud storage cat $gcs_input_uri" ] + "! gcloud storage cat $gcs_input_uri" + ] }, { "cell_type": "markdown", @@ -1647,8 +1652,9 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", + " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", " if subfolder.startswith(\"prediction-\"):\n", @@ -1665,8 +1671,10 @@ " raise Exception(\"Batch Job Failed\")\n", " else:\n", " folder = get_latest_predictions(predictions)\n", - " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n", - " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n", + " ! gcloud storage ls $folder/prediction*.jsonl\n", + "\n", + " ! gcloud storage cat $folder/prediction*.jsonl\n", + " break\n", " time.sleep(60)" ] }, @@ -1760,7 +1768,8 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gcloud storage rm --recursive $BUCKET_NAME" ] + " ! 
gcloud storage rm --recursive $BUCKET_NAME" + ] } ], "metadata": { diff --git a/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb b/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb index ada16ce66..64caba398 100644 --- a/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb +++ b/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb @@ -325,7 +325,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ] + "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -344,7 +345,8 @@ }, "outputs": [], "source": [ - "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ] + "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -543,7 +545,8 @@ }, "outputs": [], "source": [ - "! gcloud storage cat $IMPORT_FILE | head -n 10" ] + "! gcloud storage cat $IMPORT_FILE | head -n 10" + ] }, { "cell_type": "markdown", @@ -1490,12 +1493,15 @@ }, "outputs": [], "source": [ - "! gcloud storage cat $IMPORT_FILE | head -n 1 > tmp.csv\n", "! gcloud storage cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n", "\n", + "! gcloud storage cat $IMPORT_FILE | head -n 1 > tmp.csv\n", + "! gcloud storage cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n", + "\n", "! cut -d, -f1-16 tmp.csv > batch.csv\n", "\n", "gcs_input_uri = \"gs://\" + BUCKET_NAME + \"/test.csv\"\n", "\n", - "! gcloud storage cp batch.csv $gcs_input_uri" ] + "! gcloud storage cp batch.csv $gcs_input_uri" + ] }, { "cell_type": "code", @@ -1505,7 +1511,8 @@ }, "outputs": [], "source": [ - "! gcloud storage cat $gcs_input_uri" ] + "! gcloud storage cat $gcs_input_uri" + ] }, { "cell_type": "markdown", @@ -1811,8 +1818,9 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", + " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", " if subfolder.startswith(\"prediction-\"):\n", @@ -1831,8 +1839,10 @@ " folder = get_latest_predictions(\n", " response.output_config.gcs_destination.output_uri_prefix\n", " )\n", - " ! gcloud storage ls $folder/prediction*\n", "\n", - " ! gcloud storage cat $folder/prediction*\n", " break\n", + " ! gcloud storage ls $folder/prediction*\n", + "\n", + " ! gcloud storage cat $folder/prediction*\n", + " break\n", " time.sleep(60)" ] }, @@ -2442,7 +2452,8 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ] + " ! 
gcloud storage rm --recursive gs://$BUCKET_NAME" + ] } ], "metadata": { @@ -2458,7 +2469,7 @@ "call:migration", "response:migration" ], - "name": "UJ4 unified AutoML for structured data with Vertex AI Regression.ipynb", + "name": "UJ4 AutoML for structured data with Vertex AI Regression.ipynb", "toc_visible": true }, "kernelspec": { diff --git a/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb b/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb index 988c3914d..7cf1a2607 100644 --- a/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb +++ b/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb @@ -325,7 +325,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ] + "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -344,7 +345,8 @@ }, "outputs": [], "source": [ - "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ] + "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -540,7 +542,8 @@ }, "outputs": [], "source": [ - "! gcloud storage cat $IMPORT_FILE | head -n 10" ] + "! gcloud storage cat $IMPORT_FILE | head -n 10" + ] }, { "cell_type": "markdown", @@ -1425,14 +1428,17 @@ }, "outputs": [], "source": [ - "test_items = ! gcloud storage cat $IMPORT_FILE | head -n2\n", "\n", + "test_items = ! gcloud storage cat $IMPORT_FILE | head -n2\n", + "\n", "test_item_1, test_label_1 = test_items[0].split(\",\")[1], test_items[0].split(\",\")[2]\n", "test_item_2, test_label_2 = test_items[0].split(\",\")[1], test_items[0].split(\",\")[2]\n", "\n", "file_1 = test_item_1.split(\"/\")[-1]\n", "file_2 = test_item_2.split(\"/\")[-1]\n", "\n", - "! gcloud storage cp $test_item_1 gs://$BUCKET_NAME/$file_1\n", "! gcloud storage cp $test_item_2 gs://$BUCKET_NAME/$file_2\n", "\n", + "! gcloud storage cp $test_item_1 gs://$BUCKET_NAME/$file_1\n", + "! gcloud storage cp $test_item_2 gs://$BUCKET_NAME/$file_2\n", + "\n", "test_item_1 = \"gs://\" + BUCKET_NAME + \"/\" + file_1\n", "test_item_2 = \"gs://\" + BUCKET_NAME + \"/\" + file_2\n", "\n", @@ -1472,7 +1478,8 @@ " data = {\"content\": test_item_2, \"mime_type\": \"image/jpeg\"}\n", " f.write(json.dumps(data) + \"\\n\")\n", "\n", - "!gcloud storage cat $gcs_input_uri" ] + "!gcloud storage cat $gcs_input_uri" + ] }, { "cell_type": "markdown", @@ -1792,8 +1799,9 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", + " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", " if subfolder.startswith(\"prediction-\"):\n", @@ -1812,7 +1820,8 @@ " folder = get_latest_predictions(\n", " response.output_config.gcs_destination.output_uri_prefix\n", " )\n", - " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n", + " ! gcloud storage ls $folder/prediction*.jsonl\n", + "\n", " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n", " time.sleep(60)" @@ -2163,7 +2172,8 @@ "\n", "import tensorflow as tf\n", "\n", - "single_file = ! 
gcloud storage cat $IMPORT_FILE | head -n 1\n", "single_file = single_file[0].split(\",\")[1]\n", + "single_file = ! gcloud storage cat $IMPORT_FILE | head -n 1\n", + "single_file = single_file[0].split(\",\")[1]\n", "\n", "with tf.io.gfile.GFile(single_file, \"rb\") as f:\n", " content = f.read()\n", @@ -2433,7 +2443,8 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ] + " ! gcloud storage rm --recursive gs://$BUCKET_NAME" + ] } ], "metadata": { @@ -2456,7 +2467,7 @@ "_RXG0aaSV2HS", "EDuJAyzbV2HW" ], - "name": "UJ5 unified AutoML for vision with Vertex AI Video Classification.ipynb", + "name": "UJ5 AutoML for vision with Vertex AI Video Classification.ipynb", "toc_visible": true }, "kernelspec": { diff --git a/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb b/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb index d86a33bcc..5699a32ff 100644 --- a/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb +++ b/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb @@ -325,7 +325,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ] + "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -344,7 +345,8 @@ }, "outputs": [], "source": [ - "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ] + "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" + ] }, { "cell_type": "markdown", @@ -541,7 +543,8 @@ }, "outputs": [], "source": [ - "! gcloud storage cat $IMPORT_FILE | head -n 10" ] + "! gcloud storage cat $IMPORT_FILE | head -n 10" + ] }, { "cell_type": "markdown", @@ -1549,7 +1552,8 @@ }, "outputs": [], "source": [ - "test_item = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "test_item, test_label = str(test_item[0]).split(\",\")\n", + "test_item = ! gcloud storage cat $IMPORT_FILE | head -n1\n", + "test_item, test_label = str(test_item[0]).split(\",\")\n", "\n", "print(test_item, test_label)" ] @@ -1610,7 +1614,9 @@ }, "outputs": [], "source": [ - "! gcloud storage cat $gcs_input_uri\n", "! gcloud storage cat $test_item_uri" ] + "! gcloud storage cat $gcs_input_uri\n", + "! gcloud storage cat $test_item_uri" + ] }, { "cell_type": "markdown", @@ -1910,8 +1916,9 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", + " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", " if subfolder.startswith(\"prediction-\"):\n", @@ -1930,8 +1937,10 @@ " folder = get_latest_predictions(\n", " response.output_config.gcs_destination.output_uri_prefix\n", " )\n", - " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n", - " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n", + " ! gcloud storage ls $folder/prediction*.jsonl\n", + "\n", + " ! gcloud storage cat $folder/prediction*.jsonl\n", + " break\n", " time.sleep(60)" ] }, @@ -2251,7 +2260,8 @@ }, "outputs": [], "source": [ - "test_item = ! 
gcloud storage cat $IMPORT_FILE | head -n1\n", "test_item, test_label = str(test_item[0]).split(\",\")\n", + "test_item = ! gcloud storage cat $IMPORT_FILE | head -n1\n", + "test_item, test_label = str(test_item[0]).split(\",\")\n", "\n", "instances_list = [{\"content\": test_item}]\n", "instances = [json_format.ParseDict(s, Value()) for s in instances_list]\n", @@ -2500,7 +2510,8 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ] + " ! gcloud storage rm --recursive gs://$BUCKET_NAME" + ] } ], "metadata": { @@ -2511,7 +2522,7 @@ "hIHTX-pkJjkO", "4x_t-MWnJjkQ" ], - "name": "UJ6 unified AutoML for natural language with Vertex AI Text Classification.ipynb", + "name": "UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb", "toc_visible": true }, "kernelspec": { diff --git a/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb b/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb index 190873fdc..32aab6679 100644 --- a/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb +++ b/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb @@ -360,7 +360,8 @@ }, "outputs": [], "source": [ - "! gcloud storage buckets create --location={REGION} --project={PROJECT_ID} {BUCKET_URI}" ] + "! gcloud storage buckets create --location={REGION} --project={PROJECT_ID} {BUCKET_URI}" + ] }, { "cell_type": "markdown", @@ -1430,7 +1431,8 @@ }, "outputs": [], "source": [ - "! gcloud storage ls --long {train_experiment_uri}" ] + "! gcloud storage ls --long {train_experiment_uri}" + ] }, { "cell_type": "markdown", @@ -1499,7 +1501,8 @@ }, "outputs": [], "source": [ - "! gcloud storage cp --recursive {train_experiment_uri}/* {experiments_path}" ] + "! gcloud storage cp --recursive {train_experiment_uri}/* {experiments_path}" + ] }, { "cell_type": "markdown", @@ -1893,7 +1896,8 @@ }, "outputs": [], "source": [ - "! gcloud storage cp --recursive {models_path} {MODELS_PATH}" ] + "! gcloud storage cp --recursive {models_path} {MODELS_PATH}" + ] }, { "cell_type": "markdown", @@ -2203,7 +2207,8 @@ "\n", "# Delete Cloud Storage objects that were created\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive {BUCKET_URI}\n", "\n", + " ! gcloud storage rm --recursive {BUCKET_URI}\n", + "\n", "# Delete tutorial folder\n", "if delete_tutorial:\n", " shutil.rmtree(tutorial_path)" diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb index b5a15520a..7870a5c8a 100644 --- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb +++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb @@ -229,10 +229,11 @@ "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n", " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n", " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", - " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n", + " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", + "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", " shell_output = ! 
gcloud storage buckets describe {BUCKET_NAME} | grep \"^location:\" | sed \"s/location: //\"\n", - " bucket_region = shell_output[0].strip().lower()\n", + " bucket_region = shell_output[0].strip().lower()\n", " if bucket_region != REGION:\n", " raise ValueError(\n", " \"Bucket region %s is different from notebook region %s\"\n", @@ -256,7 +257,8 @@ "\n", "\n", "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n", - "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", + "\n", "! gcloud config set project $PROJECT_ID\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"" @@ -1257,7 +1259,8 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive $BUCKET_NAME" ] + " ! gcloud storage rm --recursive $BUCKET_NAME" + ] }, { "cell_type": "markdown", From ce1643625fc4f2605518f1afcc2612a1b92bea21 Mon Sep 17 00:00:00 2001 From: bhandarivijay Date: Mon, 15 Dec 2025 17:04:07 +0000 Subject: [PATCH 14/20] Changes for 4339 --- .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb index 7870a5c8a..d99ec7b3c 100644 --- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb +++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb @@ -232,8 +232,12 @@ " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", +<<<<<<< HEAD " shell_output = ! gcloud storage buckets describe {BUCKET_NAME} | grep \"^location:\" | sed \"s/location: //\"\n", " bucket_region = shell_output[0].strip().lower()\n", +======= + " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", +>>>>>>> parent of 5d897fd6 (Manual Changes) " if bucket_region != REGION:\n", " raise ValueError(\n", " \"Bucket region %s is different from notebook region %s\"\n", From e86ab07bab62068d66b6befec9f33e6b6eb08a61 Mon Sep 17 00:00:00 2001 From: bhandarivijay Date: Mon, 15 Dec 2025 17:15:48 +0000 Subject: [PATCH 15/20] Changes for 4339 --- .../model_garden_vllm_text_only_tutorial.ipynb | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb index d99ec7b3c..a96728bf8 100644 --- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb +++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb @@ -232,12 +232,7 @@ " ! 
gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", -<<<<<<< HEAD - " shell_output = ! gcloud storage buckets describe {BUCKET_NAME} | grep \"^location:\" | sed \"s/location: //\"\n", - " bucket_region = shell_output[0].strip().lower()\n", -======= - " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", ->>>>>>> parent of 5d897fd6 (Manual Changes) + " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", " if bucket_region != REGION:\n", " raise ValueError(\n", " \"Bucket region %s is different from notebook region %s\"\n", From 9e182163d10566de30a26e336a790273b7f75e37 Mon Sep 17 00:00:00 2001 From: bhandarivijay Date: Mon, 15 Dec 2025 17:36:07 +0000 Subject: [PATCH 16/20] Changes for 4339 --- .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb index a96728bf8..ba0bd774f 100644 --- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb +++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb @@ -232,7 +232,8 @@ " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", - " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", + " shell_output = ! 
gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", + " bucket_region = shell_output[0].strip().lower()\n", " if bucket_region != REGION:\n", " raise ValueError(\n", " \"Bucket region %s is different from notebook region %s\"\n", From 3908d9e4ffc79eb41c8fb251116b0319598ddc2e Mon Sep 17 00:00:00 2001 From: bhandarivijay Date: Tue, 16 Dec 2025 14:49:29 +0000 Subject: [PATCH 17/20] Fix: Applied linter formatting and resolved style issues --- .../automl/showcase_automl_image_classification_batch.ipynb | 4 +--- .../showcase_automl_image_object_detection_batch.ipynb | 4 +--- ...owcase_automl_tabular_classification_online_explain.ipynb | 1 - .../showcase_automl_text_entity_extraction_batch.ipynb | 4 +--- ...utoML for structured data with Vertex AI Regression.ipynb | 1 - ...toML for vision with Vertex AI Video Classification.ipynb | 1 - ...natural language with Vertex AI Text Classification.ipynb | 5 +---- .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 2 +- 8 files changed, 5 insertions(+), 17 deletions(-) diff --git a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb index a0dc81176..49d0ff60d 100644 --- a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb @@ -478,9 +478,7 @@ "import time\n", "\n", "from google.cloud.aiplatform import gapic as aip\n", - "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", - "from google.protobuf.struct_pb2 import Struct, Value" + "from google.protobuf import json_format" ] }, { diff --git a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb index f8ce0592d..88daaf17f 100644 --- a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb @@ -478,9 +478,7 @@ "import time\n", "\n", "from google.cloud.aiplatform import gapic as aip\n", - "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", - "from google.protobuf.struct_pb2 import Struct, Value" + "from google.protobuf import json_format" ] }, { diff --git a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb index 47204dd63..58d7fd483 100644 --- a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb @@ -397,7 +397,6 @@ "\n", "import google.cloud.aiplatform_v1beta1 as aip\n", "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", "from google.protobuf.struct_pb2 import Struct, Value" ] }, diff --git a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb index c1e983cf9..1bb4543ba 100644 --- a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb +++ 
b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb @@ -478,9 +478,7 @@ "import time\n", "\n", "from google.cloud.aiplatform import gapic as aip\n", - "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", - "from google.protobuf.struct_pb2 import Struct, Value" + "from google.protobuf import json_format" ] }, { diff --git a/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb b/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb index 64caba398..8f9cd9418 100644 --- a/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb +++ b/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb @@ -385,7 +385,6 @@ "\n", "from google.cloud.aiplatform import gapic as aip\n", "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", "from google.protobuf.struct_pb2 import Struct, Value" ] }, diff --git a/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb b/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb index 7cf1a2607..1cd5e865e 100644 --- a/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb +++ b/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb @@ -385,7 +385,6 @@ "\n", "from google.cloud.aiplatform import gapic as aip\n", "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", "from google.protobuf.struct_pb2 import Struct, Value" ] }, diff --git a/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb b/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb index 5699a32ff..9f0e3aabf 100644 --- a/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb +++ b/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb @@ -379,16 +379,13 @@ }, "outputs": [], "source": [ - "import base64\n", "import json\n", "import os\n", "import sys\n", "import time\n", "\n", "from google.cloud.aiplatform import gapic as aip\n", - "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", - "from google.protobuf.struct_pb2 import Struct, Value" + "from google.protobuf import json_format" ] }, { diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb index ba0bd774f..72441802f 100644 --- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb +++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb @@ -232,7 +232,7 @@ " ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", - " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", + " shell_output = ! 
gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
     "    bucket_region = shell_output[0].strip().lower()\n",
     "    if bucket_region != REGION:\n",
     "        raise ValueError(\n",
     "            \"Bucket region %s is different from notebook region %s\"\n",
From 45891a62eb1265cb80441192602b9d5a72fd9ab0 Mon Sep 17 00:00:00 2001
From: bhandarivijay
Date: Mon, 22 Dec 2025 11:02:48 +0000
Subject: [PATCH 18/20] gcloud to gsutil changes for 4339

---
 ...den_gemma_fine_tuning_batch_deployment_on_rov.ipynb | 10 +++++-----
 .../model_garden_vllm_text_only_tutorial.ipynb |  7 +++----
 2 files changed, 8 insertions(+), 9 deletions(-)

diff --git a/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb b/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb
index 32aab6679..608d435f8 100644
--- a/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb
+++ b/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb
@@ -360,7 +360,7 @@
 },
 "outputs": [],
 "source": [
-    "! gcloud storage buckets create --location={REGION} --project={PROJECT_ID} {BUCKET_URI}"
+    "! gsutil mb -l {REGION} -p {PROJECT_ID} {BUCKET_URI}"
 ]
 },
 {
@@ -1431,7 +1431,7 @@
 },
 "outputs": [],
 "source": [
-    "! gcloud storage ls --long {train_experiment_uri}"
+    "! gsutil ls -l {train_experiment_uri}"
 ]
 },
 {
@@ -1501,7 +1501,7 @@
 },
 "outputs": [],
 "source": [
-    "! gcloud storage cp --recursive {train_experiment_uri}/* {experiments_path}"
+    "!gsutil -q cp -r {train_experiment_uri}/* {experiments_path}"
 ]
 },
 {
@@ -1896,7 +1896,7 @@
 },
 "outputs": [],
 "source": [
-    "! gcloud storage cp --recursive {models_path} {MODELS_PATH}"
+    "gsutil -q cp -r {models_path} {MODELS_PATH}"
 ]
 },
 {
@@ -2207,7 +2207,7 @@
 "\n",
 "# Delete Cloud Storage objects that were created\n",
 "if delete_bucket:\n",
-    "    ! gcloud storage rm --recursive {BUCKET_URI}\n",
+    "    ! gsutil -q rm -r {BUCKET_URI}\n",
 "\n",
 "# Delete tutorial folder\n",
 "if delete_tutorial:\n",
 "    shutil.rmtree(tutorial_path)"
diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
index 72441802f..447b198ad 100644
--- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
+++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb
@@ -229,7 +229,7 @@
 "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
 "    BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
 "    BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
-    "    ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n",
+    "    !gsutil mb -l {REGION} {BUCKET_URI}\n",
 "else:\n",
 "    assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
 "    shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
@@ -257,8 +257,7 @@
 "\n",
 "\n",
 "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
-    "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
-    "\n",
+    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
 "! gcloud config set project $PROJECT_ID\n",
 "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
 "! 
gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"" @@ -1259,7 +1258,7 @@ "\n", "delete_bucket = False # @param {type:\"boolean\"}\n", "if delete_bucket:\n", - " ! gcloud storage rm --recursive $BUCKET_NAME" + " ! gsutil -m rm -r $BUCKET_NAME" ] }, { From 8aba254bad2768d32f2a9cb76209330eb6efe0b5 Mon Sep 17 00:00:00 2001 From: bhandarivijay Date: Mon, 22 Dec 2025 11:37:00 +0000 Subject: [PATCH 19/20] removed gsutil to gcloud migration --- .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb index 447b198ad..bcec32442 100644 --- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb +++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb @@ -232,7 +232,7 @@ " !gsutil mb -l {REGION} {BUCKET_URI}\n", "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", - " shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", + " shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n", " if bucket_region != REGION:\n", " raise ValueError(\n", From b9492668ff0777a6bbd503d3b21d33babaf36ccf Mon Sep 17 00:00:00 2001 From: bhandarivijay Date: Mon, 22 Dec 2025 12:31:26 +0000 Subject: [PATCH 20/20] Manual changes --- ...l_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb | 6 +++--- .../model_garden/model_garden_vllm_text_only_tutorial.ipynb | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb b/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb index 608d435f8..4bb16bb38 100644 --- a/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb +++ b/notebooks/community/model_garden/model_garden_gemma_fine_tuning_batch_deployment_on_rov.ipynb @@ -1501,7 +1501,7 @@ }, "outputs": [], "source": [ - "!gsutil -q cp -r {train_experiment_uri}/* {experiments_path}" + "! gsutil -q cp -r {train_experiment_uri}/* {experiments_path}" ] }, { @@ -1896,7 +1896,7 @@ }, "outputs": [], "source": [ - "gsutil -q cp -r {models_path} {MODELS_PATH}" + "! gsutil -q cp -r {models_path} {MODELS_PATH}" ] }, { @@ -2207,7 +2207,7 @@ "\n", "# Delete Cloud Storage objects that were created\n", "if delete_bucket:\n", - " ! gsutil -q rm -r {BUCKET_URI}\n", + " ! 
gsutil -q -m rm -r {BUCKET_URI}\n", "\n", "# Delete tutorial folder\n", "if delete_tutorial:\n", diff --git a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb index bcec32442..9c2ad6759 100644 --- a/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb +++ b/notebooks/community/model_garden/model_garden_vllm_text_only_tutorial.ipynb @@ -229,7 +229,7 @@ "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n", " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n", " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", - " !gsutil mb -l {REGION} {BUCKET_URI}\n", + " ! gsutil mb -l {REGION} {BUCKET_URI}\n", "else:\n", " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", " shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", @@ -258,6 +258,7 @@ "\n", "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n", "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n", + "\n", "! gcloud config set project $PROJECT_ID\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n", "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\""
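
The patches above amount to one command-mapping table, first applied, then reformatted, and in the last three patches partly reverted. For review, the gsutil / gcloud storage equivalences the series exercises are collected below as a sketch: the variable names mirror the notebooks, $SRC and $DST are illustrative placeholders rather than names from the hunks, and the flag spellings are the ones that actually appear in the patches.

    # Create a regional bucket
    gsutil mb -l "$REGION" "$BUCKET_URI"
    gcloud storage buckets create --location="$REGION" "$BUCKET_URI"

    # List a bucket: all object versions, long listing
    gsutil ls -al "$BUCKET_NAME"
    gcloud storage ls --all-versions --long "$BUCKET_NAME"

    # Inspect bucket metadata (used above to read the bucket's location)
    gsutil ls -Lb "$BUCKET_NAME"
    gcloud storage ls --full --buckets "$BUCKET_NAME"

    # Stream an object to stdout
    gsutil cat "$IMPORT_FILE" | head
    gcloud storage cat "$IMPORT_FILE" | head

    # Copy recursively ($SRC and $DST are placeholders)
    gsutil -q cp -r "$SRC" "$DST"
    gcloud storage cp --recursive "$SRC" "$DST"

    # Remove recursively (gsutil needs -m for parallelism; gcloud storage
    # parallelizes by default)
    gsutil -m rm -r "$BUCKET_NAME"
    gcloud storage rm --recursive "$BUCKET_NAME"

    # Grant a service account storage.admin on a bucket
    gsutil iam ch "serviceAccount:${SERVICE_ACCOUNT}:roles/storage.admin" "$BUCKET_NAME"
    gcloud storage buckets add-iam-policy-binding "$BUCKET_NAME" \
        --member="serviceAccount:${SERVICE_ACCOUNT}" --role=roles/storage.admin

Note that inside the notebooks these commands run through IPython's `!` shell escape, which interpolates Python variables such as {REGION} and $BUCKET_NAME before the shell sees the command, so the quoting shown here applies to standalone shell use, not to the notebook cells verbatim.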