diff --git a/notebooks/community/explainable_ai/SDK_Custom_Container_XAI.ipynb b/notebooks/community/explainable_ai/SDK_Custom_Container_XAI.ipynb index 1a71bae50..96566721a 100644 --- a/notebooks/community/explainable_ai/SDK_Custom_Container_XAI.ipynb +++ b/notebooks/community/explainable_ai/SDK_Custom_Container_XAI.ipynb @@ -405,7 +405,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI" + "! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI" ] }, { @@ -425,7 +425,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_URI" + "! gcloud storage ls --all-versions --long $BUCKET_URI" ] }, { @@ -698,7 +698,7 @@ }, "outputs": [], "source": [ - "!gsutil cp app/model.joblib {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/" + "!gcloud storage cp app/model.joblib {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/" ] }, { @@ -1516,7 +1516,7 @@ "\n", "delete_bucket = False\n", "if delete_bucket or os.getenv(\"IS_TESTING\"):\n", - " ! gsutil rm -rf {BUCKET_URI}" + " ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ] } ], diff --git a/notebooks/community/migration/UJ11 legacy HyperParameter Tuning Training Job with TensorFlow.ipynb b/notebooks/community/migration/UJ11 legacy HyperParameter Tuning Training Job with TensorFlow.ipynb index f9d959b9b..ffa5e65a3 100644 --- a/notebooks/community/migration/UJ11 legacy HyperParameter Tuning Training Job with TensorFlow.ipynb +++ b/notebooks/community/migration/UJ11 legacy HyperParameter Tuning Training Job with TensorFlow.ipynb @@ -312,7 +312,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION gs://$BUCKET_NAME" + "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ] }, { @@ -332,7 +332,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al gs://$BUCKET_NAME" + "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ] }, { @@ -614,7 +614,7 @@ "! rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! 
gsutil cp custom.tar.gz gs://$BUCKET_NAME/hpt_boston_housing.tar.gz" + "! gcloud storage cp custom.tar.gz gs://$BUCKET_NAME/hpt_boston_housing.tar.gz" ] }, { @@ -1370,7 +1370,7 @@ "delete_bucket = True\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r gs://$BUCKET_NAME" + " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ] } ], diff --git a/notebooks/community/migration/UJ8 legacy AutoML Natural Language Text Sentiment Analysis.ipynb b/notebooks/community/migration/UJ8 legacy AutoML Natural Language Text Sentiment Analysis.ipynb index 982b8b3d2..0ec6ec283 100644 --- a/notebooks/community/migration/UJ8 legacy AutoML Natural Language Text Sentiment Analysis.ipynb +++ b/notebooks/community/migration/UJ8 legacy AutoML Natural Language Text Sentiment Analysis.ipynb @@ -326,7 +326,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION gs://$BUCKET_NAME" + "! gcloud storage buckets create --location=$REGION gs://$BUCKET_NAME" ] }, { @@ -346,7 +346,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al gs://$BUCKET_NAME" + "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ] }, { @@ -383,11 +383,9 @@ "import json\n", "import os\n", "import sys\n", - "import time\n", "\n", "from google.cloud import automl\n", - "from google.protobuf.json_format import MessageToJson\n", - "from google.protobuf.struct_pb2 import Value" + "from google.protobuf.json_format import MessageToJson" ] }, { @@ -485,7 +483,7 @@ }, "outputs": [], "source": [ - "! gsutil cat $IMPORT_FILE | head -n 10" + "! gcloud storage cat $IMPORT_FILE | head -n 10" ] }, { @@ -1209,14 +1207,14 @@ "gcs_input_uri = \"gs://\" + BUCKET_NAME + \"/test.csv\"\n", "with tf.io.gfile.GFile(gcs_input_uri, \"w\") as f:\n", " item_1 = \"gs://cloud-samples-data/language/sentiment-positive.txt\"\n", - " ! gsutil cp $item_1 gs://$BUCKET_NAME\n", + " ! gcloud storage cp $item_1 gs://$BUCKET_NAME\n", " f.write(\"gs://\" + BUCKET_NAME + \"/sentiment-positive.txt\" + \"\\n\")\n", "\n", " item_2 = \"gs://cloud-samples-data/language/sentiment-negative.txt\"\n", - " ! gsutil cp $item_2 gs://$BUCKET_NAME\n", + " ! gcloud storage cp $item_2 gs://$BUCKET_NAME\n", " f.write(\"gs://\" + BUCKET_NAME + \"/sentiment-negative.txt\")\n", "\n", - "! gsutil cat $gcs_input_uri" + "! gcloud storage cat $gcs_input_uri" ] }, { @@ -1381,7 +1380,7 @@ }, "outputs": [], "source": [ - "test_data = ! gsutil cat $IMPORT_FILE | head -n1\n", + "test_data = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "\n", "test_item = str(test_data[0]).split(\",\")[0]\n", "test_label = str(test_data[0]).split(\",\")[1]\n", @@ -1615,13 +1614,13 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r gs://$BUCKET_NAME" + " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ] } ], "metadata": { "colab": { - "name": "UJ8 legacy AutoML Natural Language - Text Sentiment Analysis.ipynb", + "name": "UJ8 legacy AutoML Natural Language Text Sentiment Analysis.ipynb", "toc_visible": true }, "kernelspec": { diff --git a/notebooks/community/ml_ops/stage2/get_started_vertex_feature_store.ipynb b/notebooks/community/ml_ops/stage2/get_started_vertex_feature_store.ipynb index 78a4aae0e..9eaaa85bb 100644 --- a/notebooks/community/ml_ops/stage2/get_started_vertex_feature_store.ipynb +++ b/notebooks/community/ml_ops/stage2/get_started_vertex_feature_store.ipynb @@ -1017,8 +1017,8 @@ "USERS_AVRO_FN = \"users.avro\"\n", "MOVIES_AVRO_FN = \"movies.avro\"\n", "\n", - "! gsutil cp $GCS_USERS_AVRO_URI $USERS_AVRO_FN\n", - "! gsutil cp $GCS_MOVIES_AVRO_URI $MOVIES_AVRO_FN" + "! gcloud storage cp $GCS_USERS_AVRO_URI $USERS_AVRO_FN\n", + "! 
gcloud storage cp $GCS_MOVIES_AVRO_URI $MOVIES_AVRO_FN" ] }, { diff --git a/notebooks/community/ml_ops/stage3/get_started_with_bqml_pipeline_components.ipynb b/notebooks/community/ml_ops/stage3/get_started_with_bqml_pipeline_components.ipynb index 63668be8f..b4599b047 100644 --- a/notebooks/community/ml_ops/stage3/get_started_with_bqml_pipeline_components.ipynb +++ b/notebooks/community/ml_ops/stage3/get_started_with_bqml_pipeline_components.ipynb @@ -452,7 +452,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_URI" + "! gcloud storage buckets create --location=$REGION $BUCKET_URI" ] }, { @@ -472,7 +472,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_URI" + "! gcloud storage ls --all-versions --long $BUCKET_URI" ] }, { @@ -545,9 +545,9 @@ }, "outputs": [], "source": [ - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI" + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer " ] }, { @@ -1052,13 +1052,13 @@ " + \"/evaluation_metrics\"\n", " )\n", " if tf.io.gfile.exists(EXECUTE_OUTPUT):\n", - " ! gsutil cat $EXECUTE_OUTPUT\n", + " ! gcloud storage cat $EXECUTE_OUTPUT\n", " return EXECUTE_OUTPUT\n", " elif tf.io.gfile.exists(GCP_RESOURCES):\n", - " ! gsutil cat $GCP_RESOURCES\n", + " ! gcloud storage cat $GCP_RESOURCES\n", " return GCP_RESOURCES\n", " elif tf.io.gfile.exists(EVAL_METRICS):\n", - " ! gsutil cat $EVAL_METRICS\n", + " ! 
gcloud storage cat $EVAL_METRICS\n", " return EVAL_METRICS\n", "\n", " return None\n", @@ -1081,14 +1081,14 @@ "print(\"\\n\\n\")\n", "print(\"model-upload\")\n", "artifacts = print_pipeline_output(pipeline, \"model-upload\")\n", - "output = !gsutil cat $artifacts\n", + "output = !gcloud storage cat $artifacts\n", "output = json.loads(output[0])\n", "model_id = output[\"artifacts\"][\"model\"][\"artifacts\"][0][\"metadata\"][\"resourceName\"]\n", "print(\"\\n\\n\")\n", "print(\"endpoint-create\")\n", "artifacts = print_pipeline_output(pipeline, \"endpoint-create\")\n", "print(\"\\n\\n\")\n", - "output = !gsutil cat $artifacts\n", + "output = !gcloud storage cat $artifacts\n", "output = json.loads(output[0])\n", "endpoint_id = output[\"artifacts\"][\"endpoint\"][\"artifacts\"][0][\"metadata\"][\n", " \"resourceName\"\n", @@ -1322,7 +1322,7 @@ "delete_bucket = False\n", "\n", "if delete_bucket or os.getenv(\"IS_TESTING\"):\n", - " ! gsutil rm -r $BUCKET_URI" + " ! gcloud storage rm --recursive $BUCKET_URI" ] } ], diff --git a/notebooks/community/persistent_resource/training_code/mpg_container/trainer/train.py b/notebooks/community/persistent_resource/training_code/mpg_container/trainer/train.py index b61d9884c..cb6534509 100644 --- a/notebooks/community/persistent_resource/training_code/mpg_container/trainer/train.py +++ b/notebooks/community/persistent_resource/training_code/mpg_container/trainer/train.py @@ -14,10 +14,7 @@ import argparse import logging -import os -import numpy as np import pandas as pd -import pathlib import tensorflow as tf from tensorflow import keras diff --git a/notebooks/notebook_template_review.py b/notebooks/notebook_template_review.py index f4f1a7f09..af6113730 100644 --- a/notebooks/notebook_template_review.py +++ b/notebooks/notebook_template_review.py @@ -39,7 +39,6 @@ import json import os import sys -import urllib.request import csv from enum import Enum from abc import ABC, abstractmethod diff --git 
a/notebooks/official/pipelines/google_cloud_pipeline_components_TPU_model_train_upload_deploy.ipynb b/notebooks/official/pipelines/google_cloud_pipeline_components_TPU_model_train_upload_deploy.ipynb index 614d8e62a..1f2d060e0 100644 --- a/notebooks/official/pipelines/google_cloud_pipeline_components_TPU_model_train_upload_deploy.ipynb +++ b/notebooks/official/pipelines/google_cloud_pipeline_components_TPU_model_train_upload_deploy.ipynb @@ -292,7 +292,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}" + "! gcloud storage buckets create --location {LOCATION} --project {PROJECT_ID} {BUCKET_URI}" ] }, { @@ -365,9 +365,10 @@ }, "outputs": [], "source": [ - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n", + "# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI" + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ] }, { @@ -1234,7 +1235,7 @@ "# Warning: Setting this to true deletes everything in your bucket\n", "delete_bucket = True\n", "if delete_bucket:\n", - " ! gsutil rm -r $BUCKET_URI\n", + " ! gcloud storage rm --recursive $BUCKET_URI\n", "\n", "# Delete the Artifact Registry repository\n", "! 
gcloud artifacts repositories delete $REPOSITORY --location=$LOCATION --quiet\n", diff --git a/notebooks/official/pipelines/multicontender_vs_champion_deployment_method.ipynb b/notebooks/official/pipelines/multicontender_vs_champion_deployment_method.ipynb index 8c760540a..286034e13 100644 --- a/notebooks/official/pipelines/multicontender_vs_champion_deployment_method.ipynb +++ b/notebooks/official/pipelines/multicontender_vs_champion_deployment_method.ipynb @@ -309,7 +309,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}" + "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ] }, { @@ -384,9 +384,10 @@ }, "outputs": [], "source": [ - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n", + "! # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI" + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ] }, { @@ -1239,7 +1240,7 @@ "# Delete Cloud Storage objects that were created\n", "delete_bucket = True\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_URI\n", + " ! gcloud storage rm --recursive $BUCKET_URI\n", "\n", "# Remove the local pipeline package file\n", "! 
rm multicontender_vs_champion.json" diff --git a/notebooks/official/vector_search/sdk_vector_search_create_multimodal_embeddings.ipynb b/notebooks/official/vector_search/sdk_vector_search_create_multimodal_embeddings.ipynb index 63bd6203a..58fe43f16 100644 --- a/notebooks/official/vector_search/sdk_vector_search_create_multimodal_embeddings.ipynb +++ b/notebooks/official/vector_search/sdk_vector_search_create_multimodal_embeddings.ipynb @@ -275,7 +275,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}" + "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ] }, { @@ -1062,7 +1062,7 @@ "source": [ "UNIQUE_FOLDER_NAME = \"embeddings_folder_unique\"\n", "EMBEDDINGS_INITIAL_URI = f\"{BUCKET_URI}/{UNIQUE_FOLDER_NAME}/\"\n", - "! gsutil cp {embeddings_file.name} {EMBEDDINGS_INITIAL_URI}" + "! gcloud storage cp {embeddings_file.name} {EMBEDDINGS_INITIAL_URI}" ] }, { @@ -1369,7 +1369,7 @@ "# Delete Cloud Storage objects that were created\n", "delete_bucket = False\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_URI" + " ! gcloud storage rm --recursive $BUCKET_URI" ] } ],