diff --git a/notebooks/community/ml_ops/stage2/get_started_vertex_training_pytorch.ipynb b/notebooks/community/ml_ops/stage2/get_started_vertex_training_pytorch.ipynb
index 5cdb8022b..f6e8ab83e 100644
--- a/notebooks/community/ml_ops/stage2/get_started_vertex_training_pytorch.ipynb
+++ b/notebooks/community/ml_ops/stage2/get_started_vertex_training_pytorch.ipynb
@@ -483,7 +483,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_URI"
+    "! gcloud storage buckets create --location=$REGION $BUCKET_URI"
    ]
   },
   {
@@ -503,7 +503,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_URI"
+    "! gcloud storage ls --all-versions --long $BUCKET_URI"
    ]
   },
   {
@@ -680,7 +680,7 @@
     "The PyTorch package does not have support to save the model to a Cloud Storage location. Instead, you will do the following steps to save to a Cloud Storage location.\n",
     "\n",
     "1. Save the in-memory model to the local filesystem (e.g., model.pth).\n",
-    "2. Use gsutil to copy the local copy to the specified Cloud Storage location.\n",
+    "2. Use gcloud storage to copy the local copy to the specified Cloud Storage location.\n",
     "\n",
     "*Note*: You can do hyperparameter tuning with a PyTorch model."
    ]
   },
   {
@@ -1060,7 +1060,7 @@
     "! rm -f custom.tar custom.tar.gz\n",
     "! tar cvf custom.tar custom\n",
     "! gzip custom.tar\n",
-    "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_cifar10.tar.gz"
+    "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_cifar10.tar.gz"
    ]
   },
   {
@@ -1375,7 +1375,7 @@
     "\n",
     "delete_bucket = False\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/community/ml_ops/stage2/get_started_with_distributed_training_xgboost.ipynb b/notebooks/community/ml_ops/stage2/get_started_with_distributed_training_xgboost.ipynb
index 7c2ec797f..cae8ca19a 100644
--- a/notebooks/community/ml_ops/stage2/get_started_with_distributed_training_xgboost.ipynb
+++ b/notebooks/community/ml_ops/stage2/get_started_with_distributed_training_xgboost.ipynb
@@ -448,7 +448,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_URI"
+    "! gcloud storage buckets create --location=$REGION $BUCKET_URI"
    ]
   },
   {
@@ -468,7 +468,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_URI"
+    "! gcloud storage ls --all-versions --long $BUCKET_URI"
    ]
   },
   {
@@ -1136,7 +1136,7 @@
     "! rm -f custom.tar custom.tar.gz\n",
     "! tar cvf custom.tar custom\n",
     "! gzip custom.tar\n",
-    "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_covertype.tar.gz"
+    "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_covertype.tar.gz"
    ]
   },
   {
@@ -1282,7 +1282,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls {MODEL_DIR}/model"
+    "! gcloud storage ls {MODEL_DIR}/model"
    ]
   },
   {
@@ -1326,7 +1326,7 @@
    },
    "outputs": [],
    "source": [
-    "output = ! gsutil cat {DATASET_FILE} | head -n2\n",
+    "output = ! gcloud storage cat {DATASET_FILE} | head -n2\n",
     "\n",
     "print(output[1])\n",
     "\n",
@@ -1423,7 +1423,7 @@
    "source": [
     "delete_bucket = True\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil rm -r {BUCKET_URI}\n",
+    "    ! gcloud storage rm --recursive {BUCKET_URI}\n",
     "\n",
     "! rm -rf custom prediction custom.tar.gz\n",
     "\n",
diff --git a/notebooks/community/ml_ops/stage2/get_started_with_visionapi_and_automl.ipynb b/notebooks/community/ml_ops/stage2/get_started_with_visionapi_and_automl.ipynb
index 24abe9ae9..9ab47d3f1 100644
--- a/notebooks/community/ml_ops/stage2/get_started_with_visionapi_and_automl.ipynb
+++ b/notebooks/community/ml_ops/stage2/get_started_with_visionapi_and_automl.ipynb
@@ -512,7 +512,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $VERTEX_AI_REGION -p $PROJECT_ID $BUCKET_URI"
+    "! gcloud storage buckets create --location=$VERTEX_AI_REGION --project=$PROJECT_ID $BUCKET_URI"
    ]
   },
   {
@@ -532,7 +532,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_URI"
+    "! gcloud storage ls --all-versions --long $BUCKET_URI"
    ]
   },
   {
@@ -1277,7 +1277,7 @@
     "job.delete()\n",
     "\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil -m rm -r $BUCKET_URI\n",
+    "    ! gcloud storage rm --recursive $BUCKET_URI\n",
     "\n",
     "print(\"Clean up completed!\")"
    ]
   }
diff --git a/notebooks/community/ml_ops/stage6/get_started_with_automl_tabular_model_online.ipynb b/notebooks/community/ml_ops/stage6/get_started_with_automl_tabular_model_online.ipynb
index 9401c1084..3596f8b66 100644
--- a/notebooks/community/ml_ops/stage6/get_started_with_automl_tabular_model_online.ipynb
+++ b/notebooks/community/ml_ops/stage6/get_started_with_automl_tabular_model_online.ipynb
@@ -349,7 +349,6 @@
    },
    "outputs": [],
    "source": [
-    "import random\n",
     "import string\n",
     "\n",
     "\n",
@@ -478,7 +477,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_URI"
+    "! gcloud storage buckets create --location=$REGION $BUCKET_URI"
    ]
   },
   {
@@ -498,7 +497,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_URI"
+    "! gcloud storage ls --all-versions --long $BUCKET_URI"
    ]
   },
   {
@@ -680,13 +679,13 @@
    },
    "outputs": [],
    "source": [
-    "count = ! gsutil cat $IMPORT_FILE | wc -l\n",
+    "count = ! gcloud storage cat $IMPORT_FILE | wc -l\n",
     "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $IMPORT_FILE | head\n",
+    "! gcloud storage cat $IMPORT_FILE | head\n",
     "\n",
-    "heading = ! gsutil cat $IMPORT_FILE | head -n1\n",
+    "heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n",
     "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
     "print(\"Label Column Name\", label_column)\n",
     "if label_column is None:\n",
@@ -1382,7 +1381,7 @@
     "delete_bucket = False\n",
     "\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil rm -rf {BUCKET_URI}"
+    "    ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
    ]
   }
  ],
diff --git a/notebooks/community/ml_ops/stage6/get_started_with_matching_engine.ipynb b/notebooks/community/ml_ops/stage6/get_started_with_matching_engine.ipynb
index ad7f2fa44..fac8f3c87 100644
--- a/notebooks/community/ml_ops/stage6/get_started_with_matching_engine.ipynb
+++ b/notebooks/community/ml_ops/stage6/get_started_with_matching_engine.ipynb
@@ -496,7 +496,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_URI"
+    "! gcloud storage buckets create --location=$REGION $BUCKET_URI"
    ]
   },
   {
@@ -516,7 +516,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_URI"
+    "! gcloud storage ls --all-versions --long $BUCKET_URI"
    ]
   },
   {
@@ -587,7 +587,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cp gs://cloud-samples-data/vertex-ai/matching_engine/glove-100-angular.hdf5 ."
+    "! gcloud storage cp gs://cloud-samples-data/vertex-ai/matching_engine/glove-100-angular.hdf5 ."
    ]
   },
   {
@@ -667,7 +667,7 @@
    "outputs": [],
    "source": [
     "EMBEDDINGS_INITIAL_URI = f\"{BUCKET_URI}/matching_engine/initial/\"\n",
-    "! gsutil cp glove100.json {EMBEDDINGS_INITIAL_URI}"
+    "! gcloud storage cp glove100.json {EMBEDDINGS_INITIAL_URI}"
    ]
   },
   {
@@ -762,7 +762,7 @@
     "\n",
     "EMBEDDINGS_UPDATE_URI = f\"{BUCKET_URI}/matching-engine/incremental/\"\n",
     "\n",
-    "! gsutil cp glove100_incremental.json {EMBEDDINGS_UPDATE_URI}"
+    "! gcloud storage cp glove100_incremental.json {EMBEDDINGS_UPDATE_URI}"
    ]
   },
   {
@@ -1448,7 +1448,7 @@
     "\n",
     "delete_bucket = False\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil rm -rf {BUCKET_URI}"
+    "    ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
    ]
   }
  ],
diff --git a/notebooks/community/workbench/telecom-subscriber-churn-prediction.ipynb b/notebooks/community/workbench/telecom-subscriber-churn-prediction.ipynb
index 2d8df946a..d0c96b502 100644
--- a/notebooks/community/workbench/telecom-subscriber-churn-prediction.ipynb
+++ b/notebooks/community/workbench/telecom-subscriber-churn-prediction.ipynb
@@ -511,7 +511,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI"
+    "! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI"
    ]
   },
   {
@@ -531,7 +531,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_URI"
+    "! gcloud storage ls --all-versions --long $BUCKET_URI"
    ]
   },
   {
@@ -1595,7 +1595,7 @@
     "# Delete the Cloud Storage bucket\n",
     "delete_bucket = True\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil -m rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/official/automl/get_started_automl_training.ipynb b/notebooks/official/automl/get_started_automl_training.ipynb
index 84dcaa26c..bd0bef91a 100644
--- a/notebooks/official/automl/get_started_automl_training.ipynb
+++ b/notebooks/official/automl/get_started_automl_training.ipynb
@@ -313,7 +313,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $LOCATION $BUCKET_URI"
+    "! gcloud storage buckets create --location=$LOCATION $BUCKET_URI"
    ]
   },
   {
@@ -583,11 +583,11 @@
    "source": [
     "FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n",
     "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
+    "! gcloud storage cat $FILE | head"
    ]
   },
   {
@@ -811,7 +811,7 @@
    },
    "outputs": [],
    "source": [
-    "test_item = !gsutil cat $IMPORT_FILE | head -n1\n",
+    "test_item = !gcloud storage cat $IMPORT_FILE | head -n1\n",
     "if len(str(test_item[0]).split(\",\")) == 3:\n",
     "    _, test_item, test_label = str(test_item[0]).split(\",\")\n",
     "else:\n",
@@ -864,7 +864,7 @@
     "\n",
     "# Copy the test image to the Cloud storage bucket as \"test.jpg\"\n",
     "test_image_local = \"{}/test.jpg\".format(BUCKET_URI)\n",
-    "! gsutil cp $test_item $test_image_local\n",
+    "! gcloud storage cp $test_item $test_image_local\n",
     "\n",
     "# Download the test image in bytes format\n",
     "storage_client = storage.Client(project=PROJECT_ID)\n",
@@ -1453,7 +1453,7 @@
     "# Delete the Cloud Storage bucket\n",
     "delete_bucket = False # Set True for deletion\n",
     "if delete_bucket:\n",
-    "    ! gsutil rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/official/pipelines/get_started_with_hpt_pipeline_components.ipynb b/notebooks/official/pipelines/get_started_with_hpt_pipeline_components.ipynb
index 5ff72b7e9..d194d8860 100644
--- a/notebooks/official/pipelines/get_started_with_hpt_pipeline_components.ipynb
+++ b/notebooks/official/pipelines/get_started_with_hpt_pipeline_components.ipynb
@@ -298,7 +298,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $LOCATION $BUCKET_URI"
+    "! gcloud storage buckets create --location=$LOCATION $BUCKET_URI"
    ]
   },
   {
@@ -372,9 +372,9 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n",
+    "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
     "\n",
-    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI"
+    "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
    ]
   },
   {
@@ -817,7 +817,7 @@
     "! rm -f custom.tar custom.tar.gz\n",
     "! tar cvf custom.tar custom\n",
     "! gzip custom.tar\n",
-    "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_horses_or_humans.tar.gz"
+    "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_horses_or_humans.tar.gz"
    ]
   },
   {
@@ -1326,13 +1326,13 @@
     "        + \"/evaluation_metrics\"\n",
     "    )\n",
     "    if tf.io.gfile.exists(EXECUTE_OUTPUT):\n",
-    "        ! gsutil cat $EXECUTE_OUTPUT\n",
+    "        ! gcloud storage cat $EXECUTE_OUTPUT\n",
     "        return EXECUTE_OUTPUT\n",
     "    elif tf.io.gfile.exists(GCP_RESOURCES):\n",
-    "        ! gsutil cat $GCP_RESOURCES\n",
+    "        ! gcloud storage cat $GCP_RESOURCES\n",
     "        return GCP_RESOURCES\n",
     "    elif tf.io.gfile.exists(EVAL_METRICS):\n",
-    "        ! gsutil cat $EVAL_METRICS\n",
+    "        ! gcloud storage cat $EVAL_METRICS\n",
     "        return EVAL_METRICS\n",
     "\n",
     "    return None\n",
     "\n",
@@ -1347,7 +1347,7 @@
     "print(\"getbesttrialop\")\n",
     "artifacts = print_pipeline_output(pipeline, \"getbesttrialop\")\n",
     "print(\"\\n\\n\")\n",
-    "output = !gsutil cat $artifacts\n",
+    "output = !gcloud storage cat $artifacts\n",
     "output = json.loads(output[0])\n",
     "best_trial = json.loads(output[\"parameters\"][\"Output\"][\"stringValue\"])\n",
     "model_id = best_trial[\"id\"]\n",
@@ -1422,7 +1422,7 @@
     "delete_bucket = False\n",
     "\n",
     "if delete_bucket:\n",
-    "    ! gsutil rm -r $BUCKET_URI\n",
+    "    ! gcloud storage rm --recursive $BUCKET_URI\n",
     "\n",
     "# Delete artifact registry repo\n",
     "! gcloud artifacts repositories delete $REPO_NAME --location $LOCATION --quiet"
diff --git a/notebooks/official/prediction/get_started_with_tf_serving.ipynb b/notebooks/official/prediction/get_started_with_tf_serving.ipynb
index 67cadce95..a0bcc8dff 100644
--- a/notebooks/official/prediction/get_started_with_tf_serving.ipynb
+++ b/notebooks/official/prediction/get_started_with_tf_serving.ipynb
@@ -292,7 +292,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
+    "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
    ]
   },
   {
@@ -919,7 +919,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cp gs://cloud-ml-data/img/flower_photos/daisy/100080576_f52e8ee070_n.jpg test.jpg"
+    "! gcloud storage cp gs://cloud-ml-data/img/flower_photos/daisy/100080576_f52e8ee070_n.jpg test.jpg"
    ]
   },
   {
@@ -1115,7 +1115,7 @@
     "    f.write(\"\\n\")\n",
     "    json.dump(instances[0], f)\n",
     "\n",
-    "! gsutil cp test.jsonl {BUCKET_URI}/test.jsonl"
+    "! gcloud storage cp test.jsonl {BUCKET_URI}/test.jsonl"
    ]
   },
   {
@@ -1268,7 +1268,7 @@
     "\n",
     "# delete the bucket\n",
     "if delete_bucket:\n",
-    "    ! gsutil rm -rf {BUCKET_URI}"
+    "    ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
    ]
   }
  ],