diff --git a/notebooks/community/experiments/vertex_ai_model_experimentation.ipynb b/notebooks/community/experiments/vertex_ai_model_experimentation.ipynb
index c4e71b6f4..f8b13f2ae 100644
--- a/notebooks/community/experiments/vertex_ai_model_experimentation.ipynb
+++ b/notebooks/community/experiments/vertex_ai_model_experimentation.ipynb
@@ -537,7 +537,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI"
+ "! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI"
 ]
 },
 {
@@ -557,7 +557,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_URI"
+ "! gcloud storage ls --all-versions --long $BUCKET_URI"
 ]
 },
 {
@@ -627,9 +627,9 @@
 },
 "outputs": [],
 "source": [
- "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n",
+ "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
 "\n",
- "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI"
+ "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
 ]
 },
 {
@@ -1736,7 +1736,7 @@
 "delete_bucket = False\n",
 "\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
- " ! gsutil rm -rf {BUCKET_URI}"
+ " ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
 ]
 }
 ],
diff --git a/notebooks/community/migration/UJ10 legacy Custom Training Prebuilt Container SKLearn.ipynb b/notebooks/community/migration/UJ10 legacy Custom Training Prebuilt Container SKLearn.ipynb
index f49bfd55f..9ccba7176 100644
--- a/notebooks/community/migration/UJ10 legacy Custom Training Prebuilt Container SKLearn.ipynb
+++ b/notebooks/community/migration/UJ10 legacy Custom Training Prebuilt Container SKLearn.ipynb
@@ -312,7 +312,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION gs://$BUCKET_NAME"
+ "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME"
 ]
 },
 {
@@ -332,7 +332,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al gs://$BUCKET_NAME"
+ "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME"
 ]
 },
 {
@@ -668,7 +668,7 @@
 "! rm -f custom.tar custom.tar.gz\n",
 "! tar cvf custom.tar custom\n",
 "! gzip custom.tar\n",
- "! gsutil cp custom.tar.gz gs://$BUCKET_NAME/census.tar.gz"
+ "! gcloud storage cp custom.tar.gz gs://$BUCKET_NAME/census.tar.gz"
 ]
 },
 {
@@ -1923,7 +1923,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r gs://$BUCKET_NAME"
+ " ! gcloud storage rm --recursive gs://$BUCKET_NAME"
 ]
 }
 ],
diff --git a/notebooks/community/ml_ops/stage1/mlops_data_management.ipynb b/notebooks/community/ml_ops/stage1/mlops_data_management.ipynb
index f3b38ee64..ff8465bf7 100644
--- a/notebooks/community/ml_ops/stage1/mlops_data_management.ipynb
+++ b/notebooks/community/ml_ops/stage1/mlops_data_management.ipynb
@@ -465,7 +465,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_URI"
+ "! gcloud storage buckets create --location $REGION $BUCKET_URI"
 ]
 },
 {
@@ -485,7 +485,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_URI"
+ "! gcloud storage ls --all-versions --long $BUCKET_URI"
 ]
 },
 {
@@ -1093,7 +1093,7 @@
 ") as f:\n",
 " json.dump(metadata, f)\n",
 "\n",
- "! gsutil cat $BUCKET_URI/metadata.jsonl"
+ "! gcloud storage cat $BUCKET_URI/metadata.jsonl"
 ]
 },
 {
@@ -1178,7 +1178,7 @@
 ") as f:\n",
 " json.dump(metadata, f)\n",
 "\n",
- "! gsutil cat $BUCKET_URI/metadata.jsonl"
+ "! gcloud storage cat $BUCKET_URI/metadata.jsonl"
 ]
 },
 {
@@ -1527,12 +1527,12 @@
 "run_pipeline(args)\n",
 "print(\"Data preprocessing completed.\")\n",
 "\n",
- "! gsutil ls $TRANSFORMED_DATA_PREFIX/train\n",
- "! gsutil ls $TRANSFORMED_DATA_PREFIX/val\n",
- "! gsutil ls $TRANSFORMED_DATA_PREFIX/test\n",
- "! gsutil ls $TRANSFORM_ARTIFACTS_DIR\n",
- "! gsutil ls {EXPORTED_JSONL_PREFIX}*\n",
- "! gsutil ls $EXPORTED_TFREC_PREFIX"
+ "! gcloud storage ls $TRANSFORMED_DATA_PREFIX/train\n",
+ "! gcloud storage ls $TRANSFORMED_DATA_PREFIX/val\n",
+ "! gcloud storage ls $TRANSFORMED_DATA_PREFIX/test\n",
+ "! gcloud storage ls $TRANSFORM_ARTIFACTS_DIR\n",
+ "! gcloud storage ls {EXPORTED_JSONL_PREFIX}*\n",
+ "! gcloud storage ls $EXPORTED_TFREC_PREFIX"
 ]
 },
 {
@@ -1588,7 +1588,7 @@
 ") as f:\n",
 " json.dump(metadata, f)\n",
 "\n",
- "! gsutil cat $BUCKET_URI/metadata.jsonl"
+ "! gcloud storage cat $BUCKET_URI/metadata.jsonl"
 ]
 },
 {
@@ -1626,7 +1626,7 @@
 " print(e)\n",
 "\n",
 " if \"BUCKET_URI\" in globals():\n",
- " ! gsutil rm -r $BUCKET_URI"
+ " ! gcloud storage rm --recursive $BUCKET_URI"
 ]
 }
 ],
diff --git a/notebooks/community/ml_ops/stage6/get_started_with_tf_serving.ipynb b/notebooks/community/ml_ops/stage6/get_started_with_tf_serving.ipynb
index 9cf8562f3..eef36b11f 100644
--- a/notebooks/community/ml_ops/stage6/get_started_with_tf_serving.ipynb
+++ b/notebooks/community/ml_ops/stage6/get_started_with_tf_serving.ipynb
@@ -446,7 +446,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_URI"
+ "! gcloud storage buckets create --location $REGION $BUCKET_URI"
 ]
 },
 {
@@ -466,7 +466,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_URI"
+ "! gcloud storage ls --all-versions --long $BUCKET_URI"
 ]
 },
 {
@@ -1098,7 +1098,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil cp gs://cloud-ml-data/img/flower_photos/daisy/100080576_f52e8ee070_n.jpg test.jpg"
+ "! gcloud storage cp gs://cloud-ml-data/img/flower_photos/daisy/100080576_f52e8ee070_n.jpg test.jpg"
 ]
 },
 {
@@ -1296,7 +1296,7 @@
 " f.write(\"\\n\")\n",
 " json.dump(instances[0], f)\n",
 "\n",
- "! gsutil cp test.jsonl {BUCKET_URI}/test.jsonl"
+ "! gcloud storage cp test.jsonl {BUCKET_URI}/test.jsonl"
 ]
 },
 {
@@ -1459,7 +1459,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
- " ! gsutil rm -rf {BUCKET_URI}"
+ " ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
 ]
 }
 ],
diff --git a/notebooks/community/prediction/custom_prediction_routines/SDK_Custom_Preprocess.ipynb b/notebooks/community/prediction/custom_prediction_routines/SDK_Custom_Preprocess.ipynb
index a43c0b2cb..c45975233 100644
--- a/notebooks/community/prediction/custom_prediction_routines/SDK_Custom_Preprocess.ipynb
+++ b/notebooks/community/prediction/custom_prediction_routines/SDK_Custom_Preprocess.ipynb
@@ -496,7 +496,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI"
+ "! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI"
 ]
 },
 {
@@ -516,7 +516,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_URI"
+ "! gcloud storage ls --all-versions --long $BUCKET_URI"
 ]
 },
 {
@@ -687,8 +687,8 @@
 "outputs": [],
 "source": [
 "%cd ..\n",
- "!gsutil cp {LOCAL_MODEL_ARTIFACTS_DIR}/* {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/\n",
- "!gsutil ls {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/"
+ "!gcloud storage cp {LOCAL_MODEL_ARTIFACTS_DIR}/* {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/\n",
+ "!gcloud storage ls {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/"
 ]
 },
 {
@@ -1809,7 +1809,7 @@
 "delete_bucket = False\n",
 "\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
- " ! gsutil rm -r $BUCKET_URI"
+ " ! gcloud storage rm --recursive $BUCKET_URI"
 ]
 }
 ],
diff --git a/notebooks/community/prediction/custom_prediction_routines/SDK_Triton_PyTorch_Local_Prediction.ipynb b/notebooks/community/prediction/custom_prediction_routines/SDK_Triton_PyTorch_Local_Prediction.ipynb
index c6b5c8dcc..c3c85e994 100644
--- a/notebooks/community/prediction/custom_prediction_routines/SDK_Triton_PyTorch_Local_Prediction.ipynb
+++ b/notebooks/community/prediction/custom_prediction_routines/SDK_Triton_PyTorch_Local_Prediction.ipynb
@@ -490,7 +490,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI"
+ "! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI"
 ]
 },
 {
@@ -510,7 +510,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_URI"
+ "! gcloud storage ls --all-versions --long $BUCKET_URI"
 ]
 },
 {
@@ -918,8 +918,8 @@
 },
 "outputs": [],
 "source": [
- "!gsutil cp -r {LOCAL_MODEL_ARTIFACTS_DIR}/* {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/\n",
- "!gsutil ls {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/"
+ "!gcloud storage cp --recursive {LOCAL_MODEL_ARTIFACTS_DIR}/* {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/\n",
+ "!gcloud storage ls {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/"
 ]
 },
 {
@@ -1869,7 +1869,7 @@
 "delete_bucket = False\n",
 "\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
- " ! gsutil rm -r $BUCKET_URI"
+ " ! gcloud storage rm --recursive $BUCKET_URI"
 ]
 }
 ],
diff --git a/notebooks/community/sdk/sdk_custom_image_classification_online.ipynb b/notebooks/community/sdk/sdk_custom_image_classification_online.ipynb
index 08283c2da..5b1cb98ea 100644
--- a/notebooks/community/sdk/sdk_custom_image_classification_online.ipynb
+++ b/notebooks/community/sdk/sdk_custom_image_classification_online.ipynb
@@ -475,7 +475,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_NAME"
+ "! gcloud storage buckets create --location $REGION $BUCKET_NAME"
 ]
 },
 {
@@ -495,7 +495,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_NAME"
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME"
 ]
 },
 {
@@ -917,7 +917,7 @@
 "! rm -f custom.tar custom.tar.gz\n",
 "! tar cvf custom.tar custom\n",
 "! gzip custom.tar\n",
- "! gsutil cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz"
+ "! gcloud storage cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz"
 ]
 },
 {
@@ -1544,7 +1544,7 @@
 " print(e)\n",
 "\n",
 " if \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r $BUCKET_NAME"
+ " ! gcloud storage rm --recursive $BUCKET_NAME"
 ]
 }
 ],
diff --git a/notebooks/community/sdk/sdk_custom_image_classification_online_tfserving.ipynb b/notebooks/community/sdk/sdk_custom_image_classification_online_tfserving.ipynb
index 0cb244393..1c2ed6fd4 100644
--- a/notebooks/community/sdk/sdk_custom_image_classification_online_tfserving.ipynb
+++ b/notebooks/community/sdk/sdk_custom_image_classification_online_tfserving.ipynb
@@ -475,7 +475,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l $REGION $BUCKET_NAME"
+ "! gcloud storage buckets create --location=$REGION $BUCKET_NAME"
 ]
 },
 {
@@ -495,7 +495,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil ls -al $BUCKET_NAME"
+ "! gcloud storage ls --all-versions --long $BUCKET_NAME"
 ]
 },
 {
@@ -944,7 +944,7 @@
 "! rm -f custom.tar custom.tar.gz\n",
 "! tar cvf custom.tar custom\n",
 "! gzip custom.tar\n",
- "! gsutil cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz"
+ "! gcloud storage cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz"
 ]
 },
 {
@@ -1535,7 +1535,7 @@
 " print(e)\n",
 "\n",
 " if \"BUCKET_NAME\" in globals():\n",
- " ! gsutil rm -r $BUCKET_NAME"
+ " ! gcloud storage rm --recursive $BUCKET_NAME"
 ]
 }
 ],
diff --git a/notebooks/official/migration/sdk-automl-image-object-detection-batch-online.ipynb b/notebooks/official/migration/sdk-automl-image-object-detection-batch-online.ipynb
index 2925d76c8..1fbed93ab 100644
--- a/notebooks/official/migration/sdk-automl-image-object-detection-batch-online.ipynb
+++ b/notebooks/official/migration/sdk-automl-image-object-detection-batch-online.ipynb
@@ -287,7 +287,7 @@
 },
 "outputs": [],
 "source": [
- "! gsutil mb -l {LOCATION} {BUCKET_URI}"
+ "! gcloud storage buckets create --location={LOCATION} {BUCKET_URI}"
 ]
 },
 {
@@ -392,9 +392,9 @@
 " + \"/\"\n",
 ")\n",
 "\n",
- "# Copy images using gsutil commands directly\n",
+ "# Copy images using gcloud storage commands directly\n",
 "for src, dest in zip(df.iloc[:, 0], df[\"destination_path\"]):\n",
- " ! gsutil -m cp {src} {dest}\n",
+ " ! gcloud storage cp {src} {dest}\n",
 "\n",
 "print(f\"Files copied to {BUCKET_URI}\")"
 ]
@@ -475,11 +475,11 @@
 "else:\n",
 " FILE = IMPORT_FILE\n",
 "\n",
- "count = ! gsutil cat $FILE | wc -l\n",
+ "count = ! gcloud storage cat $FILE | wc -l\n",
 "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
- "! gsutil cat $FILE | head"
+ "! gcloud storage cat $FILE | head"
 ]
 },
 {
@@ -832,7 +832,7 @@
 },
 "outputs": [],
 "source": [
- "test_items = !gsutil cat $IMPORT_FILE | head -n2\n",
+ "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n",
 "cols_1 = str(test_items[0]).split(\",\")\n",
 "cols_2 = str(test_items[1]).split(\",\")\n",
 "if len(cols_1) == 11:\n",
@@ -872,8 +872,8 @@
 "file_1 = test_item_1.split(\"/\")[-1]\n",
 "file_2 = test_item_2.split(\"/\")[-1]\n",
 "\n",
- "! gsutil cp $test_item_1 $BUCKET_URI/$file_1\n",
- "! gsutil cp $test_item_2 $BUCKET_URI/$file_2\n",
+ "! gcloud storage cp $test_item_1 $BUCKET_URI/$file_1\n",
+ "! gcloud storage cp $test_item_2 $BUCKET_URI/$file_2\n",
 "\n",
 "test_item_1 = BUCKET_URI + \"/\" + file_1\n",
 "test_item_2 = BUCKET_URI + \"/\" + file_2"
@@ -917,7 +917,7 @@
 " f.write(json.dumps(data) + \"\\n\")\n",
 "\n",
 "print(gcs_input_uri)\n",
- "! gsutil cat $gcs_input_uri"
+ "! gcloud storage cat $gcs_input_uri"
 ]
 },
 {
@@ -1174,7 +1174,7 @@
 },
 "outputs": [],
 "source": [
- "test_items = !gsutil cat $IMPORT_FILE | head -n1\n",
+ "test_items = !gcloud storage cat $IMPORT_FILE | head -n1\n",
 "cols = str(test_items[0]).split(\",\")\n",
 "if len(cols) == 11:\n",
 " test_item = str(cols[1])\n",
@@ -1313,7 +1313,7 @@
 "# Delete Cloud Storage objects that were created\n",
 "delete_bucket = False  # Set True for deletion\n",
 "if delete_bucket:\n",
- " ! gsutil -m rm -r $BUCKET_URI"
+ " ! gcloud storage rm --recursive $BUCKET_URI"
 ]
 }
 ],