diff --git a/notebooks/community/ml_ops/stage3/get_started_with_dataflow_flex_template_component.ipynb b/notebooks/community/ml_ops/stage3/get_started_with_dataflow_flex_template_component.ipynb
index 6381c4101..89ca2271f 100644
--- a/notebooks/community/ml_ops/stage3/get_started_with_dataflow_flex_template_component.ipynb
+++ b/notebooks/community/ml_ops/stage3/get_started_with_dataflow_flex_template_component.ipynb
@@ -456,7 +456,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_URI"
+    "! gcloud storage buckets create --location=$REGION $BUCKET_URI"
    ]
   },
   {
@@ -476,7 +476,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_URI"
+    "! gcloud storage ls --all-versions --long $BUCKET_URI"
    ]
   },
   {
@@ -546,9 +546,9 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n",
+    "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
     "\n",
-    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI"
+    "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
    ]
   },
   {
@@ -678,7 +678,7 @@
    "outputs": [],
    "source": [
     "GCS_AVRO_SCHEMA = BUCKET_URI + \"/gaming_schema.avsc\"\n",
-    "! gsutil cp gaming_schema.avsc $GCS_AVRO_SCHEMA\n",
+    "! gcloud storage cp gaming_schema.avsc $GCS_AVRO_SCHEMA\n",
     "\n",
     "GCS_FLEX_TEMPLATE_PATH = \"gs://dataflow-templates/latest/flex/File_Format_Conversion\"\n",
     "GCS_CONVERT_IN = \"gs://dataflow-samples/game/5000_gaming_data.csv\"\n",
@@ -757,7 +757,7 @@
     "\n",
     "pipeline.run()\n",
     "\n",
-    "! gsutil ls $GCS_CONVERT_OUT\n",
+    "! gcloud storage ls $GCS_CONVERT_OUT\n",
     "\n",
     "! rm -f dataflow_file_conversion.yaml gaming_schema.avsc"
    ]
@@ -813,7 +813,7 @@
     "delete_bucket = False\n",
     "\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/official/automl/automl_image_classification_batch_prediction.ipynb b/notebooks/official/automl/automl_image_classification_batch_prediction.ipynb
index 038a6d42a..fdd000472 100644
--- a/notebooks/official/automl/automl_image_classification_batch_prediction.ipynb
+++ b/notebooks/official/automl/automl_image_classification_batch_prediction.ipynb
@@ -287,7 +287,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
+    "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
    ]
   },
   {
@@ -394,11 +394,11 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n",
     "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
+    "! gcloud storage cat $FILE | head"
    ]
   },
   {
@@ -580,7 +580,7 @@
    },
    "outputs": [],
    "source": [
-    "test_items = !gsutil cat $IMPORT_FILE | head -n2\n",
+    "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n",
     "if len(str(test_items[0]).split(\",\")) == 3:\n",
     "    _, test_item_1, test_label_1 = str(test_items[0]).split(\",\")\n",
     "    _, test_item_2, test_label_2 = str(test_items[1]).split(\",\")\n",
@@ -614,8 +614,8 @@
     "file_1 = test_item_1.split(\"/\")[-1]\n",
     "file_2 = test_item_2.split(\"/\")[-1]\n",
     "\n",
-    "! gsutil cp $test_item_1 $BUCKET_URI/$file_1\n",
-    "! gsutil cp $test_item_2 $BUCKET_URI/$file_2\n",
+    "! gcloud storage cp $test_item_1 $BUCKET_URI/$file_1\n",
+    "! gcloud storage cp $test_item_2 $BUCKET_URI/$file_2\n",
     "\n",
     "test_item_1 = BUCKET_URI + \"/\" + file_1\n",
     "test_item_2 = BUCKET_URI + \"/\" + file_2"
    ]
   },
   {
@@ -659,7 +659,7 @@
     "        f.write(json.dumps(data) + \"\\n\")\n",
     "\n",
     "print(gcs_input_uri)\n",
-    "! gsutil cat $gcs_input_uri"
+    "! gcloud storage cat $gcs_input_uri"
    ]
   },
   {
@@ -803,7 +803,7 @@
     "# Delete the cloud storage bucket\n",
     "delete_bucket = False # set True for deletion\n",
     "if delete_bucket:\n",
-    "    ! gsutil rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/official/automl/automl_image_object_detection_online_prediction.ipynb b/notebooks/official/automl/automl_image_object_detection_online_prediction.ipynb
index ad6032e8f..dfd8bcd29 100644
--- a/notebooks/official/automl/automl_image_object_detection_online_prediction.ipynb
+++ b/notebooks/official/automl/automl_image_object_detection_online_prediction.ipynb
@@ -297,7 +297,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $LOCATION $BUCKET_URI"
+    "! gcloud storage buckets create --location=$LOCATION $BUCKET_URI"
    ]
   },
   {
@@ -386,7 +386,7 @@
    "source": [
     "#### Copying data between Google Cloud Storage Buckets\n",
     "\n",
-    "In this step, you prevent access issues for the images in your original dataset. The code below extracts folder paths from image paths, constructs destination paths for Cloud Storage, copies images using gsutil commands, updates image paths in the DataFrame, and finally saves the modified DataFrame back to Cloud Storage as a CSV file."
+    "In this step, you prevent access issues for the images in your original dataset. The code below extracts folder paths from image paths, constructs destination paths for Cloud Storage, copies images using gcloud storage commands, updates image paths in the DataFrame, and finally saves the modified DataFrame back to Cloud Storage as a CSV file."
    ]
   },
   {
@@ -415,7 +415,7 @@
     "\n",
-    "# Copy images using gsutil commands directly\n",
+    "# Copy images using gcloud storage commands directly\n",
     "for src, dest in zip(df.iloc[:, 0], df[\"destination_path\"]):\n",
-    "    ! gsutil -m cp {src} {dest}\n",
+    "    ! gcloud storage cp {src} {dest}\n",
     "\n",
     "print(f\"Files copied to {BUCKET_URI}\")"
    ]
   },
   {
@@ -496,11 +496,11 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n",
     "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
+    "! gcloud storage cat $FILE | head"
    ]
   },
   {
@@ -706,7 +706,7 @@
    },
    "outputs": [],
    "source": [
-    "test_items = !gsutil cat $IMPORT_FILE | head -n1\n",
+    "test_items = !gcloud storage cat $IMPORT_FILE | head -n1\n",
     "cols = str(test_items[0]).split(\",\")\n",
     "if len(cols) == 11:\n",
     "    test_item = str(cols[1])\n",
@@ -834,7 +834,7 @@
     "dag.delete()\n",
     "\n",
     "if delete_bucket:\n",
-    "    ! gsutil rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/official/model_evaluation/get_started_with_custom_model_evaluation_import.ipynb b/notebooks/official/model_evaluation/get_started_with_custom_model_evaluation_import.ipynb
index 27a2ff1e1..9909442f8 100644
--- a/notebooks/official/model_evaluation/get_started_with_custom_model_evaluation_import.ipynb
+++ b/notebooks/official/model_evaluation/get_started_with_custom_model_evaluation_import.ipynb
@@ -304,7 +304,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
+    "! gcloud storage buckets create {BUCKET_URI} --location={LOCATION} --project={PROJECT_ID}"
    ]
   },
   {
@@ -778,7 +778,7 @@
     "# Delete Cloud Storage objects that were created\n",
     "delete_bucket = False # set True for deletion\n",
     "if delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/official/model_evaluation/model_based_llm_evaluation/autosxs_llm_evaluation_for_summarization_task.ipynb b/notebooks/official/model_evaluation/model_based_llm_evaluation/autosxs_llm_evaluation_for_summarization_task.ipynb
index 3cc960db1..897683f5a 100644
--- a/notebooks/official/model_evaluation/model_based_llm_evaluation/autosxs_llm_evaluation_for_summarization_task.ipynb
+++ b/notebooks/official/model_evaluation/model_based_llm_evaluation/autosxs_llm_evaluation_for_summarization_task.ipynb
@@ -355,7 +355,7 @@
     "):\n",
     "    BUCKET_URI = \"gs://\" + PROJECT_ID + \"-aip-\" + UUID\n",
     "\n",
-    "! gsutil mb -l $LOCATION -p $PROJECT_ID $BUCKET_URI"
+    "! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI"
    ]
   },
   {
@@ -519,7 +519,7 @@
    "outputs": [],
    "source": [
     "examples.to_json(\"evaluation_dataset.json\", orient=\"records\", lines=True)\n",
-    "! gsutil cp evaluation_dataset.json $BUCKET_URI/input/evaluation_dataset.json\n",
+    "! gcloud storage cp evaluation_dataset.json $BUCKET_URI/input/evaluation_dataset.json\n",
     "DATASET = f\"{BUCKET_URI}/input/evaluation_dataset.json\""
    ]
   },
@@ -765,7 +765,7 @@
     "# Delete Cloud Storage objects that were created\n",
     "delete_bucket = False\n",
     "if delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/official/tensorboard/tensorboard_custom_training_with_prebuilt_container.ipynb b/notebooks/official/tensorboard/tensorboard_custom_training_with_prebuilt_container.ipynb
index 2f5ce0ce7..2a46735b6 100644
--- a/notebooks/official/tensorboard/tensorboard_custom_training_with_prebuilt_container.ipynb
+++ b/notebooks/official/tensorboard/tensorboard_custom_training_with_prebuilt_container.ipynb
@@ -316,7 +316,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $LOCATION -p $PROJECT_ID $BUCKET_URI"
+    "! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI"
    ]
   },
   {
@@ -480,7 +480,7 @@
    "outputs": [],
    "source": [
     "# Download the sample code\n",
-    "! gsutil cp gs://cloud-samples-data/ai-platform/hello-custom/hello-custom-sample-v1.tar.gz - | tar -xzv\n",
+    "! gcloud storage cp gs://cloud-samples-data/ai-platform/hello-custom/hello-custom-sample-v1.tar.gz - | tar -xzv\n",
     "%cd hello-custom-sample/"
    ]
   },
@@ -664,7 +664,7 @@
    "outputs": [],
    "source": [
     "GCS_BUCKET_TRAINING = f\"{BUCKET_URI}/data/\"\n",
-    "! gsutil cp dist/hello-custom-training-3.0.tar.gz {GCS_BUCKET_TRAINING}"
+    "! gcloud storage cp dist/hello-custom-training-3.0.tar.gz {GCS_BUCKET_TRAINING}"
    ]
   },
   {
@@ -787,7 +787,7 @@
     "# Delete GCS bucket.\n",
     "delete_bucket = False\n",
     "if delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_URI\n",
+    "    ! gcloud storage rm --recursive $BUCKET_URI\n",
     "\n",
     "!rm -rf ../hello-custom-sample/"
    ]
   }
diff --git a/notebooks/official/tensorboard/tensorboard_profiler_custom_training_with_prebuilt_container.ipynb b/notebooks/official/tensorboard/tensorboard_profiler_custom_training_with_prebuilt_container.ipynb
index 165bbd3af..8ff66b2d2 100644
--- a/notebooks/official/tensorboard/tensorboard_profiler_custom_training_with_prebuilt_container.ipynb
+++ b/notebooks/official/tensorboard/tensorboard_profiler_custom_training_with_prebuilt_container.ipynb
@@ -356,7 +356,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
+    "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
    ]
   },
   {
@@ -696,11 +696,11 @@
    },
    "outputs": [],
    "source": [
-    "!cd {PYTHON_PACKAGE_APPLICATION_DIR} && python3 setup.py sdist --formats=gztar\n",
+    "! cd {PYTHON_PACKAGE_APPLICATION_DIR} && python3 setup.py sdist --formats=gztar\n",
     "\n",
-    "!gsutil cp {source_package_file_name} {python_package_gcs_uri}\n",
+    "! gcloud storage cp {source_package_file_name} {python_package_gcs_uri}\n",
     "\n",
-    "!gsutil ls -l {python_package_gcs_uri}"
+    "! gcloud storage ls --long {python_package_gcs_uri}"
    ]
   },
   {
@@ -833,7 +833,7 @@
     "# delete the bucket\n",
     "delete_bucket = False # set True for deletion\n",
     "if delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
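# ---------------------------------------------------------------------------
# Reviewer note (not part of the patch): a quick-reference sketch of the
# gsutil -> gcloud storage mappings applied throughout this diff. The
# placeholders <loc>, <proj>, <sa>, <role>, <src>, and <dst> are illustrative,
# not values from the notebooks. gcloud storage parallelizes transfers by
# default, so gsutil's -m flag needs no replacement.
#
#   gsutil mb -l <loc> -p <proj> gs://<bucket>
#     -> gcloud storage buckets create gs://<bucket> --location=<loc> --project=<proj>
#   gsutil cp <src> <dst>
#     -> gcloud storage cp <src> <dst>
#   gsutil cat <url>
#     -> gcloud storage cat <url>
#   gsutil ls -al gs://<bucket>
#     -> gcloud storage ls --all-versions --long gs://<bucket>
#   gsutil rm -r gs://<bucket>
#     -> gcloud storage rm --recursive gs://<bucket>
#   gsutil iam ch serviceAccount:<sa>:<role> gs://<bucket>
#     -> gcloud storage buckets add-iam-policy-binding gs://<bucket> --member=serviceAccount:<sa> --role=<role>
# ---------------------------------------------------------------------------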