diff --git a/notebooks/community/ml_ops/stage3/get_started_vertex_pipelines_sklearn_with_prediction.ipynb b/notebooks/community/ml_ops/stage3/get_started_vertex_pipelines_sklearn_with_prediction.ipynb index d07e1ae63..04e9023da 100644 --- a/notebooks/community/ml_ops/stage3/get_started_vertex_pipelines_sklearn_with_prediction.ipynb +++ b/notebooks/community/ml_ops/stage3/get_started_vertex_pipelines_sklearn_with_prediction.ipynb @@ -488,7 +488,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_URI" + "! gcloud storage buckets create --location $REGION $BUCKET_URI" ] }, { @@ -508,7 +508,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_URI" + "! gcloud storage ls --all-versions --long $BUCKET_URI" ] }, { @@ -579,9 +579,9 @@ }, "outputs": [], "source": [ - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n", + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI" + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ] }, { @@ -990,7 +990,7 @@ "! rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_newsaggr.tar.gz" + "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_newsaggr.tar.gz" ] }, { @@ -1206,13 +1206,13 @@ " + \"/evaluation_metrics\"\n", " )\n", " if tf.io.gfile.exists(EXECUTE_OUTPUT):\n", - " ! gsutil cat $EXECUTE_OUTPUT\n", + " ! gcloud storage cat $EXECUTE_OUTPUT\n", " return EXECUTE_OUTPUT\n", " elif tf.io.gfile.exists(GCP_RESOURCES):\n", - " ! gsutil cat $GCP_RESOURCES\n", + " ! gcloud storage cat $GCP_RESOURCES\n", " return GCP_RESOURCES\n", " elif tf.io.gfile.exists(EVAL_METRICS):\n", - " ! gsutil cat $EVAL_METRICS\n", + " ! 
gcloud storage cat $EVAL_METRICS\n", " return EVAL_METRICS\n", "\n", " return None\n", @@ -1221,14 +1221,14 @@ "print(\"custompythonpackagetrainingjob-run\")\n", "artifacts = print_pipeline_output(pipeline, \"custompythonpackagetrainingjob-run\")\n", "print(\"\\n\\n\")\n", - "output = !gsutil cat $artifacts\n", + "output = !gcloud storage cat $artifacts\n", "output = json.loads(output[0])\n", "model_id = output[\"artifacts\"][\"model\"][\"artifacts\"][0][\"metadata\"][\"resourceName\"]\n", "print(\"\\n\\n\")\n", "print(\"endpoint-create\")\n", "artifacts = print_pipeline_output(pipeline, \"endpoint-create\")\n", "print(\"\\n\\n\")\n", - "output = !gsutil cat $artifacts\n", + "output = !gcloud storage cat $artifacts\n", "output = json.loads(output[0])\n", "endpoint_id = output[\"artifacts\"][\"endpoint\"][\"artifacts\"][0][\"metadata\"][\n", " \"resourceName\"\n", @@ -1394,7 +1394,7 @@ "source": [ "delete_bucket = False\n", "if delete_bucket or os.getenv(\"IS_TESTING\"):\n", - " ! gsutil rm -r $BUCKET_URI\n", + " ! gcloud storage rm --recursive $BUCKET_URI\n", "\n", "! rm -rf custom custom.tar.csv" ] diff --git a/notebooks/community/ml_ops/stage3/get_started_with_airflow_and_vertex_pipelines.ipynb b/notebooks/community/ml_ops/stage3/get_started_with_airflow_and_vertex_pipelines.ipynb index f80b6dcef..32c4f70a5 100644 --- a/notebooks/community/ml_ops/stage3/get_started_with_airflow_and_vertex_pipelines.ipynb +++ b/notebooks/community/ml_ops/stage3/get_started_with_airflow_and_vertex_pipelines.ipynb @@ -456,7 +456,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_URI" + "! gcloud storage buckets create --location=$REGION $BUCKET_URI" ] }, { @@ -476,7 +476,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_URI" + "! gcloud storage ls --all-versions --long $BUCKET_URI" ] }, { @@ -546,9 +546,9 @@ }, "outputs": [], "source": [ - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n", + "! 
gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n", - "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI" + "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ] }, { @@ -569,7 +569,6 @@ "outputs": [], "source": [ "from google.cloud import aiplatform\n", - "from kfp import dsl\n", "from kfp.v2 import compiler\n", "from kfp.v2.dsl import Artifact, Output, component" ] @@ -910,7 +909,7 @@ }, "outputs": [], "source": [ - "!gsutil cp $COMPOSER_DAG_FILENAME $COMPOSER_DAG_GCS/" + "!gcloud storage cp $COMPOSER_DAG_FILENAME $COMPOSER_DAG_GCS/" ] }, { @@ -921,7 +920,7 @@ }, "outputs": [], "source": [ - "!gsutil ls -l $COMPOSER_DAG_GCS/$COMPOSER_DAG_FILENAME" + "!gcloud storage ls --long $COMPOSER_DAG_GCS/$COMPOSER_DAG_FILENAME" ] }, { @@ -1153,7 +1152,7 @@ "# You can change the `text` and `emoji_str` parameters here to update the pipeline output\n", "def pipeline():\n", "    data_processing_task_dag_name = COMPOSER_DAG_NAME\n", "    data_processing_task = trigger_airflow_dag(\n", "        dag_name=data_processing_task_dag_name,\n", "        composer_client_id=COMPOSER_CLIENT_ID,\n", "        composer_webserver_id=COMPOSER_WEB_URI,\n", @@ -1308,13 +1307,13 @@ "        + \"/evaluation_metrics\"\n", "    )\n", "    if tf.io.gfile.exists(EXECUTE_OUTPUT):\n", - "        ! gsutil cat $EXECUTE_OUTPUT\n", + "        ! gcloud storage cat $EXECUTE_OUTPUT\n", "        return EXECUTE_OUTPUT\n", "    elif tf.io.gfile.exists(GCP_RESOURCES):\n", - "        ! gsutil cat $GCP_RESOURCES\n", + "        ! gcloud storage cat $GCP_RESOURCES\n", "        return GCP_RESOURCES\n", "    elif tf.io.gfile.exists(EVAL_METRICS):\n", - "        ! gsutil cat $EVAL_METRICS\n", + "        ! gcloud storage cat $EVAL_METRICS\n", "        return EVAL_METRICS\n", "\n", "    return None\n", @@ -1389,7 +1388,7 @@ "! 
bq rm -r -f $PROJECT_ID:$DATASET_NAME\n", "\n", "if delete_bucket or os.getenv(\"IS_TESTING\"):\n", - " ! gsutil rm -rf {BUCKET_URI}\n", + " ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}\n", " \n", "! rm get_composer_config.py data_orchestration_bq_example_dag.py" ] diff --git a/notebooks/community/sdk/SDK_FBProphet_Forecasting_Online.ipynb b/notebooks/community/sdk/SDK_FBProphet_Forecasting_Online.ipynb index f80f39dd6..3d6c06dce 100644 --- a/notebooks/community/sdk/SDK_FBProphet_Forecasting_Online.ipynb +++ b/notebooks/community/sdk/SDK_FBProphet_Forecasting_Online.ipynb @@ -348,7 +348,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {REGION} {BUCKET_URI}" + "! gcloud storage buckets create --location={REGION} {BUCKET_URI}" ] }, { @@ -742,7 +742,7 @@ "outputs": [], "source": [ "%cd app\n", - "!gsutil cp *.sav *.csv {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/\n", + "!gcloud storage cp *.sav *.csv {BUCKET_URI}/{MODEL_ARTIFACT_DIR}/\n", "%cd .." ] }, @@ -1541,7 +1541,7 @@ "\n", "# Delete the Cloud Storage bucket\n", "if delete_bucket or os.getenv(\"IS_TESTING\"):\n", - " ! gsutil -m rm -r $BUCKET_URI" + " ! gcloud storage rm --recursive $BUCKET_URI" ] } ], diff --git a/notebooks/community/sdk/sdk_custom_tabular_regression_online.ipynb b/notebooks/community/sdk/sdk_custom_tabular_regression_online.ipynb index b9bd12331..75b9e53be 100644 --- a/notebooks/community/sdk/sdk_custom_tabular_regression_online.ipynb +++ b/notebooks/community/sdk/sdk_custom_tabular_regression_online.ipynb @@ -475,7 +475,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" + "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ] }, { @@ -495,7 +495,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { @@ -921,7 +921,7 @@ "! rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! 
gsutil cp custom.tar.gz $BUCKET_NAME/trainer_boston.tar.gz" + "! gcloud storage cp custom.tar.gz $BUCKET_NAME/trainer_boston.tar.gz" ] }, { @@ -1427,7 +1427,7 @@ " print(e)\n", "\n", " if \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], diff --git a/notebooks/community/sdk/sdk_pytorch_lightning_custom_container_training.ipynb b/notebooks/community/sdk/sdk_pytorch_lightning_custom_container_training.ipynb index 19b6e3501..7abcf6762 100644 --- a/notebooks/community/sdk/sdk_pytorch_lightning_custom_container_training.ipynb +++ b/notebooks/community/sdk/sdk_pytorch_lightning_custom_container_training.ipynb @@ -505,7 +505,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI" + "! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI" ] }, { @@ -525,7 +525,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_URI" + "! gcloud storage ls --all-versions --long $BUCKET_URI" ] }, { @@ -1521,10 +1521,10 @@ "\n", "CONTENT_DIR = f\"{BUCKET_URI}/{content_name}*\"\n", "# Delete Cloud Storage objects that were created\n", - "! gsutil -m rm -r $CONTENT_DIR\n", + "! gcloud storage rm --recursive $CONTENT_DIR\n", "\n", "if delete_bucket and \"BUCKET_URI\" in globals():\n", - " ! gsutil -m rm -r $BUCKET_URI" + " ! gcloud storage rm --recursive $BUCKET_URI" ] } ], diff --git a/notebooks/official/migration/sdk-custom-scikit-learn-prebuilt-container.ipynb b/notebooks/official/migration/sdk-custom-scikit-learn-prebuilt-container.ipynb index 865e91014..281b96817 100644 --- a/notebooks/official/migration/sdk-custom-scikit-learn-prebuilt-container.ipynb +++ b/notebooks/official/migration/sdk-custom-scikit-learn-prebuilt-container.ipynb @@ -320,7 +320,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}" + "! 
gcloud storage buckets create --location {LOCATION} --project {PROJECT_ID} {BUCKET_URI}" ] }, { @@ -699,7 +699,7 @@ "! rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_census.tar.gz" + "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_census.tar.gz" ] }, { @@ -965,7 +965,7 @@ " for i in INSTANCES:\n", " f.write(json.dumps(i) + \"\\n\")\n", "\n", - "! gsutil cat $gcs_input_uri" + "! gcloud storage cat $gcs_input_uri" ] }, { @@ -1378,7 +1378,7 @@ "# Delete the Cloud Storage bucket\n", "delete_bucket = False # set True for deletion\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_URI" + " ! gcloud storage rm --recursive $BUCKET_URI" ] } ], diff --git a/notebooks/official/model_monitoring/get_started_with_model_monitoring_custom.ipynb b/notebooks/official/model_monitoring/get_started_with_model_monitoring_custom.ipynb index 612b3da23..06afe99cf 100644 --- a/notebooks/official/model_monitoring/get_started_with_model_monitoring_custom.ipynb +++ b/notebooks/official/model_monitoring/get_started_with_model_monitoring_custom.ipynb @@ -364,7 +364,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $LOCATION -p $PROJECT_ID $BUCKET_URI" + "! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI" ] }, { @@ -928,7 +928,7 @@ "with open(\"schema.yaml\", \"w\") as f:\n", " f.write(yaml)\n", "\n", - "! gsutil cp schema.yaml {BUCKET_URI}/schema.yaml" + "! gcloud storage cp schema.yaml {BUCKET_URI}/schema.yaml" ] }, { @@ -1406,7 +1406,7 @@ "# Delete the Cloud Storage bucket\n", "delete_bucket = False # Set True for deletion\n", "if delete_bucket:\n", - " ! gsutil rm -rf {BUCKET_URI}\n", + " ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}\n", "\n", "# Delete the locally generated files\n", "! 
rm -f schema.yaml\n", diff --git a/notebooks/official/workbench/chicago_taxi_fare_prediction/chicago_taxi_fare_prediction.ipynb b/notebooks/official/workbench/chicago_taxi_fare_prediction/chicago_taxi_fare_prediction.ipynb index d56b221e6..72af53e31 100644 --- a/notebooks/official/workbench/chicago_taxi_fare_prediction/chicago_taxi_fare_prediction.ipynb +++ b/notebooks/official/workbench/chicago_taxi_fare_prediction/chicago_taxi_fare_prediction.ipynb @@ -365,7 +365,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $LOCATION -p $PROJECT_ID $BUCKET_URI" + "! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI" ] }, { @@ -1467,7 +1467,7 @@ "delete_bucket = False\n", "\n", "if delete_bucket:\n", - " ! gsutil -m rm -r $BUCKET_URI" + " ! gcloud storage rm --recursive $BUCKET_URI" ] } ],