diff --git a/notebooks/community/bigquery_ml/bqml-online-prediction.ipynb b/notebooks/community/bigquery_ml/bqml-online-prediction.ipynb
index 99def69cd..09117032f 100644
--- a/notebooks/community/bigquery_ml/bqml-online-prediction.ipynb
+++ b/notebooks/community/bigquery_ml/bqml-online-prediction.ipynb
@@ -359,7 +359,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {REGION} -p {PROJECT_ID} {BUCKET_URI}"
+    "! gcloud storage buckets create --location={REGION} --project={PROJECT_ID} {BUCKET_URI}"
    ]
   },
   {
@@ -1098,7 +1098,7 @@
     "    ! bq rm -r -f $PROJECT_ID:$BQ_DATASET_NAME\n",
     "# delete the Cloud Storage bucket\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil -m rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/community/migration/UJ13 Data Labeling task.ipynb b/notebooks/community/migration/UJ13 Data Labeling task.ipynb
index 41121e917..0c4be1212 100644
--- a/notebooks/community/migration/UJ13 Data Labeling task.ipynb
+++ b/notebooks/community/migration/UJ13 Data Labeling task.ipynb
@@ -325,7 +325,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION gs://$BUCKET_NAME"
+    "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME"
    ]
   },
   {
@@ -345,7 +345,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al gs://$BUCKET_NAME"
+    "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME"
    ]
   },
   {
@@ -384,9 +384,7 @@
     "import time\n",
     "\n",
     "from google.cloud.aiplatform import gapic as aip\n",
-    "from google.protobuf import json_format\n",
-    "from google.protobuf.json_format import MessageToJson, ParseDict\n",
-    "from google.protobuf.struct_pb2 import Struct, Value"
+    "from google.protobuf import json_format\n"
    ]
   },
   {
@@ -513,7 +511,7 @@
     "IMPORT_FILE = \"gs://\" + BUCKET_NAME + \"/labeling.csv\"\n",
     "with tf.io.gfile.GFile(IMPORT_FILE, \"w\") as f:\n",
     "    for lf in LABELING_FILES:\n",
-    "        ! wget {lf} | gsutil cp {lf.split(\"/\")[-1]} gs://{BUCKET_NAME}\n",
+    "        ! wget {lf} | gcloud storage cp {lf.split(\"/\")[-1]} gs://{BUCKET_NAME}\n",
     "        f.write(\"gs://\" + BUCKET_NAME + \"/\" + lf.split(\"/\")[-1] + \"\\n\")"
    ]
   },
@@ -525,7 +523,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cat $IMPORT_FILE"
+    "! gcloud storage cat $IMPORT_FILE"
    ]
   },
   {
@@ -1007,7 +1005,7 @@
    "outputs": [],
    "source": [
     "# create placeholder file for valid PDF file with instruction for data labeling\n",
-    "! echo \"this is instruction\" >> instruction.txt | gsutil cp instruction.txt gs://$BUCKET_NAME"
+    "! echo \"this is instruction\" >> instruction.txt | gcloud storage cp instruction.txt gs://$BUCKET_NAME"
    ]
   },
   {
@@ -1382,7 +1380,7 @@
     "\n",
     "\n",
     "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-    "    ! gsutil rm -r gs://$BUCKET_NAME"
+    "    ! gcloud storage rm --recursive gs://$BUCKET_NAME"
    ]
   }
  ],
@@ -1450,7 +1448,7 @@
     "v6isqzPQ_jAw",
     "ZCyd1qAb_jAx"
    ],
-   "name": "UJ13 unified Data Labeling task.ipynb",
+   "name": "UJ13 Data Labeling task.ipynb",
    "toc_visible": true
   },
   "kernelspec": {
diff --git a/notebooks/community/ml_ops/stage2/get_started_vertex_experiments.ipynb b/notebooks/community/ml_ops/stage2/get_started_vertex_experiments.ipynb
index 826e28e1c..be4df4551 100644
--- a/notebooks/community/ml_ops/stage2/get_started_vertex_experiments.ipynb
+++ b/notebooks/community/ml_ops/stage2/get_started_vertex_experiments.ipynb
@@ -472,7 +472,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION $BUCKET_URI"
+    "! gcloud storage buckets create --location=$REGION $BUCKET_URI"
    ]
   },
   {
@@ -492,7 +492,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls -al $BUCKET_URI"
+    "! gcloud storage ls --all-versions --long $BUCKET_URI"
    ]
   },
   {
@@ -1353,7 +1353,7 @@
     "! rm -f custom.tar custom.tar.gz\n",
     "! tar cvf custom.tar custom\n",
     "! gzip custom.tar\n",
-    "! gsutil cp custom.tar.gz $BUCKET_URI/trainer.tar.gz"
+    "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer.tar.gz"
    ]
   },
   {
@@ -1554,7 +1554,7 @@
     "delete_bucket = False\n",
     "\n",
     "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil rm -rf {BUCKET_URI}"
+    "    ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
    ]
   }
  ],
diff --git a/notebooks/community/pipelines/google_cloud_pipeline_components_bqml_pipeline_anomaly_detection.ipynb b/notebooks/community/pipelines/google_cloud_pipeline_components_bqml_pipeline_anomaly_detection.ipynb
index 438c3d2e4..688555547 100644
--- a/notebooks/community/pipelines/google_cloud_pipeline_components_bqml_pipeline_anomaly_detection.ipynb
+++ b/notebooks/community/pipelines/google_cloud_pipeline_components_bqml_pipeline_anomaly_detection.ipynb
@@ -373,7 +373,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI"
+    "! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI"
    ]
   },
   {
@@ -641,13 +641,13 @@
     "        + \"/evaluation_metrics\"\n",
     "    )\n",
     "    if tf.io.gfile.exists(EXECUTE_OUTPUT):\n",
-    "        ! gsutil cat $EXECUTE_OUTPUT\n",
+    "        ! gcloud storage cat $EXECUTE_OUTPUT\n",
     "        return EXECUTE_OUTPUT\n",
     "    elif tf.io.gfile.exists(GCP_RESOURCES):\n",
-    "        ! gsutil cat $GCP_RESOURCES\n",
+    "        ! gcloud storage cat $GCP_RESOURCES\n",
     "        return GCP_RESOURCES\n",
     "    elif tf.io.gfile.exists(EVAL_METRICS):\n",
-    "        ! gsutil cat $EVAL_METRICS\n",
+    "        ! gcloud storage cat $EVAL_METRICS\n",
     "        return EVAL_METRICS\n",
     "\n",
     "    return None"
@@ -1470,7 +1470,7 @@
     "# delete bucket\n",
     "delete_bucket = False\n",
     "if os.getenv(\"IS_TESTING\") or delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_URI\n",
+    "    ! gcloud storage rm --recursive $BUCKET_URI\n",
     "\n",
-    "# Remove local resorces\n",
+    "# Remove local resources\n",
     "delete_local_resources = False\n",
diff --git a/notebooks/official/automl/automl_image_object_detection_export_edge.ipynb b/notebooks/official/automl/automl_image_object_detection_export_edge.ipynb
index bfaad7c6b..c85ff6a4f 100644
--- a/notebooks/official/automl/automl_image_object_detection_export_edge.ipynb
+++ b/notebooks/official/automl/automl_image_object_detection_export_edge.ipynb
@@ -284,7 +284,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l $LOCATION $BUCKET_URI"
+    "! gcloud storage buckets create --location $LOCATION $BUCKET_URI"
    ]
   },
   {
@@ -381,7 +381,7 @@
     "\n",
-    "# Copy images using gsutil commands directly\n",
+    "# Copy images using gcloud storage commands directly\n",
     "for src, dest in zip(df.iloc[:, 0], df[\"destination_path\"]):\n",
-    "    ! gsutil -m cp {src} {dest}\n",
+    "    ! gcloud storage cp {src} {dest}\n",
     "\n",
     "print(f\"Files copied to {BUCKET_URI}\")"
    ]
@@ -462,11 +462,11 @@
     "else:\n",
     "    FILE = IMPORT_FILE\n",
     "\n",
-    "count = ! gsutil cat $FILE | wc -l\n",
+    "count = ! gcloud storage cat $FILE | wc -l\n",
     "print(\"Number of Examples\", int(count[0]))\n",
     "\n",
     "print(\"First 10 rows\")\n",
-    "! gsutil cat $FILE | head"
+    "! gcloud storage cat $FILE | head"
    ]
   },
   {
@@ -675,9 +675,9 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil ls $model_package\n",
+    "! gcloud storage ls $model_package\n",
     "# Download the model artifacts\n",
-    "! gsutil cp -r $model_package tflite\n",
+    "! gcloud storage cp --recursive $model_package tflite\n",
     "\n",
     "tflite_path = \"tflite/model.tflite\""
    ]
@@ -736,7 +736,7 @@
    },
    "outputs": [],
    "source": [
-    "test_items = ! gsutil cat $IMPORT_FILE | head -n1\n",
+    "test_items = ! gcloud storage cat $IMPORT_FILE | head -n1\n",
     "test_item = test_items[0].split(\",\")[0]\n",
     "\n",
     "with tf.io.gfile.GFile(test_item, \"rb\") as f:\n",
@@ -824,7 +824,7 @@
     "dag.delete()\n",
     "\n",
     "if delete_bucket:\n",
-    "    ! gsutil rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/official/model_evaluation/automl_text_classification_model_evaluation.ipynb b/notebooks/official/model_evaluation/automl_text_classification_model_evaluation.ipynb
index ff97de8e7..8989dd967 100644
--- a/notebooks/official/model_evaluation/automl_text_classification_model_evaluation.ipynb
+++ b/notebooks/official/model_evaluation/automl_text_classification_model_evaluation.ipynb
@@ -363,7 +363,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {REGION} -p {PROJECT_ID} {BUCKET_URI}"
+    "! gcloud storage buckets create --location={REGION} --project={PROJECT_ID} {BUCKET_URI}"
    ]
   },
   {
@@ -437,9 +437,9 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n",
+    "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
     "\n",
-    "! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI"
+    "! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
    ]
   },
   {
@@ -1362,7 +1362,7 @@
     "\n",
     "# delete the Cloud Storage bucket\n",
     "if delete_bucket and os.getenv(\"IS_TESTING\"):\n",
-    "    ! gsutil rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/official/model_monitoring/batch_prediction_model_monitoring.ipynb b/notebooks/official/model_monitoring/batch_prediction_model_monitoring.ipynb
index 09c4b154c..c221ff421 100644
--- a/notebooks/official/model_monitoring/batch_prediction_model_monitoring.ipynb
+++ b/notebooks/official/model_monitoring/batch_prediction_model_monitoring.ipynb
@@ -359,7 +359,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
+    "! gcloud storage buckets create --location {LOCATION} --project {PROJECT_ID} {BUCKET_URI}"
    ]
   },
   {
@@ -592,7 +592,7 @@
     "TRAINING_DATASET = f\"{INPUT_GS_PATH}/churn_bp_insample.csv\"\n",
     "TRAINING_DATASET_FORMAT = \"csv\"\n",
     "\n",
-    "! gsutil copy $PUBLIC_TRAINING_DATASET $TRAINING_DATASET"
+    "! gcloud storage cp $PUBLIC_TRAINING_DATASET $TRAINING_DATASET"
    ]
   },
   {
@@ -780,8 +780,8 @@
     "PREDICTION_STATS_GCS_PATH = STATS_GCS_FOLDER + PREDICTION_STATS_SUBPATH\n",
     "print(\"Looking up statistics from: \" + PREDICTION_STATS_GCS_PATH)\n",
     "\n",
-    "! gsutil cp $TRAINING_STATS_GCS_PATH ./training_stats.pb\n",
-    "! gsutil cp $PREDICTION_STATS_GCS_PATH ./prediction_stats.pb\n",
+    "! gcloud storage cp $TRAINING_STATS_GCS_PATH ./training_stats.pb\n",
+    "! gcloud storage cp $PREDICTION_STATS_GCS_PATH ./prediction_stats.pb\n",
     "\n",
     "\n",
     "# util function to load stats binary file from GCS\n",
@@ -820,7 +820,7 @@
     "    STATS_GCS_FOLDER\n",
     "    + \"stats_and_anomalies/anomalies/training_prediction_skew_anomalies\"\n",
     ")\n",
-    "! gsutil cat $SKEW_GS_PATH"
+    "! gcloud storage cat $SKEW_GS_PATH"
    ]
   },
   {
@@ -879,7 +879,7 @@
     "# Delete Cloud Storage bucket\n",
     "delete_bucket = False\n",
     "if delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_URI\n",
+    "    ! gcloud storage rm --recursive $BUCKET_URI\n",
     "\n",
     "! rm -f ./training_stats.pb\n",
rm -f ./prediction_stats.pb" diff --git a/notebooks/official/training/hyperparameter_tuning_xgboost.ipynb b/notebooks/official/training/hyperparameter_tuning_xgboost.ipynb index d11d272ca..f899e60a1 100644 --- a/notebooks/official/training/hyperparameter_tuning_xgboost.ipynb +++ b/notebooks/official/training/hyperparameter_tuning_xgboost.ipynb @@ -283,7 +283,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}" + "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ] }, { @@ -532,8 +532,8 @@ "def get_data():\n", " logging.info(\"Downloading training data and labelsfrom: {}, {}\".format(args.dataset_data_url, args.dataset_labels_url))\n", " # gsutil outputs everything to stderr. Hence, the need to divert it to stdout.\n", - " subprocess.check_call(['gsutil', 'cp', args.dataset_data_url, 'data.csv'], stderr=sys.stdout)\n", - " subprocess.check_call(['gsutil', 'cp', args.dataset_labels_url, 'labels.csv'], stderr=sys.stdout)\n", + " subprocess.check_call(['gcloud', 'storage', 'cp', args.dataset_data_url, 'data.csv'], stderr=sys.stdout)\n", + " subprocess.check_call(['gcloud', 'storage', 'cp', args.dataset_labels_url, 'labels.csv'], stderr=sys.stdout)\n", "\n", "\n", " # Load data into pandas, then use `.values` to get NumPy arrays\n", @@ -619,7 +619,7 @@ "! rm -f custom.tar custom.tar.gz\n", "! tar cvf custom.tar custom\n", "! gzip custom.tar\n", - "! gsutil cp custom.tar.gz $BUCKET_URI/trainer_iris.tar.gz" + "! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_iris.tar.gz" ] }, { @@ -922,7 +922,7 @@ "# Fetch the best model\n", "BEST_MODEL_DIR = MODEL_DIR + \"/\" + best[0] + \"/model\"\n", "\n", - "! gsutil ls {BEST_MODEL_DIR}" + "! gcloud storage ls {BEST_MODEL_DIR}" ] }, { @@ -958,7 +958,7 @@ "delete_bucket = False # Set True to delete the bucket\n", "\n", "if delete_bucket:\n", - " ! gsutil rm -r $BUCKET_URI\n", + " ! gcloud storage rm --recursive $BUCKET_URI\n", "\n", "# Delete the locally generated files\n", "! rm -rf custom/\n",