Skip to content

Commit 8b618bc

Browse files
googlyrahman, bhandarivijay-png, and gurusai-voleti
authored
Migrate gsutil usage to gcloud storage (#4326)
* Migrate gsutil usage to gcloud storage
* Manual Changes - Updated the cell by replacing 'gsutil copy' with the correct 'gcloud storage cp'
* Manual Changes - Updated the cell by replacing 'gsutil copy' with the correct 'gcloud storage cp'
* Revert "Manual Changes - Updated the cell by replacing 'gsutil copy' with the correct 'gcloud storage cp'" (this reverts commit 175eaa4)
* Manual Changes - Updated the cell by replacing 'gsutil copy' with the correct 'gcloud storage cp'
* Changes for 4326
* Changes for 4326
* Linter fix issue for 4326
* Removed model garden changes
* Update model_garden_movinet_action_recognition.ipynb

---------

Co-authored-by: bhandarivijay <bhandarivijay@google.com>
Co-authored-by: gurusai-voleti <gvoleti@google.com>
1 parent 6bbe3bc commit 8b618bc

File tree

8 files changed

+43
-45
lines changed

8 files changed

+43
-45
lines changed

notebooks/community/bigquery_ml/bqml-online-prediction.ipynb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -359,7 +359,7 @@
359359
},
360360
"outputs": [],
361361
"source": [
362-
"! gsutil mb -l {REGION} -p {PROJECT_ID} {BUCKET_URI}"
362+
"! gcloud storage buckets create --location={REGION} --project={PROJECT_ID} {BUCKET_URI}"
363363
]
364364
},
365365
{
@@ -1098,7 +1098,7 @@
10981098
" ! bq rm -r -f $PROJECT_ID:$BQ_DATASET_NAME\n",
10991099
"# delete the Cloud Storage bucket\n",
11001100
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
1101-
" ! gsutil -m rm -r $BUCKET_URI"
1101+
" ! gcloud storage rm --recursive $BUCKET_URI"
11021102
]
11031103
}
11041104
],

notebooks/community/migration/UJ13 Data Labeling task.ipynb

Lines changed: 8 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -325,7 +325,7 @@
325325
},
326326
"outputs": [],
327327
"source": [
328-
"! gsutil mb -l $REGION gs://$BUCKET_NAME"
328+
"! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME"
329329
]
330330
},
331331
{
@@ -345,7 +345,7 @@
345345
},
346346
"outputs": [],
347347
"source": [
348-
"! gsutil ls -al gs://$BUCKET_NAME"
348+
"! gcloud storage ls --all-versions --long gs://$BUCKET_NAME"
349349
]
350350
},
351351
{
@@ -384,9 +384,7 @@
384384
"import time\n",
385385
"\n",
386386
"from google.cloud.aiplatform import gapic as aip\n",
387-
"from google.protobuf import json_format\n",
388-
"from google.protobuf.json_format import MessageToJson, ParseDict\n",
389-
"from google.protobuf.struct_pb2 import Struct, Value"
387+
"from google.protobuf import json_format\n"
390388
]
391389
},
392390
{
@@ -513,7 +511,7 @@
513511
"IMPORT_FILE = \"gs://\" + BUCKET_NAME + \"/labeling.csv\"\n",
514512
"with tf.io.gfile.GFile(IMPORT_FILE, \"w\") as f:\n",
515513
" for lf in LABELING_FILES:\n",
516-
" ! wget {lf} | gsutil cp {lf.split(\"/\")[-1]} gs://{BUCKET_NAME}\n",
514+
" ! wget {lf} | gcloud storage cp {lf.split(\"/\")[-1]} gs://{BUCKET_NAME}\n",
517515
" f.write(\"gs://\" + BUCKET_NAME + \"/\" + lf.split(\"/\")[-1] + \"\\n\")"
518516
]
519517
},
@@ -525,7 +523,7 @@
525523
},
526524
"outputs": [],
527525
"source": [
528-
"! gsutil cat $IMPORT_FILE"
526+
"! gcloud storage cat $IMPORT_FILE"
529527
]
530528
},
531529
{
@@ -1007,7 +1005,7 @@
10071005
"outputs": [],
10081006
"source": [
10091007
"# create placeholder file for valid PDF file with instruction for data labeling\n",
1010-
"! echo \"this is instruction\" >> instruction.txt | gsutil cp instruction.txt gs://$BUCKET_NAME"
1008+
"! echo \"this is instruction\" >> instruction.txt | gcloud storage cp instruction.txt gs://$BUCKET_NAME"
10111009
]
10121010
},
10131011
{
@@ -1382,7 +1380,7 @@
13821380
"\n",
13831381
"\n",
13841382
"if delete_bucket and \"BUCKET_NAME\" in globals():\n",
1385-
" ! gsutil rm -r gs://$BUCKET_NAME"
1383+
" ! gcloud storage rm --recursive gs://$BUCKET_NAME"
13861384
]
13871385
}
13881386
],
@@ -1450,7 +1448,7 @@
14501448
"v6isqzPQ_jAw",
14511449
"ZCyd1qAb_jAx"
14521450
],
1453-
"name": "UJ13 unified Data Labeling task.ipynb",
1451+
"name": "UJ13 Data Labeling task.ipynb",
14541452
"toc_visible": true
14551453
},
14561454
"kernelspec": {

notebooks/community/ml_ops/stage2/get_started_vertex_experiments.ipynb

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -472,7 +472,7 @@
472472
},
473473
"outputs": [],
474474
"source": [
475-
"! gsutil mb -l $REGION $BUCKET_URI"
475+
"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
476476
]
477477
},
478478
{
@@ -492,7 +492,7 @@
492492
},
493493
"outputs": [],
494494
"source": [
495-
"! gsutil ls -al $BUCKET_URI"
495+
"! gcloud storage ls --all-versions --long $BUCKET_URI"
496496
]
497497
},
498498
{
@@ -1353,7 +1353,7 @@
13531353
"! rm -f custom.tar custom.tar.gz\n",
13541354
"! tar cvf custom.tar custom\n",
13551355
"! gzip custom.tar\n",
1356-
"! gsutil cp custom.tar.gz $BUCKET_URI/trainer.tar.gz"
1356+
"! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer.tar.gz"
13571357
]
13581358
},
13591359
{
@@ -1554,7 +1554,7 @@
15541554
"delete_bucket = False\n",
15551555
"\n",
15561556
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
1557-
" ! gsutil rm -rf {BUCKET_URI}"
1557+
" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
15581558
]
15591559
}
15601560
],

notebooks/community/pipelines/google_cloud_pipeline_components_bqml_pipeline_anomaly_detection.ipynb

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -373,7 +373,7 @@
373373
},
374374
"outputs": [],
375375
"source": [
376-
"! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI"
376+
"! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI"
377377
]
378378
},
379379
{
@@ -641,13 +641,13 @@
641641
" + \"/evaluation_metrics\"\n",
642642
" )\n",
643643
" if tf.io.gfile.exists(EXECUTE_OUTPUT):\n",
644-
" ! gsutil cat $EXECUTE_OUTPUT\n",
644+
" ! gcloud storage cat $EXECUTE_OUTPUT\n",
645645
" return EXECUTE_OUTPUT\n",
646646
" elif tf.io.gfile.exists(GCP_RESOURCES):\n",
647-
" ! gsutil cat $GCP_RESOURCES\n",
647+
" ! gcloud storage cat $GCP_RESOURCES\n",
648648
" return GCP_RESOURCES\n",
649649
" elif tf.io.gfile.exists(EVAL_METRICS):\n",
650-
" ! gsutil cat $EVAL_METRICS\n",
650+
" ! gcloud storage cat $EVAL_METRICS\n",
651651
" return EVAL_METRICS\n",
652652
"\n",
653653
" return None"
@@ -1470,7 +1470,7 @@
14701470
"# delete bucket\n",
14711471
"delete_bucket = False\n",
14721472
"if os.getenv(\"IS_TESTING\") or delete_bucket:\n",
1473-
" ! gsutil -m rm -r $BUCKET_URI\n",
1473+
" ! gcloud storage rm --recursive $BUCKET_URI\n",
14741474
"\n",
14751475
"# Remove local resorces\n",
14761476
"delete_local_resources = False\n",

notebooks/official/automl/automl_image_object_detection_export_edge.ipynb

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -284,7 +284,7 @@
284284
},
285285
"outputs": [],
286286
"source": [
287-
"! gsutil mb -l $LOCATION $BUCKET_URI"
287+
"! gcloud storage buckets create --location $LOCATION $BUCKET_URI"
288288
]
289289
},
290290
{
@@ -381,7 +381,7 @@
381381
"\n",
382382
"# Copy images using gsutil commands directly\n",
383383
"for src, dest in zip(df.iloc[:, 0], df[\"destination_path\"]):\n",
384-
" ! gsutil -m cp {src} {dest}\n",
384+
" ! gcloud storage cp {src} {dest}\n",
385385
"\n",
386386
"print(f\"Files copied to {BUCKET_URI}\")"
387387
]
@@ -462,11 +462,11 @@
462462
"else:\n",
463463
" FILE = IMPORT_FILE\n",
464464
"\n",
465-
"count = ! gsutil cat $FILE | wc -l\n",
465+
"count = ! gcloud storage cat $FILE | wc -l\n",
466466
"print(\"Number of Examples\", int(count[0]))\n",
467467
"\n",
468468
"print(\"First 10 rows\")\n",
469-
"! gsutil cat $FILE | head"
469+
"! gcloud storage cat $FILE | head"
470470
]
471471
},
472472
{
@@ -675,9 +675,9 @@
675675
},
676676
"outputs": [],
677677
"source": [
678-
"! gsutil ls $model_package\n",
678+
"! gcloud storage ls $model_package\n",
679679
"# Download the model artifacts\n",
680-
"! gsutil cp -r $model_package tflite\n",
680+
"! gcloud storage cp --recursive $model_package tflite\n",
681681
"\n",
682682
"tflite_path = \"tflite/model.tflite\""
683683
]
@@ -736,7 +736,7 @@
736736
},
737737
"outputs": [],
738738
"source": [
739-
"test_items = ! gsutil cat $IMPORT_FILE | head -n1\n",
739+
"test_items = ! gcloud storage cat $IMPORT_FILE | head -n1\n",
740740
"test_item = test_items[0].split(\",\")[0]\n",
741741
"\n",
742742
"with tf.io.gfile.GFile(test_item, \"rb\") as f:\n",
@@ -824,7 +824,7 @@
824824
"dag.delete()\n",
825825
"\n",
826826
"if delete_bucket:\n",
827-
" ! gsutil rm -r $BUCKET_URI"
827+
" ! gcloud storage rm --recursive $BUCKET_URI"
828828
]
829829
}
830830
],

notebooks/official/model_evaluation/automl_text_classification_model_evaluation.ipynb

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -363,7 +363,7 @@
363363
},
364364
"outputs": [],
365365
"source": [
366-
"! gsutil mb -l {REGION} -p {PROJECT_ID} {BUCKET_URI}"
366+
"! gcloud storage buckets create --location={REGION} --project={PROJECT_ID} {BUCKET_URI}"
367367
]
368368
},
369369
{
@@ -437,9 +437,9 @@
437437
},
438438
"outputs": [],
439439
"source": [
440-
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n",
440+
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
441441
"\n",
442-
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI"
442+
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
443443
]
444444
},
445445
{
@@ -1362,7 +1362,7 @@
13621362
"\n",
13631363
"# delete the Cloud Storage bucket\n",
13641364
"if delete_bucket and os.getenv(\"IS_TESTING\"):\n",
1365-
" ! gsutil rm -r $BUCKET_URI"
1365+
" ! gcloud storage rm --recursive $BUCKET_URI"
13661366
]
13671367
}
13681368
],

notebooks/official/model_monitoring/batch_prediction_model_monitoring.ipynb

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -359,7 +359,7 @@
359359
},
360360
"outputs": [],
361361
"source": [
362-
"! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
362+
"! gcloud storage buckets create --location {LOCATION} --project {PROJECT_ID} {BUCKET_URI}"
363363
]
364364
},
365365
{
@@ -592,7 +592,7 @@
592592
"TRAINING_DATASET = f\"{INPUT_GS_PATH}/churn_bp_insample.csv\"\n",
593593
"TRAINING_DATASET_FORMAT = \"csv\"\n",
594594
"\n",
595-
"! gsutil copy $PUBLIC_TRAINING_DATASET $TRAINING_DATASET"
595+
"! gcloud storage cp $PUBLIC_TRAINING_DATASET $TRAINING_DATASET"
596596
]
597597
},
598598
{
@@ -780,8 +780,8 @@
780780
"PREDICTION_STATS_GCS_PATH = STATS_GCS_FOLDER + PREDICTION_STATS_SUBPATH\n",
781781
"print(\"Looking up statistics from: \" + PREDICTION_STATS_GCS_PATH)\n",
782782
"\n",
783-
"! gsutil cp $TRAINING_STATS_GCS_PATH ./training_stats.pb\n",
784-
"! gsutil cp $PREDICTION_STATS_GCS_PATH ./prediction_stats.pb\n",
783+
"! gcloud storage cp $TRAINING_STATS_GCS_PATH ./training_stats.pb\n",
784+
"! gcloud storage cp $PREDICTION_STATS_GCS_PATH ./prediction_stats.pb\n",
785785
"\n",
786786
"\n",
787787
"# util function to load stats binary file from GCS\n",
@@ -820,7 +820,7 @@
820820
" STATS_GCS_FOLDER\n",
821821
" + \"stats_and_anomalies/anomalies/training_prediction_skew_anomalies\"\n",
822822
")\n",
823-
"! gsutil cat $SKEW_GS_PATH"
823+
"! gcloud storage cat $SKEW_GS_PATH"
824824
]
825825
},
826826
{
@@ -879,7 +879,7 @@
879879
"# Delete Cloud Storage bucket\n",
880880
"delete_bucket = False\n",
881881
"if delete_bucket:\n",
882-
" ! gsutil -m rm -r $BUCKET_URI\n",
882+
" ! gcloud storage rm --recursive $BUCKET_URI\n",
883883
"\n",
884884
"! rm -f ./training_stats.pb\n",
885885
"! rm -f ./prediction_stats.pb"

notebooks/official/training/hyperparameter_tuning_xgboost.ipynb

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -283,7 +283,7 @@
283283
},
284284
"outputs": [],
285285
"source": [
286-
"! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
286+
"! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
287287
]
288288
},
289289
{
@@ -532,8 +532,8 @@
532532
"def get_data():\n",
533533
" logging.info(\"Downloading training data and labelsfrom: {}, {}\".format(args.dataset_data_url, args.dataset_labels_url))\n",
534534
" # gsutil outputs everything to stderr. Hence, the need to divert it to stdout.\n",
535-
" subprocess.check_call(['gsutil', 'cp', args.dataset_data_url, 'data.csv'], stderr=sys.stdout)\n",
536-
" subprocess.check_call(['gsutil', 'cp', args.dataset_labels_url, 'labels.csv'], stderr=sys.stdout)\n",
535+
" subprocess.check_call(['gcloud', 'storage', 'cp', args.dataset_data_url, 'data.csv'], stderr=sys.stdout)\n",
536+
" subprocess.check_call(['gcloud', 'storage', 'cp', args.dataset_labels_url, 'labels.csv'], stderr=sys.stdout)\n",
537537
"\n",
538538
"\n",
539539
" # Load data into pandas, then use `.values` to get NumPy arrays\n",
@@ -619,7 +619,7 @@
619619
"! rm -f custom.tar custom.tar.gz\n",
620620
"! tar cvf custom.tar custom\n",
621621
"! gzip custom.tar\n",
622-
"! gsutil cp custom.tar.gz $BUCKET_URI/trainer_iris.tar.gz"
622+
"! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_iris.tar.gz"
623623
]
624624
},
625625
{
@@ -922,7 +922,7 @@
922922
"# Fetch the best model\n",
923923
"BEST_MODEL_DIR = MODEL_DIR + \"/\" + best[0] + \"/model\"\n",
924924
"\n",
925-
"! gsutil ls {BEST_MODEL_DIR}"
925+
"! gcloud storage ls {BEST_MODEL_DIR}"
926926
]
927927
},
928928
{
@@ -958,7 +958,7 @@
958958
"delete_bucket = False # Set True to delete the bucket\n",
959959
"\n",
960960
"if delete_bucket:\n",
961-
" ! gsutil rm -r $BUCKET_URI\n",
961+
" ! gcloud storage rm --recursive $BUCKET_URI\n",
962962
"\n",
963963
"# Delete the locally generated files\n",
964964
"! rm -rf custom/\n",

0 commit comments

Comments (0)