
Commit ede41c2

Authored by googlyrahman, bhandarivijay-png, and gurusai-voleti

Migrate gsutil usage to gcloud storage (#4335)

* Migrate gsutil usage to gcloud storage
* removed changes for model garden

---------

Co-authored-by: bhandarivijay <bhandarivijay@google.com>
Co-authored-by: gurusai-voleti <gvoleti@google.com>

1 parent ca53786 · commit ede41c2

9 files changed: +49 −99 lines changed
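
Every edit in this commit is a one-for-one command substitution. For quick reference, a sketch of the gsutil → gcloud storage equivalences applied in the diffs below, using the same shell variables the notebooks define (the old command is shown as a comment above its replacement):

# gsutil mb -l $REGION gs://$BUCKET_NAME
gcloud storage buckets create --location=$REGION gs://$BUCKET_NAME

# gsutil ls -al gs://$BUCKET_NAME
gcloud storage ls --all-versions --long gs://$BUCKET_NAME

# gsutil cat $IMPORT_FILE | head -n1
gcloud storage cat $IMPORT_FILE | head -n1

# gsutil cp custom.tar.gz $BUCKET_NAME/trainer_boston.tar.gz
gcloud storage cp custom.tar.gz $BUCKET_NAME/trainer_boston.tar.gz

# gsutil -m rm -r $PIPELINE_ROOT
# (-m is dropped with no replacement: gcloud storage parallelizes by default)
gcloud storage rm --recursive $PIPELINE_ROOT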

community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/mlops_pipeline_tf_agents_bandits_movie_recommendation/mlops_pipeline_tf_agents_bandits_movie_recommendation.ipynb

Lines changed: 4 additions & 9 deletions
@@ -478,8 +478,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil mb -l $REGION $BUCKET_NAME"
-]
+"! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -498,8 +497,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil ls -al $BUCKET_NAME"
-]
+"! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -582,8 +580,7 @@
 "outputs": [],
 "source": [
 "# Download the sample data into your RAW_DATA_PATH\n",
-"! gsutil cp \"gs://cloud-samples-data/vertex-ai/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/u.data\" $RAW_DATA_PATH"
-]
+"! gcloud storage cp \"gs://cloud-samples-data/vertex-ai/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/u.data\" $RAW_DATA_PATH" ]
 },
 {
 "cell_type": "code",
@@ -1621,9 +1618,7 @@
 "! gcloud scheduler jobs delete $SIMULATOR_SCHEDULER_JOB --quiet\n",
 "\n",
 "# Delete Cloud Storage objects that were created.\n",
-"! gsutil -m rm -r $PIPELINE_ROOT\n",
-"! gsutil -m rm -r $TRAINING_ARTIFACTS_DIR"
-]
+"! gcloud storage rm --recursive $PIPELINE_ROOT\n", "! gcloud storage rm --recursive $TRAINING_ARTIFACTS_DIR" ]
 }
 ],
 "metadata": {

notebooks/community/gapic/automl/showcase_automl_tabular_binary_classification_online.ipynb

Lines changed: 4 additions & 8 deletions
@@ -735,14 +735,11 @@
 },
 "outputs": [],
 "source": [
-"count = ! gsutil cat $IMPORT_FILE | wc -l\n",
-"print(\"Number of Examples\", int(count[0]))\n",
+"count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
-"! gsutil cat $IMPORT_FILE | head\n",
-"\n",
-"heading = ! gsutil cat $IMPORT_FILE | head -n1\n",
-"label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
+"! gcloud storage cat $IMPORT_FILE | head\n", "\n",
+"heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
 "print(\"Label Column Name\", label_column)\n",
 "if label_column is None:\n",
 " raise Exception(\"label column missing\")"
@@ -1668,8 +1665,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-" ! gsutil rm -r $BUCKET_NAME"
-]
+" ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {

notebooks/community/gapic/automl/showcase_automl_tabular_classification_online.ipynb

Lines changed: 4 additions & 8 deletions
@@ -735,14 +735,11 @@
 },
 "outputs": [],
 "source": [
-"count = ! gsutil cat $IMPORT_FILE | wc -l\n",
-"print(\"Number of Examples\", int(count[0]))\n",
+"count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
 "\n",
 "print(\"First 10 rows\")\n",
-"! gsutil cat $IMPORT_FILE | head\n",
-"\n",
-"heading = ! gsutil cat $IMPORT_FILE | head -n1\n",
-"label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
+"! gcloud storage cat $IMPORT_FILE | head\n", "\n",
+"heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
 "print(\"Label Column Name\", label_column)\n",
 "if label_column is None:\n",
 " raise Exception(\"label column missing\")"
@@ -1645,8 +1642,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-" ! gsutil rm -r $BUCKET_NAME"
-]
+" ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {

notebooks/community/migration/UJ3 legacy Custom Training Custom Container TF Keras.ipynb

Lines changed: 6 additions & 11 deletions
@@ -289,8 +289,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil mb -l $REGION gs://$BUCKET_NAME"
-]
+"! gcloud storage buckets create gs://$BUCKET_NAME --location $REGION" ]
 },
 {
 "cell_type": "markdown",
@@ -309,8 +308,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil ls -al gs://$BUCKET_NAME"
-]
+"! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -1016,8 +1014,7 @@
 " b64str = base64.b64encode(bytes.numpy()).decode(\"utf-8\")\n",
 " f.write(json.dumps({\"key\": img, input_name: {\"b64\": b64str}}) + \"\\n\")\n",
 "\n",
-"! gsutil cat $gcs_input_uri"
-]
+"! gcloud storage cat $gcs_input_uri" ]
 },
 {
 "cell_type": "markdown",
@@ -1300,10 +1297,9 @@
 " break\n",
 " else:\n",
 " folder = response[\"predictionInput\"][\"outputPath\"][:-1]\n",
-" ! gsutil ls $folder/prediction*\n",
+" ! gcloud storage ls $folder/prediction*\n",
 "\n",
-" ! gsutil cat $folder/prediction*\n",
-" break\n",
+" ! gcloud storage cat $folder/prediction*\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -1982,8 +1978,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-" ! gsutil rm -r gs://$BUCKET_NAME"
-]
+" ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
 }
 ],
 "metadata": {

notebooks/community/migration/UJ7 AutoML for natural language with Vertex AI Text Entity Extraction.ipynb

Lines changed: 7 additions & 14 deletions
@@ -336,8 +336,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil mb -l $REGION gs://$BUCKET_NAME"
-]
+"! gcloud storage buckets create --location=$REGION gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -356,8 +355,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil ls -al gs://$BUCKET_NAME"
-]
+"! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -554,8 +552,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil cat $IMPORT_FILE | head -n 1"
-]
+"! gcloud storage cat $IMPORT_FILE | head -n 1" ]
 },
 {
 "cell_type": "markdown",
@@ -1484,9 +1481,7 @@
 "with tf.io.gfile.GFile(gcs_input_uri, \"w\") as f:\n",
 " f.write(json.dumps({\"content\": gcs_test_item, \"mime_type\": \"text/plain\"}) + \"\\n\")\n",
 "\n",
-"! gsutil cat $gcs_input_uri\n",
-"! gsutil cat $gcs_test_item"
-]
+"! gcloud storage cat $gcs_input_uri\n", "! gcloud storage cat $gcs_test_item" ]
 },
 {
 "cell_type": "markdown",
@@ -1666,10 +1661,8 @@
 " break\n",
 " else:\n",
 " folder = response.output_config.gcs_destination.output_uri_prefix[:-1]\n",
-" ! gsutil ls $folder/prediction*/*.jsonl\n",
-"\n",
-" ! gsutil cat $folder/prediction*/*.jsonl\n",
-" break\n",
+" ! gcloud storage ls $folder/prediction*/*.jsonl\n", "\n",
+" ! gcloud storage cat $folder/prediction*/*.jsonl\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -2260,7 +2253,7 @@
 "\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-" ! gsutil rm -r gs://$BUCKET_NAME"
+" ! gcloud storage rm --recursive gs://$BUCKET_NAME"
 ]
 }
 ],

notebooks/community/migration/UJ8 AutoML for natural language with Vertex AI - Text Sentiment Analysis.ipynb

Lines changed: 10 additions & 21 deletions
@@ -318,8 +318,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil mb -l $REGION gs://$BUCKET_NAME"
-]
+"! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -338,8 +337,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil ls -al gs://$BUCKET_NAME"
-]
+"! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -536,8 +534,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil cat $IMPORT_FILE | head -n 10"
-]
+"! gcloud storage cat $IMPORT_FILE | head -n 10" ]
 },
 {
 "cell_type": "markdown",
@@ -1435,8 +1432,7 @@
 "\n",
 "import tensorflow as tf\n",
 "\n",
-"test_data = ! gsutil cat $IMPORT_FILE | head -n1\n",
-"\n",
+"test_data = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "\n",
 "test_item = str(test_data[0]).split(\",\")[1]\n",
 "test_label = str(test_data[0]).split(\",\")[2]\n",
 "\n",
@@ -1449,9 +1445,7 @@
 " data = {\"content\": gcs_test_item, \"mime_type\": \"text/plain\"}\n",
 " f.write(json.dumps(data) + \"\\n\")\n",
 "\n",
-"! gsutil cat $gcs_input_uri\n",
-"! gsutil cat $gcs_test_item"
-]
+"! gcloud storage cat $gcs_input_uri\n", "! gcloud storage cat $gcs_test_item" ]
 },
 {
 "cell_type": "markdown",
@@ -1749,8 +1743,7 @@
 "source": [
 "def get_latest_predictions(gcs_out_dir):\n",
 " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n",
-" folders = !gsutil ls $gcs_out_dir\n",
-" latest = \"\"\n",
+" folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n",
 " for folder in folders:\n",
 " subfolder = folder.split(\"/\")[-2]\n",
 " if subfolder.startswith(\"prediction-\"):\n",
@@ -1769,10 +1762,8 @@
 " folder = get_latest_predictions(\n",
 " response.output_config.gcs_destination.output_uri_prefix\n",
 " )\n",
-" ! gsutil ls $folder/prediction*.jsonl\n",
-"\n",
-" ! gsutil cat $folder/prediction*.jsonl\n",
-" break\n",
+" ! gcloud storage ls $folder/prediction*.jsonl\n", "\n",
+" ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n",
 " time.sleep(60)"
 ]
 },
@@ -1815,8 +1806,7 @@
 },
 "outputs": [],
 "source": [
-"test_data = ! gsutil cat $IMPORT_FILE | head -n1\n",
-"\n",
+"test_data = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "\n",
 "test_item = str(test_data[0]).split(\",\")[1]\n",
 "test_label = str(test_data[0]).split(\",\")[2]\n",
 "\n",
@@ -2336,8 +2326,7 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-" ! gsutil rm -r gs://$BUCKET_NAME"
-]
+" ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
 }
 ],
 "metadata": {

notebooks/community/sdk/sdk_custom_tabular_regression_online_explain_get_metadata.ipynb

Lines changed: 4 additions & 8 deletions
@@ -476,8 +476,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil mb -l $REGION $BUCKET_NAME"
-]
+"! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -496,8 +495,7 @@
 },
 "outputs": [],
 "source": [
-"! gsutil ls -al $BUCKET_NAME"
-]
+"! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
 },
 {
 "cell_type": "markdown",
@@ -922,8 +920,7 @@
 "! rm -f custom.tar custom.tar.gz\n",
 "! tar cvf custom.tar custom\n",
 "! gzip custom.tar\n",
-"! gsutil cp custom.tar.gz $BUCKET_NAME/trainer_boston.tar.gz"
-]
+"! gcloud storage cp custom.tar.gz $BUCKET_NAME/trainer_boston.tar.gz" ]
 },
 {
 "cell_type": "markdown",
@@ -1789,8 +1786,7 @@
 " print(e)\n",
 "\n",
 " if \"BUCKET_NAME\" in globals():\n",
-" ! gsutil rm -r $BUCKET_NAME"
-]
+" ! gcloud storage rm --recursive $BUCKET_NAME" ]
 }
 ],
 "metadata": {
