Skip to content

Commit 4d970f5

Browse files
Merge pull request #54 from bhandarivijay-png/ai-gsutil-migration-f8e60352e13b4d41883ada38a07e8e35
Changes for 4317
2 parents 6ad1e11 + ffcd59b commit 4d970f5

10 files changed: +121 additions, −65 deletions

notebooks/community/cohere/cohere_embedding_with_matching_engine.ipynb

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -449,7 +449,8 @@
449449
},
450450
"outputs": [],
451451
"source": [
452-
"! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
452+
"! gcloud storage buckets create --location=$REGION $BUCKET_NAME"
453+
]
453454
},
454455
{
455456
"cell_type": "markdown",
@@ -469,7 +470,8 @@
469470
"outputs": [],
470471
"source": [
471472
"# this will not return anything if the bucket is empty\n",
472-
"! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
473+
"! gcloud storage ls --all-versions --long $BUCKET_NAME"
474+
]
473475
},
474476
{
475477
"cell_type": "markdown",
@@ -586,7 +588,8 @@
586588
"# NOTE: Everything in this GCS DIR will be DELETED before uploading the data.\n",
587589
"# A CommandException is expected if no data is present\n",
588590
"\n",
589-
"! gcloud storage rm --recursive --continue-on-error {BUCKET_NAME}/*" ]
591+
"! gcloud storage rm --recursive --continue-on-error {BUCKET_NAME}/*"
592+
]
590593
},
591594
{
592595
"cell_type": "code",
@@ -596,7 +599,8 @@
596599
},
597600
"outputs": [],
598601
"source": [
599-
"! gcloud storage cp cohere_embeddings.json {BUCKET_NAME}/cohere_embeddings.json" ]
602+
"! gcloud storage cp cohere_embeddings.json {BUCKET_NAME}/cohere_embeddings.json"
603+
]
600604
},
601605
{
602606
"cell_type": "code",
@@ -606,7 +610,8 @@
606610
},
607611
"outputs": [],
608612
"source": [
609-
"! gcloud storage ls {BUCKET_NAME}" ]
613+
"! gcloud storage ls {BUCKET_NAME}"
614+
]
610615
},
611616
{
612617
"cell_type": "markdown",

notebooks/community/matching_engine/stream_update_for_matching_engine.ipynb

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -352,7 +352,8 @@
352352
},
353353
"outputs": [],
354354
"source": [
355-
"! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
355+
"! gcloud storage buckets create --location $REGION $BUCKET_NAME"
356+
]
356357
},
357358
{
358359
"cell_type": "markdown",
@@ -371,7 +372,8 @@
371372
},
372373
"outputs": [],
373374
"source": [
374-
"! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
375+
"! gcloud storage ls --all-versions --long $BUCKET_NAME"
376+
]
375377
},
376378
{
377379
"cell_type": "markdown",
@@ -458,7 +460,8 @@
458460
},
459461
"outputs": [],
460462
"source": [
461-
"! gcloud storage cp gs://cloud-samples-data/vertex-ai/matching_engine/glove-100-angular.hdf5 ." ]
463+
"! gcloud storage cp gs://cloud-samples-data/vertex-ai/matching_engine/glove-100-angular.hdf5 ."
464+
]
462465
},
463466
{
464467
"cell_type": "markdown",
@@ -547,7 +550,8 @@
547550
"source": [
548551
"# NOTE: Everything in this GCS DIR will be DELETED before uploading the data.\n",
549552
"\n",
550-
"! gcloud storage rm --recursive --continue-on-error {BUCKET_NAME}/*" ]
553+
"! gcloud storage rm --recursive --continue-on-error {BUCKET_NAME}/*"
554+
]
551555
},
552556
{
553557
"cell_type": "code",
@@ -557,7 +561,8 @@
557561
},
558562
"outputs": [],
559563
"source": [
560-
"! gcloud storage cp glove100.json {BUCKET_NAME}/glove100.json" ]
564+
"! gcloud storage cp glove100.json {BUCKET_NAME}/glove100.json"
565+
]
561566
},
562567
{
563568
"cell_type": "code",
@@ -567,7 +572,8 @@
567572
},
568573
"outputs": [],
569574
"source": [
570-
"! gcloud storage ls {BUCKET_NAME}" ]
575+
"! gcloud storage ls {BUCKET_NAME}"
576+
]
571577
},
572578
{
573579
"cell_type": "markdown",

notebooks/community/ml_ops/stage2/get_started_vertex_training_sklearn.ipynb

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -473,7 +473,8 @@
473473
},
474474
"outputs": [],
475475
"source": [
476-
"! gcloud storage buckets create --location $REGION $BUCKET_URI" ]
476+
"! gcloud storage buckets create --location $REGION $BUCKET_URI"
477+
]
477478
},
478479
{
479480
"cell_type": "markdown",
@@ -492,7 +493,8 @@
492493
},
493494
"outputs": [],
494495
"source": [
495-
"! gcloud storage ls --all-versions --long $BUCKET_URI" ]
496+
"! gcloud storage ls --all-versions --long $BUCKET_URI"
497+
]
496498
},
497499
{
498500
"cell_type": "markdown",
@@ -939,7 +941,8 @@
939941
"! rm -f custom.tar custom.tar.gz\n",
940942
"! tar cvf custom.tar custom\n",
941943
"! gzip custom.tar\n",
942-
"! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_newsaggr.tar.gz" ]
944+
"! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_newsaggr.tar.gz"
945+
]
943946
},
944947
{
945948
"cell_type": "markdown",
@@ -1175,7 +1178,8 @@
11751178
"\n",
11761179
"delete_bucket = False\n",
11771180
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
1178-
" ! gcloud storage rm --recursive $BUCKET_URI" ]
1181+
" ! gcloud storage rm --recursive $BUCKET_URI"
1182+
]
11791183
}
11801184
],
11811185
"metadata": {

notebooks/community/ml_ops/stage3/get_started_with_automl_pipeline_components.ipynb

Lines changed: 33 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -436,7 +436,8 @@
436436
},
437437
"outputs": [],
438438
"source": [
439-
"! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
439+
"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
440+
]
440441
},
441442
{
442443
"cell_type": "markdown",
@@ -455,7 +456,8 @@
455456
},
456457
"outputs": [],
457458
"source": [
458-
"! gcloud storage ls --all-versions --long $BUCKET_URI" ]
459+
"! gcloud storage ls --all-versions --long $BUCKET_URI"
460+
]
459461
},
460462
{
461463
"cell_type": "markdown",
@@ -524,9 +526,10 @@
524526
},
525527
"outputs": [],
526528
"source": [
527-
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n",
528-
# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.
529-
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ]
529+
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
530+
"\n",
531+
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
532+
]
530533
},
531534
{
532535
"cell_type": "markdown",
@@ -780,7 +783,8 @@
780783
},
781784
"outputs": [],
782785
"source": [
783-
"test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", "if len(str(test_items[0]).split(\",\")) == 3:\n",
786+
"test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n",
787+
"if len(str(test_items[0]).split(\",\")) == 3:\n",
784788
" _, test_item_1, test_label_1 = str(test_items[0]).split(\",\")\n",
785789
" _, test_item_2, test_label_2 = str(test_items[1]).split(\",\")\n",
786790
"else:\n",
@@ -813,7 +817,9 @@
813817
"file_1 = test_item_1.split(\"/\")[-1]\n",
814818
"file_2 = test_item_2.split(\"/\")[-1]\n",
815819
"\n",
816-
"! gcloud storage cp $test_item_1 $BUCKET_URI/$file_1\n", "! gcloud storage cp $test_item_2 $BUCKET_URI/$file_2\n", "\n",
820+
"! gcloud storage cp $test_item_1 $BUCKET_URI/$file_1\n",
821+
"! gcloud storage cp $test_item_2 $BUCKET_URI/$file_2\n",
822+
"\n",
817823
"test_item_1 = BUCKET_URI + \"/\" + file_1\n",
818824
"test_item_2 = BUCKET_URI + \"/\" + file_2"
819825
]
@@ -852,7 +858,8 @@
852858
" f.write(json.dumps(data) + \"\\n\")\n",
853859
"\n",
854860
"print(gcs_input_uri)\n",
855-
"! gcloud storage cat $gcs_input_uri" ]
861+
"! gcloud storage cat $gcs_input_uri"
862+
]
856863
},
857864
{
858865
"cell_type": "markdown",
@@ -966,11 +973,14 @@
966973
" + \"/evaluation_metrics\"\n",
967974
" )\n",
968975
" if tf.io.gfile.exists(EXECUTE_OUTPUT):\n",
969-
" ! gcloud storage cat $EXECUTE_OUTPUT\n", " return EXECUTE_OUTPUT\n",
976+
" ! gcloud storage cat $EXECUTE_OUTPUT\n",
977+
" return EXECUTE_OUTPUT\n",
970978
" elif tf.io.gfile.exists(GCP_RESOURCES):\n",
971-
" ! gcloud storage cat $GCP_RESOURCES\n", " return GCP_RESOURCES\n",
979+
" ! gcloud storage cat $GCP_RESOURCES\n",
980+
" return GCP_RESOURCES\n",
972981
" elif tf.io.gfile.exists(EVAL_METRICS):\n",
973-
" ! gcloud storage cat $EVAL_METRICS\n", " return EVAL_METRICS\n",
982+
" ! gcloud storage cat $EVAL_METRICS\n",
983+
" return EVAL_METRICS\n",
974984
"\n",
975985
" return None\n",
976986
"\n",
@@ -981,14 +991,16 @@
981991
"print(\"automl-image-training-job\")\n",
982992
"artifacts = print_pipeline_output(pipeline, \"automl-image-training-job\")\n",
983993
"print(\"\\n\\n\")\n",
984-
"output = !gcloud storage cat $artifacts\n", "output = json.loads(output[0])\n",
994+
"output = !gcloud storage cat $artifacts\n",
995+
"output = json.loads(output[0])\n",
985996
"model_id = output[\"artifacts\"][\"model\"][\"artifacts\"][0][\"metadata\"][\"resourceName\"]\n",
986997
"print(\"\\n\")\n",
987998
"print(model_id)\n",
988999
"print(\"endpoint-create\")\n",
9891000
"artifacts = print_pipeline_output(pipeline, \"endpoint-create\")\n",
9901001
"print(\"\\n\\n\")\n",
991-
"output = !gcloud storage cat $artifacts\n", "output = json.loads(output[0])\n",
1002+
"output = !gcloud storage cat $artifacts\n",
1003+
"output = json.loads(output[0])\n",
9921004
"endpoint_id = output[\"artifacts\"][\"endpoint\"][\"artifacts\"][0][\"metadata\"][\n",
9931005
" \"resourceName\"\n",
9941006
"]\n",
@@ -1002,14 +1014,16 @@
10021014
"print(\"\\n\\n\")\n",
10031015
"print(\"model-batch-predict\")\n",
10041016
"artifacts = print_pipeline_output(pipeline, \"model-batch-predict\")\n",
1005-
"output = !gcloud storage cat $artifacts\n", "output = json.loads(output[0])\n",
1017+
"output = !gcloud storage cat $artifacts\n",
1018+
"output = json.loads(output[0])\n",
10061019
"print(\"\\n\\n\")\n",
10071020
"print(\n",
10081021
" output[\"artifacts\"][\"batchpredictionjob\"][\"artifacts\"][0][\"metadata\"][\n",
10091022
" \"gcsOutputDirectory\"\n",
10101023
" ]\n",
10111024
")\n",
1012-
"output = !gcloud storage cat $artifacts\n", "output = json.loads(output[0])\n",
1025+
"output = !gcloud storage cat $artifacts\n",
1026+
"output = json.loads(output[0])\n",
10131027
"batch_job_id = output[\"artifacts\"][\"batchpredictionjob\"][\"artifacts\"][0][\"metadata\"][\n",
10141028
" \"resourceName\"\n",
10151029
"]"
@@ -1089,7 +1103,8 @@
10891103
},
10901104
"outputs": [],
10911105
"source": [
1092-
"test_item = !gcloud storage cat $IMPORT_FILE | head -n1\n", "if len(str(test_item[0]).split(\",\")) == 3:\n",
1106+
"test_item = !gcloud storage cat $IMPORT_FILE | head -n1\n",
1107+
"if len(str(test_item[0]).split(\",\")) == 3:\n",
10931108
" _, test_item, test_label = str(test_item[0]).split(\",\")\n",
10941109
"else:\n",
10951110
" test_item, test_label = str(test_item[0]).split(\",\")\n",
@@ -1204,7 +1219,8 @@
12041219
"delete_bucket = False\n",
12051220
"\n",
12061221
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
1207-
" ! gcloud storage rm --recursive $BUCKET_URI" ]
1222+
" ! gcloud storage rm --recursive $BUCKET_URI"
1223+
]
12081224
}
12091225
],
12101226
"metadata": {

notebooks/community/model_garden/model_garden_tfvision_image_object_detection.ipynb

Lines changed: 13 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -202,15 +202,20 @@
202202
"# Download config files.\n",
203203
"CONFIG_DIR = os.path.join(BUCKET_URI, \"config\")\n",
204204
"! wget https://raw.githubusercontent.com/tensorflow/models/master/official/vision/configs/experiments/retinanet/coco_spinenet49_gpu_multiworker_mirrored.yaml\n",
205-
"! gcloud storage cp coco_spinenet49_gpu_multiworker_mirrored.yaml $CONFIG_DIR/\n", "\n",
205+
"! gcloud storage cp coco_spinenet49_gpu_multiworker_mirrored.yaml $CONFIG_DIR/\n",
206+
"\n",
206207
"! wget https://raw.githubusercontent.com/tensorflow/models/master/official/vision/configs/experiments/retinanet/coco_spinenet96_gpu_multiworker_mirrored.yaml\n",
207-
"! gcloud storage cp coco_spinenet96_gpu_multiworker_mirrored.yaml $CONFIG_DIR/\n", "\n",
208+
"! gcloud storage cp coco_spinenet96_gpu_multiworker_mirrored.yaml $CONFIG_DIR/\n",
209+
"\n",
208210
"! wget https://raw.githubusercontent.com/tensorflow/models/master/official/vision/configs/experiments/retinanet/coco_spinenet143_gpu_multiworker_mirrored.yaml\n",
209-
"! gcloud storage cp coco_spinenet143_gpu_multiworker_mirrored.yaml $CONFIG_DIR/\n", "\n",
211+
"! gcloud storage cp coco_spinenet143_gpu_multiworker_mirrored.yaml $CONFIG_DIR/\n",
212+
"\n",
210213
"! wget https://raw.githubusercontent.com/tensorflow/models/master/official/projects/yolo/configs/experiments/yolov4/detection/scaled_yolov4_1280_gpu.yaml\n",
211-
"! gcloud storage cp scaled_yolov4_1280_gpu.yaml $CONFIG_DIR/\n", "\n",
214+
"! gcloud storage cp scaled_yolov4_1280_gpu.yaml $CONFIG_DIR/\n",
215+
"\n",
212216
"! wget https://raw.githubusercontent.com/tensorflow/models/master/official/projects/yolo/configs/experiments/yolov7/detection/yolov7_gpu.yaml\n",
213-
"! gcloud storage cp yolov7_gpu.yaml $CONFIG_DIR/" ]
217+
"! gcloud storage cp yolov7_gpu.yaml $CONFIG_DIR/"
218+
]
214219
},
215220
{
216221
"cell_type": "markdown",
@@ -494,7 +499,8 @@
494499
" checkpoint_path = os.path.relpath(checkpoint_path, checkpoint_name)\n",
495500
" break\n",
496501
"\n",
497-
" ! gcloud storage cp --recursive $checkpoint_name $CHECKPOINT_BUCKET/\n", " checkpoint_uri = os.path.join(CHECKPOINT_BUCKET, checkpoint_name, checkpoint_path)\n",
502+
" ! gcloud storage cp --recursive $checkpoint_name $CHECKPOINT_BUCKET/\n",
503+
" checkpoint_uri = os.path.join(CHECKPOINT_BUCKET, checkpoint_name, checkpoint_path)\n",
498504
" print(\"Checkpoint uploaded to\", checkpoint_uri)\n",
499505
" return checkpoint_uri"
500506
]
@@ -926,9 +932,7 @@
926932
" serving_container_image_uri=PREDICTION_CONTAINER_URI,\n",
927933
" serving_container_args=SERVING_CONTAINER_ARGS,\n",
928934
" serving_container_environment_variables=serving_env,\n",
929-
" model_garden_source_model_name=(\n",
930-
" f\"publishers/google/models/{publisher_model_id}\",\n",
931-
" ),\n",
935+
" model_garden_source_model_name=(f\"publishers/google/models/{publisher_model_id}\",),\n",
932936
")\n",
933937
"\n",
934938
"model.wait()\n",

notebooks/community/model_garden/model_garden_timesfm_deployment_on_vertex.ipynb

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -162,10 +162,11 @@
162162
"if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
163163
" BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
164164
" BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
165-
" ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
165+
" ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n",
166+
"else:\n",
166167
" assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
167-
# Note: The format of the full listing output is different. gcloud storage uses a title case for keys and will not display a field if its value is "None".
168-
" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
168+
" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
169+
" bucket_region = shell_output[0].strip().lower()\n",
169170
" if bucket_region != REGION:\n",
170171
" raise ValueError(\n",
171172
" \"Bucket region %s is different from notebook region %s\"\n",
@@ -189,7 +190,6 @@
189190
"\n",
190191
"\n",
191192
"# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
192-
"! # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n",
193193
"! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
194194
"\n",
195195
"! gcloud config set project $PROJECT_ID\n",
@@ -213,7 +213,8 @@
213213
" MODEL_BUCKET,\n",
214214
")\n",
215215
"\n",
216-
"! gcloud storage cp --recursive $VERTEX_AI_MODEL_GARDEN_TIMESFM/$MODEL_VARIANT $MODEL_BUCKET\n", "\n",
216+
"! gcloud storage cp --recursive $VERTEX_AI_MODEL_GARDEN_TIMESFM/$MODEL_VARIANT $MODEL_BUCKET\n",
217+
"\n",
217218
"model_path_prefix = MODEL_BUCKET"
218219
]
219220
},
@@ -907,7 +908,8 @@
907908
"\n",
908909
"delete_bucket = False # @param {type:\"boolean\"}\n",
909910
"if delete_bucket:\n",
910-
" ! gcloud storage rm --recursive $BUCKET_NAME" ]
911+
" ! gcloud storage rm --recursive $BUCKET_NAME"
912+
]
911913
},
912914
{
913915
"cell_type": "markdown",

0 commit comments

Comments (0)