Commit 56dad72

Merge pull request #52 from bhandarivijay-png/ai-gsutil-migration-389849207017489a85a6d8c430370603
Changes for 4320
2 parents 0dbc24a + ca74e1c commit 56dad72

10 files changed: 94 additions, 52 deletions
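
The branch name tags this commit as part of the gsutil-to-gcloud storage migration; the changes below mostly split notebook source entries that the migration tooling had joined onto single JSON lines. For reference, the gcloud storage commands that recur in these diffs replace the following gsutil commands, per Google's gsutil-to-gcloud migration guidance; the bucket and path names below are illustrative placeholders, not values from this commit:

# gsutil mb -l us-central1 gs://example-bucket
gcloud storage buckets create --location=us-central1 gs://example-bucket

# gsutil ls -a -l gs://example-bucket
gcloud storage ls --all-versions --long gs://example-bucket

# gsutil cp -r ./local_dir gs://example-bucket/dir
gcloud storage cp --recursive ./local_dir gs://example-bucket/dir

# gsutil cat gs://example-bucket/object.json
gcloud storage cat gs://example-bucket/object.json

# gsutil rm -r gs://example-bucket
gcloud storage rm --recursive gs://example-bucket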

notebooks/community/ml_ops/stage2/get_started_vertex_training_r.ipynb

Lines changed: 6 additions & 3 deletions
@@ -445,7 +445,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
+"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -464,7 +465,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage ls --all-versions --long $BUCKET_URI" ]
+"! gcloud storage ls --all-versions --long $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -1425,7 +1427,8 @@
 "delete_bucket = False\n",
 "\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-" ! gcloud storage rm --recursive $BUCKET_URI" ]
+" ! gcloud storage rm --recursive $BUCKET_URI"
+]
 }
 ],
 "metadata": {

notebooks/community/ml_ops/stage3/get_started_with_automl_tabular_pipeline_workflow.ipynb

Lines changed: 10 additions & 5 deletions
@@ -450,7 +450,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
+"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -469,7 +470,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage ls --all-versions --long $BUCKET_URI" ]
+"! gcloud storage ls --all-versions --long $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -494,7 +496,8 @@
 "source": [
 "# set GCS bucket object TTL to 7 days\n",
 "! echo '{\"rule\":[{\"action\": {\"type\": \"Delete\"},\"condition\": {\"age\": 7}}]}' > gcs_lifecycle.tmp\n",
-"! gcloud storage buckets update --lifecycle-file=gcs_lifecycle.tmp {BUCKET_URI}\n", "! rm gcs_lifecycle.tmp"
+"! gcloud storage buckets update --lifecycle-file=gcs_lifecycle.tmp {BUCKET_URI}\n",
+"! rm gcs_lifecycle.tmp"
 ]
 },
 {
@@ -1044,7 +1047,8 @@
 "\n",
 "print(\"trained model without custom TF ops:\", model_artifact_no_custom_op)\n",
 "\n",
-"! gcloud storage ls {model_artifact_no_custom_op}\n", "\n",
+"! gcloud storage ls {model_artifact_no_custom_op}\n",
+"\n",
 "model_evaluation = get_evaluation_metrics(pipeline_task_details)\n",
 "print(\"Model evaluation artifacts:\", model_evaluation)"
 ]
@@ -1333,7 +1337,8 @@
 "\n",
 "delete_bucket = False\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-" ! gcloud storage rm --recursive --continue-on-error $BUCKET_URI" ]
+" ! gcloud storage rm --recursive --continue-on-error $BUCKET_URI"
+]
 }
 ],
 "metadata": {

notebooks/community/ml_ops/stage3/get_started_with_machine_management.ipynb

Lines changed: 18 additions & 11 deletions
@@ -450,7 +450,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
+"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -469,7 +470,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage ls --all-versions --long $BUCKET_URI" ]
+"! gcloud storage ls --all-versions --long $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -539,10 +541,10 @@
 },
 "outputs": [],
 "source": [
-"! # Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n",
-"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n",
-# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.
-"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ]
+"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
+"\n",
+"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
+]
 },
 {
 "cell_type": "markdown",
@@ -1013,11 +1015,14 @@
 " + \"/evaluation_metrics\"\n",
 " )\n",
 " if tf.io.gfile.exists(EXECUTE_OUTPUT):\n",
-" ! gcloud storage cat $EXECUTE_OUTPUT\n", " return EXECUTE_OUTPUT\n",
+" ! gcloud storage cat $EXECUTE_OUTPUT\n",
+" return EXECUTE_OUTPUT\n",
 " elif tf.io.gfile.exists(GCP_RESOURCES):\n",
-" ! gcloud storage cat $GCP_RESOURCES\n", " return GCP_RESOURCES\n",
+" ! gcloud storage cat $GCP_RESOURCES\n",
+" return GCP_RESOURCES\n",
 " elif tf.io.gfile.exists(EVAL_METRICS):\n",
-" ! gcloud storage cat $EVAL_METRICS\n", " return EVAL_METRICS\n",
+" ! gcloud storage cat $EVAL_METRICS\n",
+" return EVAL_METRICS\n",
 "\n",
 " return None\n",
 "\n",
@@ -1248,7 +1253,8 @@
 "print(\"\\n\\n\")\n",
 "print(\"model-upload\")\n",
 "artifacts = print_pipeline_output(pipeline, \"model-upload\")\n",
-"output = !gcloud storage cat $artifacts\n", "output = json.loads(output[0])\n",
+"output = !gcloud storage cat $artifacts\n",
+"output = json.loads(output[0])\n",
 "model_id = output[\"artifacts\"][\"model\"][\"artifacts\"][0][\"metadata\"][\"resourceName\"]\n",
 "print(\"\\n\")\n",
 "print(\"MODEL ID\", model_id)\n",
@@ -1326,7 +1332,8 @@
 "delete_bucket = False\n",
 "\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-" ! gcloud storage rm --recursive $BUCKET_URI" ]
+" ! gcloud storage rm --recursive $BUCKET_URI"
+]
 }
 ],
 "metadata": {

notebooks/community/ml_ops/stage5/get_started_with_vertex_private_endpoints.ipynb

Lines changed: 8 additions & 4 deletions
@@ -458,7 +458,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
+"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -477,7 +478,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage ls --all-versions --long $BUCKET_URI" ]
+"! gcloud storage ls --all-versions --long $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -1127,7 +1129,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage cp gs://cloud-ml-data/img/flower_photos/daisy/100080576_f52e8ee070_n.jpg test.jpg" ]
+"! gcloud storage cp gs://cloud-ml-data/img/flower_photos/daisy/100080576_f52e8ee070_n.jpg test.jpg"
+]
 },
 {
 "cell_type": "code",
@@ -1311,7 +1314,8 @@
 " print(e)\n",
 "\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ]
+" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
+]
 }
 ],
 "metadata": {

notebooks/community/model_garden/model_garden_tfvision_image_segmentation.ipynb

Lines changed: 17 additions & 9 deletions
@@ -171,9 +171,11 @@
 "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
 " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
 " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
-" ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
+" ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n",
+"else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
+" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
+" bucket_region = shell_output[0].strip().lower()\n",
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",
@@ -197,7 +199,6 @@
 "\n",
 "\n",
 "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
-"# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage bucket add-iam-policy-binding and/or gcloud storage bucket remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n",
 "! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
 "\n",
 "! gcloud config set project $PROJECT_ID\n",
@@ -215,7 +216,8 @@
 "# Download config files.\n",
 "CONFIG_DIR = os.path.join(BUCKET_URI, \"config\")\n",
 "! wget https://raw.githubusercontent.com/tensorflow/models/master/official/vision/configs/experiments/semantic_segmentation/deeplabv3plus_resnet101_cityscapes_gpu_multiworker_mirrored.yaml\n",
-"! gcloud storage cp deeplabv3plus_resnet101_cityscapes_gpu_multiworker_mirrored.yaml $CONFIG_DIR/" ]
+"! gcloud storage cp deeplabv3plus_resnet101_cityscapes_gpu_multiworker_mirrored.yaml $CONFIG_DIR/"
+]
 },
 {
 "cell_type": "markdown",
@@ -356,7 +358,8 @@
 " checkpoint_path = os.path.relpath(checkpoint_path, checkpoint_name)\n",
 " break\n",
 "\n",
-" ! gcloud storage cp --recursive $checkpoint_name $CHECKPOINT_BUCKET/\n", " checkpoint_uri = os.path.join(CHECKPOINT_BUCKET, checkpoint_name, checkpoint_path)\n",
+" ! gcloud storage cp --recursive $checkpoint_name $CHECKPOINT_BUCKET/\n",
+" checkpoint_uri = os.path.join(CHECKPOINT_BUCKET, checkpoint_name, checkpoint_path)\n",
 " print(\"Checkpoint uploaded to\", checkpoint_uri)\n",
 " return checkpoint_uri\n",
 "\n",
@@ -372,7 +375,8 @@
 " \"\"\"\n",
 " label_map_filename = os.path.basename(label_map_yaml_filepath)\n",
 " subprocess.check_output(\n",
-" [\"gcloud\", \"storage\", \"cp\", label_map_yaml_filepath, label_map_filename],\n", " stderr=subprocess.STDOUT,\n",
+" [\"gcloud\", \"storage\", \"cp\", label_map_yaml_filepath, label_map_filename],\n",
+" stderr=subprocess.STDOUT,\n",
 " )\n",
 " with open(label_map_filename, \"rb\") as input_file:\n",
 " label_map = yaml.safe_load(input_file.read())[\"label_map\"]\n",
@@ -552,7 +556,8 @@
 " current_trial_best_ckpt_evaluation_filepath = os.path.join(\n",
 " current_trial_best_ckpt_dir, \"info.json\"\n",
 " )\n",
-" ! gcloud storage cp $current_trial_best_ckpt_evaluation_filepath .\n", " with open(\"info.json\", \"r\") as f:\n",
+" ! gcloud storage cp $current_trial_best_ckpt_evaluation_filepath .\n",
+" with open(\"info.json\") as f:\n",
 " eval_metric_results = json.load(f)\n",
 " current_performance = eval_metric_results[evaluation_metric]\n",
 " if current_performance > best_performance:\n",
@@ -734,7 +739,9 @@
 " if new_width <= 0:\n",
 " test_file = os.path.basename(test_filepath)\n",
 " subprocess.check_output(\n",
-" [\"gcloud\", \"storage\", \"cp\", test_filepath, test_file], stderr=subprocess.STDOUT\n", " )\n",
+" [\"gcloud\", \"storage\", \"cp\", test_filepath, test_file],\n",
+" stderr=subprocess.STDOUT,\n",
+" )\n",
 " with open(test_file, \"rb\") as input_file:\n",
 " encoded_string = base64.b64encode(input_file.read()).decode(\"utf-8\")\n",
 " else:\n",
@@ -969,7 +976,8 @@
 "\n",
 "delete_bucket = False # @param {type:\"boolean\"}\n",
 "if delete_bucket:\n",
-" ! gcloud storage rm --recursive $BUCKET_NAME\n", "\n",
+" ! gcloud storage rm --recursive $BUCKET_NAME\n",
+"\n",
 "# Delete custom and hpt jobs.\n",
 "if data_converter_custom_job.list(filter=f'display_name=\"{data_converter_job_name}\"'):\n",
 " data_converter_custom_job.delete()\n",

notebooks/official/experiments/get_started_with_vertex_experiments.ipynb

Lines changed: 6 additions & 3 deletions
@@ -321,7 +321,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets create --location $LOCATION $BUCKET_URI" ]
+"! gcloud storage buckets create --location $LOCATION $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -1237,7 +1238,8 @@
 "! rm -f custom.tar custom.tar.gz\n",
 "! tar cvf custom.tar custom\n",
 "! gzip custom.tar\n",
-"! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer.tar.gz" ]
+"! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer.tar.gz"
+]
 },
 {
 "cell_type": "markdown",
@@ -1449,7 +1451,8 @@
 "delete_bucket = False\n",
 "\n",
 "if delete_bucket:\n",
-" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ]
+" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
+]
 }
 ],
 "metadata": {

notebooks/official/pipelines/custom_tabular_train_batch_pred_bq_pipeline.ipynb

Lines changed: 10 additions & 5 deletions
@@ -308,7 +308,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI" ]
+"! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -380,8 +381,10 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n",
-"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ]
+"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
+"\n",
+"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
+]
 },
 {
 "cell_type": "markdown",
@@ -834,7 +837,8 @@
 },
 "outputs": [],
 "source": [
-"!gcloud storage cp --recursive python_package/dist/* $BUCKET_URI/training_package/" ]
+"!gcloud storage cp --recursive python_package/dist/* $BUCKET_URI/training_package/"
+]
 },
 {
 "cell_type": "markdown",
@@ -1211,7 +1215,8 @@
 "\n",
 "# Delete Cloud Storage objects\n",
 "if delete_bucket:\n",
-" ! gcloud storage rm --recursive $BUCKET_URI\n", "\n",
+" ! gcloud storage rm --recursive $BUCKET_URI\n",
+"\n",
 "! rm $PIPELINE_FILE_NAME"
 ]
 }

notebooks/official/sdk/SDK_Custom_Training_Python_Package_Managed_Text_Dataset_Tensorflow_Serving_Container.ipynb

Lines changed: 8 additions & 4 deletions
@@ -324,7 +324,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets create --location $LOCATION --project $PROJECT_ID $BUCKET_URI" ]
+"! gcloud storage buckets create --location $LOCATION --project $PROJECT_ID $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -594,7 +595,8 @@
 },
 "outputs": [],
 "source": [
-"!gcloud storage ls gs://$BUCKET_NAME/$GCS_PREFIX/data" ]
+"!gcloud storage ls gs://$BUCKET_NAME/$GCS_PREFIX/data"
+]
 },
 {
 "cell_type": "markdown",
@@ -1357,7 +1359,8 @@
 },
 "outputs": [],
 "source": [
-"!gcloud storage ls $gcs_source_test_url" ]
+"!gcloud storage ls $gcs_source_test_url"
+]
 },
 {
 "cell_type": "code",
@@ -1485,7 +1488,8 @@
 "batch_predict_job.delete()\n",
 "\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-" ! gcloud storage rm --recursive $BUCKET_URI" ]
+" ! gcloud storage rm --recursive $BUCKET_URI"
+]
 }
 ],
 "metadata": {
