
Commit e7404ae

Merge pull request #56 from bhandarivijay-png/ai-gsutil-migration-ae277dae317e4c0f8a6aa44815ef7ded
Changes for 4316
2 parents a5a0dee + ca9a3a2 commit e7404ae

10 files changed: +97 −65 lines

notebooks/community/migration/UJ11 HyperParameter Tuning Training Job with TensorFlow.ipynb

Lines changed: 10 additions & 9 deletions
@@ -325,7 +325,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets create --location=$REGION gs://$BUCKET_NAME" ]
+"! gcloud storage buckets create --location=$REGION gs://$BUCKET_NAME"
+]
 },
 {
 "cell_type": "markdown",
@@ -344,7 +345,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ]
+"! gcloud storage ls --all-versions --long gs://$BUCKET_NAME"
+]
 },
 {
 "cell_type": "markdown",
@@ -381,10 +383,7 @@
 "import sys\n",
 "import time\n",
 "\n",
-"from google.cloud.aiplatform import gapic as aip\n",
-"from google.protobuf import json_format\n",
-"from google.protobuf.json_format import MessageToJson, ParseDict\n",
-"from google.protobuf.struct_pb2 import Struct, Value"
+"from google.cloud.aiplatform import gapic as aip"
 ]
 },
 {
@@ -669,7 +668,8 @@
 "! rm -f custom.tar custom.tar.gz\n",
 "! tar cvf custom.tar custom\n",
 "! gzip custom.tar\n",
-"! gcloud storage cp custom.tar.gz gs://$BUCKET_NAME/hpt_boston_housing.tar.gz" ]
+"! gcloud storage cp custom.tar.gz gs://$BUCKET_NAME/hpt_boston_housing.tar.gz"
+]
 },
 {
 "cell_type": "markdown",
@@ -1351,12 +1351,13 @@
 " print(e)\n",
 "\n",
 "if delete_bucket and \"BUCKET_NAME\" in globals():\n",
-" ! gcloud storage rm --recursive gs://$BUCKET_NAME" ]
+" ! gcloud storage rm --recursive gs://$BUCKET_NAME"
+]
 }
 ],
 "metadata": {
 "colab": {
-"name": "UJ11 unified HyperParameter Tuning Training Job with TensorFlow.ipynb",
+"name": "UJ11 HyperParameter Tuning Training Job with TensorFlow.ipynb",
 "toc_visible": true
 },
 "kernelspec": {

notebooks/community/ml_ops/stage6/get_started_with_custom_text_model_batch.ipynb

Lines changed: 15 additions & 7 deletions
@@ -450,7 +450,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
+"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -469,7 +470,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage ls --all-versions --long $BUCKET_URI" ]
+"! gcloud storage ls --all-versions --long $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -697,7 +699,8 @@
 "MODEL_DIR = f\"{BUCKET_URI}\"\n",
 "tfhub_model.save(\"model\")\n",
 "\n",
-"! gcloud storage cp --recursive model {MODEL_DIR}\n", "! rm -rf model\n",
+"! gcloud storage cp --recursive model {MODEL_DIR}\n",
+"! rm -rf model\n",
 "\n",
 "MODEL_DIR = MODEL_DIR + \"/model\""
 ]
@@ -859,7 +862,8 @@
 " data = \"one two\"\n",
 " f.write('\"' + data + '\"\\n')\n",
 "\n",
-"! gcloud storage cp test.jsonl {gcs_input_uri}" ]
+"! gcloud storage cp test.jsonl {gcs_input_uri}"
+]
 },
 {
 "cell_type": "markdown",
@@ -1028,7 +1032,9 @@
 "with open(\"test2.txt\", \"w\") as f:\n",
 " f.write('\"search Google or type an Url\"\\n')\n",
 "\n",
-"! gcloud storage cp test1.txt {BUCKET_URI}/test1.txt\n", "! gcloud storage cp test2.txt {BUCKET_URI}/test2.txt\n", "\n",
+"! gcloud storage cp test1.txt {BUCKET_URI}/test1.txt\n",
+"! gcloud storage cp test2.txt {BUCKET_URI}/test2.txt\n",
+"\n",
 "! rm test1.txt test2.txt\n",
 "\n",
 "with open(\"test.txt\", \"w\") as f:\n",
@@ -1037,7 +1043,8 @@
 "\n",
 "gcs_input_uri = f\"{BUCKET_URI}/test.txt\"\n",
 "\n",
-"! gcloud storage cp test.txt $gcs_input_uri" ]
+"! gcloud storage cp test.txt $gcs_input_uri"
+]
 },
 {
 "cell_type": "markdown",
@@ -1181,7 +1188,8 @@
 " batch_prediction_job.delete()\n",
 "\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ]
+" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
+]
 }
 ],
 "metadata": {

notebooks/community/ml_ops/stage6/get_started_with_optimized_tfe_bert.ipynb

Lines changed: 6 additions & 4 deletions
@@ -458,7 +458,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
+"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -477,7 +478,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage ls --all-versions --long $BUCKET_URI" ]
+"! gcloud storage ls --all-versions --long $BUCKET_URI"
+]
 },
 {
 "cell_type": "markdown",
@@ -502,7 +504,6 @@
 "import google.cloud.aiplatform as aip\n",
 "import tensorflow as tf\n",
 "import tensorflow_hub as hub\n",
-"import tensorflow_text\n",
 "from official.nlp import optimization # to create AdamW optimizer"
 ]
 },
@@ -1218,7 +1219,8 @@
 " print(e)\n",
 "\n",
 "if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
-" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ]
+" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
+]
 }
 ],
 "metadata": {

notebooks/community/model_garden/model_garden_tfvision_image_classification.ipynb

Lines changed: 21 additions & 17 deletions
@@ -169,10 +169,11 @@
 "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
 " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
 " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
-" ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n", "else:\n",
+" ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n",
+"else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-" # Note: The format of the full listing output is different. gcloud storage uses a title case for keys and will not display a field if its value is \"None\".\n",
-" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
+" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
+" bucket_region = shell_output[0].strip().lower()\n",
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",
@@ -196,8 +197,8 @@
 "\n",
 "\n",
 "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
-"# Note: Migrating scripts using gsutil iam ch is more complex than get or set. You need to replace the single iam ch command with a series of gcloud storage buckets add-iam-policy-binding and/or gcloud storage buckets remove-iam-policy-binding commands, or replicate the read-modify-write loop.\n",
-"! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n",
+"! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
+"\n",
 "! gcloud config set project $PROJECT_ID\n",
 "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
 "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"\n",
@@ -219,7 +220,8 @@
 " destination = os.path.join(CONFIG_DIR, filename)\n",
 " print(\"Copy\", url, \"to\", destination)\n",
 " ! wget \"$url\" -O \"$filename\"\n",
-" ! gcloud storage cp \"$filename\" \"$destination\"\n", "\n",
+" ! gcloud storage cp \"$filename\" \"$destination\"\n",
+"\n",
 "\n",
 "upload_config_to_gcs(\n",
 " \"https://raw.githubusercontent.com/tensorflow/models/master/official/vision/configs/experiments/image_classification/imagenet_resnet50_gpu.yaml\"\n",
@@ -455,7 +457,8 @@
 " checkpoint_path = os.path.relpath(checkpoint_path, checkpoint_name)\n",
 " break\n",
 "\n",
-" ! gcloud storage cp --recursive $checkpoint_name $CHECKPOINT_BUCKET/\n", " checkpoint_uri = os.path.join(CHECKPOINT_BUCKET, checkpoint_name, checkpoint_path)\n",
+" ! gcloud storage cp --recursive $checkpoint_name $CHECKPOINT_BUCKET/\n",
+" checkpoint_uri = os.path.join(CHECKPOINT_BUCKET, checkpoint_name, checkpoint_path)\n",
 " print(\"Checkpoint uploaded to\", checkpoint_uri)\n",
 " return checkpoint_uri\n",
 "\n",
@@ -585,7 +588,8 @@
 " current_trial_best_ckpt_evaluation_filepath = os.path.join(\n",
 " current_trial_best_ckpt_dir, \"info.json\"\n",
 " )\n",
-" ! gcloud storage cp $current_trial_best_ckpt_evaluation_filepath .\n", " with open(\"info.json\", \"r\") as f:\n",
+" ! gcloud storage cp $current_trial_best_ckpt_evaluation_filepath .\n",
+" with open(\"info.json\") as f:\n",
 " eval_metric_results = json.load(f)\n",
 " current_performance = eval_metric_results[evaluation_metric]\n",
 " if current_performance > best_performance:\n",
@@ -692,9 +696,7 @@
 " serving_container_image_uri=PREDICTION_CONTAINER_URI,\n",
 " serving_container_args=SERVING_CONTAINER_ARGS,\n",
 " serving_container_environment_variables=serving_env,\n",
-" model_garden_source_model_name=(\n",
-" f\"publishers/google/models/{publisher_model_id}\"\n",
-" ),\n",
+" model_garden_source_model_name=(f\"publishers/google/models/{publisher_model_id}\"),\n",
 ")\n",
 "\n",
 "models[\"model_icn\"].wait()\n",
@@ -722,9 +724,7 @@
 " accelerator_count=1,\n",
 " min_replica_count=1,\n",
 " max_replica_count=1,\n",
-" system_labels={\n",
-" \"NOTEBOOK_NAME\": \"model_garden_tfvision_image_classification.ipynb\"\n",
-" },\n",
+" system_labels={\"NOTEBOOK_NAME\": \"model_garden_tfvision_image_classification.ipynb\"},\n",
 ")\n",
 "\n",
 "endpoint_id = endpoints[\"endpoint_icn\"].name\n",
@@ -772,7 +772,8 @@
 " \"\"\"\n",
 " label_map_filename = os.path.basename(label_map_yaml_filepath)\n",
 " subprocess.check_output(\n",
-" [\"gcloud\", \"storage\", \"cp\", label_map_yaml_filepath, label_map_filename],\n", " stderr=subprocess.STDOUT,\n",
+" [\"gcloud\", \"storage\", \"cp\", label_map_yaml_filepath, label_map_filename],\n",
+" stderr=subprocess.STDOUT,\n",
 " )\n",
 " with open(label_map_filename, \"rb\") as input_file:\n",
 " label_map = yaml.safe_load(input_file.read())[\"label_map\"]\n",
@@ -792,7 +793,9 @@
 " if new_width <= 0:\n",
 " test_file = os.path.basename(test_filepath)\n",
 " subprocess.check_output(\n",
-" [\"gcloud\", \"storage\", \"cp\", test_filepath, test_file], stderr=subprocess.STDOUT\n", " )\n",
+" [\"gcloud\", \"storage\", \"cp\", test_filepath, test_file],\n",
+" stderr=subprocess.STDOUT,\n",
+" )\n",
 " with open(test_file, \"rb\") as input_file:\n",
 " encoded_string = base64.b64encode(input_file.read()).decode(\"utf-8\")\n",
 " else:\n",
@@ -902,7 +905,8 @@
 "\n",
 "delete_bucket = False # @param {type:\"boolean\"}\n",
 "if delete_bucket:\n",
-" ! gcloud storage rm --recursive $BUCKET_NAME" ]
+" ! gcloud storage rm --recursive $BUCKET_NAME"
+]
 }
 ],
 "metadata": {

notebooks/community/model_garden/model_garden_timesfm_2_0_deployment_on_vertex.ipynb

Lines changed: 8 additions & 4 deletions
@@ -162,9 +162,11 @@
 "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
 " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
 " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
-" ! gcloud storage buckets create --location {REGION} {BUCKET_URI}\n", "else:\n",
+" ! gcloud storage buckets create --location {REGION} {BUCKET_URI}\n",
+"else:\n",
 " assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
-" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", " bucket_region = shell_output[0].strip().lower()\n",
+" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
+" bucket_region = shell_output[0].strip().lower()\n",
 " if bucket_region != REGION:\n",
 " raise ValueError(\n",
 " \"Bucket region %s is different from notebook region %s\"\n",
@@ -188,7 +190,8 @@
 "\n",
 "\n",
 "# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
-"! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n", "\n",
+"! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
+"\n",
 "! gcloud config set project $PROJECT_ID\n",
 "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
 "! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/aiplatform.user\"\n",
@@ -908,7 +911,8 @@
 "\n",
 "delete_bucket = False # @param {type:\"boolean\"}\n",
 "if delete_bucket:\n",
-" ! gcloud storage rm --recursive $BUCKET_NAME" ]
+" ! gcloud storage rm --recursive $BUCKET_NAME"
+]
 }
 ],
 "metadata": {

notebooks/community/reduction_server/distributed-training-reduction-server.ipynb

Lines changed: 11 additions & 6 deletions
@@ -278,7 +278,7 @@
 "\n",
 "# Get your Google Cloud project ID from gcloud\n",
 "if not os.getenv(\"IS_TESTING\"):\n",
-" shell_output=!gcloud config list --format 'value(core.project)' 2>/dev/null\n",
+" shell_output = !gcloud config list --format 'value(core.project)' 2>/dev/null\n",
 " PROJECT_ID = shell_output[0]\n",
 " print(\"Project ID: \", PROJECT_ID)"
 ]
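The change above is cosmetic (PEP 8 spacing around `=`), but the line relies on an IPython feature worth spelling out: assigning from a `!` command captures the command's stdout as a list-like SList, one element per output line. A minimal sketch:

# IPython captures `!` output as an SList: one element per stdout line,
# so index 0 is the first line (here, the configured project ID).
shell_output = !gcloud config list --format 'value(core.project)' 2>/dev/null
PROJECT_ID = shell_output[0]
print("Project ID:", PROJECT_ID)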
@@ -431,7 +431,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
+"! gcloud storage buckets create --location $REGION $BUCKET_NAME"
+]
 },
 {
 "cell_type": "markdown",
@@ -450,7 +451,8 @@
 },
 "outputs": [],
 "source": [
-"! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
+"! gcloud storage ls --all-versions --long $BUCKET_NAME"
+]
 },
 {
 "cell_type": "markdown",
@@ -536,7 +538,8 @@
 "source": [
 "# List the files\n",
 "\n",
-"! gcloud storage ls {DATASET_LOCATION}" ]
+"! gcloud storage ls {DATASET_LOCATION}"
+]
 },
 {
 "cell_type": "code",
@@ -548,7 +551,8 @@
 "source": [
 "# Examine dataset metadata\n",
 "\n",
-"! gcloud storage cat {METADATA_FILE}" ]
+"! gcloud storage cat {METADATA_FILE}"
+]
 },
 {
 "cell_type": "markdown",
@@ -1126,7 +1130,8 @@
 "outputs": [],
 "source": [
 "# Delete Cloud Storage objects that were created\n",
-"! gcloud storage rm --recursive {BUCKET_NAME}\n", "\n",
+"! gcloud storage rm --recursive {BUCKET_NAME}\n",
+"\n",
 "# Delete the training job\n",
 "delete_training_job = True\n",
 "job.delete()"
