Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -463,7 +463,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION $BUCKET_URI"
"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
]
},
{
Expand All @@ -483,7 +483,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_URI"
"! gcloud storage ls --all-versions --long $BUCKET_URI"
]
},
{
Expand Down Expand Up @@ -685,11 +685,11 @@
"source": [
"FILE = IMPORT_FILE\n",
"\n",
"count = ! gsutil cat $FILE | wc -l\n",
"count = ! gcloud storage cat $FILE | wc -l\n",
"print(\"Number of Examples\", int(count[0]))\n",
"\n",
"print(\"First 10 rows\")\n",
"! gsutil cat $FILE | head"
"! gcloud storage cat $FILE | head"
]
},
{
Expand Down Expand Up @@ -1468,7 +1468,7 @@
"delete_bucket = False\n",
"\n",
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
" ! gsutil rm -r $BUCKET_URI"
" ! gcloud storage rm --recursive $BUCKET_URI"
]
}
],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -444,7 +444,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION $BUCKET_URI"
"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
]
},
{
Expand All @@ -464,7 +464,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_URI"
"! gcloud storage ls --all-versions --long $BUCKET_URI"
]
},
{
Expand Down Expand Up @@ -1027,7 +1027,7 @@
"MODEL_DIR=$1\n",
"\n",
"mkdir -p ./serve/model/\n",
"gsutil cp -r ${MODEL_DIR} ./serve/model/ \n",
"gcloud storage cp --recursive ${MODEL_DIR} ./serve/model/ \n",
"\n",
"cat > ./serve/Dockerfile <<EOF\n",
"FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7\n",
Expand Down Expand Up @@ -1156,7 +1156,7 @@
},
"outputs": [],
"source": [
"! gsutil cp gs://cloud-ml-data/img/flower_photos/daisy/100080576_f52e8ee070_n.jpg test.jpg"
"! gcloud storage cp gs://cloud-ml-data/img/flower_photos/daisy/100080576_f52e8ee070_n.jpg test.jpg"
]
},
{
Expand Down Expand Up @@ -1450,7 +1450,7 @@
" print(e)\n",
"\n",
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
" ! gsutil rm -rf {BUCKET_URI}"
" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
]
}
],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -536,7 +536,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI"
"! gcloud storage buckets create --location $REGION --project $PROJECT_ID $BUCKET_URI"
]
},
{
Expand All @@ -556,7 +556,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_URI"
"! gcloud storage ls --all-versions --long $BUCKET_URI"
]
},
{
Expand Down Expand Up @@ -626,9 +626,9 @@
},
"outputs": [],
"source": [
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n",
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=\"serviceAccount:{SERVICE_ACCOUNT}\" --role=\"roles/storage.objectCreator\"\n",
"\n",
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI"
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
]
},
{
Expand Down Expand Up @@ -1331,7 +1331,7 @@
"outputs": [],
"source": [
"# Load the results\n",
"attributions = !gsutil cat $feat_attrs_gcs_uri\n",
"attributions = !gcloud storage cat $feat_attrs_gcs_uri\n",
"\n",
"# Print the results obtained\n",
"attributions = json.loads(attributions[0])\n",
Expand Down Expand Up @@ -1411,7 +1411,7 @@
"# Delete Cloud Storage objects\n",
"delete_bucket = False\n",
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
" ! gsutil -m rm -r $BUCKET_URI"
" ! gcloud storage rm --recursive $BUCKET_URI"
]
}
],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -533,7 +533,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI"
"! gcloud storage buckets create --location=$REGION --project=$PROJECT_ID $BUCKET_URI"
]
},
{
Expand All @@ -553,7 +553,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_URI"
"! gcloud storage ls --all-versions --long $BUCKET_URI"
]
},
{
Expand Down Expand Up @@ -623,9 +623,9 @@
},
"outputs": [],
"source": [
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator {BUCKET_URI}\n",
"! gcloud storage buckets add-iam-policy-binding {BUCKET_URI} --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

For robustness, it's recommended to quote the values for --member and --role arguments to prevent potential shell parsing issues.

! gcloud storage buckets add-iam-policy-binding {BUCKET_URI} --member=\"serviceAccount:{SERVICE_ACCOUNT}\" --role=\"roles/storage.objectCreator\"\n

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

gcloud storage buckets add-iam-policy-binding {BUCKET_URI} --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n" is a correct command.

"\n",
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer {BUCKET_URI}"
"! gcloud storage buckets add-iam-policy-binding {BUCKET_URI} --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
]
},
{
Expand Down Expand Up @@ -760,7 +760,7 @@
"PUBLIC_DATA_URI = \"gs://cloud-samples-data/vertex-ai/pipeline-deployment/datasets/oracle_retail/orders.csv\"\n",
"RAW_DATA_URI = f\"{BUCKET_URI}/{DATA_PATH}/raw/orders.csv\"\n",
"\n",
"! gsutil -m cp -R $PUBLIC_DATA_URI $RAW_DATA_URI"
"! gcloud storage cp --recursive $PUBLIC_DATA_URI $RAW_DATA_URI"
]
},
{
Expand All @@ -780,7 +780,7 @@
},
"outputs": [],
"source": [
"! gsutil cat {RAW_DATA_URI} | head"
"! gcloud storage cat {RAW_DATA_URI} | head"
]
},
{
Expand Down Expand Up @@ -1025,9 +1025,11 @@
" # Create the HTML artifact for the metrics\n",
" pretty_columns = list(\n",
" map(\n",
" lambda h: get_column_names(h)\n",
" if h != columns[0]\n",
" else h.replace(\"_\", \" \").capitalize(),\n",
" lambda h: (\n",
" get_column_names(h)\n",
" if h != columns[0]\n",
" else h.replace(\"_\", \" \").capitalize()\n",
" ),\n",
" columns,\n",
" )\n",
" )\n",
Expand Down Expand Up @@ -1377,13 +1379,13 @@
" + \"/evaluation_metrics\"\n",
" )\n",
" if tf.io.gfile.exists(EXECUTE_OUTPUT):\n",
" ! gsutil cat $EXECUTE_OUTPUT\n",
" ! gcloud storage cat $EXECUTE_OUTPUT\n",
" return EXECUTE_OUTPUT\n",
" elif tf.io.gfile.exists(GCP_RESOURCES):\n",
" ! gsutil cat $GCP_RESOURCES\n",
" ! gcloud storage cat $GCP_RESOURCES\n",
" return GCP_RESOURCES\n",
" elif tf.io.gfile.exists(EVAL_METRICS):\n",
" ! gsutil cat $EVAL_METRICS\n",
" ! gcloud storage cat $EVAL_METRICS\n",
" return EVAL_METRICS\n",
"\n",
" return None\n",
Expand Down Expand Up @@ -1455,7 +1457,7 @@
"# delete bucket\n",
"delete_bucket = True\n",
"if os.getenv(\"IS_TESTING\") or delete_bucket:\n",
" ! gsutil -m rm -r $BUCKET_URI\n",
" ! gcloud storage rm --recursive $BUCKET_URI\n",
"\n",
"\n",
"# Remove local resources\n",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -473,7 +473,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION $BUCKET_NAME"
"! gcloud storage buckets create --location=$REGION $BUCKET_NAME"
]
},
{
Expand All @@ -493,7 +493,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_NAME"
"! gcloud storage ls --all-versions --long $BUCKET_NAME"
]
},
{
Expand Down Expand Up @@ -919,7 +919,7 @@
"! rm -f custom.tar custom.tar.gz\n",
"! tar cvf custom.tar custom\n",
"! gzip custom.tar\n",
"! gsutil cp custom.tar.gz $BUCKET_NAME/trainer_boston.tar.gz"
"! gcloud storage cp custom.tar.gz $BUCKET_NAME/trainer_boston.tar.gz"
]
},
{
Expand Down Expand Up @@ -1484,7 +1484,7 @@
" print(e)\n",
"\n",
" if \"BUCKET_NAME\" in globals():\n",
" ! gsutil rm -r $BUCKET_NAME"
" ! gcloud storage rm --recursive $BUCKET_NAME"
]
}
],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -303,7 +303,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
"! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
]
},
{
Expand Down Expand Up @@ -655,9 +655,7 @@
")\n",
"TRAINING_URI = f\"{BUCKET_URI}/model-monitoring/churn/churn_training.csv\"\n",
"\n",
"! gsutil copy $PUBLIC_TRAINING_DATASET $TRAINING_URI\n",
"\n",
"TRAINING_DATASET = ml_monitoring.spec.MonitoringInput(\n",
"! gcloud storage cp $PUBLIC_TRAINING_DATASET $TRAINING_URI\n",
"\n",
"TRAINING_DATASET = ml_monitoring.spec.MonitoringInput(\n",
" gcs_uri=TRAINING_URI, data_format=\"csv\"\n",
")"
]
Expand Down Expand Up @@ -1123,7 +1121,7 @@
"FEATURE_ATTRIBUTION_BASELINE_DATASET = (\n",
" f\"{BUCKET_URI}/model-monitoring/churn/churn_no_ground_truth.jsonl\"\n",
")\n",
"! gsutil cp gs://cloud-samples-data/vertex-ai/model-monitoring/churn/churn_no_ground_truth.jsonl $FEATURE_ATTRIBUTION_BASELINE_DATASET"
"! gcloud storage cp gs://cloud-samples-data/vertex-ai/model-monitoring/churn/churn_no_ground_truth.jsonl $FEATURE_ATTRIBUTION_BASELINE_DATASET"
]
},
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -337,7 +337,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
"! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
]
},
{
Expand Down Expand Up @@ -412,9 +412,9 @@
},
"outputs": [],
"source": [
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n",
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
"\n",
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI"
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
]
},
{
Expand Down Expand Up @@ -488,7 +488,7 @@
},
"outputs": [],
"source": [
"!gsutil -m cp -R {DATA_PATH}/raw $BUCKET_URI/{DATA_PATH}/raw"
"!gcloud storage cp --recursive {DATA_PATH}/raw $BUCKET_URI/{DATA_PATH}/raw"
]
},
{
Expand Down Expand Up @@ -929,10 +929,10 @@
},
"outputs": [],
"source": [
"# !gsutil cp -R {SRC}/preprocess_pipeline.py {BUCKET_URI}/preprocess_pipeline.py\n",
"!gsutil cp -R {SRC} {BUCKET_URI}/{SRC}\n",
"!gsutil cp requirements.txt {BUCKET_URI}/requirements.txt\n",
"!gsutil cp setup.py {BUCKET_URI}/setup.py"
"# !gcloud storage cp --recursive {SRC}/preprocess_pipeline.py {BUCKET_URI}/preprocess_pipeline.py\n",
"!gcloud storage cp --recursive {SRC} {BUCKET_URI}/{SRC}\n",
"!gcloud storage cp requirements.txt {BUCKET_URI}/requirements.txt\n",
"!gcloud storage cp setup.py {BUCKET_URI}/setup.py"
]
},
{
Expand Down Expand Up @@ -1478,7 +1478,7 @@
"# Delete the Cloud Storage bucket\n",
"delete_bucket = False # Set True for deletion\n",
"if delete_bucket:\n",
" ! gsutil -m rm -r $BUCKET_URI\n",
" ! gcloud storage rm --recursive $BUCKET_URI\n",
"\n",
"# delete dataset\n",
"delete_dataset = False # Set True for deletion\n",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -295,7 +295,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
"! gcloud storage buckets create --location {LOCATION} --project {PROJECT_ID} {BUCKET_URI}"
]
},
{
Expand Down Expand Up @@ -481,7 +481,7 @@
" json.dump(mean_and_std, outfile)\n",
"\n",
"# Save to the staging bucket\n",
"! gsutil cp {MEAN_AND_STD_JSON_FILE} {BUCKET_URI}"
"! gcloud storage cp {MEAN_AND_STD_JSON_FILE} {BUCKET_URI}"
]
},
{
Expand Down Expand Up @@ -1384,7 +1384,7 @@
"model.delete()\n",
"\n",
"if delete_bucket:\n",
" ! gsutil rm -r $BUCKET_URI\n",
" ! gcloud storage rm --recursive $BUCKET_URI\n",
"\n",
"# Delete the created BigQuery dataset\n",
"! bq rm -r -f $PROJECT_ID:$DATASET_NAME"
Expand Down
Loading