@@ -499,7 +499,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION $BUCKET_URI"
"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
]
},
{
@@ -519,7 +519,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_URI"
"! gcloud storage ls --all-versions --long $BUCKET_URI"
]
},
{
@@ -955,7 +955,7 @@
"delete_bucket = False\n",
"\n",
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
" ! gsutil rm -r $BUCKET_URI"
" ! gcloud storage rm --recursive $BUCKET_URI"
]
}
],
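The three hunks above follow the pattern used throughout this PR: gsutil's single-letter flags become spelled-out gcloud storage flags (mb becomes buckets create with --location, ls -al becomes ls --all-versions --long, rm -r becomes rm --recursive). A minimal round-trip sketch of the migrated commands, assuming an authenticated gcloud CLI and illustrative values that are not taken from the notebook:

# Illustrative values only; the notebook defines REGION and BUCKET_URI in earlier cells.
REGION=us-central1
BUCKET_URI=gs://example-migration-check
gcloud storage buckets create --location=$REGION $BUCKET_URI   # was: gsutil mb -l $REGION $BUCKET_URI
gcloud storage ls --all-versions --long $BUCKET_URI            # was: gsutil ls -al $BUCKET_URI
gcloud storage rm --recursive $BUCKET_URI                      # was: gsutil rm -r $BUCKET_URI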
8 changes: 4 additions & 4 deletions notebooks/community/sdk/sdk_private_endpoint.ipynb
@@ -361,7 +361,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION $BUCKET_URI"
"! gcloud storage buckets create --location=$REGION $BUCKET_URI"
]
},
{
@@ -381,7 +381,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_URI"
"! gcloud storage ls --all-versions --long $BUCKET_URI"
]
},
{
@@ -521,7 +521,7 @@
"IMPORT_FILE = \"petfinder-tabular-classification-tabnet-with-header.csv\"\n",
"TRAINING_DATA_PATH = f\"{BUCKET_URI}/data/petfinder/train.csv\"\n",
"\n",
"! gsutil cp gs://cloud-samples-data/ai-platform-unified/datasets/tabular/{IMPORT_FILE} {TRAINING_DATA_PATH}"
"! gcloud storage cp gs://cloud-samples-data/ai-platform-unified/datasets/tabular/{IMPORT_FILE} {TRAINING_DATA_PATH}"
]
},
{
@@ -993,7 +993,7 @@
" print(e)\n",
"\n",
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
" ! gsutil rm -r $BUCKET_URI"
" ! gcloud storage rm --recursive $BUCKET_URI"
]
}
],
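In sdk_private_endpoint.ipynb the object transfer only swaps the binary: gsutil cp and gcloud storage cp take the same positional source and destination, and the {IMPORT_FILE} and {TRAINING_DATA_PATH} pieces are IPython variable interpolation, which the migration leaves untouched. A hedged sketch with an assumed destination bucket:

# Assumed destination for illustration; the notebook builds the real path from BUCKET_URI.
SRC=gs://cloud-samples-data/ai-platform-unified/datasets/tabular/petfinder-tabular-classification-tabnet-with-header.csv
DST=gs://example-bucket/data/petfinder/train.csv
gcloud storage cp $SRC $DST   # was: gsutil cp $SRC $DST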
@@ -300,7 +300,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $LOCATION -p $PROJECT_ID $BUCKET_URI"
"! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI"
]
},
{
@@ -376,7 +376,7 @@
"DATASET_URI = \"gs://cloud-samples-data/vertex-ai/structured_data/forecasting/synthetic_sales_data.csv\"\n",
"\n",
"# Download the dataset\n",
"! gsutil cp {DATASET_URI} dataset.csv"
"! gcloud storage cp {DATASET_URI} dataset.csv"
]
},
{
@@ -427,7 +427,7 @@
"df_test = df[df[\"date\"] >= date_cutoff]\n",
"\n",
"# Upload to GCS bucket\n",
"! gsutil cp {DATASET_TRAIN_FILENAME} {DATASET_TRAIN_URI}"
"! gcloud storage cp {DATASET_TRAIN_FILENAME} {DATASET_TRAIN_URI}"
]
},
{
@@ -701,7 +701,7 @@
"DATASET_TEST_URI = f\"{BUCKET_URI}/{DATASET_TEST_FILENAME}\"\n",
"\n",
"# Upload to GCS bucket\n",
"! gsutil cp {DATASET_TEST_FILENAME} {DATASET_TEST_URI}"
"! gcloud storage cp {DATASET_TEST_FILENAME} {DATASET_TEST_URI}"
]
},
{
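This notebook's hunks add one more flag rename, -p to --project, and show that cp is symmetric in both tools: the same form covers the GCS-to-local download and the local-to-GCS uploads. A short sketch under those assumptions, with illustrative values:

# Illustrative values only; the notebook supplies LOCATION, PROJECT_ID, BUCKET_URI, and filenames.
LOCATION=us-central1
PROJECT_ID=example-project
BUCKET_URI=gs://example-forecasting-bucket
gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI
gcloud storage cp gs://cloud-samples-data/vertex-ai/structured_data/forecasting/synthetic_sales_data.csv dataset.csv   # download to local
gcloud storage cp dataset.csv $BUCKET_URI/dataset.csv                                                                  # upload to the bucket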
@@ -306,7 +306,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $LOCATION -p $PROJECT_ID $BUCKET_URI"
"! gcloud storage buckets create --location=$LOCATION --project=$PROJECT_ID $BUCKET_URI"
]
},
{
@@ -380,9 +380,9 @@
},
"outputs": [],
"source": [
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n",
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n",
"\n",
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI"
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer"
]
},
{
@@ -903,7 +903,7 @@
"outputs": [],
"source": [
"# Load the results\n",
"attributions = !gsutil cat $feat_attrs_gcs_uri\n",
"attributions = ! gcloud storage cat $feat_attrs_gcs_uri\n",
"\n",
"# Print the results obtained\n",
"attributions = json.loads(attributions[0])\n",
@@ -983,7 +983,7 @@
"# Delete Cloud Storage objects\n",
"delete_bucket = True\n",
"if delete_bucket:\n",
" ! gsutil -m rm -r $BUCKET_URI"
" ! gcloud storage rm --recursive $BUCKET_URI"
]
}
],
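Two details in this notebook's hunks are worth noting. gsutil iam ch could apply several member:role grants in one call, while gcloud storage buckets add-iam-policy-binding takes exactly one --member/--role pair, which is why the grant is now issued as two commands. Separately, gsutil's -m (parallel) flag is dropped from the cleanup command because gcloud storage parallelizes operations by default. A sketch of the two grants with placeholder identities:

# Placeholder service account and bucket for illustration only.
SERVICE_ACCOUNT=example-sa@example-project.iam.gserviceaccount.com
BUCKET_URI=gs://example-bucket
gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:$SERVICE_ACCOUNT --role=roles/storage.objectCreator
gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:$SERVICE_ACCOUNT --role=roles/storage.objectViewer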
@@ -372,7 +372,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
"! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
]
},
{
@@ -420,7 +420,7 @@
},
"outputs": [],
"source": [
"! gsutil cp gs://cloud-samples-data/vertex-ai/matching_engine/glove-100-angular.hdf5 ."
"! gcloud storage cp gs://cloud-samples-data/vertex-ai/matching_engine/glove-100-angular.hdf5 ."
]
},
{
@@ -520,7 +520,7 @@
"outputs": [],
"source": [
"EMBEDDINGS_INITIAL_URI = f\"{BUCKET_URI}/vector_search/initial/\"\n",
"! gsutil cp glove100.json {EMBEDDINGS_INITIAL_URI}"
"! gcloud storage cp glove100.json {EMBEDDINGS_INITIAL_URI}"
]
},
{
@@ -743,7 +743,7 @@
},
"outputs": [],
"source": [
"! gsutil cp glove100_incremental.json {EMBEDDINGS_UPDATE_URI}"
"! gcloud storage cp glove100_incremental.json {EMBEDDINGS_UPDATE_URI}"
]
},
{
@@ -1038,7 +1038,7 @@
"brute_force_index.delete()\n",
"\n",
"if delete_bucket:\n",
" ! gsutil rm -rf {BUCKET_URI}"
" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
]
}
],
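The final hunk carries one more flag translation: in gsutil rm -rf, -r deletes recursively and -f continues past individual failures, which this PR maps to --recursive and --continue-on-error. A hedged cleanup sketch with an illustrative bucket name:

# Illustrative bucket; the notebook gates this cleanup on delete_bucket.
BUCKET_URI=gs://example-vector-search-bucket
gcloud storage rm --recursive --continue-on-error $BUCKET_URI   # was: gsutil rm -rf $BUCKET_URI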