diff --git a/.cloud-build/execute_changed_notebooks_helper.py b/.cloud-build/execute_changed_notebooks_helper.py
index 26289f89a..b38af683d 100755
--- a/.cloud-build/execute_changed_notebooks_helper.py
+++ b/.cloud-build/execute_changed_notebooks_helper.py
@@ -365,7 +365,7 @@ def process_and_execute_notebook(
         # Use gcloud to get tail
         try:
             result.error_message = subprocess.check_output(
-                ["gsutil", "cat", "-r", "-1000", log_file_uri], encoding="UTF-8"
+                ["gcloud", "storage", "cat", "--range=-1000", log_file_uri], encoding="UTF-8"
             )
         except Exception as error:
             result.error_message = str(error)
diff --git a/notebooks/official/generative_ai/distillation.ipynb b/notebooks/official/generative_ai/distillation.ipynb
index 81671af50..04db0c9ba 100644
--- a/notebooks/official/generative_ai/distillation.ipynb
+++ b/notebooks/official/generative_ai/distillation.ipynb
@@ -323,7 +323,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
+    "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
    ]
   },
   {
@@ -347,8 +347,8 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_eval_sample.jsonl {BUCKET_URI}/peft_eval_sample.jsonl\n",
-    "! gsutil cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_train_sample.jsonl {BUCKET_URI}/peft_train_sample.jsonl"
+    "! gcloud storage cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_eval_sample.jsonl {BUCKET_URI}/peft_eval_sample.jsonl\n",
+    "! gcloud storage cp gs://cloud-samples-data/vertex-ai/model-evaluation/peft_train_sample.jsonl {BUCKET_URI}/peft_train_sample.jsonl"
    ]
   },
   {
@@ -554,7 +554,7 @@
     "# Delete the Cloud Storage bucket\n",
     "delete_bucket = True\n",
     "if delete_bucket:\n",
-    "    ! gsutil rm -rf {BUCKET_URI}"
+    "    ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}"
    ]
   }
  ],
diff --git a/notebooks/official/ml_metadata/sdk-metric-parameter-tracking-for-custom-jobs.ipynb b/notebooks/official/ml_metadata/sdk-metric-parameter-tracking-for-custom-jobs.ipynb
index 1c5cf42b2..c5aa6326e 100644
--- a/notebooks/official/ml_metadata/sdk-metric-parameter-tracking-for-custom-jobs.ipynb
+++ b/notebooks/official/ml_metadata/sdk-metric-parameter-tracking-for-custom-jobs.ipynb
@@ -274,7 +274,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
+    "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
    ]
   },
   {
@@ -390,7 +390,7 @@
    "outputs": [],
    "source": [
     "!wget https://storage.googleapis.com/download.tensorflow.org/data/abalone_train.csv\n",
-    "!gsutil cp abalone_train.csv {BUCKET_URI}/data/\n",
+    "!gcloud storage cp abalone_train.csv {BUCKET_URI}/data/\n",
     "\n",
     "gcs_csv_path = f\"{BUCKET_URI}/data/abalone_train.csv\""
    ]
@@ -784,7 +784,7 @@
     "\n",
     "\n",
     "if delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],
diff --git a/notebooks/official/prediction/get_started_with_psc_private_endpoint.ipynb b/notebooks/official/prediction/get_started_with_psc_private_endpoint.ipynb
index 6262b17db..ea44a3fee 100644
--- a/notebooks/official/prediction/get_started_with_psc_private_endpoint.ipynb
+++ b/notebooks/official/prediction/get_started_with_psc_private_endpoint.ipynb
@@ -201,7 +201,7 @@
    "source": [
     "# Create GCS Bucket\n",
     "BUCKET_URI = \"gs://your-bucket-name-unique\"  # @param {type:\"string\"}\n",
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
+    "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
    ]
   },
   {
@@ -237,7 +237,7 @@
    "outputs": [],
    "source": [
     "# Copy Models to the Bucket\n",
-    "! gsutil cp -r \"gs://cloud-samples-data/vertex-ai/prediction/test-models-requests/*\" {BUCKET_URI}"
+    "! gcloud storage cp --recursive \"gs://cloud-samples-data/vertex-ai/prediction/test-models-requests/*\" {BUCKET_URI}"
    ]
   },
   {
@@ -508,7 +508,7 @@
    "outputs": [],
    "source": [
     "# Download the requests files:\n",
-    "! gsutil cp {BUCKET_URI}/requests/* ./"
+    "! gcloud storage cp {BUCKET_URI}/requests/* ./"
    ]
   },
   {
@@ -716,7 +716,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil rm -r {BUCKET_URI}"
+    "! gcloud storage rm --recursive {BUCKET_URI}"
    ]
   },
   {
diff --git a/notebooks/official/prediction/llm_streaming_prediction.ipynb b/notebooks/official/prediction/llm_streaming_prediction.ipynb
index 7216a0c21..5a58f1cf8 100644
--- a/notebooks/official/prediction/llm_streaming_prediction.ipynb
+++ b/notebooks/official/prediction/llm_streaming_prediction.ipynb
@@ -286,7 +286,7 @@
    },
    "outputs": [],
    "source": [
-    "! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
+    "! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
    ]
   },
   {
@@ -500,7 +500,7 @@
    "source": [
     "dataset = \"gs://cloud-samples-data/vertex-ai/prediction/llm/test_table.jsonl\"\n",
     "destination_uri_prefix = f\"{BUCKET_URI}/text-bison@001_\"\n",
-    "! gsutil cp -r gs://cloud-samples-data/vertex-ai/prediction/llm/text-bison@001_/ {destination_uri_prefix}\n",
+    "! gcloud storage cp --recursive gs://cloud-samples-data/vertex-ai/prediction/llm/text-bison@001_/ {destination_uri_prefix}\n",
     "\n",
     "\n",
     "from vertexai.language_models import TextGenerationModel\n",
@@ -580,7 +580,7 @@
    "outputs": [],
    "source": [
     "# Tuning model\n",
-    "! gsutil cp gs://cloud-samples-data/vertex-ai/prediction/llm/q_a_train_with_context.jsonl {BUCKET_URI}/q_a_train_with_context.jsonl\n",
+    "! gcloud storage cp gs://cloud-samples-data/vertex-ai/prediction/llm/q_a_train_with_context.jsonl {BUCKET_URI}/q_a_train_with_context.jsonl\n",
     "\n",
     "tuning_job = model3.tune_model(\n",
     "    training_data=f\"{BUCKET_URI}/q_a_train_with_context.jsonl\",\n",
@@ -740,7 +740,7 @@
     "batch_job_2.delete()\n",
     "\n",
     "if delete_bucket:\n",
-    "    ! gsutil -m rm -r $BUCKET_URI"
+    "    ! gcloud storage rm --recursive $BUCKET_URI"
    ]
   }
  ],