Skip to content

Commit a5637f8

Browse files
Rayan Dasoriya and copybara-github
authored and committed
Add notebook for T5Gemma 2 local inference
PiperOrigin-RevId: 846315479
1 parent 0103299 commit a5637f8

7 files changed

+212
-30
lines changed

notebooks/community/model_garden/model_garden_openai_api_llama4.ipynb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -271,7 +271,7 @@
271271
},
272272
"outputs": [],
273273
"source": [
274-
"! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}"
274+
"! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
275275
]
276276
},
277277
{
@@ -1043,7 +1043,7 @@
10431043
"delete_bucket = False # @param {type:\"boolean\"}\n",
10441044
"\n",
10451045
"if delete_bucket:\n",
1046-
" ! gcloud storage rm --recursive $BUCKET_URI"
1046+
" ! gsutil -m rm -r $BUCKET_NAME"
10471047
]
10481048
}
10491049
],

notebooks/community/model_garden/model_garden_pytorch_fill_mask.ipynb

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -125,9 +125,9 @@
125125
"if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
126126
" BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
127127
" BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
128-
" ! gcloud storage buckets create --location {REGION} {BUCKET_URI}\n",
128+
" ! gsutil mb -l {REGION} {BUCKET_URI}\n",
129129
"else:\n",
130-
" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
130+
" shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
131131
" bucket_region = shell_output[0].strip().lower()\n",
132132
" if bucket_region != REGION:\n",
133133
" raise ValueError(\n",
@@ -145,7 +145,7 @@
145145
"print(\"Using this default Service Account:\", SERVICE_ACCOUNT)\n",
146146
"\n",
147147
"# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
148-
"! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=\"serviceAccount:{SERVICE_ACCOUNT}\" --role=\"roles/storage.admin\"\n",
148+
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
149149
"\n",
150150
"# The pre-built serving docker image. It contains serving scripts and models.\n",
151151
"SERVE_DOCKER_URI = \"us-docker.pkg.dev/deeplearning-platform-release/gcr.io/huggingface-pytorch-inference-cu121.2-2.transformers.4-41.ubuntu2204.py311\"\n",
@@ -185,7 +185,9 @@
185185
" accelerator_count=1,\n",
186186
" deploy_request_timeout=1800,\n",
187187
" service_account=SERVICE_ACCOUNT,\n",
188-
" system_labels={\"NOTEBOOK_NAME\": \"model_garden_pytorch_fill_mask.ipynb\"},\n",
188+
" system_labels={\n",
189+
" \"NOTEBOOK_NAME\": \"model_garden_pytorch_fill_mask.ipynb\"\n",
190+
" },\n",
189191
" )\n",
190192
" return model, endpoint"
191193
]
@@ -281,7 +283,7 @@
281283
"\n",
282284
"delete_bucket = False # @param {type:\"boolean\"}\n",
283285
"if delete_bucket:\n",
284-
" ! gcloud storage rm --recursive $BUCKET_NAME"
286+
" ! gsutil -m rm -r $BUCKET_NAME"
285287
]
286288
}
287289
],

notebooks/community/model_garden/model_garden_pytorch_stable_diffusion_xl_turbo.ipynb

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -148,10 +148,10 @@
148148
"if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
149149
" BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
150150
" BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
151-
" ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n",
151+
" ! gsutil mb -l {REGION} {BUCKET_URI}\n",
152152
"else:\n",
153153
" assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
154-
" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
154+
" shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
155155
" bucket_region = shell_output[0].strip().lower()\n",
156156
" if bucket_region != REGION:\n",
157157
" raise ValueError(\n",
@@ -170,7 +170,7 @@
170170
"print(\"Using this default Service Account:\", SERVICE_ACCOUNT)\n",
171171
"\n",
172172
"# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
173-
"! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
173+
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
174174
"\n",
175175
"aiplatform.init(project=PROJECT_ID, location=REGION, staging_bucket=BUCKET_URI)\n",
176176
"\n",
@@ -338,7 +338,7 @@
338338
"# Delete bucket.\n",
339339
"delete_bucket = False # @param {type:\"boolean\"}\n",
340340
"if delete_bucket:\n",
341-
" ! gcloud storage rm --recursive $BUCKET_NAME"
341+
" ! gsutil -m rm -r $BUCKET_NAME"
342342
]
343343
}
344344
],

notebooks/community/model_garden/model_garden_pytorch_text_to_video_zero_shot.ipynb

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -124,10 +124,10 @@
124124
"if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
125125
" BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
126126
" BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
127-
" ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n",
127+
" ! gsutil mb -l {REGION} {BUCKET_URI}\n",
128128
"else:\n",
129129
" assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
130-
" shell_output = ! gcloud storage buckets describe {BUCKET_NAME} --format=\"value(location)\"\n",
130+
" shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
131131
" bucket_region = shell_output[0].strip().lower()\n",
132132
" if bucket_region != REGION:\n",
133133
" raise ValueError(\n",
@@ -146,7 +146,7 @@
146146
"print(\"Using this default Service Account:\", SERVICE_ACCOUNT)\n",
147147
"\n",
148148
"# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
149-
"! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
149+
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
150150
"\n",
151151
"aiplatform.init(project=PROJECT_ID, location=REGION, staging_bucket=BUCKET_URI)\n",
152152
"\n",
@@ -271,7 +271,7 @@
271271
"# Delete bucket.\n",
272272
"delete_bucket = False # @param {type:\"boolean\"}\n",
273273
"if delete_bucket:\n",
274-
" ! gcloud storage rm --recursive $BUCKET_NAME"
274+
" ! gsutil -m rm -r $BUCKET_NAME"
275275
]
276276
}
277277
],

notebooks/community/model_garden/model_garden_pytorch_vilt_vqa.ipynb

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -135,10 +135,10 @@
135135
"if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
136136
" BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
137137
" BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
138-
" ! gcloud storage buckets create --location={REGION} {BUCKET_URI}\n",
138+
" ! gsutil mb -l {REGION} {BUCKET_URI}\n",
139139
"else:\n",
140140
" assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
141-
" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
141+
" shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
142142
" bucket_region = shell_output[0].strip().lower()\n",
143143
" if bucket_region != REGION:\n",
144144
" raise ValueError(\n",
@@ -163,7 +163,7 @@
163163
"\n",
164164
"\n",
165165
"# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
166-
"! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
166+
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
167167
"\n",
168168
"! gcloud config set project $PROJECT_ID\n",
169169
"! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
@@ -271,9 +271,7 @@
271271
"# print(\"Using this existing endpoint from a different session: {aip_endpoint_name}\")\n",
272272
"\n",
273273
"# @markdown ![](http://images.cocodataset.org/val2017/000000039769.jpg?w=1260&h=750)\n",
274-
"image = (\n",
275-
" \"http://images.cocodataset.org/val2017/000000039769.jpg\" # @param {type: \"string\"}\n",
276-
")\n",
274+
"image = \"http://images.cocodataset.org/val2017/000000039769.jpg\" # @param {type: \"string\"}\n",
277275
"question = \"Which cat is bigger?\" # @param {type: \"string\"}\n",
278276
"\n",
279277
"instances = [\n",
@@ -312,7 +310,7 @@
312310
"\n",
313311
"delete_bucket = False # @param {type:\"boolean\"}\n",
314312
"if delete_bucket:\n",
315-
" ! gcloud storage rm --recursive $BUCKET_NAME"
313+
" ! gsutil -m rm -r $BUCKET_NAME"
316314
]
317315
}
318316
],

notebooks/community/model_garden/model_garden_pytorch_whisper_large_v3_deployment.ipynb

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -143,10 +143,10 @@
143143
"if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n",
144144
" BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n",
145145
" BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n",
146-
" ! gcloud storage buckets create {BUCKET_URI} --location={REGION}\n",
146+
" ! gsutil mb -l {REGION} {BUCKET_URI}\n",
147147
"else:\n",
148148
" assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n",
149-
" shell_output = ! gcloud storage ls --full --buckets {BUCKET_NAME} | grep \"Location Constraint:\" | sed \"s/Location Constraint://\"\n",
149+
" shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n",
150150
" bucket_region = shell_output[0].strip().lower()\n",
151151
" if bucket_region != REGION:\n",
152152
" raise ValueError(\n",
@@ -171,7 +171,7 @@
171171
"\n",
172172
"\n",
173173
"# Provision permissions to the SERVICE_ACCOUNT with the GCS bucket\n",
174-
"! gcloud storage buckets add-iam-policy-binding $BUCKET_NAME --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.admin\n",
174+
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.admin $BUCKET_NAME\n",
175175
"\n",
176176
"! gcloud config set project $PROJECT_ID\n",
177177
"! gcloud projects add-iam-policy-binding --no-user-output-enabled {PROJECT_ID} --member=serviceAccount:{SERVICE_ACCOUNT} --role=\"roles/storage.admin\"\n",
@@ -191,9 +191,7 @@
191191
"\n",
192192
"# @markdown This section deploys the prebuilt whisper large v3 and whisper large v3 turbo model on a Vertex endpoint. It takes 15 minutes to deploy this model.\n",
193193
"\n",
194-
"model_name = (\n",
195-
" \"whisper-large-v3-turbo\" # @param [\"whisper-large-v3\", \"whisper-large-v3-turbo\"]\n",
196-
")\n",
194+
"model_name = \"whisper-large-v3-turbo\" # @param [\"whisper-large-v3\", \"whisper-large-v3-turbo\"]\n",
197195
"model_id = \"openai/{name}\".format(name=model_name)\n",
198196
"task = \"audio2text\"\n",
199197
"\n",
@@ -305,7 +303,7 @@
305303
" print(\n",
306304
" f\"Provisioning roles/storage.objectViewer permission to the {SERVICE_ACCOUNT} with the {bucket}.\"\n",
307305
" )\n",
308-
" ! gcloud storage buckets add-iam-policy-binding $bucket --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer\n",
306+
" ! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $bucket\n",
309307
"\n",
310308
"instances = [{\"audio\": uri} for uri in gcs_uris]\n",
311309
"\n",
@@ -343,7 +341,7 @@
343341
"\n",
344342
"delete_bucket = False # @param {type:\"boolean\"}\n",
345343
"if delete_bucket:\n",
346-
" ! gcloud storage rm --recursive $BUCKET_NAME"
344+
" ! gsutil -m rm -r $BUCKET_NAME"
347345
]
348346
}
349347
],
Lines changed: 184 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,184 @@
1+
{
2+
"cells": [
3+
{
4+
"cell_type": "code",
5+
"execution_count": null,
6+
"metadata": {
7+
"cellView": "form",
8+
"id": "YsFQaflOxP_g"
9+
},
10+
"outputs": [],
11+
"source": [
12+
"# Copyright 2025 Google LLC\n",
13+
"#\n",
14+
"# Licensed under the Apache License, Version 2.0 (the \"License\");\n",
15+
"# you may not use this file except in compliance with the License.\n",
16+
"# You may obtain a copy of the License at\n",
17+
"#\n",
18+
"# https://www.apache.org/licenses/LICENSE-2.0\n",
19+
"#\n",
20+
"# Unless required by applicable law or agreed to in writing, software\n",
21+
"# distributed under the License is distributed on an \"AS IS\" BASIS,\n",
22+
"# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n",
23+
"# See the License for the specific language governing permissions and\n",
24+
"# limitations under the License."
25+
]
26+
},
27+
{
28+
"cell_type": "markdown",
29+
"metadata": {
30+
"id": "Y-uUs1OfxcjA"
31+
},
32+
"source": [
33+
"# Vertex AI Model Garden - T5Gemma 2 (Local Inference)\n",
34+
"\n",
35+
"<table><tbody><tr>\n",
36+
" <td style=\"text-align: center\">\n",
37+
" <a href=\"https://console.cloud.google.com/vertex-ai/notebooks/deploy-notebook?download_url=https://raw.githubusercontent.com/GoogleCloudPlatform/vertex-ai-samples/main/notebooks/community/model_garden/model_garden_t5gemma_2_local_inference.ipynb\">\n",
38+
" <img alt=\"Workbench logo\" src=\"https://lh3.googleusercontent.com/UiNooY4LUgW_oTvpsNhPpQzsstV5W8F7rYgxgGBD85cWJoLmrOzhVs_ksK_vgx40SHs7jCqkTkCk=e14-rj-sc0xffffff-h130-w32\" width=\"32px\"><br> Run in Workbench\n",
39+
" </a>\n",
40+
" </td>\n",
41+
" <td style=\"text-align: center\">\n",
42+
" <a href=\"https://console.cloud.google.com/vertex-ai/colab/import/https:%2F%2Fraw.githubusercontent.com%2FGoogleCloudPlatform%2Fvertex-ai-samples%2Fmain%2Fnotebooks%2Fcommunity%2Fmodel_garden%2Fmodel_garden_t5gemma_2_local_inference.ipynb\">\n",
43+
" <img alt=\"Google Cloud Colab Enterprise logo\" src=\"https://lh3.googleusercontent.com/JmcxdQi-qOpctIvWKgPtrzZdJJK-J3sWE1RsfjZNwshCFgE_9fULcNpuXYTilIR2hjwN\" width=\"32px\"><br> Run in Colab Enterprise\n",
44+
" </a>\n",
45+
" </td>\n",
46+
" <td style=\"text-align: center\">\n",
47+
" <a href=\"https://github.com/GoogleCloudPlatform/vertex-ai-samples/blob/main/notebooks/community/model_garden/model_garden_t5gemma_2_local_inference.ipynb\">\n",
48+
" <img alt=\"GitHub logo\" src=\"https://github.githubassets.com/assets/GitHub-Mark-ea2971cee799.png\" width=\"32px\"><br> View on GitHub\n",
49+
" </a>\n",
50+
" </td>\n",
51+
"</tr></tbody></table>"
52+
]
53+
},
54+
{
55+
"cell_type": "markdown",
56+
"metadata": {
57+
"id": "-c_LNERL0MNv"
58+
},
59+
"source": [
60+
"## Overview\n",
61+
"\n",
62+
"This notebook demonstrates how to install the necessary libraries and run local inference with the T5Gemma 2 model in a [Colab Enterprise Instance](https://cloud.google.com/colab/docs) or a [Workbench Instance](https://cloud.google.com/vertex-ai/docs/workbench/instances).\n",
63+
"\n",
64+
"### Objective\n",
65+
"\n",
66+
"Run local inference with the T5Gemma 2 model.\n",
67+
"\n",
68+
"### Costs\n",
69+
"\n",
70+
"This tutorial uses billable components of Google Cloud:\n",
71+
"\n",
72+
"* Vertex AI\n",
73+
"\n",
74+
"Learn about [Vertex AI pricing](https://cloud.google.com/vertex-ai/pricing) and use the [Pricing Calculator](https://cloud.google.com/products/calculator/) to generate a cost estimate based on your projected usage."
75+
]
76+
},
77+
{
78+
"cell_type": "markdown",
79+
"metadata": {
80+
"id": "zsNTHGCK1FU7"
81+
},
82+
"source": [
83+
"## Install dependencies\n",
84+
"\n",
85+
"Before you begin, make sure you are using an instance with GPU.\n",
86+
"\n",
87+
"* **Colab Enterprise**: Connect to a [Colab Enterprise runtime](https://cloud.google.com/colab/docs/connect-to-runtime) with GPU. If not, we recommend [creating a runtime template](https://cloud.google.com/colab/docs/create-runtime-template) with `g2-standard-16` machine type (or larger, see the descriptions of the model you want to try out below) to use `NVIDIA_L4` GPU. Then, [create a runtime](https://cloud.google.com/colab/docs/create-runtime) from that template.\n",
88+
"\n",
89+
"* **Workbench Instance**: Use a Workbench Instance with GPU."
90+
]
91+
},
92+
{
93+
"cell_type": "code",
94+
"execution_count": null,
95+
"metadata": {
96+
"cellView": "form",
97+
"id": "xle1_0ns1r10"
98+
},
99+
"outputs": [],
100+
"source": [
101+
"!pip install -q git+https://github.com/huggingface/transformers.git\n",
102+
"!pip install torch~=2.8.0\n",
103+
"!pip install torchvision~=0.23.0"
104+
]
105+
},
106+
{
107+
"cell_type": "markdown",
108+
"metadata": {
109+
"id": "a0eE5dWx1s1j"
110+
},
111+
"source": [
112+
"## Hugging Face Login\n",
113+
"\n",
114+
"The following code block will prompt you to enter your Hugging Face access token.\n",
115+
"\n",
116+
"If you don't already have a Hugging Face access token, follow the [Hugging Face documentation](https://huggingface.co/docs/hub/en/security-tokens) to create an access token with \"read\" permission. You can find your existing access tokens in the Hugging Face [Access Token](https://huggingface.co/settings/tokens) page.\n",
117+
"\n",
118+
"Make sure you have accepted the model agreement to access the model."
119+
]
120+
},
121+
{
122+
"cell_type": "code",
123+
"execution_count": null,
124+
"metadata": {
125+
"cellView": "form",
126+
"id": "oR2eJczK3mNF"
127+
},
128+
"outputs": [],
129+
"source": [
130+
"from huggingface_hub import notebook_login\n",
131+
"\n",
132+
"notebook_login()"
133+
]
134+
},
135+
{
136+
"cell_type": "markdown",
137+
"metadata": {
138+
"id": "arlTS97v29Uu"
139+
},
140+
"source": [
141+
"## Run local inference"
142+
]
143+
},
144+
{
145+
"cell_type": "code",
146+
"execution_count": null,
147+
"metadata": {
148+
"cellView": "form",
149+
"id": "uTLuTO3N1z_B"
150+
},
151+
"outputs": [],
152+
"source": [
153+
"import requests\n",
154+
"from PIL import Image\n",
155+
"from transformers import AutoModelForSeq2SeqLM, AutoProcessor\n",
156+
"\n",
157+
"model_id = \"google/t5gemma-2-270m-270m\" # @param [\"google/t5gemma-2-270m-270m\", \"google/t5gemma-2-1b-1b\", \"google/t5gemma-2-4b-4b\"] {isTemplate:true}\n",
158+
"\n",
159+
"processor = AutoProcessor.from_pretrained(model_id)\n",
160+
"model = AutoModelForSeq2SeqLM.from_pretrained(model_id)\n",
161+
"\n",
162+
"url = \"https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/bee.jpg\"\n",
163+
"image = Image.open(requests.get(url, stream=True).raw)\n",
164+
"prompt = \"<start_of_image> in this image, there is\"\n",
165+
"\n",
166+
"model_inputs = processor(text=prompt, images=image, return_tensors=\"pt\")\n",
167+
"generation = model.generate(**model_inputs, max_new_tokens=100, do_sample=False)\n",
168+
"print(processor.decode(generation[0]))"
169+
]
170+
}
171+
],
172+
"metadata": {
173+
"colab": {
174+
"name": "model_garden_t5gemma_2_local_inference.ipynb",
175+
"toc_visible": true
176+
},
177+
"kernelspec": {
178+
"display_name": "Python 3",
179+
"name": "python3"
180+
}
181+
},
182+
"nbformat": 4,
183+
"nbformat_minor": 0
184+
}

0 commit comments

Comments
 (0)