|
107 | 107 | "# @markdown ### Prerequisites\n", |
108 | 108 | "# @markdown 1. [Make sure that billing is enabled for your project](https://cloud.google.com/billing/docs/how-to/modify-project).\n", |
109 | 109 | "\n", |
110 | | - "# @markdown 2. [Optional] [Create a Cloud Storage bucket](https://cloud.google.com/storage/docs/creating-buckets) for storing experiment outputs. Set the BUCKET_URI for the experiment environment. The specified Cloud Storage bucket (`BUCKET_URI`) should be located in the same region as where the notebook was launched. Note that a multi-region bucket (eg. \"us\") is not considered a match for a single region covered by the multi-region range (eg. \"us-central1\"). If not set, a unique GCS bucket will be created instead.\n", |
| 110 | + "# @markdown 2. **[Optional]** [Create a Cloud Storage bucket](https://cloud.google.com/storage/docs/creating-buckets) for storing experiment outputs. Set the BUCKET_URI for the experiment environment. The specified Cloud Storage bucket (`BUCKET_URI`) should be located in the same region as where the notebook was launched. Note that a multi-region bucket (eg. \"us\") is not considered a match for a single region covered by the multi-region range (eg. \"us-central1\"). If not set, a unique GCS bucket will be created instead.\n", |
111 | 111 | "\n", |
112 | 112 | "! git clone https://github.com/GoogleCloudPlatform/vertex-ai-samples.git\n", |
113 | 113 | "! pip install -q gradio==4.21.0\n", |
114 | 114 | "\n", |
115 | 115 | "import importlib\n", |
116 | 116 | "import os\n", |
| 117 | + "import uuid\n", |
117 | 118 | "from datetime import datetime\n", |
118 | 119 | "from typing import Tuple\n", |
119 | 120 | "\n", |
|
132 | 133 | "# Get the default region for launching jobs.\n", |
133 | 134 | "REGION = os.environ[\"GOOGLE_CLOUD_REGION\"]\n", |
134 | 135 | "\n", |
| 136 | + "# @markdown 3. If you want to run predictions with A100 80GB or H100 GPUs, we recommend using the regions listed below. **NOTE:** Make sure you have associated quota in selected regions. Click the links to see your current quota for each GPU type: [Nvidia A100 80GB](https://console.cloud.google.com/iam-admin/quotas?metric=aiplatform.googleapis.com%2Fcustom_model_serving_nvidia_a100_80gb_gpus), [Nvidia H100 80GB](https://console.cloud.google.com/iam-admin/quotas?metric=aiplatform.googleapis.com%2Fcustom_model_serving_nvidia_h100_gpus).\n", |
| 137 | + "\n", |
| 138 | + "# @markdown > | Machine Type | Accelerator Type | Recommended Regions |\n", |
| 139 | + "# @markdown | ----------- | ----------- | ----------- |\n", |
| 140 | + "# @markdown | a2-ultragpu-1g | 1 NVIDIA_A100_80GB | us-central1, us-east4, europe-west4, asia-southeast1, us-east4 |\n", |
| 141 | + "# @markdown | a3-highgpu-2g | 2 NVIDIA_H100_80GB | us-west1, asia-southeast1, europe-west4 |\n", |
| 142 | + "# @markdown | a3-highgpu-4g | 4 NVIDIA_H100_80GB | us-west1, asia-southeast1, europe-west4 |\n", |
| 143 | + "# @markdown | a3-highgpu-8g | 8 NVIDIA_H100_80GB | us-central1, us-east5, europe-west4, us-west1, asia-southeast1 |\n", |
| 144 | + "\n", |
| 145 | + "\n", |
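|     | 146 | +        "# Optional sanity check against the table above: warn early if the selected\n", |
|     | 147 | +        "# region is not among the recommended regions for a given machine type. This\n", |
|     | 148 | +        "# is an illustrative sketch; `RECOMMENDED_REGIONS` and `check_region` only\n", |
|     | 149 | +        "# mirror the table and do not verify your actual quota.\n", |
|     | 150 | +        "RECOMMENDED_REGIONS = {\n", |
|     | 151 | +        "    \"a2-ultragpu-1g\": {\"us-central1\", \"us-east4\", \"europe-west4\", \"asia-southeast1\"},\n", |
|     | 152 | +        "    \"a3-highgpu-2g\": {\"us-west1\", \"asia-southeast1\", \"europe-west4\"},\n", |
|     | 153 | +        "    \"a3-highgpu-4g\": {\"us-west1\", \"asia-southeast1\", \"europe-west4\"},\n", |
|     | 154 | +        "    \"a3-highgpu-8g\": {\"us-central1\", \"us-east5\", \"europe-west4\", \"us-west1\", \"asia-southeast1\"},\n", |
|     | 155 | +        "}\n", |
|     | 156 | +        "\n", |
|     | 157 | +        "\n", |
|     | 158 | +        "def check_region(machine_type: str) -> None:\n", |
|     | 159 | +        "    \"\"\"Warns if REGION is not a recommended region for the machine type.\"\"\"\n", |
|     | 160 | +        "    recommended = RECOMMENDED_REGIONS.get(machine_type)\n", |
|     | 161 | +        "    if recommended and REGION not in recommended:\n", |
|     | 162 | +        "        print(f\"Warning: {REGION} is not recommended for {machine_type}.\")\n", |
|     | 163 | +        "\n", |
|     | 164 | +        "\n", |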
135 | 146 | "# Cloud Storage bucket for storing the experiment artifacts.\n", |
136 | 147 | "# A unique GCS bucket will be created for the purpose of this notebook. If you\n", |
137 | 148 | "# prefer using your own GCS bucket, change the value yourself below.\n", |
138 | 149 | "now = datetime.now().strftime(\"%Y%m%d%H%M%S\")\n", |
139 | 150 | "BUCKET_URI = \"gs://\" # @param {type: \"string\"}\n", |
| 151 | + "BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", |
140 | 152 | "assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", |
141 | 153 | "\n", |
142 | 154 | "# Create a unique GCS bucket for this notebook, if not specified by the user.\n", |
143 | 155 | "assert BUCKET_URI.startswith(\"gs://\"), \"BUCKET_URI must start with `gs://`.\"\n", |
144 | 156 | "if BUCKET_URI is None or BUCKET_URI.strip() == \"\" or BUCKET_URI == \"gs://\":\n", |
145 | | - " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}\"\n", |
146 | | - " ! gsutil mb -l {REGION} {BUCKET_URI}\n", |
| 157 | + " BUCKET_URI = f\"gs://{PROJECT_ID}-tmp-{now}-{str(uuid.uuid4())[:4]}\"\n", |
147 | 158 | " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", |
| 159 | + " ! gsutil mb -l {REGION} {BUCKET_URI}\n", |
148 | 160 | "else:\n", |
149 | | - " BUCKET_NAME = \"/\".join(BUCKET_URI.split(\"/\")[:3])\n", |
150 | 161 | " shell_output = ! gsutil ls -Lb {BUCKET_NAME} | grep \"Location constraint:\" | sed \"s/Location constraint://\"\n", |
151 | 162 | " bucket_region = shell_output[0].strip().lower()\n", |
152 | 163 | " if bucket_region != REGION:\n", |
|
186 | 197 | "models, endpoints = {}, {}\n", |
187 | 198 | "\n", |
188 | 199 | "\n", |
189 | | - "def resize_image(image: Image.Image, new_width: int = 512) -> Image.Image:\n", |
190 | | - " width, height = image.size\n", |
191 | | - " new_height = int(height * new_width / width)\n", |
192 | | - " new_image = image.resize((new_width, new_height))\n", |
193 | | - " return new_image\n", |
194 | | - "\n", |
195 | | - "\n", |
196 | | - "def load_image(image_url):\n", |
197 | | - " if image_url.startswith(\"gs://\"):\n", |
198 | | - " local_image_path = \"./images/test_image.jpg\"\n", |
199 | | - " common_util.download_gcs_file_to_local(image_url, local_image_path)\n", |
200 | | - " image = common_util.load_img(local_image_path)\n", |
201 | | - " else:\n", |
202 | | - " image = common_util.download_image(image_url)\n", |
203 | | - " return image\n", |
204 | | - "\n", |
205 | | - "\n", |
206 | 200 | "def deploy_mammut(\n", |
207 | 201 | " task: str, machine_type: str, accelerator_type: str, accelerator_count: int\n", |
208 | 202 | ") -> Tuple[aiplatform.Model, aiplatform.Endpoint]:\n", |
|
268 | 262 | "):\n", |
269 | 263 | " \"\"\"Generates predictions based on the input image and text using an Endpoint.\"\"\"\n", |
270 | 264 | " # Resize and convert image to base64 string.\n", |
271 | | - " resized_image = resize_image(image, new_width)\n", |
| 265 | + " resized_image = common_util.resize_image(image, new_width=512)\n", |
272 | 266 | " instances = [\n", |
273 | 267 | " {\n", |
274 | 268 | " \"image_bytes\": {\"b64\": common_util.image_to_base64(resized_image)},\n", |
|
354 | 348 | "# @markdown This can be either a Cloud Storage path (gs://\\<image-path\\>) or a public url (http://\\<image-path\\>)\n", |
355 | 349 | "image_url = \"https://images.pexels.com/photos/4012966/pexels-photo-4012966.jpeg\" # @param {type:\"string\"}\n", |
356 | 350 | "\n", |
| 351 | + "\n", |
| 352 | + "def load_image(image_url):\n", |
| 353 | + " if image_url.startswith(\"gs://\"):\n", |
| 354 | + " local_image_path = \"./images/test_image.jpg\"\n", |
| 355 | + " common_util.download_gcs_file_to_local(image_url, local_image_path)\n", |
| 356 | + " image = common_util.load_img(local_image_path)\n", |
| 357 | + " else:\n", |
| 358 | + " image = common_util.download_image(image_url)\n", |
| 359 | + " return image\n", |
| 360 | + "\n", |
| 361 | + "\n", |
357 | 362 | "image = load_image(image_url)\n", |
358 | 363 | "display(image)\n", |
359 | 364 | "\n", |
|
608 | 613 | "# @markdown This can be either a Cloud Storage path (gs://\\<image-path\\>) or a public url (http://\\<image-path\\>)\n", |
609 | 614 | "image_url = \"https://images.pexels.com/photos/20427316/pexels-photo-20427316/free-photo-of-a-moped-parked-in-front-of-a-blue-door.jpeg?auto=compress&cs=tinysrgb&w=630&h=375&dpr=2\" # @param {type:\"string\"}\n", |
610 | 615 | "\n", |
| 616 | + "\n", |
611 | 617 | "image = load_image(image_url)\n", |
612 | 618 | "display(image)\n", |
613 | 619 | "\n", |
|
683 | 689 | "text_embeddings = []\n", |
684 | 690 | "image_embeddings = []\n", |
685 | 691 | "for image in images:\n", |
686 | | - " prediction = predict(retrieval_endpoint, image, text)\n", |
| 692 | + " prediction = predict(endpoints[\"retrieval\"], image, text)\n", |
687 | 693 | " image_embeddings.append(np.array(prediction[\"normalized_image_embedding\"]))\n", |
688 | 694 | " text_embeddings.append(np.array(prediction[\"normalized_text_embedding\"]))\n", |
689 | 695 | "\n", |
|
730 | 736 | "# Delete Cloud Storage objects that were created.\n", |
731 | 737 | "delete_bucket = False # @param {type:\"boolean\"}\n", |
732 | 738 | "if delete_bucket:\n", |
733 | | - " ! gsutil -m rm -r $BUCKET_URI" |
| 739 | + " ! gsutil -m rm -r $BUCKET_NAME" |
734 | 740 | ] |
735 | 741 | } |
736 | 742 | ], |
|