diff --git a/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb b/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb index 82638488f..834f7e0f4 100644 --- a/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb +++ b/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/step_by_step_sdk_tf_agents_bandits_movie_recommendation/step_by_step_sdk_tf_agents_bandits_movie_recommendation.ipynb @@ -398,6 +398,7 @@ "if not IS_GOOGLE_CLOUD_NOTEBOOK:\n", " if \"google.colab\" in sys.modules:\n", " from google.colab import auth as google_auth\n", + "\n", " google_auth.authenticate_user()\n", "\n", " # If you are running this notebook locally, replace the string below with the\n", @@ -472,7 +473,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" + "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ] }, { @@ -492,7 +493,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { @@ -565,7 +566,7 @@ "outputs": [], "source": [ "# Copy the sample data into your DATA_PATH\n", - "! gsutil cp \"gs://cloud-samples-data/vertex-ai/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/u.data\" $DATA_PATH" + "! gcloud storage cp \"gs://cloud-samples-data/vertex-ai/community-content/tf_agents_bandits_movie_recommendation_with_kfp_and_vertex_sdk/u.data\" $DATA_PATH" ] }, { @@ -579,11 +580,15 @@ "# Set hyperparameters.\n", "BATCH_SIZE = 8 # @param {type:\"integer\"} Training and prediction batch size.\n", "TRAINING_LOOPS = 5 # @param {type:\"integer\"} Number of training iterations.\n", - "STEPS_PER_LOOP = 2 # @param {type:\"integer\"} Number of driver steps per training iteration.\n", + "STEPS_PER_LOOP = (\n", + " 2 # @param {type:\"integer\"} Number of driver steps per training iteration.\n", + ")\n", "\n", "# Set MovieLens simulation environment parameters.\n", "RANK_K = 20 # @param {type:\"integer\"} Rank for matrix factorization in the MovieLens environment; also the observation dimension.\n", - "NUM_ACTIONS = 20 # @param {type:\"integer\"} Number of actions (movie items) to choose from.\n", + "NUM_ACTIONS = (\n", + " 20 # @param {type:\"integer\"} Number of actions (movie items) to choose from.\n", + ")\n", "PER_ARM = False # Use the non-per-arm version of the MovieLens environment.\n", "\n", "# Set agent parameters.\n", @@ -621,7 +626,8 @@ "source": [ "# Define RL environment.\n", "env = movielens_py_environment.MovieLensPyEnvironment(\n", - " DATA_PATH, RANK_K, BATCH_SIZE, num_movies=NUM_ACTIONS, csv_delimiter=\"\\t\")\n", + " DATA_PATH, RANK_K, BATCH_SIZE, num_movies=NUM_ACTIONS, csv_delimiter=\"\\t\"\n", + ")\n", "environment = tf_py_environment.TFPyEnvironment(env)\n", "\n", "# Define RL agent/algorithm.\n", @@ -631,7 +637,8 @@ " tikhonov_weight=TIKHONOV_WEIGHT,\n", " alpha=AGENT_ALPHA,\n", " dtype=tf.float32,\n", - " accepts_per_arm_features=PER_ARM)\n", + " accepts_per_arm_features=PER_ARM,\n", + ")\n", "print(\"TimeStep Spec (for each batch):\\n\", agent.time_step_spec, \"\\n\")\n", "print(\"Action Spec (for 
each batch):\\n\", agent.action_spec, \"\\n\")\n", "print(\"Reward Spec (for each batch):\\n\", environment.reward_spec(), \"\\n\")\n", @@ -639,7 +646,8 @@ "# Define RL metric.\n", "optimal_reward_fn = functools.partial(\n", " environment_utilities.compute_optimal_reward_with_movielens_environment,\n", - " environment=environment)\n", + " environment=environment,\n", + ")\n", "regret_metric = tf_bandit_metrics.RegretMetric(optimal_reward_fn)\n", "metrics = [regret_metric]" ] @@ -704,35 +712,38 @@ " if training_data_spec_transformation_fn is None:\n", " data_spec = agent.policy.trajectory_spec\n", " else:\n", - " data_spec = training_data_spec_transformation_fn(\n", - " agent.policy.trajectory_spec)\n", - " replay_buffer = trainer.get_replay_buffer(data_spec, environment.batch_size,\n", - " steps_per_loop)\n", + " data_spec = training_data_spec_transformation_fn(agent.policy.trajectory_spec)\n", + " replay_buffer = trainer.get_replay_buffer(\n", + " data_spec, environment.batch_size, steps_per_loop\n", + " )\n", "\n", " # `step_metric` records the number of individual rounds of bandit interaction;\n", " # that is, (number of trajectories) * batch_size.\n", " step_metric = tf_metrics.EnvironmentSteps()\n", " metrics = [\n", " tf_metrics.NumberOfEpisodes(),\n", - " tf_metrics.AverageEpisodeLengthMetric(batch_size=environment.batch_size)\n", + " tf_metrics.AverageEpisodeLengthMetric(batch_size=environment.batch_size),\n", " ]\n", " if additional_metrics:\n", " metrics += additional_metrics\n", "\n", " if isinstance(environment.reward_spec(), dict):\n", - " metrics += [tf_metrics.AverageReturnMultiMetric(\n", - " reward_spec=environment.reward_spec(),\n", - " batch_size=environment.batch_size)]\n", - " else:\n", " metrics += [\n", - " tf_metrics.AverageReturnMetric(batch_size=environment.batch_size)]\n", + " tf_metrics.AverageReturnMultiMetric(\n", + " reward_spec=environment.reward_spec(), batch_size=environment.batch_size\n", + " )\n", + " ]\n", + " else:\n", + " metrics += [tf_metrics.AverageReturnMetric(batch_size=environment.batch_size)]\n", "\n", " # Store intermediate metric results, indexed by metric names.\n", " metric_results = defaultdict(list)\n", "\n", " if training_data_spec_transformation_fn is not None:\n", - " def add_batch_fn(data): return replay_buffer.add_batch(training_data_spec_transformation_fn(data)) \n", - " \n", + "\n", + " def add_batch_fn(data):\n", + " return replay_buffer.add_batch(training_data_spec_transformation_fn(data))\n", + "\n", " else:\n", " add_batch_fn = replay_buffer.add_batch\n", "\n", @@ -742,10 +753,12 @@ " env=environment,\n", " policy=agent.collect_policy,\n", " num_steps=steps_per_loop * environment.batch_size,\n", - " observers=observers)\n", + " observers=observers,\n", + " )\n", "\n", " training_loop = trainer.get_training_loop_fn(\n", - " driver, replay_buffer, agent, steps_per_loop)\n", + " driver, replay_buffer, agent, steps_per_loop\n", + " )\n", " saver = policy_saver.PolicySaver(agent.policy)\n", "\n", " for _ in range(training_loops):\n", @@ -783,7 +796,8 @@ " environment=environment,\n", " training_loops=TRAINING_LOOPS,\n", " steps_per_loop=STEPS_PER_LOOP,\n", - " additional_metrics=metrics)\n", + " additional_metrics=metrics,\n", + ")\n", "\n", "tf.profiler.experimental.stop()" ] @@ -1092,11 +1106,15 @@ }, "outputs": [], "source": [ - "RUN_HYPERPARAMETER_TUNING = True # Execute hyperparameter tuning instead of regular training.\n", + "RUN_HYPERPARAMETER_TUNING = (\n", + " True # Execute hyperparameter tuning instead of regular 
training.\n", + ")\n", "TRAIN_WITH_BEST_HYPERPARAMETERS = False # Do not train.\n", "\n", "HPTUNING_RESULT_DIR = \"hptuning/\" # @param {type: \"string\"} Directory to store the best hyperparameter(s) in `BUCKET_NAME` and locally (temporarily).\n", - "HPTUNING_RESULT_PATH = os.path.join(HPTUNING_RESULT_DIR, \"result.json\") # @param {type: \"string\"} Path to the file containing the best hyperparameter(s)." + "HPTUNING_RESULT_PATH = os.path.join(\n", + " HPTUNING_RESULT_DIR, \"result.json\"\n", + ") # @param {type: \"string\"} Path to the file containing the best hyperparameter(s)." ] }, { @@ -1124,7 +1142,7 @@ " image_uri: str,\n", " args: List[str],\n", " location: str = \"us-central1\",\n", - " api_endpoint: str = \"us-central1-aiplatform.googleapis.com\"\n", + " api_endpoint: str = \"us-central1-aiplatform.googleapis.com\",\n", ") -> None:\n", " \"\"\"Creates a hyperparameter tuning job using a custom container.\n", "\n", @@ -1197,8 +1215,8 @@ "\n", " # Create job\n", " response = client.create_hyperparameter_tuning_job(\n", - " parent=parent,\n", - " hyperparameter_tuning_job=hyperparameter_tuning_job)\n", + " parent=parent, hyperparameter_tuning_job=hyperparameter_tuning_job\n", + " )\n", " job_id = response.name.split(\"/\")[-1]\n", " print(\"Job ID:\", job_id)\n", " print(\"Job config:\", response)\n", @@ -1242,7 +1260,8 @@ " image_uri=f\"gcr.io/{PROJECT_ID}/{HPTUNING_TRAINING_CONTAINER}:latest\",\n", " args=args,\n", " location=REGION,\n", - " api_endpoint=f\"{REGION}-aiplatform.googleapis.com\")" + " api_endpoint=f\"{REGION}-aiplatform.googleapis.com\",\n", + ")" ] }, { @@ -1292,7 +1311,8 @@ " name = client.hyperparameter_tuning_job_path(\n", " project=project,\n", " location=location,\n", - " hyperparameter_tuning_job=hyperparameter_tuning_job_id)\n", + " hyperparameter_tuning_job=hyperparameter_tuning_job_id,\n", + " )\n", " response = client.get_hyperparameter_tuning_job(name=name)\n", " return response" ] }, { @@ -1313,7 +1333,8 @@ " location=REGION,\n", " api_endpoint=f\"{REGION}-aiplatform.googleapis.com\")\n", " if response.state.name == 'JOB_STATE_SUCCEEDED':\n", - " print(\"Job succeeded.\\nJob Time:\", response.update_time - response.create_time)\n", + " print(\"Job succeeded.\\n\"\n", + " \"Job Time:\", response.update_time - response.create_time)\n", " trials = response.trials\n", " print(\"Trials:\", trials)\n", " break\n", @@ -1348,8 +1369,8 @@ "if trials:\n", " # Dict mapping from metric names to the best metric values seen so far\n", " best_objective_values = dict.fromkeys(\n", - " [metric.metric_id for metric in trials[0].final_measurement.metrics],\n", - " -np.inf)\n", + " [metric.metric_id for metric in trials[0].final_measurement.metrics], -np.inf\n", + " )\n", " # Dict mapping from metric names to a list of the best combination(s) of\n", " # hyperparameter(s). 
Each combination is a dict mapping from hyperparameter\n", " # names to their values.\n", @@ -1358,12 +1379,13 @@ " # `final_measurement` and `parameters` are `RepeatedComposite` objects.\n", " # Reference the structure above to extract the value of your interest.\n", " for metric in trial.final_measurement.metrics:\n", - " params = {\n", - " param.parameter_id: param.value for param in trial.parameters}\n", + " params = {param.parameter_id: param.value for param in trial.parameters}\n", " if metric.value > best_objective_values[metric.metric_id]:\n", " best_params[metric.metric_id] = [params]\n", " elif metric.value == best_objective_values[metric.metric_id]:\n", - " best_params[param.parameter_id].append(params) # Handle cases where multiple hyperparameter values lead to the same performance.\n", + " best_params[param.parameter_id].append(\n", + " params\n", + " ) # Handle cases where multiple hyperparameter values lead to the same performance.\n", " print(\"Best hyperparameter value(s):\")\n", " for metric, params in best_params.items():\n", " print(f\"Metric={metric}: {sorted(params)}\")\n", @@ -1443,7 +1465,9 @@ }, "outputs": [], "source": [ - "PREDICTION_CONTAINER = \"prediction-custom-container\" # @param {type:\"string\"} Name of the container image." + "PREDICTION_CONTAINER = (\n", + " \"prediction-custom-container\" # @param {type:\"string\"} Name of the container image.\n", + ")" ] }, { @@ -1475,7 +1499,7 @@ " machineType: 'E2_HIGHCPU_8'\"\"\".format(\n", " PROJECT_ID=PROJECT_ID,\n", " PREDICTION_CONTAINER=PREDICTION_CONTAINER,\n", - " ARTIFACTS_DIR=ARTIFACTS_DIR\n", + " ARTIFACTS_DIR=ARTIFACTS_DIR,\n", ")\n", "\n", "with open(\"cloudbuild.yaml\", \"w\") as fp:\n", @@ -1592,8 +1616,12 @@ }, "outputs": [], "source": [ - "RUN_HYPERPARAMETER_TUNING = False # Execute regular training instead of hyperparameter tuning.\n", - "TRAIN_WITH_BEST_HYPERPARAMETERS = True # @param {type:\"bool\"} Whether to use learned hyperparameters in training." + "RUN_HYPERPARAMETER_TUNING = (\n", + " False # Execute regular training instead of hyperparameter tuning.\n", + ")\n", + "TRAIN_WITH_BEST_HYPERPARAMETERS = (\n", + " True # @param {type:\"bool\"} Whether to use learned hyperparameters in training.\n", + ")" ] }, { @@ -1633,10 +1661,12 @@ "job = aiplatform.CustomContainerTrainingJob(\n", " display_name=\"train-movielens\",\n", " container_uri=f\"gcr.io/{PROJECT_ID}/{HPTUNING_TRAINING_CONTAINER}:latest\",\n", - " command=[\"python3\", \"-m\", \"src.training.task\"] + args, # Pass in training arguments, including hyperparameters.\n", + " command=[\"python3\", \"-m\", \"src.training.task\"]\n", + " + args, # Pass in training arguments, including hyperparameters.\n", " model_serving_container_image_uri=f\"gcr.io/{PROJECT_ID}/{PREDICTION_CONTAINER}:latest\",\n", " model_serving_container_predict_route=\"/predict\",\n", - " model_serving_container_health_route=\"/health\")\n", + " model_serving_container_health_route=\"/health\",\n", + ")\n", "\n", "print(\"Training Spec:\", job._managed_model)\n", "\n", @@ -1645,7 +1675,8 @@ " replica_count=1,\n", " machine_type=\"n1-standard-4\",\n", " accelerator_type=\"ACCELERATOR_TYPE_UNSPECIFIED\",\n", - " accelerator_count=0)" + " accelerator_count=0,\n", + ")" ] }, { @@ -1784,7 +1815,7 @@ "! gcloud ai models delete $model.name --quiet\n", "\n", "# Delete Cloud Storage objects that were created\n", - "! gsutil -m rm -r $ARTIFACTS_DIR" + "! 
gcloud storage rm --recursive $ARTIFACTS_DIR" ] } ], diff --git a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb index 747b149c0..49d0ff60d 100644 --- a/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_image_classification_batch.ipynb @@ -421,7 +421,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" + "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ] }, { @@ -441,7 +441,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { @@ -478,9 +478,7 @@ "import time\n", "\n", "from google.cloud.aiplatform import gapic as aip\n", - "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", - "from google.protobuf.struct_pb2 import Struct, Value" + "from google.protobuf import json_format" ] }, { @@ -886,11 +884,11 @@ "else:\n", " FILE = IMPORT_FILE\n", "\n", - "count = ! gsutil cat $FILE | wc -l\n", + "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gsutil cat $FILE | head" + "! gcloud storage cat $FILE | head" ] }, { @@ -1329,7 +1327,7 @@ }, "outputs": [], "source": [ - "test_items = !gsutil cat $IMPORT_FILE | head -n2\n", + "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", "if len(str(test_items[0]).split(\",\")) == 3:\n", " _, test_item_1, test_label_1 = str(test_items[0]).split(\",\")\n", " _, test_item_2, test_label_2 = str(test_items[1]).split(\",\")\n", @@ -1363,8 +1361,8 @@ "file_1 = test_item_1.split(\"/\")[-1]\n", "file_2 = test_item_2.split(\"/\")[-1]\n", "\n", - "! gsutil cp $test_item_1 $BUCKET_NAME/$file_1\n", - "! gsutil cp $test_item_2 $BUCKET_NAME/$file_2\n", + "! gcloud storage cp $test_item_1 $BUCKET_NAME/$file_1\n", + "! gcloud storage cp $test_item_2 $BUCKET_NAME/$file_2\n", "\n", "test_item_1 = BUCKET_NAME + \"/\" + file_1\n", "test_item_2 = BUCKET_NAME + \"/\" + file_2" @@ -1408,7 +1406,7 @@ " f.write(json.dumps(data) + \"\\n\")\n", "\n", "print(gcs_input_uri)\n", - "! gsutil cat $gcs_input_uri" + "! gcloud storage cat $gcs_input_uri" ] }, { @@ -1692,8 +1690,8 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gsutil ls $gcs_out_dir\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", @@ -1711,9 +1709,9 @@ " raise Exception(\"Batch Job Failed\")\n", " else:\n", " folder = get_latest_predictions(predictions)\n", - " ! gsutil ls $folder/prediction*.jsonl\n", + " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n", - " ! gsutil cat $folder/prediction*.jsonl\n", + " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n", " time.sleep(60)" ] @@ -1808,7 +1806,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" + " ! 
gcloud storage rm --recursive $BUCKET_NAME" ] } ], diff --git a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb index 96c79f565..88daaf17f 100644 --- a/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_image_object_detection_batch.ipynb @@ -421,7 +421,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" + "! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ] }, { @@ -441,7 +441,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { @@ -478,9 +478,7 @@ "import time\n", "\n", "from google.cloud.aiplatform import gapic as aip\n", - "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", - "from google.protobuf.struct_pb2 import Struct, Value" + "from google.protobuf import json_format" ] }, { @@ -887,11 +885,11 @@ "else:\n", " FILE = IMPORT_FILE\n", "\n", - "count = ! gsutil cat $FILE | wc -l\n", + "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gsutil cat $FILE | head" + "! gcloud storage cat $FILE | head" ] }, { @@ -1333,7 +1331,7 @@ }, "outputs": [], "source": [ - "test_items = !gsutil cat $IMPORT_FILE | head -n2\n", + "test_items = !gcloud storage cat $IMPORT_FILE | head -n2\n", "cols_1 = str(test_items[0]).split(\",\")\n", "cols_2 = str(test_items[1]).split(\",\")\n", "if len(cols_1) == 11:\n", @@ -1373,8 +1371,8 @@ "file_1 = test_item_1.split(\"/\")[-1]\n", "file_2 = test_item_2.split(\"/\")[-1]\n", "\n", - "! gsutil cp $test_item_1 $BUCKET_NAME/$file_1\n", - "! gsutil cp $test_item_2 $BUCKET_NAME/$file_2\n", + "! gcloud storage cp $test_item_1 $BUCKET_NAME/$file_1\n", + "! gcloud storage cp $test_item_2 $BUCKET_NAME/$file_2\n", "\n", "test_item_1 = BUCKET_NAME + \"/\" + file_1\n", "test_item_2 = BUCKET_NAME + \"/\" + file_2" @@ -1418,7 +1416,7 @@ " f.write(json.dumps(data) + \"\\n\")\n", "\n", "print(gcs_input_uri)\n", - "! gsutil cat $gcs_input_uri" + "! gcloud storage cat $gcs_input_uri" ] }, { @@ -1704,8 +1702,8 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gsutil ls $gcs_out_dir\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", @@ -1723,9 +1721,9 @@ " raise Exception(\"Batch Job Failed\")\n", " else:\n", " folder = get_latest_predictions(predictions)\n", - " ! gsutil ls $folder/prediction*.jsonl\n", + " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n", - " ! gsutil cat $folder/prediction*.jsonl\n", + " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n", " time.sleep(60)" ] @@ -1820,7 +1818,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" + " ! 
gcloud storage rm --recursive $BUCKET_NAME" ] } ], diff --git a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb index b8e0039f0..58d7fd483 100644 --- a/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_tabular_classification_online_explain.ipynb @@ -397,7 +397,6 @@ "\n", "import google.cloud.aiplatform_v1beta1 as aip\n", "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", "from google.protobuf.struct_pb2 import Struct, Value" ] }, @@ -735,13 +734,13 @@ }, "outputs": [], "source": [ - "count = ! gsutil cat $IMPORT_FILE | wc -l\n", + "count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gsutil cat $IMPORT_FILE | head\n", + "! gcloud storage cat $IMPORT_FILE | head\n", "\n", - "heading = ! gsutil cat $IMPORT_FILE | head -n1\n", + "heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n", "print(\"Label Column Name\", label_column)\n", "if label_column is None:\n", @@ -1820,7 +1819,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], diff --git a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb index 6c9fec862..1bb4543ba 100644 --- a/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb +++ b/notebooks/community/gapic/automl/showcase_automl_text_entity_extraction_batch.ipynb @@ -421,7 +421,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION $BUCKET_NAME" + "! gcloud storage buckets create --location $REGION $BUCKET_NAME" ] }, { @@ -441,7 +441,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al $BUCKET_NAME" + "! gcloud storage ls --all-versions --long $BUCKET_NAME" ] }, { @@ -478,9 +478,7 @@ "import time\n", "\n", "from google.cloud.aiplatform import gapic as aip\n", - "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", - "from google.protobuf.struct_pb2 import Struct, Value" + "from google.protobuf import json_format" ] }, { @@ -888,11 +886,11 @@ "else:\n", " FILE = IMPORT_FILE\n", "\n", - "count = ! gsutil cat $FILE | wc -l\n", + "count = ! gcloud storage cat $FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n", "\n", "print(\"First 10 rows\")\n", - "! gsutil cat $FILE | head" + "! gcloud storage cat $FILE | head" ] }, { @@ -1377,7 +1375,7 @@ " f.write(json.dumps(data) + \"\\n\")\n", "\n", "print(gcs_input_uri)\n", - "! gsutil cat $gcs_input_uri" + "! 
gcloud storage cat $gcs_input_uri" ] }, { @@ -1652,8 +1650,8 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gsutil ls $gcs_out_dir\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", @@ -1671,9 +1669,9 @@ " raise Exception(\"Batch Job Failed\")\n", " else:\n", " folder = get_latest_predictions(predictions)\n", - " ! gsutil ls $folder/prediction*.jsonl\n", + " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n", - " ! gsutil cat $folder/prediction*.jsonl\n", + " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n", " time.sleep(60)" ] @@ -1768,7 +1766,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r $BUCKET_NAME" + " ! gcloud storage rm --recursive $BUCKET_NAME" ] } ], diff --git a/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb b/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb index 809951b31..8f9cd9418 100644 --- a/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb +++ b/notebooks/community/migration/UJ4 AutoML for structured data with Vertex AI Regression.ipynb @@ -325,7 +325,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION gs://$BUCKET_NAME" + "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ] }, { @@ -345,7 +345,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al gs://$BUCKET_NAME" + "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ] }, { @@ -385,7 +385,6 @@ "\n", "from google.cloud.aiplatform import gapic as aip\n", "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", "from google.protobuf.struct_pb2 import Struct, Value" ] }, @@ -545,7 +544,7 @@ }, "outputs": [], "source": [ - "! gsutil cat $IMPORT_FILE | head -n 10" + "! gcloud storage cat $IMPORT_FILE | head -n 10" ] }, { @@ -1493,14 +1492,14 @@ }, "outputs": [], "source": [ - "! gsutil cat $IMPORT_FILE | head -n 1 > tmp.csv\n", - "! gsutil cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n", + "! gcloud storage cat $IMPORT_FILE | head -n 1 > tmp.csv\n", + "! gcloud storage cat $IMPORT_FILE | tail -n 10 >> tmp.csv\n", "\n", "! cut -d, -f1-16 tmp.csv > batch.csv\n", "\n", "gcs_input_uri = \"gs://\" + BUCKET_NAME + \"/test.csv\"\n", "\n", - "! gsutil cp batch.csv $gcs_input_uri" + "! gcloud storage cp batch.csv $gcs_input_uri" ] }, { @@ -1511,7 +1510,7 @@ }, "outputs": [], "source": [ - "! gsutil cat $gcs_input_uri" + "! gcloud storage cat $gcs_input_uri" ] }, { @@ -1818,8 +1817,8 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gsutil ls $gcs_out_dir\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", @@ -1839,9 +1838,9 @@ " folder = get_latest_predictions(\n", " response.output_config.gcs_destination.output_uri_prefix\n", " )\n", - " ! gsutil ls $folder/prediction*\n", + " ! 
gcloud storage ls $folder/prediction*\n", "\n", - " ! gsutil cat $folder/prediction*\n", + " ! gcloud storage cat $folder/prediction*\n", " break\n", " time.sleep(60)" ] @@ -2452,7 +2451,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r gs://$BUCKET_NAME" + " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ] } ], @@ -2469,7 +2468,7 @@ "call:migration", "response:migration" ], - "name": "UJ4 unified AutoML for structured data with Vertex AI Regression.ipynb", + "name": "UJ4 AutoML for structured data with Vertex AI Regression.ipynb", "toc_visible": true }, "kernelspec": { diff --git a/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb b/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb index 5474e8ec7..1cd5e865e 100644 --- a/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb +++ b/notebooks/community/migration/UJ5 AutoML for vision with Vertex AI Video Classification.ipynb @@ -325,7 +325,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION gs://$BUCKET_NAME" + "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ] }, { @@ -345,7 +345,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al gs://$BUCKET_NAME" + "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ] }, { @@ -385,7 +385,6 @@ "\n", "from google.cloud.aiplatform import gapic as aip\n", "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", "from google.protobuf.struct_pb2 import Struct, Value" ] }, @@ -542,7 +541,7 @@ }, "outputs": [], "source": [ - "! gsutil cat $IMPORT_FILE | head -n 10" + "! gcloud storage cat $IMPORT_FILE | head -n 10" ] }, { @@ -1428,7 +1427,7 @@ }, "outputs": [], "source": [ - "test_items = ! gsutil cat $IMPORT_FILE | head -n2\n", + "test_items = ! gcloud storage cat $IMPORT_FILE | head -n2\n", "\n", "test_item_1, test_label_1 = test_items[0].split(\",\")[1], test_items[0].split(\",\")[2]\n", "test_item_2, test_label_2 = test_items[0].split(\",\")[1], test_items[0].split(\",\")[2]\n", @@ -1436,8 +1435,8 @@ "file_1 = test_item_1.split(\"/\")[-1]\n", "file_2 = test_item_2.split(\"/\")[-1]\n", "\n", - "! gsutil cp $test_item_1 gs://$BUCKET_NAME/$file_1\n", - "! gsutil cp $test_item_2 gs://$BUCKET_NAME/$file_2\n", + "! gcloud storage cp $test_item_1 gs://$BUCKET_NAME/$file_1\n", + "! gcloud storage cp $test_item_2 gs://$BUCKET_NAME/$file_2\n", "\n", "test_item_1 = \"gs://\" + BUCKET_NAME + \"/\" + file_1\n", "test_item_2 = \"gs://\" + BUCKET_NAME + \"/\" + file_2\n", @@ -1478,7 +1477,7 @@ " data = {\"content\": test_item_2, \"mime_type\": \"image/jpeg\"}\n", " f.write(json.dumps(data) + \"\\n\")\n", "\n", - "!gsutil cat $gcs_input_uri" + "!gcloud storage cat $gcs_input_uri" ] }, { @@ -1799,8 +1798,8 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gsutil ls $gcs_out_dir\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", @@ -1820,9 +1819,9 @@ " folder = get_latest_predictions(\n", " response.output_config.gcs_destination.output_uri_prefix\n", " )\n", - " ! gsutil ls $folder/prediction*.jsonl\n", + " ! 
gcloud storage ls $folder/prediction*.jsonl\n", "\n", - " ! gsutil cat $folder/prediction*.jsonl\n", + " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n", " time.sleep(60)" ] @@ -2172,7 +2171,7 @@ "\n", "import tensorflow as tf\n", "\n", - "single_file = ! gsutil cat $IMPORT_FILE | head -n 1\n", + "single_file = ! gcloud storage cat $IMPORT_FILE | head -n 1\n", "single_file = single_file[0].split(\",\")[1]\n", "\n", "with tf.io.gfile.GFile(single_file, \"rb\") as f:\n", @@ -2443,7 +2442,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r gs://$BUCKET_NAME" + " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ] } ], @@ -2467,7 +2466,7 @@ "_RXG0aaSV2HS", "EDuJAyzbV2HW" ], - "name": "UJ5 unified AutoML for vision with Vertex AI Video Classification.ipynb", + "name": "UJ5 AutoML for vision with Vertex AI Video Classification.ipynb", "toc_visible": true }, "kernelspec": { diff --git a/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb b/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb index df9e2fdd0..9f0e3aabf 100644 --- a/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb +++ b/notebooks/community/migration/UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb @@ -325,7 +325,7 @@ }, "outputs": [], "source": [ - "! gsutil mb -l $REGION gs://$BUCKET_NAME" + "! gcloud storage buckets create --location $REGION gs://$BUCKET_NAME" ] }, { @@ -345,7 +345,7 @@ }, "outputs": [], "source": [ - "! gsutil ls -al gs://$BUCKET_NAME" + "! gcloud storage ls --all-versions --long gs://$BUCKET_NAME" ] }, { @@ -379,16 +379,13 @@ }, "outputs": [], "source": [ - "import base64\n", "import json\n", "import os\n", "import sys\n", "import time\n", "\n", "from google.cloud.aiplatform import gapic as aip\n", - "from google.protobuf import json_format\n", - "from google.protobuf.json_format import MessageToJson, ParseDict\n", - "from google.protobuf.struct_pb2 import Struct, Value" + "from google.protobuf import json_format" ] }, { @@ -543,7 +540,7 @@ }, "outputs": [], "source": [ - "! gsutil cat $IMPORT_FILE | head -n 10" + "! gcloud storage cat $IMPORT_FILE | head -n 10" ] }, { @@ -1552,7 +1549,7 @@ }, "outputs": [], "source": [ - "test_item = ! gsutil cat $IMPORT_FILE | head -n1\n", + "test_item = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "test_item, test_label = str(test_item[0]).split(\",\")\n", "\n", "print(test_item, test_label)" @@ -1614,8 +1611,8 @@ }, "outputs": [], "source": [ - "! gsutil cat $gcs_input_uri\n", - "! gsutil cat $test_item_uri" + "! gcloud storage cat $gcs_input_uri\n", + "! gcloud storage cat $test_item_uri" ] }, { @@ -1916,8 +1913,8 @@ "outputs": [], "source": [ "def get_latest_predictions(gcs_out_dir):\n", - " \"\"\" Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", - " folders = !gsutil ls $gcs_out_dir\n", + " \"\"\"Get the latest prediction subfolder using the timestamp in the subfolder name\"\"\"\n", + " folders = !gcloud storage ls $gcs_out_dir\n", " latest = \"\"\n", " for folder in folders:\n", " subfolder = folder.split(\"/\")[-2]\n", @@ -1937,9 +1934,9 @@ " folder = get_latest_predictions(\n", " response.output_config.gcs_destination.output_uri_prefix\n", " )\n", - " ! gsutil ls $folder/prediction*.jsonl\n", + " ! gcloud storage ls $folder/prediction*.jsonl\n", "\n", - " ! 
gsutil cat $folder/prediction*.jsonl\n", + " ! gcloud storage cat $folder/prediction*.jsonl\n", " break\n", " time.sleep(60)" ] @@ -2260,7 +2257,7 @@ }, "outputs": [], "source": [ - "test_item = ! gsutil cat $IMPORT_FILE | head -n1\n", + "test_item = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "test_item, test_label = str(test_item[0]).split(\",\")\n", "\n", "instances_list = [{\"content\": test_item}]\n", @@ -2510,7 +2507,7 @@ " print(e)\n", "\n", "if delete_bucket and \"BUCKET_NAME\" in globals():\n", - " ! gsutil rm -r gs://$BUCKET_NAME" + " ! gcloud storage rm --recursive gs://$BUCKET_NAME" ] } ], @@ -2522,7 +2519,7 @@ "hIHTX-pkJjkO", "4x_t-MWnJjkQ" ], - "name": "UJ6 unified AutoML for natural language with Vertex AI Text Classification.ipynb", + "name": "UJ6 AutoML for natural language with Vertex AI Text Classification.ipynb", "toc_visible": true }, "kernelspec": {