@@ -416,8 +416,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION $BUCKET_NAME"
]
"! gcloud storage buckets create --location $REGION $BUCKET_NAME" ]
},
{
"cell_type": "markdown",
@@ -436,8 +435,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_NAME"
]
"! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
},
{
"cell_type": "markdown",
@@ -749,14 +747,11 @@
},
"outputs": [],
"source": [
"count = ! gsutil cat $IMPORT_FILE | wc -l\n",
"print(\"Number of Examples\", int(count[0]))\n",
"count = ! gcloud storage cat $IMPORT_FILE | wc -l\n", "print(\"Number of Examples\", int(count[0]))\n",
"\n",
"print(\"First 10 rows\")\n",
"! gsutil cat $IMPORT_FILE | head\n",
"\n",
"heading = ! gsutil cat $IMPORT_FILE | head -n1\n",
"label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
"! gcloud storage cat $IMPORT_FILE | head\n", "\n",
"heading = ! gcloud storage cat $IMPORT_FILE | head -n1\n", "label_column = str(heading).split(\",\")[-1].split(\"'\")[0]\n",
"print(\"Label Column Name\", label_column)\n",
"if label_column is None:\n",
" raise Exception(\"label column missing\")"
@@ -1283,15 +1278,12 @@
"source": [
"print(\"Model Package:\", model_package)\n",
"print(\"Contents:\")\n",
"! gsutil ls $model_package\n",
"\n",
"! gcloud storage ls $model_package\n", "\n",
"print(\"\\nTF Saved Model\")\n",
"path = model_package + \"/predict\"\n",
"files = ! gsutil ls $path\n",
"saved_dir = files[1]\n",
"files = ! gcloud storage ls $path\n", "saved_dir = files[1]\n",
"print(saved_dir)\n",
"! gsutil ls $saved_dir"
]
"! gcloud storage ls $saved_dir" ]
},
{
"cell_type": "markdown",
@@ -1312,8 +1304,7 @@
},
"outputs": [],
"source": [
"! gsutil cp -r $model_package ."
]
"! gcloud storage cp --recursive $model_package ." ]
},
{
"cell_type": "markdown",
@@ -1599,8 +1590,7 @@
" print(e)\n",
"\n",
"if delete_bucket and \"BUCKET_NAME\" in globals():\n",
" ! gsutil rm -r $BUCKET_NAME"
]
" ! gcloud storage rm --recursive $BUCKET_NAME" ]
}
],
"metadata": {
@@ -475,8 +475,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION $BUCKET_URI"
]
"! gcloud storage buckets create --location $REGION $BUCKET_URI" ]
},
{
"cell_type": "markdown",
@@ -495,8 +494,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_URI"
]
"! gcloud storage ls --all-versions --long $BUCKET_URI" ]
},
{
"cell_type": "markdown",
@@ -905,8 +903,7 @@
"! rm -f custom.tar custom.tar.gz\n",
"! tar cvf custom.tar custom\n",
"! gzip custom.tar\n",
"! gsutil cp custom.tar.gz $BUCKET_URI/trainer_boston.tar.gz"
]
"! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_boston.tar.gz" ]
},
{
"cell_type": "markdown",
@@ -1446,8 +1443,7 @@
"source": [
"BEST_MODEL_DIR = MODEL_DIR + \"/\" + best[0] + \"/model\"\n",
"\n",
"! gsutil ls {BEST_MODEL_DIR}"
]
"! gcloud storage ls {BEST_MODEL_DIR}" ]
},
{
"cell_type": "markdown",
@@ -1705,8 +1701,7 @@
"delete_bucket = False\n",
"\n",
"if os.getenv(\"IS_TESTING\"):\n",
" ! gsutil rm -r $BUCKET_URI"
]
" ! gcloud storage rm --recursive $BUCKET_URI" ]
}
],
"metadata": {
@@ -436,8 +436,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION $BUCKET_URI"
]
"! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
},
{
"cell_type": "markdown",
@@ -456,8 +455,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_URI"
]
"! gcloud storage ls --all-versions --long $BUCKET_URI" ]
},
{
"cell_type": "markdown",
@@ -524,10 +522,8 @@
},
"outputs": [],
"source": [
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectCreator $BUCKET_URI\n",
"\n",
"! gsutil iam ch serviceAccount:{SERVICE_ACCOUNT}:roles/storage.objectViewer $BUCKET_URI"
]
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectCreator\n", "\n",
"! gcloud storage buckets add-iam-policy-binding $BUCKET_URI --member=serviceAccount:{SERVICE_ACCOUNT} --role=roles/storage.objectViewer" ]
},
{
"cell_type": "markdown",
@@ -743,7 +739,7 @@
"outputs": [],
"source": [
"GCS_WC_PY = BUCKET_URI + \"/wordcount.py\"\n",
"! gsutil cp wordcount.py $GCS_WC_PY\n",
"! gcloud storage cp wordcount.py $GCS_WC_PY\n",
"\n",
"GCS_WC_OUT = BUCKET_URI + \"/wc_out/\"\n",
"GCS_WC_IN = \"gs://dataproc-datasets-us-central1/shakespeare/all-lines.txt\""
@@ -823,8 +819,7 @@
"\n",
"pipeline.run()\n",
"\n",
"! gsutil cat {GCS_WC_OUT}* | head -n10\n",
"\n",
"! gcloud storage cat {GCS_WC_OUT}* | head -n10\n", "\n",
"! rm -f pipeline.json wordcount.py"
]
},
@@ -896,14 +891,11 @@
" )\n",
" if tf.io.gfile.exists(EXECUTE_OUTPUT):\n",
" print(EXECUTE_OUTPUT, \"EXECUTE_OUTPUT\")\n",
" ! gsutil cat $EXECUTE_OUTPUT\n",
" return EXECUTE_OUTPUT\n",
" ! gcloud storage cat $EXECUTE_OUTPUT\n", " return EXECUTE_OUTPUT\n",
" elif tf.io.gfile.exists(GCP_RESOURCES):\n",
" ! gsutil cat $GCP_RESOURCES\n",
" return GCP_RESOURCES\n",
" ! gcloud storage cat $GCP_RESOURCES\n", " return GCP_RESOURCES\n",
" elif tf.io.gfile.exists(EVAL_METRICS):\n",
" ! gsutil cat $EVAL_METRICS\n",
" return EVAL_METRICS\n",
" ! gcloud storage cat $EVAL_METRICS\n", " return EVAL_METRICS\n",
"\n",
" return None\n",
"\n",
@@ -1213,8 +1205,7 @@
},
"outputs": [],
"source": [
"! gsutil -m cp babynames/*.txt $BUCKET_URI/babynames"
]
"! gcloud storage cp babynames/*.txt $BUCKET_URI/babynames" ]
},
{
"cell_type": "markdown",
@@ -1268,8 +1259,7 @@
},
"outputs": [],
"source": [
"! gsutil cp top_names.sql $BUCKET_URI"
]
"! gcloud storage cp top_names.sql $BUCKET_URI" ]
},
{
"cell_type": "markdown",
@@ -1350,8 +1340,7 @@
"\n",
"pipeline.run()\n",
"\n",
"! gsutil cat $OUTPUT_LOCATION/*.csv"
]
"! gcloud storage cat $OUTPUT_LOCATION/*.csv" ]
},
{
"cell_type": "markdown",
@@ -1486,8 +1475,7 @@
"outputs": [],
"source": [
"GCS_WC_R = BUCKET_URI + \"/wordcount.R\"\n",
"! gsutil cp wordcount.R $GCS_WC_R\n",
"\n",
"! gcloud storage cp wordcount.R $GCS_WC_R\n", "\n",
"GCS_WC_R_OUT = BUCKET_URI + \"/wc_r_out\"\n",
"GCS_WC_R_IN = \"gs://dataproc-datasets-us-central1/shakespeare/all-lines.txt\""
]
@@ -1564,8 +1552,7 @@
"\n",
"# Print the first 1KB of the CSV output:\n",
"\n",
"! gsutil cat -r 0-1024 $GCS_WC_R_OUT/*.csv"
]
"! gcloud storage cat --range=0-1024 $GCS_WC_R_OUT/*.csv" ]
},
{
"cell_type": "markdown",
@@ -1667,8 +1654,7 @@
"\n",
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
" # Delete the Cloud storage bucket\n",
" ! gsutil rm -r $BUCKET_URI"
]
" ! gcloud storage rm --recursive $BUCKET_URI" ]
}
],
"metadata": {
@@ -449,8 +449,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION $BUCKET_URI"
]
"! gcloud storage buckets create --location=$REGION $BUCKET_URI" ]
},
{
"cell_type": "markdown",
@@ -469,8 +468,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_URI"
]
"! gcloud storage ls --all-versions --long $BUCKET_URI" ]
},
{
"cell_type": "markdown",
@@ -1376,14 +1374,11 @@
" + \"/evaluation_metrics\"\n",
" )\n",
" if tf.io.gfile.exists(EXECUTE_OUTPUT):\n",
" ! gsutil cat $EXECUTE_OUTPUT\n",
" return EXECUTE_OUTPUT\n",
" ! gcloud storage cat $EXECUTE_OUTPUT\n", " return EXECUTE_OUTPUT\n",
" elif tf.io.gfile.exists(GCP_RESOURCES):\n",
" ! gsutil cat $GCP_RESOURCES\n",
" return GCP_RESOURCES\n",
" ! gcloud storage cat $GCP_RESOURCES\n", " return GCP_RESOURCES\n",
" elif tf.io.gfile.exists(EVAL_METRICS):\n",
" ! gsutil cat $EVAL_METRICS\n",
" return EVAL_METRICS\n",
" ! gcloud storage cat $EVAL_METRICS\n", " return EVAL_METRICS\n",
"\n",
" return None\n",
"\n",
@@ -1392,8 +1387,7 @@
" print(\"endpoint-create\")\n",
" artifacts = print_pipeline_output(pipeline, \"endpoint-create\")\n",
" print(\"\\n\\n\")\n",
" output = !gsutil cat $artifacts\n",
" output = json.loads(output[0])\n",
" output = !gcloud storage cat $artifacts\n", " output = json.loads(output[0])\n",
" endpoint_id = output[\"artifacts\"][\"endpoint\"][\"artifacts\"][0][\"metadata\"][\n",
" \"resourceName\"\n",
" ]\n",
@@ -1659,8 +1653,7 @@
" print(e)\n",
"\n",
"if delete_bucket or os.getenv(\"IS_TESTING\"):\n",
" ! gsutil rm -rf {BUCKET_URI}"
]
" ! gcloud storage rm --recursive --continue-on-error {BUCKET_URI}" ]
}
],
"metadata": {
@@ -473,8 +473,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l $REGION $BUCKET_NAME"
]
"! gcloud storage buckets create --location=$REGION $BUCKET_NAME" ]
},
{
"cell_type": "markdown",
@@ -493,8 +492,7 @@
},
"outputs": [],
"source": [
"! gsutil ls -al $BUCKET_NAME"
]
"! gcloud storage ls --all-versions --long $BUCKET_NAME" ]
},
{
"cell_type": "markdown",
@@ -915,8 +913,7 @@
"! rm -f custom.tar custom.tar.gz\n",
"! tar cvf custom.tar custom\n",
"! gzip custom.tar\n",
"! gsutil cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz"
]
"! gcloud storage cp custom.tar.gz $BUCKET_NAME/trainer_cifar10.tar.gz" ]
},
{
"cell_type": "markdown",
@@ -1347,9 +1344,7 @@
},
"outputs": [],
"source": [
"! gsutil cp tmp1.jpg $BUCKET_NAME/tmp1.jpg\n",
"! gsutil cp tmp2.jpg $BUCKET_NAME/tmp2.jpg\n",
"\n",
"! gcloud storage cp tmp1.jpg $BUCKET_NAME/tmp1.jpg\n", "! gcloud storage cp tmp2.jpg $BUCKET_NAME/tmp2.jpg\n", "\n",
"test_item_1 = BUCKET_NAME + \"/tmp1.jpg\"\n",
"test_item_2 = BUCKET_NAME + \"/tmp2.jpg\""
]
@@ -1601,8 +1596,7 @@
" print(e)\n",
"\n",
" if \"BUCKET_NAME\" in globals():\n",
" ! gsutil rm -r $BUCKET_NAME"
]
" ! gcloud storage rm --recursive $BUCKET_NAME" ]
}
],
"metadata": {
@@ -291,8 +291,7 @@
},
"outputs": [],
"source": [
"! gsutil mb -l {LOCATION} -p {PROJECT_ID} {BUCKET_URI}"
]
"! gcloud storage buckets create --location={LOCATION} --project={PROJECT_ID} {BUCKET_URI}" ]
},
{
"cell_type": "markdown",
@@ -708,8 +707,7 @@
"! rm -f custom.tar custom.tar.gz\n",
"! tar cvf custom.tar custom\n",
"! gzip custom.tar\n",
"! gsutil cp custom.tar.gz $BUCKET_URI/trainer_boston.tar.gz"
]
"! gcloud storage cp custom.tar.gz $BUCKET_URI/trainer_boston.tar.gz" ]
},
{
"cell_type": "markdown",
@@ -1518,8 +1516,7 @@
"# Delete the Cloud Storage bucket\n",
"delete_bucket = False # Set True for deletion\n",
"if delete_bucket:\n",
" ! gsutil rm -r $BUCKET_URI"
]
" ! gcloud storage rm --recursive $BUCKET_URI" ]
}
],
"metadata": {