6 changes: 3 additions & 3 deletions client/examples/teradata/bteq/provision.sh
@@ -94,12 +94,12 @@ log_exec "Enabling service: bigquerymigration.googleapis.com." \
# Create the GCS bucket for BQMS input/output.
log_exec "Creating bucket: ${BQMS_GCS_BUCKET}." \
"Could not create bucket: ${BQMS_GCS_BUCKET}." \
-gsutil mb -l "${BQMS_GCS_BUCKET_LOCATION}" "gs://${BQMS_GCS_BUCKET}"
+gcloud storage buckets create "gs://${BQMS_GCS_BUCKET}" --location="${BQMS_GCS_BUCKET_LOCATION}"
Collaborator

Curious that the agent put the --location last here, whereas in the later examples it put it before the non-option argument. POSIX says the latter is correct; I think accepting this variant was originally a GNU extension.

Author

No issue; it works both ways. Before submitting the PR for review, our team verified that all the commands work.
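
For reference, gcloud's flag parsing accepts options before or after positional arguments, so both orderings in this PR behave identically; a minimal sketch with a placeholder bucket name and location:

# Both invocations create the same bucket; gcloud accepts flags on either
# side of the positional argument. Strict POSIX utility syntax puts options
# before operands, so the --location-first form is the more portable habit.
gcloud storage buckets create --location="us-central1" "gs://example-bqms-bucket"
gcloud storage buckets create "gs://example-bqms-bucket" --location="us-central1"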


# Ensure the BQMS developer has admin rights to objects in GCS bucket.
log_exec "Granting storage.objectAdmin role to ${BQMS_DEVELOPER_EMAIL} on ${BQMS_GCS_BUCKET} bucket." \
"Could not grant storage.objectAdmin role to ${BQMS_DEVELOPER_EMAIL} on ${BQMS_GCS_BUCKET} bucket." \
-gsutil iam ch "user:${BQMS_DEVELOPER_EMAIL}:objectAdmin" "gs://${BQMS_GCS_BUCKET}"
+gcloud storage buckets add-iam-policy-binding "gs://${BQMS_GCS_BUCKET}" --member="user:${BQMS_DEVELOPER_EMAIL}" --role="roles/storage.objectAdmin"

# Ensure the BQMS developer can view logs in the BQMS project.
log_exec "Granting logging.viewer role to ${BQMS_DEVELOPER_EMAIL}." \
@@ -146,7 +146,7 @@ then
# Ensure the Cloud Run service account has admin rights to objects in GCS bucket.
log_exec "Granting storage.objectAdmin role to ${BQMS_CLOUD_RUN_SERVICE_ACCOUNT} on ${BQMS_GCS_BUCKET}." \
"Could not grant storage.objectAdmin role to ${BQMS_CLOUD_RUN_SERVICE_ACCOUNT} on ${BQMS_GCS_BUCKET}." \
-gsutil iam ch "serviceAccount:${BQMS_CLOUD_RUN_SERVICE_ACCOUNT}:objectAdmin" "gs://${BQMS_GCS_BUCKET}"
+gcloud storage buckets add-iam-policy-binding "gs://${BQMS_GCS_BUCKET}" --member="serviceAccount:${BQMS_CLOUD_RUN_SERVICE_ACCOUNT}" --role="roles/storage.objectAdmin"

# Ensure the Cloud Run service account can create BQMS jobs.
log_exec "Granting bigquerymigration.editor role to ${BQMS_CLOUD_RUN_SERVICE_ACCOUNT}." \
14 changes: 7 additions & 7 deletions client/examples/teradata/bteq/run.sh
@@ -101,8 +101,8 @@ then
# Sync config files and input files to GCS to be processed by Cloud Run job.
log_exec "Syncing ${SCRIPT_DIR} to gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}." \
"Could not sync ${SCRIPT_DIR} to gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}." \
-gsutil ${MULTITHREADED:+ -m} \
-rsync -r -d "${SCRIPT_DIR}" "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}"
+gcloud storage \
+rsync --recursive --delete-unmatched-destination-objects "${SCRIPT_DIR}" "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}"

# Build and export path env vars that the Cloud Run Python tool will use.
export BQMS_INPUT_PATH="gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}/input"
@@ -191,8 +191,8 @@ then
# Sync the translated BQMS output locally so it can be inspected if need be.
log_exec "Syncing gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX} to ${SCRIPT_DIR}." \
"Could not sync gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX} to ${SCRIPT_DIR}." \
-gsutil ${MULTITHREADED:+ -m} \
-rsync -r -d "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}" "${SCRIPT_DIR}"
+gcloud storage \
+rsync --recursive --delete-unmatched-destination-objects "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}" "${SCRIPT_DIR}"
fi
# Execute Python tool locally.
else
@@ -238,15 +238,15 @@ else
mkdir -p "${BQMS_LOCAL_PREPROCESSED_PATH}"
log_exec "Syncing ${BQMS_PREPROCESSED_PATH} to ${BQMS_LOCAL_PREPROCESSED_PATH}." \
"Could not sync ${BQMS_PREPROCESSED_PATH} to ${BQMS_LOCAL_PREPROCESSED_PATH}." \
-gsutil ${MULTITHREADED:+ -m} rsync -r -d "${BQMS_PREPROCESSED_PATH}" \
+gcloud storage rsync --recursive --delete-unmatched-destination-objects "${BQMS_PREPROCESSED_PATH}" \
"${BQMS_LOCAL_PREPROCESSED_PATH}"

# Sync the translated BQMS output locally so it can be inspected if need be.
BQMS_LOCAL_TRANSLATED_PATH="${SCRIPT_DIR}/translated"
mkdir -p "${BQMS_LOCAL_TRANSLATED_PATH}"
log_exec "Syncing ${BQMS_TRANSLATED_PATH} to ${BQMS_LOCAL_TRANSLATED_PATH}." \
"Could not sync ${BQMS_TRANSLATED_PATH} to ${BQMS_LOCAL_TRANSLATED_PATH}." \
-gsutil ${MULTITHREADED:+ -m} rsync -r -d "${BQMS_TRANSLATED_PATH}" \
+gcloud storage rsync --recursive --delete-unmatched-destination-objects "${BQMS_TRANSLATED_PATH}" \
"${BQMS_LOCAL_TRANSLATED_PATH}"
fi
fi
@@ -255,6 +255,6 @@ if [[ -n "${BQMS_CLEANUP_FILES}" ]]
then
log_exec "Cleaning gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}." \
"Could not clean gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}." \
-gsutil ${MULTITHREADED:+ -m} rm -rf "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}"
+gcloud storage rm --recursive --continue-on-error "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}"
fi
log_info "Preprocessing, translation and postprocessing have completed successfully."
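
A side note on the dropped ${MULTITHREADED:+ -m} guard: gsutil needs the -m flag to parallelize transfers, while gcloud storage parallelizes by default, so the conditional has no gcloud equivalent and simply disappears. The guard itself is the standard ${VAR:+word} shell expansion; a minimal sketch of the idiom:

# ${VAR:+word} expands to word only when VAR is set and non-empty.
MULTITHREADED=1
echo gsutil ${MULTITHREADED:+-m} rsync   # prints: gsutil -m rsync
unset MULTITHREADED
echo gsutil ${MULTITHREADED:+-m} rsync   # prints: gsutil rsync
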
6 changes: 3 additions & 3 deletions client/examples/teradata/sql/provision.sh
@@ -94,12 +94,12 @@ log_exec "Enabling service: bigquerymigration.googleapis.com." \
# Create the GCS bucket for BQMS input/output.
log_exec "Creating bucket: ${BQMS_GCS_BUCKET}." \
"Could not create bucket: ${BQMS_GCS_BUCKET}." \
-gsutil mb -l "${BQMS_GCS_BUCKET_LOCATION}" "gs://${BQMS_GCS_BUCKET}"
+gcloud storage buckets create --location "${BQMS_GCS_BUCKET_LOCATION}" "gs://${BQMS_GCS_BUCKET}"

# Ensure the BQMS developer has admin rights to objects in GCS bucket.
log_exec "Granting storage.objectAdmin role to ${BQMS_DEVELOPER_EMAIL} on ${BQMS_GCS_BUCKET} bucket." \
"Could not grant storage.objectAdmin role to ${BQMS_DEVELOPER_EMAIL} on ${BQMS_GCS_BUCKET} bucket." \
-gsutil iam ch "user:${BQMS_DEVELOPER_EMAIL}:objectAdmin" "gs://${BQMS_GCS_BUCKET}"
+gcloud storage buckets add-iam-policy-binding "gs://${BQMS_GCS_BUCKET}" --member="user:${BQMS_DEVELOPER_EMAIL}" --role="roles/storage.objectAdmin"

# Ensure the BQMS developer can view logs in the BQMS project.
log_exec "Granting logging.viewer role to ${BQMS_DEVELOPER_EMAIL}." \
@@ -146,7 +146,7 @@ then
# Ensure the Cloud Run service account has admin rights to objects in GCS bucket.
log_exec "Granting storage.objectAdmin role to ${BQMS_CLOUD_RUN_SERVICE_ACCOUNT} on ${BQMS_GCS_BUCKET}." \
"Could not grant storage.objectAdmin role to ${BQMS_CLOUD_RUN_SERVICE_ACCOUNT} on ${BQMS_GCS_BUCKET}." \
-gsutil iam ch "serviceAccount:${BQMS_CLOUD_RUN_SERVICE_ACCOUNT}:objectAdmin" "gs://${BQMS_GCS_BUCKET}"
+gcloud storage buckets add-iam-policy-binding "gs://${BQMS_GCS_BUCKET}" --member="serviceAccount:${BQMS_CLOUD_RUN_SERVICE_ACCOUNT}" --role="roles/storage.objectAdmin"

# Ensure the Cloud Run service account can create BQMS jobs.
log_exec "Granting bigquerymigration.editor role to ${BQMS_CLOUD_RUN_SERVICE_ACCOUNT}." \
12 changes: 5 additions & 7 deletions client/examples/teradata/sql/run.sh
@@ -101,8 +101,7 @@ then
# Sync config files and input files to GCS to be processed by Cloud Run job.
log_exec "Syncing ${SCRIPT_DIR} to gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}." \
"Could not sync ${SCRIPT_DIR} to gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}." \
-gsutil ${MULTITHREADED:+ -m} \
-rsync -r -d "${SCRIPT_DIR}" "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}"
+gcloud storage rsync --recursive --delete-unmatched-destination-objects "${SCRIPT_DIR}" "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}"

# Build and export path env vars that the Cloud Run Python tool will use.
export BQMS_INPUT_PATH="gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}/input"
@@ -191,8 +190,7 @@ then
# Sync the translated BQMS output locally so it can be inspected if need be.
log_exec "Syncing gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX} to ${SCRIPT_DIR}." \
"Could not sync gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX} to ${SCRIPT_DIR}." \
-gsutil ${MULTITHREADED:+ -m} \
-rsync -r -d "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}" "${SCRIPT_DIR}"
+gcloud storage rsync --recursive --delete-unmatched-destination-objects "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}" "${SCRIPT_DIR}"
fi
# Execute Python tool locally.
else
@@ -238,15 +236,15 @@ else
mkdir -p "${BQMS_LOCAL_PREPROCESSED_PATH}"
log_exec "Syncing ${BQMS_PREPROCESSED_PATH} to ${BQMS_LOCAL_PREPROCESSED_PATH}." \
"Could not sync ${BQMS_PREPROCESSED_PATH} to ${BQMS_LOCAL_PREPROCESSED_PATH}." \
-gsutil ${MULTITHREADED:+ -m} rsync -r -d "${BQMS_PREPROCESSED_PATH}" \
+gcloud storage rsync --recursive --delete-unmatched-destination-objects "${BQMS_PREPROCESSED_PATH}" \
"${BQMS_LOCAL_PREPROCESSED_PATH}"

# Sync the translated BQMS output locally so it can be inspected if need be.
BQMS_LOCAL_TRANSLATED_PATH="${SCRIPT_DIR}/translated"
mkdir -p "${BQMS_LOCAL_TRANSLATED_PATH}"
log_exec "Syncing ${BQMS_TRANSLATED_PATH} to ${BQMS_LOCAL_TRANSLATED_PATH}." \
"Could not sync ${BQMS_TRANSLATED_PATH} to ${BQMS_LOCAL_TRANSLATED_PATH}." \
-gsutil ${MULTITHREADED:+ -m} rsync -r -d "${BQMS_TRANSLATED_PATH}" \
+gcloud storage rsync --recursive --delete-unmatched-destination-objects "${BQMS_TRANSLATED_PATH}" \
"${BQMS_LOCAL_TRANSLATED_PATH}"
fi
fi
@@ -255,6 +253,6 @@ if [[ -n "${BQMS_CLEANUP_FILES}" ]]
then
log_exec "Cleaning gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}." \
"Could not clean gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}." \
-gsutil ${MULTITHREADED:+ -m} rm -rf "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}"
+gcloud storage rm --recursive --continue-on-error "gs://${BQMS_GCS_BUCKET}/${BQMS_GCS_PREFIX}"
fi
log_info "Preprocessing, translation and postprocessing have completed successfully."
2 changes: 1 addition & 1 deletion dumper/app/src/main/sh/cloud_extractor/gce_launcher.sh
@@ -68,7 +68,7 @@ download() {
log INFO "Downloading '${url}' to '${target}'."
if [[ ! -e "${target}" ]]; then
if [[ "${url}" = gs://* ]]; then
-gsutil cp "${url}" "${target}"
+gcloud storage cp "${url}" "${target}"
else
curl -L "${url}" -o "${target}"
fi
4 changes: 1 addition & 3 deletions permissions-migration/stressTests/run_single_test
@@ -68,7 +68,7 @@ python3 ./generate_principal_ruleset.py $num_users $num_groups $num_roles
#echo "Generating table files"
#python3 ./generate_tables.py $num_tables
#echo "Uploading table files to gcs"
-#gsutil -m cp -r ./tables/** gs://$table_bucket/$table_gcs_prefix/tables &> upload.log
+#gcloud storage cp --recursive ./tables/** gs://$table_bucket/$table_gcs_prefix/tables &> upload.log

#deactivate the python venv
deactivate
@@ -141,5 +141,3 @@ echo "Number of BQ permissions in the output (expecting $((4*num_tables))):"
cat ./permissions_output.yaml | grep 'resourceType: "BQ_TABLE"' | wc -l