-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy path: entrypoint.sh
More file actions
executable file
·149 lines (131 loc) · 6.54 KB
/
entrypoint.sh
File metadata and controls
executable file
·149 lines (131 loc) · 6.54 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
#!/bin/bash
# Entrypoint: refresh MaxMind GeoIP databases with geoipupdate and sync the
# resulting .mmdb files to an Azure file share. Required environment variables
# are validated right below; see the ERROR messages for the full list.
set -Eeu -o pipefail

echo "----------------------- Start "
date

# Scratch directory where the databases are downloaded/updated.
GEOIPUPDATE_DB_DIR="$(mktemp -d)"
export GEOIPUPDATE_DB_DIR
# 0 = run geoipupdate once and exit (no built-in scheduling); recurrence is
# handled by the container orchestrator.
export GEOIPUPDATE_FREQUENCY=0

# Remove the scratch directory on every exit path (success, error, signal).
# The original leaked one temp dir per run.
trap 'rm -rf -- "${GEOIPUPDATE_DB_DIR}"' EXIT
# --- Input validation --------------------------------------------------------
# NB: `set -u` is active (see top of script). Referencing a truly-unset
# variable as "$VAR" would abort with bash's terse "unbound variable" error
# before our friendly message prints, so every check uses ${VAR:-}.

# require_either VAR1 VAR2: exit 1 unless at least one of the two named
# environment variables is set and non-empty.
require_either() {
  if [ -z "${!1:-}" ] && [ -z "${!2:-}" ]; then
    echo "ERROR: You must set the environment variable $1 or $2!"
    exit 1
  fi
}

# require_set VAR: exit 1 unless the named environment variable is set and non-empty.
require_set() {
  if [ -z "${!1:-}" ]; then
    echo "ERROR: You must set the environment variable $1!"
    exit 1
  fi
}

require_either GEOIPUPDATE_ACCOUNT_ID GEOIPUPDATE_ACCOUNT_ID_FILE
require_either GEOIPUPDATE_LICENSE_KEY GEOIPUPDATE_LICENSE_KEY_FILE

# Default set of databases to fetch when the caller does not specify any.
# Exported so the geoipupdate child process sees the default too: a plain
# assignment stays local to this shell when the variable was never exported.
if [ -z "${GEOIPUPDATE_EDITION_IDS:-}" ]; then
  GEOIPUPDATE_EDITION_IDS="GeoLite2-ASN GeoLite2-City GeoLite2-Country"
fi
export GEOIPUPDATE_EDITION_IDS

require_set JENKINS_INFRA_FILESHARE_CLIENT_ID
require_set JENKINS_INFRA_FILESHARE_CLIENT_SECRET
require_set JENKINS_INFRA_FILESHARE_TENANT_ID
require_set STORAGE_NAME
require_set STORAGE_FILESHARE
### Azure TOKEN
echo "LAUNCH AZ token"
echo "azure token"
# Inputs for get-fileshare-signed-url.sh (presumably read from the environment
# by that helper — TODO confirm): request a short-lived SAS token (5 minutes)
# with delete/list/read/write permissions.
export STORAGE_DURATION_IN_MINUTE=5
export STORAGE_PERMISSIONS=dlrw
# Full SAS URL, e.g. https://<account>.file.core.windows.net/<share>?<sas-token>
fileShareSignedUrl="$(get-fileshare-signed-url.sh)"
# Split on '?': strip the shortest '?...' suffix (splits at the LAST '?') to
# get the bare share URL, and the shortest '...?' prefix (splits at the FIRST
# '?') to get the SAS token. Equivalent as long as the URL has exactly one '?'.
urlWithoutToken=${fileShareSignedUrl%\?*}
token=${fileShareSignedUrl#*\?}
### AZ COPY dest to local
echo "LAUNCH AZCOPY dest to local"
# Dedicated scratch area for azcopy logs and job plans, so we can dump the
# logs on failure and avoid polluting $HOME/.azcopy.
AZCOPY_FOLDER="$(mktemp -d)"
AZCOPY_LOG_LOCATION="${AZCOPY_FOLDER}"
AZCOPY_JOB_PLAN_LOCATION="${AZCOPY_FOLDER}"
export AZCOPY_LOG_LOCATION
export AZCOPY_JOB_PLAN_LOCATION
echo "local folder = ${GEOIPUPDATE_DB_DIR}/"
# Download the current .mmdb files from the share first, so geoipupdate below
# can compare hashes against them instead of treating everything as new.
set +e #do not failfast on error for azcopy
# `: |` feeds azcopy an empty stdin so it can never hang on an interactive prompt.
: | azcopy copy \
  "${urlWithoutToken}/*?${token}" "${GEOIPUPDATE_DB_DIR}" \
  --skip-version-check `# Do not check for new azcopy versions (we have updatecli + puppet for this)` \
  --log-level=ERROR `# Do not write too much logs (I/O...)` \
  --include-pattern='*.mmdb' `# only the mmdb databases files` \
  || \
  { cat "${AZCOPY_LOG_LOCATION}"/*.log; exit 1; } # dump the logs in case of error during azcopy copy
  # NB: the glob must stay OUTSIDE the quotes (a quoted '*' is literal), and
  # azcopy writes its logs directly into $AZCOPY_LOG_LOCATION (no .azcopy/
  # subdirectory when the env var is set).
set -e #set failfast back
echo "AZCOPY dest to local done"
echo "LISTING local: "
ls -lt "${GEOIPUPDATE_DB_DIR}"
### GEOUPDATEIP
echo "LAUNCH GEOIPUPDATE"
# geoipupdate emits a JSON report of what it did; capture it so the block
# below can decide whether any database actually changed.
GEOIPUPDATEJSONPATH="$(mktemp)"
if [ "${GEOIPUPDATE_DRYRUN:-false}" != "true" ]; then
# --output: print the machine-readable JSON report (edition_id, old_hash,
# new_hash, ...) on stdout.
geoipupdate --output --database-directory="${GEOIPUPDATE_DB_DIR}" > "${GEOIPUPDATEJSONPATH}"
else
echo "DRY-RUN ON"
# GNU date is required for the --utc/+FORMAT flags; on macOS it is installed
# as gdate (coreutils). `|| true` keeps set -e happy if uname fails.
[[ "$(uname || true)" == "Darwin" ]] && dateCmd="gdate" || dateCmd="date"
currentUTCdatetime="$("${dateCmd}" --utc +"%Y%m%dT%H%MZ")"
# Fake one database file plus a report where old_hash != new_hash, so the
# "data changed" path below is exercised without contacting MaxMind.
echo "dry-run" >"${GEOIPUPDATE_DB_DIR}/dryrun-${currentUTCdatetime}.mmdb"
echo '[{"edition_id":"GeoLite2-ASN","old_hash":"c54b6e64478adfd010c7a86db310033f","new_hash":"857a0cf8118b9961cf6789e1842bce2a","modified_at":1733403617,"checked_at":1733756616},{"edition_id":"GeoLite2-City","old_hash":"34a6a0ec4018c74a503134980c154502","new_hash":"fb3449d8252f74eac39fc55c32c19879","modified_at":1733501742,"checked_at":1733756620},{"edition_id":"GeoLite2-Country","old_hash":"627a1d220b5ef844e0f0f174a0161cd7","new_hash":"27b1f57ae9dd56e1923f5d458514794c","modified_at":1733506208,"checked_at":1733756621}]' > "${GEOIPUPDATEJSONPATH}"
## Use this version to test, in dry run, the case of "not changed data"
# echo '[{"edition_id":"GeoLite2-ASN","old_hash":"857a0cf8118b9961cf6789e1842bce2a","new_hash":"857a0cf8118b9961cf6789e1842bce2a","checked_at":1733760216},{"edition_id":"GeoLite2-City","old_hash":"fb3449d8252f74eac39fc55c32c19879","new_hash":"fb3449d8252f74eac39fc55c32c19879","checked_at":1733760216},{"edition_id":"GeoLite2-Country","old_hash":"27b1f57ae9dd56e1923f5d458514794c","new_hash":"27b1f57ae9dd56e1923f5d458514794c","checked_at":1733760216}]' > "${GEOIPUPDATEJSONPATH}"
echo "json saved to file ${GEOIPUPDATEJSONPATH}"
fi
# Aborts (via set -e) if the report is missing or not valid JSON; also echoes
# the report into the job log.
jq -r . "${GEOIPUPDATEJSONPATH}" # check that the file exist and has valid json content.
echo "GEOIPUPDATE DONE"
### PARSING JSON copy if hash have changed
# > /dev/null to avoid multiple true in output but keep errors output
if jq -e '.[] | select(.old_hash != .new_hash)' "${GEOIPUPDATEJSONPATH}" > /dev/null; then
  echo "DATA CHANGED, update needed"
  ### AZCOPY local to dest
  echo "LAUNCH AZCOPY local to dest"
  set +e #do not failfast on error for azcopy
  # `: |` feeds azcopy an empty stdin so it can never hang on an interactive prompt.
  : | azcopy copy \
    "${GEOIPUPDATE_DB_DIR}/*" "${fileShareSignedUrl}" \
    --skip-version-check `# Do not check for new azcopy versions (we have updatecli + puppet for this)` \
    --log-level=ERROR `# Do not write too much logs (I/O...)` \
    --include-pattern='*.mmdb' `# only the mmdb databases files` \
    --overwrite="ifSourceNewer" `# Upload if and only if the updategeoip has updated the files` \
    || \
    { cat "${AZCOPY_LOG_LOCATION}"/*.log; exit 1; } # dump the logs in case of error during azcopy copy
    # NB: the glob must stay OUTSIDE the quotes (a quoted '*' is literal), and
    # azcopy writes its logs directly into $AZCOPY_LOG_LOCATION.
  set -e #set failfast back
  ### AZCOPY List to ensure files are present on destination
  echo "azcopy list"
  azcopy list \
    --skip-version-check `# Do not check for new azcopy versions (we have updatecli + puppet for this)` \
    "${fileShareSignedUrl}"
  echo "AZCOPY local to dest done"
  if [ "${GEOIPUPDATE_ROLLOUT:-false}" != "false" ]; then
    echo "ROLLOUT RESTART"
    # GEOIPUPDATE_ROLLOUT format: "ns1:dep1,dep2;ns2:dep3"
    # NB: an `IFS=x read ...` prefix assignment applies to the read command
    # only, so the global IFS is never modified and needs no backup/restore.
    IFS=';' read -ra entries <<< "${GEOIPUPDATE_ROLLOUT}"
    for entry in "${entries[@]}"; do
      # Split "namespace:dep1,dep2" into its two halves.
      IFS=':' read -r namespace deployments <<< "${entry}"
      IFS=',' read -ra deployment_list <<< "${deployments}"
      for deployment in "${deployment_list[@]}"; do
        # Log the exact command, then run it: restart the deployment and wait
        # for the rollout to complete. (The previous unquoted
        # `echo kubectl ... && kubectl ... status ...` printed only the restart
        # half and actually *executed* the status half, before the restart.)
        echo "kubectl -n ${namespace} rollout restart deployment ${deployment} && kubectl -n ${namespace} rollout status deployment ${deployment}"
        kubectl -n "${namespace}" rollout restart deployment "${deployment}" && kubectl -n "${namespace}" rollout status deployment "${deployment}"
      done
    done
    echo "ROLLOUT RESTART DONE"
  fi
else
  echo "Data are up to date"
fi
exit 0