Skip to content

Commit 58e7555

Browse files
authored
feat: add kafka migration (#330)
* Add Migration tooling * Update push-image.sh * Rename PSL_job.json to pubsub_lite_job.json * Update Dockerfile * Rename start-psl-connector.sh to start-pubsub-lite-connector.sh
1 parent e368356 commit 58e7555

24 files changed

+330
-0
lines changed

migration/.gcp/gmk_bootstrap_servers

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
bootstrap.<google-managed-kafka-cluster-name>.<google-managed-kafka-cluster-region-name>.managedkafka.<google-managed-cluster-host-project-name>.cloud.goog:9092
+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<service-account-name>@<gcp-project>.iam.gserviceaccount.com
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<base64 encoded sasl service account key>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<kafka topic name used by kafka connect for tracking the config>

migration/.gcp/kafka_connect_group_id

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<kafka connect group id (unique per worker group) for the kafka connect workers in distributed mode>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<kafka topic name used by kafka connect for tracking the offsets>

migration/.gcp/kafka_sink_topic

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<target sink kafka topic name used by kafka connect for migrating the data from pubsub-lite topic>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<full path of the ssl truststore jks file location>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<password for the ssl truststore jks>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<kafka topic name used by kafka connect for tracking the status>
+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<GCP location for the pubsub lite source subscription to be used for migrating the pubsub lite topic to sink kafka topic>
+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<GCP project that hosts the pubsub lite source subscription to be used for migrating the pubsub lite topic to sink kafka topic>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
<pubsub lite source subscription name to be used for migrating the pubsub lite topic to kafka topic>

migration/.gcp/pubsub_lite_job_name

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
PubSubLiteSourceConnector

migration/docker/Dockerfile

+84
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,84 @@
1+
# syntax=docker/dockerfile:1
# Kafka Connect worker image used to migrate data from a Pub/Sub Lite topic
# to a (Google Managed) Kafka topic via the pubsub-group-kafka-connector.
FROM --platform=linux/amd64 eclipse-temurin:21

ARG KAFKA_VERSION="3.4.0"
ARG KAFKA_CONNECT_VERSION="${KAFKA_VERSION}"
ARG KAFKA_SCALA_VERSION="2.13"
ARG PUBSUB_GROUP_KAFKA_CONNECTOR_VERSION="1.2.0"
ARG KAFKA_HOME_ROOT="/opt"
# FIX: KAFKA_HOME must be defined BEFORE any ARG that expands it. The original
# declared KAFKA_CONFIG_DIR first, so it silently expanded to "/config" and
# every derived config path pointed outside the Kafka installation.
ENV KAFKA_HOME="${KAFKA_HOME_ROOT}/kafka"
ARG KAFKA_CONFIG_DIR="${KAFKA_HOME}/config"
ARG KAFKA_PLUGINS_DIR="${KAFKA_HOME}/plugins"
ARG KAFKA_RELEASE="kafka_${KAFKA_SCALA_VERSION}-${KAFKA_VERSION}"
ARG KAFKA_TARBALL="${KAFKA_RELEASE}.tgz"
ARG KAFKA_DOWNLOAD_URL="https://archive.apache.org/dist/kafka/${KAFKA_VERSION}/${KAFKA_TARBALL}"
ENV KAFKA_HEAP_OPTS="-Xms2G -Xmx2G"
# The pubsub-group-kafka-connector file needs to be pre-built/downloaded using maven or other similar tool.
# References:
# 1) https://github.com/googleapis/java-pubsub-group-kafka-connector/releases/
# 2) https://central.sonatype.com/artifact/com.google.cloud/pubsub-group-kafka-connector
ARG PUBSUB_GROUP_KAFKA_CONNECTOR_JAR="pubsub-group-kafka-connector-${PUBSUB_GROUP_KAFKA_CONNECTOR_VERSION}.jar"
ARG KAFKA_CONNECT_CONFIGURE_SCRIPT="configure-kafka-connect.sh"
ARG BUILD_KAFKA_CONNECT_STARTUP_SCRIPT="start-kafka-connect.sh"
ARG BUILD_PUBSUB_LITE_JOB_STARTUP_SCRIPT="start-pubsub-lite-connector.sh"
ARG BUILD_KAFKA_CONNECT_CONFIG_FILE="kafka-connect.properties"
ARG BUILD_PUBSUB_LITE_JOB_FILE="pubsub_lite_job.json"
ENV JAVA_HOME="/opt/java/openjdk"
ENV PATH="${KAFKA_HOME}/bin:${JAVA_HOME}/bin:${PATH}"
# Runtime paths consumed by the startup/configure scripts.
ENV KAFKA_CONNECT_STARTUP_SCRIPT="${KAFKA_HOME}/bin/${BUILD_KAFKA_CONNECT_STARTUP_SCRIPT}"
ENV PUBSUB_LITE_JOB_STARTUP_SCRIPT="${KAFKA_HOME}/bin/${BUILD_PUBSUB_LITE_JOB_STARTUP_SCRIPT}"
ENV KAFKA_CONNECT_CONFIG_FILE="${KAFKA_CONFIG_DIR}/${BUILD_KAFKA_CONNECT_CONFIG_FILE}"
ENV PUBSUB_LITE_JOB_FILE="${KAFKA_CONFIG_DIR}/${BUILD_PUBSUB_LITE_JOB_FILE}"

# iproute2 ("ip") and bind9-dnsutils are used by the startup script to discover
# the pod IP. Clean apt lists in the same layer so they don't bloat the image.
RUN apt-get -y -qq update \
    && apt-get -y -qq install --no-install-recommends iproute2 bind9-dnsutils \
    && rm -rf /var/lib/apt/lists/*

# Download and unpack Kafka; remove the tarball in the same layer.
RUN wget -q ${KAFKA_DOWNLOAD_URL} \
    && tar -xzf ${KAFKA_TARBALL} -C ${KAFKA_HOME_ROOT} \
    && ln -s ${KAFKA_HOME_ROOT}/${KAFKA_RELEASE} ${KAFKA_HOME} \
    && rm -f ${KAFKA_TARBALL}

RUN mkdir -p ${KAFKA_PLUGINS_DIR}
COPY ${PUBSUB_GROUP_KAFKA_CONNECTOR_JAR} \
     ${KAFKA_PLUGINS_DIR}/${PUBSUB_GROUP_KAFKA_CONNECTOR_JAR}
COPY ${BUILD_KAFKA_CONNECT_CONFIG_FILE} ${KAFKA_CONNECT_CONFIG_FILE}
COPY ${BUILD_PUBSUB_LITE_JOB_FILE} ${PUBSUB_LITE_JOB_FILE}
COPY ${KAFKA_CONNECT_CONFIGURE_SCRIPT} .
COPY ${BUILD_KAFKA_CONNECT_STARTUP_SCRIPT} ${KAFKA_CONNECT_STARTUP_SCRIPT}
COPY ${BUILD_PUBSUB_LITE_JOB_STARTUP_SCRIPT} ${PUBSUB_LITE_JOB_STARTUP_SCRIPT}
# One layer instead of three separate chmod layers.
RUN chmod +x ${KAFKA_CONNECT_CONFIGURE_SCRIPT} \
             ${KAFKA_CONNECT_STARTUP_SCRIPT} \
             ${PUBSUB_LITE_JOB_STARTUP_SCRIPT}

# Render the config templates from BuildKit secrets. The secret mounts never
# enter a layer, but NOTE(review): the rendered properties file baked into the
# image DOES contain the SASL credentials — treat the built image as sensitive
# (or move rendering to container start via mounted k8s Secrets).
RUN --mount=type=secret,id=gmk_bootstrap_servers \
    --mount=type=secret,id=gmk_sasl_service_account \
    --mount=type=secret,id=gmk_sasl_service_account_key \
    --mount=type=secret,id=kafka_sink_topic \
    --mount=type=secret,id=kafka_connect_group_id \
    --mount=type=secret,id=pubsub_lite_gcp_project \
    --mount=type=secret,id=pubsub_lite_gcp_location \
    --mount=type=secret,id=pubsub_lite_gcp_subscription \
    --mount=type=secret,id=pubsub_lite_job_name \
    --mount=type=secret,id=kafka_config_storage_topic \
    --mount=type=secret,id=kafka_offset_storage_topic \
    --mount=type=secret,id=kafka_status_storage_topic \
    --mount=type=secret,id=kafka_ssl_truststore_location \
    --mount=type=secret,id=kafka_ssl_truststore_password \
    KAFKA_CONNECT_CONFIG_FILE="${KAFKA_CONNECT_CONFIG_FILE}" \
    KAFKA_BOOTSTRAP_SERVERS="$(cat /run/secrets/gmk_bootstrap_servers)" \
    KAFKA_SASL_SERVICE_ACCOUNT="$(cat /run/secrets/gmk_sasl_service_account)" \
    KAFKA_SASL_SERVICE_ACCOUNT_KEY="$(cat /run/secrets/gmk_sasl_service_account_key)" \
    KAFKA_SINK_TOPIC="$(cat /run/secrets/kafka_sink_topic)" \
    KAFKA_CONNECT_GROUP_ID="$(cat /run/secrets/kafka_connect_group_id)" \
    KAFKA_PLUGINS_DIR=${KAFKA_PLUGINS_DIR} \
    PUBSUB_LITE_GCP_PROJECT="$(cat /run/secrets/pubsub_lite_gcp_project)" \
    PUBSUB_LITE_GCP_LOCATION="$(cat /run/secrets/pubsub_lite_gcp_location)" \
    PUBSUB_LITE_GCP_SUBSCRIPTION="$(cat /run/secrets/pubsub_lite_gcp_subscription)" \
    PUBSUB_LITE_JOB_NAME="$(cat /run/secrets/pubsub_lite_job_name)" \
    KAFKA_CONFIG_STORAGE_TOPIC="$(cat /run/secrets/kafka_config_storage_topic)" \
    KAFKA_OFFSET_STORAGE_TOPIC="$(cat /run/secrets/kafka_offset_storage_topic)" \
    KAFKA_STATUS_STORAGE_TOPIC="$(cat /run/secrets/kafka_status_storage_topic)" \
    KAFKA_SSL_TRUSTSTORE_LOCATION="$(cat /run/secrets/kafka_ssl_truststore_location)" \
    KAFKA_SSL_TRUSTSTORE_PASSWORD="$(cat /run/secrets/kafka_ssl_truststore_password)" \
    ./${KAFKA_CONNECT_CONFIGURE_SCRIPT} \
    && rm -f ./${KAFKA_CONNECT_CONFIGURE_SCRIPT}

# Kafka Connect REST endpoint (documentation only; publish with -p / a Service).
EXPOSE 8083
# Shell form is intentional: the script path lives in an env var, which an
# exec-form JSON array would not expand.
CMD ${KAFKA_CONNECT_STARTUP_SCRIPT}

migration/docker/build-image.sh

+18
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
#!/usr/bin/env bash
# Builds the psl-to-gmk Kafka Connect image, feeding all configuration values
# from the sibling ../.gcp directory as BuildKit secrets so they never land in
# an image layer or in `docker history`.
set -euo pipefail

SELF_DIR="$(dirname "$(readlink -f "$0")")"
SECRETS_DIR="$(dirname "${SELF_DIR}")/.gcp"
# Use SELF_DIR for both the Dockerfile and the build context so the script
# works from any working directory (original required cwd == docker dir).
docker build --platform=linux/amd64 --file "${SELF_DIR}/Dockerfile" --tag psl-to-gmk:latest \
  --secret id=gmk_sasl_service_account,src="${SECRETS_DIR}/gmk_sasl_service_account" \
  --secret id=gmk_sasl_service_account_key,src="${SECRETS_DIR}/gmk_sasl_service_account_key" \
  --secret id=gmk_bootstrap_servers,src="${SECRETS_DIR}/gmk_bootstrap_servers" \
  --secret id=kafka_sink_topic,src="${SECRETS_DIR}/kafka_sink_topic" \
  --secret id=kafka_connect_group_id,src="${SECRETS_DIR}/kafka_connect_group_id" \
  --secret id=pubsub_lite_gcp_project,src="${SECRETS_DIR}/pubsub_lite_gcp_project" \
  --secret id=pubsub_lite_gcp_location,src="${SECRETS_DIR}/pubsub_lite_gcp_location" \
  --secret id=pubsub_lite_gcp_subscription,src="${SECRETS_DIR}/pubsub_lite_gcp_subscription" \
  --secret id=pubsub_lite_job_name,src="${SECRETS_DIR}/pubsub_lite_job_name" \
  --secret id=kafka_config_storage_topic,src="${SECRETS_DIR}/kafka_config_storage_topic" \
  --secret id=kafka_offset_storage_topic,src="${SECRETS_DIR}/kafka_offset_storage_topic" \
  --secret id=kafka_status_storage_topic,src="${SECRETS_DIR}/kafka_status_storage_topic" \
  --secret id=kafka_ssl_truststore_location,src="${SECRETS_DIR}/kafka_ssl_truststore_location" \
  --secret id=kafka_ssl_truststore_password,src="${SECRETS_DIR}/kafka_ssl_truststore_password" \
  --no-cache "${SELF_DIR}"
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
#!/usr/bin/env bash
# Renders the Kafka Connect worker config and the Pub/Sub Lite connector job
# by substituting __PLACEHOLDER__ tokens in place. All values must be supplied
# as environment variables (the Dockerfile provides them from secret mounts).
# `set -u` makes a missing variable a hard build failure instead of silently
# substituting an empty string into the config.
set -euo pipefail

# Update Kafka Connect bootstrap servers
sed -i -e "s#__KAFKA_BOOTSTRAP_SERVERS__#${KAFKA_BOOTSTRAP_SERVERS}#g;" \
    "${KAFKA_CONNECT_CONFIG_FILE}"
# Update Kafka Connect internal topics config
sed -i -e "s#__KAFKA_CONFIG_STORAGE_TOPIC__#${KAFKA_CONFIG_STORAGE_TOPIC}#g; s#__KAFKA_OFFSET_STORAGE_TOPIC__#${KAFKA_OFFSET_STORAGE_TOPIC}#g; s#__KAFKA_STATUS_STORAGE_TOPIC__#${KAFKA_STATUS_STORAGE_TOPIC}#g" \
    "${KAFKA_CONNECT_CONFIG_FILE}"
# Update Kafka Connect group id and Kafka Connect plugins directory. Kafka
# Connect group id needs to be unique and must not conflict with consumer group ids
sed -i -e "s#__KAFKA_CONNECT_GROUP_ID__#${KAFKA_CONNECT_GROUP_ID}#g; s#__KAFKA_PLUGINS_DIR__#${KAFKA_PLUGINS_DIR}#g" \
    "${KAFKA_CONNECT_CONFIG_FILE}"
# Update Kafka Connect SASL config
sed -i -e "s#__KAFKA_SASL_SERVICE_ACCOUNT__#${KAFKA_SASL_SERVICE_ACCOUNT}#g; s#__KAFKA_SASL_SERVICE_ACCOUNT_KEY__#${KAFKA_SASL_SERVICE_ACCOUNT_KEY}#g" \
    "${KAFKA_CONNECT_CONFIG_FILE}"
# Update Kafka Connect SSL truststore config
sed -i -e "s#__KAFKA_SSL_TRUSTSTORE_LOCATION__#${KAFKA_SSL_TRUSTSTORE_LOCATION}#g; s#__KAFKA_SSL_TRUSTSTORE_PASSWORD__#${KAFKA_SSL_TRUSTSTORE_PASSWORD}#g" \
    "${KAFKA_CONNECT_CONFIG_FILE}"

# Update PubSub Lite Job File
sed -i -e "s#__PUBSUB_LITE_JOB_NAME__#${PUBSUB_LITE_JOB_NAME}#g; s#__KAFKA_SINK_TOPIC__#${KAFKA_SINK_TOPIC}#g; s#__PUBSUB_LITE_GCP_PROJECT__#${PUBSUB_LITE_GCP_PROJECT}#g; s#__PUBSUB_LITE_GCP_LOCATION__#${PUBSUB_LITE_GCP_LOCATION}#g; s#__PUBSUB_LITE_GCP_SUBSCRIPTION__#${PUBSUB_LITE_GCP_SUBSCRIPTION}#g;" \
    "${PUBSUB_LITE_JOB_FILE}"

# Update the Pub/Sub Lite job start script.
# FIX: the image defines PUBSUB_LITE_JOB_STARTUP_SCRIPT; the original used the
# stale pre-rename variable PSL_JOB_STARTUP_SCRIPT, which is never set, so the
# placeholder in the start script was never substituted.
sed -i -e "s#__PUBSUB_LITE_JOB_NAME__#${PUBSUB_LITE_JOB_NAME}#g;" \
    "${PUBSUB_LITE_JOB_STARTUP_SCRIPT}"
+85
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Kafka Connect Distributed Mode Configuration.
# __UPPER_CASE__ placeholders are substituted at image build time by
# configure-kafka-connect.sh (from BuildKit secrets) or at container start by
# start-kafka-connect.sh (host name / client id).

# Bootstrap servers for Kafka brokers
bootstrap.servers=__KAFKA_BOOTSTRAP_SERVERS__

# Group ID for Kafka Connect worker (must be unique per worker group and must
# not collide with any consumer group id)
group.id=__KAFKA_CONNECT_GROUP_ID__

# REST API endpoint port for Kafka Connect
rest.port=8083

# Hostname for REST API endpoint (pod IP, substituted at container start)
rest.host.name=__KAFKA_REST_ADVERTISED_HOST_NAME__

# Client ID for the worker. This will appear in server logs for tracking
client.id=__KAFKA_CONNECT_WORKER_CLIENT_ID__

# Classpath for plugins (including connectors)
plugin.path=__KAFKA_PLUGINS_DIR__

# Offset commit interval in milliseconds
offset.flush.interval.ms=10000

# Internal topics Kafka Connect uses to store connector configs, source
# offsets and connector/task status (shared by all workers in the group)
config.storage.topic=__KAFKA_CONFIG_STORAGE_TOPIC__
offset.storage.topic=__KAFKA_OFFSET_STORAGE_TOPIC__
status.storage.topic=__KAFKA_STATUS_STORAGE_TOPIC__

# Advertised REST endpoint other workers use to reach this one
# (host substituted with the pod IP at container start)
rest.advertised.host.name=__KAFKA_REST_ADVERTISED_HOST_NAME__
rest.advertised.port=8083

# Number of worker threads for handling HTTP requests
rest.threads.max=50

# Default partition assignment strategy
partition.assignment.strategy=org.apache.kafka.clients.consumer.CooperativeStickyAssignor

# Kafka Connect-specific settings: replication/partitioning of the internal
# storage topics (created on first start if absent)
offset.storage.replication.factor=3
config.storage.replication.factor=3
status.storage.replication.factor=3
offset.storage.partitions=25
status.storage.partitions=5

# SASL auth related configuration (worker's own admin/internal clients)
sasl.mechanism=PLAIN
security.protocol=SASL_SSL
sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required \
  username="__KAFKA_SASL_SERVICE_ACCOUNT__" \
  password="__KAFKA_SASL_SERVICE_ACCOUNT_KEY__";

# Same SASL credentials for producers created on behalf of source connectors
producer.sasl.mechanism=PLAIN
producer.security.protocol=SASL_SSL
producer.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required \
  username="__KAFKA_SASL_SERVICE_ACCOUNT__" \
  password="__KAFKA_SASL_SERVICE_ACCOUNT_KEY__";

# Same SASL credentials for consumers created on behalf of sink connectors
consumer.sasl.mechanism=PLAIN
consumer.security.protocol=SASL_SSL
consumer.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required \
  username="__KAFKA_SASL_SERVICE_ACCOUNT__" \
  password="__KAFKA_SASL_SERVICE_ACCOUNT_KEY__";

# SSL Truststore related configuration
ssl.truststore.location=__KAFKA_SSL_TRUSTSTORE_LOCATION__
ssl.truststore.password=__KAFKA_SSL_TRUSTSTORE_PASSWORD__

# Set the key converter for the Pub/Sub Lite source connector.
key.converter=org.apache.kafka.connect.converters.ByteArrayConverter
# Set the value converter for the Pub/Sub Lite source connector.
value.converter=org.apache.kafka.connect.converters.ByteArrayConverter
Binary file not shown.

migration/docker/pubsub_lite_job.json

+11
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
{
2+
"name": "__PUBSUB_LITE_JOB_NAME__",
3+
"config": {
4+
"connector.class": "com.google.pubsublite.kafka.source.PubSubLiteSourceConnector",
5+
"tasks.max": "10",
6+
"kafka.topic": "__KAFKA_SINK_TOPIC__",
7+
"pubsublite.project": "__PUBSUB_LITE_GCP_PROJECT__",
8+
"pubsublite.location": "__PUBSUB_LITE_GCP_LOCATION__",
9+
"pubsublite.subscription": "__PUBSUB_LITE_GCP_SUBSCRIPTION__"
10+
}
11+
}

migration/docker/push-image.sh

+21
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
#!/usr/bin/env bash
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Tags the locally built Kafka Connect image and pushes it to GCR.
# (Original comment incorrectly called this a Dockerfile.)
set -euo pipefail

DOCKER_IMAGE_NAME="psl-to-gmk"
DOCKER_IMAGE_TAG="latest"
# Replace the placeholder with the target GCP project before running.
GCP_PROJECT="<GCP Project>"
# FIX: variable name was misspelled DOCKER_REPOSTORY (internal only, safe to rename).
DOCKER_REPOSITORY="gcr.io/${GCP_PROJECT}"

docker tag "${DOCKER_IMAGE_NAME}" \
  "${DOCKER_REPOSITORY}/${DOCKER_IMAGE_NAME}:${DOCKER_IMAGE_TAG}"
docker push "${DOCKER_REPOSITORY}/${DOCKER_IMAGE_NAME}:${DOCKER_IMAGE_TAG}"
+9
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
#!/usr/bin/env bash
# Container entrypoint: launches the Pub/Sub Lite connector-submission helper
# in the background, then runs the Kafka Connect worker in distributed mode
# in the foreground.
set -euo pipefail

# FIX: the image exports PUBSUB_LITE_JOB_STARTUP_SCRIPT; the original launched
# ${PSL_JOB_STARTUP_SCRIPT}, a stale pre-rename variable that is never set, so
# the connector job was never submitted.
"${PUBSUB_LITE_JOB_STARTUP_SCRIPT}" &

START_SCRIPT="${KAFKA_HOME}/bin/connect-distributed.sh"
# Advertise this pod's eth0 IPv4 address and a per-pod client id so workers in
# the Connect group can reach each other and are distinguishable in logs.
KAFKA_REST_ADVERTISED_HOST_NAME="$(/sbin/ip -o -4 addr list eth0 | awk '{print $4}' | cut -d/ -f1)"
KAFKA_CONNECT_WORKER_CLIENT_ID="$(hostname --fqdn)"
sed -i -e "s#__KAFKA_REST_ADVERTISED_HOST_NAME__#${KAFKA_REST_ADVERTISED_HOST_NAME}#g; s#__KAFKA_CONNECT_WORKER_CLIENT_ID__#${KAFKA_CONNECT_WORKER_CLIENT_ID}#g" \
    "${KAFKA_CONNECT_CONFIG_FILE}"
# exec so connect-distributed replaces the shell and receives SIGTERM directly.
exec "${START_SCRIPT}" "${KAFKA_CONNECT_CONFIG_FILE}"
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
#!/usr/bin/env bash
# Waits for the local Kafka Connect REST endpoint to come up, then submits the
# Pub/Sub Lite source connector job if it is not already registered.
# __PUBSUB_LITE_JOB_NAME__ is substituted at image build time by
# configure-kafka-connect.sh.

# Poll Kafka Connect until it is up
while true
do
    echo "Pinging Connect Rest Endpoint"
    CONNECT_PING=$(curl localhost:8083 | grep "version")
    if [[ $CONNECT_PING != "" ]]; then
        break
    fi
    sleep 30
done

# Once Kafka Connect is up, if the PubSub Lite migration job
# does not yet exist, submit the Job
CONNECT_JOBS=$(curl localhost:8083/connectors | grep "__PUBSUB_LITE_JOB_NAME__")
if [[ $CONNECT_JOBS == "" ]]; then
    echo "No Connect Job found, posting Job"
    # FIX: the job file was renamed in this commit from PSL_job.json to
    # pubsub_lite_job.json; the old path would 404 the submission. Prefer the
    # PUBSUB_LITE_JOB_FILE env var set by the Dockerfile, with the image's
    # config path as fallback.
    curl -H "Content-Type: application/json" -H "Accept: application/json" \
        --data "@${PUBSUB_LITE_JOB_FILE:-/opt/kafka/config/pubsub_lite_job.json}" \
        localhost:8083/connectors
fi

migration/k8s.yaml

+44
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
---
# Deployment of the psl-to-gmk Kafka Connect image: three workers in
# distributed mode. All <angle-bracket> placeholders must be filled in
# before applying this manifest.
apiVersion: "apps/v1"
kind: "Deployment"
metadata:
  name: "<workflow_name>"
  namespace: "default"
  labels:
    app: "<workflow_name>"
spec:
  replicas: 3
  selector:
    matchLabels:
      app: "<workflow_name>"
  template:
    metadata:
      labels:
        app: "<workflow_name>"
    spec:
      # Kubernetes service account; presumably bound to a GCP service account
      # via Workload Identity for Pub/Sub Lite access — verify before use.
      serviceAccountName: <gke_service_account>
      containers:
        - name: "psl-to-gmk-1"
          image: "gcr.io/<gcp_project>/psl-to-gmk:latest"
---
# CPU-based autoscaling for the Connect workers: 1–5 replicas,
# targeting 80% average CPU utilization.
apiVersion: "autoscaling/v2"
kind: "HorizontalPodAutoscaler"
metadata:
  name: "<workflow_name>-hpa-iwbr"
  namespace: "default"
  labels:
    app: "<workflow_name>"
spec:
  scaleTargetRef:
    kind: "Deployment"
    name: "<workflow_name>"
    apiVersion: "apps/v1"
  minReplicas: 1
  maxReplicas: 5
  metrics:
    - type: "Resource"
      resource:
        name: "cpu"
        target:
          type: "Utilization"
          averageUtilization: 80

0 commit comments

Comments
 (0)