
Commit 96b38a2

Harry Sharrysingh authored and committed
CORE-36133 Adding Error handling for failures in quick start script | updated README
1 parent 204642c commit 96b38a2

File tree

3 files changed: +60 -21 lines


README.md

Lines changed: 21 additions & 10 deletions
@@ -1,8 +1,9 @@
-# What is Kafka Connect?
-
+
 [![Docker Hub](https://img.shields.io/badge/docker-latest-blue.svg)](https://hub.docker.com/r/adobe/experience-platform-streaming-connect/)
 [![Build Status](https://travis-ci.com/adobe/experience-platform-streaming-connect.svg?branch=master)](https://travis-ci.com/adobe/experience-platform-streaming-connect)

+# What is Kafka Connect?
+
 "[Kafka Connect](https://docs.confluent.io/current/connect/index.html)", an open source component of Apache Kafka, is a framework for connecting Kafka with external systems such as databases, key-value stores, search indexes, and file systems.

 Kafka Connect is a framework which enables connectors developed by the open source community around Apache Kafka. It allows developers to easily import data from their data sources directly into Kafka, and then take that data from Kafka and then feed it into other systems like Elastic Search.
@@ -100,16 +101,26 @@ Enter Client Secret
 ***
 Enter JWT Token
 ***
+Enter Schema Name: [default: Streaming_Connect_Schema_20191014074347]

-Making call to create schema https://platform.adobe.io/ with name Streaming_test_profile_api-20190922211238
-Schema ID: https://ns.adobe.com/<tenant>/schemas/090d01896b3cbd72dc7defff1290eb99
-Data Set: ["@/dataSets/5d86d1a29ba7e11648cc3afb"]
-Streaming Connection: https://dcs.adobedc.net/collection/1e58b84cb62853b333b54980c45bdb40fc3bf80bc47022da0f76eececb2f9237
-AEP Sink Connector aep-sink-connector-20190922211238
+Making call to create schema to https://platform.adobe.io/ with name Streaming_Connect_Schema_20191014074347
+Schema ID: https://ns.adobe.com/<tenant>/schemas/<schema ID>
+Enter Dataset Name: [default: Streaming_Ingest_Test_20191014074347]
+
+Making call to create dataset to https://platform.adobe.io/ with name Streaming_Ingest_Test_20191014074347
+Data Set: ["@/dataSets/<Dataset ID>"]
+Enter Streaming Connection Name: [default: My Streaming Connection-20191014074347]
+
+Enter Streaming Connection Source: [default: My Streaming Source-20191014074347]
+
+Making call to create streaming connection to https://platform.adobe.io/ with name My Streaming Connection-20191014074347 and source My Streaming Source-20191014074347
+Streaming Connection: https://dcs.adobedc.net/collection/<Streaming Connection ID>
+AEP Sink Connector aep-sink-connector-20191014074347
 Enter the number of Experience events to publish
-5
-Publishing 5 messages for Data set 5d86d1a29ba7e11648cc3afb and schema https://ns.adobe.com/<tenant>/schemas/090d01896b3cbd72dc7defff1290eb99
-Published 5 messages
+100
+Publishing 100 messages for Data set <Dataset ID> and schema https://ns.adobe.com/<tenant>/schemas/<schema ID>
+Published 100 messages
+
 ```

 The quick-start script will save values for newly created resources like schema and dataset in application.conf
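
Registering the sink connector happens over the standard Kafka Connect REST API; this is what setup.sh does when it posts to http://kafka-connect:8083/connectors. As a rough illustration of that call, the sketch below uses placeholder values: the connector class name and the aep.* property names are assumptions rather than values taken from this commit, so substitute whatever the quick-start script and application.conf actually generate.

    # Hedged sketch of a Kafka Connect REST call that registers a sink connector.
    # The connector class and aep.* keys are illustrative placeholders, not from this commit.
    curl -s -X POST http://kafka-connect:8083/connectors \
      -H "Content-Type: application/json" \
      -d '{
            "name": "aep-sink-connector-example",
            "config": {
              "connector.class": "com.adobe.platform.streaming.sink.impl.AEPSinkConnector",
              "tasks.max": "1",
              "topics": "<connect topic name>",
              "aep.endpoint": "https://dcs.adobedc.net/collection/<Streaming Connection ID>"
            }
          }'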

docker-compose.yml

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@
 # governing permissions and limitations under the License.
 ##

-version: "2.1"
+version: "3"
 services:
   zoo1:
     image: zookeeper:3.4.9
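
Because the compose file format moves from "2.1" to "3", the local Docker Engine and docker-compose need to be recent enough to understand the version 3 schema. A quick sanity check, assuming docker-compose is on the PATH and the command is run from the repository root:

    # Print the installed compose version, then validate the updated file without starting anything.
    docker-compose version
    docker-compose -f docker-compose.yml config > /dev/null && echo "docker-compose.yml is valid"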

setup.sh

Lines changed: 38 additions & 10 deletions
@@ -41,25 +41,33 @@ if [ -z "${JWT_TOKEN}" ]; then
 fi

 # Fetch Access token
-access_token=$(curl -X POST \
+ims_response=$(curl -i -o - --silent -X POST \
   https://${IMS_HOST}/ims/exchange/jwt/ \
   -H "Content-Type: multipart/form-data" \
   -F client_id=${CLIENT_ID} \
   -F client_secret=${CLIENT_SECRET} \
-  -F jwt_token=${JWT_TOKEN} 2>/dev/null | jq -r ".access_token")
+  -F jwt_token=${JWT_TOKEN} 2>/dev/null)
+
+ims_response_code=$(echo "$ims_response" | grep -v '100 Continue' | grep HTTP | awk '{print $2}')
+if [[ "${ims_response_code}" -ge "400" ]]; then
+  echo "Error: Unable to fetch access token from IMS, response code: ${ims_response_code}";
+  exit 1;
+fi
+
+access_token=$(echo "${ims_response}" | grep 'access_token' | jq -r ".access_token");

 dateString=`date +%Y%m%d%H%M%S`

 DEFAULT_SCHEMA_NAME="Streaming_Connect_Schema_${dateString}"
-if [ -z "${SCHEMA_NAME}" ]; then
+if [[ -z "${SCHEMA_NAME}" ]]; then
   echo "Enter Schema Name: [default: ${DEFAULT_SCHEMA_NAME}]"
   read SCHEMA_NAME
 fi

 SCHEMA_NAME=${SCHEMA_NAME:-${DEFAULT_SCHEMA_NAME}}
 echo "Making call to create schema to ${PLATFORM_GATEWAY} with name ${SCHEMA_NAME}"

-schema=$(curl -X POST \
+schema_response=$(curl -i -o - --silent -X POST \
   ${PLATFORM_GATEWAY}data/foundation/schemaregistry/tenant/schemas \
   -H "Authorization: Bearer ${access_token}" \
   -H "Content-Type: application/json" \
@@ -92,13 +100,20 @@ schema=$(curl -X POST \
     "meta:immutableTags": [
       "union"
     ]
-  }' 2>/dev/null | jq -r '.["$id"]')
+  }' 2>/dev/null)
+
+schema_response_code=$(echo "$schema_response" | grep -v '100 Continue' | grep HTTP | awk '{print $2}')
+if [[ "${schema_response_code}" -ge "400" ]]; then
+  echo "Error: Unable to create schema, response code: ${schema_response_code}";
+  exit 1;
+fi

+schema=$(echo "${schema_response}" | grep 'meta:resourceType' | jq -r '.["$id"]')
 echo "Schema ID: ${schema}"

 # Create a dataset for the schema
 DEFAULT_DATASET_NAME="Streaming_Ingest_Test_${dateString}"
-if [ -z "${DATASET_NAME}" ]; then
+if [[ -z "${DATASET_NAME}" ]]; then
   echo "Enter Dataset Name: [default: ${DEFAULT_DATASET_NAME}]"
   read DATASET_NAME
 fi
@@ -143,15 +158,15 @@ fi
 INLET_NAME=${STREAMING_CONNECTION_NAME:-${DEFAULT_INLET_NAME}}

 DEFAULT_INLET_SOURCE="My Streaming Source-${dateString}"
-if [ -z "${STREAMING_CONNECTION_SOURCE}" ]; then
+if [[ -z "${STREAMING_CONNECTION_SOURCE}" ]]; then
   echo "Enter Streaming Connection Source: [default: ${DEFAULT_INLET_SOURCE}]"
   read STREAMING_CONNECTION_SOURCE
 fi
 INLET_SOURCE=${STREAMING_CONNECTION_SOURCE:-${DEFAULT_INLET_SOURCE}}

 echo "Making call to create streaming connection to ${PLATFORM_GATEWAY} with name ${INLET_NAME} and source ${INLET_SOURCE}"

-streamingEndpoint=$(curl POST \
+inlet_response=$(curl -i -o - --silent -X POST \
   ${PLATFORM_GATEWAY}data/core/edge/inlet \
   -H "Authorization: Bearer ${access_token}" \
   -H "Content-Type: application/json" \
@@ -162,7 +177,14 @@ streamingEndpoint=$(curl POST \
     "description" : "Collects streaming data from my website",
     "sourceId" : "'"${INLET_SOURCE}"'",
     "dataType": "xdm"
-  }' 2> /dev/null | jq -r ".inletUrl" )
+  }' 2> /dev/null)
+
+inlet_response_code=$(echo "$inlet_response" | grep -v '100 Continue' | grep HTTP | awk '{print $2}')
+if [[ "${inlet_response_code}" -ge "400" ]]; then
+  echo "Error: Unable to create streaming connection, response code: ${inlet_response_code}";
+  exit 1;
+fi
+streamingEndpoint=$(echo "${inlet_response}" | grep 'inletUrl' | jq -r ".inletUrl")

 echo "Streaming Connection: "${streamingEndpoint}

@@ -175,7 +197,7 @@ ${KAFKA_HOME}/bin/kafka-topics.sh \
   --topic ${connectTopicName}

 aemSinkConnectorName="aep-sink-connector-${dateString}"
-aemSinkConnector=$(curl -s -X POST \
+aem_connector_response=$(curl -i -o - --silent -X POST \
   http://kafka-connect:8083/connectors \
   -H "Content-Type: application/json" \
   -d '{
@@ -189,6 +211,12 @@ aemSinkConnector=$(curl -s -X POST \
     }
   }')

+aem_connector_response_code=$(echo "$aem_connector_response" | grep -v '100 Continue' | grep HTTP | awk '{print $2}')
+if [[ "${aem_connector_response_code}" -ge "400" ]]; then
+  echo "Error: Unable to create streaming connector, response code: ${aem_connector_response_code}";
+  exit 1;
+fi
+
 echo "AEP Sink Connector ${aemSinkConnectorName}"

 datasetId=`echo ${dataSet} | cut -d'"' -f2 | cut -d'/' -f3`
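
The same check-and-exit pattern now follows every curl call in the script: curl -i keeps the response headers, the HTTP status line is pulled out with grep/awk (skipping any interim 100 Continue), and a status of 400 or above aborts the run. A minimal sketch of how that repeated logic could be factored into a single helper, not part of this commit:

    # Sketch only: wraps the status-code check this commit repeats after each request.
    check_response() {
      local response="$1" step="$2"
      local code
      code=$(echo "${response}" | grep -v '100 Continue' | grep HTTP | awk '{print $2}')
      if [[ "${code}" -ge "400" ]]; then
        echo "Error: ${step} failed, response code: ${code}"
        exit 1
      fi
    }

    # Example usage with the IMS exchange call from setup.sh:
    #   ims_response=$(curl -i -o - --silent -X POST https://${IMS_HOST}/ims/exchange/jwt/ ...)
    #   check_response "${ims_response}" "fetch access token from IMS"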
