Skip to content

Commit d25d2be

Browse files
committed
Add logic to validate that log events are delivered to New Relic
1 parent 9391d40 commit d25d2be

File tree

2 files changed

+93
-25
lines changed

2 files changed

+93
-25
lines changed

e2e_tests/config-file.cfg

+3-9
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,10 @@
1-
# test resources
21
S3_BUCKET=unified-firehose-test-bucket
32
NEW_RELIC_REGION=US
43
NEW_RELIC_ACCOUNT_ID=2813435
5-
6-
# build constants
74
BUILD_DIR_BASE=.aws-sam/build
8-
9-
# template constants
105
TEMPLATE_FILE_NAME=firehose-template.yaml
11-
FIREHOSE_STACK_NAME=NewRelicFirehoseTestStack
12-
13-
# deployment constants
6+
DEFAULT_STACK_NAME=NewRelicFirehoseTestStack
147
LOG_GROUP_NAME=aws-unified-firehose-e2e-test-log-group
8+
LOG_STREAM_NAME=test-log-stream
159
LOG_GROUP_FILTER_PATTERN=ERROR
16-
10+
FIREHOSE_STREAM_LOGICAL_ID=NewRelicLogsFirehoseStreamToNewRelic

e2e_tests/firehose_e2e_tests.sh

+90-16
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,8 @@ deploy_firehose_stack() {
2222
StoreNRLicenseKeyInSecretManager="$store_secret_in_secret_manager" \
2323
LogGroupConfig="$log_group_config" \
2424
CommonAttributes="$common_attributes" \
25-
--capabilities CAPABILITY_IAM
25+
--capabilities CAPABILITY_NAMED_IAM
26+
2627
}
2728

2829
validate_stack_deployment_status() {
@@ -67,23 +68,32 @@ validate_stack_resources() {
6768
log_group_name=$2
6869
log_group_filter=$3
6970

70-
lambda_physical_id=$(aws cloudformation describe-stack-resources \
71+
firehose_stream_physical_id=$(aws cloudformation describe-stack-resources \
7172
--stack-name "$stack_name" \
72-
--logical-resource-id "$LAMBDA_LOGICAL_RESOURCE_ID" \
73+
--logical-resource-id "$FIREHOSE_STREAM_LOGICAL_ID" \
7374
--query "StackResources[0].PhysicalResourceId" \
7475
--output text
7576
)
76-
lambda_function_arn=$(aws lambda get-function --function-name "$lambda_physical_id" \
77-
--query "Configuration.FunctionArn" \
77+
78+
# Get the ARN of the Firehose delivery stream using the physical ID
79+
firehose_stream_arn=$(aws firehose describe-delivery-stream \
80+
--delivery-stream-name "$firehose_stream_physical_id" \
81+
--query "DeliveryStreamDescription.DeliveryStreamARN" \
7882
--output text
7983
)
8084

8185
subscriptions=$(aws logs describe-subscription-filters --log-group-name "$log_group_name" --query 'subscriptionFilters[*].[destinationArn, filterPattern]' --output text)
8286

83-
if echo "$subscriptions" | grep -q "$lambda_function_arn" && echo "$subscriptions" | grep -q "$log_group_filter"; then
84-
echo "Lambda function $lambda_function_arn is subscribed to log group: $log_group_name with filter: $log_group_filter"
87+
# Check firehose_stream_arn is not null before checking subscriptions
88+
if [ -z "$firehose_stream_arn" ] || [ "$firehose_stream_arn" == "None" ]; then
89+
exit_with_error "Failed to retrieve Firehose delivery stream ARN for physical ID: $firehose_stream_physical_id"
90+
fi
91+
92+
# Check if the Firehose delivery stream is subscribed to the log group with the specified filter pattern
93+
if echo "$subscriptions" | grep -q "$firehose_stream_arn" && echo "$subscriptions" | grep -q "$log_group_filter"; then
94+
echo "Firehose Delivery Stream $firehose_stream_arn is subscribed to log group: $log_group_name with filter: $log_group_filter"
8595
else
86-
exit_with_error "Lambda function $lambda_function_arn is not subscribed to log group: $log_group_name"
96+
exit_with_error "Firehose Delivery Stream $firehose_stream_arn is not subscribed to log group: $log_group_name"
8797
fi
8898

8999
}
@@ -93,17 +103,68 @@ exit_with_error() {
93103
exit 1
94104
}
95105

106+
create_log_event() {
107+
echo "Creating log event in CloudWatch Log Group"
108+
log_group_name=$1
109+
log_stream_name=$2
110+
log_message=$3
111+
112+
# Check if the log stream exists
113+
log_stream_exists=$(aws logs describe-log-streams --log-group-name "$log_group_name" --log-stream-name-prefix "$log_stream_name" --query "logStreams[?logStreamName=='$log_stream_name'] | length(@)" --output text)
114+
115+
# Create a log stream
116+
if [ "$log_stream_exists" -eq 0 ]; then
117+
# Create a log stream if it does not exist
118+
aws logs create-log-stream --log-group-name "$log_group_name" --log-stream-name "$log_stream_name"
119+
fi
120+
121+
# Get the current timestamp in milliseconds
122+
timestamp=$(($(date +%s) * 1000 + $(date +%N) / 1000000))
123+
124+
# Put log event
125+
aws logs put-log-events \
126+
--log-group-name "$log_group_name" \
127+
--log-stream-name "$log_stream_name" \
128+
--log-events timestamp=$timestamp,message="$log_message"
129+
130+
echo "Log event created successfully."
131+
132+
}
133+
validate_logs_in_new_relic() {
  # Poll New Relic (NerdGraph NRQL API) until a Log record containing the
  # given marker string appears, or fail after 10 attempts (~5 minutes).
  # Arguments:
  #   $1 - New Relic user API key
  #   $2 - New Relic account id
  #   $3 - unique marker string expected in the log message
  user_key=$1
  account_id=$2
  file_name=$3

  nrql_query="SELECT * FROM Log WHERE message LIKE '%$file_name%' SINCE 10 minutes ago"
  # NOTE(review): the GraphQL payload is built by string interpolation; a
  # marker containing double quotes or backslashes would produce invalid
  # JSON. Acceptable for the controlled messages this test generates, but
  # building the body with jq would be more robust.
  query='{"query":"query($id: Int!, $nrql: Nrql!) { actor { account(id: $id) { nrql(query: $nrql) { results } } } }","variables":{"id":'$account_id',"nrql":"'$nrql_query'"}}'

  for attempt in {1..10}; do
    response=$(curl -s -X POST \
      -H "Content-Type: application/json" \
      -H "API-Key: $user_key" \
      -d "$query" \
      https://api.newrelic.com/graphql)

    # Fixed-string match (-F) so regex metacharacters in the marker cannot
    # cause false positives/negatives; '--' protects a leading dash.
    if echo "$response" | grep -qF -- "$file_name"; then
      echo "Log event successfully found in New Relic."
      return 0
    fi
    echo "Log event not found in New Relic. Retrying in 30 seconds... ($attempt/10)"
    sleep 30
  done

  exit_with_error "Log event not found in New Relic after 10 retries."
}
160+
161+
96162

97163
BASE_NAME=$(basename "$TEMPLATE_FILE_NAME" .yaml)
98164
BUILD_DIR="$BUILD_DIR_BASE/$BASE_NAME"
99165

100-
echo "Building and packaging the SAM template: $BASE_NAME"
101-
echo "Building and packaging the SAM template: $BUILD_DIR"
102-
echo pwd
103-
104166

105167
sam build --template-file "../$TEMPLATE_FILE_NAME" --build-dir "$BUILD_DIR"
106-
echo "build done packaging"
107168
sam package --s3-bucket "$S3_BUCKET" --template-file "$BUILD_DIR/template.yaml" --output-template-file "$BUILD_DIR/$TEMPLATE_FILE_NAME"
108169

109170

@@ -112,15 +173,28 @@ cat <<EOF > parameter.json
112173
EOF
113174
LOG_GROUP_NAMES=$(<parameter.json)
114175

115-
echo "Deploying the Firehose stack: $FIREHOSE_STACK_NAME"
176+
# Generate a random string to append to the default stack name
177+
RANDOM_STRING=$(openssl rand -hex 4)
178+
FIREHOSE_STACK_NAME="${DEFAULT_STACK_NAME}-${RANDOM_STRING}"
179+
180+
# Deploy the Firehose stack
116181
deploy_firehose_stack "$BUILD_DIR/$TEMPLATE_FILE_NAME" "$FIREHOSE_STACK_NAME" "$NEW_RELIC_LICENSE_KEY" "$NEW_RELIC_REGION" "$NEW_RELIC_ACCOUNT_ID" "true" "$LOG_GROUP_NAMES" "''"
117182

183+
# Validate the status of the Firehose stack
118184
validate_stack_deployment_status "$FIREHOSE_STACK_NAME"
119185

186+
# Validate the resources created by the Firehose stack
120187
validate_stack_resources "$FIREHOSE_STACK_NAME" "$LOG_GROUP_NAME" "$LOG_GROUP_FILTER_PATTERN"
121188

122-
delete_stack "$FIREHOSE_STACK_NAME"
123-
189+
# Generate a UUID and create a dynamic log message
190+
UUID=$(uuidgen)
191+
LOG_MESSAGE="RequestId: $UUID hello world $LOG_GROUP_FILTER_PATTERN"
124192

193+
# Create a log event in CloudWatch Logs
194+
create_log_event "$LOG_GROUP_NAME" "$LOG_STREAM_NAME" "$LOG_MESSAGE"
125195

196+
# Validate logs in New Relic
197+
validate_logs_in_new_relic "$NEW_RELIC_USER_KEY" "$NEW_RELIC_ACCOUNT_ID" "$LOG_MESSAGE"
126198

199+
# Delete the Firehose stack
200+
delete_stack "$FIREHOSE_STACK_NAME"

0 commit comments

Comments (0)