6 changes: 3 additions & 3 deletions kafka-connect-base/Dockerfile.ubi9
@@ -57,11 +57,11 @@ gpgcheck=1 \n\
gpgkey=${CONFLUENT_PACKAGES_REPO}/archive.key \n\
enabled=1 " > /etc/yum.repos.d/confluent.repo \
&& echo "===> Installing Schema Registry (for Avro jars) ..." \
- && yum install -y confluent-schema-registry-${CONFLUENT_VERSION} \
+ && microdnf install -y confluent-schema-registry-${CONFLUENT_VERSION} \
&& echo "===> Installing Confluent Hub client ..."\
- && yum install -y confluent-hub-client-${CONFLUENT_VERSION} \
+ && microdnf install -y confluent-hub-client-${CONFLUENT_VERSION} \
&& echo "===> Cleaning up ..." \
- && yum clean all \
+ && microdnf clean all \
&& rm -rf /tmp/* /etc/yum.repos.d/confluent.repo \
&& echo "===> Setting up ${COMPONENT} dirs ..." \
&& mkdir -p /etc/${COMPONENT} /etc/${COMPONENT}/secrets /etc/${COMPONENT}/jars /usr/share/confluent-hub-components \
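Note: the switch from yum to microdnf across these RUN steps tracks the new base image, which is presumably built on ubi9-minimal — minimal UBI images ship microdnf instead of yum, with compatible flags for the operations used here. A minimal sketch of the recurring pattern (package name taken from the diff above):

    microdnf install -y confluent-schema-registry-${CONFLUENT_VERSION} \
        && microdnf clean all \
        && rm -rf /tmp/*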
2 changes: 1 addition & 1 deletion kafka-connect/Dockerfile.ubi9
@@ -45,7 +45,7 @@ USER root

RUN echo "===> Installing ${COMPONENT}..." \
&& echo "===> Cleaning up ..." \
- && yum clean all \
+ && microdnf clean all \
&& rm -rf /tmp/*

USER appuser
8 changes: 4 additions & 4 deletions kafka/Dockerfile.ubi9
@@ -14,9 +14,9 @@
# limitations under the License.

ARG DOCKER_UPSTREAM_REGISTRY
- ARG DOCKER_UPSTREAM_TAG=ubi9-latest
+ ARG DOCKER_UPSTREAM_TAG

- FROM ${DOCKER_UPSTREAM_REGISTRY}confluentinc/cp-base-new:${DOCKER_UPSTREAM_TAG}
+ FROM 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/dev/confluentinc/cp-base-java:dev-8.0.x-7eaa5b27-ubi9.arm64

ARG PROJECT_VERSION
ARG ARTIFACT_ID
@@ -60,9 +60,9 @@ baseurl=${CONFLUENT_PACKAGES_REPO}/ \n\
gpgcheck=1 \n\
gpgkey=${CONFLUENT_PACKAGES_REPO}/archive.key \n\
enabled=1 " > /etc/yum.repos.d/confluent.repo \
- && yum install -y confluent-kafka-${CONFLUENT_VERSION} \
+ && microdnf install -y confluent-kafka-${CONFLUENT_VERSION} \
&& echo "===> clean up ..." \
- && yum clean all \
+ && microdnf clean all \
&& rm -rf /tmp/* /etc/yum.repos.d/confluent.repo \
&& echo "===> Setting up ${COMPONENT} dirs" \
&& mkdir -p /var/lib/${COMPONENT}/data /etc/${COMPONENT}/secrets \
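Note: dropping the ubi9-latest default means DOCKER_UPSTREAM_TAG must now be supplied with --build-arg, while the hardcoded ECR FROM line reads as a temporary pin for this draft. A hedged sketch of the intended invocation, assuming the FROM line reverts to the parameterized form (registry value illustrative):

    docker build \
        --build-arg DOCKER_UPSTREAM_REGISTRY=registry.example.com/ \
        --build-arg DOCKER_UPSTREAM_TAG=ubi9-latest \
        -f kafka/Dockerfile.ubi9 kafka/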
38 changes: 19 additions & 19 deletions kafka/include/etc/confluent/docker/configure
@@ -17,8 +17,8 @@
. /etc/confluent/docker/bash-config

# Ensure that KAFKA_PROCESS_ROLES and CLUSTER_ID are defined
- dub ensure KAFKA_PROCESS_ROLES
- dub ensure CLUSTER_ID
+ ub ensure KAFKA_PROCESS_ROLES
+ ub ensure CLUSTER_ID

# unset KAFKA_ADVERTISED_LISTENERS from ENV when running as controller only
if [[ $KAFKA_PROCESS_ROLES == "controller" ]]
@@ -29,7 +29,7 @@ then
exit 1
fi
else
- dub ensure KAFKA_ADVERTISED_LISTENERS
+ ub ensure KAFKA_ADVERTISED_LISTENERS
fi

# By default, LISTENERS is derived from ADVERTISED_LISTENERS by replacing
@@ -38,10 +38,10 @@ fi
if [[ -z "${KAFKA_LISTENERS-}" ]] && [[ $KAFKA_PROCESS_ROLES != "controller" ]]
then
export KAFKA_LISTENERS
- KAFKA_LISTENERS=$(cub listeners "$KAFKA_ADVERTISED_LISTENERS")
+ KAFKA_LISTENERS=$(ub listeners "$KAFKA_ADVERTISED_LISTENERS")
fi

- dub path /etc/kafka/ writable
+ ub path /etc/kafka/ writable

if [[ -z "${KAFKA_LOG_DIRS-}" ]]
then
@@ -79,31 +79,31 @@ if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] && [[ $KAFKA_ADVERTISED_LISTENERS =
then
echo "SSL is enabled."

- dub ensure KAFKA_SSL_KEYSTORE_FILENAME
+ ub ensure KAFKA_SSL_KEYSTORE_FILENAME
export KAFKA_SSL_KEYSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_FILENAME"
- dub path "$KAFKA_SSL_KEYSTORE_LOCATION" exists
+ ub path "$KAFKA_SSL_KEYSTORE_LOCATION" exists

- dub ensure KAFKA_SSL_KEY_CREDENTIALS
+ ub ensure KAFKA_SSL_KEY_CREDENTIALS
KAFKA_SSL_KEY_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEY_CREDENTIALS"
- dub path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" exists
+ ub path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" exists
export KAFKA_SSL_KEY_PASSWORD
KAFKA_SSL_KEY_PASSWORD=$(cat "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION")

- dub ensure KAFKA_SSL_KEYSTORE_CREDENTIALS
+ ub ensure KAFKA_SSL_KEYSTORE_CREDENTIALS
KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_CREDENTIALS"
- dub path "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION" exists
+ ub path "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION" exists
export KAFKA_SSL_KEYSTORE_PASSWORD
KAFKA_SSL_KEYSTORE_PASSWORD=$(cat "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION")

if [[ -n "${KAFKA_SSL_CLIENT_AUTH-}" ]] && ( [[ $KAFKA_SSL_CLIENT_AUTH == *"required"* ]] || [[ $KAFKA_SSL_CLIENT_AUTH == *"requested"* ]] )
then
- dub ensure KAFKA_SSL_TRUSTSTORE_FILENAME
+ ub ensure KAFKA_SSL_TRUSTSTORE_FILENAME
export KAFKA_SSL_TRUSTSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_FILENAME"
- dub path "$KAFKA_SSL_TRUSTSTORE_LOCATION" exists
+ ub path "$KAFKA_SSL_TRUSTSTORE_LOCATION" exists

- dub ensure KAFKA_SSL_TRUSTSTORE_CREDENTIALS
+ ub ensure KAFKA_SSL_TRUSTSTORE_CREDENTIALS
KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_CREDENTIALS"
- dub path "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION" exists
+ ub path "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION" exists
export KAFKA_SSL_TRUSTSTORE_PASSWORD
KAFKA_SSL_TRUSTSTORE_PASSWORD=$(cat "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION")
fi
@@ -115,7 +115,7 @@ if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] && [[ $KAFKA_ADVERTISED_LISTENERS =
then
echo "SASL" is enabled.

- dub ensure KAFKA_OPTS
+ ub ensure KAFKA_OPTS

if [[ ! $KAFKA_OPTS == *"java.security.auth.login.config"* ]]
then
@@ -131,6 +131,6 @@ then
fi
fi

- dub template "/etc/confluent/docker/${COMPONENT}.properties.template" "/etc/${COMPONENT}/${COMPONENT}.properties"
- dub template "/etc/confluent/docker/log4j2.yaml.template" "/etc/${COMPONENT}/log4j2.yaml"
- dub template "/etc/confluent/docker/tools-log4j2.yaml.template" "/etc/${COMPONENT}/tools-log4j2.yaml"
+ ub render-template "/etc/confluent/docker/${COMPONENT}.properties.template" > /etc/${COMPONENT}/${COMPONENT}.properties
+ ub render-template "/etc/confluent/docker/log4j2.yaml.template" > /etc/${COMPONENT}/log4j2.yaml
+ ub render-template "/etc/confluent/docker/tools-log4j2.yaml.template" > /etc/${COMPONENT}/tools-log4j2.yaml
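Note: the dub/cub helpers are consolidated into a single ub binary with the same subcommand shapes, except template rendering: dub template took source and destination paths and wrote the file itself, whereas ub render-template emits to stdout and the script redirects. A sketch of the renamed calls as used in this script (semantics assumed to match dub's: ensure fails when the variable is unset, path asserts a filesystem property):

    ub ensure KAFKA_PROCESS_ROLES          # exit non-zero if the variable is unset
    ub path /etc/kafka/ writable           # assert the path is writable
    ub render-template /etc/confluent/docker/kafka.properties.template \
        > /etc/kafka/kafka.properties      # render to stdout, redirect to target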
2 changes: 1 addition & 1 deletion kafka/include/etc/confluent/docker/ensure
@@ -18,7 +18,7 @@

export KAFKA_DATA_DIRS=${KAFKA_DATA_DIRS:-"/var/lib/kafka/data"}
echo "===> Check if $KAFKA_DATA_DIRS is writable ..."
- dub path "$KAFKA_DATA_DIRS" writable
+ ub path "$KAFKA_DATA_DIRS" writable

# Required step: Format the storage directory with provided cluster ID unless it already exists.
echo "===> Using provided cluster id $CLUSTER_ID ..."
28 changes: 9 additions & 19 deletions kafka/include/etc/confluent/docker/kafka.properties.template
@@ -1,21 +1,11 @@
- {% set excluded_props = ['KAFKA_VERSION',
-   'KAFKA_HEAP_OPTS'
-   'KAFKA_LOG4J_OPTS',
-   'KAFKA_OPTS',
-   'KAFKA_JMX_OPTS',
-   'KAFKA_JVM_PERFORMANCE_OPTS',
-   'KAFKA_GC_LOG_OPTS',
-   'KAFKA_LOG4J_ROOT_LOGLEVEL',
-   'KAFKA_LOG4J_LOGGERS',
-   'KAFKA_TOOLS_LOG4J_LOGLEVEL']
-   -%}
+ {{- $excludedProps := stringSlice "KAFKA_VERSION" "KAFKA_HEAP_OPTS" "KAFKA_LOG4J_OPTS" "KAFKA_OPTS" "KAFKA_JMX_OPTS" "KAFKA_JVM_PERFORMANCE_OPTS" "KAFKA_GC_LOG_OPTS" "KAFKA_LOG4J_ROOT_LOGLEVEL" "KAFKA_LOG4J_LOGGERS" "KAFKA_TOOLS_LOG4J_LOGLEVEL" -}}

- {% set kafka_props = env_to_props('KAFKA_', '', exclude=excluded_props) -%}
- {% for name, value in kafka_props.items() -%}
- {{name}}={{value}}
- {% endfor -%}
+ {{- $kafkaProps := envToProps "KAFKA_" "" $excludedProps nil nil -}}
+ {{ range $name, $value := $kafkaProps }}
+ {{$name}}={{$value}}
+ {{ end }}

- {% set confluent_support_props = env_to_props('CONFLUENT_SUPPORT_', 'confluent.support.') -%}
- {% for name, value in confluent_support_props.items() -%}
- {{name}}={{value}}
- {% endfor -%}
+ {{- $confluentSupportProps := envToProps "CONFLUENT_SUPPORT_" "confluent.support." nil nil nil -}}
+ {{ range $name, $value := $confluentSupportProps }}
+ {{$name}}={{$value}}
+ {{ end }}
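Note: env_to_props becomes envToProps with positional arguments — env prefix, property prefix, then the exclusion list and two further parameters passed as nil here. Assuming it keeps the established mapping of KAFKA_FOO_BAR to foo.bar, rendering would look roughly like this (variable values hypothetical):

    export KAFKA_BROKER_ID=1
    export KAFKA_NUM_NETWORK_THREADS=3
    ub render-template kafka.properties.template
    # expected output:
    #   broker.id=1
    #   num.network.threads=3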
62 changes: 42 additions & 20 deletions kafka/include/etc/confluent/docker/log4j2.yaml.template
@@ -10,27 +10,49 @@ Configuration:

Loggers:
Root:
- level: "{{ env['KAFKA_LOG4J_ROOT_LOGLEVEL'] | default('INFO') }}"
+ level: "{{ getEnv "KAFKA_LOG4J_ROOT_LOGLEVEL" "INFO" }}"
AppenderRef:
- ref: STDOUT

Logger:
- {% set loggers = {
-   'kafka': 'INFO',
-   'kafka.network.RequestChannel$': 'WARN',
-   'kafka.producer.async.DefaultEventHandler': 'DEBUG',
-   'kafka.request.logger': 'WARN',
-   'kafka.controller': 'TRACE',
-   'kafka.log.LogCleaner': 'INFO',
-   'state.change.logger': 'TRACE',
-   'kafka.authorizer.logger': 'WARN'
-   } -%}
- {% if env['KAFKA_LOG4J_LOGGERS'] %}
- {% set loggers = parse_log4j_loggers(env['KAFKA_LOG4J_LOGGERS'], loggers) %}
- {% endif %}
- {% for logger,loglevel in loggers.items() %}
- - name: "{{ logger }}"
-   level: "{{ loglevel }}"
-   AppenderRef:
-     ref: STDOUT
- {% endfor %}
+ - name: "kafka"
+   level: "INFO"
+   AppenderRef:
+     ref: STDOUT
+ - name: "kafka.network.RequestChannel$"
+   level: "WARN"
+   AppenderRef:
+     ref: STDOUT
+ - name: "kafka.producer.async.DefaultEventHandler"
+   level: "DEBUG"
+   AppenderRef:
+     ref: STDOUT
+ - name: "kafka.request.logger"
+   level: "WARN"
+   AppenderRef:
+     ref: STDOUT
+ - name: "kafka.controller"
+   level: "TRACE"
+   AppenderRef:
+     ref: STDOUT
+ - name: "kafka.log.LogCleaner"
+   level: "INFO"
+   AppenderRef:
+     ref: STDOUT
+ - name: "state.change.logger"
+   level: "TRACE"
+   AppenderRef:
+     ref: STDOUT
+ - name: "kafka.authorizer.logger"
+   level: "WARN"
+   AppenderRef:
+     ref: STDOUT
+ {{- if getEnv "KAFKA_LOG4J_LOGGERS" "" -}}
+ {{- $customLoggers := parseLog4jLoggers (getEnv "KAFKA_LOG4J_LOGGERS" "") -}}
+ {{- range $logger, $loglevel := $customLoggers -}}
+ - name: "{{ $logger }}"
+   level: "{{ $loglevel }}"
+   AppenderRef:
+     ref: STDOUT
+ {{- end -}}
+ {{- end -}}
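Note: this template inlines the eight default loggers as literal YAML and only feeds KAFKA_LOG4J_LOGGERS through parseLog4jLoggers, so custom entries are appended after the defaults rather than merged over them — overriding a default such as kafka.controller would yield two entries with that name, which may be worth confirming. Assuming the usual comma-separated logger=level format:

    export KAFKA_LOG4J_LOGGERS="kafka.controller=INFO,my.app=DEBUG"
    # rendered after the literal defaults (values hypothetical):
    #   - name: "kafka.controller"
    #     level: "INFO"
    #   - name: "my.app"
    #     level: "DEBUG"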
2 changes: 1 addition & 1 deletion kafka/include/etc/confluent/docker/tools-log4j2.yaml.template
@@ -10,6 +10,6 @@ Configuration:

Loggers:
Root:
- level: "{{ env['KAFKA_TOOLS_LOG4J_LOGLEVEL'] | default('WARN') }}"
+ level: "{{ getEnv "KAFKA_TOOLS_LOG4J_LOGLEVEL" "WARN" }}"
AppenderRef:
- ref: STDERR
27 changes: 9 additions & 18 deletions server/include/etc/confluent/docker/log4j2.yaml.template
@@ -10,27 +10,18 @@ Configuration:

Loggers:
Root:
- level: "{{ env['KAFKA_LOG4J_ROOT_LOGLEVEL'] | default('INFO') }}"
+ level: "{{ getEnv "KAFKA_LOG4J_ROOT_LOGLEVEL" | default "INFO" }}"
AppenderRef:
- ref: STDOUT

Logger:
- {% set loggers = {
-   'kafka': 'INFO',
-   'kafka.network.RequestChannel$': 'WARN',
-   'kafka.producer.async.DefaultEventHandler': 'DEBUG',
-   'kafka.request.logger': 'WARN',
-   'kafka.controller': 'TRACE',
-   'kafka.log.LogCleaner': 'INFO',
-   'state.change.logger': 'TRACE',
-   'kafka.authorizer.logger': 'WARN'
-   } -%}
- {% if env['KAFKA_LOG4J_LOGGERS'] %}
- {% set loggers = parse_log4j_loggers(env['KAFKA_LOG4J_LOGGERS'], loggers) %}
- {% endif %}
- {% for logger,loglevel in loggers.items() %}
- - name: "{{ logger }}"
-   level: "{{ loglevel }}"
+ {{- $loggers := createStringSliceMap "kafka" "INFO" "kafka.network.RequestChannel$" "WARN" "kafka.producer.async.DefaultEventHandler" "DEBUG" "kafka.request.logger" "WARN" "kafka.controller" "TRACE" "kafka.log.LogCleaner" "INFO" "state.change.logger" "TRACE" "kafka.authorizer.logger" "WARN" -}}
+ {{- if getEnv "KAFKA_LOG4J_LOGGERS" -}}
+ {{- $loggers = parseLog4jLoggers (getEnv "KAFKA_LOG4J_LOGGERS") $loggers -}}
+ {{- end -}}
+ {{- range $logger, $loglevel := $loggers -}}
+ - name: "{{ $logger }}"
+   level: "{{ $loglevel }}"
  AppenderRef:
    ref: STDOUT
- {% endfor %}
+ {{- end }}
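Note: unlike the kafka template above, this one keeps the defaults in a map (createStringSliceMap, presumably taking alternating key/value arguments) and merges KAFKA_LOG4J_LOGGERS into it via parseLog4jLoggers before a single range loop, so an override replaces the default level instead of appending a duplicate entry. The Root level here also pipes getEnv through default, versus the two-argument getEnv form in the kafka template — worth unifying. Sketch of the merge, under the same assumed logger=level format:

    export KAFKA_LOG4J_LOGGERS="kafka.controller=INFO"
    # kafka.controller is rendered once, at INFO, replacing the TRACE default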