
Commit 241cb11

handle combined mode (controller+broker on same nodes)
1 parent 1c6b1d9 commit 241cb11

File tree (5 files changed, +210, -21 lines):
  docs/VARIABLES.md
  roles/kafka_broker/tasks/get_meta_properties.yml
  roles/kafka_controller/tasks/get_meta_properties.yml
  roles/variables/defaults/main.yml
  roles/variables/vars/main.yml

docs/VARIABLES.md (+18, -1)

@@ -924,11 +924,28 @@ Default: "{{ skip_restarts }}"
 
 ***
 
+### kraft_combined
+
+Boolean used to declare broker nodes as controller (combined mode). Do not use in production environment
+
+Default: false
+
 ### kafka_controller_quorum_voters
 
 Default controller quorum voters
 
-Default: "{% for controller_hostname in groups.kafka_controller|default([]) %}{% if loop.index > 1%},{% endif %}{{groups.kafka_controller.index(controller_hostname)|int + 9991}}@{{controller_hostname}}:{{ kafka_controller_listeners['controller']['port'] }}{%endfor%}"
+Default: "
+{%- if kraft_combined -%}
+{%- for broker_hostname in groups.kafka_broker|default([]) %}
+{%- if loop.index > 1%},{% endif -%}
+{{ groups.kafka_broker.index(broker_hostname)|int + 1 }}@{{ broker_hostname }}:{{ kafka_broker_listeners['controller']['port'] }}
+{%- endfor -%}
+{%- else -%}
+{%- for controller_hostname in groups.kafka_controller|default([]) -%}
+{%- if loop.index > 1%},{% endif -%}
+{{ groups.kafka_controller.index(controller_hostname)|int + 9991 }}@{{ controller_hostname }}:{{ kafka_controller_listeners['controller']['port'] }}
+{%- endfor -%}
+{%- endif -%}"
 
 ***
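
A purely illustrative rendering of the new default (host names and the controller port 9093 are assumptions, not values from this commit): for a three-node kafka_broker group of kafka1, kafka2, kafka3 with kraft_combined: true, the combined-mode branch of the template evaluates to roughly

  controller.quorum.voters=1@kafka1:9093,2@kafka2:9093,3@kafka3:9093

whereas the dedicated-controller branch keeps the historical 9991-based node ids, e.g. 9991@controller1:9093,9992@controller2:9093,9993@controller3:9093.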

roles/kafka_broker/tasks/get_meta_properties.yml (+95, -3)

@@ -1,12 +1,104 @@
 ---
+- name: Prepare SCRAM Users if needed
+  set_fact:
+    scram_users_to_create: []
+
+# with kraft combined mode, first install have to define clusterid, instead of getting it from dedicated controllers
+- name: Check meta.properties
+  run_once: true
+  when: kraft_combined
+  ansible.builtin.stat:
+    path: "{{ kafka_controller_final_properties['log.dirs'] }}/meta.properties"
+  register: meta_properties
+
+- name: Initialize ClusterId
+  when:
+    - kraft_combined
+    - not meta_properties.stat.exists
+  run_once: true
+  shell: "{{ binary_base_path }}/bin/kafka-storage random-uuid"
+  environment:
+    KAFKA_OPTS: "-Xlog:all=error -XX:+IgnoreUnrecognizedVMOptions"
+  register: random_uuid
+
+- name: Set ClusterId
+  when:
+    - kraft_combined
+    - not meta_properties.stat.exists
+  run_once: true
+  set_fact:
+    clusterid: "{{ random_uuid.stdout }}"
+  delegate_to: "{{ item }}"
+  delegate_facts: true
+  loop: "{{ groups.kafka_broker }}"
+
+## and initialize temporary controller admin user
+- name: Prepare SCRAM 512 admin user
+  when:
+    - kraft_combined
+    - "'SCRAM-SHA-512' in kafka_controller_sasl_enabled_mechanisms or 'SCRAM-SHA-512' in kafka_broker_sasl_enabled_mechanisms"
+  set_fact:
+    scram_users_to_create: "{{ scram_users_to_create + [ '--add-scram SCRAM-SHA-512=[name=\"'+ sasl_scram_users_final.admin.principal + '\",password=\"' + sasl_scram_users_final.admin.password + '\"]' ] }}"
+
+- name: Prepare SCRAM 256 admin user
+  when:
+    - kraft_combined
+    - "'SCRAM-SHA-256' in kafka_controller_sasl_enabled_mechanisms or 'SCRAM-SHA-256' in kafka_broker_sasl_enabled_mechanisms"
+  set_fact:
+    scram_users_to_create: "{{ scram_users_to_create + [ '--add-scram SCRAM-SHA-256=[name=\"'+ sasl_scram_users_final.admin.principal + '\",password=\"' + sasl_scram_users_final.admin.password + '\"]' ] }}"
+
+# after first install in combined mode, get clusterid from one broker node
+- name: Extract ClusterId from meta.properties on KRaft Controller
+  when:
+    - kraft_combined
+    - meta_properties.stat.exists
+  run_once: true
+  slurp:
+    src: "{{ kafka_controller_final_properties['log.dirs'] }}/meta.properties"
+  register: uuid_broker
+
+- name: Set ClusterId
+  when:
+    - kraft_combined
+    - meta_properties.stat.exists
+  run_once: true
+  set_fact:
+    clusterid: "{{ (uuid_broker['content'] | b64decode).partition('cluster.id=')[2].partition('\n')[0] }}"
+  delegate_to: "{{ item }}"
+  delegate_facts: true
+  loop: "{{ groups.kafka_broker }}"
+
+# with dedicated controller nodes, clusterid is already defined onto controller nodes
 - name: Extract ClusterId from meta.properties on KRaft Controller
+  when: not kraft_combined
+  run_once: true
   slurp:
     src: "{{ kafka_controller_final_properties['log.dirs'] }}/meta.properties"
   delegate_to: "{{ groups.kafka_controller[0] }}"
   register: uuid_broker
 
+- debug:
+    msg: "ClusterId = {{ (uuid_broker['content'] | b64decode).partition('cluster.id=')[2].partition('\n')[0] }}"
+  when: not kraft_combined
+  run_once: true
+- debug:
+    msg: "ClusterId delegated = {{ (uuid_broker['content'] | b64decode).partition('cluster.id=')[2].partition('\n')[0] }}"
+  when: not kraft_combined
+  run_once: true
+  delegate_to: "{{ groups.kafka_broker[1] }}"
+
+- name: Set ClusterId
+  when: not kraft_combined
+  run_once: true
+  set_fact:
+    clusterid: "{{ (uuid_broker['content'] | b64decode).partition('cluster.id=')[2].partition('\n')[0] }}"
+  delegate_to: "{{ item }}"
+  delegate_facts: true
+  loop: "{{ groups.kafka_broker }}"
+
+- debug:
+    msg: "ClusterId set = {{ clusterid }}"
+
 - name: Format Storage Directory
-  shell: "{{ binary_base_path }}/bin/kafka-storage format -t {{ clusterid }} -c {{ kafka_broker.config_file }} --ignore-formatted"
+  shell: "{{ binary_base_path }}/bin/kafka-storage format -t {{ clusterid }} -c {{ kafka_broker.config_file }} --ignore-formatted {{ scram_users_to_create|join(' ') }}"
   register: format_meta
-  vars:
-    clusterid: "{{ (uuid_broker['content'] | b64decode).partition('cluster.id=')[2].partition('\n')[0] }}"

roles/kafka_controller/tasks/get_meta_properties.yml (+54, -5)

@@ -1,13 +1,62 @@
 ---
-- name: Get ClusterId
+- name: Prepare SCRAM Users if needed
+  set_fact:
+    scram_users_to_create: []
+
+- name: Check meta.properties
+  run_once: true
+  ansible.builtin.stat:
+    path: "{{ kafka_controller_final_properties['log.dirs'] }}/meta.properties"
+  register: meta_properties
+
+# if meta.properties does not exists , create uuid
+- name: Initialize ClusterId
+  when: not meta_properties.stat.exists
+  run_once: true
   shell: "{{ binary_base_path }}/bin/kafka-storage random-uuid"
   environment:
     KAFKA_OPTS: "-Xlog:all=error -XX:+IgnoreUnrecognizedVMOptions"
-  register: uuid_key
+  register: random_uuid
+
+- name: Set ClusterId
+  when: not meta_properties.stat.exists
+  run_once: true
+  set_fact:
+    clusterid: "{{ random_uuid.stdout }}"
+  delegate_to: "{{ item }}"
+  delegate_facts: true
+  loop: "{{ groups['kafka_controller'] }}"
+
+## and initialize temporary controller admin user
+- name: Prepare SCRAM 512 admin user
+  when:
+    - "'SCRAM-SHA-512' in kafka_controller_sasl_enabled_mechanisms or 'SCRAM-SHA-512' in kafka_broker_sasl_enabled_mechanisms"
+  set_fact:
+    scram_users_to_create: "{{ scram_users_to_create + [ '--add-scram SCRAM-SHA-512=[name=\"'+ sasl_scram_users_final.admin.principal + '\",password=\"' + sasl_scram_users_final.admin.password + '\"]' ] }}"
+
+- name: Prepare SCRAM 256 admin user
+  when:
+    - "'SCRAM-SHA-256' in kafka_controller_sasl_enabled_mechanisms or 'SCRAM-SHA-256' in kafka_broker_sasl_enabled_mechanisms"
+  set_fact:
+    scram_users_to_create: "{{ scram_users_to_create + [ '--add-scram SCRAM-SHA-256=[name=\"'+ sasl_scram_users_final.admin.principal + '\",password=\"' + sasl_scram_users_final.admin.password + '\"]' ] }}"
+
+# else, extract it from meta.properties
+- name: Extract ClusterId from meta.properties
+  when: meta_properties.stat.exists
+  run_once: true
+  slurp:
+    src: "{{ kafka_controller_final_properties['log.dirs'] }}/meta.properties"
+  register: uuid_broker
+
+- name: Set ClusterId
+  when: meta_properties.stat.exists
   run_once: true
+  set_fact:
+    clusterid: "{{ (uuid_broker['content'] | b64decode).partition('cluster.id=')[2].partition('\n')[0] }}"
+  delegate_to: "{{ item }}"
+  delegate_facts: true
+  loop: "{{ groups['kafka_controller'] }}"
 
 - name: Format Data Directory
-  shell: "{{ binary_base_path }}/bin/kafka-storage format -t {{ clusterid }} -c {{ kafka_controller.config_file }} --ignore-formatted"
+  shell: "{{ binary_base_path }}/bin/kafka-storage format -t {{ clusterid }} -c {{ kafka_controller.config_file }} --ignore-formatted {{ scram_users_to_create|join(' ') }}"
   register: format_meta
-  vars:
-    clusterid: "{{ uuid_key.stdout }}"

roles/variables/defaults/main.yml (+18, -3)

@@ -416,8 +416,22 @@ zookeeper_skip_restarts: "{{ skip_restarts }}"
 
 #### kafka Controller variables ####
 
-### Default controller quorum voters
-kafka_controller_quorum_voters: "{% for controller_hostname in groups.kafka_controller|default([]) %}{% if loop.index > 1%},{% endif %}{{groups.kafka_controller.index(controller_hostname)|int + 9991}}@{{controller_hostname}}:{{ kafka_controller_listeners['controller']['port'] }}{%endfor%}"
+### set to true to install controller and broker on same nodes
+kraft_combined: false
+
+### Default controller quorum voters. Dynamically assigned later if not user provided
+kafka_controller_quorum_voters: >-
+  {%- if kraft_combined -%}
+  {%- for broker_hostname in groups.kafka_broker|default([]) %}
+  {%- if loop.index > 1%},{% endif -%}
+  {{ groups.kafka_broker.index(broker_hostname)|int + 1 }}@{{ broker_hostname }}:{{ kafka_broker_listeners['controller']['port'] }}
+  {%- endfor -%}
+  {%- else -%}
+  {%- for controller_hostname in groups.kafka_controller|default([]) -%}
+  {%- if loop.index > 1%},{% endif -%}
+  {{ groups.kafka_controller.index(controller_hostname)|int + 9991 }}@{{ controller_hostname }}:{{ kafka_controller_listeners['controller']['port'] }}
+  {%- endfor -%}
+  {%- endif -%}
 
 ### Default Kafka config prefix. Only valid to customize when installation_method: archive
 kafka_controller_config_prefix: "{{ config_prefix }}/controller"
@@ -569,7 +583,8 @@ kafka_broker_default_listeners: "{
   'ssl_enabled': {{ssl_enabled|string|lower}},
   'ssl_mutual_auth_enabled': {{ssl_mutual_auth_enabled|string|lower}},
   'sasl_protocol': '{{sasl_protocol}}'
-  }{% endif %}{% endif %}
+  }{% endif %}{% if kraft_enabled|bool and kraft_combined|bool %},
+  'controller': {{ kafka_controller_listeners['controller'] }}{% endif %}{% endif %}
  }"
 
 ### Dictionary to put additional listeners to be configured within Kafka. Each listener must include a 'name' and 'port' key. Optionally they can include the keys 'ssl_enabled', 'ssl_mutual_auth_enabled', and 'sasl_protocol'
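
As a usage sketch (group layout and host names are assumptions, not part of this commit), combined mode is driven entirely by the new flag: no kafka_controller group is declared and the brokers take both roles.

  all:
    vars:
      kraft_combined: true
    children:
      kafka_broker:
        hosts:
          kafka1:
          kafka2:
          kafka3: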

roles/variables/vars/main.yml (+25, -9)

@@ -14,7 +14,10 @@ base_path: "{{ ((config_base_path,('confluent-',archive_version) | join) | path_
 binary_base_path: "{{ ((config_base_path,('confluent-',archive_version) | join) | path_join) if installation_method == 'archive' else '/usr' }}"
 
 ### Runs kafka in Kraft mode if controller is present
-kraft_enabled: "{{ true if 'kafka_controller' in groups.keys() and groups['kafka_controller'] | length > 0 else false }}"
+kraft_enabled: "{{ true if kraft_combined or ('kafka_controller' in groups.keys() and groups['kafka_controller'] | length > 0) else false }}"
+
+### One controller node to delegate actions to
+kafka_controller_default_host: "{{ ( groups.kafka_broker[0] if kraft_combined else groups.kafka_controller[0] ) if kraft_enabled else 'no controller' }}"
 
 #### Config prefix paths ####
 zookeeper_config_prefix_path: "{{ zookeeper_config_prefix.strip('/') }}"
@@ -149,7 +152,7 @@ kafka_controller_properties:
       confluent.security.event.logger.exporter.kafka.topic.replicas: "{{audit_logs_destination_bootstrap_servers.split(',')|length if audit_logs_destination_enabled and rbac_enabled else kafka_controller_default_internal_replication_factor}}"
       confluent.support.metrics.enable: "true"
       confluent.support.customer.id: anonymous
-      log.dirs: "/var/lib/controller/data"
+      log.dirs: "{{ '/var/lib/kafka/data' if kraft_combined else '/var/lib/controller/data' }}"
       kafka.rest.enable: "{{kafka_controller_rest_proxy_enabled|string|lower}}"
       process.roles: controller
       controller.quorum.voters: "{{ kafka_controller_quorum_voters }}"
@@ -198,9 +201,9 @@ kafka_controller_properties:
     properties:
       sasl.kerberos.service.name: "{{kerberos_kafka_controller_primary}}"
   inter_broker_sasl:
-    enabled: "{{ kafka_broker_listeners[kafka_broker_inter_broker_listener_name]['sasl_protocol'] | default(sasl_protocol) | confluent.platform.normalize_sasl_protocol != 'none' }}"
+    enabled: "{{ kafka_controller_listeners[kafka_broker_inter_broker_listener_name]['sasl_protocol'] | default(kafka_controller_sasl_protocol) | confluent.platform.normalize_sasl_protocol != 'none' }}"
     properties:
-      sasl.mechanism.inter.broker.protocol: "{{kafka_broker_listeners[kafka_broker_inter_broker_listener_name]['sasl_protocol'] | default(sasl_protocol) | confluent.platform.normalize_sasl_protocol}}"
+      sasl.mechanism.inter.broker.protocol: "{{kafka_controller_listeners[kafka_broker_inter_broker_listener_name]['sasl_protocol'] | default(kafka_controller_sasl_protocol) | confluent.platform.normalize_sasl_protocol}}"
   sr:
     enabled: "{{ kafka_broker_schema_validation_enabled and 'schema_registry' in groups }}"
     properties:
@@ -340,7 +343,7 @@ kafka_broker_properties:
       socket.send.buffer.bytes: 102400
       transaction.state.log.min.isr: "{{ [ 2, kafka_broker_default_internal_replication_factor|int ] | min }}"
       transaction.state.log.replication.factor: "{{kafka_broker_default_internal_replication_factor}}"
-      advertised.listeners: "{% for listener in kafka_broker_listeners|dict2items %}{% if loop.index > 1%},{% endif %}{{ listener['value']['name'] }}://{{ listener['value']['hostname'] | default(hostvars[inventory_hostname]|confluent.platform.resolve_hostname) }}:{{ listener['value']['port'] }}{% endfor %}"
+      advertised.listeners: "{% for listener in kafka_broker_listeners|dict2items|rejectattr('key', 'equalto', 'controller') %}{% if loop.index > 1%},{% endif %}{{ listener['value']['name'] }}://{{ listener['value']['hostname'] | default(hostvars[inventory_hostname]|confluent.platform.resolve_hostname) }}:{{ listener['value']['port'] }}{% endfor %}"
       confluent.ansible.managed: 'true'
       confluent.license.topic: _confluent-command
       confluent.license.topic.replication.factor: "{{kafka_broker_default_internal_replication_factor}}"
@@ -355,11 +358,24 @@ kafka_broker_properties:
   broker_on_controller:
     enabled: "{{kraft_enabled|bool}}"
     properties:
-      process.roles: broker
+      process.roles: "broker{% if kraft_combined %},controller{% endif %}"
       controller.quorum.voters: "{{ kafka_controller_quorum_voters }}"
-      controller.listener.names: "{{kafka_controller_listeners['controller']['name']}}"
-      listener.security.protocol.map: "{% for listener in kafka_controller_listeners|dict2items %}{% if loop.index > 1%},{% endif %}{{ listener['value']['name'] }}:{{ listener['value'] | confluent.platform.kafka_protocol_defaults(kafka_controller_ssl_enabled, kafka_controller_sasl_protocol)}}{% endfor %},{% for listener in kafka_broker_listeners|dict2items %}{% if loop.index > 1%},{% endif %}{{ listener['value']['name'] }}:{{ listener['value'] | confluent.platform.kafka_protocol_defaults(ssl_enabled, sasl_protocol)}}{% endfor %}"
-      listeners: "{% for listener in kafka_broker_listeners|dict2items %}{% if loop.index > 1%},{% endif %}{{ listener['value']['name'] }}://{{ listener['value']['ip'] | default('') }}:{{ listener['value']['port'] }}{% endfor %}"
+      controller.listener.names: "{{ kafka_controller_listeners['controller']['name'] }}"
+      listener.security.protocol.map: >-
+        {%- for listener in kafka_controller_listeners|dict2items -%}
+        {%- if loop.index > 1%},{% endif -%}
+        {{ listener['value']['name'] }}:{{ listener['value'] | confluent.platform.kafka_protocol_defaults(kafka_controller_ssl_enabled, kafka_controller_sasl_protocol)}}
+        {%- endfor -%}
+        ,
+        {%- for listener in kafka_broker_listeners|dict2items -%}
+        {%- if loop.index > 1%},{% endif -%}
+        {{ listener['value']['name'] }}:{{ listener['value'] | confluent.platform.kafka_protocol_defaults(ssl_enabled, sasl_protocol)}}
+        {%- endfor -%}
+      listeners: >-
+        {%- for listener in kafka_broker_listeners|dict2items -%}
+        {%- if loop.index > 1 %},{% endif -%}
+        {{ listener['value']['name'] }}://{{ listener['value']['ip'] | default('') }}:{{ listener['value']['port'] }}
+        {%- endfor -%}
       confluent.cluster.link.metadata.topic.replication.factor: "{{kafka_broker_default_internal_replication_factor}}"
   broker_on_zookeeper:
     enabled: "{{not kraft_enabled|bool}}"
