Description
I've Apicurio Schema Registry Operator installed, also Schema Registry v2.6 installed. This is the default version of Schema Registry installed when using Apicurio Schema Registry Operator. Keycloak(v26.1.5) is installed and I have users ro-user, admin-user, dev-user created in realm -> 'readonly-realm'
I'm able to publish schema to the Schema Registry, and the ACLs are also working .. i.e. ro-user is able to read the schema but not publish it which users - admin-user & dev-user are able to publish/read the schemas.
I've created a Kafka Avro producer, and that is giving error
package com.versa.apicurio;
import com.versa.apicurio.Employee;
import com.versa.apicurio.DepartmentEnum;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Properties;
public class AvroKafkaProducer_Apicurio {
public static void main(String[] args) throws Exception {
Properties props = new Properties();
String topic = "topc";
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "IP:port");
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "io.apicurio.registry.serde.avro.AvroKafkaSerializer");
props.put("apicurio.registry.url", "https://apicurio-sr.vkp.versa-vani.com/apis/registry/v2");
// SSL Configuration
String keystore_location = "/Users/karanalang/Documents/Technology/0.ACME/strimzi-certs/versa-kafka-poc/syslog-vani-prefix/syslog-vani-prefix.p12";
String keystore_password = "<pwd>";
String truststore_location = "/Users/karanalang/Documents/Technology/0.ACME/strimzi-certs/versa-kafka-poc-tf/versa-kafka-poc-tf-cluster-ca-cert/versa-kafka-poc-tf-ca.p12";
String truststore_password = "<pwd>";
props.put("security.protocol", "SSL");
props.put("ssl.truststore.location", truststore_location);
props.put("ssl.truststore.password", truststore_password);
props.put("ssl.truststore.type", "PKCS12");
props.put("ssl.keystore.location", keystore_location);
props.put("ssl.keystore.password", keystore_password);
props.put("ssl.keystore.type", "PKCS12");
props.put(
"apicurio.registry.artifact-resolver-strategy",
"io.apicurio.registry.serde.avro.strategy.RecordIdStrategy"
);
String keycloakurl = "https://keycloak.vkp.versa-vani.com/realms/readonly-realm/";
// Retrieve auth token for schema registry using the readonly credentials.
// String authUrl = keycloakurl + "protocol/openid-connect/token/protocol/openid-connect/token"; // update this
String authUrl = keycloakurl + "protocol/openid-connect/token";
// String token = getAuthToken(authUrl, "ro-user", "ro-user");
String token = getAuthToken(authUrl, "ro-user", "ro-user");
// Pass the token to the registry – adjust the property key if needed per your registry documentation.
System.out.println("Token: " + token);
// props.put("apicurio.auth.token", token);
props.put("apicurio.registry.headers.Authorization", "Bearer " + token);
// TokenTest
String urlString = "https://apicurio-sr.vkp.versa-vani.com/apis/registry/v2/groups/default/artifacts";
URL url = new URL(urlString);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
conn.setRequestProperty("Authorization", "Bearer " + token);
int responseCode = conn.getResponseCode();
System.out.println("Response Code: " + responseCode);
if (responseCode == 200) {
BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()));
String inputLine;
StringBuilder response = new StringBuilder();
while ((inputLine = in.readLine()) != null) {
response.append(inputLine);
}
in.close();
System.out.println("Response: " + response.toString());
} else {
System.out.println("Failed to get response from server");
}
// Temp - Token Test
try (Producer<String, Employee> producer = new KafkaProducer<>(props)) {
// Create sample employees
Employee[] employees = {
Employee.newBuilder()
.setId(1)
.setName("John Doe")
.setAge(30)
.setSalary(75000.0f)
.setDepartment(DepartmentEnum.ENGINEERING)
.build(),
Employee.newBuilder()
.setId(2)
.setName("Jane Smith")
.setAge(28)
.setSalary(85000.0f)
.setDepartment(DepartmentEnum.HR)
.build(),
Employee.newBuilder()
.setId(3)
.setName("Bob Johnson")
.setSalary(65000.0f)
.setDepartment(DepartmentEnum.SALES)
.build()
};
// Send each employee to Kafka
for (Employee employee : employees) {
ProducerRecord<String, Employee> record =
new ProducerRecord<>(topic, String.valueOf(employee.getId()), employee);
producer.send(record, (metadata, exception) -> {
if (exception == null) {
System.out.printf("Sent employee %s to partition %d with offset %d%n",
employee.getName(), metadata.partition(), metadata.offset());
} else {
System.err.println("Error sending employee: " + exception.getMessage());
}
});
}
producer.flush();
}
}
private static String getAuthToken(String authUrl, String username, String password) throws Exception {
URL url = new URL(authUrl);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("POST");
conn.setDoOutput(true);
conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
String clientId = "apicurio"; // Adjust as needed
// Adjust client_id and grant_type as needed for your auth server.
String body = "grant_type=password&client_id="+clientId+"&username=" + username + "&password=" + password;
try (OutputStream os = conn.getOutputStream()) {
os.write(body.getBytes());
os.flush();
}
if (conn.getResponseCode() != 200) {
throw new RuntimeException("Failed to get token: HTTP error code : " + conn.getResponseCode());
}
StringBuilder response;
try (BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
response = new StringBuilder();
String line;
while ((line = br.readLine()) != null) {
response.append(line);
}
}
// For simplicity, extract the token from the response.
// Expected format: {"access_token":"your_token", ...}
String tokenResponse = response.toString();
int start = tokenResponse.indexOf("\"access_token\":\"") + 16;
int end = tokenResponse.indexOf("\"", start);
return tokenResponse.substring(start, end);
}
}
Note the token obtained seems to be OK, and I'm able to access the schemas using the Token generated.
Error -
(base) Karans-MacBook-Pro:apicurio_schema_registry karanalang$ mvn exec:java -Dexec.mainClass="com.versa.apicurio.AvroKafkaProducer_Apicurio" -Dlog4j.debug
[INFO] Scanning for projects...
[INFO]
[INFO] ------------< com.versa.apicurio:apicurio_schema_registry >-------------
[INFO] Building apicurio_schema_registry 1.0-SNAPSHOT
[INFO] --------------------------------[ jar ]---------------------------------
[INFO]
[INFO] --- exec-maven-plugin:3.5.0:java (default-cli) @ apicurio_schema_registry ---
Token: <token>
Response Code: 200
Response: {"artifacts":[{"createdBy":"admin-user","createdOn":"2025-04-24T00:50:54+0000","id":"com.versa.apicurio.Employee","modifiedBy":"admin-user","modifiedOn":"2025-04-24T00:50:54+0000","name":"Employee","state":"ENABLED","type":"AVRO"},{"createdBy":"admin-user","createdOn":"2025-04-24T00:44:33+0000","id":"employee-schema","modifiedBy":"admin-user","modifiedOn":"2025-04-24T00:44:33+0000","name":"Employee","state":"ENABLED","type":"AVRO"},{"createdBy":"dev-user","createdOn":"2025-04-18T18:21:29+0000","id":"my-new-schema-v1","modifiedBy":"dev-user","modifiedOn":"2025-04-18T18:21:29+0000","name":"MyNewRecord","state":"ENABLED","type":"AVRO"},{"createdBy":"dev-user","createdOn":"2025-04-22T21:32:22+0000","id":"my-new-schema-dev-v41111","modifiedBy":"dev-user","modifiedOn":"2025-04-22T21:32:22+0000","name":"MyNewRecord","state":"ENABLED","type":"AVRO"},{"createdBy":"admin-user","createdOn":"2025-04-22T21:32:05+0000","id":"my-new-schema-dev-v412","modifiedBy":"admin-user","modifiedOn":"2025-04-22T21:32:05+0000","name":"MyNewRecord","state":"ENABLED","type":"AVRO"},{"createdBy":"dev-user","createdOn":"2025-04-18T00:44:33+0000","id":"my-new-schema","modifiedBy":"dev-user","modifiedOn":"2025-04-18T00:44:33+0000","name":"MyNewRecord","state":"ENABLED","type":"AVRO"},{"createdBy":"admin-user","createdOn":"2025-04-18T18:24:49+0000","id":"my-new-schema-admin-v2","modifiedBy":"admin-user","modifiedOn":"2025-04-18T18:24:49+0000","name":"MyNewRecord","state":"ENABLED","type":"AVRO"},{"createdBy":"admin-user","createdOn":"2025-04-18T19:40:05+0000","id":"my-new-schema-admin-v3","modifiedBy":"admin-user","modifiedOn":"2025-04-18T19:40:05+0000","name":"MyNewRecord","state":"ENABLED","type":"AVRO"},{"createdBy":"dev-user","createdOn":"2025-04-18T19:39:31+0000","id":"my-new-schema-dev-v3","modifiedBy":"dev-user","modifiedOn":"2025-04-18T19:39:31+0000","name":"MyNewRecord","state":"ENABLED","type":"AVRO"},{"createdBy":"dev-user","createdOn":"2025-04-18T22:23:02+0000","id":"my-new-schema-dev-v4","modifiedBy":"dev-user","modifiedOn":"2025-04-18T22:23:02+0000","name":"MyNewRecord","state":"ENABLED","type":"AVRO"},{"createdBy":"","createdOn":"2025-04-17T21:57:34+0000","id":"my-artifact-5","modifiedBy":"","modifiedOn":"2025-04-17T21:57:34+0000","name":"TestRecord11","state":"ENABLED","type":"AVRO"},{"createdBy":"","createdOn":"2025-04-16T22:00:21+0000","id":"my-artifact-4","modifiedBy":"","modifiedOn":"2025-04-16T22:00:21+0000","name":"TestRecord11","state":"ENABLED","type":"AVRO"}],"count":12}
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values:
acks = -1
auto.include.jmx.reporter = true
batch.size = 16384
bootstrap.servers = [IP:port]
buffer.memory = 33554432
client.dns.lookup = use_all_dns_ips
client.id = producer-1
compression.type = none
connections.max.idle.ms = 540000
delivery.timeout.ms = 120000
enable.idempotence = true
interceptor.classes = []
key.serializer = class org.apache.kafka.common.serialization.StringSerializer
linger.ms = 0
max.block.ms = 60000
max.in.flight.requests.per.connection = 5
max.request.size = 1048576
metadata.max.age.ms = 300000
metadata.max.idle.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partitioner.adaptive.partitioning.enable = true
partitioner.availability.timeout.ms = 0
partitioner.class = null
partitioner.ignore.keys = false
receive.buffer.bytes = 32768
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retries = 2147483647
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.connect.timeout.ms = null
sasl.login.read.timeout.ms = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.login.retry.backoff.max.ms = 10000
sasl.login.retry.backoff.ms = 100
sasl.mechanism = GSSAPI
sasl.oauthbearer.clock.skew.seconds = 30
sasl.oauthbearer.expected.audience = null
sasl.oauthbearer.expected.issuer = null
sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
sasl.oauthbearer.jwks.endpoint.url = null
sasl.oauthbearer.scope.claim.name = scope
sasl.oauthbearer.sub.claim.name = sub
sasl.oauthbearer.token.endpoint.url = null
security.protocol = SSL
security.providers = null
send.buffer.bytes = 131072
socket.connection.setup.timeout.max.ms = 30000
socket.connection.setup.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
ssl.endpoint.identification.algorithm = https
ssl.engine.factory.class = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.certificate.chain = null
ssl.keystore.key = null
ssl.keystore.location = /Users/karanalang/Documents/Technology/0.ACME/strimzi-certs/versa-kafka-poc/syslog-vani-prefix/syslog-vani-prefix.p12
ssl.keystore.password = [hidden]
ssl.keystore.type = PKCS12
ssl.protocol = TLSv1.3
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.certificates = null
ssl.truststore.location = /Users/karanalang/Documents/Technology/0.ACME/strimzi-certs/versa-kafka-poc-tf/versa-kafka-poc-tf-cluster-ca-cert/versa-kafka-poc-tf-ca.p12
ssl.truststore.password = [hidden]
ssl.truststore.type = PKCS12
transaction.timeout.ms = 60000
transactional.id = null
value.serializer = class io.apicurio.registry.serde.avro.AvroKafkaSerializer
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=producer-1] Instantiated an idempotent producer.
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.clients.producer.ProducerConfig - These configurations '[apicurio.registry.headers.Authorization, apicurio.registry.url, apicurio.registry.artifact-resolver-strategy]' were supplied but are not used yet.
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.6.1
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 5e3c2b738d253ff5
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1745516796729
[kafka-producer-network-thread | producer-1] INFO org.apache.kafka.clients.Metadata - [Producer clientId=producer-1] Cluster ID: jtynOIuMSPOZ9OweZSaTQQ
[kafka-producer-network-thread | producer-1] INFO org.apache.kafka.clients.producer.internals.TransactionManager - [Producer clientId=producer-1] ProducerId set to 4986025 with epoch 0
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] ERROR io.apicurio.registry.resolver.ERCache - Failed to update cache value for key: {"type":"record","name":"Employee","namespace":"com.versa.apicurio","fields":[{"name":"id","type":"int"},{"name":"name","type":"string"},{"name":"salary","type":["null","float"],"default":null},{"name":"age","type":["null","int"],"default":null},{"name":"department","type":{"type":"enum","name":"DepartmentEnum","symbols":["HR","ENGINEERING","SALES"]}},{"name":"email","type":["null","string"],"default":null},{"name":"new_col","type":["null","string"],"default":null}]}
io.apicurio.rest.client.auth.exception.NotAuthorizedException: Authentication exception
at io.apicurio.registry.rest.client.impl.ErrorHandler.handleErrorResponse(ErrorHandler.java:54)
at io.apicurio.rest.client.handler.BodyHandler.lambda$toSupplierOfType$1(BodyHandler.java:55)
at io.apicurio.rest.client.JdkHttpClient.sendRequest(JdkHttpClient.java:204)
at io.apicurio.registry.rest.client.impl.RegistryClientImpl.getArtifactVersionMetaDataByContent(RegistryClientImpl.java:146)
at io.apicurio.registry.rest.client.RegistryClient.getArtifactVersionMetaDataByContent(RegistryClient.java:69)
at io.apicurio.registry.resolver.DefaultSchemaResolver.lambda$handleResolveSchemaByContent$3(DefaultSchemaResolver.java:261)
at io.apicurio.registry.resolver.ERCache.lambda$getValue$0(ERCache.java:201)
at io.apicurio.registry.resolver.ERCache.retry(ERCache.java:254)
at io.apicurio.registry.resolver.ERCache.getValue(ERCache.java:200)
at io.apicurio.registry.resolver.ERCache.getByContent(ERCache.java:175)
at io.apicurio.registry.resolver.DefaultSchemaResolver.handleResolveSchemaByContent(DefaultSchemaResolver.java:259)
at io.apicurio.registry.resolver.DefaultSchemaResolver.getSchemaFromRegistry(DefaultSchemaResolver.java:141)
at io.apicurio.registry.resolver.DefaultSchemaResolver.lambda$resolveSchema$0(DefaultSchemaResolver.java:93)
at java.base/java.util.Optional.orElseGet(Optional.java:364)
at io.apicurio.registry.resolver.DefaultSchemaResolver.resolveSchema(DefaultSchemaResolver.java:93)
at io.apicurio.registry.serde.AbstractKafkaSerializer.serialize(AbstractKafkaSerializer.java:83)
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:1000)
at org.apache.kafka.clients.producer.KafkaProducer.send(KafkaProducer.java:947)
at com.versa.apicurio.AvroKafkaProducer_Apicurio.main(AvroKafkaProducer_Apicurio.java:120)
at org.codehaus.mojo.exec.ExecJavaMojo.doMain(ExecJavaMojo.java:375)
at org.codehaus.mojo.exec.ExecJavaMojo.doExec(ExecJavaMojo.java:364)
at org.codehaus.mojo.exec.ExecJavaMojo.lambda$execute$0(ExecJavaMojo.java:286)
at java.base/java.lang.Thread.run(Thread.java:833)
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=producer-1] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms.
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.common.metrics.Metrics - Metrics scheduler closed
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.metrics.JmxReporter
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.common.metrics.Metrics - Metrics reporters closed
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.common.utils.AppInfoParser - App info kafka.producer for producer-1 unregistered
[WARNING]
io.apicurio.rest.client.auth.exception.NotAuthorizedException: Authentication exception
at io.apicurio.registry.rest.client.impl.ErrorHandler.handleErrorResponse (ErrorHandler.java:54)
at io.apicurio.rest.client.handler.BodyHandler.lambda$toSupplierOfType$1 (BodyHandler.java:55)
at io.apicurio.rest.client.JdkHttpClient.sendRequest (JdkHttpClient.java:204)
at io.apicurio.registry.rest.client.impl.RegistryClientImpl.getArtifactVersionMetaDataByContent (RegistryClientImpl.java:146)
at io.apicurio.registry.rest.client.RegistryClient.getArtifactVersionMetaDataByContent (RegistryClient.java:69)
at io.apicurio.registry.resolver.DefaultSchemaResolver.lambda$handleResolveSchemaByContent$3 (DefaultSchemaResolver.java:261)
at io.apicurio.registry.resolver.ERCache.lambda$getValue$0 (ERCache.java:201)
at io.apicurio.registry.resolver.ERCache.retry (ERCache.java:254)
at io.apicurio.registry.resolver.ERCache.getValue (ERCache.java:200)
at io.apicurio.registry.resolver.ERCache.getByContent (ERCache.java:175)
at io.apicurio.registry.resolver.DefaultSchemaResolver.handleResolveSchemaByContent (DefaultSchemaResolver.java:259)
at io.apicurio.registry.resolver.DefaultSchemaResolver.getSchemaFromRegistry (DefaultSchemaResolver.java:141)
at io.apicurio.registry.resolver.DefaultSchemaResolver.lambda$resolveSchema$0 (DefaultSchemaResolver.java:93)
at java.util.Optional.orElseGet (Optional.java:364)
at io.apicurio.registry.resolver.DefaultSchemaResolver.resolveSchema (DefaultSchemaResolver.java:93)
at io.apicurio.registry.serde.AbstractKafkaSerializer.serialize (AbstractKafkaSerializer.java:83)
at org.apache.kafka.clients.producer.KafkaProducer.doSend (KafkaProducer.java:1000)
at org.apache.kafka.clients.producer.KafkaProducer.send (KafkaProducer.java:947)
at com.versa.apicurio.AvroKafkaProducer_Apicurio.main (AvroKafkaProducer_Apicurio.java:120)
at org.codehaus.mojo.exec.ExecJavaMojo.doMain (ExecJavaMojo.java:375)
at org.codehaus.mojo.exec.ExecJavaMojo.doExec (ExecJavaMojo.java:364)
at org.codehaus.mojo.exec.ExecJavaMojo.lambda$execute$0 (ExecJavaMojo.java:286)
at java.lang.Thread.run (Thread.java:833)
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 2.764 s
[INFO] Finished at: 2025-04-24T10:46:37-07:00
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.codehaus.mojo:exec-maven-plugin:3.5.0:java (default-cli) on project apicurio_schema_registry: An exception occurred while executing the Java class. Authentication exception -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
In the logs, i see this ->
[com.versa.apicurio.AvroKafkaProducer_Apicurio.main()] INFO org.apache.kafka.clients.producer.ProducerConfig - These configurations '[apicurio.registry.headers.Authorization, apicurio.registry.url, apicurio.registry.artifact-resolver-strategy]' were supplied but are not used yet.
How to debug/resolve this ?
Note : I tried the below mentioned settings, and it gives the same error
//props.put("apicurio.auth.token", token);
props.put("apicurio.registry.headers.Authorization", "Bearer " + token);
Another question - Can I use v3.x of the Apicurio Schema Registry, instead of the default 2.x version which is installed with the Apicurio Schema Registry Operator?
Here is the yaml used to deploy the Apicurio Schema Registry v2.x
apiVersion: registry.apicur.io/v1
kind: ApicurioRegistry
metadata:
name: apicurio-registry
namespace: apicurio-registry-operator
spec:
configuration:
persistence: sql
sql:
dataSource:
url: jdbc:postgresql://my-postgres-postgresql.apicurio-registry-operator.svc.cluster.local:5432/postgres
userName: postgres
password:<pwd>
env:
# ────────────────────────────────────────────────────────────────────
# 1) Enable OIDC auth via Keycloak
# ────────────────────────────────────────────────────────────────────
- name: AUTH_ENABLED
value: "true"
- name: KEYCLOAK_URL
value: "https://keycloak.vkp.versa-vani.com"
- name: KEYCLOAK_REALM
value: "readonly-realm"
- name: KEYCLOAK_API_CLIENT_ID
value: "apicurio-registry"
- name: KEYCLOAK_UI_CLIENT_ID
value: "apicurio"
- name: KEYCLOAK_ISSUER_URL
value: "https://keycloak.vkp.versa-vani.com/realms/readonly-realm"
- name: KEYCLOAK_JWKS_URL
value: "https://keycloak.vkp.versa-vani.com/realms/readonly-realm/protocol/openid-connect/certs"
# ────────────────────────────────────────────────────────────────────
# 2) Enable role‑based authorization
# ────────────────────────────────────────────────────────────────────
- name: ROLE_BASED_AUTHZ_ENABLED
value: "true"
- name: ROLE_BASED_AUTHZ_SOURCE
value: "token"
- name: REGISTRY_AUTH_ROLES_ADMIN
value: "sr-admin"
- name: REGISTRY_AUTH_ROLES_DEVELOPER
value: "sr-developer"
- name: REGISTRY_AUTH_ROLES_READONLY
value: "sr-readonly"
- name: REGISTRY_API_URL
value: "https://apicurio-sr.vkp.versa-vani.com/apis/registry/v2"
- name: REGISTRY_UI_CONFIG_APIURL
value: "https://apicurio-sr.vkp.versa-vani.com/apis/registry/v2"
- name: REGISTRY_UI_CONFIG_UIURL
value: "https://apicurio-sr.vkp.versa-vani.com"
- name: REGISTRY_UI_CONFIG_AUTH_URL
value: "https://keycloak.vkp.versa-vani.com"
- name: REGISTRY_UI_CONFIG_AUTH_SERVER_URL
value: "https://keycloak.vkp.versa-vani.com/realms/readonly-realm"
# ────────────────────────────────────────────────────────────────────
# 4) Enable debug logging
# ────────────────────────────────────────────────────────────────────
- name: QUARKUS_LOG_LEVEL
value: "DEBUG"
- name: QUARKUS_LOG_CATEGORY__IO.APICURIO.REGISTRY.AUTH__LEVEL
value: "DEBUG"
- name: QUARKUS_LOG_CATEGORY__IO.APICURIO.REGISTRY__LEVEL
value: "DEBUG"
- name: QUARKUS_LOG_CATEGORY__IO.QUARKUS.OIDC__LEVEL
value: "DEBUG"
Note: The Kafka, Apicurion Registry Operator, keycloak setup is on GKE
tia !