[main] INFO org.opendaylight.transportpce.common.network.RequestProcessor - RequestProcessor instantiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id PceListener with properties {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6ffbd953, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values:
    acks = -1
    auto.include.jmx.reporter = true
    batch.size = 16384
    bootstrap.servers = [localhost:8080]
    buffer.memory = 33554432
    client.dns.lookup = use_all_dns_ips
    client.id = PceListener
    compression.type = none
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    enable.metrics.push = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 1
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 1
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.adaptive.partitioning.enable = true
    partitioner.availability.timeout.ms = 0
    partitioner.class = null
    partitioner.ignore.keys = false
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 3
    retry.backoff.max.ms = 1000
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.connect.timeout.ms = null
    sasl.login.read.timeout.ms = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.login.retry.backoff.max.ms = 10000
    sasl.login.retry.backoff.ms = 100
    sasl.mechanism = GSSAPI
    sasl.oauthbearer.clock.skew.seconds = 30
    sasl.oauthbearer.expected.audience = null
    sasl.oauthbearer.expected.issuer = null
    sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
    sasl.oauthbearer.jwks.endpoint.url = null
    sasl.oauthbearer.scope.claim.name = scope
    sasl.oauthbearer.sub.claim.name = sub
    sasl.oauthbearer.token.endpoint.url = null
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    socket.connection.setup.timeout.max.ms = 30000
    socket.connection.setup.timeout.ms = 10000
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
    ssl.endpoint.identification.algorithm = https
    ssl.engine.factory.class = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.certificate.chain = null
    ssl.keystore.key = null
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.3
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.certificates = null
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6ffbd953, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=PceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1713631797859
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandlerOperations with properties {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6ffbd953, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values:
    acks = -1
    auto.include.jmx.reporter = true
    batch.size = 16384
    bootstrap.servers = [localhost:8080]
    buffer.memory = 33554432
    client.dns.lookup = use_all_dns_ips
    client.id = ServiceHandlerOperations
    compression.type = none
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    enable.metrics.push = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 1
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 1
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.adaptive.partitioning.enable = true
    partitioner.availability.timeout.ms = 0
    partitioner.class = null
    partitioner.ignore.keys = false
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 3
    retry.backoff.max.ms = 1000
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.connect.timeout.ms = null
    sasl.login.read.timeout.ms = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.login.retry.backoff.max.ms = 10000
    sasl.login.retry.backoff.ms = 100
    sasl.mechanism = GSSAPI
    sasl.oauthbearer.clock.skew.seconds = 30
    sasl.oauthbearer.expected.audience = null
    sasl.oauthbearer.expected.issuer = null
    sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
    sasl.oauthbearer.jwks.endpoint.url = null
    sasl.oauthbearer.scope.claim.name = scope
    sasl.oauthbearer.sub.claim.name = sub
    sasl.oauthbearer.token.endpoint.url = null
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    socket.connection.setup.timeout.max.ms = 30000
    socket.connection.setup.timeout.ms = 10000
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
    ssl.endpoint.identification.algorithm = https
    ssl.engine.factory.class = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.certificate.chain = null
    ssl.keystore.key = null
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.3
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.certificates = null
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6ffbd953, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandlerOperations] Instantiated an idempotent producer.
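The producer settings echoed above (bootstrap.servers=localhost:8080, acks=all, retries=3, max.in.flight.requests.per.connection=1, linger.ms=1, batch.size=16384, buffer.memory=33554432, and a per-publisher client.id) can be reproduced with the stock Kafka client API. The sketch below is not the TransportPCE Publisher implementation; the class and method names are illustrative, and it substitutes StringSerializer for the NotificationServiceSerializer value serializer shown in the log so the snippet stays self-contained.

    import java.util.Properties;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class PublisherConfigSketch {

        // Assembles the same settings the log reports for a publisher such as client.id=PceListener.
        static KafkaProducer<String, String> buildProducer(String clientId) {
            Properties props = new Properties();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080");
            props.put(ProducerConfig.CLIENT_ID_CONFIG, clientId);
            props.put(ProducerConfig.ACKS_CONFIG, "all");                       // logged by ProducerConfig as acks = -1, which is equivalent
            props.put(ProducerConfig.RETRIES_CONFIG, 3);
            props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1); // keeps retried sends in order
            props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
            props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
            props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            // Assumption: StringSerializer stands in for NotificationServiceSerializer here.
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            return new KafkaProducer<>(props);
        }

        public static void main(String[] args) {
            try (KafkaProducer<String, String> producer = buildProducer("PceListener")) {
                // With these settings Kafka 3.7 also enables idempotence by default,
                // which matches the "Instantiated an idempotent producer" lines in the log.
            }
        }
    }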
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1713631797903
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandler with properties {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6ffbd953, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values:
    acks = -1
    auto.include.jmx.reporter = true
    batch.size = 16384
    bootstrap.servers = [localhost:8080]
    buffer.memory = 33554432
    client.dns.lookup = use_all_dns_ips
    client.id = ServiceHandler
    compression.type = none
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    enable.metrics.push = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 1
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 1
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.adaptive.partitioning.enable = true
    partitioner.availability.timeout.ms = 0
    partitioner.class = null
    partitioner.ignore.keys = false
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 3
    retry.backoff.max.ms = 1000
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.connect.timeout.ms = null
    sasl.login.read.timeout.ms = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.login.retry.backoff.max.ms = 10000
    sasl.login.retry.backoff.ms = 100
    sasl.mechanism = GSSAPI
    sasl.oauthbearer.clock.skew.seconds = 30
    sasl.oauthbearer.expected.audience = null
    sasl.oauthbearer.expected.issuer = null
    sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
    sasl.oauthbearer.jwks.endpoint.url = null
    sasl.oauthbearer.scope.claim.name = scope
    sasl.oauthbearer.sub.claim.name = sub
    sasl.oauthbearer.token.endpoint.url = null
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    socket.connection.setup.timeout.max.ms = 30000
    socket.connection.setup.timeout.ms = 10000
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
    ssl.endpoint.identification.algorithm = https
    ssl.engine.factory.class = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.certificate.chain = null
    ssl.keystore.key = null
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.3
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.certificates = null
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6ffbd953, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandler] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1713631797947
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id RendererListener with properties {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6ffbd953, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values:
    acks = -1
    auto.include.jmx.reporter = true
    batch.size = 16384
    bootstrap.servers = [localhost:8080]
    buffer.memory = 33554432
    client.dns.lookup = use_all_dns_ips
    client.id = RendererListener
    compression.type = none
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    enable.metrics.push = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 1
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 1
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.adaptive.partitioning.enable = true
    partitioner.availability.timeout.ms = 0
    partitioner.class = null
    partitioner.ignore.keys = false
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 3
    retry.backoff.max.ms = 1000
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.connect.timeout.ms = null
    sasl.login.read.timeout.ms = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.login.retry.backoff.max.ms = 10000
    sasl.login.retry.backoff.ms = 100
    sasl.mechanism = GSSAPI
    sasl.oauthbearer.clock.skew.seconds = 30
    sasl.oauthbearer.expected.audience = null
    sasl.oauthbearer.expected.issuer = null
    sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
    sasl.oauthbearer.jwks.endpoint.url = null
    sasl.oauthbearer.scope.claim.name = scope
    sasl.oauthbearer.sub.claim.name = sub
    sasl.oauthbearer.token.endpoint.url = null
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    socket.connection.setup.timeout.max.ms = 30000
    socket.connection.setup.timeout.ms = 10000
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
    ssl.endpoint.identification.algorithm = https
    ssl.engine.factory.class = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.certificate.chain = null
    ssl.keystore.key = null
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.3
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.certificates = null
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6ffbd953, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=RendererListener] Instantiated an idempotent producer.
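Once a producer like the ones instantiated above exists, publishing onto one of the process topics is a plain Kafka send. A minimal sketch follows, assuming a String-valued producer as in the earlier sketch and assuming the topic name matches the publisher id reported by TopicManager (e.g. "PceListener"); the key and payload values are illustrative. The callback shows how a delivery outcome surfaces after the logged acks=all / retries=3 settings have been applied.

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerRecord;

    public class PublishSketch {

        // Sends one JSON payload to a notification topic; topic, key and payload are assumptions.
        static void publish(KafkaProducer<String, String> producer, String topic, String key, String json) {
            producer.send(new ProducerRecord<>(topic, key, json), (metadata, exception) -> {
                if (exception != null) {
                    // Delivery failed even after the configured retries (retries = 3 in the log).
                    exception.printStackTrace();
                } else {
                    // metadata reports where the record landed.
                    System.out.printf("wrote to %s-%d@%d%n",
                            metadata.topic(), metadata.partition(), metadata.offset());
                }
            });
            producer.flush(); // linger.ms = 1, so this is mostly belt-and-braces
        }
    }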
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1713631797989
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding alarm topic: ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceListener with properties {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4c58a7bc, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values:
    acks = -1
    auto.include.jmx.reporter = true
    batch.size = 16384
    bootstrap.servers = [localhost:8080]
    buffer.memory = 33554432
    client.dns.lookup = use_all_dns_ips
    client.id = ServiceListener
    compression.type = none
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    enable.metrics.push = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 1
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 1
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.adaptive.partitioning.enable = true
    partitioner.availability.timeout.ms = 0
    partitioner.class = null
    partitioner.ignore.keys = false
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retries = 3
    retry.backoff.max.ms = 1000
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.connect.timeout.ms = null
    sasl.login.read.timeout.ms = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.login.retry.backoff.max.ms = 10000
    sasl.login.retry.backoff.ms = 100
    sasl.mechanism = GSSAPI
    sasl.oauthbearer.clock.skew.seconds = 30
    sasl.oauthbearer.expected.audience = null
    sasl.oauthbearer.expected.issuer = null
    sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
    sasl.oauthbearer.jwks.endpoint.url = null
    sasl.oauthbearer.scope.claim.name = scope
    sasl.oauthbearer.sub.claim.name = sub
    sasl.oauthbearer.token.endpoint.url = null
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    socket.connection.setup.timeout.max.ms = 30000
    socket.connection.setup.timeout.ms = 10000
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
    ssl.endpoint.identification.algorithm = https
    ssl.engine.factory.class = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.certificate.chain = null
    ssl.keystore.key = null
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.3
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.certificates = null
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4c58a7bc, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1713631798010
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - tapi converter: org.opendaylight.transportpce.common.converter.JsonStringConverter@6a5868d3
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - NbiNotificationsProvider Session Initiated
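The value serializers named in the log (NotificationServiceSerializer for the process topics, NotificationAlarmServiceSerializer for the alarm topic) plug into the producer through Kafka's standard Serializer contract; the "Deserializer configuration {...}" lines above appear where that contract's configure() hook receives the producer properties during producer construction. Below is a simplified, hypothetical stand-in showing the shape of such a serializer, not the TransportPCE implementation: the real classes render YANG-modeled notifications through JsonStringConverter, which this sketch replaces with a plain UTF-8 encoding of an already-rendered JSON string.

    import java.nio.charset.StandardCharsets;
    import java.util.Map;
    import org.apache.kafka.common.serialization.Serializer;

    // Hypothetical stand-in for a notification value serializer.
    public class JsonNotificationSerializerSketch implements Serializer<String> {

        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {
            // The producer properties arrive here, which is the point at which the
            // serializers in the log print their configuration map.
            System.out.println("Serializer configuration " + configs);
        }

        @Override
        public byte[] serialize(String topic, String data) {
            // Assumption: the payload is already a JSON string; the real serializers
            // build that JSON from a YANG notification via JsonStringConverter.
            return data == null ? null : data.getBytes(StandardCharsets.UTF_8);
        }
    }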