[main] INFO org.opendaylight.transportpce.common.network.RequestProcessor - RequestProcessor instantiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id PceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@57d41e02, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=PceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
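This warning recurs for every publisher below: the supplied properties set reconnect.backoff.ms but not reconnect.backoff.max.ms, so the Kafka client pins the maximum to the base value and disables exponential backoff. A minimal sketch of setting both keys so exponential backoff stays enabled (the concrete values here are illustrative assumptions, not taken from TransportPCE):

    import java.util.Properties;
    import org.apache.kafka.clients.CommonClientConfigs;

    public class BackoffProps {
        static Properties backoffProps() {
            Properties props = new Properties();
            // Base delay before the first reconnect attempt (illustrative value).
            props.put(CommonClientConfigs.RECONNECT_BACKOFF_MS_CONFIG, "1000");
            // Explicit upper bound for the exponentially growing delay; setting both
            // keys avoids the "Disabling exponential reconnect backoff" warning above.
            props.put(CommonClientConfigs.RECONNECT_BACKOFF_MAX_MS_CONFIG, "600000");
            return props;
        }
    }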
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = PceListener
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer

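For orientation, a minimal sketch of a producer built directly against the Kafka client API with the key settings from the dump above. The Publisher class in this log wraps something similar, but its code is not shown here, so the topic name and the value serializer below are illustrative substitutions:

    import java.util.Properties;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class ProducerSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            // Value from this log; a Kafka broker usually listens on 9092, which is
            // why the connection warnings below keep appearing.
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080");
            props.put(ProducerConfig.CLIENT_ID_CONFIG, "PceListener");
            props.put(ProducerConfig.ACKS_CONFIG, "all");  // logged as -1, the numeric form of "all"
            props.put(ProducerConfig.RETRIES_CONFIG, 3);
            props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
            props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            // The log uses NotificationServiceSerializer; StringSerializer keeps this sketch self-contained.
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
                // Hypothetical topic name; the actual topic is not shown in this log.
                producer.send(new ProducerRecord<>("pce-topic", "key", "value"));
            }
        }
    }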
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@57d41e02, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=PceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [PceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandlerOperations
[kafka-producer-network-thread | PceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Node -1 disconnected.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandlerOperations with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@57d41e02, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandlerOperations}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(values identical to the PceListener ProducerConfig dump above, except client.id = ServiceHandlerOperations)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@57d41e02, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandlerOperations] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandlerOperations] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandler with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@57d41e02, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandler}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(values identical to the PceListener ProducerConfig dump above, except client.id = ServiceHandler)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@57d41e02, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandler] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandlerOperations] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandler] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id RendererListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@57d41e02, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=RendererListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(values identical to the PceListener ProducerConfig dump above, except client.id = RendererListener)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@57d41e02, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=RendererListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandler] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [RendererListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding alarm topic: ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@25667a52, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(values identical to the PceListener ProducerConfig dump above, except client.id = ServiceListener and value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@25667a52, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | RendererListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Node -1 disconnected.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - tapi converter: org.opendaylight.transportpce.common.converter.JsonStringConverter@15bdf283
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - NbiNotificationsProvider Session Initiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationsAlarmServiceImpl - RPC getNotificationsAlarmService received
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribing for group id groupId, client config id consumerId with properties {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceDeserializer, enable.auto.commit=false, group.id=groupId, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@63facaac, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=consumerId}
[kafka-producer-network-thread | ServiceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Node -1 disconnected.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values: 
	allow.auto.create.topics = true
	auto.commit.interval.ms = 1000
	auto.include.jmx.reporter = true
	auto.offset.reset = earliest
	bootstrap.servers = [localhost:8080]
	check.crcs = true
	client.dns.lookup = use_all_dns_ips
	client.id = consumerId
	client.rack = 
	connections.max.idle.ms = 540000
	default.api.timeout.ms = 60000
	enable.auto.commit = false
	enable.metrics.push = true
	exclude.internal.topics = true
	fetch.max.bytes = 52428800
	fetch.max.wait.ms = 500
	fetch.min.bytes = 1
	group.id = groupId
	group.instance.id = null
	group.protocol = classic
	group.remote.assignor = null
	heartbeat.interval.ms = 3000
	interceptor.classes = []
	internal.leave.group.on.close = true
	internal.throw.on.fetch.stable.offset.unsupported = false
	isolation.level = read_uncommitted
	key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer
	max.partition.fetch.bytes = 1048576
	max.poll.interval.ms = 300000
	max.poll.records = 500
	metadata.max.age.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor]
	receive.buffer.bytes = 65536
	reconnect.backoff.max.ms = 1000
	reconnect.backoff.ms = 50
	request.timeout.ms = 30000
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	session.timeout.ms = 45000
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	value.deserializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceDeserializer

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceDeserializer - Deserializer configuration {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceDeserializer, enable.auto.commit=false, group.id=groupId, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@63facaac, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=consumerId}
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1730276106712
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribe request to topic 'alarmservice' 
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=consumerId, groupId=groupId] Subscribed to topic(s): alarmservice
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
(the INFO/WARN triple above repeats four more times while the consumer retries the unreachable bootstrap broker)
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Getting records '[]' 
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=consumerId, groupId=groupId] Unsubscribed all topics or patterns and assigned partitions
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics scheduler closed
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.metrics.JmxReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.telemetry.internals.ClientTelemetryReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics reporters closed
[main] INFO org.apache.kafka.common.utils.AppInfoParser - App info kafka.consumer for consumerId unregistered
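The consumer lines above trace one subscribe/poll/close cycle: the Subscriber subscribes to 'alarmservice', polls and gets no records because no broker answers at localhost:8080, then pro-actively leaves the group and unregisters its metrics. A minimal sketch of the same cycle against the plain Kafka consumer API, with property values taken from this log (the Subscriber class itself is not shown, and the value deserializer is an illustrative substitution):

    import java.time.Duration;
    import java.util.List;
    import java.util.Properties;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    public class SubscriberSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080"); // from this log
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "groupId");
            props.put(ConsumerConfig.CLIENT_ID_CONFIG, "consumerId");
            props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
            // The log uses NotificationAlarmServiceDeserializer; StringDeserializer keeps this sketch self-contained.
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(List.of("alarmservice"));   // "Subscribed to topic(s): alarmservice"
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                System.out.println("Got records: " + records.count()); // 0 here, since no broker answers
                consumer.unsubscribe();                        // triggers the "leaving the group" lines
            }                                                  // close() unregisters the app-info mbean
        }
    }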
[main] INFO org.opendaylight.transportpce.common.network.RequestProcessor - RequestProcessor instantiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id PceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@a8e293c, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=PceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(values identical to the PceListener ProducerConfig dump above)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@a8e293c, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=PceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [PceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandlerOperations with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@a8e293c, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandlerOperations}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(values identical to the PceListener ProducerConfig dump above, except client.id = ServiceHandlerOperations)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@a8e293c, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandlerOperations] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandlerOperations] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandler with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@a8e293c, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandler}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(values identical to the PceListener ProducerConfig dump above, except client.id = ServiceHandler)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[kafka-producer-network-thread | PceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Node -1 disconnected.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@a8e293c, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandler] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandlerOperations] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandler] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creating publisher for id RendererListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@a8e293c, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=RendererListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
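This warning fires because only reconnect.backoff.ms was overridden (to 600000 ms); the client then pins reconnect.backoff.max.ms to the same value, which is why the config dumps show both keys at 600000 and every reconnect attempt waits a flat ten minutes. Supplying both keys restores the exponential ramp; a minimal sketch, with the 1-second starting value chosen for illustration:

    import java.util.Properties;

    import org.apache.kafka.clients.CommonClientConfigs;

    public final class BackoffSketch {
        static void configureExponentialBackoff(Properties props) {
            // First retry after 1 s, backing off exponentially up to a 10-minute cap,
            // instead of a flat 10-minute reconnect interval.
            props.put(CommonClientConfigs.RECONNECT_BACKOFF_MS_CONFIG, 1000L);
            props.put(CommonClientConfigs.RECONNECT_BACKOFF_MAX_MS_CONFIG, 600000L);
        }
    }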
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(identical to the PceListener ProducerConfig dump above, except client.id = RendererListener)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Serializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@a8e293c, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=RendererListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandler] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [RendererListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding alarm topic: ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creating publisher for id ServiceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@1f79bcd0, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(identical to the PceListener ProducerConfig dump above, except client.id = ServiceListener and value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer - Serializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@1f79bcd0, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - tapi converter: org.opendaylight.transportpce.common.converter.JsonStringConverter@5ae7d274
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - NbiNotificationsProvider Session Initiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationsProcessServiceImpl - RPC getNotificationsService received
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribing for group id groupId, client config id consumerId with properties {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceDeserializer, enable.auto.commit=false, group.id=groupId, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@775b2bfb, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=consumerId}
[kafka-producer-network-thread | ServiceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Node -1 disconnected.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values: 
	allow.auto.create.topics = true
	auto.commit.interval.ms = 1000
	auto.include.jmx.reporter = true
	auto.offset.reset = earliest
	bootstrap.servers = [localhost:8080]
	check.crcs = true
	client.dns.lookup = use_all_dns_ips
	client.id = consumerId
	client.rack = 
	connections.max.idle.ms = 540000
	default.api.timeout.ms = 60000
	enable.auto.commit = false
	enable.metrics.push = true
	exclude.internal.topics = true
	fetch.max.bytes = 52428800
	fetch.max.wait.ms = 500
	fetch.min.bytes = 1
	group.id = groupId
	group.instance.id = null
	group.protocol = classic
	group.remote.assignor = null
	heartbeat.interval.ms = 3000
	interceptor.classes = []
	internal.leave.group.on.close = true
	internal.throw.on.fetch.stable.offset.unsupported = false
	isolation.level = read_uncommitted
	key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer
	max.partition.fetch.bytes = 1048576
	max.poll.interval.ms = 300000
	max.poll.records = 500
	metadata.max.age.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor]
	receive.buffer.bytes = 65536
	reconnect.backoff.max.ms = 1000
	reconnect.backoff.ms = 50
	request.timeout.ms = 30000
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	session.timeout.ms = 45000
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	value.deserializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceDeserializer

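Two things stand out in this consumer configuration: enable.auto.commit=false means the auto.commit.interval.ms=1000 passed alongside it is ignored, so offsets only advance when the application commits them explicitly; and auto.offset.reset=earliest makes a brand-new group id start from the beginning of the topic. A minimal manual-commit sketch (illustrative, not the Subscriber's actual code):

    import java.time.Duration;

    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;

    public final class ManualCommitSketch {
        static void drainOnce(KafkaConsumer<String, String> consumer) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.key() + " -> " + record.value());
            }
            if (!records.isEmpty()) {
                consumer.commitSync(); // required, since enable.auto.commit = false
            }
        }
    }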
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceDeserializer - Deserializer configuration {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceDeserializer, enable.auto.commit=false, group.id=groupId, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@775b2bfb, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=consumerId}
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1730276107767
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribe request to topic 'service' 
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=consumerId, groupId=groupId] Subscribed to topic(s): service
[kafka-producer-network-thread | RendererListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Node -1 disconnected.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
(the preceding three lines repeat four more times while the consumer retries the bootstrap connection)
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Getting records '[]' 
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=consumerId, groupId=groupId] Unsubscribed all topics or patterns and assigned partitions
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics scheduler closed
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.metrics.JmxReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.telemetry.internals.ClientTelemetryReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics reporters closed
[main] INFO org.apache.kafka.common.utils.AppInfoParser - App info kafka.consumer for consumerId unregistered
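The sequence above — subscribe to 'service', poll, "Getting records '[]'", pro-actively leave the group, close the metrics reporters, unregister the app-info mbean — is the footprint of a one-shot consumer that polls once and closes; with the bootstrap broker unreachable, the poll returns empty. A sketch of that lifecycle under the same assumptions, again with StringDeserializer standing in for the TransportPCE value deserializer:

    import java.time.Duration;
    import java.util.List;
    import java.util.Properties;

    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    public final class OneShotSubscriber {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080"); // unreachable in this log
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "groupId");
            props.put(ConsumerConfig.CLIENT_ID_CONFIG, "consumerId");
            props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            // The real Subscriber plugs in NotificationServiceDeserializer here.
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(List.of("service"));
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                System.out.println("Getting records '" + records.count() + "'");
                consumer.unsubscribe(); // logs the "pro-actively leaving the group" lines
            } // close() shuts down metrics reporters and unregisters the app-info mbean
        }
    }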
[main] INFO org.opendaylight.transportpce.common.network.RequestProcessor - RequestProcessor instantiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creating publisher for id PceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6a2b6241, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=PceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(identical to the first PceListener ProducerConfig dump above)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Serializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6a2b6241, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=PceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [PceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creating publisher for id ServiceHandlerOperations with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6a2b6241, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandlerOperations}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(identical to the first PceListener ProducerConfig dump above, except client.id = ServiceHandlerOperations)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Serializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6a2b6241, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandlerOperations] Instantiated an idempotent producer.
[kafka-producer-network-thread | PceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Node -1 disconnected.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandlerOperations] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creating publisher for id ServiceHandler with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6a2b6241, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandler}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(identical to the first PceListener ProducerConfig dump above, except client.id = ServiceHandler)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Serializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6a2b6241, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandler] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandler] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creating publisher for id RendererListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6a2b6241, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=RendererListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(identical to the first PceListener ProducerConfig dump above, except client.id = RendererListener)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Serializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6a2b6241, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=RendererListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandlerOperations] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [RendererListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding alarm topic: ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creating publisher for id ServiceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2e1339ce, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[kafka-producer-network-thread | ServiceHandler] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(identical to the first PceListener ProducerConfig dump above, except client.id = ServiceListener and value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer)

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer - Serializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2e1339ce, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - tapi converter: org.opendaylight.transportpce.common.converter.JsonStringConverter@744da685
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - NbiNotificationsProvider Session Initiated
[kafka-producer-network-thread | ServiceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Node -1 disconnected.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.CreateNotificationSubscriptionServiceImpl - Adding T-API topic: eda10664-880c-4993-a4af-a36c7c5494d8 to Kafka server
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding new tapi topic: eda10664-880c-4993-a4af-a36c7c5494d8
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creating publisher for id eda10664-880c-4993-a4af-a36c7c5494d8 with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@744da685, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=eda10664-880c-4993-a4af-a36c7c5494d8}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	(identical to the first PceListener ProducerConfig dump above, except client.id = eda10664-880c-4993-a4af-a36c7c5494d8 and value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer)

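The properties echoed in the "Creation publisher" line above are plain Kafka producer settings; note that acks=all is logged back as acks = -1, its normalized form, and that with enable.idempotence left at its default the 3.x client runs idempotent. A minimal sketch of building an equivalent producer, assuming only kafka-clients 3.7 on the classpath (the TransportPCE value serializer is swapped for StringSerializer so the snippet stays self-contained):

    import java.util.Properties;

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class PublisherConfigSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            // Values taken from the "Creation publisher" log line above.
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080");
            props.put(ProducerConfig.CLIENT_ID_CONFIG, "eda10664-880c-4993-a4af-a36c7c5494d8");
            props.put(ProducerConfig.ACKS_CONFIG, "all");     // normalized to -1 in the dump
            props.put(ProducerConfig.RETRIES_CONFIG, 3);
            props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
            props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
            props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432L);
            props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1);
            props.put(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG, 600000L);
            // Not set in the logged run; supplying the max alongside the base value
            // keeps exponential backoff enabled and silences the
            // CommonClientConfigs warning seen above.
            props.put(ProducerConfig.RECONNECT_BACKOFF_MAX_MS_CONFIG, 600000L);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            // TransportPCE plugs in TapiNotificationSerializer here; StringSerializer
            // keeps this sketch runnable on its own.
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
                // Construction alone does not block on the broker; the connection
                // attempts and their warnings come from the network thread later.
            }
        }
    }
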
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@744da685, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=eda10664-880c-4993-a4af-a36c7c5494d8}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=eda10664-880c-4993-a4af-a36c7c5494d8] Instantiated an idempotent producer.
[kafka-producer-network-thread | RendererListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Node -1 disconnected.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
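Node -1 in these warnings is the placeholder id Kafka assigns to a bootstrap address before a real broker id is known; the triplet repeats for every producer in this run because nothing is listening on localhost:8080 (9092 is the usual Kafka listener port). A quick way to check reachability up front, sketched with the standard AdminClient against the same address:

    import java.util.Properties;
    import java.util.concurrent.ExecutionException;

    import org.apache.kafka.clients.admin.AdminClient;
    import org.apache.kafka.clients.admin.AdminClientConfig;

    public class BrokerProbe {
        public static void main(String[] args) throws InterruptedException {
            Properties props = new Properties();
            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080");
            // Fail fast instead of waiting out the 60 s default API timeout.
            props.put(AdminClientConfig.DEFAULT_API_TIMEOUT_MS_CONFIG, 5000);
            props.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 5000);
            try (AdminClient admin = AdminClient.create(props)) {
                String clusterId = admin.describeCluster().clusterId().get();
                System.out.println("Broker reachable, cluster id: " + clusterId);
            } catch (ExecutionException e) {
                // A TimeoutException lands here when the bootstrap broker is down.
                System.out.println("Bootstrap broker unreachable: " + e.getCause());
            }
        }
    }
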
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1730276108800
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Getting tapi notification context
[kafka-producer-network-thread | eda10664-880c-4993-a4af-a36c7c5494d8] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=eda10664-880c-4993-a4af-a36c7c5494d8] Node -1 disconnected.
[kafka-producer-network-thread | eda10664-880c-4993-a4af-a36c7c5494d8] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=eda10664-880c-4993-a4af-a36c7c5494d8] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | eda10664-880c-4993-a4af-a36c7c5494d8] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=eda10664-880c-4993-a4af-a36c7c5494d8] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.common.network.RequestProcessor - RequestProcessor instantiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id PceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@663ad8b7, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=PceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = PceListener
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@663ad8b7, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=PceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [PceListener] already exists, so skipping a new mbean creation.
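The mbean notice above is a side effect of reusing client.id=PceListener: Kafka registers one kafka.producer App info mbean per client.id, and a second registration with the same id is skipped rather than treated as an error. When unique JMX beans per publisher matter, suffixing the logical name is a common workaround; a hypothetical helper, not TransportPCE's actual API:

    import java.util.Properties;
    import java.util.concurrent.atomic.AtomicInteger;

    import org.apache.kafka.clients.producer.ProducerConfig;

    public class UniqueClientIds {
        private static final AtomicInteger COUNTER = new AtomicInteger();

        // Derives a JMX-unique client.id from a logical publisher name.
        static Properties withUniqueClientId(Properties base, String logicalName) {
            Properties props = new Properties();
            props.putAll(base);
            props.put(ProducerConfig.CLIENT_ID_CONFIG,
                    logicalName + "-" + COUNTER.incrementAndGet());
            return props;
        }
    }
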
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandlerOperations with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@663ad8b7, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandlerOperations}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = ServiceHandlerOperations
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@663ad8b7, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandlerOperations] Instantiated an idempotent producer.
[kafka-producer-network-thread | PceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Node -1 disconnected.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandlerOperations] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandler with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@663ad8b7, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandler}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = ServiceHandler
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[kafka-producer-network-thread | ServiceHandlerOperations] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@663ad8b7, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandler] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandler] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id RendererListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@663ad8b7, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=RendererListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = RendererListener
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@663ad8b7, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=RendererListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandler] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [RendererListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding alarm topic: ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@57fe86c7, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = ServiceListener
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@57fe86c7, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | RendererListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Node -1 disconnected.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceListener] already exists, so skipping a new mbean creation.
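ServiceListener differs from the four topics before it: the TopicManager records it as an alarm topic ("Adding alarm topic" above) and its publisher is wired to NotificationAlarmServiceSerializer rather than NotificationServiceSerializer, keeping service-alarm traffic separate from process notifications. A hypothetical condensation of that per-kind bookkeeping (names illustrative, not the real TopicManager interface):

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical sketch: process and alarm topics tracked separately,
    // each resolved to the serializer its publisher should use.
    public class TopicRegistrySketch {
        enum Kind { PROCESS, ALARM }

        private final Map<String, Kind> topics = new HashMap<>();

        void addProcessTopic(String name) { topics.put(name, Kind.PROCESS); }
        void addAlarmTopic(String name)   { topics.put(name, Kind.ALARM); }

        String serializerFor(String topic) {
            return topics.get(topic) == Kind.ALARM
                ? "org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer"
                : "org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer";
        }
    }
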
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - tapi converter: org.opendaylight.transportpce.common.converter.JsonStringConverter@4d4dddd6
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - NbiNotificationsProvider Session Initiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.CreateNotificationSubscriptionServiceImpl - Adding T-API topic: 704d451f-9b77-425e-97e5-b39085483949 to Kafka server
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding new tapi topic: 704d451f-9b77-425e-97e5-b39085483949
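For T-API subscriptions the topic name is not a fixed class name but the subscription's UUID, so each create-notification-subscription-service call yields a fresh topic plus a producer whose client.id equals that UUID, as the following lines show. Purely as an illustration of the naming scheme, not the actual RPC handler:

    import java.util.UUID;

    // Hypothetical outline of the subscription flow logged above: one new UUID
    // names the Kafka topic, the producer client.id, and later the consumer group.
    public class TapiSubscriptionSketch {
        public static void main(String[] args) {
            String topic = UUID.randomUUID().toString();
            System.out.println("Adding T-API topic: " + topic + " to Kafka server");
        }
    }
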
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id 704d451f-9b77-425e-97e5-b39085483949 with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4d4dddd6, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=704d451f-9b77-425e-97e5-b39085483949}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[kafka-producer-network-thread | ServiceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Node -1 disconnected.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = 704d451f-9b77-425e-97e5-b39085483949
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer

[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4d4dddd6, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=704d451f-9b77-425e-97e5-b39085483949}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=704d451f-9b77-425e-97e5-b39085483949] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1730276108837
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Getting tapi notification context
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationListImpl - RPC getNotificationList received
[kafka-producer-network-thread | 704d451f-9b77-425e-97e5-b39085483949] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=704d451f-9b77-425e-97e5-b39085483949] Node -1 disconnected.
[kafka-producer-network-thread | 704d451f-9b77-425e-97e5-b39085483949] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=704d451f-9b77-425e-97e5-b39085483949] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | 704d451f-9b77-425e-97e5-b39085483949] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=704d451f-9b77-425e-97e5-b39085483949] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationListImpl - Going to get notifications for topic 704d451f-9b77-425e-97e5-b39085483949
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribing for group id 704d451f-9b77-425e-97e5-b39085483949, client config id 704d451f-9b77-425e-97e5-b39085483949 with properties {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationDeserializer, enable.auto.commit=false, group.id=704d451f-9b77-425e-97e5-b39085483949, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@59057e12, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=704d451f-9b77-425e-97e5-b39085483949}
[main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values: 
	allow.auto.create.topics = true
	auto.commit.interval.ms = 1000
	auto.include.jmx.reporter = true
	auto.offset.reset = earliest
	bootstrap.servers = [localhost:8080]
	check.crcs = true
	client.dns.lookup = use_all_dns_ips
	client.id = 704d451f-9b77-425e-97e5-b39085483949
	client.rack = 
	connections.max.idle.ms = 540000
	default.api.timeout.ms = 60000
	enable.auto.commit = false
	enable.metrics.push = true
	exclude.internal.topics = true
	fetch.max.bytes = 52428800
	fetch.max.wait.ms = 500
	fetch.min.bytes = 1
	group.id = 704d451f-9b77-425e-97e5-b39085483949
	group.instance.id = null
	group.protocol = classic
	group.remote.assignor = null
	heartbeat.interval.ms = 3000
	interceptor.classes = []
	internal.leave.group.on.close = true
	internal.throw.on.fetch.stable.offset.unsupported = false
	isolation.level = read_uncommitted
	key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer
	max.partition.fetch.bytes = 1048576
	max.poll.interval.ms = 300000
	max.poll.records = 500
	metadata.max.age.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor]
	receive.buffer.bytes = 65536
	reconnect.backoff.max.ms = 1000
	reconnect.backoff.ms = 50
	request.timeout.ms = 30000
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	session.timeout.ms = 45000
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	value.deserializer = class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationDeserializer

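The consumer mirrors the producer setup: group.id, client.id and the subscribed topic are all the subscription UUID, auto-commit is off, and auto.offset.reset=earliest makes the brand-new group replay the topic from offset 0. A self-contained sketch of the poll-once-then-leave cycle logged below, with StringDeserializer standing in for TapiNotificationDeserializer:

    import java.time.Duration;
    import java.util.List;
    import java.util.Properties;

    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    public class SubscriberSketch {
        public static void main(String[] args) {
            String topic = "704d451f-9b77-425e-97e5-b39085483949";
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080");
            props.put(ConsumerConfig.GROUP_ID_CONFIG, topic);
            props.put(ConsumerConfig.CLIENT_ID_CONFIG, topic);
            props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(List.of(topic));
                // One bounded poll; with the broker down this returns no records,
                // matching "Getting records '[]'" in the log below.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                System.out.println("records: " + records.count());
                consumer.unsubscribe(); // triggers the "pro-actively leaving the group" lines
            }
        }
    }
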
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationDeserializer - Tapi Deserializer configuration {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationDeserializer, enable.auto.commit=false, group.id=704d451f-9b77-425e-97e5-b39085483949, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@59057e12, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=704d451f-9b77-425e-97e5-b39085483949}
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1730276108845
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribe request to topic '704d451f-9b77-425e-97e5-b39085483949' 
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Subscribed to topic(s): 704d451f-9b77-425e-97e5-b39085483949
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Getting records '[]' 
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Unsubscribed all topics or patterns and assigned partitions
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=704d451f-9b77-425e-97e5-b39085483949, groupId=704d451f-9b77-425e-97e5-b39085483949] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics scheduler closed
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.metrics.JmxReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.telemetry.internals.ClientTelemetryReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics reporters closed
[main] INFO org.apache.kafka.common.utils.AppInfoParser - App info kafka.consumer for 704d451f-9b77-425e-97e5-b39085483949 unregistered
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationListImpl - TAPI notifications = []
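The empty result is consistent with the connection failures rather than an absence of notifications: the consumer never reached a broker, its single poll returned no records ("Getting records '[]'" above), and it then left the group cleanly, so getNotificationList reports an empty list instead of an error.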
[main] INFO org.opendaylight.transportpce.common.network.RequestProcessor - RequestProcessor instantiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id PceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@38de88e9, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=PceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = PceListener
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@38de88e9, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=PceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [PceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandlerOperations with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@38de88e9, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandlerOperations}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
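This warning repeats for every publisher because the shared properties set reconnect.backoff.ms=600000 without a matching reconnect.backoff.max.ms: the client then falls back to a flat backoff and, as the resolved dump below shows, pins reconnect.backoff.max.ms to the same 600000. Supplying both keys keeps exponential backoff enabled, e.g. (values illustrative, not taken from the TransportPCE configuration):

import java.util.Properties;
import org.apache.kafka.clients.producer.ProducerConfig;

public final class BackoffProps {
    static Properties withExponentialReconnectBackoff(Properties props) {
        // Start reconnect attempts at 1s and back off exponentially up to 600s;
        // setting both keys avoids the CommonClientConfigs warning seen above.
        props.put(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG, "1000");
        props.put(ProducerConfig.RECONNECT_BACKOFF_MAX_MS_CONFIG, "600000");
        return props;
    }
}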
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = ServiceHandlerOperations
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Serializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@38de88e9, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandlerOperations] Instantiated an idempotent producer.
[kafka-producer-network-thread | PceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Node -1 disconnected.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
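The three lines above report that the PceListener producer could not complete its first metadata fetch: nothing answers on localhost:8080 (an unusual address for Kafka, whose conventional plaintext port is 9092), so the network thread keeps retrying in the background while the application continues. Where broker availability should be verified up front instead of surfacing as these warnings, one option is a reachability probe with the Kafka admin client; a minimal sketch assuming the same bootstrap address:

import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.AdminClientConfig;

public final class BrokerProbe {
    static boolean brokerReachable(String bootstrap) {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrap); // e.g. "localhost:8080"
        props.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "5000");
        try (Admin admin = Admin.create(props)) {
            // describeCluster() forces a round trip to the bootstrap broker.
            admin.describeCluster().clusterId().get(5, TimeUnit.SECONDS);
            return true;
        } catch (Exception e) {
            return false; // e.g. a TimeoutException when nothing listens on the port
        }
    }
}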
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandlerOperations] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creating publisher for id ServiceHandler with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@38de88e9, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandler}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = ServiceHandler
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Serializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@38de88e9, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandler] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandlerOperations] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandler] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creating publisher for id RendererListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@38de88e9, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=RendererListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = RendererListener
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Serializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@38de88e9, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=RendererListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandler] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [RendererListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding alarm topic: ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creating publisher for id ServiceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@30fa08c8, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: 
	acks = -1
	auto.include.jmx.reporter = true
	batch.size = 16384
	bootstrap.servers = [localhost:8080]
	buffer.memory = 33554432
	client.dns.lookup = use_all_dns_ips
	client.id = ServiceListener
	compression.type = none
	connections.max.idle.ms = 540000
	delivery.timeout.ms = 120000
	enable.idempotence = true
	enable.metrics.push = true
	interceptor.classes = []
	key.serializer = class org.apache.kafka.common.serialization.StringSerializer
	linger.ms = 1
	max.block.ms = 60000
	max.in.flight.requests.per.connection = 1
	max.request.size = 1048576
	metadata.max.age.ms = 300000
	metadata.max.idle.ms = 300000
	metric.reporters = []
	metrics.num.samples = 2
	metrics.recording.level = INFO
	metrics.sample.window.ms = 30000
	partitioner.adaptive.partitioning.enable = true
	partitioner.availability.timeout.ms = 0
	partitioner.class = null
	partitioner.ignore.keys = false
	receive.buffer.bytes = 32768
	reconnect.backoff.max.ms = 600000
	reconnect.backoff.ms = 600000
	request.timeout.ms = 30000
	retries = 3
	retry.backoff.max.ms = 1000
	retry.backoff.ms = 100
	sasl.client.callback.handler.class = null
	sasl.jaas.config = null
	sasl.kerberos.kinit.cmd = /usr/bin/kinit
	sasl.kerberos.min.time.before.relogin = 60000
	sasl.kerberos.service.name = null
	sasl.kerberos.ticket.renew.jitter = 0.05
	sasl.kerberos.ticket.renew.window.factor = 0.8
	sasl.login.callback.handler.class = null
	sasl.login.class = null
	sasl.login.connect.timeout.ms = null
	sasl.login.read.timeout.ms = null
	sasl.login.refresh.buffer.seconds = 300
	sasl.login.refresh.min.period.seconds = 60
	sasl.login.refresh.window.factor = 0.8
	sasl.login.refresh.window.jitter = 0.05
	sasl.login.retry.backoff.max.ms = 10000
	sasl.login.retry.backoff.ms = 100
	sasl.mechanism = GSSAPI
	sasl.oauthbearer.clock.skew.seconds = 30
	sasl.oauthbearer.expected.audience = null
	sasl.oauthbearer.expected.issuer = null
	sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
	sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
	sasl.oauthbearer.jwks.endpoint.url = null
	sasl.oauthbearer.scope.claim.name = scope
	sasl.oauthbearer.sub.claim.name = sub
	sasl.oauthbearer.token.endpoint.url = null
	security.protocol = PLAINTEXT
	security.providers = null
	send.buffer.bytes = 131072
	socket.connection.setup.timeout.max.ms = 30000
	socket.connection.setup.timeout.ms = 10000
	ssl.cipher.suites = null
	ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
	ssl.endpoint.identification.algorithm = https
	ssl.engine.factory.class = null
	ssl.key.password = null
	ssl.keymanager.algorithm = SunX509
	ssl.keystore.certificate.chain = null
	ssl.keystore.key = null
	ssl.keystore.location = null
	ssl.keystore.password = null
	ssl.keystore.type = JKS
	ssl.protocol = TLSv1.3
	ssl.provider = null
	ssl.secure.random.implementation = null
	ssl.trustmanager.algorithm = PKIX
	ssl.truststore.certificates = null
	ssl.truststore.location = null
	ssl.truststore.password = null
	ssl.truststore.type = JKS
	transaction.timeout.ms = 60000
	transactional.id = null
	value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer - Serializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@30fa08c8, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
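ServiceListener differs from the four process-topic publishers in two visible ways: TopicManager registers it under an alarm topic, and its value serializer is NotificationAlarmServiceSerializer. Both TransportPCE serializers plug into the producer through Kafka's Serializer interface, whose configure() hook is where the configuration line above gets logged. A schematic sketch of that shape; the payload handling is a placeholder, not the TransportPCE implementation:

import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.kafka.common.serialization.Serializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class AlarmSerializerSketch implements Serializer<Object> {
    private static final Logger LOG = LoggerFactory.getLogger(AlarmSerializerSketch.class);

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // The producer calls this once at construction; TransportPCE's
        // serializers log their configuration map here, as seen above.
        LOG.info("Serializer configuration {}", configs);
    }

    @Override
    public byte[] serialize(String topic, Object notification) {
        // Placeholder: the real class converts the YANG-modeled notification
        // to a JSON string (via its converter property) before encoding.
        return String.valueOf(notification).getBytes(StandardCharsets.UTF_8);
    }
}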
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | RendererListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Node -1 disconnected.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - tapi converter: org.opendaylight.transportpce.common.converter.JsonStringConverter@2756eab0
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - NbiNotificationsProvider Session Initiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationsProcessServiceImpl - RPC getNotificationsService received
[main] WARN org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationsProcessServiceImpl - Missing mandatory params for input GetNotificationsProcessServiceInput{}
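The RPC arrived with an empty input, GetNotificationsProcessServiceInput{}, so the handler logs the warning above and returns without touching Kafka. The guard presumably resembles the following; the field names (connection type, consumer id, group id) and the method shape are assumptions for illustration, not the TransportPCE source:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public final class InputGuardSketch {
    private static final Logger LOG = LoggerFactory.getLogger(InputGuardSketch.class);

    /** Returns true when all mandatory fields are present; warns and refuses otherwise. */
    static boolean hasMandatoryParams(Object connectionType, Object idConsumer, Object groupId) {
        if (connectionType == null || idConsumer == null || groupId == null) {
            // Mirrors the warning emitted above for an empty input object.
            LOG.warn("Missing mandatory params for input {}", "GetNotificationsProcessServiceInput{}");
            return false;
        }
        return true;
    }
}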
[kafka-producer-network-thread | ServiceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Node -1 disconnected.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected