[main] INFO org.opendaylight.transportpce.common.network.RequestProcessor - RequestProcessor instantiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id PceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@480d9bfb, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=PceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [localhost:8080] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = PceListener compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = true enable.metrics.push = true interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 1 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 600000 reconnect.backoff.ms = 600000 request.timeout.ms = 30000 retries = 3 retry.backoff.max.ms = 1000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@480d9bfb, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=PceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [PceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandlerOperations with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@480d9bfb, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandlerOperations}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
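
For reference, the Publisher wrapper above is configured like an ordinary Kafka producer. A minimal standalone sketch of an equivalent setup, assuming only the stock Kafka Java client (org.apache.kafka:kafka-clients): the property values mirror the "Creation publisher" entry, acks=all is the same setting ProducerConfig prints under its numeric alias -1, StringSerializer stands in for TransportPCE's NotificationServiceSerializer so the sketch compiles on its own, and the topic name is an assumption taken from the "Adding process topic" entry.

    import java.util.Properties;

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class PublisherSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            // Values mirror the "Creation publisher for id PceListener" entry above.
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080");
            props.put(ProducerConfig.CLIENT_ID_CONFIG, "PceListener");
            props.put(ProducerConfig.ACKS_CONFIG, "all"); // printed by ProducerConfig as -1
            props.put(ProducerConfig.RETRIES_CONFIG, 3);
            props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
            props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
            props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432L);
            props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1);
            props.put(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG, 600000L);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            // StringSerializer stands in for NotificationServiceSerializer here.
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
                // Topic name assumed from the "Adding process topic" entry; payload illustrative.
                producer.send(new ProducerRecord<>("PceListener", "key", "value"));
            }
        }
    }
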
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [localhost:8080] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = ServiceHandlerOperations compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = true enable.metrics.push = true interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 1 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 600000 reconnect.backoff.ms = 600000 request.timeout.ms = 30000 retries = 3 retry.backoff.max.ms = 1000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@480d9bfb, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandlerOperations] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandlerOperations] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandler with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@480d9bfb, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandler}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
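
The CommonClientConfigs warning repeated throughout this log is benign but easy to address: when reconnect.backoff.ms is supplied without reconnect.backoff.max.ms, the client pins the max to the base value (both show as 600000 in the dumps above), which disables exponential backoff. A hedged sketch of the fix, with illustrative values rather than TransportPCE defaults:

    import java.util.Properties;

    import org.apache.kafka.clients.producer.ProducerConfig;

    public class BackoffConfigSketch {
        static Properties backoffProps() {
            Properties props = new Properties();
            // Setting both values keeps exponential reconnect backoff enabled
            // and silences the warning above (values illustrative).
            props.put(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG, 1000L);
            props.put(ProducerConfig.RECONNECT_BACKOFF_MAX_MS_CONFIG, 600000L);
            return props;
        }
    }
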
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [localhost:8080] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = ServiceHandler compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = true enable.metrics.push = true interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 1 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 600000 reconnect.backoff.ms = 600000 request.timeout.ms = 30000 retries = 3 retry.backoff.max.ms = 1000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@480d9bfb, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandler] Instantiated an idempotent producer.
[kafka-producer-network-thread | PceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Node -1 disconnected.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[kafka-producer-network-thread | ServiceHandlerOperations] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandler] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id RendererListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@480d9bfb, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=RendererListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
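
The "Node -1 disconnected" and "Bootstrap broker localhost:8080 ... disconnected" messages that follow each producer simply mean nothing is listening on localhost:8080; node id -1 marks a bootstrap address that has not yet been resolved to a real broker id (Kafka brokers conventionally listen on 9092). A quick reachability check, sketched with the stock AdminClient; the broker address is an assumption for a locally running broker:

    import java.util.Properties;
    import java.util.concurrent.ExecutionException;

    import org.apache.kafka.clients.admin.AdminClient;
    import org.apache.kafka.clients.admin.AdminClientConfig;

    public class BrokerCheck {
        public static void main(String[] args) throws ExecutionException, InterruptedException {
            Properties props = new Properties();
            // 9092 is Kafka's conventional listener port; the log above points at 8080, where nothing answers.
            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            props.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 5000);
            try (AdminClient admin = AdminClient.create(props)) {
                // describeCluster() fails fast when no broker is reachable.
                System.out.println("Cluster id: " + admin.describeCluster().clusterId().get());
            }
        }
    }
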
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [localhost:8080] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = RendererListener compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = true enable.metrics.push = true interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 1 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 600000 reconnect.backoff.ms = 600000 request.timeout.ms = 30000 retries = 3 retry.backoff.max.ms = 1000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@480d9bfb, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=RendererListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandler] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [RendererListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding alarm topic: ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@59db8446, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [localhost:8080] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = ServiceListener compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = true enable.metrics.push = true interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 1 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 600000 reconnect.backoff.ms = 600000 request.timeout.ms = 30000 retries = 3 retry.backoff.max.ms = 1000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@59db8446, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | RendererListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Node -1 disconnected.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - tapi converter: org.opendaylight.transportpce.common.converter.JsonStringConverter@1196d027
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - NbiNotificationsProvider Session Initiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationsAlarmServiceImpl - RPC getNotificationsAlarmService received
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribing for group id groupId, client config id consumerId with properties {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceDeserializer, enable.auto.commit=false, group.id=groupId, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@26fd680f, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=consumerId}
[kafka-producer-network-thread | ServiceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Node -1 disconnected.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
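
ServiceListener differs from the other publishers: it is registered under an alarm topic and uses NotificationAlarmServiceSerializer, and its records are what the 'alarmservice' subscriber further down tries to read. A minimal publish sketch under the same assumptions as above (stock client, StringSerializer standing in for the TransportPCE serializer, topic name taken from the Subscriber entries, payload illustrative):

    import java.util.Properties;

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class AlarmPublishSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080"); // as logged; no broker answers here
            props.put(ProducerConfig.CLIENT_ID_CONFIG, "ServiceListener");
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            // StringSerializer stands in for NotificationAlarmServiceSerializer.
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
                // Topic name taken from the Subscriber entries below; payload is illustrative JSON.
                producer.send(new ProducerRecord<>("alarmservice", "service-1", "{\"message\":\"alarm raised\"}"));
            }
        }
    }
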
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 1000 auto.include.jmx.reporter = true auto.offset.reset = earliest bootstrap.servers = [localhost:8080] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = consumerId client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false enable.metrics.push = true exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = groupId group.instance.id = null group.protocol = classic group.remote.assignor = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.max.ms = 1000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 45000 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceDeserializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceDeserializer - Deserializer configuration {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceDeserializer, enable.auto.commit=false, group.id=groupId, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@26fd680f, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=consumerId}
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1725933669420
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribe request to topic 'alarmservice'
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=consumerId, groupId=groupId] Subscribed to topic(s): alarmservice
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
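
The consumer side mirrors what Subscriber logs around here: subscribe to 'alarmservice', poll once, then pro-actively leave the group. A sketch with the stock consumer API, reusing the logged group.id/client.id and swapping StringDeserializer in for NotificationAlarmServiceDeserializer:

    import java.time.Duration;
    import java.util.List;
    import java.util.Properties;

    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    public class SubscriberSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080");
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "groupId");
            props.put(ConsumerConfig.CLIENT_ID_CONFIG, "consumerId");
            props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            // StringDeserializer stands in for NotificationAlarmServiceDeserializer.
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(List.of("alarmservice"));
                // With no broker on localhost:8080 this comes back empty,
                // matching the "Getting records '[]'" entry that follows.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                System.out.println("Fetched " + records.count() + " record(s)");
                consumer.unsubscribe(); // triggers the "consumer pro-actively leaving the group" entries
            }
        }
    }
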
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Getting records '[]'
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=consumerId, groupId=groupId] Unsubscribed all topics or patterns and assigned partitions
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics scheduler closed
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.metrics.JmxReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.telemetry.internals.ClientTelemetryReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics reporters closed
[main] INFO org.apache.kafka.common.utils.AppInfoParser - App info kafka.consumer for consumerId unregistered
[main] INFO org.opendaylight.transportpce.common.network.RequestProcessor - RequestProcessor instantiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id PceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4f6f3c, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=PceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [localhost:8080] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = PceListener compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = true enable.metrics.push = true interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 1 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 600000 reconnect.backoff.ms = 600000 request.timeout.ms = 30000 retries = 3 retry.backoff.max.ms = 1000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4f6f3c, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=PceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [PceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandlerOperations with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4f6f3c, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandlerOperations}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [localhost:8080] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = ServiceHandlerOperations compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = true enable.metrics.push = true interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 1 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 600000 reconnect.backoff.ms = 600000 request.timeout.ms = 30000 retries = 3 retry.backoff.max.ms = 1000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4f6f3c, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandlerOperations] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandlerOperations] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandler with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4f6f3c, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandler}
[kafka-producer-network-thread | PceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [localhost:8080] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = ServiceHandler compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = true enable.metrics.push = true interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 1 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 600000 reconnect.backoff.ms = 600000 request.timeout.ms = 30000 retries = 3 retry.backoff.max.ms = 1000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4f6f3c, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandler] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandlerOperations] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandler] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id RendererListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4f6f3c, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=RendererListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (same values as the previous dumps, except client.id = RendererListener)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4f6f3c, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=RendererListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandler] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [RendererListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding alarm topic: ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2e8279dc, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
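Each "Creation publisher for id ..." line logs the exact property map handed to the Kafka producer behind that publisher. A sketch reconstructing an equivalent producer from the logged values; the value serializer class name is taken from the log and must be on the classpath, everything else is the standard kafka-clients API:

    import java.util.Properties;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;

    public final class PublisherSketch {
        public static KafkaProducer<String, Object> create(String clientId) {
            Properties props = new Properties();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080");
            props.put(ProducerConfig.CLIENT_ID_CONFIG, clientId);
            props.put(ProducerConfig.ACKS_CONFIG, "all"); // printed as acks = -1 in the dumps above
            props.put(ProducerConfig.RETRIES_CONFIG, 3);
            props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1); // keeps retries from reordering sends
            props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
            props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
            props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                    "org.apache.kafka.common.serialization.StringSerializer");
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                    "org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer");
            return new KafkaProducer<>(props);
        }
    }

Note that enable.idempotence = true in the dumps is the kafka-clients 3.x default and is compatible with acks=all and a bounded retry count, which is why every producer logs "Instantiated an idempotent producer".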
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (same values as the previous dumps, except client.id = ServiceListener and value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2e8279dc, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | RendererListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Node -1 disconnected.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - tapi converter: org.opendaylight.transportpce.common.converter.JsonStringConverter@17fe8aad
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - NbiNotificationsProvider Session Initiated
[kafka-producer-network-thread | ServiceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Node -1 disconnected.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationsProcessServiceImpl - RPC getNotificationsService received
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribing for group id groupId, client config id consumerId with properties {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceDeserializer, enable.auto.commit=false, group.id=groupId, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4ad3cac8, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=consumerId}
[main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values:
    allow.auto.create.topics = true
    auto.commit.interval.ms = 1000
    auto.include.jmx.reporter = true
    auto.offset.reset = earliest
    bootstrap.servers = [localhost:8080]
    check.crcs = true
    client.dns.lookup = use_all_dns_ips
    client.id = consumerId
    client.rack =
    connections.max.idle.ms = 540000
    default.api.timeout.ms = 60000
    enable.auto.commit = false
    enable.metrics.push = true
    exclude.internal.topics = true
    fetch.max.bytes = 52428800
    fetch.max.wait.ms = 500
    fetch.min.bytes = 1
    group.id = groupId
    group.instance.id = null
    group.protocol = classic
    group.remote.assignor = null
    heartbeat.interval.ms = 3000
    interceptor.classes = []
    internal.leave.group.on.close = true
    internal.throw.on.fetch.stable.offset.unsupported = false
    isolation.level = read_uncommitted
    key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer
    max.partition.fetch.bytes = 1048576
    max.poll.interval.ms = 300000
    max.poll.records = 500
    metadata.max.age.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor]
    receive.buffer.bytes = 65536
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 30000
    retry.backoff.max.ms = 1000
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.connect.timeout.ms = null
    sasl.login.read.timeout.ms = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.login.retry.backoff.max.ms = 10000
    sasl.login.retry.backoff.ms = 100
    sasl.mechanism = GSSAPI
    sasl.oauthbearer.clock.skew.seconds = 30
    sasl.oauthbearer.expected.audience = null
    sasl.oauthbearer.expected.issuer = null
    sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
    sasl.oauthbearer.jwks.endpoint.url = null
    sasl.oauthbearer.scope.claim.name = scope
    sasl.oauthbearer.sub.claim.name = sub
    sasl.oauthbearer.token.endpoint.url = null
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    session.timeout.ms = 45000
    socket.connection.setup.timeout.max.ms = 30000
    socket.connection.setup.timeout.ms = 10000
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
    ssl.endpoint.identification.algorithm = https
    ssl.engine.factory.class = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.certificate.chain = null
    ssl.keystore.key = null
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.3
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.certificates = null
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    value.deserializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceDeserializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceDeserializer - Deserializer configuration {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceDeserializer, enable.auto.commit=false, group.id=groupId, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4ad3cac8, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=consumerId}
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1725933670486
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribe request to topic 'service'
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=consumerId, groupId=groupId] Subscribed to topic(s): service
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
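The Subscriber lines above wrap a plain Kafka consumer: build the logged properties, subscribe to the 'service' topic, poll once (empty here, since no broker answers), then unsubscribe and close. A standalone sketch of the same sequence; StringDeserializer stands in for the TransportPCE value deserializer so it runs without the project on the classpath:

    import java.time.Duration;
    import java.util.Collections;
    import java.util.Properties;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;

    public final class SubscriberSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080");
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "groupId");
            props.put(ConsumerConfig.CLIENT_ID_CONFIG, "consumerId");
            props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                    "org.apache.kafka.common.serialization.StringDeserializer");
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                    "org.apache.kafka.common.serialization.StringDeserializer");
            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Collections.singletonList("service"));
                // With no reachable broker this returns empty, matching "Getting records '[]'".
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                System.out.println("records: " + records.count());
                consumer.unsubscribe(); // produces the "leaving the group" coordinator lines
            } // close() unregisters metrics and the kafka.consumer app info, as logged below
        }
    }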
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=consumerId, groupId=groupId] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Getting records '[]'
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=consumerId, groupId=groupId] Unsubscribed all topics or patterns and assigned partitions
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=consumerId, groupId=groupId] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics scheduler closed
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.metrics.JmxReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.telemetry.internals.ClientTelemetryReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics reporters closed
[main] INFO org.apache.kafka.common.utils.AppInfoParser - App info kafka.consumer for consumerId unregistered
[main] INFO org.opendaylight.transportpce.common.network.RequestProcessor - RequestProcessor instantiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id PceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6234e455, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=PceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (same values as the previous dumps, except client.id = PceListener)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6234e455, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=PceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [PceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandlerOperations with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6234e455, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandlerOperations}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
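All the "Connection to node -1 (localhost/127.0.0.1:8080) could not be established" noise means that no Kafka broker answers at the configured bootstrap.servers; throughout this capture the clients keep retrying against localhost:8080. A quick reachability probe with the standard admin client; the address here is an assumption (9092 is a stock broker's default listener), substitute the broker actually deployed:

    import java.util.Properties;
    import java.util.concurrent.TimeUnit;
    import org.apache.kafka.clients.admin.Admin;
    import org.apache.kafka.clients.admin.AdminClientConfig;

    public final class BrokerProbe {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            // Assumed address: 9092 is the default listener of a stock Kafka broker.
            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            try (Admin admin = Admin.create(props)) {
                // Fails with a TimeoutException when no broker is reachable,
                // the same condition behind the "node -1" warnings above.
                String clusterId = admin.describeCluster().clusterId().get(10, TimeUnit.SECONDS);
                System.out.println("connected to cluster " + clusterId);
            }
        }
    }

If the probe times out, the producers and consumers in this log will keep cycling through the disconnect warnings shown here until a broker comes up at the configured address.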
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (same values as the previous dumps, except client.id = ServiceHandlerOperations)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6234e455, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandlerOperations] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandlerOperations] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandler with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6234e455, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandler}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (same values as the previous dumps, except client.id = ServiceHandler)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6234e455, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandler] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandlerOperations] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Node -1 disconnected.
[kafka-producer-network-thread | PceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandler] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id RendererListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6234e455, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=RendererListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (same values as the previous dumps, except client.id = RendererListener)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6234e455, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=RendererListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [RendererListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding alarm topic: ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@5ce18413, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (same values as the previous dumps, except client.id = ServiceListener and value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@5ce18413, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandler] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceListener] already exists, so skipping a new mbean creation.
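TopicManager registers two kinds of topics in this log: one "process" topic per listener class, published with NotificationServiceSerializer, and a single "alarm" topic for ServiceListener, published with NotificationAlarmServiceSerializer. An illustrative map of that pairing exactly as it appears above; this is a reading aid, not TransportPCE's actual data structure:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public final class TopicSerializerMap {
        public static void main(String[] args) {
            Map<String, String> topicToSerializer = new LinkedHashMap<>();
            // Process topics, as logged by "Adding process topic: ..."
            for (String topic : new String[] {
                    "PceListener", "ServiceHandlerOperations", "ServiceHandler", "RendererListener"}) {
                topicToSerializer.put(topic,
                    "org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer");
            }
            // Alarm topic, as logged by "Adding alarm topic: ServiceListener"
            topicToSerializer.put("ServiceListener",
                "org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer");
            topicToSerializer.forEach((t, s) -> System.out.println(t + " -> " + s));
        }
    }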
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - tapi converter: org.opendaylight.transportpce.common.converter.JsonStringConverter@70cd17c3
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - NbiNotificationsProvider Session Initiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.CreateNotificationSubscriptionServiceImpl - Adding T-API topic: af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8 to Kafka server
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding new tapi topic: af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8 with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@70cd17c3, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (same values as the previous dumps, except client.id = af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8 and value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@70cd17c3, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Node -1 disconnected.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1725933671524
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Getting tapi notification context
[kafka-producer-network-thread | RendererListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Node -1 disconnected.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[kafka-producer-network-thread | af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8] Node -1 disconnected.
[main] INFO org.opendaylight.transportpce.common.network.RequestProcessor - RequestProcessor instantiated
[kafka-producer-network-thread | af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id PceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2f2eeb5a, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=PceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
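The create-notification-subscription-service flow above provisions a topic named after the subscription UUID and builds a dedicated publisher for it with TapiNotificationSerializer. A sketch of such a per-subscription producer; the UUID comes from the log, and StringSerializer replaces the TransportPCE serializer so the sketch is self-contained:

    import java.util.Properties;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.clients.producer.ProducerRecord;

    public final class TapiTopicSketch {
        public static void main(String[] args) {
            // Topic id and client.id both equal the subscription uuid, as logged above.
            String subscriptionTopic = "af4e97f3-8a29-4d4a-8e43-76b5eaff7fd8";
            Properties props = new Properties();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8080");
            props.put(ProducerConfig.CLIENT_ID_CONFIG, subscriptionTopic);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                    "org.apache.kafka.common.serialization.StringSerializer");
            // The log wires in TapiNotificationSerializer here; StringSerializer
            // keeps this sketch runnable without TransportPCE on the classpath.
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                    "org.apache.kafka.common.serialization.StringSerializer");
            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
                producer.send(new ProducerRecord<>(subscriptionTopic, "notification-json-payload"));
            } // close() flushes pending sends before shutting the producer down
        }
    }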
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [localhost:8080] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = PceListener compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = true enable.metrics.push = true interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 1 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 600000 reconnect.backoff.ms = 600000 request.timeout.ms = 30000 retries = 3 retry.backoff.max.ms = 1000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2f2eeb5a, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=PceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [PceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandlerOperations with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2f2eeb5a, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandlerOperations}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (repeated: identical to the PceListener dump above except client.id = ServiceHandlerOperations)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2f2eeb5a, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandlerOperations] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandlerOperations] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandler with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2f2eeb5a, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandler}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[kafka-producer-network-thread | PceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Node -1 disconnected.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (repeated: identical to the PceListener dump above except client.id = ServiceHandler)
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2f2eeb5a, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandler] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandlerOperations] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandler] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id RendererListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2f2eeb5a, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=RendererListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
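Every producer cycles through the same "Connection to node -1 (localhost/127.0.0.1:8080) could not be established" warnings because bootstrap.servers points at localhost:8080, where no Kafka broker is answering; 8080 is conventionally an HTTP port, while a local broker listens on 9092 by default. A sketch of wiring a producer to a reachable broker; the host:port is a deployment-specific assumption:

    import java.util.Properties;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class BootstrapSketch {
        public static KafkaProducer<String, String> localProducer() {
            Properties props = new Properties();
            // Assumed broker address; the log shows localhost:8080, which never answers here.
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            return new KafkaProducer<>(props);
        }
    }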
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (repeated: identical to the PceListener dump above except client.id = RendererListener)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@2f2eeb5a, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=RendererListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandler] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Node -1 disconnected.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [RendererListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding alarm topic: ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6c1dbb6c, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (repeated: identical to the PceListener dump above except client.id = ServiceListener and value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer)
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@6c1dbb6c, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | RendererListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Node -1 disconnected.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - tapi converter: org.opendaylight.transportpce.common.converter.JsonStringConverter@4ee14c9d
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - NbiNotificationsProvider Session Initiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.CreateNotificationSubscriptionServiceImpl - Adding T-API topic: 5f77ab1f-4fae-46b2-a427-391bddd4099b to Kafka server
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding new tapi topic: 5f77ab1f-4fae-46b2-a427-391bddd4099b
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id 5f77ab1f-4fae-46b2-a427-391bddd4099b with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4ee14c9d, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=5f77ab1f-4fae-46b2-a427-391bddd4099b}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
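The records above show the pattern used for T-API subscriptions: each new subscription id (here 5f77ab1f-4fae-46b2-a427-391bddd4099b) becomes both the Kafka topic name and the client.id of a dedicated producer. A generic sketch of that pattern with the plain Kafka client, assuming String-serialized payloads in place of TransportPCE's TapiNotificationSerializer and an assumed broker address:

    import java.util.Properties;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class TopicPublisherSketch {
        // One producer per subscription topic, client.id = topic id, as in the log above.
        public static KafkaProducer<String, String> createPublisher(String topicId) {
            Properties props = new Properties();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker
            props.put(ProducerConfig.CLIENT_ID_CONFIG, topicId);
            props.put(ProducerConfig.ACKS_CONFIG, "all");
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            return new KafkaProducer<>(props);
        }

        public static void publish(KafkaProducer<String, String> producer, String topicId, String json) {
            // The subscription id doubles as the topic name.
            producer.send(new ProducerRecord<>(topicId, json));
        }
    }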
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (repeated: identical to the PceListener dump above except client.id = 5f77ab1f-4fae-46b2-a427-391bddd4099b and value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4ee14c9d, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=5f77ab1f-4fae-46b2-a427-391bddd4099b}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Node -1 disconnected.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1725933671552
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Getting tapi notification context
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationListImpl - RPC getNotificationList received
[kafka-producer-network-thread | 5f77ab1f-4fae-46b2-a427-391bddd4099b] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Node -1 disconnected.
[kafka-producer-network-thread | 5f77ab1f-4fae-46b2-a427-391bddd4099b] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | 5f77ab1f-4fae-46b2-a427-391bddd4099b] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationListImpl - Going to get notifications for topic 5f77ab1f-4fae-46b2-a427-391bddd4099b
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribing for group id 5f77ab1f-4fae-46b2-a427-391bddd4099b, client config id 5f77ab1f-4fae-46b2-a427-391bddd4099b with properties {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationDeserializer, enable.auto.commit=false, group.id=5f77ab1f-4fae-46b2-a427-391bddd4099b, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@32f5d8df, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=5f77ab1f-4fae-46b2-a427-391bddd4099b}
[main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 1000 auto.include.jmx.reporter = true auto.offset.reset = earliest bootstrap.servers = [localhost:8080] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = 5f77ab1f-4fae-46b2-a427-391bddd4099b client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false enable.metrics.push = true exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = 5f77ab1f-4fae-46b2-a427-391bddd4099b group.instance.id = null group.protocol = classic group.remote.assignor = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.max.ms = 1000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 45000 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationDeserializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationDeserializer - Tapi Deserializer configuration {key.deserializer=class org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=class org.opendaylight.transportpce.nbinotifications.serialization.TapiNotificationDeserializer, enable.auto.commit=false, group.id=5f77ab1f-4fae-46b2-a427-391bddd4099b, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@32f5d8df, bootstrap.servers=localhost:8080, auto.commit.interval.ms=1000, auto.offset.reset=earliest, client.id=5f77ab1f-4fae-46b2-a427-391bddd4099b}
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.7.0
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 2ae524ed625438c5
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1725933671561
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Subscribe request to topic '5f77ab1f-4fae-46b2-a427-391bddd4099b'
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Subscribed to topic(s): 5f77ab1f-4fae-46b2-a427-391bddd4099b
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Node -1 disconnected.
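The consumer plugs a project-specific value.deserializer (TapiNotificationDeserializer) into the standard Kafka Deserializer interface, as the "Tapi Deserializer configuration" line above shows. A minimal sketch of how such a class hooks in, with a hypothetical UTF-8 pass-through in place of the real JSON-to-YANG conversion that the converter property implies:

    import java.nio.charset.StandardCharsets;
    import java.util.Map;
    import org.apache.kafka.common.serialization.Deserializer;

    public class PassThroughNotificationDeserializer implements Deserializer<String> {
        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {
            // The real class presumably reads its 'converter' entry from this map,
            // which is the full consumer configuration logged above.
        }

        @Override
        public String deserialize(String topic, byte[] data) {
            // Hypothetical stand-in: return the raw JSON payload as text.
            return data == null ? null : new String(data, StandardCharsets.UTF_8);
        }
    }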
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Node -1 disconnected.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[main] WARN org.apache.kafka.clients.NetworkClient - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.opendaylight.transportpce.nbinotifications.consumer.Subscriber - Getting records '[]'
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.LegacyKafkaConsumer - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Unsubscribed all topics or patterns and assigned partitions
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Resetting generation and member id due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.clients.consumer.internals.ConsumerCoordinator - [Consumer clientId=5f77ab1f-4fae-46b2-a427-391bddd4099b, groupId=5f77ab1f-4fae-46b2-a427-391bddd4099b] Request joining group due to: consumer pro-actively leaving the group
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics scheduler closed
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.metrics.JmxReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Closing reporter org.apache.kafka.common.telemetry.internals.ClientTelemetryReporter
[main] INFO org.apache.kafka.common.metrics.Metrics - Metrics reporters closed
[main] INFO org.apache.kafka.common.utils.AppInfoParser - App info kafka.consumer for 5f77ab1f-4fae-46b2-a427-391bddd4099b unregistered
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationListImpl - TAPI notifications = []
[main] INFO org.opendaylight.transportpce.common.network.RequestProcessor - RequestProcessor instantiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: PceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id PceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@465ea531, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=PceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
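The getNotificationList sequence above is a complete short-lived consumer lifecycle: create a consumer whose group.id and client.id equal the topic id, subscribe, poll once (empty here, "Getting records '[]'", since the broker never answered), then unsubscribe and close, which produces the "pro-actively leaving the group" and metrics-teardown lines. A sketch of the same lifecycle with the plain Kafka client; group.id, auto.offset.reset and enable.auto.commit mirror the logged ConsumerConfig, while the broker address and poll timeout are assumptions:

    import java.time.Duration;
    import java.util.List;
    import java.util.Properties;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    public class SubscriberSketch {
        public static void readOnce(String topicId) {
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker
            props.put(ConsumerConfig.GROUP_ID_CONFIG, topicId);  // group id = topic id, as logged
            props.put(ConsumerConfig.CLIENT_ID_CONFIG, topicId);
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
            props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(List.of(topicId));
                // One bounded poll, mirroring "Getting records '[]'" when nothing arrives.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                }
                consumer.unsubscribe(); // triggers "Unsubscribed all topics or patterns..."
            } // close() tears down metrics and unregisters the kafka.consumer app info
        }
    }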
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (repeated: identical to the PceListener dump above)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@465ea531, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=PceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=PceListener] Instantiated an idempotent producer.
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [PceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandlerOperations
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandlerOperations with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@465ea531, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandlerOperations}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: (repeated: identical to the PceListener dump above except client.id = ServiceHandlerOperations)
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@465ea531, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandlerOperations}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandlerOperations] Instantiated an idempotent producer.
[kafka-producer-network-thread | PceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Node -1 disconnected.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | PceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=PceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandlerOperations] already exists, so skipping a new mbean creation.
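Each "Instantiated an idempotent producer" line follows from the logged combination enable.idempotence = true, acks = all (printed as -1 in the config dump), retries = 3 and max.in.flight.requests.per.connection = 1, which prevents duplicate and reordered writes across retries. A sketch of a send under those settings; the broker address, topic name and payload are placeholders:

    import java.util.Properties;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class IdempotentSendSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker
            props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
            props.put(ProducerConfig.ACKS_CONFIG, "all"); // logged as acks = -1
            props.put(ProducerConfig.RETRIES_CONFIG, "3");
            props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1");
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
                // Topic named after a process listener, as in the log; payload is illustrative.
                producer.send(new ProducerRecord<>("PceListener", "service-path-computed"),
                    (metadata, exception) -> {
                        if (exception != null) {
                            // Fires only after the 3 retries are exhausted; no duplicates are written.
                            exception.printStackTrace();
                        }
                    });
            }
        }
    }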
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: ServiceHandler
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceHandler with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@465ea531, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceHandler}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values:
    acks = -1
    auto.include.jmx.reporter = true
    batch.size = 16384
    bootstrap.servers = [localhost:8080]
    buffer.memory = 33554432
    client.dns.lookup = use_all_dns_ips
    client.id = ServiceHandler
    compression.type = none
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    enable.metrics.push = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 1
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 1
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.adaptive.partitioning.enable = true
    partitioner.availability.timeout.ms = 0
    partitioner.class = null
    partitioner.ignore.keys = false
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 600000
    reconnect.backoff.ms = 600000
    request.timeout.ms = 30000
    retries = 3
    retry.backoff.max.ms = 1000
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.connect.timeout.ms = null
    sasl.login.read.timeout.ms = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.login.retry.backoff.max.ms = 10000
    sasl.login.retry.backoff.ms = 100
    sasl.mechanism = GSSAPI
    sasl.oauthbearer.clock.skew.seconds = 30
    sasl.oauthbearer.expected.audience = null
    sasl.oauthbearer.expected.issuer = null
    sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
    sasl.oauthbearer.jwks.endpoint.url = null
    sasl.oauthbearer.scope.claim.name = scope
    sasl.oauthbearer.sub.claim.name = sub
    sasl.oauthbearer.token.endpoint.url = null
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    socket.connection.setup.timeout.max.ms = 30000
    socket.connection.setup.timeout.ms = 10000
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
    ssl.endpoint.identification.algorithm = https
    ssl.engine.factory.class = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.certificate.chain = null
    ssl.keystore.key = null
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.3
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.certificates = null
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@465ea531, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceHandler}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceHandler] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandlerOperations] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandlerOperations] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandlerOperations] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceHandler] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding process topic: RendererListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id RendererListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@465ea531, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=RendererListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
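Every producer in this log fails to reach its bootstrap broker: node id -1 is the placeholder Kafka assigns to a bootstrap address before real broker metadata arrives, and nothing that speaks the Kafka protocol answers on localhost:8080 (a Kafka broker listens on 9092 by default), which suggests bootstrap.servers is misconfigured here. Below is a minimal reachability probe using the Kafka Admin client; the localhost:9092 address and the timeouts are assumptions for illustration, not values from this log.

import java.util.Properties;
import java.util.concurrent.TimeUnit;

import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.AdminClientConfig;

public class BrokerProbe {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Assumed broker address: Kafka's default listener port, not the 8080 seen in the log.
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "5000");
        try (Admin admin = Admin.create(props)) {
            // describeCluster() only completes if a broker actually answers at bootstrap.servers;
            // against the unreachable endpoint above it would time out instead.
            String clusterId = admin.describeCluster().clusterId().get(10, TimeUnit.SECONDS);
            System.out.println("Reached Kafka cluster " + clusterId);
        }
    }
}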
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values:
    acks = -1
    auto.include.jmx.reporter = true
    batch.size = 16384
    bootstrap.servers = [localhost:8080]
    buffer.memory = 33554432
    client.dns.lookup = use_all_dns_ips
    client.id = RendererListener
    compression.type = none
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    enable.metrics.push = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 1
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 1
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.adaptive.partitioning.enable = true
    partitioner.availability.timeout.ms = 0
    partitioner.class = null
    partitioner.ignore.keys = false
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 600000
    reconnect.backoff.ms = 600000
    request.timeout.ms = 30000
    retries = 3
    retry.backoff.max.ms = 1000
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.connect.timeout.ms = null
    sasl.login.read.timeout.ms = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.login.retry.backoff.max.ms = 10000
    sasl.login.retry.backoff.ms = 100
    sasl.mechanism = GSSAPI
    sasl.oauthbearer.clock.skew.seconds = 30
    sasl.oauthbearer.expected.audience = null
    sasl.oauthbearer.expected.issuer = null
    sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
    sasl.oauthbearer.jwks.endpoint.url = null
    sasl.oauthbearer.scope.claim.name = scope
    sasl.oauthbearer.sub.claim.name = sub
    sasl.oauthbearer.token.endpoint.url = null
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    socket.connection.setup.timeout.max.ms = 30000
    socket.connection.setup.timeout.ms = 10000
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
    ssl.endpoint.identification.algorithm = https
    ssl.engine.factory.class = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.certificate.chain = null
    ssl.keystore.key = null
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.3
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.certificates = null
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@465ea531, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=RendererListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=RendererListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | ServiceHandler] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Node -1 disconnected.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceHandler] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceHandler] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [RendererListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - Creating publisher for the following class ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.utils.TopicManager - Adding alarm topic: ServiceListener
[main] INFO org.opendaylight.transportpce.nbinotifications.producer.Publisher - Creation publisher for id ServiceListener with properties {acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4d161f2e, bootstrap.servers=localhost:8080, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, max.in.flight.requests.per.connection=1, linger.ms=1, client.id=ServiceListener}
[main] WARN org.apache.kafka.clients.CommonClientConfigs - Disabling exponential reconnect backoff because reconnect.backoff.ms is set, but reconnect.backoff.max.ms is not.
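Each publisher logs "Instantiated an idempotent producer" because acks=all (printed in its internal form, -1) satisfies the prerequisite for Kafka's default enable.idempotence=true; combined with max.in.flight.requests.per.connection=1 and retries=3, retried sends can neither duplicate nor reorder records. Below is a minimal sketch reproducing these logged settings, assuming a reachable broker at localhost:9092 and using StringSerializer as a stand-in for TransportPCE's NotificationServiceSerializer.

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class IdempotentPublisherSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
        props.put(ProducerConfig.CLIENT_ID_CONFIG, "RendererListener");
        props.put(ProducerConfig.ACKS_CONFIG, "all");                  // logged as acks = -1; required for idempotence
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);     // default in recent Kafka clients
        props.put(ProducerConfig.RETRIES_CONFIG, 3);
        props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1); // retries cannot reorder records
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // StringSerializer stands in for the NotificationServiceSerializer used in the log.
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // The topic name mirrors the "process topic" registered for this publisher above.
            producer.send(new ProducerRecord<>("RendererListener", "service-id", "notification-payload"));
        }
    }
}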
[main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values:
    acks = -1
    auto.include.jmx.reporter = true
    batch.size = 16384
    bootstrap.servers = [localhost:8080]
    buffer.memory = 33554432
    client.dns.lookup = use_all_dns_ips
    client.id = ServiceListener
    compression.type = none
    connections.max.idle.ms = 540000
    delivery.timeout.ms = 120000
    enable.idempotence = true
    enable.metrics.push = true
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.StringSerializer
    linger.ms = 1
    max.block.ms = 60000
    max.in.flight.requests.per.connection = 1
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metadata.max.idle.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.adaptive.partitioning.enable = true
    partitioner.availability.timeout.ms = 0
    partitioner.class = null
    partitioner.ignore.keys = false
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 600000
    reconnect.backoff.ms = 600000
    request.timeout.ms = 30000
    retries = 3
    retry.backoff.max.ms = 1000
    retry.backoff.ms = 100
    sasl.client.callback.handler.class = null
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.login.callback.handler.class = null
    sasl.login.class = null
    sasl.login.connect.timeout.ms = null
    sasl.login.read.timeout.ms = null
    sasl.login.refresh.buffer.seconds = 300
    sasl.login.refresh.min.period.seconds = 60
    sasl.login.refresh.window.factor = 0.8
    sasl.login.refresh.window.jitter = 0.05
    sasl.login.retry.backoff.max.ms = 10000
    sasl.login.retry.backoff.ms = 100
    sasl.mechanism = GSSAPI
    sasl.oauthbearer.clock.skew.seconds = 30
    sasl.oauthbearer.expected.audience = null
    sasl.oauthbearer.expected.issuer = null
    sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
    sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
    sasl.oauthbearer.jwks.endpoint.url = null
    sasl.oauthbearer.scope.claim.name = scope
    sasl.oauthbearer.sub.claim.name = sub
    sasl.oauthbearer.token.endpoint.url = null
    security.protocol = PLAINTEXT
    security.providers = null
    send.buffer.bytes = 131072
    socket.connection.setup.timeout.max.ms = 30000
    socket.connection.setup.timeout.ms = 10000
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
    ssl.endpoint.identification.algorithm = https
    ssl.engine.factory.class = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.certificate.chain = null
    ssl.keystore.key = null
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLSv1.3
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.certificates = null
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer
[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer - Deserializer configuration {retries=3, value.serializer=class org.opendaylight.transportpce.nbinotifications.serialization.NotificationAlarmServiceSerializer, acks=all, batch.size=16384, reconnect.backoff.ms=600000, converter=org.opendaylight.transportpce.common.converter.JsonStringConverter@4d161f2e, bootstrap.servers=localhost:8080, max.in.flight.requests.per.connection=1, buffer.memory=33554432, key.serializer=class org.apache.kafka.common.serialization.StringSerializer, linger.ms=1, client.id=ServiceListener}
[main] INFO org.apache.kafka.clients.producer.KafkaProducer - [Producer clientId=ServiceListener] Instantiated an idempotent producer.
[kafka-producer-network-thread | RendererListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Node -1 disconnected.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | RendererListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=RendererListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
[main] INFO org.apache.kafka.common.utils.AppInfoParser - The mbean of App info: [kafka.producer], id: [ServiceListener] already exists, so skipping a new mbean creation.
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - tapi converter: org.opendaylight.transportpce.common.converter.JsonStringConverter@6455d4f3
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.NbiNotificationsProvider - NbiNotificationsProvider Session Initiated
[main] INFO org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationsProcessServiceImpl - RPC getNotificationsService received
[main] WARN org.opendaylight.transportpce.nbinotifications.impl.rpc.GetNotificationsProcessServiceImpl - Missing mandatory params for input GetNotificationsProcessServiceInput{}
[kafka-producer-network-thread | ServiceListener] INFO org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Node -1 disconnected.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Connection to node -1 (localhost/127.0.0.1:8080) could not be established. Node may not be available.
[kafka-producer-network-thread | ServiceListener] WARN org.apache.kafka.clients.NetworkClient - [Producer clientId=ServiceListener] Bootstrap broker localhost:8080 (id: -1 rack: null) disconnected
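The WARN from GetNotificationsProcessServiceImpl shows the getNotificationsService RPC being rejected because its input carries no fields (GetNotificationsProcessServiceInput{} prints empty). Below is a minimal sketch of the guard pattern that produces such a warning, using a hypothetical stand-in for the YANG-generated input type; the interface and its getter names are assumptions for illustration and may differ from the real generated bindings.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class InputGuardSketch {

    private static final Logger LOG = LoggerFactory.getLogger(InputGuardSketch.class);

    // Hypothetical stand-in for the YANG-generated input binding; the real
    // GetNotificationsProcessServiceInput comes from the MD-SAL binding
    // generator, and its mandatory leaves and getter names may differ.
    interface GetNotificationsProcessServiceInput {
        String getIdConsumer();
        String getGroupId();
    }

    static boolean hasMandatoryParams(GetNotificationsProcessServiceInput input) {
        if (input == null || input.getIdConsumer() == null || input.getGroupId() == null) {
            // Mirrors the shape of the WARN seen in the log above.
            LOG.warn("Missing mandatory params for input {}", input);
            return false;
        }
        return true;
    }
}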