2020-06-21 07:17:52,808 INFO || Successfully tested connection for jdbc:mysql://mysql:3306/?useInformationSchema=true&nullCatalogMeansCurrent=false&useSSL=false&useUnicode=true&characterEncoding=UTF-8&characterSetResults=UTF-8&zeroDateTimeBehavior=CONVERT_TO_NULL&connectTimeout=30000 with user 'binlog' [io.debezium.connector.mysql.MySqlConnector]
2020-06-21 07:17:52,809 INFO || AbstractConfig values: [org.apache.kafka.common.config.AbstractConfig]
2020-06-21 07:17:52,816 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Connector app_inventory_debezium config updated [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:52,816 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Rebalance started [org.apache.kafka.connect.runtime.distributed.WorkerCoordinator]
2020-06-21 07:17:52,816 INFO || [Worker clientId=connect-1, groupId=connect-cluster] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:52,822 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Successfully joined group with generation 10 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:52,822 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Joined group at generation 10 and got assignment: Assignment{error=0, leader='connect-1-344f9e1e-c0d4-4345-850c-0f09233907b6', leaderUrl='http://192.168.144.8:8083/', offset=11, connectorIds=[app_inventory_debezium], taskIds=[app_inventory_debezium-0], revokedConnectorIds=[], revokedTaskIds=[], delay=0} [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:52,825 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Starting connectors and tasks using config offset 11 [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:52,825 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Starting connector app_inventory_debezium [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:52,826 INFO || ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = app_inventory_debezium tasks.max = 1 transforms = [] value.converter = class io.confluent.connect.avro.AvroConverter [org.apache.kafka.connect.runtime.ConnectorConfig]
2020-06-21 07:17:52,826 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = app_inventory_debezium tasks.max = 1 transforms = [] value.converter = class io.confluent.connect.avro.AvroConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig]
2020-06-21 07:17:52,826 INFO || Creating connector app_inventory_debezium of type io.debezium.connector.mysql.MySqlConnector [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:52,827 INFO || Instantiated connector app_inventory_debezium with version 1.2.0.CR1 of type class io.debezium.connector.mysql.MySqlConnector [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:52,827 INFO || Finished creating connector app_inventory_debezium [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:52,827 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = app_inventory_debezium tasks.max = 1 transforms = [] value.converter = class io.confluent.connect.avro.AvroConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig]
2020-06-21 07:17:52,828 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = app_inventory_debezium tasks.max = 1 transforms = [] value.converter = class io.confluent.connect.avro.AvroConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig]
2020-06-21 07:17:52,829 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Starting task app_inventory_debezium-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:52,831 INFO || Creating task app_inventory_debezium-0 [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:52,833 INFO || ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = app_inventory_debezium tasks.max = 1 transforms = [] value.converter = class io.confluent.connect.avro.AvroConverter [org.apache.kafka.connect.runtime.ConnectorConfig]
2020-06-21 07:17:52,834 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = app_inventory_debezium tasks.max = 1 transforms = [] value.converter = class io.confluent.connect.avro.AvroConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig]
2020-06-21 07:17:52,835 INFO || TaskConfig values: task.class = class io.debezium.connector.mysql.MySqlConnectorTask [org.apache.kafka.connect.runtime.TaskConfig]
2020-06-21 07:17:52,835 INFO || Instantiated task app_inventory_debezium-0 with version 1.2.0.CR1 of type io.debezium.connector.mysql.MySqlConnectorTask [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:52,835 INFO || AvroConverterConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.connect.avro.AvroConverterConfig]
2020-06-21 07:17:52,838 INFO || KafkaAvroSerializerConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.kafka.serializers.KafkaAvroSerializerConfig]
2020-06-21 07:17:52,840 INFO || KafkaAvroDeserializerConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] specific.avro.reader = false value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.kafka.serializers.KafkaAvroDeserializerConfig]
2020-06-21 07:17:52,841 INFO || AvroDataConfig values: schemas.cache.config = 1000 enhanced.avro.schema.support = false connect.meta.data = true [io.confluent.connect.avro.AvroDataConfig]
2020-06-21 07:17:52,842 INFO || AvroConverterConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.connect.avro.AvroConverterConfig]
2020-06-21 07:17:52,843 INFO || KafkaAvroSerializerConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.kafka.serializers.KafkaAvroSerializerConfig]
2020-06-21 07:17:52,844 INFO || KafkaAvroDeserializerConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] specific.avro.reader = false value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.kafka.serializers.KafkaAvroDeserializerConfig]
2020-06-21 07:17:52,844 INFO || AvroDataConfig values: schemas.cache.config = 1000 enhanced.avro.schema.support = false connect.meta.data = true [io.confluent.connect.avro.AvroDataConfig]
2020-06-21 07:17:52,845 INFO || Set up the key converter class io.confluent.connect.avro.AvroConverter for task app_inventory_debezium-0 using the connector config [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:52,845 INFO || Set up the value converter class io.confluent.connect.avro.AvroConverter for task app_inventory_debezium-0 using the connector config [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:52,846 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task app_inventory_debezium-0 using the worker config [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:52,847 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{} [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:52,848 INFO || ProducerConfig values: acks = all batch.size = 16384 bootstrap.servers = [kafka:9092] buffer.memory = 33554432 client.dns.lookup = default client.id = connector-producer-app_inventory_debezium-0 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig]
2020-06-21 07:17:52,854 INFO || Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:52,855 INFO || Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:52,855 INFO || Kafka startTimeMs: 1592723872854 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:52,861 INFO || Starting MySqlConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,866 INFO || connector.class = io.debezium.connector.mysql.MySqlConnector [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,867 INFO || max.queue.size = 327680 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,867 INFO || database.history.kafka.topic = dbhistory.app_inventory [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,867 INFO || database.history.connector.id = app_inventory [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,867 INFO || include.schema.changes = true [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,867 INFO || table.whitelist = inventory.customers,inventory.products [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,867 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,867 INFO || value.converter = io.confluent.connect.avro.AvroConverter [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,868 INFO || database.whitelist = inventory [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,868 INFO || key.converter = io.confluent.connect.avro.AvroConverter [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,868 INFO || database.user = binlog [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,868 INFO || database.history.kafka.bootstrap.servers = kafka:19092 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,868 INFO || database.server.name = app_inventory [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,868 INFO || database.port = 3306 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,868 INFO || column.propagate.source.type = .* [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,868 INFO || value.converter.schema.registry.url = http://schema-registry:8081 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,868 INFO || task.class = io.debezium.connector.mysql.MySqlConnectorTask [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,868 INFO || database.history.connector.class = io.debezium.connector.mysql.MySqlConnector [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,868 INFO || database.hostname = mysql [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,869 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,869 INFO || name = app_inventory_debezium [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,869 INFO || database.history.store.only.monitored.tables.ddl = true [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,869 INFO || max.batch.size = 81920 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,869 INFO || include.query = true [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,869 INFO || key.converter.schema.registry.url = http://schema-registry:8081 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,870 INFO || snapshot.mode = schema_only_recovery [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:52,955 INFO || [Producer clientId=connector-producer-app_inventory_debezium-0] Cluster ID: tUD2MFDgRyKUjhUdUhXWGQ [org.apache.kafka.clients.Metadata]
2020-06-21 07:17:53,140 INFO MySQL|app_inventory|task KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=app_inventory_debezium-dbhistory, bootstrap.servers=kafka:19092, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=app_inventory_debezium-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory]
2020-06-21 07:17:53,140 INFO MySQL|app_inventory|task KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:19092, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=app_inventory_debezium-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory]
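The property list that BaseSourceTask logs above is the connector configuration the worker received. As a sketch, the same configuration can be reassembled into the JSON payload used with the standard Kafka Connect REST call PUT /connectors/<name>/config; the worker address below is taken from the leaderUrl in the rebalance log, and the password is a placeholder because the log masks it.

```python
# Sketch: re-submit the connector configuration logged above via the
# Kafka Connect REST API. Assumptions: the worker REST endpoint is the
# leaderUrl from the group assignment, and the real password replaces
# the placeholder below.
import json
import urllib.request

config = {
    "connector.class": "io.debezium.connector.mysql.MySqlConnector",
    "database.hostname": "mysql",
    "database.port": "3306",
    "database.user": "binlog",
    "database.password": "<masked in the log>",  # placeholder, not from the log
    "database.server.name": "app_inventory",
    "database.whitelist": "inventory",
    "table.whitelist": "inventory.customers,inventory.products",
    "database.history.kafka.bootstrap.servers": "kafka:19092",
    "database.history.kafka.topic": "dbhistory.app_inventory",
    "database.history.store.only.monitored.tables.ddl": "true",
    "snapshot.mode": "schema_only_recovery",
    "decimal.handling.mode": "string",
    "include.schema.changes": "true",
    "include.query": "true",
    "column.propagate.source.type": ".*",
    "max.batch.size": "81920",
    "max.queue.size": "327680",
    "key.converter": "io.confluent.connect.avro.AvroConverter",
    "key.converter.schema.registry.url": "http://schema-registry:8081",
    "value.converter": "io.confluent.connect.avro.AvroConverter",
    "value.converter.schema.registry.url": "http://schema-registry:8081",
}

req = urllib.request.Request(
    "http://192.168.144.8:8083/connectors/app_inventory_debezium/config",
    data=json.dumps(config).encode("utf-8"),
    headers={"Content-Type": "application/json"},
    method="PUT",  # PUT creates the connector or updates its config in place
)
with urllib.request.urlopen(req) as resp:
    print(resp.status)
```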
2020-06-21 07:17:53,140 INFO MySQL|app_inventory|task Requested thread factory for connector MySqlConnector, id = app_inventory named = db-history-config-check [io.debezium.util.Threads]
2020-06-21 07:17:53,142 INFO MySQL|app_inventory|task ProducerConfig values: acks = 1 batch.size = 32768 bootstrap.servers = [kafka:19092] buffer.memory = 1048576 client.dns.lookup = default client.id = app_inventory_debezium-dbhistory compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer [org.apache.kafka.clients.producer.ProducerConfig]
2020-06-21 07:17:53,146 INFO MySQL|app_inventory|task Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,146 INFO MySQL|app_inventory|task Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,149 INFO MySQL|app_inventory|task Kafka startTimeMs: 1592723873146 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,150 INFO MySQL|app_inventory|task Found existing offset: {ts_sec=1592723528, file=mysql-bin.000003, pos=562, gtids=d3a42176-b38c-11ea-892d-0242c0a89003:1-25, row=1, server_id=112233, event=2} [io.debezium.connector.mysql.MySqlConnectorTask]
2020-06-21 07:17:53,151 INFO MySQL|app_inventory|task ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:19092] check.crcs = true client.dns.lookup = default client.id = app_inventory_debezium-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = app_inventory_debezium-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig]
2020-06-21 07:17:53,155 INFO MySQL|app_inventory|task Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,155 INFO MySQL|app_inventory|task Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,156 INFO MySQL|app_inventory|task Kafka startTimeMs: 1592723873155 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,161 INFO MySQL|app_inventory|task ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:19092] check.crcs = true client.dns.lookup = default client.id = app_inventory_debezium-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = app_inventory_debezium-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig]
2020-06-21 07:17:53,167 INFO MySQL|app_inventory|task Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,167 INFO MySQL|app_inventory|task Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,167 INFO MySQL|app_inventory|task Kafka startTimeMs: 1592723873167 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,167 INFO MySQL|app_inventory|task Creating thread debezium-mysqlconnector-app_inventory-db-history-config-check [io.debezium.util.Threads]
2020-06-21 07:17:53,168 INFO MySQL|app_inventory|task AdminClientConfig values: bootstrap.servers = [kafka:19092] client.dns.lookup = default client.id = app_inventory_debezium-dbhistory connections.max.idle.ms = 300000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 120000 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,174 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Cluster ID: tUD2MFDgRyKUjhUdUhXWGQ [org.apache.kafka.clients.Metadata]
2020-06-21 07:17:53,174 WARN MySQL|app_inventory|task The configuration 'value.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,181 WARN MySQL|app_inventory|task The configuration 'acks' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,181 WARN MySQL|app_inventory|task The configuration 'batch.size' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,181 WARN MySQL|app_inventory|task The configuration 'max.block.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,181 WARN MySQL|app_inventory|task The configuration 'buffer.memory' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,181 WARN MySQL|app_inventory|task The configuration 'key.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,181 WARN MySQL|app_inventory|task The configuration 'linger.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,182 INFO MySQL|app_inventory|task Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,182 INFO MySQL|app_inventory|task Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,182 INFO MySQL|app_inventory|task Kafka startTimeMs: 1592723873181 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,193 INFO MySQL|app_inventory|task Started database history recovery [io.debezium.relational.history.DatabaseHistoryMetrics]
2020-06-21 07:17:53,194 INFO MySQL|app_inventory|task ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:19092] check.crcs = true client.dns.lookup = default client.id = app_inventory_debezium-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = app_inventory_debezium-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig]
2020-06-21 07:17:53,198 INFO MySQL|app_inventory|task Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,198 INFO MySQL|app_inventory|task Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,198 INFO MySQL|app_inventory|task Kafka startTimeMs: 1592723873198 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,198 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Subscribed to topic(s): dbhistory.app_inventory [org.apache.kafka.clients.consumer.KafkaConsumer]
2020-06-21 07:17:53,203 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Cluster ID: tUD2MFDgRyKUjhUdUhXWGQ [org.apache.kafka.clients.Metadata]
2020-06-21 07:17:53,205 INFO MySQL|app_inventory|task Database history topic 'dbhistory.app_inventory' has correct settings [io.debezium.relational.history.KafkaDatabaseHistory]
2020-06-21 07:17:53,210 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Discovered group coordinator kafka:19092 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,211 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Revoking previously assigned partitions [] [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator]
2020-06-21 07:17:53,212 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,215 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,220 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Successfully joined group with generation 3 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,220 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Setting newly assigned partitions: dbhistory.app_inventory-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator]
2020-06-21 07:17:53,223 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Found no committed offset for partition dbhistory.app_inventory-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator]
2020-06-21 07:17:53,224 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Resetting offset for partition dbhistory.app_inventory-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState]
2020-06-21 07:17:53,240 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Member app_inventory_debezium-dbhistory-7b9a7ab1-ca7e-4967-8eff-bf1be4aa058b sending LeaveGroup request to coordinator kafka:19092 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,247 INFO MySQL|app_inventory|task Finished database history recovery of 5 change(s) in 54 ms [io.debezium.relational.history.DatabaseHistoryMetrics]
2020-06-21 07:17:53,250 INFO MySQL|app_inventory|task MySQL current GTID set d3a42176-b38c-11ea-892d-0242c0a89003:1-27 does contain the GTID set required by the connector d3a42176-b38c-11ea-892d-0242c0a89003:1-25 [io.debezium.connector.mysql.MySqlConnectorTask]
2020-06-21 07:17:53,253 INFO MySQL|app_inventory|task [Producer clientId=app_inventory_debezium-dbhistory] Cluster ID: tUD2MFDgRyKUjhUdUhXWGQ [org.apache.kafka.clients.Metadata]
2020-06-21 07:17:53,254 INFO MySQL|app_inventory|task GTIDs known by the server but not processed yet d3a42176-b38c-11ea-892d-0242c0a89003:26-27, for replication are available only d3a42176-b38c-11ea-892d-0242c0a89003:26-27 [io.debezium.connector.mysql.MySqlConnectorTask]
2020-06-21 07:17:53,256 INFO MySQL|app_inventory|task Requested thread factory for connector MySqlConnector, id = app_inventory named = binlog-client [io.debezium.util.Threads]
2020-06-21 07:17:53,258 INFO MySQL|app_inventory|task GTID set purged on server: [io.debezium.connector.mysql.BinlogReader]
2020-06-21 07:17:53,258 INFO MySQL|app_inventory|task Attempting to generate a filtered GTID set [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,259 INFO MySQL|app_inventory|task GTID set from previous recorded offset: d3a42176-b38c-11ea-892d-0242c0a89003:1-25 [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,259 INFO MySQL|app_inventory|task GTID set available on server: d3a42176-b38c-11ea-892d-0242c0a89003:1-27 [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,259 INFO MySQL|app_inventory|task Using first available positions for new GTID channels [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,259 INFO MySQL|app_inventory|task Relevant GTID set available on server: d3a42176-b38c-11ea-892d-0242c0a89003:1-27 [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,259 INFO MySQL|app_inventory|task Final merged GTID set to use when connecting to MySQL: d3a42176-b38c-11ea-892d-0242c0a89003:1-25 [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,260 INFO MySQL|app_inventory|task Registering binlog reader with GTID set: d3a42176-b38c-11ea-892d-0242c0a89003:1-25 [io.debezium.connector.mysql.BinlogReader]
2020-06-21 07:17:53,260 INFO MySQL|app_inventory|task Creating thread debezium-mysqlconnector-app_inventory-binlog-client [io.debezium.util.Threads]
2020-06-21 07:17:53,261 INFO MySQL|app_inventory|task Creating thread debezium-mysqlconnector-app_inventory-binlog-client [io.debezium.util.Threads]
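The GTID lines above encode a containment check: the server's executed set d3a42176-…:1-27 must cover the connector's saved set d3a42176-…:1-25, and the remainder :26-27 is what the binlog reader still has to stream. Below is a minimal sketch of that check, under the simplifying assumption of a single source UUID with a single interval (real MySQL GTID sets can carry many UUIDs and interval lists, which Debezium handles).

```python
# Minimal sketch of the GTID containment/difference check logged above.
# Assumption: one source UUID with one closed interval "uuid:lo-hi";
# real GTID sets may contain many UUIDs and multiple intervals each.
def parse(gtid_set: str):
    uuid, interval = gtid_set.rsplit(":", 1)
    lo, hi = (int(n) for n in interval.split("-"))
    return uuid, lo, hi

def contains(server_set: str, saved_set: str) -> bool:
    s_uuid, s_lo, s_hi = parse(server_set)
    c_uuid, c_lo, c_hi = parse(saved_set)
    return s_uuid == c_uuid and s_lo <= c_lo and c_hi <= s_hi

server = "d3a42176-b38c-11ea-892d-0242c0a89003:1-27"
saved = "d3a42176-b38c-11ea-892d-0242c0a89003:1-25"
assert contains(server, saved)  # "does contain the GTID set required"

_, _, saved_hi = parse(saved)
_, _, server_hi = parse(server)
# transactions 26-27 are "known by the server but not processed yet"
print(f"still to stream: {saved_hi + 1}-{server_hi}")
```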
2020-06-21 07:17:53,328 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Tasks [app_inventory_snowflake-0, app_inventory_debezium-0] configs updated [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:53,329 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Finished starting connectors and tasks [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:53,329 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Handling task config update by restarting tasks [app_inventory_debezium-0] [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:53,329 INFO || Stopping task app_inventory_debezium-0 [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:53,390 INFO MySQL|app_inventory|binlog Connected to MySQL binlog at mysql:3306, starting at GTIDs d3a42176-b38c-11ea-892d-0242c0a89003:1-25 and binlog file 'mysql-bin.000003', pos=562, skipping 2 events plus 1 rows [io.debezium.connector.mysql.BinlogReader]
2020-06-21 07:17:53,390 INFO MySQL|app_inventory|binlog Creating thread debezium-mysqlconnector-app_inventory-binlog-client [io.debezium.util.Threads]
2020-06-21 07:17:53,390 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:17:53,390 INFO || Stopping down connector [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,390 INFO MySQL|app_inventory|task Stopping MySQL connector task [io.debezium.connector.mysql.MySqlConnectorTask]
2020-06-21 07:17:53,391 INFO MySQL|app_inventory|task ChainedReader: Stopping the binlog reader [io.debezium.connector.mysql.ChainedReader]
2020-06-21 07:17:53,391 INFO MySQL|app_inventory|task Discarding 0 unsent record(s) due to the connector shutting down [io.debezium.connector.mysql.BinlogReader]
2020-06-21 07:17:53,393 INFO MySQL|app_inventory|binlog Stopped reading binlog after 0 events, no new offset was recorded [io.debezium.connector.mysql.BinlogReader]
2020-06-21 07:17:53,393 INFO MySQL|app_inventory|task Discarding 0 unsent record(s) due to the connector shutting down [io.debezium.connector.mysql.BinlogReader]
2020-06-21 07:17:53,393 INFO MySQL|app_inventory|task [Producer clientId=app_inventory_debezium-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer]
2020-06-21 07:17:53,396 INFO MySQL|app_inventory|task Connector task finished all work and is now shutdown [io.debezium.connector.mysql.MySqlConnectorTask]
2020-06-21 07:17:53,396 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:17:53,397 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:17:53,397 INFO || [Producer clientId=connector-producer-app_inventory_debezium-0] Closing the Kafka producer with timeoutMillis = 30000 ms. [org.apache.kafka.clients.producer.KafkaProducer]
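At this point the worker has stopped the just-started task in order to apply the updated task config, and it is about to rejoin the group. A quick way to confirm the task came back after such a restart is the standard Connect REST status endpoint; a sketch follows, where the worker address is the leaderUrl from the group-assignment log line (adjust for your environment).

```python
# Sketch: poll connector/task state after the restart via the Kafka Connect
# REST API (GET /connectors/<name>/status). The worker address is taken
# from the leaderUrl logged in the group assignment above.
import json
import urllib.request

CONNECT_URL = "http://192.168.144.8:8083"

def connector_status(name: str) -> dict:
    with urllib.request.urlopen(f"{CONNECT_URL}/connectors/{name}/status") as resp:
        return json.load(resp)

status = connector_status("app_inventory_debezium")
print(status["connector"]["state"])       # e.g. RUNNING
for task in status["tasks"]:
    print(task["id"], task["state"])      # task 0 should be RUNNING again
```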
2020-06-21 07:17:53,400 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Rebalance started [org.apache.kafka.connect.runtime.distributed.WorkerCoordinator]
2020-06-21 07:17:53,400 INFO || [Worker clientId=connect-1, groupId=connect-cluster] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,406 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Successfully joined group with generation 11 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,406 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Joined group at generation 11 and got assignment: Assignment{error=0, leader='connect-1-344f9e1e-c0d4-4345-850c-0f09233907b6', leaderUrl='http://192.168.144.8:8083/', offset=13, connectorIds=[app_inventory_debezium], taskIds=[app_inventory_debezium-0], revokedConnectorIds=[], revokedTaskIds=[], delay=0} [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:53,407 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Starting connectors and tasks using config offset 13 [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:53,407 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Starting task app_inventory_debezium-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:53,407 INFO || Creating task app_inventory_debezium-0 [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:53,408 INFO || ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = app_inventory_debezium tasks.max = 1 transforms = [] value.converter = class io.confluent.connect.avro.AvroConverter [org.apache.kafka.connect.runtime.ConnectorConfig]
2020-06-21 07:17:53,408 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = app_inventory_debezium tasks.max = 1 transforms = [] value.converter = class io.confluent.connect.avro.AvroConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig]
2020-06-21 07:17:53,409 INFO || TaskConfig values: task.class = class io.debezium.connector.mysql.MySqlConnectorTask [org.apache.kafka.connect.runtime.TaskConfig]
2020-06-21 07:17:53,409 INFO || Instantiated task app_inventory_debezium-0 with version 1.2.0.CR1 of type io.debezium.connector.mysql.MySqlConnectorTask [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:53,410 INFO || AvroConverterConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.connect.avro.AvroConverterConfig]
2020-06-21 07:17:53,410 INFO || KafkaAvroSerializerConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.kafka.serializers.KafkaAvroSerializerConfig]
2020-06-21 07:17:53,410 INFO || KafkaAvroDeserializerConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] specific.avro.reader = false value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.kafka.serializers.KafkaAvroDeserializerConfig]
2020-06-21 07:17:53,411 INFO || AvroDataConfig values: schemas.cache.config = 1000 enhanced.avro.schema.support = false connect.meta.data = true [io.confluent.connect.avro.AvroDataConfig]
2020-06-21 07:17:53,411 INFO || AvroConverterConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.connect.avro.AvroConverterConfig]
2020-06-21 07:17:53,411 INFO || KafkaAvroSerializerConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.kafka.serializers.KafkaAvroSerializerConfig]
2020-06-21 07:17:53,411 INFO || KafkaAvroDeserializerConfig values: schema.registry.url = [http://schema-registry:8081] basic.auth.user.info = [hidden] auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL schema.registry.basic.auth.user.info = [hidden] specific.avro.reader = false value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy [io.confluent.kafka.serializers.KafkaAvroDeserializerConfig]
2020-06-21 07:17:53,412 INFO || AvroDataConfig values: schemas.cache.config = 1000 enhanced.avro.schema.support = false connect.meta.data = true [io.confluent.connect.avro.AvroDataConfig]
2020-06-21 07:17:53,412 INFO || Set up the key converter class io.confluent.connect.avro.AvroConverter for task app_inventory_debezium-0 using the connector config [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:53,412 INFO || Set up the value converter class io.confluent.connect.avro.AvroConverter for task app_inventory_debezium-0 using the connector config [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:53,413 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task app_inventory_debezium-0 using the worker config [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:53,414 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{} [org.apache.kafka.connect.runtime.Worker]
2020-06-21 07:17:53,414 INFO || ProducerConfig values: acks = all batch.size = 16384 bootstrap.servers = [kafka:9092] buffer.memory = 33554432 client.dns.lookup = default client.id = connector-producer-app_inventory_debezium-0 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig]
2020-06-21 07:17:53,421 INFO || Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,421 INFO || Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,421 INFO || Kafka startTimeMs: 1592723873421 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,423 INFO || [Worker clientId=connect-1, groupId=connect-cluster] Finished starting connectors and tasks [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2020-06-21 07:17:53,425 INFO || Starting MySqlConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,425 INFO || connector.class = io.debezium.connector.mysql.MySqlConnector [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,426 INFO || max.queue.size = 327680 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,428 INFO || database.history.kafka.topic = dbhistory.app_inventory [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,429 INFO || database.history.connector.id = app_inventory [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,429 INFO || include.schema.changes = true [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,429 INFO || table.whitelist = inventory.customers [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,429 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,429 INFO || value.converter = io.confluent.connect.avro.AvroConverter [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,429 INFO || database.whitelist = inventory [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,429 INFO || key.converter = io.confluent.connect.avro.AvroConverter [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,430 INFO || database.user = binlog [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,430 INFO || database.history.kafka.bootstrap.servers = kafka:19092 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,430 INFO || database.server.name = app_inventory [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,430 INFO || database.port = 3306 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,430 INFO || column.propagate.source.type = .* [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,430 INFO || value.converter.schema.registry.url = http://schema-registry:8081 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,430 INFO || task.class = io.debezium.connector.mysql.MySqlConnectorTask [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,430 INFO || database.history.connector.class = io.debezium.connector.mysql.MySqlConnector [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,430 INFO || database.hostname = mysql [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,431 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,431 INFO || name = app_inventory_debezium [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,431 INFO || database.history.store.only.monitored.tables.ddl = true [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,431 INFO || max.batch.size = 81920 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,431 INFO || include.query = true [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,431 INFO || key.converter.schema.registry.url = http://schema-registry:8081 [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,431 INFO || snapshot.mode = schema_only_recovery [io.debezium.connector.common.BaseSourceTask]
2020-06-21 07:17:53,524 INFO || [Producer clientId=connector-producer-app_inventory_debezium-0] Cluster ID: tUD2MFDgRyKUjhUdUhXWGQ [org.apache.kafka.clients.Metadata]
2020-06-21 07:17:53,645 INFO MySQL|app_inventory|task KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=app_inventory_debezium-dbhistory, bootstrap.servers=kafka:19092, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=app_inventory_debezium-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory]
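Note that the restarted task now logs table.whitelist = inventory.customers only, evidently the config change behind the update. The schema history topic it recovers from can be inspected with any Kafka consumer; below is a sketch using the kafka-python client (an assumption, any client works), mirroring the consumer settings logged above.

```python
# Sketch: replay the database history topic the connector recovers from.
# Assumptions: the kafka-python package is installed and kafka:19092 is
# reachable from where this runs; settings mirror the logged consumer config.
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    "dbhistory.app_inventory",
    bootstrap_servers="kafka:19092",
    auto_offset_reset="earliest",    # read the topic from the beginning
    enable_auto_commit=False,        # same as the logged consumer config
    consumer_timeout_ms=5000,        # stop iterating once caught up
    value_deserializer=lambda raw: raw.decode("utf-8"),
)
for record in consumer:
    print(record.value)  # one JSON document per recorded DDL change
consumer.close()
```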
KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:19092, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=app_inventory_debezium-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-06-21 07:17:53,646 INFO MySQL|app_inventory|task Requested thread factory for connector MySqlConnector, id = app_inventory named = db-history-config-check [io.debezium.util.Threads] 2020-06-21 07:17:53,647 INFO MySQL|app_inventory|task ProducerConfig values: acks = 1 batch.size = 32768 bootstrap.servers = [kafka:19092] buffer.memory = 1048576 client.dns.lookup = default client.id = app_inventory_debezium-dbhistory compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-06-21 07:17:53,651 INFO MySQL|app_inventory|task Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-06-21 07:17:53,651 INFO MySQL|app_inventory|task Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser] 2020-06-21 07:17:53,651 INFO MySQL|app_inventory|task Kafka startTimeMs: 1592723873651 [org.apache.kafka.common.utils.AppInfoParser] 2020-06-21 07:17:53,651 INFO MySQL|app_inventory|task Found existing offset: {ts_sec=1592723528, file=mysql-bin.000003, pos=562, gtids=d3a42176-b38c-11ea-892d-0242c0a89003:1-25, row=1, server_id=112233, event=2} [io.debezium.connector.mysql.MySqlConnectorTask] 2020-06-21 07:17:53,653 INFO MySQL|app_inventory|task ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 
2020-06-21 07:17:53,653 INFO MySQL|app_inventory|task ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:19092] check.crcs = true client.dns.lookup = default client.id = app_inventory_debezium-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = app_inventory_debezium-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig]
2020-06-21 07:17:53,658 INFO MySQL|app_inventory|task Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,658 INFO MySQL|app_inventory|task Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,658 INFO MySQL|app_inventory|task Kafka startTimeMs: 1592723873658 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,664 INFO MySQL|app_inventory|task ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:19092] check.crcs = true client.dns.lookup = default client.id = app_inventory_debezium-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = app_inventory_debezium-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig]
2020-06-21 07:17:53,667 INFO MySQL|app_inventory|task Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,667 INFO MySQL|app_inventory|task Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,667 INFO MySQL|app_inventory|task Kafka startTimeMs: 1592723873667 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,667 INFO MySQL|app_inventory|task Creating thread debezium-mysqlconnector-app_inventory-db-history-config-check [io.debezium.util.Threads]
2020-06-21 07:17:53,669 INFO MySQL|app_inventory|task AdminClientConfig values: bootstrap.servers = [kafka:19092] client.dns.lookup = default client.id = app_inventory_debezium-dbhistory connections.max.idle.ms = 300000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 120000 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,673 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Cluster ID: tUD2MFDgRyKUjhUdUhXWGQ [org.apache.kafka.clients.Metadata]
2020-06-21 07:17:53,674 WARN MySQL|app_inventory|task The configuration 'value.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,675 WARN MySQL|app_inventory|task The configuration 'acks' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,675 WARN MySQL|app_inventory|task The configuration 'batch.size' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,675 WARN MySQL|app_inventory|task The configuration 'max.block.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,676 WARN MySQL|app_inventory|task The configuration 'buffer.memory' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,676 WARN MySQL|app_inventory|task The configuration 'key.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,676 WARN MySQL|app_inventory|task The configuration 'linger.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig]
2020-06-21 07:17:53,676 INFO MySQL|app_inventory|task Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,677 INFO MySQL|app_inventory|task Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,677 INFO MySQL|app_inventory|task Kafka startTimeMs: 1592723873676 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,682 INFO MySQL|app_inventory|task Started database history recovery [io.debezium.relational.history.DatabaseHistoryMetrics]
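The WARN entries after the AdminClientConfig dump are benign: Debezium hands its producer settings to the admin client that validates the history topic, and the admin client simply ignores the producer-only keys. The topic-settings check logged shortly after verifies, among other things, that dbhistory.app_inventory has a single partition so DDL events replay in order. A standalone sketch of that partition check is below; the broker address matches the log, while the check itself is a simplification of what Debezium validates.

    // Sketch: confirm the database history topic has exactly one partition.
    import java.util.Collections;
    import java.util.Properties;
    import org.apache.kafka.clients.admin.AdminClient;
    import org.apache.kafka.clients.admin.AdminClientConfig;
    import org.apache.kafka.clients.admin.TopicDescription;

    public class CheckHistoryTopic {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:19092");
            try (AdminClient admin = AdminClient.create(props)) {
                TopicDescription description = admin
                        .describeTopics(Collections.singletonList("dbhistory.app_inventory"))
                        .all().get()
                        .get("dbhistory.app_inventory");
                int partitions = description.partitions().size();
                // A multi-partition history topic could reorder DDL on replay.
                System.out.println("partitions=" + partitions + ", single=" + (partitions == 1));
            }
        }
    }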
2020-06-21 07:17:53,683 INFO MySQL|app_inventory|task ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:19092] check.crcs = true client.dns.lookup = default client.id = app_inventory_debezium-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = app_inventory_debezium-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig]
2020-06-21 07:17:53,690 INFO MySQL|app_inventory|task Kafka version: 2.3.0 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,691 INFO MySQL|app_inventory|task Kafka commitId: fc1aaa116b661c8a [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,691 INFO MySQL|app_inventory|task Kafka startTimeMs: 1592723873690 [org.apache.kafka.common.utils.AppInfoParser]
2020-06-21 07:17:53,692 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Subscribed to topic(s): dbhistory.app_inventory [org.apache.kafka.clients.consumer.KafkaConsumer]
2020-06-21 07:17:53,697 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Cluster ID: tUD2MFDgRyKUjhUdUhXWGQ [org.apache.kafka.clients.Metadata]
2020-06-21 07:17:53,701 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Discovered group coordinator kafka:19092 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,703 INFO MySQL|app_inventory|task Database history topic 'dbhistory.app_inventory' has correct settings [io.debezium.relational.history.KafkaDatabaseHistory]
2020-06-21 07:17:53,703 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Revoking previously assigned partitions [] [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator]
2020-06-21 07:17:53,705 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,709 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,714 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Successfully joined group with generation 5 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,714 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Setting newly assigned partitions: dbhistory.app_inventory-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator]
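Recovery replays the history topic from the beginning; note the reset to offset 0 just below. To inspect the same records out of band, one could run a throwaway consumer like this sketch. The group id here is hypothetical, chosen so as not to disturb the connector's own app_inventory_debezium-dbhistory group.

    // Sketch: replay the schema history topic for inspection.
    import java.time.Duration;
    import java.util.Collections;
    import java.util.Properties;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    public class ReplayDbHistory {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:19092");
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "dbhistory-inspector"); // hypothetical group id
            props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");   // start at offset 0, as in the log
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Collections.singletonList("dbhistory.app_inventory"));
                // One poll suffices for the handful of records seen in this log;
                // a robust tool would loop until caught up.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.offset() + ": " + record.value());
                }
            }
        }
    }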
2020-06-21 07:17:53,718 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Found no committed offset for partition dbhistory.app_inventory-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator]
2020-06-21 07:17:53,725 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Resetting offset for partition dbhistory.app_inventory-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState]
2020-06-21 07:17:53,740 INFO MySQL|app_inventory|task [Consumer clientId=app_inventory_debezium-dbhistory, groupId=app_inventory_debezium-dbhistory] Member app_inventory_debezium-dbhistory-4f27e25e-0087-4c18-9adc-db06d6c01229 sending LeaveGroup request to coordinator kafka:19092 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator]
2020-06-21 07:17:53,746 INFO MySQL|app_inventory|task Finished database history recovery of 5 change(s) in 63 ms [io.debezium.relational.history.DatabaseHistoryMetrics]
2020-06-21 07:17:53,748 INFO MySQL|app_inventory|task MySQL current GTID set d3a42176-b38c-11ea-892d-0242c0a89003:1-27 does contain the GTID set required by the connector d3a42176-b38c-11ea-892d-0242c0a89003:1-25 [io.debezium.connector.mysql.MySqlConnectorTask]
2020-06-21 07:17:53,755 INFO MySQL|app_inventory|task [Producer clientId=app_inventory_debezium-dbhistory] Cluster ID: tUD2MFDgRyKUjhUdUhXWGQ [org.apache.kafka.clients.Metadata]
2020-06-21 07:17:53,755 INFO MySQL|app_inventory|task GTIDs known by the server but not processed yet d3a42176-b38c-11ea-892d-0242c0a89003:26-27, for replication are available only d3a42176-b38c-11ea-892d-0242c0a89003:26-27 [io.debezium.connector.mysql.MySqlConnectorTask]
2020-06-21 07:17:53,758 INFO MySQL|app_inventory|task Requested thread factory for connector MySqlConnector, id = app_inventory named = binlog-client [io.debezium.util.Threads]
2020-06-21 07:17:53,761 INFO MySQL|app_inventory|task GTID set purged on server: [io.debezium.connector.mysql.BinlogReader]
2020-06-21 07:17:53,761 INFO MySQL|app_inventory|task Attempting to generate a filtered GTID set [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,761 INFO MySQL|app_inventory|task GTID set from previous recorded offset: d3a42176-b38c-11ea-892d-0242c0a89003:1-25 [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,761 INFO MySQL|app_inventory|task GTID set available on server: d3a42176-b38c-11ea-892d-0242c0a89003:1-27 [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,761 INFO MySQL|app_inventory|task Using first available positions for new GTID channels [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,761 INFO MySQL|app_inventory|task Relevant GTID set available on server: d3a42176-b38c-11ea-892d-0242c0a89003:1-27 [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,761 INFO MySQL|app_inventory|task Final merged GTID set to use when connecting to MySQL: d3a42176-b38c-11ea-892d-0242c0a89003:1-25 [io.debezium.connector.mysql.MySqlTaskContext]
2020-06-21 07:17:53,761 INFO MySQL|app_inventory|task Registering binlog reader with GTID set: d3a42176-b38c-11ea-892d-0242c0a89003:1-25 [io.debezium.connector.mysql.BinlogReader]
2020-06-21 07:17:53,761 INFO MySQL|app_inventory|task Creating thread debezium-mysqlconnector-app_inventory-binlog-client [io.debezium.util.Threads]
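The GTID bookkeeping above is plain interval arithmetic: the server's executed set d3a42176-b38c-11ea-892d-0242c0a89003:1-27 covers the connector's recorded set :1-25, and the difference :26-27 is exactly what the binlog reader still has to consume. A toy illustration of that reasoning for the single-UUID, single-interval case seen here follows; Debezium's real io.debezium.connector.mysql.GtidSet handles arbitrary GTID sets.

    // Toy illustration of the containment and remainder checks logged above.
    public class GtidMath {
        public static void main(String[] args) {
            String uuid = "d3a42176-b38c-11ea-892d-0242c0a89003";
            long serverHigh = 27; // GTID set available on server: uuid:1-27
            long offsetHigh = 25; // GTID set from previous recorded offset: uuid:1-25

            // Containment: the server must have executed everything the
            // connector already processed, or the stored offset is unusable.
            System.out.println("server contains connector set: " + (serverHigh >= offsetHigh));

            // Remainder: transactions executed on the server but not yet
            // processed by the connector -> uuid:26-27 in this log.
            if (serverHigh > offsetHigh) {
                System.out.println("still to read: " + uuid + ":" + (offsetHigh + 1) + "-" + serverHigh);
            }
        }
    }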
2020-06-21 07:17:53,763 INFO MySQL|app_inventory|task Creating thread debezium-mysqlconnector-app_inventory-binlog-client [io.debezium.util.Threads]
2020-06-21 07:17:53,855 INFO MySQL|app_inventory|binlog Connected to MySQL binlog at mysql:3306, starting at GTIDs d3a42176-b38c-11ea-892d-0242c0a89003:1-25 and binlog file 'mysql-bin.000003', pos=562, skipping 2 events plus 1 rows [io.debezium.connector.mysql.BinlogReader]
2020-06-21 07:17:53,855 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:17:53,855 INFO MySQL|app_inventory|binlog Creating thread debezium-mysqlconnector-app_inventory-binlog-client [io.debezium.util.Threads]
2020-06-21 07:18:03,389 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:03,390 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:03,393 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Finished commitOffsets successfully in 4 ms [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:13,393 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:13,393 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:23,397 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:23,398 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:33,365 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:33,365 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:43,367 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:43,367 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:53,368 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:18:53,368 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:03,335 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:03,335 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:13,338 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:13,339 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:23,340 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:23,340 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:33,306 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:33,307 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:43,308 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:43,308 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:53,311 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:19:53,311 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:20:03,277 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:20:03,278 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:20:13,279 INFO || WorkerSourceTask{id=app_inventory_debezium-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2020-06-21 07:20:13,279 INFO || WorkerSourceTask{id=app_inventory_debezium-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
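The commit entries repeat every ten seconds with nothing to flush, which is consistent with a Connect worker whose offset flush interval is set to 10 seconds rather than the 60-second default. The worker configuration is not shown in this log, so the following excerpt is an inference from the timestamps, not a confirmed setting:

    # connect-distributed.properties (excerpt, inferred from the 10 s commit cadence)
    offset.flush.interval.ms=10000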