/* Initial connector failure; the connector had event.processing.failure.handling.mode = fail at this point */
[2020-09-14 13:44:46,829] INFO [source-mysql-connector|task-0|offsets] WorkerSourceTask{id=source-mysql-connector-0} Finished commitOffsets successfully in 5 ms (org.apache.kafka.connect.runtime.WorkerSourceTask:525)
[2020-09-14 13:44:48,203] ERROR [source-mysql-connector|task-0] Failed to properly convert data value for 'my_schema.my_table.use_rules' of type JSON for row [15339, 1, 2020-09-14T13:44:17Z, 2020-09-14T13:44:41Z, [71, 72, 44, 32, 65, 99, 99, 114, 97, 44, 32, 50, 48, 50, 48, 87, 51, 56, 44, 32, 49, 48, 120, 50, 48, 37, 32, 40, 51, 71, 72, 83, 41, 32, 101, 120, 99, 108, 32, 76, 105, 116, 101], 2020-09-15T00:00Z, 2020-09-21T23:59:59Z, [103, 104], 137, [103, 104, 115], 1, 10, [71, 72, 52, 51, 85, 84, 81, 56, 50, 68, 67, 51, 75, 52, 81], 604800, [2, 1, 0, 97, 0, 0, 7, 0, 2, 0, 90, 0, 18, 0, 4, 0, 22, 0, 6, 0, 12, 28, 0, 0, 47, 0, 116, 121, 112, 101], 1000000, null, 2, [123, 34, 112, 101, 114, 99, 101, 110, 116, 97, 103, 101, 34, 58, 50, 48, 44, 34, 109, 97, 120, 95, 118, 97, 108, 117, 101, 34, 58, 51, 125], 3, null, 1, null, 1, 0, 0, 0, 1, 1, 1]: (io.debezium.relational.TableSchemaBuilder:275)
org.apache.kafka.connect.errors.ConnectException: Failed to parse and read a JSON value on use_rules JSON NOT NULL: null
    at io.debezium.connector.mysql.MySqlValueConverters.lambda$convertJson$17(MySqlValueConverters.java:369)
    at io.debezium.jdbc.JdbcValueConverters.convertValue(JdbcValueConverters.java:1311)
    at io.debezium.connector.mysql.MySqlValueConverters.convertJson(MySqlValueConverters.java:356)
    at io.debezium.connector.mysql.MySqlValueConverters.lambda$converter$1(MySqlValueConverters.java:232)
    at io.debezium.relational.TableSchemaBuilder.lambda$createValueGenerator$5(TableSchemaBuilder.java:265)
    at io.debezium.relational.TableSchema.valueFromColumnData(TableSchema.java:143)
    at io.debezium.connector.mysql.RecordMakers$1.update(RecordMakers.java:276)
    at io.debezium.connector.mysql.RecordMakers$RecordsForTable.update(RecordMakers.java:512)
    at io.debezium.connector.mysql.BinlogReader.handleUpdate(BinlogReader.java:978)
    at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:583)
    at com.github.shyiko.mysql.binlog.BinaryLogClient.notifyEventListeners(BinaryLogClient.java:1130)
    at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:978)
    at com.github.shyiko.mysql.binlog.BinaryLogClient.connect(BinaryLogClient.java:581)
    at com.github.shyiko.mysql.binlog.BinaryLogClient$7.run(BinaryLogClient.java:860)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.EOFException
    at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.read(ByteArrayInputStream.java:190)
    at java.io.InputStream.read(InputStream.java:170)
    at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.fill(ByteArrayInputStream.java:96)
    at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.read(ByteArrayInputStream.java:89)
    at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.readString(ByteArrayInputStream.java:66)
    at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parseObject(JsonBinary.java:378)
    at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:211)
    at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parseArray(JsonBinary.java:530)
    at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:217)
    at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:205)
    at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:181)
    at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parseAsString(JsonBinary.java:168)
    at io.debezium.connector.mysql.MySqlValueConverters.lambda$convertJson$17(MySqlValueConverters.java:366)
    ... 14 more
[2020-09-14 13:44:48,204] ERROR [source-mysql-connector|task-0] Error during binlog processing. Last offset stored = {ts_sec=1600091087, file=mysql-bin.006722, pos=81350305, gtids=2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666, row=1, server_id=310, event=2}, binlog reader near position = mysql-bin.006722/81430355 (io.debezium.connector.mysql.BinlogReader:1161)
[2020-09-14 13:44:48,204] ERROR [source-mysql-connector|task-0] Failed due to error: Error processing binlog event (io.debezium.connector.mysql.BinlogReader:208)
org.apache.kafka.connect.errors.ConnectException: Invalid value: null used for required field: "use_rules", schema type: STRING
    at io.debezium.connector.mysql.AbstractReader.wrap(AbstractReader.java:230)
    at io.debezium.connector.mysql.AbstractReader.failed(AbstractReader.java:207)
    at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:600)
    at com.github.shyiko.mysql.binlog.BinaryLogClient.notifyEventListeners(BinaryLogClient.java:1130)
    at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:978)
    at com.github.shyiko.mysql.binlog.BinaryLogClient.connect(BinaryLogClient.java:581)
    at com.github.shyiko.mysql.binlog.BinaryLogClient$7.run(BinaryLogClient.java:860)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.kafka.connect.errors.DataException: Invalid value: null used for required field: "use_rules", schema type: STRING
    at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:220)
    at org.apache.kafka.connect.data.Struct.validate(Struct.java:233)
    at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:253)
    at org.apache.kafka.connect.data.Struct.put(Struct.java:216)
    at org.apache.kafka.connect.data.Struct.put(Struct.java:203)
    at io.debezium.data.Envelope.update(Envelope.java:313)
    at io.debezium.connector.mysql.RecordMakers$1.update(RecordMakers.java:314)
    at io.debezium.connector.mysql.RecordMakers$RecordsForTable.update(RecordMakers.java:512)
    at io.debezium.connector.mysql.BinlogReader.handleUpdate(BinlogReader.java:978)
    at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:583)
    ... 5 more
[2020-09-14 13:44:48,204] INFO [source-mysql-connector|task-0] Error processing binlog event, and propagating to Kafka Connect so it stops this connector. Future binlog events read before connector is shutdown will be ignored. (io.debezium.connector.mysql.BinlogReader:605)
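/* To see what the failing row actually contains: Debezium prints the column values as raw byte arrays, and most of them decode as plain UTF-8. A minimal sketch (byte lists copied from the ERROR entry above; reading the non-text array as MySQL binary JSON is my interpretation of the stack trace, not something the log states): */

# Best-effort decode of the byte arrays that Debezium logs for the failing row.
def decode(byte_list):
    return bytes(byte_list).decode("utf-8", errors="replace")

# Copied from the first ERROR entry above:
print(decode([71, 72, 44, 32, 65, 99, 99, 114, 97, 44, 32, 50, 48, 50, 48,
              87, 51, 56, 44, 32, 49, 48, 120, 50, 48, 37, 32, 40, 51, 71,
              72, 83, 41, 32, 101, 120, 99, 108, 32, 76, 105, 116, 101]))
# -> GH, Accra, 2020W38, 10x20% (3GHS) excl Lite

print(decode([123, 34, 112, 101, 114, 99, 101, 110, 116, 97, 103, 101, 34,
              58, 50, 48, 44, 34, 109, 97, 120, 95, 118, 97, 108, 117, 101,
              34, 58, 51, 125]))
# -> {"percentage":20,"max_value":3}

# The array starting [2, 1, 0, 97, ...] is not text: 0x02 is the type marker
# for a small array in MySQL's binary JSON format, which is exactly what
# JsonBinary.parseArray/parseObject is reading when it hits the EOFException,
# so this is presumably the use_rules value and it looks truncated.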
[2020-09-14 13:44:48,251] INFO [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Committing offsets (org.apache.kafka.connect.runtime.WorkerSourceTask:426)
[2020-09-14 13:44:48,251] INFO [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} flushing 0 outstanding messages for offset commit (org.apache.kafka.connect.runtime.WorkerSourceTask:443)
[2020-09-14 13:44:48,253] INFO [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Finished commitOffsets successfully in 2 ms (org.apache.kafka.connect.runtime.WorkerSourceTask:525)
[2020-09-14 13:44:48,253] ERROR [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Task threw an uncaught and unrecoverable exception (org.apache.kafka.connect.runtime.WorkerTask:186)
org.apache.kafka.connect.errors.ConnectException: Invalid value: null used for required field: "use_rules", schema type: STRING
    at io.debezium.connector.mysql.AbstractReader.wrap(AbstractReader.java:230)
    at io.debezium.connector.mysql.AbstractReader.failed(AbstractReader.java:207)
    at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:600)
    at com.github.shyiko.mysql.binlog.BinaryLogClient.notifyEventListeners(BinaryLogClient.java:1130)
    at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:978)
    at com.github.shyiko.mysql.binlog.BinaryLogClient.connect(BinaryLogClient.java:581)
    at com.github.shyiko.mysql.binlog.BinaryLogClient$7.run(BinaryLogClient.java:860)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.kafka.connect.errors.DataException: Invalid value: null used for required field: "use_rules", schema type: STRING
    at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:220)
    at org.apache.kafka.connect.data.Struct.validate(Struct.java:233)
    at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:253)
    at org.apache.kafka.connect.data.Struct.put(Struct.java:216)
    at org.apache.kafka.connect.data.Struct.put(Struct.java:203)
    at io.debezium.data.Envelope.update(Envelope.java:313)
    at io.debezium.connector.mysql.RecordMakers$1.update(RecordMakers.java:314)
    at io.debezium.connector.mysql.RecordMakers$RecordsForTable.update(RecordMakers.java:512)
    at io.debezium.connector.mysql.BinlogReader.handleUpdate(BinlogReader.java:978)
    at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:583)
    ... 5 more
[2020-09-14 13:44:48,253] ERROR [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Task is being killed and will not recover until manually restarted (org.apache.kafka.connect.runtime.WorkerTask:187)
[2020-09-14 13:44:48,253] INFO [source-mysql-connector|task-0] Stopping down connector (io.debezium.connector.common.BaseSourceTask:187)
[2020-09-14 13:44:48,253] INFO [source-mysql-connector|task-0] Stopping MySQL connector task (io.debezium.connector.mysql.MySqlConnectorTask:458)
[2020-09-14 13:44:48,253] INFO [source-mysql-connector|task-0] ChainedReader: Stopping the binlog reader (io.debezium.connector.mysql.ChainedReader:121)
[2020-09-14 13:44:48,253] INFO [source-mysql-connector|task-0] Discarding 171 unsent record(s) due to the connector shutting down (io.debezium.connector.mysql.BinlogReader:129)
[2020-09-14 13:44:48,254] INFO [source-mysql-connector|task-0] Discarding 0 unsent record(s) due to the connector shutting down (io.debezium.connector.mysql.BinlogReader:129)
[2020-09-14 13:44:48,254] INFO [source-mysql-connector|task-0] Stopped reading binlog after 12780069 events, last recorded offset: {ts_sec=1600091087, file=mysql-bin.006722, pos=81350305, gtids=2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666, row=1, server_id=310, event=2} (io.debezium.connector.mysql.BinlogReader:1096)
[2020-09-14 13:44:48,254] INFO [source-mysql-connector|task-0] [Producer clientId=source-mysql-connector-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. (org.apache.kafka.clients.producer.KafkaProducer:1182)
[2020-09-14 13:44:48,255] INFO [source-mysql-connector|task-0] Connector task finished all work and is now shutdown (io.debezium.connector.mysql.MySqlConnectorTask:496)
[2020-09-14 13:44:48,255] INFO [source-mysql-connector|task-0] [Producer clientId=connector-producer-source-mysql-connector-0] Closing the Kafka producer with timeoutMillis = 30000 ms. (org.apache.kafka.clients.producer.KafkaProducer:1182)
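/* At this point the task is dead and stays dead until it is restarted. A sketch of how I check that over the Connect REST API (worker URL taken from the leaderUrl logged during the rebalance below; adjust for your deployment): */

# Query task status via the Kafka Connect REST API.
import requests

base = "http://kafka-connect-1:8083/connectors/source-mysql-connector"
status = requests.get(f"{base}/status").json()
print(status["tasks"][0]["state"])               # expected here: FAILED
print(status["tasks"][0].get("trace", "")[:300])  # same stack trace as above

# A failed task can be restarted in place once the cause is addressed:
requests.post(f"{base}/tasks/0/restart")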
[2020-09-14 13:46:16,831] INFO [source-mysql-connector|task-0|offsets] WorkerSourceTask{id=source-mysql-connector-0} Committing offsets (org.apache.kafka.connect.runtime.WorkerSourceTask:426)
[2020-09-14 13:46:16,831] INFO [source-mysql-connector|task-0|offsets] WorkerSourceTask{id=source-mysql-connector-0} flushing 0 outstanding messages for offset commit (org.apache.kafka.connect.runtime.WorkerSourceTask:443)
[2020-09-14 13:46:26,831] INFO [source-mysql-connector|task-0|offsets] WorkerSourceTask{id=source-mysql-connector-0} Committing offsets (org.apache.kafka.connect.runtime.WorkerSourceTask:426)
/* I then updated the connector with event.processing.failure.handling.mode = skip and restarted the task */
[2020-09-14 13:58:56,849] INFO [source-mysql-connector|task-0|offsets] WorkerSourceTask{id=source-mysql-connector-0} Committing offsets (org.apache.kafka.connect.runtime.WorkerSourceTask:426)
[2020-09-14 13:58:56,849] INFO [source-mysql-connector|task-0|offsets] WorkerSourceTask{id=source-mysql-connector-0} flushing 0 outstanding messages for offset commit (org.apache.kafka.connect.runtime.WorkerSourceTask:443)
[2020-09-14 13:59:02,282] INFO Successfully tested connection for jdbc:mysql://my-database.internal:3306/?useInformationSchema=true&nullCatalogMeansCurrent=false&useSSL=false&useUnicode=true&characterEncoding=UTF-8&characterSetResults=UTF-8&zeroDateTimeBehavior=CONVERT_TO_NULL&connectTimeout=30000 with user 'debezium' (io.debezium.connector.mysql.MySqlConnector:102)
[2020-09-14 13:59:02,282] INFO AbstractConfig values: (org.apache.kafka.common.config.AbstractConfig:347)
[2020-09-14 13:59:02,287] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Connector source-mysql-connector config updated (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1442)
[2020-09-14 13:59:02,787] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Handling connector-only config update by restarting connector source-mysql-connector (org.apache.kafka.connect.runtime.distributed.DistributedHerder:547)
[2020-09-14 13:59:02,788] INFO [source-mysql-connector|worker] Stopping connector source-mysql-connector (org.apache.kafka.connect.runtime.Worker:360)
[2020-09-14 13:59:02,789] INFO [source-mysql-connector|worker] Stopped connector source-mysql-connector (org.apache.kafka.connect.runtime.Worker:376)
[2020-09-14 13:59:02,789] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Starting connector source-mysql-connector (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1225)
[2020-09-14 13:59:02,789] INFO [source-mysql-connector|worker] ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = null name = source-mysql-connector tasks.max = 1 transforms = [] value.converter = null (org.apache.kafka.connect.runtime.ConnectorConfig:347)
[2020-09-14 13:59:02,789] INFO [source-mysql-connector|worker] EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = null name = source-mysql-connector tasks.max = 1 transforms = [] value.converter = null (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:347)
[2020-09-14 13:59:02,789] INFO [source-mysql-connector|worker] Creating connector source-mysql-connector of type io.debezium.connector.mysql.MySqlConnector (org.apache.kafka.connect.runtime.Worker:253)
[2020-09-14 13:59:02,789] INFO [source-mysql-connector|worker] Instantiated connector source-mysql-connector with version 1.2.3.Final of type class io.debezium.connector.mysql.MySqlConnector (org.apache.kafka.connect.runtime.Worker:256)
[2020-09-14 13:59:02,789] INFO [source-mysql-connector|worker] Finished creating connector source-mysql-connector (org.apache.kafka.connect.runtime.Worker:275)
[2020-09-14 13:59:02,789] INFO SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = null name = source-mysql-connector tasks.max = 1 transforms = [] value.converter = null (org.apache.kafka.connect.runtime.SourceConnectorConfig:347)
[2020-09-14 13:59:02,790] INFO EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = null name = source-mysql-connector tasks.max = 1 transforms = [] value.converter = null (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:347)
[2020-09-14 13:59:03,791] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Tasks [source-mysql-connector-0] configs updated (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1457)
[2020-09-14 13:59:04,392] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Handling task config update by restarting tasks [source-mysql-connector-0] (org.apache.kafka.connect.runtime.distributed.DistributedHerder:585)
[2020-09-14 13:59:04,392] INFO [source-mysql-connector|task-0] Stopping task source-mysql-connector-0 (org.apache.kafka.connect.runtime.Worker:706)
[2020-09-14 13:59:04,392] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Rebalance started (org.apache.kafka.connect.runtime.distributed.WorkerCoordinator:225)
[2020-09-14 13:59:04,392] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] (Re-)joining group (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:552)
[2020-09-14 13:59:04,397] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Successfully joined group with generation 102 (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:503)
[2020-09-14 13:59:04,397] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Joined group at generation 102 with protocol version 2 and got assignment: Assignment{error=0, leader='connect-1-ff931232-57f4-44ef-952e-0bf0a48aed4f', leaderUrl='http://kafka-connect-1:8083/', offset=459, connectorIds=[source-mysql-connector], taskIds=[source-mysql-connector-0], revokedConnectorIds=[], revokedTaskIds=[], delay=0} with rebalance delay: 0 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1600)
[2020-09-14 13:59:04,397] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Starting connectors and tasks using config offset 459 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1143)
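/* For reference, a sketch of how such a config change can be applied over the Connect REST API. PUT .../config replaces the whole connector config, so I start from the currently deployed one rather than sending just the changed key: */

import requests

base = "http://kafka-connect-1:8083/connectors/source-mysql-connector"

# Fetch the currently deployed config so no keys are lost on the PUT.
config = requests.get(f"{base}/config").json()
config["event.processing.failure.handling.mode"] = "skip"

# PUT replaces the full config and triggers the connector/task restart
# visible in the worker log above.
requests.put(f"{base}/config", json=config).raise_for_status()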
[2020-09-14 13:59:04,397] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Starting task source-mysql-connector-0 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1185)
[2020-09-14 13:59:04,397] INFO [source-mysql-connector|task-0] Creating task source-mysql-connector-0 (org.apache.kafka.connect.runtime.Worker:421)
[2020-09-14 13:59:04,397] INFO [source-mysql-connector|task-0] ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = null name = source-mysql-connector tasks.max = 1 transforms = [] value.converter = null (org.apache.kafka.connect.runtime.ConnectorConfig:347)
[2020-09-14 13:59:04,398] INFO [source-mysql-connector|task-0] EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = null name = source-mysql-connector tasks.max = 1 transforms = [] value.converter = null (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:347)
[2020-09-14 13:59:04,398] INFO [source-mysql-connector|task-0] TaskConfig values: task.class = class io.debezium.connector.mysql.MySqlConnectorTask (org.apache.kafka.connect.runtime.TaskConfig:347)
[2020-09-14 13:59:04,398] INFO [source-mysql-connector|task-0] Instantiated task source-mysql-connector-0 with version 1.2.3.Final of type io.debezium.connector.mysql.MySqlConnectorTask (org.apache.kafka.connect.runtime.Worker:436)
[2020-09-14 13:59:04,398] INFO [source-mysql-connector|task-0] JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false (org.apache.kafka.connect.json.JsonConverterConfig:347)
[2020-09-14 13:59:04,398] INFO [source-mysql-connector|task-0] Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task source-mysql-connector-0 using the worker config (org.apache.kafka.connect.runtime.Worker:449)
[2020-09-14 13:59:04,398] INFO [source-mysql-connector|task-0] AvroConverterConfig values: bearer.auth.token = [hidden] proxy.port = -1 schema.reflection = false auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy schema.registry.url = [http://schema-registry.internal:8081] basic.auth.user.info = [hidden] proxy.host = use.latest.version = false schema.registry.basic.auth.user.info = [hidden] bearer.auth.credentials.source = STATIC_TOKEN key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.connect.avro.AvroConverterConfig:179)
[2020-09-14 13:59:04,398] INFO [source-mysql-connector|task-0] KafkaAvroSerializerConfig values: bearer.auth.token = [hidden] proxy.port = -1 schema.reflection = false auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy schema.registry.url = [http://schema-registry.internal:8081] basic.auth.user.info = [hidden] proxy.host = use.latest.version = false schema.registry.basic.auth.user.info = [hidden] bearer.auth.credentials.source = STATIC_TOKEN key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroSerializerConfig:179)
[2020-09-14 13:59:04,398] INFO [source-mysql-connector|task-0] KafkaAvroDeserializerConfig values: bearer.auth.token = [hidden] proxy.port = -1 schema.reflection = false auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL specific.avro.reader = false value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy schema.registry.url = [http://schema-registry.internal:8081] basic.auth.user.info = [hidden] proxy.host = use.latest.version = false schema.registry.basic.auth.user.info = [hidden] bearer.auth.credentials.source = STATIC_TOKEN key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroDeserializerConfig:179)
[2020-09-14 13:59:04,398] INFO [source-mysql-connector|task-0] AvroDataConfig values: connect.meta.data = true enhanced.avro.schema.support = false schemas.cache.config = 1000 (io.confluent.connect.avro.AvroDataConfig:347)
[2020-09-14 13:59:04,398] INFO [source-mysql-connector|task-0] Set up the value converter class io.confluent.connect.avro.AvroConverter for task source-mysql-connector-0 using the worker config (org.apache.kafka.connect.runtime.Worker:455)
[2020-09-14 13:59:04,398] INFO [source-mysql-connector|task-0] Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task source-mysql-connector-0 using the worker config (org.apache.kafka.connect.runtime.Worker:462)
[2020-09-14 13:59:04,399] INFO [source-mysql-connector|task-0] Initializing: org.apache.kafka.connect.runtime.TransformationChain{} (org.apache.kafka.connect.runtime.Worker:516)
[2020-09-14 13:59:04,399] INFO [source-mysql-connector|task-0] ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] buffer.memory = 33554432 client.dns.lookup = default client.id = connector-producer-source-mysql-connector-0 compression.type = lz4 connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = true interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 5 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 5 max.request.size = 25286400 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer (org.apache.kafka.clients.producer.ProducerConfig:347)
[2020-09-14 13:59:04,399] INFO [source-mysql-connector|task-0] [Producer clientId=connector-producer-source-mysql-connector-0] Instantiated an idempotent producer. (org.apache.kafka.clients.producer.KafkaProducer:527)
[2020-09-14 13:59:04,403] INFO [source-mysql-connector|task-0] [Producer clientId=connector-producer-source-mysql-connector-0] Overriding the default retries config to the recommended value of 2147483647 since the idempotent producer is enabled. (org.apache.kafka.clients.producer.KafkaProducer:535)
[2020-09-14 13:59:04,404] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117)
[2020-09-14 13:59:04,404] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118)
[2020-09-14 13:59:04,404] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091944404 (org.apache.kafka.common.utils.AppInfoParser:119)
[2020-09-14 13:59:04,405] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Finished starting connectors and tasks (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1171)
[2020-09-14 13:59:04,406] INFO [source-mysql-connector|task-0] Starting MySqlConnectorTask with configuration: (io.debezium.connector.common.BaseSourceTask:95)
[2020-09-14 13:59:04,406] INFO [source-mysql-connector|task-0]    connector.class = io.debezium.connector.mysql.MySqlConnector (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,406] INFO [source-mysql-connector|task-0]    database.user = debezium (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,406] INFO [source-mysql-connector|task-0]    max.queue.size = 24576 (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,406] INFO [source-mysql-connector|task-0]    database.server.id = 184002 (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,406] INFO [source-mysql-connector|task-0]    event.processing.failure.handling.mode = skip (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,406] INFO [source-mysql-connector|task-0]    tasks.max = 1 (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    database.history.kafka.bootstrap.servers = kafka-1:9092,kafka-2:9092,kafka-3:9092 (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    database.history.kafka.topic = cdc_dbhistory_mysql.connector (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    database.server.name = cdc_mysql_avro.connector (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    table.blacklist = .*_gho$,.*_ghc$ (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    database.port = 3306 (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    include.schema.changes = true (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    inconsistent.schema.handling.mode = skip (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    decimal.handling.mode = string (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    task.class = io.debezium.connector.mysql.MySqlConnectorTask (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    database.hostname = my-database (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    database.password = ******** (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    name = source-mysql-connector (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    database.blacklist = test (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    max.batch.size = 6144 (io.debezium.connector.common.BaseSourceTask:97)
[2020-09-14 13:59:04,407] INFO [source-mysql-connector|task-0]    snapshot.mode = schema_only (io.debezium.connector.common.BaseSourceTask:97)
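/* Gathering the BaseSourceTask lines above into one place, the effective Debezium config is, as a best-effort reconstruction (values verbatim from the log, keys regrouped for readability; the password is masked in the log, and only Debezium's own keys are printed, so converter settings and defaults are not shown): */

connector_config = {
    "name": "source-mysql-connector",
    "connector.class": "io.debezium.connector.mysql.MySqlConnector",
    "tasks.max": "1",
    "database.hostname": "my-database",
    "database.port": "3306",
    "database.user": "debezium",
    "database.password": "********",
    "database.server.id": "184002",
    "database.server.name": "cdc_mysql_avro.connector",
    "database.blacklist": "test",
    "table.blacklist": ".*_gho$,.*_ghc$",
    "database.history.kafka.bootstrap.servers": "kafka-1:9092,kafka-2:9092,kafka-3:9092",
    "database.history.kafka.topic": "cdc_dbhistory_mysql.connector",
    "snapshot.mode": "schema_only",
    "include.schema.changes": "true",
    "decimal.handling.mode": "string",
    "inconsistent.schema.handling.mode": "skip",
    "event.processing.failure.handling.mode": "skip",  # the change made above
    "max.batch.size": "6144",
    "max.queue.size": "24576",
}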
[2020-09-14 13:59:04,408] INFO [source-mysql-connector|task-0] [Producer clientId=connector-producer-source-mysql-connector-0] Cluster ID: -Z0j4InSRUa7tzTXbDXWmg (org.apache.kafka.clients.Metadata:277)
[2020-09-14 13:59:04,408] INFO [source-mysql-connector|task-0] [Producer clientId=connector-producer-source-mysql-connector-0] ProducerId set to 1847 with epoch 0 (org.apache.kafka.clients.producer.internals.TransactionManager:515)
[2020-09-14 13:59:04,864] INFO [source-mysql-connector|task-0] KafkaDatabaseHistory Consumer config: {enable.auto.commit=false, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, group.id=source-mysql-connector-dbhistory, auto.offset.reset=earliest, session.timeout.ms=10000, bootstrap.servers=kafka-1:9092,kafka-2:9092,kafka-3:9092, client.id=source-mysql-connector-dbhistory, key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, fetch.min.bytes=1} (io.debezium.relational.history.KafkaDatabaseHistory:215)
[2020-09-14 13:59:04,864] INFO [source-mysql-connector|task-0] KafkaDatabaseHistory Producer config: {bootstrap.servers=kafka-1:9092,kafka-2:9092,kafka-3:9092, value.serializer=org.apache.kafka.common.serialization.StringSerializer, buffer.memory=1048576, retries=1, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=source-mysql-connector-dbhistory, linger.ms=0, batch.size=32768, max.block.ms=10000, acks=1} (io.debezium.relational.history.KafkaDatabaseHistory:216)
[2020-09-14 13:59:04,864] INFO [source-mysql-connector|task-0] Requested thread factory for connector MySqlConnector, id = cdc_mysql_avro.my_database named = db-history-config-check (io.debezium.util.Threads:270)
[2020-09-14 13:59:04,865] INFO [source-mysql-connector|task-0] ProducerConfig values: acks = 1 batch.size = 32768 bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] buffer.memory = 1048576 client.dns.lookup = default client.id = source-mysql-connector-dbhistory compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer (org.apache.kafka.clients.producer.ProducerConfig:347)
[2020-09-14 13:59:04,866] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117)
[2020-09-14 13:59:04,866] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118)
[2020-09-14 13:59:04,866] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091944866 (org.apache.kafka.common.utils.AppInfoParser:119)
[2020-09-14 13:59:04,866] INFO [source-mysql-connector|task-0] Found existing offset: {ts_sec=1600091087, file=mysql-bin.006722, pos=81350305, gtids=2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666, row=1, server_id=310, event=2} (io.debezium.connector.mysql.MySqlConnectorTask:83)
[2020-09-14 13:59:04,869] INFO [source-mysql-connector|task-0] [Producer clientId=source-mysql-connector-dbhistory] Cluster ID: -Z0j4InSRUa7tzTXbDXWmg (org.apache.kafka.clients.Metadata:277)
[2020-09-14 13:59:04,869] INFO [source-mysql-connector|task-0] ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] check.crcs = true client.dns.lookup = default client.id = source-mysql-connector-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = source-mysql-connector-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:347)
[2020-09-14 13:59:04,870] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117)
[2020-09-14 13:59:04,870] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118)
[2020-09-14 13:59:04,870] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091944870 (org.apache.kafka.common.utils.AppInfoParser:119)
[2020-09-14 13:59:04,873] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Cluster ID: -Z0j4InSRUa7tzTXbDXWmg (org.apache.kafka.clients.Metadata:277)
[2020-09-14 13:59:04,886] INFO [source-mysql-connector|task-0] ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] check.crcs = true client.dns.lookup = default client.id = source-mysql-connector-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = source-mysql-connector-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:347)
[2020-09-14 13:59:04,887] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117)
[2020-09-14 13:59:04,887] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118)
[2020-09-14 13:59:04,887] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091944887 (org.apache.kafka.common.utils.AppInfoParser:119)
[2020-09-14 13:59:04,888] INFO [source-mysql-connector|task-0] Creating thread debezium-mysqlconnector-cdc_mysql_avro.my_database-db-history-config-check (io.debezium.util.Threads:287)
[2020-09-14 13:59:04,888] INFO [source-mysql-connector|task-0] AdminClientConfig values: bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] client.dns.lookup = default client.id = source-mysql-connector-dbhistory-topic-check connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS (org.apache.kafka.clients.admin.AdminClientConfig:347)
[2020-09-14 13:59:04,889] WARN [source-mysql-connector|task-0] The configuration 'value.serializer' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355)
[2020-09-14 13:59:04,889] WARN [source-mysql-connector|task-0] The configuration 'batch.size' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355)
[2020-09-14 13:59:04,889] WARN [source-mysql-connector|task-0] The configuration 'max.block.ms' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355)
[2020-09-14 13:59:04,889] WARN [source-mysql-connector|task-0] The configuration 'acks' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355)
[2020-09-14 13:59:04,889] WARN [source-mysql-connector|task-0] The configuration 'buffer.memory' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355)
[2020-09-14 13:59:04,889] WARN [source-mysql-connector|task-0] The configuration 'key.serializer' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355)
[2020-09-14 13:59:04,889] WARN [source-mysql-connector|task-0] The configuration 'linger.ms' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355)
[2020-09-14 13:59:04,889] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117)
[2020-09-14 13:59:04,889] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118)
[2020-09-14 13:59:04,889] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091944889 (org.apache.kafka.common.utils.AppInfoParser:119)
[2020-09-14 13:59:04,892] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Cluster ID: -Z0j4InSRUa7tzTXbDXWmg (org.apache.kafka.clients.Metadata:277)
[2020-09-14 13:59:04,899] INFO [source-mysql-connector|task-0] Database history topic 'cdc_dbhistory_mysql.my_database' has correct settings (io.debezium.relational.history.KafkaDatabaseHistory:440)
[2020-09-14 13:59:04,899] INFO [source-mysql-connector|task-0] Started database history recovery (io.debezium.relational.history.DatabaseHistoryMetrics:113)
[2020-09-14 13:59:04,900] INFO [source-mysql-connector|task-0] ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] check.crcs = true client.dns.lookup = default client.id = source-mysql-connector-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = source-mysql-connector-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:347)
[2020-09-14 13:59:04,901] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117)
[2020-09-14 13:59:04,901] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118)
[2020-09-14 13:59:04,901] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091944901 (org.apache.kafka.common.utils.AppInfoParser:119)
[2020-09-14 13:59:04,901] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Subscribed to topic(s): cdc_dbhistory_mysql.my_database (org.apache.kafka.clients.consumer.KafkaConsumer:974)
[2020-09-14 13:59:04,904] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Cluster ID: -Z0j4InSRUa7tzTXbDXWmg (org.apache.kafka.clients.Metadata:277)
[2020-09-14 13:59:04,907] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Discovered group coordinator kafka-3:9092 (id: 2147483644 rack: null) (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:797)
[2020-09-14 13:59:04,907] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] (Re-)joining group (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:552)
[2020-09-14 13:59:04,911] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Join group failed with org.apache.kafka.common.errors.MemberIdRequiredException: The group member needs to have a valid member id before actually entering a consumer group (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:455)
[2020-09-14 13:59:04,911] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] (Re-)joining group (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:552)
[2020-09-14 13:59:07,914] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Finished assignment for group at generation 1: {source-mysql-connector-dbhistory-2565f80d-60c5-49d9-970d-2f99ac7a51be=Assignment(partitions=[cdc_dbhistory_mysql.my_database-0])} (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:604)
[2020-09-14 13:59:07,917] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Successfully joined group with generation 1 (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:503)
[2020-09-14 13:59:07,918] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Adding newly assigned partitions: cdc_dbhistory_mysql.my_database-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:273)
[2020-09-14 13:59:07,920] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Found no committed offset for partition cdc_dbhistory_mysql.my_database-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1299)
[2020-09-14 13:59:07,921] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Resetting offset for partition cdc_dbhistory_mysql.my_database-0 to offset 0. (org.apache.kafka.clients.consumer.internals.SubscriptionState:397)
[2020-09-14 13:59:08,255] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Revoke previously assigned partitions cdc_dbhistory_mysql.my_database-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:292)
[2020-09-14 13:59:08,255] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Member source-mysql-connector-dbhistory-2565f80d-60c5-49d9-970d-2f99ac7a51be sending LeaveGroup request to coordinator kafka-3:9092 (id: 2147483644 rack: null) due to the consumer is being closed (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:979)
[2020-09-14 13:59:08,257] INFO [source-mysql-connector|task-0] Finished database history recovery of 560 change(s) in 3358 ms (io.debezium.relational.history.DatabaseHistoryMetrics:119)
[2020-09-14 13:59:08,268] INFO [source-mysql-connector|task-0] MySQL current GTID set 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017576040,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 does contain the GTID set required by the connector 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.MySqlConnectorTask:524)
[2020-09-14 13:59:08,271] INFO [source-mysql-connector|task-0] GTIDs known by the server but not processed yet 2d1993b3-ed88-11e9-a524-025bb057af28:7017107531-7017576040, for replication are available only 2d1993b3-ed88-11e9-a524-025bb057af28:7017107531-7017576040 (io.debezium.connector.mysql.MySqlConnectorTask:529)
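/* The two MySqlConnectorTask lines above are plain interval arithmetic on the one GTID UUID whose ranges differ. A small sketch reproducing it (UUID shortened, numbers copied from the log): */

# Server has executed 1-7017576040 for 2d1993b3-..., while the stored offset
# covers 1-7017107530; the difference is what still has to be read.
server_executed = (1, 7017576040)
connector_offset = (1, 7017107530)

if server_executed[1] > connector_offset[1]:
    gap = (connector_offset[1] + 1, server_executed[1])
    print(f"2d1993b3-...:{gap[0]}-{gap[1]}")
# -> 2d1993b3-...:7017107531-7017576040, matching the log line above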
[2020-09-14 13:59:08,277] INFO [source-mysql-connector|task-0] Requested thread factory for connector MySqlConnector, id = cdc_mysql_avro.my_database named = binlog-client (io.debezium.util.Threads:270)
[2020-09-14 13:59:08,285] INFO [source-mysql-connector|task-0] GTID set purged on server: 2d1993b3-ed88-11e9-a524-025bb057af28:1-6890331753,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.BinlogReader:364)
[2020-09-14 13:59:08,285] INFO [source-mysql-connector|task-0] Attempting to generate a filtered GTID set (io.debezium.connector.mysql.MySqlTaskContext:299)
[2020-09-14 13:59:08,285] INFO [source-mysql-connector|task-0] GTID set from previous recorded offset: 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.MySqlTaskContext:300)
[2020-09-14 13:59:08,285] INFO [source-mysql-connector|task-0] GTID set available on server: 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017576052,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.MySqlTaskContext:307)
[2020-09-14 13:59:08,285] INFO [source-mysql-connector|task-0] Using first available positions for new GTID channels (io.debezium.connector.mysql.MySqlTaskContext:313)
[2020-09-14 13:59:08,286] INFO [source-mysql-connector|task-0] Relevant GTID set available on server: 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017576052,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.MySqlTaskContext:315)
[2020-09-14 13:59:08,286] INFO [source-mysql-connector|task-0] Final merged GTID set to use when connecting to MySQL: 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.MySqlTaskContext:326)
[2020-09-14 13:59:08,286] INFO [source-mysql-connector|task-0] Registering binlog reader with GTID set: 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.BinlogReader:369)
[2020-09-14 13:59:08,286] INFO [source-mysql-connector|task-0] Creating thread debezium-mysqlconnector-cdc_mysql_avro.my_database-binlog-client (io.debezium.util.Threads:287)
[2020-09-14 13:59:08,287] INFO [source-mysql-connector|task-0] Creating thread debezium-mysqlconnector-cdc_mysql_avro.my_database-binlog-client (io.debezium.util.Threads:287)
[2020-09-14 13:59:08,420] INFO [source-mysql-connector|task-0] Connected to MySQL binlog at camp-debezium.sqldb.internal:3306, starting at GTIDs 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 and binlog file 'mysql-bin.006722', pos=81350305, skipping 2 events plus 1 rows (io.debezium.connector.mysql.BinlogReader:1111)
[2020-09-14 13:59:08,420] INFO [source-mysql-connector|task-0] Creating thread debezium-mysqlconnector-cdc_mysql_avro.my_database-binlog-client (io.debezium.util.Threads:287)
[2020-09-14 13:59:08,420] INFO [source-mysql-connector|task-0] Waiting for keepalive thread to start (io.debezium.connector.mysql.BinlogReader:412)
[2020-09-14 13:59:08,420] INFO [source-mysql-connector|task-0] Keepalive thread is running (io.debezium.connector.mysql.BinlogReader:419)
[2020-09-14 13:59:08,520] INFO [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Source task finished initialization and start (org.apache.kafka.connect.runtime.WorkerSourceTask:216)
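/* The "skipping 2 events plus 1 rows" in the connect line above comes straight from the stored offset: Debezium replays from the committed binlog position and discards what it has already produced. A trivial sketch of that relationship (values copied from the offset logged earlier): */

offset = {"file": "mysql-bin.006722", "pos": 81350305, "event": 2, "row": 1}
print(f"resume at {offset['file']}/{offset['pos']}, "
      f"skipping {offset['event']} events plus {offset['row']} rows")
# -> resume at mysql-bin.006722/81350305, skipping 2 events plus 1 rows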
io.debezium.connector.mysql.MySqlValueConverters.lambda$convertJson$17(MySqlValueConverters.java:369) at io.debezium.jdbc.JdbcValueConverters.convertValue(JdbcValueConverters.java:1311) at io.debezium.connector.mysql.MySqlValueConverters.convertJson(MySqlValueConverters.java:356) at io.debezium.connector.mysql.MySqlValueConverters.lambda$converter$1(MySqlValueConverters.java:232) at io.debezium.relational.TableSchemaBuilder.lambda$createValueGenerator$5(TableSchemaBuilder.java:265) at io.debezium.relational.TableSchema.valueFromColumnData(TableSchema.java:143) at io.debezium.connector.mysql.RecordMakers$1.update(RecordMakers.java:276) at io.debezium.connector.mysql.RecordMakers$RecordsForTable.update(RecordMakers.java:512) at io.debezium.connector.mysql.BinlogReader.handleUpdate(BinlogReader.java:978) at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:583) at com.github.shyiko.mysql.binlog.BinaryLogClient.notifyEventListeners(BinaryLogClient.java:1130) at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:978) at com.github.shyiko.mysql.binlog.BinaryLogClient.connect(BinaryLogClient.java:581) at com.github.shyiko.mysql.binlog.BinaryLogClient$7.run(BinaryLogClient.java:860) at java.lang.Thread.run(Thread.java:748) Caused by: java.io.EOFException at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.read(ByteArrayInputStream.java:190) at java.io.InputStream.read(InputStream.java:170) at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.fill(ByteArrayInputStream.java:96) at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.read(ByteArrayInputStream.java:89) at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.readString(ByteArrayInputStream.java:66) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parseObject(JsonBinary.java:378) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:211) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parseArray(JsonBinary.java:530) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:217) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:205) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:181) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parseAsString(JsonBinary.java:168) at io.debezium.connector.mysql.MySqlValueConverters.lambda$convertJson$17(MySqlValueConverters.java:366) ... 14 more [2020-09-14 13:59:08,693] ERROR [source-mysql-connector|task-0] Error during binlog processing. 
Last offset stored = null, binlog reader near position = mysql-bin.006722/81430355 (io.debezium.connector.mysql.BinlogReader:1161) [2020-09-14 13:59:08,693] ERROR [source-mysql-connector|task-0] Failed due to error: Error processing binlog event (io.debezium.connector.mysql.BinlogReader:208) org.apache.kafka.connect.errors.ConnectException: Invalid value: null used for required field: "use_rules", schema type: STRING at io.debezium.connector.mysql.AbstractReader.wrap(AbstractReader.java:230) at io.debezium.connector.mysql.AbstractReader.failed(AbstractReader.java:207) at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:600) at com.github.shyiko.mysql.binlog.BinaryLogClient.notifyEventListeners(BinaryLogClient.java:1130) at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:978) at com.github.shyiko.mysql.binlog.BinaryLogClient.connect(BinaryLogClient.java:581) at com.github.shyiko.mysql.binlog.BinaryLogClient$7.run(BinaryLogClient.java:860) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.kafka.connect.errors.DataException: Invalid value: null used for required field: "use_rules", schema type: STRING at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:220) at org.apache.kafka.connect.data.Struct.validate(Struct.java:233) at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:253) at org.apache.kafka.connect.data.Struct.put(Struct.java:216) at org.apache.kafka.connect.data.Struct.put(Struct.java:203) at io.debezium.data.Envelope.update(Envelope.java:313) at io.debezium.connector.mysql.RecordMakers$1.update(RecordMakers.java:314) at io.debezium.connector.mysql.RecordMakers$RecordsForTable.update(RecordMakers.java:512) at io.debezium.connector.mysql.BinlogReader.handleUpdate(BinlogReader.java:978) at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:583) ... 5 more [2020-09-14 13:59:08,693] INFO [source-mysql-connector|task-0] Error processing binlog event, and propagating to Kafka Connect so it stops this connector. Future binlog events read before connector is shutdown will be ignored. 
(io.debezium.connector.mysql.BinlogReader:605) [2020-09-14 13:59:08,777] INFO [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Committing offsets (org.apache.kafka.connect.runtime.WorkerSourceTask:426) [2020-09-14 13:59:08,777] INFO [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} flushing 0 outstanding messages for offset commit (org.apache.kafka.connect.runtime.WorkerSourceTask:443) [2020-09-14 13:59:08,777] ERROR [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Task threw an uncaught and unrecoverable exception (org.apache.kafka.connect.runtime.WorkerTask:186) org.apache.kafka.connect.errors.ConnectException: Invalid value: null used for required field: "use_rules", schema type: STRING at io.debezium.connector.mysql.AbstractReader.wrap(AbstractReader.java:230) at io.debezium.connector.mysql.AbstractReader.failed(AbstractReader.java:207) at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:600) at com.github.shyiko.mysql.binlog.BinaryLogClient.notifyEventListeners(BinaryLogClient.java:1130) at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:978) at com.github.shyiko.mysql.binlog.BinaryLogClient.connect(BinaryLogClient.java:581) at com.github.shyiko.mysql.binlog.BinaryLogClient$7.run(BinaryLogClient.java:860) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.kafka.connect.errors.DataException: Invalid value: null used for required field: "use_rules", schema type: STRING at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:220) at org.apache.kafka.connect.data.Struct.validate(Struct.java:233) at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:253) at org.apache.kafka.connect.data.Struct.put(Struct.java:216) at org.apache.kafka.connect.data.Struct.put(Struct.java:203) at io.debezium.data.Envelope.update(Envelope.java:313) at io.debezium.connector.mysql.RecordMakers$1.update(RecordMakers.java:314) at io.debezium.connector.mysql.RecordMakers$RecordsForTable.update(RecordMakers.java:512) at io.debezium.connector.mysql.BinlogReader.handleUpdate(BinlogReader.java:978) at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:583) ... 
5 more [2020-09-14 13:59:08,777] ERROR [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Task is being killed and will not recover until manually restarted (org.apache.kafka.connect.runtime.WorkerTask:187) [2020-09-14 13:59:08,777] INFO [source-mysql-connector|task-0] Stopping down connector (io.debezium.connector.common.BaseSourceTask:187) [2020-09-14 13:59:08,777] INFO [source-mysql-connector|task-0] Stopping MySQL connector task (io.debezium.connector.mysql.MySqlConnectorTask:458) [2020-09-14 13:59:08,777] INFO [source-mysql-connector|task-0] ChainedReader: Stopping the binlog reader (io.debezium.connector.mysql.ChainedReader:121) [2020-09-14 13:59:08,777] INFO [source-mysql-connector|task-0] Discarding 171 unsent record(s) due to the connector shutting down (io.debezium.connector.mysql.BinlogReader:129) [2020-09-14 13:59:08,779] INFO [source-mysql-connector|task-0] Discarding 0 unsent record(s) due to the connector shutting down (io.debezium.connector.mysql.BinlogReader:129) [2020-09-14 13:59:08,779] INFO [source-mysql-connector|task-0] Stopped reading binlog after 0 events, no new offset was recorded (io.debezium.connector.mysql.BinlogReader:1099) [2020-09-14 13:59:08,779] INFO [source-mysql-connector|task-0] [Producer clientId=source-mysql-connector-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. (org.apache.kafka.clients.producer.KafkaProducer:1182) [2020-09-14 13:59:08,780] INFO [source-mysql-connector|task-0] Connector task finished all work and is now shutdown (io.debezium.connector.mysql.MySqlConnectorTask:496) [2020-09-14 13:59:08,780] INFO [source-mysql-connector|task-0] [Producer clientId=connector-producer-source-mysql-connector-0] Closing the Kafka producer with timeoutMillis = 30000 ms. 
(org.apache.kafka.clients.producer.KafkaProducer:1182) [2020-09-14 13:59:14,405] INFO [source-mysql-connector|task-0|offsets] WorkerSourceTask{id=source-mysql-connector-0} Committing offsets (org.apache.kafka.connect.runtime.WorkerSourceTask:426) [2020-09-14 13:59:14,406] INFO [source-mysql-connector|task-0|offsets] WorkerSourceTask{id=source-mysql-connector-0} flushing 0 outstanding messages for offset commit (org.apache.kafka.connect.runtime.WorkerSourceTask:443) [2020-09-14 13:59:14,855] INFO [source-mysql-connector|task-0] Stopping task source-mysql-connector-0 (org.apache.kafka.connect.runtime.Worker:706) /* I restarted the task again, just to give it a second try*/ [2020-09-14 13:59:14,855] INFO [Worker clientId=connect-1, groupId=connect-source-mysql-avro] Starting task source-mysql-connector-0 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1185) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] Creating task source-mysql-connector-0 (org.apache.kafka.connect.runtime.Worker:421) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = null name = source-mysql-connector tasks.max = 1 transforms = [] value.converter = null (org.apache.kafka.connect.runtime.ConnectorConfig:347) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.mysql.MySqlConnector errors.log.enable = false errors.log.include.messages = false errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = null name = source-mysql-connector tasks.max = 1 transforms = [] value.converter = null (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:347) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] TaskConfig values: task.class = class io.debezium.connector.mysql.MySqlConnectorTask (org.apache.kafka.connect.runtime.TaskConfig:347) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] Instantiated task source-mysql-connector-0 with version 1.2.3.Final of type io.debezium.connector.mysql.MySqlConnectorTask (org.apache.kafka.connect.runtime.Worker:436) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false (org.apache.kafka.connect.json.JsonConverterConfig:347) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task source-mysql-connector-0 using the worker config (org.apache.kafka.connect.runtime.Worker:449) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] AvroConverterConfig values: bearer.auth.token = [hidden] proxy.port = -1 schema.reflection = false auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy schema.registry.url = [http://schema-registry-1:8081, http://schema-registry-2:8081, http://schema-registry-3:8081] basic.auth.user.info = [hidden] proxy.host = use.latest.version = false 
schema.registry.basic.auth.user.info = [hidden] bearer.auth.credentials.source = STATIC_TOKEN key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.connect.avro.AvroConverterConfig:179) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] KafkaAvroSerializerConfig values: bearer.auth.token = [hidden] proxy.port = -1 schema.reflection = false auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy schema.registry.url = [http://schema-registry-1:8081, http://schema-registry-2:8081, http://schema-registry-3:8081] basic.auth.user.info = [hidden] proxy.host = use.latest.version = false schema.registry.basic.auth.user.info = [hidden] bearer.auth.credentials.source = STATIC_TOKEN key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroSerializerConfig:179) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] KafkaAvroDeserializerConfig values: bearer.auth.token = [hidden] proxy.port = -1 schema.reflection = false auto.register.schemas = true max.schemas.per.subject = 1000 basic.auth.credentials.source = URL specific.avro.reader = false value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy schema.registry.url = [http://schema-registry-1:8081, http://schema-registry-2:8081, http://schema-registry-3:8081] basic.auth.user.info = [hidden] proxy.host = use.latest.version = false schema.registry.basic.auth.user.info = [hidden] bearer.auth.credentials.source = STATIC_TOKEN key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroDeserializerConfig:179) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] AvroDataConfig values: connect.meta.data = true enhanced.avro.schema.support = false schemas.cache.config = 1000 (io.confluent.connect.avro.AvroDataConfig:347) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] Set up the value converter class io.confluent.connect.avro.AvroConverter for task source-mysql-connector-0 using the worker config (org.apache.kafka.connect.runtime.Worker:455) [2020-09-14 13:59:14,856] INFO [source-mysql-connector|task-0] Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task source-mysql-connector-0 using the worker config (org.apache.kafka.connect.runtime.Worker:462) [2020-09-14 13:59:14,857] INFO [source-mysql-connector|task-0] Initializing: org.apache.kafka.connect.runtime.TransformationChain{} (org.apache.kafka.connect.runtime.Worker:516) [2020-09-14 13:59:14,857] INFO [source-mysql-connector|task-0] ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] buffer.memory = 33554432 client.dns.lookup = default client.id = connector-producer-source-mysql-connector-0 compression.type = lz4 connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = true interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 5 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 5 max.request.size = 25286400 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 
partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer (org.apache.kafka.clients.producer.ProducerConfig:347) [2020-09-14 13:59:14,857] INFO [source-mysql-connector|task-0] [Producer clientId=connector-producer-source-mysql-connector-0] Instantiated an idempotent producer. (org.apache.kafka.clients.producer.KafkaProducer:527) [2020-09-14 13:59:14,858] INFO [source-mysql-connector|task-0] [Producer clientId=connector-producer-source-mysql-connector-0] Overriding the default retries config to the recommended value of 2147483647 since the idempotent producer is enabled. 
(org.apache.kafka.clients.producer.KafkaProducer:535) [2020-09-14 13:59:14,858] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117) [2020-09-14 13:59:14,858] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118) [2020-09-14 13:59:14,858] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091954858 (org.apache.kafka.common.utils.AppInfoParser:119) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] Starting MySqlConnectorTask with configuration: (io.debezium.connector.common.BaseSourceTask:95) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] connector.class = io.debezium.connector.mysql.MySqlConnector (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] database.user = debezium (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] max.queue.size = 24576 (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] database.server.id = 184002 (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] event.processing​.failure.handling.mode = skip (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] tasks.max = 1 (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] database.history.kafka.bootstrap.servers = kafka-1:9092,kafka-2:9092,kafka-3:9092 (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] database.history.kafka.topic = cdc_dbhistory_mysql.my_database (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] database.server.name = cdc_mysql_avro.my_database (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] table.blacklist = .*_gho$,.*_ghc$ (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,860] INFO [source-mysql-connector|task-0] database.port = 3306 (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,861] INFO [source-mysql-connector|task-0] include.schema.changes = true (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,861] INFO [source-mysql-connector|task-0] inconsistent.schema.handling.mode = skip (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,861] INFO [source-mysql-connector|task-0] decimal.handling.mode = string (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,861] INFO [source-mysql-connector|task-0] task.class = io.debezium.connector.mysql.MySqlConnectorTask (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,861] INFO [source-mysql-connector|task-0] database.hostname = camp-debezium.sqldb.internal (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,861] INFO [source-mysql-connector|task-0] database.password = ******** (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,861] INFO [source-mysql-connector|task-0] name = source-mysql-connector (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,861] INFO [source-mysql-connector|task-0] database.blacklist = infra,test (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,861] INFO 
[source-mysql-connector|task-0] max.batch.size = 6144 (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,861] INFO [source-mysql-connector|task-0] snapshot.mode = schema_only (io.debezium.connector.common.BaseSourceTask:97) [2020-09-14 13:59:14,861] INFO [source-mysql-connector|task-0] [Producer clientId=connector-producer-source-mysql-connector-0] Cluster ID: -Z0j4InSRUa7tzTXbDXWmg (org.apache.kafka.clients.Metadata:277) [2020-09-14 13:59:14,861] INFO [source-mysql-connector|task-0] [Producer clientId=connector-producer-source-mysql-connector-0] ProducerId set to 2818 with epoch 0 (org.apache.kafka.clients.producer.internals.TransactionManager:515) [2020-09-14 13:59:15,369] INFO [source-mysql-connector|task-0] KafkaDatabaseHistory Consumer config: {enable.auto.commit=false, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, group.id=source-mysql-connector-dbhistory, auto.offset.reset=earliest, session.timeout.ms=10000, bootstrap.servers=kafka-1:9092,kafka-2:9092,kafka-3:9092, client.id=source-mysql-connector-dbhistory, key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, fetch.min.bytes=1} (io.debezium.relational.history.KafkaDatabaseHistory:215) [2020-09-14 13:59:15,370] INFO [source-mysql-connector|task-0] KafkaDatabaseHistory Producer config: {bootstrap.servers=kafka-1:9092,kafka-2:9092,kafka-3:9092, value.serializer=org.apache.kafka.common.serialization.StringSerializer, buffer.memory=1048576, retries=1, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=source-mysql-connector-dbhistory, linger.ms=0, batch.size=32768, max.block.ms=10000, acks=1} (io.debezium.relational.history.KafkaDatabaseHistory:216) [2020-09-14 13:59:15,370] INFO [source-mysql-connector|task-0] Requested thread factory for connector MySqlConnector, id = cdc_mysql_avro.my_database named = db-history-config-check (io.debezium.util.Threads:270) [2020-09-14 13:59:15,370] INFO [source-mysql-connector|task-0] ProducerConfig values: acks = 1 batch.size = 32768 bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] buffer.memory = 1048576 client.dns.lookup = default client.id = source-mysql-connector-dbhistory compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 
ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer (org.apache.kafka.clients.producer.ProducerConfig:347) [2020-09-14 13:59:15,371] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117) [2020-09-14 13:59:15,372] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118) [2020-09-14 13:59:15,372] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091955371 (org.apache.kafka.common.utils.AppInfoParser:119) [2020-09-14 13:59:15,372] INFO [source-mysql-connector|task-0] Found existing offset: {ts_sec=1600091087, file=mysql-bin.006722, pos=81350305, gtids=2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666, row=1, server_id=310, event=2} (io.debezium.connector.mysql.MySqlConnectorTask:83) [2020-09-14 13:59:15,374] INFO [source-mysql-connector|task-0] ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] check.crcs = true client.dns.lookup = default client.id = source-mysql-connector-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = source-mysql-connector-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https 
ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:347) [2020-09-14 13:59:15,374] INFO [source-mysql-connector|task-0] [Producer clientId=source-mysql-connector-dbhistory] Cluster ID: -Z0j4InSRUa7tzTXbDXWmg (org.apache.kafka.clients.Metadata:277) [2020-09-14 13:59:15,375] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117) [2020-09-14 13:59:15,375] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118) [2020-09-14 13:59:15,375] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091955375 (org.apache.kafka.common.utils.AppInfoParser:119) [2020-09-14 13:59:15,378] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Cluster ID: -Z0j4InSRUa7tzTXbDXWmg (org.apache.kafka.clients.Metadata:277) [2020-09-14 13:59:15,391] INFO [source-mysql-connector|task-0] ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] check.crcs = true client.dns.lookup = default client.id = source-mysql-connector-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = source-mysql-connector-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null 
ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:347) [2020-09-14 13:59:15,391] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117) [2020-09-14 13:59:15,391] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118) [2020-09-14 13:59:15,391] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091955391 (org.apache.kafka.common.utils.AppInfoParser:119) [2020-09-14 13:59:15,392] INFO [source-mysql-connector|task-0] Creating thread debezium-mysqlconnector-cdc_mysql_avro.my_database-db-history-config-check (io.debezium.util.Threads:287) [2020-09-14 13:59:15,393] INFO [source-mysql-connector|task-0] AdminClientConfig values: bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] client.dns.lookup = default client.id = source-mysql-connector-dbhistory-topic-check connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS (org.apache.kafka.clients.admin.AdminClientConfig:347) [2020-09-14 13:59:15,394] WARN [source-mysql-connector|task-0] The configuration 'value.serializer' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355) [2020-09-14 13:59:15,394] WARN [source-mysql-connector|task-0] The configuration 'batch.size' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355) [2020-09-14 13:59:15,394] WARN [source-mysql-connector|task-0] The configuration 'max.block.ms' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355) [2020-09-14 13:59:15,394] WARN [source-mysql-connector|task-0] The configuration 'acks' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355) [2020-09-14 13:59:15,394] WARN [source-mysql-connector|task-0] The configuration 'buffer.memory' was supplied but isn't a known config. 
(org.apache.kafka.clients.admin.AdminClientConfig:355) [2020-09-14 13:59:15,394] WARN [source-mysql-connector|task-0] The configuration 'key.serializer' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355) [2020-09-14 13:59:15,394] WARN [source-mysql-connector|task-0] The configuration 'linger.ms' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:355) [2020-09-14 13:59:15,394] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117) [2020-09-14 13:59:15,394] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118) [2020-09-14 13:59:15,394] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091955394 (org.apache.kafka.common.utils.AppInfoParser:119) [2020-09-14 13:59:15,397] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Cluster ID: -Z0j4InSRUa7tzTXbDXWmg (org.apache.kafka.clients.Metadata:277) [2020-09-14 13:59:15,402] INFO [source-mysql-connector|task-0] Database history topic 'cdc_dbhistory_mysql.my_database' has correct settings (io.debezium.relational.history.KafkaDatabaseHistory:440) [2020-09-14 13:59:15,403] INFO [source-mysql-connector|task-0] Started database history recovery (io.debezium.relational.history.DatabaseHistoryMetrics:113) [2020-09-14 13:59:15,403] INFO [source-mysql-connector|task-0] ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka-1:9092, kafka-2:9092, kafka-3:9092] check.crcs = true client.dns.lookup = default client.id = source-mysql-connector-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = source-mysql-connector-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 
ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:347) [2020-09-14 13:59:15,404] INFO [source-mysql-connector|task-0] Kafka version: 5.5.1-ccs (org.apache.kafka.common.utils.AppInfoParser:117) [2020-09-14 13:59:15,404] INFO [source-mysql-connector|task-0] Kafka commitId: 5b2445123128cfaf (org.apache.kafka.common.utils.AppInfoParser:118) [2020-09-14 13:59:15,404] INFO [source-mysql-connector|task-0] Kafka startTimeMs: 1600091955404 (org.apache.kafka.common.utils.AppInfoParser:119) [2020-09-14 13:59:15,404] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Subscribed to topic(s): cdc_dbhistory_mysql.my_database (org.apache.kafka.clients.consumer.KafkaConsumer:974) [2020-09-14 13:59:15,407] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Cluster ID: -Z0j4InSRUa7tzTXbDXWmg (org.apache.kafka.clients.Metadata:277) [2020-09-14 13:59:15,412] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Discovered group coordinator kafka-3:9092 (id: 2147483644 rack: null) (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:797) [2020-09-14 13:59:15,412] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] (Re-)joining group (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:552) [2020-09-14 13:59:15,416] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Join group failed with org.apache.kafka.common.errors.MemberIdRequiredException: The group member needs to have a valid member id before actually entering a consumer group (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:455) [2020-09-14 13:59:15,416] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] (Re-)joining group (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:552) [2020-09-14 13:59:18,420] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Finished assignment for group at generation 3: {source-mysql-connector-dbhistory-05d33cd6-4d80-4836-ab36-c573f78211d3=Assignment(partitions=[cdc_dbhistory_mysql.my_database-0])} (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:604) [2020-09-14 13:59:18,422] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Successfully joined group with generation 3 (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:503) [2020-09-14 13:59:18,422] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Adding newly assigned partitions: cdc_dbhistory_mysql.my_database-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:273) [2020-09-14 13:59:18,425] INFO 
[source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Found no committed offset for partition cdc_dbhistory_mysql.my_database-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1299) [2020-09-14 13:59:18,426] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Resetting offset for partition cdc_dbhistory_mysql.my_database-0 to offset 0. (org.apache.kafka.clients.consumer.internals.SubscriptionState:397) [2020-09-14 13:59:18,735] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Revoke previously assigned partitions cdc_dbhistory_mysql.my_database-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:292) [2020-09-14 13:59:18,735] INFO [source-mysql-connector|task-0] [Consumer clientId=source-mysql-connector-dbhistory, groupId=source-mysql-connector-dbhistory] Member source-mysql-connector-dbhistory-05d33cd6-4d80-4836-ab36-c573f78211d3 sending LeaveGroup request to coordinator kafka-3:9092 (id: 2147483644 rack: null) due to the consumer is being closed (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:979) [2020-09-14 13:59:18,751] INFO [source-mysql-connector|task-0] Finished database history recovery of 560 change(s) in 3348 ms (io.debezium.relational.history.DatabaseHistoryMetrics:119) [2020-09-14 13:59:18,758] INFO [source-mysql-connector|task-0] MySQL current GTID set 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017581809,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 does contain the GTID set required by the connector 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.MySqlConnectorTask:524) [2020-09-14 13:59:18,761] INFO [source-mysql-connector|task-0] GTIDs known by the server but not processed yet 2d1993b3-ed88-11e9-a524-025bb057af28:7017107531-7017581809, for replication are available only 2d1993b3-ed88-11e9-a524-025bb057af28:7017107531-7017581809 (io.debezium.connector.mysql.MySqlConnectorTask:529) [2020-09-14 13:59:18,765] INFO [source-mysql-connector|task-0] Requested thread factory for connector MySqlConnector, id = cdc_mysql_avro.my_database named = binlog-client (io.debezium.util.Threads:270) [2020-09-14 13:59:18,770] INFO [source-mysql-connector|task-0] GTID set purged on server: 2d1993b3-ed88-11e9-a524-025bb057af28:1-6890331753,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.BinlogReader:364) [2020-09-14 13:59:18,770] INFO [source-mysql-connector|task-0] Attempting to generate a filtered GTID set (io.debezium.connector.mysql.MySqlTaskContext:299) [2020-09-14 13:59:18,770] INFO [source-mysql-connector|task-0] GTID set from previous recorded offset: 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.MySqlTaskContext:300) [2020-09-14 13:59:18,770] INFO [source-mysql-connector|task-0] GTID set available on server: 
2d1993b3-ed88-11e9-a524-025bb057af28:1-7017581814,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.MySqlTaskContext:307) [2020-09-14 13:59:18,770] INFO [source-mysql-connector|task-0] Using first available positions for new GTID channels (io.debezium.connector.mysql.MySqlTaskContext:313) [2020-09-14 13:59:18,770] INFO [source-mysql-connector|task-0] Relevant GTID set available on server: 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017581814,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.MySqlTaskContext:315) [2020-09-14 13:59:18,770] INFO [source-mysql-connector|task-0] Final merged GTID set to use when connecting to MySQL: 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.MySqlTaskContext:326) [2020-09-14 13:59:18,770] INFO [source-mysql-connector|task-0] Registering binlog reader with GTID set: 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 (io.debezium.connector.mysql.BinlogReader:369) [2020-09-14 13:59:18,770] INFO [source-mysql-connector|task-0] Creating thread debezium-mysqlconnector-cdc_mysql_avro.my_database-binlog-client (io.debezium.util.Threads:287) [2020-09-14 13:59:18,771] INFO [source-mysql-connector|task-0] Creating thread debezium-mysqlconnector-cdc_mysql_avro.my_database-binlog-client (io.debezium.util.Threads:287) [2020-09-14 13:59:18,903] INFO [source-mysql-connector|task-0] Connected to MySQL binlog at camp-debezium.sqldb.internal:3306, starting at GTIDs 2d1993b3-ed88-11e9-a524-025bb057af28:1-7017107530,52250a7b-054f-11ea-bfef-0a17d7a25ffe:1-1233539962,896a92b4-9d6b-11e9-a55d-025bb057af28:1-1349366417,a86fe46d-9d65-11e9-a537-0a17d7a25ffe:1-1674239666 and binlog file 'mysql-bin.006722', pos=81350305, skipping 2 events plus 1 rows (io.debezium.connector.mysql.BinlogReader:1111) [2020-09-14 13:59:18,903] INFO [source-mysql-connector|task-0] Waiting for keepalive thread to start (io.debezium.connector.mysql.BinlogReader:412) [2020-09-14 13:59:18,903] INFO [source-mysql-connector|task-0] Creating thread debezium-mysqlconnector-cdc_mysql_avro.my_database-binlog-client (io.debezium.util.Threads:287) [2020-09-14 13:59:19,004] INFO [source-mysql-connector|task-0] Keepalive thread is running (io.debezium.connector.mysql.BinlogReader:419) [2020-09-14 13:59:19,084] ERROR [source-mysql-connector|task-0] Failed to properly convert data value for 'my_schema.my_table.use_rules' of type JSON for row [15339, 1, 2020-09-14T13:44:17Z, 2020-09-14T13:44:41Z, [71, 72, 44, 32, 65, 99, 99, 114, 97, 44, 32, 50, 48, 50, 48, 87, 51, 56, 44, 32, 49, 48, 120, 50, 48, 37, 32, 40, 51, 71, 72, 83, 41, 32, 101, 120, 99, 108, 32, 76, 105, 116, 101], 2020-09-15T00:00Z, 2020-09-21T23:59:59Z, [103, 104], 137, [103, 104, 115], 1, 10, [71, 72, 52, 51, 85, 84, 81, 56, 50, 68, 67, 51, 75, 52, 81], 604800, [2, 1, 0, 97, 0, 0, 7, 0, 2, 0, 90, 0, 18, 0, 4, 0, 22, 0, 6, 0, 12, 28, 0, 0, 47, 0, 116, 121, 112, 101], 1000000, null, 2, [123, 34, 112, 101, 114, 99, 101, 110, 116, 97, 103, 101, 34, 58, 50, 48, 44, 34, 109, 97, 120, 95, 118, 
97, 108, 117, 101, 34, 58, 51, 125], 3, null, 1, null, 1, 0, 0, 0, 1, 1, 1]: (io.debezium.relational.TableSchemaBuilder:275) org.apache.kafka.connect.errors.ConnectException: Failed to parse and read a JSON value on use_rules JSON NOT NULL: null at io.debezium.connector.mysql.MySqlValueConverters.lambda$convertJson$17(MySqlValueConverters.java:369) at io.debezium.jdbc.JdbcValueConverters.convertValue(JdbcValueConverters.java:1311) at io.debezium.connector.mysql.MySqlValueConverters.convertJson(MySqlValueConverters.java:356) at io.debezium.connector.mysql.MySqlValueConverters.lambda$converter$1(MySqlValueConverters.java:232) at io.debezium.relational.TableSchemaBuilder.lambda$createValueGenerator$5(TableSchemaBuilder.java:265) at io.debezium.relational.TableSchema.valueFromColumnData(TableSchema.java:143) at io.debezium.connector.mysql.RecordMakers$1.update(RecordMakers.java:276) at io.debezium.connector.mysql.RecordMakers$RecordsForTable.update(RecordMakers.java:512) at io.debezium.connector.mysql.BinlogReader.handleUpdate(BinlogReader.java:978) at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:583) at com.github.shyiko.mysql.binlog.BinaryLogClient.notifyEventListeners(BinaryLogClient.java:1130) at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:978) at com.github.shyiko.mysql.binlog.BinaryLogClient.connect(BinaryLogClient.java:581) at com.github.shyiko.mysql.binlog.BinaryLogClient$7.run(BinaryLogClient.java:860) at java.lang.Thread.run(Thread.java:748) Caused by: java.io.EOFException at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.read(ByteArrayInputStream.java:190) at java.io.InputStream.read(InputStream.java:170) at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.fill(ByteArrayInputStream.java:96) at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.read(ByteArrayInputStream.java:89) at com.github.shyiko.mysql.binlog.io.ByteArrayInputStream.readString(ByteArrayInputStream.java:66) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parseObject(JsonBinary.java:378) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:211) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parseArray(JsonBinary.java:530) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:217) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:205) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parse(JsonBinary.java:181) at com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary.parseAsString(JsonBinary.java:168) at io.debezium.connector.mysql.MySqlValueConverters.lambda$convertJson$17(MySqlValueConverters.java:366) ... 14 more [2020-09-14 13:59:19,084] ERROR [source-mysql-connector|task-0] Error during binlog processing. 
Last offset stored = null, binlog reader near position = mysql-bin.006722/81430355 (io.debezium.connector.mysql.BinlogReader:1161) [2020-09-14 13:59:19,084] ERROR [source-mysql-connector|task-0] Failed due to error: Error processing binlog event (io.debezium.connector.mysql.BinlogReader:208) org.apache.kafka.connect.errors.ConnectException: Invalid value: null used for required field: "use_rules", schema type: STRING at io.debezium.connector.mysql.AbstractReader.wrap(AbstractReader.java:230) at io.debezium.connector.mysql.AbstractReader.failed(AbstractReader.java:207) at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:600) at com.github.shyiko.mysql.binlog.BinaryLogClient.notifyEventListeners(BinaryLogClient.java:1130) at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:978) at com.github.shyiko.mysql.binlog.BinaryLogClient.connect(BinaryLogClient.java:581) at com.github.shyiko.mysql.binlog.BinaryLogClient$7.run(BinaryLogClient.java:860) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.kafka.connect.errors.DataException: Invalid value: null used for required field: "use_rules", schema type: STRING at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:220) at org.apache.kafka.connect.data.Struct.validate(Struct.java:233) at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:253) at org.apache.kafka.connect.data.Struct.put(Struct.java:216) at org.apache.kafka.connect.data.Struct.put(Struct.java:203) at io.debezium.data.Envelope.update(Envelope.java:313) at io.debezium.connector.mysql.RecordMakers$1.update(RecordMakers.java:314) at io.debezium.connector.mysql.RecordMakers$RecordsForTable.update(RecordMakers.java:512) at io.debezium.connector.mysql.BinlogReader.handleUpdate(BinlogReader.java:978) at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:583) ... 5 more [2020-09-14 13:59:19,084] INFO [source-mysql-connector|task-0] Error processing binlog event, and propagating to Kafka Connect so it stops this connector. Future binlog events read before connector is shutdown will be ignored. 
(io.debezium.connector.mysql.BinlogReader:605) [2020-09-14 13:59:19,104] INFO [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Source task finished initialization and start (org.apache.kafka.connect.runtime.WorkerSourceTask:216) [2020-09-14 13:59:19,104] INFO [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Committing offsets (org.apache.kafka.connect.runtime.WorkerSourceTask:426) [2020-09-14 13:59:19,104] INFO [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} flushing 0 outstanding messages for offset commit (org.apache.kafka.connect.runtime.WorkerSourceTask:443) [2020-09-14 13:59:19,104] ERROR [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Task threw an uncaught and unrecoverable exception (org.apache.kafka.connect.runtime.WorkerTask:186) org.apache.kafka.connect.errors.ConnectException: Invalid value: null used for required field: "use_rules", schema type: STRING at io.debezium.connector.mysql.AbstractReader.wrap(AbstractReader.java:230) at io.debezium.connector.mysql.AbstractReader.failed(AbstractReader.java:207) at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:600) at com.github.shyiko.mysql.binlog.BinaryLogClient.notifyEventListeners(BinaryLogClient.java:1130) at com.github.shyiko.mysql.binlog.BinaryLogClient.listenForEventPackets(BinaryLogClient.java:978) at com.github.shyiko.mysql.binlog.BinaryLogClient.connect(BinaryLogClient.java:581) at com.github.shyiko.mysql.binlog.BinaryLogClient$7.run(BinaryLogClient.java:860) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.kafka.connect.errors.DataException: Invalid value: null used for required field: "use_rules", schema type: STRING at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:220) at org.apache.kafka.connect.data.Struct.validate(Struct.java:233) at org.apache.kafka.connect.data.ConnectSchema.validateValue(ConnectSchema.java:253) at org.apache.kafka.connect.data.Struct.put(Struct.java:216) at org.apache.kafka.connect.data.Struct.put(Struct.java:203) at io.debezium.data.Envelope.update(Envelope.java:313) at io.debezium.connector.mysql.RecordMakers$1.update(RecordMakers.java:314) at io.debezium.connector.mysql.RecordMakers$RecordsForTable.update(RecordMakers.java:512) at io.debezium.connector.mysql.BinlogReader.handleUpdate(BinlogReader.java:978) at io.debezium.connector.mysql.BinlogReader.handleEvent(BinlogReader.java:583) ... 
5 more [2020-09-14 13:59:19,104] ERROR [source-mysql-connector|task-0] WorkerSourceTask{id=source-mysql-connector-0} Task is being killed and will not recover until manually restarted (org.apache.kafka.connect.runtime.WorkerTask:187) [2020-09-14 13:59:19,104] INFO [source-mysql-connector|task-0] Stopping down connector (io.debezium.connector.common.BaseSourceTask:187) [2020-09-14 13:59:19,104] INFO [source-mysql-connector|task-0] Stopping MySQL connector task (io.debezium.connector.mysql.MySqlConnectorTask:458) [2020-09-14 13:59:19,104] INFO [source-mysql-connector|task-0] ChainedReader: Stopping the binlog reader (io.debezium.connector.mysql.ChainedReader:121) [2020-09-14 13:59:19,104] INFO [source-mysql-connector|task-0] Discarding 171 unsent record(s) due to the connector shutting down (io.debezium.connector.mysql.BinlogReader:129) [2020-09-14 13:59:19,105] INFO [source-mysql-connector|task-0] Stopped reading binlog after 0 events, no new offset was recorded (io.debezium.connector.mysql.BinlogReader:1099) [2020-09-14 13:59:19,106] INFO [source-mysql-connector|task-0] Discarding 0 unsent record(s) due to the connector shutting down (io.debezium.connector.mysql.BinlogReader:129) [2020-09-14 13:59:19,106] INFO [source-mysql-connector|task-0] [Producer clientId=source-mysql-connector-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. (org.apache.kafka.clients.producer.KafkaProducer:1182) [2020-09-14 13:59:19,107] INFO [source-mysql-connector|task-0] Connector task finished all work and is now shutdown (io.debezium.connector.mysql.MySqlConnectorTask:496) [2020-09-14 13:59:19,107] INFO [source-mysql-connector|task-0] [Producer clientId=connector-producer-source-mysql-connector-0] Closing the Kafka producer with timeoutMillis = 30000 ms. (org.apache.kafka.clients.producer.KafkaProducer:1182) /* After that, I deleted the connector and re-created it from scratch so that it would start from the latest position available in the binlog, which led to data loss. */
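
/* For anyone reproducing this: the task restart and the delete/re-create above went through the Kafka Connect REST API. The sketch below is a reconstruction, not the exact commands I ran -- the Connect host (localhost:8083) is an assumption, and the connector config is abridged from the settings BaseSourceTask printed at startup. */

```python
# Sketch only: REST calls corresponding to the manual steps described above.
# Assumptions: Connect REST endpoint on localhost:8083; config abridged from
# the values visible in the task logs.
import json
import urllib.request

CONNECT = "http://localhost:8083"   # assumed Connect REST endpoint
NAME = "source-mysql-connector"

def call(path, method, payload=None):
    """Small helper for calling the Kafka Connect REST API."""
    data = json.dumps(payload).encode() if payload is not None else None
    req = urllib.request.Request(
        CONNECT + path,
        data=data,
        headers={"Content-Type": "application/json"},
        method=method,
    )
    return urllib.request.urlopen(req)

# The "second try": restart the failed task in place.
call(f"/connectors/{NAME}/tasks/0/restart", "POST")

# The eventual workaround: drop the connector and register it again.
# Note: deleting a connector does NOT delete its committed source offsets,
# so starting from the latest binlog position means abandoning the old
# offset (the exact offset-reset step is not visible in these logs).
call(f"/connectors/{NAME}", "DELETE")

call("/connectors", "POST", {
    "name": NAME,
    "config": {
        "connector.class": "io.debezium.connector.mysql.MySqlConnector",
        "database.hostname": "camp-debezium.sqldb.internal",
        "database.port": "3306",
        "database.user": "debezium",
        "database.password": "********",
        "database.server.id": "184002",
        "database.server.name": "cdc_mysql_avro.my_database",
        "database.history.kafka.bootstrap.servers": "kafka-1:9092,kafka-2:9092,kafka-3:9092",
        "database.history.kafka.topic": "cdc_dbhistory_mysql.my_database",
        "snapshot.mode": "schema_only",
        "event.processing.failure.handling.mode": "skip",
        # ... remaining settings as logged above ...
    },
})
```

/* With snapshot.mode = schema_only the re-created connector only captures the schema and then reads the binlog from its starting point, so everything between the stuck offset (mysql-bin.006722/81350305) and the new starting position was never produced to Kafka -- that is the data loss mentioned above. */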