Plugins are loaded from /kafka/connect Using the following environment variables: GROUP_ID=sysint-kafka-connect CONFIG_STORAGE_TOPIC=_sysint_connect_configs OFFSET_STORAGE_TOPIC=_sysint_connect_offsets STATUS_STORAGE_TOPIC=_sysint_connect_status BOOTSTRAP_SERVERS=kafka:29093 REST_HOST_NAME=172.19.0.6 REST_PORT=8083 ADVERTISED_HOST_NAME=172.19.0.6 ADVERTISED_PORT=8083 KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter INTERNAL_KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter INTERNAL_VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter OFFSET_FLUSH_INTERVAL_MS=5000 OFFSET_FLUSH_TIMEOUT_MS=5000 SHUTDOWN_TIMEOUT=10000 --- Setting property from CONNECT_INTERNAL_VALUE_CONVERTER: internal.value.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_VALUE_CONVERTER: value.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_REST_ADVERTISED_HOST_NAME: rest.advertised.host.name=172.19.0.6 --- Setting property from CONNECT_OFFSET_FLUSH_INTERVAL_MS: offset.flush.interval.ms=5000 --- Setting property from CONNECT_GROUP_ID: group.id=sysint-kafka-connect --- Setting property from CONNECT_BOOTSTRAP_SERVERS: bootstrap.servers=kafka:29093 --- Setting property from CONNECT_KEY_CONVERTER: key.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS: task.shutdown.graceful.timeout.ms=10000 --- Setting property from CONNECT_REST_HOST_NAME: rest.host.name=172.19.0.6 --- Setting property from CONNECT_PLUGIN_PATH: plugin.path=/kafka/connect --- Setting property from CONNECT_REST_PORT: rest.port=8083 --- Setting property from CONNECT_OFFSET_FLUSH_TIMEOUT_MS: offset.flush.timeout.ms=5000 --- Setting property from CONNECT_STATUS_STORAGE_TOPIC: status.storage.topic=_sysint_connect_status --- Setting property from CONNECT_INTERNAL_KEY_CONVERTER: 
internal.key.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_CONFIG_STORAGE_TOPIC: config.storage.topic=_sysint_connect_configs --- Setting property from CONNECT_REST_ADVERTISED_PORT: rest.advertised.port=8083 --- Setting property from CONNECT_OFFSET_STORAGE_TOPIC: offset.storage.topic=_sysint_connect_offsets 2020-10-16 09:19:46,840 INFO || WorkerInfo values: jvm.args = -Xms256M, -Xmx2G, -XX:+UseG1GC, -XX:MaxGCPauseMillis=20, -XX:InitiatingHeapOccupancyPercent=35, -XX:+ExplicitGCInvokesConcurrent, -XX:MaxInlineLevel=15, -Djava.awt.headless=true, -Dcom.sun.management.jmxremote, -Dcom.sun.management.jmxremote.authenticate=false, -Dcom.sun.management.jmxremote.ssl=false, -Dcom.sun.management.jmxremote.port=1976, -Dkafka.logs.dir=/kafka/bin/../logs, -Dlog4j.configuration=file:/kafka/config/log4j.properties, -javaagent:/kafka/jmx_prometheus_javaagent.jar=8080:/kafka/config.yml jvm.spec = Oracle Corporation, OpenJDK 64-Bit Server VM, 11.0.8, 11.0.8+10-LTS jvm.classpath = 
/kafka/bin/../libs/activation-1.1.1.jar:/kafka/bin/../libs/aopalliance-repackaged-2.5.0.jar:/kafka/bin/../libs/argparse4j-0.7.0.jar:/kafka/bin/../libs/audience-annotations-0.5.0.jar:/kafka/bin/../libs/avro-1.9.2.jar:/kafka/bin/../libs/common-config-5.5.1.jar:/kafka/bin/../libs/common-utils-5.5.1.jar:/kafka/bin/../libs/commons-cli-1.4.jar:/kafka/bin/../libs/commons-lang3-3.8.1.jar:/kafka/bin/../libs/connect-api-2.6.0.jar:/kafka/bin/../libs/connect-basic-auth-extension-2.6.0.jar:/kafka/bin/../libs/connect-file-2.6.0.jar:/kafka/bin/../libs/connect-json-2.6.0.jar:/kafka/bin/../libs/connect-mirror-2.6.0.jar:/kafka/bin/../libs/connect-mirror-client-2.6.0.jar:/kafka/bin/../libs/connect-runtime-2.6.0.jar:/kafka/bin/../libs/connect-transforms-2.6.0.jar:/kafka/bin/../libs/hk2-api-2.5.0.jar:/kafka/bin/../libs/hk2-locator-2.5.0.jar:/kafka/bin/../libs/hk2-utils-2.5.0.jar:/kafka/bin/../libs/jackson-annotations-2.10.2.jar:/kafka/bin/../libs/jackson-core-2.10.2.jar:/kafka/bin/../libs/jackson-databind-2.10.2.jar:/kafka/bin/../libs/jackson-dataformat-csv-2.10.2.jar:/kafka/bin/../libs/jackson-datatype-jdk8-2.10.2.jar:/kafka/bin/../libs/jackson-jaxrs-base-2.10.2.jar:/kafka/bin/../libs/jackson-jaxrs-json-provider-2.10.2.jar:/kafka/bin/../libs/jackson-module-jaxb-annotations-2.10.2.jar:/kafka/bin/../libs/jackson-module-paranamer-2.10.2.jar:/kafka/bin/../libs/jackson-module-scala_2.12-2.10.2.jar:/kafka/bin/../libs/jakarta.activation-api-1.2.1.jar:/kafka/bin/../libs/jakarta.annotation-api-1.3.4.jar:/kafka/bin/../libs/jakarta.inject-2.5.0.jar:/kafka/bin/../libs/jakarta.ws.rs-api-2.1.5.jar:/kafka/bin/../libs/jakarta.xml.bind-api-2.3.2.jar:/kafka/bin/../libs/javassist-3.22.0-CR2.jar:/kafka/bin/../libs/javassist-3.26.0-GA.jar:/kafka/bin/../libs/javax.servlet-api-3.1.0.jar:/kafka/bin/../libs/javax.ws.rs-api-2.1.1.jar:/kafka/bin/../libs/jaxb-api-2.3.0.jar:/kafka/bin/../libs/jersey-client-2.28.jar:/kafka/bin/../libs/jersey-common-2.28.jar:/kafka/bin/../libs/jersey-container-servlet-2.28.jar:/kafk
a/bin/../libs/jersey-container-servlet-core-2.28.jar:/kafka/bin/../libs/jersey-hk2-2.28.jar:/kafka/bin/../libs/jersey-media-jaxb-2.28.jar:/kafka/bin/../libs/jersey-server-2.28.jar:/kafka/bin/../libs/jetty-client-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-continuation-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-http-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-io-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-security-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-server-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-servlet-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-servlets-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-util-9.4.24.v20191120.jar:/kafka/bin/../libs/jopt-simple-5.0.4.jar:/kafka/bin/../libs/kafka-avro-serializer-5.5.1.jar:/kafka/bin/../libs/kafka-clients-2.6.0.jar:/kafka/bin/../libs/kafka-connect-avro-converter-5.5.1.jar:/kafka/bin/../libs/kafka-connect-avro-data-5.5.1.jar:/kafka/bin/../libs/kafka-log4j-appender-2.6.0.jar:/kafka/bin/../libs/kafka-schema-registry-client-5.5.1.jar:/kafka/bin/../libs/kafka-schema-serializer-5.5.1.jar:/kafka/bin/../libs/kafka-streams-2.6.0.jar:/kafka/bin/../libs/kafka-streams-examples-2.6.0.jar:/kafka/bin/../libs/kafka-streams-scala_2.12-2.6.0.jar:/kafka/bin/../libs/kafka-streams-test-utils-2.6.0.jar:/kafka/bin/../libs/kafka-tools-2.6.0.jar:/kafka/bin/../libs/kafka_2.12-2.6.0.jar:/kafka/bin/../libs/log4j-1.2.17.jar:/kafka/bin/../libs/lz4-java-1.7.1.jar:/kafka/bin/../libs/maven-artifact-3.6.3.jar:/kafka/bin/../libs/metrics-core-2.2.0.jar:/kafka/bin/../libs/netty-buffer-4.1.50.Final.jar:/kafka/bin/../libs/netty-codec-4.1.50.Final.jar:/kafka/bin/../libs/netty-common-4.1.50.Final.jar:/kafka/bin/../libs/netty-handler-4.1.50.Final.jar:/kafka/bin/../libs/netty-resolver-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-native-epoll-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-native-unix-common-4.1.50.Final.jar:/kafka/bin/../libs/osgi-resource-locator-1.0.1.jar:/ka
fka/bin/../libs/paranamer-2.8.jar:/kafka/bin/../libs/plexus-utils-3.2.1.jar:/kafka/bin/../libs/reflections-0.9.12.jar:/kafka/bin/../libs/rocksdbjni-5.18.4.jar:/kafka/bin/../libs/scala-collection-compat_2.12-2.1.6.jar:/kafka/bin/../libs/scala-java8-compat_2.12-0.9.1.jar:/kafka/bin/../libs/scala-library-2.12.11.jar:/kafka/bin/../libs/scala-logging_2.12-3.9.2.jar:/kafka/bin/../libs/scala-reflect-2.12.11.jar:/kafka/bin/../libs/slf4j-api-1.7.30.jar:/kafka/bin/../libs/slf4j-log4j12-1.7.30.jar:/kafka/bin/../libs/snappy-java-1.1.7.3.jar:/kafka/bin/../libs/validation-api-2.0.1.Final.jar:/kafka/bin/../libs/zookeeper-3.5.8.jar:/kafka/bin/../libs/zookeeper-jute-3.5.8.jar:/kafka/bin/../libs/zstd-jni-1.4.4-7.jar os.spec = Linux, amd64, 4.19.76-linuxkit os.vcpus = 4 [org.apache.kafka.connect.runtime.WorkerInfo] 2020-10-16 09:19:46,866 INFO || Scanning for plugin classes. This might take a moment ... [org.apache.kafka.connect.cli.ConnectDistributed] 2020-10-16 09:19:46,896 INFO || Loading plugin from: /kafka/connect/kafka-connect-insert-uuid [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:47,048 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/kafka-connect-insert-uuid/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:47,048 INFO || Added plugin 'com.github.cjmatta.kafka.connect.smt.InsertUuid$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:47,054 INFO || Added plugin 'com.github.cjmatta.kafka.connect.smt.InsertUuid$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:47,054 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:47,054 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:47,055 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:47,056 INFO || Loading plugin from: /kafka/connect/debezium-connector-mongodb [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,049 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-mongodb/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,050 INFO || Added plugin 'io.debezium.connector.mongodb.MongoDbConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,050 INFO || Added plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,051 INFO || Added plugin 'io.debezium.converters.CloudEventsConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,051 INFO || Added plugin 'io.debezium.transforms.ExtractNewRecordState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,051 INFO || Added plugin 'io.debezium.transforms.outbox.EventRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,051 INFO || Added plugin 'io.debezium.connector.mongodb.transforms.ExtractNewDocumentState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,052 INFO || Added plugin 'io.debezium.transforms.ByLogicalTableRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,052 INFO || Loading plugin from: /kafka/connect/debezium-connector-mysql [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,781 INFO || Registered loader: 
PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-mysql/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,781 INFO || Added plugin 'io.debezium.connector.mysql.MySqlConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,800 INFO || Loading plugin from: /kafka/connect/debezium-connector-sqlserver [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,984 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-sqlserver/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:48,984 INFO || Added plugin 'io.debezium.connector.sqlserver.SqlServerConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:49,022 INFO || Loading plugin from: /kafka/connect/debezium-connector-postgres [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:49,523 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-postgres/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:49,523 INFO || Added plugin 'io.debezium.connector.postgresql.PostgresConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,257 INFO || Registered loader: jdk.internal.loader.ClassLoaders$AppClassLoader@3d4eac69 [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,262 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,262 INFO || Added plugin 'org.apache.kafka.connect.file.FileStreamSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,262 INFO || Added plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,262 INFO || Added plugin 'org.apache.kafka.connect.tools.MockSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,262 INFO || Added plugin 'org.apache.kafka.connect.file.FileStreamSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,262 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,262 INFO || Added plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,262 INFO || Added plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,262 INFO || Added plugin 'org.apache.kafka.connect.tools.MockSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'org.apache.kafka.connect.tools.MockConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'org.apache.kafka.connect.converters.DoubleConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'io.confluent.connect.avro.AvroConverter' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,263 INFO || Added plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.Filter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.InsertField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.TimestampConverter$Key' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.MaskField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.TimestampRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.RegexRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.HoistField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.ValueToKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.MaskField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.Cast$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,264 INFO || Added plugin 'org.apache.kafka.connect.transforms.Cast$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,270 INFO || Added plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,270 INFO || Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,270 INFO || Added plugin 'org.apache.kafka.connect.transforms.Flatten$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,270 INFO || Added plugin 'org.apache.kafka.connect.transforms.InsertField$Key' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,270 INFO || Added plugin 'org.apache.kafka.connect.transforms.Flatten$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,270 INFO || Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,270 INFO || Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,270 INFO || Added plugin 'org.apache.kafka.connect.transforms.TimestampConverter$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,270 INFO || Added plugin 'org.apache.kafka.connect.transforms.HoistField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,271 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,271 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,271 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,271 INFO || Added plugin 'org.apache.kafka.common.config.provider.FileConfigProvider' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,271 INFO || Added plugin 'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,274 INFO || Added aliases 'MongoDbConnector' and 'MongoDb' to plugin 'io.debezium.connector.mongodb.MongoDbConnector' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,274 INFO || Added aliases 'MySqlConnector' and 'MySql' to plugin 'io.debezium.connector.mysql.MySqlConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,275 INFO || Added aliases 'PostgresConnector' and 'Postgres' to plugin 'io.debezium.connector.postgresql.PostgresConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,276 INFO || Added aliases 'SqlServerConnector' and 'SqlServer' to plugin 'io.debezium.connector.sqlserver.SqlServerConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,276 INFO || Added aliases 'FileStreamSinkConnector' and 'FileStreamSink' to plugin 'org.apache.kafka.connect.file.FileStreamSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,276 INFO || Added aliases 'FileStreamSourceConnector' and 'FileStreamSource' to plugin 'org.apache.kafka.connect.file.FileStreamSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,276 INFO || Added aliases 'MirrorCheckpointConnector' and 'MirrorCheckpoint' to plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,277 INFO || Added aliases 'MirrorHeartbeatConnector' and 'MirrorHeartbeat' to plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,278 INFO || Added aliases 'MirrorSourceConnector' and 'MirrorSource' to plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,281 INFO || Added aliases 'MockConnector' and 'Mock' to plugin 'org.apache.kafka.connect.tools.MockConnector' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,281 INFO || Added aliases 'MockSinkConnector' and 'MockSink' to plugin 'org.apache.kafka.connect.tools.MockSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,281 INFO || Added aliases 'MockSourceConnector' and 'MockSource' to plugin 'org.apache.kafka.connect.tools.MockSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,281 INFO || Added aliases 'SchemaSourceConnector' and 'SchemaSource' to plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,284 INFO || Added aliases 'VerifiableSinkConnector' and 'VerifiableSink' to plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,284 INFO || Added aliases 'VerifiableSourceConnector' and 'VerifiableSource' to plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,284 INFO || Added aliases 'AvroConverter' and 'Avro' to plugin 'io.confluent.connect.avro.AvroConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,284 INFO || Added aliases 'ByteBufferConverter' and 'ByteBuffer' to plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,285 INFO || Added aliases 'CloudEventsConverter' and 'CloudEvents' to plugin 'io.debezium.converters.CloudEventsConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,285 INFO || Added aliases 'ByteArrayConverter' and 'ByteArray' to plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 
2020-10-16 09:19:52,286 INFO || Added aliases 'DoubleConverter' and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,286 INFO || Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,286 INFO || Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,286 INFO || Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,286 INFO || Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,287 INFO || Added aliases 'JsonConverter' and 'Json' to plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,287 INFO || Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,287 INFO || Added aliases 'ByteBufferConverter' and 'ByteBuffer' to plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,287 INFO || Added aliases 'ByteArrayConverter' and 'ByteArray' to plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,288 INFO || Added aliases 'DoubleConverter' and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,288 INFO || Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,288 INFO || Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,288 INFO || Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,288 INFO || Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,288 INFO || Added aliases 'JsonConverter' and 'Json' to plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,288 INFO || Added alias 'SimpleHeaderConverter' to plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,289 INFO || Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,289 INFO || Added alias 'ExtractNewDocumentState' to plugin 'io.debezium.connector.mongodb.transforms.ExtractNewDocumentState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,289 INFO || Added alias 'ByLogicalTableRouter' to plugin 'io.debezium.transforms.ByLogicalTableRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,289 INFO || Added alias 'ExtractNewRecordState' to plugin 
'io.debezium.transforms.ExtractNewRecordState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,289 INFO || Added alias 'EventRouter' to plugin 'io.debezium.transforms.outbox.EventRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,289 INFO || Added aliases 'PredicatedTransformation' and 'Predicated' to plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,289 INFO || Added alias 'Filter' to plugin 'org.apache.kafka.connect.transforms.Filter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,290 INFO || Added alias 'RegexRouter' to plugin 'org.apache.kafka.connect.transforms.RegexRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,290 INFO || Added alias 'TimestampRouter' to plugin 'org.apache.kafka.connect.transforms.TimestampRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,290 INFO || Added alias 'ValueToKey' to plugin 'org.apache.kafka.connect.transforms.ValueToKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,290 INFO || Added alias 'HasHeaderKey' to plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,290 INFO || Added alias 'RecordIsTombstone' to plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,290 INFO || Added alias 'TopicNameMatches' to plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,291 INFO || Added alias 'BasicAuthSecurityRestExtension' to plugin 
'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,291 INFO || Added aliases 'AllConnectorClientConfigOverridePolicy' and 'All' to plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,291 INFO || Added aliases 'NoneConnectorClientConfigOverridePolicy' and 'None' to plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,291 INFO || Added aliases 'PrincipalConnectorClientConfigOverridePolicy' and 'Principal' to plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:19:52,427 INFO || DistributedConfig values: access.control.allow.methods = access.control.allow.origin = admin.listeners = null bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = config.providers = [] config.storage.replication.factor = 1 config.storage.topic = _sysint_connect_configs connect.protocol = sessioned connections.max.idle.ms = 540000 connector.client.config.override.policy = None group.id = sysint-kafka-connect header.converter = class org.apache.kafka.connect.storage.SimpleHeaderConverter heartbeat.interval.ms = 3000 inter.worker.key.generation.algorithm = HmacSHA256 inter.worker.key.size = null inter.worker.key.ttl.ms = 3600000 inter.worker.signature.algorithm = HmacSHA256 inter.worker.verification.algorithms = [HmacSHA256] internal.key.converter = class org.apache.kafka.connect.json.JsonConverter internal.value.converter = class org.apache.kafka.connect.json.JsonConverter key.converter = class org.apache.kafka.connect.json.JsonConverter listeners = null metadata.max.age.ms = 300000 
metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 offset.flush.interval.ms = 5000 offset.flush.timeout.ms = 5000 offset.storage.partitions = 25 offset.storage.replication.factor = 1 offset.storage.topic = _sysint_connect_offsets plugin.path = [/kafka/connect] rebalance.timeout.ms = 60000 receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 40000 response.http.headers.config = rest.advertised.host.name = 172.19.0.6 rest.advertised.listener = null rest.advertised.port = 8083 rest.extension.classes = [] rest.host.name = 172.19.0.6 rest.port = 8083 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI scheduled.rebalance.max.delay.ms = 300000 security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.client.auth = none ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS status.storage.partitions = 5 status.storage.replication.factor = 1 status.storage.topic = _sysint_connect_status 
task.shutdown.graceful.timeout.ms = 10000 topic.creation.enable = true topic.tracking.allow.reset = true topic.tracking.enable = true value.converter = class org.apache.kafka.connect.json.JsonConverter worker.sync.timeout.ms = 3000 worker.unsync.backoff.ms = 300000 [org.apache.kafka.connect.runtime.distributed.DistributedConfig] 2020-10-16 09:19:52,436 INFO || Worker configuration property 'internal.key.converter' is deprecated and may be removed in an upcoming release. The specified value 'org.apache.kafka.connect.json.JsonConverter' matches the default, so this property can be safely removed from the worker configuration. [org.apache.kafka.connect.runtime.WorkerConfig] 2020-10-16 09:19:52,442 INFO || Worker configuration property 'internal.value.converter' is deprecated and may be removed in an upcoming release. The specified value 'org.apache.kafka.connect.json.JsonConverter' matches the default, so this property can be safely removed from the worker configuration. [org.apache.kafka.connect.runtime.WorkerConfig] 2020-10-16 09:19:52,446 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:52,466 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null 
sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,605 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,610 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,610 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,611 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,611 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,611 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,611 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,611 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,611 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,611 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,611 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,611 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,611 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,612 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,612 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,613 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,613 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,616 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,617 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,617 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,617 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:52,617 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:52,617 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:52,618 INFO || Kafka startTimeMs: 1602839992617 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,032 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,071 INFO || Logging initialized @7371ms to org.eclipse.jetty.util.log.Slf4jLog [org.eclipse.jetty.util.log] 2020-10-16 09:19:53,148 INFO || Added connector for http://172.19.0.6:8083 [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:19:53,152 INFO || Initializing REST server [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:19:53,160 INFO || jetty-9.4.24.v20191120; built: 2019-11-20T21:37:49.771Z; git: 363d5f2df3a8a28de40604320230664b9c793c16; jvm 11.0.8+10-LTS [org.eclipse.jetty.server.Server] 2020-10-16 09:19:53,186 INFO || Started http_172.19.0.68083@3f93e4a8{HTTP/1.1,[http/1.1]}{172.19.0.6:8083} [org.eclipse.jetty.server.AbstractConnector] 2020-10-16 09:19:53,190 INFO || Started @7490ms [org.eclipse.jetty.server.Server] 2020-10-16 09:19:53,222 INFO || Advertised URI: http://172.19.0.6:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:19:53,222 INFO || REST server listening at http://172.19.0.6:8083/, advertising URL http://172.19.0.6:8083/ 
[org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:19:53,222 INFO || Advertised URI: http://172.19.0.6:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:19:53,222 INFO || REST admin endpoints at http://172.19.0.6:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:19:53,223 INFO || Advertised URI: http://172.19.0.6:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:19:53,223 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,227 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null 
ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,230 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,230 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,230 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,231 WARN || The configuration 'key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,235 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,235 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,235 INFO || Kafka startTimeMs: 1602839993232 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,271 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,287 INFO || Setting up None Policy for ConnectorClientConfigOverride. This will disallow any client configuration to be overridden [org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy] 2020-10-16 09:19:53,292 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,293 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, 
TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,295 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,295 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,295 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,295 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,296 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,297 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,297 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,297 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,297 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,297 WARN || The configuration 'key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,297 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,297 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,297 INFO || Kafka startTimeMs: 1602839993297 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,321 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,333 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,333 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,336 INFO || Kafka startTimeMs: 1602839993332 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,541 INFO || JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:19:53,545 INFO || JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:19:53,546 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,551 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 
sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,572 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,572 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,572 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,577 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,578 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,579 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,580 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,580 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,580 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,582 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,583 INFO || Kafka startTimeMs: 1602839993580 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,621 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,632 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,632 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,635 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,636 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,636 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,636 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,636 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,636 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,636 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,636 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,636 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,637 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,637 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,637 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,637 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,637 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,637 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,638 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,638 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,638 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,638 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,639 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,639 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,640 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,640 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,640 INFO || Kafka startTimeMs: 1602839993640 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,668 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,677 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,677 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,680 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,680 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,680 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,680 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,680 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,680 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,681 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,681 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,681 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,681 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,681 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,681 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,681 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,725 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,725 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,725 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,725 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,725 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,725 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,725 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,725 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,725 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,725 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,725 INFO || Kafka startTimeMs: 1602839993725 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,763 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,793 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,794 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,805 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,805 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,805 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,805 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,805 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,806 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,811 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,811 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,812 INFO || Kafka startTimeMs: 1602839993811 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,844 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:19:53,924 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,926 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,926 INFO || Kafka startTimeMs: 1602839993919 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,946 INFO || Kafka Connect distributed worker initialization took 7079ms [org.apache.kafka.connect.cli.ConnectDistributed] 2020-10-16 09:19:53,951 INFO || Kafka Connect starting [org.apache.kafka.connect.runtime.Connect] 2020-10-16 09:19:53,959 INFO || Initializing REST resources [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:19:53,960 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder starting [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 
09:19:53,963 INFO || Worker starting [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:19:53,963 INFO || Starting KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-16 09:19:53,964 INFO || Starting KafkaBasedLog with topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:19:53,965 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 
2020-10-16 09:19:53,978 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,980 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,980 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,980 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,980 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,980 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,981 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,981 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,981 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,982 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,983 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,983 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,983 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,983 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,984 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,984 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,984 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,984 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,984 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,984 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,984 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,986 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,986 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:53,986 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,986 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:53,986 INFO || Kafka startTimeMs: 1602839993986 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:54,070 INFO || Adding admin resources to main listener [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:19:54,317 INFO || DefaultSessionIdManager workerName=node0 [org.eclipse.jetty.server.session] 2020-10-16 09:19:54,318 INFO || No SessionScavenger set, using defaults [org.eclipse.jetty.server.session] 2020-10-16 09:19:54,319 INFO || node0 Scavenging every 660000ms [org.eclipse.jetty.server.session] Oct 16, 2020 9:19:55 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource will be ignored. Oct 16, 2020 9:19:55 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.RootResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.RootResource will be ignored. Oct 16, 2020 9:19:55 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. 
Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource will be ignored. Oct 16, 2020 9:19:55 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource will be ignored. Oct 16, 2020 9:19:55 AM org.glassfish.jersey.internal.Errors logErrors WARNING: The following warnings have been detected: WARNING: The (sub)resource method listLoggers in org.apache.kafka.connect.runtime.rest.resources.LoggingResource contains empty path annotation. WARNING: The (sub)resource method createConnector in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation. WARNING: The (sub)resource method listConnectors in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation. WARNING: The (sub)resource method listConnectorPlugins in org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource contains empty path annotation. WARNING: The (sub)resource method serverInfo in org.apache.kafka.connect.runtime.rest.resources.RootResource contains empty path annotation. 
2020-10-16 09:19:55,519 INFO || Started o.e.j.s.ServletContextHandler@1c26273d{/,null,AVAILABLE} [org.eclipse.jetty.server.handler.ContextHandler] 2020-10-16 09:19:55,519 INFO || REST resources initialized; server is started and ready to handle requests [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:19:55,519 INFO || Kafka Connect started [org.apache.kafka.connect.runtime.Connect] 2020-10-16 09:19:55,598 INFO || Created topic (name=_sysint_connect_offsets, numPartitions=25, replicationFactor=1, replicasAssignments=null, configs={cleanup.policy=compact}) on brokers at kafka:29093 [org.apache.kafka.connect.util.TopicAdmin] 2020-10-16 09:19:55,611 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = producer-1 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null 
sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,646 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,646 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,646 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,646 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,646 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,646 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,646 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,646 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'rest.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:55,647 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:55,647 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:55,648 INFO || Kafka startTimeMs: 1602839995647 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:55,657 INFO || [Producer clientId=producer-1] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:19:55,672 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = consumer-sysint-kafka-connect-1 client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = sysint-kafka-connect group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true 
internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,724 WARN || The configuration 'rest.advertised.port' was supplied but isn't a 
known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,725 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,725 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,725 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,725 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,725 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,725 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,725 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,725 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,725 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,725 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,726 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,726 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,726 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,726 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,726 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,726 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,726 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,726 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,726 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,726 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,726 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:55,727 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:55,727 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:55,727 INFO || Kafka startTimeMs: 1602839995726 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:55,741 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:19:55,791 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_offsets-0, _sysint_connect_offsets-5, _sysint_connect_offsets-10, _sysint_connect_offsets-20, _sysint_connect_offsets-15, _sysint_connect_offsets-9, _sysint_connect_offsets-11, _sysint_connect_offsets-4, _sysint_connect_offsets-16, _sysint_connect_offsets-17, _sysint_connect_offsets-3, _sysint_connect_offsets-24, _sysint_connect_offsets-23, _sysint_connect_offsets-13, _sysint_connect_offsets-18, _sysint_connect_offsets-22, _sysint_connect_offsets-2, _sysint_connect_offsets-8, _sysint_connect_offsets-12, _sysint_connect_offsets-19, _sysint_connect_offsets-14, _sysint_connect_offsets-1, _sysint_connect_offsets-6, _sysint_connect_offsets-7, _sysint_connect_offsets-21 [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-16 09:19:55,794 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,794 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-5 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,794 INFO || [Consumer 
clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-10 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,794 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-20 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,794 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-15 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,794 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-9 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,794 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-11 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,794 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-4 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-16 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-17 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, 
groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-3 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-24 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-23 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-13 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-18 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-22 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-2 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-8 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to 
EARLIEST offset of partition _sysint_connect_offsets-12 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-19 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-14 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-1 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-6 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-7 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,795 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-21 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,893 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-24 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,896 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-18 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,897 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-16 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-22 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-20 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-9 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-7 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-13 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-11 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-1 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-5 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-3 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-23 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-17 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-15 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-21 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-19 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,901 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-10 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,902 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-8 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,902 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-14 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,902 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-12 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,902 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-2 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,902 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,902 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-6 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,902 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-4 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:55,903 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:19:55,903 INFO || Started KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:19:55,903 INFO || Finished reading offsets topic and starting KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-16 09:19:55,909 INFO || Worker started [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:19:55,911 INFO || Starting KafkaBasedLog with topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:19:55,911 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = 
null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,913 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,914 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,914 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,914 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,914 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,914 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,914 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:55,914 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:55,915 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:55,915 INFO || Kafka startTimeMs: 1602839995914 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,136 INFO || Created topic (name=_sysint_connect_status, numPartitions=5, replicationFactor=1, replicasAssignments=null, configs={cleanup.policy=compact}) on brokers at kafka:29093 [org.apache.kafka.connect.util.TopicAdmin] 2020-10-16 09:19:56,143 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = producer-2 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 0 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,176 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,178 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,179 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,179 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,179 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,179 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,182 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,182 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,183 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,183 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,183 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,183 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,183 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,183 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,183 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,183 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,183 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,189 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,189 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,189 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,189 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,189 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,189 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,189 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,191 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,191 INFO || Kafka startTimeMs: 1602839996189 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,192 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = consumer-sysint-kafka-connect-2 client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = sysint-kafka-connect group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null 
sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,199 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,199 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,199 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,199 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,199 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,199 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,199 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,199 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,200 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,200 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,200 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,200 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,200 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,200 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,200 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,200 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,200 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,200 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,200 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,201 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,201 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,201 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,201 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,201 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,201 INFO || Kafka startTimeMs: 1602839996201 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,215 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:19:56,216 INFO || [Producer clientId=producer-2] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:19:56,253 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_status-0, _sysint_connect_status-4, _sysint_connect_status-1, _sysint_connect_status-2, _sysint_connect_status-3 [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-16 09:19:56,254 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,254 INFO || [Consumer 
clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-4 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,254 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-1 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,254 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-2 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,254 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-3 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,273 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-2 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,273 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-1 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,273 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,273 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-4 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,273 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-3 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,273 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:19:56,273 INFO || Started KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:19:56,277 INFO || Starting KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-16 09:19:56,277 INFO || Starting KafkaBasedLog with topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:19:56,278 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null 
ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,302 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,302 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,302 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,302 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,302 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,302 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,303 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,303 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,303 WARN || The configuration 'key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,303 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:19:56,306 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,306 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,307 INFO || Kafka startTimeMs: 1602839996306 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,403 INFO || Created topic (name=_sysint_connect_configs, numPartitions=1, replicationFactor=1, replicasAssignments=null, configs={cleanup.policy=compact}) on brokers at kafka:29093 [org.apache.kafka.connect.util.TopicAdmin] 2020-10-16 09:19:56,404 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = producer-3 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null 
sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,407 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,407 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,407 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,407 WARN || The configuration 'plugin.path' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,407 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,407 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,407 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,407 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,407 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,407 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:19:56,408 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,408 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,408 INFO || Kafka startTimeMs: 1602839996408 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,409 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = consumer-sysint-kafka-connect-3 client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = sysint-kafka-connect group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null 
sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,411 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,411 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,411 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,411 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,413 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,413 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,413 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,413 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,413 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,413 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,413 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,414 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,414 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,414 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,414 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,414 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,414 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,414 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,414 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,414 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,414 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,414 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:19:56,415 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,415 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,415 INFO || Kafka startTimeMs: 1602839996415 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:19:56,424 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:19:56,429 INFO || [Producer clientId=producer-3] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:19:56,437 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_configs-0 [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-16 09:19:56,438 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_configs-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,451 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Resetting offset for partition 
_sysint_connect_configs-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:19:56,452 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:19:56,452 INFO || Started KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:19:56,453 INFO || Started KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-16 09:19:56,453 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder started [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:19:56,463 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:19:57,705 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Discovered group coordinator kafka:29093 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:19:57,706 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Rebalance started [org.apache.kafka.connect.runtime.distributed.WorkerCoordinator] 2020-10-16 09:19:57,707 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:19:57,761 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Join group failed with org.apache.kafka.common.errors.MemberIdRequiredException: The group member needs to have a valid member id before actually entering a consumer group. 
[org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:19:57,761 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:19:57,925 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Successfully joined group with generation 1 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:19:57,926 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Joined group at generation 1 with protocol version 2 and got assignment: Assignment{error=0, leader='connect-1-fda6e9b2-aed3-4687-860d-8280bba00bc3', leaderUrl='http://172.19.0.6:8083/', offset=-1, connectorIds=[], taskIds=[], revokedConnectorIds=[], revokedTaskIds=[], delay=0} with rebalance delay: 0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:19:57,927 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connectors and tasks using config offset -1 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:19:57,927 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Finished starting connectors and tasks [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:19:57,984 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Session key updated [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:47,658 INFO || AbstractConfig values: [org.apache.kafka.common.config.AbstractConfig] 2020-10-16 09:20:47,665 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Connector sysint-sqlserver-tec-runinit-connector config updated [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:48,167 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Rebalance started [org.apache.kafka.connect.runtime.distributed.WorkerCoordinator] 2020-10-16 09:20:48,167 INFO 
|| [Worker clientId=connect-1, groupId=sysint-kafka-connect] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:20:48,176 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Successfully joined group with generation 2 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:20:48,177 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Joined group at generation 2 with protocol version 2 and got assignment: Assignment{error=0, leader='connect-1-fda6e9b2-aed3-4687-860d-8280bba00bc3', leaderUrl='http://172.19.0.6:8083/', offset=2, connectorIds=[sysint-sqlserver-tec-runinit-connector], taskIds=[], revokedConnectorIds=[], revokedTaskIds=[], delay=0} with rebalance delay: 0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:48,177 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connectors and tasks using config offset 2 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:48,178 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector sysint-sqlserver-tec-runinit-connector [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:48,181 INFO || Creating connector sysint-sqlserver-tec-runinit-connector of type io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:48,182 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] 
value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:20:48,183 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:20:48,187 INFO || Instantiated connector sysint-sqlserver-tec-runinit-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:48,187 INFO || Finished creating connector 
sysint-sqlserver-tec-runinit-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:48,188 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Finished starting connectors and tasks [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:48,196 INFO || AbstractConfig values: [org.apache.kafka.common.config.AbstractConfig] 2020-10-16 09:20:48,201 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Connector sysint-sqlserver-tec-runonly-connector config updated [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:48,702 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Rebalance started [org.apache.kafka.connect.runtime.distributed.WorkerCoordinator] 2020-10-16 09:20:48,702 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:20:48,708 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Successfully joined group with generation 3 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:20:48,708 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Joined group at generation 3 with protocol version 2 and got assignment: Assignment{error=0, leader='connect-1-fda6e9b2-aed3-4687-860d-8280bba00bc3', leaderUrl='http://172.19.0.6:8083/', offset=3, connectorIds=[sysint-sqlserver-tec-runonly-connector, sysint-sqlserver-tec-runinit-connector], taskIds=[], revokedConnectorIds=[], revokedTaskIds=[], delay=0} with rebalance delay: 0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:48,709 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connectors and tasks using config offset 3 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:48,710 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector 
sysint-sqlserver-tec-runonly-connector [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:48,710 INFO || Creating connector sysint-sqlserver-tec-runonly-connector of type io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:48,710 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:20:48,711 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, 
ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:20:48,712 INFO || Instantiated connector sysint-sqlserver-tec-runonly-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:48,713 INFO || Finished creating connector sysint-sqlserver-tec-runonly-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:48,713 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Finished starting connectors and tasks [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:48,717 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:20:48,726 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true 
errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:20:49,216 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Tasks [sysint-sqlserver-tec-runinit-connector-0] configs updated [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:49,217 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = 
sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:20:49,220 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:20:49,726 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Tasks [sysint-sqlserver-tec-runonly-connector-0] configs updated 
[org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:50,228 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Handling task config update by restarting tasks [] [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:50,229 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Rebalance started [org.apache.kafka.connect.runtime.distributed.WorkerCoordinator] 2020-10-16 09:20:50,229 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:20:50,234 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Successfully joined group with generation 4 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:20:50,234 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Joined group at generation 4 with protocol version 2 and got assignment: Assignment{error=0, leader='connect-1-fda6e9b2-aed3-4687-860d-8280bba00bc3', leaderUrl='http://172.19.0.6:8083/', offset=7, connectorIds=[sysint-sqlserver-tec-runonly-connector, sysint-sqlserver-tec-runinit-connector], taskIds=[sysint-sqlserver-tec-runinit-connector-0, sysint-sqlserver-tec-runonly-connector-0], revokedConnectorIds=[], revokedTaskIds=[], delay=0} with rebalance delay: 0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:50,235 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connectors and tasks using config offset 7 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:50,235 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-tec-runinit-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:50,236 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-tec-runonly-connector-0 
[org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:50,236 INFO || Creating task sysint-sqlserver-tec-runinit-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,236 INFO || Creating task sysint-sqlserver-tec-runonly-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,238 INFO || ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig] 2020-10-16 09:20:50,238 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, 
event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:20:50,238 INFO || ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig] 2020-10-16 09:20:50,241 INFO || TaskConfig values: task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.TaskConfig] 2020-10-16 09:20:50,242 INFO || Instantiated task sysint-sqlserver-tec-runinit-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,242 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 
transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:20:50,243 INFO || JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:20:50,243 INFO || TaskConfig values: task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.TaskConfig] 2020-10-16 09:20:50,244 INFO || Instantiated task sysint-sqlserver-tec-runonly-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,244 INFO || JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:20:50,244 INFO || Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task 
sysint-sqlserver-tec-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,244 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-tec-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,244 INFO || JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:20:50,244 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-tec-runinit-connector-0 using the worker config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,245 INFO || JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:20:50,245 INFO || Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-tec-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,245 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-tec-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,245 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-tec-runonly-connector-0 using the worker config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,252 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none 
header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:20:50,252 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:20:50,252 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class 
org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:20:50,253 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = 
transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:20:50,262 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,262 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:20:50,265 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connector-producer-sysint-sqlserver-tec-runinit-connector-0 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit 
sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:20:50,265 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connector-producer-sysint-sqlserver-tec-runonly-connector-0 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 
partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:20:50,269 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:20:50,269 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:20:50,269 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,270 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,270 INFO || Kafka startTimeMs: 1602840050269 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,271 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:20:50,271 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:20:50,272 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,272 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,272 INFO || Kafka startTimeMs: 1602840050272 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,278 INFO || [Producer clientId=connector-producer-sysint-sqlserver-tec-runinit-connector-0] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:20:50,279 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Finished starting connectors and tasks [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:20:50,280 INFO || [Producer clientId=connector-producer-sysint-sqlserver-tec-runonly-connector-0] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:20:50,285 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,287 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:20:50,285 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,287 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,287 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,287 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,287 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,288 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,288 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,288 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,288 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:20:50,288 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,287 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,288 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,288 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,288 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,292 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,293 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,292 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,293 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:20:50,293 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,293 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,293 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,293 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,293 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,293 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,295 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,295 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,295 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,295 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,295 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,295 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,295 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask] 
2020-10-16 09:20:50,296 INFO || table.whitelist = dbo.tab1,dbo.tab2 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || database.dbname = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || database.server.name = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,295 INFO || connector.class = 
io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || name = sysint-sqlserver-tec-runonly-connector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || retriable.restart.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,296 INFO || snapshot.mode = schema_only 
[io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,297 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,297 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,297 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,297 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,297 INFO || table.whitelist = dbo.VatType,dbo.Registry [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,297 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,297 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,298 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,298 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,298 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,298 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,298 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,298 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,298 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,299 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 
09:20:50,299 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,299 INFO || database.dbname = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,299 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,299 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,299 INFO || database.server.name = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,299 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,299 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,299 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,299 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,300 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,300 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,300 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,300 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,300 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,300 INFO || name = sysint-sqlserver-tec-runinit-connector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,300 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,300 INFO || 
retriable.restart.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,300 INFO || snapshot.mode = initial [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,300 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:20:50,301 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,301 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,302 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,301 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,302 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,302 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,303 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,303 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:20:50,303 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,303 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,303 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,303 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,303 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:50,304 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:20:50,544 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=tec-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=tec-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-16 09:20:50,545 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=tec-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-16 09:20:50,545 INFO || Requested thread factory for connector SqlServerConnector, id = tec named = db-history-config-check [io.debezium.util.Threads] 2020-10-16 09:20:50,547 INFO || ProducerConfig values: acks = 1 batch.size = 32768 bootstrap.servers = [kafka:29093] buffer.memory = 1048576 client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 
 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:20:50,544 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=tec-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=tec-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-16 09:20:50,547 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, 
bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=tec-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-16 09:20:50,547 INFO || Requested thread factory for connector SqlServerConnector, id = tec named = db-history-config-check [io.debezium.util.Threads] 2020-10-16 09:20:50,548 WARN || Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=schema-history,server=tec': debezium.sql_server:type=connector-metrics,context=schema-history,server=tec [io.debezium.relational.history.DatabaseHistoryMetrics] 2020-10-16 09:20:50,548 INFO || ProducerConfig values: acks = 1 batch.size = 32768 bootstrap.servers = [kafka:29093] buffer.memory = 1048576 client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 
sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:20:50,554 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,554 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,554 INFO || Kafka startTimeMs: 1602840050554 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,557 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,554 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = tec-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false 
isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:20:50,558 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,558 INFO || Kafka startTimeMs: 
1602840050557 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,559 WARN || Error registering AppInfo mbean [org.apache.kafka.common.utils.AppInfoParser] javax.management.InstanceAlreadyExistsException: kafka.producer:type=app-info,id=tec-dbhistory at java.management/com.sun.jmx.mbeanserver.Repository.addMBean(Repository.java:436) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerWithRepository(DefaultMBeanServerInterceptor.java:1855) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerDynamicMBean(DefaultMBeanServerInterceptor.java:955) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerObject(DefaultMBeanServerInterceptor.java:890) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:320) at java.management/com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522) at org.apache.kafka.common.utils.AppInfoParser.registerAppInfo(AppInfoParser.java:64) at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:435) at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:301) at io.debezium.relational.history.KafkaDatabaseHistory.start(KafkaDatabaseHistory.java:235) at io.debezium.relational.HistorizedRelationalDatabaseSchema.<init>(HistorizedRelationalDatabaseSchema.java:40) at io.debezium.connector.sqlserver.SqlServerDatabaseSchema.<init>(SqlServerDatabaseSchema.java:34) at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:83) at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106) at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:232) at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185) at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at 
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-16 09:20:50,561 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = tec-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 
0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:20:50,564 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,564 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,564 INFO || Kafka startTimeMs: 1602840050564 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,567 INFO || [Producer clientId=tec-dbhistory] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:20:50,567 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:20:50,569 INFO || [Producer clientId=tec-dbhistory] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:20:50,570 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms 
= 50 request.timeout.ms = 30000  retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,571 WARN || The configuration 'value.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,571 WARN || The configuration 'acks' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,571 WARN || The configuration 'batch.size' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,571 WARN || The configuration 'max.block.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,571 WARN || The configuration 'buffer.memory' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,571 WARN || The configuration 'key.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,571 WARN || The configuration 'linger.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,571 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,571 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,571 INFO || Kafka startTimeMs: 1602840050571 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,574 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,574 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,574 INFO || Kafka startTimeMs: 1602840050574 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,577 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:20:50,578 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null 
sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,580 WARN || The configuration 'value.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,581 WARN || The configuration 'acks' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,581 WARN || The configuration 'batch.size' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,581 WARN || The configuration 'max.block.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,581 WARN || The configuration 'buffer.memory' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,581 WARN || The configuration 'key.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,581 WARN || The configuration 'linger.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:20:50,581 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,581 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,581 INFO || Kafka startTimeMs: 1602840050581 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:20:50,581 WARN || Error registering AppInfo mbean [org.apache.kafka.common.utils.AppInfoParser] javax.management.InstanceAlreadyExistsException: kafka.admin.client:type=app-info,id=tec-dbhistory at java.management/com.sun.jmx.mbeanserver.Repository.addMBean(Repository.java:436) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerWithRepository(DefaultMBeanServerInterceptor.java:1855) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerDynamicMBean(DefaultMBeanServerInterceptor.java:955) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerObject(DefaultMBeanServerInterceptor.java:890) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:320) at java.management/com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522) at org.apache.kafka.common.utils.AppInfoParser.registerAppInfo(AppInfoParser.java:64) at org.apache.kafka.clients.admin.KafkaAdminClient.(KafkaAdminClient.java:549) at org.apache.kafka.clients.admin.KafkaAdminClient.createInternal(KafkaAdminClient.java:492) at org.apache.kafka.clients.admin.Admin.create(Admin.java:63) at org.apache.kafka.clients.admin.AdminClient.create(AdminClient.java:39) at io.debezium.relational.history.KafkaDatabaseHistory.initializeStorage(KafkaDatabaseHistory.java:491) at io.debezium.relational.HistorizedRelationalDatabaseSchema.initializeStorage(HistorizedRelationalDatabaseSchema.java:68) at 
io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:84) at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106) at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:232) at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185) at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-16 09:20:50,608 INFO || Database history topic '(name=it.company.sysint.data.cdc.db.history.tec, numPartitions=1, replicationFactor=1, replicasAssignments=null, configs={cleanup.policy=delete, retention.ms=9223372036854775807, retention.bytes=-1})' created [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-16 09:20:50,608 INFO || Database history topic '(name=it.company.sysint.data.cdc.db.history.tec, numPartitions=1, replicationFactor=1, replicasAssignments=null, configs={cleanup.policy=delete, retention.ms=9223372036854775807, retention.bytes=-1})' created [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-16 09:20:50,866 INFO || Requested thread factory for connector SqlServerConnector, id = tec named = change-event-source-coordinator [io.debezium.util.Threads] 2020-10-16 09:20:50,866 INFO || Requested thread factory for connector SqlServerConnector, id = tec named = change-event-source-coordinator [io.debezium.util.Threads] 2020-10-16 09:20:50,868 INFO || Creating thread debezium-sqlserverconnector-tec-change-event-source-coordinator [io.debezium.util.Threads] 2020-10-16 09:20:50,868 INFO || Creating thread 
debezium-sqlserverconnector-tec-change-event-source-coordinator [io.debezium.util.Threads] 2020-10-16 09:20:50,868 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:20:50,868 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:20:50,869 WARN || Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=snapshot,server=tec': debezium.sql_server:type=connector-metrics,context=snapshot,server=tec [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:20:50,871 WARN || Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=streaming,server=tec': debezium.sql_server:type=connector-metrics,context=streaming,server=tec [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:20:50,871 INFO || Metrics registered [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:20:50,872 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:20:50,871 INFO || Metrics registered [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:20:50,877 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:20:50,877 INFO || No previous offset has been found [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:20:50,877 INFO || According to the connector configuration both schema and data will be snapshotted [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:20:50,877 INFO || No previous offset has been found [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:20:50,878 INFO || According to the connector configuration only schema will be snapshotted 
[io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:20:50,878 INFO || Snapshot step 1 - Preparing [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:50,878 INFO || Snapshot step 1 - Preparing [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:50,879 INFO || Snapshot step 2 - Determining captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:50,879 INFO || Snapshot step 2 - Determining captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:51,033 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:51,035 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:20:51,035 INFO || Snapshot step 3 - Locking captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:51,035 INFO || Snapshot step 3 - Locking captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:51,036 INFO || Schema locking was disabled in connector configuration [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:20:51,036 INFO || Snapshot step 4 - Determining snapshot offset [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:51,038 INFO || Schema locking was disabled in connector configuration [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:20:51,038 INFO || Snapshot step 4 - Determining snapshot offset [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:51,041 INFO || Snapshot step 5 - Reading structure of captured tables 
[io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:51,041 INFO || Snapshot step 5 - Reading structure of captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:51,042 INFO || Reading structure of schema 'tec' [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:20:51,042 INFO || Reading structure of schema 'tec' [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:20:51,900 WARN || Cannot parse column default value '(NULL)' to type 'int'. [io.debezium.connector.sqlserver.SqlServerDefaultValueConverter] java.lang.NumberFormatException: For input string: "UL" at java.base/java.lang.NumberFormatException.forInputString(NumberFormatException.java:65) at java.base/java.lang.Integer.parseInt(Integer.java:652) at java.base/java.lang.Integer.parseInt(Integer.java:770) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$1(SqlServerDefaultValueConverter.java:115) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82) at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512) at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181) at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126) at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183) at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122) at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at 
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-16 09:20:51,906 WARN || Cannot parse column default value '(NULL)' to type 'int'. [io.debezium.connector.sqlserver.SqlServerDefaultValueConverter] java.lang.NumberFormatException: For input string: "UL" at java.base/java.lang.NumberFormatException.forInputString(NumberFormatException.java:65) at java.base/java.lang.Integer.parseInt(Integer.java:652) at java.base/java.lang.Integer.parseInt(Integer.java:770) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$1(SqlServerDefaultValueConverter.java:115) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82) at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512) at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181) at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126) at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183) at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122) at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-16 09:20:52,119 INFO || Snapshot step 6 - Persisting schema history [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:52,143 INFO || Snapshot step 6 - Persisting schema history [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:52,184 INFO || Snapshot step 7 - Snapshotting data [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:52,185 INFO || Exporting data from table 'tec.dbo.VatType' [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:52,190 INFO || For table 'tec.dbo.VatType' using select statement: 'SELECT [VatType].[IDAliquotaIVA],[VatType].[CodiceIVA],[VatType].[PercentualeIVA] FROM [dbo].[VatType]' [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:52,214 INFO || Finished exporting 37 records for table 'tec.dbo.VatType'; total duration '00:00:00.028' [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:52,214 INFO || Exporting data from table 'tec.dbo.Registry' [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:52,237 INFO || Snapshot step 7 - Skipping snapshotting of data [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:20:52,240 INFO || Snapshot - Final stage [io.debezium.pipeline.source.AbstractSnapshotChangeEventSource] 2020-10-16 09:20:52,240 INFO || Removing locking timeout [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:20:52,242 INFO || Snapshot ended with SnapshotResult [status=COMPLETED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=tec, changeLsn=NULL, commitLsn=0000003f:00000fd0:00c5, eventSerialNo=null, snapshot=FALSE, sourceTime=2020-10-16T09:20:52.235Z], 
partition={server=tec}, snapshotCompleted=true, eventSerialNo=1]] [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:20:52,244 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-16 09:20:52,244 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:20:52,279 INFO || CDC is enabled for table Capture instance "dbo_Registry" [sourceTableId=tec.dbo.Registry, changeTableId=tec.cdc.dbo_Registry_CT, startLsn=0000003f:00000768:011f, changeTableObjectId=779149821, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-16 09:20:52,280 INFO || CDC is enabled for table Capture instance "dbo_VatType" [sourceTableId=tec.dbo.VatType, changeTableId=tec.cdc.dbo_VatType_CT, startLsn=0000003e:000013b0:0042, changeTableObjectId=683149479, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-16 09:20:52,280 INFO || Last position recorded in offsets is 0000003f:00000fd0:00c5(NULL)[1] [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-16 09:20:52,404 WARN || [Producer clientId=connector-producer-sysint-sqlserver-tec-runonly-connector-0] Error while fetching metadata with correlation id 3 : {it.company.sysint.data.cdc.tables.tec=LEADER_NOT_AVAILABLE} [org.apache.kafka.clients.NetworkClient] 2020-10-16 09:20:52,404 WARN || [Producer clientId=connector-producer-sysint-sqlserver-tec-runinit-connector-0] Error while fetching metadata with correlation id 3 : {it.company.sysint.data.cdc.tables.tec=LEADER_NOT_AVAILABLE} [org.apache.kafka.clients.NetworkClient] 2020-10-16 09:20:52,430 INFO || Snapshot - Final stage [io.debezium.pipeline.source.AbstractSnapshotChangeEventSource] 2020-10-16 09:20:52,431 INFO || Removing locking timeout 
[io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:20:52,435 INFO || Snapshot ended with SnapshotResult [status=COMPLETED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=tec, changeLsn=NULL, commitLsn=0000003f:00000fd0:00c5, eventSerialNo=null, snapshot=FALSE, sourceTime=2020-10-16T09:20:52.428Z], partition={server=tec}, snapshotCompleted=true, eventSerialNo=1]] [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:20:52,435 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-16 09:20:52,435 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:20:52,472 INFO || CDC is enabled for table Capture instance "dbo_Payment" [sourceTableId=tec.dbo.Payment, changeTableId=tec.cdc.dbo_Payment_CT, startLsn=00000037:00000ac0:00b1, changeTableObjectId=1294627655, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-16 09:20:52,473 INFO || Last position recorded in offsets is 0000003f:00000fd0:00c5(NULL)[1] [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-16 09:20:52,544 WARN || [Producer clientId=connector-producer-sysint-sqlserver-tec-runinit-connector-0] Error while fetching metadata with correlation id 8 : {it.company.sysint.data.cdc.tables.tec.dbo.VatType=LEADER_NOT_AVAILABLE} [org.apache.kafka.clients.NetworkClient] 2020-10-16 09:20:55,278 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:20:55,278 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:20:55,287 INFO || 
WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Finished commitOffsets successfully in 9 ms [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:20:55,288 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:20:55,288 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:20:55,290 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Finished commitOffsets successfully in 2 ms [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:00,288 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:00,288 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:00,290 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:00,290 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:05,289 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:05,289 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:05,291 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:05,291 INFO || 
WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:10,289 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:10,289 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:10,291 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:10,291 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:15,289 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:15,290 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:15,291 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:15,291 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:20,290 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:20,290 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:20,291 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:20,291 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:25,290 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:25,290 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:25,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:25,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:30,291 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:30,291 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:30,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:30,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:35,291 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:35,291 INFO || 
WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:35,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:35,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:40,291 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:40,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:40,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:40,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:45,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:45,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:45,293 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:45,293 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:50,292 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:50,292 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:50,293 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:50,293 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:55,293 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:55,293 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:55,293 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:55,293 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:21:58,038 ERROR || Producer failure [io.debezium.pipeline.ErrorHandler] com.microsoft.sqlserver.jdbc.SQLServerException: Connection reset at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892) at com.microsoft.sqlserver.jdbc.TDSChannel.read(IOBuffer.java:2031) at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6418) at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579) at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866) at 
com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768) at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194) at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492) at io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149) at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128) at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) Caused by: java.net.SocketException: Connection reset at java.base/java.net.SocketInputStream.read(SocketInputStream.java:186) at java.base/java.net.SocketInputStream.read(SocketInputStream.java:140) at com.microsoft.sqlserver.jdbc.TDSChannel.read(IOBuffer.java:2023)  ... 
20 more 2020-10-16 09:21:58,039 INFO || Finished streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:21:58,039 INFO || Connected metrics set to 'false' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-16 09:21:58,245 ERROR || Producer failure [io.debezium.pipeline.ErrorHandler] com.microsoft.sqlserver.jdbc.SQLServerException: SQL Server did not return a response. The connection has been closed. at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892) at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2881) at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6425) at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579) at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866) at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768) at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194) at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492) at io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149) at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128) at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140) at 
io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-16 09:21:58,246 INFO || Finished streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:21:58,246 INFO || Connected metrics set to 'false' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-16 09:21:58,360 WARN || Going to restart connector after 10 sec. after a retriable exception [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:21:58,360 WARN || Going to restart connector after 10 sec. after a retriable exception [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:21:58,362 INFO || [Producer clientId=tec-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:21:58,362 INFO || [Producer clientId=tec-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:21:58,364 WARN || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} failed to poll records from SourceTask. Will retry operation. [org.apache.kafka.connect.runtime.WorkerSourceTask] org.apache.kafka.connect.errors.RetriableException: An exception occurred in the change event producer. This connector will be restarted. 
at io.debezium.pipeline.ErrorHandler.setProducerThrowable(ErrorHandler.java:38) at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:283) at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: Connection reset at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892) at com.microsoft.sqlserver.jdbc.TDSChannel.read(IOBuffer.java:2031) at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6418) at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579) at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866) at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768) at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194) at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492) at 
io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149) at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128) ... 7 more Caused by: java.net.SocketException: Connection reset at java.base/java.net.SocketInputStream.read(SocketInputStream.java:186) at java.base/java.net.SocketInputStream.read(SocketInputStream.java:140) at com.microsoft.sqlserver.jdbc.TDSChannel.read(IOBuffer.java:2023)  ... 20 more 2020-10-16 09:21:58,365 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:21:58,364 WARN || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} failed to poll records from SourceTask. Will retry operation. [org.apache.kafka.connect.runtime.WorkerSourceTask] org.apache.kafka.connect.errors.RetriableException: An exception occurred in the change event producer. This connector will be restarted. at io.debezium.pipeline.ErrorHandler.setProducerThrowable(ErrorHandler.java:38) at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:283) at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: SQL Server did not return a response. The connection has been closed. 
at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892) at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2881) at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6425) at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579) at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866) at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768) at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194) at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223) at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492) at io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149) at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128) ... 
7 more 2020-10-16 09:21:58,365 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:00,293 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:00,294 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:00,294 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:00,294 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:00,365 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:00,365 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:02,366 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:02,366 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:04,366 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:04,366 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:05,294 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:05,294 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding 
messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:05,294 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:05,294 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:06,366 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:06,366 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,367 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,368 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,368 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:22:08,368 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || table.whitelist = dbo.VatType,dbo.Registry [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter 
[io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || database.dbname = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || database.server.name = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || 
value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || name = sysint-sqlserver-tec-runinit-connector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || retriable.restart.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || snapshot.mode = initial [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,368 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,368 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,368 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,368 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,368 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,368 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions.
Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,368 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,369 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,369 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || table.whitelist = dbo.tab1,dbo.tab2 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 
09:22:08,370 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || database.dbname = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || database.server.name = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || 
database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || name = sysint-sqlserver-tec-runonly-connector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || retriable.restart.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || snapshot.mode = schema_only [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:08,370 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,370 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,370 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,371 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,371 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions.
Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,371 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:08,371 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:22:10,294 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:10,295 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:10,295 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:10,295 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:10,295 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:10,295 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:15,295 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:15,295 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:15,295 WARN || 
Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:15,295 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:15,295 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:15,295 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:20,295 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:20,295 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:20,295 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:20,296 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:20,296 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:20,296 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:22,885 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 
09:22:22,885 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:22,885 ERROR || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Task threw an uncaught and unrecoverable exception [org.apache.kafka.connect.runtime.WorkerTask] java.lang.RuntimeException: Couldn't obtain database name at io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:474) at io.debezium.connector.sqlserver.SqlServerConnection.(SqlServerConnection.java:117) at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:75) at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106) at io.debezium.connector.common.BaseSourceTask.startIfNeededAndPossible(BaseSourceTask.java:161) at io.debezium.connector.common.BaseSourceTask.poll(BaseSourceTask.java:124) at org.apache.kafka.connect.runtime.WorkerSourceTask.poll(WorkerSourceTask.java:289) at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:256) at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185) at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: The TCP/IP connection to the host sqlserver, port 1433 has failed. Error: "sqlserver. Verify the connection properties. Make sure that an instance of SQL Server is running on the host and accepting TCP/IP connections at the port. 
Make sure that TCP connections to the port are not blocked by a firewall.". at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDriverError(SQLServerException.java:234) at com.microsoft.sqlserver.jdbc.SQLServerException.ConvertConnectExceptionToSQLServerException(SQLServerException.java:285) at com.microsoft.sqlserver.jdbc.SocketFinder.findSocket(IOBuffer.java:2431) at com.microsoft.sqlserver.jdbc.TDSChannel.open(IOBuffer.java:656) at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectHelper(SQLServerConnection.java:2440) at com.microsoft.sqlserver.jdbc.SQLServerConnection.login(SQLServerConnection.java:2103) at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectInternal(SQLServerConnection.java:1950) at com.microsoft.sqlserver.jdbc.SQLServerConnection.connect(SQLServerConnection.java:1162) at com.microsoft.sqlserver.jdbc.SQLServerDriver.connect(SQLServerDriver.java:735) at io.debezium.jdbc.JdbcConnection.lambda$patternBasedFactory$1(JdbcConnection.java:222) at io.debezium.jdbc.JdbcConnection$ConnectionFactoryDecorator.connect(JdbcConnection.java:107) at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:852) at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:847) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:618) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492) at io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:469)  ... 
14 more 2020-10-16 09:22:22,885 ERROR || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Task is being killed and will not recover until manually restarted [org.apache.kafka.connect.runtime.WorkerTask] 2020-10-16 09:22:22,885 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:22,885 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:22,885 INFO || Stopping down connector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:22,885 ERROR || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Task threw an uncaught and unrecoverable exception [org.apache.kafka.connect.runtime.WorkerTask] java.lang.RuntimeException: Couldn't obtain database name at io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:474) at io.debezium.connector.sqlserver.SqlServerConnection.(SqlServerConnection.java:117) at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:75) at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106) at io.debezium.connector.common.BaseSourceTask.startIfNeededAndPossible(BaseSourceTask.java:161) at io.debezium.connector.common.BaseSourceTask.poll(BaseSourceTask.java:124) at org.apache.kafka.connect.runtime.WorkerSourceTask.poll(WorkerSourceTask.java:289) at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:256) at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185) at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at 
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: The TCP/IP connection to the host sqlserver, port 1433 has failed. Error: "sqlserver. Verify the connection properties. Make sure that an instance of SQL Server is running on the host and accepting TCP/IP connections at the port. Make sure that TCP connections to the port are not blocked by a firewall.". at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDriverError(SQLServerException.java:234) at com.microsoft.sqlserver.jdbc.SQLServerException.ConvertConnectExceptionToSQLServerException(SQLServerException.java:285) at com.microsoft.sqlserver.jdbc.SocketFinder.findSocket(IOBuffer.java:2431) at com.microsoft.sqlserver.jdbc.TDSChannel.open(IOBuffer.java:656) at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectHelper(SQLServerConnection.java:2440) at com.microsoft.sqlserver.jdbc.SQLServerConnection.login(SQLServerConnection.java:2103) at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectInternal(SQLServerConnection.java:1950) at com.microsoft.sqlserver.jdbc.SQLServerConnection.connect(SQLServerConnection.java:1162) at com.microsoft.sqlserver.jdbc.SQLServerDriver.connect(SQLServerDriver.java:735) at io.debezium.jdbc.JdbcConnection.lambda$patternBasedFactory$1(JdbcConnection.java:222) at io.debezium.jdbc.JdbcConnection$ConnectionFactoryDecorator.connect(JdbcConnection.java:107) at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:852) at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:847) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:618) at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492) at 
io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:469)  ... 14 more 2020-10-16 09:22:22,886 ERROR || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Task is being killed and will not recover until manually restarted [org.apache.kafka.connect.runtime.WorkerTask] 2020-10-16 09:22:22,886 INFO || Stopping down connector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:22:22,886 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=snapshot,server=tec': debezium.sql_server:type=connector-metrics,context=snapshot,server=tec [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:22:22,886 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=schema-history,server=tec': debezium.sql_server:type=connector-metrics,context=schema-history,server=tec [io.debezium.relational.history.DatabaseHistoryMetrics] 2020-10-16 09:22:22,886 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=streaming,server=tec': debezium.sql_server:type=connector-metrics,context=streaming,server=tec [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:22:22,886 INFO || [Producer clientId=connector-producer-sysint-sqlserver-tec-runonly-connector-0] Closing the Kafka producer with timeoutMillis = 30000 ms. [org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:22:22,886 INFO || [Producer clientId=connector-producer-sysint-sqlserver-tec-runinit-connector-0] Closing the Kafka producer with timeoutMillis = 30000 ms. 
[org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:22:25,296 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:25,296 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:25,296 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:25,296 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:30,296 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:30,296 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:30,296 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:30,297 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:35,297 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:35,297 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:35,297 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:35,297 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:40,297 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:40,297 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:40,297 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:40,297 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:45,298 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:45,298 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:45,298 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:45,298 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:50,298 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:50,298 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset 
commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:50,298 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:50,298 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:55,298 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:55,298 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:55,299 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:22:55,299 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:00,299 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:00,299 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:00,299 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:00,299 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:05,299 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:05,299 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:05,299 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:05,299 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:10,300 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:10,300 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:10,300 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:10,300 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:15,300 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:15,300 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:15,300 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:15,300 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset 
commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:20,300 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:20,300 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:20,301 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:20,301 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:25,301 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:25,301 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:25,301 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:25,301 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:30,301 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:30,301 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:30,301 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:30,301 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:35,301 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:35,302 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:35,302 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:35,302 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:40,302 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:40,302 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:40,302 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:40,302 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:45,302 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:45,302 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset 
commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:45,302 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:45,302 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:50,303 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:50,303 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:50,303 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:50,303 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:55,303 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:55,303 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:55,303 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:23:55,303 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:00,304 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:00,304 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:00,304 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:00,304 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:05,304 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:05,304 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:05,304 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:05,304 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:10,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:10,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:10,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:10,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset 
commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:15,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:15,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:15,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:15,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:20,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:20,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:20,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:20,305 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:25,306 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:25,306 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:25,306 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:25,306 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:30,306 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:30,306 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:30,306 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:30,306 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:35,307 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:35,307 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:35,307 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:35,307 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:40,307 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:40,307 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset 
commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:40,307 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:40,307 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:45,307 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:45,308 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:45,308 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:45,308 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:50,308 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:50,308 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:50,308 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:50,308 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:55,308 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:55,308 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:55,308 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:24:55,308 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:00,309 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:00,309 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:00,309 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:00,309 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:01,266 INFO || Kafka Connect stopping [org.apache.kafka.connect.runtime.Connect] 2020-10-16 09:25:01,266 INFO || Stopping REST server [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:01,270 INFO || Stopped http_172.19.0.68083@3f93e4a8{HTTP/1.1,[http/1.1]}{172.19.0.6:8083} [org.eclipse.jetty.server.AbstractConnector] 2020-10-16 09:25:01,270 INFO || node0 Stopped scavenging [org.eclipse.jetty.server.session] 2020-10-16 09:25:01,276 INFO || REST server stopped [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:01,276 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder stopping 
[org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:01,276 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Stopping connectors and tasks that are still assigned to this worker. [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:01,277 INFO || Stopping connector sysint-sqlserver-tec-runonly-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:01,277 INFO || Scheduled shutdown for WorkerConnector{id=sysint-sqlserver-tec-runonly-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-16 09:25:01,277 INFO || Stopping connector sysint-sqlserver-tec-runinit-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:01,278 INFO || Completed shutdown for WorkerConnector{id=sysint-sqlserver-tec-runonly-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-16 09:25:01,282 INFO || Stopping task sysint-sqlserver-tec-runonly-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:01,278 INFO || Scheduled shutdown for WorkerConnector{id=sysint-sqlserver-tec-runinit-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-16 09:25:01,282 INFO || Stopping task sysint-sqlserver-tec-runinit-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:01,283 INFO || Completed shutdown for WorkerConnector{id=sysint-sqlserver-tec-runinit-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-16 09:25:01,286 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Member connect-1-fda6e9b2-aed3-4687-860d-8280bba00bc3 sending LeaveGroup request to coordinator kafka:29093 (id: 2147483646 rack: null) due to the consumer is being closed [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:01,286 WARN || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Close timed out with 1 pending requests to coordinator, terminating client connections 
[org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:01,287 INFO || Stopping KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:01,288 INFO || [Producer clientId=producer-2] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:25:01,290 INFO || Stopped KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:01,290 INFO || Closing KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-16 09:25:01,290 INFO || Stopping KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:01,291 INFO || [Producer clientId=producer-3] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:25:01,293 INFO || Stopped KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:01,293 INFO || Closed KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-16 09:25:01,293 INFO || Worker stopping [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:01,293 INFO || Stopping KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-16 09:25:01,293 INFO || Stopping KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:01,294 INFO || [Producer clientId=producer-1] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. 
[org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:25:01,296 INFO || Stopped KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:01,296 INFO || Stopped KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-16 09:25:01,296 INFO || Worker stopped [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:01,297 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder stopped [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:01,301 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder stopped [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:01,301 INFO || Kafka Connect stopped [org.apache.kafka.connect.runtime.Connect] Plugins are loaded from /kafka/connect Using the following environment variables: GROUP_ID=sysint-kafka-connect CONFIG_STORAGE_TOPIC=_sysint_connect_configs OFFSET_STORAGE_TOPIC=_sysint_connect_offsets STATUS_STORAGE_TOPIC=_sysint_connect_status BOOTSTRAP_SERVERS=kafka:29093 REST_HOST_NAME=172.19.0.6 REST_PORT=8083 ADVERTISED_HOST_NAME=172.19.0.6 ADVERTISED_PORT=8083 KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter INTERNAL_KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter INTERNAL_VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter OFFSET_FLUSH_INTERVAL_MS=5000 OFFSET_FLUSH_TIMEOUT_MS=5000 SHUTDOWN_TIMEOUT=10000 --- Setting property from CONNECT_INTERNAL_VALUE_CONVERTER: internal.value.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_VALUE_CONVERTER: value.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_REST_ADVERTISED_HOST_NAME: rest.advertised.host.name=172.19.0.6 --- Setting property from CONNECT_OFFSET_FLUSH_INTERVAL_MS: offset.flush.interval.ms=5000 --- Setting property from 
CONNECT_GROUP_ID: group.id=sysint-kafka-connect --- Setting property from CONNECT_BOOTSTRAP_SERVERS: bootstrap.servers=kafka:29093 --- Setting property from CONNECT_KEY_CONVERTER: key.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS: task.shutdown.graceful.timeout.ms=10000 --- Setting property from CONNECT_REST_HOST_NAME: rest.host.name=172.19.0.6 --- Setting property from CONNECT_PLUGIN_PATH: plugin.path=/kafka/connect --- Setting property from CONNECT_REST_PORT: rest.port=8083 --- Setting property from CONNECT_OFFSET_FLUSH_TIMEOUT_MS: offset.flush.timeout.ms=5000 --- Setting property from CONNECT_STATUS_STORAGE_TOPIC: status.storage.topic=_sysint_connect_status --- Setting property from CONNECT_INTERNAL_KEY_CONVERTER: internal.key.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_CONFIG_STORAGE_TOPIC: config.storage.topic=_sysint_connect_configs --- Setting property from CONNECT_REST_ADVERTISED_PORT: rest.advertised.port=8083 --- Setting property from CONNECT_OFFSET_STORAGE_TOPIC: offset.storage.topic=_sysint_connect_offsets 2020-10-16 09:25:17,585 INFO || WorkerInfo values: jvm.args = -Xms256M, -Xmx2G, -XX:+UseG1GC, -XX:MaxGCPauseMillis=20, -XX:InitiatingHeapOccupancyPercent=35, -XX:+ExplicitGCInvokesConcurrent, -XX:MaxInlineLevel=15, -Djava.awt.headless=true, -Dcom.sun.management.jmxremote, -Dcom.sun.management.jmxremote.authenticate=false, -Dcom.sun.management.jmxremote.ssl=false, -Dcom.sun.management.jmxremote.port=1976, -Dkafka.logs.dir=/kafka/bin/../logs, -Dlog4j.configuration=file:/kafka/config/log4j.properties, -javaagent:/kafka/jmx_prometheus_javaagent.jar=8080:/kafka/config.yml jvm.spec = Oracle Corporation, OpenJDK 64-Bit Server VM, 11.0.8, 11.0.8+10-LTS jvm.classpath = 
/kafka/bin/../libs/activation-1.1.1.jar:/kafka/bin/../libs/aopalliance-repackaged-2.5.0.jar:/kafka/bin/../libs/argparse4j-0.7.0.jar:/kafka/bin/../libs/audience-annotations-0.5.0.jar:/kafka/bin/../libs/avro-1.9.2.jar:/kafka/bin/../libs/common-config-5.5.1.jar:/kafka/bin/../libs/common-utils-5.5.1.jar:/kafka/bin/../libs/commons-cli-1.4.jar:/kafka/bin/../libs/commons-lang3-3.8.1.jar:/kafka/bin/../libs/connect-api-2.6.0.jar:/kafka/bin/../libs/connect-basic-auth-extension-2.6.0.jar:/kafka/bin/../libs/connect-file-2.6.0.jar:/kafka/bin/../libs/connect-json-2.6.0.jar:/kafka/bin/../libs/connect-mirror-2.6.0.jar:/kafka/bin/../libs/connect-mirror-client-2.6.0.jar:/kafka/bin/../libs/connect-runtime-2.6.0.jar:/kafka/bin/../libs/connect-transforms-2.6.0.jar:/kafka/bin/../libs/hk2-api-2.5.0.jar:/kafka/bin/../libs/hk2-locator-2.5.0.jar:/kafka/bin/../libs/hk2-utils-2.5.0.jar:/kafka/bin/../libs/jackson-annotations-2.10.2.jar:/kafka/bin/../libs/jackson-core-2.10.2.jar:/kafka/bin/../libs/jackson-databind-2.10.2.jar:/kafka/bin/../libs/jackson-dataformat-csv-2.10.2.jar:/kafka/bin/../libs/jackson-datatype-jdk8-2.10.2.jar:/kafka/bin/../libs/jackson-jaxrs-base-2.10.2.jar:/kafka/bin/../libs/jackson-jaxrs-json-provider-2.10.2.jar:/kafka/bin/../libs/jackson-module-jaxb-annotations-2.10.2.jar:/kafka/bin/../libs/jackson-module-paranamer-2.10.2.jar:/kafka/bin/../libs/jackson-module-scala_2.12-2.10.2.jar:/kafka/bin/../libs/jakarta.activation-api-1.2.1.jar:/kafka/bin/../libs/jakarta.annotation-api-1.3.4.jar:/kafka/bin/../libs/jakarta.inject-2.5.0.jar:/kafka/bin/../libs/jakarta.ws.rs-api-2.1.5.jar:/kafka/bin/../libs/jakarta.xml.bind-api-2.3.2.jar:/kafka/bin/../libs/javassist-3.22.0-CR2.jar:/kafka/bin/../libs/javassist-3.26.0-GA.jar:/kafka/bin/../libs/javax.servlet-api-3.1.0.jar:/kafka/bin/../libs/javax.ws.rs-api-2.1.1.jar:/kafka/bin/../libs/jaxb-api-2.3.0.jar:/kafka/bin/../libs/jersey-client-2.28.jar:/kafka/bin/../libs/jersey-common-2.28.jar:/kafka/bin/../libs/jersey-container-servlet-2.28.jar:/kafk
a/bin/../libs/jersey-container-servlet-core-2.28.jar:/kafka/bin/../libs/jersey-hk2-2.28.jar:/kafka/bin/../libs/jersey-media-jaxb-2.28.jar:/kafka/bin/../libs/jersey-server-2.28.jar:/kafka/bin/../libs/jetty-client-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-continuation-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-http-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-io-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-security-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-server-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-servlet-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-servlets-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-util-9.4.24.v20191120.jar:/kafka/bin/../libs/jopt-simple-5.0.4.jar:/kafka/bin/../libs/kafka-avro-serializer-5.5.1.jar:/kafka/bin/../libs/kafka-clients-2.6.0.jar:/kafka/bin/../libs/kafka-connect-avro-converter-5.5.1.jar:/kafka/bin/../libs/kafka-connect-avro-data-5.5.1.jar:/kafka/bin/../libs/kafka-log4j-appender-2.6.0.jar:/kafka/bin/../libs/kafka-schema-registry-client-5.5.1.jar:/kafka/bin/../libs/kafka-schema-serializer-5.5.1.jar:/kafka/bin/../libs/kafka-streams-2.6.0.jar:/kafka/bin/../libs/kafka-streams-examples-2.6.0.jar:/kafka/bin/../libs/kafka-streams-scala_2.12-2.6.0.jar:/kafka/bin/../libs/kafka-streams-test-utils-2.6.0.jar:/kafka/bin/../libs/kafka-tools-2.6.0.jar:/kafka/bin/../libs/kafka_2.12-2.6.0.jar:/kafka/bin/../libs/log4j-1.2.17.jar:/kafka/bin/../libs/lz4-java-1.7.1.jar:/kafka/bin/../libs/maven-artifact-3.6.3.jar:/kafka/bin/../libs/metrics-core-2.2.0.jar:/kafka/bin/../libs/netty-buffer-4.1.50.Final.jar:/kafka/bin/../libs/netty-codec-4.1.50.Final.jar:/kafka/bin/../libs/netty-common-4.1.50.Final.jar:/kafka/bin/../libs/netty-handler-4.1.50.Final.jar:/kafka/bin/../libs/netty-resolver-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-native-epoll-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-native-unix-common-4.1.50.Final.jar:/kafka/bin/../libs/osgi-resource-locator-1.0.1.jar:/ka
fka/bin/../libs/paranamer-2.8.jar:/kafka/bin/../libs/plexus-utils-3.2.1.jar:/kafka/bin/../libs/reflections-0.9.12.jar:/kafka/bin/../libs/rocksdbjni-5.18.4.jar:/kafka/bin/../libs/scala-collection-compat_2.12-2.1.6.jar:/kafka/bin/../libs/scala-java8-compat_2.12-0.9.1.jar:/kafka/bin/../libs/scala-library-2.12.11.jar:/kafka/bin/../libs/scala-logging_2.12-3.9.2.jar:/kafka/bin/../libs/scala-reflect-2.12.11.jar:/kafka/bin/../libs/slf4j-api-1.7.30.jar:/kafka/bin/../libs/slf4j-log4j12-1.7.30.jar:/kafka/bin/../libs/snappy-java-1.1.7.3.jar:/kafka/bin/../libs/validation-api-2.0.1.Final.jar:/kafka/bin/../libs/zookeeper-3.5.8.jar:/kafka/bin/../libs/zookeeper-jute-3.5.8.jar:/kafka/bin/../libs/zstd-jni-1.4.4-7.jar os.spec = Linux, amd64, 4.19.76-linuxkit os.vcpus = 4 [org.apache.kafka.connect.runtime.WorkerInfo] 2020-10-16 09:25:17,588 INFO || Scanning for plugin classes. This might take a moment ... [org.apache.kafka.connect.cli.ConnectDistributed] 2020-10-16 09:25:17,597 INFO || Loading plugin from: /kafka/connect/kafka-connect-insert-uuid [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,652 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/kafka-connect-insert-uuid/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,652 INFO || Added plugin 'com.github.cjmatta.kafka.connect.smt.InsertUuid$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,652 INFO || Added plugin 'com.github.cjmatta.kafka.connect.smt.InsertUuid$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,652 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,652 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,652 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,653 INFO || Loading plugin from: /kafka/connect/debezium-connector-mongodb [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,894 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-mongodb/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,894 INFO || Added plugin 'io.debezium.connector.mongodb.MongoDbConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,894 INFO || Added plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,894 INFO || Added plugin 'io.debezium.converters.CloudEventsConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,894 INFO || Added plugin 'io.debezium.transforms.ExtractNewRecordState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,894 INFO || Added plugin 'io.debezium.transforms.outbox.EventRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,894 INFO || Added plugin 'io.debezium.connector.mongodb.transforms.ExtractNewDocumentState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,894 INFO || Added plugin 'io.debezium.transforms.ByLogicalTableRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:17,895 INFO || Loading plugin from: /kafka/connect/debezium-connector-mysql [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:18,079 INFO || Registered loader: 
PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-mysql/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:18,079 INFO || Added plugin 'io.debezium.connector.mysql.MySqlConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:18,083 INFO || Loading plugin from: /kafka/connect/debezium-connector-sqlserver [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:18,161 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-sqlserver/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:18,162 INFO || Added plugin 'io.debezium.connector.sqlserver.SqlServerConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:18,173 INFO || Loading plugin from: /kafka/connect/debezium-connector-postgres [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:18,287 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-postgres/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:18,287 INFO || Added plugin 'io.debezium.connector.postgresql.PostgresConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Registered loader: jdk.internal.loader.ClassLoaders$AppClassLoader@3d4eac69 [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.file.FileStreamSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.tools.MockSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.file.FileStreamSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.tools.MockSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.tools.MockConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,058 INFO || Added plugin 'org.apache.kafka.connect.converters.DoubleConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'io.confluent.connect.avro.AvroConverter' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.transforms.Filter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.transforms.InsertField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.transforms.TimestampConverter$Key' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.transforms.MaskField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.transforms.TimestampRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,059 INFO || Added plugin 'org.apache.kafka.connect.transforms.RegexRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.HoistField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.ValueToKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.MaskField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.Cast$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.Cast$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.Flatten$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.InsertField$Key' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.Flatten$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.TimestampConverter$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.HoistField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,060 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,061 INFO || Added plugin 'org.apache.kafka.common.config.provider.FileConfigProvider' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,061 INFO || Added plugin 'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,061 INFO || Added aliases 'MongoDbConnector' and 'MongoDb' to plugin 'io.debezium.connector.mongodb.MongoDbConnector' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,061 INFO || Added aliases 'MySqlConnector' and 'MySql' to plugin 'io.debezium.connector.mysql.MySqlConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,061 INFO || Added aliases 'PostgresConnector' and 'Postgres' to plugin 'io.debezium.connector.postgresql.PostgresConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,061 INFO || Added aliases 'SqlServerConnector' and 'SqlServer' to plugin 'io.debezium.connector.sqlserver.SqlServerConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'FileStreamSinkConnector' and 'FileStreamSink' to plugin 'org.apache.kafka.connect.file.FileStreamSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'FileStreamSourceConnector' and 'FileStreamSource' to plugin 'org.apache.kafka.connect.file.FileStreamSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'MirrorCheckpointConnector' and 'MirrorCheckpoint' to plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'MirrorHeartbeatConnector' and 'MirrorHeartbeat' to plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'MirrorSourceConnector' and 'MirrorSource' to plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'MockConnector' and 'Mock' to plugin 'org.apache.kafka.connect.tools.MockConnector' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'MockSinkConnector' and 'MockSink' to plugin 'org.apache.kafka.connect.tools.MockSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'MockSourceConnector' and 'MockSource' to plugin 'org.apache.kafka.connect.tools.MockSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'SchemaSourceConnector' and 'SchemaSource' to plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'VerifiableSinkConnector' and 'VerifiableSink' to plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'VerifiableSourceConnector' and 'VerifiableSource' to plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,062 INFO || Added aliases 'AvroConverter' and 'Avro' to plugin 'io.confluent.connect.avro.AvroConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,063 INFO || Added aliases 'ByteBufferConverter' and 'ByteBuffer' to plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,063 INFO || Added aliases 'CloudEventsConverter' and 'CloudEvents' to plugin 'io.debezium.converters.CloudEventsConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,063 INFO || Added aliases 'ByteArrayConverter' and 'ByteArray' to plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 
2020-10-16 09:25:19,063 INFO || Added aliases 'DoubleConverter' and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,063 INFO || Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,063 INFO || Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,063 INFO || Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,063 INFO || Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,063 INFO || Added aliases 'JsonConverter' and 'Json' to plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,064 INFO || Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,064 INFO || Added aliases 'ByteBufferConverter' and 'ByteBuffer' to plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,064 INFO || Added aliases 'ByteArrayConverter' and 'ByteArray' to plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,064 INFO || Added aliases 'DoubleConverter' and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,064 INFO || Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,064 INFO || Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,064 INFO || Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,064 INFO || Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,064 INFO || Added aliases 'JsonConverter' and 'Json' to plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,065 INFO || Added alias 'SimpleHeaderConverter' to plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,065 INFO || Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,065 INFO || Added alias 'ExtractNewDocumentState' to plugin 'io.debezium.connector.mongodb.transforms.ExtractNewDocumentState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,065 INFO || Added alias 'ByLogicalTableRouter' to plugin 'io.debezium.transforms.ByLogicalTableRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,065 INFO || Added alias 'ExtractNewRecordState' to plugin 
'io.debezium.transforms.ExtractNewRecordState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,065 INFO || Added alias 'EventRouter' to plugin 'io.debezium.transforms.outbox.EventRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,065 INFO || Added aliases 'PredicatedTransformation' and 'Predicated' to plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,065 INFO || Added alias 'Filter' to plugin 'org.apache.kafka.connect.transforms.Filter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,066 INFO || Added alias 'RegexRouter' to plugin 'org.apache.kafka.connect.transforms.RegexRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,066 INFO || Added alias 'TimestampRouter' to plugin 'org.apache.kafka.connect.transforms.TimestampRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,066 INFO || Added alias 'ValueToKey' to plugin 'org.apache.kafka.connect.transforms.ValueToKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,066 INFO || Added alias 'HasHeaderKey' to plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,066 INFO || Added alias 'RecordIsTombstone' to plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,066 INFO || Added alias 'TopicNameMatches' to plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,066 INFO || Added alias 'BasicAuthSecurityRestExtension' to plugin 
'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,066 INFO || Added aliases 'AllConnectorClientConfigOverridePolicy' and 'All' to plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,066 INFO || Added aliases 'NoneConnectorClientConfigOverridePolicy' and 'None' to plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,066 INFO || Added aliases 'PrincipalConnectorClientConfigOverridePolicy' and 'Principal' to plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-16 09:25:19,102 INFO || DistributedConfig values: access.control.allow.methods = access.control.allow.origin = admin.listeners = null bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = config.providers = [] config.storage.replication.factor = 1 config.storage.topic = _sysint_connect_configs connect.protocol = sessioned connections.max.idle.ms = 540000 connector.client.config.override.policy = None group.id = sysint-kafka-connect header.converter = class org.apache.kafka.connect.storage.SimpleHeaderConverter heartbeat.interval.ms = 3000 inter.worker.key.generation.algorithm = HmacSHA256 inter.worker.key.size = null inter.worker.key.ttl.ms = 3600000 inter.worker.signature.algorithm = HmacSHA256 inter.worker.verification.algorithms = [HmacSHA256] internal.key.converter = class org.apache.kafka.connect.json.JsonConverter internal.value.converter = class org.apache.kafka.connect.json.JsonConverter key.converter = class org.apache.kafka.connect.json.JsonConverter listeners = null metadata.max.age.ms = 300000 
metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 offset.flush.interval.ms = 5000 offset.flush.timeout.ms = 5000 offset.storage.partitions = 25 offset.storage.replication.factor = 1 offset.storage.topic = _sysint_connect_offsets plugin.path = [/kafka/connect] rebalance.timeout.ms = 60000 receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 40000 response.http.headers.config = rest.advertised.host.name = 172.19.0.6 rest.advertised.listener = null rest.advertised.port = 8083 rest.extension.classes = [] rest.host.name = 172.19.0.6 rest.port = 8083 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI scheduled.rebalance.max.delay.ms = 300000 security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.client.auth = none ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS status.storage.partitions = 5 status.storage.replication.factor = 1 status.storage.topic = _sysint_connect_status 
task.shutdown.graceful.timeout.ms = 10000 topic.creation.enable = true topic.tracking.allow.reset = true topic.tracking.enable = true value.converter = class org.apache.kafka.connect.json.JsonConverter worker.sync.timeout.ms = 3000 worker.unsync.backoff.ms = 300000 [org.apache.kafka.connect.runtime.distributed.DistributedConfig] 2020-10-16 09:25:19,102 INFO || Worker configuration property 'internal.key.converter' is deprecated and may be removed in an upcoming release. The specified value 'org.apache.kafka.connect.json.JsonConverter' matches the default, so this property can be safely removed from the worker configuration. [org.apache.kafka.connect.runtime.WorkerConfig] 2020-10-16 09:25:19,102 INFO || Worker configuration property 'internal.value.converter' is deprecated and may be removed in an upcoming release. The specified value 'org.apache.kafka.connect.json.JsonConverter' matches the default, so this property can be safely removed from the worker configuration. [org.apache.kafka.connect.runtime.WorkerConfig] 2020-10-16 09:25:19,103 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,105 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null 
sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,158 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,159 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,159 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,159 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,159 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,159 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,159 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,159 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,159 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,159 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,159 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,159 INFO || Kafka startTimeMs: 1602840319159 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,374 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,387 INFO || Logging initialized @2246ms to org.eclipse.jetty.util.log.Slf4jLog [org.eclipse.jetty.util.log] 2020-10-16 09:25:19,415 INFO || Added connector for http://172.19.0.6:8083 [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:19,416 INFO || Initializing REST server [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:19,420 INFO || jetty-9.4.24.v20191120; built: 2019-11-20T21:37:49.771Z; git: 363d5f2df3a8a28de40604320230664b9c793c16; jvm 11.0.8+10-LTS [org.eclipse.jetty.server.Server] 2020-10-16 09:25:19,436 INFO || Started http_172.19.0.68083@c808207{HTTP/1.1,[http/1.1]}{172.19.0.6:8083} [org.eclipse.jetty.server.AbstractConnector] 2020-10-16 09:25:19,437 INFO || Started @2296ms [org.eclipse.jetty.server.Server] 2020-10-16 09:25:19,451 INFO || Advertised URI: http://172.19.0.6:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:19,451 INFO || REST server listening at http://172.19.0.6:8083/, advertising URL http://172.19.0.6:8083/ 
[org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:19,451 INFO || Advertised URI: http://172.19.0.6:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:19,451 INFO || REST admin endpoints at http://172.19.0.6:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:19,451 INFO || Advertised URI: http://172.19.0.6:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:19,452 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,452 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null 
ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,456 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,457 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,457 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,457 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,457 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,457 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,457 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,457 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,457 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,457 WARN || The configuration 'key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,457 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,457 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,457 INFO || Kafka startTimeMs: 1602840319457 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,465 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,471 INFO || Setting up None Policy for ConnectorClientConfigOverride. This will disallow any client configuration to be overridden [org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy] 2020-10-16 09:25:19,476 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,476 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, 
TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 WARN || The configuration 'key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,479 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,479 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,479 INFO || Kafka startTimeMs: 1602840319479 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,487 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,492 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,492 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,492 INFO || Kafka startTimeMs: 1602840319491 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,594 INFO || JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:25:19,595 INFO || JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:25:19,595 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,598 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 
sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,606 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,606 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,606 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,606 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,606 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,606 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,606 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,607 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,608 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,608 INFO || Kafka startTimeMs: 1602840319607 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,622 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,630 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,630 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,632 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,633 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,633 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,633 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,633 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,633 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,633 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,633 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,633 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,633 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,633 INFO || Kafka startTimeMs: 1602840319633 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,644 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,650 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,650 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,652 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,652 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,652 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,652 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,652 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,652 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,652 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,652 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,652 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,652 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,653 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,653 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,653 INFO || Kafka startTimeMs: 1602840319653 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,659 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,669 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,669 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,671 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,671 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,671 INFO || Kafka startTimeMs: 1602840319671 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,680 INFO || Kafka cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-16 09:25:19,695 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,696 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,696 INFO || Kafka startTimeMs: 1602840319695 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,698 INFO || Kafka Connect distributed worker initialization took 2110ms [org.apache.kafka.connect.cli.ConnectDistributed] 2020-10-16 09:25:19,698 INFO || Kafka Connect starting [org.apache.kafka.connect.runtime.Connect] 2020-10-16 09:25:19,698 INFO || Initializing REST resources [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:19,699 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder starting [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 
09:25:19,701 INFO || Worker starting [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:19,701 INFO || Starting KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-16 09:25:19,701 INFO || Starting KafkaBasedLog with topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:19,702 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 
2020-10-16 09:25:19,703 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,703 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,703 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,703 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,704 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,704 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,704 INFO || Kafka startTimeMs: 1602840319704 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,734 INFO || Adding admin resources to main listener [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:19,750 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = producer-1 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism 
= GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,762 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,762 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,762 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,762 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,762 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,762 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,762 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,763 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,763 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,763 INFO || Kafka startTimeMs: 1602840319763 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,769 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = consumer-sysint-kafka-connect-1 client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = sysint-kafka-connect group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters 
= [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,777 INFO || [Producer clientId=producer-1] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:19,790 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,793 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,794 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,794 INFO || Kafka startTimeMs: 1602840319793 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,799 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:19,800 INFO || DefaultSessionIdManager workerName=node0 [org.eclipse.jetty.server.session] 2020-10-16 09:25:19,800 INFO || No SessionScavenger set, using defaults [org.eclipse.jetty.server.session] 2020-10-16 09:25:19,801 INFO || node0 Scavenging every 600000ms [org.eclipse.jetty.server.session] 2020-10-16 09:25:19,831 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_offsets-0, _sysint_connect_offsets-5, _sysint_connect_offsets-10, _sysint_connect_offsets-20, _sysint_connect_offsets-15, _sysint_connect_offsets-9, _sysint_connect_offsets-11, _sysint_connect_offsets-4, _sysint_connect_offsets-16, _sysint_connect_offsets-17, _sysint_connect_offsets-3, _sysint_connect_offsets-24, _sysint_connect_offsets-23, _sysint_connect_offsets-13, _sysint_connect_offsets-18, _sysint_connect_offsets-22, _sysint_connect_offsets-2, _sysint_connect_offsets-8, _sysint_connect_offsets-12, _sysint_connect_offsets-19, _sysint_connect_offsets-14, _sysint_connect_offsets-1, _sysint_connect_offsets-6, _sysint_connect_offsets-7, _sysint_connect_offsets-21 [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 
2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-5 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-10 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-20 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-15 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-9 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-11 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-4 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-16 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer 
clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-17 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-3 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-24 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-23 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-13 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-18 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-22 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-2 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, 
groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-8 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-12 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-19 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-14 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-1 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-6 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,833 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-7 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,834 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-21 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting 
offset for partition _sysint_connect_offsets-24 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-18 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-16 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-22 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-20 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-9 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-7 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-13 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-11 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-1 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-5 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-3 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-23 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-17 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-15 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-21 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-19 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-10 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,860 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-8 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,861 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-14 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,861 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-12 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,861 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-2 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,861 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,861 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-6 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,861 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-4 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,905 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:19,905 INFO || Started KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:19,905 INFO || Finished reading offsets topic and starting KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-16 09:25:19,909 INFO || Worker started [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:19,909 INFO || Starting KafkaBasedLog with topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:19,910 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI 
security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,911 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,912 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,912 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,912 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,912 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,912 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,912 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,912 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,912 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,912 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,912 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,912 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,912 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,912 INFO || Kafka startTimeMs: 1602840319912 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,942 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = producer-2 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 0 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit 
sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,944 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,944 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,944 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,944 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:19,945 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,945 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,945 INFO || Kafka startTimeMs: 1602840319945 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,946 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = consumer-sysint-kafka-connect-2 client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = sysint-kafka-connect group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null 
sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,948 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,948 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,948 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,948 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,948 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,948 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:19,949 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,949 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,949 INFO || Kafka startTimeMs: 1602840319949 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,952 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:19,955 INFO || [Producer clientId=producer-2] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:19,955 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_status-0, _sysint_connect_status-4, _sysint_connect_status-1, _sysint_connect_status-2, _sysint_connect_status-3 [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-16 09:25:19,960 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,960 INFO || [Consumer 
clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-4 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,960 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-1 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,960 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-2 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,960 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-3 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,964 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-2 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,964 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-1 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,964 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,964 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-4 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,965 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-3 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:19,986 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:19,986 INFO || Started KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:19,986 INFO || Starting KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-16 09:25:19,986 INFO || Starting KafkaBasedLog with topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:19,987 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null 
ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,988 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,988 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,988 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,988 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,988 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,988 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,988 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,988 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,988 WARN || The configuration 'key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,988 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:19,989 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,989 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:19,989 INFO || Kafka startTimeMs: 1602840319989 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,010 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = producer-3 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 
sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,014 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,014 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,014 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,014 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,014 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,014 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,014 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,014 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,014 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,015 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,015 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,015 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,015 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,015 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,015 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,015 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,016 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,016 WARN || The configuration 'rest.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,016 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,016 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,016 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,016 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,016 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,016 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,016 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,016 INFO || Kafka startTimeMs: 1602840320016 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,017 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = consumer-sysint-kafka-connect-3 client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = sysint-kafka-connect group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class 
org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,019 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,020 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,020 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,020 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,020 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,020 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,020 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,020 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,020 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,020 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,020 INFO || [Producer clientId=producer-3] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:20,020 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,021 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,021 INFO || Kafka startTimeMs: 1602840320020 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,024 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:20,028 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_configs-0 [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-16 09:25:20,028 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_configs-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:20,034 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_configs-0 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:20,042 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:20,042 INFO || Started KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:25:20,042 INFO || Started KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-16 09:25:20,042 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder started [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:20,056 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:20,056 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Discovered group coordinator kafka:29093 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:20,058 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Rebalance started [org.apache.kafka.connect.runtime.distributed.WorkerCoordinator] 2020-10-16 09:25:20,058 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:20,064 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Join group failed with org.apache.kafka.common.errors.MemberIdRequiredException: The group member needs to have a valid member id before actually entering a consumer group. 
[org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:20,064 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:20,084 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Successfully joined group with generation 6 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:20,086 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Joined group at generation 6 with protocol version 2 and got assignment: Assignment{error=0, leader='connect-1-3cfb1748-810f-44fb-b6f5-bf7fcb46b727', leaderUrl='http://172.19.0.6:8083/', offset=7, connectorIds=[sysint-sqlserver-tec-runonly-connector, sysint-sqlserver-tec-runinit-connector], taskIds=[sysint-sqlserver-tec-runonly-connector-0, sysint-sqlserver-tec-runinit-connector-0], revokedConnectorIds=[], revokedTaskIds=[], delay=0} with rebalance delay: 0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:20,087 WARN || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Catching up to assignment's config offset. [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:20,088 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Current config state offset -1 is behind group assignment 7, reading to end of config log [org.apache.kafka.connect.runtime.distributed.DistributedHerder] Oct 16, 2020 9:25:20 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource will be ignored. 
Oct 16, 2020 9:25:20 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource will be ignored. Oct 16, 2020 9:25:20 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource will be ignored. Oct 16, 2020 9:25:20 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.RootResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.RootResource will be ignored. Oct 16, 2020 9:25:20 AM org.glassfish.jersey.internal.Errors logErrors WARNING: The following warnings have been detected: WARNING: The (sub)resource method listLoggers in org.apache.kafka.connect.runtime.rest.resources.LoggingResource contains empty path annotation. WARNING: The (sub)resource method createConnector in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation. WARNING: The (sub)resource method listConnectors in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation. WARNING: The (sub)resource method listConnectorPlugins in org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource contains empty path annotation. 
WARNING: The (sub)resource method serverInfo in org.apache.kafka.connect.runtime.rest.resources.RootResource contains empty path annotation. 2020-10-16 09:25:20,223 INFO || Started o.e.j.s.ServletContextHandler@100c8b75{/,null,AVAILABLE} [org.eclipse.jetty.server.handler.ContextHandler] 2020-10-16 09:25:20,223 INFO || REST resources initialized; server is started and ready to handle requests [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:25:20,224 INFO || Kafka Connect started [org.apache.kafka.connect.runtime.Connect] 2020-10-16 09:25:20,540 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Finished reading to end of log and updated config snapshot, new config log offset: 7 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:20,540 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connectors and tasks using config offset 7 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:20,543 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-tec-runinit-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:20,542 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector sysint-sqlserver-tec-runonly-connector [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:20,543 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-tec-runonly-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:20,543 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector sysint-sqlserver-tec-runinit-connector [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:20,546 INFO || Creating task sysint-sqlserver-tec-runonly-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,546 INFO || 
Creating connector sysint-sqlserver-tec-runinit-connector of type io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,546 INFO || Creating task sysint-sqlserver-tec-runinit-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,546 INFO || Creating connector sysint-sqlserver-tec-runonly-connector of type io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,551 INFO || ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig] 2020-10-16 09:25:20,551 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:25:20,551 INFO || ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true 
errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig] 2020-10-16 09:25:20,551 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:25:20,564 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class 
org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:25:20,567 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = 
transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:25:20,567 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:25:20,565 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true 
errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:25:20,575 INFO || Instantiated connector sysint-sqlserver-tec-runonly-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,574 INFO || Instantiated connector sysint-sqlserver-tec-runinit-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,578 INFO || Finished creating connector sysint-sqlserver-tec-runonly-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,580 INFO || Finished 
creating connector sysint-sqlserver-tec-runinit-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,582 INFO || TaskConfig values: task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.TaskConfig] 2020-10-16 09:25:20,582 INFO || TaskConfig values: task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.TaskConfig] 2020-10-16 09:25:20,583 INFO || Instantiated task sysint-sqlserver-tec-runinit-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,583 INFO || Instantiated task sysint-sqlserver-tec-runonly-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,583 INFO || JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:25:20,583 INFO || JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:25:20,586 INFO || JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:25:20,586 INFO || Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-tec-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,586 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-tec-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,586 INFO || Set up 
the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-tec-runonly-connector-0 using the worker config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,586 INFO || JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-16 09:25:20,589 INFO || Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-tec-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,589 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-tec-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,589 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-tec-runinit-connector-0 using the worker config [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,590 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:25:20,590 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true 
errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:25:20,593 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter 
[org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:25:20,593 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:25:20,612 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,613 INFO || Initializing: 
org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:25:20,618 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connector-producer-sysint-sqlserver-tec-runinit-connector-0 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] 
ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,620 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,621 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,621 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,621 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,621 INFO || Kafka startTimeMs: 1602840320621 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,628 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connector-producer-sysint-sqlserver-tec-runonly-connector-0 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 
metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,637 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,637 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,637 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,637 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,637 INFO || Kafka startTimeMs: 1602840320637 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,646 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Finished starting connectors and tasks [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:20,650 INFO || [Producer clientId=connector-producer-sysint-sqlserver-tec-runinit-connector-0] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:20,661 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Session key updated [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:25:20,669 INFO || [Producer clientId=connector-producer-sysint-sqlserver-tec-runonly-connector-0] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:25:20,682 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,683 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,683 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,685 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,685 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,685 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,685 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:25:20,686 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || table.whitelist = dbo.tab1,dbo.tab2 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter 
[io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || database.dbname = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || database.server.name = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || 
value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || name = sysint-sqlserver-tec-runonly-connector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || retriable.restart.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,687 INFO || snapshot.mode = schema_only [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,688 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,688 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,688 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,688 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,688 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:25:20,689 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,689 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,689 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,690 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,690 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,690 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,690 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,690 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,690 INFO || table.whitelist = dbo.VatType,dbo.Registry [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,691 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,692 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,692 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,692 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,692 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,692 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,692 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter 
[io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,692 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,692 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,693 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,692 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,693 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,693 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,693 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,693 INFO || database.dbname = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,693 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,693 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,693 INFO || database.server.name = tec [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,693 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,693 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,693 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask] 
2020-10-16 09:25:20,693 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,694 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,694 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,694 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,694 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,694 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,694 INFO || name = sysint-sqlserver-tec-runinit-connector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,694 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,694 INFO || retriable.restart.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,694 INFO || snapshot.mode = initial [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,694 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:20,694 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,694 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,694 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:25:20,694 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,694 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,695 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,695 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,695 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,695 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,695 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-16 09:25:20,695 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:25:20,935 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=tec-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=tec-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-16 09:25:20,935 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=tec-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-16 09:25:20,935 INFO || Requested thread factory for connector SqlServerConnector, id = tec named = db-history-config-check [io.debezium.util.Threads] 2020-10-16 09:25:20,937 INFO || ProducerConfig values: acks = 1 batch.size = 32768 bootstrap.servers = [kafka:29093] buffer.memory = 1048576 client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 
 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,938 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=tec-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=tec-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-16 09:25:20,939 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, 
bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=tec-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-16 09:25:20,939 INFO || Requested thread factory for connector SqlServerConnector, id = tec named = db-history-config-check [io.debezium.util.Threads] 2020-10-16 09:25:20,940 WARN || Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=schema-history,server=tec': debezium.sql_server:type=connector-metrics,context=schema-history,server=tec [io.debezium.relational.history.DatabaseHistoryMetrics] 2020-10-16 09:25:20,940 INFO || ProducerConfig values: acks = 1 batch.size = 32768 bootstrap.servers = [kafka:29093] buffer.memory = 1048576 client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 
sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-16 09:25:20,942 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,944 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,944 INFO || Kafka startTimeMs: 1602840320938 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,946 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,946 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,946 INFO || Kafka startTimeMs: 1602840320941 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,947 WARN || Error registering AppInfo mbean [org.apache.kafka.common.utils.AppInfoParser] javax.management.InstanceAlreadyExistsException: kafka.producer:type=app-info,id=tec-dbhistory at java.management/com.sun.jmx.mbeanserver.Repository.addMBean(Repository.java:436) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerWithRepository(DefaultMBeanServerInterceptor.java:1855) at 
java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerDynamicMBean(DefaultMBeanServerInterceptor.java:955) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerObject(DefaultMBeanServerInterceptor.java:890) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:320) at java.management/com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522) at org.apache.kafka.common.utils.AppInfoParser.registerAppInfo(AppInfoParser.java:64) at org.apache.kafka.clients.producer.KafkaProducer.(KafkaProducer.java:435) at org.apache.kafka.clients.producer.KafkaProducer.(KafkaProducer.java:301) at io.debezium.relational.history.KafkaDatabaseHistory.start(KafkaDatabaseHistory.java:235) at io.debezium.relational.HistorizedRelationalDatabaseSchema.(HistorizedRelationalDatabaseSchema.java:40) at io.debezium.connector.sqlserver.SqlServerDatabaseSchema.(SqlServerDatabaseSchema.java:34) at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:83) at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106) at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:232) at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185) at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-16 09:25:20,952 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] 
check.crcs = true client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = tec-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = 
TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,954 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,954 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,948 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = tec-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:20,956 INFO || Kafka startTimeMs: 1602840320954 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,952 INFO || [Producer clientId=tec-dbhistory] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:20,957 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,957 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,957 INFO || Kafka startTimeMs: 1602840320956 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:20,958 WARN || Error registering AppInfo mbean [org.apache.kafka.common.utils.AppInfoParser] javax.management.InstanceAlreadyExistsException: kafka.consumer:type=app-info,id=tec-dbhistory at java.management/com.sun.jmx.mbeanserver.Repository.addMBean(Repository.java:436) at 
java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerWithRepository(DefaultMBeanServerInterceptor.java:1855) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerDynamicMBean(DefaultMBeanServerInterceptor.java:955) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerObject(DefaultMBeanServerInterceptor.java:890) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:320) at java.management/com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522) at org.apache.kafka.common.utils.AppInfoParser.registerAppInfo(AppInfoParser.java:64) at org.apache.kafka.clients.consumer.KafkaConsumer.(KafkaConsumer.java:814) at org.apache.kafka.clients.consumer.KafkaConsumer.(KafkaConsumer.java:667) at org.apache.kafka.clients.consumer.KafkaConsumer.(KafkaConsumer.java:647) at io.debezium.relational.history.KafkaDatabaseHistory.storageExists(KafkaDatabaseHistory.java:352) at io.debezium.relational.HistorizedRelationalDatabaseSchema.initializeStorage(HistorizedRelationalDatabaseSchema.java:67) at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:84) at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106) at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:232) at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185) at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-16 09:25:20,958 INFO || [Producer 
clientId=tec-dbhistory] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:20,961 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:20,963 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:21,164 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:25:21,173 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 
transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:25:21,173 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-16 09:25:21,174 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-tec-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class 
com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-16 09:25:21,414 INFO || Found previous offset SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=tec, changeLsn=NULL, commitLsn=0000003f:00000fd0:00c5, eventSerialNo=null, snapshot=FALSE, sourceTime=null], partition={server=tec}, snapshotCompleted=true, eventSerialNo=0] [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:25:21,415 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = tec-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = 
false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:21,417 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,417 INFO || Kafka commitId: 
62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,417 INFO || Kafka startTimeMs: 1602840321417 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,418 INFO || Requested thread factory for connector SqlServerConnector, id = tec named = change-event-source-coordinator [io.debezium.util.Threads] 2020-10-16 09:25:21,420 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:21,420 INFO || Creating thread debezium-sqlserverconnector-tec-change-event-source-coordinator [io.debezium.util.Threads] 2020-10-16 09:25:21,420 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:21,423 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = tec-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 
reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:21,424 INFO || Metrics registered [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:25:21,425 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,425 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,425 INFO || Kafka startTimeMs: 1602840321425 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,425 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:25:21,425 INFO || Creating thread debezium-sqlserverconnector-tec-db-history-config-check [io.debezium.util.Threads] 
2020-10-16 09:25:21,427 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory-topic-check connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:21,428 WARN || The configuration 'value.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:21,428 WARN || The configuration 'acks' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:21,428 WARN || The configuration 'batch.size' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:21,428 WARN || The configuration 'max.block.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:21,428 WARN || The configuration 'buffer.memory' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:21,428 WARN || The configuration 'key.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:21,428 WARN || The configuration 'linger.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-16 09:25:21,428 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,428 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,428 INFO || Kafka startTimeMs: 1602840321428 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,431 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:21,433 INFO || No previous offset has been found [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:25:21,434 INFO || According to the connector configuration only schema will be snapshotted [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:25:21,434 INFO || Snapshot step 1 - Preparing [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:25:21,437 INFO || Snapshot step 2 - Determining captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:25:21,447 INFO || Started database history recovery [io.debezium.relational.history.DatabaseHistoryMetrics] 
2020-10-16 09:25:21,455 INFO || Database history topic 'it.company.sysint.data.cdc.db.history.tec' has correct settings [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-16 09:25:21,460 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = tec-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = tec-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null 
send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-16 09:25:21,461 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,461 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,461 INFO || Kafka startTimeMs: 1602840321461 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-16 09:25:21,462 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Subscribed to topic(s): it.company.sysint.data.cdc.db.history.tec [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-16 09:25:21,469 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Cluster ID: MeAcNOGVTamgQFEdfhWOOw [org.apache.kafka.clients.Metadata] 2020-10-16 09:25:21,472 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Discovered group coordinator kafka:29093 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:21,473 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:21,479 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Join group failed with org.apache.kafka.common.errors.MemberIdRequiredException: The group member needs to have a valid member id before actually 
entering a consumer group. [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:21,479 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:21,485 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Finished assignment for group at generation 1: {tec-dbhistory-d79ffb1f-1716-4740-818c-3aa3679eb363=Assignment(partitions=[it.company.sysint.data.cdc.db.history.tec-0])} [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-16 09:25:21,488 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Successfully joined group with generation 1 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:21,488 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Notifying assignor about the new Assignment(partitions=[it.company.sysint.data.cdc.db.history.tec-0]) [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-16 09:25:21,488 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Adding newly assigned partitions: it.company.sysint.data.cdc.db.history.tec-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-16 09:25:21,494 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Found no committed offset for partition it.company.sysint.data.cdc.db.history.tec-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-16 09:25:21,497 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Resetting offset for partition it.company.sysint.data.cdc.db.history.tec-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-16 09:25:21,523 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. 
[io.debezium.config.Configuration] 2020-10-16 09:25:21,525 INFO || Snapshot step 3 - Locking captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:25:21,525 INFO || Schema locking was disabled in connector configuration [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:25:21,525 INFO || Snapshot step 4 - Determining snapshot offset [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:25:21,528 INFO || Snapshot step 5 - Reading structure of captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:25:21,528 INFO || Reading structure of schema 'tec' [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:25:21,532 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Revoke previously assigned partitions it.company.sysint.data.cdc.db.history.tec-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-16 09:25:21,532 INFO || [Consumer clientId=tec-dbhistory, groupId=tec-dbhistory] Member tec-dbhistory-d79ffb1f-1716-4740-818c-3aa3679eb363 sending LeaveGroup request to coordinator kafka:29093 (id: 2147483646 rack: null) due to the consumer is being closed [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:25:21,536 INFO || Finished database history recovery of 21 change(s) in 89 ms [io.debezium.relational.history.DatabaseHistoryMetrics] 2020-10-16 09:25:21,547 INFO || Requested thread factory for connector SqlServerConnector, id = tec named = change-event-source-coordinator [io.debezium.util.Threads] 2020-10-16 09:25:21,547 INFO || Creating thread debezium-sqlserverconnector-tec-change-event-source-coordinator [io.debezium.util.Threads] 2020-10-16 09:25:21,547 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:21,547 WARN || 
Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=snapshot,server=tec': debezium.sql_server:type=connector-metrics,context=snapshot,server=tec [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:25:21,548 WARN || Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=streaming,server=tec': debezium.sql_server:type=connector-metrics,context=streaming,server=tec [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:25:21,548 INFO || Metrics registered [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:25:21,548 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:25:21,548 INFO || A previous offset indicating a completed snapshot has been found. Neither schema nor data will be snapshotted. [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:25:21,548 INFO || Snapshot ended with SnapshotResult [status=SKIPPED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=tec, changeLsn=NULL, commitLsn=0000003f:00000fd0:00c5, eventSerialNo=null, snapshot=FALSE, sourceTime=null], partition={server=tec}, snapshotCompleted=true, eventSerialNo=0]] [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:25:21,550 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-16 09:25:21,550 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:25:21,707 INFO || CDC is enabled for table Capture instance "dbo_Payment" [sourceTableId=tec.dbo.Payment, changeTableId=tec.cdc.dbo_Payment_CT, startLsn=00000037:00000ac0:00b1, changeTableObjectId=1294627655, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-16 09:25:21,707 INFO || Last position recorded in offsets 
is 0000003f:00000fd0:00c5(NULL)[0] [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-16 09:25:22,035 WARN || Cannot parse column default value '(NULL)' to type 'int'. [io.debezium.connector.sqlserver.SqlServerDefaultValueConverter] java.lang.NumberFormatException: For input string: "UL" at java.base/java.lang.NumberFormatException.forInputString(NumberFormatException.java:65) at java.base/java.lang.Integer.parseInt(Integer.java:652) at java.base/java.lang.Integer.parseInt(Integer.java:770) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$1(SqlServerDefaultValueConverter.java:115) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82) at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512) at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181) at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126) at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183) at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122) at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-16 09:25:22,041 WARN || Cannot parse column default value '(NULL)' to type 'int'. 
[io.debezium.connector.sqlserver.SqlServerDefaultValueConverter] java.lang.NumberFormatException: For input string: "UL" at java.base/java.lang.NumberFormatException.forInputString(NumberFormatException.java:65) at java.base/java.lang.Integer.parseInt(Integer.java:652) at java.base/java.lang.Integer.parseInt(Integer.java:770) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$1(SqlServerDefaultValueConverter.java:115) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82) at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512) at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181) at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126) at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183) at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122) at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-16 09:25:22,544 INFO || Snapshot step 6 - Persisting schema history [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 09:25:22,597 INFO || Snapshot step 7 - Skipping snapshotting of data [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-16 
09:25:22,624 INFO || Snapshot - Final stage [io.debezium.pipeline.source.AbstractSnapshotChangeEventSource] 2020-10-16 09:25:22,658 INFO || Removing locking timeout [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-16 09:25:22,668 INFO || Snapshot ended with SnapshotResult [status=COMPLETED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=tec, changeLsn=NULL, commitLsn=0000003f:000012c8:0071, eventSerialNo=null, snapshot=FALSE, sourceTime=2020-10-16T09:25:22.595Z], partition={server=tec}, snapshotCompleted=true, eventSerialNo=1]] [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:25:22,669 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-16 09:25:22,669 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:25:22,699 INFO || CDC is enabled for table Capture instance "dbo_Registry" [sourceTableId=tec.dbo.Registry, changeTableId=tec.cdc.dbo_Registry_CT, startLsn=0000003f:00000768:011f, changeTableObjectId=779149821, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-16 09:25:22,699 INFO || CDC is enabled for table Capture instance "dbo_VatType" [sourceTableId=tec.dbo.VatType, changeTableId=tec.cdc.dbo_VatType_CT, startLsn=0000003e:000013b0:0042, changeTableObjectId=683149479, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-16 09:25:22,699 INFO || Last position recorded in offsets is 0000003f:000012c8:0071(NULL)[1] [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-16 09:25:24,563 WARN || [Producer clientId=connector-producer-sysint-sqlserver-tec-runinit-connector-0] Error while fetching metadata with correlation id 3 : 
{it.company.sysint.data.cdc.tables.tec.transaction=LEADER_NOT_AVAILABLE} [org.apache.kafka.clients.NetworkClient] 2020-10-16 09:25:25,630 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:25,630 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:25,637 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Finished commitOffsets successfully in 7 ms [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:25,646 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:25,646 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:25,648 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Finished commitOffsets successfully in 2 ms [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:30,638 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:30,638 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:30,649 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:30,649 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:35,638 INFO || 
WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:35,638 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:35,649 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:35,649 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:40,639 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:40,639 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:40,649 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:40,650 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:45,639 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:45,639 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:45,650 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:45,650 INFO || 
WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:50,639 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:50,640 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:50,650 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:50,650 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:55,640 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:55,640 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:55,651 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:25:55,651 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:00,641 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:00,641 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:00,651 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:00,651 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:05,641 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:05,641 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:05,651 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:05,652 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:08,933 WARN || [Producer clientId=connector-producer-sysint-sqlserver-tec-runonly-connector-0] Error while fetching metadata with correlation id 18 : {it.company.sysint.data.cdc.tables.tec.dbo.Payment=LEADER_NOT_AVAILABLE} [org.apache.kafka.clients.NetworkClient] 2020-10-16 09:26:10,641 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:10,642 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:10,652 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:10,652 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset 
commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:10,654 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Finished commitOffsets successfully in 2 ms [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:15,642 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:15,642 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:15,655 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:15,655 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:20,642 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:20,642 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:20,655 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:20,655 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:25,430 INFO || Kafka Connect stopping [org.apache.kafka.connect.runtime.Connect] 2020-10-16 09:26:25,430 INFO || Stopping REST server [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:26:25,434 INFO || Stopped 
http_172.19.0.68083@c808207{HTTP/1.1,[http/1.1]}{172.19.0.6:8083} [org.eclipse.jetty.server.AbstractConnector] 2020-10-16 09:26:25,434 INFO || node0 Stopped scavenging [org.eclipse.jetty.server.session] 2020-10-16 09:26:25,435 INFO || REST server stopped [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-16 09:26:25,436 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder stopping [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:26:25,436 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Stopping connectors and tasks that are still assigned to this worker. [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:26:25,437 INFO || Stopping connector sysint-sqlserver-tec-runonly-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:26:25,437 INFO || Scheduled shutdown for WorkerConnector{id=sysint-sqlserver-tec-runonly-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-16 09:26:25,437 INFO || Stopping task sysint-sqlserver-tec-runonly-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:26:25,440 INFO || Stopping task sysint-sqlserver-tec-runinit-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:26:25,440 INFO || Stopping down connector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:26:25,440 INFO || Stopping connector sysint-sqlserver-tec-runinit-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:26:25,438 INFO || Completed shutdown for WorkerConnector{id=sysint-sqlserver-tec-runonly-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-16 09:26:25,440 INFO || Stopping down connector [io.debezium.connector.common.BaseSourceTask] 2020-10-16 09:26:25,443 INFO || Scheduled shutdown for WorkerConnector{id=sysint-sqlserver-tec-runinit-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-16 09:26:25,444 INFO || Completed shutdown for 
WorkerConnector{id=sysint-sqlserver-tec-runinit-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-16 09:26:25,548 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:25,550 INFO || Finished streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:26:25,550 INFO || Connected metrics set to 'false' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-16 09:26:25,557 INFO || [Producer clientId=tec-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:26:25,561 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:25,561 INFO || [Producer clientId=connector-producer-sysint-sqlserver-tec-runinit-connector-0] Closing the Kafka producer with timeoutMillis = 30000 ms. [org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:26:25,669 INFO || Finished streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-16 09:26:25,669 INFO || Connected metrics set to 'false' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-16 09:26:25,674 INFO || [Producer clientId=tec-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. 
[org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:26:25,911 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:25,911 INFO || WorkerSourceTask{id=sysint-sqlserver-tec-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-16 09:26:25,911 INFO || [Producer clientId=connector-producer-sysint-sqlserver-tec-runonly-connector-0] Closing the Kafka producer with timeoutMillis = 30000 ms. [org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:26:25,915 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Member connect-1-3cfb1748-810f-44fb-b6f5-bf7fcb46b727 sending LeaveGroup request to coordinator kafka:29093 (id: 2147483646 rack: null) due to the consumer is being closed [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:26:25,915 WARN || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Close timed out with 1 pending requests to coordinator, terminating client connections [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-16 09:26:25,917 INFO || Stopping KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:26:25,917 INFO || [Producer clientId=producer-2] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. 
[org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:26:25,921 INFO || Stopped KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:26:25,922 INFO || Closing KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-16 09:26:25,922 INFO || Stopping KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:26:25,922 INFO || [Producer clientId=producer-3] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:26:25,924 INFO || Stopped KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:26:25,924 INFO || Closed KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-16 09:26:25,924 INFO || Worker stopping [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:26:25,925 INFO || Stopping KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-16 09:26:25,926 INFO || Stopping KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:26:25,926 INFO || [Producer clientId=producer-1] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. 
[org.apache.kafka.clients.producer.KafkaProducer] 2020-10-16 09:26:25,927 INFO || Stopped KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-16 09:26:25,927 INFO || Stopped KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-16 09:26:25,927 INFO || Worker stopped [org.apache.kafka.connect.runtime.Worker] 2020-10-16 09:26:25,928 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder stopped [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:26:25,930 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder stopped [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-16 09:26:25,930 INFO || Kafka Connect stopped [org.apache.kafka.connect.runtime.Connect]